diff --git a/.gitignore b/.gitignore
index c5d6618e09..9e8c2abd19 100644
--- a/.gitignore
+++ b/.gitignore
@@ -198,6 +198,9 @@ FakesAssemblies/
# Visual Studio 6 workspace options file
*.opt
+# Files generated by samples with custom Direct2D effects
+*.fxlib
+
# Custom ignores
gallery.xml
project.lock.json
diff --git a/README.md b/README.md
index 6533fdf1e1..4de269934c 100644
--- a/README.md
+++ b/README.md
@@ -67,57 +67,61 @@ For additional Windows samples, see [Windows on GitHub](http://microsoft.github.
@@ -408,15 +412,16 @@ For additional Windows samples, see [Windows on GitHub](http://microsoft.github.
Composition visual without framework
+ Direct2D advanced color image rendering
Direct2D custom image effects
- Direct2D gradient mesh
+ Direct2D gradient mesh
Direct2D SVG image rendering
Efficient animations (HTML)
- Transform3D animations
+ Transform3D animations
Transform3D parallax
@@ -431,10 +436,11 @@ For additional Windows samples, see [Windows on GitHub](http://microsoft.github.
Holographic spatial mapping
+ Holographic spatial stage
Holographic voice input
- Spatial interaction source
+ Spatial interaction source
Tag-along hologram
diff --git a/Samples/360VideoPlayback/README.md b/Samples/360VideoPlayback/README.md
new file mode 100644
index 0000000000..ccf1d64551
--- /dev/null
+++ b/Samples/360VideoPlayback/README.md
@@ -0,0 +1,114 @@
+
+
+# 360-degree Video Playback sample
+
+Shows how to play 360-degree video.
+
+The sample uses the MediaPlayer class for 360-degree playback.
+For flat devices (PC, Xbox, Mobile) the MediaPlayer handles all aspects of the rendering.
+The developer need only provide the look direction.
+For Mixed Reality, this sample shows how to use
+the MediaPlayer to obtain individual video frames, and render them to a head-mounted display.
+
+> **Note:** This sample is part of a large collection of UWP feature samples.
+> If you are unfamiliar with Git and GitHub, you can download the entire collection as a
+> [ZIP file](https://github.com/Microsoft/Windows-universal-samples/archive/master.zip), but be
+> sure to unzip everything to access shared dependencies. For more info on working with the ZIP file,
+> the samples collection, and GitHub, see [Get the UWP samples from GitHub](https://aka.ms/ovu2uq).
+> For more samples, see the [Samples portal](https://aka.ms/winsamples) on the Windows Dev Center.
+
+Specifically, this sample covers:
+
+- Creating a hybrid application which works across flat devices (PC, Xbox, Mobile) and Mixed Reality devices (MR headset, HoloLens).
+- Handling various forms of user input including mouse, keyboard and gamepad.
+- Selecting the playback mode (flat or immersive) based on the environment the app is running in.
+- Creating multiple windows or views of the app and switching between the XAML (flat) and DirectX (immersive) Windows.
+- The C++ version of the sample also implements some transport controls during immersive playback.
+
+## Additional remarks
+
+**Note** The Windows universal samples for Windows 10 Holographic require Visual Studio 2017 Update 2
+to build, and a Windows Holographic device to execute. Windows Holographic devices include the
+Microsoft HoloLens and the Microsoft HoloLens Emulator.
+
+To obtain information about Windows 10 development, go to the [Windows Dev Center](http://go.microsoft.com/fwlink/?LinkID=532421).
+
+To obtain information about the tools used for Windows Holographic development, including
+Visual Studio and the Microsoft HoloLens Emulator, go to
+[Install the tools](https://developer.microsoft.com/windows/mixed-reality/install_the_tools).
+
+### Reference
+[MediaPlayer](https://docs.microsoft.com/en-us/uwp/api/windows.media.playback.mediaplayer)
+[Mixed Reality Development](https://developer.microsoft.com/en-us/windows/mixed-reality/development)
+[Creating a Holographic DirectX Project](https://developer.microsoft.com/en-us/windows/mixed-reality/creating_a_holographic_directx_project)
+[Using XAML with Holographic DirectX Apps](https://developer.microsoft.com/en-us/windows/mixed-reality/using_xaml_with_holographic_directx_apps)
+[ApplicationViewSwitcher](http://msdn.microsoft.com/library/windows/apps/dn281094)
+
+## System requirements
+
+**Client:** Windows 10 build 15063, Windows 10 Holographic
+
+**Phone:** Windows 10 build 15063
+
+## Build the sample
+
+1. If you download the samples ZIP, be sure to unzip the entire archive, not just the folder with
+ the sample you want to build.
+2. Start Microsoft Visual Studio 2017 and select **File** \> **Open** \> **Project/Solution**.
+3. Starting in the folder where you unzipped the samples, go to the Samples subfolder, then the
+ subfolder for this specific sample, then the subfolder for your preferred language (C++, C#, or
+ JavaScript). Double-click the Visual Studio Solution (.sln) file.
+4. Press Ctrl+Shift+B, or select **Build** \> **Build Solution**.
+
+## Run the sample
+
+The next steps depend on whether you just want to deploy the sample or you want to both deploy and
+run it.
+
+### Deploying the sample
+
+- Select Build > Deploy Solution.
+
+### Deploying and running the sample
+
+- To debug the sample and then run it, press F5 or select Debug > Start Debugging. To run the sample without debugging, press Ctrl+F5 or select Debug > Start Without Debugging.
+
+### Deploying the sample to the Microsoft HoloLens emulator
+
+- Click the debug target drop-down, and select **Microsoft HoloLens Emulator**.
+- Select **Build** \> **Deploy** Solution.
+
+### Deploying the sample to a Microsoft HoloLens
+
+- Developer unlock your Microsoft HoloLens. For instructions, go to
+ [Enable your device for development](https://msdn.microsoft.com/windows/uwp/get-started/enable-your-device-for-development#enable-your-windows-10-devices).
+- Find the IP address of your Microsoft HoloLens. The IP address can be found in **Settings**
+ \> **Network & Internet** \> **Wi-Fi** \> **Advanced options**. Or, you can ask Cortana for this
+ information by saying: "Hey Cortana, what's my IP address?"
+- Right-click on your project in Visual Studio, and then select **Properties**.
+- In the Debugging pane, click the drop-down and select **Remote Machine**.
+- Enter the IP address of your Microsoft HoloLens into the field labelled **Machine Name**.
+- Click **OK**.
+- Select **Build** \> **Deploy** Solution.
+
+### Pairing your developer-unlocked Microsoft HoloLens with Visual Studio
+
+The first time you deploy from your development PC to your developer-unlocked Microsoft HoloLens,
+you will need to use a PIN to pair your PC with the Microsoft HoloLens.
+- When you select **Build** \> **Deploy Solution**, a dialog box will appear for Visual Studio to
+ accept the PIN.
+- On your Microsoft HoloLens, go to **Settings** \> **Update** \> **For developers**, and click on
+ **Pair**.
+- Type the PIN displayed by your Microsoft HoloLens into the Visual Studio dialog box and click
+ **OK**.
+- On your Microsoft HoloLens, select **Done** to accept the pairing.
+- The solution will then start to deploy.
+
+### Deploying and running the sample
+
+- To debug the sample and then run it, follow the steps listed above to connect your
+ developer-unlocked Microsoft HoloLens, then press F5 or select **Debug** \> **Start Debugging**.
+ To run the sample without debugging, press Ctrl+F5 or select **Debug** \> **Start Without Debugging**.
diff --git a/Samples/360VideoPlayback/cpp/360VideoPlayback.sln b/Samples/360VideoPlayback/cpp/360VideoPlayback.sln
new file mode 100644
index 0000000000..20b477ac8f
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/360VideoPlayback.sln
@@ -0,0 +1,32 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio 15
+VisualStudioVersion = 15.0.26403.0
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "360VideoPlayback", "360VideoPlayback.vcxproj", "{70E4B7FA-6346-4DB9-B2BE-B27068B19B4F}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|x64 = Debug|x64
+ Debug|x86 = Debug|x86
+ Release|x64 = Release|x64
+ Release|x86 = Release|x86
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {70E4B7FA-6346-4DB9-B2BE-B27068B19B4F}.Debug|x64.ActiveCfg = Debug|x64
+ {70E4B7FA-6346-4DB9-B2BE-B27068B19B4F}.Debug|x64.Build.0 = Debug|x64
+ {70E4B7FA-6346-4DB9-B2BE-B27068B19B4F}.Debug|x64.Deploy.0 = Debug|x64
+ {70E4B7FA-6346-4DB9-B2BE-B27068B19B4F}.Debug|x86.ActiveCfg = Debug|Win32
+ {70E4B7FA-6346-4DB9-B2BE-B27068B19B4F}.Debug|x86.Build.0 = Debug|Win32
+ {70E4B7FA-6346-4DB9-B2BE-B27068B19B4F}.Debug|x86.Deploy.0 = Debug|Win32
+ {70E4B7FA-6346-4DB9-B2BE-B27068B19B4F}.Release|x64.ActiveCfg = Release|x64
+ {70E4B7FA-6346-4DB9-B2BE-B27068B19B4F}.Release|x64.Build.0 = Release|x64
+ {70E4B7FA-6346-4DB9-B2BE-B27068B19B4F}.Release|x64.Deploy.0 = Release|x64
+ {70E4B7FA-6346-4DB9-B2BE-B27068B19B4F}.Release|x86.ActiveCfg = Release|Win32
+ {70E4B7FA-6346-4DB9-B2BE-B27068B19B4F}.Release|x86.Build.0 = Release|Win32
+ {70E4B7FA-6346-4DB9-B2BE-B27068B19B4F}.Release|x86.Deploy.0 = Release|Win32
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+EndGlobal
diff --git a/Samples/360VideoPlayback/cpp/360VideoPlayback.vcxproj b/Samples/360VideoPlayback/cpp/360VideoPlayback.vcxproj
new file mode 100644
index 0000000000..0d39a99339
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/360VideoPlayback.vcxproj
@@ -0,0 +1,271 @@
+
+
+
+ {70e4b7fa-6346-4db9-b2be-b27068b19b4f}
+ HolographicApp
+ _360VideoPlayback
+ en-US
+ 15.0
+ true
+ Windows Store
+ 10.0.15063.0
+ 10.0.15063.0
+ 10.0
+ true
+
+
+
+
+ Debug
+ Win32
+
+
+ Release
+ Win32
+
+
+ Debug
+ x64
+
+
+ Release
+ x64
+
+
+
+ Application
+ true
+ v141
+
+
+ Application
+ false
+ true
+ v141
+
+
+ Application
+ true
+ v141
+
+
+ Application
+ false
+ true
+ v141
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(VC_IncludePath);$(UniversalCRT_IncludePath);$(WindowsSDK_IncludePath);..\..\..\SharedContent\cpp
+
+
+
+ d2d1.lib; d3d11.lib; dxgi.lib; dwrite.lib; windowscodecs.lib; %(AdditionalDependencies);
+ %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store; $(VCInstallDir)\lib
+ mincore.lib;kernel32.lib;ole32.lib;%(IgnoreSpecificDefaultLibraries)
+
+
+ pch.h
+ $(IntDir)pch.pch
+ $(ProjectDir);$(IntermediateOutputPath);%(AdditionalIncludeDirectories)
+ /bigobj %(AdditionalOptions)
+ _DEBUG;%(PreprocessorDefinitions)
+
+
+
+
+ d2d1.lib; d3d11.lib; dxgi.lib; dwrite.lib; windowscodecs.lib; %(AdditionalDependencies);
+ %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store; $(VCInstallDir)\lib
+ mincore.lib;kernel32.lib;ole32.lib;%(IgnoreSpecificDefaultLibraries)
+
+
+ pch.h
+ $(IntDir)pch.pch
+ $(ProjectDir);$(IntermediateOutputPath);%(AdditionalIncludeDirectories)
+ /bigobj %(AdditionalOptions)
+ NDEBUG;%(PreprocessorDefinitions)
+
+
+
+
+ d2d1.lib; d3d11.lib; dxgi.lib; dwrite.lib; windowscodecs.lib; %(AdditionalDependencies);
+ %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store\amd64; $(VCInstallDir)\lib\amd64
+ mincore.lib;kernel32.lib;ole32.lib;%(IgnoreSpecificDefaultLibraries)
+
+
+ pch.h
+ $(IntDir)pch.pch
+ $(ProjectDir);$(IntermediateOutputPath);%(AdditionalIncludeDirectories)
+ /bigobj %(AdditionalOptions)
+ _DEBUG;%(PreprocessorDefinitions)
+
+
+
+
+ d2d1.lib; d3d11.lib; dxgi.lib; dwrite.lib; windowscodecs.lib; %(AdditionalDependencies);
+ %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store\amd64; $(VCInstallDir)\lib\amd64
+ mincore.lib;kernel32.lib;ole32.lib;%(IgnoreSpecificDefaultLibraries)
+
+
+ pch.h
+ $(IntDir)pch.pch
+ $(ProjectDir);$(IntermediateOutputPath);%(AdditionalIncludeDirectories)
+ /bigobj %(AdditionalOptions)
+ NDEBUG;%(PreprocessorDefinitions)
+
+
+
+
+ Assets\splash-sdk.png
+
+
+ Assets\squareTile-sdk.png
+
+
+ Assets\smallTile-sdk.png
+
+
+ Assets\StoreLogo-sdk.png
+
+
+
+
+ ..\shared\App.xaml
+
+
+
+
+
+
+
+
+
+
+ ..\shared\MainPage.xaml
+
+
+ ..\shared\PlaybackPage.xaml
+
+
+ ..\shared\VideoGallery.xaml
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ..\shared\App.xaml
+
+
+
+
+
+
+
+
+
+
+ ..\shared\MainPage.xaml
+
+
+ ..\shared\PlaybackPage.xaml
+
+
+ ..\shared\VideoGallery.xaml
+
+
+
+
+
+
+ Create
+
+
+
+
+ Designer
+
+
+
+
+ Geometry
+ 5.0
+ 5.0
+ Geometry
+
+
+ Pixel
+ 5.0
+ 5.0
+ Pixel
+
+
+ Vertex
+ 5.0
+ 5.0
+ Vertex
+
+
+ Pixel
+ 5.0
+
+
+ Vertex
+ 5.0
+
+
+ Vertex
+ 5.0
+
+
+ Geometry
+ 5.0
+
+
+
+
+ Document
+
+
+ Designer
+
+
+ Designer
+
+
+
+
+
+
+
+
+
+
+ true
+
+
\ No newline at end of file
diff --git a/Samples/360VideoPlayback/cpp/360VideoPlayback.vcxproj.filters b/Samples/360VideoPlayback/cpp/360VideoPlayback.vcxproj.filters
new file mode 100644
index 0000000000..3838cf9be7
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/360VideoPlayback.vcxproj.filters
@@ -0,0 +1,156 @@
+
+
+
+
+ 70e4b7fa-6346-4db9-b2be-b27068b19b4f
+
+
+ a32756d0-ab5c-487c-81e1-5113c37943d7
+ bmp;fbx;gif;jpg;jpeg;tga;tiff;tif;png
+
+
+ 7a3f21c3-861d-492c-99df-fc27a164c78a
+
+
+ 5d50e774-3a5f-4551-aa54-06a64b57860f
+
+
+ Common
+
+
+ Common
+
+
+ Common
+
+
+ Common
+
+
+ Common
+
+
+ Common
+
+
+
+
+
+
+ Content
+
+
+ Content
+
+
+
+
+
+
+ Common
+
+
+ Common
+
+
+ Common
+
+
+ Common
+
+
+ Content
+
+
+ Common
+
+
+
+
+
+
+
+ Content
+
+
+ Content
+
+
+ Content
+
+
+
+
+
+
+ Common
+
+
+ Common
+
+
+ Common
+
+
+ Common
+
+
+ Content
+
+
+ Common
+
+
+
+
+
+ Content\Shaders
+
+
+ Content\Shaders
+
+
+ Content\Shaders
+
+
+ Content\Shaders
+
+
+ Content\Shaders
+
+
+ Content\Shaders
+
+
+ Content\Shaders
+
+
+
+
+
+
+
+ Assets
+
+
+ Assets
+
+
+ Assets
+
+
+ Assets
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Samples/360VideoPlayback/cpp/360VideoPlaybackMain.cpp b/Samples/360VideoPlayback/cpp/360VideoPlaybackMain.cpp
new file mode 100644
index 0000000000..4c7c4e2b6e
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/360VideoPlaybackMain.cpp
@@ -0,0 +1,495 @@
+#include "pch.h"
+#include "360VideoPlaybackMain.h"
+#include "Common\DirectXHelper.h"
+#include
+#include
+
+using namespace _360VideoPlayback;
+using namespace concurrency;
+using namespace Platform;
+using namespace std;
+using namespace std::placeholders;
+using namespace Windows::Foundation;
+using namespace Windows::Foundation::Numerics;
+using namespace Windows::Gaming::Input;
+using namespace Windows::Graphics::Holographic;
+using namespace Windows::Perception::Spatial;
+using namespace Windows::System;
+using namespace Windows::UI::Core;
+using namespace Windows::UI::Input::Spatial;
+
+const float MTC_SPAWN_DISTANCE = 1.35f;
+
+std::shared_ptr _360VideoPlaybackMain::m_deviceResources = nullptr;
+
+shared_ptr _360VideoPlaybackMain::GetDeviceResources()
+{
+ if (_360VideoPlaybackMain::m_deviceResources == nullptr)
+ {
+ _360VideoPlaybackMain::m_deviceResources = make_shared();
+ }
+ return _360VideoPlaybackMain::m_deviceResources;
+}
+
+// Loads and initializes application assets when the application is loaded.
+_360VideoPlaybackMain::_360VideoPlaybackMain()
+{
+ // Register to be notified if the device is lost or recreated.
+ m_deviceResources->RegisterDeviceNotify(this);
+}
+
+void _360VideoPlaybackMain::SetHolographicSpace(HolographicSpace^ holographicSpace)
+{
+ UnregisterHolographicEventHandlers();
+
+ m_holographicSpace = holographicSpace;
+
+ //
+ // TODO: Add code here to initialize your holographic content.
+ //
+
+#ifdef DRAW_SAMPLE_CONTENT
+ // Initialize the sample hologram.
+ m_videoRenderer = std::make_unique();
+ m_mediaTransportControls = std::make_unique();
+
+ m_spatialInputHandler = std::make_unique();
+#endif
+
+ // Use the default SpatialLocator to track the motion of the device.
+ m_locator = SpatialLocator::GetDefault();
+
+ // Be able to respond to changes in the positional tracking state.
+ m_locatabilityChangedToken =
+ m_locator->LocatabilityChanged +=
+ ref new TypedEventHandler(
+ std::bind(&_360VideoPlaybackMain::OnLocatabilityChanged, this, _1, _2)
+ );
+
+ // Respond to camera added events by creating any resources that are specific
+ // to that camera, such as the back buffer render target view.
+ // When we add an event handler for CameraAdded, the API layer will avoid putting
+ // the new camera in new HolographicFrames until we complete the deferral we created
+ // for that handler, or return from the handler without creating a deferral. This
+ // allows the app to take more than one frame to finish creating resources and
+ // loading assets for the new holographic camera.
+ // This function should be registered before the app creates any HolographicFrames.
+ m_cameraAddedToken =
+ m_holographicSpace->CameraAdded +=
+ ref new TypedEventHandler(
+ std::bind(&_360VideoPlaybackMain::OnCameraAdded, this, _1, _2)
+ );
+
+ // Respond to camera removed events by releasing resources that were created for that
+ // camera.
+ // When the app receives a CameraRemoved event, it releases all references to the back
+ // buffer right away. This includes render target views, Direct2D target bitmaps, and so on.
+ // The app must also ensure that the back buffer is not attached as a render target, as
+ // shown in DeviceResources::ReleaseResourcesForBackBuffer.
+ m_cameraRemovedToken =
+ m_holographicSpace->CameraRemoved +=
+ ref new TypedEventHandler(
+ std::bind(&_360VideoPlaybackMain::OnCameraRemoved, this, _1, _2)
+ );
+
+ // The simplest way to render video frame is to create a stationary reference frame
+ // when the app is launched. This is roughly analogous to creating a "world" coordinate system
+ // with the origin placed at the device's position as the app is launched.
+ m_referenceFrame = m_locator->CreateAttachedFrameOfReferenceAtCurrentHeading();
+
+ // Notes on spatial tracking APIs:
+ // * Stationary reference frames are designed to provide a best-fit position relative to the
+ // overall space. Individual positions within that reference frame are allowed to drift slightly
+ // as the device learns more about the environment.
+ // * When precise placement of individual holograms is required, a SpatialAnchor should be used to
+ // anchor the individual hologram to a position in the real world - for example, a point the user
+ // indicates to be of special interest. Anchor positions do not drift, but can be corrected; the
+ // anchor will use the corrected position starting in the next frame after the correction has
+ // occurred.
+}
+
+void _360VideoPlaybackMain::UnregisterHolographicEventHandlers()
+{
+ if (m_holographicSpace != nullptr)
+ {
+ // Clear previous event registrations.
+
+ if (m_cameraAddedToken.Value != 0)
+ {
+ m_holographicSpace->CameraAdded -= m_cameraAddedToken;
+ m_cameraAddedToken.Value = 0;
+ }
+
+ if (m_cameraRemovedToken.Value != 0)
+ {
+ m_holographicSpace->CameraRemoved -= m_cameraRemovedToken;
+ m_cameraRemovedToken.Value = 0;
+ }
+ }
+
+ if (m_locator != nullptr)
+ {
+ m_locator->LocatabilityChanged -= m_locatabilityChangedToken;
+ }
+
+ if (m_videoRenderer != nullptr)
+ {
+ m_videoRenderer->ReleaseDeviceDependentResources();
+ }
+}
+
+_360VideoPlaybackMain::~_360VideoPlaybackMain()
+{
+ // Deregister device notification.
+ m_deviceResources->RegisterDeviceNotify(nullptr);
+
+ UnregisterHolographicEventHandlers();
+}
+
+// Updates the application state once per frame.
+HolographicFrame^ _360VideoPlaybackMain::Update()
+{
+ // Before doing the timer update, there is some work to do per-frame
+ // to maintain holographic rendering. First, we will get information
+ // about the current frame.
+
+ // The HolographicFrame has information that the app needs in order
+ // to update and render the current frame. The app begins each new
+ // frame by calling CreateNextFrame.
+ HolographicFrame^ holographicFrame = m_holographicSpace->CreateNextFrame();
+
+ // Get a prediction of where holographic cameras will be when this frame
+ // is presented.
+ HolographicFramePrediction^ prediction = holographicFrame->CurrentPrediction;
+
+ // Back buffers can change from frame to frame. Validate each buffer, and recreate
+ // resource views and depth buffers as needed.
+ m_deviceResources->EnsureCameraResources(holographicFrame, prediction);
+
+ // Next, we get a coordinate system from the attached frame of reference that is
+ // associated with the current frame. Later, this coordinate system is used for
+ // creating the stereo view matrices when rendering the sample content.
+ SpatialCoordinateSystem^ currentCoordinateSystem = m_referenceFrame->GetStationaryCoordinateSystemAtTimestamp(prediction->Timestamp);
+ SpatialPointerPose^ position = SpatialPointerPose::TryGetAtTimestamp(currentCoordinateSystem, prediction->Timestamp);
+
+ bool inputReceived = false;
+
+
+ // Check for new input state since the last frame.
+ SpatialInteractionSourceState^ pointerState = m_spatialInputHandler->CheckForInput();
+ if (pointerState != nullptr)
+ {
+ inputReceived = true;
+ }
+
+ // Check for Gamepad input
+ auto gamepads = Gamepad::Gamepads;
+ if (gamepads->Size > 0)
+ {
+ auto currentButtonState = gamepads->GetAt(0)->GetCurrentReading().Buttons; // We are only going to support 1 controller at the moment
+ if (static_cast(currentButtonState & GamepadButtons::A)) // Marking 'A' Button as our click button
+ {
+ if (!m_isGamepadPressed) // Control the frequency of calls in Update
+ {
+ inputReceived = true;
+ m_isGamepadPressed = true;
+ }
+ }
+ else
+ {
+ m_isGamepadPressed = false;
+ }
+ }
+
+ // Check Mouse Input
+ if (static_cast(CoreWindow::GetForCurrentThread()->GetKeyState(VirtualKey::LeftButton) & CoreVirtualKeyStates::Down))
+ {
+ if (!m_isMouseLeftKeyPressed) // Control the frequency of calls in Update
+ {
+ inputReceived = true;
+ m_isMouseLeftKeyPressed = true;
+ }
+ }
+ else
+ {
+ m_isMouseLeftKeyPressed = false;
+ }
+
+ // Handle input
+ if (inputReceived)
+ {
+ if (m_mediaTransportControls->IsVisible())
+ {
+ m_mediaTransportControls->PerformPressedAction();
+ }
+ else if (position != nullptr)
+ {
+ m_mediaTransportControls->Show(position, MTC_SPAWN_DISTANCE);
+ }
+ }
+
+ m_timer.Tick([this, position]()
+ {
+ //
+ // TODO: Update scene objects.
+ //
+ // Put time-based updates here. By default this code will run once per frame,
+ // but if you change the StepTimer to use a fixed time step this code will
+ // run as many times as needed to get to the current step.
+ //
+
+#ifdef DRAW_SAMPLE_CONTENT
+ m_videoRenderer->Update(m_timer);
+ m_mediaTransportControls->Update(m_timer, position);
+#endif
+ });
+
+ // We complete the frame update by using information about our content positioning
+ // to set the focus point.
+
+ for (auto cameraPose : prediction->CameraPoses)
+ {
+#ifdef DRAW_SAMPLE_CONTENT
+ // The HolographicCameraRenderingParameters class provides access to set
+ // the image stabilization parameters.
+ HolographicCameraRenderingParameters^ renderingParameters = holographicFrame->GetRenderingParameters(cameraPose);
+
+ // SetFocusPoint informs the system about a specific point in your scene to
+ // prioritize for image stabilization. The focus point is set independently
+ // for each holographic camera.
+ // You should set the focus point near the content that the user is looking at.
+ // In this example, we put the focus point at the center of the sample hologram,
+ // since that is the only hologram available for the user to focus on.
+ // You can also set the relative velocity and facing of that content; the sample
+ // hologram is at a fixed point so we only need to indicate its position.
+ float3 position = { 0.f, 0.f, 0.f };
+ renderingParameters->SetFocusPoint(
+ currentCoordinateSystem,
+ position);
+#endif
+ }
+
+ // The holographic frame will be used to get up-to-date view and projection matrices and
+ // to present the swap chain.
+ return holographicFrame;
+}
+
+// Renders the current frame to each holographic camera, according to the
+// current application and spatial positioning state. Returns true if the
+// frame was rendered to at least one camera.
+bool _360VideoPlaybackMain::Render(HolographicFrame^ holographicFrame)
+{
+ // Don't try to render anything before the first Update.
+ if (m_timer.GetFrameCount() == 0)
+ {
+ return false;
+ }
+
+ //
+ // TODO: Add code for pre-pass rendering here.
+ //
+ // Take care of any tasks that are not specific to an individual holographic
+ // camera. This includes anything that doesn't need the final view or projection
+ // matrix, such as lighting maps.
+ //
+
+ // Lock the set of holographic camera resources, then draw to each camera
+ // in this frame.
+ return m_deviceResources->UseHolographicCameraResources(
+ [this, holographicFrame](std::map>& cameraResourceMap)
+ {
+ // Up-to-date frame predictions enhance the effectiveness of image stabilization and
+ // allow more accurate positioning of holograms.
+ holographicFrame->UpdateCurrentPrediction();
+ HolographicFramePrediction^ prediction = holographicFrame->CurrentPrediction;
+
+ bool atLeastOneCameraRendered = false;
+ for (auto cameraPose : prediction->CameraPoses)
+ {
+ // This represents the device-based resources for a HolographicCamera.
+ DX::CameraResources* pCameraResources = cameraResourceMap[cameraPose->HolographicCamera->Id].get();
+
+ // Get the device context.
+ const auto context = m_deviceResources->GetD3DDeviceContext();
+ const auto depthStencilView = pCameraResources->GetDepthStencilView();
+
+ // Set render targets to the current holographic camera.
+ ID3D11RenderTargetView *const targets[1] = { pCameraResources->GetBackBufferRenderTargetView() };
+ context->OMSetRenderTargets(1, targets, depthStencilView);
+
+ // Clear the back buffer and depth stencil view.
+ context->ClearRenderTargetView(targets[0], DirectX::Colors::Transparent);
+ context->ClearDepthStencilView(depthStencilView, D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0f, 0);
+
+ //
+ // TODO: Replace the sample content with your own content.
+ //
+ // Notes regarding holographic content:
+ // * For drawing, remember that you have the potential to fill twice as many pixels
+ // in a stereoscopic render target as compared to a non-stereoscopic render target
+ // of the same resolution. Avoid unnecessary or repeated writes to the same pixel,
+ // and only draw holograms that the user can see.
+ // * To help occlude hologram geometry, you can create a depth map using geometry
+ // data obtained via the surface mapping APIs. You can use this depth map to avoid
+ // rendering holograms that are intended to be hidden behind tables, walls,
+ // monitors, and so on.
+ // * Black pixels will appear transparent to the user wearing the device, but you
+ // should still use alpha blending to draw semitransparent holograms. You should
+ // also clear the screen to Transparent as shown above.
+ //
+
+
+ // The view and projection matrices for each holographic camera will change
+ // every frame. This function refreshes the data in the constant buffer for
+ // the holographic camera indicated by cameraPose.
+ pCameraResources->UpdateViewProjectionBuffer(m_deviceResources, cameraPose, m_referenceFrame->GetStationaryCoordinateSystemAtTimestamp(prediction->Timestamp));
+
+ // Attach the view/projection constant buffer for this camera to the graphics pipeline.
+ bool cameraActive = pCameraResources->AttachViewProjectionBuffer(m_deviceResources);
+
+#ifdef DRAW_SAMPLE_CONTENT
+ // Only render world-locked content when positional tracking is active.
+ if (cameraActive)
+ {
+ // Draw the sample hologram.
+ m_videoRenderer->Render();
+ m_mediaTransportControls->Render();
+ }
+#endif
+ atLeastOneCameraRendered = true;
+ }
+
+ return atLeastOneCameraRendered;
+ });
+}
+
+void _360VideoPlaybackMain::SaveAppState()
+{
+ //
+ // TODO: Insert code here to save your app state.
+ // This method is called when the app is about to suspend.
+ //
+ // For example, store information in the SpatialAnchorStore.
+ //
+}
+
+void _360VideoPlaybackMain::LoadAppState()
+{
+ //
+ // TODO: Insert code here to load your app state.
+ // This method is called when the app resumes.
+ //
+ // For example, load information from the SpatialAnchorStore.
+ //
+}
+
+// Notifies classes that use Direct3D device resources that the device resources
+// need to be released before this method returns.
+void _360VideoPlaybackMain::OnDeviceLost()
+{
+#ifdef DRAW_SAMPLE_CONTENT
+ m_videoRenderer->ReleaseDeviceDependentResources();
+#endif
+}
+
+// Notifies classes that use Direct3D device resources that the device resources
+// may now be recreated.
+void _360VideoPlaybackMain::OnDeviceRestored()
+{
+#ifdef DRAW_SAMPLE_CONTENT
+ m_videoRenderer->CreateDeviceDependentResources();
+#endif
+}
+
+void _360VideoPlaybackMain::OnLocatabilityChanged(SpatialLocator^ sender, Object^ args)
+{
+ switch (sender->Locatability)
+ {
+ case SpatialLocatability::Unavailable:
+ // Holograms cannot be rendered.
+ {
+ String^ message = L"Warning! Positional tracking is " +
+ sender->Locatability.ToString() + L".\n";
+ OutputDebugStringW(message->Data());
+ }
+ break;
+
+ // In the following three cases, it is still possible to place holograms using a
+ // SpatialLocatorAttachedFrameOfReference.
+ case SpatialLocatability::PositionalTrackingActivating:
+ // The system is preparing to use positional tracking.
+
+ case SpatialLocatability::OrientationOnly:
+ // Positional tracking has not been activated.
+
+ case SpatialLocatability::PositionalTrackingInhibited:
+ // Positional tracking is temporarily inhibited. User action may be required
+ // in order to restore positional tracking.
+ break;
+
+ case SpatialLocatability::PositionalTrackingActive:
+ // Positional tracking is active. World-locked content can be rendered.
+ break;
+ }
+}
+
+void _360VideoPlaybackMain::OnCameraAdded(
+ HolographicSpace^ sender,
+ HolographicSpaceCameraAddedEventArgs^ args
+)
+{
+ Deferral^ deferral = args->GetDeferral();
+ HolographicCamera^ holographicCamera = args->Camera;
+ create_task([this, deferral, holographicCamera]()
+ {
+ //
+ // TODO: Allocate resources for the new camera and load any content specific to
+ // that camera. Note that the render target size (in pixels) is a property
+ // of the HolographicCamera object, and can be used to create off-screen
+ // render targets that match the resolution of the HolographicCamera.
+ //
+
+ // Create device-based resources for the holographic camera and add it to the list of
+ // cameras used for updates and rendering. Notes:
+ // * Since this function may be called at any time, the AddHolographicCamera function
+ // waits until it can get a lock on the set of holographic camera resources before
+ // adding the new camera. At 60 frames per second this wait should not take long.
+ // * A subsequent Update will take the back buffer from the RenderingParameters of this
+ // camera's CameraPose and use it to create the ID3D11RenderTargetView for this camera.
+ // Content can then be rendered for the HolographicCamera.
+ m_deviceResources->AddHolographicCamera(holographicCamera);
+
+ // Holographic frame predictions will not include any information about this camera until
+ // the deferral is completed.
+ deferral->Complete();
+ });
+}
+
+void _360VideoPlaybackMain::OnCameraRemoved(
+ HolographicSpace^ sender,
+ HolographicSpaceCameraRemovedEventArgs^ args
+)
+{
+ create_task([this]()
+ {
+ //
+ // TODO: Asynchronously unload or deactivate content resources (not back buffer
+ // resources) that are specific only to the camera that was removed.
+ //
+ });
+
+ // Before letting this callback return, ensure that all references to the back buffer
+ // are released.
+ // Since this function may be called at any time, the RemoveHolographicCamera function
+ // waits until it can get a lock on the set of holographic camera resources before
+ // deallocating resources for this camera. At 60 frames per second this wait should
+ // not take long.
+ m_deviceResources->RemoveHolographicCamera(args->Camera);
+}
+
+void _360VideoPlaybackMain::CreateVideoShaders()
+{
+ m_videoRenderer->CreateDeviceDependentResources();
+ m_mediaTransportControls->Initialize();
+ m_mediaTransportControls->ApplyShaders();
+}
diff --git a/Samples/360VideoPlayback/cpp/360VideoPlaybackMain.h b/Samples/360VideoPlayback/cpp/360VideoPlaybackMain.h
new file mode 100644
index 0000000000..034320e27d
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/360VideoPlaybackMain.h
@@ -0,0 +1,103 @@
+#pragma once
+
+//
+// Comment out this preprocessor definition to disable all of the
+// sample content.
+//
+// To remove the content after disabling it:
+// * Remove the unused code from your app's Main class.
+// * Delete the Content folder provided with this template.
+//
+#define DRAW_SAMPLE_CONTENT
+
+#include "Common\DeviceResources.h"
+#include "Common\StepTimer.h"
+
+#ifdef DRAW_SAMPLE_CONTENT
+#include "Content\VideoRenderer.h"
+#include "Content\SpatialInputHandler.h"
+#include "Content\MediaTransportControls.h"
+#endif
+
+// Updates, renders, and presents holographic content using Direct3D.
+namespace _360VideoPlayback
+{
+    // Updates, renders, and presents the 360-degree video as holographic content.
+    //
+    // NOTE(review): the template-argument lists of the smart-pointer declarations
+    // below were stripped from the original source (HTML-escaping ate the <...>
+    // sequences); they have been restored from the sample's Content headers and
+    // from usage in AppView — confirm against the original repository.
+    class _360VideoPlaybackMain : public DX::IDeviceNotify
+    {
+    public:
+        _360VideoPlaybackMain();
+        ~_360VideoPlaybackMain();
+
+        // Sets the holographic space. This is our closest analogue to setting a new window
+        // for the app.
+        void SetHolographicSpace(Windows::Graphics::Holographic::HolographicSpace^ holographicSpace);
+
+        // Starts the holographic frame and updates the content.
+        Windows::Graphics::Holographic::HolographicFrame^ Update();
+
+        // Renders holograms, including world-locked content.
+        bool Render(Windows::Graphics::Holographic::HolographicFrame^ holographicFrame);
+
+        // Handle saving and loading of app state owned by AppMain.
+        void SaveAppState();
+        void LoadAppState();
+
+        // IDeviceNotify
+        virtual void OnDeviceLost();
+        virtual void OnDeviceRestored();
+
+        // Creates the shader resources for the video renderer and transport controls.
+        void CreateVideoShaders();
+
+        // Clears event registration state. Used when changing to a new HolographicSpace
+        // and when tearing down AppMain.
+        void UnregisterHolographicEventHandlers();
+
+        // Accessor for the shared device resources (used by AppView and the controls).
+        static std::shared_ptr<DX::DeviceResources> GetDeviceResources();
+
+    private:
+        // Asynchronously creates resources for new holographic cameras.
+        void OnCameraAdded(
+            Windows::Graphics::Holographic::HolographicSpace^ sender,
+            Windows::Graphics::Holographic::HolographicSpaceCameraAddedEventArgs^ args);
+
+        // Synchronously releases resources for holographic cameras that are no longer
+        // attached to the system.
+        void OnCameraRemoved(
+            Windows::Graphics::Holographic::HolographicSpace^ sender,
+            Windows::Graphics::Holographic::HolographicSpaceCameraRemovedEventArgs^ args);
+
+        // Used to notify the app when the positional tracking state changes.
+        void OnLocatabilityChanged(
+            Windows::Perception::Spatial::SpatialLocator^ sender,
+            Platform::Object^ args);
+
+#ifdef DRAW_SAMPLE_CONTENT
+        // Renders the 360-degree video onto a sphere around the user.
+        std::unique_ptr<VideoRenderer> m_videoRenderer;
+
+        // Renders and manages the in-world media transport controls.
+        std::unique_ptr<MediaTransportControls> m_mediaTransportControls;
+
+        // Listens for the Pressed spatial input event.
+        std::shared_ptr<SpatialInputHandler> m_spatialInputHandler;
+#endif
+
+        // Static pointer to device resources.
+        static std::shared_ptr<DX::DeviceResources> m_deviceResources;
+
+        // Render loop timer.
+        DX::StepTimer m_timer;
+
+        // Represents the holographic space around the user.
+        Windows::Graphics::Holographic::HolographicSpace^ m_holographicSpace;
+
+        // SpatialLocator that is attached to the primary camera.
+        Windows::Perception::Spatial::SpatialLocator^ m_locator;
+
+        // A reference frame attached to the holographic camera.
+        Windows::Perception::Spatial::SpatialLocatorAttachedFrameOfReference^ m_referenceFrame;
+
+        // Event registration tokens.
+        Windows::Foundation::EventRegistrationToken m_cameraAddedToken;
+        Windows::Foundation::EventRegistrationToken m_cameraRemovedToken;
+        Windows::Foundation::EventRegistrationToken m_locatabilityChangedToken;
+
+        bool m_isGamepadPressed = false;
+        bool m_isMouseLeftKeyPressed = false;
+    };
+}
diff --git a/Samples/360VideoPlayback/cpp/App.xaml.cpp b/Samples/360VideoPlayback/cpp/App.xaml.cpp
new file mode 100644
index 0000000000..c8f9bf2df4
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/App.xaml.cpp
@@ -0,0 +1,111 @@
+//
+// App.xaml.cpp
+// Implementation of the App class.
+//
+
+#include "pch.h"
+#include "MainPage.xaml.h"
+
+using namespace _360VideoPlayback;
+
+using namespace Platform;
+using namespace Windows::ApplicationModel;
+using namespace Windows::ApplicationModel::Activation;
+using namespace Windows::Foundation;
+using namespace Windows::UI::Xaml;
+using namespace Windows::UI::Xaml::Controls;
+using namespace Windows::UI::Xaml::Interop;
+using namespace Windows::UI::Xaml::Navigation;
+
+///
+/// Initializes the singleton application object. This is the first line of authored code
+/// executed, and as such is the logical equivalent of main() or WinMain().
+///
+App::App()
+{
+    InitializeComponent();
+    // Register for suspension notifications so application state can be saved.
+    Suspending += ref new SuspendingEventHandler(this, &App::OnSuspending);
+}
+
+///
+/// Invoked when the application is launched normally by the end user. Other entry points
+/// will be used such as when the application is launched to open a specific file.
+///
+/// Details about the launch request and process.
+void App::OnLaunched(LaunchActivatedEventArgs^ e)
+{
+    // NOTE(review): the template argument of this dynamic_cast was stripped from
+    // the original source (HTML escaping); restored as Frame^, the type created
+    // and assigned to Window::Current->Content below.
+    auto rootFrame = dynamic_cast<Frame^>(Window::Current->Content);
+
+    // Do not repeat app initialization when the Window already has content,
+    // just ensure that the window is active
+    if (rootFrame == nullptr)
+    {
+        // Create a Frame to act as the navigation context and associate it with
+        // a SuspensionManager key
+        rootFrame = ref new Frame();
+
+        rootFrame->NavigationFailed += ref new NavigationFailedEventHandler(this, &App::OnNavigationFailed);
+
+        if (e->PreviousExecutionState == ApplicationExecutionState::Terminated)
+        {
+            // TODO: Restore the saved session state only when appropriate, scheduling the
+            // final launch steps after the restore is complete
+
+        }
+
+        if (e->PrelaunchActivated == false)
+        {
+            if (rootFrame->Content == nullptr)
+            {
+                // When the navigation stack isn't restored navigate to the first page,
+                // configuring the new page by passing required information as a navigation
+                // parameter
+                rootFrame->Navigate(TypeName(MainPage::typeid), e->Arguments);
+            }
+            // Place the frame in the current Window
+            Window::Current->Content = rootFrame;
+            // Ensure the current window is active
+            Window::Current->Activate();
+        }
+    }
+    else
+    {
+        if (e->PrelaunchActivated == false)
+        {
+            if (rootFrame->Content == nullptr)
+            {
+                // When the navigation stack isn't restored navigate to the first page,
+                // configuring the new page by passing required information as a navigation
+                // parameter
+                rootFrame->Navigate(TypeName(MainPage::typeid), e->Arguments);
+            }
+            // Ensure the current window is active
+            Window::Current->Activate();
+        }
+    }
+}
+
+///
+/// Invoked when application execution is being suspended. Application state is saved
+/// without knowing whether the application will be terminated or resumed with the contents
+/// of memory still intact.
+///
+/// The source of the suspend request.
+/// Details about the suspend request.
+void App::OnSuspending(Object^ sender, SuspendingEventArgs^ e)
+{
+    (void) sender;  // Unused parameter
+    (void) e;       // Unused parameter
+
+    // Template stub: the holographic AppView::OnSuspending handles the real
+    // suspend work (Trim + SaveAppState) for the playback view.
+    //TODO: Save application state and stop any background activity
+}
+
+///
+/// Invoked when Navigation to a certain page fails
+///
+/// The Frame which failed navigation
+/// Details about the navigation failure
+// Fail fast when XAML navigation cannot load the requested page; the exception
+// message carries the page type name for diagnosis.
+void App::OnNavigationFailed(Object^ sender, NavigationFailedEventArgs^ e)
+{
+    throw ref new FailureException("Failed to load Page " + e->SourcePageType.Name);
+}
diff --git a/Samples/360VideoPlayback/cpp/App.xaml.h b/Samples/360VideoPlayback/cpp/App.xaml.h
new file mode 100644
index 0000000000..646c1e408e
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/App.xaml.h
@@ -0,0 +1,27 @@
+//
+// App.xaml.h
+// Declaration of the App class.
+//
+
+#pragma once
+
+#include "App.g.h"
+
+namespace _360VideoPlayback
+{
+ ///
+ /// Provides application-specific behavior to supplement the default Application class.
+ ///
+    ref class App sealed
+    {
+    protected:
+        // Entry point invoked by the shell on normal launch.
+        virtual void OnLaunched(Windows::ApplicationModel::Activation::LaunchActivatedEventArgs^ e) override;
+
+    internal:
+        App();
+
+    private:
+        // Lifecycle and navigation-failure handlers registered in the constructor
+        // and in OnLaunched respectively.
+        void OnSuspending(Platform::Object^ sender, Windows::ApplicationModel::SuspendingEventArgs^ e);
+        void OnNavigationFailed(Platform::Object^ sender, Windows::UI::Xaml::Navigation::NavigationFailedEventArgs^ e);
+    };
+}
diff --git a/Samples/360VideoPlayback/cpp/AppView.cpp b/Samples/360VideoPlayback/cpp/AppView.cpp
new file mode 100644
index 0000000000..75b1a9bef6
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/AppView.cpp
@@ -0,0 +1,289 @@
+#include "pch.h"
+#include "AppView.h"
+#include "VideoGallery.xaml.h"
+#include
+
+using namespace _360VideoPlayback;
+using namespace concurrency;
+using namespace Platform;
+using namespace std;
+using namespace Windows::ApplicationModel;
+using namespace Windows::ApplicationModel::Activation;
+using namespace Windows::ApplicationModel::Core;
+using namespace Windows::Foundation;
+using namespace Windows::Graphics::Holographic;
+using namespace Windows::Media::Core;
+using namespace Windows::Media::Playback;
+using namespace Windows::UI::Core;
+using namespace Windows::UI::ViewManagement;
+
+// Static MediaPlayer shared between the holographic view and the XAML view.
+MediaPlayer^ AppView::m_mediaPlayer = nullptr;
+
+// Returns the app-wide MediaPlayer instance (created in the AppView constructor).
+MediaPlayer^ AppView::GetMediaPlayer()
+{
+    return m_mediaPlayer;
+}
+
+// Stores the URI of the video to play; it is forwarded to each AppView created.
+AppViewSource::AppViewSource(Uri^ sourceUri)
+    : m_sourceUri(sourceUri)
+{
+}
+
+// Factory method: creates the IFrameworkView for the holographic window.
+IFrameworkView^ AppViewSource::CreateView()
+{
+    return ref new AppView(m_sourceUri);
+}
+
+// Creates the shared MediaPlayer up front so other components can obtain it via
+// GetMediaPlayer() before SetWindow runs.
+AppView::AppView(Uri^ sourceUri)
+    : m_sourceUri(sourceUri)
+{
+    m_mediaPlayer = ref new MediaPlayer();
+}
+
+// IFrameworkView methods
+
+// The first method called when the IFrameworkView is being created.
+// Use this method to subscribe for Windows shell events and to initialize your app.
+// The first method called when the IFrameworkView is being created.
+// Subscribes to activation events on both the main (XAML) view's window and the
+// new holographic view.
+// NOTE(review): the TypedEventHandler template arguments were stripped from the
+// original source (HTML escaping); restored to match the handler signatures.
+void AppView::Initialize(CoreApplicationView^ applicationView)
+{
+    CoreWindow^ window = CoreWindow::GetForCurrentThread();
+    if (window != nullptr)
+    {
+        // This runs on the main view's thread: reactivation of the main window
+        // signals this holographic view to close (see OnMainViewActivated).
+        window->Activated += ref new TypedEventHandler<CoreWindow^, WindowActivatedEventArgs^>(this, &AppView::OnMainViewActivated);
+    }
+    applicationView->Activated +=
+        ref new TypedEventHandler<CoreApplicationView^, IActivatedEventArgs^>(this, &AppView::OnViewActivated);
+}
+
+// Called when the app's main (XAML) view regains activation: flag the
+// holographic view for shutdown and attempt to close its window immediately.
+void AppView::OnMainViewActivated(CoreWindow^ sender, WindowActivatedEventArgs^ args)
+{
+    m_mainWindowReactivated = true;
+    m_windowClosed = true;
+    TryCloseWindow();
+}
+
+// Called when the CoreWindow object is created (or re-created).
+// Called when the CoreWindow object is created (or re-created).
+// NOTE(review): the template arguments of all event-handler delegates below were
+// stripped from the original source (HTML escaping); restored to match the
+// handler signatures declared in AppView.h.
+void AppView::SetWindow(CoreWindow^ window)
+{
+    ApplicationView::GetForCurrentView()->Consolidated +=
+        ref new TypedEventHandler<ApplicationView^, ApplicationViewConsolidatedEventArgs^>(this, &AppView::OnConsolidated);
+
+    // Register event handlers for app lifecycle.
+    CoreApplication::Suspending +=
+        ref new EventHandler<SuspendingEventArgs^>(this, &AppView::OnSuspending);
+
+    CoreApplication::Resuming +=
+        ref new EventHandler<Platform::Object^>(this, &AppView::OnResuming);
+
+    // Register for keypress notifications.
+    window->KeyDown +=
+        ref new TypedEventHandler<CoreWindow^, KeyEventArgs^>(this, &AppView::OnKeyPressed);
+
+    // Register for notification that the app window is being closed.
+    window->Closed +=
+        ref new TypedEventHandler<CoreWindow^, CoreWindowEventArgs^>(this, &AppView::OnWindowClosed);
+
+    // Register for notifications that the app window is losing focus.
+    window->VisibilityChanged +=
+        ref new TypedEventHandler<CoreWindow^, VisibilityChangedEventArgs^>(this, &AppView::OnVisibilityChanged);
+
+    // At this point we have access to the device and we can create device-dependent
+    // resources.
+    m_deviceResources = _360VideoPlaybackMain::GetDeviceResources();
+
+    m_main = std::make_unique<_360VideoPlaybackMain>();
+
+    // Create a holographic space for the core window for the current view.
+    // Presenting holographic frames that are created by this holographic space will put
+    // the app into exclusive mode.
+    m_holographicSpace = HolographicSpace::CreateForCoreWindow(window);
+
+    // The DeviceResources class uses the preferred DXGI adapter ID from the holographic
+    // space (when available) to create a Direct3D device. The HolographicSpace
+    // uses this ID3D11Device to create and manage device-based resources such as
+    // swap chains.
+    m_deviceResources->SetHolographicSpace(m_holographicSpace);
+
+    // The main class uses the holographic space for updates and rendering.
+    m_main->SetHolographicSpace(m_holographicSpace);
+
+    // Hook up the video source. Frame-server mode lets the app pull individual
+    // video frames and render them onto the holographic display itself.
+    MediaSource^ source = MediaSource::CreateFromUri(m_sourceUri);
+    m_mediaPlayer->MediaOpened += ref new TypedEventHandler<MediaPlayer^, Object^>(this, &AppView::OnMediaOpened);
+    m_mediaPlayer->Source = source;
+
+    m_mediaPlayer->IsVideoFrameServerEnabled = true;
+    m_mediaPlayer->IsLoopingEnabled = true;
+}
+
+// The Load method can be used to initialize scene resources or to load a
+// previously saved app state. This sample loads nothing here: resources are
+// created in SetWindow and, once MediaOpened fires, in Run.
+void AppView::Load(String^ entryPoint)
+{
+}
+
+// This method is called after the window becomes active. It oversees the
+// update, draw, and present loop, and it also oversees window message processing.
+void AppView::Run()
+{
+    CoreWindow^ window = CoreWindow::GetForCurrentThread();
+    window->Activate();
+
+    while (!m_windowClosed)
+    {
+        // Rendering is deferred until MediaOpened has fired (m_ShadersNeed is set
+        // by OnMediaOpened) and the video shaders have been created exactly once.
+        if (m_ShadersNeed)
+        {
+            if (!m_ShadersCreated)
+            {
+                m_main->CreateVideoShaders();
+                m_ShadersCreated = true;
+            }
+        }
+        else
+        {
+            // Media not opened yet: just pump the message queue and retry.
+            window->Dispatcher->ProcessEvents(CoreProcessEventsOption::ProcessOneAndAllPending);
+            continue;
+        }
+
+        // Either the view was consolidated or the main window came back to the
+        // foreground: shut this holographic view down.
+        if (m_appViewConsolidated || m_mainWindowReactivated)
+        {
+            m_windowClosed = true;
+            TryCloseWindow();
+        }
+        else
+        {
+            window->Dispatcher->ProcessEvents(CoreProcessEventsOption::ProcessAllIfPresent);
+            if (m_windowVisible && (m_holographicSpace != nullptr))
+            {
+                HolographicFrame^ holographicFrame = m_main->Update();
+
+                if (m_main->Render(holographicFrame))
+                {
+                    // The holographic frame has an API that presents the swap chain for each
+                    // holographic camera.
+                    m_deviceResources->Present(holographicFrame);
+                }
+            }
+        }
+    }
+}
+
+// Terminate events do not cause Uninitialize to be called. It will be called if your IFrameworkView
+// class is torn down while the app is in the foreground.
+// This method is not often used, but IFrameworkView requires it and it will be called for
+// holographic apps.
+void AppView::Uninitialize()
+{
+    // Detach the media source first so frame-server callbacks stop before the
+    // renderer's event handlers are unregistered.
+    if (m_mediaPlayer != nullptr)
+    {
+        m_mediaPlayer->Source = nullptr;
+        m_mediaPlayer = nullptr;
+    }
+    if (m_main != nullptr)
+    {
+        m_main->UnregisterHolographicEventHandlers();
+    }
+}
+
+
+// Application lifecycle event handlers
+
+// Called when the app view is activated. Activates the app's CoreWindow.
+// Called when the app view is activated. Activates the app's CoreWindow.
+void AppView::OnViewActivated(CoreApplicationView^ sender, IActivatedEventArgs^ args)
+{
+    // Run() won't start until the CoreWindow is activated.
+    sender->CoreWindow->Activate();
+}
+
+void AppView::OnSuspending(Object^ sender, SuspendingEventArgs^ args)
+{
+    // Save app state asynchronously after requesting a deferral. Holding a deferral
+    // indicates that the application is busy performing suspending operations. Be
+    // aware that a deferral may not be held indefinitely; after about five seconds,
+    // the app will be forced to exit.
+    SuspendingDeferral^ deferral = args->SuspendingOperation->GetDeferral();
+
+    // NOTE(review): the lambda captures `this` raw; this is safe only because the
+    // AppView outlives the suspend operation — confirm if the view's lifetime changes.
+    create_task([this, deferral]()
+    {
+        // Release device memory that can be recreated on resume.
+        m_deviceResources->Trim();
+
+        if (m_main != nullptr)
+        {
+            m_main->SaveAppState();
+        }
+
+        //
+        // TODO: Insert code here to save your app state.
+        //
+
+        deferral->Complete();
+    });
+}
+
+void AppView::OnResuming(Object^ sender, Object^ args)
+{
+    // Restore any data or state that was unloaded on suspend. By default, data
+    // and state are persisted when resuming from suspend. Note that this event
+    // does not occur if the app was previously terminated.
+
+    if (m_main != nullptr)
+    {
+        m_main->LoadAppState();
+    }
+
+    //
+    // TODO: Insert code here to load your app state.
+    //
+}
+
+
+// Window event handlers
+
+// Tracks window visibility; Run() skips update/render while the window is hidden.
+void AppView::OnVisibilityChanged(CoreWindow^ sender, VisibilityChangedEventArgs^ args)
+{
+    m_windowVisible = args->Visible;
+}
+
+// Ends the render loop in Run() when the window is closed.
+void AppView::OnWindowClosed(CoreWindow^ sender, CoreWindowEventArgs^ args)
+{
+    m_windowClosed = true;
+}
+
+
+// Input event handlers
+
+void AppView::OnKeyPressed(CoreWindow^ sender, KeyEventArgs^ args)
+{
+    //
+    // TODO: Bluetooth keyboards are supported by HoloLens. You can use this method for
+    //       keyboard input if you want to support it as an optional input method for
+    //       your holographic app.
+    //
+
+    // Escape returns to the gallery: switching back to the main view with
+    // ConsolidateViews makes this view consolidated, which shuts it down
+    // (see OnConsolidated).
+    if (args->VirtualKey == Windows::System::VirtualKey::Escape)
+    {
+        ApplicationViewSwitcher::SwitchAsync(VideoGallery::GetMainViewId(), ApplicationView::GetForCurrentView()->Id, ApplicationViewSwitchingOptions::ConsolidateViews);
+    }
+}
+
+
+// Called when this view is consolidated (removed from the switcher): mark the
+// window for shutdown, close it, and release the media/renderer resources.
+void AppView::OnConsolidated(ApplicationView^ sender, ApplicationViewConsolidatedEventArgs^ args)
+{
+    m_appViewConsolidated = true;
+    m_windowClosed = true;
+    TryCloseWindow();
+    Uninitialize();
+}
+
+// MediaOpened handler: signals Run() that the video shaders can now be created.
+void AppView::OnMediaOpened(MediaPlayer^ sender, Object^ args)
+{
+    m_ShadersNeed = true;
+}
+
+// Closes the CoreWindow of whichever thread this runs on, if one exists.
+void AppView::TryCloseWindow()
+{
+    CoreWindow^ window = CoreWindow::GetForCurrentThread();
+    if (window != nullptr)
+    {
+        window->Close();
+    }
+}
diff --git a/Samples/360VideoPlayback/cpp/AppView.h b/Samples/360VideoPlayback/cpp/AppView.h
new file mode 100644
index 0000000000..1c91197606
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/AppView.h
@@ -0,0 +1,64 @@
+#pragma once
+
+#include "Common\DeviceResources.h"
+#include "360VideoPlaybackMain.h"
+
+namespace _360VideoPlayback
+{
+ // IFrameworkView class. Connects the app with the Windows shell and handles application lifecycle events.
+    // IFrameworkView class. Connects the app with the Windows shell and handles application lifecycle events.
+    ref class AppView sealed : public Windows::ApplicationModel::Core::IFrameworkView
+    {
+    public:
+        AppView(Windows::Foundation::Uri^ sourceUri);
+
+        // IFrameworkView methods.
+        virtual void Initialize(Windows::ApplicationModel::Core::CoreApplicationView^ applicationView);
+        virtual void SetWindow(Windows::UI::Core::CoreWindow^ window);
+        virtual void Load(Platform::String^ entryPoint);
+        virtual void Run();
+        virtual void Uninitialize();
+
+        // Returns the app-wide MediaPlayer instance shared with the XAML view.
+        static Windows::Media::Playback::MediaPlayer^ GetMediaPlayer();
+
+    protected:
+        // Application lifecycle event handlers.
+        void OnViewActivated(Windows::ApplicationModel::Core::CoreApplicationView^ sender, Windows::ApplicationModel::Activation::IActivatedEventArgs^ args);
+        void OnMainViewActivated(Windows::UI::Core::CoreWindow^ sender, Windows::UI::Core::WindowActivatedEventArgs^ args);
+        void OnSuspending(Platform::Object^ sender, Windows::ApplicationModel::SuspendingEventArgs^ args);
+        void OnResuming(Platform::Object^ sender, Platform::Object^ args);
+
+        // Window event handlers.
+        void OnVisibilityChanged(Windows::UI::Core::CoreWindow^ sender, Windows::UI::Core::VisibilityChangedEventArgs^ args);
+        void OnWindowClosed(Windows::UI::Core::CoreWindow^ sender, Windows::UI::Core::CoreWindowEventArgs^ args);
+
+        // CoreWindow input event handlers.
+        void OnKeyPressed(Windows::UI::Core::CoreWindow^ sender, Windows::UI::Core::KeyEventArgs^ args);
+
+    private:
+        std::unique_ptr<_360VideoPlaybackMain> m_main;
+
+        // NOTE(review): the template argument was stripped from this declaration
+        // (HTML escaping); restored as DX::DeviceResources to match
+        // _360VideoPlaybackMain::GetDeviceResources().
+        std::shared_ptr<DX::DeviceResources> m_deviceResources;
+
+        bool m_windowClosed = false;
+        bool m_windowVisible = true;
+        bool m_ShadersNeed = false;        // set once MediaOpened fires
+        bool m_ShadersCreated = false;     // shaders are created exactly once in Run()
+        bool m_appViewConsolidated = false;
+        bool m_mainWindowReactivated = false;
+
+        // The holographic space the app will use for rendering.
+        Windows::Graphics::Holographic::HolographicSpace^ m_holographicSpace = nullptr;
+        static Windows::Media::Playback::MediaPlayer^ m_mediaPlayer;
+
+        void OnConsolidated(Windows::UI::ViewManagement::ApplicationView^ sender, Windows::UI::ViewManagement::ApplicationViewConsolidatedEventArgs^ args);
+        void OnMediaOpened(Windows::Media::Playback::MediaPlayer^ sender, Platform::Object^ args);
+        void TryCloseWindow();
+
+        Windows::Foundation::Uri^ m_sourceUri;
+    };
+
+ // The entry point for the app.
+    // The entry point for the app: a view source that creates an AppView bound
+    // to the video URI supplied at construction.
+    ref class AppViewSource sealed : Windows::ApplicationModel::Core::IFrameworkViewSource
+    {
+    public:
+        AppViewSource(Windows::Foundation::Uri^ sourceUri);
+        virtual Windows::ApplicationModel::Core::IFrameworkView^ CreateView();
+    private:
+        Windows::Foundation::Uri^ m_sourceUri;
+    };
+}
+
diff --git a/Samples/360VideoPlayback/cpp/Common/BaseControl.cpp b/Samples/360VideoPlayback/cpp/Common/BaseControl.cpp
new file mode 100644
index 0000000000..81d71aafe1
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/BaseControl.cpp
@@ -0,0 +1,505 @@
+#include "pch.h"
+#include "BaseControl.h"
+#include "DirectXHelper.h"
+#include "DirectXCollision.h"
+#include "..\360VideoPlaybackMain.h"
+
+using namespace _360VideoPlayback;
+using namespace Concurrency;
+using namespace DirectX;
+using namespace DirectX::TriangleTests;
+using namespace D2D1;
+using namespace DX;
+using namespace Microsoft::WRL;
+using namespace Windows::Foundation::Numerics;
+using namespace Windows::UI::Input::Spatial;
+
+// Default translucent "glass" background for control textures; also used as the
+// sampler border color so out-of-bounds taps blend into the background.
+const float4 defaultGlassColor = { 0.0f, 0.0f, 0.0f, 0.5f };
+// Gaze focus-point (spot light) parameters shared by all controls.
+const float4 constantFocusPointColor = { 1.0f, 1.0f, 1.0f, 1.0f };
+const float constantFocusPointRadius = 0.1f;
+const float constantFocusPointIntensity = 1.0f;
+
+// Constructs a control over the given geometry with identity transforms and a
+// default-sized texture. Heavy (device) resources are created in Initialize().
+// NOTE(review): the shared_ptr's template argument in this signature was
+// stripped from the original source (HTML escaping) and is left as-is here;
+// restore it from the project's geometry type when merging.
+BaseControl::BaseControl(const std::shared_ptr geometry)
+{
+    m_deviceResources = _360VideoPlaybackMain::GetDeviceResources();
+    m_isInitialized = false;
+    m_isFocused = false;
+    m_textureWidth = DEFAULT_TEXTURE_SIZE;
+    m_textureHeight = DEFAULT_TEXTURE_SIZE;
+    m_geometry = geometry;
+    m_scale = float3(1.0f, 1.0f, 1.0f);
+    m_position = float3(0, 0, 0);
+    // FIX: m_isVisible was previously assigned twice (false, then true a few
+    // lines later); only the final value (visible) is kept.
+    m_isVisible = true;
+    m_transform = float4x4::identity();
+    m_parentTransform = float4x4::identity();
+    m_relativeIntersectionPoint = float2(0.0f);
+}
+
+// Creates the device resources the control needs before first render: the model
+// constant buffer, the sized D2D/D3D texture, and the focus-point buffer.
+void BaseControl::Initialize()
+{
+    const CD3D11_BUFFER_DESC constantBufferDesc(sizeof(_360VideoPlayback::ModelConstantBuffer), D3D11_BIND_CONSTANT_BUFFER);
+
+    DX::ThrowIfFailed(m_deviceResources->GetD3DDevice()->CreateBuffer(
+        &constantBufferDesc,
+        nullptr,
+        &m_modelConstantBuffer));
+
+    // Texture dimensions must be fixed before the texture is allocated.
+    CalculateInitialSize();
+
+    AllocateTexture();
+
+    InitializeFocusPoint();
+
+    m_isInitialized = true;
+}
+
+void BaseControl::AllocateTexture()
+{
+    // Create the texture to render the glyph for this control as well as the glyph for the control.
+    // NOTE: If we want visual fidelity that better matches the distance the user is from the button we should
+    // have different sizes of textures (and glyphs) depending on how close the user is to the control.
+    auto d2dContext = m_deviceResources->GetD2DDeviceContext();
+    auto d3ddevice = m_deviceResources->GetD3DDevice();
+    // NOTE(review): the template argument was stripped from the original source
+    // (HTML escaping); restored as IDXGISurface, the type required by
+    // CreateBitmapFromDxgiSurface below.
+    ComPtr<IDXGISurface> cubeTextureSurface;
+    CD3D11_SHADER_RESOURCE_VIEW_DESC shaderResourceViewDesc;
+    D3D11_SAMPLER_DESC samplerDescription;
+    CD3D11_TEXTURE2D_DESC textureDesc;
+    FLOAT dpiX;
+    FLOAT dpiY;
+
+    // Create texture for rendering d2d onto
+    textureDesc = CD3D11_TEXTURE2D_DESC(
+        DXGI_FORMAT_B8G8R8A8_UNORM,
+        m_textureWidth,  // Width
+        m_textureHeight, // Height
+        1,               // MipLevels
+        1,               // ArraySize
+        D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET
+    );
+
+    DX::ThrowIfFailed(d3ddevice->CreateTexture2D(
+        &textureDesc,
+        nullptr,
+        &m_texture));
+    shaderResourceViewDesc = CD3D11_SHADER_RESOURCE_VIEW_DESC(
+        m_texture.Get(),
+        D3D11_SRV_DIMENSION_TEXTURE2D);
+    DX::ThrowIfFailed(d3ddevice->CreateShaderResourceView(
+        m_texture.Get(),
+        &shaderResourceViewDesc,
+        &m_textureShaderResourceView));
+
+    d2dContext->GetDpi(&dpiX, &dpiY);
+    D2D1_BITMAP_PROPERTIES1 bitmapProperties =
+        D2D1::BitmapProperties1(
+            D2D1_BITMAP_OPTIONS_TARGET | D2D1_BITMAP_OPTIONS_CANNOT_DRAW,
+            D2D1::PixelFormat(DXGI_FORMAT_B8G8R8A8_UNORM, D2D1_ALPHA_MODE_PREMULTIPLIED),
+            dpiX,
+            dpiY);
+    // FIX: the HRESULT of As() was previously ignored; a failed QI would have led
+    // to CreateBitmapFromDxgiSurface being called with a null surface.
+    DX::ThrowIfFailed(m_texture.As(&cubeTextureSurface));
+
+    DX::ThrowIfFailed(d2dContext->CreateBitmapFromDxgiSurface(
+        cubeTextureSurface.Get(),
+        &bitmapProperties,
+        &m_textureTarget));
+
+    // create the sampler
+    ZeroMemory(&samplerDescription, sizeof(D3D11_SAMPLER_DESC));
+    samplerDescription.Filter = D3D11_FILTER_ANISOTROPIC;
+    samplerDescription.AddressU = D3D11_TEXTURE_ADDRESS_BORDER;
+    samplerDescription.AddressV = D3D11_TEXTURE_ADDRESS_BORDER;
+    samplerDescription.AddressW = D3D11_TEXTURE_ADDRESS_BORDER;
+    samplerDescription.MipLODBias = 0.0f;
+    samplerDescription.MaxAnisotropy = 4;
+    samplerDescription.ComparisonFunc = D3D11_COMPARISON_NEVER;
+    // Border color matches the glass background so border taps blend in.
+    samplerDescription.BorderColor[0] = defaultGlassColor.x;
+    samplerDescription.BorderColor[1] = defaultGlassColor.y;
+    samplerDescription.BorderColor[2] = defaultGlassColor.z;
+    samplerDescription.BorderColor[3] = defaultGlassColor.w;
+    // allow use of all mip levels
+    samplerDescription.MinLOD = 0;
+    samplerDescription.MaxLOD = D3D11_FLOAT32_MAX;
+    DX::ThrowIfFailed(d3ddevice->CreateSamplerState(
+        &samplerDescription,
+        &m_sampler));
+    DX::ThrowIfFailed(m_deviceResources->GetD2DFactory()->CreateDrawingStateBlock(&m_stateBlock));
+}
+
+// Per-frame update: recomputes the control's world transform, refreshes the
+// gaze focus state and spot-light constants, and updates the UI render item.
+void BaseControl::Update(const StepTimer& timer, SpatialPointerPose^ cameraPose)
+{
+    auto scale = m_scale;
+    const XMMATRIX modelScale = XMMatrixScalingFromVector(XMLoadFloat3(&scale));
+    const XMMATRIX modelTranslation = XMMatrixTranslationFromVector(XMLoadFloat3(&m_position));
+    const XMMATRIX localTransform = XMMatrixMultiply(modelScale, modelTranslation);
+    const XMMATRIX parentTransform = XMLoadFloat4x4(&m_parentTransform);
+    const XMMATRIX modelTransform = XMMatrixMultiply(localTransform, parentTransform);
+
+    XMStoreFloat4x4(&m_transform, modelTransform);
+    // Update local copy of the constant buffer (transposed for HLSL).
+    XMStoreFloat4x4(&m_modelConstantBufferData.model, XMMatrixTranspose(XMLoadFloat4x4(&m_transform)));
+
+    if (m_isVisible && cameraPose != nullptr)
+    {
+        m_isFocused = IsFocusOnControl(XMLoadFloat4x4(&m_transform), cameraPose);
+    }
+
+    // Update spot light parameters.
+    // BUGFIX: cameraPose->Head was previously dereferenced unconditionally even
+    // though the check above shows cameraPose may be null; the spot-light update
+    // is now guarded as well, leaving the previous values in place when no pose
+    // is available.
+    if (cameraPose != nullptr)
+    {
+        m_focusPointConstantBufferData.focusPointOrigin = { cameraPose->Head->Position.x, cameraPose->Head->Position.y, cameraPose->Head->Position.z, 1.0f };
+        m_focusPointConstantBufferData.focusPointDirection = { cameraPose->Head->ForwardDirection.x, cameraPose->Head->ForwardDirection.y, cameraPose->Head->ForwardDirection.z, 1.0f };
+        m_deviceResources->GetD3DDeviceContext()->UpdateSubresource(
+            m_focusPointConstantBuffer.Get(),
+            0,
+            nullptr,
+            &m_focusPointConstantBufferData,
+            0,
+            0);
+    }
+
+    m_renderItem->SetParentControlInformation(m_isFocused, m_relativeIntersectionPoint, m_textureWidth, m_textureHeight);
+    m_renderItem->Update(timer, cameraPose);
+    if (m_renderItem->GetIsRenderPassNeeded())
+    {
+        m_isUITextureRenderNeeded = true;
+    }
+}
+
+// Issues the draw call for this control if it is visible. Geometry, shaders,
+// and the UI texture are (re)bound first.
+void BaseControl::Render()
+{
+    if (m_isVisible)
+    {
+        UploadGeometryAndShaders();
+        ApplyPixelShader();
+        // At this point everything should be set, so Draw the objects.
+        // NOTE(review): 2 instances — presumably one per eye for stereo
+        // rendering via the geometry shader; confirm against GlassGeometryShader.
+        m_deviceResources->GetD3DDeviceContext()->DrawIndexedInstanced(
+            m_geometry->IndexCount, // Index count per instance.
+            2,                      // Instance count.
+            0,                      // Start index location.
+            0,                      // Base vertex location.
+            0);                     // Start instance location.
+    }
+}
+
+// Binds the model constant buffer, the control's geometry (vertex/index
+// buffers, topology, input layout), and the vertex/geometry shaders.
+void BaseControl::UploadGeometryAndShaders()
+{
+    const auto context = m_deviceResources->GetD3DDeviceContext();
+
+    // Update the model transform buffer
+    context->UpdateSubresource(
+        m_modelConstantBuffer.Get(),
+        0,
+        nullptr,
+        &m_modelConstantBufferData,
+        0,
+        0);
+
+    const UINT stride = m_geometry->VertexStride;
+    const UINT offset = 0;
+    context->IASetVertexBuffers(
+        0,
+        1,
+        m_geometry->VertexBuffer.GetAddressOf(),
+        &stride,
+        &offset);
+    context->IASetIndexBuffer(
+        m_geometry->IndexBuffer.Get(),
+        DXGI_FORMAT_R16_UINT, // Each index is one 16-bit unsigned integer (short).
+        0);
+    context->IASetPrimitiveTopology(m_geometry->Topology);
+    context->IASetInputLayout(m_inputLayout.Get());
+
+    // Attach the vertex shader.
+    context->VSSetShader(
+        m_vertexShader.Get(),
+        nullptr, 0);
+    // Apply the model constant buffer to the vertex shader.
+    context->VSSetConstantBuffers(
+        0,
+        1,
+        m_modelConstantBuffer.GetAddressOf());
+    m_deviceResources->GetD3DDeviceContext()->GSSetShader(
+        m_geometryShader.Get(),
+        nullptr,
+        0);
+}
+
+// Binds the glass pixel shader and focus-point constants, re-renders the
+// control's D2D UI texture when it is dirty, then binds that texture + sampler.
+void BaseControl::ApplyPixelShader()
+{
+    m_deviceResources->GetD3DDeviceContext()->PSSetShader(
+        m_pixelShader.Get(),
+        nullptr,
+        0);
+
+    m_deviceResources->GetD3DDeviceContext()->PSSetConstantBuffers(
+        1, /*StartSlot*/
+        1, /*NumBuffers*/
+        m_focusPointConstantBuffer.GetAddressOf()
+    );
+
+    if (m_isUITextureRenderNeeded)
+    {
+        auto d2dContext = m_deviceResources->GetD2DDeviceContext();
+        d2dContext->SaveDrawingState(m_stateBlock.Get());
+        d2dContext->GetTarget(m_d2dContextTargetToRestore.GetAddressOf());
+        // Set target to current texture
+        d2dContext->SetTarget(m_textureTarget.Get());
+        d2dContext->BeginDraw();
+        ColorF clearColor = D2D1::ColorF(defaultGlassColor.x, defaultGlassColor.y, defaultGlassColor.z, defaultGlassColor.w);
+        d2dContext->Clear(clearColor);
+
+        m_renderItem->TryRender();
+
+        // We ignore D2DERR_RECREATE_TARGET here. This error indicates that the device
+        // is lost. It will be handled during the next call to Present.
+        // NOTE(review): this early return leaves the saved target/drawing state
+        // unrestored and m_isUITextureRenderNeeded still set, so the texture is
+        // re-rendered after the device is recreated — appears intentional, but
+        // confirm the unrestored D2D target is acceptable on this path.
+        auto endDrawHR = d2dContext->EndDraw();
+        if (endDrawHR == D2DERR_RECREATE_TARGET)
+        {
+            return;
+        }
+        // We need to restore the previously set d2dContext target so
+        // future draw calls draw to the correct place.
+        d2dContext->SetTarget(m_d2dContextTargetToRestore.Get());
+        d2dContext->RestoreDrawingState(m_stateBlock.Get());
+        m_isUITextureRenderNeeded = false;
+    }
+
+    // Tell the shader to use the texture
+    m_deviceResources->GetD3DDeviceContext()->PSSetShaderResources(
+        0,
+        1,
+        m_textureShaderResourceView.GetAddressOf());
+
+    m_deviceResources->GetD3DDeviceContext()->PSSetSamplers(
+        0,
+        1,
+        m_sampler.GetAddressOf());
+}
+
+// Sets the transform of the containing element; composed with the local
+// scale/position on the next Update().
+void BaseControl::SetParentTransform(float4x4 transform)
+{
+    m_parentTransform = transform;
+}
+
+// Sets the local scale applied on the next Update().
+void BaseControl::SetScale(float3 value)
+{
+    m_scale = value;
+}
+
+// Sets the local position applied on the next Update().
+void BaseControl::SetPosition(float3 value)
+{
+    m_position = value;
+}
+
+
+// Creates the focus-point (gaze spot light) constant buffer and seeds it with
+// the fixed intensity/color/radius constants defined at the top of this file.
+void BaseControl::InitializeFocusPoint()
+{
+    const CD3D11_BUFFER_DESC constantBufferDesc(sizeof(FocusPointConstantBuffer), D3D11_BIND_CONSTANT_BUFFER);
+    DX::ThrowIfFailed(
+        m_deviceResources->GetD3DDevice()->CreateBuffer(
+            &constantBufferDesc,
+            nullptr,
+            &m_focusPointConstantBuffer));
+
+    // Scalars are replicated across all four components for the shader.
+    m_focusPointConstantBufferData.focusPointIntensity = { constantFocusPointIntensity, constantFocusPointIntensity, constantFocusPointIntensity, constantFocusPointIntensity };
+    m_focusPointConstantBufferData.focusPointColor = { constantFocusPointColor.x, constantFocusPointColor.y, constantFocusPointColor.z, constantFocusPointColor.w };
+    m_focusPointConstantBufferData.focusPointRadius = { constantFocusPointRadius, constantFocusPointRadius, constantFocusPointRadius, constantFocusPointRadius };
+    m_deviceResources->GetD3DDeviceContext()->UpdateSubresource(
+        m_focusPointConstantBuffer.Get(),
+        0,
+        nullptr,
+        &m_focusPointConstantBufferData,
+        0,
+        0
+    );
+}
+
+// Shows or hides the control; hiding also clears the focus flag so a hidden
+// control cannot receive pressed actions.
+void BaseControl::SetIsVisible(bool value)
+{
+    m_isVisible = value;
+    if (!m_isVisible) // clear focus if you are turned off
+    {
+        m_isFocused = false;
+    }
+}
+
+// Forwards a spatial "pressed" event to the render item, but only when the
+// item itself reports focus.
+void BaseControl::PerformPressedAction()
+{
+    if (m_renderItem->IsFocused())
+    {
+        m_renderItem->PerformAction();
+    }
+}
+
+// Asynchronously loads the compiled glass vertex/pixel/geometry shaders and
+// creates the corresponding D3D objects plus the input layout.
+// NOTE(review): every template-argument list in this function was stripped from
+// the original source (HTML escaping); they have been restored from the
+// ReadDataAsync contract (task<std::vector<byte>>) and the D3D11 API signatures.
+void BaseControl::ApplyShaders()
+{
+    task<std::vector<byte>> loadVSTask = DX::ReadDataAsync(L"ms-appx:///GlassVertexShader.cso");
+    task<void> createVSTask = loadVSTask.then([this](const std::vector<byte>& fileData)
+    {
+        std::vector<D3D11_INPUT_ELEMENT_DESC> vertexPositionTextureDesc =
+        { {
+            { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
+            { "TEXCOORD", 1, DXGI_FORMAT_R32G32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
+        } };
+        DX::ThrowIfFailed(m_deviceResources->GetD3DDevice()->CreateVertexShader(
+            fileData.data(),
+            fileData.size(),
+            nullptr,
+            &m_vertexShader));
+
+        DX::ThrowIfFailed(m_deviceResources->GetD3DDevice()->CreateInputLayout(
+            vertexPositionTextureDesc.data(),
+            static_cast<UINT>(vertexPositionTextureDesc.size()),
+            fileData.data(),
+            fileData.size(),
+            &m_inputLayout));
+    });
+
+    task<std::vector<byte>> loadPSTask = DX::ReadDataAsync(L"ms-appx:///GlassPixelShader.cso");
+    task<void> createPSTask = loadPSTask.then([this](const std::vector<byte>& fileData)
+    {
+        DX::ThrowIfFailed(m_deviceResources->GetD3DDevice()->CreatePixelShader(
+            fileData.data(),
+            fileData.size(),
+            nullptr,
+            &m_pixelShader));
+    });
+
+    task<std::vector<byte>> loadGSTask = DX::ReadDataAsync(L"ms-appx:///GlassGeometryShader.cso");
+
+    task<void> createGSTask = loadGSTask.then([this](const std::vector<byte>& fileData)
+    {
+        DX::ThrowIfFailed(m_deviceResources->GetD3DDevice()->CreateGeometryShader(
+            fileData.data(),
+            fileData.size(),
+            nullptr,
+            &m_geometryShader));
+    });
+
+    // Join the three creation tasks; the empty continuation merely provides a
+    // single observable completion point, matching the original template.
+    task<void> shaderTaskGroup = (createPSTask && createVSTask && createGSTask);
+    task<void> createSphereTask = shaderTaskGroup.then([this]()
+    {
+    });
+}
+
+// Requests that CalculateInitialSize() take the texture dimensions from the
+// root render element instead of the defaults. Must be called before Initialize().
+void BaseControl::SnapInitialSizeToRootElement()
+{
+    m_snapSizeToRootElement = true;
+}
+
+// Calculates the initial texture dimensions (optionally snapped to the root
+// render element's size) and scales the quad geometry so that
+// DEFAULT_TEXTURE_SIZE (128) texture pixels map to one geometry unit.
+void BaseControl::CalculateInitialSize()
+{
+    if (m_snapSizeToRootElement)
+    {
+        // Derive texture dimensions from the root element, falling back to
+        // the default size when the element reports a zero dimension.
+        m_textureWidth = static_cast(m_renderItem->GetElementSize().x);
+        if (m_textureWidth == 0)
+        {
+            m_textureWidth = DEFAULT_TEXTURE_SIZE;
+        }
+        m_textureHeight = static_cast(m_renderItem->GetElementSize().y);
+        if (m_textureHeight == 0)
+        {
+            m_textureHeight = DEFAULT_TEXTURE_SIZE;
+        }
+    }
+
+    // Scale the geometry so we fit 128 texture pixels per geometry unit.
+    // (The original code duplicated this block in both branches of the if.)
+    float geometryWidth = m_textureWidth / static_cast(DEFAULT_TEXTURE_SIZE);
+    float geometryHeight = m_textureHeight / static_cast(DEFAULT_TEXTURE_SIZE);
+
+    m_scale.x *= geometryWidth;
+    m_scale.y *= geometryHeight;
+}
+
+// Assigns the root 2D element tree that this control renders into its texture.
+void BaseControl::SetRenderElement(std::shared_ptr renderItem)
+{
+    m_renderItem = renderItem;
+}
+
+// Performs a gaze-ray hit test against the control's front face.
+// The unit quad's front face spans [-0.5, 0.5] in model-space X/Y at Z = 0.5;
+// its corners are transformed to world space and intersected with the ray
+// from the user's head pose. On a hit, stores the intersection point relative
+// to the quad's top-left corner in m_relativeIntersectionPoint (normalized to
+// the model's unit size) and returns true.
+bool BaseControl::IsFocusOnControl(const XMMATRIX& transform, SpatialPointerPose^ cameraPose)
+{
+    // Compute Base Plane Cube Bounds
+    float4 topLeft = float4(-0.5f, 0.5f, 0.5f, 1.0f);
+    float4 topRight = float4(0.5f, 0.5f, 0.5f, 1.0f);
+    float4 bottomRight = float4(0.5f, -0.5f, 0.5f, 1.0f);
+    float4 bottomLeft = float4(-0.5f, -0.5f, 0.5f, 1.0f);
+
+    XMVECTOR transformedVerts[4];
+    transformedVerts[0] = XMVector4Transform(XMLoadFloat4(&topLeft), transform);
+    transformedVerts[1] = XMVector4Transform(XMLoadFloat4(&topRight), transform);
+    transformedVerts[2] = XMVector4Transform(XMLoadFloat4(&bottomRight), transform);
+    transformedVerts[3] = XMVector4Transform(XMLoadFloat4(&bottomLeft), transform);
+
+    // After applying the transformation, the W component of the transformed 4 value vectors should be 1
+#ifdef _XM_SSE_INTRINSICS_
+    _ASSERTE(transformedVerts[0].m128_f32[3] == 1);
+#endif // _XM_SSE_INTRINSICS_
+#ifdef _XM_ARM_NEON_INTRINSICS_
+    _ASSERTE(transformedVerts[0].n128_f32[3] == 1);
+#endif // _XM_ARM_NEON_INTRINSICS_
+
+    // This is fine to do Vector3
+    XMVECTOR normal = XMVector3Cross(XMVectorSubtract(transformedVerts[3], transformedVerts[0]), XMVectorSubtract(transformedVerts[1], transformedVerts[0]));
+
+    float3 rayPosition = cameraPose->Head->Position;
+    float3 rayDirection = cameraPose->Head->ForwardDirection;
+    // If the Dot product between two vectors is less than 0, then the two normals are pointing the same direction
+    // The normal computed above points in the direction that the front face is facing, so if another vector points the same direction as it, then it is looking at the back side
+    // When using XMVector3Dot(), the scalar result of the Dot product is copied to all members of the returned XMVECTOR type. To get the result you just need to
+    // access a member of the XMVECTOR
+#ifdef _XM_SSE_INTRINSICS_
+    if (XMVector3Dot(XMLoadFloat3(&rayDirection), normal).m128_f32[0] < 0.0f) // only perform intersection if we're looking at the front of the plane
+#endif // _XM_SSE_INTRINSICS_
+#ifdef _XM_ARM_NEON_INTRINSICS_
+    if (XMVector3Dot(XMLoadFloat3(&rayDirection), normal).n128_f32[0] < 0.0f) // only perform intersection if we're looking at the front of the plane
+#endif // _XM_ARM_NEON_INTRINSICS_
+    {
+        // Test the two triangles that make up the quad.
+        float t1IntersectDistance;
+        float t2IntersectDistance;
+        float focusedDistance;
+        bool t1Intersects = Intersects(
+            XMLoadFloat3(&rayPosition),
+            XMLoadFloat3(&rayDirection),
+            transformedVerts[0],
+            transformedVerts[1],
+            transformedVerts[3],
+            OUT t1IntersectDistance);
+        bool t2Intersects = Intersects(
+            XMLoadFloat3(&rayPosition),
+            XMLoadFloat3(&rayDirection),
+            transformedVerts[3],
+            transformedVerts[2],
+            transformedVerts[1],
+            OUT t2IntersectDistance);
+        if (t1Intersects || t2Intersects)
+        {
+            // Take the nearer hit when both triangles intersect.
+            if (t1Intersects && t2Intersects)
+            {
+                focusedDistance = t1IntersectDistance < t2IntersectDistance ? t1IntersectDistance : t2IntersectDistance;
+            }
+            else
+            {
+                focusedDistance = t1Intersects ? t1IntersectDistance : t2IntersectDistance;
+            }
+            float3 intersectionPosition = rayPosition + (focusedDistance * rayDirection);
+            XMVECTOR modelSpaceIntersectionPoint = XMVector3Transform(XMLoadFloat3(&intersectionPosition), XMMatrixInverse(nullptr, transform));
+
+            // Offset from the quad's top-left model-space corner (-0.5, 0.5).
+            // FIX: the ARM branch previously referenced an undeclared member
+            // (m_minXY); it now mirrors the SSE branch's constants.
+#ifdef _XM_SSE_INTRINSICS_
+            m_relativeIntersectionPoint = float2(abs(modelSpaceIntersectionPoint.m128_f32[0] - (-0.5f)), abs(modelSpaceIntersectionPoint.m128_f32[1] - 0.5f));
+#endif // _XM_SSE_INTRINSICS_
+#ifdef _XM_ARM_NEON_INTRINSICS_
+            m_relativeIntersectionPoint = float2(abs(modelSpaceIntersectionPoint.n128_f32[0] - (-0.5f)), abs(modelSpaceIntersectionPoint.n128_f32[1] - 0.5f));
+#endif // _XM_ARM_NEON_INTRINSICS_
+
+            return true;
+        }
+    }
+    return false;
+}
diff --git a/Samples/360VideoPlayback/cpp/Common/BaseControl.h b/Samples/360VideoPlayback/cpp/Common/BaseControl.h
new file mode 100644
index 0000000000..2bb6e5e60e
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/BaseControl.h
@@ -0,0 +1,79 @@
+#pragma once
+#include "CameraResources.h"
+#include "DeviceResources.h"
+#include "BaseElement.h"
+#include "MeshGeometry.h"
+#include "StepTimer.h"
+#include "BaseElement.h"
+
+#define DEFAULT_TEXTURE_SIZE 128
+
+namespace DX
+{
+    // A 2D UI control that is rendered into a Direct2D texture and displayed
+    // on a textured quad in the holographic scene. Owns the geometry, the
+    // D3D/D2D texture resources, and the shaders used to draw the quad.
+    // NOTE(review): this class declares a virtual method but no virtual
+    // destructor - confirm derived controls are never deleted through a
+    // BaseControl pointer.
+    class BaseControl
+    {
+    public:
+        BaseControl(
+            const std::shared_ptr geometry = MeshGeometry::MakeTexturedCube());
+        // Whether to use the child element to determine the initial texture dimensions and geometry scale.
+        void SnapInitialSizeToRootElement();
+        void Initialize();
+        void Update(const DX::StepTimer& timer, Windows::UI::Input::Spatial::SpatialPointerPose^ cameraPose);
+        void Render();
+        void SetScale(Windows::Foundation::Numerics::float3 value);
+        void SetPosition(Windows::Foundation::Numerics::float3 value);
+        void SetParentTransform(Windows::Foundation::Numerics::float4x4 transform);
+        void ApplyShaders();
+        // Render stage overrides
+        bool IsFocused() { return m_isFocused; };
+        void SetIsVisible(bool value);
+        void PerformPressedAction();
+        void SetRenderElement(std::shared_ptr renderItem);
+        // Setting an explicit texture dimension disables deriving the
+        // dimensions from the control's scale.
+        void SetTextureWidth(UINT width) { m_textureWidth = width; m_calculateTextureDimensionsFromScale = false; }
+        void SetTextureHeight(UINT height) { m_textureHeight = height; m_calculateTextureDimensionsFromScale = false; }
+
+    private:
+        // Calculate the initial texture dimensions and geometry scale.
+        virtual void CalculateInitialSize();
+        void AllocateTexture();
+        void UploadGeometryAndShaders();
+        void InitializeFocusPoint();
+        void ApplyPixelShader();
+
+        bool m_isInitialized;
+        bool m_isFocused;
+        bool m_isVisible;
+        bool m_isUITextureRenderNeeded = true;
+        Microsoft::WRL::ComPtr m_focusPointConstantBuffer;
+        _360VideoPlayback::FocusPointConstantBuffer m_focusPointConstantBufferData;
+        // FIX: this flag was declared as float but is only ever assigned
+        // true/false; bool expresses the intent.
+        bool m_calculateTextureDimensionsFromScale = true;
+        UINT m_textureWidth;
+        UINT m_textureHeight;
+        // Resources
+        std::shared_ptr m_deviceResources;
+        std::shared_ptr m_renderItem;
+        bool m_snapSizeToRootElement = false;
+        std::shared_ptr m_geometry;
+        _360VideoPlayback::ModelConstantBuffer m_modelConstantBufferData;
+        Microsoft::WRL::ComPtr m_modelConstantBuffer;
+        Microsoft::WRL::ComPtr m_vertexShader;
+        Microsoft::WRL::ComPtr m_inputLayout;
+        Microsoft::WRL::ComPtr m_pixelShader;
+        Microsoft::WRL::ComPtr m_geometryShader;
+        Windows::Foundation::Numerics::float3 m_scale;
+        Windows::Foundation::Numerics::float4 m_rotation;
+        Windows::Foundation::Numerics::float3 m_position;
+        Windows::Foundation::Numerics::float4x4 m_transform;
+        // Parent Transform for objects within a parent
+        Windows::Foundation::Numerics::float4x4 m_parentTransform;
+        // Status
+        Windows::Foundation::Numerics::float2 m_relativeIntersectionPoint;
+        bool IsFocusOnControl(const DirectX::XMMATRIX& transform, Windows::UI::Input::Spatial::SpatialPointerPose^ cameraPose);
+        Microsoft::WRL::ComPtr m_d2dContextTargetToRestore;
+        Microsoft::WRL::ComPtr m_sampler;
+        Microsoft::WRL::ComPtr m_stateBlock;
+        Microsoft::WRL::ComPtr m_texture;
+        Microsoft::WRL::ComPtr m_textureTarget;
+        Microsoft::WRL::ComPtr m_textureShaderResourceView;
+    };
+}
diff --git a/Samples/360VideoPlayback/cpp/Common/BaseElement.cpp b/Samples/360VideoPlayback/cpp/Common/BaseElement.cpp
new file mode 100644
index 0000000000..3166d24dc1
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/BaseElement.cpp
@@ -0,0 +1,480 @@
+#include "pch.h"
+#include "BaseElement.h"
+#include "DirectXHelper.h"
+#include "..\360VideoPlaybackMain.h"
+
+using namespace _360VideoPlayback;
+using namespace D2D1;
+using namespace DirectX;
+using namespace DX;
+using namespace Microsoft::WRL;
+using namespace Platform;
+using namespace Windows::Foundation::Numerics;
+using namespace Windows::UI::Input::Spatial;
+
+const float MAX_TEXT_LAYOUT_SIZE = 500;
+
+// Constructs a 2D element in its default state: visible, needing an initial
+// render pass, not yet initialized, with zeroed translations and size, and
+// caches the app-wide device resources.
+BaseElement::BaseElement() :
+    m_isInitialized(false),
+    m_transformRecalculationNeeded(false),
+    m_isRenderPassNeeded(true),
+    m_isVisible(true),
+    m_translation(0.0f, 0.0f),
+    m_transformedTranslation(0.0f, 0.0f),
+    m_parentTranslation(0.0f, 0.0f),
+    m_elementSize(0.0f, 0.0f),
+    m_ignoreLayout(true),
+    m_brush(nullptr)
+{
+    m_deviceResources = _360VideoPlaybackMain::GetDeviceResources();
+}
+// Lays out the child elements inside this container and grows m_elementSize
+// to fit them. In horizontal mode, children are stacked left-to-right and
+// centered on the stacking axis; otherwise every child is centered in the
+// container. Must be called manually whenever layout properties or the set
+// of children change (see header).
+// NOTE(review): children with GetIgnoreLayout() == false are skipped here,
+// i.e. "ignoreLayout" true means "participates in stacking" - the name reads
+// inverted; confirm against callers before renaming.
+void BaseElement::ArrangeElements()
+{
+    float maxWidth = 0.0f;
+    float maxHeight = 0.0f;
+    float containerWidth = 0.0f;
+    float containerHeight = 0.0f;
+
+    // Arrange Children And Compute whole ContainerSize
+    for (const auto& element : m_childElements)
+    {
+        if (!element->GetIgnoreLayout())
+        {
+            // Skip stuff that doesn't want to be stacked in its container (like the focus rect).
+            continue;
+        }
+
+        maxWidth = max(maxWidth, element->GetElementSize().x);
+        maxHeight = max(maxHeight, element->GetElementSize().y);
+
+        if (m_isLayoutHorizontal)
+        {
+            // Widths accumulate along the stacking axis; height is the tallest child.
+            containerWidth += element->GetElementSize().x + element->GetElementMargin().left + element->GetElementMargin().right;
+            containerHeight = max(containerHeight, element->GetElementSize().y + element->GetElementMargin().top + element->GetElementMargin().bottom);
+        }
+        else
+        {
+            // NOTE(review): in the non-horizontal branch both dimensions use
+            // max (children overlap, centered below) - heights are not
+            // summed as a vertical stack would be; confirm this is intended.
+            containerWidth = max(containerWidth, element->GetElementSize().x + element->GetElementMargin().left + element->GetElementMargin().right);
+            containerHeight = max(containerHeight, element->GetElementSize().y + element->GetElementMargin().top + element->GetElementMargin().bottom);
+        }
+    }
+
+    // The container never shrinks below its explicitly set size.
+    float minWidth = m_elementSize.x;
+    float minHeight = m_elementSize.y;
+
+    containerWidth = max(minWidth, containerWidth);
+    containerHeight = max(minHeight, containerHeight);
+    m_elementSize.x = containerWidth;
+    m_elementSize.y = containerHeight;
+
+    // Actually position the elements. We need to do this after calculating the container size so we can center the elements.
+    float currentStackPosition = 0.0;
+    for (const auto& element : m_childElements)
+    {
+        if (!element->GetIgnoreLayout())
+        {
+            // Skip stuff that doesn't want to be stacked in its container (like the focus rect).
+            continue;
+        }
+
+        if (m_isLayoutHorizontal)
+        {
+            // Position element along the stacking axis
+            float elementPositionX = currentStackPosition + element->GetElementMargin().left;
+
+            // Center the element so the stacking axis bisects the element
+            float elementPositionY = (containerHeight / 2.0f) - (element->GetElementSize().y / 2.0f);
+
+            // Adjust centered position to respect top margin
+            if (elementPositionY < element->GetElementMargin().top)
+            {
+                elementPositionY = element->GetElementMargin().top;
+            }
+
+            // Adjust centered position to respect bottom margin
+            if (elementPositionY + element->GetElementSize().y + element->GetElementMargin().bottom > containerHeight)
+            {
+                elementPositionY = containerHeight - element->GetElementSize().y - element->GetElementMargin().bottom;
+            }
+
+            element->SetTranslation({ elementPositionX, elementPositionY });
+
+            currentStackPosition += element->GetElementMargin().left + element->GetElementSize().x + element->GetElementMargin().right;
+        }
+        else
+        {
+            // Center the child in both dimensions.
+            float elementPositionX = (containerWidth / 2.0f) - (element->GetElementSize().x / 2.0f);
+            float elementPositionY = (containerHeight / 2.0f) - (element->GetElementSize().y / 2.0f);
+            element->SetTranslation({ elementPositionX, elementPositionY });
+        }
+    }
+}
+
+// Recursively initializes the child tree, then marks this element initialized.
+void BaseElement::Initialize()
+{
+    for (const auto& child : m_childElements)
+    {
+        child->Initialize();
+    }
+    m_isInitialized = true;
+}
+
+// Replaces the drawing brush and forces a re-render with the new color.
+void BaseElement::SetBrush(ID2D1SolidColorBrush* brush)
+{
+    m_brush = brush;
+    m_isRenderPassNeeded = true;
+}
+
+// Per-frame update: recomputes this element's 2D transform when it or its
+// parent moved, lazily initializes the tree, updates children, and
+// accumulates whether any descendant needs a render pass.
+void BaseElement::Update(const DX::StepTimer& timer, SpatialPointerPose^ cameraPose)
+{
+    // If one of BaseElement's transforms have changed or its parent's transforms have been updated
+    // we will recalculate the BaseElement's transforms and tell its children to update their transforms
+    // as well.
+    // NOTE: For BaseElement's with complex nested children we should try not to update the transforms too
+    // frequently because this can cause perf issues. (fonts, textures, and glyphs may need to be resized)
+    if (m_transformRecalculationNeeded)
+    {
+        m_transformedTranslation = m_translation + m_parentTranslation;
+        m_transformMatrix = D2D1::Matrix3x2F::Translation(m_transformedTranslation.x, m_transformedTranslation.y);
+
+        // Update children with updated transforms
+        for (const auto& child : m_childElements)
+        {
+            child->SetParentTransform(m_transformedTranslation);
+        }
+
+        m_transformRecalculationNeeded = false;
+        m_isRenderPassNeeded = true;
+    }
+
+    if (!m_isInitialized)
+    {
+        Initialize();
+    }
+
+    for (const auto& child : m_childElements)
+    {
+        child->Update(timer, cameraPose);
+        m_isRenderPassNeeded |= child->GetIsRenderPassNeeded();
+    }
+}
+
+// Renders this element and its subtree if visible, then clears the
+// render-pass flag. The flag is cleared even when invisible, so the pass is
+// considered consumed either way.
+void BaseElement::TryRender()
+{
+    if (m_isVisible)
+    {
+        Render();
+
+        for (const auto& child : m_childElements)
+        {
+            child->TryRender();
+        }
+    }
+
+    m_isRenderPassNeeded = false;
+}
+
+// Base class draws nothing; concrete elements override Render to draw.
+void BaseElement::Render()
+{
+}
+
+// Sets this element's translation from the texture origin, flagging a
+// transform recalculation only when the value actually changed.
+void BaseElement::SetTranslation(float2 translation)
+{
+    m_transformRecalculationNeeded |= m_translation != translation;
+    m_translation = translation;
+}
+
+// Stores the parent's accumulated translation, flagging a transform
+// recalculation only when the value actually changed.
+void BaseElement::SetParentTransform(float2 translation)
+{
+    m_transformRecalculationNeeded |= (m_parentTranslation != translation);
+    m_parentTranslation = translation;
+}
+
+// Appends a child, seeding it with this element's current world translation.
+void BaseElement::AddChildElement(const std::shared_ptr& childElement)
+{
+    childElement->SetParentTransform(m_transformedTranslation);
+    m_childElements.push_back(childElement);
+}
+
+// Shows or hides the element; a visibility change triggers a render pass.
+void BaseElement::SetIsVisible(bool visible)
+{
+    m_isRenderPassNeeded |= (visible != m_isVisible);
+    m_isVisible = visible;
+};
+
+// Constructs an unfocused element with no parent-focus information yet.
+FocusableElement::FocusableElement() :
+    BaseElement(),
+    m_isFocused(false),
+    m_isParentFocused(false),
+    m_isFocusIntersectionCalculated(false),
+    m_elementCoordIntersectionPoint({ 0.0f,0.0f }),
+    m_parentTextureWidth(0),
+    m_parentTextureHeight(0),
+    m_parentFocusedPosition(float2(0.0f))
+{
+}
+
+// Updates the base element, then resolves whether the parent's focus point
+// (normalized to the parent texture) falls inside this element's bounds.
+// A focus change forces a render pass and notifies FocusUpdate().
+void FocusableElement::Update(const StepTimer& timer, SpatialPointerPose^ cameraPose)
+{
+    __super::Update(timer, cameraPose);
+    if (m_isParentFocused)
+    {
+        bool isFocused = false;
+
+        float2 minBounds = m_transformedTranslation;
+        float2 maxBounds = m_transformedTranslation + m_elementSize;
+        // The focused position is relative to the model size, so we need to scale it to the texture size
+        float2 textureScaledParentFocusedPosition = float2(m_parentFocusedPosition.x * m_parentTextureWidth, m_parentFocusedPosition.y * m_parentTextureHeight);
+        if (minBounds.x <= textureScaledParentFocusedPosition.x && minBounds.y <= textureScaledParentFocusedPosition.y &&
+            textureScaledParentFocusedPosition.x <= maxBounds.x && textureScaledParentFocusedPosition.y <= maxBounds.y)
+        {
+            isFocused = true;
+        }
+
+        if (m_isFocused != isFocused)
+        {
+            m_isRenderPassNeeded = true; // If this is a change in focus, mark we need a render pass
+            m_isFocused = isFocused;
+            FocusUpdate();
+        }
+
+        if (m_isFocused && m_isFocusIntersectionCalculated) // This is mainly used by Slider Elements, which is why it's behind a flag
+        {
+            m_elementCoordIntersectionPoint = float2(
+                textureScaledParentFocusedPosition.x - (m_parentTextureWidth - m_elementSize.x),
+                textureScaledParentFocusedPosition.y - (m_parentTextureHeight - m_elementSize.y));
+        }
+    }
+    else
+    {
+        // An unfocused parent implies this element cannot be focused.
+        m_isFocused = false;
+    }
+}
+
+// Receives the owning control's focus state, the normalized focus point, and
+// the parent texture dimensions. Losing parent focus immediately clears this
+// element's focus and notifies FocusUpdate().
+void FocusableElement::SetParentControlInformation(bool focused, Windows::Foundation::Numerics::float2 focusedPosition, UINT parentTextureWidth, UINT parentTextureHeight)
+{
+    m_isParentFocused = focused;
+    if (!m_isParentFocused)
+    {
+        if (m_isFocused)
+        {
+            m_isFocused = false;
+            FocusUpdate();
+        }
+    }
+    m_parentFocusedPosition = focusedPosition;
+    m_parentTextureWidth = parentTextureWidth;
+    m_parentTextureHeight = parentTextureHeight;
+}
+
+// Constructs a text element; SetText builds the DirectWrite format/layout
+// and derives the element size from the text metrics.
+Text::Text(std::wstring text, FLOAT fontSize, DWRITE_FONT_WEIGHT fontWeight) :
+    m_transformedFontSize(fontSize),
+    m_fontWeight(fontWeight)
+{
+    this->SetText(text);
+}
+
+// Draws the cached DirectWrite text layout at this element's world
+// translation, restoring the identity transform afterwards.
+// (Removed an unused local that queried the render-target size.)
+void Text::Render()
+{
+    D2D1_POINT_2F textOrigin{ 0.0, 0.0 };
+    auto d2dContext = m_deviceResources->GetD2DDeviceContext();
+    d2dContext->SetTransform(D2D1::Matrix3x2F::Translation(m_transformedTranslation.x, m_transformedTranslation.y));
+    d2dContext->DrawTextLayout(textOrigin, m_textLayout.Get(), m_brush.Get());
+    d2dContext->SetTransform(D2D1::IdentityMatrix());
+}
+
+// (Re)creates the DirectWrite text format and layout for the current text,
+// measures it, stores the measured size as the element size, and requests a
+// render pass. Uses the app's first configured language and no word wrapping.
+void Text::CreateTextContext()
+{
+    DX::ThrowIfFailed(m_deviceResources->GetDWriteFactory()->CreateTextFormat(
+        L"Segoe UI",
+        nullptr,
+        m_fontWeight,
+        DWRITE_FONT_STYLE_NORMAL,
+        DWRITE_FONT_STRETCH_NORMAL,
+        m_transformedFontSize,
+        Windows::Globalization::ApplicationLanguages::Languages->GetAt(0)->Data(),
+        &m_textFormat));
+
+    m_textFormat->SetWordWrapping(DWRITE_WORD_WRAPPING_NO_WRAP);
+
+    DX::ThrowIfFailed(m_textFormat->SetTextAlignment(DWRITE_TEXT_ALIGNMENT_LEADING));
+    DX::ThrowIfFailed(m_deviceResources->GetDWriteFactory()->CreateTextLayout(
+        m_text.data(),
+        static_cast(m_text.size()),
+        m_textFormat.Get(),
+        MAX_TEXT_LAYOUT_SIZE,
+        MAX_TEXT_LAYOUT_SIZE,
+        &m_textLayout));
+
+    // FIX: GetMetrics returns an HRESULT that was previously ignored;
+    // check it like every other failable call in this file.
+    DWRITE_TEXT_METRICS textMetrics;
+    DX::ThrowIfFailed(m_textLayout->GetMetrics(&textMetrics));
+    m_elementSize = { textMetrics.width, textMetrics.height };
+    m_isRenderPassNeeded = true;
+}
+
+// Updates the displayed string, rebuilding the layout only when it changed.
+void Text::SetText(std::wstring text)
+{
+    if (text != m_text)
+    {
+        m_text = text;
+        this->CreateTextContext();
+    }
+}
+
+// Constructs a line element with default endpoints (0,0)-(1,0); callers set
+// the real endpoints via SetStartPoint/SetEndPoint.
+Line::Line(FLOAT strokeThickness) :
+    m_strokeThickness(strokeThickness)
+{
+    m_lineStart = D2D1::Point2F(0.0f, 0.0f);
+    m_lineEnd = D2D1::Point2F(1.0f, 0.0f);
+}
+
+// Draws the line under this element's transform, then restores identity.
+void Line::Render()
+{
+    auto d2dContext = m_deviceResources->GetD2DDeviceContext();
+    d2dContext->SetTransform(m_transformMatrix);
+    d2dContext->DrawLine(m_lineStart, m_lineEnd, m_brush.Get(), m_strokeThickness);
+    d2dContext->SetTransform(D2D1::IdentityMatrix());
+}
+
+// Sets the line's start point, flagging a transform update only on change.
+void Line::SetStartPoint(D2D1_POINT_2F& point)
+{
+    m_transformRecalculationNeeded |= (point.x != m_lineStart.x || point.y != m_lineStart.y);
+    m_lineStart = point;
+}
+
+// Sets the line's end point, flagging a transform update only on change.
+void Line::SetEndPoint(D2D1_POINT_2F& point)
+{
+    m_transformRecalculationNeeded |= (point.x != m_lineEnd.x || point.y != m_lineEnd.y);
+    m_lineEnd = point;
+}
+
+// Constructs an outlined ellipse. SetRadius re-derives the element size and
+// the D2D ellipse from the radii (the initializer list sets the same radii
+// first; SetRadius then computes the dependent state).
+Ellipse::Ellipse(FLOAT radiusX, FLOAT radiusY, FLOAT strokeThickness, ColorF brushColor) :
+    BaseElement(),
+    m_radiusX(radiusX),
+    m_radiusY(radiusY),
+    m_strokeThickness(strokeThickness),
+    m_brushColor(brushColor)
+{
+    this->SetRadius(radiusX, radiusY);
+}
+
+// Creates the solid-color brush on first initialization (unless a brush was
+// injected via SetBrush), then initializes the child tree.
+void Ellipse::Initialize()
+{
+    if (m_brush == nullptr)
+    {
+        const auto d2dcontext = m_deviceResources->GetD2DDeviceContext();
+        DX::ThrowIfFailed(d2dcontext->CreateSolidColorBrush(
+            m_brushColor,
+            &m_brush));
+    }
+    __super::Initialize();
+}
+
+// Sets the radii; the element size is the bounding box (2rx x 2ry) and the
+// ellipse center sits at (rx, ry) so it fills the element's local rect.
+void Ellipse::SetRadius(FLOAT x, FLOAT y)
+{
+    m_radiusX = x; m_radiusY = y;
+    m_elementSize = { m_radiusX * 2.0f, m_radiusY * 2.0f };
+    m_ellipse = D2D1::Ellipse(D2D1::Point2F(m_radiusX, m_radiusY), m_radiusX, m_radiusY);
+}
+
+// Draws the ellipse outline under this element's transform.
+void Ellipse::Render()
+{
+    auto d2dContext = m_deviceResources->GetD2DDeviceContext();
+    d2dContext->SetTransform(m_transformMatrix);
+    d2dContext->DrawEllipse(m_ellipse, m_brush.Get(), m_strokeThickness);
+    d2dContext->SetTransform(D2D1::IdentityMatrix());
+}
+
+// Constructs a filled ellipse; SetRadius derives element size and geometry.
+FilledEllipse::FilledEllipse(FLOAT radiusX, FLOAT radiusY, ColorF brushColor) :
+    BaseElement(),
+    m_radiusX(radiusX),
+    m_radiusY(radiusY),
+    m_brushColor(brushColor)
+{
+    this->SetRadius(radiusX, radiusY);
+}
+
+// Sets the radii; size is the bounding box and the center is (rx, ry),
+// mirroring Ellipse::SetRadius.
+void FilledEllipse::SetRadius(FLOAT x, FLOAT y)
+{
+    m_radiusX = x; m_radiusY = y;
+    m_elementSize = { m_radiusX * 2.0f, m_radiusY * 2.0f };
+    m_ellipse = D2D1::Ellipse(D2D1::Point2F(m_radiusX, m_radiusY), m_radiusX, m_radiusY);
+}
+
+// Creates the solid-color brush on first initialization (unless one was
+// injected via SetBrush), then initializes the child tree.
+// FIX: the CreateSolidColorBrush HRESULT was silently ignored here while
+// Ellipse::Initialize and Rectangle::Initialize check it - now consistent.
+void FilledEllipse::Initialize()
+{
+    if (m_brush == nullptr)
+    {
+        const auto d2dcontext = m_deviceResources->GetD2DDeviceContext();
+        DX::ThrowIfFailed(d2dcontext->CreateSolidColorBrush(
+            m_brushColor,
+            &m_brush));
+    }
+    __super::Initialize();
+}
+
+// Fills the ellipse under this element's transform.
+void FilledEllipse::Render()
+{
+    auto d2dContext = m_deviceResources->GetD2DDeviceContext();
+    d2dContext->SetTransform(m_transformMatrix);
+    d2dContext->FillEllipse(m_ellipse, m_brush.Get());
+    d2dContext->SetTransform(D2D1::IdentityMatrix());
+}
+
+// Constructs an outlined rectangle anchored at the local origin.
+// NOTE(review): unlike the ellipse types, m_elementSize is not set here -
+// confirm callers rely on SetElementSize/ArrangeElements for sizing.
+Rectangle::Rectangle(FLOAT width, FLOAT height, FLOAT strokeThickness, ColorF brushColor) :
+    BaseElement(),
+    m_rect(D2D1::RectF(0.0f, 0.0f, width, height)),
+    m_strokeThickness(strokeThickness),
+    m_brushColor(brushColor)
+{
+}
+
+// Creates the solid-color brush on first initialization (unless one was
+// injected via SetBrush), then initializes the child tree.
+void Rectangle::Initialize()
+{
+    if (m_brush == nullptr)
+    {
+        const auto d2dcontext = m_deviceResources->GetD2DDeviceContext();
+        DX::ThrowIfFailed(d2dcontext->CreateSolidColorBrush(
+            m_brushColor,
+            &m_brush));
+    }
+
+    __super::Initialize();
+}
+
+// Draws the rectangle outline under this element's transform.
+void Rectangle::Render()
+{
+    auto d2dContext = m_deviceResources->GetD2DDeviceContext();
+    d2dContext->SetTransform(m_transformMatrix);
+    d2dContext->DrawRectangle(m_rect, m_brush.Get(), m_strokeThickness);
+    d2dContext->SetTransform(D2D1::IdentityMatrix());
+}
+
+// Constructs a filled rectangle anchored at the local origin.
+FilledRectangle::FilledRectangle(FLOAT width, FLOAT height, ColorF brushColor) :
+    BaseElement(),
+    m_rect(D2D1::RectF(0.0f, 0.0f, width, height)),
+    m_brushColor(brushColor)
+{
+}
+
+// Creates the solid-color brush on first initialization (unless one was
+// injected via SetBrush), then initializes the child tree.
+// FIX: the CreateSolidColorBrush HRESULT was silently ignored here while
+// Ellipse::Initialize and Rectangle::Initialize check it - now consistent.
+void FilledRectangle::Initialize()
+{
+    if (m_brush == nullptr)
+    {
+        const auto d2dcontext = m_deviceResources->GetD2DDeviceContext();
+        DX::ThrowIfFailed(d2dcontext->CreateSolidColorBrush(
+            m_brushColor,
+            &m_brush));
+    }
+    __super::Initialize();
+}
+
+// Fills the rectangle under this element's transform.
+void FilledRectangle::Render()
+{
+    auto d2dContext = m_deviceResources->GetD2DDeviceContext();
+    d2dContext->SetTransform(m_transformMatrix);
+    d2dContext->FillRectangle(m_rect, m_brush.Get());
+    d2dContext->SetTransform(D2D1::IdentityMatrix());
+}
diff --git a/Samples/360VideoPlayback/cpp/Common/BaseElement.h b/Samples/360VideoPlayback/cpp/Common/BaseElement.h
new file mode 100644
index 0000000000..e771206f6d
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/BaseElement.h
@@ -0,0 +1,217 @@
+#pragma once
+#include "StepTimer.h"
+#include "DeviceResources.h"
+
+#define FONT_SIZE 10.667f
+#define BUTTON_SIZE 64
+
+namespace DX
+{
+    // Margin in terms of left, top, right, bottom.
+    // "Pushes" the element away from its sibling elements and away from boundaries of its parent container.
+    // Does not change the intrinsic size of the element; the parent element will take the margin of its child elements into consideration during arrangement.
+    struct ElementMargin
+    {
+        ElementMargin() = default;
+
+        // All margins must be non-negative (debug-asserted).
+        ElementMargin(float l, float t, float r, float b) :
+            left(l),
+            top(t),
+            right(r),
+            bottom(b)
+        {
+            _ASSERTE(left >= 0.0f);
+            _ASSERTE(top >= 0.0f);
+            _ASSERTE(right >= 0.0f);
+            _ASSERTE(bottom >= 0.0f);
+        }
+
+        float left = 0.0f;
+        float top = 0.0f;
+        float right = 0.0f;
+        float bottom = 0.0f;
+    };
+
+    // Base class for 2D UI elements drawn into a control's D2D texture.
+    // Manages visibility, parent-relative translation, child layout, and the
+    // dirty flags that drive transform recalculation and render passes.
+    class BaseElement
+    {
+    public:
+        BaseElement();
+        virtual ~BaseElement() = default;
+        virtual void Initialize();
+        virtual void Update(const DX::StepTimer& timer, Windows::UI::Input::Spatial::SpatialPointerPose^ cameraPose);
+        void TryRender();
+        void SetIsVisible(bool visible);
+        bool GetIsVisible() { return m_isVisible; };
+        void SetElementSize(Windows::Foundation::Numerics::float2 size) { m_elementSize = size; }
+        bool GetIsRenderPassNeeded() { return m_isRenderPassNeeded; };
+        Windows::Foundation::Numerics::float2 GetElementSize() { return m_elementSize; };
+        // NOTE: This translation is the translation of the 2D BaseElement from the texture's origin
+        virtual void SetTranslation(Windows::Foundation::Numerics::float2 translation);
+        void SetParentTransform(Windows::Foundation::Numerics::float2 translation);
+        void AddChildElement(const std::shared_ptr& childElement);
+        void SetElementMargin(ElementMargin margin) { m_elementMargin = margin; };
+        void SetIgnoreLayout(bool ignoreLayout) { m_ignoreLayout = ignoreLayout; }
+        ElementMargin& GetElementMargin() { return m_elementMargin; };
+        bool GetIgnoreLayout() { return m_ignoreLayout; };
+        void SetLayoutHorizontal(bool isLayoutHorizontal) { m_isLayoutHorizontal = isLayoutHorizontal; }
+        void SetBrush(ID2D1SolidColorBrush* brush);
+        // Applies layout properties down the element tree.
+        // ArrangeElements must be manually called any time:
+        //  - The element's layout properties have changed (layout direction, content alignment, etc.)
+        //  - Any child's layout properties are updated.
+        //  - Children elements are added or removed.
+        virtual void ArrangeElements();
+
+    protected:
+        virtual void Render();
+
+        bool m_isInitialized;
+        bool m_transformRecalculationNeeded;
+        bool m_isRenderPassNeeded;
+        bool m_isVisible;
+        bool m_ignoreLayout;
+
+        Windows::Foundation::Numerics::float2 m_translation;
+        Windows::Foundation::Numerics::float2 m_transformedTranslation;
+        Windows::Foundation::Numerics::float2 m_parentTranslation;
+        // Width and height dimensions
+        Windows::Foundation::Numerics::float2 m_elementSize;
+        D2D1::Matrix3x2F m_transformMatrix = D2D1::Matrix3x2F::Identity();
+        bool m_isLayoutHorizontal = false;
+        ElementMargin m_elementMargin;
+
+        // Cached pointer to device resources.
+        std::shared_ptr m_deviceResources;
+        std::vector> m_childElements;
+        Microsoft::WRL::ComPtr m_brush;
+    };
+
+    // Manage focus across multiple Elements
+    class FocusableElement : public BaseElement
+    {
+    public:
+        FocusableElement();
+        virtual ~FocusableElement() = default;
+        virtual void Update(const DX::StepTimer& timer, Windows::UI::Input::Spatial::SpatialPointerPose^ cameraPose) override;
+        virtual bool IsFocused() { return m_isFocused; };
+        virtual void PerformAction() {};
+        virtual void SetParentControlInformation(bool focused, Windows::Foundation::Numerics::float2 focusedPosition, UINT parentTextureWidth, UINT parentTextureHeight);
+        // Hook invoked whenever this element's focus state changes.
+        virtual void FocusUpdate() {};
+    protected:
+        bool m_isFocused;
+        bool m_isParentFocused;
+        // If an Element needs to know where the intersection occurs relative to its coordinates, set m_isFocusIntersectionCalculated true.
+        // For example, the parentTexture might be 100x100 and your Element might be 50x50 centered on the texture and let's say the Focus is also the center of the texture.
+        // For most Elements, simply knowing that the coordinate 50,50 is inside the Element means it's focused. But some Elements such as Sliders need to know
+        // where in its Element Coordinate system the intersection occurred (so you can adjust the seekbar). In this case, by turning this parameter on, it will store the
+        // Element Coordinate Intersection into the below field. So sticking with the above example, it would store 25,25 since the Element is centered in the Parent Texture.
+        bool m_isFocusIntersectionCalculated;
+        Windows::Foundation::Numerics::float2 m_elementCoordIntersectionPoint;
+        UINT m_parentTextureWidth;
+        UINT m_parentTextureHeight;
+        Windows::Foundation::Numerics::float2 m_parentFocusedPosition;
+    };
+
+    // A DirectWrite text label; element size is derived from the text metrics.
+    class Text : public BaseElement
+    {
+    public:
+        Text(std::wstring text,
+            FLOAT fontSize,
+            DWRITE_FONT_WEIGHT fontWeight = DWRITE_FONT_WEIGHT_ULTRA_BOLD);
+        void SetText(std::wstring text);
+
+    protected:
+        virtual void Render() override;
+
+    private:
+        // Rebuilds the text format/layout and re-measures the element.
+        void CreateTextContext();
+        std::wstring m_text;
+        Microsoft::WRL::ComPtr m_textFormat;
+        Microsoft::WRL::ComPtr m_textLayout;
+        DWRITE_FONT_WEIGHT m_fontWeight;
+        float m_transformedFontSize;
+    };
+
+    // A straight stroked line between two points in element coordinates.
+    class Line : public BaseElement
+    {
+    public:
+        Line(FLOAT strokeThickness);
+        void SetStartPoint(D2D1_POINT_2F& point);
+        void SetEndPoint(D2D1_POINT_2F& point);
+
+    protected:
+        virtual void Render() override;
+
+    private:
+        FLOAT m_strokeThickness;
+        D2D1_POINT_2F m_lineStart;
+        D2D1_POINT_2F m_lineEnd;
+    };
+
+    // An outlined (stroked) ellipse sized by its radii.
+    class Ellipse : public BaseElement
+    {
+    public:
+        Ellipse(FLOAT radiusX, FLOAT radiusY, FLOAT strokeThickness, D2D1::ColorF brushColor);
+        virtual void Initialize() override;
+        void SetRadius(FLOAT x, FLOAT y);
+
+    protected:
+        virtual void Render() override;
+
+    private:
+        D2D1::ColorF m_brushColor;
+        FLOAT m_strokeThickness;
+        FLOAT m_radiusX;
+        FLOAT m_radiusY;
+        D2D1_ELLIPSE m_ellipse;
+    };
+
+    // A solid-filled ellipse sized by its radii.
+    class FilledEllipse : public BaseElement
+    {
+    public:
+        FilledEllipse(FLOAT radiusX, FLOAT radiusY, D2D1::ColorF brushColor);
+        virtual void Initialize() override;
+        void SetRadius(FLOAT x, FLOAT y);
+
+    protected:
+        virtual void Render() override;
+
+    private:
+        D2D1::ColorF m_brushColor;
+        FLOAT m_radiusX;
+        FLOAT m_radiusY;
+        D2D1_ELLIPSE m_ellipse;
+    };
+
+    // An outlined (stroked) rectangle anchored at the local origin.
+    class Rectangle : public BaseElement
+    {
+    public:
+        Rectangle(FLOAT width, FLOAT height, FLOAT strokeThickness, D2D1::ColorF brushColor);
+        virtual void Initialize() override;
+        void SetRect(const D2D1_RECT_F& rect) { m_rect = rect; }
+        FLOAT GetStrokeThickness() { return m_strokeThickness; };
+
+    protected:
+        virtual void Render() override;
+
+    private:
+        D2D1_RECT_F m_rect;
+        FLOAT m_strokeThickness;
+        D2D1::ColorF m_brushColor;
+    };
+
+    // A solid-filled rectangle anchored at the local origin.
+    class FilledRectangle : public BaseElement
+    {
+    public:
+        FilledRectangle(FLOAT width, FLOAT height, D2D1::ColorF brushColor);
+        virtual void Initialize() override;
+        void SetRect(const D2D1_RECT_F& rect) { m_rect = rect; }
+
+    protected:
+        virtual void Render() override;
+
+    private:
+        D2D1_RECT_F m_rect;
+        D2D1::ColorF m_brushColor;
+    };
+}
diff --git a/Samples/360VideoPlayback/cpp/Common/CameraResources.cpp b/Samples/360VideoPlayback/cpp/Common/CameraResources.cpp
new file mode 100644
index 0000000000..169d63c110
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/CameraResources.cpp
@@ -0,0 +1,274 @@
+#include "pch.h"
+
+#include "CameraResources.h"
+#include "Common\DirectXHelper.h"
+#include "DeviceResources.h"
+#include <windows.graphics.directx.direct3d11.interop.h>
+
+using namespace DirectX;
+using namespace Microsoft::WRL;
+using namespace Windows::Graphics::DirectX::Direct3D11;
+using namespace Windows::Graphics::Holographic;
+using namespace Windows::Perception::Spatial;
+
+DX::CameraResources::CameraResources(HolographicCamera^ camera) :
+ m_holographicCamera(camera),
+ m_isStereo(camera->IsStereo),
+ m_d3dRenderTargetSize(camera->RenderTargetSize)
+{
+ m_d3dViewport = CD3D11_VIEWPORT(
+ 0.f, 0.f,
+ m_d3dRenderTargetSize.Width,
+ m_d3dRenderTargetSize.Height
+ );
+};
+
+// Updates resources associated with a holographic camera's swap chain.
+// The app does not access the swap chain directly, but it does create
+// resource views for the back buffer.
+void DX::CameraResources::CreateResourcesForBackBuffer(
+ DX::DeviceResources* pDeviceResources,
+ HolographicCameraRenderingParameters^ cameraParameters
+ )
+{
+ const auto device = pDeviceResources->GetD3DDevice();
+
+ // Get the WinRT object representing the holographic camera's back buffer.
+ IDirect3DSurface^ surface = cameraParameters->Direct3D11BackBuffer;
+
+ // Get a DXGI interface for the holographic camera's back buffer.
+ // Holographic cameras do not provide the DXGI swap chain, which is owned
+ // by the system. The Direct3D back buffer resource is provided using WinRT
+ // interop APIs.
+ ComPtr<ID3D11Resource> resource;
+ ThrowIfFailed(
+ GetDXGIInterfaceFromObject(surface, IID_PPV_ARGS(&resource))
+ );
+
+ // Get a Direct3D interface for the holographic camera's back buffer.
+ ComPtr<ID3D11Texture2D> cameraBackBuffer;
+ ThrowIfFailed(
+ resource.As(&cameraBackBuffer)
+ );
+
+ // Determine if the back buffer has changed. If so, ensure that the render target view
+ // is for the current back buffer.
+ if (m_d3dBackBuffer.Get() != cameraBackBuffer.Get())
+ {
+ // This can change every frame as the system moves to the next buffer in the
+ // swap chain. This mode of operation will occur when certain rendering modes
+ // are activated.
+ m_d3dBackBuffer = cameraBackBuffer;
+
+ // Create a render target view of the back buffer.
+ // Creating this resource is inexpensive, and is better than keeping track of
+ // the back buffers in order to pre-allocate render target views for each one.
+ DX::ThrowIfFailed(
+ device->CreateRenderTargetView(
+ m_d3dBackBuffer.Get(),
+ nullptr,
+ &m_d3dRenderTargetView
+ )
+ );
+
+ // Get the DXGI format for the back buffer.
+ // This information can be accessed by the app using CameraResources::GetBackBufferDXGIFormat().
+ D3D11_TEXTURE2D_DESC backBufferDesc;
+ m_d3dBackBuffer->GetDesc(&backBufferDesc);
+ m_dxgiFormat = backBufferDesc.Format;
+
+ // Check for render target size changes.
+ Windows::Foundation::Size currentSize = m_holographicCamera->RenderTargetSize;
+ if (m_d3dRenderTargetSize != currentSize)
+ {
+ // Set render target size.
+ m_d3dRenderTargetSize = currentSize;
+
+ // A new depth stencil view is also needed.
+ m_d3dDepthStencilView.Reset();
+ }
+ }
+
+ // Refresh depth stencil resources, if needed.
+ if (m_d3dDepthStencilView == nullptr)
+ {
+ // Create a depth stencil view for use with 3D rendering if needed.
+ CD3D11_TEXTURE2D_DESC depthStencilDesc(
+ DXGI_FORMAT_D16_UNORM,
+ static_cast<UINT>(m_d3dRenderTargetSize.Width),
+ static_cast<UINT>(m_d3dRenderTargetSize.Height),
+ m_isStereo ? 2 : 1, // Create two textures when rendering in stereo.
+ 1, // Use a single mipmap level.
+ D3D11_BIND_DEPTH_STENCIL
+ );
+
+ ComPtr<ID3D11Texture2D> depthStencil;
+ DX::ThrowIfFailed(
+ device->CreateTexture2D(
+ &depthStencilDesc,
+ nullptr,
+ &depthStencil
+ )
+ );
+
+ CD3D11_DEPTH_STENCIL_VIEW_DESC depthStencilViewDesc(
+ m_isStereo ? D3D11_DSV_DIMENSION_TEXTURE2DARRAY : D3D11_DSV_DIMENSION_TEXTURE2D
+ );
+ DX::ThrowIfFailed(
+ device->CreateDepthStencilView(
+ depthStencil.Get(),
+ &depthStencilViewDesc,
+ &m_d3dDepthStencilView
+ )
+ );
+ }
+
+ // Create the constant buffer, if needed.
+ if (m_viewProjectionConstantBuffer == nullptr)
+ {
+ // Create a constant buffer to store view and projection matrices for the camera.
+ CD3D11_BUFFER_DESC constantBufferDesc(sizeof(ViewProjectionConstantBuffer), D3D11_BIND_CONSTANT_BUFFER);
+ DX::ThrowIfFailed(
+ device->CreateBuffer(
+ &constantBufferDesc,
+ nullptr,
+ &m_viewProjectionConstantBuffer
+ )
+ );
+ }
+}
+
+// Releases resources associated with a back buffer.
+void DX::CameraResources::ReleaseResourcesForBackBuffer(DX::DeviceResources* pDeviceResources)
+{
+ const auto context = pDeviceResources->GetD3DDeviceContext();
+
+ // Release camera-specific resources.
+ m_d3dBackBuffer.Reset();
+ m_d3dRenderTargetView.Reset();
+ m_d3dDepthStencilView.Reset();
+ m_viewProjectionConstantBuffer.Reset();
+
+ // Ensure system references to the back buffer are released by clearing the render
+ // target from the graphics pipeline state, and then flushing the Direct3D context.
+ ID3D11RenderTargetView* nullViews[D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT] = { nullptr };
+ context->OMSetRenderTargets(ARRAYSIZE(nullViews), nullViews, nullptr);
+ context->Flush();
+}
+
+// Updates the view/projection constant buffer for a holographic camera.
+void DX::CameraResources::UpdateViewProjectionBuffer(
+ std::shared_ptr<DX::DeviceResources> deviceResources,
+ HolographicCameraPose^ cameraPose,
+ SpatialCoordinateSystem^ coordinateSystem
+ )
+{
+ // The system changes the viewport on a per-frame basis for system optimizations.
+ m_d3dViewport = CD3D11_VIEWPORT(
+ cameraPose->Viewport.Left,
+ cameraPose->Viewport.Top,
+ cameraPose->Viewport.Width,
+ cameraPose->Viewport.Height
+ );
+
+ // The projection transform for each frame is provided by the HolographicCameraPose.
+ HolographicStereoTransform cameraProjectionTransform = cameraPose->ProjectionTransform;
+
+ // Get a container object with the view and projection matrices for the given
+ // pose in the given coordinate system.
+ Platform::IBox<HolographicStereoTransform>^ viewTransformContainer = cameraPose->TryGetViewTransform(coordinateSystem);
+
+ // If TryGetViewTransform returns a null pointer, that means the pose and coordinate
+ // system cannot be understood relative to one another; content cannot be rendered
+ // in this coordinate system for the duration of the current frame.
+ // This usually means that positional tracking is not active for the current frame, in
+ // which case it is possible to use a SpatialLocatorAttachedFrameOfReference to render
+ // content that is not world-locked instead.
+ DX::ViewProjectionConstantBuffer viewProjectionConstantBufferData;
+ bool viewTransformAcquired = viewTransformContainer != nullptr;
+ if (viewTransformAcquired)
+ {
+ // Otherwise, the set of view transforms can be retrieved.
+ HolographicStereoTransform viewCoordinateSystemTransform = viewTransformContainer->Value;
+
+ // Update the view matrices. Holographic cameras (such as Microsoft HoloLens) are
+ // constantly moving relative to the world. The view matrices need to be updated
+ // every frame.
+ XMStoreFloat4x4(
+ &viewProjectionConstantBufferData.viewProjection[0],
+ XMMatrixTranspose(XMLoadFloat4x4(&viewCoordinateSystemTransform.Left) * XMLoadFloat4x4(&cameraProjectionTransform.Left))
+ );
+ XMStoreFloat4x4(
+ &viewProjectionConstantBufferData.viewProjection[1],
+ XMMatrixTranspose(XMLoadFloat4x4(&viewCoordinateSystemTransform.Right) * XMLoadFloat4x4(&cameraProjectionTransform.Right))
+ );
+ }
+
+ // Use the D3D device context to update Direct3D device-based resources.
+ const auto context = deviceResources->GetD3DDeviceContext();
+
+ // Loading is asynchronous. Resources must be created before they can be updated.
+ if (context == nullptr || m_viewProjectionConstantBuffer == nullptr || !viewTransformAcquired)
+ {
+ m_framePending = false;
+ }
+ else
+ {
+ // Update the view and projection matrices.
+ context->UpdateSubresource(
+ m_viewProjectionConstantBuffer.Get(),
+ 0,
+ nullptr,
+ &viewProjectionConstantBufferData,
+ 0,
+ 0
+ );
+
+ m_framePending = true;
+ }
+}
+
+// Gets the view-projection constant buffer for the HolographicCamera and attaches it
+// to the shader pipeline.
+bool DX::CameraResources::AttachViewProjectionBuffer(
+ std::shared_ptr<DX::DeviceResources> deviceResources
+ )
+{
+ // This method uses Direct3D device-based resources.
+ const auto context = deviceResources->GetD3DDeviceContext();
+
+ // Loading is asynchronous. Resources must be created before they can be updated.
+ // Cameras can also be added asynchronously, in which case they must be initialized
+ // before they can be used.
+ if (context == nullptr || m_viewProjectionConstantBuffer == nullptr || m_framePending == false)
+ {
+ return false;
+ }
+
+ // Set the viewport for this camera.
+ context->RSSetViewports(1, &m_d3dViewport);
+
+ // Send the constant buffer to the vertex shader.
+ context->VSSetConstantBuffers(
+ 1,
+ 1,
+ m_viewProjectionConstantBuffer.GetAddressOf()
+ );
+
+ // The template includes a pass-through geometry shader that is used by
+ // default on systems that don't support the D3D11_FEATURE_D3D11_OPTIONS3::
+ // VPAndRTArrayIndexFromAnyShaderFeedingRasterizer extension. The shader
+ // will be enabled at run-time on systems that require it.
+ // If your app will also use the geometry shader for other tasks and those
+ // tasks require the view/projection matrix, uncomment the following line
+ // of code to send the constant buffer to the geometry shader as well.
+ /*context->GSSetConstantBuffers(
+ 1,
+ 1,
+ m_viewProjectionConstantBuffer.GetAddressOf()
+ );*/
+
+ m_framePending = false;
+
+ return true;
+}
diff --git a/Samples/360VideoPlayback/cpp/Common/CameraResources.h b/Samples/360VideoPlayback/cpp/Common/CameraResources.h
new file mode 100644
index 0000000000..64bfaccd76
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/CameraResources.h
@@ -0,0 +1,76 @@
+#pragma once
+
+namespace DX
+{
+ class DeviceResources;
+
+ // Constant buffer used to send the view-projection matrices to the shader pipeline.
+ struct ViewProjectionConstantBuffer
+ {
+ DirectX::XMFLOAT4X4 viewProjection[2];
+ };
+
+ // Assert that the constant buffer remains 16-byte aligned (best practice).
+ static_assert((sizeof(ViewProjectionConstantBuffer) % (sizeof(float) * 4)) == 0, "ViewProjection constant buffer size must be 16-byte aligned (16 bytes is the length of four floats).");
+
+ // Manages DirectX device resources that are specific to a holographic camera, such as the
+ // back buffer, ViewProjection constant buffer, and viewport.
+ class CameraResources
+ {
+ public:
+ CameraResources(Windows::Graphics::Holographic::HolographicCamera^ holographicCamera);
+
+ void CreateResourcesForBackBuffer(
+ DX::DeviceResources* pDeviceResources,
+ Windows::Graphics::Holographic::HolographicCameraRenderingParameters^ cameraParameters
+ );
+ void ReleaseResourcesForBackBuffer(
+ DX::DeviceResources* pDeviceResources
+ );
+
+ void UpdateViewProjectionBuffer(
+ std::shared_ptr<DX::DeviceResources> deviceResources,
+ Windows::Graphics::Holographic::HolographicCameraPose^ cameraPose,
+ Windows::Perception::Spatial::SpatialCoordinateSystem^ coordinateSystem);
+
+ bool AttachViewProjectionBuffer(
+ std::shared_ptr<DX::DeviceResources> deviceResources);
+
+ // Direct3D device resources.
+ ID3D11RenderTargetView* GetBackBufferRenderTargetView() const { return m_d3dRenderTargetView.Get(); }
+ ID3D11DepthStencilView* GetDepthStencilView() const { return m_d3dDepthStencilView.Get(); }
+ ID3D11Texture2D* GetBackBufferTexture2D() const { return m_d3dBackBuffer.Get(); }
+ D3D11_VIEWPORT GetViewport() const { return m_d3dViewport; }
+ DXGI_FORMAT GetBackBufferDXGIFormat() const { return m_dxgiFormat; }
+
+ // Render target properties.
+ Windows::Foundation::Size GetRenderTargetSize() const { return m_d3dRenderTargetSize; }
+ bool IsRenderingStereoscopic() const { return m_isStereo; }
+
+ // The holographic camera these resources are for.
+ Windows::Graphics::Holographic::HolographicCamera^ GetHolographicCamera() const { return m_holographicCamera; }
+
+ private:
+ // Direct3D rendering objects. Required for 3D.
+ Microsoft::WRL::ComPtr<ID3D11RenderTargetView> m_d3dRenderTargetView;
+ Microsoft::WRL::ComPtr<ID3D11DepthStencilView> m_d3dDepthStencilView;
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> m_d3dBackBuffer;
+
+ // Device resource to store view and projection matrices.
+ Microsoft::WRL::ComPtr<ID3D11Buffer> m_viewProjectionConstantBuffer;
+
+ // Direct3D rendering properties.
+ DXGI_FORMAT m_dxgiFormat;
+ Windows::Foundation::Size m_d3dRenderTargetSize;
+ D3D11_VIEWPORT m_d3dViewport;
+
+ // Indicates whether the camera supports stereoscopic rendering.
+ bool m_isStereo = false;
+
+ // Indicates whether this camera has a pending frame.
+ bool m_framePending = false;
+
+ // Pointer to the holographic camera these resources are for.
+ Windows::Graphics::Holographic::HolographicCamera^ m_holographicCamera = nullptr;
+ };
+}
diff --git a/Samples/360VideoPlayback/cpp/Common/Controls.cpp b/Samples/360VideoPlayback/cpp/Common/Controls.cpp
new file mode 100644
index 0000000000..fffad4154f
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/Controls.cpp
@@ -0,0 +1,247 @@
+#include "pch.h"
+#include "Controls.h"
+#include "DeviceResources.h"
+#include "DirectXHelper.h"
+#include "..\AppView.h"
+#include "..\VideoGallery.xaml.h"
+
+using namespace _360VideoPlayback;
+using namespace D2D1;
+using namespace DirectX;
+using namespace DX;
+using namespace Microsoft::WRL;
+using namespace std;
+using namespace Windows::Foundation::Numerics;
+using namespace Windows::Media::Core;
+using namespace Windows::Media::Playback;
+using namespace Windows::UI::Core;
+using namespace Windows::UI::Input::Spatial;
+using namespace Windows::UI::ViewManagement;
+
+const ColorF focusOutlineColor(0xFFFFFF);
+const ColorF pressedBackgroundFillColor(0x00AFFF);
+const ColorF enabledForegroundColor(0xFFFFFF);
+const float c_maxSecondsInPerformPressedFeedbackState = 0.25f;
+UINT DEFAULT_SIZE = 18;
+ElementMargin TEXT_MARGIN{ 15.0f, 12.0f, 15.0f, 12.0f };
+
+Button::Button(UINT initialSize, ButtonShape shape) :
+ FocusableElement(),
+ m_pressingTimeOut(0.0f),
+ m_shape(shape)
+{
+ m_elementSize = float2(static_cast<float>(initialSize));
+
+ // Add the button pressed UI element first so it appears under the focus rect.
+ if (m_shape == ButtonShape::Rectangle)
+ {
+ m_RectPressedBackgroundFill = make_shared<FilledRectangle>(m_elementSize.x, m_elementSize.y, pressedBackgroundFillColor);
+ m_RectFocusOutline = make_shared<Rectangle>(m_elementSize.x, m_elementSize.y, 1.5f, focusOutlineColor);
+ m_RectPressedBackgroundFill->SetIgnoreLayout(false);
+ m_RectFocusOutline->SetIgnoreLayout(false);
+ m_RectPressedBackgroundFill->SetIsVisible(false);
+ m_RectFocusOutline->SetIsVisible(false);
+ this->AddChildElement(m_RectPressedBackgroundFill);
+ this->AddChildElement(m_RectFocusOutline);
+
+ }
+ else if (m_shape == ButtonShape::Circle)
+ {
+ m_ElliPressedBackgroundFill = make_shared<FilledEllipse>(m_elementSize.x * 0.5f, m_elementSize.y * 0.5f, pressedBackgroundFillColor);
+ m_ElliFocusOutline = make_shared<Ellipse>(m_elementSize.x * 0.5f, m_elementSize.y* 0.5f, 5.0f, focusOutlineColor);
+ m_ElliPressedBackgroundFill->SetIgnoreLayout(false);
+ m_ElliFocusOutline->SetIgnoreLayout(false);
+ m_ElliPressedBackgroundFill->SetIsVisible(false);
+ m_ElliFocusOutline->SetIsVisible(false);
+ this->AddChildElement(m_ElliPressedBackgroundFill);
+ this->AddChildElement(m_ElliFocusOutline);
+ }
+}
+
+void Button::ArrangeElements()
+{
+ __super::ArrangeElements();
+
+ if (m_shape == ButtonShape::Rectangle)
+ {
+ // Update the focus rect to be the same size as the button.
+ FLOAT selectionBorderStrokeThickness = m_RectFocusOutline->GetStrokeThickness();
+ m_RectFocusOutline->SetRect({
+ selectionBorderStrokeThickness,
+ selectionBorderStrokeThickness,
+ m_elementSize.x - selectionBorderStrokeThickness,
+ m_elementSize.y - selectionBorderStrokeThickness });
+
+ m_RectPressedBackgroundFill->SetRect({ 0.0f, 0.0f, m_elementSize.x, m_elementSize.y });
+ }
+ else if (m_shape == ButtonShape::Circle)
+ {
+ m_ElliFocusOutline->SetRadius(m_elementSize.x * 0.5f, m_elementSize.y * 0.5f);
+ m_ElliPressedBackgroundFill->SetRadius(m_elementSize.x * 0.5f, m_elementSize.y * 0.5f);
+ }
+}
+
+void Button::Initialize()
+{
+ auto d2dcontext = m_deviceResources->GetD2DDeviceContext();
+
+ __super::Initialize();
+
+ // Recreate the brush for the glyphs, font, etc. This is the final brush we want to hold on to in the button
+ DX::ThrowIfFailed(
+ d2dcontext->CreateSolidColorBrush(
+ enabledForegroundColor,
+ &m_brush));
+}
+
+void Button::Update(const DX::StepTimer& timer, SpatialPointerPose^ cameraPose)
+{
+ __super::Update(timer, cameraPose);
+ if (m_buttonState == ButtonState::Pressing)
+ {
+ if (m_pressingTimeOut > m_maxSecondsInPressingState)
+ {
+ m_buttonState = ButtonState::Pressed;
+ if (m_shape == ButtonShape::Rectangle)
+ {
+ m_RectPressedBackgroundFill->SetIsVisible(false);
+ }
+ else
+ {
+ m_ElliPressedBackgroundFill->SetIsVisible(false);
+ }
+ Button::PerformAction();
+ m_pressingTimeOut = 0.0f;
+ }
+ else
+ {
+ m_pressingTimeOut += static_cast<float>(timer.GetElapsedSeconds());
+ if (m_shape == ButtonShape::Rectangle)
+ {
+ if (!m_RectPressedBackgroundFill->GetIsVisible())
+ {
+ m_RectPressedBackgroundFill->SetIsVisible(true);
+ }
+ }
+ else
+ {
+ if (!m_ElliPressedBackgroundFill->GetIsVisible())
+ {
+ m_ElliPressedBackgroundFill->SetIsVisible(true);
+ }
+ }
+ }
+ }
+}
+
+void Button::FocusUpdate()
+{
+ bool update = false;
+ if (this->IsFocused())
+ {
+ update = true;
+ }
+ (m_shape == ButtonShape::Rectangle) ? m_RectFocusOutline->SetIsVisible(update) : m_ElliFocusOutline->SetIsVisible(update);
+}
+
+void Button::PerformAction()
+{
+ // If the user invokes a button we need to trigger the invoked state
+ // and give visual feedback to the user that the button has been pressed.
+ if (m_buttonState == ButtonState::None)
+ {
+ m_buttonState = ButtonState::Pressing;
+ m_pressingTimeOut = 0.0f;
+ }
+ else if (m_buttonState == ButtonState::Pressed)
+ {
+ __super::PerformAction();
+ m_buttonState = ButtonState::None;
+ }
+}
+
+PlayPauseButton::PlayPauseButton(UINT buttonSize, UINT iconSize) :
+ Button(iconSize, ButtonShape::Circle)
+{
+ this->SetElementSize({ static_cast<float>(buttonSize), static_cast<float>(buttonSize) });
+ m_playText = std::make_shared<Text>(L"Play", FONT_SIZE * 4, DWRITE_FONT_WEIGHT::DWRITE_FONT_WEIGHT_NORMAL);
+ m_playText->SetElementMargin(TEXT_MARGIN);
+ this->AddChildElement(m_playText);
+
+ m_pauseText = std::make_shared<Text>(L"Pause", FONT_SIZE * 4, DWRITE_FONT_WEIGHT::DWRITE_FONT_WEIGHT_NORMAL);
+ m_pauseText->SetElementMargin(TEXT_MARGIN);
+ this->AddChildElement(m_pauseText);
+
+ this->ArrangeElements();
+}
+
+void PlayPauseButton::Initialize()
+{
+ __super::Initialize();
+ m_playText->SetBrush(m_brush.Get());
+ m_playText->Initialize();
+ m_pauseText->SetBrush(m_brush.Get());
+ m_pauseText->Initialize();
+}
+
+void PlayPauseButton::Update(const StepTimer& timer, SpatialPointerPose^ cameraPose)
+{
+ __super::Update(timer, cameraPose);
+
+ // Update the playback state.
+ bool isPlaying = (AppView::GetMediaPlayer()->PlaybackSession->PlaybackState == MediaPlaybackState::Playing);
+
+ // When the video is playing, you show the Pause Text since hitting the button in this state will pause the video
+ if (isPlaying)
+ {
+ m_playText->SetIsVisible(false);
+ m_pauseText->SetIsVisible(true);
+ }
+ // When the video is Paused, you show the Play Text since hitting the button in this state will play the video
+ else
+ {
+ m_playText->SetIsVisible(true);
+ m_pauseText->SetIsVisible(false);
+ }
+}
+
+void PlayPauseButton::PerformAction()
+{
+ __super::PerformAction();
+ if (AppView::GetMediaPlayer()->PlaybackSession->PlaybackState != MediaPlaybackState::None)
+ {
+ if (AppView::GetMediaPlayer()->PlaybackSession->PlaybackState == MediaPlaybackState::Playing)
+ {
+ AppView::GetMediaPlayer()->Pause();
+ }
+ else
+ {
+ AppView::GetMediaPlayer()->Play();
+ }
+ }
+}
+
+ExitButton::ExitButton() :
+ Button(DEFAULT_SIZE)
+{
+ m_exitText = std::make_shared<Text>(L"Exit Playback", FONT_SIZE * 2, DWRITE_FONT_WEIGHT::DWRITE_FONT_WEIGHT_NORMAL);
+ m_exitText->SetElementMargin(TEXT_MARGIN);
+ this->AddChildElement(m_exitText);
+
+ this->SetLayoutHorizontal(true);
+}
+
+void ExitButton::Initialize()
+{
+ __super::Initialize();
+
+ m_exitText->SetBrush(m_brush.Get());
+
+ m_exitText->Initialize();
+}
+
+void ExitButton::PerformAction()
+{
+ __super::PerformAction();
+ ApplicationViewSwitcher::SwitchAsync(VideoGallery::GetMainViewId(), ApplicationView::GetForCurrentView()->Id, ApplicationViewSwitchingOptions::ConsolidateViews);
+}
\ No newline at end of file
diff --git a/Samples/360VideoPlayback/cpp/Common/Controls.h b/Samples/360VideoPlayback/cpp/Common/Controls.h
new file mode 100644
index 0000000000..e7549a7326
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/Controls.h
@@ -0,0 +1,69 @@
+#pragma once
+#include "BaseElement.h"
+
+namespace DX
+{
+ enum class ButtonShape
+ {
+ Rectangle,
+ Circle,
+ };
+
+ enum class ButtonState
+ {
+ None,
+ Pressing,
+ Pressed
+ };
+
+ class Button : public FocusableElement
+ {
+ public:
+ Button(
+ UINT initialSize = BUTTON_SIZE,
+ ButtonShape shape = ButtonShape::Rectangle);
+
+ virtual void ArrangeElements() override;
+ virtual void Initialize() override;
+ virtual void Update(const DX::StepTimer& timer, Windows::UI::Input::Spatial::SpatialPointerPose^ cameraPose) override;
+ virtual void PerformAction() override;
+ virtual void FocusUpdate() override;
+ protected:
+ bool m_forceIconCentered;
+ Windows::Foundation::Numerics::float4x4 m_parentControlTransform;
+ Microsoft::WRL::ComPtr<ID2D1SolidColorBrush> m_brush;
+ ButtonState m_buttonState = ButtonState::None;
+ float m_pressingTimeOut;
+ const ButtonShape m_shape = ButtonShape::Rectangle;
+ const float m_maxSecondsInPressingState = 0.25f;
+ private:
+ std::shared_ptr<Rectangle> m_RectFocusOutline;
+ std::shared_ptr<FilledRectangle> m_RectPressedBackgroundFill;
+ std::shared_ptr<Ellipse> m_ElliFocusOutline;
+ std::shared_ptr<FilledEllipse> m_ElliPressedBackgroundFill;
+ };
+
+ class PlayPauseButton : public Button
+ {
+ public:
+ PlayPauseButton(UINT buttonSize, UINT iconSize);
+ virtual void Initialize() override;
+ virtual void Update(const DX::StepTimer& timer, Windows::UI::Input::Spatial::SpatialPointerPose^ cameraPose) override;
+ virtual void PerformAction() override;
+
+ private:
+ std::shared_ptr<Text> m_playText;
+ std::shared_ptr<Text> m_pauseText;
+ };
+
+ class ExitButton : public Button
+ {
+ public:
+ ExitButton();
+ virtual void Initialize() override;
+ virtual void PerformAction() override;
+
+ private:
+ std::shared_ptr<Text> m_exitText;
+ };
+}
diff --git a/Samples/360VideoPlayback/cpp/Common/DeviceResources.cpp b/Samples/360VideoPlayback/cpp/Common/DeviceResources.cpp
new file mode 100644
index 0000000000..340b7d087e
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/DeviceResources.cpp
@@ -0,0 +1,391 @@
+#include "pch.h"
+#include "DeviceResources.h"
+#include "DirectXHelper.h"
+
+#include <Collection.h>
+#include <windows.graphics.directx.direct3d11.interop.h>
+
+using namespace D2D1;
+using namespace Microsoft::WRL;
+using namespace Windows::Graphics::DirectX::Direct3D11;
+using namespace Windows::Graphics::Display;
+using namespace Windows::Graphics::Holographic;
+
+const FLOAT dxgiDefaultDPI = 96;
+
+// Constructor for DeviceResources.
+DX::DeviceResources::DeviceResources()
+{
+ CreateDeviceIndependentResources();
+}
+
+// Configures resources that don't depend on the Direct3D device.
+void DX::DeviceResources::CreateDeviceIndependentResources()
+{
+ // Initialize Direct2D resources.
+ D2D1_FACTORY_OPTIONS options{};
+
+#if defined(_DEBUG)
+ // If the project is in a debug build, enable Direct2D debugging via SDK Layers.
+ options.debugLevel = D2D1_DEBUG_LEVEL_INFORMATION;
+#endif
+
+ // Initialize the Direct2D Factory.
+ DX::ThrowIfFailed(
+ D2D1CreateFactory(
+ D2D1_FACTORY_TYPE_SINGLE_THREADED,
+ __uuidof(ID2D1Factory2),
+ &options,
+ &m_d2dFactory
+ )
+ );
+
+ // Initialize the DirectWrite Factory.
+ DX::ThrowIfFailed(
+ DWriteCreateFactory(
+ DWRITE_FACTORY_TYPE_SHARED,
+ __uuidof(IDWriteFactory2),
+ &m_dwriteFactory
+ )
+ );
+
+ // Initialize the Windows Imaging Component (WIC) Factory.
+ DX::ThrowIfFailed(
+ CoCreateInstance(
+ CLSID_WICImagingFactory2,
+ nullptr,
+ CLSCTX_INPROC_SERVER,
+ IID_PPV_ARGS(&m_wicFactory)
+ )
+ );
+}
+
+void DX::DeviceResources::SetHolographicSpace(HolographicSpace^ holographicSpace)
+{
+ // Cache the holographic space. Used to re-initialize during device-lost scenarios.
+ m_holographicSpace = holographicSpace;
+
+ InitializeUsingHolographicSpace();
+}
+
+void DX::DeviceResources::InitializeUsingHolographicSpace()
+{
+ // The holographic space might need to determine which adapter supports
+ // holograms, in which case it will specify a non-zero PrimaryAdapterId.
+ LUID id =
+ {
+ m_holographicSpace->PrimaryAdapterId.LowPart,
+ m_holographicSpace->PrimaryAdapterId.HighPart
+ };
+
+ // When a primary adapter ID is given to the app, the app should find
+ // the corresponding DXGI adapter and use it to create Direct3D devices
+ // and device contexts. Otherwise, there is no restriction on the DXGI
+ // adapter the app can use.
+ if ((id.HighPart != 0) || (id.LowPart != 0))
+ {
+ UINT createFlags = 0;
+#ifdef DEBUG
+ if (DX::SdkLayersAvailable())
+ {
+ createFlags |= DXGI_CREATE_FACTORY_DEBUG;
+ }
+#endif
+ // Create the DXGI factory.
+ ComPtr<IDXGIFactory1> dxgiFactory;
+ DX::ThrowIfFailed(
+ CreateDXGIFactory2(
+ createFlags,
+ IID_PPV_ARGS(&dxgiFactory)
+ )
+ );
+ ComPtr<IDXGIFactory4> dxgiFactory4;
+ DX::ThrowIfFailed(dxgiFactory.As(&dxgiFactory4));
+
+ // Retrieve the adapter specified by the holographic space.
+ DX::ThrowIfFailed(
+ dxgiFactory4->EnumAdapterByLuid(
+ id,
+ IID_PPV_ARGS(&m_dxgiAdapter)
+ )
+ );
+ }
+ else
+ {
+ m_dxgiAdapter.Reset();
+ }
+
+ CreateDeviceResources();
+
+ m_holographicSpace->SetDirect3D11Device(m_d3dInteropDevice);
+}
+
+// Configures the Direct3D device, and stores handles to it and the device context.
+void DX::DeviceResources::CreateDeviceResources()
+{
+ // This flag adds support for surfaces with a different color channel ordering
+ // than the API default. It is required for compatibility with Direct2D.
+ UINT creationFlags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
+
+#if defined(_DEBUG)
+ if (DX::SdkLayersAvailable())
+ {
+ // If the project is in a debug build, enable debugging via SDK Layers with this flag.
+ creationFlags |= D3D11_CREATE_DEVICE_DEBUG;
+ }
+#endif
+
+ // This array defines the set of DirectX hardware feature levels this app will support.
+ // Note the ordering should be preserved.
+ // Note that HoloLens supports feature level 11.1. The HoloLens emulator is also capable
+ // of running on graphics cards starting with feature level 10.0.
+ D3D_FEATURE_LEVEL featureLevels[] =
+ {
+ D3D_FEATURE_LEVEL_12_1,
+ D3D_FEATURE_LEVEL_12_0,
+ D3D_FEATURE_LEVEL_11_1,
+ D3D_FEATURE_LEVEL_11_0,
+ D3D_FEATURE_LEVEL_10_1,
+ D3D_FEATURE_LEVEL_10_0
+ };
+
+ D3D11_RASTERIZER_DESC rasterDesc{
+ D3D11_FILL_SOLID,
+ D3D11_CULL_NONE,
+ false,
+ 0,
+ 0.0f,
+ 0.0f,
+ true,
+ false,
+ true,
+ true };
+
+
+ // Create the Direct3D 11 API device object and a corresponding context.
+ ComPtr<ID3D11Device> device;
+ ComPtr<ID3D11DeviceContext> context;
+
+ const D3D_DRIVER_TYPE driverType = m_dxgiAdapter == nullptr ? D3D_DRIVER_TYPE_HARDWARE : D3D_DRIVER_TYPE_UNKNOWN;
+ const HRESULT hr = D3D11CreateDevice(
+ m_dxgiAdapter.Get(), // Either nullptr, or the primary adapter determined by Windows Holographic.
+ driverType, // Create a device using the hardware graphics driver.
+ 0, // Should be 0 unless the driver is D3D_DRIVER_TYPE_SOFTWARE.
+ creationFlags, // Set debug and Direct2D compatibility flags.
+ featureLevels, // List of feature levels this app can support.
+ ARRAYSIZE(featureLevels), // Size of the list above.
+ D3D11_SDK_VERSION, // Always set this to D3D11_SDK_VERSION for Windows Store apps.
+ &device, // Returns the Direct3D device created.
+ &m_d3dFeatureLevel, // Returns feature level of device created.
+ &context // Returns the device immediate context.
+ );
+
+ if (FAILED(hr))
+ {
+ // If the initialization fails, fall back to the WARP device.
+ // For more information on WARP, see:
+ // http://go.microsoft.com/fwlink/?LinkId=286690
+ DX::ThrowIfFailed(
+ D3D11CreateDevice(
+ nullptr, // Use the default DXGI adapter for WARP.
+ D3D_DRIVER_TYPE_WARP, // Create a WARP device instead of a hardware device.
+ 0,
+ creationFlags,
+ featureLevels,
+ ARRAYSIZE(featureLevels),
+ D3D11_SDK_VERSION,
+ &device,
+ &m_d3dFeatureLevel,
+ &context
+ )
+ );
+ }
+
+
+ // Store pointers to the Direct3D device and immediate context.
+ DX::ThrowIfFailed(
+ device.As(&m_d3dDevice)
+ );
+
+ DX::ThrowIfFailed(
+ context.As(&m_d3dContext)
+ );
+
+ // Acquire the DXGI interface for the Direct3D device.
+ ComPtr<IDXGIDevice3> dxgiDevice;
+ DX::ThrowIfFailed(
+ m_d3dDevice.As(&dxgiDevice)
+ );
+
+ // Wrap the native device using a WinRT interop object.
+ m_d3dInteropDevice = CreateDirect3DDevice(dxgiDevice.Get());
+
+ // Cache the DXGI adapter.
+ // This is for the case of no preferred DXGI adapter, or fallback to WARP.
+ ComPtr<IDXGIAdapter> dxgiAdapter;
+ DX::ThrowIfFailed(
+ dxgiDevice->GetAdapter(&dxgiAdapter)
+ );
+ DX::ThrowIfFailed(
+ dxgiAdapter.As(&m_dxgiAdapter)
+ );
+
+
+ DX::ThrowIfFailed(
+ m_d2dFactory->CreateDevice(dxgiDevice.Get(), &m_d2dDevice)
+ );
+
+ DX::ThrowIfFailed(
+ m_d2dDevice->CreateDeviceContext(
+ D2D1_DEVICE_CONTEXT_OPTIONS_NONE,
+ &m_d2dContext
+ )
+ );
+ // Set DPI that will be used when drawing to D2D
+ m_d2dContext->SetDpi(dxgiDefaultDPI, dxgiDefaultDPI);
+
+ // Check for device support for the optional feature that allows setting the render target array index from the vertex shader stage.
+ D3D11_FEATURE_DATA_D3D11_OPTIONS3 options;
+ m_d3dDevice->CheckFeatureSupport(D3D11_FEATURE_D3D11_OPTIONS3, &options, sizeof(options));
+ if (options.VPAndRTArrayIndexFromAnyShaderFeedingRasterizer)
+ {
+ m_supportsVprt = true;
+ }
+
+ ID3D11RasterizerState* praster;
+ DX::ThrowIfFailed(m_d3dDevice->CreateRasterizerState(&rasterDesc, &praster));
+ m_d3dContext->RSSetState(praster);
+
+ ComPtr<ID3D10Multithread> spMultithread;
+ DX::ThrowIfFailed(
+ m_d3dDevice->QueryInterface(IID_PPV_ARGS(&spMultithread))
+ );
+ spMultithread->SetMultithreadProtected(TRUE);
+}
+
+// Validates the back buffer for each HolographicCamera and recreates
+// resources for back buffers that have changed.
+// Locks the set of holographic camera resources until the function exits.
+void DX::DeviceResources::EnsureCameraResources(
+ HolographicFrame^ frame,
+ HolographicFramePrediction^ prediction)
+{
+ UseHolographicCameraResources([this, frame, prediction](std::map<UINT32, std::unique_ptr<CameraResources>>& cameraResourceMap)
+ {
+ for (HolographicCameraPose^ pose : prediction->CameraPoses)
+ {
+ HolographicCameraRenderingParameters^ renderingParameters = frame->GetRenderingParameters(pose);
+ CameraResources* pCameraResources = cameraResourceMap[pose->HolographicCamera->Id].get();
+
+ pCameraResources->CreateResourcesForBackBuffer(this, renderingParameters);
+ }
+ });
+}
+
+// Prepares to allocate resources and adds resource views for a camera.
+// Locks the set of holographic camera resources until the function exits.
+void DX::DeviceResources::AddHolographicCamera(HolographicCamera^ camera)
+{
+ UseHolographicCameraResources([this, camera](std::map<UINT32, std::unique_ptr<CameraResources>>& cameraResourceMap)
+ {
+ cameraResourceMap[camera->Id] = std::make_unique(camera);
+ });
+}
+
+// Deallocates resources for a camera and removes the camera from the set.
+// Locks the set of holographic camera resources until the function exits.
+void DX::DeviceResources::RemoveHolographicCamera(HolographicCamera^ camera)
+{
+ UseHolographicCameraResources([this, camera](std::map<UINT32, std::unique_ptr<CameraResources>>& cameraResourceMap)
+ {
+ CameraResources* pCameraResources = cameraResourceMap[camera->Id].get();
+
+ if (pCameraResources != nullptr)
+ {
+ pCameraResources->ReleaseResourcesForBackBuffer(this);
+ cameraResourceMap.erase(camera->Id);
+ }
+ });
+}
+
+// Recreate all device resources and set them back to the current state.
+// Locks the set of holographic camera resources until the function exits.
+void DX::DeviceResources::HandleDeviceLost()
+{
+ if (m_deviceNotify != nullptr)
+ {
+ m_deviceNotify->OnDeviceLost();
+ }
+
+ UseHolographicCameraResources([this](std::map<UINT32, std::unique_ptr<CameraResources>>& cameraResourceMap)
+ {
+ for (auto& pair : cameraResourceMap)
+ {
+ CameraResources* pCameraResources = pair.second.get();
+ pCameraResources->ReleaseResourcesForBackBuffer(this);
+ }
+ });
+
+ InitializeUsingHolographicSpace();
+
+ if (m_deviceNotify != nullptr)
+ {
+ m_deviceNotify->OnDeviceRestored();
+ }
+}
+
+// Register our DeviceNotify to be informed on device lost and creation.
+void DX::DeviceResources::RegisterDeviceNotify(DX::IDeviceNotify* deviceNotify)
+{
+ m_deviceNotify = deviceNotify;
+}
+
+// Call this method when the app suspends. It provides a hint to the driver that the app
+// is entering an idle state and that temporary buffers can be reclaimed for use by other apps.
+void DX::DeviceResources::Trim()
+{
+ m_d3dContext->ClearState();
+
+ ComPtr<IDXGIDevice3> dxgiDevice;
+ DX::ThrowIfFailed(m_d3dDevice.As(&dxgiDevice));
+ dxgiDevice->Trim();
+}
+
+// Present the contents of the swap chain to the screen.
+// Locks the set of holographic camera resources until the function exits.
+void DX::DeviceResources::Present(HolographicFrame^ frame)
+{
+ // By default, this API waits for the frame to finish before it returns.
+ // Holographic apps should wait for the previous frame to finish before
+ // starting work on a new frame. This allows for better results from
+ // holographic frame predictions.
+ HolographicFramePresentResult presentResult = frame->PresentUsingCurrentPrediction();
+
+ HolographicFramePrediction^ prediction = frame->CurrentPrediction;
+ UseHolographicCameraResources([this, prediction](std::map<UINT32, std::unique_ptr<CameraResources>>& cameraResourceMap)
+ {
+ for (auto cameraPose : prediction->CameraPoses)
+ {
+ // This represents the device-based resources for a HolographicCamera.
+ DX::CameraResources* pCameraResources = cameraResourceMap[cameraPose->HolographicCamera->Id].get();
+
+ // Discard the contents of the render target.
+ // This is a valid operation only when the existing contents will be
+ // entirely overwritten. If dirty or scroll rects are used, this call
+ // should be removed.
+ m_d3dContext->DiscardView(pCameraResources->GetBackBufferRenderTargetView());
+
+ // Discard the contents of the depth stencil.
+ m_d3dContext->DiscardView(pCameraResources->GetDepthStencilView());
+ }
+ });
+
+ // The PresentUsingCurrentPrediction API will detect when the graphics device
+ // changes or becomes invalid. When this happens, it is considered a Direct3D
+ // device lost scenario.
+ if (presentResult == HolographicFramePresentResult::DeviceRemoved)
+ {
+ // The Direct3D device, context, and resources should be recreated.
+ HandleDeviceLost();
+ }
+}
diff --git a/Samples/360VideoPlayback/cpp/Common/DeviceResources.h b/Samples/360VideoPlayback/cpp/Common/DeviceResources.h
new file mode 100644
index 0000000000..7dc952c4ee
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/DeviceResources.h
@@ -0,0 +1,112 @@
+
+#pragma once
+
+#include "CameraResources.h"
+
+namespace DX
+{
+ // Provides an interface for an application that owns DeviceResources to be notified of the device being lost or created.
+ interface IDeviceNotify
+ {
+ virtual void OnDeviceLost() = 0;
+ virtual void OnDeviceRestored() = 0;
+ };
+
+ // Creates and manages a Direct3D device and immediate context, Direct2D device and context (for debug), and the holographic swap chain.
+ class DeviceResources
+ {
+ public:
+ DeviceResources();
+
+ // Public methods related to Direct3D devices.
+ void HandleDeviceLost();
+ void RegisterDeviceNotify(IDeviceNotify* deviceNotify);
+ void Trim();
+ void Present(Windows::Graphics::Holographic::HolographicFrame^ frame);
+
+ // Public methods related to holographic devices.
+ void SetHolographicSpace(Windows::Graphics::Holographic::HolographicSpace^ space);
+ void EnsureCameraResources(
+ Windows::Graphics::Holographic::HolographicFrame^ frame,
+ Windows::Graphics::Holographic::HolographicFramePrediction^ prediction);
+
+ void AddHolographicCamera(Windows::Graphics::Holographic::HolographicCamera^ camera);
+ void RemoveHolographicCamera(Windows::Graphics::Holographic::HolographicCamera^ camera);
+
+ // Holographic accessors.
+ template<typename RetType, typename LCallback>
+ RetType UseHolographicCameraResources(const LCallback& callback);
+
+ Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice^
+ GetD3DInteropDevice() const { return m_d3dInteropDevice; }
+
+ // D3D accessors.
+ ID3D11Device4* GetD3DDevice() const { return m_d3dDevice.Get(); }
+ ID3D11DeviceContext3* GetD3DDeviceContext() const { return m_d3dContext.Get(); }
+ D3D_FEATURE_LEVEL GetDeviceFeatureLevel() const { return m_d3dFeatureLevel; }
+ bool GetDeviceSupportsVprt() const { return m_supportsVprt; }
+
+ // DXGI acessors.
+ IDXGIAdapter3* GetDXGIAdapter() const { return m_dxgiAdapter.Get(); }
+
+ // D2D accessors.
+ ID2D1Factory2* GetD2DFactory() const { return m_d2dFactory.Get(); }
+ ID2D1Device1* GetD2DDevice() const { return m_d2dDevice.Get(); }
+ ID2D1DeviceContext1* GetD2DDeviceContext() const { return m_d2dContext.Get(); }
+ IDWriteFactory2* GetDWriteFactory() const { return m_dwriteFactory.Get(); }
+ IWICImagingFactory2* GetWicImagingFactory() const { return m_wicFactory.Get(); }
+ Windows::Foundation::Size m_logicalSize;
+
+ private:
+ // Private methods related to the Direct3D device, and resources based on that device.
+ void CreateDeviceIndependentResources();
+ void InitializeUsingHolographicSpace();
+ void CreateDeviceResources();
+
+ // Direct3D objects.
+ Microsoft::WRL::ComPtr<ID3D11Device4> m_d3dDevice;
+ Microsoft::WRL::ComPtr<ID3D11DeviceContext3> m_d3dContext;
+ Microsoft::WRL::ComPtr<IDXGIAdapter3> m_dxgiAdapter;
+
+ // Direct3D interop objects.
+ Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice^ m_d3dInteropDevice;
+
+ // Direct2D factories.
+ Microsoft::WRL::ComPtr<ID2D1Factory2> m_d2dFactory;
+ Microsoft::WRL::ComPtr<ID2D1Device1> m_d2dDevice;
+ Microsoft::WRL::ComPtr<ID2D1DeviceContext1> m_d2dContext;
+ Microsoft::WRL::ComPtr<IDWriteFactory2> m_dwriteFactory;
+ Microsoft::WRL::ComPtr<IWICImagingFactory2> m_wicFactory;
+
+ // The holographic space provides a preferred DXGI adapter ID.
+ Windows::Graphics::Holographic::HolographicSpace^ m_holographicSpace = nullptr;
+
+ // Properties of the Direct3D device currently in use.
+ D3D_FEATURE_LEVEL m_d3dFeatureLevel = D3D_FEATURE_LEVEL_10_0;
+
+ // The IDeviceNotify can be held directly as it owns the DeviceResources.
+ IDeviceNotify* m_deviceNotify = nullptr;
+
+ // Whether or not the current Direct3D device supports the optional feature
+ // for setting the render target array index from the vertex shader stage.
+ bool m_supportsVprt = false;
+
+ // Back buffer resources, etc. for attached holographic cameras.
+ std::map<UINT32, std::unique_ptr<CameraResources>> m_cameraResources;
+ std::mutex m_cameraResourcesLock;
+ };
+}
+
+// Device-based resources for holographic cameras are stored in a std::map. Access this list by providing a
+// callback to this function, and the std::map will be guarded from add and remove
+// events until the callback returns. The callback is processed immediately and must
+// not contain any nested calls to UseHolographicCameraResources.
+// The callback takes a parameter of type std::map<UINT32, std::unique_ptr<CameraResources>>&
+// through which the list of cameras will be accessed.
+template<typename RetType, typename LCallback>
+RetType DX::DeviceResources::UseHolographicCameraResources(const LCallback& callback)
+{
+ std::lock_guard<std::mutex> guard(m_cameraResourcesLock);
+ return callback(m_cameraResources);
+}
+
diff --git a/Samples/360VideoPlayback/cpp/Common/DirectXHelper.h b/Samples/360VideoPlayback/cpp/Common/DirectXHelper.h
new file mode 100644
index 0000000000..f73985f6e3
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/DirectXHelper.h
@@ -0,0 +1,59 @@
+#pragma once
+
+#include <ppltasks.h> // For create_task
+
+namespace DX
+{
+ inline void ThrowIfFailed(HRESULT hr)
+ {
+ if (FAILED(hr))
+ {
+ // Set a breakpoint on this line to catch Win32 API errors.
+ throw Platform::Exception::CreateException(hr);
+ }
+ }
+
+ // Function that reads from a binary file asynchronously.
+ inline Concurrency::task<std::vector<byte>> ReadDataAsync(const std::wstring& filename)
+ {
+ using namespace Windows::Storage;
+ using namespace Concurrency;
+
+ return create_task(PathIO::ReadBufferAsync(Platform::StringReference(filename.c_str()))).then(
+ [] (Streams::IBuffer^ fileBuffer) -> std::vector<byte>
+ {
+ std::vector<byte> returnBuffer;
+ returnBuffer.resize(fileBuffer->Length);
+ Streams::DataReader::FromBuffer(fileBuffer)->ReadBytes(Platform::ArrayReference<byte>(returnBuffer.data(), static_cast<unsigned int>(returnBuffer.size())));
+ return returnBuffer;
+ });
+ }
+
+ // Converts a length in device-independent pixels (DIPs) to a length in physical pixels.
+ inline float ConvertDipsToPixels(float dips, float dpi)
+ {
+ constexpr float dipsPerInch = 96.0f;
+ return floorf(dips * dpi / dipsPerInch + 0.5f); // Round to nearest integer.
+ }
+
+#if defined(_DEBUG)
+ // Check for SDK Layer support.
+ inline bool SdkLayersAvailable()
+ {
+ HRESULT hr = D3D11CreateDevice(
+ nullptr,
+ D3D_DRIVER_TYPE_NULL, // There is no need to create a real hardware device.
+ 0,
+ D3D11_CREATE_DEVICE_DEBUG, // Check for the SDK layers.
+ nullptr, // Any feature level will do.
+ 0,
+ D3D11_SDK_VERSION, // Always set this to D3D11_SDK_VERSION for Windows Store apps.
+ nullptr, // No need to keep the D3D device reference.
+ nullptr, // No need to know the feature level.
+ nullptr // No need to keep the D3D device context reference.
+ );
+
+ return SUCCEEDED(hr);
+ }
+#endif
+}
diff --git a/Samples/360VideoPlayback/cpp/Common/MeshGeometry.cpp b/Samples/360VideoPlayback/cpp/Common/MeshGeometry.cpp
new file mode 100644
index 0000000000..02f7cee78e
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/MeshGeometry.cpp
@@ -0,0 +1,306 @@
+#include "pch.h"
+#include "MeshGeometry.h"
+#include "..\360VideoPlaybackMain.h"
+#include "DeviceResources.h"
+#include "DirectXHelper.h"
+#include "..\Content\ShaderStructures.h"
+
+using namespace _360VideoPlayback;
+using namespace DirectX;
+using namespace DX;
+using namespace Microsoft::WRL;
+using namespace std;
+using namespace Windows::Foundation::Numerics;
+
+shared_ptr<MeshGeometry> MeshGeometry::MakeTexturedCube()
+{
+ _360VideoPlayback::VertexPositionTexture cubeVertices[] =
+ {
+ { XMFLOAT3(-0.5f, 0.5f, -0.5f), XMFLOAT2(-1.0f, -1.0f) }, // +Y (top face)
+ { XMFLOAT3(0.5f, 0.5f, -0.5f), XMFLOAT2(-1.0f, -1.0f) },
+ { XMFLOAT3(0.5f, 0.5f, 0.5f), XMFLOAT2(-1.0f, -1.0f) },
+ { XMFLOAT3(-0.5f, 0.5f, 0.5f), XMFLOAT2(-1.0f, -1.0f) },
+
+ { XMFLOAT3(-0.5f, -0.5f, 0.5f), XMFLOAT2(-1.0f, -1.0f) }, // -Y (bottom face)
+ { XMFLOAT3(0.5f, -0.5f, 0.5f), XMFLOAT2(-1.0f, -1.0f) },
+ { XMFLOAT3(0.5f, -0.5f, -0.5f), XMFLOAT2(-1.0f, -1.0f) },
+ { XMFLOAT3(-0.5f, -0.5f, -0.5f), XMFLOAT2(-1.0f, -1.0f) },
+
+ { XMFLOAT3(0.5f, 0.5f, 0.5f), XMFLOAT2(-1.0f, -1.0f) }, // +X (right face)
+ { XMFLOAT3(0.5f, 0.5f, -0.5f), XMFLOAT2(-1.0f, -1.0f) },
+ { XMFLOAT3(0.5f, -0.5f, -0.5f), XMFLOAT2(-1.0f, -1.0f) },
+ { XMFLOAT3(0.5f, -0.5f, 0.5f), XMFLOAT2(-1.0f, -1.0f) },
+
+ { XMFLOAT3(-0.5f, 0.5f, -0.5f), XMFLOAT2(-1.0f, -1.0f) }, // -X (left face)
+ { XMFLOAT3(-0.5f, 0.5f, 0.5f), XMFLOAT2(-1.0f, -1.0f) },
+ { XMFLOAT3(-0.5f, -0.5f, 0.5f), XMFLOAT2(-1.0f, -1.0f) },
+ { XMFLOAT3(-0.5f, -0.5f, -0.5f), XMFLOAT2(-1.0f, -1.0f) },
+
+ { XMFLOAT3(-0.5f, 0.5f, 0.5f), XMFLOAT2(0.0f, 0.0f) }, // +Z (front face)
+ { XMFLOAT3(0.5f, 0.5f, 0.5f), XMFLOAT2(1.0f, 0.0f) }, // WARNING: This Front face is tied directly to the DEFAULT_CUBE_BOUNDING_PLANE. If you modify this, update both
+ { XMFLOAT3(0.5f, -0.5f, 0.5f), XMFLOAT2(1.0f, 1.0f) },
+ { XMFLOAT3(-0.5f, -0.5f, 0.5f), XMFLOAT2(0.0f, 1.0f) },
+
+ { XMFLOAT3(0.5f, 0.5f, -0.5f), XMFLOAT2(-1.0f, -1.0f) }, // -Z (back face)
+ { XMFLOAT3(-0.5f, 0.5f, -0.5f), XMFLOAT2(-1.0f, -1.0f) },
+ { XMFLOAT3(-0.5f, -0.5f, -0.5f), XMFLOAT2(-1.0f, -1.0f) },
+ { XMFLOAT3(0.5f, -0.5f, -0.5f), XMFLOAT2(-1.0f, -1.0f) },
+ };
+
+ unsigned short cubeIndices[] =
+ {
+ 0, 1, 2,
+ 0, 2, 3,
+
+ 4, 5, 6,
+ 4, 6, 7,
+
+ 8, 9, 10,
+ 8, 10, 11,
+
+ 12, 13, 14,
+ 12, 14, 15,
+
+ 16, 17, 18,
+ 16, 18, 19,
+
+ 20, 21, 22,
+ 20, 22, 23
+ };
+
+ ComPtr<ID3D11Buffer> vertexBuffer = CreateVertexBuffer(ARRAYSIZE(cubeVertices), cubeVertices);
+ ComPtr<ID3D11Buffer> indexBuffer = CreateIndexBuffer(ARRAYSIZE(cubeIndices), cubeIndices);
+
+ return make_shared<MeshGeometry>(
+ D3D11_PRIMITIVE_TOPOLOGY::D3D_PRIMITIVE_TOPOLOGY_TRIANGLELIST,
+ vertexBuffer,
+ indexBuffer,
+ static_cast<uint32>(sizeof(_360VideoPlayback::VertexPositionTexture)),
+ static_cast<uint32>(ARRAYSIZE(cubeIndices))
+ );
+}
+
+shared_ptr<MeshGeometry> MeshGeometry::MakeTexturedCylinder(unsigned short tessellation)
+{
+ const float frontFaceZ = 0.5f;
+ const float backFaceZ = -0.5f;
+ vector<VertexPositionTexture> vertices = {
+ {{ 0.0f, 0.0f, frontFaceZ }, { 0.5f, 0.5f }}, // Front face center vertex
+ {{ 0.0f, 0.0f, backFaceZ }, { 0.0f, 0.0f }} // Back face center vertex
+ };
+
+ const unsigned short frontFaceCenterVertexIndex = 0;
+ const unsigned short backFaceCenterVertexIndex = 1;
+
+ unsigned short frontFaceVerticesStartIndex = static_cast<unsigned short>(vertices.size());
+ auto frontFaceVertices = MeshGeometry::MakeCylinderFaceVertices(
+ tessellation,
+ frontFaceZ,
+ [](float x, float y) -> XMFLOAT2
+ {
+ const float u = x + 0.5f;
+ const float v = -y + 0.5f;
+
+ return { u, v };
+ }
+ );
+ copy(begin(frontFaceVertices), end(frontFaceVertices), back_inserter(vertices));
+
+ // While the vertex positions of the front face and the "front" side vertices are the same,
+ // we want the UV coordinates to be different.
+ // So duplicate the vertices and set the UV coordinates to [0,0]
+ unsigned short frontFaceSideVerticesStartIndex = static_cast<unsigned short>(vertices.size());
+ auto frontFaceSideVertices = MeshGeometry::MakeCylinderFaceVertices(
+ tessellation,
+ frontFaceZ,
+ [](float /*x*/, float /*y*/) -> XMFLOAT2
+ {
+ return { 0.0f, 0.0f };
+ }
+ );
+ copy(begin(frontFaceSideVertices), end(frontFaceSideVertices), back_inserter(vertices));
+
+ // Shared by the back face and "back" side vertices.
+ unsigned short backFaceVerticesStartIndex = static_cast<unsigned short>(vertices.size());
+ auto backFaceVertices = MeshGeometry::MakeCylinderFaceVertices(
+ tessellation,
+ backFaceZ,
+ [](float /*x*/, float /*y*/) -> XMFLOAT2
+ {
+ return { 0.0f, 0.0f };
+ }
+ );
+ copy(begin(backFaceVertices), end(backFaceVertices), back_inserter(vertices));
+
+ // Front face indices
+ vector<unsigned short> indices = MeshGeometry::MakeCylinderFaceIndices(
+ tessellation,
+ /*vertexIndexOffset*/ frontFaceVerticesStartIndex,
+ /*centerVertexIndex*/ frontFaceCenterVertexIndex,
+ /*isFrontFace*/ true);
+
+ // Back face indices.
+ // This may be shown brifly during intro/outro animations so we need to show it.
+ vector<unsigned short> backFaceIndices = MeshGeometry::MakeCylinderFaceIndices(
+ tessellation,
+ /*vertexIndexOffset*/ backFaceVerticesStartIndex,
+ /*centerVertexIndex*/ backFaceCenterVertexIndex,
+ /*isFrontFace*/ false);
+ copy(begin(backFaceIndices), end(backFaceIndices), back_inserter(indices));
+
+ // Side indices
+ vector<unsigned short> sideIndices = MeshGeometry::MakeCylinderSideIndices(
+ tessellation,
+ /*vertexIndexOffset*/ frontFaceSideVerticesStartIndex,
+ /*cylinderFaceVertexStride*/ static_cast<unsigned short>(frontFaceSideVertices.size()));
+ copy(begin(sideIndices), end(sideIndices), back_inserter(indices));
+
+ ComPtr<ID3D11Buffer> vertexBuffer = CreateVertexBuffer(
+ static_cast<unsigned int>(vertices.size()),
+ vertices.data()
+ );
+
+ ComPtr<ID3D11Buffer> indexBuffer = CreateIndexBuffer(
+ static_cast<unsigned int>(indices.size()),
+ indices.data());
+
+ return make_shared<MeshGeometry>(
+ D3D11_PRIMITIVE_TOPOLOGY::D3D_PRIMITIVE_TOPOLOGY_TRIANGLELIST,
+ vertexBuffer,
+ indexBuffer,
+ static_cast<uint32>(sizeof(VertexPositionTexture)),
+ static_cast<uint32>(indices.size())
+ );
+}
+
+vector<VertexPositionTexture> MeshGeometry::MakeCylinderFaceVertices(unsigned short tessellation, float z, function<XMFLOAT2(float, float)> uvGenerator)
+{
+ // Create a fan of triangles from the center of the circle.
+ // Reserve space for all vertices along the outer edges and the center vertex.
+ vector<VertexPositionTexture> vertices;
+ vertices.resize(tessellation + 1);
+ for (int i = 0; i < tessellation; i++)
+ {
+ const float angle = XM_2PI * (static_cast<float>(i) / tessellation);
+
+ const float x = 0.5f * cos(angle);
+ const float y = 0.5f * sin(angle);
+
+ const XMFLOAT3 pos{ x, y, z };
+ const XMFLOAT2 uv = uvGenerator(x, y);
+
+ vertices[i] = { pos, uv };
+ }
+
+ return vertices;
+}
+
+vector<unsigned short> MeshGeometry::MakeCylinderFaceIndices(unsigned short tessellation, unsigned short vertexIndexOffset, unsigned short centerVertexIndex, bool isFrontFace)
+{
+ vector<unsigned short> indices;
+ indices.resize(tessellation * 3);
+
+ for (unsigned short i = 0; i < tessellation; i++)
+ {
+ // The next 3 vertices are for the i-th triangle
+ const int baseTriangleIndex = (i * 3);
+
+ unsigned short currentVertexIndex = vertexIndexOffset + i;
+ unsigned short nextVertexIndex = vertexIndexOffset + ((i + 1) % tessellation);
+
+ if (isFrontFace)
+ {
+ // Clockwise ordering
+ indices[baseTriangleIndex] = currentVertexIndex;
+ indices[baseTriangleIndex + 1] = centerVertexIndex;
+ indices[baseTriangleIndex + 2] = nextVertexIndex;
+ }
+ else
+ {
+ // Counter-clockwise ordering
+ indices[baseTriangleIndex] = nextVertexIndex;
+ indices[baseTriangleIndex + 1] = centerVertexIndex;
+ indices[baseTriangleIndex + 2] = currentVertexIndex;
+ }
+ }
+
+ return indices;
+}
+
+vector<unsigned short> MeshGeometry::MakeCylinderSideIndices(unsigned short tessellation, unsigned short vertexIndexOffset, unsigned short cylinderFaceVertexStride)
+{
+ vector<unsigned short> indices;
+ indices.resize(tessellation * 6);
+
+ for (unsigned short i = 0; i < tessellation; i++)
+ {
+ // The next 6 vertices are for the i-th quad making up the i-th side of the cylinder
+ const int baseTriangleIndex = (i * 6);
+
+ unsigned short currentVertexIndexFrontFace = vertexIndexOffset + i;
+ unsigned short currentVertexIndexBackFace = vertexIndexOffset + i + cylinderFaceVertexStride;
+ unsigned short nextVertexIndexFrontFace = vertexIndexOffset + ((i + 1) % tessellation);
+ unsigned short nextVertexIndexBackFace = vertexIndexOffset + ((i + 1) % tessellation) + cylinderFaceVertexStride;
+
+ indices[baseTriangleIndex] = currentVertexIndexFrontFace;
+ indices[baseTriangleIndex + 1] = nextVertexIndexFrontFace;
+ indices[baseTriangleIndex + 2] = currentVertexIndexBackFace;
+
+ indices[baseTriangleIndex + 3] = nextVertexIndexFrontFace;
+ indices[baseTriangleIndex + 4] = nextVertexIndexBackFace;
+ indices[baseTriangleIndex + 5] = currentVertexIndexBackFace;
+ }
+
+ return indices;
+}
+
+ComPtr<ID3D11Buffer> MeshGeometry::CreateVertexBuffer(unsigned int numVertices, const VertexPositionTexture* vertexData)
+{
+ ComPtr<ID3D11Buffer> vertexBufferInternal;
+ D3D11_BUFFER_DESC VertexBufferDesc;
+ VertexBufferDesc.ByteWidth = sizeof(_360VideoPlayback::VertexPositionTexture) * numVertices;
+ VertexBufferDesc.Usage = D3D11_USAGE_DEFAULT;
+ VertexBufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
+ VertexBufferDesc.CPUAccessFlags = 0;
+ VertexBufferDesc.MiscFlags = 0;
+ VertexBufferDesc.StructureByteStride = 0;
+
+ D3D11_SUBRESOURCE_DATA VertexBufferData;
+ VertexBufferData.pSysMem = vertexData;
+ VertexBufferData.SysMemPitch = 0;
+ VertexBufferData.SysMemSlicePitch = 0;
+
+ DX::ThrowIfFailed(
+ _360VideoPlaybackMain::GetDeviceResources()->GetD3DDevice()->CreateBuffer(
+ &VertexBufferDesc,
+ &VertexBufferData,
+ &vertexBufferInternal));
+
+ return vertexBufferInternal;
+}
+
+ComPtr<ID3D11Buffer> MeshGeometry::CreateIndexBuffer(unsigned int numIndices, const unsigned short* indexData)
+{
+ ComPtr<ID3D11Buffer> indexBufferInternal;
+
+ D3D11_BUFFER_DESC IndexBufferDesc;
+ IndexBufferDesc.ByteWidth = sizeof(unsigned short) * numIndices;
+ IndexBufferDesc.Usage = D3D11_USAGE_DEFAULT;
+ IndexBufferDesc.BindFlags = D3D11_BIND_INDEX_BUFFER;
+ IndexBufferDesc.CPUAccessFlags = 0;
+ IndexBufferDesc.MiscFlags = 0;
+ IndexBufferDesc.StructureByteStride = 0;
+
+ D3D11_SUBRESOURCE_DATA IndexBufferData;
+ IndexBufferData.pSysMem = indexData;
+ IndexBufferData.SysMemPitch = 0;
+ IndexBufferData.SysMemSlicePitch = 0;
+
+ DX::ThrowIfFailed(
+ _360VideoPlaybackMain::GetDeviceResources()->GetD3DDevice()->CreateBuffer(
+ &IndexBufferDesc,
+ &IndexBufferData,
+ &indexBufferInternal));
+
+ return indexBufferInternal;
+}
diff --git a/Samples/360VideoPlayback/cpp/Common/MeshGeometry.h b/Samples/360VideoPlayback/cpp/Common/MeshGeometry.h
new file mode 100644
index 0000000000..b4b0cf20cd
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/MeshGeometry.h
@@ -0,0 +1,39 @@
+#pragma once
+#include "..\Content\ShaderStructures.h"
+namespace DX
+{
+ struct MeshGeometry
+ {
+ public:
+ const D3D11_PRIMITIVE_TOPOLOGY Topology;
+ const Microsoft::WRL::ComPtr<ID3D11Buffer> VertexBuffer;
+ const Microsoft::WRL::ComPtr<ID3D11Buffer> IndexBuffer;
+ const uint32 VertexStride;
+ const uint32 IndexCount;
+
+ static std::shared_ptr<MeshGeometry> MakeTexturedCube();
+ static std::shared_ptr<MeshGeometry> MakeTexturedCylinder(unsigned short tessellation);
+
+ MeshGeometry(
+ D3D11_PRIMITIVE_TOPOLOGY topology,
+ const Microsoft::WRL::ComPtr<ID3D11Buffer>& vertexBuffer,
+ const Microsoft::WRL::ComPtr<ID3D11Buffer>& indexBuffer,
+ uint32 vertexStride,
+ uint32 indexCount) :
+ Topology(topology),
+ VertexBuffer(vertexBuffer),
+ IndexBuffer(indexBuffer),
+ VertexStride(vertexStride),
+ IndexCount(indexCount)
+ {
+ }
+
+ private:
+ static Microsoft::WRL::ComPtr<ID3D11Buffer> CreateVertexBuffer(unsigned int numVertices, const _360VideoPlayback::VertexPositionTexture* vertexData);
+ static Microsoft::WRL::ComPtr<ID3D11Buffer> CreateIndexBuffer(unsigned int numIndices, const unsigned short* indexData);
+
+ static std::vector<_360VideoPlayback::VertexPositionTexture> MakeCylinderFaceVertices(unsigned short tessellation, float z, std::function<DirectX::XMFLOAT2(float, float)> uvGenerator);
+ static std::vector<unsigned short> MakeCylinderFaceIndices(unsigned short tessellation, unsigned short vertexIndexOffset, unsigned short centerVertexIndex, bool isFrontFace);
+ static std::vector<unsigned short> MakeCylinderSideIndices(unsigned short tessellation, unsigned short vertexIndexOffset, unsigned short cylinderFaceVertexStride);
+ };
+}
diff --git a/Samples/360VideoPlayback/cpp/Common/SeekBarElement.cpp b/Samples/360VideoPlayback/cpp/Common/SeekBarElement.cpp
new file mode 100644
index 0000000000..ae29231f83
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/SeekBarElement.cpp
@@ -0,0 +1,216 @@
+#include "pch.h"
+#include "SeekBarElement.h"
+#include "DeviceResources.h"
+#include "DirectXHelper.h"
+#include "..\360VideoPlaybackMain.h"
+#include "..\AppView.h"
+
+using namespace _360VideoPlayback;
+using namespace DX;
+using namespace Microsoft::WRL;
+using namespace Platform;
+using namespace Windows::Foundation;
+using namespace Windows::Foundation::Collections;
+using namespace Windows::Foundation::Numerics;
+using namespace Windows::Globalization::NumberFormatting;
+using namespace Windows::Media::Core;
+using namespace Windows::Media::Playback;
+using namespace Windows::UI::Input::Spatial;
+
+const long long s_hnsPerMillisecond = 10000;
+const int64 ticksPerSecond = 10000000LL;
+const UINT millisecondsPerSecond = 1000;
+const unsigned int END_OF_PLAYBACK_BUFFER_MS = 200;
+
+SeekBarElement::SeekBarElement(float elementWidth, float elementHeight, float leftRightPadding, float topBottomPadding) :
+ m_leftRightPadding(leftRightPadding),
+ m_topBottomPadding(topBottomPadding),
+ m_currentPlayPosition(0),
+ m_currentVideoDuration(0),
+ m_secondsElapsedSinceTimeUpdate(0.0)
+{
+ m_isFocusIntersectionCalculated = true;
+ m_elementSize = float2(elementWidth, elementHeight);
+ m_sliderPosition = 0.0f;
+ m_sliderWidth = max((m_elementSize.x - (m_leftRightPadding * 2.0f)), 0.0f);
+ m_sliderHeight = max((m_elementSize.y - (m_topBottomPadding * 2.0f)), 0.0f);
+
+ m_sliderFrontSegment = std::make_shared<LineElement>(m_sliderHeight);
+ AddChildElement(m_sliderFrontSegment);
+
+ m_sliderRemainingSegment = std::make_shared<LineElement>(m_sliderHeight);
+ AddChildElement(m_sliderRemainingSegment);
+
+ float seekBarYCoordinate = m_topBottomPadding + (m_sliderHeight * 0.5f);
+ m_sliderFrontSegmentStart = D2D1::Point2F(m_leftRightPadding, 0.0f);
+ m_sliderFrontSegment->SetStartPoint(m_sliderFrontSegmentStart);
+ m_sliderFrontSegment->SetTranslation({ 0.0f, seekBarYCoordinate });
+
+ m_sliderRemainingSegmentEnd = D2D1::Point2F(m_elementSize.x - m_leftRightPadding, 0.0f);
+ m_sliderRemainingSegment->SetEndPoint(m_sliderRemainingSegmentEnd);
+ m_sliderRemainingSegment->SetTranslation({ 0.0f, seekBarYCoordinate });
+ m_textSize = FONT_SIZE * 1.25f;
+ m_elapsedTimeText = std::make_shared<TextElement>(L"", m_textSize, DWRITE_FONT_WEIGHT_NORMAL);
+ AddChildElement(m_elapsedTimeText);
+
+ m_remainingTimeText = std::make_shared<TextElement>(L"", m_textSize, DWRITE_FONT_WEIGHT_NORMAL);
+ AddChildElement(m_remainingTimeText);
+}
+
+void SeekBarElement::Initialize()
+{
+ auto d2dcontext = m_deviceResources->GetD2DDeviceContext();
+ __super::Initialize();
+
+ DX::ThrowIfFailed(
+ d2dcontext->CreateSolidColorBrush(
+ D2D1::ColorF(D2D1::ColorF::White),
+ &m_frontSegmentBrush));
+
+ DX::ThrowIfFailed(
+ d2dcontext->CreateSolidColorBrush(
+ D2D1::ColorF(D2D1::ColorF::Gray),
+ &m_endSegmentBrush));
+
+ DX::ThrowIfFailed(
+ d2dcontext->CreateSolidColorBrush(
+ D2D1::ColorF(D2D1::ColorF::White),
+ &m_brush));
+
+ m_sliderFrontSegment->SetBrush(m_frontSegmentBrush.Get());
+ m_sliderFrontSegment->Initialize();
+
+ m_sliderRemainingSegment->SetBrush(m_endSegmentBrush.Get());
+ m_sliderRemainingSegment->Initialize();
+
+
+ m_elapsedTimeText->SetBrush(m_brush.Get());
+ m_elapsedTimeText->Initialize();
+ m_remainingTimeText->SetBrush(m_brush.Get());
+ m_remainingTimeText->Initialize();
+ // Text goes below the seekbar
+ float startYCoordinateOfText = m_elementSize.y - m_topBottomPadding + 4.0f;
+ m_elapsedTimeText->SetTranslation({ m_leftRightPadding, startYCoordinateOfText });
+ m_remainingTimeText->SetTranslation({ (m_elementSize.x - m_leftRightPadding - (FONT_SIZE * 3.0f)), startYCoordinateOfText });
+
+ UpdateLayoutFromCurrentTime();
+}
+
+void SeekBarElement::UpdateSliderPosition(float value)
+{
+ m_sliderPosition = value;
+ // Clamp [0,1]
+ if (m_sliderPosition < 0.0f)
+ {
+ m_sliderPosition = 0.0f;
+ }
+ else if (m_sliderPosition > 1.0f)
+ {
+ m_sliderPosition = 1.0f;
+ }
+
+ float sliderUIPosition = m_sliderPosition > 0.0f ? (m_sliderWidth * m_sliderPosition) + m_leftRightPadding : m_leftRightPadding;
+ m_sliderFrontSegmentEnd = D2D1::Point2F(sliderUIPosition, 0.0f);
+ m_sliderFrontSegment->SetEndPoint(m_sliderFrontSegmentEnd);
+ m_sliderRemainingSegmentStart = D2D1::Point2F(sliderUIPosition, 0.0f);
+ m_sliderRemainingSegment->SetStartPoint(m_sliderRemainingSegmentStart);
+}
+
+void SeekBarElement::Update(const DX::StepTimer& timer, SpatialPointerPose^ cameraPose)
+{
+ __super::Update(timer, cameraPose);
+ m_secondsElapsedSinceTimeUpdate += timer.GetElapsedSeconds();
+
+ // Update the current seek bar time no more than once every second.
+ if (m_secondsElapsedSinceTimeUpdate > 1.0)
+ {
+ UpdateLayoutFromCurrentTime();
+ m_secondsElapsedSinceTimeUpdate = 0.0;
+ }
+}
+
+void SeekBarElement::UpdateLayoutFromCurrentTime()
+{
+ m_currentVideoDuration = AppView::GetMediaPlayer()->PlaybackSession->NaturalDuration.Duration / s_hnsPerMillisecond;
+ if (m_currentVideoDuration != 0)
+ {
+ m_currentPlayPosition = AppView::GetMediaPlayer()->PlaybackSession->Position.Duration / s_hnsPerMillisecond;
+ if (m_currentPlayPosition > m_currentVideoDuration)
+ {
+ m_currentVideoDuration = m_currentPlayPosition;
+ }
+ }
+ // Update current and remaining time indicators
+ m_elapsedTimeString = FormatTimeString(m_currentPlayPosition)->Data();
+ m_remainingTimeString = FormatTimeString(m_currentVideoDuration - m_currentPlayPosition)->Data();
+ m_elapsedTimeText->SetText(m_elapsedTimeString);
+ m_remainingTimeText->SetText(m_remainingTimeString);
+
+ float currentTimeSliderPosition = static_cast<float>(m_currentVideoDuration) > 0.0f ? static_cast<float>(m_currentPlayPosition) / static_cast<float>(m_currentVideoDuration) : 0.0f;
+ UpdateSliderPosition(currentTimeSliderPosition);
+}
+
+void SeekBarElement::PerformAction()
+{
+ // Update the Slider's position
+ float xIntersectionPoint = m_elementCoordIntersectionPoint.x;
+ // Clamp before and after the slider
+ // Otherwise compute slider based on relative position
+ if (xIntersectionPoint < m_leftRightPadding)
+ {
+ UpdateSliderPosition(0.0f);
+ }
+ else if (xIntersectionPoint > (m_leftRightPadding + m_sliderWidth))
+ {
+ UpdateSliderPosition(1.0f);
+ }
+ else
+ {
+ UpdateSliderPosition((xIntersectionPoint - m_leftRightPadding) / m_sliderWidth);
+ }
+
+ unsigned int seekTime = static_cast<unsigned int>(m_currentVideoDuration * m_sliderPosition);
+
+ if (seekTime >= (static_cast<unsigned int>(AppView::GetMediaPlayer()->PlaybackSession->NaturalDuration.Duration / s_hnsPerMillisecond)) - END_OF_PLAYBACK_BUFFER_MS)
+ {
+ seekTime = (static_cast<unsigned int>(AppView::GetMediaPlayer()->PlaybackSession->NaturalDuration.Duration / s_hnsPerMillisecond)) - END_OF_PLAYBACK_BUFFER_MS;
+ }
+ auto seekTimeSpan = TimeSpan();
+ seekTimeSpan.Duration = static_cast<int64>(seekTime) * s_hnsPerMillisecond;
+ AppView::GetMediaPlayer()->PlaybackSession->Position = seekTimeSpan;
+}
+
+String^ SeekBarElement::FormatTimeString(int64 intervalValue)
+{
+ String^ formattedTimeSpan;
+ std::chrono::milliseconds duration{ intervalValue };
+ auto seconds = static_cast<uint32>(std::chrono::duration_cast<std::chrono::seconds>(duration).count());
+ auto minutes = static_cast<uint32>(std::chrono::duration_cast<std::chrono::minutes>(duration).count());
+ seconds -= minutes * 60;
+ auto hours = static_cast<uint32>(std::chrono::duration_cast<std::chrono::hours>(duration).count());
+ minutes -= hours * 60;
+
+ DecimalFormatter^ twoDigitNumberFormatter = ref new DecimalFormatter(
+ Windows::System::UserProfile::GlobalizationPreferences::Languages,
+ Windows::System::UserProfile::GlobalizationPreferences::HomeGeographicRegion);
+ twoDigitNumberFormatter->FractionDigits = 0;
+ twoDigitNumberFormatter->IsGrouped = false;
+ twoDigitNumberFormatter->IntegerDigits = 2;
+
+ if (hours > 0)
+ {
+ // Display Hours naturally, but display minutes and seconds using 2 digits
+ formattedTimeSpan = ref new String(
+ (twoDigitNumberFormatter->FormatUInt(hours) + L":" +
+ twoDigitNumberFormatter->FormatUInt(minutes) + L":" +
+ twoDigitNumberFormatter->FormatUInt(seconds))->Data());
+ }
+ else
+ {
+ // If the hour count is 0, then only display minutes and seconds
+ formattedTimeSpan = ref new String(
+ (twoDigitNumberFormatter->FormatUInt(minutes) + L":" +
+ twoDigitNumberFormatter->FormatUInt(seconds))->Data());
+ }
+ return formattedTimeSpan;
+}
diff --git a/Samples/360VideoPlayback/cpp/Common/SeekBarElement.h b/Samples/360VideoPlayback/cpp/Common/SeekBarElement.h
new file mode 100644
index 0000000000..7606747047
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/SeekBarElement.h
@@ -0,0 +1,51 @@
+#pragma once
+#include "BaseElement.h"
+
+namespace DX
+{
+ class SeekBarElement : public FocusableElement
+ {
+ public:
+ SeekBarElement(
+ float elementWidth,
+ float elementHeight = FONT_SIZE * 5.0f,
+ float leftRightPadding = FONT_SIZE * 2.0f,
+ float topBottomPadding = FONT_SIZE * 2.0f);
+
+ virtual void Initialize() override;
+ virtual void Update(const DX::StepTimer& timer, Windows::UI::Input::Spatial::SpatialPointerPose^ cameraPose) override;
+
+ virtual void PerformAction() override;
+
+
+ private:
+ void UpdateLayoutFromCurrentTime();
+ void UpdateSliderPosition(float position);
+ Platform::String^ FormatTimeString(int64 intervalValue);
+
+ float m_sliderPosition;
+ float m_sliderWidth;
+ float m_sliderHeight;
+ float m_leftRightPadding;
+ float m_topBottomPadding;
+ Microsoft::WRL::ComPtr<ID2D1SolidColorBrush> m_frontSegmentBrush;
+ Microsoft::WRL::ComPtr<ID2D1SolidColorBrush> m_endSegmentBrush;
+ D2D1_POINT_2F m_sliderFrontSegmentStart;
+ D2D1_POINT_2F m_sliderFrontSegmentEnd;
+ D2D1_POINT_2F m_sliderRemainingSegmentStart;
+ D2D1_POINT_2F m_sliderRemainingSegmentEnd;
+ std::shared_ptr<LineElement> m_sliderFrontSegment;
+ std::shared_ptr<LineElement> m_sliderRemainingSegment;
+ Microsoft::WRL::ComPtr<ID2D1SolidColorBrush> m_brush;
+ int64 m_currentVideoDuration;
+ int64 m_currentPlayPosition;
+ std::wstring m_elapsedTimeString;
+ std::wstring m_remainingTimeString;
+ std::shared_ptr<TextElement> m_elapsedTimeText;
+ std::shared_ptr<TextElement> m_remainingTimeText;
+ float m_textSize;
+ double m_secondsElapsedSinceTimeUpdate;
+
+ };
+
+}
\ No newline at end of file
diff --git a/Samples/360VideoPlayback/cpp/Common/StepTimer.h b/Samples/360VideoPlayback/cpp/Common/StepTimer.h
new file mode 100644
index 0000000000..eb9f8f6d72
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Common/StepTimer.h
@@ -0,0 +1,189 @@
+#pragma once
+
+namespace DX
+{
+    // Helper class for animation and simulation timing.
+    // Time is sampled with QueryPerformanceCounter and converted to a
+    // canonical format of 10,000,000 ticks per second. Tick() drives either
+    // a fixed- or variable-timestep update loop.
+    class StepTimer
+    {
+    public:
+        StepTimer() :
+            m_elapsedTicks(0),
+            m_totalTicks(0),
+            m_leftOverTicks(0),
+            m_frameCount(0),
+            m_framesPerSecond(0),
+            m_framesThisSecond(0),
+            m_qpcSecondCounter(0),
+            m_isFixedTimeStep(false),
+            m_targetElapsedTicks(TicksPerSecond / 60)
+        {
+            m_qpcFrequency = GetPerformanceFrequency();
+
+            // Start the clock now; otherwise the first Tick() would compute
+            // its delta from an uninitialized m_qpcLastTime (the max-delta
+            // clamp below would hide, not fix, the garbage value).
+            m_qpcLastTime = GetTicks();
+
+            // Initialize max delta to 1/10 of a second.
+            m_qpcMaxDelta = m_qpcFrequency / 10;
+        }
+
+        // Get elapsed time since the previous Update call.
+        uint64 GetElapsedTicks() const { return m_elapsedTicks; }
+        double GetElapsedSeconds() const { return TicksToSeconds(m_elapsedTicks); }
+
+        // Get total time since the start of the program.
+        uint64 GetTotalTicks() const { return m_totalTicks; }
+        double GetTotalSeconds() const { return TicksToSeconds(m_totalTicks); }
+
+        // Get total number of updates since start of the program.
+        uint32 GetFrameCount() const { return m_frameCount; }
+
+        // Get the current framerate.
+        uint32 GetFramesPerSecond() const { return m_framesPerSecond; }
+
+        // Set whether to use fixed or variable timestep mode.
+        void SetFixedTimeStep(bool isFixedTimestep) { m_isFixedTimeStep = isFixedTimestep; }
+
+        // Set how often to call Update when in fixed timestep mode.
+        void SetTargetElapsedTicks(uint64 targetElapsed) { m_targetElapsedTicks = targetElapsed; }
+        void SetTargetElapsedSeconds(double targetElapsed) { m_targetElapsedTicks = SecondsToTicks(targetElapsed); }
+
+        // Integer format represents time using 10,000,000 ticks per second.
+        static const uint64 TicksPerSecond = 10'000'000;
+
+        static double TicksToSeconds(uint64 ticks) { return static_cast<double>(ticks) / TicksPerSecond; }
+        static uint64 SecondsToTicks(double seconds) { return static_cast<uint64>(seconds * TicksPerSecond); }
+
+        // Convenient wrapper for QueryPerformanceFrequency. Throws an exception if
+        // the call to QueryPerformanceFrequency fails.
+        static inline uint64 GetPerformanceFrequency()
+        {
+            LARGE_INTEGER freq;
+            if (!QueryPerformanceFrequency(&freq))
+            {
+                throw ref new Platform::FailureException();
+            }
+            return freq.QuadPart;
+        }
+
+        // Gets the current number of ticks from QueryPerformanceCounter. Throws an
+        // exception if the call to QueryPerformanceCounter fails.
+        static inline int64 GetTicks()
+        {
+            LARGE_INTEGER ticks;
+            if (!QueryPerformanceCounter(&ticks))
+            {
+                throw ref new Platform::FailureException();
+            }
+            return ticks.QuadPart;
+        }
+
+        // After an intentional timing discontinuity (for instance a blocking IO operation)
+        // call this to avoid having the fixed timestep logic attempt a set of catch-up
+        // Update calls.
+
+        void ResetElapsedTime()
+        {
+            m_qpcLastTime = GetTicks();
+
+            m_leftOverTicks = 0;
+            m_framesPerSecond = 0;
+            m_framesThisSecond = 0;
+            m_qpcSecondCounter = 0;
+        }
+
+        // Update timer state, calling the specified Update function the appropriate number of times.
+        template <typename TUpdate>
+        void Tick(const TUpdate& update)
+        {
+            // Query the current time.
+            uint64 currentTime = GetTicks();
+            uint64 timeDelta = currentTime - m_qpcLastTime;
+
+            m_qpcLastTime = currentTime;
+            m_qpcSecondCounter += timeDelta;
+
+            // Clamp excessively large time deltas (e.g. after paused in the debugger).
+            if (timeDelta > m_qpcMaxDelta)
+            {
+                timeDelta = m_qpcMaxDelta;
+            }
+
+            // Convert QPC units into a canonical tick format. This cannot overflow due to the previous clamp.
+            timeDelta *= TicksPerSecond;
+            timeDelta /= m_qpcFrequency;
+
+            uint32 lastFrameCount = m_frameCount;
+
+            if (m_isFixedTimeStep)
+            {
+                // Fixed timestep update logic
+
+                // If the app is running very close to the target elapsed time (within 1/4 of a millisecond) just clamp
+                // the clock to exactly match the target value. This prevents tiny and irrelevant errors
+                // from accumulating over time. Without this clamping, a game that requested a 60 fps
+                // fixed update, running with vsync enabled on a 59.94 NTSC display, would eventually
+                // accumulate enough tiny errors that it would drop a frame. It is better to just round
+                // small deviations down to zero to leave things running smoothly.
+
+                if (abs(static_cast<int64>(timeDelta - m_targetElapsedTicks)) < TicksPerSecond / 4000)
+                {
+                    timeDelta = m_targetElapsedTicks;
+                }
+
+                m_leftOverTicks += timeDelta;
+
+                while (m_leftOverTicks >= m_targetElapsedTicks)
+                {
+                    m_elapsedTicks = m_targetElapsedTicks;
+                    m_totalTicks += m_targetElapsedTicks;
+                    m_leftOverTicks -= m_targetElapsedTicks;
+                    m_frameCount++;
+
+                    update();
+                }
+            }
+            else
+            {
+                // Variable timestep update logic.
+                m_elapsedTicks = timeDelta;
+                m_totalTicks += timeDelta;
+                m_leftOverTicks = 0;
+                m_frameCount++;
+
+                update();
+            }
+
+            // Track the current framerate.
+            if (m_frameCount != lastFrameCount)
+            {
+                m_framesThisSecond++;
+            }
+
+            if (m_qpcSecondCounter >= static_cast<uint64>(m_qpcFrequency))
+            {
+                m_framesPerSecond = m_framesThisSecond;
+                m_framesThisSecond = 0;
+                m_qpcSecondCounter %= m_qpcFrequency;
+            }
+        }
+
+    private:
+
+        // Source timing data uses QPC units.
+        uint64 m_qpcFrequency;
+        uint64 m_qpcLastTime;
+        uint64 m_qpcMaxDelta;
+
+        // Derived timing data uses a canonical tick format.
+        uint64 m_elapsedTicks;
+        uint64 m_totalTicks;
+        uint64 m_leftOverTicks;
+
+        // Members for tracking the framerate.
+        uint32 m_frameCount;
+        uint32 m_framesPerSecond;
+        uint32 m_framesThisSecond;
+        uint64 m_qpcSecondCounter;
+
+        // Members for configuring fixed timestep mode.
+        bool m_isFixedTimeStep;
+        uint64 m_targetElapsedTicks;
+    };
+}
diff --git a/Samples/360VideoPlayback/cpp/Content/GeometryShader.hlsl b/Samples/360VideoPlayback/cpp/Content/GeometryShader.hlsl
new file mode 100644
index 0000000000..4065a36e62
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Content/GeometryShader.hlsl
@@ -0,0 +1,31 @@
+// Per-vertex data from the vertex shader.
+struct GeometryShaderInput
+{
+    min16float4 pos : SV_POSITION;
+    min16float2 tex : TEXCOORD1;
+    uint instId : TEXCOORD0;
+};
+
+// Per-vertex data passed to the rasterizer.
+struct GeometryShaderOutput
+{
+    min16float4 pos : SV_POSITION;
+    min16float2 tex : TEXCOORD1;
+    uint rtvId : SV_RenderTargetArrayIndex;
+};
+
+// This geometry shader is a pass-through that leaves the geometry unmodified
+// and sets the render target array index (selecting the stereo eye) from the
+// per-instance id forwarded by the vertex shader.
+// Fix: TriangleStream is a template and requires its output-vertex type.
+[maxvertexcount(3)]
+void main(triangle GeometryShaderInput input[3], inout TriangleStream<GeometryShaderOutput> outStream)
+{
+    GeometryShaderOutput output;
+    [unroll(3)]
+    for (int i = 0; i < 3; ++i)
+    {
+        output.pos = input[i].pos;
+        output.tex = input[i].tex;
+        output.rtvId = input[i].instId;
+        outStream.Append(output);
+    }
+}
diff --git a/Samples/360VideoPlayback/cpp/Content/GlassGeometryShader.hlsl b/Samples/360VideoPlayback/cpp/Content/GlassGeometryShader.hlsl
new file mode 100644
index 0000000000..d0def7e652
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Content/GlassGeometryShader.hlsl
@@ -0,0 +1,34 @@
+// Per-vertex data from the vertex shader.
+struct GeometryShaderInput
+{
+    min16float4 pos : SV_POSITION;
+    min16float2 tex : TEXCOORD1;
+    float4 worldCoord : TEXCOORD2;
+    uint instId : TEXCOORD0;
+};
+
+// Per-vertex data passed to the rasterizer.
+struct GeometryShaderOutput
+{
+    min16float4 pos : SV_POSITION;
+    min16float2 tex : TEXCOORD1;
+    float4 worldCoord : TEXCOORD2;
+    uint rtvId : SV_RenderTargetArrayIndex;
+};
+
+// This geometry shader is a pass-through that leaves the geometry unmodified
+// (including the world coordinate used by the glass pixel shader) and sets
+// the render target array index from the per-instance id.
+// Fix: TriangleStream is a template and requires its output-vertex type.
+[maxvertexcount(3)]
+void main(triangle GeometryShaderInput input[3], inout TriangleStream<GeometryShaderOutput> outStream)
+{
+    GeometryShaderOutput output;
+    [unroll(3)]
+    for (int i = 0; i < 3; ++i)
+    {
+        output.pos = input[i].pos;
+        output.tex = input[i].tex;
+        output.worldCoord = input[i].worldCoord;
+        output.rtvId = input[i].instId;
+        outStream.Append(output);
+    }
+}
diff --git a/Samples/360VideoPlayback/cpp/Content/GlassPixelShader.hlsl b/Samples/360VideoPlayback/cpp/Content/GlassPixelShader.hlsl
new file mode 100644
index 0000000000..1fcbf3bb42
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Content/GlassPixelShader.hlsl
@@ -0,0 +1,41 @@
+cbuffer FocusPointConstantBuffer : register(b1)
+{
+    min16float4 focusPointOrigin;
+    min16float4 focusPointDirection;
+    min16float4 focusPointColor;
+    min16float4 focusPointRadius; // Using XMFLOAT4 for byte alignment
+    min16float4 focusPointIntensity; // Using XMFLOAT4 for byte alignment
+};
+
+Texture2D Texture : register(t0);
+SamplerState Sampler : register(s0);
+
+// Per-pixel color data passed through the pixel shader.
+struct PixelShaderInput
+{
+    min16float4 pos : SV_POSITION;
+    min16float2 tex : TEXCOORD1;
+    float4 worldCoord : TEXCOORD2;
+};
+
+// Apply the glass shader color and look to anything marked as glass: sample
+// the base texture, then blend in the focus-point light color. The light's
+// strength falls off linearly with the pixel's distance from the gaze ray.
+min16float4 main(PixelShaderInput input) : SV_TARGET
+{
+    float4 baseColor = Texture.Sample(Sampler, input.tex);
+
+    // Distance from this pixel to the gaze ray (origin + t * direction):
+    // |origin->pixel| * sin(angle between the ray and origin->pixel).
+    float3 pixelWorldPos = input.worldCoord.xyz;
+    float3 gazeDirection = normalize(float3(focusPointDirection.x, focusPointDirection.y, focusPointDirection.z));
+    float3 originToPixel = pixelWorldPos - float3(focusPointOrigin.x, focusPointOrigin.y, focusPointOrigin.z);
+    float angleToPixel = acos(dot(gazeDirection, normalize(originToPixel)));
+    float distanceToGazeRay = sin(angleToPixel) * length(originToPixel);
+
+    // Linear falloff: full intensity on the ray, zero at the falloff radius.
+    float lightAmount = focusPointIntensity.x * (1.0f - saturate(distanceToGazeRay / focusPointRadius.x));
+
+    float4 litColor = lerp(baseColor, focusPointColor, lightAmount);
+    return (min16float4)litColor;
+}
\ No newline at end of file
diff --git a/Samples/360VideoPlayback/cpp/Content/GlassVertexShader.hlsl b/Samples/360VideoPlayback/cpp/Content/GlassVertexShader.hlsl
new file mode 100644
index 0000000000..54b182161e
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Content/GlassVertexShader.hlsl
@@ -0,0 +1,61 @@
+// A constant buffer that stores the model transform.
+cbuffer ModelConstantBuffer : register(b0)
+{
+    float4x4 model;
+};
+
+// A constant buffer that stores each set of view and projection matrices in column-major format.
+cbuffer ViewProjectionConstantBuffer : register(b1)
+{
+    float4x4 viewProjection[2];
+};
+
+// Per-vertex data used as input to the vertex shader.
+struct VertexShaderInput
+{
+    min16float3 pos : POSITION;
+    min16float2 tex : TEXCOORD1;
+    uint instId : SV_InstanceID;
+};
+
+// Per-vertex data passed to the geometry shader.
+// Note that the render target array index will be set by the geometry shader
+// using the value of viewId.
+struct VertexShaderOutput
+{
+    min16float4 pos : SV_POSITION;
+    min16float2 tex : TEXCOORD1;
+    float4 worldCoord : TEXCOORD2;
+    uint viewId : TEXCOORD0; // SV_InstanceID % 2
+};
+
+// Vertex shader: transforms each vertex to world and clip space, forwarding
+// the world-space position so the glass pixel shader can light it.
+VertexShaderOutput main(VertexShaderInput input)
+{
+    VertexShaderOutput output;
+
+    // Select the eye (and matching view/projection matrix) for this instance.
+    // Instanced stereo draws two copies of each instance, one per eye, so the
+    // instance id modulo 2 identifies the view.
+    const int viewIndex = input.instId % 2;
+
+    // Model space -> world space; the world position feeds the pixel shader.
+    float4 worldPos = mul(float4(input.pos, 1.0f), model);
+    output.worldCoord = worldPos;
+
+    // World space -> clip space for the selected eye.
+    output.pos = (min16float4)mul(worldPos, viewProjection[viewIndex]);
+
+    // Texture coordinates pass straight through.
+    output.tex = input.tex;
+
+    // The pass-through geometry shader copies this into SV_RenderTargetArrayIndex.
+    output.viewId = viewIndex;
+
+    return output;
+}
diff --git a/Samples/360VideoPlayback/cpp/Content/MediaTransportControls.cpp b/Samples/360VideoPlayback/cpp/Content/MediaTransportControls.cpp
new file mode 100644
index 0000000000..50ac6258aa
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Content/MediaTransportControls.cpp
@@ -0,0 +1,204 @@
+#include "pch.h"
+#include "MediaTransportControls.h"
+#include "..\Common\DirectXHelper.h"
+#include "..\Common\Controls.h"
+#include "VideoRenderer.h"
+#include "..\Common\SeekbarElement.h"
+#include "..\Common\MeshGeometry.h"
+
+using namespace _360VideoPlayback;
+using namespace DX;
+using namespace DirectX;
+using namespace Microsoft::WRL;
+using namespace std;
+using namespace Windows::Foundation::Numerics;
+using namespace Windows::UI::Input::Spatial;
+
+// Layout constants for the media transport controls. Positions and scales
+// are in the MTC's local space (presumably meters, the holographic
+// convention — TODO confirm against BaseControl).
+const float MAX_INACTIVE_TIME_TO_HIDE = 5.0f; //In seconds
+const float3 EXITBUTTON_SCALE{ 0.25f, 0.25f, 0.025f };
+const float3 EXITBUTTON_POSITION{ 0.0f, -0.2f, 0.0f };
+const float3 PLAYPAUSE_SCALE{ 0.25f, 0.25f, 0.025f };
+const float3 PLAYPAUSE_POSITION{ 0.0f, 0.1f, 0.0f };
+const float3 SEEKBAR_POSITION{ 0.0f, -0.08f, 0.0f };
+// Tessellation segment count for the play/pause button's cylinder mesh.
+const unsigned short PLAYPAUSE_TESSELATION = 24;
+// Texture sizes (pixels) for the play/pause element and its glyph.
+const UINT PLAYPAUSE_CONTAINER_SIZE = BUTTON_SIZE * 2;
+const UINT PLAYPAUSE_TEXT_SIZE = PLAYPAUSE_CONTAINER_SIZE / 2;
+
+// Media transport controls start hidden, with an identity local transform,
+// until Show() places them in front of the user.
+// Members are set in the initializer list (declaration order) instead of by
+// assignment in the body.
+MediaTransportControls::MediaTransportControls() :
+    m_isVisible(false),
+    m_shadersLoaded(false),
+    m_rotation(0.0f, 0.0f, 0.0f, 0.0f),
+    m_position(0.0f, 0.0f, 0.0f),
+    m_transform(float4x4::identity())
+{
+}
+
+// Build the three controls that make up the MTC: play/pause button,
+// seek bar, and exit button.
+void MediaTransportControls::Initialize()
+{
+    AddPlayPauseButton();
+    AddSeekbar();
+    AddExitButton();
+}
+
+// Creates the play/pause button: a 2D element rendered onto a textured
+// cylinder control, sized to the element, and registered with the control list.
+// NOTE(review): the std::make_shared calls below are missing their template
+// argument lists (the element/control types) — apparently stripped by an
+// angle-bracket-eating conversion; restore them from the original source.
+void MediaTransportControls::AddPlayPauseButton()
+{
+ auto playPauseButtonElement = std::make_shared(PLAYPAUSE_CONTAINER_SIZE, PLAYPAUSE_TEXT_SIZE);
+ playPauseButtonElement->Initialize();
+
+ // Play/pause button control
+ m_playPauseButtonControl = std::make_shared(MeshGeometry::MakeTexturedCylinder(PLAYPAUSE_TESSELATION));
+ m_playPauseButtonControl->SetScale(PLAYPAUSE_SCALE);
+ m_playPauseButtonControl->SetPosition(PLAYPAUSE_POSITION);
+ m_playPauseButtonControl->SnapInitialSizeToRootElement();
+ m_playPauseButtonControl->SetRenderElement(playPauseButtonElement);
+ m_playPauseButtonControl->Initialize();
+ m_baseControls.push_back(m_playPauseButtonControl);
+}
+
+// Creates the seek bar: a 300x50 2D element with 5% horizontal and 44%
+// vertical padding, hosted on a control positioned below the play/pause button.
+// NOTE(review): the std::make_shared and static_cast calls below are missing
+// their template argument lists — apparently stripped by an
+// angle-bracket-eating conversion; restore them from the original source.
+void MediaTransportControls::AddSeekbar()
+{
+ auto seekBarStripSize = float2(300.0f, 50.0f);
+
+ auto seekbarElement = std::make_shared(
+ static_cast(seekBarStripSize.x),
+ static_cast(seekBarStripSize.y),
+ static_cast(seekBarStripSize.x) * 0.05f, /*left/right padding*/
+ static_cast(seekBarStripSize.y) * 0.44f /*bottom/top padding*/);
+
+ seekbarElement->Initialize();
+ // Align the element to the bottom of the texture strip.
+ seekbarElement->SetTranslation(float2(0.0f, static_cast(seekBarStripSize.y) - seekbarElement->GetElementSize().y));
+
+ m_seekbarControl = std::make_shared();
+ m_seekbarControl->SetTextureWidth(static_cast(seekBarStripSize.x));
+ m_seekbarControl->SetTextureHeight(static_cast(seekBarStripSize.y));
+ m_seekbarControl->SetScale(float3(0.25f, 0.25f, 0.25f));
+ m_seekbarControl->SetPosition(SEEKBAR_POSITION);
+ m_seekbarControl->SetRenderElement(seekbarElement);
+ m_seekbarControl->Initialize();
+
+ m_baseControls.push_back(m_seekbarControl);
+}
+
+// Creates the exit button control below the seek bar.
+// NOTE(review): the std::make_shared calls below are missing their template
+// argument lists (the element/control types) — apparently stripped by an
+// angle-bracket-eating conversion; restore them from the original source.
+void MediaTransportControls::AddExitButton()
+{
+ // Exit button element
+ auto exitButtonElement = std::make_shared();
+ exitButtonElement->ArrangeElements();
+ exitButtonElement->Initialize();
+
+ // Exit button control
+ m_exitButtonControl = std::make_shared();
+ m_exitButtonControl->SetScale(EXITBUTTON_SCALE);
+ m_exitButtonControl->SetPosition(EXITBUTTON_POSITION);
+ m_exitButtonControl->SnapInitialSizeToRootElement();
+ m_exitButtonControl->SetRenderElement(exitButtonElement);
+ m_exitButtonControl->Initialize();
+
+ m_baseControls.push_back(m_exitButtonControl);
+}
+
+// Rebuilds the MTC's local transform from its position/rotation, runs the
+// auto-hide inactivity timer, and forwards the update to every child control.
+// Fix: restores the stripped <float> argument on static_cast —
+// GetElapsedSeconds() returns double while the counter member is float.
+void MediaTransportControls::Update(const StepTimer& timer, SpatialPointerPose^ cameraPose)
+{
+    // Compose rotation then translation into the controls' parent transform.
+    const XMMATRIX modelRotation = XMMatrixRotationQuaternion(XMLoadFloat4(&m_rotation));
+    const XMMATRIX modelTranslation = XMMatrixTranslationFromVector(XMLoadFloat3(&m_position));
+    const XMMATRIX localTransform = XMMatrixMultiply(modelRotation, modelTranslation);
+    XMStoreFloat4x4(&m_transform, localTransform);
+
+    if (m_isVisible)
+    {
+        if (this->IsFocused())
+        {
+            // The user is interacting; restart the inactivity countdown.
+            m_inactiveSecondsCounter = 0.0f;
+        }
+        else
+        {
+            // Auto-hide after MAX_INACTIVE_TIME_TO_HIDE seconds without focus.
+            m_inactiveSecondsCounter += static_cast<float>(timer.GetElapsedSeconds());
+            if (m_inactiveSecondsCounter >= MAX_INACTIVE_TIME_TO_HIDE)
+            {
+                Hide();
+            }
+        }
+    }
+
+    // Update the Controls
+    for (const auto& control : m_baseControls)
+    {
+        control->SetParentTransform(m_transform);
+        control->Update(timer, cameraPose);
+    }
+}
+
+// Draw all child controls. This is a no-op until Show() has been called and
+// the shaders have been supplied via ApplyShaders().
+void MediaTransportControls::Render()
+{
+    if (!m_isVisible || !m_shadersLoaded)
+    {
+        return;
+    }
+
+    for (const auto& control : m_baseControls)
+    {
+        control->Render();
+    }
+}
+
+// Route a press to the first focused control; a press while nothing is
+// focused dismisses the controls instead.
+void MediaTransportControls::PerformPressedAction()
+{
+    for (const auto& control : m_baseControls)
+    {
+        if (control->IsFocused())
+        {
+            control->PerformPressedAction();
+            return;
+        }
+    }
+
+    // No control had focus: treat the press as a dismissal.
+    Hide();
+}
+
+// Place the controls 'distance' units along the user's gaze, rotate them to
+// face the user's head, and make everything visible.
+void MediaTransportControls::Show(SpatialPointerPose^ cameraPose, float distance)
+{
+    const float3 headPosition = cameraPose->Head->Position;
+    const float3 headDirection = cameraPose->Head->ForwardDirection;
+
+    m_position = headPosition + (distance * headDirection);
+    m_rotation = ComputeLookAtQuaternion(m_position, headPosition, float3(0.0f, 1.0f, 0.0f));
+
+    m_isVisible = true;
+    for (const auto& control : m_baseControls)
+    {
+        control->SetIsVisible(true);
+    }
+
+    // Freshly shown controls start with a full inactivity budget.
+    m_inactiveSecondsCounter = 0.0f;
+}
+
+// Hide the MTC and every child control, and reset the inactivity timer.
+void MediaTransportControls::Hide()
+{
+    m_inactiveSecondsCounter = 0.0f;
+    m_isVisible = false;
+
+    for (const auto& control : m_baseControls)
+    {
+        control->SetIsVisible(false);
+    }
+}
+
+// Hand the compiled shader set to each control, then mark the MTC as
+// renderable (Render() stays a no-op until this has run).
+void MediaTransportControls::ApplyShaders()
+{
+    for (auto it = m_baseControls.begin(); it != m_baseControls.end(); ++it)
+    {
+        (*it)->ApplyShaders();
+    }
+    m_shadersLoaded = true;
+}
+
+// True when any child control currently has focus.
+bool MediaTransportControls::IsFocused()
+{
+    bool anyFocused = false;
+    for (const auto& control : m_baseControls)
+    {
+        if (control->IsFocused())
+        {
+            anyFocused = true;
+            break;
+        }
+    }
+    return anyFocused;
+}
diff --git a/Samples/360VideoPlayback/cpp/Content/MediaTransportControls.h b/Samples/360VideoPlayback/cpp/Content/MediaTransportControls.h
new file mode 100644
index 0000000000..03ee6b2beb
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Content/MediaTransportControls.h
@@ -0,0 +1,55 @@
+#pragma once
+
+#include "..\Common\CameraResources.h"
+#include "..\Common\DeviceResources.h"
+#include "..\Common\StepTimer.h"
+#include "..\Common\BaseControl.h"
+
+// NOTE(review): a using-directive in a header leaks the DX namespace into
+// every translation unit that includes this file; prefer qualifying names or
+// moving this into the .cpp (verify no other file relies on it first).
+using namespace DX;
+namespace _360VideoPlayback
+{
+ // Media Transport Controls (MTC) for playback in a 3D environment
+ // Although structured to a RenderObject, the MTC itself has no Geometry, it contains other items which are renderable
+ // NOTE(review): the shared_ptr/list member declarations below are missing
+ // their template argument lists — apparently stripped by an
+ // angle-bracket-eating conversion; restore them from the original header.
+ class MediaTransportControls
+ {
+ public:
+ MediaTransportControls();
+
+ // Builds the child controls (play/pause, seek bar, exit).
+ void Initialize();
+ // Per-frame: rebuilds the local transform, runs the auto-hide timer, and
+ // updates every child control.
+ void Update(const DX::StepTimer& timer, Windows::UI::Input::Spatial::SpatialPointerPose^ cameraPose);
+ void Render();
+ // Routes a press to the focused control, or hides the MTC if none is focused.
+ void PerformPressedAction();
+ void ApplyShaders();
+ // Places the MTC 'distance' along the user's gaze, facing the user.
+ void Show(Windows::UI::Input::Spatial::SpatialPointerPose^ cameraPose, float distance);
+ void Hide();
+ bool IsFocused();
+ bool IsVisible() { return m_isVisible; };
+
+ private:
+ float m_inactiveSecondsCounter = 0.0f;
+ void AddExitButton();
+ void AddPlayPauseButton();
+ void AddSeekbar();
+ // Quaternion that rotates an object at objectPosition to face targetPosition.
+ inline Windows::Foundation::Numerics::float4 ComputeLookAtQuaternion(Windows::Foundation::Numerics::float3 objectPosition, Windows::Foundation::Numerics::float3 targetPosition, Windows::Foundation::Numerics::float3 upVector)
+ {
+ // The Windows::Foundation::Numerics::make_float4x4_look_at() and DirectX::XMLookAtRH() functions are both designed to compute ViewProjection Matrices
+ // View Projection Matrices are typically the inverse of the Camera's transform matrices and are used to convert vertices from World space to View space
+ // To "fix" this for our application of pointing models at things, we just need to inverse the resulting ViewProjection from your inputs.
+ // You can also roll your own vector math to just compute the rotation directly, but we'll stick with the platform calls for now since the inverse call is a minor tax.
+ Windows::Foundation::Numerics::float4x4 lookAtViewMatrix = Windows::Foundation::Numerics::make_float4x4_look_at(targetPosition, objectPosition, upVector);
+ Windows::Foundation::Numerics::quaternion worldSpaceLookAtQuaternion = Windows::Foundation::Numerics::inverse(Windows::Foundation::Numerics::make_quaternion_from_rotation_matrix(lookAtViewMatrix));
+ return Windows::Foundation::Numerics::float4(worldSpaceLookAtQuaternion.x, worldSpaceLookAtQuaternion.y, worldSpaceLookAtQuaternion.z, worldSpaceLookAtQuaternion.w);
+ };
+
+ std::shared_ptr m_exitButtonControl;
+ std::list> m_baseControls;
+ std::shared_ptr m_playPauseButtonControl;
+ std::shared_ptr m_seekbarControl;
+ bool m_isVisible;
+ bool m_shadersLoaded;
+ Windows::Foundation::Numerics::float4 m_rotation;
+ Windows::Foundation::Numerics::float3 m_position;
+ Windows::Foundation::Numerics::float4x4 m_transform;
+ };
+
+}
\ No newline at end of file
diff --git a/Samples/360VideoPlayback/cpp/Content/PixelShader.hlsl b/Samples/360VideoPlayback/cpp/Content/PixelShader.hlsl
new file mode 100644
index 0000000000..ad94d557d1
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Content/PixelShader.hlsl
@@ -0,0 +1,17 @@
+// Texture and sampler bound explicitly to slots t0/s0.
+// Fix: resource bindings require the register() syntax (": t0" is not a
+// binding); this also matches the bindings used by GlassPixelShader.hlsl.
+Texture2D objectTexture : register(t0);
+SamplerState samp : register(s0);
+
+// Per-pixel color data passed through the pixel shader.
+struct PixelShaderInput
+{
+    min16float4 pos : SV_POSITION;
+    min16float2 tex : TEXCOORD1;
+};
+
+// The pixel shader samples the object's texture at the interpolated
+// texture coordinate and returns that color unchanged.
+min16float4 main(PixelShaderInput input) : SV_TARGET
+{
+    min16float4 output = min16float4(objectTexture.Sample(samp, input.tex));
+    return output;
+}
diff --git a/Samples/360VideoPlayback/cpp/Content/ShaderStructures.h b/Samples/360VideoPlayback/cpp/Content/ShaderStructures.h
new file mode 100644
index 0000000000..1bb62a6700
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Content/ShaderStructures.h
@@ -0,0 +1,31 @@
+#pragma once
+
+namespace _360VideoPlayback
+{
+ // Constant buffer used to send hologram position transform to the shader pipeline.
+ // Mirrors the b0 "ModelConstantBuffer" cbuffer declared in the vertex shaders.
+ struct ModelConstantBuffer
+ {
+ DirectX::XMFLOAT4X4 model;
+ };
+
+ // Assert that the constant buffer remains 16-byte aligned (best practice).
+ static_assert((sizeof(ModelConstantBuffer) % (sizeof(float) * 4)) == 0, "Model constant buffer size must be 16-byte aligned (16 bytes is the length of four floats).");
+
+
+ // Used to send per-vertex data to the vertex shader.
+ // Field order matches the POSITION / TEXCOORD1 input layout the sample's
+ // vertex shaders consume.
+ struct VertexPositionTexture
+ {
+ DirectX::XMFLOAT3 position;
+ DirectX::XMFLOAT2 textureCoordinate;
+ };
+
+ // CPU-side mirror of the b1 "FocusPointConstantBuffer" cbuffer in
+ // GlassPixelShader.hlsl; field order and sizes must stay in sync with it.
+ struct FocusPointConstantBuffer
+ {
+ DirectX::XMFLOAT4 focusPointOrigin;
+ DirectX::XMFLOAT4 focusPointDirection;
+ DirectX::XMFLOAT4 focusPointColor;
+ DirectX::XMFLOAT4 focusPointRadius; // Using XMFLOAT4 for byte alignment
+ DirectX::XMFLOAT4 focusPointIntensity; // Using XMFLOAT4 for byte alignment
+ };
+ static_assert((sizeof(FocusPointConstantBuffer) % (sizeof(float) * 4)) == 0, "FocusPointConstantBuffer size must be 16-byte aligned (16 bytes is the length of four floats).");
+}
\ No newline at end of file
diff --git a/Samples/360VideoPlayback/cpp/Content/SpatialInputHandler.cpp b/Samples/360VideoPlayback/cpp/Content/SpatialInputHandler.cpp
new file mode 100644
index 0000000000..27d3ecc960
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Content/SpatialInputHandler.cpp
@@ -0,0 +1,55 @@
+#include "pch.h"
+#include "SpatialInputHandler.h"
+#include <functional>
+
+using namespace _360VideoPlayback;
+
+using namespace std::placeholders;
+using namespace Windows::Foundation;
+using namespace Windows::UI::Input::Spatial;
+
+// Creates and initializes a GestureRecognizer that listens to a Person.
+// Fix: restores the stripped TypedEventHandler template argument list; the
+// types come from OnSourcePressed's signature below.
+SpatialInputHandler::SpatialInputHandler()
+{
+    // The interaction manager provides an event that informs the app when
+    // spatial interactions are detected.
+    m_interactionManager = SpatialInteractionManager::GetForCurrentView();
+
+    // Bind a handler to the SourcePressed event.
+    m_sourcePressedEventToken =
+        m_interactionManager->SourcePressed +=
+        ref new TypedEventHandler<SpatialInteractionManager^, SpatialInteractionSourceEventArgs^>(
+            bind(&SpatialInputHandler::OnSourcePressed, this, _1, _2)
+            );
+
+    //
+    // TODO: Expand this class to use other gesture-based input events as applicable to
+    // your app.
+    //
+}
+
+// Unsubscribes from the interaction manager so no callbacks can arrive after
+// this handler is destroyed.
+SpatialInputHandler::~SpatialInputHandler()
+{
+ // Unregister our handler for the OnSourcePressed event.
+ m_interactionManager->SourcePressed -= m_sourcePressedEventToken;
+}
+
+// Checks if the user performed an input gesture since the last call to this method.
+// Allows the main update loop to check for asynchronous changes to the user
+// input state.
+SpatialInteractionSourceState^ SpatialInputHandler::CheckForInput()
+{
+    // Hand back the pending state (if any) and clear it, so each press is
+    // reported to the caller exactly once.
+    auto pendingState = m_sourceState;
+    m_sourceState = nullptr;
+    return pendingState;
+}
+
+// Event handler: records the latest press. If several presses arrive before
+// CheckForInput() consumes the state, the most recent one wins.
+void SpatialInputHandler::OnSourcePressed(SpatialInteractionManager^ sender, SpatialInteractionSourceEventArgs^ args)
+{
+ m_sourceState = args->State;
+
+ //
+ // TODO: In your app or game engine, rewrite this method to queue
+ // input events in your input class or event handler.
+ //
+}
diff --git a/Samples/360VideoPlayback/cpp/Content/SpatialInputHandler.h b/Samples/360VideoPlayback/cpp/Content/SpatialInputHandler.h
new file mode 100644
index 0000000000..0f4663c91a
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Content/SpatialInputHandler.h
@@ -0,0 +1,30 @@
+#pragma once
+
+namespace _360VideoPlayback
+{
+ // Sample gesture handler.
+ // Hooks up events to recognize a tap gesture, and keeps track of input using a boolean value.
+ class SpatialInputHandler
+ {
+ public:
+ SpatialInputHandler();
+ ~SpatialInputHandler();
+
+ // Returns (and clears) the source state captured since the last call,
+ // or nullptr if no press occurred.
+ Windows::UI::Input::Spatial::SpatialInteractionSourceState^ CheckForInput();
+
+ private:
+ // Interaction event handler.
+ void OnSourcePressed(
+ Windows::UI::Input::Spatial::SpatialInteractionManager^ sender,
+ Windows::UI::Input::Spatial::SpatialInteractionSourceEventArgs^ args);
+
+ // API objects used to process gesture input, and generate gesture events.
+ Windows::UI::Input::Spatial::SpatialInteractionManager^ m_interactionManager;
+
+ // Event registration token.
+ Windows::Foundation::EventRegistrationToken m_sourcePressedEventToken;
+
+ // Used to indicate that a Pressed input event was received this frame.
+ // Overwritten by each press; consumed (cleared) by CheckForInput().
+ Windows::UI::Input::Spatial::SpatialInteractionSourceState^ m_sourceState = nullptr;
+ };
+}
diff --git a/Samples/360VideoPlayback/cpp/Content/VPRTVertexShader.hlsl b/Samples/360VideoPlayback/cpp/Content/VPRTVertexShader.hlsl
new file mode 100644
index 0000000000..56dd228107
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Content/VPRTVertexShader.hlsl
@@ -0,0 +1,58 @@
+// A constant buffer that stores the model transform.
+cbuffer ModelConstantBuffer : register(b0)
+{
+    float4x4 model;
+};
+
+// A constant buffer that stores each set of view and projection matrices in column-major format.
+cbuffer ViewProjectionConstantBuffer : register(b1)
+{
+    float4x4 viewProjection[2];
+};
+
+// Per-vertex data used as input to the vertex shader.
+struct VertexShaderInput
+{
+    min16float3 pos : POSITION;
+    min16float2 tex : TEXCOORD1;
+    uint instId : SV_InstanceID;
+};
+
+// Per-vertex data passed to the geometry shader.
+// Note that the render target array index will be set by the geometry shader
+// using the value of viewId.
+struct VertexShaderOutput
+{
+    min16float4 pos : SV_POSITION;
+    min16float2 tex : TEXCOORD1;
+    uint viewId : SV_RenderTargetArrayIndex; // SV_InstanceID % 2
+};
+
+
+// Vertex shader for devices that support setting the render target array
+// index from the vertex stage (no pass-through geometry shader needed).
+VertexShaderOutput main(VertexShaderInput input)
+{
+    VertexShaderOutput output;
+
+    // Select the eye (and matching view/projection matrix) for this instance.
+    // Instanced stereo draws two copies of each instance, one per eye.
+    const int viewIndex = input.instId % 2;
+
+    // Model space -> world space -> clip space for the selected eye.
+    float4 worldPos = mul(float4(input.pos, 1.0f), model);
+    output.pos = (min16float4)mul(worldPos, viewProjection[viewIndex]);
+
+    // Texture coordinates pass straight through.
+    output.tex = input.tex;
+
+    // Written directly to SV_RenderTargetArrayIndex.
+    output.viewId = viewIndex;
+
+    return output;
+}
diff --git a/Samples/360VideoPlayback/cpp/Content/VertexShader.hlsl b/Samples/360VideoPlayback/cpp/Content/VertexShader.hlsl
new file mode 100644
index 0000000000..76b2a63b30
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Content/VertexShader.hlsl
@@ -0,0 +1,58 @@
+// A constant buffer that stores the model transform.
+cbuffer ModelConstantBuffer : register(b0)
+{
+    float4x4 model;
+};
+
+// A constant buffer that stores each set of view and projection matrices in column-major format.
+cbuffer ViewProjectionConstantBuffer : register(b1)
+{
+    float4x4 viewProjection[2];
+};
+
+// Per-vertex data used as input to the vertex shader.
+struct VertexShaderInput
+{
+    min16float3 pos : POSITION;
+    min16float2 tex : TEXCOORD1;
+    uint instId : SV_InstanceID;
+};
+
+// Per-vertex data passed to the geometry shader.
+// Note that the render target array index will be set by the geometry shader
+// using the value of viewId.
+struct VertexShaderOutput
+{
+    min16float4 pos : SV_POSITION;
+    min16float2 tex : TEXCOORD1;
+    uint viewId : TEXCOORD0; // SV_InstanceID % 2
+};
+
+// Vertex shader: transforms each vertex to clip space for the eye selected
+// by the instance id; the geometry shader routes it to the right render target.
+VertexShaderOutput main(VertexShaderInput input)
+{
+    VertexShaderOutput output;
+
+    // Select the eye (and matching view/projection matrix) for this instance.
+    // Instanced stereo draws two copies of each instance, one per eye.
+    const int viewIndex = input.instId % 2;
+
+    // Model space -> world space -> clip space for the selected eye.
+    float4 worldPos = mul(float4(input.pos, 1.0f), model);
+    output.pos = (min16float4)mul(worldPos, viewProjection[viewIndex]);
+
+    // Texture coordinates pass straight through.
+    output.tex = input.tex;
+
+    // The pass-through geometry shader copies this into SV_RenderTargetArrayIndex.
+    output.viewId = viewIndex;
+
+    return output;
+}
diff --git a/Samples/360VideoPlayback/cpp/Content/VideoRenderer.cpp b/Samples/360VideoPlayback/cpp/Content/VideoRenderer.cpp
new file mode 100644
index 0000000000..ff6840e414
--- /dev/null
+++ b/Samples/360VideoPlayback/cpp/Content/VideoRenderer.cpp
@@ -0,0 +1,441 @@
+#include "pch.h"
+#include "VideoRenderer.h"
+#include "Common\DirectXHelper.h"
+#include "Windows.Graphics.DirectX.Direct3D11.interop.h"
+#include "AppView.h"
+
+using namespace _360VideoPlayback;
+using namespace concurrency;
+using namespace DirectX;
+using namespace Microsoft::WRL;
+using namespace std::placeholders;
+using namespace Windows::Foundation;
+using namespace Windows::Foundation::Numerics;
+using namespace Windows::Graphics::DirectX::Direct3D11;
+using namespace Windows::Media::Core;
+using namespace Windows::Media::Playback;
+using namespace Windows::UI::Input::Spatial;
+
+// Loads vertex and pixel shaders from files and instantiates the cube geometry.
+VideoRenderer::VideoRenderer()
+{
+    m_deviceResources = _360VideoPlaybackMain::GetDeviceResources();
+
+    // Allocate the per-model constant buffer; Render() refreshes its
+    // contents every frame with the latest model transform.
+    const CD3D11_BUFFER_DESC modelBufferDesc(sizeof(ModelConstantBuffer), D3D11_BIND_CONSTANT_BUFFER);
+    DX::ThrowIfFailed(
+        m_deviceResources->GetD3DDevice()->CreateBuffer(&modelBufferDesc, nullptr, &m_modelConstantBuffer));
+}
+
+// Called once per frame.
+// The video sphere uses a fixed uniform scale, so the model matrix is the
+// same every frame; it is stored transposed, ready for the HLSL constant
+// buffer layout.
+void VideoRenderer::Update(const DX::StepTimer& timer)
+{
+    const XMMATRIX worldTransform = XMMatrixScaling(10.0f, 10.0f, 10.0f);
+    XMStoreFloat4x4(&m_modelConstantBufferData.model, XMMatrixTranspose(worldTransform));
+}
+
+// Renders one frame using the vertex and pixel shaders.
+// On devices that do not support the D3D11_FEATURE_D3D11_OPTIONS3::
+// VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature,
+// a pass-through geometry shader is also used to set the render
+// target array index.
+void VideoRenderer::Render()
+{
+ // NOTE(review): presumably serializes against the thread that copies new
+ // video frames into the texture -- confirm against other users of m_critical.
+ critical_section::scoped_lock lock(m_critical);
+ // Loading is asynchronous. Resources must be created before drawing can occur.
+ if (!m_loadingComplete)
+ {
+ return;
+ }
+
+ // Use the D3D device context to update Direct3D device-based resources.
+ const auto context = m_deviceResources->GetD3DDeviceContext();
+
+ // Update the model transform buffer for the hologram.
+ context->UpdateSubresource(
+ m_modelConstantBuffer.Get(),
+ 0,
+ nullptr,
+ &m_modelConstantBufferData,
+ 0,
+ 0
+ );
+
+
+ // Each vertex is one instance of the VertexPositionTexture struct.
+ const UINT stride = sizeof(VertexPositionTexture);
+ const UINT offset = 0;
+ context->IASetVertexBuffers(
+ 0,
+ 1,
+ m_vertexBuffer.GetAddressOf(),
+ &stride,
+ &offset
+ );
+ context->IASetIndexBuffer(
+ m_indexBuffer.Get(),
+ DXGI_FORMAT_R16_UINT, // Each index is one 16-bit unsigned integer (short).
+ 0
+ );
+ context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
+ context->IASetInputLayout(m_inputLayout.Get());
+
+ // Attach the vertex shader.
+ context->VSSetShader(
+ m_vertexShader.Get(),
+ nullptr,
+ 0
+ );
+ // Apply the model constant buffer to the vertex shader (slot 0 = register b0).
+ context->VSSetConstantBuffers(
+ 0,
+ 1,
+ m_modelConstantBuffer.GetAddressOf()
+ );
+
+ if (!m_usingVprtShaders)
+ {
+ // On devices that do not support the D3D11_FEATURE_D3D11_OPTIONS3::
+ // VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature,
+ // a pass-through geometry shader is used to set the render target
+ // array index.
+ context->GSSetShader(
+ m_geometryShader.Get(),
+ nullptr,
+ 0
+ );
+ }
+
+ // Attach the pixel shader.
+ context->PSSetShader(
+ m_pixelShader.Get(),
+ nullptr,
+ 0
+ );
+
+ // Set the Texture Shader resource and samplers
+ context->PSSetShaderResources(0, 1, m_textureView.GetAddressOf());
+ context->PSSetSamplers(
+ 0,
+ 1,
+ m_quadTextureSamplerState.GetAddressOf()
+ );
+
+
+ // Draw the objects. Two instances are drawn so a single draw call renders
+ // both stereo views; the vertex shader selects the per-eye matrix from the
+ // instance ID.
+ context->DrawIndexedInstanced(
+ m_indexCount, // Index count per instance.
+ 2, // Instance count.
+ 0, // Start index location.
+ 0, // Base vertex location.
+ 0 // Start instance location.
+ );
+
+}
+
+void VideoRenderer::CreateDeviceDependentResources()
+{
+    m_usingVprtShaders = m_deviceResources->GetDeviceSupportsVprt();
+
+    // Create the texture, shader resource view, and sampler state.
+    // The texture is sized to the video's natural resolution and is both a
+    // render target (the media pipeline draws frames into it) and a shader
+    // resource (the pixel shader samples it).
+    //
+    // Use a named descriptor instead of &CD3D11_TEXTURE2D_DESC(...):
+    // taking the address of a temporary is a non-conforming MSVC extension
+    // (error C2102 under /permissive-).
+    const CD3D11_TEXTURE2D_DESC textureDesc(
+        DXGI_FORMAT_R8G8B8A8_UNORM,
+        AppView::GetMediaPlayer()->PlaybackSession->NaturalVideoWidth, // Width
+        AppView::GetMediaPlayer()->PlaybackSession->NaturalVideoHeight, // Height
+        1, // MipLevels
+        1, // ArraySize
+        D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET
+    );
+    DX::ThrowIfFailed(
+        m_deviceResources->GetD3DDevice()->CreateTexture2D(
+            &textureDesc,
+            nullptr,
+            &m_texture
+        )
+    );
+
+    // A null view description gives a view of the whole texture in its own format.
+    DX::ThrowIfFailed(
+        m_deviceResources->GetD3DDevice()->CreateShaderResourceView(
+            m_texture.Get(), nullptr,
+            &m_textureView
+        )
+    );
+
+    // Anisotropic clamp sampler with a small mip range; comparison sampling
+    // is unused (D3D11_COMPARISON_NEVER).
+    D3D11_SAMPLER_DESC desc = {};
+    desc.Filter = D3D11_FILTER_ANISOTROPIC;
+    desc.AddressU = D3D11_TEXTURE_ADDRESS_CLAMP;
+    desc.AddressV = D3D11_TEXTURE_ADDRESS_CLAMP;
+    desc.AddressW = D3D11_TEXTURE_ADDRESS_CLAMP;
+    desc.MaxAnisotropy = 3;
+    desc.MinLOD = 0;
+    desc.MaxLOD = 3;
+    desc.MipLODBias = 0.f;
+    desc.BorderColor[0] = 0.f;
+    desc.BorderColor[1] = 0.f;
+    desc.BorderColor[2] = 0.f;
+    desc.BorderColor[3] = 0.f;
+    desc.ComparisonFunc = D3D11_COMPARISON_NEVER;
+
+    DX::ThrowIfFailed(
+        m_deviceResources->GetD3DDevice()->CreateSamplerState(
+            &desc,
+            &m_quadTextureSamplerState
+        )
+    );
+
+    CreateD3D11Surface();
+    LoadShaders();
+}
+
+void VideoRenderer::LoadShaders()
+{
+ // On devices that do support the D3D11_FEATURE_D3D11_OPTIONS3::
+ // VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature
+ // we can avoid using a pass-through geometry shader to set the render
+ // target array index, thus avoiding any overhead that would be
+ // incurred by setting the geometry shader stage.
+ std::wstring vertexShaderFileName = m_usingVprtShaders ? L"ms-appx:///VprtVertexShader.cso" : L"ms-appx:///VertexShader.cso";
+
+ // Load shaders asynchronously.
+ task> loadVSTask = DX::ReadDataAsync(vertexShaderFileName);
+ task> loadPSTask = DX::ReadDataAsync(L"ms-appx:///PixelShader.cso");
+
+ task> loadGSTask;
+ if (!m_usingVprtShaders)
+ {
+ // Load the pass-through geometry shader.
+ loadGSTask = DX::ReadDataAsync(L"ms-appx:///GeometryShader.cso");
+ }
+
+ // After the vertex shader file is loaded, create the shader and input layout.
+ task createVSTask = loadVSTask.then([this](const std::vector& fileData)
+ {
+ DX::ThrowIfFailed(
+ m_deviceResources->GetD3DDevice()->CreateVertexShader(
+ fileData.data(),
+ fileData.size(),
+ nullptr,
+ &m_vertexShader
+ )
+ );
+
+ constexpr std::array