diff --git a/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOculusVR.m b/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOculusVR.m
index 88c0f4add..8ef841e64 100644
--- a/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOculusVR.m
+++ b/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOculusVR.m
@@ -995,6 +995,7 @@
   newhmd.handTrackingSupported = 0;
   newhmd.hapticFeedbackSupported = 0;
   newhmd.eyeTrackingSupported = 0;
+  newhmd.articulatedHandTrackingSupported = 0;
 
   % Default autoclose flag to "no autoclose":
   newhmd.autoclose = 0;
diff --git a/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOculusVR1.m b/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOculusVR1.m
index 7241d3d15..fbc0d3db2 100644
--- a/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOculusVR1.m
+++ b/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOculusVR1.m
@@ -1235,7 +1235,7 @@ if strcmpi(cmd, 'Open')
   if isempty(firsttime)
     firsttime = 1;
-    fprintf('Copyright (c) 2018 - 2023 Mario Kleiner. Licensed under the MIT license.\n');
+    fprintf('Copyright (c) 2018 - 2024 Mario Kleiner. Licensed under the MIT license.\n');
     fprintf('The underlying PsychOculusVRCore1 mex driver uses the Oculus SDK, which is\n');
     fprintf('“Copyright © Facebook Technologies, LLC and its affiliates. All rights reserved.”\n');
     fprintf('A copy of the Oculus SDK license, its terms of use and thereby redistribution\n');
@@ -1261,6 +1261,7 @@
   newhmd.handTrackingSupported = 1;
   newhmd.hapticFeedbackSupported = 1;
   newhmd.eyeTrackingSupported = 0;
+  newhmd.articulatedHandTrackingSupported = 0;
 
   % Default autoclose flag to "no autoclose":
   newhmd.autoclose = 0;
diff --git a/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOpenHMDVR.m b/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOpenHMDVR.m
index f665b3395..f14a576f5 100644
--- a/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOpenHMDVR.m
+++ b/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOpenHMDVR.m
@@ -1221,6 +1221,7 @@
   newhmd.eyeTrackingSupported = 0;
   newhmd.needEyeTracking = 0;
+  newhmd.articulatedHandTrackingSupported = 0;
 
   % Eye gaze tracking via SRAnipal on MS-Windows on HTC Vive Pro Eye etc. supported?
   if IsWin && exist('SRAnipalMex', 'file') && ~isempty(strfind(newhmd.modelName, 'Vive'))
diff --git a/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOpenXR.m b/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOpenXR.m
index 67aa9bdf2..e2bd7583a 100644
--- a/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOpenXR.m
+++ b/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychOpenXR.m
@@ -314,6 +314,38 @@
 % resolution is reduced to at best 16.6 msecs for at most 60 gaze samples
 % per second.
 %
+%
+% 'Handtracking' = Request articulated hand and finger tracking via a supported
+% hand tracker. This keyword asks the driver to enable articulated hand tracking.
+% Typical methods are markerless vision-based hand and finger tracking, e.g.,
+% via external cameras or HMD built-in cameras, marker-based optical tracking,
+% sensor-equipped hand gloves, or other technologies, depending on your OpenXR
+% runtime. The info struct returned by info = PsychVRHMD('GetInfo', hmd); contains
+% info about hand tracking capabilities as a bitmask in info.articulatedHandTrackingSupported:
+% A value of +1 means that basic OpenXR hand tracking of finger and hand joint poses,
+% typically for both hands of a user, is supported. A value of zero means no
+% support. NOTE: Current Psychtoolbox releases do not yet support hand tracking;
+% this help text is in preparation for future use and subject to incompatible changes!
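+%
+% As a preview of intended future use, here is a minimal sketch, using only
+% documented PsychVRHMD driver calls. Since the feature is not yet implemented,
+% details may change:
+%
+%   hmd = PsychVRHMD('AutoSetupHMD', 'Tracked3DVR', 'Handtracking');
+%   info = PsychVRHMD('GetInfo', hmd);
+%   if info.articulatedHandTrackingSupported
+%     % Request hand tracking data via reqmask flag +8, see 'PrepareRender':
+%     state = PsychVRHMD('PrepareRender', hmd, [], 1 + 8);
+%   end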
+%
+% If hand tracking is requested via the keyword, and supported, then the user
+% script can request return of hand tracking sample data by calling the
+% state = PsychVRHMD('PrepareRender', ..., reqmask, ...) function with reqmask
+% flag +8. This will cause the returned 'state' struct to contain additional
+% fields with information about the most recently tracked articulated hand
+% configuration. See the help text for the 'PrepareRender' function for details.
+%
+%
 % 'basicQuality' defines the basic tradeoff between quality and required
 % computational power. A setting of 0 gives lowest quality, but with the
 % lowest performance requirements. A setting of 1 gives maximum quality at
@@ -369,6 +390,15 @@
 % +1024 means that HTC's proprietary SRAnipal eyetracking is available for
 % more extensive gaze data reporting.
 %
+% articulatedHandTrackingSupported = Info about hand tracking capabilities. A
+% value of +1 means that basic articulated hand tracking is supported, usually
+% for both hands. Zero means no support for articulated hand tracking. The
+% hand tracking method could be markerless optical tracking via cameras and
+% computer vision, marker-based tracking, a sensor-equipped glove input
+% device, or any other suitable future modality supported by your OpenXR
+% runtime.
+%
+%
 % The returned struct may contain more information, but the fields mentioned
 % above are the only ones guaranteed to be available over the long run. Other
 % fields may disappear or change their format and meaning anytime without
@@ -757,6 +787,22 @@
 % some HTC HMDs under SRAnipal, but has not been confirmed to work in
 % practice on the tested HTC Vive Pro Eye.
 %
+% +8 = Request return of articulated hand tracking information on suitable OpenXR
+% systems.
+%
+% NOTE: This feature is NOT YET IMPLEMENTED in current Psychtoolbox releases!
+%
+% Returned information may represent the latest available measured hand and
+% finger configuration data, or it may be predicted configuration information
+% for the specified 'targetTime', computed via interpolation or extrapolation
+% from actual previously tracked configurations. This depends on the specific
+% hand tracker implementation of your XR system.
+%
+% The following fields are mandatory as part of the returned state struct,
+% if hand tracking is supported, enabled, and requested:
+%
+% TODO, IMPLEMENTATION OF FEATURE NOT YET FINISHED.
+%
 %
 % More flags to follow...
 %
@@ -1785,7 +1831,7 @@ if strcmpi(cmd, 'Open')
   if isempty(firsttime)
     firsttime = 1;
-    fprintf('Copyright (c) 2022-2023 Mario Kleiner. Licensed to you under the MIT license.\n');
+    fprintf('Copyright (c) 2022-2024 Mario Kleiner. Licensed to you under the MIT license.\n');
     fprintf('Our underlying PsychOpenXRCore mex driver builds against the Khronos OpenXR SDK public\n');
     fprintf('headers, and links against the OpenXR open-source dynamic loader, to implement the\n');
     fprintf('interface to a system-installed OpenXR runtime. These components are dual-licensed by\n');
@@ -1809,6 +1855,8 @@
   newhmd.controllerTypes = 0;
   newhmd.eyeTrackingSupported = hasEyeTracking;
   newhmd.needEyeTracking = 0;
+  newhmd.articulatedHandTrackingSupported = 0;
+  newhmd.needHandTracking = 0;
 
   % Usually HMD tracking also works for mono display mode:
   newhmd.noTrackingInMono = 0;
@@ -2297,6 +2345,16 @@
     end
   end
 
+  % Hand tracking requested?
+  if ~isempty(strfind(basicRequirements, 'Handtracking'))
+    if ~hmd{myhmd.handle}.articulatedHandTrackingSupported
+      warning('PsychOpenXR:SetupRenderingParameters: Articulated ''Handtracking'' requested in ''basicRequirements'', but this XR system does not support it!');
+      hmd{myhmd.handle}.needHandTracking = 0;
+    else
+      hmd{myhmd.handle}.needHandTracking = 1;
+    end
+  end
+
   return;
 end
diff --git a/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychVRHMD.m b/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychVRHMD.m
index 4c8c0fd16..162094f67 100644
--- a/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychVRHMD.m
+++ b/Psychtoolbox/PsychHardware/PsychVRToolbox/PsychVRHMD.m
@@ -325,6 +325,26 @@
 % per second.
 %
 %
+% 'Handtracking' = Request articulated hand and finger tracking via a supported
+% hand tracker. This keyword asks the driver to enable articulated hand tracking.
+% Typical methods are markerless vision-based hand and finger tracking, e.g.,
+% via external cameras or HMD built-in cameras, marker-based optical tracking,
+% sensor-equipped hand gloves, or other technologies, depending on your OpenXR
+% runtime. The info struct returned by info = PsychVRHMD('GetInfo', hmd); contains
+% info about hand tracking capabilities as a bitmask in info.articulatedHandTrackingSupported:
+% A value of +1 means that basic OpenXR hand tracking of finger and hand joint poses,
+% typically for both hands of a user, is supported. A value of zero means no
+% support. NOTE: Current Psychtoolbox releases do not yet support hand tracking;
+% this help text is in preparation for future use and subject to incompatible changes!
+%
+% If hand tracking is requested via the keyword, and supported, then the user
+% script can request return of hand tracking sample data by calling the
+% state = PsychVRHMD('PrepareRender', ..., reqmask, ...) function with reqmask
+% flag +8. This will cause the returned 'state' struct to contain additional
+% fields with information about the most recently tracked articulated hand
+% configuration. See the help text for the 'PrepareRender' function for details.
+%
+%
 % These basic requirements get translated into a device specific set of
 % settings. The settings can also be specific to the selected 'basicTask',
 % and if a quality vs. performance / system load tradeoff is unavoidable
@@ -392,9 +412,9 @@
 % ie. mapping keyboard keys to OVR.Button_XXX buttons.
 %
 % handTrackingSupported = 1 if PsychVRHMD('PrepareRender') with reqmask +2 will provide
-% valid hand tracking info, 0 if this is not supported and will
-% just report fake values. A driver may report 1 here but still
-% don't provide meaningful info at runtime, e.g., if required
+% valid tracked hand controller info, 0 if this is not supported
+% and will just report fake values. A driver may report 1 here but
+% still fail to provide meaningful info at runtime, e.g., if required
 % tracking hardware is missing or gets disconnected. The flag
 % just aids extra performance optimizations in your code.
 %
@@ -413,6 +433,14 @@
 % +1024 means that HTC's proprietary SRAnipal eyetracking is available for
 % more extensive gaze data reporting.
 %
+% articulatedHandTrackingSupported = Info about hand tracking capabilities. A
+% value of +1 means that basic articulated hand tracking is supported, usually
+% for both hands. Zero means no support for articulated hand tracking. The
+% hand tracking method could be markerless optical tracking via cameras and
+% computer vision, marker-based tracking, a sensor-equipped glove input
+% device, or any other suitable future modality supported by your OpenXR
+% runtime.
+%
 %
 % The info struct may contain much more vendor specific information, but the above
 % set is supported across all devices.
@@ -790,6 +818,22 @@
 % some HTC HMDs under SRAnipal, but has not been confirmed to work in
 % practice on the tested HTC Vive Pro Eye.
 %
+% +8 = Request return of articulated hand tracking information on suitable OpenXR
+% systems.
+%
+% NOTE: This feature is NOT YET IMPLEMENTED in current Psychtoolbox releases!
+%
+% Returned information may represent the latest available measured hand and
+% finger configuration data, or it may be predicted configuration information
+% for the specified 'targetTime', computed via interpolation or extrapolation
+% from actual previously tracked configurations. This depends on the specific
+% hand tracker implementation of your XR system.
+%
+% The following fields are mandatory as part of the returned state struct,
+% if hand tracking is supported, enabled, and requested:
+%
+% TODO, IMPLEMENTATION OF FEATURE NOT YET FINISHED.
+%
 %
 % More flags to follow...
 %
diff --git a/Psychtoolbox/PsychHardware/PsychVRToolbox/VRInputStuffTest.m b/Psychtoolbox/PsychHardware/PsychVRToolbox/VRInputStuffTest.m
index 5f00107d8..a2d135ccd 100644
--- a/Psychtoolbox/PsychHardware/PsychVRToolbox/VRInputStuffTest.m
+++ b/Psychtoolbox/PsychHardware/PsychVRToolbox/VRInputStuffTest.m
@@ -1,5 +1,7 @@
-function VRInputStuffTest(withHapticFeedback, withMTStressTest, specialReqs, refSpace, withGazeTracking)
-% VRInputStuffTest([withHapticFeedback=0][, withMTStressTest=0][, specialReqs='DebugDisplay'][, refSpace][, withGazeTracking=0]) - Test input functionality related to VR devices.
+function VRInputStuffTest(withHapticFeedback, withMTStressTest, specialReqs, refSpace, withGazeTracking, withHandTracking)
+% VRInputStuffTest([withHapticFeedback=0][, withMTStressTest=0][, specialReqs='DebugDisplay'][, refSpace][, withGazeTracking=0][, withHandTracking=0])
+%
+% Test input functionality related to VR devices.
 %
 % Tries to enumerate available controllers and other properties related to
 % input. After any key press or controller button press, reports live state
@@ -48,6 +50,16 @@ function VRInputStuffTest(withHapticFeedback, withMTStressTest, specialReqs, ref
 % eye tracking. A setting of 1 will visualize the 2D gaze position, a setting of
 % 2 will visualize a 3D gaze ray in addition.
 %
+% The optional parameter 'withHandTracking', if provided and non-zero, will
+% enable some basic tests of articulated hand tracking on VR hardware that
+% supports it. A setting of 1 will visualize the measured hand joint locations.
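+%
+% For example, this call would run the test with hand joint visualization
+% enabled and all other optional parameters at their defaults, on a system
+% with hand tracking support (not functional yet in current releases):
+%
+%   VRInputStuffTest([], [], [], [], [], 1);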
+%
 % After a keypress (or Enter/Back button press on the controller),
 % visualizes tracked hand position and orientation of hand controllers and
 % allows to do some nice visual effects based on trigger / grip button
@@ -95,6 +101,15 @@ function VRInputStuffTest(withHapticFeedback, withMTStressTest, specialReqs, ref
   specialReqs = [specialReqs ' Eyetracking '];
 end
 
+if nargin < 6 || isempty(withHandTracking)
+  withHandTracking = 0;
+end
+
+if withHandTracking
+  % Signal that hand tracking is desired:
+  specialReqs = [specialReqs ' Handtracking '];
+end
+
 canary = onCleanup(@sca);
 
 % Setup unified keymapping and unit color range:
@@ -155,6 +170,14 @@ function VRInputStuffTest(withHapticFeedback, withMTStressTest, specialReqs, ref
   withGazeTracking = 0;
 end
 
+if withHandTracking && hmdinfo.articulatedHandTrackingSupported
+  % Yes. Request hand tracker samples during calls to 'PrepareRender':
+  reqMask = reqMask + 8;
+else
+  % No. Disable any hand tracking:
+  withHandTracking = 0;
+end
+
 clc;
 
 % Mark our own tracking + rendering loop as stopped for initial section of test/demo:
@@ -412,7 +435,7 @@ function VRInputStuffTest(withHapticFeedback, withMTStressTest, specialReqs, ref
 end
 
 % Part 3: Actual hand tracking and visualisation:
-if hmdinfo.handTrackingSupported || withGazeTracking
+if hmdinfo.handTrackingSupported || withGazeTracking || withHandTracking
   % Number of fountain particles whose positions are computed on the GPU:
   nparticles = 10000;
@@ -718,6 +741,12 @@ function VRInputStuffTest(withHapticFeedback, withMTStressTest, specialReqs, ref
 
       % Visualize users hands / hand controllers:
       for hand = 1:2
+        % Skip hand controller visualization if hand tracking is enabled and this
+        % hand is tracked. We use hand tracking visualization instead in this case:
+        if withHandTracking && state.trackedHandStatus(hand)
+          continue;
+        end
+
         % Position and orientation of hand tracked? Otherwise we don't show them:
         if bitand(state.handStatus(hand), 3) == 3
           % Yes: Lets visualize it:
@@ -758,6 +787,27 @@ function VRInputStuffTest(withHapticFeedback, withMTStressTest, specialReqs, ref
         end
       end
 
+      % Visualize tracked hand joint locations if withHandTracking:
+      if withHandTracking
+        glDisable(GL.LIGHTING);
+
+        % Iterate over both hands:
+        for hand = 1:2
+          % Hand fully tracked? Otherwise we don't show it:
+          if state.trackedHandStatus(hand)
+            % Yes: Let's visualize all its tracked joint locations.
+            glColor3f(hand - 1, 1, 1);
+
+            for joint = find(state.trackedJoints(hand, :))
+              glPushMatrix;
+              glMultMatrixd(state.globalJointPoseMatrix{hand, joint});
+              glutSolidCube(state.trackedJointsRadius(hand, joint));
+              glPopMatrix;
+            end
+          end
+        end
+      end
+
       % Manually disable 3D mode before switching to other eye or to flip:
       Screen('EndOpenGL', win);