Commit e6473fc8b9 (mirror of https://github.com/overte-org/overte.git, synced 2025-04-22 20:53:33 +02:00)

Merge branch 'master' into 20639

Conflicts:
    interface/CMakeLists.txt
    interface/src/Application.cpp
    interface/src/Menu.h
    tests/ui/src/main.cpp

124 changed files with 6638 additions and 3759 deletions
Changed paths (partial list):
  cmake
  examples
  interface
    CMakeLists.txt
    src
      Application.cpp, Application.h, Camera.cpp, Camera.h, GLCanvas.cpp, GLCanvas.h, MainWindow.cpp, Menu.cpp, Menu.h, UIUtil.cpp
      avatar
      devices
        3DConnexionClient.cpp, 3DConnexionClient.h, OculusManager.cpp, OculusManager.h, TV3DManager.cpp, TV3DManager.h
      scripting
      ui
    ui
  libraries
    avatars/src
    display-plugins
      CMakeLists.txt
      src/display-plugins
        Basic2DWindowOpenGLDisplayPlugin.cpp, Basic2DWindowOpenGLDisplayPlugin.h, DisplayPlugin.cpp, DisplayPlugin.h, MainWindowOpenGLDisplayPlugin.cpp, MainWindowOpenGLDisplayPlugin.h, NullDisplayPlugin.cpp, NullDisplayPlugin.h, OpenGLDisplayPlugin.cpp, OpenGLDisplayPlugin.h, WindowOpenGLDisplayPlugin.cpp, WindowOpenGLDisplayPlugin.h
        oculus
          OculusBaseDisplayPlugin.cpp, OculusBaseDisplayPlugin.h, OculusHelpers.cpp, OculusHelpers.h, Oculus_0_5_DisplayPlugin.cpp, Oculus_0_5_DisplayPlugin.h, Oculus_0_6_DisplayPlugin.cpp, Oculus_0_6_DisplayPlugin.h
        openvr
        stereo
    entities-renderer/src
    gpu/src/gpu
    input-plugins
cmake/externals/LibOVR/CMakeLists.txt (vendored, 4 changed lines)

@@ -9,8 +9,8 @@ if (WIN32)
  ExternalProject_Add(
    ${EXTERNAL_NAME}
    URL http://static.oculus.com/sdk-downloads/0.6.0.0/1431634088/ovr_sdk_win_0.6.0.0.zip
    URL_MD5 a3dfdab037a854fdcf7e6033fa8d7028
    URL http://static.oculus.com/sdk-downloads/0.6.0.1/Public/1435190862/ovr_sdk_win_0.6.0.1.zip
    URL_MD5 4b3ef825f9a1d6d3035c9f6820687da9
    CONFIGURE_COMMAND ""
    BUILD_COMMAND ""
    INSTALL_COMMAND ""
cmake/externals/openvr/CMakeLists.txt (vendored, 5 changed lines)

@@ -26,16 +26,19 @@ if (WIN32)
  # FIXME need to account for different architectures
  set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/win32/openvr_api.lib CACHE TYPE INTERNAL)
  add_paths_to_fixup_libs(${SOURCE_DIR}/bin/win32)

elseif(APPLE)

  # FIXME need to account for different architectures
  set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/osx32/libopenvr_api.dylib CACHE TYPE INTERNAL)
  add_paths_to_fixup_libs(${SOURCE_DIR}/bin/osx32)

elseif(NOT ANDROID)

  # FIXME need to account for different architectures
  set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/linux32/libopenvr_api.so CACHE TYPE INTERNAL)
  set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/linux64/libopenvr_api.so CACHE TYPE INTERNAL)
  add_paths_to_fixup_libs(${SOURCE_DIR}/bin/linux64)

endif()
cmake/externals/sixense/CMakeLists.txt (vendored, new file, 60 lines)

@@ -0,0 +1,60 @@
include(ExternalProject)
include(SelectLibraryConfigurations)

set(EXTERNAL_NAME Sixense)

string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)

ExternalProject_Add(
  ${EXTERNAL_NAME}
  URL ./SixenseSDK_062612.zip
  URL_MD5 10cc8dc470d2ac1244a88cf04bc549cc
  CONFIGURE_COMMAND ""
  BUILD_COMMAND ""
  INSTALL_COMMAND ""
  LOG_DOWNLOAD 1
)

if (APPLE)
  find_library(SIXENSE_LIBRARY_RELEASE lib/osx_x64/release_dll/libsixense_x64.dylib HINTS ${SIXENSE_SEARCH_DIRS})
  find_library(SIXENSE_LIBRARY_DEBUG lib/osx_x64/debug_dll/libsixensed_x64.dylib HINTS ${SIXENSE_SEARCH_DIRS})
elseif (UNIX)
  find_library(SIXENSE_LIBRARY_RELEASE lib/linux_x64/release/libsixense_x64.so HINTS ${SIXENSE_SEARCH_DIRS})
  # find_library(SIXENSE_LIBRARY_DEBUG lib/linux_x64/debug/libsixensed_x64.so HINTS ${SIXENSE_SEARCH_DIRS})
elseif (WIN32)
endif ()

ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)

set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/include CACHE TYPE INTERNAL)

if (WIN32)

  if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
    set(ARCH_DIR "x64")
    set(ARCH_SUFFIX "_x64")
  else()
    set(ARCH_DIR "Win32")
    set(ARCH_SUFFIX "")
  endif()

  # FIXME need to account for different architectures
  set(${EXTERNAL_NAME_UPPER}_LIBRARIES "${SOURCE_DIR}/lib/${ARCH_DIR}/release_dll/sixense${ARCH_SUFFIX}.lib" CACHE TYPE INTERNAL)
  add_paths_to_fixup_libs(${SOURCE_DIR}/bin/win32)

elseif(APPLE)

  # FIXME need to account for different architectures
  set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/osx32/libopenvr_api.dylib CACHE TYPE INTERNAL)
  add_paths_to_fixup_libs(${SOURCE_DIR}/bin/osx32)

elseif(NOT ANDROID)

  # FIXME need to account for different architectures
  set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/linux32/libopenvr_api.so CACHE TYPE INTERNAL)
  add_paths_to_fixup_libs(${SOURCE_DIR}/bin/linux32)

endif()
cmake/macros/GroupSources.cmake (new file, 16 lines)

@@ -0,0 +1,16 @@
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#

macro(GroupSources curdir)
  file(GLOB children RELATIVE ${PROJECT_SOURCE_DIR}/${curdir} ${PROJECT_SOURCE_DIR}/${curdir}/*)
  foreach(child ${children})
    if(IS_DIRECTORY ${PROJECT_SOURCE_DIR}/${curdir}/${child})
      GroupSources(${curdir}/${child})
    else()
      string(REPLACE "/" "\\" groupname ${curdir})
      source_group(${groupname} FILES ${PROJECT_SOURCE_DIR}/${curdir}/${child})
    endif()
  endforeach()
endmacro()
examples/controllers/handGrab.js (new file, 331 lines)

@@ -0,0 +1,331 @@
// handGrab.js
// examples
//
// Created by Sam Gondelman on 8/3/2015
// Copyright 2015 High Fidelity, Inc.
//
// Allow avatar to grab the closest object to each hand and throw them
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("http://s3.amazonaws.com/hifi-public/scripts/libraries/toolBars.js");

var nullActionID = "00000000-0000-0000-0000-000000000000";
var controllerID;
var controllerActive;
var leftHandObjectID = null;
var rightHandObjectID = null;
var leftHandActionID = nullActionID;
var rightHandActionID = nullActionID;

var TRIGGER_THRESHOLD = 0.2;
var GRAB_RADIUS = 0.25;

var LEFT_HAND_CLICK = Controller.findAction("LEFT_HAND_CLICK");
var RIGHT_HAND_CLICK = Controller.findAction("RIGHT_HAND_CLICK");
var ACTION1 = Controller.findAction("ACTION1");
var ACTION2 = Controller.findAction("ACTION2");

var rightHandGrabAction = RIGHT_HAND_CLICK;
var leftHandGrabAction = LEFT_HAND_CLICK;

var rightHandGrabValue = 0;
var leftHandGrabValue = 0;
var prevRightHandGrabValue = 0;
var prevLeftHandGrabValue = 0;

var grabColor = { red: 0, green: 255, blue: 0};
var releaseColor = { red: 0, green: 0, blue: 255};

var toolBar = new ToolBar(0, 0, ToolBar.vertical, "highfidelity.toybox.toolbar", function() {
    return {
        x: 100,
        y: 380
    };
});

var BUTTON_SIZE = 32;
var SWORD_IMAGE = "https://hifi-public.s3.amazonaws.com/images/sword/sword.svg"; // replace this with a table icon
var CLEANUP_IMAGE = "http://s3.amazonaws.com/hifi-public/images/delete.png"; // cleanup table
var tableButton = toolBar.addOverlay("image", {
    width: BUTTON_SIZE,
    height: BUTTON_SIZE,
    imageURL: SWORD_IMAGE,
    alpha: 1
});
var cleanupButton = toolBar.addOverlay("image", {
    width: BUTTON_SIZE,
    height: BUTTON_SIZE,
    imageURL: CLEANUP_IMAGE,
    alpha: 1
});

var leftHandOverlay = Overlays.addOverlay("sphere", {
    position: MyAvatar.getLeftPalmPosition(),
    size: GRAB_RADIUS,
    color: releaseColor,
    alpha: 0.5,
    solid: false
});
var rightHandOverlay = Overlays.addOverlay("sphere", {
    position: MyAvatar.getRightPalmPosition(),
    size: GRAB_RADIUS,
    color: releaseColor,
    alpha: 0.5,
    solid: false
});

var OBJECT_HEIGHT_OFFSET = 0.5;
var MIN_OBJECT_SIZE = 0.05;
var MAX_OBJECT_SIZE = 0.3;
var TABLE_DIMENSIONS = {
    x: 10.0,
    y: 0.2,
    z: 5.0
};

var GRAVITY = {
    x: 0,
    y: -2,
    z: 0
}

var LEFT = 0;
var RIGHT = 1;

var tableCreated = false;

var NUM_OBJECTS = 100;
var tableEntities = Array(NUM_OBJECTS + 1); // Also includes table

var VELOCITY_MAG = 0.3;

var MODELS = Array(
    { modelURL: "https://hifi-public.s3.amazonaws.com/ozan/props/sword/sword.fbx" },
    { modelURL: "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Vehicles/clara/spaceshuttle.fbx" },
    { modelURL: "https://s3.amazonaws.com/hifi-public/cozza13/apartment/Stargate.fbx" },
    { modelURL: "https://dl.dropboxusercontent.com/u/17344741/kelectricguitar10/kelectricguitar10.fbx" },
    { modelURL: "https://dl.dropboxusercontent.com/u/17344741/ktoilet10/ktoilet10.fbx" },
    { modelURL: "https://hifi-public.s3.amazonaws.com/models/props/MidCenturyModernLivingRoom/Interior/BilliardsTable.fbx" },
    { modelURL: "https://hifi-public.s3.amazonaws.com/ozan/avatars/robotMedic/robotMedicRed/robotMedicRed.fst" },
    { modelURL: "https://hifi-public.s3.amazonaws.com/ozan/avatars/robotMedic/robotMedicFaceRig/robotMedic.fst" },
    { modelURL: "https://hifi-public.s3.amazonaws.com/marketplace/contents/029db3d4-da2c-4cb2-9c08-b9612ba576f5/02949063e7c4aed42ad9d1a58461f56d.fst?1427169842" },
    { modelURL: "https://hifi-public.s3.amazonaws.com/models/props/MidCenturyModernLivingRoom/Interior/Bar.fbx" },
    { modelURL: "https://hifi-public.s3.amazonaws.com/marketplace/contents/96124d04-d603-4707-a5b3-e03bf47a53b2/1431770eba362c1c25c524126f2970fb.fst?1436924721" }
    // { modelURL: "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Architecture/sketchfab/cudillero.fbx" },
    // { modelURL: "https://hifi-public.s3.amazonaws.com/ozan/sets/musicality/musicality.fbx" },
    // { modelURL: "https://hifi-public.s3.amazonaws.com/ozan/sets/statelyHome/statelyHome.fbx" }
);

function letGo(hand) {
    var actionIDToRemove = (hand == LEFT) ? leftHandActionID : rightHandActionID;
    var entityIDToEdit = (hand == LEFT) ? leftHandObjectID : rightHandObjectID;
    var handVelocity = (hand == LEFT) ? MyAvatar.getLeftPalmVelocity() : MyAvatar.getRightPalmVelocity();
    var handAngularVelocity = (hand == LEFT) ? MyAvatar.getLeftPalmAngularVelocity() :
        MyAvatar.getRightPalmAngularVelocity();
    if (actionIDToRemove != nullActionID && entityIDToEdit != null) {
        Entities.deleteAction(entityIDToEdit, actionIDToRemove);
        if (hand == LEFT) {
            leftHandObjectID = null;
            leftHandActionID = nullActionID;
        } else {
            rightHandObjectID = null;
            rightHandActionID = nullActionID;
        }
    }
}

function setGrabbedObject(hand) {
    var handPosition = (hand == LEFT) ? MyAvatar.getLeftPalmPosition() : MyAvatar.getRightPalmPosition();
    var entities = Entities.findEntities(handPosition, GRAB_RADIUS);
    var objectID = null;
    var minDistance = GRAB_RADIUS;
    for (var i = 0; i < entities.length; i++) {
        if ((hand == LEFT && entities[i] == rightHandObjectID) ||
            (hand == RIGHT) && entities[i] == leftHandObjectID) {
            continue;
        } else {
            var distance = Vec3.distance(Entities.getEntityProperties(entities[i]).position, handPosition);
            if (distance < minDistance) {
                objectID = entities[i];
                minDistance = distance;
            }
        }
    }
    if (objectID == null) {
        return false;
    }
    if (hand == LEFT) {
        leftHandObjectID = objectID;
    } else {
        rightHandObjectID = objectID;
    }
    return true;
}

function grab(hand) {
    if (!setGrabbedObject(hand)) {
        return;
    }
    var objectID = (hand == LEFT) ? leftHandObjectID : rightHandObjectID;
    var handRotation = (hand == LEFT) ? MyAvatar.getLeftPalmRotation() : MyAvatar.getRightPalmRotation();

    var objectRotation = Entities.getEntityProperties(objectID).rotation;
    var offsetRotation = Quat.multiply(Quat.inverse(handRotation), objectRotation);
    var actionID = Entities.addAction("hold", objectID, {
        relativePosition: {
            x: 0.0,
            y: 0.0,
            z: 0.0
        },
        relativeRotation: offsetRotation,
        hand: (hand == LEFT) ? "left" : "right",
        timeScale: 0.05
    });
    if (actionID == nullActionID) {
        if (hand == LEFT) {
            leftHandObjectID = null;
        } else {
            rightHandObjectID = null;
        }
    } else {
        // Entities.editEntity(objectID, { ignore});
        if (hand == LEFT) {
            leftHandActionID = actionID;
        } else {
            rightHandActionID = actionID;
        }
    }
}

function update() {
    Overlays.editOverlay(leftHandOverlay, { position: MyAvatar.getLeftPalmPosition() });
    Overlays.editOverlay(rightHandOverlay, { position: MyAvatar.getRightPalmPosition() });

    rightHandGrabValue = Controller.getActionValue(rightHandGrabAction);
    leftHandGrabValue = Controller.getActionValue(leftHandGrabAction);

    if (rightHandGrabValue > TRIGGER_THRESHOLD && rightHandObjectID == null) {
        Overlays.editOverlay(rightHandOverlay, { color: grabColor });
        grab(RIGHT);
    } else if (rightHandGrabValue < TRIGGER_THRESHOLD &&
        prevRightHandGrabValue > TRIGGER_THRESHOLD) {
        Overlays.editOverlay(rightHandOverlay, { color: releaseColor });
        letGo(RIGHT);
    }

    if (leftHandGrabValue > TRIGGER_THRESHOLD && leftHandObjectID == null) {
        Overlays.editOverlay(leftHandOverlay, { color: grabColor });
        grab(LEFT);
    } else if (leftHandGrabValue < TRIGGER_THRESHOLD &&
        prevLeftHandGrabValue > TRIGGER_THRESHOLD) {
        Overlays.editOverlay(leftHandOverlay, { color: releaseColor });
        letGo(LEFT);
    }

    prevRightHandGrabValue = rightHandGrabValue;
    prevLeftHandGrabValue = leftHandGrabValue;
}

function cleanUp() {
    letGo(RIGHT);
    letGo(LEFT);
    Overlays.deleteOverlay(leftHandOverlay);
    Overlays.deleteOverlay(rightHandOverlay);
    removeTable();
    toolBar.cleanup();
}

function onClick(event) {
    if (event.deviceID != 0) {
        return;
    }
    switch (Overlays.getOverlayAtPoint(event)) {
        case tableButton:
            if (!tableCreated) {
                createTable();
                tableCreated = true;
            }
            break;
        case cleanupButton:
            if (tableCreated) {
                removeTable();
                tableCreated = false;
            }
            break;
    }
}

randFloat = function(low, high) {
    return low + Math.random() * (high - low);
}

randInt = function(low, high) {
    return Math.floor(randFloat(low, high));
}

function createTable() {
    var tablePosition = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(MyAvatar.orientation)));
    tableEntities[0] = Entities.addEntity( {
        type: "Box",
        position: tablePosition,
        dimensions: TABLE_DIMENSIONS,
        rotation: MyAvatar.orientation,
        color: { red: 255, green: 0, blue: 0 }
    });

    for (var i = 1; i < NUM_OBJECTS + 1; i++) {
        var objectOffset = { x: TABLE_DIMENSIONS.x/2.0 * randFloat(-1, 1),
            y: OBJECT_HEIGHT_OFFSET,
            z: TABLE_DIMENSIONS.z/2.0 * randFloat(-1, 1) };
        var objectPosition = Vec3.sum(tablePosition, Vec3.multiplyQbyV(MyAvatar.orientation, objectOffset));
        var type;
        var randType = randInt(0, 3);
        switch (randType) {
            case 0:
                type = "Box";
                break;
            case 1:
                type = "Sphere";
                // break;
            case 2:
                type = "Model";
                break;
        }
        tableEntities[i] = Entities.addEntity( {
            type: type,
            position: objectPosition,
            velocity: { x: randFloat(-VELOCITY_MAG, VELOCITY_MAG),
                y: randFloat(-VELOCITY_MAG, VELOCITY_MAG),
                z: randFloat(-VELOCITY_MAG, VELOCITY_MAG) },
            dimensions: { x: randFloat(MIN_OBJECT_SIZE, MAX_OBJECT_SIZE),
                y: randFloat(MIN_OBJECT_SIZE, MAX_OBJECT_SIZE),
                z: randFloat(MIN_OBJECT_SIZE, MAX_OBJECT_SIZE) },
            rotation: MyAvatar.orientation,
            gravity: GRAVITY,
            damping: 0.1,
            restitution: 0.01,
            density: 0.5,
            collisionsWillMove: true,
            color: { red: randInt(0, 255), green: randInt(0, 255), blue: randInt(0, 255) }
        });
        if (type == "Model") {
            var randModel = randInt(0, MODELS.length);
            Entities.editEntity(tableEntities[i], {
                shapeType: "box",
                modelURL: MODELS[randModel].modelURL
            });
        }
    }
}

function removeTable() {
    for (var i = 0; i < tableEntities.length; i++) {
        Entities.deleteEntity(tableEntities[i]);
    }
}

Script.scriptEnding.connect(cleanUp);
Script.update.connect(update);
Controller.mousePressEvent.connect(onClick);
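The script above registers all of its handlers when it is included, so running it is just a matter of loading it the same way it loads toolBars.js. A minimal sketch, with a placeholder URL that is not part of this commit:

Script.include("https://example.com/scripts/controllers/handGrab.js"); // placeholder URL, substitute wherever the script is hosted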
examples/controllers/squeezeHands2.js (new file, 79 lines)

@@ -0,0 +1,79 @@
//
// squeezeHands.js
// examples
//
// Created by Philip Rosedale on June 4, 2014
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";

var rightHandAnimation = HIFI_PUBLIC_BUCKET + "animations/RightHandAnimPhilip.fbx";
var leftHandAnimation = HIFI_PUBLIC_BUCKET + "animations/LeftHandAnimPhilip.fbx";

var LEFT = 0;
var RIGHT = 1;

var lastLeftFrame = 0;
var lastRightFrame = 0;

var leftDirection = true;
var rightDirection = true;

var LAST_FRAME = 15.0; // What is the number of the last frame we want to use in the animation?
var SMOOTH_FACTOR = 0.0;
var MAX_FRAMES = 30.0;

var LEFT_HAND_CLICK = Controller.findAction("LEFT_HAND_CLICK");
var RIGHT_HAND_CLICK = Controller.findAction("RIGHT_HAND_CLICK");

Script.update.connect(function(deltaTime) {
    var leftTriggerValue = Controller.getActionValue(LEFT_HAND_CLICK);
    var rightTriggerValue = Controller.getActionValue(RIGHT_HAND_CLICK);

    var leftFrame, rightFrame;

    // Average last few trigger frames together for a bit of smoothing
    leftFrame = (leftTriggerValue * LAST_FRAME) * (1.0 - SMOOTH_FACTOR) + lastLeftFrame * SMOOTH_FACTOR;
    rightFrame = (rightTriggerValue * LAST_FRAME) * (1.0 - SMOOTH_FACTOR) + lastRightFrame * SMOOTH_FACTOR;

    if (!leftDirection) {
        leftFrame = MAX_FRAMES - leftFrame;
    }
    if (!rightDirection) {
        rightFrame = MAX_FRAMES - rightFrame;
    }

    if ((leftTriggerValue == 1.0) && (leftDirection == true)) {
        leftDirection = false;
        lastLeftFrame = MAX_FRAMES - leftFrame;
    } else if ((leftTriggerValue == 0.0) && (leftDirection == false)) {
        leftDirection = true;
        lastLeftFrame = leftFrame;
    }
    if ((rightTriggerValue == 1.0) && (rightDirection == true)) {
        rightDirection = false;
        lastRightFrame = MAX_FRAMES - rightFrame;
    } else if ((rightTriggerValue == 0.0) && (rightDirection == false)) {
        rightDirection = true;
        lastRightFrame = rightFrame;
    }

    if ((leftFrame != lastLeftFrame) && leftHandAnimation.length){
        MyAvatar.startAnimation(leftHandAnimation, 30.0, 1.0, false, true, leftFrame, leftFrame);
    }
    if ((rightFrame != lastRightFrame) && rightHandAnimation.length) {
        MyAvatar.startAnimation(rightHandAnimation, 30.0, 1.0, false, true, rightFrame, rightFrame);
    }

    lastLeftFrame = leftFrame;
    lastRightFrame = rightFrame;
});

Script.scriptEnding.connect(function() {
    MyAvatar.stopAnimation(leftHandAnimation);
    MyAvatar.stopAnimation(rightHandAnimation);
});
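For reference, a worked instance of the frame mapping used above: with SMOOTH_FACTOR at 0.0 and LAST_FRAME at 15.0, the trigger value scales linearly into the animation frame. A small sketch of that arithmetic, with a sample trigger value chosen only for illustration:

var LAST_FRAME = 15.0;
var SMOOTH_FACTOR = 0.0;
var lastLeftFrame = 0;
var leftTriggerValue = 0.5; // a half-squeezed trigger, for illustration
var leftFrame = (leftTriggerValue * LAST_FRAME) * (1.0 - SMOOTH_FACTOR) + lastLeftFrame * SMOOTH_FACTOR;
print(leftFrame); // 7.5, halfway through the 15-frame close-hand animation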
@@ -13,11 +13,15 @@
/*jslint vars: true*/
var Script, Entities, MyAvatar, Window, Overlays, Controller, Vec3, Quat, print, ToolBar, Settings; // Referenced globals provided by High Fidelity.
Script.include("http://s3.amazonaws.com/hifi-public/scripts/libraries/toolBars.js");
var zombieGameScriptURL = "https://hifi-public.s3.amazonaws.com/eric/scripts/zombieFight.js?v2";
// var zombieGameScriptURL = "zombieFight.js";
Script.include(zombieGameScriptURL);

var zombieFight;

var zombieFight = new ZombieFight();

var hand = "right";

var zombieFight;
var nullActionID = "00000000-0000-0000-0000-000000000000";
var controllerID;
var controllerActive;

@@ -78,7 +82,7 @@ var cleanupButton = toolBar.addOverlay("image", {

var flasher;

var leftTriggerButton = 0;
var leftHandClick = 14;
var leftTriggerValue = 0;
var prevLeftTriggerValue = 0;

@@ -88,7 +92,7 @@ var RIGHT = 1;

var leftPalm = 2 * LEFT;
var rightPalm = 2 * RIGHT;
var rightTriggerButton = 1;
var rightHandClick = 15;
var prevRightTriggerValue = 0;
var rightTriggerValue = 0;
var TRIGGER_THRESHOLD = 0.2;

@@ -357,8 +361,8 @@ function update() {
}

function updateControllerState() {
rightTriggerValue = Controller.getTriggerValue(rightTriggerButton);
leftTriggerValue = Controller.getTriggerValue(leftTriggerButton);
rightTriggerValue = Controller.getActionValue(rightHandClick);
leftTriggerValue = Controller.getActionValue(leftHandClick);

if (rightTriggerValue > TRIGGER_THRESHOLD && !swordHeld) {
grabSword("right")

@@ -470,4 +474,4 @@ function onClick(event) {

Script.scriptEnding.connect(cleanUp);
Script.update.connect(update);
Controller.mousePressEvent.connect(onClick);
Controller.mousePressEvent.connect(onClick);
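The updateControllerState() hunk above shows the controller-polling change this commit applies to the example scripts: per-button trigger reads are replaced with named action reads. Side by side, both lines taken from the diff (rightTriggerButton and rightHandClick are the indices defined earlier in that script):

// before: poll a specific trigger button index
rightTriggerValue = Controller.getTriggerValue(rightTriggerButton);
// after: read the mapped action value instead
rightTriggerValue = Controller.getActionValue(rightHandClick);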
@@ -55,8 +55,8 @@ var warpLine = Overlays.addOverlay("line3d", {
var velocity = { x: 0, y: 0, z: 0 };
var VERY_LONG_TIME = 1000000.0;

var active = Menu.isOptionChecked("Enable VR Mode");
var prevVRMode = Menu.isOptionChecked("Enable VR Mode");
var active = HMD.active;
var prevVRMode = HMD.active;

var hmdControls = (function () {

@@ -121,28 +121,28 @@ var hmdControls = (function () {
velocity = Vec3.sum(velocity, direction);
break;
case findAction("YAW_LEFT"):
if (yawTimer < 0.0 && Menu.isOptionChecked("Enable VR Mode")) {
if (yawTimer < 0.0 && HMD.active) {
yawChange = yawChange + (shifted ? SHIFT_MAG * VR_YAW_INCREMENT : VR_YAW_INCREMENT);
yawTimer = CAMERA_UPDATE_TIME;
} else if (!Menu.isOptionChecked("Enable VR Mode")) {
} else if (!HMD.active) {
yawChange = yawChange + (shifted ? SHIFT_MAG * YAW_INCREMENT : YAW_INCREMENT);
}
break;
case findAction("YAW_RIGHT"):
if (yawTimer < 0.0 && Menu.isOptionChecked("Enable VR Mode")) {
if (yawTimer < 0.0 && HMD.active) {
yawChange = yawChange - (shifted ? SHIFT_MAG * VR_YAW_INCREMENT : VR_YAW_INCREMENT);
yawTimer = CAMERA_UPDATE_TIME;
} else if (!Menu.isOptionChecked("Enable VR Mode")) {
} else if (!HMD.active) {
yawChange = yawChange - (shifted ? SHIFT_MAG * YAW_INCREMENT : YAW_INCREMENT);
}
break;
case findAction("PITCH_DOWN"):
if (!Menu.isOptionChecked("Enable VR Mode")) {
if (!HMD.active) {
pitchChange = pitchChange - (shifted ? SHIFT_MAG * PITCH_INCREMENT : PITCH_INCREMENT);
}
break;
case findAction("PITCH_UP"):
if (!Menu.isOptionChecked("Enable VR Mode")) {
if (!HMD.active) {
pitchChange = pitchChange + (shifted ? SHIFT_MAG * PITCH_INCREMENT : PITCH_INCREMENT);
}
break;

@@ -175,9 +175,9 @@ var hmdControls = (function () {
}

function update(dt) {
if (prevVRMode != Menu.isOptionChecked("Enable VR Mode")) {
active = Menu.isOptionChecked("Enable VR Mode");
prevVRMode = Menu.isOptionChecked("Enable VR Mode");
if (prevVRMode != HMD.active) {
active = HMD.active;
prevVRMode = HMD.active;
}

if (yawTimer >= 0.0) {
@@ -130,7 +130,6 @@ var heights = [];
var myAlpha = [];
var arrays = [];
var isOnHMD = false,
ENABLE_VR_MODE = "Enable VR Mode",
NOTIFICATIONS_3D_DIRECTION = 0.0, // Degrees from avatar orientation.
NOTIFICATIONS_3D_DISTANCE = 0.6, // Horizontal distance from avatar position.
NOTIFICATIONS_3D_ELEVATION = -0.8, // Height of top middle of top notification relative to avatar eyes.

@@ -414,7 +413,7 @@ function update() {
j,
k;

if (isOnHMD !== Menu.isOptionChecked(ENABLE_VR_MODE)) {
if (isOnHMD !== HMD.active) {
while (arrays.length > 0) {
deleteNotification(0);
}
@@ -41,7 +41,6 @@
SCALE_2D = 0.35, // Scale the SVGs for 2D display.
background3D = {},
bar3D = {},
ENABLE_VR_MODE_MENU_ITEM = "Enable VR Mode",
PROGRESS_3D_DIRECTION = 0.0, // Degrees from avatar orientation.
PROGRESS_3D_DISTANCE = 0.602, // Horizontal distance from avatar position.
PROGRESS_3D_ELEVATION = -0.8, // Height of top middle of top notification relative to avatar eyes.

@@ -157,7 +156,7 @@
eyePosition,
avatarOrientation;

if (isOnHMD !== Menu.isOptionChecked(ENABLE_VR_MODE_MENU_ITEM)) {
if (isOnHMD !== HMD.active) {
deleteOverlays();
isOnHMD = !isOnHMD;
createOverlays();
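The same substitution repeats across the preceding script hunks: the scripts stop polling the "Enable VR Mode" menu item and read the HMD state directly. The pattern, with both lines taken verbatim from the diff:

// before: infer HMD mode from a menu checkbox
var active = Menu.isOptionChecked("Enable VR Mode");
// after: ask the HMD interface directly
var active = HMD.active;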
interface/CMakeLists.txt

@@ -2,7 +2,7 @@ set(TARGET_NAME interface)
project(${TARGET_NAME})

# set a default root dir for each of our optional externals if it was not passed
set(OPTIONAL_EXTERNALS "Faceshift" "Sixense" "LeapMotion" "RtMidi" "SDL2" "RSSDK" "3DConnexionClient" "iViewHMD")
set(OPTIONAL_EXTERNALS "Faceshift" "LeapMotion" "RtMidi" "RSSDK" "3DConnexionClient" "iViewHMD")
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
string(TOUPPER ${EXTERNAL} ${EXTERNAL}_UPPERCASE)
if (NOT ${${EXTERNAL}_UPPERCASE}_ROOT_DIR)

@@ -29,18 +29,6 @@ endif()

configure_file(InterfaceVersion.h.in "${PROJECT_BINARY_DIR}/includes/InterfaceVersion.h")

macro(GroupSources curdir)
file(GLOB children RELATIVE ${PROJECT_SOURCE_DIR}/${curdir} ${PROJECT_SOURCE_DIR}/${curdir}/*)
foreach(child ${children})
if(IS_DIRECTORY ${PROJECT_SOURCE_DIR}/${curdir}/${child})
GroupSources(${curdir}/${child})
else()
string(REPLACE "/" "\\" groupname ${curdir})
source_group(${groupname} FILES ${PROJECT_SOURCE_DIR}/${curdir}/${child})
endif()
endforeach()
endmacro()

# grab the implementation and header files from src dirs
file(GLOB_RECURSE INTERFACE_SRCS "src/*.cpp" "src/*.h")
GroupSources("src")

@@ -115,16 +103,12 @@ else()
add_executable(${TARGET_NAME} ${INTERFACE_SRCS} ${QM})
endif()

# set up the external glm library
add_dependency_external_projects(glm bullet)

# set up the external glm library
find_package(GLM REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${GLM_INCLUDE_DIRS})

add_dependency_external_projects(LibOVR)
find_package(LibOVR REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVR_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${LIBOVR_LIBRARIES})

find_package(Bullet REQUIRED)

# perform the system include hack for OS X to ignore warnings

@@ -137,9 +121,10 @@ endif()
target_link_libraries(${TARGET_NAME} ${BULLET_LIBRARIES})

# link required hifi libraries
link_hifi_libraries(shared octree environment gpu model render fbx networking entities avatars
audio audio-client animation script-engine physics
render-utils entities-renderer ui auto-updater)
link_hifi_libraries(shared octree environment gpu model render fbx networking entities avatars
audio audio-client animation script-engine physics
render-utils entities-renderer ui auto-updater
plugins display-plugins input-plugins)

add_dependency_external_projects(sdl2)
File diff suppressed because it is too large
@ -36,6 +36,8 @@
|
|||
#include <StDev.h>
|
||||
#include <udt/PacketHeaders.h>
|
||||
#include <ViewFrustum.h>
|
||||
#include <plugins/PluginContainer.h>
|
||||
#include <plugins/PluginManager.h>
|
||||
|
||||
#include "AudioClient.h"
|
||||
#include "Bookmarks.h"
|
||||
|
@ -47,13 +49,12 @@
|
|||
#include "Stars.h"
|
||||
#include "avatar/Avatar.h"
|
||||
#include "avatar/MyAvatar.h"
|
||||
#include "devices/SixenseManager.h"
|
||||
#include <input-plugins/KeyboardMouseDevice.h>
|
||||
#include "scripting/ControllerScriptingInterface.h"
|
||||
#include "scripting/DialogsManagerScriptingInterface.h"
|
||||
#include "scripting/WebWindowClass.h"
|
||||
#include "ui/AudioStatsDialog.h"
|
||||
#include "ui/BandwidthDialog.h"
|
||||
#include "ui/HMDToolsDialog.h"
|
||||
#include "ui/ModelsBrowser.h"
|
||||
#include "ui/OctreeStatsDialog.h"
|
||||
#include "ui/SnapshotShareDialog.h"
|
||||
|
@ -62,10 +63,9 @@
|
|||
#include "ui/overlays/Overlays.h"
|
||||
#include "ui/ApplicationOverlay.h"
|
||||
#include "ui/ApplicationCompositor.h"
|
||||
#include "ui/OverlayConductor.h"
|
||||
#include "ui/RunningScriptsWidget.h"
|
||||
#include "ui/ToolWindow.h"
|
||||
#include "ui/UserInputMapper.h"
|
||||
#include "devices/KeyboardMouseDevice.h"
|
||||
#include "octree/OctreePacketProcessor.h"
|
||||
#include "UndoStackScriptingInterface.h"
|
||||
|
||||
|
@ -79,6 +79,7 @@ class QMouseEvent;
|
|||
class QSystemTrayIcon;
|
||||
class QTouchEvent;
|
||||
class QWheelEvent;
|
||||
class OffscreenGlCanvas;
|
||||
|
||||
class GLCanvas;
|
||||
class FaceTracker;
|
||||
|
@ -86,6 +87,12 @@ class MainWindow;
|
|||
class Node;
|
||||
class ScriptEngine;
|
||||
|
||||
namespace gpu {
|
||||
class Context;
|
||||
typedef std::shared_ptr<Context> ContextPointer;
|
||||
}
|
||||
|
||||
|
||||
static const QString SNAPSHOT_EXTENSION = ".jpg";
|
||||
static const QString SVO_EXTENSION = ".svo";
|
||||
static const QString SVO_JSON_EXTENSION = ".svo.json";
|
||||
|
@ -124,7 +131,7 @@ class Application;
|
|||
|
||||
typedef bool (Application::* AcceptURLMethod)(const QString &);
|
||||
|
||||
class Application : public QApplication, public AbstractViewStateInterface, public AbstractScriptingServicesInterface {
|
||||
class Application : public QApplication, public AbstractViewStateInterface, public AbstractScriptingServicesInterface, PluginContainer {
|
||||
Q_OBJECT
|
||||
|
||||
friend class OctreePacketProcessor;
|
||||
|
@ -136,7 +143,6 @@ public:
|
|||
static glm::quat getOrientationForPath() { return getInstance()->_myAvatar->getOrientation(); }
|
||||
static glm::vec3 getPositionForAudio() { return getInstance()->_myAvatar->getHead()->getPosition(); }
|
||||
static glm::quat getOrientationForAudio() { return getInstance()->_myAvatar->getHead()->getFinalOrientationInWorldFrame(); }
|
||||
static UserInputMapper* getUserInputMapper() { return &getInstance()->_userInputMapper; }
|
||||
static void initPlugins();
|
||||
static void shutdownPlugins();
|
||||
|
||||
|
@ -179,6 +185,7 @@ public:
|
|||
bool eventFilter(QObject* object, QEvent* event);
|
||||
|
||||
glm::uvec2 getCanvasSize() const;
|
||||
glm::uvec2 getUiSize() const;
|
||||
QSize getDeviceSize() const;
|
||||
bool hasFocus() const;
|
||||
PickRay computePickRay() const;
|
||||
|
@ -262,11 +269,6 @@ public:
|
|||
void displaySide(RenderArgs* renderArgs, Camera& whichCamera, bool selfAvatarOnly = false, bool billboard = false);
|
||||
|
||||
virtual const glm::vec3& getShadowDistances() const { return _shadowDistances; }
|
||||
|
||||
/// Computes the off-axis frustum parameters for the view frustum, taking mirroring into account.
|
||||
virtual void computeOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
|
||||
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) const;
|
||||
|
||||
virtual ViewFrustum* getCurrentViewFrustum() { return getDisplayViewFrustum(); }
|
||||
virtual QThread* getMainThread() { return thread(); }
|
||||
virtual float getSizeScale() const;
|
||||
|
@ -277,6 +279,24 @@ public:
|
|||
virtual void endOverrideEnvironmentData() { _environment.endOverride(); }
|
||||
virtual qreal getDevicePixelRatio();
|
||||
|
||||
// Plugin container support
|
||||
virtual void addMenu(const QString& menuName);
|
||||
virtual void removeMenu(const QString& menuName);
|
||||
virtual void addMenuItem(const QString& path, const QString& name, std::function<void(bool)> onClicked, bool checkable, bool checked, const QString& groupName);
|
||||
virtual void removeMenuItem(const QString& menuName, const QString& menuItem);
|
||||
virtual bool isOptionChecked(const QString& name);
|
||||
virtual void setIsOptionChecked(const QString& path, bool checked);
|
||||
virtual void setFullscreen(const QScreen* target) override;
|
||||
virtual void unsetFullscreen(const QScreen* avoid) override;
|
||||
virtual void showDisplayPluginsTools() override;
|
||||
virtual QGLWidget* getPrimarySurface() override;
|
||||
|
||||
void setActiveDisplayPlugin(const QString& pluginName);
|
||||
private:
|
||||
DisplayPlugin * getActiveDisplayPlugin();
|
||||
const DisplayPlugin * getActiveDisplayPlugin() const;
|
||||
public:
|
||||
|
||||
FileLogger* getLogger() { return _logger; }
|
||||
|
||||
glm::vec2 getViewportDimensions() const;
|
||||
|
@ -300,10 +320,9 @@ public:
|
|||
// rendering of several elements depend on that
|
||||
// TODO: carry that information on the Camera as a setting
|
||||
bool isHMDMode() const;
|
||||
glm::quat getHeadOrientation() const;
|
||||
glm::vec3 getHeadPosition() const;
|
||||
glm::mat4 getHeadPose() const;
|
||||
glm::mat4 getHMDSensorPose() const;
|
||||
glm::mat4 getEyePose(int eye) const;
|
||||
glm::mat4 getEyeOffset(int eye) const;
|
||||
glm::mat4 getEyeProjection(int eye) const;
|
||||
|
||||
QRect getDesirableApplicationGeometry();
|
||||
|
@ -353,6 +372,7 @@ signals:
|
|||
void fullAvatarURLChanged(const QString& newValue, const QString& modelName);
|
||||
|
||||
void beforeAboutToQuit();
|
||||
void activeDisplayPluginChanged();
|
||||
|
||||
public slots:
|
||||
void setSessionUUID(const QUuid& sessionUUID);
|
||||
|
@ -361,6 +381,8 @@ public slots:
|
|||
void nodeAdded(SharedNodePointer node);
|
||||
void nodeKilled(SharedNodePointer node);
|
||||
void packetSent(quint64 length);
|
||||
void updateDisplayMode();
|
||||
void updateInputModes();
|
||||
|
||||
QVector<EntityItemID> pasteEntities(float x, float y, float z);
|
||||
bool exportEntities(const QString& filename, const QVector<EntityItemID>& entityIDs);
|
||||
|
@ -436,15 +458,8 @@ private slots:
|
|||
|
||||
void connectedToDomain(const QString& hostname);
|
||||
|
||||
friend class HMDToolsDialog;
|
||||
void setFullscreen(bool fullscreen);
|
||||
void setEnable3DTVMode(bool enable3DTVMode);
|
||||
void setEnableVRMode(bool enableVRMode);
|
||||
|
||||
void rotationModeChanged();
|
||||
|
||||
glm::vec2 getScaledScreenPoint(glm::vec2 projectedPoint);
|
||||
|
||||
void closeMirrorView();
|
||||
void restoreMirrorView();
|
||||
void shrinkMirrorView();
|
||||
|
@ -472,6 +487,9 @@ private:
|
|||
|
||||
void update(float deltaTime);
|
||||
|
||||
void setPalmData(Hand* hand, UserInputMapper::PoseValue pose, float deltaTime, int index);
|
||||
void emulateMouse(Hand* hand, float click, float shift, int index);
|
||||
|
||||
// Various helper functions called during update()
|
||||
void updateLOD();
|
||||
void updateMouseRay();
|
||||
|
@ -500,6 +518,11 @@ private:
|
|||
int sendNackPackets();
|
||||
|
||||
bool _dependencyManagerIsSetup;
|
||||
|
||||
OffscreenGlCanvas* _offscreenContext;
|
||||
DisplayPluginPointer _displayPlugin;
|
||||
InputPluginList _activeInputPlugins;
|
||||
|
||||
MainWindow* _window;
|
||||
|
||||
ToolWindow* _toolWindow;
|
||||
|
@ -537,11 +560,10 @@ private:
|
|||
|
||||
OctreeQuery _octreeQuery; // NodeData derived class for querying octee cells from octree servers
|
||||
|
||||
KeyboardMouseDevice _keyboardMouseDevice; // Default input device, the good old keyboard mouse and maybe touchpad
|
||||
UserInputMapper _userInputMapper; // User input mapper allowing to mapp different real devices to the action channels that the application has to offer
|
||||
MyAvatar* _myAvatar; // TODO: move this and relevant code to AvatarManager (or MyAvatar as the case may be)
|
||||
Camera _myCamera; // My view onto the world
|
||||
Camera _mirrorCamera; // Cammera for mirror view
|
||||
KeyboardMouseDevice* _keyboardMouseDevice{ nullptr }; // Default input device, the good old keyboard mouse and maybe touchpad
|
||||
MyAvatar* _myAvatar; // TODO: move this and relevant code to AvatarManager (or MyAvatar as the case may be)
|
||||
Camera _myCamera; // My view onto the world
|
||||
Camera _mirrorCamera; // Cammera for mirror view
|
||||
QRect _mirrorViewRect;
|
||||
|
||||
Setting::Handle<bool> _firstRun;
|
||||
|
@ -628,9 +650,6 @@ private:
|
|||
|
||||
void checkSkeleton();
|
||||
|
||||
QWidget* _fullscreenMenuWidget = new QWidget();
|
||||
int _menuBarHeight;
|
||||
|
||||
QHash<QString, AcceptURLMethod> _acceptedExtensions;
|
||||
|
||||
QList<QString> _domainConnectionRefusals;
|
||||
|
@ -647,8 +666,16 @@ private:
|
|||
Overlays _overlays;
|
||||
ApplicationOverlay _applicationOverlay;
|
||||
ApplicationCompositor _compositor;
|
||||
OverlayConductor _overlayConductor;
|
||||
|
||||
int _oldHandMouseX[2];
|
||||
int _oldHandMouseY[2];
|
||||
bool _oldHandLeftClick[2];
|
||||
bool _oldHandRightClick[2];
|
||||
int _numFramesSinceLastResize = 0;
|
||||
|
||||
bool _overlayEnabled = true;
|
||||
QRect _savedGeometry;
|
||||
DialogsManagerScriptingInterface* _dialogsManagerScriptingInterface = new DialogsManagerScriptingInterface();
|
||||
};
|
||||
|
||||
|
|
|
@ -46,11 +46,7 @@ QString modeToString(CameraMode mode) {
|
|||
}
|
||||
|
||||
Camera::Camera() :
|
||||
_mode(CAMERA_MODE_THIRD_PERSON),
|
||||
_position(0.0f, 0.0f, 0.0f),
|
||||
_projection(glm::perspective(glm::radians(DEFAULT_FIELD_OF_VIEW_DEGREES), 16.0f/9.0f, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP)),
|
||||
_isKeepLookingAt(false),
|
||||
_lookingAt(0.0f, 0.0f, 0.0f)
|
||||
_projection(glm::perspective(glm::radians(DEFAULT_FIELD_OF_VIEW_DEGREES), 16.0f/9.0f, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP))
|
||||
{
|
||||
}
|
||||
|
||||
|
@ -61,12 +57,33 @@ void Camera::update(float deltaTime) {
|
|||
return;
|
||||
}
|
||||
|
||||
void Camera::recompose() {
|
||||
mat4 orientation = glm::mat4_cast(_rotation);
|
||||
mat4 translation = glm::translate(mat4(), _position);
|
||||
_transform = translation * orientation;
|
||||
}
|
||||
|
||||
void Camera::decompose() {
|
||||
_position = vec3(_transform[3]);
|
||||
_rotation = glm::quat_cast(_transform);
|
||||
}
|
||||
|
||||
void Camera::setTransform(const glm::mat4& transform) {
|
||||
_transform = transform;
|
||||
decompose();
|
||||
}
|
||||
|
||||
void Camera::setPosition(const glm::vec3& position) {
|
||||
_position = position;
|
||||
_position = position;
|
||||
recompose();
|
||||
if (_isKeepLookingAt) {
|
||||
lookAt(_lookingAt);
|
||||
}
|
||||
}
|
||||
|
||||
void Camera::setRotation(const glm::quat& rotation) {
|
||||
_rotation = rotation;
|
||||
recompose();
|
||||
if (_isKeepLookingAt) {
|
||||
lookAt(_lookingAt);
|
||||
}
|
||||
|
@ -129,3 +146,21 @@ void Camera::keepLookingAt(const glm::vec3& point) {
|
|||
_isKeepLookingAt = true;
|
||||
_lookingAt = point;
|
||||
}
|
||||
|
||||
void Camera::loadViewFrustum(ViewFrustum& frustum) const {
|
||||
// We will use these below, from either the camera or head vectors calculated above
|
||||
frustum.setProjection(getProjection());
|
||||
|
||||
// Set the viewFrustum up with the correct position and orientation of the camera
|
||||
frustum.setPosition(getPosition());
|
||||
frustum.setOrientation(getRotation());
|
||||
|
||||
// Ask the ViewFrustum class to calculate our corners
|
||||
frustum.calculate();
|
||||
}
|
||||
|
||||
ViewFrustum Camera::toViewFrustum() const {
|
||||
ViewFrustum result;
|
||||
loadViewFrustum(result);
|
||||
return result;
|
||||
}
|
||||
|
|
|
@ -43,24 +43,31 @@ public:
|
|||
|
||||
void update( float deltaTime );
|
||||
|
||||
void setRotation(const glm::quat& rotation);
|
||||
void setProjection(const glm::mat4 & projection);
|
||||
CameraMode getMode() const { return _mode; }
|
||||
void setMode(CameraMode m);
|
||||
|
||||
glm::quat getRotation() const { return _rotation; }
|
||||
const glm::mat4& getProjection() const { return _projection; }
|
||||
CameraMode getMode() const { return _mode; }
|
||||
void loadViewFrustum(ViewFrustum& frustum) const;
|
||||
ViewFrustum toViewFrustum() const;
|
||||
|
||||
public slots:
|
||||
QString getModeString() const;
|
||||
void setModeString(const QString& mode);
|
||||
|
||||
glm::quat getRotation() const { return _rotation; }
|
||||
void setRotation(const glm::quat& rotation);
|
||||
|
||||
glm::vec3 getPosition() const { return _position; }
|
||||
void setPosition(const glm::vec3& position);
|
||||
|
||||
glm::quat getOrientation() const { return getRotation(); }
|
||||
void setOrientation(const glm::quat& orientation) { setRotation(orientation); }
|
||||
|
||||
const glm::mat4& getTransform() const { return _transform; }
|
||||
void setTransform(const glm::mat4& transform);
|
||||
|
||||
const glm::mat4& getProjection() const { return _projection; }
|
||||
void setProjection(const glm::mat4& projection);
|
||||
|
||||
PickRay computePickRay(float x, float y);
|
||||
|
||||
// These only work on independent cameras
|
||||
|
@ -78,11 +85,17 @@ signals:
|
|||
void modeUpdated(const QString& newMode);
|
||||
|
||||
private:
|
||||
CameraMode _mode;
|
||||
void recompose();
|
||||
void decompose();
|
||||
|
||||
CameraMode _mode{ CAMERA_MODE_THIRD_PERSON };
|
||||
glm::mat4 _transform;
|
||||
glm::mat4 _projection;
|
||||
|
||||
// derived
|
||||
glm::vec3 _position;
|
||||
glm::quat _rotation;
|
||||
glm::mat4 _projection;
|
||||
bool _isKeepLookingAt;
|
||||
bool _isKeepLookingAt{ false };
|
||||
glm::vec3 _lookingAt;
|
||||
};
|
||||
|
||||
|
|
|
@ -9,12 +9,13 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "Application.h"
|
||||
#include "GLCanvas.h"
|
||||
|
||||
#include <QMimeData>
|
||||
#include <QUrl>
|
||||
#include <QWindow>
|
||||
|
||||
#include "Application.h"
|
||||
#include "GLCanvas.h"
|
||||
#include "MainWindow.h"
|
||||
|
||||
const int MSECS_PER_FRAME_WHEN_THROTTLED = 66;
|
||||
|
@ -63,7 +64,6 @@ int GLCanvas::getDeviceHeight() const {
|
|||
}
|
||||
|
||||
void GLCanvas::initializeGL() {
|
||||
Application::getInstance()->initializeGL();
|
||||
setAttribute(Qt::WA_AcceptTouchEvents);
|
||||
setAcceptDrops(true);
|
||||
connect(Application::getInstance(), SIGNAL(applicationStateChanged(Qt::ApplicationState)), this, SLOT(activeChanged(Qt::ApplicationState)));
|
||||
|
|
|
@ -22,7 +22,7 @@ class GLCanvas : public QGLWidget {
|
|||
|
||||
public:
|
||||
GLCanvas();
|
||||
|
||||
|
||||
void stopFrameTimer();
|
||||
|
||||
bool isThrottleRendering() const;
|
||||
|
|
|
@ -97,10 +97,6 @@ void MainWindow::changeEvent(QEvent* event) {
|
|||
} else {
|
||||
emit windowShown(true);
|
||||
}
|
||||
|
||||
if (isFullScreen() != Menu::getInstance()->isOptionChecked(MenuOption::Fullscreen)) {
|
||||
Menu::getInstance()->setIsOptionChecked(MenuOption::Fullscreen, isFullScreen());
|
||||
}
|
||||
} else if (event->type() == QEvent::ActivationChange) {
|
||||
if (isActiveWindow()) {
|
||||
emit windowShown(true);
|
||||
|
|
|
@ -28,7 +28,6 @@
|
|||
#include "devices/DdeFaceTracker.h"
|
||||
#include "devices/Faceshift.h"
|
||||
#include "devices/RealSense.h"
|
||||
#include "devices/SixenseManager.h"
|
||||
#include "devices/3DConnexionClient.h"
|
||||
#include "MainWindow.h"
|
||||
#include "scripting/MenuScriptingInterface.h"
|
||||
|
@ -221,9 +220,20 @@ Menu::Menu() {
|
|||
addActionToQMenuAndActionHash(toolsMenu, MenuOption::PackageModel, 0,
|
||||
qApp, SLOT(packageModel()));
|
||||
|
||||
MenuWrapper* displayMenu = addMenu("Display");
|
||||
{
|
||||
MenuWrapper* displayModeMenu = addMenu(MenuOption::OutputMenu);
|
||||
QActionGroup* displayModeGroup = new QActionGroup(displayModeMenu);
|
||||
displayModeGroup->setExclusive(true);
|
||||
}
|
||||
|
||||
MenuWrapper* avatarMenu = addMenu("Avatar");
|
||||
QObject* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
|
||||
MenuWrapper* inputModeMenu = addMenu(MenuOption::InputMenu);
|
||||
QActionGroup* inputModeGroup = new QActionGroup(inputModeMenu);
|
||||
inputModeGroup->setExclusive(false);
|
||||
|
||||
MenuWrapper* avatarSizeMenu = avatarMenu->addMenu("Size");
|
||||
addActionToQMenuAndActionHash(avatarSizeMenu,
|
||||
MenuOption::IncreaseAvatarSize,
|
||||
|
@ -242,26 +252,16 @@ Menu::Menu() {
|
|||
SLOT(resetSize()));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::KeyboardMotorControl,
|
||||
Qt::CTRL | Qt::SHIFT | Qt::Key_K, true, avatar, SLOT(updateMotionBehavior()));
|
||||
Qt::CTRL | Qt::SHIFT | Qt::Key_K, true, avatar, SLOT(updateMotionBehaviorFromMenu()));
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ScriptedMotorControl, 0, true,
|
||||
avatar, SLOT(updateMotionBehavior()));
|
||||
avatar, SLOT(updateMotionBehaviorFromMenu()));
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::NamesAboveHeads, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::BlueSpeechSphere, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::EnableCharacterController, 0, true,
|
||||
avatar, SLOT(updateMotionBehavior()));
|
||||
|
||||
MenuWrapper* viewMenu = addMenu("View");
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu,
|
||||
MenuOption::Fullscreen,
|
||||
#ifdef Q_OS_MAC
|
||||
Qt::CTRL | Qt::META | Qt::Key_F,
|
||||
#else
|
||||
Qt::CTRL | Qt::Key_F,
|
||||
#endif
|
||||
false,
|
||||
qApp,
|
||||
SLOT(setFullscreen(bool)));
|
||||
addActionToQMenuAndActionHash(viewMenu, MenuOption::ReloadContent, 0, qApp, SLOT(reloadResourceCaches()));
|
||||
|
||||
MenuWrapper* cameraModeMenu = viewMenu->addMenu("Camera Mode");
|
||||
QActionGroup* cameraModeGroup = new QActionGroup(cameraModeMenu);
|
||||
|
@ -299,18 +299,11 @@ Menu::Menu() {
|
|||
dialogsManager.data(),
|
||||
SLOT(hmdTools(bool)));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::EnableVRMode, 0,
|
||||
false,
|
||||
qApp,
|
||||
SLOT(setEnableVRMode(bool)));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Enable3DTVMode, 0,
|
||||
false,
|
||||
qApp,
|
||||
SLOT(setEnable3DTVMode(bool)));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::TurnWithHead, 0, false);
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::StandingHMDSensorMode, 0, false,
|
||||
avatar, SLOT(updateStandingHMDModeFromMenu()));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Stats);
|
||||
addActionToQMenuAndActionHash(viewMenu, MenuOption::Log,
|
||||
Qt::CTRL | Qt::SHIFT | Qt::Key_L,
|
||||
|
@ -471,30 +464,11 @@ Menu::Menu() {
|
|||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlternateIK, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHands, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHandTargets, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::HandMouseInput, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::LowVelocityFilter, 0, true,
|
||||
qApp, SLOT(setLowVelocityFilter(bool)));
|
||||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::ShowIKConstraints, 0, false);
|
||||
|
||||
MenuWrapper* sixenseOptionsMenu = handOptionsMenu->addMenu("Sixense");
|
||||
#ifdef __APPLE__
|
||||
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu,
|
||||
MenuOption::SixenseEnabled,
|
||||
0, false,
|
||||
&SixenseManager::getInstance(),
|
||||
SLOT(toggleSixense(bool)));
|
||||
#endif
|
||||
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu,
|
||||
MenuOption::FilterSixense,
|
||||
0,
|
||||
true,
|
||||
&SixenseManager::getInstance(),
|
||||
SLOT(setFilter(bool)));
|
||||
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu,
|
||||
MenuOption::LowVelocityFilter,
|
||||
0,
|
||||
true,
|
||||
qApp,
|
||||
SLOT(setLowVelocityFilter(bool)));
|
||||
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::SixenseMouseInput, 0, true);
|
||||
|
||||
MenuWrapper* leapOptionsMenu = handOptionsMenu->addMenu("Leap Motion");
|
||||
addCheckableActionToQMenuAndActionHash(leapOptionsMenu, MenuOption::LeapMotionOnHMD, 0, false);
|
||||
|
||||
|
|
|
@ -80,6 +80,13 @@ public:
|
|||
const QKeySequence& shortcut = 0,
|
||||
QAction::MenuRole role = QAction::NoRole,
|
||||
int menuItemLocation = UNSPECIFIED_POSITION);
|
||||
QAction* addCheckableActionToQMenuAndActionHash(MenuWrapper* destinationMenu,
|
||||
const QString& actionName,
|
||||
const QKeySequence& shortcut = 0,
|
||||
const bool checked = false,
|
||||
const QObject* receiver = NULL,
|
||||
const char* member = NULL,
|
||||
int menuItemLocation = UNSPECIFIED_POSITION);
|
||||
|
||||
void removeAction(MenuWrapper* menu, const QString& actionName);
|
||||
|
||||
|
@ -109,14 +116,6 @@ private:
|
|||
void addDisabledActionAndSeparator(MenuWrapper* destinationMenu, const QString& actionName,
|
||||
int menuItemLocation = UNSPECIFIED_POSITION);
|
||||
|
||||
QAction* addCheckableActionToQMenuAndActionHash(MenuWrapper* destinationMenu,
|
||||
const QString& actionName,
|
||||
const QKeySequence& shortcut = 0,
|
||||
const bool checked = false,
|
||||
const QObject* receiver = NULL,
|
||||
const char* member = NULL,
|
||||
int menuItemLocation = UNSPECIFIED_POSITION);
|
||||
|
||||
QAction* getActionFromName(const QString& menuName, MenuWrapper* menu);
|
||||
MenuWrapper* getSubMenuFromName(const QString& menuName, MenuWrapper* menu);
|
||||
MenuWrapper* getMenuParent(const QString& menuName, QString& finalMenuPart);
|
||||
|
@ -186,23 +185,23 @@ namespace MenuOption {
|
|||
const QString EditEntitiesHelp = "Edit Entities Help...";
|
||||
const QString Enable3DTVMode = "Enable 3DTV Mode";
|
||||
const QString EnableCharacterController = "Enable avatar collisions";
|
||||
const QString EnableVRMode = "Enable VR Mode";
|
||||
const QString ExpandMyAvatarSimulateTiming = "Expand /myAvatar/simulation";
|
||||
const QString ExpandMyAvatarTiming = "Expand /myAvatar";
|
||||
const QString ExpandOtherAvatarTiming = "Expand /otherAvatar";
|
||||
const QString ExpandPaintGLTiming = "Expand /paintGL";
|
||||
const QString ExpandUpdateTiming = "Expand /update";
|
||||
const QString Faceshift = "Faceshift";
|
||||
const QString FilterSixense = "Smooth Sixense Movement";
|
||||
const QString FirstPerson = "First Person";
|
||||
const QString FivePointCalibration = "5 Point Calibration";
|
||||
const QString Forward = "Forward";
|
||||
const QString FrameTimer = "Show Timer";
|
||||
const QString Fullscreen = "Fullscreen";
|
||||
const QString FullscreenMirror = "Fullscreen Mirror";
|
||||
const QString GlowWhenSpeaking = "Glow When Speaking";
|
||||
const QString HandMouseInput = "Enable Hand Mouse Input";
|
||||
const QString HMDTools = "HMD Tools";
|
||||
const QString IncreaseAvatarSize = "Increase Avatar Size";
|
||||
const QString IndependentMode = "Independent Mode";
|
||||
const QString InputMenu = "Avatar>Input Devices";
|
||||
const QString KeyboardMotorControl = "Enable Keyboard Motor Control";
|
||||
const QString LeapMotionOnHMD = "Leap Motion on HMD";
|
||||
const QString LoadScript = "Open and Run Script File...";
|
||||
|
@ -222,6 +221,7 @@ namespace MenuOption {
|
|||
const QString OctreeStats = "Entity Statistics";
|
||||
const QString OnePointCalibration = "1 Point Calibration";
|
||||
const QString OnlyDisplayTopTen = "Only Display Top Ten";
|
||||
const QString OutputMenu = "Display>Mode";
|
||||
const QString PackageModel = "Package Model...";
|
||||
const QString Pair = "Pair";
|
||||
const QString PhysicsShowOwned = "Highlight Simulation Ownership";
|
||||
|
@ -273,8 +273,7 @@ namespace MenuOption {
|
|||
const QString ShowIKConstraints = "Show IK Constraints";
|
||||
const QString ShowRealtimeEntityStats = "Show Realtime Entity Stats";
|
||||
const QString ShowWhosLookingAtMe = "Show Who's Looking at Me";
|
||||
const QString SixenseEnabled = "Enable Hydra Support";
|
||||
const QString SixenseMouseInput = "Enable Sixense Mouse Input";
|
||||
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode";
|
||||
const QString SimulateEyeTracking = "Simulate";
|
||||
const QString SMIEyeTracking = "SMI Eye Tracking";
|
||||
const QString Stars = "Stars";
|
||||
|
|
|
@ -12,8 +12,6 @@
|
|||
#include <QStyle>
|
||||
#include <QStyleOptionTitleBar>
|
||||
|
||||
#include "GLCanvas.h"
|
||||
|
||||
#include "UIUtil.h"
|
||||
|
||||
int UIUtil::getWindowTitleBarHeight(const QWidget* window) {
|
||||
|
|
|
@ -247,7 +247,7 @@ void Avatar::simulate(float deltaTime) {
|
|||
}
|
||||
|
||||
void Avatar::slamPosition(const glm::vec3& newPosition) {
|
||||
AvatarData::setPosition(newPosition);
|
||||
setPosition(newPosition);
|
||||
_positionDeltaAccumulator = glm::vec3(0.0f);
|
||||
_velocity = glm::vec3(0.0f);
|
||||
_lastVelocity = glm::vec3(0.0f);
|
||||
|
|
|
@ -279,7 +279,7 @@ void AvatarManager::handleCollisionEvents(CollisionEvents& collisionEvents) {
|
|||
const QString& collisionSoundURL = myAvatar->getCollisionSoundURL();
|
||||
if (!collisionSoundURL.isEmpty()) {
|
||||
const float velocityChange = glm::length(collision.velocityChange);
|
||||
const float MIN_AVATAR_COLLISION_ACCELERATION = 0.01;
|
||||
const float MIN_AVATAR_COLLISION_ACCELERATION = 0.01f;
|
||||
const bool isSound = (collision.type == CONTACT_EVENT_TYPE_START) && (velocityChange > MIN_AVATAR_COLLISION_ACCELERATION);
|
||||
|
||||
if (!isSound) {
|
||||
|
|
|
@@ -123,12 +123,15 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
auto eyeTracker = DependencyManager::get<EyeTracker>();
_isEyeTrackerConnected = eyeTracker->isTracking();
}
// Twist the upper body to follow the rotation of the head, but only do this with my avatar,
// since everyone else will see the full joint rotations for other people.
const float BODY_FOLLOW_HEAD_YAW_RATE = 0.1f;
const float BODY_FOLLOW_HEAD_FACTOR = 0.66f;
float currentTwist = getTorsoTwist();
setTorsoTwist(currentTwist + (getFinalYaw() * BODY_FOLLOW_HEAD_FACTOR - currentTwist) * BODY_FOLLOW_HEAD_YAW_RATE);
if (!myAvatar->getStandingHMDSensorMode()) {
// Twist the upper body to follow the rotation of the head, but only do this with my avatar,
// since everyone else will see the full joint rotations for other people.
const float BODY_FOLLOW_HEAD_YAW_RATE = 0.1f;
const float BODY_FOLLOW_HEAD_FACTOR = 0.66f;
float currentTwist = getTorsoTwist();
setTorsoTwist(currentTwist + (getFinalYaw() * BODY_FOLLOW_HEAD_FACTOR - currentTwist) * BODY_FOLLOW_HEAD_YAW_RATE);
}
}
if (!(_isFaceTrackerConnected || billboard)) {
@@ -365,14 +368,20 @@ bool Head::isLookingAtMe() {
glm::quat Head::getCameraOrientation() const {
// NOTE: Head::getCameraOrientation() is not used for orienting the camera "view" while in Oculus mode, so
// you may wonder why this code is here. This method will be called while in Oculus mode to determine how
// to change the driving direction while in Oculus mode. It is used to support driving toward where you're
// to change the driving direction while in Oculus mode. It is used to support driving toward where you're
// head is looking. Note that in oculus mode, your actual camera view and where your head is looking is not
// always the same.
if (qApp->isHMDMode()) {
return getOrientation();
MyAvatar* myAvatar = dynamic_cast<MyAvatar*>(_owningAvatar);
if (myAvatar && myAvatar->getStandingHMDSensorMode()) {
return glm::quat_cast(myAvatar->getSensorToWorldMatrix()) * myAvatar->getHMDSensorOrientation();
} else {
return getOrientation();
}
} else {
Avatar* owningAvatar = static_cast<Avatar*>(_owningAvatar);
return owningAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
}
Avatar* owningAvatar = static_cast<Avatar*>(_owningAvatar);
return owningAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
}
glm::quat Head::getEyeRotation(const glm::vec3& eyePosition) const {
@@ -34,7 +34,6 @@
#include <UserActivityLogger.h>
#include "devices/Faceshift.h"
#include "devices/OculusManager.h"
#include "Application.h"
#include "AvatarManager.h"
@@ -97,6 +96,15 @@ MyAvatar::MyAvatar(RigPointer rig) :
_eyeContactTarget(LEFT_EYE),
_realWorldFieldOfView("realWorldFieldOfView",
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
_hmdSensorMatrix(),
_hmdSensorOrientation(),
_hmdSensorPosition(),
_bodySensorMatrix(),
_sensorToWorldMatrix(),
_standingHMDSensorMode(false),
_goToPending(false),
_goToPosition(),
_goToOrientation(),
_rig(rig),
_prevShouldDrawHead(true)
{
@@ -142,6 +150,13 @@ void MyAvatar::reset() {
}
void MyAvatar::update(float deltaTime) {
if (_goToPending) {
setPosition(_goToPosition);
setOrientation(_goToOrientation);
_goToPending = false;
}
if (_referential) {
_referential->update();
}
@@ -149,6 +164,7 @@ void MyAvatar::update(float deltaTime) {
Head* head = getHead();
head->relaxLean(deltaTime);
updateFromTrackers(deltaTime);
// Get audio loudness data from audio input device
auto audio = DependencyManager::get<AudioClient>();
head->setAudioLoudness(audio->getLastInputLoudness());
@@ -228,6 +244,41 @@ void MyAvatar::simulate(float deltaTime) {
maybeUpdateBillboard();
}
glm::mat4 MyAvatar::getSensorToWorldMatrix() const {
if (getStandingHMDSensorMode()) {
return _sensorToWorldMatrix;
} else {
return createMatFromQuatAndPos(getWorldAlignedOrientation(), getDefaultEyePosition());
}
}
// best called at start of main loop just after we have a fresh hmd pose.
// update internal body position from new hmd pose.
void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix) {
// update the sensorMatrices based on the new hmd pose
_hmdSensorMatrix = hmdSensorMatrix;
_hmdSensorPosition = extractTranslation(hmdSensorMatrix);
_hmdSensorOrientation = glm::quat_cast(hmdSensorMatrix);
_bodySensorMatrix = deriveBodyFromHMDSensor();
if (getStandingHMDSensorMode()) {
// set the body position/orientation to reflect motion due to the head.
auto worldMat = _sensorToWorldMatrix * _bodySensorMatrix;
setPosition(extractTranslation(worldMat));
setOrientation(glm::quat_cast(worldMat));
}
}
// best called at end of main loop, just before rendering.
// update sensor to world matrix from current body position and hmd sensor.
// This is so the correct camera can be used for rendering.
void MyAvatar::updateSensorToWorldMatrix() {
// update the sensor mat so that the body position will end up in the desired
// position when driven from the head.
glm::mat4 desiredMat = createMatFromQuatAndPos(getOrientation(), getPosition());
_sensorToWorldMatrix = desiredMat * glm::inverse(_bodySensorMatrix);
}
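The pair of methods above implies a per-frame ordering: push the fresh HMD pose in at the top of the frame, simulate, then rebuild the sensor-to-world matrix just before rendering. A minimal sketch of that ordering follows; it is not part of this commit, and the idlePerFrame wrapper and the qApp accessor used here are assumptions for illustration only.

// Hypothetical per-frame ordering implied by the comments above (not in this diff).
void idlePerFrame(MyAvatar* myAvatar, float deltaTime) {
    // start of frame: fresh HMD pose -> re-derive the body in sensor space
    myAvatar->updateFromHMDSensorMatrix(qApp->getHMDSensorPose());
    // normal per-frame avatar simulation and movement
    myAvatar->update(deltaTime);
    // end of frame, just before rendering: rebuild sensor-to-world so the
    // rendering camera picks up the (possibly moved) body position
    myAvatar->updateSensorToWorldMatrix();
}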
// Update avatar head rotation with sensor data
void MyAvatar::updateFromTrackers(float deltaTime) {
glm::vec3 estimatedPosition, estimatedRotation;
@@ -242,7 +293,7 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
bool inFacetracker = tracker && !tracker->isMuted();
if (inHmd) {
estimatedPosition = qApp->getHeadPosition();
estimatedPosition = extractTranslation(getHMDSensorMatrix());
estimatedPosition.x *= -1.0f;
_trackedHeadPosition = estimatedPosition;
@@ -286,15 +337,18 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
Head* head = getHead();
if (inHmd || isPlaying()) {
head->setDeltaPitch(estimatedRotation.x);
head->setDeltaYaw(estimatedRotation.y);
if (!getStandingHMDSensorMode()) {
head->setDeltaPitch(estimatedRotation.x);
head->setDeltaYaw(estimatedRotation.y);
head->setDeltaRoll(estimatedRotation.z);
}
} else {
float magnifyFieldOfView = qApp->getFieldOfView() /
_realWorldFieldOfView.get();
head->setDeltaPitch(estimatedRotation.x * magnifyFieldOfView);
head->setDeltaYaw(estimatedRotation.y * magnifyFieldOfView);
head->setDeltaRoll(estimatedRotation.z);
}
head->setDeltaRoll(estimatedRotation.z);
// Update torso lean distance based on accelerometer data
const float TORSO_LENGTH = 0.5f;
@@ -309,10 +363,12 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
relativePosition.x = -relativePosition.x;
}
head->setLeanSideways(glm::clamp(glm::degrees(atanf(relativePosition.x * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
head->setLeanForward(glm::clamp(glm::degrees(atanf(relativePosition.z * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
if (!(inHmd && getStandingHMDSensorMode())) {
head->setLeanSideways(glm::clamp(glm::degrees(atanf(relativePosition.x * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
head->setLeanForward(glm::clamp(glm::degrees(atanf(relativePosition.z * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
}
}
@@ -343,6 +399,22 @@ glm::vec3 MyAvatar::getLeftPalmPosition() {
return leftHandPosition;
}
glm::vec3 MyAvatar::getLeftPalmVelocity() {
const PalmData* palm = getHand()->getPalm(LEFT_HAND_INDEX);
if (palm != NULL) {
return palm->getVelocity();
}
return glm::vec3(0.0f);
}
glm::vec3 MyAvatar::getLeftPalmAngularVelocity() {
const PalmData* palm = getHand()->getPalm(LEFT_HAND_INDEX);
if (palm != NULL) {
return palm->getRawAngularVelocity();
}
return glm::vec3(0.0f);
}
glm::quat MyAvatar::getLeftPalmRotation() {
glm::quat leftRotation;
getSkeletonModel().getJointRotationInWorldFrame(getSkeletonModel().getLeftHandJointIndex(), leftRotation);
@@ -358,6 +430,22 @@ glm::vec3 MyAvatar::getRightPalmPosition() {
return rightHandPosition;
}
glm::vec3 MyAvatar::getRightPalmVelocity() {
const PalmData* palm = getHand()->getPalm(RIGHT_HAND_INDEX);
if (palm != NULL) {
return palm->getVelocity();
}
return glm::vec3(0.0f);
}
glm::vec3 MyAvatar::getRightPalmAngularVelocity() {
const PalmData* palm = getHand()->getPalm(RIGHT_HAND_INDEX);
if (palm != NULL) {
return palm->getRawAngularVelocity();
}
return glm::vec3(0.0f);
}
glm::quat MyAvatar::getRightPalmRotation() {
glm::quat rightRotation;
getSkeletonModel().getJointRotationInWorldFrame(getSkeletonModel().getRightHandJointIndex(), rightRotation);
@@ -845,10 +933,8 @@ void MyAvatar::updateLookAtTargetAvatar() {
gazeOffset = gazeOffset * HUMAN_EYE_SEPARATION / myEyeSeparation;
if (Application::getInstance()->isHMDMode()) {
//avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getCamera()->getPosition()
// + OculusManager::getMidEyePosition() + gazeOffset);
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+ OculusManager::getMidEyePosition() + gazeOffset);
+ glm::vec3(qApp->getHMDSensorPose()[3]) + gazeOffset);
} else {
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+ gazeOffset);
@@ -891,7 +977,7 @@ eyeContactTarget MyAvatar::getEyeContactTarget() {
}
glm::vec3 MyAvatar::getDefaultEyePosition() const {
return _position + getWorldAlignedOrientation() * _skeletonModel.getDefaultEyeModelPosition();
return getPosition() + getWorldAlignedOrientation() * _skeletonModel.getDefaultEyeModelPosition();
}
const float SCRIPT_PRIORITY = DEFAULT_PRIORITY + 1.0f;
@@ -1274,10 +1360,13 @@ void MyAvatar::updateOrientation(float deltaTime) {
glm::quat(glm::radians(glm::vec3(0.0f, _bodyYawDelta * deltaTime, 0.0f))));
if (qApp->isHMDMode()) {
glm::quat orientation = glm::quat_cast(getSensorToWorldMatrix()) * getHMDSensorOrientation();
glm::quat bodyOrientation = getWorldBodyOrientation();
glm::quat localOrientation = glm::inverse(bodyOrientation) * orientation;
// these angles will be in radians
glm::quat orientation = qApp->getHeadOrientation();
// ... so they need to be converted to degrees before we do math...
glm::vec3 euler = glm::eulerAngles(orientation) * DEGREES_PER_RADIAN;
glm::vec3 euler = glm::eulerAngles(localOrientation) * DEGREES_PER_RADIAN;
//Invert yaw and roll when in mirror mode
if (Application::getInstance()->getCamera()->getMode() == CAMERA_MODE_MIRROR) {
@@ -1340,6 +1429,8 @@ glm::vec3 MyAvatar::applyKeyboardMotor(float deltaTime, const glm::vec3& localVe
glm::vec3 direction = front + right + up;
float directionLength = glm::length(direction);
//qCDebug(interfaceapp, "direction = (%.5f, %.5f, %.5f)", direction.x, direction.y, direction.z);
// Compute motor magnitude
if (directionLength > EPSILON) {
direction /= directionLength;
@@ -1444,7 +1535,6 @@ void MyAvatar::updatePosition(float deltaTime) {
// update _moving flag based on speed
const float MOVING_SPEED_THRESHOLD = 0.01f;
_moving = speed > MOVING_SPEED_THRESHOLD;
}
void MyAvatar::updateCollisionSound(const glm::vec3 &penetration, float deltaTime, float frequency) {
@@ -1539,32 +1629,31 @@ void MyAvatar::goToLocation(const glm::vec3& newPosition,
qCDebug(interfaceapp).nospace() << "MyAvatar goToLocation - moving to " << newPosition.x << ", "
<< newPosition.y << ", " << newPosition.z;
glm::vec3 shiftedPosition = newPosition;
_goToPending = true;
_goToPosition = newPosition;
_goToOrientation = getOrientation();
if (hasOrientation) {
qCDebug(interfaceapp).nospace() << "MyAvatar goToLocation - new orientation is "
<< newOrientation.x << ", " << newOrientation.y << ", " << newOrientation.z << ", " << newOrientation.w;
<< newOrientation.x << ", " << newOrientation.y << ", " << newOrientation.z << ", " << newOrientation.w;
// orient the user to face the target
glm::quat quatOrientation = newOrientation;
if (shouldFaceLocation) {
quatOrientation = newOrientation * glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
// move the user a couple units away
const float DISTANCE_TO_USER = 2.0f;
shiftedPosition = newPosition - quatOrientation * IDENTITY_FRONT * DISTANCE_TO_USER;
_goToPosition = newPosition - quatOrientation * IDENTITY_FRONT * DISTANCE_TO_USER;
}
setOrientation(quatOrientation);
_goToOrientation = quatOrientation;
}
slamPosition(shiftedPosition);
emit transformChanged();
}
void MyAvatar::updateMotionBehavior() {
void MyAvatar::updateMotionBehaviorFromMenu() {
Menu* menu = Menu::getInstance();
if (menu->isOptionChecked(MenuOption::KeyboardMotorControl)) {
_motionBehaviors |= AVATAR_MOTION_KEYBOARD_MOTOR_ENABLED;
@@ -1579,6 +1668,11 @@ void MyAvatar::updateMotionBehavior() {
_characterController.setEnabled(menu->isOptionChecked(MenuOption::EnableCharacterController));
}
void MyAvatar::updateStandingHMDModeFromMenu() {
Menu* menu = Menu::getInstance();
_standingHMDSensorMode = menu->isOptionChecked(MenuOption::StandingHMDSensorMode);
}
//Renders sixense laser pointers for UI selection with controllers
void MyAvatar::renderLaserPointers(gpu::Batch& batch) {
const float PALM_TIP_ROD_RADIUS = 0.002f;
@@ -1630,3 +1724,37 @@ void MyAvatar::relayDriveKeysToCharacterController() {
_characterController.jump();
}
}
glm::vec3 MyAvatar::getWorldBodyPosition() const {
return transformPoint(_sensorToWorldMatrix, extractTranslation(_bodySensorMatrix));
}
glm::quat MyAvatar::getWorldBodyOrientation() const {
return glm::quat_cast(_sensorToWorldMatrix * _bodySensorMatrix);
}
// derive avatar body position and orientation from the current HMD Sensor location.
// results are in sensor space
glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
// HMD is in sensor space.
const glm::vec3 hmdPosition = getHMDSensorPosition();
const glm::quat hmdOrientation = getHMDSensorOrientation();
const glm::quat hmdOrientationYawOnly = cancelOutRollAndPitch(hmdOrientation);
// In sensor space, figure out where the avatar body should be,
// by applying offsets from the avatar's neck & head joints.
vec3 localEyes = _skeletonModel.getDefaultEyeModelPosition();
vec3 localNeck(0.0f, 0.48f, 0.0f); // start with some kind of guess if the skeletonModel is not loaded yet.
_skeletonModel.getLocalNeckPosition(localNeck);
// apply simplistic head/neck model
// eyeToNeck offset is relative full HMD orientation.
// while neckToRoot offset is only relative to HMDs yaw.
glm::vec3 eyeToNeck = hmdOrientation * (localNeck - localEyes);
glm::vec3 neckToRoot = hmdOrientationYawOnly * -localNeck;
glm::vec3 bodyPos = hmdPosition + eyeToNeck + neckToRoot;
// avatar facing is determined solely by hmd orientation.
return createMatFromQuatAndPos(hmdOrientationYawOnly, bodyPos);
}
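As a quick sanity check on the head/neck model above, the following standalone sketch (not part of the commit; the joint offsets are illustrative guesses) plugs in an upright, identity-orientation HMD: the derived body root lands exactly one eye-height below the HMD, as expected.

// Standalone sketch of the simplistic head/neck model; offsets are made up for illustration.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <iostream>

int main() {
    glm::vec3 localEyes(0.0f, 0.60f, 0.0f);            // eyes above the avatar root (model space)
    glm::vec3 localNeck(0.0f, 0.48f, 0.0f);            // neck above the avatar root (model space)
    glm::vec3 hmdPosition(0.0f, 1.70f, 0.0f);          // HMD position in sensor space
    glm::quat hmdOrientation(1.0f, 0.0f, 0.0f, 0.0f);  // identity: user looking straight ahead
    glm::quat hmdOrientationYawOnly = hmdOrientation;  // roll and pitch are already zero

    glm::vec3 eyeToNeck = hmdOrientation * (localNeck - localEyes);  // (0, -0.12, 0)
    glm::vec3 neckToRoot = hmdOrientationYawOnly * -localNeck;       // (0, -0.48, 0)
    glm::vec3 bodyPos = hmdPosition + eyeToNeck + neckToRoot;        // (0,  1.10, 0)

    std::cout << bodyPos.x << " " << bodyPos.y << " " << bodyPos.z << "\n";  // prints: 0 1.1 0
    return 0;
}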
@@ -44,6 +44,20 @@ public:
void update(float deltaTime);
void preRender(RenderArgs* renderArgs);
const glm::mat4& getHMDSensorMatrix() const { return _hmdSensorMatrix; }
const glm::vec3& getHMDSensorPosition() const { return _hmdSensorPosition; }
const glm::quat& getHMDSensorOrientation() const { return _hmdSensorOrientation; }
glm::mat4 getSensorToWorldMatrix() const;
// best called at start of main loop just after we have a fresh hmd pose.
// update internal body position from new hmd pose.
void updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix);
// best called at end of main loop, just before rendering.
// update sensor to world matrix from current body position and hmd sensor.
// This is so the correct camera can be used for rendering.
void updateSensorToWorldMatrix();
void setLeanScale(float scale) { _leanScale = scale; }
void setRealWorldFieldOfView(float realWorldFov) { _realWorldFieldOfView.set(realWorldFov); }
@@ -59,6 +73,7 @@ public:
Q_INVOKABLE void startAnimation(const QString& url, float fps = 30.0f, float priority = 1.0f, bool loop = false,
bool hold = false, float firstFrame = 0.0f,
float lastFrame = FLT_MAX, const QStringList& maskedJoints = QStringList());
/// Stops an animation as identified by a URL.
Q_INVOKABLE void stopAnimation(const QString& url);
@@ -148,6 +163,8 @@ public:
static const float ZOOM_MAX;
static const float ZOOM_DEFAULT;
bool getStandingHMDSensorMode() const { return _standingHMDSensorMode; }
public slots:
void increaseSize();
void decreaseSize();
@@ -162,11 +179,16 @@ public slots:
glm::vec3 getThrust() { return _thrust; };
void setThrust(glm::vec3 newThrust) { _thrust = newThrust; }
void updateMotionBehavior();
void updateMotionBehaviorFromMenu();
void updateStandingHMDModeFromMenu();
glm::vec3 getLeftPalmPosition();
glm::vec3 getLeftPalmVelocity();
glm::vec3 getLeftPalmAngularVelocity();
glm::quat getLeftPalmRotation();
glm::vec3 getRightPalmPosition();
glm::vec3 getRightPalmVelocity();
glm::vec3 getRightPalmAngularVelocity();
glm::quat getRightPalmRotation();
void clearReferential();
@@ -189,6 +211,8 @@ signals:
private:
glm::vec3 getWorldBodyPosition() const;
glm::quat getWorldBodyOrientation() const;
QByteArray toByteArray();
void simulate(float deltaTime);
void updateFromTrackers(float deltaTime);
@@ -224,6 +248,10 @@ private:
void setVisibleInSceneIfReady(Model* model, render::ScenePointer scene, bool visiblity);
// derive avatar body position and orientation from the current HMD Sensor location.
// results are in sensor space
glm::mat4 deriveBodyFromHMDSensor() const;
glm::vec3 _gravity;
float _driveKeys[MAX_DRIVE_KEYS];
@@ -281,6 +309,26 @@ private:
RigPointer _rig;
bool _prevShouldDrawHead;
// cache of the current HMD sensor position and orientation
// in sensor space.
glm::mat4 _hmdSensorMatrix;
glm::quat _hmdSensorOrientation;
glm::vec3 _hmdSensorPosition;
// cache of the current body position and orientation of the avatar's body,
// in sensor space.
glm::mat4 _bodySensorMatrix;
// used to transform any sensor into world space, including the _hmdSensorMat, or hand controllers.
glm::mat4 _sensorToWorldMatrix;
bool _standingHMDSensorMode;
bool _goToPending;
glm::vec3 _goToPosition;
glm::quat _goToOrientation;
std::unordered_set<int> _headBoneSet;
};
@@ -392,6 +392,10 @@ bool SkeletonModel::getNeckPosition(glm::vec3& neckPosition) const {
return isActive() && getJointPositionInWorldFrame(_geometry->getFBXGeometry().neckJointIndex, neckPosition);
}
bool SkeletonModel::getLocalNeckPosition(glm::vec3& neckPosition) const {
return isActive() && getJointPosition(_geometry->getFBXGeometry().neckJointIndex, neckPosition);
}
bool SkeletonModel::getNeckParentRotationFromDefaultOrientation(glm::quat& neckParentRotation) const {
if (!isActive()) {
return false;
@@ -80,6 +80,8 @@
/// \return whether or not the neck was found
bool getNeckPosition(glm::vec3& neckPosition) const;
bool getLocalNeckPosition(glm::vec3& neckPosition) const;
/// Returns the rotation of the neck joint's parent from default orientation
/// \return whether or not the neck was found
bool getNeckParentRotationFromDefaultOrientation(glm::quat& neckParentRotation) const;
File diff suppressed because it is too large
@@ -11,12 +11,13 @@
#ifndef hifi_3DConnexionClient_h
#define hifi_3DConnexionClient_h
#include <qobject.h>
#include <qlibrary.h>
#include <QObject>
#include <QLibrary>
#include <input-plugins/UserInputMapper.h>
#include "InterfaceLogging.h"
#include "Application.h"
#include "ui/UserInputMapper.h"
#ifndef HAVE_3DCONNEXIONCLIENT
class ConnexionClient : public QObject {
@@ -1,887 +0,0 @@
|
|||
//
|
||||
// OculusManager.cpp
|
||||
// interface/src/devices
|
||||
//
|
||||
// Created by Stephen Birarda on 5/9/13.
|
||||
// Refactored by Ben Arnold on 6/30/2014
|
||||
// Copyright 2012 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "OculusManager.h"
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
#include <QDesktopWidget>
|
||||
#include <QGuiApplication>
|
||||
#include <gpu/GPUConfig.h>
|
||||
#include <QScreen>
|
||||
#include <CursorManager.h>
|
||||
#include <QOpenGLTimerQuery>
|
||||
#include <QGLWidget>
|
||||
|
||||
|
||||
#include <avatar/AvatarManager.h>
|
||||
#include <avatar/MyAvatar.h>
|
||||
#include <GlWindow.h>
|
||||
#include <gpu/GLBackend.h>
|
||||
#include <OglplusHelpers.h>
|
||||
#include <PathUtils.h>
|
||||
#include <SharedUtil.h>
|
||||
#include <UserActivityLogger.h>
|
||||
#include <FramebufferCache.h>
|
||||
|
||||
#include <OVR_CAPI_GL.h>
|
||||
|
||||
#include "InterfaceLogging.h"
|
||||
#include "Application.h"
|
||||
#include "ui/overlays/Text3DOverlay.h"
|
||||
|
||||
template <typename Function>
|
||||
void for_each_eye(Function function) {
|
||||
for (ovrEyeType eye = ovrEyeType::ovrEye_Left;
|
||||
eye < ovrEyeType::ovrEye_Count;
|
||||
eye = static_cast<ovrEyeType>(eye + 1)) {
|
||||
function(eye);
|
||||
}
|
||||
}
|
||||
|
||||
template <typename Function>
|
||||
void for_each_eye(const ovrHmd & hmd, Function function) {
|
||||
for (int i = 0; i < ovrEye_Count; ++i) {
|
||||
ovrEyeType eye = hmd->EyeRenderOrder[i];
|
||||
function(eye);
|
||||
}
|
||||
}
|
||||
enum CalibrationState {
|
||||
UNCALIBRATED,
|
||||
WAITING_FOR_DELTA,
|
||||
WAITING_FOR_ZERO,
|
||||
WAITING_FOR_ZERO_HELD,
|
||||
CALIBRATED
|
||||
};
|
||||
|
||||
inline glm::mat4 toGlm(const ovrMatrix4f & om) {
|
||||
return glm::transpose(glm::make_mat4(&om.M[0][0]));
|
||||
}
|
||||
|
||||
inline glm::mat4 toGlm(const ovrFovPort & fovport, float nearPlane = 0.01f, float farPlane = 10000.0f) {
|
||||
return toGlm(ovrMatrix4f_Projection(fovport, nearPlane, farPlane, true));
|
||||
}
|
||||
|
||||
inline glm::vec3 toGlm(const ovrVector3f & ov) {
|
||||
return glm::make_vec3(&ov.x);
|
||||
}
|
||||
|
||||
inline glm::vec2 toGlm(const ovrVector2f & ov) {
|
||||
return glm::make_vec2(&ov.x);
|
||||
}
|
||||
|
||||
inline glm::ivec2 toGlm(const ovrVector2i & ov) {
|
||||
return glm::ivec2(ov.x, ov.y);
|
||||
}
|
||||
|
||||
inline glm::uvec2 toGlm(const ovrSizei & ov) {
|
||||
return glm::uvec2(ov.w, ov.h);
|
||||
}
|
||||
|
||||
inline glm::quat toGlm(const ovrQuatf & oq) {
|
||||
return glm::make_quat(&oq.x);
|
||||
}
|
||||
|
||||
inline glm::mat4 toGlm(const ovrPosef & op) {
|
||||
glm::mat4 orientation = glm::mat4_cast(toGlm(op.Orientation));
|
||||
glm::mat4 translation = glm::translate(glm::mat4(), toGlm(op.Position));
|
||||
return translation * orientation;
|
||||
}
|
||||
|
||||
inline ovrMatrix4f ovrFromGlm(const glm::mat4 & m) {
|
||||
ovrMatrix4f result;
|
||||
glm::mat4 transposed(glm::transpose(m));
|
||||
memcpy(result.M, &(transposed[0][0]), sizeof(float) * 16);
|
||||
return result;
|
||||
}
|
||||
|
||||
inline ovrVector3f ovrFromGlm(const glm::vec3 & v) {
|
||||
return{ v.x, v.y, v.z };
|
||||
}
|
||||
|
||||
inline ovrVector2f ovrFromGlm(const glm::vec2 & v) {
|
||||
return{ v.x, v.y };
|
||||
}
|
||||
|
||||
inline ovrSizei ovrFromGlm(const glm::uvec2 & v) {
|
||||
return{ (int)v.x, (int)v.y };
|
||||
}
|
||||
|
||||
inline ovrQuatf ovrFromGlm(const glm::quat & q) {
|
||||
return{ q.x, q.y, q.z, q.w };
|
||||
}
|
||||
|
||||
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
|
||||
// A base class for FBO wrappers that need to use the Oculus C
|
||||
// API to manage textures via ovrHmd_CreateSwapTextureSetGL,
|
||||
// ovrHmd_CreateMirrorTextureGL, etc
|
||||
template <typename C>
|
||||
struct RiftFramebufferWrapper : public FramebufferWrapper<C, char> {
|
||||
ovrHmd hmd;
|
||||
RiftFramebufferWrapper(const ovrHmd & hmd) : hmd(hmd) {
|
||||
color = 0;
|
||||
depth = 0;
|
||||
};
|
||||
|
||||
void Resize(const uvec2 & size) {
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
|
||||
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
|
||||
this->size = size;
|
||||
initColor();
|
||||
initDone();
|
||||
}
|
||||
|
||||
protected:
|
||||
virtual void initDepth() override final {
|
||||
}
|
||||
};
|
||||
|
||||
// A wrapper for constructing and using a swap texture set,
|
||||
// where each frame you draw to a texture via the FBO,
|
||||
// then submit it and increment to the next texture.
|
||||
// The Oculus SDK manages the creation and destruction of
|
||||
// the textures
|
||||
struct SwapFramebufferWrapper : public RiftFramebufferWrapper<ovrSwapTextureSet*> {
|
||||
SwapFramebufferWrapper(const ovrHmd & hmd)
|
||||
: RiftFramebufferWrapper(hmd) {
|
||||
}
|
||||
|
||||
~SwapFramebufferWrapper() {
|
||||
if (color) {
|
||||
ovrHmd_DestroySwapTextureSet(hmd, color);
|
||||
color = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
void Increment() {
|
||||
++color->CurrentIndex;
|
||||
color->CurrentIndex %= color->TextureCount;
|
||||
}
|
||||
|
||||
protected:
|
||||
virtual void initColor() override {
|
||||
if (color) {
|
||||
ovrHmd_DestroySwapTextureSet(hmd, color);
|
||||
color = nullptr;
|
||||
}
|
||||
|
||||
ovrResult result = ovrHmd_CreateSwapTextureSetGL(hmd, GL_RGBA, size.x, size.y, &color);
|
||||
Q_ASSERT(OVR_SUCCESS(result));
|
||||
|
||||
for (int i = 0; i < color->TextureCount; ++i) {
|
||||
ovrGLTexture& ovrTex = (ovrGLTexture&)color->Textures[i];
|
||||
glBindTexture(GL_TEXTURE_2D, ovrTex.OGL.TexId);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
}
|
||||
glBindTexture(GL_TEXTURE_2D, 0);
|
||||
}
|
||||
|
||||
virtual void initDone() override {
|
||||
}
|
||||
|
||||
virtual void onBind(oglplus::Framebuffer::Target target) override {
|
||||
ovrGLTexture& tex = (ovrGLTexture&)(color->Textures[color->CurrentIndex]);
|
||||
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex.OGL.TexId, 0);
|
||||
}
|
||||
|
||||
virtual void onUnbind(oglplus::Framebuffer::Target target) override {
|
||||
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// We use a FBO to wrap the mirror texture because it makes it easier to
|
||||
// render to the screen via glBlitFramebuffer
|
||||
struct MirrorFramebufferWrapper : public RiftFramebufferWrapper<ovrGLTexture*> {
|
||||
MirrorFramebufferWrapper(const ovrHmd & hmd)
|
||||
: RiftFramebufferWrapper(hmd) {
|
||||
}
|
||||
|
||||
virtual ~MirrorFramebufferWrapper() {
|
||||
if (color) {
|
||||
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
|
||||
color = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
void initColor() override {
|
||||
if (color) {
|
||||
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
|
||||
color = nullptr;
|
||||
}
|
||||
ovrResult result = ovrHmd_CreateMirrorTextureGL(hmd, GL_RGBA, size.x, size.y, (ovrTexture**)&color);
|
||||
Q_ASSERT(OVR_SUCCESS(result));
|
||||
}
|
||||
|
||||
void initDone() override {
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
|
||||
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, color->OGL.TexId, 0);
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
|
||||
}
|
||||
};
|
||||
|
||||
static SwapFramebufferWrapper* _swapFbo{ nullptr };
|
||||
static MirrorFramebufferWrapper* _mirrorFbo{ nullptr };
|
||||
static ovrLayerEyeFov _sceneLayer;
|
||||
|
||||
#else
|
||||
|
||||
static ovrTexture _eyeTextures[ovrEye_Count];
|
||||
static GlWindow* _outputWindow{ nullptr };
|
||||
|
||||
#endif
|
||||
|
||||
static bool _isConnected = false;
|
||||
static ovrHmd _ovrHmd;
|
||||
static ovrFovPort _eyeFov[ovrEye_Count];
|
||||
static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count];
|
||||
static ovrSizei _renderTargetSize;
|
||||
static glm::mat4 _eyeProjection[ovrEye_Count];
|
||||
static unsigned int _frameIndex = 0;
|
||||
static bool _frameTimingActive = false;
|
||||
static Camera* _camera = NULL;
|
||||
static ovrEyeType _activeEye = ovrEye_Count;
|
||||
static bool _hswDismissed = false;
|
||||
|
||||
static const float CALIBRATION_DELTA_MINIMUM_LENGTH = 0.02f;
|
||||
static const float CALIBRATION_DELTA_MINIMUM_ANGLE = 5.0f * RADIANS_PER_DEGREE;
|
||||
static const float CALIBRATION_ZERO_MAXIMUM_LENGTH = 0.01f;
|
||||
static const float CALIBRATION_ZERO_MAXIMUM_ANGLE = 2.0f * RADIANS_PER_DEGREE;
|
||||
static const quint64 CALIBRATION_ZERO_HOLD_TIME = 3000000; // usec
|
||||
static const float CALIBRATION_MESSAGE_DISTANCE = 2.5f;
|
||||
static CalibrationState _calibrationState;
|
||||
static glm::vec3 _calibrationPosition;
|
||||
static glm::quat _calibrationOrientation;
|
||||
static quint64 _calibrationStartTime;
|
||||
static int _calibrationMessage = 0;
|
||||
static glm::vec3 _eyePositions[ovrEye_Count];
|
||||
// TODO expose this as a developer toggle
|
||||
static bool _eyePerFrameMode = false;
|
||||
static ovrEyeType _lastEyeRendered = ovrEye_Count;
|
||||
static ovrSizei _recommendedTexSize = { 0, 0 };
|
||||
static float _offscreenRenderScale = 1.0;
|
||||
static glm::mat4 _combinedProjection;
|
||||
static ovrPosef _eyeRenderPoses[ovrEye_Count];
|
||||
static ovrRecti _eyeViewports[ovrEye_Count];
|
||||
static ovrVector3f _eyeOffsets[ovrEye_Count];
|
||||
|
||||
|
||||
glm::vec3 OculusManager::getLeftEyePosition() { return _eyePositions[ovrEye_Left]; }
|
||||
glm::vec3 OculusManager::getRightEyePosition() { return _eyePositions[ovrEye_Right]; }
|
||||
glm::vec3 OculusManager::getMidEyePosition() { return (_eyePositions[ovrEye_Left] + _eyePositions[ovrEye_Right]) / 2.0f; }
|
||||
|
||||
void OculusManager::connect(QOpenGLContext* shareContext) {
|
||||
qCDebug(interfaceapp) << "Oculus SDK" << OVR_VERSION_STRING;
|
||||
|
||||
ovrInitParams initParams; memset(&initParams, 0, sizeof(initParams));
|
||||
|
||||
#ifdef DEBUG
|
||||
initParams.Flags |= ovrInit_Debug;
|
||||
#endif
|
||||
|
||||
ovr_Initialize(&initParams);
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
|
||||
ovrResult res = ovrHmd_Create(0, &_ovrHmd);
|
||||
#ifdef DEBUG
|
||||
if (!OVR_SUCCESS(res)) {
|
||||
res = ovrHmd_CreateDebug(ovrHmd_DK2, &_ovrHmd);
|
||||
Q_ASSERT(OVR_SUCCESS(res));
|
||||
}
|
||||
#endif
|
||||
|
||||
#else
|
||||
|
||||
_ovrHmd = ovrHmd_Create(0);
|
||||
#ifdef DEBUG
|
||||
if (!_ovrHmd) {
|
||||
_ovrHmd = ovrHmd_CreateDebug(ovrHmd_DK2);
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
||||
|
||||
if (!_ovrHmd) {
|
||||
_isConnected = false;
|
||||
|
||||
// we're definitely not in "VR mode" so tell the menu that
|
||||
Menu::getInstance()->getActionForOption(MenuOption::EnableVRMode)->setChecked(false);
|
||||
ovr_Shutdown();
|
||||
return;
|
||||
}
|
||||
|
||||
_calibrationState = UNCALIBRATED;
|
||||
if (!_isConnected) {
|
||||
UserActivityLogger::getInstance().connectedDevice("hmd", "oculus");
|
||||
}
|
||||
_isConnected = true;
|
||||
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
_eyeFov[eye] = _ovrHmd->DefaultEyeFov[eye];
|
||||
_eyeProjection[eye] = toGlm(ovrMatrix4f_Projection(_eyeFov[eye],
|
||||
DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
|
||||
ovrEyeRenderDesc erd = ovrHmd_GetRenderDesc(_ovrHmd, eye, _eyeFov[eye]);
|
||||
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
|
||||
});
|
||||
ovrFovPort combinedFov = _ovrHmd->MaxEyeFov[0];
|
||||
combinedFov.RightTan = _ovrHmd->MaxEyeFov[1].RightTan;
|
||||
_combinedProjection = toGlm(ovrMatrix4f_Projection(combinedFov,
|
||||
DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
|
||||
|
||||
_recommendedTexSize = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left, _eyeFov[ovrEye_Left], 1.0f);
|
||||
_renderTargetSize = { _recommendedTexSize.w * 2, _recommendedTexSize.h };
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
|
||||
_mirrorFbo = new MirrorFramebufferWrapper(_ovrHmd);
|
||||
_swapFbo = new SwapFramebufferWrapper(_ovrHmd);
|
||||
_swapFbo->Init(toGlm(_renderTargetSize));
|
||||
_sceneLayer.ColorTexture[0] = _swapFbo->color;
|
||||
_sceneLayer.ColorTexture[1] = nullptr;
|
||||
_sceneLayer.Viewport[0].Pos = { 0, 0 };
|
||||
_sceneLayer.Viewport[0].Size = _recommendedTexSize;
|
||||
_sceneLayer.Viewport[1].Pos = { _recommendedTexSize.w, 0 };
|
||||
_sceneLayer.Viewport[1].Size = _recommendedTexSize;
|
||||
_sceneLayer.Header.Type = ovrLayerType_EyeFov;
|
||||
_sceneLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
_eyeViewports[eye] = _sceneLayer.Viewport[eye];
|
||||
_sceneLayer.Fov[eye] = _eyeFov[eye];
|
||||
});
|
||||
|
||||
|
||||
|
||||
#else
|
||||
_outputWindow = new GlWindow(shareContext);
|
||||
_outputWindow->show();
|
||||
// _outputWindow->setFlags(Qt::FramelessWindowHint );
|
||||
// _outputWindow->resize(_ovrHmd->Resolution.w, _ovrHmd->Resolution.h);
|
||||
// _outputWindow->setPosition(_ovrHmd->WindowsPos.x, _ovrHmd->WindowsPos.y);
|
||||
ivec2 desiredPosition = toGlm(_ovrHmd->WindowsPos);
|
||||
foreach(QScreen* screen, qGuiApp->screens()) {
|
||||
ivec2 screenPosition = toGlm(screen->geometry().topLeft());
|
||||
if (screenPosition == desiredPosition) {
|
||||
_outputWindow->setScreen(screen);
|
||||
break;
|
||||
}
|
||||
}
|
||||
_outputWindow->showFullScreen();
|
||||
_outputWindow->makeCurrent();
|
||||
|
||||
ovrGLConfig cfg;
|
||||
memset(&cfg, 0, sizeof(cfg));
|
||||
cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
|
||||
cfg.OGL.Header.BackBufferSize = _ovrHmd->Resolution;
|
||||
cfg.OGL.Header.Multisample = 0;
|
||||
|
||||
int distortionCaps = 0
|
||||
| ovrDistortionCap_Vignette
|
||||
| ovrDistortionCap_Overdrive
|
||||
| ovrDistortionCap_TimeWarp;
|
||||
|
||||
int configResult = ovrHmd_ConfigureRendering(_ovrHmd, &cfg.Config,
|
||||
distortionCaps, _eyeFov, _eyeRenderDesc);
|
||||
assert(configResult);
|
||||
Q_UNUSED(configResult);
|
||||
|
||||
_outputWindow->doneCurrent();
|
||||
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
//Get texture size
|
||||
_eyeTextures[eye].Header.API = ovrRenderAPI_OpenGL;
|
||||
_eyeTextures[eye].Header.TextureSize = _renderTargetSize;
|
||||
_eyeTextures[eye].Header.RenderViewport.Pos = { 0, 0 };
|
||||
_eyeTextures[eye].Header.RenderViewport.Size = _renderTargetSize;
|
||||
_eyeTextures[eye].Header.RenderViewport.Size.w /= 2;
|
||||
});
|
||||
_eyeTextures[ovrEye_Right].Header.RenderViewport.Pos.x = _recommendedTexSize.w;
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
_eyeViewports[eye] = _eyeTextures[eye].Header.RenderViewport;
|
||||
});
|
||||
#endif
|
||||
|
||||
ovrHmd_SetEnabledCaps(_ovrHmd,
|
||||
ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
|
||||
|
||||
ovrHmd_ConfigureTracking(_ovrHmd,
|
||||
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection,
|
||||
ovrTrackingCap_Orientation);
|
||||
|
||||
if (!_camera) {
|
||||
_camera = new Camera;
|
||||
configureCamera(*_camera); // no need to use screen dimensions; they're ignored
|
||||
}
|
||||
}
|
||||
|
||||
//Disconnects and deallocates the OR
|
||||
void OculusManager::disconnect() {
|
||||
if (_isConnected) {
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
if (_swapFbo) {
|
||||
delete _swapFbo;
|
||||
_swapFbo = nullptr;
|
||||
}
|
||||
|
||||
if (_mirrorFbo) {
|
||||
delete _mirrorFbo;
|
||||
_mirrorFbo = nullptr;
|
||||
}
|
||||
#else
|
||||
_outputWindow->showNormal();
|
||||
_outputWindow->deleteLater();
|
||||
_outputWindow = nullptr;
|
||||
#endif
|
||||
|
||||
if (_ovrHmd) {
|
||||
ovrHmd_Destroy(_ovrHmd);
|
||||
_ovrHmd = nullptr;
|
||||
}
|
||||
ovr_Shutdown();
|
||||
|
||||
_isConnected = false;
|
||||
// Prepare to potentially have to dismiss the HSW again
|
||||
// if the user re-enables VR
|
||||
_hswDismissed = false;
|
||||
}
|
||||
}
|
||||
|
||||
void positionCalibrationBillboard(Text3DOverlay* billboard) {
|
||||
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
glm::quat headOrientation = myAvatar->getHeadOrientation();
|
||||
headOrientation.x = 0;
|
||||
headOrientation.z = 0;
|
||||
glm::normalize(headOrientation);
|
||||
billboard->setPosition(myAvatar->getHeadPosition()
|
||||
+ headOrientation * glm::vec3(0.0f, 0.0f, -CALIBRATION_MESSAGE_DISTANCE));
|
||||
billboard->setRotation(headOrientation);
|
||||
}
|
||||
|
||||
void calibrate(const glm::vec3& position, const glm::quat& orientation) {
|
||||
static QString instructionMessage = "Hold still to calibrate";
|
||||
static QString progressMessage;
|
||||
static Text3DOverlay* billboard;
|
||||
|
||||
switch (_calibrationState) {
|
||||
|
||||
case UNCALIBRATED:
|
||||
if (position != glm::vec3() && orientation != glm::quat()) { // Handle zero values at start-up.
|
||||
_calibrationPosition = position;
|
||||
_calibrationOrientation = orientation;
|
||||
_calibrationState = WAITING_FOR_DELTA;
|
||||
}
|
||||
break;
|
||||
|
||||
case WAITING_FOR_DELTA:
|
||||
if (glm::length(position - _calibrationPosition) > CALIBRATION_DELTA_MINIMUM_LENGTH
|
||||
|| glm::angle(orientation * glm::inverse(_calibrationOrientation)) > CALIBRATION_DELTA_MINIMUM_ANGLE) {
|
||||
_calibrationPosition = position;
|
||||
_calibrationOrientation = orientation;
|
||||
_calibrationState = WAITING_FOR_ZERO;
|
||||
}
|
||||
break;
|
||||
|
||||
case WAITING_FOR_ZERO:
|
||||
if (glm::length(position - _calibrationPosition) < CALIBRATION_ZERO_MAXIMUM_LENGTH
|
||||
&& glm::angle(orientation * glm::inverse(_calibrationOrientation)) < CALIBRATION_ZERO_MAXIMUM_ANGLE) {
|
||||
_calibrationStartTime = usecTimestampNow();
|
||||
_calibrationState = WAITING_FOR_ZERO_HELD;
|
||||
|
||||
if (!_calibrationMessage) {
|
||||
qCDebug(interfaceapp) << "Hold still to calibrate HMD";
|
||||
|
||||
billboard = new Text3DOverlay();
|
||||
billboard->setDimensions(glm::vec2(2.0f, 1.25f));
|
||||
billboard->setTopMargin(0.35f);
|
||||
billboard->setLeftMargin(0.28f);
|
||||
billboard->setText(instructionMessage);
|
||||
billboard->setAlpha(0.5f);
|
||||
billboard->setLineHeight(0.1f);
|
||||
billboard->setIsFacingAvatar(false);
|
||||
positionCalibrationBillboard(billboard);
|
||||
|
||||
_calibrationMessage = Application::getInstance()->getOverlays().addOverlay(billboard);
|
||||
}
|
||||
|
||||
progressMessage = "";
|
||||
} else {
|
||||
_calibrationPosition = position;
|
||||
_calibrationOrientation = orientation;
|
||||
}
|
||||
break;
|
||||
|
||||
case WAITING_FOR_ZERO_HELD:
|
||||
if (glm::length(position - _calibrationPosition) < CALIBRATION_ZERO_MAXIMUM_LENGTH
|
||||
&& glm::angle(orientation * glm::inverse(_calibrationOrientation)) < CALIBRATION_ZERO_MAXIMUM_ANGLE) {
|
||||
if ((usecTimestampNow() - _calibrationStartTime) > CALIBRATION_ZERO_HOLD_TIME) {
|
||||
_calibrationState = CALIBRATED;
|
||||
qCDebug(interfaceapp) << "HMD calibrated";
|
||||
Application::getInstance()->getOverlays().deleteOverlay(_calibrationMessage);
|
||||
_calibrationMessage = 0;
|
||||
Application::getInstance()->resetSensors();
|
||||
} else {
|
||||
quint64 quarterSeconds = (usecTimestampNow() - _calibrationStartTime) / 250000;
|
||||
if (quarterSeconds + 1 > (quint64)progressMessage.length()) {
|
||||
// 3...2...1...
|
||||
if (quarterSeconds == 4 * (quarterSeconds / 4)) {
|
||||
quint64 wholeSeconds = CALIBRATION_ZERO_HOLD_TIME / 1000000 - quarterSeconds / 4;
|
||||
|
||||
if (wholeSeconds == 3) {
|
||||
positionCalibrationBillboard(billboard);
|
||||
}
|
||||
|
||||
progressMessage += QString::number(wholeSeconds);
|
||||
} else {
|
||||
progressMessage += ".";
|
||||
}
|
||||
billboard->setText(instructionMessage + "\n\n" + progressMessage);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
_calibrationPosition = position;
|
||||
_calibrationOrientation = orientation;
|
||||
_calibrationState = WAITING_FOR_ZERO;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
void OculusManager::recalibrate() {
|
||||
_calibrationState = UNCALIBRATED;
|
||||
}
|
||||
|
||||
void OculusManager::abandonCalibration() {
|
||||
_calibrationState = CALIBRATED;
|
||||
if (_calibrationMessage) {
|
||||
qCDebug(interfaceapp) << "Abandoned HMD calibration";
|
||||
Application::getInstance()->getOverlays().deleteOverlay(_calibrationMessage);
|
||||
_calibrationMessage = 0;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
bool OculusManager::isConnected() {
|
||||
return _isConnected;
|
||||
}
|
||||
|
||||
//Begins the frame timing for oculus prediction purposes
|
||||
void OculusManager::beginFrameTiming() {
|
||||
if (_frameTimingActive) {
|
||||
printf("WARNING: Called OculusManager::beginFrameTiming() twice in a row, need to call OculusManager::endFrameTiming().");
|
||||
}
|
||||
_frameTimingActive = true;
|
||||
}
|
||||
|
||||
bool OculusManager::allowSwap() {
|
||||
return false;
|
||||
}
|
||||
|
||||
//Ends frame timing
|
||||
void OculusManager::endFrameTiming() {
|
||||
_frameIndex++;
|
||||
_frameTimingActive = false;
|
||||
}
|
||||
|
||||
//Sets the camera FoV and aspect ratio
|
||||
void OculusManager::configureCamera(Camera& camera) {
|
||||
if (_activeEye == ovrEye_Count) {
|
||||
// When not rendering, provide a FOV encompasing both eyes
|
||||
camera.setProjection(_combinedProjection);
|
||||
return;
|
||||
}
|
||||
camera.setProjection(_eyeProjection[_activeEye]);
|
||||
}
|
||||
|
||||
//Displays everything for the oculus, frame timing must be active
|
||||
void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera) {
|
||||
|
||||
#ifdef DEBUG
|
||||
// Ensure the frame counter always increments by exactly 1
|
||||
static int oldFrameIndex = -1;
|
||||
assert(oldFrameIndex == -1 || (unsigned int)oldFrameIndex == _frameIndex - 1);
|
||||
oldFrameIndex = _frameIndex;
|
||||
#endif
|
||||
|
||||
#ifndef Q_OS_WIN
|
||||
|
||||
// FIXME: we need a better way of responding to the HSW. In particular
|
||||
// we need to ensure that it's only displayed once per session, rather than
|
||||
// every time the user toggles VR mode, and we need to hook it up to actual
|
||||
// keyboard input. OVR claim they are refactoring HSW
|
||||
// https://forums.oculus.com/viewtopic.php?f=20&t=21720#p258599
|
||||
static ovrHSWDisplayState hasWarningState;
|
||||
if (!_hswDismissed) {
|
||||
ovrHmd_GetHSWDisplayState(_ovrHmd, &hasWarningState);
|
||||
if (hasWarningState.Displayed) {
|
||||
ovrHmd_DismissHSWDisplay(_ovrHmd);
|
||||
} else {
|
||||
_hswDismissed = true;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
//beginFrameTiming must be called before display
|
||||
if (!_frameTimingActive) {
|
||||
printf("WARNING: Called OculusManager::display() without calling OculusManager::beginFrameTiming() first.");
|
||||
return;
|
||||
}
|
||||
|
||||
auto primaryFBO = DependencyManager::get<FramebufferCache>()->getPrimaryFramebuffer();
|
||||
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(primaryFBO));
|
||||
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
||||
|
||||
glm::quat orientation;
|
||||
glm::vec3 trackerPosition;
|
||||
auto deviceSize = qApp->getDeviceSize();
|
||||
|
||||
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
|
||||
ovrVector3f ovrHeadPosition = ts.HeadPose.ThePose.Position;
|
||||
|
||||
trackerPosition = glm::vec3(ovrHeadPosition.x, ovrHeadPosition.y, ovrHeadPosition.z);
|
||||
|
||||
if (_calibrationState != CALIBRATED) {
|
||||
ovrQuatf ovrHeadOrientation = ts.HeadPose.ThePose.Orientation;
|
||||
orientation = glm::quat(ovrHeadOrientation.w, ovrHeadOrientation.x, ovrHeadOrientation.y, ovrHeadOrientation.z);
|
||||
calibrate(trackerPosition, orientation);
|
||||
}
|
||||
|
||||
trackerPosition = bodyOrientation * trackerPosition;
|
||||
ovrPosef eyePoses[ovrEye_Count];
|
||||
ovrHmd_GetEyePoses(_ovrHmd, _frameIndex, _eyeOffsets, eyePoses, nullptr);
|
||||
#ifndef Q_OS_WIN
|
||||
ovrHmd_BeginFrame(_ovrHmd, _frameIndex);
|
||||
#endif
|
||||
//Render each eye into an fbo
|
||||
for_each_eye(_ovrHmd, [&](ovrEyeType eye){
|
||||
// If we're in eye-per-frame mode, only render one eye
|
||||
// per call to display, and allow timewarp to correct for
|
||||
// the other eye. Poor man's perf improvement
|
||||
if (_eyePerFrameMode && eye == _lastEyeRendered) {
|
||||
return;
|
||||
}
|
||||
_lastEyeRendered = _activeEye = eye;
|
||||
_eyeRenderPoses[eye] = eyePoses[eye];
|
||||
// Set the camera rotation for this eye
|
||||
|
||||
_eyePositions[eye] = toGlm(_eyeRenderPoses[eye].Position);
|
||||
_eyePositions[eye] = whichCamera.getRotation() * _eyePositions[eye];
|
||||
quat eyeRotation = toGlm(_eyeRenderPoses[eye].Orientation);
|
||||
|
||||
// Update our camera to what the application camera is doing
|
||||
_camera->setRotation(whichCamera.getRotation() * eyeRotation);
|
||||
_camera->setPosition(whichCamera.getPosition() + _eyePositions[eye]);
|
||||
configureCamera(*_camera);
|
||||
_camera->update(1.0f / Application::getInstance()->getFps());
|
||||
|
||||
ovrRecti & vp = _eyeViewports[eye];
|
||||
vp.Size.h = _recommendedTexSize.h * _offscreenRenderScale;
|
||||
vp.Size.w = _recommendedTexSize.w * _offscreenRenderScale;
|
||||
glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
|
||||
|
||||
renderArgs->_viewport = glm::ivec4(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
|
||||
renderArgs->_renderSide = RenderArgs::MONO;
|
||||
qApp->displaySide(renderArgs, *_camera);
|
||||
qApp->getApplicationCompositor().displayOverlayTextureHmd(renderArgs, eye);
|
||||
});
|
||||
_activeEye = ovrEye_Count;
|
||||
|
||||
gpu::FramebufferPointer finalFbo;
|
||||
finalFbo = DependencyManager::get<FramebufferCache>()->getPrimaryFramebuffer();
|
||||
glBindFramebuffer(GL_FRAMEBUFFER, 0);
|
||||
|
||||
// restore our normal viewport
|
||||
glViewport(0, 0, deviceSize.width(), deviceSize.height());
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
auto srcFboSize = finalFbo->getSize();
|
||||
|
||||
|
||||
// Blit to the oculus provided texture
|
||||
glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
|
||||
_swapFbo->Bound(oglplus::Framebuffer::Target::Draw, [&] {
|
||||
glBlitFramebuffer(
|
||||
0, 0, srcFboSize.x, srcFboSize.y,
|
||||
0, 0, _swapFbo->size.x, _swapFbo->size.y,
|
||||
GL_COLOR_BUFFER_BIT, GL_NEAREST);
|
||||
});
|
||||
|
||||
// Blit to the onscreen window
|
||||
auto destWindowSize = qApp->getDeviceSize();
|
||||
glBlitFramebuffer(
|
||||
0, 0, srcFboSize.x, srcFboSize.y,
|
||||
0, 0, destWindowSize.width(), destWindowSize.height(),
|
||||
GL_COLOR_BUFFER_BIT, GL_NEAREST);
|
||||
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
|
||||
|
||||
// Submit the frame to the Oculus SDK for timewarp and distortion
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
_sceneLayer.RenderPose[eye] = _eyeRenderPoses[eye];
|
||||
});
|
||||
auto header = &_sceneLayer.Header;
|
||||
ovrResult res = ovrHmd_SubmitFrame(_ovrHmd, _frameIndex, nullptr, &header, 1);
|
||||
Q_ASSERT(OVR_SUCCESS(res));
|
||||
_swapFbo->Increment();
|
||||
#else
|
||||
GLsync syncObject = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
|
||||
glFlush();
|
||||
|
||||
|
||||
_outputWindow->makeCurrent();
|
||||
// force the compositing context to wait for the texture
|
||||
// rendering to complete before it starts the distortion rendering,
|
||||
// but without triggering a CPU/GPU synchronization
|
||||
glWaitSync(syncObject, 0, GL_TIMEOUT_IGNORED);
|
||||
glDeleteSync(syncObject);
|
||||
|
||||
GLuint textureId = gpu::GLBackend::getTextureID(finalFbo->getRenderBuffer(0));
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
ovrGLTexture & glEyeTexture = reinterpret_cast<ovrGLTexture&>(_eyeTextures[eye]);
|
||||
glEyeTexture.OGL.TexId = textureId;
|
||||
|
||||
});
|
||||
|
||||
// restore our normal viewport
|
||||
ovrHmd_EndFrame(_ovrHmd, _eyeRenderPoses, _eyeTextures);
|
||||
glCanvas->makeCurrent();
|
||||
#endif
|
||||
|
||||
|
||||
// in order to account account for changes in the pick ray caused by head movement
|
||||
// we need to force a mouse move event on every frame (perhaps we could change this
|
||||
// to based on the head moving a minimum distance from the last position in which we
|
||||
// sent?)
|
||||
{
|
||||
QMouseEvent mouseEvent(QEvent::MouseMove, glCanvas->mapFromGlobal(QCursor::pos()),
|
||||
Qt::NoButton, Qt::NoButton, 0);
|
||||
qApp->mouseMoveEvent(&mouseEvent, 0);
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
//Tries to reconnect to the sensors
|
||||
void OculusManager::reset() {
|
||||
if (_isConnected) {
|
||||
ovrHmd_RecenterPose(_ovrHmd);
|
||||
}
|
||||
}
|
||||
|
||||
glm::vec3 OculusManager::getRelativePosition() {
|
||||
ovrTrackingState trackingState = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
|
||||
return toGlm(trackingState.HeadPose.ThePose.Position);
|
||||
}
|
||||
|
||||
glm::quat OculusManager::getOrientation() {
|
||||
ovrTrackingState trackingState = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
|
||||
return toGlm(trackingState.HeadPose.ThePose.Orientation);
|
||||
}
|
||||
|
||||
QSize OculusManager::getRenderTargetSize() {
|
||||
QSize rv;
|
||||
rv.setWidth(_renderTargetSize.w);
|
||||
rv.setHeight(_renderTargetSize.h);
|
||||
return rv;
|
||||
}
|
||||
|
||||
void OculusManager::overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
|
||||
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) {
|
||||
if (_activeEye != ovrEye_Count) {
|
||||
const ovrFovPort& port = _eyeFov[_activeEye];
|
||||
right = nearVal * port.RightTan;
|
||||
left = -nearVal * port.LeftTan;
|
||||
top = nearVal * port.UpTan;
|
||||
bottom = -nearVal * port.DownTan;
|
||||
}
|
||||
}
|
||||
|
||||
int OculusManager::getHMDScreen() {
|
||||
#ifdef Q_OS_WIN
|
||||
return -1;
|
||||
#else
|
||||
int hmdScreenIndex = -1; // unknown
|
||||
// TODO: it might be smarter to handle multiple HMDs connected in this case. but for now,
|
||||
// we will simply assume the initialization code that set up _ovrHmd picked the best hmd
|
||||
|
||||
if (_ovrHmd) {
|
||||
QString productNameFromOVR = _ovrHmd->ProductName;
|
||||
|
||||
int hmdWidth = _ovrHmd->Resolution.w;
|
||||
int hmdHeight = _ovrHmd->Resolution.h;
|
||||
int hmdAtX = _ovrHmd->WindowsPos.x;
|
||||
int hmdAtY = _ovrHmd->WindowsPos.y;
|
||||
|
||||
// we will score the likelihood that each screen is a match based on the following
|
||||
// rubrik of potential matching features
|
||||
const int EXACT_NAME_MATCH = 100;
|
||||
const int SIMILAR_NAMES = 10;
|
||||
const int EXACT_LOCATION_MATCH = 50;
|
||||
const int EXACT_RESOLUTION_MATCH = 25;
|
||||
|
||||
int bestMatchScore = 0;
|
||||
|
||||
// look at the display list and see if we can find the best match
|
||||
QDesktopWidget* desktop = QApplication::desktop();
|
||||
int screenNumber = 0;
|
||||
foreach (QScreen* screen, QGuiApplication::screens()) {
|
||||
QString screenName = screen->name();
|
||||
QRect screenRect = desktop->screenGeometry(screenNumber);
|
||||
|
||||
int screenScore = 0;
|
||||
if (screenName == productNameFromOVR) {
|
||||
screenScore += EXACT_NAME_MATCH;
|
||||
}
|
||||
if (similarStrings(screenName, productNameFromOVR)) {
|
||||
screenScore += SIMILAR_NAMES;
|
||||
}
|
||||
if (hmdWidth == screenRect.width() && hmdHeight == screenRect.height()) {
|
||||
screenScore += EXACT_RESOLUTION_MATCH;
|
||||
}
|
||||
if (hmdAtX == screenRect.x() && hmdAtY == screenRect.y()) {
|
||||
screenScore += EXACT_LOCATION_MATCH;
|
||||
}
|
||||
if (screenScore > bestMatchScore) {
|
||||
bestMatchScore = screenScore;
|
||||
hmdScreenIndex = screenNumber;
|
||||
}
|
||||
|
||||
screenNumber++;
|
||||
}
|
||||
}
|
||||
return hmdScreenIndex;
|
||||
#endif
|
||||
}
|
||||
|
||||
mat4 OculusManager::getEyeProjection(int eye) {
|
||||
return _eyeProjection[eye];
|
||||
}
|
||||
|
||||
mat4 OculusManager::getEyePose(int eye) {
|
||||
return toGlm(_eyeRenderPoses[eye]);
|
||||
}
|
||||
|
||||
mat4 OculusManager::getHeadPose() {
|
||||
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
|
||||
return toGlm(ts.HeadPose.ThePose);
|
||||
}
|
|
@@ -1,61 +0,0 @@
|
|||
//
|
||||
// OculusManager.h
|
||||
// interface/src/devices
|
||||
//
|
||||
// Created by Stephen Birarda on 5/9/13.
|
||||
// Refactored by Ben Arnold on 6/30/2014
|
||||
// Copyright 2012 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_OculusManager_h
|
||||
#define hifi_OculusManager_h
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
#include <glm/gtc/matrix_transform.hpp>
|
||||
#include <glm/gtc/type_ptr.hpp>
|
||||
|
||||
#include <QSize>
|
||||
|
||||
#include "RenderArgs.h"
|
||||
|
||||
class QOpenGLContext;
|
||||
class QGLWidget;
|
||||
class Camera;
|
||||
|
||||
/// Handles interaction with the Oculus Rift.
|
||||
class OculusManager {
|
||||
public:
|
||||
static void connect(QOpenGLContext* shareContext);
|
||||
static void disconnect();
|
||||
static bool isConnected();
|
||||
static void recalibrate();
|
||||
static void abandonCalibration();
|
||||
static void beginFrameTiming();
|
||||
static void endFrameTiming();
|
||||
static bool allowSwap();
|
||||
static void configureCamera(Camera& camera);
|
||||
static void display(QGLWidget * glCanvas, RenderArgs* renderArgs, const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera);
|
||||
static void reset();
|
||||
|
||||
static glm::vec3 getRelativePosition();
|
||||
static glm::quat getOrientation();
|
||||
static QSize getRenderTargetSize();
|
||||
|
||||
static void overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
|
||||
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane);
|
||||
|
||||
static glm::vec3 getLeftEyePosition();
|
||||
static glm::vec3 getRightEyePosition();
|
||||
static glm::vec3 getMidEyePosition();
|
||||
|
||||
static int getHMDScreen();
|
||||
|
||||
static glm::mat4 getEyeProjection(int eye);
|
||||
static glm::mat4 getEyePose(int eye);
|
||||
static glm::mat4 getHeadPose();
|
||||
};
|
||||
|
||||
#endif // hifi_OculusManager_h
|
|
@@ -1,151 +0,0 @@
|
|||
//
|
||||
// TV3DManager.cpp
|
||||
// interface/src/devices
|
||||
//
|
||||
// Created by Brad Hefta-Gaub on 12/24/13.
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "TV3DManager.h"
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
#include <glm/gtc/type_ptr.hpp>
|
||||
|
||||
#include <RenderArgs.h>
|
||||
|
||||
#include "Application.h"
|
||||
#include "Menu.h"
|
||||
|
||||
int TV3DManager::_screenWidth = 1;
|
||||
int TV3DManager::_screenHeight = 1;
|
||||
double TV3DManager::_aspect = 1.0;
|
||||
eyeFrustum TV3DManager::_leftEye;
|
||||
eyeFrustum TV3DManager::_rightEye;
|
||||
eyeFrustum* TV3DManager::_activeEye = NULL;
|
||||
|
||||
|
||||
bool TV3DManager::isConnected() {
|
||||
return Menu::getInstance()->isOptionChecked(MenuOption::Enable3DTVMode);
|
||||
}
|
||||
|
||||
void TV3DManager::connect() {
|
||||
auto deviceSize = qApp->getDeviceSize();
|
||||
configureCamera(*(qApp->getCamera()), deviceSize.width(), deviceSize.height());
|
||||
}
|
||||
|
||||
|
||||
// The basic strategy of this stereoscopic rendering is explained here:
|
||||
// http://www.orthostereo.com/geometryopengl.html
|
||||
void TV3DManager::setFrustum(Camera& whichCamera) {
|
||||
const double DTR = 0.0174532925; // degree to radians
|
||||
const double IOD = 0.05; //intraocular distance
|
||||
double fovy = DEFAULT_FIELD_OF_VIEW_DEGREES; // field of view in y-axis
|
||||
double nearZ = DEFAULT_NEAR_CLIP; // near clipping plane
|
||||
double screenZ = 0.25f; // screen projection plane
|
||||
|
||||
double top = nearZ * tan(DTR * fovy / 2.0); //sets top of frustum based on fovy and near clipping plane
|
||||
double right = _aspect * top; // sets right of frustum based on aspect ratio
|
||||
double frustumshift = (IOD / 2) * nearZ / screenZ;
|
||||
|
||||
_leftEye.top = top;
|
||||
_leftEye.bottom = -top;
|
||||
_leftEye.left = -right + frustumshift;
|
||||
_leftEye.right = right + frustumshift;
|
||||
_leftEye.modelTranslation = IOD / 2;
|
||||
|
||||
_rightEye.top = top;
|
||||
_rightEye.bottom = -top;
|
||||
_rightEye.left = -right - frustumshift;
|
||||
_rightEye.right = right - frustumshift;
|
||||
_rightEye.modelTranslation = -IOD / 2;
|
||||
}
|
||||
|
||||
void TV3DManager::configureCamera(Camera& whichCamera, int screenWidth, int screenHeight) {
|
||||
#ifdef THIS_CURRENTLY_BROKEN_WAITING_FOR_DISPLAY_PLUGINS
|
||||
if (screenHeight == 0) {
|
||||
screenHeight = 1; // prevent divide by 0
|
||||
}
|
||||
_screenWidth = screenWidth;
|
||||
_screenHeight = screenHeight;
|
||||
_aspect= (double)_screenWidth / (double)_screenHeight;
|
||||
setFrustum(whichCamera);
|
||||
|
||||
glViewport (0, 0, _screenWidth, _screenHeight); // sets drawing viewport
|
||||
#endif
|
||||
}
|
||||
|
||||
void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
|
||||
|
||||
#ifdef THIS_CURRENTLY_BROKEN_WAITING_FOR_DISPLAY_PLUGINS
|
||||
|
||||
double nearZ = DEFAULT_NEAR_CLIP; // near clipping plane
|
||||
double farZ = DEFAULT_FAR_CLIP; // far clipping plane
|
||||
|
||||
// left eye portal
|
||||
int portalX = 0;
|
||||
int portalY = 0;
|
||||
QSize deviceSize = qApp->getDeviceSize() *
|
||||
qApp->getRenderResolutionScale();
|
||||
int portalW = deviceSize.width() / 2;
|
||||
int portalH = deviceSize.height();
|
||||
|
||||
|
||||
// FIXME - glow effect is removed, 3D TV mode broken until we get display plugins working
|
||||
DependencyManager::get<GlowEffect>()->prepare(renderArgs);
|
||||
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
||||
|
||||
Camera eyeCamera;
|
||||
eyeCamera.setRotation(whichCamera.getRotation());
|
||||
eyeCamera.setPosition(whichCamera.getPosition());
|
||||
|
||||
glEnable(GL_SCISSOR_TEST);
|
||||
forEachEye([&](eyeFrustum& eye){
|
||||
_activeEye = &eye;
|
||||
glViewport(portalX, portalY, portalW, portalH);
|
||||
glScissor(portalX, portalY, portalW, portalH);
|
||||
renderArgs->_viewport = glm::ivec4(portalX, portalY, portalW, portalH);
|
||||
|
||||
glm::mat4 projection = glm::frustum<float>(eye.left, eye.right, eye.bottom, eye.top, nearZ, farZ);
|
||||
projection = glm::translate(projection, vec3(eye.modelTranslation, 0, 0));
|
||||
eyeCamera.setProjection(projection);
|
||||
renderArgs->_renderSide = RenderArgs::MONO;
|
||||
qApp->displaySide(renderArgs, eyeCamera, false);
|
||||
qApp->getApplicationCompositor().displayOverlayTexture(renderArgs);
|
||||
_activeEye = NULL;
|
||||
}, [&]{
|
||||
// render right side view
|
||||
portalX = deviceSize.width() / 2;
|
||||
});
|
||||
glDisable(GL_SCISSOR_TEST);
|
||||
|
||||
// FIXME - glow effect is removed, 3D TV mode broken until we get display plugins working
|
||||
auto finalFbo = DependencyManager::get<GlowEffect>()->render(renderArgs);
|
||||
auto fboSize = finalFbo->getSize();
|
||||
// Get the ACTUAL device size for the BLIT
|
||||
deviceSize = qApp->getDeviceSize();
|
||||
|
||||
glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
|
||||
glBlitFramebuffer(0, 0, fboSize.x, fboSize.y,
|
||||
0, 0, deviceSize.width(), deviceSize.height(),
|
||||
GL_COLOR_BUFFER_BIT, GL_NEAREST);
|
||||
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
|
||||
|
||||
// reset the viewport to how we started
|
||||
glViewport(0, 0, deviceSize.width(), deviceSize.height());
|
||||
|
||||
#endif
|
||||
}
|
||||
|
||||
void TV3DManager::overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
|
||||
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) {
|
||||
if (_activeEye) {
|
||||
left = _activeEye->left;
|
||||
right = _activeEye->right;
|
||||
bottom = _activeEye->bottom;
|
||||
top = _activeEye->top;
|
||||
}
|
||||
}
|
|
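Editor's note: the frustumshift in setFrustum() follows from similar triangles: an eye offset of IOD/2 at the screen plane (screenZ) maps to a shift of (IOD/2) * nearZ / screenZ on the near plane, which is why each eye gets an asymmetric frustum whose axes converge at the screen. A small self-contained sketch of that relationship (the constants mirror the ones above; the assert is illustrative, and NEAR_Z stands in for DEFAULT_NEAR_CLIP):

#include <cassert>
#include <cmath>

// Sketch: the near-plane shift that makes the left/right frusta converge at the
// screen plane, as computed by TV3DManager::setFrustum above.
double nearPlaneShift(double iod, double nearZ, double screenZ) {
    // similar triangles: shift / nearZ == (iod / 2) / screenZ
    return (iod / 2.0) * nearZ / screenZ;
}

int main() {
    const double IOD = 0.05;      // intraocular distance, meters
    const double NEAR_Z = 0.08;   // assumed near clip distance (placeholder value)
    const double SCREEN_Z = 0.25; // screen projection plane
    double shift = nearPlaneShift(IOD, NEAR_Z, SCREEN_Z);
    assert(std::abs(shift - 0.008) < 1e-12); // (0.05 / 2) * 0.08 / 0.25 == 0.008
    return 0;
}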
@@ -1,64 +0,0 @@
//
//  TV3DManager.h
//  interface/src/devices
//
//  Created by Brad Hefta-Gaub on 12/24/2013.
//  Copyright 2013 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_TV3DManager_h
#define hifi_TV3DManager_h

#include <iostream>

#include <glm/glm.hpp>

class Camera;
class RenderArgs;

struct eyeFrustum {
    double left;
    double right;
    double bottom;
    double top;
    float modelTranslation;
};


/// Handles interaction with 3D TVs
class TV3DManager {
public:
    static void connect();
    static bool isConnected();
    static void configureCamera(Camera& camera, int screenWidth, int screenHeight);
    static void display(RenderArgs* renderArgs, Camera& whichCamera);
    static void overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
                                       float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane);
private:
    static void setFrustum(Camera& whichCamera);
    static int _screenWidth;
    static int _screenHeight;
    static double _aspect;
    static eyeFrustum _leftEye;
    static eyeFrustum _rightEye;
    static eyeFrustum* _activeEye;

    // The first function is the code executed for each eye
    // while the second is code to be executed between the two eyes.
    // The use case here is to modify the output viewport coordinates
    // for the new eye.
    // FIXME: we'd like to have a default empty lambda for the second parameter,
    // but gcc 4.8.1 complains about it due to a bug. See
    // http://stackoverflow.com/questions/25490662/lambda-as-default-parameter-to-a-member-function-template
    template<typename F, typename FF>
    static void forEachEye(F f, FF ff) {
        f(_leftEye);
        ff();
        f(_rightEye);
    }
};

#endif // hifi_TV3DManager_h
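Editor's note: forEachEye keeps the per-eye body and the between-eye fixup together at one call site, which is why the gcc workaround above matters. A minimal call-shape sketch (written as if inside the class, since the helper is private; the viewport handling is simplified relative to the real display()):

// Sketch of the forEachEye call shape used by TV3DManager::display.
int portalX = 0;
int halfWidth = _screenWidth / 2; // left half first, then shift right
forEachEye(
    [&](eyeFrustum& eye) {
        // per-eye work: set viewport/scissor to (portalX, 0, halfWidth, _screenHeight)
        // and render with eye.left/right/bottom/top plus eye.modelTranslation
    },
    [&] {
        portalX = halfWidth; // between the eyes: move the viewport to the right half
    });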
@ -11,15 +11,16 @@
|
|||
|
||||
#include <avatar/AvatarManager.h>
|
||||
#include <avatar/MyAvatar.h>
|
||||
#include <GLCanvas.h>
|
||||
#include <HandData.h>
|
||||
#include <HFBackEvent.h>
|
||||
|
||||
#include "Application.h"
|
||||
#include "devices/MotionTracker.h"
|
||||
#include "devices/SixenseManager.h"
|
||||
#include "ControllerScriptingInterface.h"
|
||||
|
||||
// TODO: this needs to be removed, as well as any related controller-specific information
|
||||
#include <input-plugins/SixenseManager.h>
|
||||
|
||||
|
||||
ControllerScriptingInterface::ControllerScriptingInterface() :
|
||||
_mouseCaptured(false),
|
||||
|
@ -82,13 +83,14 @@ void inputChannelFromScriptValue(const QScriptValue& object, UserInputMapper::In
|
|||
|
||||
QScriptValue actionToScriptValue(QScriptEngine* engine, const UserInputMapper::Action& action) {
|
||||
QScriptValue obj = engine->newObject();
|
||||
QVector<UserInputMapper::InputChannel> inputChannels = Application::getUserInputMapper()->getInputChannelsForAction(action);
|
||||
auto userInputMapper = DependencyManager::get<UserInputMapper>();
|
||||
QVector<UserInputMapper::InputChannel> inputChannels = userInputMapper->getInputChannelsForAction(action);
|
||||
QScriptValue _inputChannels = engine->newArray(inputChannels.size());
|
||||
for (int i = 0; i < inputChannels.size(); i++) {
|
||||
_inputChannels.setProperty(i, inputChannelToScriptValue(engine, inputChannels[i]));
|
||||
}
|
||||
obj.setProperty("action", (int) action);
|
||||
obj.setProperty("actionName", Application::getUserInputMapper()->getActionName(action));
|
||||
obj.setProperty("actionName", userInputMapper->getActionName(action));
|
||||
obj.setProperty("inputChannels", _inputChannels);
|
||||
return obj;
|
||||
}
|
||||
|
@ -376,7 +378,7 @@ void ControllerScriptingInterface::releaseJoystick(int joystickIndex) {
|
|||
}
|
||||
|
||||
glm::vec2 ControllerScriptingInterface::getViewportDimensions() const {
|
||||
return Application::getInstance()->getCanvasSize();
|
||||
return Application::getInstance()->getUiSize();
|
||||
}
|
||||
|
||||
AbstractInputController* ControllerScriptingInterface::createInputController(const QString& deviceName, const QString& tracker) {
|
||||
|
@ -428,43 +430,59 @@ void ControllerScriptingInterface::updateInputControllers() {
|
|||
}
|
||||
|
||||
QVector<UserInputMapper::Action> ControllerScriptingInterface::getAllActions() {
|
||||
return Application::getUserInputMapper()->getAllActions();
|
||||
return DependencyManager::get<UserInputMapper>()->getAllActions();
|
||||
}
|
||||
|
||||
QVector<UserInputMapper::InputChannel> ControllerScriptingInterface::getInputChannelsForAction(UserInputMapper::Action action) {
|
||||
return Application::getUserInputMapper()->getInputChannelsForAction(action);
|
||||
return DependencyManager::get<UserInputMapper>()->getInputChannelsForAction(action);
|
||||
}
|
||||
|
||||
QString ControllerScriptingInterface::getDeviceName(unsigned int device) {
|
||||
return Application::getUserInputMapper()->getDeviceName((unsigned short) device);
|
||||
return DependencyManager::get<UserInputMapper>()->getDeviceName((unsigned short)device);
|
||||
}
|
||||
|
||||
QVector<UserInputMapper::InputChannel> ControllerScriptingInterface::getAllInputsForDevice(unsigned int device) {
|
||||
return Application::getUserInputMapper()->getAllInputsForDevice(device);
|
||||
return DependencyManager::get<UserInputMapper>()->getAllInputsForDevice(device);
|
||||
}
|
||||
|
||||
bool ControllerScriptingInterface::addInputChannel(UserInputMapper::InputChannel inputChannel) {
|
||||
return Application::getUserInputMapper()->addInputChannel(inputChannel._action, inputChannel._input, inputChannel._modifier, inputChannel._scale);
|
||||
return DependencyManager::get<UserInputMapper>()->addInputChannel(inputChannel._action, inputChannel._input, inputChannel._modifier, inputChannel._scale);
|
||||
}
|
||||
|
||||
bool ControllerScriptingInterface::removeInputChannel(UserInputMapper::InputChannel inputChannel) {
|
||||
return Application::getUserInputMapper()->removeInputChannel(inputChannel);
|
||||
return DependencyManager::get<UserInputMapper>()->removeInputChannel(inputChannel);
|
||||
}
|
||||
|
||||
QVector<UserInputMapper::InputPair> ControllerScriptingInterface::getAvailableInputs(unsigned int device) {
|
||||
return Application::getUserInputMapper()->getAvailableInputs((unsigned short) device);
|
||||
return DependencyManager::get<UserInputMapper>()->getAvailableInputs((unsigned short)device);
|
||||
}
|
||||
|
||||
void ControllerScriptingInterface::resetAllDeviceBindings() {
|
||||
Application::getUserInputMapper()->resetAllDeviceBindings();
|
||||
DependencyManager::get<UserInputMapper>()->resetAllDeviceBindings();
|
||||
}
|
||||
|
||||
void ControllerScriptingInterface::resetDevice(unsigned int device) {
|
||||
Application::getUserInputMapper()->resetDevice(device);
|
||||
DependencyManager::get<UserInputMapper>()->resetDevice(device);
|
||||
}
|
||||
|
||||
int ControllerScriptingInterface::findDevice(QString name) {
|
||||
return Application::getUserInputMapper()->findDevice(name);
|
||||
return DependencyManager::get<UserInputMapper>()->findDevice(name);
|
||||
}
|
||||
|
||||
float ControllerScriptingInterface::getActionValue(int action) {
|
||||
return DependencyManager::get<UserInputMapper>()->getActionState(UserInputMapper::Action(action));
|
||||
}
|
||||
|
||||
int ControllerScriptingInterface::findAction(QString actionName) {
|
||||
auto userInputMapper = DependencyManager::get<UserInputMapper>();
|
||||
auto actions = getAllActions();
|
||||
for (auto action : actions) {
|
||||
if (userInputMapper->getActionName(action) == actionName) {
|
||||
return action;
|
||||
}
|
||||
}
|
||||
// If the action isn't found, return -1
|
||||
return -1;
|
||||
}
|
||||
|
||||
InputController::InputController(int deviceTrackerId, int subTrackerId, QObject* parent) :
|
||||
|
@ -502,4 +520,4 @@ const unsigned int INPUTCONTROLLER_KEY_DEVICE_MASK = 16;
|
|||
|
||||
InputController::Key InputController::getKey() const {
|
||||
return (((_deviceTrackerId & INPUTCONTROLLER_KEY_DEVICE_MASK) << INPUTCONTROLLER_KEY_DEVICE_OFFSET) | _subTrackerId);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -14,7 +14,7 @@
|
|||
|
||||
#include <QtCore/QObject>
|
||||
|
||||
#include "ui/UserInputMapper.h"
|
||||
#include <input-plugins/UserInputMapper.h>
|
||||
|
||||
#include <AbstractControllerScriptingInterface.h>
|
||||
class PalmData;
|
||||
|
@ -86,15 +86,24 @@ public:
|
|||
|
||||
public slots:
|
||||
Q_INVOKABLE virtual QVector<UserInputMapper::Action> getAllActions();
|
||||
Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getInputChannelsForAction(UserInputMapper::Action action);
|
||||
Q_INVOKABLE virtual QString getDeviceName(unsigned int device);
|
||||
Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getAllInputsForDevice(unsigned int device);
|
||||
|
||||
Q_INVOKABLE virtual bool addInputChannel(UserInputMapper::InputChannel inputChannel);
|
||||
Q_INVOKABLE virtual bool removeInputChannel(UserInputMapper::InputChannel inputChannel);
|
||||
Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getInputChannelsForAction(UserInputMapper::Action action);
|
||||
|
||||
Q_INVOKABLE virtual QVector<UserInputMapper::InputPair> getAvailableInputs(unsigned int device);
|
||||
Q_INVOKABLE virtual void resetAllDeviceBindings();
|
||||
Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getAllInputsForDevice(unsigned int device);
|
||||
|
||||
Q_INVOKABLE virtual QString getDeviceName(unsigned int device);
|
||||
|
||||
Q_INVOKABLE virtual float getActionValue(int action);
|
||||
|
||||
Q_INVOKABLE virtual void resetDevice(unsigned int device);
|
||||
Q_INVOKABLE virtual void resetAllDeviceBindings();
|
||||
Q_INVOKABLE virtual int findDevice(QString name);
|
||||
|
||||
Q_INVOKABLE virtual int findAction(QString actionName);
|
||||
|
||||
virtual bool isPrimaryButtonPressed() const;
|
||||
virtual glm::vec2 getPrimaryJoystickPosition() const;
|
||||
|
||||
|
|
|
@ -122,6 +122,14 @@ void WebWindowClass::setURL(const QString& url) {
|
|||
_webView->setUrl(url);
|
||||
}
|
||||
|
||||
void WebWindowClass::setPosition(int x, int y) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "setPosition", Qt::AutoConnection, Q_ARG(int, x), Q_ARG(int, y));
|
||||
return;
|
||||
}
|
||||
_windowWidget->move(x, y);
|
||||
}
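Editor's note: setPosition() uses the standard Qt pattern for making a scripting call thread-safe: if the caller is not on the object's thread, re-dispatch the same slot through QMetaObject::invokeMethod and return. A hedged sketch of that pattern on a stand-alone class (the class and slot names are illustrative, not part of this commit):

#include <QtCore/QObject>
#include <QtCore/QThread>
#include <QtCore/QMetaObject>

// Sketch: the thread-bounce pattern used by WebWindowClass::setPosition above.
// Qt::AutoConnection queues the call when the caller is on a different thread.
class ThreadSafeThing : public QObject {
    Q_OBJECT
public slots:
    void setValue(int v) {
        if (QThread::currentThread() != thread()) {
            QMetaObject::invokeMethod(this, "setValue", Qt::AutoConnection, Q_ARG(int, v));
            return; // the queued call re-runs this slot on the owning thread
        }
        _value = v; // now guaranteed to execute on the object's thread
    }
private:
    int _value{ 0 };
};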
|
||||
|
||||
void WebWindowClass::raise() {
|
||||
QMetaObject::invokeMethod(_windowWidget, "showNormal", Qt::AutoConnection);
|
||||
QMetaObject::invokeMethod(_windowWidget, "raise", Qt::AutoConnection);
|
||||
|
|
|
@ -43,6 +43,7 @@ public:
|
|||
|
||||
public slots:
|
||||
void setVisible(bool visible);
|
||||
void setPosition(int x, int y);
|
||||
QString getURL() const { return _webView->url().url(); }
|
||||
void setURL(const QString& url);
|
||||
void raise();
|
||||
|
|
|
@ -11,6 +11,10 @@
|
|||
|
||||
#include "ApplicationCompositor.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include <QPropertyAnimation>
|
||||
|
||||
#include <glm/gtc/type_ptr.hpp>
|
||||
|
||||
#include <avatar/AvatarManager.h>
|
||||
|
@ -21,6 +25,8 @@
|
|||
#include "Tooltip.h"
|
||||
|
||||
#include "Application.h"
|
||||
#include <input-plugins/SixenseManager.h> // TODO: any references to sixense should be removed here
|
||||
#include <input-plugins/InputDevice.h>
|
||||
|
||||
|
||||
// Used to animate the magnification windows
|
||||
|
@ -106,7 +112,9 @@ bool raySphereIntersect(const glm::vec3 &dir, const glm::vec3 &origin, float r,
|
|||
}
|
||||
}
|
||||
|
||||
ApplicationCompositor::ApplicationCompositor() {
|
||||
ApplicationCompositor::ApplicationCompositor() :
|
||||
_alphaPropertyAnimation(new QPropertyAnimation(this, "alpha"))
|
||||
{
|
||||
memset(_reticleActive, 0, sizeof(_reticleActive));
|
||||
memset(_magActive, 0, sizeof(_reticleActive));
|
||||
memset(_magSizeMult, 0, sizeof(_magSizeMult));
|
||||
|
@ -163,6 +171,8 @@ ApplicationCompositor::ApplicationCompositor() {
|
|||
}
|
||||
}
|
||||
});
|
||||
|
||||
_alphaPropertyAnimation.reset(new QPropertyAnimation(this, "alpha"));
|
||||
}
|
||||
|
||||
ApplicationCompositor::~ApplicationCompositor() {
|
||||
|
@ -184,7 +194,8 @@ void ApplicationCompositor::bindCursorTexture(gpu::Batch& batch, uint8_t cursorI
|
|||
// Draws the FBO texture for the screen
|
||||
void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
|
||||
PROFILE_RANGE(__FUNCTION__);
|
||||
if (_alpha == 0.0f) {
|
||||
|
||||
if (_alpha <= 0.0f) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -204,7 +215,7 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
|
|||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
||||
geometryCache->useSimpleDrawPipeline(batch);
|
||||
batch.setViewportTransform(glm::ivec4(0, 0, deviceSize.width(), deviceSize.height()));
|
||||
batch.setViewportTransform(renderArgs->_viewport);
|
||||
batch.setModelTransform(Transform());
|
||||
batch.setViewTransform(Transform());
|
||||
batch.setProjectionTransform(mat4());
|
||||
|
@ -232,15 +243,17 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
|
|||
}
|
||||
|
||||
|
||||
vec2 getPolarCoordinates(const PalmData& palm) {
|
||||
vec2 ApplicationCompositor::getPolarCoordinates(const PalmData& palm) const {
|
||||
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
auto avatarOrientation = myAvatar->getOrientation();
|
||||
auto eyePos = myAvatar->getDefaultEyePosition();
|
||||
glm::vec3 tip = myAvatar->getLaserPointerTipPosition(&palm);
|
||||
// Direction of the tip relative to the eye
|
||||
glm::vec3 tipDirection = tip - eyePos;
|
||||
// orient into avatar space
|
||||
tipDirection = glm::inverse(avatarOrientation) * tipDirection;
|
||||
glm::vec3 relativePos = myAvatar->getDefaultEyePosition();
|
||||
glm::quat rotation = myAvatar->getOrientation();
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::StandingHMDSensorMode)) {
|
||||
relativePos = _modelTransform.getTranslation();
|
||||
rotation = _modelTransform.getRotation();
|
||||
}
|
||||
glm::vec3 tipDirection = tip - relativePos;
|
||||
tipDirection = glm::inverse(rotation) * tipDirection;
|
||||
// Normalize for trig functions
|
||||
tipDirection = glm::normalize(tipDirection);
|
||||
// Convert to polar coordinates
|
||||
|
@ -251,7 +264,8 @@ vec2 getPolarCoordinates(const PalmData& palm) {
|
|||
// Draws the FBO texture for Oculus rift.
|
||||
void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int eye) {
|
||||
PROFILE_RANGE(__FUNCTION__);
|
||||
if (_alpha == 0.0f) {
|
||||
|
||||
if (_alpha <= 0.0f) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -278,11 +292,13 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
|
|||
|
||||
batch.setResourceTexture(0, overlayFramebuffer->getRenderBuffer(0));
|
||||
|
||||
batch.setViewTransform(Transform());
|
||||
batch.setProjectionTransform(qApp->getEyeProjection(eye));
|
||||
mat4 camMat;
|
||||
_cameraBaseTransform.getMatrix(camMat);
|
||||
camMat = camMat * qApp->getEyePose(eye);
|
||||
batch.setViewportTransform(renderArgs->_viewport);
|
||||
batch.setViewTransform(camMat);
|
||||
|
||||
mat4 eyePose = qApp->getEyePose(eye);
|
||||
glm::mat4 overlayXfm = glm::inverse(eyePose);
|
||||
batch.setProjectionTransform(qApp->getEyeProjection(eye));
|
||||
|
||||
#ifdef DEBUG_OVERLAY
|
||||
{
|
||||
|
@ -291,7 +307,9 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
|
|||
}
|
||||
#else
|
||||
{
|
||||
batch.setModelTransform(overlayXfm);
|
||||
//batch.setModelTransform(overlayXfm);
|
||||
|
||||
batch.setModelTransform(_modelTransform);
|
||||
drawSphereSection(batch);
|
||||
}
|
||||
#endif
|
||||
|
@ -302,8 +320,11 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
|
|||
|
||||
bindCursorTexture(batch);
|
||||
|
||||
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
//Controller Pointers
|
||||
glm::mat4 overlayXfm;
|
||||
_modelTransform.getMatrix(overlayXfm);
|
||||
|
||||
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
for (int i = 0; i < (int)myAvatar->getHand()->getNumPalms(); i++) {
|
||||
PalmData& palm = myAvatar->getHand()->getPalms()[i];
|
||||
if (palm.isActive()) {
|
||||
|
@ -345,13 +366,18 @@ void ApplicationCompositor::computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& or
|
|||
|
||||
// We need the RAW camera orientation and position, because this is what the overlay is
|
||||
// rendered relative to
|
||||
const glm::vec3 overlayPosition = qApp->getCamera()->getPosition();
|
||||
const glm::quat overlayOrientation = qApp->getCamera()->getRotation();
|
||||
glm::vec3 overlayPosition = qApp->getCamera()->getPosition();
|
||||
glm::quat overlayOrientation = qApp->getCamera()->getRotation();
|
||||
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::StandingHMDSensorMode)) {
|
||||
overlayPosition = _modelTransform.getTranslation();
|
||||
overlayOrientation = _modelTransform.getRotation();
|
||||
}
|
||||
|
||||
// Intersection UI overlay space
|
||||
glm::vec3 worldSpaceDirection = overlayOrientation * overlaySpaceDirection;
|
||||
glm::vec3 worldSpaceIntersection = (glm::normalize(worldSpaceDirection) * _oculusUIRadius) + overlayPosition;
|
||||
glm::vec3 worldSpaceHeadPosition = (overlayOrientation * glm::vec3(qApp->getHeadPose()[3])) + overlayPosition;
|
||||
glm::vec3 worldSpaceHeadPosition = (overlayOrientation * extractTranslation(qApp->getHMDSensorPose())) + overlayPosition;
|
||||
|
||||
// Intersection in world space
|
||||
origin = worldSpaceHeadPosition;
|
||||
|
@ -410,13 +436,15 @@ bool ApplicationCompositor::calculateRayUICollisionPoint(const glm::vec3& positi
|
|||
void ApplicationCompositor::renderPointers(gpu::Batch& batch) {
|
||||
if (qApp->isHMDMode() && !qApp->getLastMouseMoveWasSimulated() && !qApp->isMouseHidden()) {
|
||||
//If we are in oculus, render reticle later
|
||||
auto trueMouse = qApp->getTrueMouse();
|
||||
trueMouse /= qApp->getCanvasSize();
|
||||
QPoint position = QPoint(qApp->getTrueMouseX(), qApp->getTrueMouseY());
|
||||
_reticlePosition[MOUSE] = position;
|
||||
_reticleActive[MOUSE] = true;
|
||||
_magActive[MOUSE] = _magnifier;
|
||||
_reticleActive[LEFT_CONTROLLER] = false;
|
||||
_reticleActive[RIGHT_CONTROLLER] = false;
|
||||
} else if (qApp->getLastMouseMoveWasSimulated() && Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput)) {
|
||||
} else if (qApp->getLastMouseMoveWasSimulated() && Menu::getInstance()->isOptionChecked(MenuOption::HandMouseInput)) {
|
||||
//only render controller pointer if we aren't already rendering a mouse pointer
|
||||
_reticleActive[MOUSE] = false;
|
||||
_magActive[MOUSE] = false;
|
||||
|
@ -491,6 +519,7 @@ void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {
|
|||
|
||||
auto canvasSize = qApp->getCanvasSize();
|
||||
int mouseX, mouseY;
|
||||
|
||||
// Get directon relative to avatar orientation
|
||||
glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * palmData->getFingerDirection();
|
||||
|
||||
|
@ -499,7 +528,7 @@ void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {
|
|||
float yAngle = 0.5f - ((atan2f(direction.z, direction.y) + (float)PI_OVER_TWO));
|
||||
|
||||
// Get the pixel range over which the xAngle and yAngle are scaled
|
||||
float cursorRange = canvasSize.x * SixenseManager::getInstance().getCursorPixelRangeMult();
|
||||
float cursorRange = canvasSize.x * InputDevice::getCursorPixelRangeMult();
|
||||
|
||||
mouseX = (canvasSize.x / 2.0f + cursorRange * xAngle);
|
||||
mouseY = (canvasSize.y / 2.0f + cursorRange * yAngle);
|
||||
|
@ -611,6 +640,19 @@ void ApplicationCompositor::drawSphereSection(gpu::Batch& batch) {
|
|||
batch.setInputFormat(streamFormat);
|
||||
|
||||
static const int VERTEX_STRIDE = sizeof(vec3) + sizeof(vec2) + sizeof(vec4);
|
||||
|
||||
if (_prevAlpha != _alpha) {
|
||||
// adjust alpha by munging vertex color alpha.
|
||||
// FIXME we should probably just use a uniform for this.
|
||||
float* floatPtr = reinterpret_cast<float*>(_hemiVertices->editData());
|
||||
const auto ALPHA_FLOAT_OFFSET = (sizeof(vec3) + sizeof(vec2) + sizeof(vec3)) / sizeof(float);
|
||||
const auto VERTEX_FLOAT_STRIDE = (sizeof(vec3) + sizeof(vec2) + sizeof(vec4)) / sizeof(float);
|
||||
const auto NUM_VERTS = _hemiVertices->getSize() / VERTEX_STRIDE;
|
||||
for (size_t i = 0; i < NUM_VERTS; i++) {
|
||||
floatPtr[i * VERTEX_FLOAT_STRIDE + ALPHA_FLOAT_OFFSET] = _alpha;
|
||||
}
|
||||
}
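Editor's note: the offsets in the alpha-munging loop above come straight from the hemisphere vertex layout. A tiny sketch spelling out that arithmetic (assumes standard tightly-packed glm types):

#include <glm/glm.hpp>

// Sketch: the vertex layout the loop above assumes.
// Each vertex is position (vec3) + texcoord (vec2) + color (vec4), tightly packed.
static_assert(sizeof(glm::vec3) + sizeof(glm::vec2) + sizeof(glm::vec4) == 9 * sizeof(float),
              "hemisphere vertex stride should be 9 floats");
// The alpha channel is the last float of the color, so within each vertex it sits at
// float index 3 (position) + 2 (texcoord) + 3 (rgb) == 8, which is the
// ALPHA_FLOAT_OFFSET computed in drawSphereSection().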
|
||||
|
||||
gpu::BufferView posView(_hemiVertices, 0, _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::POSITION)._element);
|
||||
gpu::BufferView uvView(_hemiVertices, sizeof(vec3), _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::TEXCOORD)._element);
|
||||
gpu::BufferView colView(_hemiVertices, sizeof(vec3) + sizeof(vec2), _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::COLOR)._element);
|
||||
|
@ -700,3 +742,29 @@ void ApplicationCompositor::updateTooltips() {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
static const float FADE_DURATION = 500.0f;
|
||||
void ApplicationCompositor::fadeIn() {
|
||||
_fadeInAlpha = true;
|
||||
|
||||
_alphaPropertyAnimation->setDuration(FADE_DURATION);
|
||||
_alphaPropertyAnimation->setStartValue(_alpha);
|
||||
_alphaPropertyAnimation->setEndValue(1.0f);
|
||||
_alphaPropertyAnimation->start();
|
||||
}
|
||||
void ApplicationCompositor::fadeOut() {
|
||||
_fadeInAlpha = false;
|
||||
|
||||
_alphaPropertyAnimation->setDuration(FADE_DURATION);
|
||||
_alphaPropertyAnimation->setStartValue(_alpha);
|
||||
_alphaPropertyAnimation->setEndValue(0.0f);
|
||||
_alphaPropertyAnimation->start();
|
||||
}
|
||||
|
||||
void ApplicationCompositor::toggle() {
|
||||
if (_fadeInAlpha) {
|
||||
fadeOut();
|
||||
} else {
|
||||
fadeIn();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,6 +10,7 @@
|
|||
#define hifi_ApplicationCompositor_h
|
||||
|
||||
#include <QObject>
|
||||
#include <QPropertyAnimation>
|
||||
#include <cstdint>
|
||||
|
||||
#include <EntityItemID.h>
|
||||
|
@ -33,6 +34,8 @@ const float DEFAULT_HMD_UI_ANGULAR_SIZE = 72.0f;
|
|||
// facilities of this class
|
||||
class ApplicationCompositor : public QObject {
|
||||
Q_OBJECT
|
||||
|
||||
Q_PROPERTY(float alpha READ getAlpha WRITE setAlpha)
|
||||
public:
|
||||
ApplicationCompositor();
|
||||
~ApplicationCompositor();
|
||||
|
@ -64,6 +67,19 @@ public:
|
|||
void computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& origin, glm::vec3& direction) const;
|
||||
uint32_t getOverlayTexture() const;
|
||||
|
||||
void setCameraBaseTransform(const Transform& transform) { _cameraBaseTransform = transform; }
|
||||
const Transform& getCameraBaseTransform() const { return _cameraBaseTransform; }
|
||||
|
||||
void setModelTransform(const Transform& transform) { _modelTransform = transform; }
|
||||
const Transform& getModelTransform() const { return _modelTransform; }
|
||||
|
||||
void fadeIn();
|
||||
void fadeOut();
|
||||
void toggle();
|
||||
|
||||
float getAlpha() const { return _alpha; }
|
||||
void setAlpha(float alpha) { _alpha = alpha; }
|
||||
|
||||
static glm::vec2 directionToSpherical(const glm::vec3 & direction);
|
||||
static glm::vec3 sphericalToDirection(const glm::vec2 & sphericalPos);
|
||||
static glm::vec2 screenToSpherical(const glm::vec2 & screenPos);
|
||||
|
@ -78,7 +94,8 @@ private:
|
|||
|
||||
void renderPointers(gpu::Batch& batch);
|
||||
void renderControllerPointers(gpu::Batch& batch);
|
||||
void renderPointersOculus(gpu::Batch& batch);
|
||||
|
||||
vec2 getPolarCoordinates(const PalmData& palm) const;
|
||||
|
||||
// Support for hovering and tooltips
|
||||
static EntityItemID _noItemId;
|
||||
|
@ -100,6 +117,8 @@ private:
|
|||
bool _magnifier{ true };
|
||||
|
||||
float _alpha{ 1.0f };
|
||||
float _prevAlpha{ 1.0f };
|
||||
float _fadeInAlpha{ true };
|
||||
float _oculusUIRadius{ 1.0f };
|
||||
|
||||
QMap<uint16_t, gpu::TexturePointer> _cursors;
|
||||
|
@ -115,6 +134,11 @@ private:
|
|||
glm::vec3 _previousMagnifierBottomRight;
|
||||
glm::vec3 _previousMagnifierTopLeft;
|
||||
glm::vec3 _previousMagnifierTopRight;
|
||||
|
||||
Transform _modelTransform;
|
||||
Transform _cameraBaseTransform;
|
||||
|
||||
std::unique_ptr<QPropertyAnimation> _alphaPropertyAnimation;
|
||||
};
|
||||
|
||||
#endif // hifi_ApplicationCompositor_h
|
||||
|
|
|
@ -15,7 +15,6 @@
|
|||
#include <AudioClient.h>
|
||||
#include <avatar/AvatarManager.h>
|
||||
#include <devices/Faceshift.h>
|
||||
#include <devices/SixenseManager.h>
|
||||
#include <NetworkingConstants.h>
|
||||
|
||||
#include "Application.h"
|
||||
|
|
|
@ -19,48 +19,59 @@
|
|||
#include <QScreen>
|
||||
#include <QWindow>
|
||||
|
||||
#include <plugins/PluginManager.h>
|
||||
#include <display-plugins/DisplayPlugin.h>
|
||||
|
||||
#include "MainWindow.h"
|
||||
#include "Menu.h"
|
||||
#include "ui/DialogsManager.h"
|
||||
#include "ui/HMDToolsDialog.h"
|
||||
#include "devices/OculusManager.h"
|
||||
|
||||
static const int WIDTH = 350;
|
||||
static const int HEIGHT = 100;
|
||||
|
||||
HMDToolsDialog::HMDToolsDialog(QWidget* parent) :
|
||||
QDialog(parent, Qt::Window | Qt::WindowCloseButtonHint | Qt::WindowStaysOnTopHint) ,
|
||||
_previousScreen(NULL),
|
||||
_hmdScreen(NULL),
|
||||
_hmdScreenNumber(-1),
|
||||
_switchModeButton(NULL),
|
||||
_debugDetails(NULL),
|
||||
_previousDialogScreen(NULL),
|
||||
_inHDMMode(false)
|
||||
QDialog(parent, Qt::Window | Qt::WindowCloseButtonHint | Qt::WindowStaysOnTopHint)
|
||||
{
|
||||
this->setWindowTitle("HMD Tools");
|
||||
// FIXME do we want to support more than one connected HMD? It seems like a pretty corner case
|
||||
foreach(auto displayPlugin, PluginManager::getInstance()->getDisplayPlugins()) {
|
||||
// The first plugin is always the standard 2D display, by convention
|
||||
if (_defaultPluginName.isEmpty()) {
|
||||
_defaultPluginName = displayPlugin->getName();
|
||||
continue;
|
||||
}
|
||||
|
||||
if (displayPlugin->isHmd()) {
|
||||
// Not all HMD's have corresponding screens
|
||||
if (displayPlugin->getHmdScreen() >= 0) {
|
||||
_hmdScreenNumber = displayPlugin->getHmdScreen();
|
||||
}
|
||||
_hmdPluginName = displayPlugin->getName();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
setWindowTitle("HMD Tools");
|
||||
|
||||
// Create layouter
|
||||
QFormLayout* form = new QFormLayout();
|
||||
const int WIDTH = 350;
|
||||
{
|
||||
QFormLayout* form = new QFormLayout();
|
||||
// Add a button to enter
|
||||
_switchModeButton = new QPushButton("Toggle HMD Mode");
|
||||
if (_hmdPluginName.isEmpty()) {
|
||||
_switchModeButton->setEnabled(false);
|
||||
}
|
||||
// Add a button to enter
|
||||
_switchModeButton->setFixedWidth(WIDTH);
|
||||
form->addRow("", _switchModeButton);
|
||||
// Create a label with debug details...
|
||||
_debugDetails = new QLabel();
|
||||
_debugDetails->setFixedSize(WIDTH, HEIGHT);
|
||||
form->addRow("", _debugDetails);
|
||||
setLayout(form);
|
||||
}
|
||||
|
||||
// Add a button to enter
|
||||
_switchModeButton = new QPushButton("Enter HMD Mode");
|
||||
_switchModeButton->setFixedWidth(WIDTH);
|
||||
form->addRow("", _switchModeButton);
|
||||
connect(_switchModeButton,SIGNAL(clicked(bool)),this,SLOT(switchModeClicked(bool)));
|
||||
|
||||
// Create a label with debug details...
|
||||
_debugDetails = new QLabel();
|
||||
_debugDetails->setText(getDebugDetails());
|
||||
const int HEIGHT = 100;
|
||||
_debugDetails->setFixedSize(WIDTH, HEIGHT);
|
||||
form->addRow("", _debugDetails);
|
||||
|
||||
this->QDialog::setLayout(form);
|
||||
|
||||
Application::getInstance()->getWindow()->activateWindow();
|
||||
|
||||
// watch for our application window moving screens. If it does we want to update our screen details
|
||||
QWindow* mainWindow = Application::getInstance()->getWindow()->windowHandle();
|
||||
connect(mainWindow, &QWindow::screenChanged, this, &HMDToolsDialog::applicationWindowScreenChanged);
|
||||
qApp->getWindow()->activateWindow();
|
||||
|
||||
// watch for our dialog window moving screens. If it does we want to enforce our rules about
|
||||
// what screens we're allowed on
|
||||
|
@ -82,11 +93,31 @@ HMDToolsDialog::HMDToolsDialog(QWidget* parent) :
|
|||
watchWindow(dialogsManager->getLodToolsDialog()->windowHandle());
|
||||
}
|
||||
|
||||
connect(_switchModeButton, &QPushButton::clicked, [this]{
|
||||
toggleHMDMode();
|
||||
});
|
||||
|
||||
// when the application is about to quit, leave HDM mode
|
||||
connect(Application::getInstance(), SIGNAL(beforeAboutToQuit()), this, SLOT(aboutToQuit()));
|
||||
connect(qApp, &Application::beforeAboutToQuit, [this]{
|
||||
// FIXME this is ineffective because it doesn't trigger the menu to
|
||||
// save the fact that VR Mode is not checked.
|
||||
leaveHMDMode();
|
||||
});
|
||||
|
||||
connect(qApp, &Application::activeDisplayPluginChanged, [this]{
|
||||
updateUi();
|
||||
});
|
||||
|
||||
// watch for our application window moving screens. If it does we want to update our screen details
|
||||
QWindow* mainWindow = Application::getInstance()->getWindow()->windowHandle();
|
||||
connect(mainWindow, &QWindow::screenChanged, [this]{
|
||||
updateUi();
|
||||
});
|
||||
|
||||
// keep track of changes to the number of screens
|
||||
connect(QApplication::desktop(), &QDesktopWidget::screenCountChanged, this, &HMDToolsDialog::screenCountChanged);
|
||||
|
||||
updateUi();
|
||||
}
|
||||
|
||||
HMDToolsDialog::~HMDToolsDialog() {
|
||||
|
@ -96,18 +127,13 @@ HMDToolsDialog::~HMDToolsDialog() {
|
|||
_windowWatchers.clear();
|
||||
}
|
||||
|
||||
void HMDToolsDialog::applicationWindowScreenChanged(QScreen* screen) {
|
||||
_debugDetails->setText(getDebugDetails());
|
||||
}
|
||||
|
||||
QString HMDToolsDialog::getDebugDetails() const {
|
||||
QString results;
|
||||
|
||||
int hmdScreenNumber = OculusManager::getHMDScreen();
|
||||
if (hmdScreenNumber >= 0) {
|
||||
results += "HMD Screen: " + QGuiApplication::screens()[hmdScreenNumber]->name() + "\n";
|
||||
if (_hmdScreenNumber >= 0) {
|
||||
results += "HMD Screen: " + QGuiApplication::screens()[_hmdScreenNumber]->name() + "\n";
|
||||
} else {
|
||||
results += "HMD Screen Name: Unknown\n";
|
||||
results += "HMD Screen Name: N/A\n";
|
||||
}
|
||||
|
||||
int desktopPrimaryScreenNumber = QApplication::desktop()->primaryScreen();
|
||||
|
@ -122,37 +148,25 @@ QString HMDToolsDialog::getDebugDetails() const {
|
|||
return results;
|
||||
}
|
||||
|
||||
void HMDToolsDialog::switchModeClicked(bool checked) {
|
||||
if (!_inHDMMode) {
|
||||
enterHDMMode();
|
||||
void HMDToolsDialog::toggleHMDMode() {
|
||||
if (!qApp->isHMDMode()) {
|
||||
enterHMDMode();
|
||||
} else {
|
||||
leaveHDMMode();
|
||||
leaveHMDMode();
|
||||
}
|
||||
}
|
||||
|
||||
void HMDToolsDialog::enterHDMMode() {
|
||||
if (!_inHDMMode) {
|
||||
_switchModeButton->setText("Leave HMD Mode");
|
||||
_debugDetails->setText(getDebugDetails());
|
||||
|
||||
// if we're on a single screen setup, then hide our tools window when entering HMD mode
|
||||
if (QApplication::desktop()->screenCount() == 1) {
|
||||
close();
|
||||
}
|
||||
|
||||
Application::getInstance()->setEnableVRMode(true);
|
||||
|
||||
_inHDMMode = true;
|
||||
}
|
||||
}
|
||||
|
||||
void HMDToolsDialog::leaveHDMMode() {
|
||||
if (_inHDMMode) {
|
||||
_switchModeButton->setText("Enter HMD Mode");
|
||||
_debugDetails->setText(getDebugDetails());
|
||||
Application::getInstance()->setEnableVRMode(false);
|
||||
void HMDToolsDialog::enterHMDMode() {
|
||||
if (!qApp->isHMDMode()) {
|
||||
Application::getInstance()->setActiveDisplayPlugin(_hmdPluginName);
|
||||
Application::getInstance()->getWindow()->activateWindow();
|
||||
}
|
||||
}
|
||||
|
||||
void HMDToolsDialog::leaveHMDMode() {
|
||||
if (qApp->isHMDMode()) {
|
||||
Application::getInstance()->setActiveDisplayPlugin(_defaultPluginName);
|
||||
Application::getInstance()->getWindow()->activateWindow();
|
||||
_inHDMMode = false;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -163,7 +177,7 @@ void HMDToolsDialog::reject() {
|
|||
|
||||
void HMDToolsDialog::closeEvent(QCloseEvent* event) {
|
||||
// TODO: consider if we want to prevent closing of this window with event->ignore();
|
||||
this->QDialog::closeEvent(event);
|
||||
QDialog::closeEvent(event);
|
||||
emit closed();
|
||||
}
|
||||
|
||||
|
@ -174,9 +188,15 @@ void HMDToolsDialog::centerCursorOnWidget(QWidget* widget) {
|
|||
QCursor::setPos(screen, windowCenter);
|
||||
}
|
||||
|
||||
void HMDToolsDialog::updateUi() {
|
||||
_switchModeButton->setText(qApp->isHMDMode() ? "Leave HMD Mode" : "Enter HMD Mode");
|
||||
_debugDetails->setText(getDebugDetails());
|
||||
}
|
||||
|
||||
void HMDToolsDialog::showEvent(QShowEvent* event) {
|
||||
// center the cursor on the hmd tools dialog
|
||||
centerCursorOnWidget(this);
|
||||
updateUi();
|
||||
}
|
||||
|
||||
void HMDToolsDialog::hideEvent(QHideEvent* event) {
|
||||
|
@ -184,33 +204,31 @@ void HMDToolsDialog::hideEvent(QHideEvent* event) {
|
|||
centerCursorOnWidget(Application::getInstance()->getWindow());
|
||||
}
|
||||
|
||||
|
||||
void HMDToolsDialog::aboutToQuit() {
|
||||
if (_inHDMMode) {
|
||||
// FIXME this is ineffective because it doesn't trigger the menu to
|
||||
// save the fact that VR Mode is not checked.
|
||||
leaveHDMMode();
|
||||
}
|
||||
}
|
||||
|
||||
void HMDToolsDialog::screenCountChanged(int newCount) {
|
||||
if (!OculusManager::isConnected()) {
|
||||
//OculusManager::connect();
|
||||
int hmdScreenNumber = -1;
|
||||
auto displayPlugins = PluginManager::getInstance()->getDisplayPlugins();
|
||||
foreach(auto dp, displayPlugins) {
|
||||
if (dp->isHmd()) {
|
||||
if (dp->getHmdScreen() >= 0) {
|
||||
hmdScreenNumber = dp->getHmdScreen();
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
int hmdScreenNumber = OculusManager::getHMDScreen();
|
||||
|
||||
if (_inHDMMode && _hmdScreenNumber != hmdScreenNumber) {
|
||||
if (qApp->isHMDMode() && _hmdScreenNumber != hmdScreenNumber) {
|
||||
qDebug() << "HMD Display changed WHILE IN HMD MODE";
|
||||
leaveHDMMode();
|
||||
leaveHMDMode();
|
||||
|
||||
// if there is a new best HDM screen then go back into HDM mode after done leaving
|
||||
if (hmdScreenNumber >= 0) {
|
||||
qDebug() << "Trying to go back into HDM Mode";
|
||||
qDebug() << "Trying to go back into HMD Mode";
|
||||
const int SLIGHT_DELAY = 2000;
|
||||
QTimer::singleShot(SLIGHT_DELAY, this, SLOT(enterHDMMode()));
|
||||
QTimer::singleShot(SLIGHT_DELAY, [this]{
|
||||
enterHMDMode();
|
||||
});
|
||||
}
|
||||
}
|
||||
_debugDetails->setText(getDebugDetails());
|
||||
}
|
||||
|
||||
void HMDToolsDialog::watchWindow(QWindow* window) {
|
||||
|
@ -247,9 +265,8 @@ void HMDWindowWatcher::windowScreenChanged(QScreen* screen) {
|
|||
// if we have more than one screen, and a known hmdScreen then try to
|
||||
// keep our dialog off of the hmdScreen
|
||||
if (QApplication::desktop()->screenCount() > 1) {
|
||||
|
||||
int hmdScreenNumber = _hmdTools->_hmdScreenNumber;
|
||||
// we want to use a local variable here because we are not necesarily in HMD mode
|
||||
int hmdScreenNumber = OculusManager::getHMDScreen();
|
||||
if (hmdScreenNumber >= 0) {
|
||||
QScreen* hmdScreen = QGuiApplication::screens()[hmdScreenNumber];
|
||||
if (screen == hmdScreen) {
|
||||
|
|
|
@ -34,9 +34,6 @@ signals:
|
|||
|
||||
public slots:
|
||||
void reject();
|
||||
void switchModeClicked(bool checked);
|
||||
void applicationWindowScreenChanged(QScreen* screen);
|
||||
void aboutToQuit();
|
||||
void screenCountChanged(int newCount);
|
||||
|
||||
protected:
|
||||
|
@ -46,20 +43,24 @@ protected:
|
|||
|
||||
private:
|
||||
void centerCursorOnWidget(QWidget* widget);
|
||||
void enterHDMMode();
|
||||
void leaveHDMMode();
|
||||
void enterHMDMode();
|
||||
void leaveHMDMode();
|
||||
void toggleHMDMode();
|
||||
void updateUi();
|
||||
|
||||
QScreen* _previousScreen;
|
||||
QScreen* _hmdScreen;
|
||||
int _hmdScreenNumber;
|
||||
QPushButton* _switchModeButton;
|
||||
QLabel* _debugDetails;
|
||||
QScreen* _previousScreen{ nullptr };
|
||||
QScreen* _hmdScreen{ nullptr };
|
||||
int _hmdScreenNumber{ -1 };
|
||||
QPushButton* _switchModeButton{ nullptr };
|
||||
QLabel* _debugDetails{ nullptr };
|
||||
|
||||
QRect _previousDialogRect;
|
||||
QScreen* _previousDialogScreen;
|
||||
bool _inHDMMode;
|
||||
QScreen* _previousDialogScreen{ nullptr };
|
||||
QString _hmdPluginName;
|
||||
QString _defaultPluginName;
|
||||
|
||||
QHash<QWindow*, HMDWindowWatcher*> _windowWatchers;
|
||||
friend class HMDWindowWatcher;
|
||||
};
|
||||
|
||||
|
||||
|
|
157 interface/src/ui/OverlayConductor.cpp Normal file
@@ -0,0 +1,157 @@
//
//  OverlayConductor.cpp
//  interface/src/ui
//
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "Application.h"
#include "InterfaceLogging.h"
#include "avatar/AvatarManager.h"

#include "OverlayConductor.h"

OverlayConductor::OverlayConductor() {
}

OverlayConductor::~OverlayConductor() {
}

void OverlayConductor::update(float dt) {

    updateMode();

    switch (_mode) {
    case SITTING: {
        // when sitting, the overlay is at the origin, facing down the -z axis.
        // the camera is taken directly from the HMD.
        Transform identity;
        qApp->getApplicationCompositor().setModelTransform(identity);
        qApp->getApplicationCompositor().setCameraBaseTransform(identity);
        break;
    }
    case STANDING: {
        // when standing, the overlay is at a reference position, which is set when the overlay is
        // enabled. The camera is taken directly from the HMD, but in world space.
        // So the sensorToWorldMatrix must be applied.
        MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
        Transform t;
        t.evalFromRawMatrix(myAvatar->getSensorToWorldMatrix());
        qApp->getApplicationCompositor().setCameraBaseTransform(t);

        // detect when head moves out side of sweet spot, or looks away.
        mat4 headMat = myAvatar->getSensorToWorldMatrix() * qApp->getHMDSensorPose();
        vec3 headWorldPos = extractTranslation(headMat);
        vec3 headForward = glm::quat_cast(headMat) * glm::vec3(0.0f, 0.0f, -1.0f);
        Transform modelXform = qApp->getApplicationCompositor().getModelTransform();
        vec3 compositorWorldPos = modelXform.getTranslation();
        vec3 compositorForward = modelXform.getRotation() * glm::vec3(0.0f, 0.0f, -1.0f);
        const float MAX_COMPOSITOR_DISTANCE = 0.6f;
        const float MAX_COMPOSITOR_ANGLE = 110.0f;
        if (_enabled && (glm::distance(headWorldPos, compositorWorldPos) > MAX_COMPOSITOR_DISTANCE ||
                         glm::dot(headForward, compositorForward) < cosf(glm::radians(MAX_COMPOSITOR_ANGLE)))) {
            // fade out the overlay
            setEnabled(false);
        }
        break;
    }
    case FLAT:
        // do nothing
        break;
    }
}

void OverlayConductor::updateMode() {

    Mode newMode;
    if (qApp->isHMDMode()) {
        MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
        if (myAvatar->getStandingHMDSensorMode()) {
            newMode = STANDING;
        } else {
            newMode = SITTING;
        }
    } else {
        newMode = FLAT;
    }

    if (newMode != _mode) {
        switch (newMode) {
        case SITTING: {
            // enter the SITTING state
            // place the overlay at origin
            Transform identity;
            qApp->getApplicationCompositor().setModelTransform(identity);
            break;
        }
        case STANDING: {
            // enter the STANDING state
            // place the overlay at the current hmd position in world space
            MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
            auto camMat = cancelOutRollAndPitch(myAvatar->getSensorToWorldMatrix() * qApp->getHMDSensorPose());
            Transform t;
            t.setTranslation(extractTranslation(camMat));
            t.setRotation(glm::quat_cast(camMat));
            qApp->getApplicationCompositor().setModelTransform(t);
            break;
        }

        case FLAT:
            // do nothing
            break;
        }
    }

    _mode = newMode;
}

void OverlayConductor::setEnabled(bool enabled) {

    if (enabled == _enabled) {
        return;
    }

    if (_enabled) {
        // alpha fadeOut the overlay mesh.
        qApp->getApplicationCompositor().fadeOut();

        // disable mouse clicks from script
        qApp->getOverlays().disable();

        // disable QML events
        auto offscreenUi = DependencyManager::get<OffscreenUi>();
        offscreenUi->getRootItem()->setEnabled(false);

        _enabled = false;
    } else {
        // alpha fadeIn the overlay mesh.
        qApp->getApplicationCompositor().fadeIn();

        // enable mouse clicks from script
        qApp->getOverlays().enable();

        // enable QML events
        auto offscreenUi = DependencyManager::get<OffscreenUi>();
        offscreenUi->getRootItem()->setEnabled(true);

        if (_mode == STANDING) {
            // place the overlay at the current hmd position in world space
            MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
            auto camMat = cancelOutRollAndPitch(myAvatar->getSensorToWorldMatrix() * qApp->getHMDSensorPose());
            Transform t;
            t.setTranslation(extractTranslation(camMat));
            t.setRotation(glm::quat_cast(camMat));
            qApp->getApplicationCompositor().setModelTransform(t);
        }

        _enabled = true;
    }
}

bool OverlayConductor::getEnabled() const {
    return _enabled;
}
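Editor's note: the STANDING branch relies on cancelOutRollAndPitch() to anchor the overlay level with the horizon. The project defines that helper elsewhere; the sketch below shows one plausible yaw-only implementation of the same idea, not necessarily the committed one:

#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

// Sketch: keep only the yaw (rotation about +Y) and translation of a transform,
// similar in spirit to the cancelOutRollAndPitch() used above (angles in radians).
glm::mat4 yawOnly(const glm::mat4& m) {
    glm::vec3 forward = glm::vec3(m * glm::vec4(0.0f, 0.0f, -1.0f, 0.0f));
    forward.y = 0.0f; // project the forward vector onto the horizontal plane
    if (glm::length(forward) < 1e-5f) {
        forward = glm::vec3(0.0f, 0.0f, -1.0f); // degenerate: looking straight up/down
    }
    float yaw = atan2f(-forward.x, -forward.z); // yaw angle about +Y
    glm::mat4 result = glm::rotate(glm::mat4(1.0f), yaw, glm::vec3(0.0f, 1.0f, 0.0f));
    result[3] = m[3]; // keep the original translation
    return result;
}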
36 interface/src/ui/OverlayConductor.h Normal file
@@ -0,0 +1,36 @@
//
//  OverlayConductor.h
//  interface/src/ui
//
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_OverlayConductor_h
#define hifi_OverlayConductor_h

class OverlayConductor {
public:
    OverlayConductor();
    ~OverlayConductor();

    void update(float dt);
    void setEnabled(bool enable);
    bool getEnabled() const;

private:
    void updateMode();

    enum Mode {
        FLAT,
        SITTING,
        STANDING
    };

    Mode _mode = FLAT;
    bool _enabled = true;
};

#endif
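Editor's note: a short sketch of how an application update loop might drive the conductor; the function and parameter names here are placeholders, not part of this commit:

// Sketch: per-frame driver for OverlayConductor.
void idleStep(OverlayConductor& conductor, float deltaTime, bool toggleRequested) {
    conductor.update(deltaTime); // re-evaluates FLAT / SITTING / STANDING each frame
    if (toggleRequested) {
        conductor.setEnabled(!conductor.getEnabled());
    }
}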
@ -16,7 +16,7 @@
|
|||
#include <avatar/AvatarManager.h>
|
||||
#include <devices/DdeFaceTracker.h>
|
||||
#include <devices/Faceshift.h>
|
||||
#include <devices/SixenseManager.h>
|
||||
#include <input-plugins/SixenseManager.h> // TODO: This should be replaced with InputDevice/InputPlugin, or something similar
|
||||
#include <NetworkingConstants.h>
|
||||
|
||||
#include "Application.h"
|
||||
|
@ -177,10 +177,13 @@ void PreferencesDialog::loadPreferences() {
|
|||
|
||||
ui.maxOctreePPSSpin->setValue(qApp->getMaxOctreePacketsPerSecond());
|
||||
|
||||
#if 0
|
||||
ui.oculusUIAngularSizeSpin->setValue(qApp->getApplicationCompositor().getHmdUIAngularSize());
|
||||
#endif
|
||||
|
||||
ui.sixenseReticleMoveSpeedSpin->setValue(InputDevice::getReticleMoveSpeed());
|
||||
|
||||
SixenseManager& sixense = SixenseManager::getInstance();
|
||||
ui.sixenseReticleMoveSpeedSpin->setValue(sixense.getReticleMoveSpeed());
|
||||
ui.invertSixenseButtonsCheckBox->setChecked(sixense.getInvertButtons());
|
||||
|
||||
// LOD items
|
||||
|
@ -244,7 +247,7 @@ void PreferencesDialog::savePreferences() {
|
|||
qApp->getApplicationCompositor().setHmdUIAngularSize(ui.oculusUIAngularSizeSpin->value());
|
||||
|
||||
SixenseManager& sixense = SixenseManager::getInstance();
|
||||
sixense.setReticleMoveSpeed(ui.sixenseReticleMoveSpeedSpin->value());
|
||||
InputDevice::setReticleMoveSpeed(ui.sixenseReticleMoveSpeedSpin->value());
|
||||
sixense.setInvertButtons(ui.invertSixenseButtonsCheckBox->isChecked());
|
||||
|
||||
auto audio = DependencyManager::get<AudioClient>();
|
||||
|
|
|
@ -22,7 +22,6 @@
|
|||
#include <avatar/AvatarManager.h>
|
||||
#include <avatar/MyAvatar.h>
|
||||
#include <FileUtils.h>
|
||||
#include <GLCanvas.h>
|
||||
#include <NodeList.h>
|
||||
|
||||
#include "Application.h"
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
#include <avatar/AvatarManager.h>
|
||||
#include <Application.h>
|
||||
#include <GeometryCache.h>
|
||||
#include <GLCanvas.h>
|
||||
#include <LODManager.h>
|
||||
#include <PerfStat.h>
|
||||
|
||||
|
|
|
@ -104,7 +104,7 @@ void Overlays::renderHUD(RenderArgs* renderArgs) {
|
|||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
|
||||
auto size = qApp->getCanvasSize();
|
||||
auto size = qApp->getUiSize();
|
||||
int width = size.x;
|
||||
int height = size.y;
|
||||
mat4 legacyProjection = glm::ortho<float>(0, width, height, 0, -1000, 1000);
|
||||
|
@ -123,6 +123,16 @@ void Overlays::renderHUD(RenderArgs* renderArgs) {
|
|||
}
|
||||
}
|
||||
|
||||
void Overlays::disable() {
|
||||
QWriteLocker lock(&_lock);
|
||||
_enabled = false;
|
||||
}
|
||||
|
||||
void Overlays::enable() {
|
||||
QWriteLocker lock(&_lock);
|
||||
_enabled = true;
|
||||
}
|
||||
|
||||
Overlay::Pointer Overlays::getOverlay(unsigned int id) const {
|
||||
if (_overlaysHUD.contains(id)) {
|
||||
return _overlaysHUD[id];
|
||||
|
@ -323,6 +333,9 @@ unsigned int Overlays::getOverlayAtPoint(const glm::vec2& point) {
|
|||
}
|
||||
|
||||
QReadLocker lock(&_lock);
|
||||
if (!_enabled) {
|
||||
return 0;
|
||||
}
|
||||
QMapIterator<unsigned int, Overlay::Pointer> i(_overlaysHUD);
|
||||
i.toBack();
|
||||
|
||||
|
|
|
@ -65,6 +65,8 @@ public:
|
|||
void init();
|
||||
void update(float deltatime);
|
||||
void renderHUD(RenderArgs* renderArgs);
|
||||
void disable();
|
||||
void enable();
|
||||
|
||||
Overlay::Pointer getOverlay(unsigned int id) const;
|
||||
OverlayPanel::Pointer getPanel(unsigned int id) const { return _panels[id]; }
|
||||
|
@ -147,6 +149,7 @@ private:
|
|||
QReadWriteLock _lock;
|
||||
QReadWriteLock _deleteLock;
|
||||
QScriptEngine* _scriptEngine;
|
||||
bool _enabled = true;
|
||||
};
|
||||
|
||||
|
||||
|
|
33 interface/ui/temp.qml Normal file
@@ -0,0 +1,33 @@
import QtQuick 2.4
import QtQuick.Controls 1.3
import QtQuick.Controls.Styles 1.3


Item {
    implicitHeight: 200
    implicitWidth: 800


    TextArea {
        id: gutter
        anchors.left: parent.left
        anchors.top: parent.top
        anchors.bottom: parent.bottom
        style: TextAreaStyle {
            backgroundColor: "grey"
        }
        width: 16
        text: ">"
        font.family: "Lucida Console"
    }
    TextArea {
        anchors.left: gutter.right
        anchors.top: parent.top
        anchors.bottom: parent.bottom
        anchors.right: parent.right
        text: "undefined"
        font.family: "Lucida Console"

    }
}
@@ -192,7 +192,7 @@
    void setBodyRoll(float bodyRoll) { _bodyRoll = bodyRoll; }

    glm::quat getOrientation() const;
    void setOrientation(const glm::quat& orientation, bool overideReferential = false);
    virtual void setOrientation(const glm::quat& orientation, bool overideReferential = false);

    glm::quat getHeadOrientation() const { return _headData->getOrientation(); }
    void setHeadOrientation(const glm::quat& orientation) { _headData->setOrientation(orientation); }
34 libraries/display-plugins/CMakeLists.txt Normal file
@@ -0,0 +1,34 @@
set(TARGET_NAME display-plugins)

# use setup_hifi_library macro to setup our project and link appropriate Qt modules
setup_hifi_library(OpenGL)

setup_hifi_opengl()

link_hifi_libraries(shared plugins gpu render-utils)

GroupSources("src/display-plugins")

add_dependency_external_projects(glm)
find_package(GLM REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${GLM_INCLUDE_DIRS})

add_dependency_external_projects(boostconfig)
find_package(BoostConfig REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${BOOSTCONFIG_INCLUDE_DIRS})

add_dependency_external_projects(oglplus)
find_package(OGLPLUS REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${OGLPLUS_INCLUDE_DIRS})

add_dependency_external_projects(LibOVR)
find_package(LibOVR REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVR_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${LIBOVR_LIBRARIES})

if (WIN32)
    add_dependency_external_projects(OpenVR)
    find_package(OpenVR REQUIRED)
    target_include_directories(${TARGET_NAME} PRIVATE ${OPENVR_INCLUDE_DIRS})
    target_link_libraries(${TARGET_NAME} ${OPENVR_LIBRARIES})
endif()
@@ -0,0 +1,36 @@
//
//  Created by Bradley Austin Davis on 2015/05/29
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Basic2DWindowOpenGLDisplayPlugin.h"

#include <plugins/PluginContainer.h>
#include <QWindow>

const QString Basic2DWindowOpenGLDisplayPlugin::NAME("2D Display");

const QString MENU_PARENT = "View";
const QString MENU_NAME = "Display Options";
const QString MENU_PATH = MENU_PARENT + ">" + MENU_NAME;
const QString FULLSCREEN = "Fullscreen";

const QString& Basic2DWindowOpenGLDisplayPlugin::getName() const {
    return NAME;
}

void Basic2DWindowOpenGLDisplayPlugin::activate() {
//    container->addMenu(MENU_PATH);
//    container->addMenuItem(MENU_PATH, FULLSCREEN,
//        [this] (bool clicked) { this->setFullscreen(clicked); },
//        true, false);
    MainWindowOpenGLDisplayPlugin::activate();
}

void Basic2DWindowOpenGLDisplayPlugin::deactivate() {
//    container->removeMenuItem(MENU_NAME, FULLSCREEN);
//    container->removeMenu(MENU_PATH);
    MainWindowOpenGLDisplayPlugin::deactivate();
}
@ -0,0 +1,23 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "MainWindowOpenGLDisplayPlugin.h"
|
||||
|
||||
class Basic2DWindowOpenGLDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
virtual void activate() override;
|
||||
virtual void deactivate() override;
|
||||
|
||||
virtual const QString & getName() const override;
|
||||
|
||||
private:
|
||||
static const QString NAME;
|
||||
};
|
|
@ -0,0 +1,52 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "DisplayPlugin.h"
|
||||
|
||||
#include <plugins/PluginManager.h>
|
||||
|
||||
#include "NullDisplayPlugin.h"
|
||||
#include "stereo/SideBySideStereoDisplayPlugin.h"
|
||||
#include "stereo/InterleavedStereoDisplayPlugin.h"
|
||||
#include "Basic2DWindowOpenGLDisplayPlugin.h"
|
||||
|
||||
#include "openvr/OpenVrDisplayPlugin.h"
|
||||
#include "oculus/Oculus_0_5_DisplayPlugin.h"
|
||||
#include "oculus/Oculus_0_6_DisplayPlugin.h"
|
||||
|
||||
// TODO migrate to a DLL model where plugins are discovered and loaded at runtime by the PluginManager class
|
||||
DisplayPluginList getDisplayPlugins() {
|
||||
DisplayPlugin* PLUGIN_POOL[] = {
|
||||
new Basic2DWindowOpenGLDisplayPlugin(),
|
||||
#ifdef DEBUG
|
||||
new NullDisplayPlugin(),
|
||||
#endif
|
||||
|
||||
// Stereo modes
|
||||
// FIXME fix stereo display plugins
|
||||
//new SideBySideStereoDisplayPlugin(),
|
||||
//new InterleavedStereoDisplayPlugin(),
|
||||
|
||||
// HMDs
|
||||
new Oculus_0_5_DisplayPlugin(),
|
||||
new Oculus_0_6_DisplayPlugin(),
|
||||
#ifdef Q_OS_WIN
|
||||
new OpenVrDisplayPlugin(),
|
||||
#endif
|
||||
nullptr
|
||||
};
|
||||
|
||||
DisplayPluginList result;
|
||||
for (int i = 0; PLUGIN_POOL[i]; ++i) {
|
||||
DisplayPlugin * plugin = PLUGIN_POOL[i];
|
||||
if (plugin->isSupported()) {
|
||||
plugin->init();
|
||||
result.push_back(DisplayPluginPointer(plugin));
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
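
// Illustrative sketch (not part of this commit): one way an application could consume the
// list returned above. getDisplayPlugins() already filters by isSupported() and calls init(),
// so the caller only has to pick one and activate it; the helper name is a placeholder.
DisplayPluginPointer activateFirstSupported() {
    DisplayPluginList plugins = getDisplayPlugins();
    if (plugins.empty()) {
        return DisplayPluginPointer();
    }
    DisplayPluginPointer chosen = plugins.front();
    chosen->activate();  // assumed to be called once the GL surface exists
    return chosen;
}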
|
128
libraries/display-plugins/src/display-plugins/DisplayPlugin.h
Normal file
|
@ -0,0 +1,128 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "plugins/Plugin.h"
|
||||
|
||||
#include <QSize>
|
||||
#include <QPoint>
|
||||
#include <functional>
|
||||
|
||||
#include "gpu/GPUConfig.h"
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
#include <glm/gtc/quaternion.hpp>
|
||||
#include <RegisteredMetaTypes.h>
|
||||
|
||||
enum Eye {
|
||||
Left,
|
||||
Right,
|
||||
Mono
|
||||
};
|
||||
|
||||
/*
|
||||
* Helper method to iterate over each eye
|
||||
*/
|
||||
template <typename F>
|
||||
void for_each_eye(F f) {
|
||||
f(Left);
|
||||
f(Right);
|
||||
}
|
||||
|
||||
/*
|
||||
* Helper method to iterate over each eye, with an additional lambda to take action between the eyes
|
||||
*/
|
||||
template <typename F, typename FF>
|
||||
void for_each_eye(F f, FF ff) {
|
||||
f(Eye::Left);
|
||||
ff();
|
||||
f(Eye::Right);
|
||||
}
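
// Illustrative usage (sketch only; the viewport values are placeholders): run the same
// work for both eyes, with a separator action between the two passes.
inline void exampleEyeLoop() {
    int viewportX[2] = { 0, 0 };
    for_each_eye(
        [&](Eye eye) { viewportX[eye] = (eye == Right) ? 960 : 0; },  // left half / right half
        [&] { /* e.g. flush state or change the viewport between the two eye passes */ });
}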
|
||||
|
||||
class QWindow;
|
||||
|
||||
class DisplayPlugin : public Plugin {
|
||||
Q_OBJECT
|
||||
public:
|
||||
virtual bool isHmd() const { return false; }
|
||||
virtual int getHmdScreen() const { return -1; }
|
||||
/// By default, all HMDs are stereo
|
||||
virtual bool isStereo() const { return isHmd(); }
|
||||
virtual bool isThrottled() const { return false; }
|
||||
|
||||
// Rendering support
|
||||
|
||||
/**
|
||||
* Called by the application before the frame rendering. Can be used for
|
||||
* render timing related calls (for instance, the Oculus begin frame timing
|
||||
* call)
|
||||
*/
|
||||
virtual void preRender() = 0;
|
||||
/**
|
||||
* Called by the application immediately before calling the display function.
|
||||
* For OpenGL based plugins, this is the best place to activate the output
|
||||
* OpenGL context
|
||||
*/
|
||||
virtual void preDisplay() = 0;
|
||||
|
||||
/**
|
||||
* Sends the scene texture to the display plugin.
|
||||
*/
|
||||
virtual void display(GLuint sceneTexture, const glm::uvec2& sceneSize) = 0;
|
||||
|
||||
/**
|
||||
* Called by the application immediately after display. For OpenGL based
|
||||
* displays, this is the best place to put the buffer swap
|
||||
*/
|
||||
virtual void finishFrame() = 0;
|
||||
|
||||
// Does the rendering surface have current focus?
|
||||
virtual bool hasFocus() const = 0;
|
||||
|
||||
// The size of the rendering target (may be larger than the device size due to distortion)
|
||||
virtual glm::uvec2 getRecommendedRenderSize() const = 0;
|
||||
|
||||
// The size of the UI
|
||||
virtual glm::uvec2 getRecommendedUiSize() const {
|
||||
return getRecommendedRenderSize();
|
||||
}
|
||||
|
||||
// Stereo specific methods
|
||||
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const {
|
||||
return baseProjection;
|
||||
}
|
||||
|
||||
virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const {
|
||||
return glm::inverse(getEyePose(eye)) * baseModelview;
|
||||
}
|
||||
|
||||
// HMD specific methods
|
||||
// TODO move these into another class
|
||||
virtual glm::mat4 getEyePose(Eye eye) const {
|
||||
static const glm::mat4 pose; return pose;
|
||||
}
|
||||
|
||||
virtual glm::mat4 getHeadPose() const {
|
||||
static const glm::mat4 pose; return pose;
|
||||
}
|
||||
|
||||
virtual void abandonCalibration() {}
|
||||
virtual void resetSensors() {}
|
||||
virtual float devicePixelRatio() { return 1.0; }
|
||||
|
||||
//// The window for the surface, used for event interception. May be null.
|
||||
//virtual QWindow* getWindow() const = 0;
|
||||
|
||||
//virtual void installEventFilter(QObject* filter) {}
|
||||
//virtual void removeEventFilter(QObject* filter) {}
|
||||
|
||||
signals:
|
||||
void recommendedFramebufferSizeChanged(const QSize & size);
|
||||
void requestRender();
|
||||
};
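
// Taken together, the method documentation above implies a per-frame call order driven by
// the application. The sketch below is illustrative only (the function name and the
// sceneTexture/sceneSize parameters are placeholders, not part of this interface).
inline void renderOneFrame(DisplayPlugin* plugin, GLuint sceneTexture, const glm::uvec2& sceneSize) {
    plugin->preRender();                      // e.g. Oculus begin-frame timing
    // ... the application renders the scene into sceneTexture here ...
    plugin->preDisplay();                     // make the output GL context current
    plugin->display(sceneTexture, sceneSize); // hand the composited texture to the plugin
    plugin->finishFrame();                    // buffer swap / frame submission
}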
|
||||
|
|
@@ -0,0 +1,9 @@

//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "MainWindowOpenGLDisplayPlugin.h"

@@ -0,0 +1,13 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once

#include "WindowOpenGLDisplayPlugin.h"

class MainWindowOpenGLDisplayPlugin : public WindowOpenGLDisplayPlugin {
};

@ -0,0 +1,32 @@
|
|||
//
|
||||
// NullDisplayPlugin.cpp
|
||||
//
|
||||
// Created by Bradley Austin Davis on 2014/04/13.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "NullDisplayPlugin.h"
|
||||
|
||||
const QString NullDisplayPlugin::NAME("NullDisplayPlugin");
|
||||
|
||||
const QString & NullDisplayPlugin::getName() const {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
glm::uvec2 NullDisplayPlugin::getRecommendedRenderSize() const {
|
||||
return glm::uvec2(100, 100);
|
||||
}
|
||||
|
||||
bool NullDisplayPlugin::hasFocus() const {
|
||||
return false;
|
||||
}
|
||||
|
||||
void NullDisplayPlugin::preRender() {}
|
||||
void NullDisplayPlugin::preDisplay() {}
|
||||
void NullDisplayPlugin::display(GLuint sceneTexture, const glm::uvec2& sceneSize) {}
|
||||
void NullDisplayPlugin::finishFrame() {}
|
||||
|
||||
void NullDisplayPlugin::activate() {}
|
||||
void NullDisplayPlugin::deactivate() {}
|
|
@ -0,0 +1,30 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "DisplayPlugin.h"
|
||||
|
||||
class NullDisplayPlugin : public DisplayPlugin {
|
||||
public:
|
||||
|
||||
virtual ~NullDisplayPlugin() final {}
|
||||
virtual const QString & getName() const override;
|
||||
|
||||
void activate() override;
|
||||
void deactivate() override;
|
||||
|
||||
virtual glm::uvec2 getRecommendedRenderSize() const override;
|
||||
virtual bool hasFocus() const override;
|
||||
virtual void preRender() override;
|
||||
virtual void preDisplay() override;
|
||||
virtual void display(GLuint sceneTexture, const glm::uvec2& sceneSize) override;
|
||||
virtual void finishFrame() override;
|
||||
|
||||
private:
|
||||
static const QString NAME;
|
||||
};
|
|
@ -0,0 +1,117 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "OpenGLDisplayPlugin.h"
|
||||
|
||||
#include <QOpenGLContext>
|
||||
#include <QCoreApplication>
|
||||
|
||||
#include <GlWindow.h>
|
||||
#include <GLMHelpers.h>
|
||||
|
||||
|
||||
OpenGLDisplayPlugin::OpenGLDisplayPlugin() {
|
||||
connect(&_timer, &QTimer::timeout, this, [&] {
|
||||
emit requestRender();
|
||||
});
|
||||
}
|
||||
|
||||
OpenGLDisplayPlugin::~OpenGLDisplayPlugin() {
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::preDisplay() {
|
||||
makeCurrent();
|
||||
};
|
||||
|
||||
void OpenGLDisplayPlugin::preRender() {
|
||||
// NOOP
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::finishFrame() {
|
||||
swapBuffers();
|
||||
doneCurrent();
|
||||
};
|
||||
|
||||
void OpenGLDisplayPlugin::customizeContext() {
|
||||
using namespace oglplus;
|
||||
Context::BlendFunc(BlendFunction::SrcAlpha, BlendFunction::OneMinusSrcAlpha);
|
||||
Context::Disable(Capability::Blend);
|
||||
Context::Disable(Capability::DepthTest);
|
||||
Context::Disable(Capability::CullFace);
|
||||
|
||||
|
||||
_program = loadDefaultShader();
|
||||
_plane = loadPlane(_program);
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::activate() {
|
||||
_timer.start(1);
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::deactivate() {
|
||||
_timer.stop();
|
||||
|
||||
makeCurrent();
|
||||
Q_ASSERT(0 == glGetError());
|
||||
_program.reset();
|
||||
_plane.reset();
|
||||
doneCurrent();
|
||||
}
|
||||
|
||||
// Pressing the Alt (or Meta) key alone activates the menubar because its style inherits the
|
||||
// SHMenuBarAltKeyNavigation from QWindowsStyle. This makes it impossible for scripts to
|
||||
// receive keyPress events for the Alt (and Meta) key in a reliable manner.
|
||||
//
|
||||
// This filter catches events before QMenuBar can steal the keyboard focus.
|
||||
// The idea was borrowed from
|
||||
// http://www.archivum.info/qt-interest@trolltech.com/2006-09/00053/Re-(Qt4)-Alt-key-focus-QMenuBar-(solved).html
|
||||
|
||||
// Pass input events on to the application
|
||||
bool OpenGLDisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
|
||||
switch (event->type()) {
|
||||
case QEvent::MouseButtonPress:
|
||||
case QEvent::MouseButtonRelease:
|
||||
case QEvent::MouseButtonDblClick:
|
||||
case QEvent::MouseMove:
|
||||
case QEvent::Wheel:
|
||||
|
||||
case QEvent::TouchBegin:
|
||||
case QEvent::TouchEnd:
|
||||
case QEvent::TouchUpdate:
|
||||
|
||||
case QEvent::FocusIn:
|
||||
case QEvent::FocusOut:
|
||||
|
||||
case QEvent::KeyPress:
|
||||
case QEvent::KeyRelease:
|
||||
case QEvent::ShortcutOverride:
|
||||
|
||||
case QEvent::DragEnter:
|
||||
case QEvent::Drop:
|
||||
|
||||
case QEvent::Resize:
|
||||
if (QCoreApplication::sendEvent(QCoreApplication::instance(), event)) {
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
return false;
|
||||
}
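
// Illustrative wiring (sketch only, not part of this file): the filter above only takes
// effect once the plugin is installed on the rendering surface, as the window-based
// subclasses do. Assumes <plugins/PluginContainer.h> and the QGLWidget header are
// available to the caller.
static void installSurfaceFilterSketch(OpenGLDisplayPlugin* plugin) {
    QGLWidget* surface = CONTAINER->getPrimarySurface();  // as WindowOpenGLDisplayPlugin::activate() does
    surface->installEventFilter(plugin);                  // surface events now flow through eventFilter()
}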
|
||||
|
||||
void OpenGLDisplayPlugin::display(
|
||||
GLuint finalTexture, const glm::uvec2& sceneSize) {
|
||||
using namespace oglplus;
|
||||
uvec2 size = getRecommendedRenderSize();
|
||||
Context::Viewport(size.x, size.y);
|
||||
glBindTexture(GL_TEXTURE_2D, finalTexture);
|
||||
drawUnitQuad();
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::drawUnitQuad() {
|
||||
_program->Bind();
|
||||
_plane->Draw();
|
||||
}
|
|
@ -0,0 +1,45 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include <QTimer>
|
||||
|
||||
#include "DisplayPlugin.h"
|
||||
#include "OglplusHelpers.h"
|
||||
|
||||
class GlWindow;
|
||||
class QOpenGLContext;
|
||||
|
||||
class OpenGLDisplayPlugin : public DisplayPlugin {
|
||||
public:
|
||||
OpenGLDisplayPlugin();
|
||||
virtual ~OpenGLDisplayPlugin();
|
||||
virtual void preRender() override;
|
||||
virtual void preDisplay() override;
|
||||
virtual void finishFrame() override;
|
||||
|
||||
virtual void activate() override;
|
||||
virtual void deactivate() override;
|
||||
|
||||
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
|
||||
|
||||
virtual void display(GLuint sceneTexture, const glm::uvec2& sceneSize) override;
|
||||
|
||||
protected:
|
||||
virtual void customizeContext();
|
||||
virtual void drawUnitQuad();
|
||||
virtual void makeCurrent() = 0;
|
||||
virtual void doneCurrent() = 0;
|
||||
virtual void swapBuffers() = 0;
|
||||
|
||||
QTimer _timer;
|
||||
ProgramPtr _program;
|
||||
ShapeWrapperPtr _plane;
|
||||
};
|
||||
|
||||
|
|
@ -0,0 +1,61 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "WindowOpenGLDisplayPlugin.h"
|
||||
|
||||
#include <QGLWidget>
|
||||
#include <QOpenGLContext>
|
||||
|
||||
#include "plugins/PluginContainer.h"
|
||||
|
||||
WindowOpenGLDisplayPlugin::WindowOpenGLDisplayPlugin() {
|
||||
}
|
||||
|
||||
glm::uvec2 WindowOpenGLDisplayPlugin::getRecommendedRenderSize() const {
|
||||
uvec2 result;
|
||||
if (_window) {
|
||||
result = toGlm(_window->geometry().size() * _window->devicePixelRatio());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
glm::uvec2 WindowOpenGLDisplayPlugin::getRecommendedUiSize() const {
|
||||
uvec2 result;
|
||||
if (_window) {
|
||||
result = toGlm(_window->geometry().size());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
bool WindowOpenGLDisplayPlugin::hasFocus() const {
|
||||
return _window ? _window->hasFocus() : false;
|
||||
}
|
||||
|
||||
void WindowOpenGLDisplayPlugin::activate() {
|
||||
OpenGLDisplayPlugin::activate();
|
||||
_window = CONTAINER->getPrimarySurface();
|
||||
_window->makeCurrent();
|
||||
customizeContext();
|
||||
_window->doneCurrent();
|
||||
}
|
||||
|
||||
void WindowOpenGLDisplayPlugin::deactivate() {
|
||||
OpenGLDisplayPlugin::deactivate();
|
||||
_window = nullptr;
|
||||
}
|
||||
|
||||
void WindowOpenGLDisplayPlugin::makeCurrent() {
|
||||
_window->makeCurrent();
|
||||
}
|
||||
|
||||
void WindowOpenGLDisplayPlugin::doneCurrent() {
|
||||
_window->doneCurrent();
|
||||
}
|
||||
|
||||
void WindowOpenGLDisplayPlugin::swapBuffers() {
|
||||
_window->swapBuffers();
|
||||
}
|
|
@ -0,0 +1,28 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "OpenGLDisplayPlugin.h"
|
||||
|
||||
class QGLWidget;
|
||||
|
||||
class WindowOpenGLDisplayPlugin : public OpenGLDisplayPlugin {
|
||||
public:
|
||||
WindowOpenGLDisplayPlugin();
|
||||
virtual glm::uvec2 getRecommendedRenderSize() const override;
|
||||
virtual glm::uvec2 getRecommendedUiSize() const override;
|
||||
virtual bool hasFocus() const override;
|
||||
virtual void activate() override;
|
||||
virtual void deactivate() override;
|
||||
|
||||
protected:
|
||||
virtual void makeCurrent() override;
|
||||
virtual void doneCurrent() override;
|
||||
virtual void swapBuffers() override;
|
||||
QGLWidget* _window{ nullptr };
|
||||
};
|
|
@ -0,0 +1,76 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "OculusBaseDisplayPlugin.h"
|
||||
|
||||
#include <ViewFrustum.h>
|
||||
|
||||
#include "OculusHelpers.h"
|
||||
|
||||
|
||||
using namespace Oculus;
|
||||
|
||||
void OculusBaseDisplayPlugin::activate() {
|
||||
glm::uvec2 eyeSizes[2];
|
||||
ovr_for_each_eye([&](ovrEyeType eye) {
|
||||
_eyeFovs[eye] = _hmd->MaxEyeFov[eye];
|
||||
ovrEyeRenderDesc& erd = _eyeRenderDescs[eye] = ovrHmd_GetRenderDesc(_hmd, eye, _eyeFovs[eye]);
|
||||
ovrMatrix4f ovrPerspectiveProjection =
|
||||
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
|
||||
_eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
|
||||
|
||||
ovrPerspectiveProjection =
|
||||
ovrMatrix4f_Projection(erd.Fov, 0.001f, 10.0f, ovrProjection_RightHanded);
|
||||
_compositeEyeProjections[eye] = toGlm(ovrPerspectiveProjection);
|
||||
|
||||
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
|
||||
eyeSizes[eye] = toGlm(ovrHmd_GetFovTextureSize(_hmd, eye, erd.Fov, 1.0f));
|
||||
});
|
||||
_desiredFramebufferSize = uvec2(
|
||||
eyeSizes[0].x + eyeSizes[1].x,
|
||||
std::max(eyeSizes[0].y, eyeSizes[1].y));
|
||||
|
||||
_frameIndex = 0;
|
||||
|
||||
if (!OVR_SUCCESS(ovrHmd_ConfigureTracking(_hmd,
|
||||
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
|
||||
qFatal("Could not attach to sensor device");
|
||||
}
|
||||
|
||||
MainWindowOpenGLDisplayPlugin::activate();
|
||||
}
|
||||
|
||||
uvec2 OculusBaseDisplayPlugin::getRecommendedRenderSize() const {
|
||||
return _desiredFramebufferSize;
|
||||
}
|
||||
|
||||
void OculusBaseDisplayPlugin::preRender() {
|
||||
ovrHmd_GetEyePoses(_hmd, _frameIndex, _eyeOffsets, _eyePoses, nullptr);
|
||||
}
|
||||
|
||||
glm::mat4 OculusBaseDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
|
||||
return _eyeProjections[eye];
|
||||
}
|
||||
|
||||
glm::mat4 OculusBaseDisplayPlugin::getModelview(Eye eye, const glm::mat4& baseModelview) const {
|
||||
return baseModelview * toGlm(_eyePoses[eye]);
|
||||
}
|
||||
|
||||
void OculusBaseDisplayPlugin::resetSensors() {
|
||||
ovrHmd_RecenterPose(_hmd);
|
||||
}
|
||||
|
||||
glm::mat4 OculusBaseDisplayPlugin::getEyePose(Eye eye) const {
|
||||
return toGlm(_eyePoses[eye]);
|
||||
}
|
||||
|
||||
// Should NOT be used for rendering as this will mess up timewarp. Use the getModelview() method above for
|
||||
// any use of head poses for rendering, ensuring you use the correct eye
|
||||
glm::mat4 OculusBaseDisplayPlugin::getHeadPose() const {
|
||||
ovrTrackingState state = ovrHmd_GetTrackingState(_hmd, 0.0f);
|
||||
return toGlm(state.HeadPose.ThePose);
|
||||
}
|
|
@ -0,0 +1,30 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "../MainWindowOpenGLDisplayPlugin.h"
|
||||
|
||||
class OculusBaseDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
|
||||
public:
|
||||
// Stereo specific methods
|
||||
virtual bool isHmd() const override { return true; }
|
||||
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
|
||||
virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const override;
|
||||
virtual void activate() override;
|
||||
virtual void preRender() override;
|
||||
virtual glm::uvec2 getRecommendedRenderSize() const override;
|
||||
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
|
||||
virtual void resetSensors() override;
|
||||
virtual glm::mat4 getEyePose(Eye eye) const override;
|
||||
virtual glm::mat4 getHeadPose() const override;
|
||||
|
||||
};
|
||||
|
||||
#if (OVR_MAJOR_VERSION < 6)
|
||||
#define OVR_SUCCESS(x) x
|
||||
#endif
|
|
@ -0,0 +1,24 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/08/08
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "OculusHelpers.h"
|
||||
|
||||
|
||||
namespace Oculus {
|
||||
ovrHmd _hmd;
|
||||
unsigned int _frameIndex{ 0 };
|
||||
ovrEyeRenderDesc _eyeRenderDescs[2];
|
||||
ovrPosef _eyePoses[2];
|
||||
ovrVector3f _eyeOffsets[2];
|
||||
ovrFovPort _eyeFovs[2];
|
||||
mat4 _eyeProjections[2];
|
||||
mat4 _compositeEyeProjections[2];
|
||||
uvec2 _desiredFramebufferSize;
|
||||
}
|
||||
|
||||
|
|
@ -0,0 +1,88 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/26
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include <OVR_CAPI.h>
|
||||
#include <GLMHelpers.h>
|
||||
#include <glm/gtc/type_ptr.hpp>
|
||||
#include <glm/gtc/matrix_transform.hpp>
|
||||
|
||||
// Convenience method for looping over each eye with a lambda
|
||||
template <typename Function>
|
||||
inline void ovr_for_each_eye(Function function) {
|
||||
for (ovrEyeType eye = ovrEyeType::ovrEye_Left;
|
||||
eye < ovrEyeType::ovrEye_Count;
|
||||
eye = static_cast<ovrEyeType>(eye + 1)) {
|
||||
function(eye);
|
||||
}
|
||||
}
|
||||
|
||||
inline glm::mat4 toGlm(const ovrMatrix4f & om) {
|
||||
return glm::transpose(glm::make_mat4(&om.M[0][0]));
|
||||
}
|
||||
|
||||
inline glm::mat4 toGlm(const ovrFovPort & fovport, float nearPlane = 0.01f, float farPlane = 10000.0f) {
|
||||
return toGlm(ovrMatrix4f_Projection(fovport, nearPlane, farPlane, true));
|
||||
}
|
||||
|
||||
inline glm::vec3 toGlm(const ovrVector3f & ov) {
|
||||
return glm::make_vec3(&ov.x);
|
||||
}
|
||||
|
||||
inline glm::vec2 toGlm(const ovrVector2f & ov) {
|
||||
return glm::make_vec2(&ov.x);
|
||||
}
|
||||
|
||||
inline glm::uvec2 toGlm(const ovrSizei & ov) {
|
||||
return glm::uvec2(ov.w, ov.h);
|
||||
}
|
||||
|
||||
inline glm::quat toGlm(const ovrQuatf & oq) {
|
||||
return glm::make_quat(&oq.x);
|
||||
}
|
||||
|
||||
inline glm::mat4 toGlm(const ovrPosef & op) {
|
||||
glm::mat4 orientation = glm::mat4_cast(toGlm(op.Orientation));
|
||||
glm::mat4 translation = glm::translate(glm::mat4(), toGlm(op.Position));
|
||||
return translation * orientation;
|
||||
}
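
// Illustrative check (not from the original source): because the pose conversion composes
// translation after rotation, transforming a point p with the result is rotate-then-translate,
// i.e. R * p + t. The helper name below is a placeholder.
inline glm::vec3 transformPointByPose(const ovrPosef & op, const glm::vec3 & p) {
    glm::mat4 m = toGlm(op);
    return glm::vec3(m * glm::vec4(p, 1.0f));  // same as toGlm(op.Orientation) * p + toGlm(op.Position)
}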
|
||||
|
||||
inline ovrMatrix4f ovrFromGlm(const glm::mat4 & m) {
|
||||
ovrMatrix4f result;
|
||||
glm::mat4 transposed(glm::transpose(m));
|
||||
memcpy(result.M, &(transposed[0][0]), sizeof(float) * 16);
|
||||
return result;
|
||||
}
|
||||
|
||||
inline ovrVector3f ovrFromGlm(const glm::vec3 & v) {
|
||||
return{ v.x, v.y, v.z };
|
||||
}
|
||||
|
||||
inline ovrVector2f ovrFromGlm(const glm::vec2 & v) {
|
||||
return{ v.x, v.y };
|
||||
}
|
||||
|
||||
inline ovrSizei ovrFromGlm(const glm::uvec2 & v) {
|
||||
return{ (int)v.x, (int)v.y };
|
||||
}
|
||||
|
||||
inline ovrQuatf ovrFromGlm(const glm::quat & q) {
|
||||
return{ q.x, q.y, q.z, q.w };
|
||||
}
|
||||
|
||||
namespace Oculus {
|
||||
extern ovrHmd _hmd;
|
||||
extern unsigned int _frameIndex;
|
||||
extern ovrEyeRenderDesc _eyeRenderDescs[2];
|
||||
extern ovrPosef _eyePoses[2];
|
||||
extern ovrVector3f _eyeOffsets[2];
|
||||
extern ovrFovPort _eyeFovs[2];
|
||||
extern mat4 _eyeProjections[2];
|
||||
extern mat4 _compositeEyeProjections[2];
|
||||
extern uvec2 _desiredFramebufferSize;
|
||||
}
|
|
@ -0,0 +1,192 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2014/04/13.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "Oculus_0_5_DisplayPlugin.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include <QMainWindow>
|
||||
#include <QGLWidget>
|
||||
#include <GLMHelpers.h>
|
||||
#include <GlWindow.h>
|
||||
#include <QEvent>
|
||||
#include <QResizeEvent>
|
||||
#include <QOpenGLContext>
|
||||
#include <QGuiApplication>
|
||||
#include <QScreen>
|
||||
|
||||
|
||||
#include <OVR_CAPI_GL.h>
|
||||
|
||||
#include <PerfStat.h>
|
||||
#include <OglplusHelpers.h>
|
||||
|
||||
#include "plugins/PluginContainer.h"
|
||||
#include "OculusHelpers.h"
|
||||
|
||||
using namespace Oculus;
|
||||
ovrTexture _eyeTextures[2];
|
||||
int _hmdScreen{ -1 };
|
||||
bool _hswDismissed{ false };
|
||||
|
||||
DisplayPlugin* makeOculusDisplayPlugin() {
|
||||
return new Oculus_0_5_DisplayPlugin();
|
||||
}
|
||||
|
||||
using namespace oglplus;
|
||||
|
||||
const QString Oculus_0_5_DisplayPlugin::NAME("Oculus Rift (0.5)");
|
||||
|
||||
const QString & Oculus_0_5_DisplayPlugin::getName() const {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
|
||||
bool Oculus_0_5_DisplayPlugin::isSupported() const {
|
||||
#if (OVR_MAJOR_VERSION == 5)
|
||||
if (!ovr_Initialize(nullptr)) {
|
||||
return false;
|
||||
}
|
||||
bool result = false;
|
||||
if (ovrHmd_Detect() > 0) {
|
||||
result = true;
|
||||
}
|
||||
|
||||
auto hmd = ovrHmd_Create(0);
|
||||
if (hmd) {
|
||||
QPoint targetPosition{ hmd->WindowsPos.x, hmd->WindowsPos.y };
|
||||
auto screens = qApp->screens();
|
||||
for(int i = 0; i < screens.size(); ++i) {
|
||||
auto screen = screens[i];
|
||||
QPoint position = screen->geometry().topLeft();
|
||||
if (position == targetPosition) {
|
||||
_hmdScreen = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ovr_Shutdown();
|
||||
return result;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
void Oculus_0_5_DisplayPlugin::activate() {
|
||||
#if (OVR_MAJOR_VERSION == 5)
|
||||
if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
|
||||
Q_ASSERT(false);
|
||||
qFatal("Failed to Initialize SDK");
|
||||
}
|
||||
_hswDismissed = false;
|
||||
_hmd = ovrHmd_Create(0);
|
||||
if (!_hmd) {
|
||||
qFatal("Failed to acquire HMD");
|
||||
}
|
||||
|
||||
OculusBaseDisplayPlugin::activate();
|
||||
int screen = getHmdScreen();
|
||||
if (screen != -1) {
|
||||
CONTAINER->setFullscreen(qApp->screens()[screen]);
|
||||
}
|
||||
|
||||
_window->installEventFilter(this);
|
||||
_window->makeCurrent();
|
||||
ovrGLConfig config; memset(&config, 0, sizeof(ovrRenderAPIConfig));
|
||||
auto& header = config.Config.Header;
|
||||
header.API = ovrRenderAPI_OpenGL;
|
||||
header.BackBufferSize = _hmd->Resolution;
|
||||
header.Multisample = 1;
|
||||
int distortionCaps = 0
|
||||
| ovrDistortionCap_TimeWarp
|
||||
;
|
||||
|
||||
memset(_eyeTextures, 0, sizeof(ovrTexture) * 2);
|
||||
ovr_for_each_eye([&](ovrEyeType eye) {
|
||||
auto& header = _eyeTextures[eye].Header;
|
||||
header.API = ovrRenderAPI_OpenGL;
|
||||
header.TextureSize = { (int)_desiredFramebufferSize.x, (int)_desiredFramebufferSize.y };
|
||||
header.RenderViewport.Size = header.TextureSize;
|
||||
header.RenderViewport.Size.w /= 2;
|
||||
if (eye == ovrEye_Right) {
|
||||
header.RenderViewport.Pos.x = header.RenderViewport.Size.w;
|
||||
}
|
||||
});
|
||||
|
||||
ovrEyeRenderDesc _eyeRenderDescs[ovrEye_Count];
|
||||
ovrBool result = ovrHmd_ConfigureRendering(_hmd, &config.Config, distortionCaps, _eyeFovs, _eyeRenderDescs);
|
||||
Q_ASSERT(result);
|
||||
#endif
|
||||
}
|
||||
|
||||
void Oculus_0_5_DisplayPlugin::deactivate() {
|
||||
#if (OVR_MAJOR_VERSION == 5)
|
||||
_window->removeEventFilter(this);
|
||||
|
||||
OculusBaseDisplayPlugin::deactivate();
|
||||
|
||||
QScreen* riftScreen = nullptr;
|
||||
if (_hmdScreen >= 0) {
|
||||
riftScreen = qApp->screens()[_hmdScreen];
|
||||
}
|
||||
CONTAINER->unsetFullscreen(riftScreen);
|
||||
|
||||
ovrHmd_Destroy(_hmd);
|
||||
_hmd = nullptr;
|
||||
ovr_Shutdown();
|
||||
#endif
|
||||
}
|
||||
|
||||
void Oculus_0_5_DisplayPlugin::preRender() {
|
||||
#if (OVR_MAJOR_VERSION == 5)
|
||||
OculusBaseDisplayPlugin::preRender();
|
||||
ovrHmd_BeginFrame(_hmd, _frameIndex);
|
||||
#endif
|
||||
}
|
||||
|
||||
void Oculus_0_5_DisplayPlugin::preDisplay() {
|
||||
_window->makeCurrent();
|
||||
}
|
||||
|
||||
void Oculus_0_5_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
|
||||
++_frameIndex;
|
||||
#if (OVR_MAJOR_VERSION == 5)
|
||||
ovr_for_each_eye([&](ovrEyeType eye) {
|
||||
reinterpret_cast<ovrGLTexture&>(_eyeTextures[eye]).OGL.TexId = finalTexture;
|
||||
});
|
||||
ovrHmd_EndFrame(_hmd, _eyePoses, _eyeTextures);
|
||||
#endif
|
||||
}
|
||||
|
||||
// Pass input events on to the application
|
||||
bool Oculus_0_5_DisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
|
||||
#if (OVR_MAJOR_VERSION == 5)
|
||||
if (!_hswDismissed && (event->type() == QEvent::KeyPress)) {
|
||||
static ovrHSWDisplayState hswState;
|
||||
ovrHmd_GetHSWDisplayState(_hmd, &hswState);
|
||||
if (hswState.Displayed) {
|
||||
ovrHmd_DismissHSWDisplay(_hmd);
|
||||
} else {
|
||||
_hswDismissed = true;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
return OculusBaseDisplayPlugin::eventFilter(receiver, event);
|
||||
}
|
||||
|
||||
// FIXME mirroring to the main window is difficult on OSX because it requires that we
|
||||
// trigger a swap, which causes the client to wait for the v-sync of the main screen running
|
||||
// at 60 Hz. This would introduce judder. Perhaps we can push mirroring to a separate
|
||||
// thread
|
||||
void Oculus_0_5_DisplayPlugin::finishFrame() {
|
||||
_window->doneCurrent();
|
||||
};
|
||||
|
||||
int Oculus_0_5_DisplayPlugin::getHmdScreen() const {
|
||||
return _hmdScreen;
|
||||
}
|
|
@ -0,0 +1,37 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "OculusBaseDisplayPlugin.h"
|
||||
|
||||
#include <QTimer>
|
||||
|
||||
class Oculus_0_5_DisplayPlugin : public OculusBaseDisplayPlugin {
|
||||
public:
|
||||
virtual bool isSupported() const override;
|
||||
virtual const QString & getName() const override;
|
||||
|
||||
virtual void activate() override;
|
||||
virtual void deactivate() override;
|
||||
|
||||
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
|
||||
|
||||
virtual int getHmdScreen() const override;
|
||||
|
||||
protected:
|
||||
virtual void preRender() override;
|
||||
virtual void preDisplay() override;
|
||||
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
|
||||
// Do not perform swap in finish
|
||||
virtual void finishFrame() override;
|
||||
|
||||
private:
|
||||
static const QString NAME;
|
||||
};
|
||||
|
||||
|
|
@ -0,0 +1,370 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2014/04/13.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "Oculus_0_6_DisplayPlugin.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include <QMainWindow>
|
||||
#include <QGLWidget>
|
||||
#include <GLMHelpers.h>
|
||||
#include <GlWindow.h>
|
||||
#include <QEvent>
|
||||
#include <QResizeEvent>
|
||||
|
||||
#include <OVR_CAPI_GL.h>
|
||||
|
||||
|
||||
#include <OglplusHelpers.h>
|
||||
#include <oglplus/opt/list_init.hpp>
|
||||
#include <oglplus/shapes/vector.hpp>
|
||||
#include <oglplus/opt/list_init.hpp>
|
||||
#include <oglplus/shapes/obj_mesh.hpp>
|
||||
|
||||
#include <PerfStat.h>
|
||||
#include <plugins/PluginContainer.h>
|
||||
|
||||
#include "OculusHelpers.h"
|
||||
|
||||
using namespace Oculus;
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
SwapFboPtr _sceneFbo;
|
||||
MirrorFboPtr _mirrorFbo;
|
||||
ovrLayerEyeFov _sceneLayer;
|
||||
|
||||
// A base class for FBO wrappers that need to use the Oculus C
|
||||
// API to manage textures via ovrHmd_CreateSwapTextureSetGL,
|
||||
// ovrHmd_CreateMirrorTextureGL, etc
|
||||
template <typename C>
|
||||
struct RiftFramebufferWrapper : public FramebufferWrapper<C, char> {
|
||||
ovrHmd hmd;
|
||||
RiftFramebufferWrapper(const ovrHmd & hmd) : hmd(hmd) {
|
||||
color = 0;
|
||||
depth = 0;
|
||||
};
|
||||
|
||||
void Resize(const uvec2 & size) {
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
|
||||
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
|
||||
this->size = size;
|
||||
initColor();
|
||||
initDone();
|
||||
}
|
||||
|
||||
protected:
|
||||
virtual void initDepth() override final {
|
||||
}
|
||||
};
|
||||
|
||||
// A wrapper for constructing and using a swap texture set,
|
||||
// where each frame you draw to a texture via the FBO,
|
||||
// then submit it and increment to the next texture.
|
||||
// The Oculus SDK manages the creation and destruction of
|
||||
// the textures
|
||||
struct SwapFramebufferWrapper : public RiftFramebufferWrapper<ovrSwapTextureSet*> {
|
||||
SwapFramebufferWrapper(const ovrHmd & hmd)
|
||||
: RiftFramebufferWrapper(hmd) {
|
||||
}
|
||||
|
||||
~SwapFramebufferWrapper() {
|
||||
if (color) {
|
||||
ovrHmd_DestroySwapTextureSet(hmd, color);
|
||||
color = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
void Increment() {
|
||||
++color->CurrentIndex;
|
||||
color->CurrentIndex %= color->TextureCount;
|
||||
}
|
||||
|
||||
protected:
|
||||
virtual void initColor() override {
|
||||
if (color) {
|
||||
ovrHmd_DestroySwapTextureSet(hmd, color);
|
||||
color = nullptr;
|
||||
}
|
||||
|
||||
if (!OVR_SUCCESS(ovrHmd_CreateSwapTextureSetGL(hmd, GL_RGBA, size.x, size.y, &color))) {
|
||||
qFatal("Unable to create swap textures");
|
||||
}
|
||||
|
||||
for (int i = 0; i < color->TextureCount; ++i) {
|
||||
ovrGLTexture& ovrTex = (ovrGLTexture&)color->Textures[i];
|
||||
glBindTexture(GL_TEXTURE_2D, ovrTex.OGL.TexId);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
}
|
||||
glBindTexture(GL_TEXTURE_2D, 0);
|
||||
}
|
||||
|
||||
virtual void initDone() override {
|
||||
}
|
||||
|
||||
virtual void onBind(oglplus::Framebuffer::Target target) override {
|
||||
ovrGLTexture& tex = (ovrGLTexture&)(color->Textures[color->CurrentIndex]);
|
||||
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex.OGL.TexId, 0);
|
||||
}
|
||||
|
||||
virtual void onUnbind(oglplus::Framebuffer::Target target) override {
|
||||
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
|
||||
}
|
||||
};
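
// Condensed per-frame usage of the swap texture set described above (sketch only; the real
// sequence lives in Oculus_0_6_DisplayPlugin::display() further down in this file): draw into
// the current swap texture, submit the layer, then advance to the next texture in the set.
static void submitOneFrameSketch() {
    _sceneFbo->Bound([&] {
        // draw the composited scene into the current swap texture here
    });
    ovrLayerHeader* layers = &_sceneLayer.Header;
    ovrHmd_SubmitFrame(_hmd, _frameIndex, nullptr, &layers, 1);
    _sceneFbo->Increment();   // advance to the next texture before the next frame
}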
|
||||
|
||||
|
||||
// We use a FBO to wrap the mirror texture because it makes it easier to
|
||||
// render to the screen via glBlitFramebuffer
|
||||
struct MirrorFramebufferWrapper : public RiftFramebufferWrapper<ovrGLTexture*> {
|
||||
MirrorFramebufferWrapper(const ovrHmd & hmd)
|
||||
: RiftFramebufferWrapper(hmd) { }
|
||||
|
||||
virtual ~MirrorFramebufferWrapper() {
|
||||
if (color) {
|
||||
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
|
||||
color = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
void initColor() override {
|
||||
if (color) {
|
||||
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
|
||||
color = nullptr;
|
||||
}
|
||||
ovrResult result = ovrHmd_CreateMirrorTextureGL(hmd, GL_RGBA, size.x, size.y, (ovrTexture**)&color);
|
||||
Q_ASSERT(OVR_SUCCESS(result));
|
||||
}
|
||||
|
||||
void initDone() override {
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
|
||||
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, color->OGL.TexId, 0);
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
|
||||
}
|
||||
};
|
||||
|
||||
#endif
|
||||
|
||||
|
||||
const QString Oculus_0_6_DisplayPlugin::NAME("Oculus Rift");
|
||||
|
||||
const QString & Oculus_0_6_DisplayPlugin::getName() const {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
bool Oculus_0_6_DisplayPlugin::isSupported() const {
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
|
||||
return false;
|
||||
}
|
||||
bool result = false;
|
||||
if (ovrHmd_Detect() > 0) {
|
||||
result = true;
|
||||
}
|
||||
ovr_Shutdown();
|
||||
return result;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
ovrLayerEyeFov& getSceneLayer() {
|
||||
return _sceneLayer;
|
||||
}
|
||||
#endif
|
||||
|
||||
//static gpu::TexturePointer _texture;
|
||||
|
||||
void Oculus_0_6_DisplayPlugin::activate() {
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
|
||||
Q_ASSERT(false);
|
||||
qFatal("Failed to Initialize SDK");
|
||||
}
|
||||
if (!OVR_SUCCESS(ovrHmd_Create(0, &_hmd))) {
|
||||
Q_ASSERT(false);
|
||||
qFatal("Failed to acquire HMD");
|
||||
}
|
||||
|
||||
OculusBaseDisplayPlugin::activate();
|
||||
|
||||
// Parent class relies on our _hmd initialization, so it must come after that.
|
||||
ovrLayerEyeFov& sceneLayer = getSceneLayer();
|
||||
memset(&sceneLayer, 0, sizeof(ovrLayerEyeFov));
|
||||
sceneLayer.Header.Type = ovrLayerType_EyeFov;
|
||||
sceneLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
|
||||
ovr_for_each_eye([&](ovrEyeType eye) {
|
||||
ovrFovPort & fov = sceneLayer.Fov[eye] = _eyeRenderDescs[eye].Fov;
|
||||
ovrSizei & size = sceneLayer.Viewport[eye].Size = ovrHmd_GetFovTextureSize(_hmd, eye, fov, 1.0f);
|
||||
sceneLayer.Viewport[eye].Pos = { eye == ovrEye_Left ? 0 : size.w, 0 };
|
||||
});
|
||||
// We're rendering both eyes to the same texture, so only one of the
|
||||
// pointers is populated
|
||||
sceneLayer.ColorTexture[0] = _sceneFbo->color;
|
||||
// not needed since the structure was zeroed on init, but explicit
|
||||
sceneLayer.ColorTexture[1] = nullptr;
|
||||
|
||||
PerformanceTimer::setActive(true);
|
||||
|
||||
if (!OVR_SUCCESS(ovrHmd_ConfigureTracking(_hmd,
|
||||
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
|
||||
qFatal("Could not attach to sensor device");
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void Oculus_0_6_DisplayPlugin::customizeContext() {
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
OculusBaseDisplayPlugin::customizeContext();
|
||||
|
||||
//_texture = DependencyManager::get<TextureCache>()->
|
||||
// getImageTexture(PathUtils::resourcesPath() + "/images/cube_texture.png");
|
||||
uvec2 mirrorSize = toGlm(_window->geometry().size());
|
||||
_mirrorFbo = MirrorFboPtr(new MirrorFramebufferWrapper(_hmd));
|
||||
_mirrorFbo->Init(mirrorSize);
|
||||
|
||||
_sceneFbo = SwapFboPtr(new SwapFramebufferWrapper(_hmd));
|
||||
_sceneFbo->Init(getRecommendedRenderSize());
|
||||
#endif
|
||||
}
|
||||
|
||||
void Oculus_0_6_DisplayPlugin::deactivate() {
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
makeCurrent();
|
||||
_sceneFbo.reset();
|
||||
_mirrorFbo.reset();
|
||||
doneCurrent();
|
||||
PerformanceTimer::setActive(false);
|
||||
|
||||
OculusBaseDisplayPlugin::deactivate();
|
||||
|
||||
ovrHmd_Destroy(_hmd);
|
||||
_hmd = nullptr;
|
||||
ovr_Shutdown();
|
||||
#endif
|
||||
}
|
||||
|
||||
void Oculus_0_6_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
using namespace oglplus;
|
||||
// Need to make sure only the display plugin is responsible for
|
||||
// controlling vsync
|
||||
wglSwapIntervalEXT(0);
|
||||
|
||||
_sceneFbo->Bound([&] {
|
||||
auto size = _sceneFbo->size;
|
||||
Context::Viewport(size.x, size.y);
|
||||
glBindTexture(GL_TEXTURE_2D, finalTexture);
|
||||
drawUnitQuad();
|
||||
});
|
||||
|
||||
ovrLayerEyeFov& sceneLayer = getSceneLayer();
|
||||
ovr_for_each_eye([&](ovrEyeType eye) {
|
||||
sceneLayer.RenderPose[eye] = _eyePoses[eye];
|
||||
});
|
||||
|
||||
auto windowSize = toGlm(_window->size());
|
||||
|
||||
/*
|
||||
Two alternatives for mirroring to the screen, the first is to copy our own composited
|
||||
scene to the window framebuffer, before distortion. Note this only works if we're doing
|
||||
ui compositing ourselves, and not relying on the Oculus SDK compositor (or we don't want
|
||||
the UI visible in the output window (unlikely). This should be done before
|
||||
_sceneFbo->Increment or we'd be using the wrong texture
|
||||
*/
|
||||
//_sceneFbo->Bound(GL_READ_FRAMEBUFFER, [&] {
|
||||
// glBlitFramebuffer(
|
||||
// 0, 0, _sceneFbo->size.x, _sceneFbo->size.y,
|
||||
// 0, 0, windowSize.x, _mirrorFbo.y,
|
||||
// GL_COLOR_BUFFER_BIT, GL_NEAREST);
|
||||
//});
|
||||
|
||||
{
|
||||
PerformanceTimer("OculusSubmit");
|
||||
ovrLayerHeader* layers = &sceneLayer.Header;
|
||||
ovrResult result = ovrHmd_SubmitFrame(_hmd, _frameIndex, nullptr, &layers, 1);
|
||||
}
|
||||
_sceneFbo->Increment();
|
||||
|
||||
/*
|
||||
The other alternative for mirroring is to use the Oculus mirror texture support, which
|
||||
will contain the post-distorted and fully composited scene regardless of how many layers
|
||||
we send.
|
||||
*/
|
||||
auto mirrorSize = _mirrorFbo->size;
|
||||
_mirrorFbo->Bound(Framebuffer::Target::Read, [&] {
|
||||
Context::BlitFramebuffer(
|
||||
0, mirrorSize.y, mirrorSize.x, 0,
|
||||
0, 0, windowSize.x, windowSize.y,
|
||||
BufferSelectBit::ColorBuffer, BlitFilter::Nearest);
|
||||
});
|
||||
|
||||
++_frameIndex;
|
||||
#endif
|
||||
}
|
||||
|
||||
// Pass input events on to the application
|
||||
bool Oculus_0_6_DisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
if (event->type() == QEvent::Resize) {
|
||||
QResizeEvent* resizeEvent = static_cast<QResizeEvent*>(event);
|
||||
qDebug() << resizeEvent->size().width() << " x " << resizeEvent->size().height();
|
||||
auto newSize = toGlm(resizeEvent->size());
|
||||
makeCurrent();
|
||||
_mirrorFbo->Resize(newSize);
|
||||
doneCurrent();
|
||||
}
|
||||
#endif
|
||||
return OculusBaseDisplayPlugin::eventFilter(receiver, event);
|
||||
}
|
||||
|
||||
/*
|
||||
The swapbuffer call here is only required if we want to mirror the content to the screen.
|
||||
However, it should only be done if we can reliably disable v-sync on the mirror surface,
|
||||
otherwise the swapbuffer delay will interfere with the framerate of the headset
|
||||
*/
|
||||
void Oculus_0_6_DisplayPlugin::finishFrame() {
|
||||
swapBuffers();
|
||||
doneCurrent();
|
||||
};
|
||||
|
||||
|
||||
#if 0
|
||||
/*
|
||||
An alternative way to render the UI is to pass it specifically as a composition layer to
|
||||
the Oculus SDK which should technically result in higher quality. However, the SDK doesn't
|
||||
have a mechanism to present the image as a sphere section, which is our desired look.
|
||||
*/
|
||||
ovrLayerQuad& uiLayer = getUiLayer();
|
||||
if (nullptr == uiLayer.ColorTexture || overlaySize != _uiFbo->size) {
|
||||
_uiFbo->Resize(overlaySize);
|
||||
uiLayer.ColorTexture = _uiFbo->color;
|
||||
uiLayer.Viewport.Size.w = overlaySize.x;
|
||||
uiLayer.Viewport.Size.h = overlaySize.y;
|
||||
float overlayAspect = aspect(overlaySize);
|
||||
uiLayer.QuadSize.x = 1.0f;
|
||||
uiLayer.QuadSize.y = 1.0f / overlayAspect;
|
||||
}
|
||||
|
||||
_uiFbo->Bound([&] {
|
||||
Q_ASSERT(0 == glGetError());
|
||||
using namespace oglplus;
|
||||
Context::Viewport(_uiFbo->size.x, _uiFbo->size.y);
|
||||
glClearColor(0, 0, 0, 0);
|
||||
Context::Clear().ColorBuffer();
|
||||
|
||||
_program->Bind();
|
||||
glBindTexture(GL_TEXTURE_2D, overlayTexture);
|
||||
_plane->Use();
|
||||
_plane->Draw();
|
||||
Q_ASSERT(0 == glGetError());
|
||||
});
|
||||
#endif
|
|
@ -0,0 +1,41 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "OculusBaseDisplayPlugin.h"
|
||||
|
||||
#include <QTimer>
|
||||
|
||||
class OffscreenGlCanvas;
|
||||
struct SwapFramebufferWrapper;
|
||||
struct MirrorFramebufferWrapper;
|
||||
|
||||
using SwapFboPtr = QSharedPointer<SwapFramebufferWrapper>;
|
||||
using MirrorFboPtr = QSharedPointer<MirrorFramebufferWrapper>;
|
||||
|
||||
class Oculus_0_6_DisplayPlugin : public OculusBaseDisplayPlugin {
|
||||
public:
|
||||
virtual bool isSupported() const override;
|
||||
virtual const QString & getName() const override;
|
||||
|
||||
virtual void activate() override;
|
||||
virtual void deactivate() override;
|
||||
|
||||
|
||||
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
|
||||
|
||||
protected:
|
||||
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
|
||||
virtual void customizeContext() override;
|
||||
// Do not perform swap in finish
|
||||
virtual void finishFrame() override;
|
||||
|
||||
private:
|
||||
static const QString NAME;
|
||||
};
|
||||
|
|
@ -0,0 +1,197 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/12
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "OpenVrDisplayPlugin.h"
|
||||
|
||||
#if defined(Q_OS_WIN)
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include <QMainWindow>
|
||||
#include <QGLWidget>
|
||||
#include <GLMHelpers.h>
|
||||
#include <GlWindow.h>
|
||||
#include <QEvent>
|
||||
#include <QResizeEvent>
|
||||
|
||||
#include <PerfStat.h>
|
||||
#include <plugins/PluginContainer.h>
|
||||
#include <ViewFrustum.h>
|
||||
|
||||
#include "OpenVrHelpers.h"
|
||||
#include "GLMHelpers.h"
|
||||
|
||||
#include <QLoggingCategory>
|
||||
Q_DECLARE_LOGGING_CATEGORY(displayplugins)
|
||||
Q_LOGGING_CATEGORY(displayplugins, "hifi.displayplugins")
|
||||
|
||||
const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
|
||||
|
||||
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probably shouldn't be hardcoded here
|
||||
|
||||
const QString & OpenVrDisplayPlugin::getName() const {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
vr::IVRSystem* _hmd{ nullptr };
|
||||
int hmdRefCount = 0;
|
||||
static vr::IVRCompositor* _compositor{ nullptr };
|
||||
vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
|
||||
mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
|
||||
static mat4 _sensorResetMat;
|
||||
static uvec2 _windowSize;
|
||||
static ivec2 _windowPosition;
|
||||
static uvec2 _renderTargetSize;
|
||||
|
||||
struct PerEyeData {
|
||||
uvec2 _viewportOrigin;
|
||||
uvec2 _viewportSize;
|
||||
mat4 _projectionMatrix;
|
||||
mat4 _eyeOffset;
|
||||
mat4 _pose;
|
||||
};
|
||||
|
||||
static PerEyeData _eyesData[2];
|
||||
|
||||
|
||||
template<typename F>
|
||||
void openvr_for_each_eye(F f) {
|
||||
f(vr::Hmd_Eye::Eye_Left);
|
||||
f(vr::Hmd_Eye::Eye_Right);
|
||||
}
|
||||
|
||||
mat4 toGlm(const vr::HmdMatrix44_t& m) {
|
||||
return glm::transpose(glm::make_mat4(&m.m[0][0]));
|
||||
}
|
||||
|
||||
mat4 toGlm(const vr::HmdMatrix34_t& m) {
|
||||
mat4 result = mat4(
|
||||
m.m[0][0], m.m[1][0], m.m[2][0], 0.0,
|
||||
m.m[0][1], m.m[1][1], m.m[2][1], 0.0,
|
||||
m.m[0][2], m.m[1][2], m.m[2][2], 0.0,
|
||||
m.m[0][3], m.m[1][3], m.m[2][3], 1.0f);
|
||||
return result;
|
||||
}
|
||||
|
||||
bool OpenVrDisplayPlugin::isSupported() const {
|
||||
bool success = vr::VR_IsHmdPresent();
|
||||
if (success) {
|
||||
vr::HmdError eError = vr::HmdError_None;
|
||||
auto hmd = vr::VR_Init(&eError);
|
||||
success = (hmd != nullptr);
|
||||
vr::VR_Shutdown();
|
||||
}
|
||||
return success;
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::activate() {
|
||||
CONTAINER->setIsOptionChecked(StandingHMDSensorMode, true);
|
||||
|
||||
hmdRefCount++;
|
||||
vr::HmdError eError = vr::HmdError_None;
|
||||
if (!_hmd) {
|
||||
_hmd = vr::VR_Init(&eError);
|
||||
Q_ASSERT(eError == vr::HmdError_None);
|
||||
}
|
||||
Q_ASSERT(_hmd);
|
||||
|
||||
_hmd->GetWindowBounds(&_windowPosition.x, &_windowPosition.y, &_windowSize.x, &_windowSize.y);
|
||||
_hmd->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
|
||||
// Recommended render target size is per-eye, so double the X size for
|
||||
// left + right eyes
|
||||
_renderTargetSize.x *= 2;
|
||||
openvr_for_each_eye([&](vr::Hmd_Eye eye) {
|
||||
PerEyeData& eyeData = _eyesData[eye];
|
||||
_hmd->GetEyeOutputViewport(eye,
|
||||
&eyeData._viewportOrigin.x, &eyeData._viewportOrigin.y,
|
||||
&eyeData._viewportSize.x, &eyeData._viewportSize.y);
|
||||
eyeData._projectionMatrix = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
|
||||
eyeData._eyeOffset = toGlm(_hmd->GetEyeToHeadTransform(eye));
|
||||
});
|
||||
|
||||
|
||||
_compositor = (vr::IVRCompositor*)vr::VR_GetGenericInterface(vr::IVRCompositor_Version, &eError);
|
||||
Q_ASSERT(eError == vr::HmdError_None);
|
||||
Q_ASSERT(_compositor);
|
||||
|
||||
_compositor->SetGraphicsDevice(vr::Compositor_DeviceType_OpenGL, NULL);
|
||||
|
||||
uint32_t unSize = _compositor->GetLastError(NULL, 0);
|
||||
if (unSize > 1) {
|
||||
char* buffer = new char[unSize];
|
||||
_compositor->GetLastError(buffer, unSize);
|
||||
printf("Compositor - %s\n", buffer);
|
||||
delete[] buffer;
|
||||
}
|
||||
Q_ASSERT(unSize <= 1);
|
||||
MainWindowOpenGLDisplayPlugin::activate();
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::deactivate() {
|
||||
CONTAINER->setIsOptionChecked(StandingHMDSensorMode, false);
|
||||
|
||||
hmdRefCount--;
|
||||
|
||||
if (hmdRefCount == 0 && _hmd) {
|
||||
vr::VR_Shutdown();
|
||||
_hmd = nullptr;
|
||||
}
|
||||
_compositor = nullptr;
|
||||
MainWindowOpenGLDisplayPlugin::deactivate();
|
||||
}
|
||||
|
||||
uvec2 OpenVrDisplayPlugin::getRecommendedRenderSize() const {
|
||||
return _renderTargetSize;
|
||||
}
|
||||
|
||||
mat4 OpenVrDisplayPlugin::getProjection(Eye eye, const mat4& baseProjection) const {
|
||||
return _eyesData[eye]._projectionMatrix;
|
||||
}
|
||||
|
||||
glm::mat4 OpenVrDisplayPlugin::getModelview(Eye eye, const mat4& baseModelview) const {
|
||||
return baseModelview * getEyePose(eye);
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::resetSensors() {
|
||||
_sensorResetMat = glm::inverse(cancelOutRollAndPitch(_trackedDevicePoseMat4[0]));
|
||||
}
|
||||
|
||||
glm::mat4 OpenVrDisplayPlugin::getEyePose(Eye eye) const {
|
||||
return getHeadPose() * _eyesData[eye]._eyeOffset;
|
||||
}
|
||||
|
||||
glm::mat4 OpenVrDisplayPlugin::getHeadPose() const {
|
||||
return _trackedDevicePoseMat4[0];
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::customizeContext() {
|
||||
MainWindowOpenGLDisplayPlugin::customizeContext();
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
|
||||
// Flip y-axis since GL UV coords are backwards.
|
||||
static vr::Compositor_TextureBounds leftBounds{ 0, 1, 0.5f, 0 };
|
||||
static vr::Compositor_TextureBounds rightBounds{ 0.5f, 1, 1, 0 };
|
||||
_compositor->Submit(vr::Eye_Left, (void*)finalTexture, &leftBounds);
|
||||
_compositor->Submit(vr::Eye_Right, (void*)finalTexture, &rightBounds);
|
||||
glFinish();
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::finishFrame() {
|
||||
// swapBuffers();
|
||||
doneCurrent();
|
||||
_compositor->WaitGetPoses(_trackedDevicePose, vr::k_unMaxTrackedDeviceCount);
|
||||
for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
|
||||
_trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
|
||||
}
|
||||
openvr_for_each_eye([&](vr::Hmd_Eye eye) {
|
||||
_eyesData[eye]._pose = _trackedDevicePoseMat4[0];
|
||||
});
|
||||
};
|
||||
|
||||
#endif
|
||||
|
|
@ -0,0 +1,47 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/06/12
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include <QtGlobal>
|
||||
|
||||
#if defined(Q_OS_WIN)
|
||||
|
||||
#include "../MainWindowOpenGLDisplayPlugin.h"
|
||||
|
||||
class OpenVrDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
|
||||
public:
|
||||
virtual bool isSupported() const override;
|
||||
virtual const QString & getName() const override;
|
||||
virtual bool isHmd() const override { return true; }
|
||||
|
||||
virtual void activate() override;
|
||||
virtual void deactivate() override;
|
||||
|
||||
virtual glm::uvec2 getRecommendedRenderSize() const override;
|
||||
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
|
||||
|
||||
// Stereo specific methods
|
||||
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
|
||||
virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const override;
|
||||
virtual void resetSensors() override;
|
||||
|
||||
virtual glm::mat4 getEyePose(Eye eye) const override;
|
||||
virtual glm::mat4 getHeadPose() const override;
|
||||
|
||||
protected:
|
||||
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
|
||||
virtual void customizeContext() override;
|
||||
// Do not perform swap in finish
|
||||
virtual void finishFrame() override;
|
||||
|
||||
private:
|
||||
static const QString NAME;
|
||||
};
|
||||
|
||||
#endif
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/06/12
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#if defined(Q_OS_WIN)
|
||||
#include <openvr.h>
|
||||
#include <GLMHelpers.h>
|
||||
#include <glm/gtc/type_ptr.hpp>
|
||||
#include <glm/gtc/matrix_transform.hpp>
|
||||
#endif
|
||||
|
|
@@ -0,0 +1,33 @@
//
//  Created by Bradley Austin Davis on 2015/05/29
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "InterleavedStereoDisplayPlugin.h"

#include <QApplication>
#include <QDesktopWidget>

#include <GlWindow.h>
#include <ViewFrustum.h>
#include <MatrixStack.h>

#include <gpu/GLBackend.h>

const QString InterleavedStereoDisplayPlugin::NAME("Interleaved Stereo Display");

const QString & InterleavedStereoDisplayPlugin::getName() const {
    return NAME;
}

InterleavedStereoDisplayPlugin::InterleavedStereoDisplayPlugin() {
}

void InterleavedStereoDisplayPlugin::customizeContext() {
    StereoDisplayPlugin::customizeContext();
    // Set up the stencil buffers? Or use a custom shader?
}
@@ -0,0 +1,23 @@
//
//  Created by Bradley Austin Davis on 2015/05/29
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once

#include "StereoDisplayPlugin.h"

class InterleavedStereoDisplayPlugin : public StereoDisplayPlugin {
    Q_OBJECT
public:
    InterleavedStereoDisplayPlugin();
    virtual const QString & getName() const override;

    // initialize OpenGL context settings needed by the plugin
    virtual void customizeContext() override;

private:
    static const QString NAME;
};
@@ -0,0 +1,28 @@
//
//  Created by Bradley Austin Davis on 2015/05/29
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "SideBySideStereoDisplayPlugin.h"

#include <QApplication>
#include <QDesktopWidget>

#include <GlWindow.h>
#include <ViewFrustum.h>
#include <MatrixStack.h>

#include <gpu/GLBackend.h>

const QString SideBySideStereoDisplayPlugin::NAME("SBS Stereo Display");

const QString & SideBySideStereoDisplayPlugin::getName() const {
    return NAME;
}

SideBySideStereoDisplayPlugin::SideBySideStereoDisplayPlugin() {
}
@@ -0,0 +1,19 @@
//
//  Created by Bradley Austin Davis on 2015/05/29
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once

#include "StereoDisplayPlugin.h"

class SideBySideStereoDisplayPlugin : public StereoDisplayPlugin {
    Q_OBJECT
public:
    SideBySideStereoDisplayPlugin();
    virtual const QString & getName() const override;
private:
    static const QString NAME;
};
@@ -0,0 +1,57 @@
//
//  Created by Bradley Austin Davis on 2015/05/29
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "StereoDisplayPlugin.h"

#include <QApplication>
#include <QDesktopWidget>

#include <gpu/GLBackend.h>
#include <ViewFrustum.h>
#include <MatrixStack.h>
#include <plugins/PluginContainer.h>

StereoDisplayPlugin::StereoDisplayPlugin() {
}

bool StereoDisplayPlugin::isSupported() const {
    // FIXME this should attempt to do a scan for supported 3D output
    return true;
}

// FIXME make this into a setting that can be adjusted
const float DEFAULT_IPD = 0.064f;
const float HALF_DEFAULT_IPD = DEFAULT_IPD / 2.0f;

glm::mat4 StereoDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
    // Refer to http://www.nvidia.com/content/gtc-2010/pdfs/2010_gtc2010.pdf on creating
    // stereo projection matrices.  Do NOT use "toe-in", use translation.

    float nearZ = DEFAULT_NEAR_CLIP; // near clipping plane
    float screenZ = 0.25f; // screen projection plane
    // FIXME verify this is the right calculation
    float frustumshift = HALF_DEFAULT_IPD * nearZ / screenZ;
    if (eye == Right) {
        frustumshift = -frustumshift;
    }
    return glm::translate(baseProjection, vec3(frustumshift, 0, 0));
}

glm::mat4 StereoDisplayPlugin::getModelview(Eye eye, const glm::mat4& baseModelview) const {
    float modelviewShift = HALF_DEFAULT_IPD;
    if (eye == Left) {
        modelviewShift = -modelviewShift;
    }
    return baseModelview * glm::translate(mat4(), vec3(modelviewShift, 0, 0));
}

void StereoDisplayPlugin::activate() {
    WindowOpenGLDisplayPlugin::activate();
    CONTAINER->setFullscreen(qApp->primaryScreen());
    // FIXME Add menu items
}
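As a rough sanity check of the offsets above (illustrative only, not part of this commit): with a hypothetical 0.1 m near clip standing in for DEFAULT_NEAR_CLIP, the per-eye frustum shift works out to about 0.0128 m, while the modelview shift is simply half the IPD. A minimal sketch, assuming only those constants:

// Hedged sketch: recomputes the two shifts with the same constants used above.
// DEFAULT_NEAR_CLIP is not defined here, so a 0.1 m placeholder stands in for it.
#include <cstdio>

int main() {
    const float DEFAULT_IPD = 0.064f;
    const float HALF_DEFAULT_IPD = DEFAULT_IPD / 2.0f;
    const float nearZ = 0.1f;    // placeholder for DEFAULT_NEAR_CLIP
    const float screenZ = 0.25f; // projection plane distance used above

    float frustumShift = HALF_DEFAULT_IPD * nearZ / screenZ; // ~0.0128 m; negated for the right eye
    float modelviewShift = HALF_DEFAULT_IPD;                 // 0.032 m; negated for the left eye

    std::printf("frustum shift: %f m, modelview shift: %f m\n", frustumShift, modelviewShift);
    return 0;
}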
@@ -0,0 +1,24 @@
//
//  Created by Bradley Austin Davis on 2015/05/29
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once

#include "../MainWindowOpenGLDisplayPlugin.h"

class StereoDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
    Q_OBJECT
public:
    StereoDisplayPlugin();
    virtual bool isStereo() const override final { return true; }
    virtual bool isSupported() const override final;

    virtual void activate() override;

    virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
    virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const override;

};
@@ -96,7 +96,6 @@ void EntityTreeRenderer::clear() {

    auto scene = _viewState->getMain3DScene();
    render::PendingChanges pendingChanges;

    foreach(auto entity, _entitiesInScene) {
        entity->removeFromScene(entity, scene, pendingChanges);
    }
@@ -174,7 +174,6 @@ uint32_t toRGBA(uint8_t r, uint8_t g, uint8_t b, uint8_t a) {
}

void RenderableParticleEffectEntityItem::updateRenderItem() {

    if (!_scene) {
        return;
    }

@@ -212,8 +211,7 @@ void RenderableParticleEffectEntityItem::updateRenderItem() {
    }

    render::PendingChanges pendingChanges;
    pendingChanges.updateItem<ParticlePayload>(_renderItemId, [&](ParticlePayload& payload) {

    pendingChanges.updateItem<ParticlePayload>(_renderItemId, [this](ParticlePayload& payload) {
        // update vertex buffer
        auto vertexBuffer = payload.getVertexBuffer();
        size_t numBytes = sizeof(Vertex) * _vertices.size();
@@ -12,6 +12,7 @@
#ifndef gpu__GPUConfig__
#define gpu__GPUConfig__


#define GL_GLEXT_PROTOTYPES 1

#define GPU_CORE 1

@@ -24,6 +25,9 @@
#define GPU_FEATURE_PROFILE GPU_CORE
#define GPU_INPUT_PROFILE GPU_CORE_41

#include <OpenGL/gl.h>
#include <OpenGL/glext.h>

#elif defined(WIN32)
#include <GL/glew.h>
#include <GL/wglew.h>

@@ -42,4 +46,5 @@

#endif


#endif
72  libraries/input-plugins/CMakeLists.txt  Normal file
@@ -0,0 +1,72 @@
set(TARGET_NAME input-plugins)

# set a default root dir for each of our optional externals if it was not passed
set(OPTIONAL_EXTERNALS "SDL2" "Sixense")
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
  string(TOUPPER ${EXTERNAL} ${EXTERNAL}_UPPERCASE)
  if (NOT ${${EXTERNAL}_UPPERCASE}_ROOT_DIR)
    string(TOLOWER ${EXTERNAL} ${EXTERNAL}_LOWERCASE)
    set(${${EXTERNAL}_UPPERCASE}_ROOT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/${${EXTERNAL}_LOWERCASE}")
  endif ()
endforeach()

setup_hifi_library()

# use setup_hifi_library macro to setup our project and link appropriate Qt modules
link_hifi_libraries(shared plugins gpu render-utils)

GroupSources("src/input-plugins")

add_dependency_external_projects(glm)
find_package(GLM REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${GLM_INCLUDE_DIRS})

if (WIN32)
  add_dependency_external_projects(OpenVR)
  find_package(OpenVR REQUIRED)
  target_include_directories(${TARGET_NAME} PRIVATE ${OPENVR_INCLUDE_DIRS})
  target_link_libraries(${TARGET_NAME} ${OPENVR_LIBRARIES})
endif()

#add_dependency_external_projects(Sixense)
#find_package(Sixense REQUIRED)
#target_include_directories(${TARGET_NAME} PRIVATE ${SIXENSE_INCLUDE_DIRS})
#target_link_libraries(${TARGET_NAME} ${SIXENSE_LIBRARIES})

# perform standard include and linking for found externals
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})

  if (${${EXTERNAL}_UPPERCASE}_REQUIRED)
    find_package(${EXTERNAL} REQUIRED)
  else ()
    find_package(${EXTERNAL})
  endif ()

  if (${${EXTERNAL}_UPPERCASE}_FOUND AND NOT DISABLE_${${EXTERNAL}_UPPERCASE})
    add_definitions(-DHAVE_${${EXTERNAL}_UPPERCASE})

    # include the library directories (ignoring warnings)
    if (NOT ${${EXTERNAL}_UPPERCASE}_INCLUDE_DIRS)
      set(${${EXTERNAL}_UPPERCASE}_INCLUDE_DIRS ${${${EXTERNAL}_UPPERCASE}_INCLUDE_DIR})
    endif ()

    include_directories(SYSTEM ${${${EXTERNAL}_UPPERCASE}_INCLUDE_DIRS})

    # perform the system include hack for OS X to ignore warnings
    if (APPLE)
      foreach(EXTERNAL_INCLUDE_DIR ${${${EXTERNAL}_UPPERCASE}_INCLUDE_DIRS})
        SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -isystem ${EXTERNAL_INCLUDE_DIR}")
      endforeach()
    endif ()

    if (NOT ${${EXTERNAL}_UPPERCASE}_LIBRARIES)
      set(${${EXTERNAL}_UPPERCASE}_LIBRARIES ${${${EXTERNAL}_UPPERCASE}_LIBRARY})
    endif ()

    if (NOT APPLE OR NOT ${${EXTERNAL}_UPPERCASE} MATCHES "SIXENSE")
      target_link_libraries(${TARGET_NAME} ${${${EXTERNAL}_UPPERCASE}_LIBRARIES})
    elseif (APPLE AND NOT INSTALLER_BUILD)
      add_definitions(-DSIXENSE_LIB_FILENAME=\"${${${EXTERNAL}_UPPERCASE}_LIBRARY_RELEASE}\")
    endif ()
  endif ()
endforeach()
59  libraries/input-plugins/src/input-plugins/InputDevice.cpp  Normal file
@@ -0,0 +1,59 @@
//
//  InputDevice.cpp
//  input-plugins/src/input-plugins
//
//  Created by Sam Gondelman on 7/15/2015
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InputDevice.h"

bool InputDevice::_lowVelocityFilter = false;

const float DEFAULT_HAND_RETICLE_MOVE_SPEED = 37.5f;
float InputDevice::reticleMoveSpeed = DEFAULT_HAND_RETICLE_MOVE_SPEED;

// Constants for getCursorPixelRangeMultiplier()
const float MIN_PIXEL_RANGE_MULT = 0.4f;
const float MAX_PIXEL_RANGE_MULT = 2.0f;
const float RANGE_MULT = (MAX_PIXEL_RANGE_MULT - MIN_PIXEL_RANGE_MULT) * 0.01f;

// Returns a multiplier to be applied to the cursor range for the controllers
float InputDevice::getCursorPixelRangeMult() {
    // scales (0,100) to (MINIMUM_PIXEL_RANGE_MULT, MAXIMUM_PIXEL_RANGE_MULT)
    return InputDevice::reticleMoveSpeed * RANGE_MULT + MIN_PIXEL_RANGE_MULT;
}

float InputDevice::getButton(int channel) const {
    if (!_buttonPressedMap.empty()) {
        if (_buttonPressedMap.find(channel) != _buttonPressedMap.end()) {
            return 1.0f;
        }
        else {
            return 0.0f;
        }
    }
    return 0.0f;
}

float InputDevice::getAxis(int channel) const {
    auto axis = _axisStateMap.find(channel);
    if (axis != _axisStateMap.end()) {
        return (*axis).second;
    }
    else {
        return 0.0f;
    }
}

UserInputMapper::PoseValue InputDevice::getPose(int channel) const {
    auto pose = _poseStateMap.find(channel);
    if (pose != _poseStateMap.end()) {
        return (*pose).second;
    }
    else {
        return UserInputMapper::PoseValue();
    }
}
68  libraries/input-plugins/src/input-plugins/InputDevice.h  Normal file
@@ -0,0 +1,68 @@
//
//  InputDevice.h
//  input-plugins/src/input-plugins
//
//  Created by Sam Gondelman on 7/15/2015
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once

#include "UserInputMapper.h"

// Event types for each controller
const unsigned int CONTROLLER_0_EVENT = 1500U;
const unsigned int CONTROLLER_1_EVENT = 1501U;

// NOTE: If something inherits from both InputDevice and InputPlugin, InputPlugin must go first.
// e.g. class Example : public InputPlugin, public InputDevice
// instead of class Example : public InputDevice, public InputPlugin
class InputDevice {
public:
    InputDevice(const QString& name) : _name(name) {}

    typedef std::unordered_set<int> ButtonPressedMap;
    typedef std::map<int, float> AxisStateMap;
    typedef std::map<int, UserInputMapper::PoseValue> PoseStateMap;

    // Get current state for each channel
    float getButton(int channel) const;
    float getAxis(int channel) const;
    UserInputMapper::PoseValue getPose(int channel) const;

    virtual void registerToUserInputMapper(UserInputMapper& mapper) = 0;
    virtual void assignDefaultInputMapping(UserInputMapper& mapper) = 0;

    // Update call MUST be called once per simulation loop
    // It takes care of updating the action states and deltas
    virtual void update(float deltaTime, bool jointsCaptured) = 0;

    virtual void focusOutEvent() = 0;

    int getDeviceID() { return _deviceID; }

    static float getCursorPixelRangeMult();
    static float getReticleMoveSpeed() { return reticleMoveSpeed; }
    static void setReticleMoveSpeed(float sixenseReticleMoveSpeed) { reticleMoveSpeed = sixenseReticleMoveSpeed; }

    static bool getLowVelocityFilter() { return _lowVelocityFilter; };

public slots:
    static void setLowVelocityFilter(bool newLowVelocityFilter) { _lowVelocityFilter = newLowVelocityFilter; };

protected:
    int _deviceID = 0;

    QString _name;

    ButtonPressedMap _buttonPressedMap;
    AxisStateMap _axisStateMap;
    PoseStateMap _poseStateMap;

    static bool _lowVelocityFilter;

private:
    static float reticleMoveSpeed;
};
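For orientation only (not part of the diff), here is a hedged sketch of how a concrete device built on this base class might fill the protected maps during update() so that getButton()/getAxis() report state; the GamepadDevice name and channel numbers are invented for the example.

// Hypothetical subclass; InputDevice and UserInputMapper come from the headers shown above.
#include "InputDevice.h"

class GamepadDevice : public InputDevice {
public:
    GamepadDevice() : InputDevice("Gamepad") {}

    // Registration is device-specific; left empty in this sketch.
    virtual void registerToUserInputMapper(UserInputMapper& mapper) override {}
    virtual void assignDefaultInputMapping(UserInputMapper& mapper) override {}
    virtual void focusOutEvent() override { _buttonPressedMap.clear(); }

    virtual void update(float deltaTime, bool jointsCaptured) override {
        _buttonPressedMap.clear();
        _axisStateMap.clear();
        // Pretend button channel 0 is held and axis channel 0 is half deflected.
        _buttonPressedMap.insert(0);
        _axisStateMap[0] = 0.5f;
    }
};
// After update() runs, getButton(0) returns 1.0f and getAxis(0) returns 0.5f.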
39  libraries/input-plugins/src/input-plugins/InputPlugin.cpp  Normal file
@@ -0,0 +1,39 @@
//
//  InputPlugin.cpp
//  input-plugins/src/input-plugins
//
//  Created by Sam Gondelman on 7/13/2015
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InputPlugin.h"

#include <plugins/PluginManager.h>

#include "KeyboardMouseDevice.h"
#include "SDL2Manager.h"
#include "SixenseManager.h"
#include "ViveControllerManager.h"

// TODO migrate to a DLL model where plugins are discovered and loaded at runtime by the PluginManager class
InputPluginList getInputPlugins() {
    InputPlugin* PLUGIN_POOL[] = {
        new KeyboardMouseDevice(),
        new SDL2Manager(),
        new SixenseManager(),
        new ViveControllerManager(),
        nullptr
    };

    InputPluginList result;
    for (int i = 0; PLUGIN_POOL[i]; ++i) {
        InputPlugin* plugin = PLUGIN_POOL[i];
        if (plugin->isSupported()) {
            plugin->init();
            result.push_back(InputPluginPointer(plugin));
        }
    }
    return result;
}
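A brief, hedged sketch of how the list returned by getInputPlugins() might be consumed each frame, based only on the interface shown in this commit; the loop itself is illustrative, and the InputPluginList/InputPluginPointer typedefs are assumed to come from the plugins library.

#include "InputPlugin.h"
#include <plugins/PluginManager.h>  // assumed home of the InputPluginList typedef

// Hypothetical per-frame pump over the plugins that reported themselves as supported.
void updateInputPlugins(const InputPluginList& plugins, float deltaTime, bool jointsCaptured) {
    for (const auto& plugin : plugins) {
        plugin->pluginUpdate(deltaTime, jointsCaptured);
    }
}

// On window focus loss, give every plugin a chance to clear its pressed state.
void focusOutInputPlugins(const InputPluginList& plugins) {
    for (const auto& plugin : plugins) {
        plugin->pluginFocusOutEvent();
    }
}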
23  libraries/input-plugins/src/input-plugins/InputPlugin.h  Normal file
@@ -0,0 +1,23 @@
//
//  InputPlugin.h
//  input-plugins/src/input-plugins
//
//  Created by Sam Gondelman on 7/13/2015
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once

#include <plugins/Plugin.h>

class InputPlugin : public Plugin {
public:
    virtual bool isJointController() const = 0;

    virtual void pluginFocusOutEvent() = 0;

    virtual void pluginUpdate(float deltaTime, bool jointsCaptured) = 0;
};
@ -1,6 +1,6 @@
|
|||
//
|
||||
// Joystick.cpp
|
||||
// interface/src/devices
|
||||
// input-plugins/src/input-plugins
|
||||
//
|
||||
// Created by Stephen Birarda on 2014-09-23.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
|
@ -13,8 +13,6 @@
|
|||
|
||||
#include <glm/glm.hpp>
|
||||
|
||||
#include "Application.h"
|
||||
|
||||
#include "Joystick.h"
|
||||
|
||||
const float CONTROLLER_THRESHOLD = 0.3f;
|
||||
|
@ -23,6 +21,7 @@ const float CONTROLLER_THRESHOLD = 0.3f;
|
|||
const float MAX_AXIS = 32768.0f;
|
||||
|
||||
Joystick::Joystick(SDL_JoystickID instanceId, const QString& name, SDL_GameController* sdlGameController) :
|
||||
InputDevice(name),
|
||||
_sdlGameController(sdlGameController),
|
||||
_sdlJoystick(SDL_GameControllerGetJoystick(_sdlGameController)),
|
||||
_instanceId(instanceId)
|
||||
|
@ -42,7 +41,7 @@ void Joystick::closeJoystick() {
|
|||
#endif
|
||||
}
|
||||
|
||||
void Joystick::update() {
|
||||
void Joystick::update(float deltaTime, bool jointsCaptured) {
|
||||
for (auto axisState : _axisStateMap) {
|
||||
if (fabsf(axisState.second) < CONTROLLER_THRESHOLD) {
|
||||
_axisStateMap[axisState.first] = 0.0f;
|
||||
|
@ -214,26 +213,6 @@ void Joystick::assignDefaultInputMapping(UserInputMapper& mapper) {
|
|||
#endif
|
||||
}
|
||||
|
||||
float Joystick::getButton(int channel) const {
|
||||
if (!_buttonPressedMap.empty()) {
|
||||
if (_buttonPressedMap.find(channel) != _buttonPressedMap.end()) {
|
||||
return 1.0f;
|
||||
} else {
|
||||
return 0.0f;
|
||||
}
|
||||
}
|
||||
return 0.0f;
|
||||
}
|
||||
|
||||
float Joystick::getAxis(int channel) const {
|
||||
auto axis = _axisStateMap.find(channel);
|
||||
if (axis != _axisStateMap.end()) {
|
||||
return (*axis).second;
|
||||
} else {
|
||||
return 0.0f;
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef HAVE_SDL2
|
||||
UserInputMapper::Input Joystick::makeInput(SDL_GameControllerButton button) {
|
||||
return UserInputMapper::Input(_deviceID, button, UserInputMapper::ChannelType::BUTTON);
|
|
@ -1,6 +1,6 @@
|
|||
//
|
||||
// Joystick.h
|
||||
// interface/src/devices
|
||||
// input-plugins/src/input-plugins
|
||||
//
|
||||
// Created by Stephen Birarda on 2014-09-23.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
|
@ -20,11 +20,10 @@
|
|||
#undef main
|
||||
#endif
|
||||
|
||||
#include "ui/UserInputMapper.h"
|
||||
#include "InputDevice.h"
|
||||
|
||||
class Joystick : public QObject {
|
||||
class Joystick : public QObject, public InputDevice {
|
||||
Q_OBJECT
|
||||
|
||||
Q_PROPERTY(QString name READ getName)
|
||||
|
||||
#ifdef HAVE_SDL2
|
||||
|
@ -44,27 +43,23 @@ public:
|
|||
RIGHT_SHOULDER,
|
||||
LEFT_SHOULDER,
|
||||
};
|
||||
|
||||
const QString& getName() const { return _name; }
|
||||
|
||||
// Device functions
|
||||
virtual void registerToUserInputMapper(UserInputMapper& mapper) override;
|
||||
virtual void assignDefaultInputMapping(UserInputMapper& mapper) override;
|
||||
virtual void update(float deltaTime, bool jointsCaptured) override;
|
||||
virtual void focusOutEvent() override;
|
||||
|
||||
Joystick();
|
||||
Joystick() : InputDevice("Joystick") {}
|
||||
~Joystick();
|
||||
|
||||
typedef std::unordered_set<int> ButtonPressedMap;
|
||||
typedef std::map<int, float> AxisStateMap;
|
||||
|
||||
float getButton(int channel) const;
|
||||
float getAxis(int channel) const;
|
||||
|
||||
#ifdef HAVE_SDL2
|
||||
UserInputMapper::Input makeInput(SDL_GameControllerButton button);
|
||||
#endif
|
||||
UserInputMapper::Input makeInput(Joystick::JoystickAxisChannel axis);
|
||||
|
||||
void registerToUserInputMapper(UserInputMapper& mapper);
|
||||
void assignDefaultInputMapping(UserInputMapper& mapper);
|
||||
|
||||
void update();
|
||||
void focusOutEvent();
|
||||
|
||||
#ifdef HAVE_SDL2
|
||||
Joystick(SDL_JoystickID instanceId, const QString& name, SDL_GameController* sdlGameController);
|
||||
#endif
|
||||
|
@ -76,27 +71,16 @@ public:
|
|||
void handleButtonEvent(const SDL_ControllerButtonEvent& event);
|
||||
#endif
|
||||
|
||||
const QString& getName() const { return _name; }
|
||||
#ifdef HAVE_SDL2
|
||||
int getInstanceId() const { return _instanceId; }
|
||||
#endif
|
||||
|
||||
int getDeviceID() { return _deviceID; }
|
||||
|
||||
private:
|
||||
#ifdef HAVE_SDL2
|
||||
SDL_GameController* _sdlGameController;
|
||||
SDL_Joystick* _sdlJoystick;
|
||||
SDL_JoystickID _instanceId;
|
||||
#endif
|
||||
|
||||
QString _name;
|
||||
|
||||
protected:
|
||||
int _deviceID = 0;
|
||||
|
||||
ButtonPressedMap _buttonPressedMap;
|
||||
AxisStateMap _axisStateMap;
|
||||
};
|
||||
|
||||
#endif // hifi_Joystick_h
|
|
@ -1,7 +1,6 @@
|
|||
|
||||
//
|
||||
// KeyboardMouseDevice.cpp
|
||||
// interface/src/devices
|
||||
// input-plugins/src/input-plugins
|
||||
//
|
||||
// Created by Sam Gateau on 4/27/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
|
@ -11,7 +10,9 @@
|
|||
//
|
||||
#include "KeyboardMouseDevice.h"
|
||||
|
||||
void KeyboardMouseDevice::update() {
|
||||
const QString KeyboardMouseDevice::NAME = "Keyboard/Mouse";
|
||||
|
||||
void KeyboardMouseDevice::update(float deltaTime, bool jointsCaptured) {
|
||||
_axisStateMap.clear();
|
||||
|
||||
// For touch event, we need to check that the last event is not too long ago
|
||||
|
@ -27,7 +28,7 @@ void KeyboardMouseDevice::update() {
|
|||
}
|
||||
}
|
||||
|
||||
void KeyboardMouseDevice::focusOutEvent(QFocusEvent* event) {
|
||||
void KeyboardMouseDevice::focusOutEvent() {
|
||||
_buttonPressedMap.clear();
|
||||
};
|
||||
|
||||
|
@ -159,7 +160,7 @@ void KeyboardMouseDevice::registerToUserInputMapper(UserInputMapper& mapper) {
|
|||
// Grab the current free device ID
|
||||
_deviceID = mapper.getFreeDeviceID();
|
||||
|
||||
auto proxy = std::make_shared<UserInputMapper::DeviceProxy>("Keyboard");
|
||||
auto proxy = std::make_shared<UserInputMapper::DeviceProxy>(_name);
|
||||
proxy->getButton = [this] (const UserInputMapper::Input& input, int timestamp) -> bool { return this->getButton(input.getChannel()); };
|
||||
proxy->getAxis = [this] (const UserInputMapper::Input& input, int timestamp) -> float { return this->getAxis(input.getChannel()); };
|
||||
proxy->getAvailabeInputs = [this] () -> QVector<UserInputMapper::InputPair> {
|
||||
|
@ -282,23 +283,3 @@ void KeyboardMouseDevice::assignDefaultInputMapping(UserInputMapper& mapper) {
|
|||
mapper.addInputChannel(UserInputMapper::ACTION2, makeInput(Qt::Key_T));
|
||||
}
|
||||
|
||||
float KeyboardMouseDevice::getButton(int channel) const {
|
||||
if (!_buttonPressedMap.empty()) {
|
||||
if (_buttonPressedMap.find(channel) != _buttonPressedMap.end()) {
|
||||
return 1.0f;
|
||||
} else {
|
||||
return 0.0f;
|
||||
}
|
||||
}
|
||||
return 0.0f;
|
||||
}
|
||||
|
||||
float KeyboardMouseDevice::getAxis(int channel) const {
|
||||
auto axis = _axisStateMap.find(channel);
|
||||
if (axis != _axisStateMap.end()) {
|
||||
return (*axis).second;
|
||||
} else {
|
||||
return 0.0f;
|
||||
}
|
||||
}
|
||||
|
48
interface/src/devices/KeyboardMouseDevice.h → libraries/input-plugins/src/input-plugins/KeyboardMouseDevice.h
Executable file → Normal file
48
interface/src/devices/KeyboardMouseDevice.h → libraries/input-plugins/src/input-plugins/KeyboardMouseDevice.h
Executable file → Normal file
|
@ -1,6 +1,6 @@
|
|||
//
|
||||
// KeyboardMouseDevice.h
|
||||
// interface/src/devices
|
||||
// input-plugins/src/input-plugins
|
||||
//
|
||||
// Created by Sam Gateau on 4/27/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
|
@ -14,11 +14,12 @@
|
|||
|
||||
#include <QTouchEvent>
|
||||
#include <chrono>
|
||||
#include "ui/UserInputMapper.h"
|
||||
#include "InputDevice.h"
|
||||
#include "InputPlugin.h"
|
||||
|
||||
class KeyboardMouseDevice {
|
||||
class KeyboardMouseDevice : public InputPlugin, public InputDevice {
|
||||
Q_OBJECT
|
||||
public:
|
||||
|
||||
enum KeyboardChannel {
|
||||
KEYBOARD_FIRST = 0,
|
||||
KEYBOARD_LAST = 255,
|
||||
|
@ -53,10 +54,24 @@ public:
|
|||
TOUCH_BUTTON_PRESS = TOUCH_AXIS_Y_NEG + 1,
|
||||
};
|
||||
|
||||
typedef std::unordered_set<int> ButtonPressedMap;
|
||||
typedef std::map<int, float> AxisStateMap; // 8 axes
|
||||
KeyboardMouseDevice() : InputDevice("Keyboard") {}
|
||||
|
||||
void focusOutEvent(QFocusEvent* event);
|
||||
// Plugin functions
|
||||
virtual bool isSupported() const override { return true; }
|
||||
virtual bool isJointController() const override { return false; }
|
||||
const QString& getName() const { return NAME; }
|
||||
|
||||
virtual void activate() override {};
|
||||
virtual void deactivate() override {};
|
||||
|
||||
virtual void pluginFocusOutEvent() override { focusOutEvent(); }
|
||||
virtual void pluginUpdate(float deltaTime, bool jointsCaptured) override { update(deltaTime, jointsCaptured); }
|
||||
|
||||
// Device functions
|
||||
virtual void registerToUserInputMapper(UserInputMapper& mapper) override;
|
||||
virtual void assignDefaultInputMapping(UserInputMapper& mapper) override;
|
||||
virtual void update(float deltaTime, bool jointsCaptured) override;
|
||||
virtual void focusOutEvent() override;
|
||||
|
||||
void keyPressEvent(QKeyEvent* event);
|
||||
void keyReleaseEvent(QKeyEvent* event);
|
||||
|
@ -70,32 +85,17 @@ public:
|
|||
void touchUpdateEvent(const QTouchEvent* event);
|
||||
|
||||
void wheelEvent(QWheelEvent* event);
|
||||
|
||||
// Get current state for each channels
|
||||
float getButton(int channel) const;
|
||||
float getAxis(int channel) const;
|
||||
|
||||
|
||||
// Let's make it easy for Qt because we assume we love Qt forever
|
||||
UserInputMapper::Input makeInput(Qt::Key code);
|
||||
UserInputMapper::Input makeInput(Qt::MouseButton code);
|
||||
UserInputMapper::Input makeInput(KeyboardMouseDevice::MouseAxisChannel axis);
|
||||
UserInputMapper::Input makeInput(KeyboardMouseDevice::TouchAxisChannel axis);
|
||||
UserInputMapper::Input makeInput(KeyboardMouseDevice::TouchButtonChannel button);
|
||||
|
||||
KeyboardMouseDevice() {}
|
||||
|
||||
void registerToUserInputMapper(UserInputMapper& mapper);
|
||||
void assignDefaultInputMapping(UserInputMapper& mapper);
|
||||
|
||||
// Update call MUST be called once per simulation loop
|
||||
// It takes care of updating the action states and deltas
|
||||
void update();
|
||||
static const QString NAME;
|
||||
|
||||
protected:
|
||||
ButtonPressedMap _buttonPressedMap;
|
||||
AxisStateMap _axisStateMap;
|
||||
|
||||
int _deviceID = 0;
|
||||
QPoint _lastCursor;
|
||||
glm::vec2 _lastTouch;
|
||||
bool _isTouching = false;
|
|
@ -1,6 +1,6 @@
|
|||
//
|
||||
// SDL2Manager.cpp
|
||||
// interface/src/devices
|
||||
// input-plugins/src/input-plugins
|
||||
//
|
||||
// Created by Sam Gondelman on 6/5/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
|
@ -15,10 +15,10 @@
|
|||
#include <HFBackEvent.h>
|
||||
#include <PerfStat.h>
|
||||
|
||||
#include "Application.h"
|
||||
|
||||
#include "SDL2Manager.h"
|
||||
|
||||
const QString SDL2Manager::NAME = "SDL2";
|
||||
|
||||
#ifdef HAVE_SDL2
|
||||
SDL_JoystickID SDL2Manager::getInstanceId(SDL_GameController* controller) {
|
||||
SDL_Joystick* joystick = SDL_GameControllerGetJoystick(controller);
|
||||
|
@ -32,48 +32,56 @@ _openJoysticks(),
|
|||
#endif
|
||||
_isInitialized(false)
|
||||
{
|
||||
}
|
||||
|
||||
void SDL2Manager::init() {
|
||||
#ifdef HAVE_SDL2
|
||||
bool initSuccess = (SDL_Init(SDL_INIT_GAMECONTROLLER) == 0);
|
||||
|
||||
|
||||
if (initSuccess) {
|
||||
int joystickCount = SDL_NumJoysticks();
|
||||
|
||||
|
||||
for (int i = 0; i < joystickCount; i++) {
|
||||
SDL_GameController* controller = SDL_GameControllerOpen(i);
|
||||
|
||||
|
||||
if (controller) {
|
||||
SDL_JoystickID id = getInstanceId(controller);
|
||||
if (!_openJoysticks.contains(id)) {
|
||||
Joystick* joystick = new Joystick(id, SDL_GameControllerName(controller), controller);
|
||||
_openJoysticks[id] = joystick;
|
||||
joystick->registerToUserInputMapper(*Application::getUserInputMapper());
|
||||
joystick->assignDefaultInputMapping(*Application::getUserInputMapper());
|
||||
auto userInputMapper = DependencyManager::get<UserInputMapper>();
|
||||
joystick->registerToUserInputMapper(*userInputMapper);
|
||||
joystick->assignDefaultInputMapping(*userInputMapper);
|
||||
emit joystickAdded(joystick);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
_isInitialized = true;
|
||||
} else {
|
||||
}
|
||||
else {
|
||||
qDebug() << "Error initializing SDL2 Manager";
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
SDL2Manager::~SDL2Manager() {
|
||||
void SDL2Manager::deinit() {
|
||||
#ifdef HAVE_SDL2
|
||||
qDeleteAll(_openJoysticks);
|
||||
|
||||
|
||||
SDL_Quit();
|
||||
#endif
|
||||
}
|
||||
|
||||
SDL2Manager* SDL2Manager::getInstance() {
|
||||
static SDL2Manager sharedInstance;
|
||||
return &sharedInstance;
|
||||
bool SDL2Manager::isSupported() const {
|
||||
#ifdef HAVE_SDL2
|
||||
return true;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
void SDL2Manager::focusOutEvent() {
|
||||
void SDL2Manager::pluginFocusOutEvent() {
|
||||
#ifdef HAVE_SDL2
|
||||
for (auto joystick : _openJoysticks) {
|
||||
joystick->focusOutEvent();
|
||||
|
@ -81,11 +89,12 @@ void SDL2Manager::focusOutEvent() {
|
|||
#endif
|
||||
}
|
||||
|
||||
void SDL2Manager::update() {
|
||||
void SDL2Manager::pluginUpdate(float deltaTime, bool jointsCaptured) {
|
||||
#ifdef HAVE_SDL2
|
||||
if (_isInitialized) {
|
||||
auto userInputMapper = DependencyManager::get<UserInputMapper>();
|
||||
for (auto joystick : _openJoysticks) {
|
||||
joystick->update();
|
||||
joystick->update(deltaTime, jointsCaptured);
|
||||
}
|
||||
|
||||
PerformanceTimer perfTimer("SDL2Manager::update");
|
||||
|
@ -111,18 +120,6 @@ void SDL2Manager::update() {
|
|||
HFBackEvent backEvent(backType);
|
||||
|
||||
qApp->sendEvent(qApp, &backEvent);
|
||||
} else if (event.cbutton.button == SDL_CONTROLLER_BUTTON_A) {
|
||||
// this will either start or stop a global action event
|
||||
QEvent::Type actionType = (event.type == SDL_CONTROLLERBUTTONDOWN)
|
||||
? HFActionEvent::startType()
|
||||
: HFActionEvent::endType();
|
||||
|
||||
// global action events fire in the center of the screen
|
||||
Application* app = Application::getInstance();
|
||||
PickRay pickRay = app->getCamera()->computePickRay(app->getTrueMouseX(),
|
||||
app->getTrueMouseY());
|
||||
HFActionEvent actionEvent(actionType, pickRay);
|
||||
qApp->sendEvent(qApp, &actionEvent);
|
||||
}
|
||||
|
||||
} else if (event.type == SDL_CONTROLLERDEVICEADDED) {
|
||||
|
@ -132,14 +129,14 @@ void SDL2Manager::update() {
|
|||
if (!_openJoysticks.contains(id)) {
|
||||
Joystick* joystick = new Joystick(id, SDL_GameControllerName(controller), controller);
|
||||
_openJoysticks[id] = joystick;
|
||||
joystick->registerToUserInputMapper(*Application::getUserInputMapper());
|
||||
joystick->assignDefaultInputMapping(*Application::getUserInputMapper());
|
||||
joystick->registerToUserInputMapper(*userInputMapper);
|
||||
joystick->assignDefaultInputMapping(*userInputMapper);
|
||||
emit joystickAdded(joystick);
|
||||
}
|
||||
} else if (event.type == SDL_CONTROLLERDEVICEREMOVED) {
|
||||
Joystick* joystick = _openJoysticks[event.cdevice.which];
|
||||
_openJoysticks.remove(event.cdevice.which);
|
||||
Application::getUserInputMapper()->removeDevice(joystick->getDeviceID());
|
||||
userInputMapper->removeDevice(joystick->getDeviceID());
|
||||
emit joystickRemoved(joystick);
|
||||
}
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
//
|
||||
// SDL2Manager.h
|
||||
// interface/src/devices
|
||||
// input-plugins/src/input-plugins
|
||||
//
|
||||
// Created by Sam Gondelman on 6/5/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
|
@ -16,22 +16,29 @@
|
|||
#include <SDL.h>
|
||||
#endif
|
||||
|
||||
#include "ui/UserInputMapper.h"
|
||||
#include "InputPlugin.h"
|
||||
#include "UserInputMapper.h"
|
||||
|
||||
#include "devices/Joystick.h"
|
||||
#include "Joystick.h"
|
||||
|
||||
class SDL2Manager : public QObject {
|
||||
class SDL2Manager : public InputPlugin {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
SDL2Manager();
|
||||
~SDL2Manager();
|
||||
|
||||
void focusOutEvent();
|
||||
// Plugin functions
|
||||
virtual bool isSupported() const override;
|
||||
virtual bool isJointController() const override { return false; }
|
||||
const QString& getName() const { return NAME; }
|
||||
|
||||
virtual void init() override;
|
||||
virtual void deinit() override;
|
||||
virtual void activate() override {};
|
||||
virtual void deactivate() override {};
|
||||
|
||||
void update();
|
||||
|
||||
static SDL2Manager* getInstance();
|
||||
virtual void pluginFocusOutEvent() override;
|
||||
virtual void pluginUpdate(float deltaTime, bool jointsCaptured) override;
|
||||
|
||||
signals:
|
||||
void joystickAdded(Joystick* joystick);
|
||||
|
@ -70,12 +77,11 @@ private:
|
|||
|
||||
int buttonPressed() const { return SDL_PRESSED; }
|
||||
int buttonRelease() const { return SDL_RELEASED; }
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_SDL2
|
||||
|
||||
QMap<SDL_JoystickID, Joystick*> _openJoysticks;
|
||||
#endif
|
||||
bool _isInitialized;
|
||||
static const QString NAME;
|
||||
};
|
||||
|
||||
#endif // hifi__SDL2Manager_h
|
|
@ -1,6 +1,6 @@
|
|||
//
|
||||
// SixenseManager.cpp
|
||||
// interface/src/devices
|
||||
// input-plugins/src/input-plugins
|
||||
//
|
||||
// Created by Andrzej Kapolka on 11/15/13.
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
|
@ -11,14 +11,20 @@
|
|||
|
||||
#include <vector>
|
||||
|
||||
#include <avatar/AvatarManager.h>
|
||||
#include <QCoreApplication>
|
||||
|
||||
#include <PerfStat.h>
|
||||
#include <NumericalConstants.h>
|
||||
|
||||
#include "Application.h"
|
||||
#include "NumericalConstants.h"
|
||||
#include <plugins/PluginContainer.h>
|
||||
#include "SixenseManager.h"
|
||||
#include "UserActivityLogger.h"
|
||||
#include "InterfaceLogging.h"
|
||||
|
||||
// TODO: This should not be here
|
||||
#include <QLoggingCategory>
|
||||
Q_DECLARE_LOGGING_CATEGORY(inputplugins)
|
||||
Q_LOGGING_CATEGORY(inputplugins, "hifi.inputplugins")
|
||||
|
||||
// These bits aren't used for buttons, so they can be used as masks:
|
||||
const unsigned int LEFT_MASK = 0;
|
||||
|
@ -47,41 +53,89 @@ typedef int (*SixenseTakeIntAndSixenseControllerData)(int, sixenseControllerData
|
|||
|
||||
#endif
|
||||
|
||||
const QString SixenseManager::NAME = "Sixense";
|
||||
|
||||
const QString MENU_PARENT = "Avatar";
|
||||
const QString MENU_NAME = "Sixense";
|
||||
const QString MENU_PATH = MENU_PARENT + ">" + MENU_NAME;
|
||||
const QString TOGGLE_SMOOTH = "Smooth Sixense Movement";
|
||||
|
||||
SixenseManager& SixenseManager::getInstance() {
|
||||
static SixenseManager sharedInstance;
|
||||
return sharedInstance;
|
||||
}
|
||||
|
||||
SixenseManager::SixenseManager() :
|
||||
InputDevice("Hydra"),
|
||||
#if defined(HAVE_SIXENSE) && defined(__APPLE__)
|
||||
_sixenseLibrary(NULL),
|
||||
#endif
|
||||
_isInitialized(false),
|
||||
_isEnabled(true),
|
||||
_hydrasConnected(false)
|
||||
{
|
||||
_triggerPressed[0] = false;
|
||||
_bumperPressed[0] = false;
|
||||
_oldX[0] = -1;
|
||||
_oldY[0] = -1;
|
||||
_triggerPressed[1] = false;
|
||||
_bumperPressed[1] = false;
|
||||
_oldX[1] = -1;
|
||||
_oldY[1] = -1;
|
||||
_prevPalms[0] = nullptr;
|
||||
_prevPalms[1] = nullptr;
|
||||
|
||||
}
|
||||
|
||||
SixenseManager::~SixenseManager() {
|
||||
#ifdef HAVE_SIXENSE_
|
||||
bool SixenseManager::isSupported() const {
|
||||
#ifdef HAVE_SIXENSE
|
||||
return true;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
void SixenseManager::activate() {
|
||||
#ifdef HAVE_SIXENSE
|
||||
_calibrationState = CALIBRATION_STATE_IDLE;
|
||||
// By default we assume the _neckBase (in orb frame) is as high above the orb
|
||||
// as the "torso" is below it.
|
||||
_neckBase = glm::vec3(NECK_X, -NECK_Y, NECK_Z);
|
||||
|
||||
CONTAINER->addMenu(MENU_PATH);
|
||||
CONTAINER->addMenuItem(MENU_PATH, TOGGLE_SMOOTH,
|
||||
[this] (bool clicked) { this->setFilter(clicked); },
|
||||
true, true);
|
||||
|
||||
if (_isInitialized) {
|
||||
#ifdef __APPLE__
|
||||
SixenseBaseFunction sixenseExit = (SixenseBaseFunction) _sixenseLibrary->resolve("sixenseExit");
|
||||
|
||||
if (!_sixenseLibrary) {
|
||||
|
||||
#ifdef SIXENSE_LIB_FILENAME
|
||||
_sixenseLibrary = new QLibrary(SIXENSE_LIB_FILENAME);
|
||||
#else
|
||||
const QString SIXENSE_LIBRARY_NAME = "libsixense_x64";
|
||||
QString frameworkSixenseLibrary = QCoreApplication::applicationDirPath() + "/../Frameworks/"
|
||||
+ SIXENSE_LIBRARY_NAME;
|
||||
|
||||
_sixenseLibrary = new QLibrary(frameworkSixenseLibrary);
|
||||
#endif
|
||||
}
|
||||
|
||||
if (_sixenseLibrary->load()){
|
||||
qCDebug(inputplugins) << "Loaded sixense library for hydra support -" << _sixenseLibrary->fileName();
|
||||
} else {
|
||||
qCDebug(inputplugins) << "Sixense library at" << _sixenseLibrary->fileName() << "failed to load."
|
||||
<< "Continuing without hydra support.";
|
||||
return;
|
||||
}
|
||||
|
||||
SixenseBaseFunction sixenseInit = (SixenseBaseFunction) _sixenseLibrary->resolve("sixenseInit");
|
||||
#endif
|
||||
sixenseInit();
|
||||
#endif
|
||||
}
|
||||
|
||||
void SixenseManager::deactivate() {
|
||||
#ifdef HAVE_SIXENSE
|
||||
CONTAINER->removeMenuItem(MENU_NAME, TOGGLE_SMOOTH);
|
||||
CONTAINER->removeMenu(MENU_PATH);
|
||||
|
||||
_poseStateMap.clear();
|
||||
|
||||
#ifdef __APPLE__
|
||||
SixenseBaseFunction sixenseExit = (SixenseBaseFunction)_sixenseLibrary->resolve("sixenseExit");
|
||||
#endif
|
||||
|
||||
sixenseExit();
|
||||
}
|
||||
sixenseExit();
|
||||
|
||||
#ifdef __APPLE__
|
||||
delete _sixenseLibrary;
|
||||
|
@ -90,245 +144,103 @@ SixenseManager::~SixenseManager() {
|
|||
#endif
|
||||
}
|
||||
|
||||
void SixenseManager::initialize() {
|
||||
#ifdef HAVE_SIXENSE
|
||||
|
||||
if (!_isInitialized) {
|
||||
_lowVelocityFilter = false;
|
||||
_controllersAtBase = true;
|
||||
_calibrationState = CALIBRATION_STATE_IDLE;
|
||||
// By default we assume the _neckBase (in orb frame) is as high above the orb
|
||||
// as the "torso" is below it.
|
||||
_neckBase = glm::vec3(NECK_X, -NECK_Y, NECK_Z);
|
||||
|
||||
#ifdef __APPLE__
|
||||
|
||||
if (!_sixenseLibrary) {
|
||||
|
||||
#ifdef SIXENSE_LIB_FILENAME
|
||||
_sixenseLibrary = new QLibrary(SIXENSE_LIB_FILENAME);
|
||||
#else
|
||||
const QString SIXENSE_LIBRARY_NAME = "libsixense_x64";
|
||||
QString frameworkSixenseLibrary = QCoreApplication::applicationDirPath() + "/../Frameworks/"
|
||||
+ SIXENSE_LIBRARY_NAME;
|
||||
|
||||
_sixenseLibrary = new QLibrary(frameworkSixenseLibrary);
|
||||
#endif
|
||||
}
|
||||
|
||||
if (_sixenseLibrary->load()){
|
||||
qCDebug(interfaceapp) << "Loaded sixense library for hydra support -" << _sixenseLibrary->fileName();
|
||||
} else {
|
||||
qCDebug(interfaceapp) << "Sixense library at" << _sixenseLibrary->fileName() << "failed to load."
|
||||
<< "Continuing without hydra support.";
|
||||
return;
|
||||
}
|
||||
|
||||
SixenseBaseFunction sixenseInit = (SixenseBaseFunction) _sixenseLibrary->resolve("sixenseInit");
|
||||
#endif
|
||||
sixenseInit();
|
||||
|
||||
_isInitialized = true;
|
||||
}
|
||||
|
||||
#endif
|
||||
}
|
||||
|
||||
void SixenseManager::setFilter(bool filter) {
|
||||
#ifdef HAVE_SIXENSE
|
||||
|
||||
if (_isInitialized) {
|
||||
#ifdef __APPLE__
|
||||
SixenseTakeIntFunction sixenseSetFilterEnabled = (SixenseTakeIntFunction) _sixenseLibrary->resolve("sixenseSetFilterEnabled");
|
||||
SixenseTakeIntFunction sixenseSetFilterEnabled = (SixenseTakeIntFunction) _sixenseLibrary->resolve("sixenseSetFilterEnabled");
|
||||
#endif
|
||||
|
||||
if (filter) {
|
||||
sixenseSetFilterEnabled(1);
|
||||
} else {
|
||||
sixenseSetFilterEnabled(0);
|
||||
}
|
||||
}
|
||||
|
||||
int newFilter = filter ? 1 : 0;
|
||||
sixenseSetFilterEnabled(newFilter);
|
||||
#endif
|
||||
}
|
||||
|
||||
void SixenseManager::update(float deltaTime) {
|
||||
void SixenseManager::update(float deltaTime, bool jointsCaptured) {
|
||||
#ifdef HAVE_SIXENSE
|
||||
Hand* hand = DependencyManager::get<AvatarManager>()->getMyAvatar()->getHand();
|
||||
if (_isInitialized && _isEnabled) {
|
||||
_buttonPressedMap.clear();
|
||||
_buttonPressedMap.clear();
|
||||
|
||||
#ifdef __APPLE__
|
||||
SixenseBaseFunction sixenseGetNumActiveControllers =
|
||||
(SixenseBaseFunction) _sixenseLibrary->resolve("sixenseGetNumActiveControllers");
|
||||
SixenseBaseFunction sixenseGetNumActiveControllers =
|
||||
(SixenseBaseFunction) _sixenseLibrary->resolve("sixenseGetNumActiveControllers");
|
||||
#endif
|
||||
|
||||
if (sixenseGetNumActiveControllers() == 0) {
|
||||
_hydrasConnected = false;
|
||||
if (_deviceID != 0) {
|
||||
Application::getUserInputMapper()->removeDevice(_deviceID);
|
||||
_deviceID = 0;
|
||||
if (_prevPalms[0]) {
|
||||
_prevPalms[0]->setActive(false);
|
||||
}
|
||||
if (_prevPalms[1]) {
|
||||
_prevPalms[1]->setActive(false);
|
||||
}
|
||||
}
|
||||
return;
|
||||
auto userInputMapper = DependencyManager::get<UserInputMapper>();
|
||||
|
||||
if (sixenseGetNumActiveControllers() == 0) {
|
||||
_hydrasConnected = false;
|
||||
if (_deviceID != 0) {
|
||||
userInputMapper->removeDevice(_deviceID);
|
||||
_deviceID = 0;
|
||||
_poseStateMap.clear();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
PerformanceTimer perfTimer("sixense");
|
||||
if (!_hydrasConnected) {
|
||||
_hydrasConnected = true;
|
||||
registerToUserInputMapper(*Application::getUserInputMapper());
|
||||
getInstance().assignDefaultInputMapping(*Application::getUserInputMapper());
|
||||
UserActivityLogger::getInstance().connectedDevice("spatial_controller", "hydra");
|
||||
PerformanceTimer perfTimer("sixense");
|
||||
if (!_hydrasConnected) {
|
||||
_hydrasConnected = true;
|
||||
registerToUserInputMapper(*userInputMapper);
|
||||
assignDefaultInputMapping(*userInputMapper);
|
||||
UserActivityLogger::getInstance().connectedDevice("spatial_controller", "hydra");
|
||||
}
|
||||
|
||||
#ifdef __APPLE__
|
||||
SixenseBaseFunction sixenseGetMaxControllers =
|
||||
(SixenseBaseFunction) _sixenseLibrary->resolve("sixenseGetMaxControllers");
|
||||
#endif
|
||||
|
||||
int maxControllers = sixenseGetMaxControllers();
|
||||
|
||||
// we only support two controllers
|
||||
sixenseControllerData controllers[2];
|
||||
|
||||
#ifdef __APPLE__
|
||||
SixenseTakeIntFunction sixenseIsControllerEnabled =
|
||||
(SixenseTakeIntFunction) _sixenseLibrary->resolve("sixenseIsControllerEnabled");
|
||||
|
||||
SixenseTakeIntAndSixenseControllerData sixenseGetNewestData =
|
||||
(SixenseTakeIntAndSixenseControllerData) _sixenseLibrary->resolve("sixenseGetNewestData");
|
||||
#endif
|
||||
|
||||
int numActiveControllers = 0;
|
||||
for (int i = 0; i < maxControllers && numActiveControllers < 2; i++) {
|
||||
if (!sixenseIsControllerEnabled(i)) {
|
||||
continue;
|
||||
}
|
||||
sixenseControllerData* data = controllers + numActiveControllers;
|
||||
++numActiveControllers;
|
||||
sixenseGetNewestData(i, data);
|
||||
|
||||
#ifdef __APPLE__
|
||||
SixenseBaseFunction sixenseGetMaxControllers =
|
||||
(SixenseBaseFunction) _sixenseLibrary->resolve("sixenseGetMaxControllers");
|
||||
#endif
|
||||
|
||||
int maxControllers = sixenseGetMaxControllers();
|
||||
|
||||
// we only support two controllers
|
||||
sixenseControllerData controllers[2];
|
||||
|
||||
#ifdef __APPLE__
|
||||
SixenseTakeIntFunction sixenseIsControllerEnabled =
|
||||
(SixenseTakeIntFunction) _sixenseLibrary->resolve("sixenseIsControllerEnabled");
|
||||
|
||||
SixenseTakeIntAndSixenseControllerData sixenseGetNewestData =
|
||||
(SixenseTakeIntAndSixenseControllerData) _sixenseLibrary->resolve("sixenseGetNewestData");
|
||||
#endif
|
||||
int numControllersAtBase = 0;
|
||||
int numActiveControllers = 0;
|
||||
for (int i = 0; i < maxControllers && numActiveControllers < 2; i++) {
|
||||
if (!sixenseIsControllerEnabled(i)) {
|
||||
continue;
|
||||
}
|
||||
sixenseControllerData* data = controllers + numActiveControllers;
|
||||
++numActiveControllers;
|
||||
sixenseGetNewestData(i, data);
|
||||
|
||||
// Set palm position and normal based on Hydra position/orientation
|
||||
|
||||
// Either find a palm matching the sixense controller, or make a new one
|
||||
PalmData* palm;
|
||||
bool foundHand = false;
|
||||
for (size_t j = 0; j < hand->getNumPalms(); j++) {
|
||||
if (hand->getPalms()[j].getSixenseID() == data->controller_index) {
|
||||
palm = &(hand->getPalms()[j]);
|
||||
_prevPalms[numActiveControllers - 1] = palm;
|
||||
foundHand = true;
|
||||
}
|
||||
}
|
||||
if (!foundHand) {
|
||||
PalmData newPalm(hand);
|
||||
hand->getPalms().push_back(newPalm);
|
||||
palm = &(hand->getPalms()[hand->getNumPalms() - 1]);
|
||||
palm->setSixenseID(data->controller_index);
|
||||
_prevPalms[numActiveControllers - 1] = palm;
|
||||
qCDebug(interfaceapp, "Found new Sixense controller, ID %i", data->controller_index);
|
||||
}
|
||||
|
||||
// Disable the hands (and return to default pose) if both controllers are at base station
|
||||
if (foundHand) {
|
||||
palm->setActive(!_controllersAtBase);
|
||||
} else {
|
||||
palm->setActive(false); // if this isn't a Sixsense ID palm, always make it inactive
|
||||
}
|
||||
|
||||
// Read controller buttons and joystick into the hand
|
||||
palm->setControllerButtons(data->buttons);
|
||||
palm->setTrigger(data->trigger);
|
||||
palm->setJoystick(data->joystick_x, data->joystick_y);
|
||||
|
||||
// NOTE: Sixense API returns pos data in millimeters but we IMMEDIATELY convert to meters.
|
||||
glm::vec3 position(data->pos[0], data->pos[1], data->pos[2]);
|
||||
position *= METERS_PER_MILLIMETER;
|
||||
|
||||
// Check to see if this hand/controller is on the base
|
||||
const float CONTROLLER_AT_BASE_DISTANCE = 0.075f;
|
||||
if (glm::length(position) < CONTROLLER_AT_BASE_DISTANCE) {
|
||||
numControllersAtBase++;
|
||||
palm->setActive(false);
|
||||
} else {
|
||||
handleButtonEvent(data->buttons, numActiveControllers - 1);
|
||||
handleAxisEvent(data->joystick_x, data->joystick_y, data->trigger, numActiveControllers - 1);
|
||||
|
||||
// Emulate the mouse so we can use scripts
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput) && !_controllersAtBase) {
|
||||
emulateMouse(palm, numActiveControllers - 1);
|
||||
}
|
||||
}
|
||||
|
||||
// Transform the measured position into body frame.
|
||||
glm::vec3 neck = _neckBase;
|
||||
// Zeroing y component of the "neck" effectively raises the measured position a little bit.
|
||||
neck.y = 0.0f;
|
||||
position = _orbRotation * (position - neck);
|
||||
// NOTE: Sixense API returns pos data in millimeters but we IMMEDIATELY convert to meters.
|
||||
glm::vec3 position(data->pos[0], data->pos[1], data->pos[2]);
|
||||
position *= METERS_PER_MILLIMETER;
|
||||
|
||||
// Check to see if this hand/controller is on the base
|
||||
const float CONTROLLER_AT_BASE_DISTANCE = 0.075f;
|
||||
if (glm::length(position) >= CONTROLLER_AT_BASE_DISTANCE) {
|
||||
handleButtonEvent(data->buttons, numActiveControllers - 1);
|
||||
handleAxisEvent(data->joystick_x, data->joystick_y, data->trigger, numActiveControllers - 1);
|
||||
|
||||
// Rotation of Palm
|
||||
glm::quat rotation(data->rot_quat[3], -data->rot_quat[0], data->rot_quat[1], -data->rot_quat[2]);
|
||||
rotation = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f)) * _orbRotation * rotation;
|
||||
|
||||
// Compute current velocity from position change
|
||||
glm::vec3 rawVelocity;
|
||||
if (deltaTime > 0.0f) {
|
||||
rawVelocity = (position - palm->getRawPosition()) / deltaTime;
|
||||
if (!jointsCaptured) {
|
||||
handlePoseEvent(position, rotation, numActiveControllers - 1);
|
||||
} else {
|
||||
rawVelocity = glm::vec3(0.0f);
|
||||
_poseStateMap.clear();
|
||||
}
|
||||
palm->setRawVelocity(rawVelocity); // meters/sec
|
||||
|
||||
// adjustment for hydra controllers fit into hands
|
||||
float sign = (i == 0) ? -1.0f : 1.0f;
|
||||
rotation *= glm::angleAxis(sign * PI/4.0f, glm::vec3(0.0f, 0.0f, 1.0f));
|
||||
|
||||
// Angular Velocity of Palm
|
||||
glm::quat deltaRotation = rotation * glm::inverse(palm->getRawRotation());
|
||||
glm::vec3 angularVelocity(0.0f);
|
||||
float rotationAngle = glm::angle(deltaRotation);
|
||||
if ((rotationAngle > EPSILON) && (deltaTime > 0.0f)) {
|
||||
angularVelocity = glm::normalize(glm::axis(deltaRotation));
|
||||
angularVelocity *= (rotationAngle / deltaTime);
|
||||
palm->setRawAngularVelocity(angularVelocity);
|
||||
} else {
|
||||
palm->setRawAngularVelocity(glm::vec3(0.0f));
|
||||
}
|
||||
|
||||
if (_lowVelocityFilter) {
|
||||
// Use a velocity sensitive filter to damp small motions and preserve large ones with
|
||||
// no latency.
|
||||
float velocityFilter = glm::clamp(1.0f - glm::length(rawVelocity), 0.0f, 1.0f);
|
||||
position = palm->getRawPosition() * velocityFilter + position * (1.0f - velocityFilter);
|
||||
rotation = safeMix(palm->getRawRotation(), rotation, 1.0f - velocityFilter);
|
||||
palm->setRawPosition(position);
|
||||
palm->setRawRotation(rotation);
|
||||
} else {
|
||||
palm->setRawPosition(position);
|
||||
palm->setRawRotation(rotation);
|
||||
}
|
||||
|
||||
// Store the one fingertip in the palm structure so we can track velocity
|
||||
const float FINGER_LENGTH = 0.3f; // meters
|
||||
const glm::vec3 FINGER_VECTOR(0.0f, 0.0f, FINGER_LENGTH);
|
||||
const glm::vec3 newTipPosition = position + rotation * FINGER_VECTOR;
|
||||
glm::vec3 oldTipPosition = palm->getTipRawPosition();
|
||||
if (deltaTime > 0.0f) {
|
||||
palm->setTipVelocity((newTipPosition - oldTipPosition) / deltaTime);
|
||||
} else {
|
||||
palm->setTipVelocity(glm::vec3(0.0f));
|
||||
}
|
||||
palm->setTipPosition(newTipPosition);
|
||||
} else {
|
||||
_poseStateMap[(numActiveControllers - 1) == 0 ? LEFT_HAND : RIGHT_HAND] = UserInputMapper::PoseValue();
|
||||
}
|
||||
|
||||
// // Read controller buttons and joystick into the hand
|
||||
// palm->setControllerButtons(data->buttons);
|
||||
// palm->setTrigger(data->trigger);
|
||||
// palm->setJoystick(data->joystick_x, data->joystick_y);
|
||||
}
|
||||
|
||||
if (numActiveControllers == 2) {
|
||||
updateCalibration(controllers);
|
||||
}
|
||||
_controllersAtBase = (numControllersAtBase == 2);
|
||||
if (numActiveControllers == 2) {
|
||||
updateCalibration(controllers);
|
||||
}
|
||||
|
||||
for (auto axisState : _axisStateMap) {
|
||||
|
@ -339,26 +251,6 @@ void SixenseManager::update(float deltaTime) {
|
|||
#endif // HAVE_SIXENSE
|
||||
}
|
||||
|
||||
//Constants for getCursorPixelRangeMultiplier()
|
||||
const float MIN_PIXEL_RANGE_MULT = 0.4f;
|
||||
const float MAX_PIXEL_RANGE_MULT = 2.0f;
|
||||
const float RANGE_MULT = (MAX_PIXEL_RANGE_MULT - MIN_PIXEL_RANGE_MULT) * 0.01f;
|
||||
|
||||
//Returns a multiplier to be applied to the cursor range for the controllers
|
||||
float SixenseManager::getCursorPixelRangeMult() const {
|
||||
//scales (0,100) to (MINIMUM_PIXEL_RANGE_MULT, MAXIMUM_PIXEL_RANGE_MULT)
|
||||
return _reticleMoveSpeed * RANGE_MULT + MIN_PIXEL_RANGE_MULT;
|
||||
}
|
||||
|
||||
void SixenseManager::toggleSixense(bool shouldEnable) {
|
||||
if (shouldEnable && !isInitialized()) {
|
||||
initialize();
|
||||
setFilter(Menu::getInstance()->isOptionChecked(MenuOption::FilterSixense));
|
||||
setLowVelocityFilter(Menu::getInstance()->isOptionChecked(MenuOption::LowVelocityFilter));
|
||||
}
|
||||
setIsEnabled(shouldEnable);
|
||||
}
|
||||
|
||||
#ifdef HAVE_SIXENSE
|
||||
|
||||
// the calibration sequence is:
|
||||
|
@ -372,7 +264,8 @@ const float MINIMUM_ARM_REACH = 0.3f; // meters
|
|||
const float MAXIMUM_NOISE_LEVEL = 0.05f; // meters
|
||||
const quint64 LOCK_DURATION = USECS_PER_SECOND / 4; // time for lock to be acquired
|
||||
|
||||
void SixenseManager::updateCalibration(const sixenseControllerData* controllers) {
|
||||
void SixenseManager::updateCalibration(void* controllersX) {
|
||||
auto controllers = reinterpret_cast<sixenseControllerData*>(controllersX);
|
||||
const sixenseControllerData* dataLeft = controllers;
|
||||
const sixenseControllerData* dataRight = controllers + 1;
|
||||
|
||||
|
@ -397,11 +290,11 @@ void SixenseManager::updateCalibration(const sixenseControllerData* controllers)
|
|||
glm::vec3 zAxis = glm::normalize(glm::cross(xAxis, yAxis));
|
||||
xAxis = glm::normalize(glm::cross(yAxis, zAxis));
|
||||
_orbRotation = glm::inverse(glm::quat_cast(glm::mat3(xAxis, yAxis, zAxis)));
|
||||
qCDebug(interfaceapp, "succeess: sixense calibration");
|
||||
qCDebug(inputplugins, "succeess: sixense calibration");
|
||||
}
|
||||
break;
|
||||
default:
|
||||
qCDebug(interfaceapp, "failed: sixense calibration");
|
||||
qCDebug(inputplugins, "failed: sixense calibration");
|
||||
break;
|
||||
}
|
||||
|
||||
|
@ -420,7 +313,7 @@ void SixenseManager::updateCalibration(const sixenseControllerData* controllers)
|
|||
if (_calibrationState == CALIBRATION_STATE_IDLE) {
|
||||
float reach = glm::distance(positionLeft, positionRight);
|
||||
if (reach > 2.0f * MINIMUM_ARM_REACH) {
|
||||
qCDebug(interfaceapp, "started: sixense calibration");
|
||||
qCDebug(inputplugins, "started: sixense calibration");
|
||||
_averageLeft = positionLeft;
|
||||
_averageRight = positionRight;
|
||||
_reachLeft = _averageLeft;
|
||||
|
@ -453,7 +346,7 @@ void SixenseManager::updateCalibration(const sixenseControllerData* controllers)
|
|||
_lastDistance = 0.0f;
|
||||
_reachUp = 0.5f * (_reachLeft + _reachRight);
|
||||
_calibrationState = CALIBRATION_STATE_Y;
|
||||
qCDebug(interfaceapp, "success: sixense calibration: left");
|
||||
qCDebug(inputplugins, "success: sixense calibration: left");
|
||||
}
|
||||
}
|
||||
else if (_calibrationState == CALIBRATION_STATE_Y) {
|
||||
|
@ -472,7 +365,7 @@ void SixenseManager::updateCalibration(const sixenseControllerData* controllers)
|
|||
_lastDistance = 0.0f;
|
||||
_lockExpiry = now + LOCK_DURATION;
|
||||
_calibrationState = CALIBRATION_STATE_Z;
|
||||
qCDebug(interfaceapp, "success: sixense calibration: up");
|
||||
qCDebug(inputplugins, "success: sixense calibration: up");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -494,7 +387,7 @@ void SixenseManager::updateCalibration(const sixenseControllerData* controllers)
|
|||
if (fabsf(_lastDistance) > 0.05f * MINIMUM_ARM_REACH) {
|
||||
// lock has expired so clamp the data and move on
|
||||
_calibrationState = CALIBRATION_STATE_COMPLETE;
|
||||
qCDebug(interfaceapp, "success: sixense calibration: forward");
|
||||
qCDebug(inputplugins, "success: sixense calibration: forward");
|
||||
// TODO: it is theoretically possible to detect that the controllers have been
|
||||
// accidentally switched (left hand is holding right controller) and to swap the order.
|
||||
}
|
||||
|
@ -502,124 +395,6 @@ void SixenseManager::updateCalibration(const sixenseControllerData* controllers)
|
|||
}
|
||||
}

//Injecting mouse movements and clicks
void SixenseManager::emulateMouse(PalmData* palm, int index) {
MyAvatar* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
QPoint pos;

Qt::MouseButton bumperButton;
Qt::MouseButton triggerButton;

unsigned int deviceID = index == 0 ? CONTROLLER_0_EVENT : CONTROLLER_1_EVENT;

if (_invertButtons) {
bumperButton = Qt::LeftButton;
triggerButton = Qt::RightButton;
} else {
bumperButton = Qt::RightButton;
triggerButton = Qt::LeftButton;
}

if (Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode)) {
pos = qApp->getApplicationCompositor().getPalmClickLocation(palm);
} else {
// Get directon relative to avatar orientation
glm::vec3 direction = glm::inverse(avatar->getOrientation()) * palm->getFingerDirection();

// Get the angles, scaled between (-0.5,0.5)
float xAngle = (atan2(direction.z, direction.x) + PI_OVER_TWO);
float yAngle = 0.5f - ((atan2f(direction.z, direction.y) + (float)PI_OVER_TWO));
auto canvasSize = qApp->getCanvasSize();
// Get the pixel range over which the xAngle and yAngle are scaled
float cursorRange = canvasSize.x * getCursorPixelRangeMult();

pos.setX(canvasSize.x / 2.0f + cursorRange * xAngle);
pos.setY(canvasSize.y / 2.0f + cursorRange * yAngle);

}

//If we are off screen then we should stop processing, and if a trigger or bumper is pressed,
//we should unpress them.
if (pos.x() == INT_MAX) {
if (_bumperPressed[index]) {
QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, bumperButton, bumperButton, 0);

qApp->mouseReleaseEvent(&mouseEvent, deviceID);

_bumperPressed[index] = false;
}
if (_triggerPressed[index]) {
QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, triggerButton, triggerButton, 0);

qApp->mouseReleaseEvent(&mouseEvent, deviceID);

_triggerPressed[index] = false;
}
return;
}

//If position has changed, emit a mouse move to the application
if (pos.x() != _oldX[index] || pos.y() != _oldY[index]) {
QMouseEvent mouseEvent(QEvent::MouseMove, pos, Qt::NoButton, Qt::NoButton, 0);

//Only send the mouse event if the opposite left button isnt held down.
if (triggerButton == Qt::LeftButton) {
if (!_triggerPressed[(int)(!index)]) {
qApp->mouseMoveEvent(&mouseEvent, deviceID);
}
} else {
if (!_bumperPressed[(int)(!index)]) {
qApp->mouseMoveEvent(&mouseEvent, deviceID);
}
}
}
_oldX[index] = pos.x();
_oldY[index] = pos.y();

//We need separate coordinates for clicks, since we need to check if
//a magnification window was clicked on
int clickX = pos.x();
int clickY = pos.y();
//Set pos to the new click location, which may be the same if no magnification window is open
pos.setX(clickX);
pos.setY(clickY);

//Check for bumper press ( Right Click )
if (palm->getControllerButtons() & BUTTON_FWD) {
if (!_bumperPressed[index]) {
_bumperPressed[index] = true;

QMouseEvent mouseEvent(QEvent::MouseButtonPress, pos, bumperButton, bumperButton, 0);

qApp->mousePressEvent(&mouseEvent, deviceID);
}
} else if (_bumperPressed[index]) {
QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, bumperButton, bumperButton, 0);

qApp->mouseReleaseEvent(&mouseEvent, deviceID);

_bumperPressed[index] = false;
}

//Check for trigger press ( Left Click )
if (palm->getTrigger() == 1.0f) {
if (!_triggerPressed[index]) {
_triggerPressed[index] = true;

QMouseEvent mouseEvent(QEvent::MouseButtonPress, pos, triggerButton, triggerButton, 0);

qApp->mousePressEvent(&mouseEvent, deviceID);
}
} else if (_triggerPressed[index]) {
QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, triggerButton, triggerButton, 0);

qApp->mouseReleaseEvent(&mouseEvent, deviceID);

_triggerPressed[index] = false;
}
}

#endif // HAVE_SIXENSE
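
The emulateMouse code above (removed by this change) turns a palm's pointing direction into yaw/pitch-style angles and then into a cursor position centred on the canvas. A compact, hedged sketch of that mapping, with made-up names and an illustrative scaling parameter in place of getCursorPixelRangeMult():

    #include <cmath>
    #include <glm/glm.hpp>

    struct CursorPos { float x; float y; };

    // direction is the palm's pointing direction in the avatar's frame;
    // cursorRangeMult plays the role of getCursorPixelRangeMult() above.
    CursorPos directionToCursor(const glm::vec3& direction,
                                float canvasWidth, float canvasHeight,
                                float cursorRangeMult) {
        const float PI_OVER_TWO = 1.5707963f;
        // Offset the angles the same way the removed code does.
        float xAngle = std::atan2(direction.z, direction.x) + PI_OVER_TWO;
        float yAngle = 0.5f - (std::atan2(direction.z, direction.y) + PI_OVER_TWO);
        float cursorRange = canvasWidth * cursorRangeMult;
        return { canvasWidth / 2.0f + cursorRange * xAngle,
                 canvasHeight / 2.0f + cursorRange * yAngle };
    }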

void SixenseManager::focusOutEvent() {

@@ -659,13 +434,30 @@ void SixenseManager::handleButtonEvent(unsigned int buttons, int index) {
}
}

void SixenseManager::handlePoseEvent(glm::vec3 position, glm::quat rotation, int index) {
#ifdef HAVE_SIXENSE
// Transform the measured position into body frame.
glm::vec3 neck = _neckBase;
// Set y component of the "neck" to raise the measured position a little bit.
neck.y = 0.5f;
position = _orbRotation * (position - neck);

// adjustment for hydra controllers fit into hands
float sign = (index == 0) ? -1.0f : 1.0f;
rotation *= glm::angleAxis(sign * PI/4.0f, glm::vec3(0.0f, 0.0f, 1.0f));

_poseStateMap[makeInput(JointChannel(index)).getChannel()] = UserInputMapper::PoseValue(position, rotation);
#endif // HAVE_SIXENSE
}
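
handlePoseEvent above applies the calibration result: it offsets the measured position by an estimated neck point, rotates it into the body frame, and rolls each controller 45 degrees so the pose matches how a Hydra sits in the hand. A self-contained sketch of the same transform; orbRotation and neckBase stand in for the calibrated members, and only the constants mirror the diff:

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    struct Pose { glm::vec3 position; glm::quat rotation; };

    Pose toBodyFrame(glm::vec3 position, glm::quat rotation, int index,
                     const glm::quat& orbRotation, glm::vec3 neckBase) {
        const float PI = 3.14159265f;
        glm::vec3 neck = neckBase;
        neck.y = 0.5f;                                  // raise the reference point a little
        position = orbRotation * (position - neck);     // into the calibrated body frame

        // Roll the left (-) and right (+) controller by 45 degrees around Z.
        float sign = (index == 0) ? -1.0f : 1.0f;
        rotation = rotation * glm::angleAxis(sign * PI / 4.0f, glm::vec3(0.0f, 0.0f, 1.0f));
        return { position, rotation };
    }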

void SixenseManager::registerToUserInputMapper(UserInputMapper& mapper) {
// Grab the current free device ID
_deviceID = mapper.getFreeDeviceID();

auto proxy = std::make_shared<UserInputMapper::DeviceProxy>("Hydra");
auto proxy = std::make_shared<UserInputMapper::DeviceProxy>(_name);
proxy->getButton = [this] (const UserInputMapper::Input& input, int timestamp) -> bool { return this->getButton(input.getChannel()); };
proxy->getAxis = [this] (const UserInputMapper::Input& input, int timestamp) -> float { return this->getAxis(input.getChannel()); };
proxy->getPose = [this](const UserInputMapper::Input& input, int timestamp) -> UserInputMapper::PoseValue { return this->getPose(input.getChannel()); };
proxy->getAvailabeInputs = [this] () -> QVector<UserInputMapper::InputPair> {
QVector<UserInputMapper::InputPair> availableInputs;
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_0, 0), "Left Start"));

@@ -739,29 +531,15 @@ void SixenseManager::assignDefaultInputMapping(UserInputMapper& mapper)

mapper.addInputChannel(UserInputMapper::ACTION1, makeInput(BUTTON_4, 0));
mapper.addInputChannel(UserInputMapper::ACTION2, makeInput(BUTTON_4, 1));

mapper.addInputChannel(UserInputMapper::LEFT_HAND, makeInput(LEFT_HAND));
mapper.addInputChannel(UserInputMapper::RIGHT_HAND, makeInput(RIGHT_HAND));

mapper.addInputChannel(UserInputMapper::LEFT_HAND_CLICK, makeInput(BUTTON_FWD, 0));
mapper.addInputChannel(UserInputMapper::RIGHT_HAND_CLICK, makeInput(BUTTON_FWD, 1));

}

float SixenseManager::getButton(int channel) const {
if (!_buttonPressedMap.empty()) {
if (_buttonPressedMap.find(channel) != _buttonPressedMap.end()) {
return 1.0f;
} else {
return 0.0f;
}
}
return 0.0f;
}

float SixenseManager::getAxis(int channel) const {
auto axis = _axisStateMap.find(channel);
if (axis != _axisStateMap.end()) {
return (*axis).second;
} else {
return 0.0f;
}
}

UserInputMapper::Input SixenseManager::makeInput(unsigned int button, int index) {
return UserInputMapper::Input(_deviceID, button | (index == 0 ? LEFT_MASK : RIGHT_MASK), UserInputMapper::ChannelType::BUTTON);
}

@@ -769,3 +547,7 @@ UserInputMapper::Input SixenseManager::makeInput(unsigned int button, int index)

UserInputMapper::Input SixenseManager::makeInput(SixenseManager::JoystickAxisChannel axis, int index) {
return UserInputMapper::Input(_deviceID, axis | (index == 0 ? LEFT_MASK : RIGHT_MASK), UserInputMapper::ChannelType::AXIS);
}

UserInputMapper::Input SixenseManager::makeInput(JointChannel joint) {
return UserInputMapper::Input(_deviceID, joint, UserInputMapper::ChannelType::POSE);
}
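
The makeInput overloads above fold the controller side into the channel bits by OR-ing a mask into the button or axis identifier. LEFT_MASK and RIGHT_MASK are defined outside this excerpt, so the values in this sketch are purely illustrative stand-ins for the bit-packing idea:

    // Hypothetical mask values -- the real ones live elsewhere in the input-plugins library.
    const unsigned int EXAMPLE_LEFT_MASK  = 0U << 16;
    const unsigned int EXAMPLE_RIGHT_MASK = 1U << 16;
    const unsigned int EXAMPLE_BUTTON_FWD = 1U << 7;   // mirrors BUTTON_FWD in SixenseManager.h below

    // Pack "forward button on the right-hand controller" into one channel id,
    // the same way makeInput(BUTTON_FWD, 1) does above.
    unsigned int exampleChannel = EXAMPLE_BUTTON_FWD | EXAMPLE_RIGHT_MASK;

    // Buttons arrive as a bit field, so a press is detected with a simple mask test.
    bool isForwardPressed(unsigned int buttons) { return (buttons & EXAMPLE_BUTTON_FWD) != 0; }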

@@ -1,6 +1,6 @@
//
// SixenseManager.h
// interface/src/devices
// input-plugins/src/input-plugins
//
// Created by Andrzej Kapolka on 11/15/13.
// Copyright 2013 High Fidelity, Inc.

@@ -12,23 +12,22 @@
#ifndef hifi_SixenseManager_h
#define hifi_SixenseManager_h

#include <QObject>
#include <unordered_set>

#ifdef HAVE_SIXENSE
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include "sixense.h"

#ifdef __APPLE__
#include <QCoreApplication>
#include <qlibrary.h>
#endif

#endif

#include "ui/UserInputMapper.h"
#include "InputPlugin.h"
#include "InputDevice.h"

class PalmData;
class QLibrary;

const unsigned int BUTTON_0 = 1U << 0; // the skinny button between 1 and 2
const unsigned int BUTTON_1 = 1U << 5;

@@ -38,15 +37,10 @@ const unsigned int BUTTON_4 = 1U << 4;
const unsigned int BUTTON_FWD = 1U << 7;
const unsigned int BUTTON_TRIGGER = 1U << 8;

// Event type that represents using the controller
const unsigned int CONTROLLER_0_EVENT = 1500U;
const unsigned int CONTROLLER_1_EVENT = 1501U;

const float DEFAULT_SIXENSE_RETICLE_MOVE_SPEED = 37.5f;
const bool DEFAULT_INVERT_SIXENSE_MOUSE_BUTTONS = false;

/// Handles interaction with the Sixense SDK (e.g., Razer Hydra).
class SixenseManager : public QObject {
// Handles interaction with the Sixense SDK (e.g., Razer Hydra).
class SixenseManager : public InputPlugin, public InputDevice {
Q_OBJECT
public:
enum JoystickAxisChannel {

@@ -57,51 +51,49 @@ public:
BACK_TRIGGER = 1U << 6,
};

enum JointChannel {
LEFT_HAND = 0,
RIGHT_HAND,
};

SixenseManager();

static SixenseManager& getInstance();

void initialize();
bool isInitialized() const { return _isInitialized; }

void setIsEnabled(bool isEnabled) { _isEnabled = isEnabled; }

void update(float deltaTime);
float getCursorPixelRangeMult() const;

float getReticleMoveSpeed() const { return _reticleMoveSpeed; }
void setReticleMoveSpeed(float sixenseReticleMoveSpeed) { _reticleMoveSpeed = sixenseReticleMoveSpeed; }

// Plugin functions
virtual bool isSupported() const override;
virtual bool isJointController() const override { return true; }
const QString& getName() const { return NAME; }

virtual void activate() override;
virtual void deactivate() override;

virtual void pluginFocusOutEvent() override { focusOutEvent(); }
virtual void pluginUpdate(float deltaTime, bool jointsCaptured) override { update(deltaTime, jointsCaptured); }

// Device functions
virtual void registerToUserInputMapper(UserInputMapper& mapper) override;
virtual void assignDefaultInputMapping(UserInputMapper& mapper) override;
virtual void update(float deltaTime, bool jointsCaptured) override;
virtual void focusOutEvent() override;

bool getInvertButtons() const { return _invertButtons; }
void setInvertButtons(bool invertSixenseButtons) { _invertButtons = invertSixenseButtons; }

typedef std::unordered_set<int> ButtonPressedMap;
typedef std::map<int, float> AxisStateMap;

float getButton(int channel) const;
float getAxis(int channel) const;

UserInputMapper::Input makeInput(unsigned int button, int index);
UserInputMapper::Input makeInput(JoystickAxisChannel axis, int index);

void registerToUserInputMapper(UserInputMapper& mapper);
void assignDefaultInputMapping(UserInputMapper& mapper);

void update();
void focusOutEvent();

public slots:
void toggleSixense(bool shouldEnable);
void setFilter(bool filter);
void setLowVelocityFilter(bool lowVelocityFilter) { _lowVelocityFilter = lowVelocityFilter; };
UserInputMapper::Input makeInput(JointChannel joint);

private:
SixenseManager();
~SixenseManager();

public slots:
void setFilter(bool filter);

private:
void handleButtonEvent(unsigned int buttons, int index);
void handleAxisEvent(float x, float y, float trigger, int index);
#ifdef HAVE_SIXENSE
void updateCalibration(const sixenseControllerData* controllers);
void emulateMouse(PalmData* palm, int index);
void handlePoseEvent(glm::vec3 position, glm::quat rotation, int index);

void updateCalibration(void* controllers);

int _calibrationState;

// these are calibration results

@@ -123,29 +115,11 @@ private:
QLibrary* _sixenseLibrary;
#endif

#endif
bool _isInitialized;
bool _isEnabled;
bool _hydrasConnected;

// for mouse emulation with the two controllers
bool _triggerPressed[2];
bool _bumperPressed[2];
int _oldX[2];
int _oldY[2];
PalmData* _prevPalms[2];

bool _lowVelocityFilter;
bool _controllersAtBase;

float _reticleMoveSpeed = DEFAULT_SIXENSE_RETICLE_MOVE_SPEED;
bool _invertButtons = DEFAULT_INVERT_SIXENSE_MOUSE_BUTTONS;

protected:
int _deviceID = 0;

ButtonPressedMap _buttonPressedMap;
AxisStateMap _axisStateMap;
static const QString NAME;
};

#endif // hifi_SixenseManager_h
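
With SixenseManager reworked into an InputPlugin/InputDevice, a container is expected to drive it through the virtuals declared above. The loop below is only a hedged sketch of that idea; the real plugin manager is not part of this diff, and the base-class name is taken solely from the include shown above:

    // Assumes the InputPlugin base class declares the virtuals shown in the header above.
    #include "InputPlugin.h"

    void runInputPluginFrame(InputPlugin* plugin, float deltaTime, bool jointsCaptured) {
        if (!plugin->isSupported()) {
            return;                                        // e.g. no Hydra connected or SDK missing
        }
        plugin->pluginUpdate(deltaTime, jointsCaptured);   // SixenseManager forwards this to update()
    }

In the same spirit, activate()/deactivate() would bracket the plugin's lifetime and pluginFocusOutEvent() would be called when the window loses focus, since the header forwards it to focusOutEvent().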

@@ -1,6 +1,6 @@
//
// UserInputMapper.cpp
// interface/src/ui
// input-plugins/src/input-plugins
//
// Created by Sam Gateau on 4/27/15.
// Copyright 2015 High Fidelity, Inc.

@@ -8,15 +8,9 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <algorithm>

#include "Application.h"

#include "UserInputMapper.h"


// UserInputMapper Class

// Default contruct allocate the poutput size with the current hardcoded action channels
UserInputMapper::UserInputMapper() {
assignDefaulActionScales();

@@ -158,6 +152,10 @@ void UserInputMapper::update(float deltaTime) {
for (auto& channel : _actionStates) {
channel = 0.0f;
}

for (auto& channel : _poseStates) {
channel = PoseValue();
}

int currentTimestamp = 0;

@@ -193,8 +191,10 @@ void UserInputMapper::update(float deltaTime) {
_actionStates[channelInput.first] += inputMapping._scale * deviceProxy->getAxis(inputID, currentTimestamp);
break;
}
case ChannelType::JOINT: {
// _channelStates[channelInput.first].jointVal = deviceProxy->getJoint(inputID, currentTimestamp);
case ChannelType::POSE: {
if (!_poseStates[channelInput.first].isValid()) {
_poseStates[channelInput.first] = deviceProxy->getPose(inputID, currentTimestamp);
}
break;
}
default: {

@@ -211,8 +211,9 @@ void UserInputMapper::update(float deltaTime) {
for (auto i = 0; i < NUM_ACTIONS; i++) {
_actionStates[i] *= _actionScales[i];
if (_actionStates[i] > 0) {
emit Application::getInstance()->getControllerScriptingInterface()->actionEvent(i, _actionStates[i]);
emit actionEvent(i, _actionStates[i]);
}
// TODO: emit signal for pose changes
}
}

@@ -247,6 +248,10 @@ void UserInputMapper::assignDefaulActionScales() {
_actionScales[PITCH_UP] = 1.0f; // 1 degree per unit
_actionScales[BOOM_IN] = 0.5f; // .5m per unit
_actionScales[BOOM_OUT] = 0.5f; // .5m per unit
_actionScales[LEFT_HAND] = 1.0f; // default
_actionScales[RIGHT_HAND] = 1.0f; // default
_actionScales[LEFT_HAND_CLICK] = 1.0f; // on
_actionScales[RIGHT_HAND_CLICK] = 1.0f; // on
_actionStates[SHIFT] = 1.0f; // on
_actionStates[ACTION1] = 1.0f; // default
_actionStates[ACTION2] = 1.0f; // default

@@ -267,6 +272,10 @@ void UserInputMapper::createActionNames() {
_actionNames[PITCH_UP] = "PITCH_UP";
_actionNames[BOOM_IN] = "BOOM_IN";
_actionNames[BOOM_OUT] = "BOOM_OUT";
_actionNames[LEFT_HAND] = "LEFT_HAND";
_actionNames[RIGHT_HAND] = "RIGHT_HAND";
_actionNames[LEFT_HAND_CLICK] = "LEFT_HAND_CLICK";
_actionNames[RIGHT_HAND_CLICK] = "RIGHT_HAND_CLICK";
_actionNames[SHIFT] = "SHIFT";
_actionNames[ACTION1] = "ACTION1";
_actionNames[ACTION2] = "ACTION2";
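
A key change in update() above is that the mapper now emits its own actionEvent signal instead of reaching into the Application's controller scripting interface. A hedged sketch of how a consumer would subscribe; only the actionEvent(int, float) signal comes from this diff, the receiver side is illustrative:

    #include <QObject>

    // mapper and receiver are assumed to outlive the connection.
    void connectActionEvents(UserInputMapper* mapper, QObject* receiver) {
        QObject::connect(mapper, &UserInputMapper::actionEvent, receiver,
                         [](int action, float state) {
                             // React to the scaled per-frame action value,
                             // e.g. forward it to the scripting interface.
                         });
    }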
@@ -1,6 +1,6 @@
//
// UserInputMapper.h
// interface/src/ui
// input-plugins/src/input-plugins
//
// Created by Sam Gateau on 4/27/15.
// Copyright 2015 High Fidelity, Inc.

@@ -13,14 +13,17 @@
#define hifi_UserInputMapper_h

#include <glm/glm.hpp>
#include <RegisteredMetaTypes.h>

#include <unordered_set>
#include <functional>
#include <memory>
#include <DependencyManager.h>
#include <RegisteredMetaTypes.h>


class UserInputMapper : public QObject {
class UserInputMapper : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
Q_ENUMS(Action)
public:
typedef unsigned short uint16;

@@ -30,7 +33,7 @@ public:
UNKNOWN = 0,
BUTTON = 1,
AXIS,
JOINT,
POSE,
};

// Input is the unique identifier to find a n input channel of a particular device

@@ -61,7 +64,7 @@ public:

bool isButton() const { return getType() == ChannelType::BUTTON; }
bool isAxis() const { return getType() == ChannelType::AXIS; }
bool isJoint() const { return getType() == ChannelType::JOINT; }
bool isPose() const { return getType() == ChannelType::POSE; }

// WORKAROUND: the explicit initializer here avoids a bug in GCC-4.8.2 (but not found in 4.9.2)
// where the default initializer (a C++-11ism) for the union data above is not applied.

@@ -77,19 +80,26 @@ public:
// Modifiers are just button inputID
typedef std::vector< Input > Modifiers;

class JointValue {
class PoseValue {
public:
glm::vec3 translation{ 0.0f };
glm::quat rotation;
glm::vec3 _translation{ 0.0f };
glm::quat _rotation;
bool _valid;

JointValue() {};
JointValue(const JointValue&) = default;
JointValue& operator = (const JointValue&) = default;
PoseValue() : _valid(false) {};
PoseValue(glm::vec3 translation, glm::quat rotation) : _translation(translation), _rotation(rotation), _valid(true) {}
PoseValue(const PoseValue&) = default;
PoseValue& operator = (const PoseValue&) = default;
bool operator ==(const PoseValue& right) const { return _translation == right.getTranslation() && _rotation == right.getRotation() && _valid == right.isValid(); }

bool isValid() const { return _valid; }
glm::vec3 getTranslation() const { return _translation; }
glm::quat getRotation() const { return _rotation; }
};

typedef std::function<bool (const Input& input, int timestamp)> ButtonGetter;
typedef std::function<float (const Input& input, int timestamp)> AxisGetter;
typedef std::function<JointValue (const Input& input, int timestamp)> JointGetter;
typedef std::function<PoseValue (const Input& input, int timestamp)> PoseGetter;
typedef QPair<Input, QString> InputPair;
typedef std::function<QVector<InputPair> ()> AvailableInputGetter;
typedef std::function<bool ()> ResetBindings;

@@ -102,8 +112,8 @@ public:

QString _name;
ButtonGetter getButton = [] (const Input& input, int timestamp) -> bool { return false; };
AxisGetter getAxis = [] (const Input& input, int timestamp) -> bool { return 0.0f; };
JointGetter getJoint = [] (const Input& input, int timestamp) -> JointValue { return JointValue(); };
AxisGetter getAxis = [] (const Input& input, int timestamp) -> float { return 0.0f; };
PoseGetter getPose = [] (const Input& input, int timestamp) -> PoseValue { return PoseValue(); };
AvailableInputGetter getAvailabeInputs = [] () -> AvailableInput { return QVector<InputPair>(); };
ResetBindings resetDeviceBindings = [] () -> bool { return true; };


@@ -140,6 +150,12 @@ public:
BOOM_IN,
BOOM_OUT,

LEFT_HAND,
RIGHT_HAND,

LEFT_HAND_CLICK,
RIGHT_HAND_CLICK,

SHIFT,

ACTION1,

@@ -154,6 +170,7 @@ public:
QVector<Action> getAllActions();
QString getActionName(Action action) { return UserInputMapper::_actionNames[(int) action]; }
float getActionState(Action action) const { return _actionStates[action]; }
PoseValue getPoseState(Action action) const { return _poseStates[action]; }
void assignDefaulActionScales();

// Add input channel to the mapper and check that all the used channels are registered.

@@ -206,8 +223,15 @@ public:
// Update means go grab all the device input channels and update the output channel values
void update(float deltaTime);

void setSensorToWorldMat(glm::mat4 sensorToWorldMat) { _sensorToWorldMat = sensorToWorldMat; }
glm::mat4 getSensorToWorldMat() { return _sensorToWorldMat; }

UserInputMapper();

signals:
void actionEvent(int action, float state);


protected:
typedef std::map<int, DeviceProxy::Pointer> DevicesMap;
DevicesMap _registeredDevices;

@@ -221,6 +245,9 @@ protected:

std::vector<float> _actionStates = std::vector<float>(NUM_ACTIONS, 0.0f);
std::vector<float> _actionScales = std::vector<float>(NUM_ACTIONS, 1.0f);
std::vector<PoseValue> _poseStates = std::vector<PoseValue>(NUM_ACTIONS);

glm::mat4 _sensorToWorldMat;
};

Q_DECLARE_METATYPE(UserInputMapper::InputPair)
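
Putting the pieces of this header together: a device registers itself roughly the way the SixenseManager diff earlier in this commit does, by reserving a device ID, describing itself with a DeviceProxy whose lambdas answer button/axis/pose queries, and binding default channels to actions. The sketch below is hedged; in particular, the registration call itself is not shown anywhere in this excerpt and is marked as an assumption:

    #include <memory>
    #include "UserInputMapper.h"

    void registerExampleDevice(UserInputMapper& mapper) {
        int deviceID = mapper.getFreeDeviceID();

        auto proxy = std::make_shared<UserInputMapper::DeviceProxy>("ExampleDevice");
        proxy->getButton = [] (const UserInputMapper::Input&, int) -> bool { return false; };
        proxy->getAxis   = [] (const UserInputMapper::Input&, int) -> float { return 0.0f; };
        proxy->getPose   = [] (const UserInputMapper::Input&, int) -> UserInputMapper::PoseValue {
            return UserInputMapper::PoseValue();   // default-constructed pose reports isValid() == false
        };
        // mapper.registerDevice(deviceID, proxy);   // assumed registration step, not shown in this diff

        // Bind a hypothetical button channel 0 of this device to ACTION1,
        // following the addInputChannel pattern from assignDefaultInputMapping.
        UserInputMapper::Input button(deviceID, 0, UserInputMapper::ChannelType::BUTTON);
        mapper.addInputChannel(UserInputMapper::ACTION1, button);
    }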