Mirror of https://github.com/overte-org/overte.git (synced 2025-04-22 20:53:33 +02:00)

Commit a97524f954: Merge branch 'master' of https://github.com/worklist/hifi into 20638
191 changed files with 9443 additions and 6040 deletions
cmake
examples
interface
    CMakeLists.txt
    resources/images
    src
        Application.cpp, Application.h, Camera.cpp, Camera.h, GLCanvas.cpp, GLCanvas.h, MainWindow.cpp, Menu.cpp, Menu.h, UIUtil.cpp
        avatar
        devices
            3DConnexionClient.cpp, 3DConnexionClient.h, OculusManager.cpp, OculusManager.h, TV3DManager.cpp, TV3DManager.h
        scripting
        ui
    ui
libraries
    animation/src
        ElbowConstraint.cpp, ElbowConstraint.h, JointState.cpp, Rig.cpp, Rig.h, RotationConstraint.h, SwingTwistConstraint.cpp, SwingTwistConstraint.h
    avatars/src
    display-plugins
        CMakeLists.txt
        src/display-plugins
            Basic2DWindowOpenGLDisplayPlugin.cpp, Basic2DWindowOpenGLDisplayPlugin.h, DisplayPlugin.cpp, DisplayPlugin.h, MainWindowOpenGLDisplayPlugin.cpp, MainWindowOpenGLDisplayPlugin.h, NullDisplayPlugin.cpp, NullDisplayPlugin.h, OpenGLDisplayPlugin.cpp, OpenGLDisplayPlugin.h, WindowOpenGLDisplayPlugin.cpp, WindowOpenGLDisplayPlugin.h
            oculus
                OculusBaseDisplayPlugin.cpp, OculusBaseDisplayPlugin.h, OculusHelpers.cpp, OculusHelpers.h, Oculus_0_5_DisplayPlugin.cpp, Oculus_0_5_DisplayPlugin.h, Oculus_0_6_DisplayPlugin.cpp, Oculus_0_6_DisplayPlugin.h
            openvr
            stereo
    entities-renderer/src
cmake/externals/LibOVR/CMakeLists.txt (vendored, 4 changed lines)
@@ -9,8 +9,8 @@ if (WIN32)
  ExternalProject_Add(
    ${EXTERNAL_NAME}
-    URL http://static.oculus.com/sdk-downloads/0.6.0.0/1431634088/ovr_sdk_win_0.6.0.0.zip
-    URL_MD5 a3dfdab037a854fdcf7e6033fa8d7028
+    URL http://static.oculus.com/sdk-downloads/0.6.0.1/Public/1435190862/ovr_sdk_win_0.6.0.1.zip
+    URL_MD5 4b3ef825f9a1d6d3035c9f6820687da9
    CONFIGURE_COMMAND ""
    BUILD_COMMAND ""
    INSTALL_COMMAND ""
cmake/externals/openvr/CMakeLists.txt (vendored, 5 changed lines)
@@ -26,16 +26,19 @@ if (WIN32)
    # FIXME need to account for different architectures
    set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/win32/openvr_api.lib CACHE TYPE INTERNAL)
    add_paths_to_fixup_libs(${SOURCE_DIR}/bin/win32)

elseif(APPLE)

    # FIXME need to account for different architectures
    set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/osx32/libopenvr_api.dylib CACHE TYPE INTERNAL)
    add_paths_to_fixup_libs(${SOURCE_DIR}/bin/osx32)

elseif(NOT ANDROID)

    # FIXME need to account for different architectures
-    set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/linux32/libopenvr_api.so CACHE TYPE INTERNAL)
+    set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/linux64/libopenvr_api.so CACHE TYPE INTERNAL)
    add_paths_to_fixup_libs(${SOURCE_DIR}/bin/linux64)

endif()
cmake/externals/sixense/CMakeLists.txt (new file, vendored, 60 lines)
@@ -0,0 +1,60 @@
include(ExternalProject)
include(SelectLibraryConfigurations)

set(EXTERNAL_NAME Sixense)

string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)

ExternalProject_Add(
    ${EXTERNAL_NAME}
    URL ./SixenseSDK_062612.zip
    URL_MD5 10cc8dc470d2ac1244a88cf04bc549cc
    CONFIGURE_COMMAND ""
    BUILD_COMMAND ""
    INSTALL_COMMAND ""
    LOG_DOWNLOAD 1
)

if (APPLE)
    find_library(SIXENSE_LIBRARY_RELEASE lib/osx_x64/release_dll/libsixense_x64.dylib HINTS ${SIXENSE_SEARCH_DIRS})
    find_library(SIXENSE_LIBRARY_DEBUG lib/osx_x64/debug_dll/libsixensed_x64.dylib HINTS ${SIXENSE_SEARCH_DIRS})
elseif (UNIX)
    find_library(SIXENSE_LIBRARY_RELEASE lib/linux_x64/release/libsixense_x64.so HINTS ${SIXENSE_SEARCH_DIRS})
    # find_library(SIXENSE_LIBRARY_DEBUG lib/linux_x64/debug/libsixensed_x64.so HINTS ${SIXENSE_SEARCH_DIRS})
elseif (WIN32)
endif ()

ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)

set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/include CACHE TYPE INTERNAL)

if (WIN32)

    if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
        set(ARCH_DIR "x64")
        set(ARCH_SUFFIX "_x64")
    else()
        set(ARCH_DIR "Win32")
        set(ARCH_SUFFIX "")
    endif()

    # FIXME need to account for different architectures
    set(${EXTERNAL_NAME_UPPER}_LIBRARIES "${SOURCE_DIR}/lib/${ARCH_DIR}/release_dll/sixense${ARCH_SUFFIX}.lib" CACHE TYPE INTERNAL)
    add_paths_to_fixup_libs(${SOURCE_DIR}/bin/win32)

elseif(APPLE)

    # FIXME need to account for different architectures
    set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/osx32/libopenvr_api.dylib CACHE TYPE INTERNAL)
    add_paths_to_fixup_libs(${SOURCE_DIR}/bin/osx32)

elseif(NOT ANDROID)

    # FIXME need to account for different architectures
    set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/linux32/libopenvr_api.so CACHE TYPE INTERNAL)
    add_paths_to_fixup_libs(${SOURCE_DIR}/bin/linux32)

endif()
cmake/macros/GroupSources.cmake (new file, 16 lines)
@@ -0,0 +1,16 @@
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#

macro(GroupSources curdir)
    file(GLOB children RELATIVE ${PROJECT_SOURCE_DIR}/${curdir} ${PROJECT_SOURCE_DIR}/${curdir}/*)
    foreach(child ${children})
        if(IS_DIRECTORY ${PROJECT_SOURCE_DIR}/${curdir}/${child})
            GroupSources(${curdir}/${child})
        else()
            string(REPLACE "/" "\\" groupname ${curdir})
            source_group(${groupname} FILES ${PROJECT_SOURCE_DIR}/${curdir}/${child})
        endif()
    endforeach()
endmacro()
examples/controllers/handGrab.js (new file, 331 lines)
@@ -0,0 +1,331 @@
// handGrab.js
// examples
//
// Created by Sam Gondelman on 8/3/2015
// Copyright 2015 High Fidelity, Inc.
//
// Allow avatar to grab the closest object to each hand and throw them
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("http://s3.amazonaws.com/hifi-public/scripts/libraries/toolBars.js");

var nullActionID = "00000000-0000-0000-0000-000000000000";
var controllerID;
var controllerActive;
var leftHandObjectID = null;
var rightHandObjectID = null;
var leftHandActionID = nullActionID;
var rightHandActionID = nullActionID;

var TRIGGER_THRESHOLD = 0.2;
var GRAB_RADIUS = 0.25;

var LEFT_HAND_CLICK = Controller.findAction("LEFT_HAND_CLICK");
var RIGHT_HAND_CLICK = Controller.findAction("RIGHT_HAND_CLICK");
var ACTION1 = Controller.findAction("ACTION1");
var ACTION2 = Controller.findAction("ACTION2");

var rightHandGrabAction = RIGHT_HAND_CLICK;
var leftHandGrabAction = LEFT_HAND_CLICK;

var rightHandGrabValue = 0;
var leftHandGrabValue = 0;
var prevRightHandGrabValue = 0;
var prevLeftHandGrabValue = 0;

var grabColor = { red: 0, green: 255, blue: 0};
var releaseColor = { red: 0, green: 0, blue: 255};

var toolBar = new ToolBar(0, 0, ToolBar.vertical, "highfidelity.toybox.toolbar", function() {
    return {
        x: 100,
        y: 380
    };
});

var BUTTON_SIZE = 32;
var SWORD_IMAGE = "https://hifi-public.s3.amazonaws.com/images/sword/sword.svg"; // replace this with a table icon
var CLEANUP_IMAGE = "http://s3.amazonaws.com/hifi-public/images/delete.png"; // cleanup table
var tableButton = toolBar.addOverlay("image", {
    width: BUTTON_SIZE,
    height: BUTTON_SIZE,
    imageURL: SWORD_IMAGE,
    alpha: 1
});
var cleanupButton = toolBar.addOverlay("image", {
    width: BUTTON_SIZE,
    height: BUTTON_SIZE,
    imageURL: CLEANUP_IMAGE,
    alpha: 1
});

var leftHandOverlay = Overlays.addOverlay("sphere", {
    position: MyAvatar.getLeftPalmPosition(),
    size: GRAB_RADIUS,
    color: releaseColor,
    alpha: 0.5,
    solid: false
});
var rightHandOverlay = Overlays.addOverlay("sphere", {
    position: MyAvatar.getRightPalmPosition(),
    size: GRAB_RADIUS,
    color: releaseColor,
    alpha: 0.5,
    solid: false
});

var OBJECT_HEIGHT_OFFSET = 0.5;
var MIN_OBJECT_SIZE = 0.05;
var MAX_OBJECT_SIZE = 0.3;
var TABLE_DIMENSIONS = {
    x: 10.0,
    y: 0.2,
    z: 5.0
};

var GRAVITY = {
    x: 0,
    y: -2,
    z: 0
}

var LEFT = 0;
var RIGHT = 1;

var tableCreated = false;

var NUM_OBJECTS = 100;
var tableEntities = Array(NUM_OBJECTS + 1); // Also includes table

var VELOCITY_MAG = 0.3;

var MODELS = Array(
    { modelURL: "https://hifi-public.s3.amazonaws.com/ozan/props/sword/sword.fbx" },
    { modelURL: "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Vehicles/clara/spaceshuttle.fbx" },
    { modelURL: "https://s3.amazonaws.com/hifi-public/cozza13/apartment/Stargate.fbx" },
    { modelURL: "https://dl.dropboxusercontent.com/u/17344741/kelectricguitar10/kelectricguitar10.fbx" },
    { modelURL: "https://dl.dropboxusercontent.com/u/17344741/ktoilet10/ktoilet10.fbx" },
    { modelURL: "https://hifi-public.s3.amazonaws.com/models/props/MidCenturyModernLivingRoom/Interior/BilliardsTable.fbx" },
    { modelURL: "https://hifi-public.s3.amazonaws.com/ozan/avatars/robotMedic/robotMedicRed/robotMedicRed.fst" },
    { modelURL: "https://hifi-public.s3.amazonaws.com/ozan/avatars/robotMedic/robotMedicFaceRig/robotMedic.fst" },
    { modelURL: "https://hifi-public.s3.amazonaws.com/marketplace/contents/029db3d4-da2c-4cb2-9c08-b9612ba576f5/02949063e7c4aed42ad9d1a58461f56d.fst?1427169842" },
    { modelURL: "https://hifi-public.s3.amazonaws.com/models/props/MidCenturyModernLivingRoom/Interior/Bar.fbx" },
    { modelURL: "https://hifi-public.s3.amazonaws.com/marketplace/contents/96124d04-d603-4707-a5b3-e03bf47a53b2/1431770eba362c1c25c524126f2970fb.fst?1436924721" }
    // { modelURL: "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Architecture/sketchfab/cudillero.fbx" },
    // { modelURL: "https://hifi-public.s3.amazonaws.com/ozan/sets/musicality/musicality.fbx" },
    // { modelURL: "https://hifi-public.s3.amazonaws.com/ozan/sets/statelyHome/statelyHome.fbx" }
);

function letGo(hand) {
    var actionIDToRemove = (hand == LEFT) ? leftHandActionID : rightHandActionID;
    var entityIDToEdit = (hand == LEFT) ? leftHandObjectID : rightHandObjectID;
    var handVelocity = (hand == LEFT) ? MyAvatar.getLeftPalmVelocity() : MyAvatar.getRightPalmVelocity();
    var handAngularVelocity = (hand == LEFT) ? MyAvatar.getLeftPalmAngularVelocity() :
                                               MyAvatar.getRightPalmAngularVelocity();
    if (actionIDToRemove != nullActionID && entityIDToEdit != null) {
        Entities.deleteAction(entityIDToEdit, actionIDToRemove);
        if (hand == LEFT) {
            leftHandObjectID = null;
            leftHandActionID = nullActionID;
        } else {
            rightHandObjectID = null;
            rightHandActionID = nullActionID;
        }
    }
}

function setGrabbedObject(hand) {
    var handPosition = (hand == LEFT) ? MyAvatar.getLeftPalmPosition() : MyAvatar.getRightPalmPosition();
    var entities = Entities.findEntities(handPosition, GRAB_RADIUS);
    var objectID = null;
    var minDistance = GRAB_RADIUS;
    for (var i = 0; i < entities.length; i++) {
        if ((hand == LEFT && entities[i] == rightHandObjectID) ||
            (hand == RIGHT) && entities[i] == leftHandObjectID) {
            continue;
        } else {
            var distance = Vec3.distance(Entities.getEntityProperties(entities[i]).position, handPosition);
            if (distance < minDistance) {
                objectID = entities[i];
                minDistance = distance;
            }
        }
    }
    if (objectID == null) {
        return false;
    }
    if (hand == LEFT) {
        leftHandObjectID = objectID;
    } else {
        rightHandObjectID = objectID;
    }
    return true;
}

function grab(hand) {
    if (!setGrabbedObject(hand)) {
        return;
    }
    var objectID = (hand == LEFT) ? leftHandObjectID : rightHandObjectID;
    var handRotation = (hand == LEFT) ? MyAvatar.getLeftPalmRotation() : MyAvatar.getRightPalmRotation();

    var objectRotation = Entities.getEntityProperties(objectID).rotation;
    var offsetRotation = Quat.multiply(Quat.inverse(handRotation), objectRotation);
    var actionID = Entities.addAction("hold", objectID, {
        relativePosition: {
            x: 0.0,
            y: 0.0,
            z: 0.0
        },
        relativeRotation: offsetRotation,
        hand: (hand == LEFT) ? "left" : "right",
        timeScale: 0.05
    });
    if (actionID == nullActionID) {
        if (hand == LEFT) {
            leftHandObjectID = null;
        } else {
            rightHandObjectID = null;
        }
    } else {
        // Entities.editEntity(objectID, { ignore});
        if (hand == LEFT) {
            leftHandActionID = actionID;
        } else {
            rightHandActionID = actionID;
        }
    }
}

function update() {
    Overlays.editOverlay(leftHandOverlay, { position: MyAvatar.getLeftPalmPosition() });
    Overlays.editOverlay(rightHandOverlay, { position: MyAvatar.getRightPalmPosition() });

    rightHandGrabValue = Controller.getActionValue(rightHandGrabAction);
    leftHandGrabValue = Controller.getActionValue(leftHandGrabAction);

    if (rightHandGrabValue > TRIGGER_THRESHOLD && rightHandObjectID == null) {
        Overlays.editOverlay(rightHandOverlay, { color: grabColor });
        grab(RIGHT);
    } else if (rightHandGrabValue < TRIGGER_THRESHOLD &&
               prevRightHandGrabValue > TRIGGER_THRESHOLD) {
        Overlays.editOverlay(rightHandOverlay, { color: releaseColor });
        letGo(RIGHT);
    }

    if (leftHandGrabValue > TRIGGER_THRESHOLD && leftHandObjectID == null) {
        Overlays.editOverlay(leftHandOverlay, { color: grabColor });
        grab(LEFT);
    } else if (leftHandGrabValue < TRIGGER_THRESHOLD &&
               prevLeftHandGrabValue > TRIGGER_THRESHOLD) {
        Overlays.editOverlay(leftHandOverlay, { color: releaseColor });
        letGo(LEFT);
    }

    prevRightHandGrabValue = rightHandGrabValue;
    prevLeftHandGrabValue = leftHandGrabValue;
}

function cleanUp() {
    letGo(RIGHT);
    letGo(LEFT);
    Overlays.deleteOverlay(leftHandOverlay);
    Overlays.deleteOverlay(rightHandOverlay);
    removeTable();
    toolBar.cleanup();
}

function onClick(event) {
    if (event.deviceID != 0) {
        return;
    }
    switch (Overlays.getOverlayAtPoint(event)) {
        case tableButton:
            if (!tableCreated) {
                createTable();
                tableCreated = true;
            }
            break;
        case cleanupButton:
            if (tableCreated) {
                removeTable();
                tableCreated = false;
            }
            break;
    }
}

randFloat = function(low, high) {
    return low + Math.random() * (high - low);
}

randInt = function(low, high) {
    return Math.floor(randFloat(low, high));
}

function createTable() {
    var tablePosition = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(MyAvatar.orientation)));
    tableEntities[0] = Entities.addEntity( {
        type: "Box",
        position: tablePosition,
        dimensions: TABLE_DIMENSIONS,
        rotation: MyAvatar.orientation,
        color: { red: 255, green: 0, blue: 0 }
    });

    for (var i = 1; i < NUM_OBJECTS + 1; i++) {
        var objectOffset = { x: TABLE_DIMENSIONS.x/2.0 * randFloat(-1, 1),
                             y: OBJECT_HEIGHT_OFFSET,
                             z: TABLE_DIMENSIONS.z/2.0 * randFloat(-1, 1) };
        var objectPosition = Vec3.sum(tablePosition, Vec3.multiplyQbyV(MyAvatar.orientation, objectOffset));
        var type;
        var randType = randInt(0, 3);
        switch (randType) {
            case 0:
                type = "Box";
                break;
            case 1:
                type = "Sphere";
                // break;
            case 2:
                type = "Model";
                break;
        }
        tableEntities[i] = Entities.addEntity( {
            type: type,
            position: objectPosition,
            velocity: { x: randFloat(-VELOCITY_MAG, VELOCITY_MAG),
                        y: randFloat(-VELOCITY_MAG, VELOCITY_MAG),
                        z: randFloat(-VELOCITY_MAG, VELOCITY_MAG) },
            dimensions: { x: randFloat(MIN_OBJECT_SIZE, MAX_OBJECT_SIZE),
                          y: randFloat(MIN_OBJECT_SIZE, MAX_OBJECT_SIZE),
                          z: randFloat(MIN_OBJECT_SIZE, MAX_OBJECT_SIZE) },
            rotation: MyAvatar.orientation,
            gravity: GRAVITY,
            damping: 0.1,
            restitution: 0.01,
            density: 0.5,
            collisionsWillMove: true,
            color: { red: randInt(0, 255), green: randInt(0, 255), blue: randInt(0, 255) }
        });
        if (type == "Model") {
            var randModel = randInt(0, MODELS.length);
            Entities.editEntity(tableEntities[i], {
                shapeType: "box",
                modelURL: MODELS[randModel].modelURL
            });
        }
    }
}

function removeTable() {
    for (var i = 0; i < tableEntities.length; i++) {
        Entities.deleteEntity(tableEntities[i]);
    }
}

Script.scriptEnding.connect(cleanUp);
Script.update.connect(update);
Controller.mousePressEvent.connect(onClick);
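The relativeRotation that handGrab.js passes to the "hold" action is just the grabbed object's rotation re-expressed in the palm's frame. Below is a minimal sketch of that frame change; it assumes it runs inside the interface script engine like the script itself, that Quat.fromPitchYawRollDegrees and Quat.safeEulerAngles are available as in other example scripts, and the two input rotations are made-up values for illustration.

// Hypothetical palm and object rotations, for illustration only.
var handRotation = Quat.fromPitchYawRollDegrees(0, 90, 0);
var objectRotation = Quat.fromPitchYawRollDegrees(0, 45, 0);

// Same math as grab(): offset = inverse(hand) * object.
// Re-applying handRotation to this offset recovers objectRotation,
// which is what lets the object keep the same grip as the palm moves.
var offsetRotation = Quat.multiply(Quat.inverse(handRotation), objectRotation);
print(JSON.stringify(Quat.safeEulerAngles(offsetRotation))); // roughly { x: 0, y: -45, z: 0 }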
examples/controllers/squeezeHands2.js (new file, 79 lines)
@@ -0,0 +1,79 @@
//
// squeezeHands.js
// examples
//
// Created by Philip Rosedale on June 4, 2014
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";

var rightHandAnimation = HIFI_PUBLIC_BUCKET + "animations/RightHandAnimPhilip.fbx";
var leftHandAnimation = HIFI_PUBLIC_BUCKET + "animations/LeftHandAnimPhilip.fbx";

var LEFT = 0;
var RIGHT = 1;

var lastLeftFrame = 0;
var lastRightFrame = 0;

var leftDirection = true;
var rightDirection = true;

var LAST_FRAME = 15.0; // What is the number of the last frame we want to use in the animation?
var SMOOTH_FACTOR = 0.0;
var MAX_FRAMES = 30.0;

var LEFT_HAND_CLICK = Controller.findAction("LEFT_HAND_CLICK");
var RIGHT_HAND_CLICK = Controller.findAction("RIGHT_HAND_CLICK");

Script.update.connect(function(deltaTime) {
    var leftTriggerValue = Controller.getActionValue(LEFT_HAND_CLICK);
    var rightTriggerValue = Controller.getActionValue(RIGHT_HAND_CLICK);

    var leftFrame, rightFrame;

    // Average last few trigger frames together for a bit of smoothing
    leftFrame = (leftTriggerValue * LAST_FRAME) * (1.0 - SMOOTH_FACTOR) + lastLeftFrame * SMOOTH_FACTOR;
    rightFrame = (rightTriggerValue * LAST_FRAME) * (1.0 - SMOOTH_FACTOR) + lastRightFrame * SMOOTH_FACTOR;

    if (!leftDirection) {
        leftFrame = MAX_FRAMES - leftFrame;
    }
    if (!rightDirection) {
        rightFrame = MAX_FRAMES - rightFrame;
    }

    if ((leftTriggerValue == 1.0) && (leftDirection == true)) {
        leftDirection = false;
        lastLeftFrame = MAX_FRAMES - leftFrame;
    } else if ((leftTriggerValue == 0.0) && (leftDirection == false)) {
        leftDirection = true;
        lastLeftFrame = leftFrame;
    }
    if ((rightTriggerValue == 1.0) && (rightDirection == true)) {
        rightDirection = false;
        lastRightFrame = MAX_FRAMES - rightFrame;
    } else if ((rightTriggerValue == 0.0) && (rightDirection == false)) {
        rightDirection = true;
        lastRightFrame = rightFrame;
    }

    if ((leftFrame != lastLeftFrame) && leftHandAnimation.length){
        MyAvatar.startAnimation(leftHandAnimation, 30.0, 1.0, false, true, leftFrame, leftFrame);
    }
    if ((rightFrame != lastRightFrame) && rightHandAnimation.length) {
        MyAvatar.startAnimation(rightHandAnimation, 30.0, 1.0, false, true, rightFrame, rightFrame);
    }

    lastLeftFrame = leftFrame;
    lastRightFrame = rightFrame;
});

Script.scriptEnding.connect(function() {
    MyAvatar.stopAnimation(leftHandAnimation);
    MyAvatar.stopAnimation(rightHandAnimation);
});
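The frame computation in squeezeHands2.js is a linear blend between the trigger-driven frame and the previous frame; with SMOOTH_FACTOR set to 0.0 the blend collapses to the raw trigger value. A small worked version of the same formula in plain JavaScript, with illustrative numbers only:

// frame = (trigger * LAST_FRAME) * (1 - smooth) + lastFrame * smooth
function blendFrame(triggerValue, lastFrame, lastFrameIndex, smoothFactor) {
    return (triggerValue * lastFrameIndex) * (1.0 - smoothFactor) + lastFrame * smoothFactor;
}

print(blendFrame(0.5, 12, 15, 0.0)); // 7.5  -- tracks the trigger exactly
print(blendFrame(0.5, 12, 15, 0.5)); // 9.75 -- pulled halfway toward the previous frame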
@@ -13,11 +13,15 @@
/*jslint vars: true*/
var Script, Entities, MyAvatar, Window, Overlays, Controller, Vec3, Quat, print, ToolBar, Settings; // Referenced globals provided by High Fidelity.
Script.include("http://s3.amazonaws.com/hifi-public/scripts/libraries/toolBars.js");
var zombieGameScriptURL = "https://hifi-public.s3.amazonaws.com/eric/scripts/zombieFight.js?v2";
// var zombieGameScriptURL = "zombieFight.js";
Script.include(zombieGameScriptURL);

var zombieFight;

var zombieFight = new ZombieFight();

var hand = "right";

var zombieFight;
var nullActionID = "00000000-0000-0000-0000-000000000000";
var controllerID;
var controllerActive;

@@ -78,7 +82,7 @@ var cleanupButton = toolBar.addOverlay("image", {
var flasher;

- var leftTriggerButton = 0;
+ var leftHandClick = 14;
var leftTriggerValue = 0;
var prevLeftTriggerValue = 0;

@@ -88,7 +92,7 @@ var RIGHT = 1;
var leftPalm = 2 * LEFT;
var rightPalm = 2 * RIGHT;
- var rightTriggerButton = 1;
+ var rightHandClick = 15;
var prevRightTriggerValue = 0;
var rightTriggerValue = 0;
var TRIGGER_THRESHOLD = 0.2;

@@ -357,8 +361,8 @@ function update() {
}

function updateControllerState() {
-    rightTriggerValue = Controller.getTriggerValue(rightTriggerButton);
-    leftTriggerValue = Controller.getTriggerValue(leftTriggerButton);
+    rightTriggerValue = Controller.getActionValue(rightHandClick);
+    leftTriggerValue = Controller.getActionValue(leftHandClick);

    if (rightTriggerValue > TRIGGER_THRESHOLD && !swordHeld) {
        grabSword("right")

@@ -470,4 +474,4 @@ function onClick(event) {

Script.scriptEnding.connect(cleanUp);
Script.update.connect(update);
Controller.mousePressEvent.connect(onClick);
Controller.mousePressEvent.connect(onClick);
examples/example/painting/hydraPaint.js (new file, 225 lines)
@@ -0,0 +1,225 @@
//
// hydraPaint.js
// examples
//
// Created by Eric Levin on 5/14/15.
// Copyright 2014 High Fidelity, Inc.
//
// This script allows you to paint with the hydra!
//
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var LEFT = 0;
var RIGHT = 1;
var LASER_WIDTH = 3;
var LASER_COLOR = {
    red: 50,
    green: 150,
    blue: 200
};
var TRIGGER_THRESHOLD = .1;

var MAX_POINTS_PER_LINE = 40;

var LIFETIME = 6000;
var DRAWING_DEPTH = 1;
var LINE_DIMENSIONS = 20;


var MIN_POINT_DISTANCE = 0.01;

var MIN_BRUSH_RADIUS = 0.08;
var MAX_BRUSH_RADIUS = 0.1;

var RIGHT_BUTTON_1 = 7
var RIGHT_BUTTON_2 = 8
var RIGHT_BUTTON_3 = 9;
var RIGHT_BUTTON_4 = 10
var LEFT_BUTTON_1 = 1;
var LEFT_BUTTON_2 = 2;
var LEFT_BUTTON_3 = 3;
var LEFT_BUTTON_4 = 4;

var colorPalette = [{
    red: 250,
    green: 0,
    blue: 0
}, {
    red: 214,
    green: 91,
    blue: 67
}, {
    red: 192,
    green: 41,
    blue: 66
}, {
    red: 84,
    green: 36,
    blue: 55
}, {
    red: 83,
    green: 119,
    blue: 122
}];

var MIN_STROKE_WIDTH = 0.002;
var MAX_STROKE_WIDTH = 0.05;

function controller(side, cycleColorButton) {
    this.triggerHeld = false;
    this.triggerThreshold = 0.9;
    this.side = side;
    this.palm = 2 * side;
    this.tip = 2 * side + 1;
    this.trigger = side;
    this.cycleColorButton = cycleColorButton;

    this.points = [];
    this.normals = [];
    this.strokeWidths = [];

    this.currentColorIndex = 0;
    this.currentColor = colorPalette[this.currentColorIndex];

    var self = this;


    this.brush = Entities.addEntity({
        type: 'Sphere',
        position: {
            x: 0,
            y: 0,
            z: 0
        },
        color: this.currentColor,
        dimensions: {
            x: MIN_BRUSH_RADIUS,
            y: MIN_BRUSH_RADIUS,
            z: MIN_BRUSH_RADIUS
        }
    });

    this.cycleColor = function() {
        this.currentColor = colorPalette[++this.currentColorIndex];
        if (this.currentColorIndex === colorPalette.length - 1) {
            this.currentColorIndex = -1;
        }
    }
    this.newLine = function(position) {
        this.linePosition = position;
        this.line = Entities.addEntity({
            position: position,
            type: "PolyLine",
            color: this.currentColor,
            dimensions: {
                x: LINE_DIMENSIONS,
                y: LINE_DIMENSIONS,
                z: LINE_DIMENSIONS
            },
            lifetime: LIFETIME
        });
        this.points = [];
        this.normals = []
        this.strokeWidths = [];
    }

    this.update = function(deltaTime) {
        this.updateControllerState();
        var newBrushPosOffset = Vec3.multiply(Vec3.normalize(Vec3.subtract(this.tipPosition, this.palmPosition)), DRAWING_DEPTH);
        var newBrushPos = Vec3.sum(this.palmPosition, newBrushPosOffset);
        var brushRadius = map(this.triggerValue, TRIGGER_THRESHOLD, 1, MIN_BRUSH_RADIUS, MAX_BRUSH_RADIUS)
        Entities.editEntity(this.brush, {
            position: newBrushPos,
            color: this.currentColor,
            dimensions: {
                x: brushRadius,
                y: brushRadius,
                z: brushRadius
            }
        });


        if (this.triggerValue > TRIGGER_THRESHOLD && !this.drawing) {
            this.newLine(newBrushPos);
            this.drawing = true;
        } else if (this.drawing && this.triggerValue < TRIGGER_THRESHOLD) {
            this.drawing = false;
        }

        if (this.drawing && this.points.length < MAX_POINTS_PER_LINE) {
            var localPoint = Vec3.subtract(newBrushPos, this.linePosition);
            if (Vec3.distance(localPoint, this.points[this.points.length - 1]) < MIN_POINT_DISTANCE) {
                //Need a minimum distance to avoid binormal NANs
                return;
            }

            this.points.push(localPoint);
            var normal = computeNormal(newBrushPos, Camera.getPosition());

            this.normals.push(normal);
            var strokeWidth = map(this.triggerValue, TRIGGER_THRESHOLD, 1, MIN_STROKE_WIDTH, MAX_STROKE_WIDTH);
            this.strokeWidths.push(strokeWidth);
            Entities.editEntity(this.line, {
                linePoints: this.points,
                normals: this.normals,
                strokeWidths: this.strokeWidths,
                color: this.currentColor
            });

        }
    }


    this.updateControllerState = function() {
        this.cycleColorButtonPressed = Controller.isButtonPressed(this.cycleColorButton);
        this.palmPosition = Controller.getSpatialControlPosition(this.palm);
        this.tipPosition = Controller.getSpatialControlPosition(this.tip);
        this.palmNormal = Controller.getSpatialControlNormal(this.palm);
        this.triggerValue = Controller.getTriggerValue(this.trigger);


        if (this.prevCycleColorButtonPressed === true && this.cycleColorButtonPressed === false) {
            this.cycleColor();
            Entities.editEntity(this.brush, {
                // color: this.currentColor
            });
        }

        this.prevCycleColorButtonPressed = this.cycleColorButtonPressed;

    }

    this.cleanup = function() {
        Entities.deleteEntity(self.brush);
    }
}

function computeNormal(p1, p2) {
    return Vec3.normalize(Vec3.subtract(p2, p1));
}

function update(deltaTime) {
    leftController.update(deltaTime);
    rightController.update(deltaTime);
}

function scriptEnding() {
    leftController.cleanup();
    rightController.cleanup();
}

function vectorIsZero(v) {
    return v.x === 0 && v.y === 0 && v.z === 0;
}


var rightController = new controller(RIGHT, RIGHT_BUTTON_4);
var leftController = new controller(LEFT, LEFT_BUTTON_4);
Script.update.connect(update);
Script.scriptEnding.connect(scriptEnding);

function map(value, min1, max1, min2, max2) {
    return min2 + (max2 - min2) * ((value - min1) / (max1 - min1));
}
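Both the brush radius and the stroke width above come from the small map() helper defined at the end of hydraPaint.js, which linearly remaps the trigger value from the range [TRIGGER_THRESHOLD, 1] onto a target range. A short worked example with the script's own constants (the sample trigger value is arbitrary):

// Linear remap, identical to the map() helper in hydraPaint.js.
function map(value, min1, max1, min2, max2) {
    return min2 + (max2 - min2) * ((value - min1) / (max1 - min1));
}

var TRIGGER_THRESHOLD = 0.1;
var MIN_BRUSH_RADIUS = 0.08;
var MAX_BRUSH_RADIUS = 0.1;

// A trigger pulled halfway maps to a brush a bit under halfway between the radii.
print(map(0.5, TRIGGER_THRESHOLD, 1, MIN_BRUSH_RADIUS, MAX_BRUSH_RADIUS)); // about 0.0889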
examples/example/painting/mousePaint.js (new file, 194 lines)
@@ -0,0 +1,194 @@
//
// mousePaint.js
// examples
//
// Created by Eric Levin on 6/4/15.
// Copyright 2014 High Fidelity, Inc.
//
// This script allows you to paint with the hydra or mouse!
//
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html

var LINE_DIMENSIONS = 10;
var LIFETIME = 6000;
var EVENT_CHANGE_THRESHOLD = 200;
var LINE_WIDTH = .07;
var MAX_POINTS_PER_LINE = 40;
var points = [];
var normals = [];
var deletedLines = [];
var strokeWidths = [];
var count = 0;
var prevEvent = {x: 0, y: 0};
var eventChange;

var MIN_POINT_DISTANCE = .01;

var colorPalette = [{
    red: 250,
    green: 0,
    blue: 0
}, {
    red: 214,
    green: 91,
    blue: 67
}, {
    red: 192,
    green: 41,
    blue: 66
}, {
    red: 84,
    green: 36,
    blue: 55
}, {
    red: 83,
    green: 119,
    blue: 122
}];

var currentColorIndex = 0;
var currentColor = colorPalette[currentColorIndex];

function cycleColor() {
    currentColor = colorPalette[++currentColorIndex];
    if (currentColorIndex === colorPalette.length - 1) {
        currentColorIndex = -1;
    }

}

MousePaint();

function MousePaint() {
    var DRAWING_DISTANCE = 5;
    var lines = [];
    var isDrawing = false;

    var line, linePosition;

    var BRUSH_SIZE = .05;

    var brush = Entities.addEntity({
        type: 'Sphere',
        position: {
            x: 0,
            y: 0,
            z: 0
        },
        color: currentColor,
        dimensions: {
            x: BRUSH_SIZE,
            y: BRUSH_SIZE,
            z: BRUSH_SIZE
        }
    });


    function newLine(position) {
        linePosition = position;
        line = Entities.addEntity({
            position: position,
            type: "PolyLine",
            color: currentColor,
            dimensions: {
                x: LINE_DIMENSIONS,
                y: LINE_DIMENSIONS,
                z: LINE_DIMENSIONS
            },
            linePoints: [],
            lifetime: LIFETIME
        });
        points = [];
        normals = []
        strokeWidths = [];
        lines.push(line);
    }



    function mouseMoveEvent(event) {

        var pickRay = Camera.computePickRay(event.x, event.y);
        count++;
        var worldPoint = computeWorldPoint(pickRay);
        Entities.editEntity(brush, {
            position: worldPoint
        });

        eventChange = Math.sqrt(Math.pow(event.x - prevEvent.x, 2) + Math.pow(event.y - prevEvent.y, 2));
        localPoint = computeLocalPoint(worldPoint);
        if (!isDrawing || points.length > MAX_POINTS_PER_LINE || eventChange > EVENT_CHANGE_THRESHOLD ||
            Vec3.distance(points[points.length - 1], localPoint) < MIN_POINT_DISTANCE) {
            return;
        }

        points.push(localPoint)
        normals.push(computeNormal(worldPoint, pickRay.origin));
        strokeWidths.push(LINE_WIDTH);
        Entities.editEntity(line, {
            strokeWidths: strokeWidths,
            linePoints: points,
            normals: normals,
        });
        prevEvent = event;
    }

    function computeNormal(p1, p2) {
        return Vec3.normalize(Vec3.subtract(p2, p1));
    }

    function computeWorldPoint(pickRay) {
        var addVector = Vec3.multiply(Vec3.normalize(pickRay.direction), DRAWING_DISTANCE);
        return Vec3.sum(pickRay.origin, addVector);
    }

    function computeLocalPoint(worldPoint) {
        var localPoint = Vec3.subtract(worldPoint, linePosition);
        return localPoint;
    }

    function mousePressEvent(event) {
        if (!event.isLeftButton) {
            isDrawing = false;
            return;
        }
        var pickRay = Camera.computePickRay(event.x, event.y);
        prevEvent = {x: event.x, y:event.y};
        var worldPoint = computeWorldPoint(pickRay);
        newLine(worldPoint);
        var localPoint = computeLocalPoint(worldPoint);
        points.push(localPoint);
        normals.push(computeNormal(worldPoint, pickRay.origin));
        strokeWidths.push(0.07);
        isDrawing = true;
    }

    function mouseReleaseEvent() {
        isDrawing = false;
    }

    function keyPressEvent(event) {
        if (event.text === "SPACE") {
            cycleColor();
            Entities.editEntity(brush, {
                color: currentColor
            });
        }
    }

    function cleanup() {
        lines.forEach(function(line) {
            // Entities.deleteEntity(line);
        });
        Entities.deleteEntity(brush);
    }

    Controller.mousePressEvent.connect(mousePressEvent);
    Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
    Controller.mouseMoveEvent.connect(mouseMoveEvent);
    Script.scriptEnding.connect(cleanup);

    Controller.keyPressEvent.connect(keyPressEvent);
}
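mousePaint.js only appends a point when the cursor has actually moved, and it drops events that jump more than EVENT_CHANGE_THRESHOLD pixels, treating them as strays. A small sketch of that screen-space distance test in plain JavaScript (the event objects here are stand-ins, not real Controller events):

var EVENT_CHANGE_THRESHOLD = 200; // pixels, same constant the script uses

function screenDistance(prevEvent, event) {
    return Math.sqrt(Math.pow(event.x - prevEvent.x, 2) + Math.pow(event.y - prevEvent.y, 2));
}

// A 300 x 400 pixel jump is 500 pixels, so it would be skipped rather than drawn.
print(screenDistance({ x: 100, y: 100 }, { x: 400, y: 500 }) > EVENT_CHANGE_THRESHOLD); // true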
@@ -55,8 +55,8 @@ var warpLine = Overlays.addOverlay("line3d", {
var velocity = { x: 0, y: 0, z: 0 };
var VERY_LONG_TIME = 1000000.0;

- var active = Menu.isOptionChecked("Enable VR Mode");
- var prevVRMode = Menu.isOptionChecked("Enable VR Mode");
+ var active = HMD.active;
+ var prevVRMode = HMD.active;

var hmdControls = (function () {

@@ -121,28 +121,28 @@ var hmdControls = (function () {
            velocity = Vec3.sum(velocity, direction);
            break;
        case findAction("YAW_LEFT"):
-            if (yawTimer < 0.0 && Menu.isOptionChecked("Enable VR Mode")) {
+            if (yawTimer < 0.0 && HMD.active) {
                yawChange = yawChange + (shifted ? SHIFT_MAG * VR_YAW_INCREMENT : VR_YAW_INCREMENT);
                yawTimer = CAMERA_UPDATE_TIME;
-            } else if (!Menu.isOptionChecked("Enable VR Mode")) {
+            } else if (!HMD.active) {
                yawChange = yawChange + (shifted ? SHIFT_MAG * YAW_INCREMENT : YAW_INCREMENT);
            }
            break;
        case findAction("YAW_RIGHT"):
-            if (yawTimer < 0.0 && Menu.isOptionChecked("Enable VR Mode")) {
+            if (yawTimer < 0.0 && HMD.active) {
                yawChange = yawChange - (shifted ? SHIFT_MAG * VR_YAW_INCREMENT : VR_YAW_INCREMENT);
                yawTimer = CAMERA_UPDATE_TIME;
-            } else if (!Menu.isOptionChecked("Enable VR Mode")) {
+            } else if (!HMD.active) {
                yawChange = yawChange - (shifted ? SHIFT_MAG * YAW_INCREMENT : YAW_INCREMENT);
            }
            break;
        case findAction("PITCH_DOWN"):
-            if (!Menu.isOptionChecked("Enable VR Mode")) {
+            if (!HMD.active) {
                pitchChange = pitchChange - (shifted ? SHIFT_MAG * PITCH_INCREMENT : PITCH_INCREMENT);
            }
            break;
        case findAction("PITCH_UP"):
-            if (!Menu.isOptionChecked("Enable VR Mode")) {
+            if (!HMD.active) {
                pitchChange = pitchChange + (shifted ? SHIFT_MAG * PITCH_INCREMENT : PITCH_INCREMENT);
            }
            break;

@@ -175,9 +175,9 @@ var hmdControls = (function () {
    }

    function update(dt) {
-        if (prevVRMode != Menu.isOptionChecked("Enable VR Mode")) {
-            active = Menu.isOptionChecked("Enable VR Mode");
-            prevVRMode = Menu.isOptionChecked("Enable VR Mode");
+        if (prevVRMode != HMD.active) {
+            active = HMD.active;
+            prevVRMode = HMD.active;
        }

        if (yawTimer >= 0.0) {
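The hunks above, and the notifications and progress scripts further down, all make the same API migration: instead of polling the "Enable VR Mode" menu item, scripts read HMD.active. A before-and-after sketch of the check, assuming the 2015-era Menu and HMD scripting interfaces shown in these diffs:

// Old approach: infer HMD use from a menu checkbox.
var active = Menu.isOptionChecked("Enable VR Mode");

// New approach after this merge: ask the HMD interface directly.
active = HMD.active;

if (active) {
    print("head-mounted display is driving the view");
}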
@@ -362,6 +362,9 @@
    var elVoxelVolumeSizeY = document.getElementById("property-voxel-volume-size-y");
    var elVoxelVolumeSizeZ = document.getElementById("property-voxel-volume-size-z");
    var elVoxelSurfaceStyle = document.getElementById("property-voxel-surface-style");
+    var elXTextureURL = document.getElementById("property-x-texture-url");
+    var elYTextureURL = document.getElementById("property-y-texture-url");
+    var elZTextureURL = document.getElementById("property-z-texture-url");

    var elHyperlinkHref = document.getElementById("property-hyperlink-href");
    var elHyperlinkDescription = document.getElementById("property-hyperlink-description");

@@ -614,6 +617,9 @@
    elVoxelVolumeSizeY.value = properties.voxelVolumeSize.y.toFixed(2);
    elVoxelVolumeSizeZ.value = properties.voxelVolumeSize.z.toFixed(2);
    elVoxelSurfaceStyle.value = properties.voxelSurfaceStyle;
+    elXTextureURL.value = properties.xTextureURL;
+    elYTextureURL.value = properties.yTextureURL;
+    elZTextureURL.value = properties.zTextureURL;
}

if (selected) {

@@ -867,6 +873,9 @@
elVoxelVolumeSizeY.addEventListener('change', voxelVolumeSizeChangeFunction);
elVoxelVolumeSizeZ.addEventListener('change', voxelVolumeSizeChangeFunction);
elVoxelSurfaceStyle.addEventListener('change', createEmitTextPropertyUpdateFunction('voxelSurfaceStyle'));
+ elXTextureURL.addEventListener('change', createEmitTextPropertyUpdateFunction('xTextureURL'));
+ elYTextureURL.addEventListener('change', createEmitTextPropertyUpdateFunction('yTextureURL'));
+ elZTextureURL.addEventListener('change', createEmitTextPropertyUpdateFunction('zTextureURL'));

elMoveSelectionToGrid.addEventListener("click", function() {
    EventBridge.emitWebEvent(JSON.stringify({

@@ -1063,7 +1072,22 @@
    <option value='0'>marching cubes</option>
    <option value='1'>cubic</option>
    <option value='2'>edged cubic</option>
</select>
</select>
</div>

+ <div class="label">X-axis Texture URL</div>
+ <div class="value">
+     <input type="text" id="property-x-texture-url" class="url"></input>
+ </div>

+ <div class="label">Y-axis Texture URL</div>
+ <div class="value">
+     <input type="text" id="property-y-texture-url" class="url"></input>
+ </div>

+ <div class="label">Z-axis Texture URL</div>
+ <div class="value">
+     <input type="text" id="property-z-texture-url" class="url"></input>
+ </div>
</div>
@@ -98,8 +98,8 @@ EntityPropertyDialogBox = (function () {
            index++;
        }

        if (properties.type == "PolyVox") {
            array.push({ label: "Voxel Space Size:", type: "header" });
        if (properties.type == "PolyVox") {
            array.push({ label: "Voxel Space Size:", type: "header" });
            index++;

            array.push({ label: "X:", value: properties.voxelVolumeSize.x.toFixed(decimals) });

@@ -109,9 +109,16 @@ EntityPropertyDialogBox = (function () {
            array.push({ label: "Z:", value: properties.voxelVolumeSize.z.toFixed(decimals) });
            index++;

-            array.push({ label: "Surface Extractor", value: properties.voxelSurfaceStyle });
-            index++;
-        }
+            array.push({ label: "Surface Extractor", value: properties.voxelSurfaceStyle });
+            index++;
+
+            array.push({ label: "X-axis Texture URL:", value: properties.xTextureURL });
+            index++;
+            array.push({ label: "Y-axis Texture URL:", value: properties.yTextureURL });
+            index++;
+            array.push({ label: "Z-axis Texture URL:", value: properties.zTextureURL });
+            index++;
+        }

        array.push({ label: "Position:", type: "header" });
        index++;

@@ -348,14 +355,17 @@ EntityPropertyDialogBox = (function () {
            properties.backgroundColor.blue = array[index++].value;
        }

        if (properties.type == "PolyVox") {
        if (properties.type == "PolyVox") {
            properties.shapeType = array[index++].value;

            index++; // skip header
            properties.voxelVolumeSize.x = array[index++].value;
            properties.voxelVolumeSize.y = array[index++].value;
            properties.voxelVolumeSize.z = array[index++].value;
            properties.voxelSurfaceStyle = array[index++].value;
            index++; // skip header
            properties.voxelVolumeSize.x = array[index++].value;
            properties.voxelVolumeSize.y = array[index++].value;
            properties.voxelVolumeSize.z = array[index++].value;
            properties.voxelSurfaceStyle = array[index++].value;
            properties.xTextureURL = array[index++].value;
            properties.yTextureURL = array[index++].value;
            properties.zTextureURL = array[index++].value;
        }

        index++; // skip header
@@ -130,7 +130,6 @@ var heights = [];
var myAlpha = [];
var arrays = [];
var isOnHMD = false,
-    ENABLE_VR_MODE = "Enable VR Mode",
    NOTIFICATIONS_3D_DIRECTION = 0.0, // Degrees from avatar orientation.
    NOTIFICATIONS_3D_DISTANCE = 0.6, // Horizontal distance from avatar position.
    NOTIFICATIONS_3D_ELEVATION = -0.8, // Height of top middle of top notification relative to avatar eyes.

@@ -414,7 +413,7 @@ function update() {
        j,
        k;

-    if (isOnHMD !== Menu.isOptionChecked(ENABLE_VR_MODE)) {
+    if (isOnHMD !== HMD.active) {
        while (arrays.length > 0) {
            deleteNotification(0);
        }

@@ -596,7 +595,7 @@ function menuItemEvent(menuItem) {
LODManager.LODDecreased.connect(function() {
    var warningText = "\n"
        + "Due to the complexity of the content, the \n"
-        + "level of detail has been decreased."
+        + "level of detail has been decreased. "
        + "You can now see: \n"
        + LODManager.getLODFeedbackText();
@@ -41,7 +41,6 @@
        SCALE_2D = 0.35, // Scale the SVGs for 2D display.
        background3D = {},
        bar3D = {},
-        ENABLE_VR_MODE_MENU_ITEM = "Enable VR Mode",
        PROGRESS_3D_DIRECTION = 0.0, // Degrees from avatar orientation.
        PROGRESS_3D_DISTANCE = 0.602, // Horizontal distance from avatar position.
        PROGRESS_3D_ELEVATION = -0.8, // Height of top middle of top notification relative to avatar eyes.

@@ -157,7 +156,7 @@
            eyePosition,
            avatarOrientation;

-        if (isOnHMD !== Menu.isOptionChecked(ENABLE_VR_MODE_MENU_ITEM)) {
+        if (isOnHMD !== HMD.active) {
            deleteOverlays();
            isOnHMD = !isOnHMD;
            createOverlays();
@@ -44,7 +44,7 @@ function mousePressEvent(event) {
        }
    }

-    // if the PolyVox entity is empty, we can't pick against its voxel. try picking against its
+    // if the PolyVox entity is empty, we can't pick against its "on" voxels. try picking against its
    // bounding box, instead.
    intersection = Entities.findRayIntersection(pickRay, false); // bounding box picking
    if (intersection.intersects) {
@@ -2,7 +2,7 @@ set(TARGET_NAME interface)
project(${TARGET_NAME})

# set a default root dir for each of our optional externals if it was not passed
- set(OPTIONAL_EXTERNALS "Faceshift" "Sixense" "LeapMotion" "RtMidi" "SDL2" "RSSDK" "3DConnexionClient")
+ set(OPTIONAL_EXTERNALS "Faceshift" "LeapMotion" "RtMidi" "RSSDK" "3DConnexionClient")
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
    string(TOUPPER ${EXTERNAL} ${EXTERNAL}_UPPERCASE)
    if (NOT ${${EXTERNAL}_UPPERCASE}_ROOT_DIR)

@@ -29,18 +29,6 @@ endif()

configure_file(InterfaceVersion.h.in "${PROJECT_BINARY_DIR}/includes/InterfaceVersion.h")

- macro(GroupSources curdir)
-     file(GLOB children RELATIVE ${PROJECT_SOURCE_DIR}/${curdir} ${PROJECT_SOURCE_DIR}/${curdir}/*)
-     foreach(child ${children})
-         if(IS_DIRECTORY ${PROJECT_SOURCE_DIR}/${curdir}/${child})
-             GroupSources(${curdir}/${child})
-         else()
-             string(REPLACE "/" "\\" groupname ${curdir})
-             source_group(${groupname} FILES ${PROJECT_SOURCE_DIR}/${curdir}/${child})
-         endif()
-     endforeach()
- endmacro()

# grab the implementation and header files from src dirs
file(GLOB_RECURSE INTERFACE_SRCS "src/*.cpp" "src/*.h")
GroupSources("src")

@@ -115,16 +103,12 @@ else()
    add_executable(${TARGET_NAME} ${INTERFACE_SRCS} ${QM})
endif()

# set up the external glm library
add_dependency_external_projects(glm bullet)

# set up the external glm library
find_package(GLM REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${GLM_INCLUDE_DIRS})

add_dependency_external_projects(LibOVR)
find_package(LibOVR REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVR_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${LIBOVR_LIBRARIES})

find_package(Bullet REQUIRED)

# perform the system include hack for OS X to ignore warnings

@@ -137,9 +121,10 @@ endif()
target_link_libraries(${TARGET_NAME} ${BULLET_LIBRARIES})

# link required hifi libraries
- link_hifi_libraries(shared octree environment gpu model render fbx networking entities avatars
-                     audio audio-client animation script-engine physics
-                     render-utils entities-renderer ui auto-updater)
+ link_hifi_libraries(shared octree environment gpu model render fbx networking entities avatars
+                     audio audio-client animation script-engine physics
+                     render-utils entities-renderer ui auto-updater
+                     plugins display-plugins input-plugins)

add_dependency_external_projects(sdl2)
interface/resources/images/paintStroke.png (new binary file, 25 KiB; binary content not shown)

(One file's diff was suppressed because it is too large.)
@@ -36,6 +36,8 @@
#include <StDev.h>
#include <udt/PacketHeaders.h>
#include <ViewFrustum.h>
+ #include <plugins/PluginContainer.h>
+ #include <plugins/PluginManager.h>

#include "AudioClient.h"
#include "Bookmarks.h"

@@ -47,13 +49,12 @@
#include "Stars.h"
#include "avatar/Avatar.h"
#include "avatar/MyAvatar.h"
#include "devices/SixenseManager.h"
#include <input-plugins/KeyboardMouseDevice.h>
#include "scripting/ControllerScriptingInterface.h"
#include "scripting/DialogsManagerScriptingInterface.h"
#include "scripting/WebWindowClass.h"
#include "ui/AudioStatsDialog.h"
#include "ui/BandwidthDialog.h"
#include "ui/HMDToolsDialog.h"
#include "ui/ModelsBrowser.h"
#include "ui/OctreeStatsDialog.h"
#include "ui/SnapshotShareDialog.h"

@@ -62,10 +63,9 @@
#include "ui/overlays/Overlays.h"
#include "ui/ApplicationOverlay.h"
#include "ui/ApplicationCompositor.h"
#include "ui/OverlayConductor.h"
#include "ui/RunningScriptsWidget.h"
#include "ui/ToolWindow.h"
#include "ui/UserInputMapper.h"
#include "devices/KeyboardMouseDevice.h"
#include "octree/OctreePacketProcessor.h"
#include "UndoStackScriptingInterface.h"

@@ -79,6 +79,7 @@ class QMouseEvent;
class QSystemTrayIcon;
class QTouchEvent;
class QWheelEvent;
+ class OffscreenGlCanvas;

class GLCanvas;
class FaceTracker;

@@ -86,6 +87,12 @@ class MainWindow;
class Node;
class ScriptEngine;

+ namespace gpu {
+     class Context;
+     typedef std::shared_ptr<Context> ContextPointer;
+ }

static const QString SNAPSHOT_EXTENSION = ".jpg";
static const QString SVO_EXTENSION = ".svo";
static const QString SVO_JSON_EXTENSION = ".svo.json";

@@ -124,7 +131,7 @@ class Application;

typedef bool (Application::* AcceptURLMethod)(const QString &);

- class Application : public QApplication, public AbstractViewStateInterface, public AbstractScriptingServicesInterface {
+ class Application : public QApplication, public AbstractViewStateInterface, public AbstractScriptingServicesInterface, PluginContainer {
    Q_OBJECT

    friend class OctreePacketProcessor;

@@ -136,7 +143,6 @@ public:
    static glm::quat getOrientationForPath() { return getInstance()->_myAvatar->getOrientation(); }
    static glm::vec3 getPositionForAudio() { return getInstance()->_myAvatar->getHead()->getPosition(); }
    static glm::quat getOrientationForAudio() { return getInstance()->_myAvatar->getHead()->getFinalOrientationInWorldFrame(); }
-    static UserInputMapper* getUserInputMapper() { return &getInstance()->_userInputMapper; }
    static void initPlugins();
    static void shutdownPlugins();

@@ -179,6 +185,7 @@ public:
    bool eventFilter(QObject* object, QEvent* event);

    glm::uvec2 getCanvasSize() const;
+    glm::uvec2 getUiSize() const;
    QSize getDeviceSize() const;
    bool hasFocus() const;
    PickRay computePickRay() const;

@@ -262,11 +269,6 @@ public:
    void displaySide(RenderArgs* renderArgs, Camera& whichCamera, bool selfAvatarOnly = false, bool billboard = false);

    virtual const glm::vec3& getShadowDistances() const { return _shadowDistances; }

    /// Computes the off-axis frustum parameters for the view frustum, taking mirroring into account.
    virtual void computeOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
                                       float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) const;

    virtual ViewFrustum* getCurrentViewFrustum() { return getDisplayViewFrustum(); }
    virtual QThread* getMainThread() { return thread(); }
    virtual float getSizeScale() const;

@@ -277,6 +279,24 @@ public:
    virtual void endOverrideEnvironmentData() { _environment.endOverride(); }
    virtual qreal getDevicePixelRatio();

+    // Plugin container support
+    virtual void addMenu(const QString& menuName);
+    virtual void removeMenu(const QString& menuName);
+    virtual void addMenuItem(const QString& path, const QString& name, std::function<void(bool)> onClicked, bool checkable, bool checked, const QString& groupName);
+    virtual void removeMenuItem(const QString& menuName, const QString& menuItem);
+    virtual bool isOptionChecked(const QString& name);
+    virtual void setIsOptionChecked(const QString& path, bool checked);
+    virtual void setFullscreen(const QScreen* target) override;
+    virtual void unsetFullscreen(const QScreen* avoid) override;
+    virtual void showDisplayPluginsTools() override;
+    virtual QGLWidget* getPrimarySurface() override;
+
+    void setActiveDisplayPlugin(const QString& pluginName);
+private:
+    DisplayPlugin * getActiveDisplayPlugin();
+    const DisplayPlugin * getActiveDisplayPlugin() const;
+public:

    FileLogger* getLogger() { return _logger; }

    glm::vec2 getViewportDimensions() const;

@@ -300,10 +320,9 @@ public:
    // rendering of several elements depend on that
    // TODO: carry that information on the Camera as a setting
    bool isHMDMode() const;
    glm::quat getHeadOrientation() const;
    glm::vec3 getHeadPosition() const;
    glm::mat4 getHeadPose() const;
    glm::mat4 getHMDSensorPose() const;
    glm::mat4 getEyePose(int eye) const;
    glm::mat4 getEyeOffset(int eye) const;
    glm::mat4 getEyeProjection(int eye) const;

    QRect getDesirableApplicationGeometry();

@@ -353,6 +372,7 @@ signals:
    void fullAvatarURLChanged(const QString& newValue, const QString& modelName);

    void beforeAboutToQuit();
+    void activeDisplayPluginChanged();

public slots:
    void setSessionUUID(const QUuid& sessionUUID);

@@ -361,6 +381,8 @@ public slots:
    void nodeAdded(SharedNodePointer node);
    void nodeKilled(SharedNodePointer node);
    void packetSent(quint64 length);
+    void updateDisplayMode();
+    void updateInputModes();

    QVector<EntityItemID> pasteEntities(float x, float y, float z);
    bool exportEntities(const QString& filename, const QVector<EntityItemID>& entityIDs);

@@ -431,15 +453,8 @@ private slots:

    void connectedToDomain(const QString& hostname);

-    friend class HMDToolsDialog;
-    void setFullscreen(bool fullscreen);
-    void setEnable3DTVMode(bool enable3DTVMode);
-    void setEnableVRMode(bool enableVRMode);

    void rotationModeChanged();

    glm::vec2 getScaledScreenPoint(glm::vec2 projectedPoint);

    void closeMirrorView();
    void restoreMirrorView();
    void shrinkMirrorView();

@@ -467,6 +482,9 @@ private:

    void update(float deltaTime);

+    void setPalmData(Hand* hand, UserInputMapper::PoseValue pose, float deltaTime, int index);
+    void emulateMouse(Hand* hand, float click, float shift, int index);

    // Various helper functions called during update()
    void updateLOD();
    void updateMouseRay();

@@ -495,6 +513,11 @@ private:
    int sendNackPackets();

    bool _dependencyManagerIsSetup;

+    OffscreenGlCanvas* _offscreenContext;
+    DisplayPluginPointer _displayPlugin;
+    InputPluginList _activeInputPlugins;

    MainWindow* _window;

    ToolWindow* _toolWindow;

@@ -532,11 +555,10 @@ private:

    OctreeQuery _octreeQuery; // NodeData derived class for querying octee cells from octree servers

-    KeyboardMouseDevice _keyboardMouseDevice; // Default input device, the good old keyboard mouse and maybe touchpad
-    UserInputMapper _userInputMapper; // User input mapper allowing to mapp different real devices to the action channels that the application has to offer
-    MyAvatar* _myAvatar; // TODO: move this and relevant code to AvatarManager (or MyAvatar as the case may be)
-    Camera _myCamera; // My view onto the world
-    Camera _mirrorCamera; // Cammera for mirror view
+    KeyboardMouseDevice* _keyboardMouseDevice{ nullptr }; // Default input device, the good old keyboard mouse and maybe touchpad
+    MyAvatar* _myAvatar; // TODO: move this and relevant code to AvatarManager (or MyAvatar as the case may be)
+    Camera _myCamera; // My view onto the world
+    Camera _mirrorCamera; // Cammera for mirror view
    QRect _mirrorViewRect;

    Setting::Handle<bool> _firstRun;

@@ -623,9 +645,6 @@ private:

    void checkSkeleton();

-    QWidget* _fullscreenMenuWidget = new QWidget();
-    int _menuBarHeight;

    QHash<QString, AcceptURLMethod> _acceptedExtensions;

    QList<QString> _domainConnectionRefusals;

@@ -642,8 +661,16 @@ private:
    Overlays _overlays;
    ApplicationOverlay _applicationOverlay;
+    ApplicationCompositor _compositor;
+    OverlayConductor _overlayConductor;
+
+    int _oldHandMouseX[2];
+    int _oldHandMouseY[2];
+    bool _oldHandLeftClick[2];
+    bool _oldHandRightClick[2];
+    int _numFramesSinceLastResize = 0;

    bool _overlayEnabled = true;
    QRect _savedGeometry;
    DialogsManagerScriptingInterface* _dialogsManagerScriptingInterface = new DialogsManagerScriptingInterface();
};
|
@ -46,11 +46,7 @@ QString modeToString(CameraMode mode) {
|
|||
}
|
||||
|
||||
Camera::Camera() :
|
||||
_mode(CAMERA_MODE_THIRD_PERSON),
|
||||
_position(0.0f, 0.0f, 0.0f),
|
||||
_projection(glm::perspective(glm::radians(DEFAULT_FIELD_OF_VIEW_DEGREES), 16.0f/9.0f, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP)),
|
||||
_isKeepLookingAt(false),
|
||||
_lookingAt(0.0f, 0.0f, 0.0f)
|
||||
_projection(glm::perspective(glm::radians(DEFAULT_FIELD_OF_VIEW_DEGREES), 16.0f/9.0f, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP))
|
||||
{
|
||||
}
|
||||
|
||||
|
@ -61,12 +57,33 @@ void Camera::update(float deltaTime) {
|
|||
return;
|
||||
}
|
||||
|
||||
void Camera::recompose() {
    mat4 orientation = glm::mat4_cast(_rotation);
    mat4 translation = glm::translate(mat4(), _position);
    _transform = translation * orientation;
}

void Camera::decompose() {
    _position = vec3(_transform[3]);
    _rotation = glm::quat_cast(_transform);
}

void Camera::setTransform(const glm::mat4& transform) {
    _transform = transform;
    decompose();
}

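Since the cached transform is just translation times rotation, recompose() and decompose() are exact inverses of each other. A minimal stand-alone sketch of the same round trip in plain GLM (the helper names here are illustrative, not part of Camera):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtc/matrix_transform.hpp>

// Compose a rigid transform from a rotation and a position...
glm::mat4 composeTransform(const glm::quat& rotation, const glm::vec3& position) {
    return glm::translate(glm::mat4(), position) * glm::mat4_cast(rotation);
}

// ...and pull the same rotation and position back out of it.
void decomposeTransform(const glm::mat4& transform, glm::quat& rotation, glm::vec3& position) {
    position = glm::vec3(transform[3]);   // translation sits in the last column
    rotation = glm::quat_cast(transform); // upper-left 3x3 is a pure rotation here
}
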
void Camera::setPosition(const glm::vec3& position) {
|
||||
_position = position;
|
||||
_position = position;
|
||||
recompose();
|
||||
if (_isKeepLookingAt) {
|
||||
lookAt(_lookingAt);
|
||||
}
|
||||
}
|
||||
|
||||
void Camera::setRotation(const glm::quat& rotation) {
|
||||
_rotation = rotation;
|
||||
recompose();
|
||||
if (_isKeepLookingAt) {
|
||||
lookAt(_lookingAt);
|
||||
}
|
||||
|
@ -129,3 +146,21 @@ void Camera::keepLookingAt(const glm::vec3& point) {
|
|||
_isKeepLookingAt = true;
|
||||
_lookingAt = point;
|
||||
}
|
||||
|
||||
void Camera::loadViewFrustum(ViewFrustum& frustum) const {
    // We will use these below, from either the camera or head vectors calculated above
    frustum.setProjection(getProjection());

    // Set the viewFrustum up with the correct position and orientation of the camera
    frustum.setPosition(getPosition());
    frustum.setOrientation(getRotation());

    // Ask the ViewFrustum class to calculate our corners
    frustum.calculate();
}

ViewFrustum Camera::toViewFrustum() const {
    ViewFrustum result;
    loadViewFrustum(result);
    return result;
}

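toViewFrustum() is just the by-value convenience over loadViewFrustum(); both leave the frustum fully calculated. A small usage sketch (myCamera is a hypothetical Camera instance):

// Equivalent ways to snapshot the camera's frustum with the methods added here.
ViewFrustum byValue = myCamera.toViewFrustum();

ViewFrustum inPlace;
myCamera.loadViewFrustum(inPlace); // sets projection, position, orientation, then calculate()
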
|
|
|
@ -43,24 +43,31 @@ public:
|
|||
|
||||
void update( float deltaTime );
|
||||
|
||||
void setRotation(const glm::quat& rotation);
|
||||
void setProjection(const glm::mat4 & projection);
|
||||
CameraMode getMode() const { return _mode; }
|
||||
void setMode(CameraMode m);
|
||||
|
||||
glm::quat getRotation() const { return _rotation; }
|
||||
const glm::mat4& getProjection() const { return _projection; }
|
||||
CameraMode getMode() const { return _mode; }
|
||||
void loadViewFrustum(ViewFrustum& frustum) const;
|
||||
ViewFrustum toViewFrustum() const;
|
||||
|
||||
public slots:
|
||||
QString getModeString() const;
|
||||
void setModeString(const QString& mode);
|
||||
|
||||
glm::quat getRotation() const { return _rotation; }
|
||||
void setRotation(const glm::quat& rotation);
|
||||
|
||||
glm::vec3 getPosition() const { return _position; }
|
||||
void setPosition(const glm::vec3& position);
|
||||
|
||||
glm::quat getOrientation() const { return getRotation(); }
|
||||
void setOrientation(const glm::quat& orientation) { setRotation(orientation); }
|
||||
|
||||
const glm::mat4& getTransform() const { return _transform; }
|
||||
void setTransform(const glm::mat4& transform);
|
||||
|
||||
const glm::mat4& getProjection() const { return _projection; }
|
||||
void setProjection(const glm::mat4& projection);
|
||||
|
||||
PickRay computePickRay(float x, float y);
|
||||
|
||||
// These only work on independent cameras
|
||||
|
@ -78,11 +85,17 @@ signals:
|
|||
void modeUpdated(const QString& newMode);
|
||||
|
||||
private:
|
||||
CameraMode _mode;
|
||||
void recompose();
|
||||
void decompose();
|
||||
|
||||
CameraMode _mode{ CAMERA_MODE_THIRD_PERSON };
|
||||
glm::mat4 _transform;
|
||||
glm::mat4 _projection;
|
||||
|
||||
// derived
|
||||
glm::vec3 _position;
|
||||
glm::quat _rotation;
|
||||
glm::mat4 _projection;
|
||||
bool _isKeepLookingAt;
|
||||
bool _isKeepLookingAt{ false };
|
||||
glm::vec3 _lookingAt;
|
||||
};
|
||||
|
||||
|
|
|
@ -9,12 +9,13 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "Application.h"
|
||||
#include "GLCanvas.h"
|
||||
|
||||
#include <QMimeData>
|
||||
#include <QUrl>
|
||||
#include <QWindow>
|
||||
|
||||
#include "Application.h"
|
||||
#include "GLCanvas.h"
|
||||
#include "MainWindow.h"
|
||||
|
||||
const int MSECS_PER_FRAME_WHEN_THROTTLED = 66;
|
||||
|
@ -63,7 +64,6 @@ int GLCanvas::getDeviceHeight() const {
|
|||
}
|
||||
|
||||
void GLCanvas::initializeGL() {
|
||||
Application::getInstance()->initializeGL();
|
||||
setAttribute(Qt::WA_AcceptTouchEvents);
|
||||
setAcceptDrops(true);
|
||||
connect(Application::getInstance(), SIGNAL(applicationStateChanged(Qt::ApplicationState)), this, SLOT(activeChanged(Qt::ApplicationState)));
|
||||
|
|
|
@ -22,7 +22,7 @@ class GLCanvas : public QGLWidget {
|
|||
|
||||
public:
|
||||
GLCanvas();
|
||||
|
||||
|
||||
void stopFrameTimer();
|
||||
|
||||
bool isThrottleRendering() const;
|
||||
|
|
|
@ -97,10 +97,6 @@ void MainWindow::changeEvent(QEvent* event) {
|
|||
} else {
|
||||
emit windowShown(true);
|
||||
}
|
||||
|
||||
if (isFullScreen() != Menu::getInstance()->isOptionChecked(MenuOption::Fullscreen)) {
|
||||
Menu::getInstance()->setIsOptionChecked(MenuOption::Fullscreen, isFullScreen());
|
||||
}
|
||||
} else if (event->type() == QEvent::ActivationChange) {
|
||||
if (isActiveWindow()) {
|
||||
emit windowShown(true);
|
||||
|
|
|
@ -28,7 +28,6 @@
|
|||
#include "devices/DdeFaceTracker.h"
|
||||
#include "devices/Faceshift.h"
|
||||
#include "devices/RealSense.h"
|
||||
#include "devices/SixenseManager.h"
|
||||
#include "devices/3DConnexionClient.h"
|
||||
#include "MainWindow.h"
|
||||
#include "scripting/MenuScriptingInterface.h"
|
||||
|
@ -221,9 +220,20 @@ Menu::Menu() {
|
|||
addActionToQMenuAndActionHash(toolsMenu, MenuOption::PackageModel, 0,
|
||||
qApp, SLOT(packageModel()));
|
||||
|
||||
MenuWrapper* displayMenu = addMenu("Display");
|
||||
{
|
||||
MenuWrapper* displayModeMenu = addMenu(MenuOption::OutputMenu);
|
||||
QActionGroup* displayModeGroup = new QActionGroup(displayModeMenu);
|
||||
displayModeGroup->setExclusive(true);
|
||||
}
|
||||
|
||||
MenuWrapper* avatarMenu = addMenu("Avatar");
|
||||
QObject* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
|
||||
MenuWrapper* inputModeMenu = addMenu(MenuOption::InputMenu);
|
||||
QActionGroup* inputModeGroup = new QActionGroup(inputModeMenu);
|
||||
inputModeGroup->setExclusive(false);
|
||||
|
||||
MenuWrapper* avatarSizeMenu = avatarMenu->addMenu("Size");
|
||||
addActionToQMenuAndActionHash(avatarSizeMenu,
|
||||
MenuOption::IncreaseAvatarSize,
|
||||
|
@ -242,26 +252,16 @@ Menu::Menu() {
|
|||
SLOT(resetSize()));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::KeyboardMotorControl,
|
||||
Qt::CTRL | Qt::SHIFT | Qt::Key_K, true, avatar, SLOT(updateMotionBehavior()));
|
||||
Qt::CTRL | Qt::SHIFT | Qt::Key_K, true, avatar, SLOT(updateMotionBehaviorFromMenu()));
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ScriptedMotorControl, 0, true,
|
||||
avatar, SLOT(updateMotionBehavior()));
|
||||
avatar, SLOT(updateMotionBehaviorFromMenu()));
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::NamesAboveHeads, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::BlueSpeechSphere, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::EnableCharacterController, 0, true,
|
||||
avatar, SLOT(updateMotionBehavior()));
|
||||
|
||||
MenuWrapper* viewMenu = addMenu("View");
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu,
|
||||
MenuOption::Fullscreen,
|
||||
#ifdef Q_OS_MAC
|
||||
Qt::CTRL | Qt::META | Qt::Key_F,
|
||||
#else
|
||||
Qt::CTRL | Qt::Key_F,
|
||||
#endif
|
||||
false,
|
||||
qApp,
|
||||
SLOT(setFullscreen(bool)));
|
||||
addActionToQMenuAndActionHash(viewMenu, MenuOption::ReloadContent, 0, qApp, SLOT(reloadResourceCaches()));
|
||||
|
||||
MenuWrapper* cameraModeMenu = viewMenu->addMenu("Camera Mode");
|
||||
QActionGroup* cameraModeGroup = new QActionGroup(cameraModeMenu);
|
||||
|
@ -299,18 +299,11 @@ Menu::Menu() {
|
|||
dialogsManager.data(),
|
||||
SLOT(hmdTools(bool)));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::EnableVRMode, 0,
|
||||
false,
|
||||
qApp,
|
||||
SLOT(setEnableVRMode(bool)));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Enable3DTVMode, 0,
|
||||
false,
|
||||
qApp,
|
||||
SLOT(setEnable3DTVMode(bool)));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::TurnWithHead, 0, false);
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::StandingHMDSensorMode, 0, false,
|
||||
avatar, SLOT(updateStandingHMDModeFromMenu()));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Stats);
|
||||
addActionToQMenuAndActionHash(viewMenu, MenuOption::Log,
|
||||
Qt::CTRL | Qt::SHIFT | Qt::Key_L,
|
||||
|
@ -453,30 +446,11 @@ Menu::Menu() {
|
|||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlternateIK, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHands, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHandTargets, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::HandMouseInput, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::LowVelocityFilter, 0, true,
|
||||
qApp, SLOT(setLowVelocityFilter(bool)));
|
||||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::ShowIKConstraints, 0, false);
|
||||
|
||||
MenuWrapper* sixenseOptionsMenu = handOptionsMenu->addMenu("Sixense");
|
||||
#ifdef __APPLE__
|
||||
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu,
|
||||
MenuOption::SixenseEnabled,
|
||||
0, false,
|
||||
&SixenseManager::getInstance(),
|
||||
SLOT(toggleSixense(bool)));
|
||||
#endif
|
||||
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu,
|
||||
MenuOption::FilterSixense,
|
||||
0,
|
||||
true,
|
||||
&SixenseManager::getInstance(),
|
||||
SLOT(setFilter(bool)));
|
||||
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu,
|
||||
MenuOption::LowVelocityFilter,
|
||||
0,
|
||||
true,
|
||||
qApp,
|
||||
SLOT(setLowVelocityFilter(bool)));
|
||||
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::SixenseMouseInput, 0, true);
|
||||
|
||||
MenuWrapper* leapOptionsMenu = handOptionsMenu->addMenu("Leap Motion");
|
||||
addCheckableActionToQMenuAndActionHash(leapOptionsMenu, MenuOption::LeapMotionOnHMD, 0, false);
|
||||
|
||||
|
|
|
@ -80,6 +80,13 @@ public:
|
|||
const QKeySequence& shortcut = 0,
|
||||
QAction::MenuRole role = QAction::NoRole,
|
||||
int menuItemLocation = UNSPECIFIED_POSITION);
|
||||
QAction* addCheckableActionToQMenuAndActionHash(MenuWrapper* destinationMenu,
|
||||
const QString& actionName,
|
||||
const QKeySequence& shortcut = 0,
|
||||
const bool checked = false,
|
||||
const QObject* receiver = NULL,
|
||||
const char* member = NULL,
|
||||
int menuItemLocation = UNSPECIFIED_POSITION);
|
||||
|
||||
void removeAction(MenuWrapper* menu, const QString& actionName);
|
||||
|
||||
|
@ -109,14 +116,6 @@ private:
|
|||
void addDisabledActionAndSeparator(MenuWrapper* destinationMenu, const QString& actionName,
|
||||
int menuItemLocation = UNSPECIFIED_POSITION);
|
||||
|
||||
QAction* addCheckableActionToQMenuAndActionHash(MenuWrapper* destinationMenu,
|
||||
const QString& actionName,
|
||||
const QKeySequence& shortcut = 0,
|
||||
const bool checked = false,
|
||||
const QObject* receiver = NULL,
|
||||
const char* member = NULL,
|
||||
int menuItemLocation = UNSPECIFIED_POSITION);
|
||||
|
||||
QAction* getActionFromName(const QString& menuName, MenuWrapper* menu);
|
||||
MenuWrapper* getSubMenuFromName(const QString& menuName, MenuWrapper* menu);
|
||||
MenuWrapper* getMenuParent(const QString& menuName, QString& finalMenuPart);
|
||||
|
@ -186,22 +185,22 @@ namespace MenuOption {
|
|||
const QString EditEntitiesHelp = "Edit Entities Help...";
|
||||
const QString Enable3DTVMode = "Enable 3DTV Mode";
|
||||
const QString EnableCharacterController = "Enable avatar collisions";
|
||||
const QString EnableVRMode = "Enable VR Mode";
|
||||
const QString ExpandMyAvatarSimulateTiming = "Expand /myAvatar/simulation";
|
||||
const QString ExpandMyAvatarTiming = "Expand /myAvatar";
|
||||
const QString ExpandOtherAvatarTiming = "Expand /otherAvatar";
|
||||
const QString ExpandPaintGLTiming = "Expand /paintGL";
|
||||
const QString ExpandUpdateTiming = "Expand /update";
|
||||
const QString Faceshift = "Faceshift";
|
||||
const QString FilterSixense = "Smooth Sixense Movement";
|
||||
const QString FirstPerson = "First Person";
|
||||
const QString Forward = "Forward";
|
||||
const QString FrameTimer = "Show Timer";
|
||||
const QString Fullscreen = "Fullscreen";
|
||||
const QString FullscreenMirror = "Fullscreen Mirror";
|
||||
const QString GlowWhenSpeaking = "Glow When Speaking";
|
||||
const QString HandMouseInput = "Enable Hand Mouse Input";
|
||||
const QString HMDTools = "HMD Tools";
|
||||
const QString IncreaseAvatarSize = "Increase Avatar Size";
|
||||
const QString IndependentMode = "Independent Mode";
|
||||
const QString InputMenu = "Avatar>Input Devices";
|
||||
const QString KeyboardMotorControl = "Enable Keyboard Motor Control";
|
||||
const QString LeapMotionOnHMD = "Leap Motion on HMD";
|
||||
const QString LoadScript = "Open and Run Script File...";
|
||||
|
@ -220,6 +219,7 @@ namespace MenuOption {
|
|||
const QString NoFaceTracking = "None";
|
||||
const QString OctreeStats = "Entity Statistics";
|
||||
const QString OnlyDisplayTopTen = "Only Display Top Ten";
|
||||
const QString OutputMenu = "Display>Mode";
|
||||
const QString PackageModel = "Package Model...";
|
||||
const QString Pair = "Pair";
|
||||
const QString PhysicsShowOwned = "Highlight Simulation Ownership";
|
||||
|
@ -270,8 +270,7 @@ namespace MenuOption {
|
|||
const QString ShowIKConstraints = "Show IK Constraints";
|
||||
const QString ShowRealtimeEntityStats = "Show Realtime Entity Stats";
|
||||
const QString ShowWhosLookingAtMe = "Show Who's Looking at Me";
|
||||
const QString SixenseEnabled = "Enable Hydra Support";
|
||||
const QString SixenseMouseInput = "Enable Sixense Mouse Input";
|
||||
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode";
|
||||
const QString Stars = "Stars";
|
||||
const QString Stats = "Stats";
|
||||
const QString StopAllScripts = "Stop All Scripts";
|
||||
|
|
|
@ -12,8 +12,6 @@
|
|||
#include <QStyle>
|
||||
#include <QStyleOptionTitleBar>
|
||||
|
||||
#include "GLCanvas.h"
|
||||
|
||||
#include "UIUtil.h"
|
||||
|
||||
int UIUtil::getWindowTitleBarHeight(const QWidget* window) {
|
||||
|
|
|
@ -245,7 +245,7 @@ void Avatar::simulate(float deltaTime) {
|
|||
}
|
||||
|
||||
void Avatar::slamPosition(const glm::vec3& newPosition) {
|
||||
AvatarData::setPosition(newPosition);
|
||||
setPosition(newPosition);
|
||||
_positionDeltaAccumulator = glm::vec3(0.0f);
|
||||
_velocity = glm::vec3(0.0f);
|
||||
_lastVelocity = glm::vec3(0.0f);
|
||||
|
|
|
@ -279,7 +279,7 @@ void AvatarManager::handleCollisionEvents(CollisionEvents& collisionEvents) {
|
|||
const QString& collisionSoundURL = myAvatar->getCollisionSoundURL();
|
||||
if (!collisionSoundURL.isEmpty()) {
|
||||
const float velocityChange = glm::length(collision.velocityChange);
|
||||
const float MIN_AVATAR_COLLISION_ACCELERATION = 0.01;
|
||||
const float MIN_AVATAR_COLLISION_ACCELERATION = 0.01f;
|
||||
const bool isSound = (collision.type == CONTACT_EVENT_TYPE_START) && (velocityChange > MIN_AVATAR_COLLISION_ACCELERATION);
|
||||
|
||||
if (!isSound) {
|
||||
|
|
|
@ -117,12 +117,15 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
|
|||
}
|
||||
}
|
||||
}
|
||||
// Twist the upper body to follow the rotation of the head, but only do this with my avatar,
|
||||
// since everyone else will see the full joint rotations for other people.
|
||||
const float BODY_FOLLOW_HEAD_YAW_RATE = 0.1f;
|
||||
const float BODY_FOLLOW_HEAD_FACTOR = 0.66f;
|
||||
float currentTwist = getTorsoTwist();
|
||||
setTorsoTwist(currentTwist + (getFinalYaw() * BODY_FOLLOW_HEAD_FACTOR - currentTwist) * BODY_FOLLOW_HEAD_YAW_RATE);
|
||||
|
||||
if (!myAvatar->getStandingHMDSensorMode()) {
|
||||
// Twist the upper body to follow the rotation of the head, but only do this with my avatar,
|
||||
// since everyone else will see the full joint rotations for other people.
|
||||
const float BODY_FOLLOW_HEAD_YAW_RATE = 0.1f;
|
||||
const float BODY_FOLLOW_HEAD_FACTOR = 0.66f;
|
||||
float currentTwist = getTorsoTwist();
|
||||
setTorsoTwist(currentTwist + (getFinalYaw() * BODY_FOLLOW_HEAD_FACTOR - currentTwist) * BODY_FOLLOW_HEAD_YAW_RATE);
|
||||
}
|
||||
}
|
||||
|
||||
if (!(_isFaceTrackerConnected || billboard)) {
|
||||
|
@ -344,14 +347,20 @@ bool Head::isLookingAtMe() {
|
|||
glm::quat Head::getCameraOrientation() const {
|
||||
// NOTE: Head::getCameraOrientation() is not used for orienting the camera "view" while in Oculus mode, so
|
||||
// you may wonder why this code is here. This method will be called while in Oculus mode to determine how
|
||||
// to change the driving direction while in Oculus mode. It is used to support driving toward where your
|
||||
// to change the driving direction while in Oculus mode. It is used to support driving toward where your
|
||||
// head is looking. Note that in oculus mode, your actual camera view and where your head is looking is not
|
||||
// always the same.
|
||||
if (qApp->isHMDMode()) {
|
||||
return getOrientation();
|
||||
MyAvatar* myAvatar = dynamic_cast<MyAvatar*>(_owningAvatar);
|
||||
if (myAvatar && myAvatar->getStandingHMDSensorMode()) {
|
||||
return glm::quat_cast(myAvatar->getSensorToWorldMatrix()) * myAvatar->getHMDSensorOrientation();
|
||||
} else {
|
||||
return getOrientation();
|
||||
}
|
||||
} else {
|
||||
Avatar* owningAvatar = static_cast<Avatar*>(_owningAvatar);
|
||||
return owningAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
|
||||
}
|
||||
Avatar* owningAvatar = static_cast<Avatar*>(_owningAvatar);
|
||||
return owningAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
|
||||
}
|
||||
|
||||
glm::quat Head::getEyeRotation(const glm::vec3& eyePosition) const {
|
||||
|
|
|
@ -34,7 +34,6 @@
|
|||
#include <UserActivityLogger.h>
|
||||
|
||||
#include "devices/Faceshift.h"
|
||||
#include "devices/OculusManager.h"
|
||||
|
||||
#include "Application.h"
|
||||
#include "AvatarManager.h"
|
||||
|
@ -97,6 +96,15 @@ MyAvatar::MyAvatar(RigPointer rig) :
|
|||
_eyeContactTarget(LEFT_EYE),
|
||||
_realWorldFieldOfView("realWorldFieldOfView",
|
||||
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
|
||||
_hmdSensorMatrix(),
|
||||
_hmdSensorOrientation(),
|
||||
_hmdSensorPosition(),
|
||||
_bodySensorMatrix(),
|
||||
_sensorToWorldMatrix(),
|
||||
_standingHMDSensorMode(false),
|
||||
_goToPending(false),
|
||||
_goToPosition(),
|
||||
_goToOrientation(),
|
||||
_rig(rig),
|
||||
_prevShouldDrawHead(true)
|
||||
{
|
||||
|
@ -142,6 +150,13 @@ void MyAvatar::reset() {
|
|||
}
|
||||
|
||||
void MyAvatar::update(float deltaTime) {
|
||||
|
||||
if (_goToPending) {
|
||||
setPosition(_goToPosition);
|
||||
setOrientation(_goToOrientation);
|
||||
_goToPending = false;
|
||||
}
|
||||
|
||||
if (_referential) {
|
||||
_referential->update();
|
||||
}
|
||||
|
@ -149,6 +164,7 @@ void MyAvatar::update(float deltaTime) {
|
|||
Head* head = getHead();
|
||||
head->relaxLean(deltaTime);
|
||||
updateFromTrackers(deltaTime);
|
||||
|
||||
// Get audio loudness data from audio input device
|
||||
auto audio = DependencyManager::get<AudioClient>();
|
||||
head->setAudioLoudness(audio->getLastInputLoudness());
|
||||
|
@ -228,6 +244,41 @@ void MyAvatar::simulate(float deltaTime) {
|
|||
maybeUpdateBillboard();
|
||||
}
|
||||
|
||||
glm::mat4 MyAvatar::getSensorToWorldMatrix() const {
    if (getStandingHMDSensorMode()) {
        return _sensorToWorldMatrix;
    } else {
        return createMatFromQuatAndPos(getWorldAlignedOrientation(), getDefaultEyePosition());
    }
}

// best called at start of main loop just after we have a fresh hmd pose.
// update internal body position from new hmd pose.
void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix) {
    // update the sensorMatrices based on the new hmd pose
    _hmdSensorMatrix = hmdSensorMatrix;
    _hmdSensorPosition = extractTranslation(hmdSensorMatrix);
    _hmdSensorOrientation = glm::quat_cast(hmdSensorMatrix);
    _bodySensorMatrix = deriveBodyFromHMDSensor();

    if (getStandingHMDSensorMode()) {
        // set the body position/orientation to reflect motion due to the head.
        auto worldMat = _sensorToWorldMatrix * _bodySensorMatrix;
        setPosition(extractTranslation(worldMat));
        setOrientation(glm::quat_cast(worldMat));
    }
}

// best called at end of main loop, just before rendering.
// update sensor to world matrix from current body position and hmd sensor.
// This is so the correct camera can be used for rendering.
void MyAvatar::updateSensorToWorldMatrix() {
    // update the sensor mat so that the body position will end up in the desired
    // position when driven from the head.
    glm::mat4 desiredMat = createMatFromQuatAndPos(getOrientation(), getPosition());
    _sensorToWorldMatrix = desiredMat * glm::inverse(_bodySensorMatrix);
}

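These two methods are meant to bracket the frame: the fresh HMD pose goes in at the top of the main loop, and the sensor-to-world matrix is rebuilt again just before rendering so the HMD camera follows whatever the simulation did to the avatar in between. A hedged sketch of that call order (the wrapper function is illustrative, not the actual Application code):

void runOneFrame(MyAvatar* myAvatar, const glm::mat4& freshHmdSensorPose) {
    // 1. Start of frame: push the new HMD pose; this re-derives _bodySensorMatrix
    //    (and, in standing mode, moves the avatar to follow the head).
    myAvatar->updateFromHMDSensorMatrix(freshHmdSensorPose);

    // 2. ... simulation, physics and scripts may move the avatar here ...

    // 3. End of frame: rebuild sensor-to-world from the final body pose so the
    //    eye cameras are rendered relative to where the avatar actually ended up.
    myAvatar->updateSensorToWorldMatrix();
}
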
// Update avatar head rotation with sensor data
|
||||
void MyAvatar::updateFromTrackers(float deltaTime) {
|
||||
glm::vec3 estimatedPosition, estimatedRotation;
|
||||
|
@ -242,7 +293,7 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
|
|||
bool inFacetracker = tracker && !tracker->isMuted();
|
||||
|
||||
if (inHmd) {
|
||||
estimatedPosition = qApp->getHeadPosition();
|
||||
estimatedPosition = extractTranslation(getHMDSensorMatrix());
|
||||
estimatedPosition.x *= -1.0f;
|
||||
_trackedHeadPosition = estimatedPosition;
|
||||
|
||||
|
@ -286,15 +337,18 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
|
|||
|
||||
Head* head = getHead();
|
||||
if (inHmd || isPlaying()) {
|
||||
head->setDeltaPitch(estimatedRotation.x);
|
||||
head->setDeltaYaw(estimatedRotation.y);
|
||||
if (!getStandingHMDSensorMode()) {
|
||||
head->setDeltaPitch(estimatedRotation.x);
|
||||
head->setDeltaYaw(estimatedRotation.y);
|
||||
head->setDeltaRoll(estimatedRotation.z);
|
||||
}
|
||||
} else {
|
||||
float magnifyFieldOfView = qApp->getFieldOfView() /
|
||||
_realWorldFieldOfView.get();
|
||||
head->setDeltaPitch(estimatedRotation.x * magnifyFieldOfView);
|
||||
head->setDeltaYaw(estimatedRotation.y * magnifyFieldOfView);
|
||||
head->setDeltaRoll(estimatedRotation.z);
|
||||
}
|
||||
head->setDeltaRoll(estimatedRotation.z);
|
||||
|
||||
// Update torso lean distance based on accelerometer data
|
||||
const float TORSO_LENGTH = 0.5f;
|
||||
|
@ -309,10 +363,12 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
|
|||
relativePosition.x = -relativePosition.x;
|
||||
}
|
||||
|
||||
head->setLeanSideways(glm::clamp(glm::degrees(atanf(relativePosition.x * _leanScale / TORSO_LENGTH)),
|
||||
-MAX_LEAN, MAX_LEAN));
|
||||
head->setLeanForward(glm::clamp(glm::degrees(atanf(relativePosition.z * _leanScale / TORSO_LENGTH)),
|
||||
-MAX_LEAN, MAX_LEAN));
|
||||
if (!(inHmd && getStandingHMDSensorMode())) {
|
||||
head->setLeanSideways(glm::clamp(glm::degrees(atanf(relativePosition.x * _leanScale / TORSO_LENGTH)),
|
||||
-MAX_LEAN, MAX_LEAN));
|
||||
head->setLeanForward(glm::clamp(glm::degrees(atanf(relativePosition.z * _leanScale / TORSO_LENGTH)),
|
||||
-MAX_LEAN, MAX_LEAN));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
@ -343,6 +399,22 @@ glm::vec3 MyAvatar::getLeftPalmPosition() {
|
|||
return leftHandPosition;
|
||||
}
|
||||
|
||||
glm::vec3 MyAvatar::getLeftPalmVelocity() {
|
||||
const PalmData* palm = getHand()->getPalm(LEFT_HAND_INDEX);
|
||||
if (palm != NULL) {
|
||||
return palm->getVelocity();
|
||||
}
|
||||
return glm::vec3(0.0f);
|
||||
}
|
||||
|
||||
glm::vec3 MyAvatar::getLeftPalmAngularVelocity() {
|
||||
const PalmData* palm = getHand()->getPalm(LEFT_HAND_INDEX);
|
||||
if (palm != NULL) {
|
||||
return palm->getRawAngularVelocity();
|
||||
}
|
||||
return glm::vec3(0.0f);
|
||||
}
|
||||
|
||||
glm::quat MyAvatar::getLeftPalmRotation() {
|
||||
glm::quat leftRotation;
|
||||
getSkeletonModel().getJointRotationInWorldFrame(getSkeletonModel().getLeftHandJointIndex(), leftRotation);
|
||||
|
@ -358,6 +430,22 @@ glm::vec3 MyAvatar::getRightPalmPosition() {
|
|||
return rightHandPosition;
|
||||
}
|
||||
|
||||
glm::vec3 MyAvatar::getRightPalmVelocity() {
|
||||
const PalmData* palm = getHand()->getPalm(RIGHT_HAND_INDEX);
|
||||
if (palm != NULL) {
|
||||
return palm->getVelocity();
|
||||
}
|
||||
return glm::vec3(0.0f);
|
||||
}
|
||||
|
||||
glm::vec3 MyAvatar::getRightPalmAngularVelocity() {
|
||||
const PalmData* palm = getHand()->getPalm(RIGHT_HAND_INDEX);
|
||||
if (palm != NULL) {
|
||||
return palm->getRawAngularVelocity();
|
||||
}
|
||||
return glm::vec3(0.0f);
|
||||
}
|
||||
|
||||
glm::quat MyAvatar::getRightPalmRotation() {
|
||||
glm::quat rightRotation;
|
||||
getSkeletonModel().getJointRotationInWorldFrame(getSkeletonModel().getRightHandJointIndex(), rightRotation);
|
||||
|
@ -845,10 +933,8 @@ void MyAvatar::updateLookAtTargetAvatar() {
|
|||
gazeOffset = gazeOffset * HUMAN_EYE_SEPARATION / myEyeSeparation;
|
||||
|
||||
if (Application::getInstance()->isHMDMode()) {
|
||||
//avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getCamera()->getPosition()
|
||||
// + OculusManager::getMidEyePosition() + gazeOffset);
|
||||
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
|
||||
+ OculusManager::getMidEyePosition() + gazeOffset);
|
||||
+ glm::vec3(qApp->getHMDSensorPose()[3]) + gazeOffset);
|
||||
} else {
|
||||
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
|
||||
+ gazeOffset);
|
||||
|
@ -889,7 +975,7 @@ eyeContactTarget MyAvatar::getEyeContactTarget() {
|
|||
}
|
||||
|
||||
glm::vec3 MyAvatar::getDefaultEyePosition() const {
|
||||
return _position + getWorldAlignedOrientation() * _skeletonModel.getDefaultEyeModelPosition();
|
||||
return getPosition() + getWorldAlignedOrientation() * _skeletonModel.getDefaultEyeModelPosition();
|
||||
}
|
||||
|
||||
const float SCRIPT_PRIORITY = DEFAULT_PRIORITY + 1.0f;
|
||||
|
@ -1263,10 +1349,13 @@ void MyAvatar::updateOrientation(float deltaTime) {
|
|||
glm::quat(glm::radians(glm::vec3(0.0f, _bodyYawDelta * deltaTime, 0.0f))));
|
||||
|
||||
if (qApp->isHMDMode()) {
|
||||
glm::quat orientation = glm::quat_cast(getSensorToWorldMatrix()) * getHMDSensorOrientation();
|
||||
glm::quat bodyOrientation = getWorldBodyOrientation();
|
||||
glm::quat localOrientation = glm::inverse(bodyOrientation) * orientation;
|
||||
|
||||
// these angles will be in radians
|
||||
glm::quat orientation = qApp->getHeadOrientation();
|
||||
// ... so they need to be converted to degrees before we do math...
|
||||
glm::vec3 euler = glm::eulerAngles(orientation) * DEGREES_PER_RADIAN;
|
||||
glm::vec3 euler = glm::eulerAngles(localOrientation) * DEGREES_PER_RADIAN;
|
||||
|
||||
//Invert yaw and roll when in mirror mode
|
||||
if (Application::getInstance()->getCamera()->getMode() == CAMERA_MODE_MIRROR) {
|
||||
|
@ -1329,6 +1418,8 @@ glm::vec3 MyAvatar::applyKeyboardMotor(float deltaTime, const glm::vec3& localVe
|
|||
glm::vec3 direction = front + right + up;
|
||||
float directionLength = glm::length(direction);
|
||||
|
||||
//qCDebug(interfaceapp, "direction = (%.5f, %.5f, %.5f)", direction.x, direction.y, direction.z);
|
||||
|
||||
// Compute motor magnitude
|
||||
if (directionLength > EPSILON) {
|
||||
direction /= directionLength;
|
||||
|
@ -1433,7 +1524,6 @@ void MyAvatar::updatePosition(float deltaTime) {
|
|||
// update _moving flag based on speed
|
||||
const float MOVING_SPEED_THRESHOLD = 0.01f;
|
||||
_moving = speed > MOVING_SPEED_THRESHOLD;
|
||||
|
||||
}
|
||||
|
||||
void MyAvatar::updateCollisionSound(const glm::vec3 &penetration, float deltaTime, float frequency) {
|
||||
|
@ -1528,32 +1618,31 @@ void MyAvatar::goToLocation(const glm::vec3& newPosition,
|
|||
qCDebug(interfaceapp).nospace() << "MyAvatar goToLocation - moving to " << newPosition.x << ", "
|
||||
<< newPosition.y << ", " << newPosition.z;
|
||||
|
||||
glm::vec3 shiftedPosition = newPosition;
|
||||
|
||||
_goToPending = true;
|
||||
_goToPosition = newPosition;
|
||||
_goToOrientation = getOrientation();
|
||||
if (hasOrientation) {
|
||||
qCDebug(interfaceapp).nospace() << "MyAvatar goToLocation - new orientation is "
|
||||
<< newOrientation.x << ", " << newOrientation.y << ", " << newOrientation.z << ", " << newOrientation.w;
|
||||
<< newOrientation.x << ", " << newOrientation.y << ", " << newOrientation.z << ", " << newOrientation.w;
|
||||
|
||||
// orient the user to face the target
|
||||
glm::quat quatOrientation = newOrientation;
|
||||
|
||||
if (shouldFaceLocation) {
|
||||
|
||||
quatOrientation = newOrientation * glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
|
||||
|
||||
// move the user a couple units away
|
||||
const float DISTANCE_TO_USER = 2.0f;
|
||||
shiftedPosition = newPosition - quatOrientation * IDENTITY_FRONT * DISTANCE_TO_USER;
|
||||
_goToPosition = newPosition - quatOrientation * IDENTITY_FRONT * DISTANCE_TO_USER;
|
||||
}
|
||||
|
||||
setOrientation(quatOrientation);
|
||||
_goToOrientation = quatOrientation;
|
||||
}
|
||||
|
||||
slamPosition(shiftedPosition);
|
||||
emit transformChanged();
|
||||
}
|
||||
|
||||
void MyAvatar::updateMotionBehavior() {
|
||||
void MyAvatar::updateMotionBehaviorFromMenu() {
|
||||
Menu* menu = Menu::getInstance();
|
||||
if (menu->isOptionChecked(MenuOption::KeyboardMotorControl)) {
|
||||
_motionBehaviors |= AVATAR_MOTION_KEYBOARD_MOTOR_ENABLED;
|
||||
|
@ -1568,6 +1657,11 @@ void MyAvatar::updateMotionBehavior() {
|
|||
_characterController.setEnabled(menu->isOptionChecked(MenuOption::EnableCharacterController));
|
||||
}
|
||||
|
||||
void MyAvatar::updateStandingHMDModeFromMenu() {
|
||||
Menu* menu = Menu::getInstance();
|
||||
_standingHMDSensorMode = menu->isOptionChecked(MenuOption::StandingHMDSensorMode);
|
||||
}
|
||||
|
||||
//Renders sixense laser pointers for UI selection with controllers
|
||||
void MyAvatar::renderLaserPointers(gpu::Batch& batch) {
|
||||
const float PALM_TIP_ROD_RADIUS = 0.002f;
|
||||
|
@ -1619,3 +1713,37 @@ void MyAvatar::relayDriveKeysToCharacterController() {
|
|||
_characterController.jump();
|
||||
}
|
||||
}
|
||||
|
||||
glm::vec3 MyAvatar::getWorldBodyPosition() const {
|
||||
return transformPoint(_sensorToWorldMatrix, extractTranslation(_bodySensorMatrix));
|
||||
}
|
||||
|
||||
glm::quat MyAvatar::getWorldBodyOrientation() const {
|
||||
return glm::quat_cast(_sensorToWorldMatrix * _bodySensorMatrix);
|
||||
}
|
||||
|
||||
// derive avatar body position and orientation from the current HMD Sensor location.
// results are in sensor space
glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {

    // HMD is in sensor space.
    const glm::vec3 hmdPosition = getHMDSensorPosition();
    const glm::quat hmdOrientation = getHMDSensorOrientation();
    const glm::quat hmdOrientationYawOnly = cancelOutRollAndPitch(hmdOrientation);

    // In sensor space, figure out where the avatar body should be,
    // by applying offsets from the avatar's neck & head joints.
    vec3 localEyes = _skeletonModel.getDefaultEyeModelPosition();
    vec3 localNeck(0.0f, 0.48f, 0.0f); // start with some kind of guess if the skeletonModel is not loaded yet.
    _skeletonModel.getLocalNeckPosition(localNeck);

    // apply the simplistic head/neck model:
    // the eyeToNeck offset is relative to the full HMD orientation,
    // while the neckToRoot offset is only relative to the HMD's yaw.
    glm::vec3 eyeToNeck = hmdOrientation * (localNeck - localEyes);
    glm::vec3 neckToRoot = hmdOrientationYawOnly * -localNeck;
    glm::vec3 bodyPos = hmdPosition + eyeToNeck + neckToRoot;

    // avatar facing is determined solely by hmd orientation.
    return createMatFromQuatAndPos(hmdOrientationYawOnly, bodyPos);
}

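The reason neckToRoot uses only the HMD's yaw is to keep the derived body under the head when the user pitches or rolls. A small numeric sketch of that difference, reusing the same cancelOutRollAndPitch() helper as above (the offsets are made up for illustration, not read from the skeleton):

// With the head pitched straight down, the full orientation would push the root
// horizontally by the whole neck length; the yaw-only rotation keeps it below the head.
glm::vec3 localNeck(0.0f, 0.48f, 0.0f);
glm::quat pitchedDown = glm::angleAxis(glm::radians(90.0f), glm::vec3(1.0f, 0.0f, 0.0f));

glm::vec3 fullOffset = pitchedDown * -localNeck;                            // ~0.48 m horizontal offset
glm::vec3 yawOnlyOffset = cancelOutRollAndPitch(pitchedDown) * -localNeck;  // (0, -0.48, 0), straight down
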
|
|
|
@ -44,6 +44,20 @@ public:
|
|||
void update(float deltaTime);
|
||||
void preRender(RenderArgs* renderArgs);
|
||||
|
||||
const glm::mat4& getHMDSensorMatrix() const { return _hmdSensorMatrix; }
|
||||
const glm::vec3& getHMDSensorPosition() const { return _hmdSensorPosition; }
|
||||
const glm::quat& getHMDSensorOrientation() const { return _hmdSensorOrientation; }
|
||||
glm::mat4 getSensorToWorldMatrix() const;
|
||||
|
||||
// best called at start of main loop just after we have a fresh hmd pose.
|
||||
// update internal body position from new hmd pose.
|
||||
void updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix);
|
||||
|
||||
// best called at end of main loop, just before rendering.
|
||||
// update sensor to world matrix from current body position and hmd sensor.
|
||||
// This is so the correct camera can be used for rendering.
|
||||
void updateSensorToWorldMatrix();
|
||||
|
||||
void setLeanScale(float scale) { _leanScale = scale; }
|
||||
void setRealWorldFieldOfView(float realWorldFov) { _realWorldFieldOfView.set(realWorldFov); }
|
||||
|
||||
|
@ -59,6 +73,7 @@ public:
|
|||
Q_INVOKABLE void startAnimation(const QString& url, float fps = 30.0f, float priority = 1.0f, bool loop = false,
|
||||
bool hold = false, float firstFrame = 0.0f,
|
||||
float lastFrame = FLT_MAX, const QStringList& maskedJoints = QStringList());
|
||||
|
||||
/// Stops an animation as identified by a URL.
|
||||
Q_INVOKABLE void stopAnimation(const QString& url);
|
||||
|
||||
|
@ -148,6 +163,8 @@ public:
|
|||
static const float ZOOM_MAX;
|
||||
static const float ZOOM_DEFAULT;
|
||||
|
||||
bool getStandingHMDSensorMode() const { return _standingHMDSensorMode; }
|
||||
|
||||
public slots:
|
||||
void increaseSize();
|
||||
void decreaseSize();
|
||||
|
@ -162,11 +179,16 @@ public slots:
|
|||
glm::vec3 getThrust() { return _thrust; };
|
||||
void setThrust(glm::vec3 newThrust) { _thrust = newThrust; }
|
||||
|
||||
void updateMotionBehavior();
|
||||
void updateMotionBehaviorFromMenu();
|
||||
void updateStandingHMDModeFromMenu();
|
||||
|
||||
glm::vec3 getLeftPalmPosition();
|
||||
glm::vec3 getLeftPalmVelocity();
|
||||
glm::vec3 getLeftPalmAngularVelocity();
|
||||
glm::quat getLeftPalmRotation();
|
||||
glm::vec3 getRightPalmPosition();
|
||||
glm::vec3 getRightPalmVelocity();
|
||||
glm::vec3 getRightPalmAngularVelocity();
|
||||
glm::quat getRightPalmRotation();
|
||||
|
||||
void clearReferential();
|
||||
|
@ -189,6 +211,8 @@ signals:
|
|||
|
||||
private:
|
||||
|
||||
glm::vec3 getWorldBodyPosition() const;
|
||||
glm::quat getWorldBodyOrientation() const;
|
||||
QByteArray toByteArray();
|
||||
void simulate(float deltaTime);
|
||||
void updateFromTrackers(float deltaTime);
|
||||
|
@ -224,6 +248,10 @@ private:
|
|||
|
||||
void setVisibleInSceneIfReady(Model* model, render::ScenePointer scene, bool visiblity);
|
||||
|
||||
// derive avatar body position and orientation from the current HMD Sensor location.
|
||||
// results are in sensor space
|
||||
glm::mat4 deriveBodyFromHMDSensor() const;
|
||||
|
||||
glm::vec3 _gravity;
|
||||
|
||||
float _driveKeys[MAX_DRIVE_KEYS];
|
||||
|
@ -281,6 +309,26 @@ private:
|
|||
|
||||
RigPointer _rig;
|
||||
bool _prevShouldDrawHead;
|
||||
|
||||
// cache of the current HMD sensor position and orientation
|
||||
// in sensor space.
|
||||
glm::mat4 _hmdSensorMatrix;
|
||||
glm::quat _hmdSensorOrientation;
|
||||
glm::vec3 _hmdSensorPosition;
|
||||
|
||||
// cache of the current body position and orientation of the avatar's body,
|
||||
// in sensor space.
|
||||
glm::mat4 _bodySensorMatrix;
|
||||
|
||||
// used to transform any sensor into world space, including the _hmdSensorMat, or hand controllers.
|
||||
glm::mat4 _sensorToWorldMatrix;
|
||||
|
||||
bool _standingHMDSensorMode;
|
||||
|
||||
bool _goToPending;
|
||||
glm::vec3 _goToPosition;
|
||||
glm::quat _goToOrientation;
|
||||
|
||||
std::unordered_set<int> _headBoneSet;
|
||||
};
|
||||
|
||||
|
|
|
@ -107,12 +107,14 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
|
|||
const FBXGeometry& geometry = _geometry->getFBXGeometry();
|
||||
|
||||
Rig::HeadParameters params;
|
||||
params.modelRotation = getRotation();
|
||||
params.modelTranslation = getTranslation();
|
||||
params.leanSideways = _owningAvatar->getHead()->getFinalLeanSideways();
|
||||
params.leanForward = _owningAvatar->getHead()->getFinalLeanSideways();
|
||||
params.leanForward = _owningAvatar->getHead()->getFinalLeanForward();
|
||||
params.torsoTwist = _owningAvatar->getHead()->getTorsoTwist();
|
||||
params.localHeadOrientation = _owningAvatar->getHead()->getFinalOrientationInLocalFrame();
|
||||
params.worldHeadOrientation = _owningAvatar->getHead()->getFinalOrientationInWorldFrame();
|
||||
params.eyeLookAt = _owningAvatar->getHead()->getCorrectedLookAtPosition();
|
||||
params.eyeLookAt = _owningAvatar->getHead()->getLookAtPosition();
|
||||
params.eyeSaccade = _owningAvatar->getHead()->getSaccade();
|
||||
params.leanJointIndex = geometry.leanJointIndex;
|
||||
params.neckJointIndex = geometry.neckJointIndex;
|
||||
|
@ -390,6 +392,10 @@ bool SkeletonModel::getNeckPosition(glm::vec3& neckPosition) const {
|
|||
return isActive() && getJointPositionInWorldFrame(_geometry->getFBXGeometry().neckJointIndex, neckPosition);
|
||||
}
|
||||
|
||||
bool SkeletonModel::getLocalNeckPosition(glm::vec3& neckPosition) const {
|
||||
return isActive() && getJointPosition(_geometry->getFBXGeometry().neckJointIndex, neckPosition);
|
||||
}
|
||||
|
||||
bool SkeletonModel::getNeckParentRotationFromDefaultOrientation(glm::quat& neckParentRotation) const {
|
||||
if (!isActive()) {
|
||||
return false;
|
||||
|
|
|
@ -80,6 +80,8 @@ public:
|
|||
/// \return whether or not the neck was found
|
||||
bool getNeckPosition(glm::vec3& neckPosition) const;
|
||||
|
||||
bool getLocalNeckPosition(glm::vec3& neckPosition) const;
|
||||
|
||||
/// Returns the rotation of the neck joint's parent from default orientation
|
||||
/// \return whether or not the neck was found
|
||||
bool getNeckParentRotationFromDefaultOrientation(glm::quat& neckParentRotation) const;
|
||||
|
|
File diff suppressed because it is too large
@ -11,12 +11,13 @@
|
|||
#ifndef hifi_3DConnexionClient_h
|
||||
#define hifi_3DConnexionClient_h
|
||||
|
||||
#include <qobject.h>
|
||||
#include <qlibrary.h>
|
||||
#include <QObject>
|
||||
#include <QLibrary>
|
||||
#include <input-plugins/UserInputMapper.h>
|
||||
|
||||
#include "InterfaceLogging.h"
|
||||
#include "Application.h"
|
||||
|
||||
#include "ui/UserInputMapper.h"
|
||||
|
||||
#ifndef HAVE_3DCONNEXIONCLIENT
|
||||
class ConnexionClient : public QObject {
|
||||
|
|
|
@ -1,887 +0,0 @@
|
|||
//
|
||||
// OculusManager.cpp
|
||||
// interface/src/devices
|
||||
//
|
||||
// Created by Stephen Birarda on 5/9/13.
|
||||
// Refactored by Ben Arnold on 6/30/2014
|
||||
// Copyright 2012 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "OculusManager.h"
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
#include <QDesktopWidget>
|
||||
#include <QGuiApplication>
|
||||
#include <gpu/GPUConfig.h>
|
||||
#include <QScreen>
|
||||
#include <CursorManager.h>
|
||||
#include <QOpenGLTimerQuery>
|
||||
#include <QGLWidget>
|
||||
|
||||
|
||||
#include <avatar/AvatarManager.h>
|
||||
#include <avatar/MyAvatar.h>
|
||||
#include <GlWindow.h>
|
||||
#include <gpu/GLBackend.h>
|
||||
#include <OglplusHelpers.h>
|
||||
#include <PathUtils.h>
|
||||
#include <SharedUtil.h>
|
||||
#include <UserActivityLogger.h>
|
||||
#include <FramebufferCache.h>
|
||||
|
||||
#include <OVR_CAPI_GL.h>
|
||||
|
||||
#include "InterfaceLogging.h"
|
||||
#include "Application.h"
|
||||
#include "ui/overlays/Text3DOverlay.h"
|
||||
|
||||
template <typename Function>
|
||||
void for_each_eye(Function function) {
|
||||
for (ovrEyeType eye = ovrEyeType::ovrEye_Left;
|
||||
eye < ovrEyeType::ovrEye_Count;
|
||||
eye = static_cast<ovrEyeType>(eye + 1)) {
|
||||
function(eye);
|
||||
}
|
||||
}
|
||||
|
||||
template <typename Function>
|
||||
void for_each_eye(const ovrHmd & hmd, Function function) {
|
||||
for (int i = 0; i < ovrEye_Count; ++i) {
|
||||
ovrEyeType eye = hmd->EyeRenderOrder[i];
|
||||
function(eye);
|
||||
}
|
||||
}
|
||||
enum CalibrationState {
|
||||
UNCALIBRATED,
|
||||
WAITING_FOR_DELTA,
|
||||
WAITING_FOR_ZERO,
|
||||
WAITING_FOR_ZERO_HELD,
|
||||
CALIBRATED
|
||||
};
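calibrate() below walks these states in order; a brief summary of the intended transitions (the fall-back from the held state is inferred from the surrounding code and hedged accordingly):

// UNCALIBRATED          -> WAITING_FOR_DELTA      once a non-zero pose arrives
// WAITING_FOR_DELTA     -> WAITING_FOR_ZERO       after the head moves more than the CALIBRATION_DELTA_* thresholds
// WAITING_FOR_ZERO      -> WAITING_FOR_ZERO_HELD  once the head holds still (shows the "Hold still" overlay)
// WAITING_FOR_ZERO_HELD -> CALIBRATED             after CALIBRATION_ZERO_HOLD_TIME of stillness (sensors are reset)
//                         (and presumably back to WAITING_FOR_ZERO if the head moves before the hold time elapses)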
|
||||
|
||||
inline glm::mat4 toGlm(const ovrMatrix4f & om) {
|
||||
return glm::transpose(glm::make_mat4(&om.M[0][0]));
|
||||
}
|
||||
|
||||
inline glm::mat4 toGlm(const ovrFovPort & fovport, float nearPlane = 0.01f, float farPlane = 10000.0f) {
|
||||
return toGlm(ovrMatrix4f_Projection(fovport, nearPlane, farPlane, true));
|
||||
}
|
||||
|
||||
inline glm::vec3 toGlm(const ovrVector3f & ov) {
|
||||
return glm::make_vec3(&ov.x);
|
||||
}
|
||||
|
||||
inline glm::vec2 toGlm(const ovrVector2f & ov) {
|
||||
return glm::make_vec2(&ov.x);
|
||||
}
|
||||
|
||||
inline glm::ivec2 toGlm(const ovrVector2i & ov) {
|
||||
return glm::ivec2(ov.x, ov.y);
|
||||
}
|
||||
|
||||
inline glm::uvec2 toGlm(const ovrSizei & ov) {
|
||||
return glm::uvec2(ov.w, ov.h);
|
||||
}
|
||||
|
||||
inline glm::quat toGlm(const ovrQuatf & oq) {
|
||||
return glm::make_quat(&oq.x);
|
||||
}
|
||||
|
||||
inline glm::mat4 toGlm(const ovrPosef & op) {
|
||||
glm::mat4 orientation = glm::mat4_cast(toGlm(op.Orientation));
|
||||
glm::mat4 translation = glm::translate(glm::mat4(), toGlm(op.Position));
|
||||
return translation * orientation;
|
||||
}
|
||||
|
||||
inline ovrMatrix4f ovrFromGlm(const glm::mat4 & m) {
|
||||
ovrMatrix4f result;
|
||||
glm::mat4 transposed(glm::transpose(m));
|
||||
memcpy(result.M, &(transposed[0][0]), sizeof(float) * 16);
|
||||
return result;
|
||||
}
|
||||
|
||||
inline ovrVector3f ovrFromGlm(const glm::vec3 & v) {
|
||||
return{ v.x, v.y, v.z };
|
||||
}
|
||||
|
||||
inline ovrVector2f ovrFromGlm(const glm::vec2 & v) {
|
||||
return{ v.x, v.y };
|
||||
}
|
||||
|
||||
inline ovrSizei ovrFromGlm(const glm::uvec2 & v) {
|
||||
return{ (int)v.x, (int)v.y };
|
||||
}
|
||||
|
||||
inline ovrQuatf ovrFromGlm(const glm::quat & q) {
|
||||
return{ q.x, q.y, q.z, q.w };
|
||||
}
|
||||
|
||||
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
|
||||
// A base class for FBO wrappers that need to use the Oculus C
|
||||
// API to manage textures via ovrHmd_CreateSwapTextureSetGL,
|
||||
// ovrHmd_CreateMirrorTextureGL, etc
|
||||
template <typename C>
|
||||
struct RiftFramebufferWrapper : public FramebufferWrapper<C, char> {
|
||||
ovrHmd hmd;
|
||||
RiftFramebufferWrapper(const ovrHmd & hmd) : hmd(hmd) {
|
||||
color = 0;
|
||||
depth = 0;
|
||||
};
|
||||
|
||||
void Resize(const uvec2 & size) {
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
|
||||
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
|
||||
this->size = size;
|
||||
initColor();
|
||||
initDone();
|
||||
}
|
||||
|
||||
protected:
|
||||
virtual void initDepth() override final {
|
||||
}
|
||||
};
|
||||
|
||||
// A wrapper for constructing and using a swap texture set,
|
||||
// where each frame you draw to a texture via the FBO,
|
||||
// then submit it and increment to the next texture.
|
||||
// The Oculus SDK manages the creation and destruction of
|
||||
// the textures
|
||||
struct SwapFramebufferWrapper : public RiftFramebufferWrapper<ovrSwapTextureSet*> {
|
||||
SwapFramebufferWrapper(const ovrHmd & hmd)
|
||||
: RiftFramebufferWrapper(hmd) {
|
||||
}
|
||||
|
||||
~SwapFramebufferWrapper() {
|
||||
if (color) {
|
||||
ovrHmd_DestroySwapTextureSet(hmd, color);
|
||||
color = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
void Increment() {
|
||||
++color->CurrentIndex;
|
||||
color->CurrentIndex %= color->TextureCount;
|
||||
}
|
||||
|
||||
protected:
|
||||
virtual void initColor() override {
|
||||
if (color) {
|
||||
ovrHmd_DestroySwapTextureSet(hmd, color);
|
||||
color = nullptr;
|
||||
}
|
||||
|
||||
ovrResult result = ovrHmd_CreateSwapTextureSetGL(hmd, GL_RGBA, size.x, size.y, &color);
|
||||
Q_ASSERT(OVR_SUCCESS(result));
|
||||
|
||||
for (int i = 0; i < color->TextureCount; ++i) {
|
||||
ovrGLTexture& ovrTex = (ovrGLTexture&)color->Textures[i];
|
||||
glBindTexture(GL_TEXTURE_2D, ovrTex.OGL.TexId);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
}
|
||||
glBindTexture(GL_TEXTURE_2D, 0);
|
||||
}
|
||||
|
||||
virtual void initDone() override {
|
||||
}
|
||||
|
||||
virtual void onBind(oglplus::Framebuffer::Target target) override {
|
||||
ovrGLTexture& tex = (ovrGLTexture&)(color->Textures[color->CurrentIndex]);
|
||||
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex.OGL.TexId, 0);
|
||||
}
|
||||
|
||||
virtual void onUnbind(oglplus::Framebuffer::Target target) override {
|
||||
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
|
||||
}
|
||||
};
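The per-frame cycle this wrapper supports is: bind, render both eyes into the current texture of the set, unbind, submit the layer, then Increment() so the next frame lands in the next texture. A comment-only sketch (the submit step belongs to the surrounding plugin code, not to this struct):

// 1. bind the wrapper's FBO                       -> onBind() attaches color->Textures[color->CurrentIndex]
// 2. render the left and right eye viewports
// 3. unbind the FBO                               -> onUnbind() detaches the texture
// 4. submit the frame's layers to the compositor  (ovrHmd_SubmitFrame in the 0.6 SDK)
// 5. _swapFbo->Increment();                       // advance CurrentIndex to the next texture in the set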
|
||||
|
||||
|
||||
// We use a FBO to wrap the mirror texture because it makes it easier to
|
||||
// render to the screen via glBlitFramebuffer
|
||||
struct MirrorFramebufferWrapper : public RiftFramebufferWrapper<ovrGLTexture*> {
|
||||
MirrorFramebufferWrapper(const ovrHmd & hmd)
|
||||
: RiftFramebufferWrapper(hmd) {
|
||||
}
|
||||
|
||||
virtual ~MirrorFramebufferWrapper() {
|
||||
if (color) {
|
||||
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
|
||||
color = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
void initColor() override {
|
||||
if (color) {
|
||||
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
|
||||
color = nullptr;
|
||||
}
|
||||
ovrResult result = ovrHmd_CreateMirrorTextureGL(hmd, GL_RGBA, size.x, size.y, (ovrTexture**)&color);
|
||||
Q_ASSERT(OVR_SUCCESS(result));
|
||||
}
|
||||
|
||||
void initDone() override {
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
|
||||
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, color->OGL.TexId, 0);
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
|
||||
}
|
||||
};
|
||||
|
||||
static SwapFramebufferWrapper* _swapFbo{ nullptr };
|
||||
static MirrorFramebufferWrapper* _mirrorFbo{ nullptr };
|
||||
static ovrLayerEyeFov _sceneLayer;
|
||||
|
||||
#else
|
||||
|
||||
static ovrTexture _eyeTextures[ovrEye_Count];
|
||||
static GlWindow* _outputWindow{ nullptr };
|
||||
|
||||
#endif
|
||||
|
||||
static bool _isConnected = false;
|
||||
static ovrHmd _ovrHmd;
|
||||
static ovrFovPort _eyeFov[ovrEye_Count];
|
||||
static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count];
|
||||
static ovrSizei _renderTargetSize;
|
||||
static glm::mat4 _eyeProjection[ovrEye_Count];
|
||||
static unsigned int _frameIndex = 0;
|
||||
static bool _frameTimingActive = false;
|
||||
static Camera* _camera = NULL;
|
||||
static ovrEyeType _activeEye = ovrEye_Count;
|
||||
static bool _hswDismissed = false;
|
||||
|
||||
static const float CALIBRATION_DELTA_MINIMUM_LENGTH = 0.02f;
|
||||
static const float CALIBRATION_DELTA_MINIMUM_ANGLE = 5.0f * RADIANS_PER_DEGREE;
|
||||
static const float CALIBRATION_ZERO_MAXIMUM_LENGTH = 0.01f;
|
||||
static const float CALIBRATION_ZERO_MAXIMUM_ANGLE = 2.0f * RADIANS_PER_DEGREE;
|
||||
static const quint64 CALIBRATION_ZERO_HOLD_TIME = 3000000; // usec
|
||||
static const float CALIBRATION_MESSAGE_DISTANCE = 2.5f;
|
||||
static CalibrationState _calibrationState;
|
||||
static glm::vec3 _calibrationPosition;
|
||||
static glm::quat _calibrationOrientation;
|
||||
static quint64 _calibrationStartTime;
|
||||
static int _calibrationMessage = 0;
|
||||
static glm::vec3 _eyePositions[ovrEye_Count];
|
||||
// TODO expose this as a developer toggle
|
||||
static bool _eyePerFrameMode = false;
|
||||
static ovrEyeType _lastEyeRendered = ovrEye_Count;
|
||||
static ovrSizei _recommendedTexSize = { 0, 0 };
|
||||
static float _offscreenRenderScale = 1.0;
|
||||
static glm::mat4 _combinedProjection;
|
||||
static ovrPosef _eyeRenderPoses[ovrEye_Count];
|
||||
static ovrRecti _eyeViewports[ovrEye_Count];
|
||||
static ovrVector3f _eyeOffsets[ovrEye_Count];
|
||||
|
||||
|
||||
glm::vec3 OculusManager::getLeftEyePosition() { return _eyePositions[ovrEye_Left]; }
|
||||
glm::vec3 OculusManager::getRightEyePosition() { return _eyePositions[ovrEye_Right]; }
|
||||
glm::vec3 OculusManager::getMidEyePosition() { return (_eyePositions[ovrEye_Left] + _eyePositions[ovrEye_Right]) / 2.0f; }
|
||||
|
||||
void OculusManager::connect(QOpenGLContext* shareContext) {
|
||||
qCDebug(interfaceapp) << "Oculus SDK" << OVR_VERSION_STRING;
|
||||
|
||||
ovrInitParams initParams; memset(&initParams, 0, sizeof(initParams));
|
||||
|
||||
#ifdef DEBUG
|
||||
initParams.Flags |= ovrInit_Debug;
|
||||
#endif
|
||||
|
||||
ovr_Initialize(&initParams);
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
|
||||
ovrResult res = ovrHmd_Create(0, &_ovrHmd);
|
||||
#ifdef DEBUG
|
||||
if (!OVR_SUCCESS(res)) {
|
||||
res = ovrHmd_CreateDebug(ovrHmd_DK2, &_ovrHmd);
|
||||
Q_ASSERT(OVR_SUCCESS(res));
|
||||
}
|
||||
#endif
|
||||
|
||||
#else
|
||||
|
||||
_ovrHmd = ovrHmd_Create(0);
|
||||
#ifdef DEBUG
|
||||
if (!_ovrHmd) {
|
||||
_ovrHmd = ovrHmd_CreateDebug(ovrHmd_DK2);
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
||||
|
||||
if (!_ovrHmd) {
|
||||
_isConnected = false;
|
||||
|
||||
// we're definitely not in "VR mode" so tell the menu that
|
||||
Menu::getInstance()->getActionForOption(MenuOption::EnableVRMode)->setChecked(false);
|
||||
ovr_Shutdown();
|
||||
return;
|
||||
}
|
||||
|
||||
_calibrationState = UNCALIBRATED;
|
||||
if (!_isConnected) {
|
||||
UserActivityLogger::getInstance().connectedDevice("hmd", "oculus");
|
||||
}
|
||||
_isConnected = true;
|
||||
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
_eyeFov[eye] = _ovrHmd->DefaultEyeFov[eye];
|
||||
_eyeProjection[eye] = toGlm(ovrMatrix4f_Projection(_eyeFov[eye],
|
||||
DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
|
||||
ovrEyeRenderDesc erd = ovrHmd_GetRenderDesc(_ovrHmd, eye, _eyeFov[eye]);
|
||||
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
|
||||
});
|
||||
ovrFovPort combinedFov = _ovrHmd->MaxEyeFov[0];
|
||||
combinedFov.RightTan = _ovrHmd->MaxEyeFov[1].RightTan;
|
||||
_combinedProjection = toGlm(ovrMatrix4f_Projection(combinedFov,
|
||||
DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
|
||||
|
||||
_recommendedTexSize = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left, _eyeFov[ovrEye_Left], 1.0f);
|
||||
_renderTargetSize = { _recommendedTexSize.w * 2, _recommendedTexSize.h };
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
|
||||
_mirrorFbo = new MirrorFramebufferWrapper(_ovrHmd);
|
||||
_swapFbo = new SwapFramebufferWrapper(_ovrHmd);
|
||||
_swapFbo->Init(toGlm(_renderTargetSize));
|
||||
_sceneLayer.ColorTexture[0] = _swapFbo->color;
|
||||
_sceneLayer.ColorTexture[1] = nullptr;
|
||||
_sceneLayer.Viewport[0].Pos = { 0, 0 };
|
||||
_sceneLayer.Viewport[0].Size = _recommendedTexSize;
|
||||
_sceneLayer.Viewport[1].Pos = { _recommendedTexSize.w, 0 };
|
||||
_sceneLayer.Viewport[1].Size = _recommendedTexSize;
|
||||
_sceneLayer.Header.Type = ovrLayerType_EyeFov;
|
||||
_sceneLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
_eyeViewports[eye] = _sceneLayer.Viewport[eye];
|
||||
_sceneLayer.Fov[eye] = _eyeFov[eye];
|
||||
});
|
||||
|
||||
|
||||
|
||||
#else
|
||||
_outputWindow = new GlWindow(shareContext);
|
||||
_outputWindow->show();
|
||||
// _outputWindow->setFlags(Qt::FramelessWindowHint );
|
||||
// _outputWindow->resize(_ovrHmd->Resolution.w, _ovrHmd->Resolution.h);
|
||||
// _outputWindow->setPosition(_ovrHmd->WindowsPos.x, _ovrHmd->WindowsPos.y);
|
||||
ivec2 desiredPosition = toGlm(_ovrHmd->WindowsPos);
|
||||
foreach(QScreen* screen, qGuiApp->screens()) {
|
||||
ivec2 screenPosition = toGlm(screen->geometry().topLeft());
|
||||
if (screenPosition == desiredPosition) {
|
||||
_outputWindow->setScreen(screen);
|
||||
break;
|
||||
}
|
||||
}
|
||||
_outputWindow->showFullScreen();
|
||||
_outputWindow->makeCurrent();
|
||||
|
||||
ovrGLConfig cfg;
|
||||
memset(&cfg, 0, sizeof(cfg));
|
||||
cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
|
||||
cfg.OGL.Header.BackBufferSize = _ovrHmd->Resolution;
|
||||
cfg.OGL.Header.Multisample = 0;
|
||||
|
||||
int distortionCaps = 0
|
||||
| ovrDistortionCap_Vignette
|
||||
| ovrDistortionCap_Overdrive
|
||||
| ovrDistortionCap_TimeWarp;
|
||||
|
||||
int configResult = ovrHmd_ConfigureRendering(_ovrHmd, &cfg.Config,
|
||||
distortionCaps, _eyeFov, _eyeRenderDesc);
|
||||
assert(configResult);
|
||||
Q_UNUSED(configResult);
|
||||
|
||||
_outputWindow->doneCurrent();
|
||||
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
//Get texture size
|
||||
_eyeTextures[eye].Header.API = ovrRenderAPI_OpenGL;
|
||||
_eyeTextures[eye].Header.TextureSize = _renderTargetSize;
|
||||
_eyeTextures[eye].Header.RenderViewport.Pos = { 0, 0 };
|
||||
_eyeTextures[eye].Header.RenderViewport.Size = _renderTargetSize;
|
||||
_eyeTextures[eye].Header.RenderViewport.Size.w /= 2;
|
||||
});
|
||||
_eyeTextures[ovrEye_Right].Header.RenderViewport.Pos.x = _recommendedTexSize.w;
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
_eyeViewports[eye] = _eyeTextures[eye].Header.RenderViewport;
|
||||
});
|
||||
#endif
|
||||
|
||||
ovrHmd_SetEnabledCaps(_ovrHmd,
|
||||
ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
|
||||
|
||||
ovrHmd_ConfigureTracking(_ovrHmd,
|
||||
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection,
|
||||
ovrTrackingCap_Orientation);
|
||||
|
||||
if (!_camera) {
|
||||
_camera = new Camera;
|
||||
configureCamera(*_camera); // no need to use screen dimensions; they're ignored
|
||||
}
|
||||
}
|
||||
|
||||
// Disconnects and deallocates the Oculus Rift (OR) resources
|
||||
void OculusManager::disconnect() {
|
||||
if (_isConnected) {
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
if (_swapFbo) {
|
||||
delete _swapFbo;
|
||||
_swapFbo = nullptr;
|
||||
}
|
||||
|
||||
if (_mirrorFbo) {
|
||||
delete _mirrorFbo;
|
||||
_mirrorFbo = nullptr;
|
||||
}
|
||||
#else
|
||||
_outputWindow->showNormal();
|
||||
_outputWindow->deleteLater();
|
||||
_outputWindow = nullptr;
|
||||
#endif
|
||||
|
||||
if (_ovrHmd) {
|
||||
ovrHmd_Destroy(_ovrHmd);
|
||||
_ovrHmd = nullptr;
|
||||
}
|
||||
ovr_Shutdown();
|
||||
|
||||
_isConnected = false;
|
||||
// Prepare to potentially have to dismiss the HSW again
|
||||
// if the user re-enables VR
|
||||
_hswDismissed = false;
|
||||
}
|
||||
}
|
||||
|
||||
void positionCalibrationBillboard(Text3DOverlay* billboard) {
    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
    glm::quat headOrientation = myAvatar->getHeadOrientation();
    headOrientation.x = 0;
    headOrientation.z = 0;
    glm::normalize(headOrientation);
    billboard->setPosition(myAvatar->getHeadPosition()
        + headOrientation * glm::vec3(0.0f, 0.0f, -CALIBRATION_MESSAGE_DISTANCE));
    billboard->setRotation(headOrientation);
}

void calibrate(const glm::vec3& position, const glm::quat& orientation) {
    static QString instructionMessage = "Hold still to calibrate";
    static QString progressMessage;
    static Text3DOverlay* billboard;

    switch (_calibrationState) {

        case UNCALIBRATED:
            if (position != glm::vec3() && orientation != glm::quat()) { // Handle zero values at start-up.
                _calibrationPosition = position;
                _calibrationOrientation = orientation;
                _calibrationState = WAITING_FOR_DELTA;
            }
            break;

        case WAITING_FOR_DELTA:
            if (glm::length(position - _calibrationPosition) > CALIBRATION_DELTA_MINIMUM_LENGTH
                || glm::angle(orientation * glm::inverse(_calibrationOrientation)) > CALIBRATION_DELTA_MINIMUM_ANGLE) {
                _calibrationPosition = position;
                _calibrationOrientation = orientation;
                _calibrationState = WAITING_FOR_ZERO;
            }
            break;

        case WAITING_FOR_ZERO:
            if (glm::length(position - _calibrationPosition) < CALIBRATION_ZERO_MAXIMUM_LENGTH
                && glm::angle(orientation * glm::inverse(_calibrationOrientation)) < CALIBRATION_ZERO_MAXIMUM_ANGLE) {
                _calibrationStartTime = usecTimestampNow();
                _calibrationState = WAITING_FOR_ZERO_HELD;

                if (!_calibrationMessage) {
                    qCDebug(interfaceapp) << "Hold still to calibrate HMD";

                    billboard = new Text3DOverlay();
                    billboard->setDimensions(glm::vec2(2.0f, 1.25f));
                    billboard->setTopMargin(0.35f);
                    billboard->setLeftMargin(0.28f);
                    billboard->setText(instructionMessage);
                    billboard->setAlpha(0.5f);
                    billboard->setLineHeight(0.1f);
                    billboard->setIsFacingAvatar(false);
                    positionCalibrationBillboard(billboard);

                    _calibrationMessage = Application::getInstance()->getOverlays().addOverlay(billboard);
                }

                progressMessage = "";
            } else {
                _calibrationPosition = position;
                _calibrationOrientation = orientation;
            }
            break;

        case WAITING_FOR_ZERO_HELD:
            if (glm::length(position - _calibrationPosition) < CALIBRATION_ZERO_MAXIMUM_LENGTH
                && glm::angle(orientation * glm::inverse(_calibrationOrientation)) < CALIBRATION_ZERO_MAXIMUM_ANGLE) {
                if ((usecTimestampNow() - _calibrationStartTime) > CALIBRATION_ZERO_HOLD_TIME) {
                    _calibrationState = CALIBRATED;
                    qCDebug(interfaceapp) << "HMD calibrated";
                    Application::getInstance()->getOverlays().deleteOverlay(_calibrationMessage);
                    _calibrationMessage = 0;
                    Application::getInstance()->resetSensors();
                } else {
                    quint64 quarterSeconds = (usecTimestampNow() - _calibrationStartTime) / 250000;
                    if (quarterSeconds + 1 > (quint64)progressMessage.length()) {
                        // 3...2...1...
                        if (quarterSeconds == 4 * (quarterSeconds / 4)) {
                            quint64 wholeSeconds = CALIBRATION_ZERO_HOLD_TIME / 1000000 - quarterSeconds / 4;

                            if (wholeSeconds == 3) {
                                positionCalibrationBillboard(billboard);
                            }

                            progressMessage += QString::number(wholeSeconds);
                        } else {
                            progressMessage += ".";
                        }
                        billboard->setText(instructionMessage + "\n\n" + progressMessage);
                    }
                }
            } else {
                _calibrationPosition = position;
                _calibrationOrientation = orientation;
                _calibrationState = WAITING_FOR_ZERO;
            }
            break;
        default:
            break;

    }
}

void OculusManager::recalibrate() {
    _calibrationState = UNCALIBRATED;
}

void OculusManager::abandonCalibration() {
    _calibrationState = CALIBRATED;
    if (_calibrationMessage) {
        qCDebug(interfaceapp) << "Abandoned HMD calibration";
        Application::getInstance()->getOverlays().deleteOverlay(_calibrationMessage);
        _calibrationMessage = 0;
    }
}


bool OculusManager::isConnected() {
    return _isConnected;
}

//Begins the frame timing for oculus prediction purposes
void OculusManager::beginFrameTiming() {
    if (_frameTimingActive) {
        printf("WARNING: Called OculusManager::beginFrameTiming() twice in a row, need to call OculusManager::endFrameTiming().");
    }
    _frameTimingActive = true;
}

bool OculusManager::allowSwap() {
    return false;
}

//Ends frame timing
void OculusManager::endFrameTiming() {
    _frameIndex++;
    _frameTimingActive = false;
}

//Sets the camera FoV and aspect ratio
void OculusManager::configureCamera(Camera& camera) {
    if (_activeEye == ovrEye_Count) {
        // When not rendering, provide a FOV encompassing both eyes
        camera.setProjection(_combinedProjection);
        return;
    }
    camera.setProjection(_eyeProjection[_activeEye]);
}

//Displays everything for the oculus, frame timing must be active
void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera) {

#ifdef DEBUG
    // Ensure the frame counter always increments by exactly 1
    static int oldFrameIndex = -1;
    assert(oldFrameIndex == -1 || (unsigned int)oldFrameIndex == _frameIndex - 1);
    oldFrameIndex = _frameIndex;
#endif

#ifndef Q_OS_WIN

    // FIXME: we need a better way of responding to the HSW. In particular
    // we need to ensure that it's only displayed once per session, rather than
    // every time the user toggles VR mode, and we need to hook it up to actual
    // keyboard input. OVR claim they are refactoring HSW
    // https://forums.oculus.com/viewtopic.php?f=20&t=21720#p258599
    static ovrHSWDisplayState hasWarningState;
    if (!_hswDismissed) {
        ovrHmd_GetHSWDisplayState(_ovrHmd, &hasWarningState);
        if (hasWarningState.Displayed) {
            ovrHmd_DismissHSWDisplay(_ovrHmd);
        } else {
            _hswDismissed = true;
        }
    }
#endif


    //beginFrameTiming must be called before display
    if (!_frameTimingActive) {
        printf("WARNING: Called OculusManager::display() without calling OculusManager::beginFrameTiming() first.");
        return;
    }

    auto primaryFBO = DependencyManager::get<FramebufferCache>()->getPrimaryFramebuffer();
    glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(primaryFBO));
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    glm::quat orientation;
    glm::vec3 trackerPosition;
    auto deviceSize = qApp->getDeviceSize();

    ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
    ovrVector3f ovrHeadPosition = ts.HeadPose.ThePose.Position;

    trackerPosition = glm::vec3(ovrHeadPosition.x, ovrHeadPosition.y, ovrHeadPosition.z);

    if (_calibrationState != CALIBRATED) {
        ovrQuatf ovrHeadOrientation = ts.HeadPose.ThePose.Orientation;
        orientation = glm::quat(ovrHeadOrientation.w, ovrHeadOrientation.x, ovrHeadOrientation.y, ovrHeadOrientation.z);
        calibrate(trackerPosition, orientation);
    }

    trackerPosition = bodyOrientation * trackerPosition;
    ovrPosef eyePoses[ovrEye_Count];
    ovrHmd_GetEyePoses(_ovrHmd, _frameIndex, _eyeOffsets, eyePoses, nullptr);
#ifndef Q_OS_WIN
    ovrHmd_BeginFrame(_ovrHmd, _frameIndex);
#endif
    //Render each eye into an fbo
    for_each_eye(_ovrHmd, [&](ovrEyeType eye){
        // If we're in eye-per-frame mode, only render one eye
        // per call to display, and allow timewarp to correct for
        // the other eye. Poor man's perf improvement
        if (_eyePerFrameMode && eye == _lastEyeRendered) {
            return;
        }
        _lastEyeRendered = _activeEye = eye;
        _eyeRenderPoses[eye] = eyePoses[eye];
        // Set the camera rotation for this eye

        _eyePositions[eye] = toGlm(_eyeRenderPoses[eye].Position);
        _eyePositions[eye] = whichCamera.getRotation() * _eyePositions[eye];
        quat eyeRotation = toGlm(_eyeRenderPoses[eye].Orientation);

        // Update our camera to what the application camera is doing
        _camera->setRotation(whichCamera.getRotation() * eyeRotation);
        _camera->setPosition(whichCamera.getPosition() + _eyePositions[eye]);
        configureCamera(*_camera);
        _camera->update(1.0f / Application::getInstance()->getFps());

        ovrRecti & vp = _eyeViewports[eye];
        vp.Size.h = _recommendedTexSize.h * _offscreenRenderScale;
        vp.Size.w = _recommendedTexSize.w * _offscreenRenderScale;
        glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);

        renderArgs->_viewport = glm::ivec4(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
        renderArgs->_renderSide = RenderArgs::MONO;
        qApp->displaySide(renderArgs, *_camera);
        qApp->getApplicationCompositor().displayOverlayTextureHmd(renderArgs, eye);
    });
    _activeEye = ovrEye_Count;

    gpu::FramebufferPointer finalFbo;
    finalFbo = DependencyManager::get<FramebufferCache>()->getPrimaryFramebuffer();
    glBindFramebuffer(GL_FRAMEBUFFER, 0);

    // restore our normal viewport
    glViewport(0, 0, deviceSize.width(), deviceSize.height());

#ifdef Q_OS_WIN
    auto srcFboSize = finalFbo->getSize();


    // Blit to the oculus provided texture
    glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
    _swapFbo->Bound(oglplus::Framebuffer::Target::Draw, [&] {
        glBlitFramebuffer(
            0, 0, srcFboSize.x, srcFboSize.y,
            0, 0, _swapFbo->size.x, _swapFbo->size.y,
            GL_COLOR_BUFFER_BIT, GL_NEAREST);
    });

    // Blit to the onscreen window
    auto destWindowSize = qApp->getDeviceSize();
    glBlitFramebuffer(
        0, 0, srcFboSize.x, srcFboSize.y,
        0, 0, destWindowSize.width(), destWindowSize.height(),
        GL_COLOR_BUFFER_BIT, GL_NEAREST);
    glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);

    // Submit the frame to the Oculus SDK for timewarp and distortion
    for_each_eye([&](ovrEyeType eye) {
        _sceneLayer.RenderPose[eye] = _eyeRenderPoses[eye];
    });
    auto header = &_sceneLayer.Header;
    ovrResult res = ovrHmd_SubmitFrame(_ovrHmd, _frameIndex, nullptr, &header, 1);
    Q_ASSERT(OVR_SUCCESS(res));
    _swapFbo->Increment();
#else
    GLsync syncObject = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
    glFlush();


    _outputWindow->makeCurrent();
    // force the compositing context to wait for the texture
    // rendering to complete before it starts the distortion rendering,
    // but without triggering a CPU/GPU synchronization
    glWaitSync(syncObject, 0, GL_TIMEOUT_IGNORED);
    glDeleteSync(syncObject);

    GLuint textureId = gpu::GLBackend::getTextureID(finalFbo->getRenderBuffer(0));
    for_each_eye([&](ovrEyeType eye) {
        ovrGLTexture & glEyeTexture = reinterpret_cast<ovrGLTexture&>(_eyeTextures[eye]);
        glEyeTexture.OGL.TexId = textureId;

    });

    // restore our normal viewport
    ovrHmd_EndFrame(_ovrHmd, _eyeRenderPoses, _eyeTextures);
    glCanvas->makeCurrent();
#endif


    // in order to account for changes in the pick ray caused by head movement
    // we need to force a mouse move event on every frame (perhaps we could change this
    // to based on the head moving a minimum distance from the last position in which we
    // sent?)
    {
        QMouseEvent mouseEvent(QEvent::MouseMove, glCanvas->mapFromGlobal(QCursor::pos()),
            Qt::NoButton, Qt::NoButton, 0);
        qApp->mouseMoveEvent(&mouseEvent, 0);
    }



}

//Tries to reconnect to the sensors
void OculusManager::reset() {
    if (_isConnected) {
        ovrHmd_RecenterPose(_ovrHmd);
    }
}

glm::vec3 OculusManager::getRelativePosition() {
    ovrTrackingState trackingState = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
    return toGlm(trackingState.HeadPose.ThePose.Position);
}

glm::quat OculusManager::getOrientation() {
    ovrTrackingState trackingState = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
    return toGlm(trackingState.HeadPose.ThePose.Orientation);
}

QSize OculusManager::getRenderTargetSize() {
    QSize rv;
    rv.setWidth(_renderTargetSize.w);
    rv.setHeight(_renderTargetSize.h);
    return rv;
}

void OculusManager::overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
        float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) {
    if (_activeEye != ovrEye_Count) {
        const ovrFovPort& port = _eyeFov[_activeEye];
        right = nearVal * port.RightTan;
        left = -nearVal * port.LeftTan;
        top = nearVal * port.UpTan;
        bottom = -nearVal * port.DownTan;
    }
}

int OculusManager::getHMDScreen() {
#ifdef Q_OS_WIN
    return -1;
#else
    int hmdScreenIndex = -1; // unknown
    // TODO: it might be smarter to handle multiple HMDs connected in this case. but for now,
    // we will simply assume the initialization code that set up _ovrHmd picked the best hmd

    if (_ovrHmd) {
        QString productNameFromOVR = _ovrHmd->ProductName;

        int hmdWidth = _ovrHmd->Resolution.w;
        int hmdHeight = _ovrHmd->Resolution.h;
        int hmdAtX = _ovrHmd->WindowsPos.x;
        int hmdAtY = _ovrHmd->WindowsPos.y;

        // we will score the likelihood that each screen is a match based on the following
        // rubric of potential matching features
        const int EXACT_NAME_MATCH = 100;
        const int SIMILAR_NAMES = 10;
        const int EXACT_LOCATION_MATCH = 50;
        const int EXACT_RESOLUTION_MATCH = 25;

        int bestMatchScore = 0;

        // look at the display list and see if we can find the best match
        QDesktopWidget* desktop = QApplication::desktop();
        int screenNumber = 0;
        foreach (QScreen* screen, QGuiApplication::screens()) {
            QString screenName = screen->name();
            QRect screenRect = desktop->screenGeometry(screenNumber);

            int screenScore = 0;
            if (screenName == productNameFromOVR) {
                screenScore += EXACT_NAME_MATCH;
            }
            if (similarStrings(screenName, productNameFromOVR)) {
                screenScore += SIMILAR_NAMES;
            }
            if (hmdWidth == screenRect.width() && hmdHeight == screenRect.height()) {
                screenScore += EXACT_RESOLUTION_MATCH;
            }
            if (hmdAtX == screenRect.x() && hmdAtY == screenRect.y()) {
                screenScore += EXACT_LOCATION_MATCH;
            }
            if (screenScore > bestMatchScore) {
                bestMatchScore = screenScore;
                hmdScreenIndex = screenNumber;
            }

            screenNumber++;
        }
    }
    return hmdScreenIndex;
#endif
}

mat4 OculusManager::getEyeProjection(int eye) {
    return _eyeProjection[eye];
}

mat4 OculusManager::getEyePose(int eye) {
    return toGlm(_eyeRenderPoses[eye]);
}

mat4 OculusManager::getHeadPose() {
    ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
    return toGlm(ts.HeadPose.ThePose);
}

@ -1,61 +0,0 @@
//
//  OculusManager.h
//  interface/src/devices
//
//  Created by Stephen Birarda on 5/9/13.
//  Refactored by Ben Arnold on 6/30/2014
//  Copyright 2012 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_OculusManager_h
#define hifi_OculusManager_h

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>

#include <QSize>

#include "RenderArgs.h"

class QOpenGLContext;
class QGLWidget;
class Camera;

/// Handles interaction with the Oculus Rift.
class OculusManager {
public:
    static void connect(QOpenGLContext* shareContext);
    static void disconnect();
    static bool isConnected();
    static void recalibrate();
    static void abandonCalibration();
    static void beginFrameTiming();
    static void endFrameTiming();
    static bool allowSwap();
    static void configureCamera(Camera& camera);
    static void display(QGLWidget * glCanvas, RenderArgs* renderArgs, const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera);
    static void reset();

    static glm::vec3 getRelativePosition();
    static glm::quat getOrientation();
    static QSize getRenderTargetSize();

    static void overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
        float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane);

    static glm::vec3 getLeftEyePosition();
    static glm::vec3 getRightEyePosition();
    static glm::vec3 getMidEyePosition();

    static int getHMDScreen();

    static glm::mat4 getEyeProjection(int eye);
    static glm::mat4 getEyePose(int eye);
    static glm::mat4 getHeadPose();
};

#endif // hifi_OculusManager_h

@ -1,151 +0,0 @@
//
//  TV3DManager.cpp
//  interface/src/devices
//
//  Created by Brad Hefta-Gaub on 12/24/13.
//  Copyright 2013 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "TV3DManager.h"

#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>

#include <RenderArgs.h>

#include "Application.h"
#include "Menu.h"

int TV3DManager::_screenWidth = 1;
int TV3DManager::_screenHeight = 1;
double TV3DManager::_aspect = 1.0;
eyeFrustum TV3DManager::_leftEye;
eyeFrustum TV3DManager::_rightEye;
eyeFrustum* TV3DManager::_activeEye = NULL;


bool TV3DManager::isConnected() {
    return Menu::getInstance()->isOptionChecked(MenuOption::Enable3DTVMode);
}

void TV3DManager::connect() {
    auto deviceSize = qApp->getDeviceSize();
    configureCamera(*(qApp->getCamera()), deviceSize.width(), deviceSize.height());
}


// The basic strategy of this stereoscopic rendering is explained here:
// http://www.orthostereo.com/geometryopengl.html
void TV3DManager::setFrustum(Camera& whichCamera) {
    const double DTR = 0.0174532925; // degree to radians
    const double IOD = 0.05; //intraocular distance
    double fovy = DEFAULT_FIELD_OF_VIEW_DEGREES; // field of view in y-axis
    double nearZ = DEFAULT_NEAR_CLIP; // near clipping plane
    double screenZ = 0.25f; // screen projection plane

    double top = nearZ * tan(DTR * fovy / 2.0); //sets top of frustum based on fovy and near clipping plane
    double right = _aspect * top; // sets right of frustum based on aspect ratio
    double frustumshift = (IOD / 2) * nearZ / screenZ;

    _leftEye.top = top;
    _leftEye.bottom = -top;
    _leftEye.left = -right + frustumshift;
    _leftEye.right = right + frustumshift;
    _leftEye.modelTranslation = IOD / 2;

    _rightEye.top = top;
    _rightEye.bottom = -top;
    _rightEye.left = -right - frustumshift;
    _rightEye.right = right - frustumshift;
    _rightEye.modelTranslation = -IOD / 2;
}

void TV3DManager::configureCamera(Camera& whichCamera, int screenWidth, int screenHeight) {
#ifdef THIS_CURRENTLY_BROKEN_WAITING_FOR_DISPLAY_PLUGINS
    if (screenHeight == 0) {
        screenHeight = 1; // prevent divide by 0
    }
    _screenWidth = screenWidth;
    _screenHeight = screenHeight;
    _aspect= (double)_screenWidth / (double)_screenHeight;
    setFrustum(whichCamera);

    glViewport (0, 0, _screenWidth, _screenHeight); // sets drawing viewport
#endif
}

void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {

#ifdef THIS_CURRENTLY_BROKEN_WAITING_FOR_DISPLAY_PLUGINS

    double nearZ = DEFAULT_NEAR_CLIP; // near clipping plane
    double farZ = DEFAULT_FAR_CLIP; // far clipping plane

    // left eye portal
    int portalX = 0;
    int portalY = 0;
    QSize deviceSize = qApp->getDeviceSize() *
        qApp->getRenderResolutionScale();
    int portalW = deviceSize.width() / 2;
    int portalH = deviceSize.height();


    // FIXME - glow effect is removed, 3D TV mode broken until we get display plugins working
    DependencyManager::get<GlowEffect>()->prepare(renderArgs);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    Camera eyeCamera;
    eyeCamera.setRotation(whichCamera.getRotation());
    eyeCamera.setPosition(whichCamera.getPosition());

    glEnable(GL_SCISSOR_TEST);
    forEachEye([&](eyeFrustum& eye){
        _activeEye = &eye;
        glViewport(portalX, portalY, portalW, portalH);
        glScissor(portalX, portalY, portalW, portalH);
        renderArgs->_viewport = glm::ivec4(portalX, portalY, portalW, portalH);

        glm::mat4 projection = glm::frustum<float>(eye.left, eye.right, eye.bottom, eye.top, nearZ, farZ);
        projection = glm::translate(projection, vec3(eye.modelTranslation, 0, 0));
        eyeCamera.setProjection(projection);
        renderArgs->_renderSide = RenderArgs::MONO;
        qApp->displaySide(renderArgs, eyeCamera, false);
        qApp->getApplicationCompositor().displayOverlayTexture(renderArgs);
        _activeEye = NULL;
    }, [&]{
        // render right side view
        portalX = deviceSize.width() / 2;
    });
    glDisable(GL_SCISSOR_TEST);

    // FIXME - glow effect is removed, 3D TV mode broken until we get display plugins working
    auto finalFbo = DependencyManager::get<GlowEffect>()->render(renderArgs);
    auto fboSize = finalFbo->getSize();
    // Get the ACTUAL device size for the BLIT
    deviceSize = qApp->getDeviceSize();

    glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
    glBlitFramebuffer(0, 0, fboSize.x, fboSize.y,
        0, 0, deviceSize.width(), deviceSize.height(),
        GL_COLOR_BUFFER_BIT, GL_NEAREST);
    glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);

    // reset the viewport to how we started
    glViewport(0, 0, deviceSize.width(), deviceSize.height());

#endif
}

void TV3DManager::overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
        float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) {
    if (_activeEye) {
        left = _activeEye->left;
        right = _activeEye->right;
        bottom = _activeEye->bottom;
        top = _activeEye->top;
    }
}

@ -1,64 +0,0 @@
//
//  TV3DManager.h
//  interface/src/devices
//
//  Created by Brad Hefta-Gaub on 12/24/2013.
//  Copyright 2013 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_TV3DManager_h
#define hifi_TV3DManager_h

#include <iostream>

#include <glm/glm.hpp>

class Camera;
class RenderArgs;

struct eyeFrustum {
    double left;
    double right;
    double bottom;
    double top;
    float modelTranslation;
};


/// Handles interaction with 3D TVs
class TV3DManager {
public:
    static void connect();
    static bool isConnected();
    static void configureCamera(Camera& camera, int screenWidth, int screenHeight);
    static void display(RenderArgs* renderArgs, Camera& whichCamera);
    static void overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
        float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane);
private:
    static void setFrustum(Camera& whichCamera);
    static int _screenWidth;
    static int _screenHeight;
    static double _aspect;
    static eyeFrustum _leftEye;
    static eyeFrustum _rightEye;
    static eyeFrustum* _activeEye;

    // The first function is the code executed for each eye
    // while the second is code to be executed between the two eyes.
    // The use case here is to modify the output viewport coordinates
    // for the new eye.
    // FIXME: we'd like to have a default empty lambda for the second parameter,
    // but gcc 4.8.1 complains about it due to a bug. See
    // http://stackoverflow.com/questions/25490662/lambda-as-default-parameter-to-a-member-function-template
    template<typename F, typename FF>
    static void forEachEye(F f, FF ff) {
        f(_leftEye);
        ff();
        f(_rightEye);
    }
};

#endif // hifi_TV3DManager_h

@ -11,15 +11,16 @@

#include <avatar/AvatarManager.h>
#include <avatar/MyAvatar.h>
#include <GLCanvas.h>
#include <HandData.h>
#include <HFBackEvent.h>

#include "Application.h"
#include "devices/MotionTracker.h"
#include "devices/SixenseManager.h"
#include "ControllerScriptingInterface.h"

// TODO: this needs to be removed, as well as any related controller-specific information
#include <input-plugins/SixenseManager.h>


ControllerScriptingInterface::ControllerScriptingInterface() :
    _mouseCaptured(false),

@ -82,13 +83,14 @@ void inputChannelFromScriptValue(const QScriptValue& object, UserInputMapper::In

QScriptValue actionToScriptValue(QScriptEngine* engine, const UserInputMapper::Action& action) {
    QScriptValue obj = engine->newObject();
    QVector<UserInputMapper::InputChannel> inputChannels = Application::getUserInputMapper()->getInputChannelsForAction(action);
    auto userInputMapper = DependencyManager::get<UserInputMapper>();
    QVector<UserInputMapper::InputChannel> inputChannels = userInputMapper->getInputChannelsForAction(action);
    QScriptValue _inputChannels = engine->newArray(inputChannels.size());
    for (int i = 0; i < inputChannels.size(); i++) {
        _inputChannels.setProperty(i, inputChannelToScriptValue(engine, inputChannels[i]));
    }
    obj.setProperty("action", (int) action);
    obj.setProperty("actionName", Application::getUserInputMapper()->getActionName(action));
    obj.setProperty("actionName", userInputMapper->getActionName(action));
    obj.setProperty("inputChannels", _inputChannels);
    return obj;
}

@ -376,7 +378,7 @@ void ControllerScriptingInterface::releaseJoystick(int joystickIndex) {
}

glm::vec2 ControllerScriptingInterface::getViewportDimensions() const {
    return Application::getInstance()->getCanvasSize();
    return Application::getInstance()->getUiSize();
}

AbstractInputController* ControllerScriptingInterface::createInputController(const QString& deviceName, const QString& tracker) {

@ -428,43 +430,59 @@ void ControllerScriptingInterface::updateInputControllers() {
}

QVector<UserInputMapper::Action> ControllerScriptingInterface::getAllActions() {
    return Application::getUserInputMapper()->getAllActions();
    return DependencyManager::get<UserInputMapper>()->getAllActions();
}

QVector<UserInputMapper::InputChannel> ControllerScriptingInterface::getInputChannelsForAction(UserInputMapper::Action action) {
    return Application::getUserInputMapper()->getInputChannelsForAction(action);
    return DependencyManager::get<UserInputMapper>()->getInputChannelsForAction(action);
}

QString ControllerScriptingInterface::getDeviceName(unsigned int device) {
    return Application::getUserInputMapper()->getDeviceName((unsigned short) device);
    return DependencyManager::get<UserInputMapper>()->getDeviceName((unsigned short)device);
}

QVector<UserInputMapper::InputChannel> ControllerScriptingInterface::getAllInputsForDevice(unsigned int device) {
    return Application::getUserInputMapper()->getAllInputsForDevice(device);
    return DependencyManager::get<UserInputMapper>()->getAllInputsForDevice(device);
}

bool ControllerScriptingInterface::addInputChannel(UserInputMapper::InputChannel inputChannel) {
    return Application::getUserInputMapper()->addInputChannel(inputChannel._action, inputChannel._input, inputChannel._modifier, inputChannel._scale);
    return DependencyManager::get<UserInputMapper>()->addInputChannel(inputChannel._action, inputChannel._input, inputChannel._modifier, inputChannel._scale);
}

bool ControllerScriptingInterface::removeInputChannel(UserInputMapper::InputChannel inputChannel) {
    return Application::getUserInputMapper()->removeInputChannel(inputChannel);
    return DependencyManager::get<UserInputMapper>()->removeInputChannel(inputChannel);
}

QVector<UserInputMapper::InputPair> ControllerScriptingInterface::getAvailableInputs(unsigned int device) {
    return Application::getUserInputMapper()->getAvailableInputs((unsigned short) device);
    return DependencyManager::get<UserInputMapper>()->getAvailableInputs((unsigned short)device);
}

void ControllerScriptingInterface::resetAllDeviceBindings() {
    Application::getUserInputMapper()->resetAllDeviceBindings();
    DependencyManager::get<UserInputMapper>()->resetAllDeviceBindings();
}

void ControllerScriptingInterface::resetDevice(unsigned int device) {
    Application::getUserInputMapper()->resetDevice(device);
    DependencyManager::get<UserInputMapper>()->resetDevice(device);
}

int ControllerScriptingInterface::findDevice(QString name) {
    return Application::getUserInputMapper()->findDevice(name);
    return DependencyManager::get<UserInputMapper>()->findDevice(name);
}

float ControllerScriptingInterface::getActionValue(int action) {
    return DependencyManager::get<UserInputMapper>()->getActionState(UserInputMapper::Action(action));
}

int ControllerScriptingInterface::findAction(QString actionName) {
    auto userInputMapper = DependencyManager::get<UserInputMapper>();
    auto actions = getAllActions();
    for (auto action : actions) {
        if (userInputMapper->getActionName(action) == actionName) {
            return action;
        }
    }
    // If the action isn't found, return -1
    return -1;
}

InputController::InputController(int deviceTrackerId, int subTrackerId, QObject* parent) :

@ -502,4 +520,4 @@ const unsigned int INPUTCONTROLLER_KEY_DEVICE_MASK = 16;

InputController::Key InputController::getKey() const {
    return (((_deviceTrackerId & INPUTCONTROLLER_KEY_DEVICE_MASK) << INPUTCONTROLLER_KEY_DEVICE_OFFSET) | _subTrackerId);
}
}

@ -14,7 +14,7 @@

#include <QtCore/QObject>

#include "ui/UserInputMapper.h"
#include <input-plugins/UserInputMapper.h>

#include <AbstractControllerScriptingInterface.h>
class PalmData;

@ -86,15 +86,24 @@ public:

public slots:
    Q_INVOKABLE virtual QVector<UserInputMapper::Action> getAllActions();
    Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getInputChannelsForAction(UserInputMapper::Action action);
    Q_INVOKABLE virtual QString getDeviceName(unsigned int device);
    Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getAllInputsForDevice(unsigned int device);

    Q_INVOKABLE virtual bool addInputChannel(UserInputMapper::InputChannel inputChannel);
    Q_INVOKABLE virtual bool removeInputChannel(UserInputMapper::InputChannel inputChannel);
    Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getInputChannelsForAction(UserInputMapper::Action action);

    Q_INVOKABLE virtual QVector<UserInputMapper::InputPair> getAvailableInputs(unsigned int device);
    Q_INVOKABLE virtual void resetAllDeviceBindings();
    Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getAllInputsForDevice(unsigned int device);

    Q_INVOKABLE virtual QString getDeviceName(unsigned int device);

    Q_INVOKABLE virtual float getActionValue(int action);

    Q_INVOKABLE virtual void resetDevice(unsigned int device);
    Q_INVOKABLE virtual void resetAllDeviceBindings();
    Q_INVOKABLE virtual int findDevice(QString name);

    Q_INVOKABLE virtual int findAction(QString actionName);

    virtual bool isPrimaryButtonPressed() const;
    virtual glm::vec2 getPrimaryJoystickPosition() const;

@ -122,6 +122,14 @@ void WebWindowClass::setURL(const QString& url) {
    _webView->setUrl(url);
}

void WebWindowClass::setPosition(int x, int y) {
    if (QThread::currentThread() != thread()) {
        QMetaObject::invokeMethod(this, "setPosition", Qt::AutoConnection, Q_ARG(int, x), Q_ARG(int, y));
        return;
    }
    _windowWidget->move(x, y);
}

void WebWindowClass::raise() {
    QMetaObject::invokeMethod(_windowWidget, "showNormal", Qt::AutoConnection);
    QMetaObject::invokeMethod(_windowWidget, "raise", Qt::AutoConnection);

@ -43,6 +43,7 @@ public:

public slots:
    void setVisible(bool visible);
    void setPosition(int x, int y);
    QString getURL() const { return _webView->url().url(); }
    void setURL(const QString& url);
    void raise();

@ -11,6 +11,10 @@

#include "ApplicationCompositor.h"

#include <memory>

#include <QPropertyAnimation>

#include <glm/gtc/type_ptr.hpp>

#include <avatar/AvatarManager.h>

@ -21,6 +25,8 @@
#include "Tooltip.h"

#include "Application.h"
#include <input-plugins/SixenseManager.h> // TODO: any references to sixense should be removed here
#include <input-plugins/InputDevice.h>


// Used to animate the magnification windows

@ -106,7 +112,9 @@ bool raySphereIntersect(const glm::vec3 &dir, const glm::vec3 &origin, float r,
    }
}

ApplicationCompositor::ApplicationCompositor() {
ApplicationCompositor::ApplicationCompositor() :
    _alphaPropertyAnimation(new QPropertyAnimation(this, "alpha"))
{
    memset(_reticleActive, 0, sizeof(_reticleActive));
    memset(_magActive, 0, sizeof(_reticleActive));
    memset(_magSizeMult, 0, sizeof(_magSizeMult));

@ -163,6 +171,8 @@ ApplicationCompositor::ApplicationCompositor() {
            }
        }
    });

    _alphaPropertyAnimation.reset(new QPropertyAnimation(this, "alpha"));
}

ApplicationCompositor::~ApplicationCompositor() {

@ -184,7 +194,8 @@ void ApplicationCompositor::bindCursorTexture(gpu::Batch& batch, uint8_t cursorI
// Draws the FBO texture for the screen
void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
    PROFILE_RANGE(__FUNCTION__);
    if (_alpha == 0.0f) {

    if (_alpha <= 0.0f) {
        return;
    }


@ -204,7 +215,7 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
    auto geometryCache = DependencyManager::get<GeometryCache>();

    geometryCache->useSimpleDrawPipeline(batch);
    batch.setViewportTransform(glm::ivec4(0, 0, deviceSize.width(), deviceSize.height()));
    batch.setViewportTransform(renderArgs->_viewport);
    batch.setModelTransform(Transform());
    batch.setViewTransform(Transform());
    batch.setProjectionTransform(mat4());

@ -232,15 +243,17 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
}


vec2 getPolarCoordinates(const PalmData& palm) {
vec2 ApplicationCompositor::getPolarCoordinates(const PalmData& palm) const {
    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
    auto avatarOrientation = myAvatar->getOrientation();
    auto eyePos = myAvatar->getDefaultEyePosition();
    glm::vec3 tip = myAvatar->getLaserPointerTipPosition(&palm);
    // Direction of the tip relative to the eye
    glm::vec3 tipDirection = tip - eyePos;
    // orient into avatar space
    tipDirection = glm::inverse(avatarOrientation) * tipDirection;
    glm::vec3 relativePos = myAvatar->getDefaultEyePosition();
    glm::quat rotation = myAvatar->getOrientation();
    if (Menu::getInstance()->isOptionChecked(MenuOption::StandingHMDSensorMode)) {
        relativePos = _modelTransform.getTranslation();
        rotation = _modelTransform.getRotation();
    }
    glm::vec3 tipDirection = tip - relativePos;
    tipDirection = glm::inverse(rotation) * tipDirection;
    // Normalize for trig functions
    tipDirection = glm::normalize(tipDirection);
    // Convert to polar coordinates

@ -251,7 +264,8 @@ vec2 getPolarCoordinates(const PalmData& palm) {
// Draws the FBO texture for Oculus rift.
void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int eye) {
    PROFILE_RANGE(__FUNCTION__);
    if (_alpha == 0.0f) {

    if (_alpha <= 0.0f) {
        return;
    }


@ -278,11 +292,13 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int

    batch.setResourceTexture(0, overlayFramebuffer->getRenderBuffer(0));

    batch.setViewTransform(Transform());
    batch.setProjectionTransform(qApp->getEyeProjection(eye));
    mat4 camMat;
    _cameraBaseTransform.getMatrix(camMat);
    camMat = camMat * qApp->getEyePose(eye);
    batch.setViewportTransform(renderArgs->_viewport);
    batch.setViewTransform(camMat);

    mat4 eyePose = qApp->getEyePose(eye);
    glm::mat4 overlayXfm = glm::inverse(eyePose);
    batch.setProjectionTransform(qApp->getEyeProjection(eye));

#ifdef DEBUG_OVERLAY
    {

@ -291,7 +307,9 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
    }
#else
    {
        batch.setModelTransform(overlayXfm);
        //batch.setModelTransform(overlayXfm);

        batch.setModelTransform(_modelTransform);
        drawSphereSection(batch);
    }
#endif

@ -302,8 +320,11 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int

    bindCursorTexture(batch);

    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
    //Controller Pointers
    glm::mat4 overlayXfm;
    _modelTransform.getMatrix(overlayXfm);

    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
    for (int i = 0; i < (int)myAvatar->getHand()->getNumPalms(); i++) {
        PalmData& palm = myAvatar->getHand()->getPalms()[i];
        if (palm.isActive()) {

@ -345,13 +366,18 @@ void ApplicationCompositor::computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& or

    // We need the RAW camera orientation and position, because this is what the overlay is
    // rendered relative to
    const glm::vec3 overlayPosition = qApp->getCamera()->getPosition();
    const glm::quat overlayOrientation = qApp->getCamera()->getRotation();
    glm::vec3 overlayPosition = qApp->getCamera()->getPosition();
    glm::quat overlayOrientation = qApp->getCamera()->getRotation();

    if (Menu::getInstance()->isOptionChecked(MenuOption::StandingHMDSensorMode)) {
        overlayPosition = _modelTransform.getTranslation();
        overlayOrientation = _modelTransform.getRotation();
    }

    // Intersection UI overlay space
    glm::vec3 worldSpaceDirection = overlayOrientation * overlaySpaceDirection;
    glm::vec3 worldSpaceIntersection = (glm::normalize(worldSpaceDirection) * _oculusUIRadius) + overlayPosition;
    glm::vec3 worldSpaceHeadPosition = (overlayOrientation * glm::vec3(qApp->getHeadPose()[3])) + overlayPosition;
    glm::vec3 worldSpaceHeadPosition = (overlayOrientation * extractTranslation(qApp->getHMDSensorPose())) + overlayPosition;

    // Intersection in world space
    origin = worldSpaceHeadPosition;

@ -410,13 +436,15 @@ bool ApplicationCompositor::calculateRayUICollisionPoint(const glm::vec3& positi
void ApplicationCompositor::renderPointers(gpu::Batch& batch) {
    if (qApp->isHMDMode() && !qApp->getLastMouseMoveWasSimulated() && !qApp->isMouseHidden()) {
        //If we are in oculus, render reticle later
        auto trueMouse = qApp->getTrueMouse();
        trueMouse /= qApp->getCanvasSize();
        QPoint position = QPoint(qApp->getTrueMouseX(), qApp->getTrueMouseY());
        _reticlePosition[MOUSE] = position;
        _reticleActive[MOUSE] = true;
        _magActive[MOUSE] = _magnifier;
        _reticleActive[LEFT_CONTROLLER] = false;
        _reticleActive[RIGHT_CONTROLLER] = false;
    } else if (qApp->getLastMouseMoveWasSimulated() && Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput)) {
    } else if (qApp->getLastMouseMoveWasSimulated() && Menu::getInstance()->isOptionChecked(MenuOption::HandMouseInput)) {
        //only render controller pointer if we aren't already rendering a mouse pointer
        _reticleActive[MOUSE] = false;
        _magActive[MOUSE] = false;

@ -491,6 +519,7 @@ void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {

        auto canvasSize = qApp->getCanvasSize();
        int mouseX, mouseY;

        // Get directon relative to avatar orientation
        glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * palmData->getFingerDirection();


@ -499,7 +528,7 @@ void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {
        float yAngle = 0.5f - ((atan2f(direction.z, direction.y) + (float)PI_OVER_TWO));

        // Get the pixel range over which the xAngle and yAngle are scaled
        float cursorRange = canvasSize.x * SixenseManager::getInstance().getCursorPixelRangeMult();
        float cursorRange = canvasSize.x * InputDevice::getCursorPixelRangeMult();

        mouseX = (canvasSize.x / 2.0f + cursorRange * xAngle);
        mouseY = (canvasSize.y / 2.0f + cursorRange * yAngle);

@ -611,6 +640,19 @@ void ApplicationCompositor::drawSphereSection(gpu::Batch& batch) {
    batch.setInputFormat(streamFormat);

    static const int VERTEX_STRIDE = sizeof(vec3) + sizeof(vec2) + sizeof(vec4);

    if (_prevAlpha != _alpha) {
        // adjust alpha by munging vertex color alpha.
        // FIXME we should probably just use a uniform for this.
        float* floatPtr = reinterpret_cast<float*>(_hemiVertices->editData());
        const auto ALPHA_FLOAT_OFFSET = (sizeof(vec3) + sizeof(vec2) + sizeof(vec3)) / sizeof(float);
        const auto VERTEX_FLOAT_STRIDE = (sizeof(vec3) + sizeof(vec2) + sizeof(vec4)) / sizeof(float);
        const auto NUM_VERTS = _hemiVertices->getSize() / VERTEX_STRIDE;
        for (size_t i = 0; i < NUM_VERTS; i++) {
            floatPtr[i * VERTEX_FLOAT_STRIDE + ALPHA_FLOAT_OFFSET] = _alpha;
        }
    }

    gpu::BufferView posView(_hemiVertices, 0, _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::POSITION)._element);
    gpu::BufferView uvView(_hemiVertices, sizeof(vec3), _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::TEXCOORD)._element);
    gpu::BufferView colView(_hemiVertices, sizeof(vec3) + sizeof(vec2), _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::COLOR)._element);

@ -700,3 +742,29 @@ void ApplicationCompositor::updateTooltips() {
        }
    }
}

static const float FADE_DURATION = 500.0f;
void ApplicationCompositor::fadeIn() {
    _fadeInAlpha = true;

    _alphaPropertyAnimation->setDuration(FADE_DURATION);
    _alphaPropertyAnimation->setStartValue(_alpha);
    _alphaPropertyAnimation->setEndValue(1.0f);
    _alphaPropertyAnimation->start();
}
void ApplicationCompositor::fadeOut() {
    _fadeInAlpha = false;

    _alphaPropertyAnimation->setDuration(FADE_DURATION);
    _alphaPropertyAnimation->setStartValue(_alpha);
    _alphaPropertyAnimation->setEndValue(0.0f);
    _alphaPropertyAnimation->start();
}

void ApplicationCompositor::toggle() {
    if (_fadeInAlpha) {
        fadeOut();
    } else {
        fadeIn();
    }
}

@ -10,6 +10,7 @@
#define hifi_ApplicationCompositor_h

#include <QObject>
#include <QPropertyAnimation>
#include <cstdint>

#include <EntityItemID.h>

@ -33,6 +34,8 @@ const float DEFAULT_HMD_UI_ANGULAR_SIZE = 72.0f;
// facilities of this class
class ApplicationCompositor : public QObject {
    Q_OBJECT

    Q_PROPERTY(float alpha READ getAlpha WRITE setAlpha)
public:
    ApplicationCompositor();
    ~ApplicationCompositor();

@ -64,6 +67,19 @@ public:
    void computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& origin, glm::vec3& direction) const;
    uint32_t getOverlayTexture() const;

    void setCameraBaseTransform(const Transform& transform) { _cameraBaseTransform = transform; }
    const Transform& getCameraBaseTransform() const { return _cameraBaseTransform; }

    void setModelTransform(const Transform& transform) { _modelTransform = transform; }
    const Transform& getModelTransform() const { return _modelTransform; }

    void fadeIn();
    void fadeOut();
    void toggle();

    float getAlpha() const { return _alpha; }
    void setAlpha(float alpha) { _alpha = alpha; }

    static glm::vec2 directionToSpherical(const glm::vec3 & direction);
    static glm::vec3 sphericalToDirection(const glm::vec2 & sphericalPos);
    static glm::vec2 screenToSpherical(const glm::vec2 & screenPos);

@ -78,7 +94,8 @@ private:

    void renderPointers(gpu::Batch& batch);
    void renderControllerPointers(gpu::Batch& batch);
    void renderPointersOculus(gpu::Batch& batch);

    vec2 getPolarCoordinates(const PalmData& palm) const;

    // Support for hovering and tooltips
    static EntityItemID _noItemId;

@ -100,6 +117,8 @@ private:
    bool _magnifier{ true };

    float _alpha{ 1.0f };
    float _prevAlpha{ 1.0f };
    float _fadeInAlpha{ true };
    float _oculusUIRadius{ 1.0f };

    QMap<uint16_t, gpu::TexturePointer> _cursors;

@ -115,6 +134,11 @@ private:
    glm::vec3 _previousMagnifierBottomRight;
    glm::vec3 _previousMagnifierTopLeft;
    glm::vec3 _previousMagnifierTopRight;

    Transform _modelTransform;
    Transform _cameraBaseTransform;

    std::unique_ptr<QPropertyAnimation> _alphaPropertyAnimation;
};

#endif // hifi_ApplicationCompositor_h

@ -15,7 +15,6 @@
#include <AudioClient.h>
#include <avatar/AvatarManager.h>
#include <devices/Faceshift.h>
#include <devices/SixenseManager.h>
#include <NetworkingConstants.h>

#include "Application.h"

@ -19,48 +19,59 @@
#include <QScreen>
#include <QWindow>

#include <plugins/PluginManager.h>
#include <display-plugins/DisplayPlugin.h>

#include "MainWindow.h"
#include "Menu.h"
#include "ui/DialogsManager.h"
#include "ui/HMDToolsDialog.h"
#include "devices/OculusManager.h"

static const int WIDTH = 350;
static const int HEIGHT = 100;

HMDToolsDialog::HMDToolsDialog(QWidget* parent) :
    QDialog(parent, Qt::Window | Qt::WindowCloseButtonHint | Qt::WindowStaysOnTopHint) ,
    _previousScreen(NULL),
    _hmdScreen(NULL),
    _hmdScreenNumber(-1),
    _switchModeButton(NULL),
    _debugDetails(NULL),
    _previousDialogScreen(NULL),
    _inHDMMode(false)
    QDialog(parent, Qt::Window | Qt::WindowCloseButtonHint | Qt::WindowStaysOnTopHint)
{
    this->setWindowTitle("HMD Tools");
    // FIXME do we want to support more than one connected HMD? It seems like a pretty corner case
    foreach(auto displayPlugin, PluginManager::getInstance()->getDisplayPlugins()) {
        // The first plugin is always the standard 2D display, by convention
        if (_defaultPluginName.isEmpty()) {
            _defaultPluginName = displayPlugin->getName();
            continue;
        }

        if (displayPlugin->isHmd()) {
            // Not all HMD's have corresponding screens
            if (displayPlugin->getHmdScreen() >= 0) {
                _hmdScreenNumber = displayPlugin->getHmdScreen();
            }
            _hmdPluginName = displayPlugin->getName();
            break;
        }
    }

    setWindowTitle("HMD Tools");

    // Create layouter
    QFormLayout* form = new QFormLayout();
    const int WIDTH = 350;
    {
        QFormLayout* form = new QFormLayout();
        // Add a button to enter
        _switchModeButton = new QPushButton("Toggle HMD Mode");
        if (_hmdPluginName.isEmpty()) {
            _switchModeButton->setEnabled(false);
        }
        // Add a button to enter
        _switchModeButton->setFixedWidth(WIDTH);
        form->addRow("", _switchModeButton);
        // Create a label with debug details...
        _debugDetails = new QLabel();
        _debugDetails->setFixedSize(WIDTH, HEIGHT);
        form->addRow("", _debugDetails);
        setLayout(form);
    }

    // Add a button to enter
    _switchModeButton = new QPushButton("Enter HMD Mode");
    _switchModeButton->setFixedWidth(WIDTH);
    form->addRow("", _switchModeButton);
    connect(_switchModeButton,SIGNAL(clicked(bool)),this,SLOT(switchModeClicked(bool)));

    // Create a label with debug details...
    _debugDetails = new QLabel();
    _debugDetails->setText(getDebugDetails());
    const int HEIGHT = 100;
    _debugDetails->setFixedSize(WIDTH, HEIGHT);
    form->addRow("", _debugDetails);

    this->QDialog::setLayout(form);

    Application::getInstance()->getWindow()->activateWindow();

    // watch for our application window moving screens. If it does we want to update our screen details
    QWindow* mainWindow = Application::getInstance()->getWindow()->windowHandle();
    connect(mainWindow, &QWindow::screenChanged, this, &HMDToolsDialog::applicationWindowScreenChanged);
    qApp->getWindow()->activateWindow();

    // watch for our dialog window moving screens. If it does we want to enforce our rules about
    // what screens we're allowed on

@ -82,11 +93,31 @@ HMDToolsDialog::HMDToolsDialog(QWidget* parent) :
        watchWindow(dialogsManager->getLodToolsDialog()->windowHandle());
    }

    connect(_switchModeButton, &QPushButton::clicked, [this]{
        toggleHMDMode();
    });

    // when the application is about to quit, leave HMD mode
    connect(Application::getInstance(), SIGNAL(beforeAboutToQuit()), this, SLOT(aboutToQuit()));
    connect(qApp, &Application::beforeAboutToQuit, [this]{
        // FIXME this is ineffective because it doesn't trigger the menu to
        // save the fact that VR Mode is not checked.
        leaveHMDMode();
    });

    connect(qApp, &Application::activeDisplayPluginChanged, [this]{
        updateUi();
    });

    // watch for our application window moving screens. If it does we want to update our screen details
    QWindow* mainWindow = Application::getInstance()->getWindow()->windowHandle();
    connect(mainWindow, &QWindow::screenChanged, [this]{
        updateUi();
    });

    // keep track of changes to the number of screens
    connect(QApplication::desktop(), &QDesktopWidget::screenCountChanged, this, &HMDToolsDialog::screenCountChanged);

    updateUi();
}

HMDToolsDialog::~HMDToolsDialog() {
|
||||
|
@ -96,18 +127,13 @@ HMDToolsDialog::~HMDToolsDialog() {
|
|||
_windowWatchers.clear();
|
||||
}
|
||||
|
||||
void HMDToolsDialog::applicationWindowScreenChanged(QScreen* screen) {
|
||||
_debugDetails->setText(getDebugDetails());
|
||||
}
|
||||
|
||||
QString HMDToolsDialog::getDebugDetails() const {
|
||||
QString results;
|
||||
|
||||
int hmdScreenNumber = OculusManager::getHMDScreen();
|
||||
if (hmdScreenNumber >= 0) {
|
||||
results += "HMD Screen: " + QGuiApplication::screens()[hmdScreenNumber]->name() + "\n";
|
||||
if (_hmdScreenNumber >= 0) {
|
||||
results += "HMD Screen: " + QGuiApplication::screens()[_hmdScreenNumber]->name() + "\n";
|
||||
} else {
|
||||
results += "HMD Screen Name: Unknown\n";
|
||||
results += "HMD Screen Name: N/A\n";
|
||||
}
|
||||
|
||||
int desktopPrimaryScreenNumber = QApplication::desktop()->primaryScreen();
|
||||
|
@ -122,37 +148,25 @@ QString HMDToolsDialog::getDebugDetails() const {
|
|||
return results;
|
||||
}
|
||||
|
||||
void HMDToolsDialog::switchModeClicked(bool checked) {
|
||||
if (!_inHDMMode) {
|
||||
enterHDMMode();
|
||||
void HMDToolsDialog::toggleHMDMode() {
|
||||
if (!qApp->isHMDMode()) {
|
||||
enterHMDMode();
|
||||
} else {
|
||||
leaveHDMMode();
|
||||
leaveHMDMode();
|
||||
}
|
||||
}
|
||||
|
||||
void HMDToolsDialog::enterHDMMode() {
|
||||
if (!_inHDMMode) {
|
||||
_switchModeButton->setText("Leave HMD Mode");
|
||||
_debugDetails->setText(getDebugDetails());
|
||||
|
||||
// if we're on a single screen setup, then hide our tools window when entering HMD mode
|
||||
if (QApplication::desktop()->screenCount() == 1) {
|
||||
close();
|
||||
}
|
||||
|
||||
Application::getInstance()->setEnableVRMode(true);
|
||||
|
||||
_inHDMMode = true;
|
||||
}
|
||||
}
|
||||
|
||||
void HMDToolsDialog::leaveHDMMode() {
|
||||
if (_inHDMMode) {
|
||||
_switchModeButton->setText("Enter HMD Mode");
|
||||
_debugDetails->setText(getDebugDetails());
|
||||
Application::getInstance()->setEnableVRMode(false);
|
||||
void HMDToolsDialog::enterHMDMode() {
|
||||
if (!qApp->isHMDMode()) {
|
||||
Application::getInstance()->setActiveDisplayPlugin(_hmdPluginName);
|
||||
Application::getInstance()->getWindow()->activateWindow();
|
||||
}
|
||||
}
|
||||
|
||||
void HMDToolsDialog::leaveHMDMode() {
|
||||
if (qApp->isHMDMode()) {
|
||||
Application::getInstance()->setActiveDisplayPlugin(_defaultPluginName);
|
||||
Application::getInstance()->getWindow()->activateWindow();
|
||||
_inHDMMode = false;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -163,7 +177,7 @@ void HMDToolsDialog::reject() {
|
|||
|
||||
void HMDToolsDialog::closeEvent(QCloseEvent* event) {
|
||||
// TODO: consider if we want to prevent closing of this window with event->ignore();
|
||||
this->QDialog::closeEvent(event);
|
||||
QDialog::closeEvent(event);
|
||||
emit closed();
|
||||
}
|
||||
|
||||
|
@ -174,9 +188,15 @@ void HMDToolsDialog::centerCursorOnWidget(QWidget* widget) {
|
|||
QCursor::setPos(screen, windowCenter);
|
||||
}
|
||||
|
||||
void HMDToolsDialog::updateUi() {
|
||||
_switchModeButton->setText(qApp->isHMDMode() ? "Leave HMD Mode" : "Enter HMD Mode");
|
||||
_debugDetails->setText(getDebugDetails());
|
||||
}
|
||||
|
||||
void HMDToolsDialog::showEvent(QShowEvent* event) {
|
||||
// center the cursor on the hmd tools dialog
|
||||
centerCursorOnWidget(this);
|
||||
updateUi();
|
||||
}
|
||||
|
||||
void HMDToolsDialog::hideEvent(QHideEvent* event) {
|
||||
|
@ -184,33 +204,31 @@ void HMDToolsDialog::hideEvent(QHideEvent* event) {
|
|||
centerCursorOnWidget(Application::getInstance()->getWindow());
|
||||
}
|
||||
|
||||
|
||||
void HMDToolsDialog::aboutToQuit() {
|
||||
if (_inHDMMode) {
|
||||
// FIXME this is ineffective because it doesn't trigger the menu to
|
||||
// save the fact that VR Mode is not checked.
|
||||
leaveHDMMode();
|
||||
}
|
||||
}
|
||||
|
||||
void HMDToolsDialog::screenCountChanged(int newCount) {
|
||||
if (!OculusManager::isConnected()) {
|
||||
//OculusManager::connect();
|
||||
int hmdScreenNumber = -1;
|
||||
auto displayPlugins = PluginManager::getInstance()->getDisplayPlugins();
|
||||
foreach(auto dp, displayPlugins) {
|
||||
if (dp->isHmd()) {
|
||||
if (dp->getHmdScreen() >= 0) {
|
||||
hmdScreenNumber = dp->getHmdScreen();
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
int hmdScreenNumber = OculusManager::getHMDScreen();
|
||||
|
||||
if (_inHDMMode && _hmdScreenNumber != hmdScreenNumber) {
|
||||
if (qApp->isHMDMode() && _hmdScreenNumber != hmdScreenNumber) {
|
||||
qDebug() << "HMD Display changed WHILE IN HMD MODE";
|
||||
leaveHDMMode();
|
||||
leaveHMDMode();
|
||||
|
||||
// if there is a new best HMD screen then go back into HMD mode after we are done leaving
|
||||
if (hmdScreenNumber >= 0) {
|
||||
qDebug() << "Trying to go back into HDM Mode";
|
||||
qDebug() << "Trying to go back into HMD Mode";
|
||||
const int SLIGHT_DELAY = 2000;
|
||||
QTimer::singleShot(SLIGHT_DELAY, this, SLOT(enterHDMMode()));
|
||||
QTimer::singleShot(SLIGHT_DELAY, [this]{
|
||||
enterHMDMode();
|
||||
});
|
||||
}
|
||||
}
|
||||
_debugDetails->setText(getDebugDetails());
|
||||
}
|
||||
|
||||
void HMDToolsDialog::watchWindow(QWindow* window) {
|
||||
|
@ -247,9 +265,8 @@ void HMDWindowWatcher::windowScreenChanged(QScreen* screen) {
|
|||
// if we have more than one screen, and a known hmdScreen then try to
|
||||
// keep our dialog off of the hmdScreen
|
||||
if (QApplication::desktop()->screenCount() > 1) {
|
||||
|
||||
int hmdScreenNumber = _hmdTools->_hmdScreenNumber;
|
||||
// we want to use a local variable here because we are not necessarily in HMD mode
|
||||
int hmdScreenNumber = OculusManager::getHMDScreen();
|
||||
if (hmdScreenNumber >= 0) {
|
||||
QScreen* hmdScreen = QGuiApplication::screens()[hmdScreenNumber];
|
||||
if (screen == hmdScreen) {
|
||||
|
|
|
@ -34,9 +34,6 @@ signals:
|
|||
|
||||
public slots:
|
||||
void reject();
|
||||
void switchModeClicked(bool checked);
|
||||
void applicationWindowScreenChanged(QScreen* screen);
|
||||
void aboutToQuit();
|
||||
void screenCountChanged(int newCount);
|
||||
|
||||
protected:
|
||||
|
@ -46,20 +43,24 @@ protected:
|
|||
|
||||
private:
|
||||
void centerCursorOnWidget(QWidget* widget);
|
||||
void enterHDMMode();
|
||||
void leaveHDMMode();
|
||||
void enterHMDMode();
|
||||
void leaveHMDMode();
|
||||
void toggleHMDMode();
|
||||
void updateUi();
|
||||
|
||||
QScreen* _previousScreen;
|
||||
QScreen* _hmdScreen;
|
||||
int _hmdScreenNumber;
|
||||
QPushButton* _switchModeButton;
|
||||
QLabel* _debugDetails;
|
||||
QScreen* _previousScreen{ nullptr };
|
||||
QScreen* _hmdScreen{ nullptr };
|
||||
int _hmdScreenNumber{ -1 };
|
||||
QPushButton* _switchModeButton{ nullptr };
|
||||
QLabel* _debugDetails{ nullptr };
|
||||
|
||||
QRect _previousDialogRect;
|
||||
QScreen* _previousDialogScreen;
|
||||
bool _inHDMMode;
|
||||
QScreen* _previousDialogScreen{ nullptr };
|
||||
QString _hmdPluginName;
|
||||
QString _defaultPluginName;
|
||||
|
||||
QHash<QWindow*, HMDWindowWatcher*> _windowWatchers;
|
||||
friend class HMDWindowWatcher;
|
||||
};
|
||||
|
||||
|
||||
|
|
157
interface/src/ui/OverlayConductor.cpp
Normal file
|
@ -0,0 +1,157 @@
|
|||
//
|
||||
// OverlayConductor.cpp
|
||||
// interface/src/ui
|
||||
//
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "Application.h"
|
||||
#include "InterfaceLogging.h"
|
||||
#include "avatar/AvatarManager.h"
|
||||
|
||||
#include "OverlayConductor.h"
|
||||
|
||||
OverlayConductor::OverlayConductor() {
|
||||
}
|
||||
|
||||
OverlayConductor::~OverlayConductor() {
|
||||
}
|
||||
|
||||
void OverlayConductor::update(float dt) {
|
||||
|
||||
updateMode();
|
||||
|
||||
switch (_mode) {
|
||||
case SITTING: {
|
||||
// when sitting, the overlay is at the origin, facing down the -z axis.
|
||||
// the camera is taken directly from the HMD.
|
||||
Transform identity;
|
||||
qApp->getApplicationCompositor().setModelTransform(identity);
|
||||
qApp->getApplicationCompositor().setCameraBaseTransform(identity);
|
||||
break;
|
||||
}
|
||||
case STANDING: {
|
||||
// when standing, the overlay is at a reference position, which is set when the overlay is
|
||||
// enabled. The camera is taken directly from the HMD, but in world space.
|
||||
// So the sensorToWorldMatrix must be applied.
|
||||
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
Transform t;
|
||||
t.evalFromRawMatrix(myAvatar->getSensorToWorldMatrix());
|
||||
qApp->getApplicationCompositor().setCameraBaseTransform(t);
|
||||
|
||||
// detect when the head moves outside of the sweet spot, or looks away.
|
||||
mat4 headMat = myAvatar->getSensorToWorldMatrix() * qApp->getHMDSensorPose();
|
||||
vec3 headWorldPos = extractTranslation(headMat);
|
||||
vec3 headForward = glm::quat_cast(headMat) * glm::vec3(0.0f, 0.0f, -1.0f);
|
||||
Transform modelXform = qApp->getApplicationCompositor().getModelTransform();
|
||||
vec3 compositorWorldPos = modelXform.getTranslation();
|
||||
vec3 compositorForward = modelXform.getRotation() * glm::vec3(0.0f, 0.0f, -1.0f);
|
||||
const float MAX_COMPOSITOR_DISTANCE = 0.6f;
|
||||
const float MAX_COMPOSITOR_ANGLE = 110.0f;
|
||||
if (_enabled && (glm::distance(headWorldPos, compositorWorldPos) > MAX_COMPOSITOR_DISTANCE ||
|
||||
glm::dot(headForward, compositorForward) < cosf(glm::radians(MAX_COMPOSITOR_ANGLE)))) {
|
||||
// fade out the overlay
|
||||
setEnabled(false);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case FLAT:
|
||||
// do nothing
|
||||
break;
|
||||
}
|
||||
}
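The STANDING branch above fades the overlay out once the head drifts too far from, or looks too far away from, the compositor. As a hedged reference, the same test reads like this as a free function (the name, signature, and includes are illustrative, not part of this change):

    #include <cmath>
    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    // Returns true when the head has left the overlay's "sweet spot".
    bool isOutsideSweetSpot(const glm::mat4& headMat, const glm::vec3& overlayPos, const glm::vec3& overlayForward) {
        const float MAX_COMPOSITOR_DISTANCE = 0.6f;       // same thresholds as above
        const float MAX_COMPOSITOR_ANGLE = 110.0f;        // degrees
        glm::vec3 headPos = glm::vec3(headMat[3]);        // translation column of the head matrix
        glm::vec3 headForward = glm::quat_cast(headMat) * glm::vec3(0.0f, 0.0f, -1.0f);
        bool tooFar = glm::distance(headPos, overlayPos) > MAX_COMPOSITOR_DISTANCE;
        bool lookingAway = glm::dot(headForward, overlayForward) < cosf(glm::radians(MAX_COMPOSITOR_ANGLE));
        return tooFar || lookingAway;
    }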
|
||||
|
||||
void OverlayConductor::updateMode() {
|
||||
|
||||
Mode newMode;
|
||||
if (qApp->isHMDMode()) {
|
||||
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
if (myAvatar->getStandingHMDSensorMode()) {
|
||||
newMode = STANDING;
|
||||
} else {
|
||||
newMode = SITTING;
|
||||
}
|
||||
} else {
|
||||
newMode = FLAT;
|
||||
}
|
||||
|
||||
if (newMode != _mode) {
|
||||
switch (newMode) {
|
||||
case SITTING: {
|
||||
// enter the SITTING state
|
||||
// place the overlay at origin
|
||||
Transform identity;
|
||||
qApp->getApplicationCompositor().setModelTransform(identity);
|
||||
break;
|
||||
}
|
||||
case STANDING: {
|
||||
// enter the STANDING state
|
||||
// place the overlay at the current hmd position in world space
|
||||
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
auto camMat = cancelOutRollAndPitch(myAvatar->getSensorToWorldMatrix() * qApp->getHMDSensorPose());
|
||||
Transform t;
|
||||
t.setTranslation(extractTranslation(camMat));
|
||||
t.setRotation(glm::quat_cast(camMat));
|
||||
qApp->getApplicationCompositor().setModelTransform(t);
|
||||
break;
|
||||
}
|
||||
|
||||
case FLAT:
|
||||
// do nothing
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
_mode = newMode;
|
||||
}
|
||||
|
||||
void OverlayConductor::setEnabled(bool enabled) {
|
||||
|
||||
if (enabled == _enabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (_enabled) {
|
||||
// alpha fadeOut the overlay mesh.
|
||||
qApp->getApplicationCompositor().fadeOut();
|
||||
|
||||
// disable mouse clicks from script
|
||||
qApp->getOverlays().disable();
|
||||
|
||||
// disable QML events
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
offscreenUi->getRootItem()->setEnabled(false);
|
||||
|
||||
_enabled = false;
|
||||
} else {
|
||||
// alpha fadeIn the overlay mesh.
|
||||
qApp->getApplicationCompositor().fadeIn();
|
||||
|
||||
// enable mouse clicks from script
|
||||
qApp->getOverlays().enable();
|
||||
|
||||
// enable QML events
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
offscreenUi->getRootItem()->setEnabled(true);
|
||||
|
||||
if (_mode == STANDING) {
|
||||
// place the overlay at the current hmd position in world space
|
||||
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
auto camMat = cancelOutRollAndPitch(myAvatar->getSensorToWorldMatrix() * qApp->getHMDSensorPose());
|
||||
Transform t;
|
||||
t.setTranslation(extractTranslation(camMat));
|
||||
t.setRotation(glm::quat_cast(camMat));
|
||||
qApp->getApplicationCompositor().setModelTransform(t);
|
||||
}
|
||||
|
||||
_enabled = true;
|
||||
}
|
||||
}
|
||||
|
||||
bool OverlayConductor::getEnabled() const {
|
||||
return _enabled;
|
||||
}
|
||||
|
36
interface/src/ui/OverlayConductor.h
Normal file
|
@ -0,0 +1,36 @@
|
|||
//
|
||||
// OverlayConductor.h
|
||||
// interface/src/ui
|
||||
//
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_OverlayConductor_h
|
||||
#define hifi_OverlayConductor_h
|
||||
|
||||
class OverlayConductor {
|
||||
public:
|
||||
OverlayConductor();
|
||||
~OverlayConductor();
|
||||
|
||||
void update(float dt);
|
||||
void setEnabled(bool enable);
|
||||
bool getEnabled() const;
|
||||
|
||||
private:
|
||||
void updateMode();
|
||||
|
||||
enum Mode {
|
||||
FLAT,
|
||||
SITTING,
|
||||
STANDING
|
||||
};
|
||||
|
||||
Mode _mode = FLAT;
|
||||
bool _enabled = true;
|
||||
};
|
||||
|
||||
#endif
|
|
@ -16,7 +16,7 @@
|
|||
#include <avatar/AvatarManager.h>
|
||||
#include <devices/DdeFaceTracker.h>
|
||||
#include <devices/Faceshift.h>
|
||||
#include <devices/SixenseManager.h>
|
||||
#include <input-plugins/SixenseManager.h> // TODO: This should be replaced with InputDevice/InputPlugin, or something similar
|
||||
#include <NetworkingConstants.h>
|
||||
|
||||
#include "Application.h"
|
||||
|
@ -177,10 +177,13 @@ void PreferencesDialog::loadPreferences() {
|
|||
|
||||
ui.maxOctreePPSSpin->setValue(qApp->getMaxOctreePacketsPerSecond());
|
||||
|
||||
#if 0
|
||||
ui.oculusUIAngularSizeSpin->setValue(qApp->getApplicationCompositor().getHmdUIAngularSize());
|
||||
#endif
|
||||
|
||||
ui.sixenseReticleMoveSpeedSpin->setValue(InputDevice::getReticleMoveSpeed());
|
||||
|
||||
SixenseManager& sixense = SixenseManager::getInstance();
|
||||
ui.sixenseReticleMoveSpeedSpin->setValue(sixense.getReticleMoveSpeed());
|
||||
ui.invertSixenseButtonsCheckBox->setChecked(sixense.getInvertButtons());
|
||||
|
||||
// LOD items
|
||||
|
@ -244,7 +247,7 @@ void PreferencesDialog::savePreferences() {
|
|||
qApp->getApplicationCompositor().setHmdUIAngularSize(ui.oculusUIAngularSizeSpin->value());
|
||||
|
||||
SixenseManager& sixense = SixenseManager::getInstance();
|
||||
sixense.setReticleMoveSpeed(ui.sixenseReticleMoveSpeedSpin->value());
|
||||
InputDevice::setReticleMoveSpeed(ui.sixenseReticleMoveSpeedSpin->value());
|
||||
sixense.setInvertButtons(ui.invertSixenseButtonsCheckBox->isChecked());
|
||||
|
||||
auto audio = DependencyManager::get<AudioClient>();
|
||||
|
|
|
@ -22,7 +22,6 @@
|
|||
#include <avatar/AvatarManager.h>
|
||||
#include <avatar/MyAvatar.h>
|
||||
#include <FileUtils.h>
|
||||
#include <GLCanvas.h>
|
||||
#include <NodeList.h>
|
||||
|
||||
#include "Application.h"
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
#include <avatar/AvatarManager.h>
|
||||
#include <Application.h>
|
||||
#include <GeometryCache.h>
|
||||
#include <GLCanvas.h>
|
||||
#include <LODManager.h>
|
||||
#include <PerfStat.h>
|
||||
|
||||
|
|
|
@ -104,7 +104,7 @@ void Overlays::renderHUD(RenderArgs* renderArgs) {
|
|||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
|
||||
auto size = qApp->getCanvasSize();
|
||||
auto size = qApp->getUiSize();
|
||||
int width = size.x;
|
||||
int height = size.y;
|
||||
mat4 legacyProjection = glm::ortho<float>(0, width, height, 0, -1000, 1000);
|
||||
|
@ -123,6 +123,16 @@ void Overlays::renderHUD(RenderArgs* renderArgs) {
|
|||
}
|
||||
}
|
||||
|
||||
void Overlays::disable() {
|
||||
QWriteLocker lock(&_lock);
|
||||
_enabled = false;
|
||||
}
|
||||
|
||||
void Overlays::enable() {
|
||||
QWriteLocker lock(&_lock);
|
||||
_enabled = true;
|
||||
}
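enable() and disable() take the write side of the same QReadWriteLock that readers such as getOverlayAtPoint() take for reading (see the hunk below). A minimal sketch of that pattern, assuming nothing beyond Qt itself:

    #include <QReadLocker>
    #include <QReadWriteLock>
    #include <QWriteLocker>

    class GuardedFlag {
    public:
        void set(bool value) {
            QWriteLocker lock(&_lock);   // exclusive: blocks all readers and writers
            _value = value;
        }
        bool get() const {
            QReadLocker lock(&_lock);    // shared: many readers may hold this at once
            return _value;
        }
    private:
        mutable QReadWriteLock _lock;
        bool _value{ true };
    };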
|
||||
|
||||
Overlay::Pointer Overlays::getOverlay(unsigned int id) const {
|
||||
if (_overlaysHUD.contains(id)) {
|
||||
return _overlaysHUD[id];
|
||||
|
@ -323,6 +333,9 @@ unsigned int Overlays::getOverlayAtPoint(const glm::vec2& point) {
|
|||
}
|
||||
|
||||
QReadLocker lock(&_lock);
|
||||
if (!_enabled) {
|
||||
return 0;
|
||||
}
|
||||
QMapIterator<unsigned int, Overlay::Pointer> i(_overlaysHUD);
|
||||
i.toBack();
|
||||
|
||||
|
|
|
@ -65,6 +65,8 @@ public:
|
|||
void init();
|
||||
void update(float deltatime);
|
||||
void renderHUD(RenderArgs* renderArgs);
|
||||
void disable();
|
||||
void enable();
|
||||
|
||||
Overlay::Pointer getOverlay(unsigned int id) const;
|
||||
OverlayPanel::Pointer getPanel(unsigned int id) const { return _panels[id]; }
|
||||
|
@ -147,6 +149,7 @@ private:
|
|||
QReadWriteLock _lock;
|
||||
QReadWriteLock _deleteLock;
|
||||
QScriptEngine* _scriptEngine;
|
||||
bool _enabled = true;
|
||||
};
|
||||
|
||||
|
||||
|
|
33
interface/ui/temp.qml
Normal file
|
@ -0,0 +1,33 @@
import QtQuick 2.4
import QtQuick.Controls 1.3
import QtQuick.Controls.Styles 1.3


Item {
    implicitHeight: 200
    implicitWidth: 800


    TextArea {
        id: gutter
        anchors.left: parent.left
        anchors.top: parent.top
        anchors.bottom: parent.bottom
        style: TextAreaStyle {
            backgroundColor: "grey"
        }
        width: 16
        text: ">"
        font.family: "Lucida Console"
    }
    TextArea {
        anchors.left: gutter.right
        anchors.top: parent.top
        anchors.bottom: parent.bottom
        anchors.right: parent.right
        text: "undefined"
        font.family: "Lucida Console"

    }
}
|
||||
|
80
libraries/animation/src/ElbowConstraint.cpp
Normal file
|
@ -0,0 +1,80 @@
|
|||
//
|
||||
// ElbowConstraint.cpp
|
||||
//
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "ElbowConstraint.h"
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
#include <GeometryUtil.h>
|
||||
#include <NumericalConstants.h>
|
||||
|
||||
ElbowConstraint::ElbowConstraint() :
|
||||
_referenceRotation(),
|
||||
_minAngle(-PI),
|
||||
_maxAngle(PI)
|
||||
{
|
||||
}
|
||||
|
||||
void ElbowConstraint::setHingeAxis(const glm::vec3& axis) {
|
||||
float axisLength = glm::length(axis);
|
||||
assert(axisLength > EPSILON);
|
||||
_axis = axis / axisLength;
|
||||
|
||||
// for later we need a second axis that is perpendicular to the first
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
float component = _axis[i];
|
||||
const float MIN_LARGEST_NORMALIZED_VECTOR_COMPONENT = 0.57735f; // just under 1/sqrt(3)
|
||||
if (fabsf(component) > MIN_LARGEST_NORMALIZED_VECTOR_COMPONENT) {
|
||||
int j = (i + 1) % 3;
|
||||
int k = (j + 1) % 3;
|
||||
_perpAxis[i] = - _axis[j];
|
||||
_perpAxis[j] = component;
|
||||
_perpAxis[k] = 0.0f;
|
||||
_perpAxis = glm::normalize(_perpAxis);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
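The loop above builds _perpAxis by finding the largest component of the (normalized) hinge axis and swapping it with a neighbour. A hedged, standalone version of the same construction, with an illustrative name:

    #include <cmath>
    #include <glm/glm.hpp>

    glm::vec3 anyPerpendicular(const glm::vec3& unitAxis) {
        const float MIN_LARGEST_NORMALIZED_VECTOR_COMPONENT = 0.57735f; // just under 1/sqrt(3)
        glm::vec3 perp(0.0f);
        for (int i = 0; i < 3; ++i) {
            if (fabsf(unitAxis[i]) > MIN_LARGEST_NORMALIZED_VECTOR_COMPONENT) {
                int j = (i + 1) % 3;
                perp[i] = -unitAxis[j];   // dot(perp, unitAxis) = -axis[i]*axis[j] + axis[j]*axis[i] = 0
                perp[j] = unitAxis[i];
                break;
            }
        }
        return glm::normalize(perp);      // non-zero because |unitAxis[i]| > 0.577
    }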
|
||||
|
||||
void ElbowConstraint::setAngleLimits(float minAngle, float maxAngle) {
|
||||
// NOTE: min/maxAngle angles should be in the range [-PI, PI]
|
||||
_minAngle = glm::min(minAngle, maxAngle);
|
||||
_maxAngle = glm::max(minAngle, maxAngle);
|
||||
}
|
||||
|
||||
bool ElbowConstraint::apply(glm::quat& rotation) const {
|
||||
// decompose the rotation into swing and twist
|
||||
// NOTE: rotation = postRotation * referenceRotation
|
||||
glm::quat postRotation = rotation * glm::inverse(_referenceRotation);
|
||||
glm::quat swingRotation, twistRotation;
|
||||
swingTwistDecomposition(postRotation, _axis, swingRotation, twistRotation);
|
||||
// NOTE: postRotation = swingRotation * twistRotation
|
||||
|
||||
// compute twistAngle
|
||||
float twistAngle = 2.0f * acosf(fabsf(twistRotation.w));
|
||||
glm::vec3 twisted = twistRotation * _perpAxis;
|
||||
twistAngle *= copysignf(1.0f, glm::dot(glm::cross(_perpAxis, twisted), _axis));
|
||||
|
||||
// clamp twistAngle
|
||||
float clampedTwistAngle = glm::clamp(twistAngle, _minAngle, _maxAngle);
|
||||
bool twistWasClamped = (twistAngle != clampedTwistAngle);
|
||||
|
||||
// update rotation
|
||||
const float MIN_SWING_REAL_PART = 0.99999f;
|
||||
if (twistWasClamped || fabsf(swingRotation.w) < MIN_SWING_REAL_PART) {
|
||||
if (twistWasClamped) {
|
||||
twistRotation = glm::angleAxis(clampedTwistAngle, _axis);
|
||||
}
|
||||
// we discard all swing and only keep twist
|
||||
rotation = twistRotation * _referenceRotation;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
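A hedged usage sketch of the constraint (the joint values are made up; the calls match the header below):

    ElbowConstraint elbow;
    elbow.setReferenceRotation(glm::quat());                    // identity reference pose
    elbow.setHingeAxis(glm::vec3(1.0f, 0.0f, 0.0f));            // hinge about the local x-axis
    elbow.setAngleLimits(0.0f, 2.5f);                           // radians
    glm::quat candidate = glm::angleAxis(3.0f, glm::vec3(1.0f, 0.0f, 0.0f));
    bool clamped = elbow.apply(candidate);                      // true: 3.0 rad is pulled back to 2.5 rad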
|
||||
|
30
libraries/animation/src/ElbowConstraint.h
Normal file
|
@ -0,0 +1,30 @@
//
// ElbowConstraint.h
//
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_ElbowConstraint_h
#define hifi_ElbowConstraint_h

#include "RotationConstraint.h"

class ElbowConstraint : public RotationConstraint {
public:
    ElbowConstraint();
    virtual void setReferenceRotation(const glm::quat& rotation) override { _referenceRotation = rotation; }
    void setHingeAxis(const glm::vec3& axis);
    void setAngleLimits(float minAngle, float maxAngle);
    virtual bool apply(glm::quat& rotation) const override;
protected:
    glm::quat _referenceRotation;
    glm::vec3 _axis;
    glm::vec3 _perpAxis;
    float _minAngle;
    float _maxAngle;
};

#endif // hifi_ElbowConstraint_h
|
|
@ -230,11 +230,13 @@ void JointState::setRotationInConstrainedFrame(glm::quat targetRotation, float p
|
|||
}
|
||||
|
||||
void JointState::setRotationInConstrainedFrameInternal(const glm::quat& targetRotation) {
|
||||
glm::quat parentRotation = computeParentRotation();
|
||||
_rotationInConstrainedFrame = targetRotation;
|
||||
_transformChanged = true;
|
||||
// R' = Rp * Rpre * r' * Rpost
|
||||
_rotation = parentRotation * _preRotation * _rotationInConstrainedFrame * _postRotation;
|
||||
if (_rotationInConstrainedFrame != targetRotation) {
|
||||
glm::quat parentRotation = computeParentRotation();
|
||||
_rotationInConstrainedFrame = targetRotation;
|
||||
_transformChanged = true;
|
||||
// R' = Rp * Rpre * r' * Rpost
|
||||
_rotation = parentRotation * _preRotation * _rotationInConstrainedFrame * _postRotation;
|
||||
}
|
||||
}
|
||||
|
||||
void JointState::setVisibleRotationInConstrainedFrame(const glm::quat& targetRotation) {
|
||||
|
|
|
@ -27,6 +27,10 @@ void Rig::HeadParameters::dump() const {
|
|||
axis = glm::axis(worldHeadOrientation);
|
||||
theta = glm::angle(worldHeadOrientation);
|
||||
qCDebug(animation, " worldHeadOrientation axis = (%.5f, %.5f, %.5f), theta = %0.5f", axis.x, axis.y, axis.z, theta);
|
||||
axis = glm::axis(modelRotation);
|
||||
theta = glm::angle(modelRotation);
|
||||
qCDebug(animation, " modelRotation axis = (%.5f, %.5f, %.5f), theta = %0.5f", axis.x, axis.y, axis.z, theta);
|
||||
qCDebug(animation, " modelTranslation = (%.5f, %.5f, %.5f)", modelTranslation.x, modelTranslation.y, modelTranslation.z);
|
||||
qCDebug(animation, " eyeLookAt = (%.5f, %.5f, %.5f)", eyeLookAt.x, eyeLookAt.y, eyeLookAt.z);
|
||||
qCDebug(animation, " eyeSaccade = (%.5f, %.5f, %.5f)", eyeSaccade.x, eyeSaccade.y, eyeSaccade.z);
|
||||
qCDebug(animation, " leanJointIndex = %d", leanJointIndex);
|
||||
|
@ -782,8 +786,8 @@ glm::quat Rig::getJointDefaultRotationInParentFrame(int jointIndex) {
|
|||
void Rig::updateFromHeadParameters(const HeadParameters& params) {
|
||||
updateLeanJoint(params.leanJointIndex, params.leanSideways, params.leanForward, params.torsoTwist);
|
||||
updateNeckJoint(params.neckJointIndex, params.localHeadOrientation, params.leanSideways, params.leanForward, params.torsoTwist);
|
||||
updateEyeJoint(params.leftEyeJointIndex, params.worldHeadOrientation, params.eyeLookAt, params.eyeSaccade);
|
||||
updateEyeJoint(params.rightEyeJointIndex, params.worldHeadOrientation, params.eyeLookAt, params.eyeSaccade);
|
||||
updateEyeJoint(params.leftEyeJointIndex, params.modelTranslation, params.modelRotation, params.worldHeadOrientation, params.eyeLookAt, params.eyeSaccade);
|
||||
updateEyeJoint(params.rightEyeJointIndex, params.modelTranslation, params.modelRotation, params.worldHeadOrientation, params.eyeLookAt, params.eyeSaccade);
|
||||
}
|
||||
|
||||
void Rig::updateLeanJoint(int index, float leanSideways, float leanForward, float torsoTwist) {
|
||||
|
@ -824,22 +828,21 @@ void Rig::updateNeckJoint(int index, const glm::quat& localHeadOrientation, floa
|
|||
}
|
||||
}
|
||||
|
||||
void Rig::updateEyeJoint(int index, const glm::quat& worldHeadOrientation, const glm::vec3& lookAt, const glm::vec3& saccade) {
|
||||
void Rig::updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::quat& worldHeadOrientation, const glm::vec3& lookAtSpot, const glm::vec3& saccade) {
|
||||
if (index >= 0 && _jointStates[index].getParentIndex() >= 0) {
|
||||
auto& state = _jointStates[index];
|
||||
auto& parentState = _jointStates[state.getParentIndex()];
|
||||
|
||||
// NOTE: at the moment we do the math in the world-frame, hence the inverse transform is more complex than usual.
|
||||
glm::mat4 inverse = glm::inverse(parentState.getTransform() *
|
||||
glm::translate(getJointDefaultTranslationInConstrainedFrame(index)) *
|
||||
glm::mat4 inverse = glm::inverse(glm::mat4_cast(modelRotation) * parentState.getTransform() *
|
||||
glm::translate(state.getDefaultTranslationInConstrainedFrame()) *
|
||||
state.getPreTransform() * glm::mat4_cast(state.getPreRotation() * state.getDefaultRotation()));
|
||||
glm::vec3 front = glm::vec3(inverse * glm::vec4(worldHeadOrientation * IDENTITY_FRONT, 0.0f));
|
||||
glm::vec3 lookAtDelta = lookAt;
|
||||
glm::vec3 lookAtDelta = lookAtSpot - modelTranslation;
|
||||
glm::vec3 lookAt = glm::vec3(inverse * glm::vec4(lookAtDelta + glm::length(lookAtDelta) * saccade, 1.0f));
|
||||
glm::quat between = rotationBetween(front, lookAt);
|
||||
const float MAX_ANGLE = 30.0f * RADIANS_PER_DEGREE;
|
||||
float angle = glm::clamp(glm::angle(between), -MAX_ANGLE, MAX_ANGLE);
|
||||
glm::quat rot = glm::angleAxis(angle, glm::axis(between));
|
||||
setJointRotationInConstrainedFrame(index, rot * state.getDefaultRotation(), DEFAULT_PRIORITY);
|
||||
state.setRotationInConstrainedFrame(glm::angleAxis(glm::clamp(glm::angle(between), -MAX_ANGLE, MAX_ANGLE), glm::axis(between)) *
|
||||
state.getDefaultRotation(), DEFAULT_PRIORITY);
|
||||
}
|
||||
}
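The last two statements compute the rotation that takes the eye's forward vector onto the look-at direction and clamp it to 30 degrees. A hedged helper-style rewrite of that step (name and signature are illustrative; rotationBetween() is the same helper used above):

    glm::quat limitedRotationBetween(const glm::vec3& front, const glm::vec3& lookAt, float maxAngle) {
        glm::quat between = rotationBetween(front, lookAt);                  // full rotation front -> lookAt
        float angle = glm::clamp(glm::angle(between), -maxAngle, maxAngle);  // cap the rotation angle
        return glm::angleAxis(angle, glm::axis(between));                    // rebuild about the same axis
    }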
|
||||
|
|
|
@ -54,10 +54,12 @@ public:
|
|||
float leanSideways = 0.0f; // degrees
|
||||
float leanForward = 0.0f; // degrees
|
||||
float torsoTwist = 0.0f; // degrees
|
||||
glm::quat modelRotation = glm::quat();
|
||||
glm::quat localHeadOrientation = glm::quat();
|
||||
glm::quat worldHeadOrientation = glm::quat();
|
||||
glm::vec3 eyeLookAt = glm::vec3(); // world space
|
||||
glm::vec3 eyeSaccade = glm::vec3(); // world space
|
||||
glm::vec3 modelTranslation = glm::vec3();
|
||||
int leanJointIndex = -1;
|
||||
int neckJointIndex = -1;
|
||||
int leftEyeJointIndex = -1;
|
||||
|
@ -163,7 +165,7 @@ public:
|
|||
|
||||
void updateLeanJoint(int index, float leanSideways, float leanForward, float torsoTwist);
|
||||
void updateNeckJoint(int index, const glm::quat& localHeadOrientation, float leanSideways, float leanForward, float torsoTwist);
|
||||
void updateEyeJoint(int index, const glm::quat& worldHeadOrientation, const glm::vec3& lookAt, const glm::vec3& saccade);
|
||||
void updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::quat& worldHeadOrientation, const glm::vec3& lookAt, const glm::vec3& saccade);
|
||||
|
||||
QVector<JointState> _jointStates;
|
||||
int _rootJointIndex = -1;
|
||||
|
|
29
libraries/animation/src/RotationConstraint.h
Normal file
|
@ -0,0 +1,29 @@
//
// RotationConstraint.h
//
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_RotationConstraint_h
#define hifi_RotationConstraint_h

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

class RotationConstraint {
public:
    RotationConstraint() {}
    virtual ~RotationConstraint() {}

    /// \param rotation the default rotation from which rotation changes are measured
    virtual void setReferenceRotation(const glm::quat& rotation) = 0;

    /// \param rotation rotation to clamp
    /// \return true if rotation is clamped
    virtual bool apply(glm::quat& rotation) const = 0;
};

#endif // hifi_RotationConstraint_h
|
128
libraries/animation/src/SwingTwistConstraint.cpp
Normal file
|
@ -0,0 +1,128 @@
|
|||
//
|
||||
// SwingTwistConstraint.cpp
|
||||
//
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "SwingTwistConstraint.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <math.h>
|
||||
|
||||
#include <GeometryUtil.h>
|
||||
#include <NumericalConstants.h>
|
||||
|
||||
|
||||
const float MIN_MINDOT = -0.999f;
|
||||
const float MAX_MINDOT = 1.0f;
|
||||
|
||||
SwingTwistConstraint::SwingLimitFunction::SwingLimitFunction() {
|
||||
setCone(PI);
|
||||
}
|
||||
|
||||
void SwingTwistConstraint::SwingLimitFunction::setCone(float maxAngle) {
|
||||
_minDots.clear();
|
||||
float minDot = glm::clamp(cosf(maxAngle), MIN_MINDOT, MAX_MINDOT);
|
||||
_minDots.push_back(minDot);
|
||||
// push the first value to the back to establish cyclic boundary conditions
|
||||
_minDots.push_back(minDot);
|
||||
}
|
||||
|
||||
void SwingTwistConstraint::SwingLimitFunction::setMinDots(const std::vector<float>& minDots) {
|
||||
int numDots = minDots.size();
|
||||
_minDots.clear();
|
||||
_minDots.reserve(numDots);
|
||||
for (int i = 0; i < numDots; ++i) {
|
||||
_minDots.push_back(glm::clamp(minDots[i], MIN_MINDOT, MAX_MINDOT));
|
||||
}
|
||||
// push the first value to the back to establish cyclic boundary conditions
|
||||
_minDots.push_back(_minDots[0]);
|
||||
}
|
||||
|
||||
float SwingTwistConstraint::SwingLimitFunction::getMinDot(float theta) const {
|
||||
// extract the positive normalized fractional part of theta
|
||||
float integerPart;
|
||||
float normalizedTheta = modff(theta / TWO_PI, &integerPart);
|
||||
if (normalizedTheta < 0.0f) {
|
||||
normalizedTheta += 1.0f;
|
||||
}
|
||||
|
||||
// interpolate between the two nearest points in the cycle
|
||||
float fractionPart = modff(normalizedTheta * (float)(_minDots.size() - 1), &integerPart);
|
||||
int i = (int)(integerPart);
|
||||
int j = (i + 1) % _minDots.size();
|
||||
return _minDots[i] * (1.0f - fractionPart) + _minDots[j] * fractionPart;
|
||||
}
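A hedged example of how the cyclic lookup behaves (values made up). With four entries the stored table becomes {0.5, 0.0, -0.5, 0.0, 0.5} because the first value is repeated at the back, so angles just below 2*PI interpolate back toward the first entry instead of reading past the end:

    SwingTwistConstraint::SwingLimitFunction limits;
    limits.setMinDots({ 0.5f, 0.0f, -0.5f, 0.0f });
    float atZero = limits.getMinDot(0.0f);             // 0.5  (first table entry)
    float wrapped = limits.getMinDot(TWO_PI);          // 0.5  (wraps to the same entry)
    float halfway = limits.getMinDot(PI / 4.0f);       // 0.25 (midway between 0.5 and 0.0)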
|
||||
|
||||
SwingTwistConstraint::SwingTwistConstraint() :
|
||||
_swingLimitFunction(),
|
||||
_referenceRotation(),
|
||||
_minTwist(-PI),
|
||||
_maxTwist(PI)
|
||||
{
|
||||
}
|
||||
|
||||
void SwingTwistConstraint::setSwingLimits(std::vector<float> minDots) {
|
||||
_swingLimitFunction.setMinDots(minDots);
|
||||
}
|
||||
|
||||
void SwingTwistConstraint::setTwistLimits(float minTwist, float maxTwist) {
|
||||
// NOTE: min/maxTwist angles should be in the range [-PI, PI]
|
||||
_minTwist = glm::min(minTwist, maxTwist);
|
||||
_maxTwist = glm::max(minTwist, maxTwist);
|
||||
}
|
||||
|
||||
bool SwingTwistConstraint::apply(glm::quat& rotation) const {
|
||||
|
||||
// decompose the rotation into first twist about yAxis, then swing about something perp
|
||||
const glm::vec3 yAxis(0.0f, 1.0f, 0.0f);
|
||||
// NOTE: rotation = postRotation * referenceRotation
|
||||
glm::quat postRotation = rotation * glm::inverse(_referenceRotation);
|
||||
glm::quat swingRotation, twistRotation;
|
||||
swingTwistDecomposition(postRotation, yAxis, swingRotation, twistRotation);
|
||||
// NOTE: postRotation = swingRotation * twistRotation
|
||||
|
||||
// compute twistAngle
|
||||
float twistAngle = 2.0f * acosf(fabsf(twistRotation.w));
|
||||
const glm::vec3 xAxis = glm::vec3(1.0f, 0.0f, 0.0f);
|
||||
glm::vec3 twistedX = twistRotation * xAxis;
|
||||
twistAngle *= copysignf(1.0f, glm::dot(glm::cross(xAxis, twistedX), yAxis));
|
||||
|
||||
// clamp twistAngle
|
||||
float clampedTwistAngle = glm::clamp(twistAngle, _minTwist, _maxTwist);
|
||||
bool twistWasClamped = (twistAngle != clampedTwistAngle);
|
||||
|
||||
// clamp the swing
|
||||
// The swingAxis is always perpendicular to the reference axis (yAxis in the constraint's frame).
|
||||
glm::vec3 swungY = swingRotation * yAxis;
|
||||
glm::vec3 swingAxis = glm::cross(yAxis, swungY);
|
||||
bool swingWasClamped = false;
|
||||
float axisLength = glm::length(swingAxis);
|
||||
if (axisLength > EPSILON) {
|
||||
// The limit of swing is a function of "theta" which can be computed from the swingAxis
|
||||
// (which is in the constraint's ZX plane).
|
||||
float theta = atan2f(-swingAxis.z, swingAxis.x);
|
||||
float minDot = _swingLimitFunction.getMinDot(theta);
|
||||
if (glm::dot(swungY, yAxis) < minDot) {
|
||||
// The swing limits are violated so we use the maxAngle to supply a new rotation.
|
||||
float maxAngle = acosf(minDot);
|
||||
if (minDot < 0.0f) {
|
||||
maxAngle = PI - maxAngle;
|
||||
}
|
||||
swingAxis /= axisLength;
|
||||
swingRotation = glm::angleAxis(maxAngle, swingAxis);
|
||||
swingWasClamped = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (swingWasClamped || twistWasClamped) {
|
||||
twistRotation = glm::angleAxis(clampedTwistAngle, yAxis);
|
||||
rotation = swingRotation * twistRotation * _referenceRotation;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
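A hedged usage sketch (values illustrative): a shoulder-like joint allowed 90 degrees of swing in every direction and plus or minus 45 degrees of twist about its local y-axis:

    SwingTwistConstraint shoulder;
    shoulder.setReferenceRotation(glm::quat());
    shoulder.setSwingLimits(std::vector<float>(8, cosf(PI / 2.0f)));   // uniform 90 degree swing cone
    shoulder.setTwistLimits(-PI / 4.0f, PI / 4.0f);                    // +/- 45 degrees of twist
    glm::quat q = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));     // a 180 degree twist about y
    bool changed = shoulder.apply(q);                                  // true: the twist is clamped to PI/4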
|
||||
|
79
libraries/animation/src/SwingTwistConstraint.h
Normal file
|
@ -0,0 +1,79 @@
|
|||
//
|
||||
// SwingTwistConstraint.h
|
||||
//
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_SwingTwistConstraint_h
|
||||
#define hifi_SwingTwistConstraint_h
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "RotationConstraint.h"
|
||||
|
||||
#include <math.h>
|
||||
|
||||
class SwingTwistConstraint : RotationConstraint {
|
||||
public:
|
||||
// The SwingTwistConstraint starts in the "referenceRotation" and then measures an initial twist
|
||||
// about the yAxis followed by a swing about some axis that lies in the XZ plane, such that the twist
|
||||
// and swing combine to produce the rotation. Each partial rotation is constrained within limits
|
||||
// then used to construct the new final rotation.
|
||||
|
||||
SwingTwistConstraint();
|
||||
|
||||
/// \param referenceRotation the rotation from which rotation changes are measured.
|
||||
virtual void setReferenceRotation(const glm::quat& referenceRotation) override { _referenceRotation = referenceRotation; }
|
||||
|
||||
/// \param minDots vector of minimum dot products between the twist and swung axes.
|
||||
/// \brief The values are minimum dot-products between the twist axis and the swung axes
|
||||
/// that correspond to swing axes equally spaced around the XZ plane. Another way to
|
||||
/// think about it is that the dot-products correspond to angles (theta)
|
||||
/// about the twist axis ranging from 0 to 2PI-deltaTheta (Note: the cyclic boundary
|
||||
/// conditions are handled internally, so don't duplicate the dot-product at 2PI).
|
||||
/// See the paper by Quang Liu and Edmond C. Prakash mentioned below for a more detailed
|
||||
/// description of how this works.
|
||||
void setSwingLimits(std::vector<float> minDots);
|
||||
|
||||
/// \param minTwist the minimum angle of rotation about the twist axis
|
||||
/// \param maxTwist the maximum angle of rotation about the twist axis
|
||||
void setTwistLimits(float minTwist, float maxTwist);
|
||||
|
||||
/// \param rotation[in/out] the current rotation to be modified to fit within constraints
|
||||
/// \return true if rotation is changed
|
||||
virtual bool apply(glm::quat& rotation) const override;
|
||||
|
||||
// SwingLimitFunction is an implementation of the constraint check described in the paper:
|
||||
// "The Parameterization of Joint Rotation with the Unit Quaternion" by Quang Liu and Edmond C. Prakash
|
||||
class SwingLimitFunction {
|
||||
public:
|
||||
SwingLimitFunction();
|
||||
|
||||
/// \brief use a uniform conical swing limit
|
||||
void setCone(float maxAngle);
|
||||
|
||||
/// \brief use a vector of lookup values for swing limits
|
||||
void setMinDots(const std::vector<float>& minDots);
|
||||
|
||||
/// \return minimum dotProduct between reference and swung axes
|
||||
float getMinDot(float theta) const;
|
||||
|
||||
protected:
|
||||
// the limits are stored in a lookup table with cyclic boundary conditions
|
||||
std::vector<float> _minDots;
|
||||
};
|
||||
|
||||
/// \return reference to SwingLimitFunction instance for unit-testing
|
||||
const SwingLimitFunction& getSwingLimitFunction() const { return _swingLimitFunction; }
|
||||
|
||||
protected:
|
||||
SwingLimitFunction _swingLimitFunction;
|
||||
glm::quat _referenceRotation;
|
||||
float _minTwist;
|
||||
float _maxTwist;
|
||||
};
|
||||
|
||||
#endif // hifi_SwingTwistConstraint_h
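Both constraints lean on a swingTwistDecomposition() helper that is not part of this change (it comes from GeometryUtil). For orientation, one common way to implement such a decomposition is sketched below, assuming a normalized twist axis and a non-degenerate twist component; this is illustrative only, not the project's implementation:

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    void swingTwistDecompositionSketch(const glm::quat& rotation, const glm::vec3& twistAxis,
                                       glm::quat& swing, glm::quat& twist) {
        glm::vec3 r(rotation.x, rotation.y, rotation.z);
        glm::vec3 p = glm::dot(r, twistAxis) * twistAxis;             // vector part projected onto the axis
        twist = glm::normalize(glm::quat(rotation.w, p.x, p.y, p.z)); // rotation purely about twistAxis
        swing = rotation * glm::inverse(twist);                       // so that rotation == swing * twist
    }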
|
|
@ -192,7 +192,7 @@ public:
|
|||
void setBodyRoll(float bodyRoll) { _bodyRoll = bodyRoll; }
|
||||
|
||||
glm::quat getOrientation() const;
|
||||
void setOrientation(const glm::quat& orientation, bool overideReferential = false);
|
||||
virtual void setOrientation(const glm::quat& orientation, bool overideReferential = false);
|
||||
|
||||
glm::quat getHeadOrientation() const { return _headData->getOrientation(); }
|
||||
void setHeadOrientation(const glm::quat& orientation) { _headData->setOrientation(orientation); }
|
||||
|
|
34
libraries/display-plugins/CMakeLists.txt
Normal file
|
@ -0,0 +1,34 @@
|
|||
set(TARGET_NAME display-plugins)
|
||||
|
||||
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
|
||||
setup_hifi_library(OpenGL)
|
||||
|
||||
setup_hifi_opengl()
|
||||
|
||||
link_hifi_libraries(shared plugins gpu render-utils)
|
||||
|
||||
GroupSources("src/display-plugins")
|
||||
|
||||
add_dependency_external_projects(glm)
|
||||
find_package(GLM REQUIRED)
|
||||
target_include_directories(${TARGET_NAME} PUBLIC ${GLM_INCLUDE_DIRS})
|
||||
|
||||
add_dependency_external_projects(boostconfig)
|
||||
find_package(BoostConfig REQUIRED)
|
||||
target_include_directories(${TARGET_NAME} PUBLIC ${BOOSTCONFIG_INCLUDE_DIRS})
|
||||
|
||||
add_dependency_external_projects(oglplus)
|
||||
find_package(OGLPLUS REQUIRED)
|
||||
target_include_directories(${TARGET_NAME} PUBLIC ${OGLPLUS_INCLUDE_DIRS})
|
||||
|
||||
add_dependency_external_projects(LibOVR)
|
||||
find_package(LibOVR REQUIRED)
|
||||
target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVR_INCLUDE_DIRS})
|
||||
target_link_libraries(${TARGET_NAME} ${LIBOVR_LIBRARIES})
|
||||
|
||||
if (WIN32)
|
||||
add_dependency_external_projects(OpenVR)
|
||||
find_package(OpenVR REQUIRED)
|
||||
target_include_directories(${TARGET_NAME} PRIVATE ${OPENVR_INCLUDE_DIRS})
|
||||
target_link_libraries(${TARGET_NAME} ${OPENVR_LIBRARIES})
|
||||
endif()
|
|
@ -0,0 +1,36 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "Basic2DWindowOpenGLDisplayPlugin.h"
|
||||
|
||||
#include <plugins/PluginContainer.h>
|
||||
#include <QWindow>
|
||||
|
||||
const QString Basic2DWindowOpenGLDisplayPlugin::NAME("2D Display");
|
||||
|
||||
const QString MENU_PARENT = "View";
|
||||
const QString MENU_NAME = "Display Options";
|
||||
const QString MENU_PATH = MENU_PARENT + ">" + MENU_NAME;
|
||||
const QString FULLSCREEN = "Fullscreen";
|
||||
|
||||
const QString& Basic2DWindowOpenGLDisplayPlugin::getName() const {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
void Basic2DWindowOpenGLDisplayPlugin::activate() {
|
||||
// container->addMenu(MENU_PATH);
|
||||
// container->addMenuItem(MENU_PATH, FULLSCREEN,
|
||||
// [this] (bool clicked) { this->setFullscreen(clicked); },
|
||||
// true, false);
|
||||
MainWindowOpenGLDisplayPlugin::activate();
|
||||
}
|
||||
|
||||
void Basic2DWindowOpenGLDisplayPlugin::deactivate() {
|
||||
// container->removeMenuItem(MENU_NAME, FULLSCREEN);
|
||||
// container->removeMenu(MENU_PATH);
|
||||
MainWindowOpenGLDisplayPlugin::deactivate();
|
||||
}
|
|
@ -0,0 +1,23 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "MainWindowOpenGLDisplayPlugin.h"
|
||||
|
||||
class Basic2DWindowOpenGLDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
virtual void activate() override;
|
||||
virtual void deactivate() override;
|
||||
|
||||
virtual const QString & getName() const override;
|
||||
|
||||
private:
|
||||
static const QString NAME;
|
||||
};
|
|
@ -0,0 +1,52 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "DisplayPlugin.h"
|
||||
|
||||
#include <plugins/PluginManager.h>
|
||||
|
||||
#include "NullDisplayPlugin.h"
|
||||
#include "stereo/SideBySideStereoDisplayPlugin.h"
|
||||
#include "stereo/InterleavedStereoDisplayPlugin.h"
|
||||
#include "Basic2DWindowOpenGLDisplayPlugin.h"
|
||||
|
||||
#include "openvr/OpenVrDisplayPlugin.h"
|
||||
#include "oculus/Oculus_0_5_DisplayPlugin.h"
|
||||
#include "oculus/Oculus_0_6_DisplayPlugin.h"
|
||||
|
||||
// TODO migrate to a DLL model where plugins are discovered and loaded at runtime by the PluginManager class
|
||||
DisplayPluginList getDisplayPlugins() {
|
||||
DisplayPlugin* PLUGIN_POOL[] = {
|
||||
new Basic2DWindowOpenGLDisplayPlugin(),
|
||||
#ifdef DEBUG
|
||||
new NullDisplayPlugin(),
|
||||
#endif
|
||||
|
||||
// Stereo modes
|
||||
// FIXME fix stereo display plugins
|
||||
//new SideBySideStereoDisplayPlugin(),
|
||||
//new InterleavedStereoDisplayPlugin(),
|
||||
|
||||
// HMDs
|
||||
new Oculus_0_5_DisplayPlugin(),
|
||||
new Oculus_0_6_DisplayPlugin(),
|
||||
#ifdef Q_OS_WIN
|
||||
new OpenVrDisplayPlugin(),
|
||||
#endif
|
||||
nullptr
|
||||
};
|
||||
|
||||
DisplayPluginList result;
|
||||
for (int i = 0; PLUGIN_POOL[i]; ++i) {
|
||||
DisplayPlugin * plugin = PLUGIN_POOL[i];
|
||||
if (plugin->isSupported()) {
|
||||
plugin->init();
|
||||
result.push_back(DisplayPluginPointer(plugin));
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
128
libraries/display-plugins/src/display-plugins/DisplayPlugin.h
Normal file
|
@ -0,0 +1,128 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "plugins/Plugin.h"
|
||||
|
||||
#include <QSize>
|
||||
#include <QPoint>
|
||||
#include <functional>
|
||||
|
||||
#include "gpu/GPUConfig.h"
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
#include <glm/gtc/quaternion.hpp>
|
||||
#include <RegisteredMetaTypes.h>
|
||||
|
||||
enum Eye {
|
||||
Left,
|
||||
Right,
|
||||
Mono
|
||||
};
|
||||
|
||||
/*
|
||||
* Helper method to iterate over each eye
|
||||
*/
|
||||
template <typename F>
|
||||
void for_each_eye(F f) {
|
||||
f(Left);
|
||||
f(Right);
|
||||
}
|
||||
|
||||
/*
|
||||
* Helper method to iterate over each eye, with an additional lambda to take action between the eyes
|
||||
*/
|
||||
template <typename F, typename FF>
|
||||
void for_each_eye(F f, FF ff) {
|
||||
f(Eye::Left);
|
||||
ff();
|
||||
f(Eye::Right);
|
||||
}
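A hedged usage sketch of the two helpers (the vector is only there to make the visiting order visible):

    #include <vector>

    std::vector<Eye> order;
    for_each_eye([&](Eye eye) { order.push_back(eye); },
                 [&] { /* runs exactly once, between the Left and Right visits */ });
    // order now holds { Eye::Left, Eye::Right }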
|
||||
|
||||
class QWindow;
|
||||
|
||||
class DisplayPlugin : public Plugin {
|
||||
Q_OBJECT
|
||||
public:
|
||||
virtual bool isHmd() const { return false; }
|
||||
virtual int getHmdScreen() const { return -1; }
|
||||
/// By default, all HMDs are stereo
|
||||
virtual bool isStereo() const { return isHmd(); }
|
||||
virtual bool isThrottled() const { return false; }
|
||||
|
||||
// Rendering support
|
||||
|
||||
/**
|
||||
* Called by the application before the frame rendering. Can be used for
|
||||
* render timing related calls (for instance, the Oculus begin frame timing
|
||||
* call)
|
||||
*/
|
||||
virtual void preRender() = 0;
|
||||
/**
|
||||
* Called by the application immediately before calling the display function.
|
||||
* For OpenGL based plugins, this is the best place to activate the output
|
||||
* OpenGL context
|
||||
*/
|
||||
virtual void preDisplay() = 0;
|
||||
|
||||
/**
|
||||
* Sends the scene texture to the display plugin.
|
||||
*/
|
||||
virtual void display(GLuint sceneTexture, const glm::uvec2& sceneSize) = 0;
|
||||
|
||||
/**
|
||||
* Called by the application immediately after display. For OpenGL based
|
||||
* displays, this is the best place to put the buffer swap
|
||||
*/
|
||||
virtual void finishFrame() = 0;
|
||||
|
||||
// Does the rendering surface have current focus?
|
||||
virtual bool hasFocus() const = 0;
|
||||
|
||||
// The size of the rendering target (may be larger than the device size due to distortion)
|
||||
virtual glm::uvec2 getRecommendedRenderSize() const = 0;
|
||||
|
||||
// The size of the UI
|
||||
virtual glm::uvec2 getRecommendedUiSize() const {
|
||||
return getRecommendedRenderSize();
|
||||
}
|
||||
|
||||
// Stereo specific methods
|
||||
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const {
|
||||
return baseProjection;
|
||||
}
|
||||
|
||||
virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const {
|
||||
return glm::inverse(getEyePose(eye)) * baseModelview;
|
||||
}
|
||||
|
||||
// HMD specific methods
|
||||
// TODO move these into another class
|
||||
virtual glm::mat4 getEyePose(Eye eye) const {
|
||||
static const glm::mat4 pose; return pose;
|
||||
}
|
||||
|
||||
virtual glm::mat4 getHeadPose() const {
|
||||
static const glm::mat4 pose; return pose;
|
||||
}
|
||||
|
||||
virtual void abandonCalibration() {}
|
||||
virtual void resetSensors() {}
|
||||
virtual float devicePixelRatio() { return 1.0; }
|
||||
|
||||
//// The window for the surface, used for event interception. May be null.
|
||||
//virtual QWindow* getWindow() const = 0;
|
||||
|
||||
//virtual void installEventFilter(QObject* filter) {}
|
||||
//virtual void removeEventFilter(QObject* filter) {}
|
||||
|
||||
signals:
|
||||
void recommendedFramebufferSizeChanged(const QSize & size);
|
||||
void requestRender();
|
||||
};
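Taken together, the doc comments above describe a per-frame hand-off from the application to the plugin. A hedged sketch of that call order (the function and parameter names are placeholders, not an API introduced by this change):

    void renderOneFrame(DisplayPlugin* plugin, GLuint sceneTexture, const glm::uvec2& sceneSize) {
        plugin->preRender();                       // e.g. HMD begin-frame timing
        // ... the application renders the scene into sceneTexture here ...
        plugin->preDisplay();                      // make the plugin's output GL context current
        plugin->display(sceneTexture, sceneSize);  // hand the composed scene texture to the plugin
        plugin->finishFrame();                     // swap buffers / submit the frame
    }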
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
|
||||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "MainWindowOpenGLDisplayPlugin.h"
|
|
@ -0,0 +1,13 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "WindowOpenGLDisplayPlugin.h"
|
||||
|
||||
class MainWindowOpenGLDisplayPlugin : public WindowOpenGLDisplayPlugin {
|
||||
};
|
|
@ -0,0 +1,32 @@
|
|||
//
|
||||
// NullDisplayPlugin.cpp
|
||||
//
|
||||
// Created by Bradley Austin Davis on 2014/04/13.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "NullDisplayPlugin.h"
|
||||
|
||||
const QString NullDisplayPlugin::NAME("NullDisplayPlugin");
|
||||
|
||||
const QString & NullDisplayPlugin::getName() const {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
glm::uvec2 NullDisplayPlugin::getRecommendedRenderSize() const {
|
||||
return glm::uvec2(100, 100);
|
||||
}
|
||||
|
||||
bool NullDisplayPlugin::hasFocus() const {
|
||||
return false;
|
||||
}
|
||||
|
||||
void NullDisplayPlugin::preRender() {}
|
||||
void NullDisplayPlugin::preDisplay() {}
|
||||
void NullDisplayPlugin::display(GLuint sceneTexture, const glm::uvec2& sceneSize) {}
|
||||
void NullDisplayPlugin::finishFrame() {}
|
||||
|
||||
void NullDisplayPlugin::activate() {}
|
||||
void NullDisplayPlugin::deactivate() {}
|
|
@ -0,0 +1,30 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "DisplayPlugin.h"
|
||||
|
||||
class NullDisplayPlugin : public DisplayPlugin {
|
||||
public:
|
||||
|
||||
virtual ~NullDisplayPlugin() final {}
|
||||
virtual const QString & getName() const override;
|
||||
|
||||
void activate() override;
|
||||
void deactivate() override;
|
||||
|
||||
virtual glm::uvec2 getRecommendedRenderSize() const override;
|
||||
virtual bool hasFocus() const override;
|
||||
virtual void preRender() override;
|
||||
virtual void preDisplay() override;
|
||||
virtual void display(GLuint sceneTexture, const glm::uvec2& sceneSize) override;
|
||||
virtual void finishFrame() override;
|
||||
|
||||
private:
|
||||
static const QString NAME;
|
||||
};
|
|
@ -0,0 +1,117 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "OpenGLDisplayPlugin.h"
|
||||
|
||||
#include <QOpenGLContext>
|
||||
#include <QCoreApplication>
|
||||
|
||||
#include <GlWindow.h>
|
||||
#include <GLMHelpers.h>
|
||||
|
||||
|
||||
OpenGLDisplayPlugin::OpenGLDisplayPlugin() {
|
||||
connect(&_timer, &QTimer::timeout, this, [&] {
|
||||
emit requestRender();
|
||||
});
|
||||
}
|
||||
|
||||
OpenGLDisplayPlugin::~OpenGLDisplayPlugin() {
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::preDisplay() {
|
||||
makeCurrent();
|
||||
};
|
||||
|
||||
void OpenGLDisplayPlugin::preRender() {
|
||||
// NOOP
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::finishFrame() {
|
||||
swapBuffers();
|
||||
doneCurrent();
|
||||
};
|
||||
|
||||
void OpenGLDisplayPlugin::customizeContext() {
|
||||
using namespace oglplus;
|
||||
Context::BlendFunc(BlendFunction::SrcAlpha, BlendFunction::OneMinusSrcAlpha);
|
||||
Context::Disable(Capability::Blend);
|
||||
Context::Disable(Capability::DepthTest);
|
||||
Context::Disable(Capability::CullFace);
|
||||
|
||||
|
||||
_program = loadDefaultShader();
|
||||
_plane = loadPlane(_program);
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::activate() {
|
||||
_timer.start(1);
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::deactivate() {
|
||||
_timer.stop();
|
||||
|
||||
makeCurrent();
|
||||
Q_ASSERT(0 == glGetError());
|
||||
_program.reset();
|
||||
_plane.reset();
|
||||
doneCurrent();
|
||||
}
|
||||
|
||||
// Pressing Alt (and Meta) key alone activates the menubar because its style inherits the
|
||||
// SH_MenuBar_AltKeyNavigation style hint from QWindowsStyle. This makes it impossible for scripts to
|
||||
// receive keyPress events for the Alt (and Meta) key in a reliable manner.
|
||||
//
|
||||
// This filter catches events before QMenuBar can steal the keyboard focus.
|
||||
// The idea was borrowed from
|
||||
// http://www.archivum.info/qt-interest@trolltech.com/2006-09/00053/Re-(Qt4)-Alt-key-focus-QMenuBar-(solved).html
|
||||
|
||||
// Pass input events on to the application
|
||||
bool OpenGLDisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
|
||||
switch (event->type()) {
|
||||
case QEvent::MouseButtonPress:
|
||||
case QEvent::MouseButtonRelease:
|
||||
case QEvent::MouseButtonDblClick:
|
||||
case QEvent::MouseMove:
|
||||
case QEvent::Wheel:
|
||||
|
||||
case QEvent::TouchBegin:
|
||||
case QEvent::TouchEnd:
|
||||
case QEvent::TouchUpdate:
|
||||
|
||||
case QEvent::FocusIn:
|
||||
case QEvent::FocusOut:
|
||||
|
||||
case QEvent::KeyPress:
|
||||
case QEvent::KeyRelease:
|
||||
case QEvent::ShortcutOverride:
|
||||
|
||||
case QEvent::DragEnter:
|
||||
case QEvent::Drop:
|
||||
|
||||
case QEvent::Resize:
|
||||
if (QCoreApplication::sendEvent(QCoreApplication::instance(), event)) {
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
return false;
|
||||
}
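The filter only sees events once it has been installed on the object that receives them; a hedged sketch of the application-side hookup (the function name is illustrative):

    #include <QWindow>

    void hookUpEventFiltering(QWindow* surfaceWindow, OpenGLDisplayPlugin* plugin) {
        // QObject::installEventFilter routes the window's events through plugin->eventFilter()
        surfaceWindow->installEventFilter(plugin);
    }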
|
||||
|
||||
void OpenGLDisplayPlugin::display(
|
||||
GLuint finalTexture, const glm::uvec2& sceneSize) {
|
||||
using namespace oglplus;
|
||||
uvec2 size = getRecommendedRenderSize();
|
||||
Context::Viewport(size.x, size.y);
|
||||
glBindTexture(GL_TEXTURE_2D, finalTexture);
|
||||
drawUnitQuad();
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::drawUnitQuad() {
|
||||
_program->Bind();
|
||||
_plane->Draw();
|
||||
}
|
|
@ -0,0 +1,45 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include <QTimer>
|
||||
|
||||
#include "DisplayPlugin.h"
|
||||
#include "OglplusHelpers.h"
|
||||
|
||||
class GlWindow;
|
||||
class QOpenGLContext;
|
||||
|
||||
class OpenGLDisplayPlugin : public DisplayPlugin {
|
||||
public:
|
||||
OpenGLDisplayPlugin();
|
||||
virtual ~OpenGLDisplayPlugin();
|
||||
virtual void preRender() override;
|
||||
virtual void preDisplay() override;
|
||||
virtual void finishFrame() override;
|
||||
|
||||
virtual void activate() override;
|
||||
virtual void deactivate() override;
|
||||
|
||||
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
|
||||
|
||||
virtual void display(GLuint sceneTexture, const glm::uvec2& sceneSize) override;
|
||||
|
||||
protected:
|
||||
virtual void customizeContext();
|
||||
virtual void drawUnitQuad();
|
||||
virtual void makeCurrent() = 0;
|
||||
virtual void doneCurrent() = 0;
|
||||
virtual void swapBuffers() = 0;
|
||||
|
||||
QTimer _timer;
|
||||
ProgramPtr _program;
|
||||
ShapeWrapperPtr _plane;
|
||||
};
|
||||
|
||||
|
|
@ -0,0 +1,61 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "WindowOpenGLDisplayPlugin.h"
|
||||
|
||||
#include <QGLWidget>
|
||||
#include <QOpenGLContext>
|
||||
|
||||
#include "plugins/PluginContainer.h"
|
||||
|
||||
WindowOpenGLDisplayPlugin::WindowOpenGLDisplayPlugin() {
|
||||
}
|
||||
|
||||
glm::uvec2 WindowOpenGLDisplayPlugin::getRecommendedRenderSize() const {
|
||||
uvec2 result;
|
||||
if (_window) {
|
||||
result = toGlm(_window->geometry().size() * _window->devicePixelRatio());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
glm::uvec2 WindowOpenGLDisplayPlugin::getRecommendedUiSize() const {
|
||||
uvec2 result;
|
||||
if (_window) {
|
||||
result = toGlm(_window->geometry().size());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
bool WindowOpenGLDisplayPlugin::hasFocus() const {
|
||||
return _window ? _window->hasFocus() : false;
|
||||
}
|
||||
|
||||
void WindowOpenGLDisplayPlugin::activate() {
|
||||
OpenGLDisplayPlugin::activate();
|
||||
_window = CONTAINER->getPrimarySurface();
|
||||
_window->makeCurrent();
|
||||
customizeContext();
|
||||
_window->doneCurrent();
|
||||
}
|
||||
|
||||
void WindowOpenGLDisplayPlugin::deactivate() {
|
||||
OpenGLDisplayPlugin::deactivate();
|
||||
_window = nullptr;
|
||||
}
|
||||
|
||||
void WindowOpenGLDisplayPlugin::makeCurrent() {
|
||||
_window->makeCurrent();
|
||||
}
|
||||
|
||||
void WindowOpenGLDisplayPlugin::doneCurrent() {
|
||||
_window->doneCurrent();
|
||||
}
|
||||
|
||||
void WindowOpenGLDisplayPlugin::swapBuffers() {
|
||||
_window->swapBuffers();
|
||||
}
|
|
@ -0,0 +1,28 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "OpenGLDisplayPlugin.h"
|
||||
|
||||
class QGLWidget;
|
||||
|
||||
class WindowOpenGLDisplayPlugin : public OpenGLDisplayPlugin {
|
||||
public:
|
||||
WindowOpenGLDisplayPlugin();
|
||||
virtual glm::uvec2 getRecommendedRenderSize() const override;
|
||||
virtual glm::uvec2 getRecommendedUiSize() const override;
|
||||
virtual bool hasFocus() const override;
|
||||
virtual void activate() override;
|
||||
virtual void deactivate() override;
|
||||
|
||||
protected:
|
||||
virtual void makeCurrent() override;
|
||||
virtual void doneCurrent() override;
|
||||
virtual void swapBuffers() override;
|
||||
QGLWidget* _window{ nullptr };
|
||||
};
|
|
@ -0,0 +1,76 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "OculusBaseDisplayPlugin.h"
|
||||
|
||||
#include <ViewFrustum.h>
|
||||
|
||||
#include "OculusHelpers.h"
|
||||
|
||||
|
||||
using namespace Oculus;
|
||||
|
||||
void OculusBaseDisplayPlugin::activate() {
|
||||
glm::uvec2 eyeSizes[2];
|
||||
ovr_for_each_eye([&](ovrEyeType eye) {
|
||||
_eyeFovs[eye] = _hmd->MaxEyeFov[eye];
|
||||
ovrEyeRenderDesc& erd = _eyeRenderDescs[eye] = ovrHmd_GetRenderDesc(_hmd, eye, _eyeFovs[eye]);
|
||||
ovrMatrix4f ovrPerspectiveProjection =
|
||||
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
|
||||
_eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
|
||||
|
||||
ovrPerspectiveProjection =
|
||||
ovrMatrix4f_Projection(erd.Fov, 0.001f, 10.0f, ovrProjection_RightHanded);
|
||||
_compositeEyeProjections[eye] = toGlm(ovrPerspectiveProjection);
|
||||
|
||||
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
|
||||
eyeSizes[eye] = toGlm(ovrHmd_GetFovTextureSize(_hmd, eye, erd.Fov, 1.0f));
|
||||
});
|
||||
_desiredFramebufferSize = uvec2(
|
||||
eyeSizes[0].x + eyeSizes[1].x,
|
||||
std::max(eyeSizes[0].y, eyeSizes[1].y));
|
||||
|
||||
_frameIndex = 0;
|
||||
|
||||
if (!OVR_SUCCESS(ovrHmd_ConfigureTracking(_hmd,
|
||||
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
|
||||
qFatal("Could not attach to sensor device");
|
||||
}
|
||||
|
||||
MainWindowOpenGLDisplayPlugin::activate();
|
||||
}
|
||||
|
||||
uvec2 OculusBaseDisplayPlugin::getRecommendedRenderSize() const {
|
||||
return _desiredFramebufferSize;
|
||||
}
|
||||
|
||||
void OculusBaseDisplayPlugin::preRender() {
|
||||
ovrHmd_GetEyePoses(_hmd, _frameIndex, _eyeOffsets, _eyePoses, nullptr);
|
||||
}
|
||||
|
||||
glm::mat4 OculusBaseDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
|
||||
return _eyeProjections[eye];
|
||||
}
|
||||
|
||||
glm::mat4 OculusBaseDisplayPlugin::getModelview(Eye eye, const glm::mat4& baseModelview) const {
|
||||
return baseModelview * toGlm(_eyePoses[eye]);
|
||||
}
|
||||
|
||||
void OculusBaseDisplayPlugin::resetSensors() {
|
||||
ovrHmd_RecenterPose(_hmd);
|
||||
}
|
||||
|
||||
glm::mat4 OculusBaseDisplayPlugin::getEyePose(Eye eye) const {
|
||||
return toGlm(_eyePoses[eye]);
|
||||
}
|
||||
|
||||
// Should NOT be used for rendering as this will mess up timewarp. Use the getModelview() method above for
|
||||
// any use of head poses for rendering, ensuring you use the correct eye
|
||||
glm::mat4 OculusBaseDisplayPlugin::getHeadPose() const {
|
||||
ovrTrackingState state = ovrHmd_GetTrackingState(_hmd, 0.0f);
|
||||
return toGlm(state.HeadPose.ThePose);
|
||||
}
|
|
@ -0,0 +1,26 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "../MainWindowOpenGLDisplayPlugin.h"
|
||||
|
||||
class OculusBaseDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
|
||||
public:
|
||||
// Stereo specific methods
|
||||
virtual bool isHmd() const override { return true; }
|
||||
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
|
||||
virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const override;
|
||||
virtual void activate() override;
|
||||
virtual void preRender() override;
|
||||
virtual glm::uvec2 getRecommendedRenderSize() const override;
|
||||
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
|
||||
virtual void resetSensors() override;
|
||||
virtual glm::mat4 getEyePose(Eye eye) const override;
|
||||
virtual glm::mat4 getHeadPose() const override;
|
||||
|
||||
};
|
|
@ -0,0 +1,24 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/08/08
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "OculusHelpers.h"
|
||||
|
||||
|
||||
namespace Oculus {
|
||||
ovrHmd _hmd;
|
||||
unsigned int _frameIndex{ 0 };
|
||||
ovrEyeRenderDesc _eyeRenderDescs[2];
|
||||
ovrPosef _eyePoses[2];
|
||||
ovrVector3f _eyeOffsets[2];
|
||||
ovrFovPort _eyeFovs[2];
|
||||
mat4 _eyeProjections[2];
|
||||
mat4 _compositeEyeProjections[2];
|
||||
uvec2 _desiredFramebufferSize;
|
||||
}
|
||||
|
||||
|
|
@ -0,0 +1,92 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/26
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include <OVR_CAPI.h>
|
||||
#include <GLMHelpers.h>
|
||||
#include <glm/gtc/type_ptr.hpp>
|
||||
#include <glm/gtc/matrix_transform.hpp>
|
||||
|
||||
#if (OVR_MAJOR_VERSION < 6)
|
||||
#define OVR_SUCCESS(x) x
|
||||
#endif
|
||||
|
||||
// Convenience method for looping over each eye with a lambda
|
||||
template <typename Function>
|
||||
inline void ovr_for_each_eye(Function function) {
|
||||
for (ovrEyeType eye = ovrEyeType::ovrEye_Left;
|
||||
eye < ovrEyeType::ovrEye_Count;
|
||||
eye = static_cast<ovrEyeType>(eye + 1)) {
|
||||
function(eye);
|
||||
}
|
||||
}
|
||||
|
||||
inline glm::mat4 toGlm(const ovrMatrix4f & om) {
|
||||
return glm::transpose(glm::make_mat4(&om.M[0][0]));
|
||||
}
|
||||
|
||||
inline glm::mat4 toGlm(const ovrFovPort & fovport, float nearPlane = 0.01f, float farPlane = 10000.0f) {
|
||||
return toGlm(ovrMatrix4f_Projection(fovport, nearPlane, farPlane, true));
|
||||
}
|
||||
|
||||
inline glm::vec3 toGlm(const ovrVector3f & ov) {
|
||||
return glm::make_vec3(&ov.x);
|
||||
}
|
||||
|
||||
inline glm::vec2 toGlm(const ovrVector2f & ov) {
|
||||
return glm::make_vec2(&ov.x);
|
||||
}
|
||||
|
||||
inline glm::uvec2 toGlm(const ovrSizei & ov) {
|
||||
return glm::uvec2(ov.w, ov.h);
|
||||
}
|
||||
|
||||
inline glm::quat toGlm(const ovrQuatf & oq) {
|
||||
return glm::make_quat(&oq.x);
|
||||
}
|
||||
|
||||
inline glm::mat4 toGlm(const ovrPosef & op) {
|
||||
glm::mat4 orientation = glm::mat4_cast(toGlm(op.Orientation));
|
||||
glm::mat4 translation = glm::translate(glm::mat4(), toGlm(op.Position));
|
||||
return translation * orientation;
|
||||
}
|
||||
|
||||
inline ovrMatrix4f ovrFromGlm(const glm::mat4 & m) {
|
||||
ovrMatrix4f result;
|
||||
glm::mat4 transposed(glm::transpose(m));
|
||||
memcpy(result.M, &(transposed[0][0]), sizeof(float) * 16);
|
||||
return result;
|
||||
}
|
||||
|
||||
inline ovrVector3f ovrFromGlm(const glm::vec3 & v) {
|
||||
return{ v.x, v.y, v.z };
|
||||
}
|
||||
|
||||
inline ovrVector2f ovrFromGlm(const glm::vec2 & v) {
|
||||
return{ v.x, v.y };
|
||||
}
|
||||
|
||||
inline ovrSizei ovrFromGlm(const glm::uvec2 & v) {
|
||||
return{ (int)v.x, (int)v.y };
|
||||
}
|
||||
|
||||
inline ovrQuatf ovrFromGlm(const glm::quat & q) {
|
||||
return{ q.x, q.y, q.z, q.w };
|
||||
}
|
||||
|
||||
namespace Oculus {
|
||||
extern ovrHmd _hmd;
|
||||
extern unsigned int _frameIndex;
|
||||
extern ovrEyeRenderDesc _eyeRenderDescs[2];
|
||||
extern ovrPosef _eyePoses[2];
|
||||
extern ovrVector3f _eyeOffsets[2];
|
||||
extern ovrFovPort _eyeFovs[2];
|
||||
extern mat4 _eyeProjections[2];
|
||||
extern mat4 _compositeEyeProjections[2];
|
||||
extern uvec2 _desiredFramebufferSize;
|
||||
}
|
|
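A minimal usage sketch for the conversion helpers and the ovr_for_each_eye loop declared above, assuming Oculus::_hmd has already been initialized by one of the display plugins in this commit; the function names below are hypothetical and not part of the diff:

#include <algorithm>
#include "OculusHelpers.h"

// Convert the SDK's current head pose into a GLM matrix
glm::mat4 currentHeadPose() {
    ovrTrackingState state = ovrHmd_GetTrackingState(Oculus::_hmd, 0.0);
    return toGlm(state.HeadPose.ThePose);
}

// Walk both eyes and combine their recommended texture sizes side by side
glm::uvec2 combinedEyeSize(const ovrEyeRenderDesc (&descs)[2]) {
    glm::uvec2 total(0);
    ovr_for_each_eye([&](ovrEyeType eye) {
        glm::uvec2 size = toGlm(ovrHmd_GetFovTextureSize(Oculus::_hmd, eye, descs[eye].Fov, 1.0f));
        total.x += size.x;                    // widths add up
        total.y = std::max(total.y, size.y);  // height is the taller of the two
    });
    return total;
}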
@ -0,0 +1,192 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2014/04/13.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "Oculus_0_5_DisplayPlugin.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include <QMainWindow>
|
||||
#include <QGLWidget>
|
||||
#include <GLMHelpers.h>
|
||||
#include <GlWindow.h>
|
||||
#include <QEvent>
|
||||
#include <QResizeEvent>
|
||||
#include <QOpenGLContext>
|
||||
#include <QGuiApplication>
|
||||
#include <QScreen>
|
||||
|
||||
|
||||
#include <OVR_CAPI_GL.h>
|
||||
|
||||
#include <PerfStat.h>
|
||||
#include <OglplusHelpers.h>
|
||||
|
||||
#include "plugins/PluginContainer.h"
|
||||
#include "OculusHelpers.h"
|
||||
|
||||
using namespace Oculus;
|
||||
ovrTexture _eyeTextures[2];
|
||||
int _hmdScreen{ -1 };
|
||||
bool _hswDismissed{ false };
|
||||
|
||||
DisplayPlugin* makeOculusDisplayPlugin() {
|
||||
return new Oculus_0_5_DisplayPlugin();
|
||||
}
|
||||
|
||||
using namespace oglplus;
|
||||
|
||||
const QString Oculus_0_5_DisplayPlugin::NAME("Oculus Rift (0.5)");
|
||||
|
||||
const QString & Oculus_0_5_DisplayPlugin::getName() const {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
|
||||
bool Oculus_0_5_DisplayPlugin::isSupported() const {
|
||||
#if (OVR_MAJOR_VERSION == 5)
|
||||
if (!ovr_Initialize(nullptr)) {
|
||||
return false;
|
||||
}
|
||||
bool result = false;
|
||||
if (ovrHmd_Detect() > 0) {
|
||||
result = true;
|
||||
}
|
||||
|
||||
auto hmd = ovrHmd_Create(0);
|
||||
if (hmd) {
|
||||
QPoint targetPosition{ hmd->WindowsPos.x, hmd->WindowsPos.y };
|
||||
auto screens = qApp->screens();
|
||||
for(int i = 0; i < screens.size(); ++i) {
|
||||
auto screen = screens[i];
|
||||
QPoint position = screen->geometry().topLeft();
|
||||
if (position == targetPosition) {
|
||||
_hmdScreen = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ovr_Shutdown();
|
||||
return result;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
void Oculus_0_5_DisplayPlugin::activate() {
|
||||
#if (OVR_MAJOR_VERSION == 5)
|
||||
if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
|
||||
Q_ASSERT(false);
|
||||
qFatal("Failed to Initialize SDK");
|
||||
}
|
||||
_hswDismissed = false;
|
||||
_hmd = ovrHmd_Create(0);
|
||||
if (!_hmd) {
|
||||
qFatal("Failed to acquire HMD");
|
||||
}
|
||||
|
||||
OculusBaseDisplayPlugin::activate();
|
||||
int screen = getHmdScreen();
|
||||
if (screen != -1) {
|
||||
CONTAINER->setFullscreen(qApp->screens()[screen]);
|
||||
}
|
||||
|
||||
_window->installEventFilter(this);
|
||||
_window->makeCurrent();
|
||||
ovrGLConfig config; memset(&config, 0, sizeof(config));
|
||||
auto& header = config.Config.Header;
|
||||
header.API = ovrRenderAPI_OpenGL;
|
||||
header.BackBufferSize = _hmd->Resolution;
|
||||
header.Multisample = 1;
|
||||
int distortionCaps = 0
|
||||
| ovrDistortionCap_TimeWarp
|
||||
;
|
||||
|
||||
memset(_eyeTextures, 0, sizeof(ovrTexture) * 2);
|
||||
ovr_for_each_eye([&](ovrEyeType eye) {
|
||||
auto& header = _eyeTextures[eye].Header;
|
||||
header.API = ovrRenderAPI_OpenGL;
|
||||
header.TextureSize = { (int)_desiredFramebufferSize.x, (int)_desiredFramebufferSize.y };
|
||||
header.RenderViewport.Size = header.TextureSize;
|
||||
header.RenderViewport.Size.w /= 2;
|
||||
if (eye == ovrEye_Right) {
|
||||
header.RenderViewport.Pos.x = header.RenderViewport.Size.w;
|
||||
}
|
||||
});
|
||||
|
||||
ovrEyeRenderDesc _eyeRenderDescs[ovrEye_Count];
|
||||
ovrBool result = ovrHmd_ConfigureRendering(_hmd, &config.Config, distortionCaps, _eyeFovs, _eyeRenderDescs);
|
||||
Q_ASSERT(result);
|
||||
#endif
|
||||
}
|
||||
|
||||
void Oculus_0_5_DisplayPlugin::deactivate() {
|
||||
#if (OVR_MAJOR_VERSION == 5)
|
||||
_window->removeEventFilter(this);
|
||||
|
||||
OculusBaseDisplayPlugin::deactivate();
|
||||
|
||||
QScreen* riftScreen = nullptr;
|
||||
if (_hmdScreen >= 0) {
|
||||
riftScreen = qApp->screens()[_hmdScreen];
|
||||
}
|
||||
CONTAINER->unsetFullscreen(riftScreen);
|
||||
|
||||
ovrHmd_Destroy(_hmd);
|
||||
_hmd = nullptr;
|
||||
ovr_Shutdown();
|
||||
#endif
|
||||
}
|
||||
|
||||
void Oculus_0_5_DisplayPlugin::preRender() {
|
||||
#if (OVR_MAJOR_VERSION == 5)
|
||||
OculusBaseDisplayPlugin::preRender();
|
||||
ovrHmd_BeginFrame(_hmd, _frameIndex);
|
||||
#endif
|
||||
}
|
||||
|
||||
void Oculus_0_5_DisplayPlugin::preDisplay() {
|
||||
_window->makeCurrent();
|
||||
}
|
||||
|
||||
void Oculus_0_5_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
|
||||
++_frameIndex;
|
||||
#if (OVR_MAJOR_VERSION == 5)
|
||||
ovr_for_each_eye([&](ovrEyeType eye) {
|
||||
reinterpret_cast<ovrGLTexture&>(_eyeTextures[eye]).OGL.TexId = finalTexture;
|
||||
});
|
||||
ovrHmd_EndFrame(_hmd, _eyePoses, _eyeTextures);
|
||||
#endif
|
||||
}
|
||||
|
||||
// Pass input events on to the application
|
||||
bool Oculus_0_5_DisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
|
||||
#if (OVR_MAJOR_VERSION == 5)
|
||||
if (!_hswDismissed && (event->type() == QEvent::KeyPress)) {
|
||||
static ovrHSWDisplayState hswState;
|
||||
ovrHmd_GetHSWDisplayState(_hmd, &hswState);
|
||||
if (hswState.Displayed) {
|
||||
ovrHmd_DismissHSWDisplay(_hmd);
|
||||
} else {
|
||||
_hswDismissed = true;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
return OculusBaseDisplayPlugin::eventFilter(receiver, event);
|
||||
}
|
||||
|
||||
// FIXME mirroring to the main window is difficult on OSX because it requires that we
|
||||
// trigger a swap, which causes the client to wait for the v-sync of the main screen running
|
||||
// at 60 Hz. This would introduce judder. Perhaps we can push mirroring to a separate
|
||||
// thread
|
||||
void Oculus_0_5_DisplayPlugin::finishFrame() {
|
||||
_window->doneCurrent();
|
||||
};
|
||||
|
||||
int Oculus_0_5_DisplayPlugin::getHmdScreen() const {
|
||||
return _hmdScreen;
|
||||
}
|
|
@ -0,0 +1,37 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "OculusBaseDisplayPlugin.h"
|
||||
|
||||
#include <QTimer>
|
||||
|
||||
class Oculus_0_5_DisplayPlugin : public OculusBaseDisplayPlugin {
|
||||
public:
|
||||
virtual bool isSupported() const override;
|
||||
virtual const QString & getName() const override;
|
||||
|
||||
virtual void activate() override;
|
||||
virtual void deactivate() override;
|
||||
|
||||
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
|
||||
|
||||
virtual int getHmdScreen() const override;
|
||||
|
||||
protected:
|
||||
virtual void preRender() override;
|
||||
virtual void preDisplay() override;
|
||||
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
|
||||
// Do not perform swap in finish
|
||||
virtual void finishFrame() override;
|
||||
|
||||
private:
|
||||
static const QString NAME;
|
||||
};
|
||||
|
||||
|
|
@ -0,0 +1,370 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2014/04/13.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "Oculus_0_6_DisplayPlugin.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include <QMainWindow>
|
||||
#include <QGLWidget>
|
||||
#include <GLMHelpers.h>
|
||||
#include <GlWindow.h>
|
||||
#include <QEvent>
|
||||
#include <QResizeEvent>
|
||||
|
||||
#include <OVR_CAPI_GL.h>
|
||||
|
||||
|
||||
#include <OglplusHelpers.h>
|
||||
#include <oglplus/opt/list_init.hpp>
|
||||
#include <oglplus/shapes/vector.hpp>
|
||||
#include <oglplus/opt/list_init.hpp>
|
||||
#include <oglplus/shapes/obj_mesh.hpp>
|
||||
|
||||
#include <PerfStat.h>
|
||||
#include <plugins/PluginContainer.h>
|
||||
|
||||
#include "OculusHelpers.h"
|
||||
|
||||
using namespace Oculus;
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
SwapFboPtr _sceneFbo;
|
||||
MirrorFboPtr _mirrorFbo;
|
||||
ovrLayerEyeFov _sceneLayer;
|
||||
|
||||
// A base class for FBO wrappers that need to use the Oculus C
|
||||
// API to manage textures via ovrHmd_CreateSwapTextureSetGL,
|
||||
// ovrHmd_CreateMirrorTextureGL, etc
|
||||
template <typename C>
|
||||
struct RiftFramebufferWrapper : public FramebufferWrapper<C, char> {
|
||||
ovrHmd hmd;
|
||||
RiftFramebufferWrapper(const ovrHmd & hmd) : hmd(hmd) {
|
||||
color = 0;
|
||||
depth = 0;
|
||||
};
|
||||
|
||||
void Resize(const uvec2 & size) {
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
|
||||
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
|
||||
this->size = size;
|
||||
initColor();
|
||||
initDone();
|
||||
}
|
||||
|
||||
protected:
|
||||
virtual void initDepth() override final {
|
||||
}
|
||||
};
|
||||
|
||||
// A wrapper for constructing and using a swap texture set,
|
||||
// where each frame you draw to a texture via the FBO,
|
||||
// then submit it and increment to the next texture.
|
||||
// The Oculus SDK manages the creation and destruction of
|
||||
// the textures
|
||||
struct SwapFramebufferWrapper : public RiftFramebufferWrapper<ovrSwapTextureSet*> {
|
||||
SwapFramebufferWrapper(const ovrHmd & hmd)
|
||||
: RiftFramebufferWrapper(hmd) {
|
||||
}
|
||||
|
||||
~SwapFramebufferWrapper() {
|
||||
if (color) {
|
||||
ovrHmd_DestroySwapTextureSet(hmd, color);
|
||||
color = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
void Increment() {
|
||||
++color->CurrentIndex;
|
||||
color->CurrentIndex %= color->TextureCount;
|
||||
}
|
||||
|
||||
protected:
|
||||
virtual void initColor() override {
|
||||
if (color) {
|
||||
ovrHmd_DestroySwapTextureSet(hmd, color);
|
||||
color = nullptr;
|
||||
}
|
||||
|
||||
if (!OVR_SUCCESS(ovrHmd_CreateSwapTextureSetGL(hmd, GL_RGBA, size.x, size.y, &color))) {
|
||||
qFatal("Unable to create swap textures");
|
||||
}
|
||||
|
||||
for (int i = 0; i < color->TextureCount; ++i) {
|
||||
ovrGLTexture& ovrTex = (ovrGLTexture&)color->Textures[i];
|
||||
glBindTexture(GL_TEXTURE_2D, ovrTex.OGL.TexId);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
}
|
||||
glBindTexture(GL_TEXTURE_2D, 0);
|
||||
}
|
||||
|
||||
virtual void initDone() override {
|
||||
}
|
||||
|
||||
virtual void onBind(oglplus::Framebuffer::Target target) override {
|
||||
ovrGLTexture& tex = (ovrGLTexture&)(color->Textures[color->CurrentIndex]);
|
||||
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex.OGL.TexId, 0);
|
||||
}
|
||||
|
||||
virtual void onUnbind(oglplus::Framebuffer::Target target) override {
|
||||
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
|
||||
}
|
||||
};
|
||||
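The comment above SwapFramebufferWrapper describes the per-frame lifecycle of the swap texture set: draw into the current texture via the FBO, submit the layer, then advance the index. A simplified sketch of that flow, assuming _sceneFbo, _sceneLayer, _hmd and _frameIndex are set up as in this plugin (the helper name submitOneFrame is hypothetical):

// Hypothetical helper summarizing the swap-texture-set flow used later in display()
void submitOneFrame(GLuint finalTexture) {
    _sceneFbo->Bound([&] {                        // binds the set's current texture as the color attachment
        auto size = _sceneFbo->size;
        oglplus::Context::Viewport(size.x, size.y);
        glBindTexture(GL_TEXTURE_2D, finalTexture);
        // draw the composited scene into the swap texture here
    });
    ovrLayerHeader* layers = &_sceneLayer.Header; // a single EyeFov layer covering both eyes
    ovrHmd_SubmitFrame(_hmd, _frameIndex, nullptr, &layers, 1);
    _sceneFbo->Increment();                       // advance CurrentIndex to the next texture in the set
    ++_frameIndex;
}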
|
||||
|
||||
// We use a FBO to wrap the mirror texture because it makes it easier to
|
||||
// render to the screen via glBlitFramebuffer
|
||||
struct MirrorFramebufferWrapper : public RiftFramebufferWrapper<ovrGLTexture*> {
|
||||
MirrorFramebufferWrapper(const ovrHmd & hmd)
|
||||
: RiftFramebufferWrapper(hmd) { }
|
||||
|
||||
virtual ~MirrorFramebufferWrapper() {
|
||||
if (color) {
|
||||
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
|
||||
color = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
void initColor() override {
|
||||
if (color) {
|
||||
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
|
||||
color = nullptr;
|
||||
}
|
||||
ovrResult result = ovrHmd_CreateMirrorTextureGL(hmd, GL_RGBA, size.x, size.y, (ovrTexture**)&color);
|
||||
Q_ASSERT(OVR_SUCCESS(result));
|
||||
}
|
||||
|
||||
void initDone() override {
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
|
||||
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, color->OGL.TexId, 0);
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
|
||||
}
|
||||
};
|
||||
|
||||
#endif
|
||||
|
||||
|
||||
const QString Oculus_0_6_DisplayPlugin::NAME("Oculus Rift");
|
||||
|
||||
const QString & Oculus_0_6_DisplayPlugin::getName() const {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
bool Oculus_0_6_DisplayPlugin::isSupported() const {
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
|
||||
return false;
|
||||
}
|
||||
bool result = false;
|
||||
if (ovrHmd_Detect() > 0) {
|
||||
result = true;
|
||||
}
|
||||
ovr_Shutdown();
|
||||
return result;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
ovrLayerEyeFov& getSceneLayer() {
|
||||
return _sceneLayer;
|
||||
}
|
||||
#endif
|
||||
|
||||
//static gpu::TexturePointer _texture;
|
||||
|
||||
void Oculus_0_6_DisplayPlugin::activate() {
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
|
||||
Q_ASSERT(false);
|
||||
qFatal("Failed to Initialize SDK");
|
||||
}
|
||||
if (!OVR_SUCCESS(ovrHmd_Create(0, &_hmd))) {
|
||||
Q_ASSERT(false);
|
||||
qFatal("Failed to acquire HMD");
|
||||
}
|
||||
|
||||
OculusBaseDisplayPlugin::activate();
|
||||
|
||||
// Parent class relies on our _hmd initialization, so it must come after that.
|
||||
ovrLayerEyeFov& sceneLayer = getSceneLayer();
|
||||
memset(&sceneLayer, 0, sizeof(ovrLayerEyeFov));
|
||||
sceneLayer.Header.Type = ovrLayerType_EyeFov;
|
||||
sceneLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
|
||||
ovr_for_each_eye([&](ovrEyeType eye) {
|
||||
ovrFovPort & fov = sceneLayer.Fov[eye] = _eyeRenderDescs[eye].Fov;
|
||||
ovrSizei & size = sceneLayer.Viewport[eye].Size = ovrHmd_GetFovTextureSize(_hmd, eye, fov, 1.0f);
|
||||
sceneLayer.Viewport[eye].Pos = { eye == ovrEye_Left ? 0 : size.w, 0 };
|
||||
});
|
||||
// We're rendering both eyes to the same texture, so only one of the
|
||||
// pointers is populated
|
||||
sceneLayer.ColorTexture[0] = _sceneFbo->color;
|
||||
// not needed since the structure was zeroed on init, but explicit
|
||||
sceneLayer.ColorTexture[1] = nullptr;
|
||||
|
||||
PerformanceTimer::setActive(true);
|
||||
|
||||
if (!OVR_SUCCESS(ovrHmd_ConfigureTracking(_hmd,
|
||||
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
|
||||
qFatal("Could not attach to sensor device");
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void Oculus_0_6_DisplayPlugin::customizeContext() {
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
OculusBaseDisplayPlugin::customizeContext();
|
||||
|
||||
//_texture = DependencyManager::get<TextureCache>()->
|
||||
// getImageTexture(PathUtils::resourcesPath() + "/images/cube_texture.png");
|
||||
uvec2 mirrorSize = toGlm(_window->geometry().size());
|
||||
_mirrorFbo = MirrorFboPtr(new MirrorFramebufferWrapper(_hmd));
|
||||
_mirrorFbo->Init(mirrorSize);
|
||||
|
||||
_sceneFbo = SwapFboPtr(new SwapFramebufferWrapper(_hmd));
|
||||
_sceneFbo->Init(getRecommendedRenderSize());
|
||||
#endif
|
||||
}
|
||||
|
||||
void Oculus_0_6_DisplayPlugin::deactivate() {
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
makeCurrent();
|
||||
_sceneFbo.reset();
|
||||
_mirrorFbo.reset();
|
||||
doneCurrent();
|
||||
PerformanceTimer::setActive(false);
|
||||
|
||||
OculusBaseDisplayPlugin::deactivate();
|
||||
|
||||
ovrHmd_Destroy(_hmd);
|
||||
_hmd = nullptr;
|
||||
ovr_Shutdown();
|
||||
#endif
|
||||
}
|
||||
|
||||
void Oculus_0_6_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
using namespace oglplus;
|
||||
// Need to make sure only the display plugin is responsible for
|
||||
// controlling vsync
|
||||
wglSwapIntervalEXT(0);
|
||||
|
||||
_sceneFbo->Bound([&] {
|
||||
auto size = _sceneFbo->size;
|
||||
Context::Viewport(size.x, size.y);
|
||||
glBindTexture(GL_TEXTURE_2D, finalTexture);
|
||||
drawUnitQuad();
|
||||
});
|
||||
|
||||
ovrLayerEyeFov& sceneLayer = getSceneLayer();
|
||||
ovr_for_each_eye([&](ovrEyeType eye) {
|
||||
sceneLayer.RenderPose[eye] = _eyePoses[eye];
|
||||
});
|
||||
|
||||
auto windowSize = toGlm(_window->size());
|
||||
|
||||
/*
|
||||
Two alternatives for mirroring to the screen, the first is to copy our own composited
|
||||
scene to the window framebuffer, before distortion. Note this only works if we're doing
|
||||
ui compositing ourselves, and not relying on the Oculus SDK compositor (or we don't want
|
||||
the UI visible in the output window, which is unlikely). This should be done before
|
||||
_sceneFbo->Increment or we'd be using the wrong texture
|
||||
*/
|
||||
//_sceneFbo->Bound(GL_READ_FRAMEBUFFER, [&] {
|
||||
// glBlitFramebuffer(
|
||||
// 0, 0, _sceneFbo->size.x, _sceneFbo->size.y,
|
||||
// 0, 0, windowSize.x, _mirrorFbo.y,
|
||||
// GL_COLOR_BUFFER_BIT, GL_NEAREST);
|
||||
//});
|
||||
|
||||
{
|
||||
PerformanceTimer("OculusSubmit");
|
||||
ovrLayerHeader* layers = &sceneLayer.Header;
|
||||
ovrResult result = ovrHmd_SubmitFrame(_hmd, _frameIndex, nullptr, &layers, 1);
|
||||
}
|
||||
_sceneFbo->Increment();
|
||||
|
||||
/*
|
||||
The other alternative for mirroring is to use the Oculus mirror texture support, which
|
||||
will contain the post-distorted and fully composited scene regardless of how many layers
|
||||
we send.
|
||||
*/
|
||||
auto mirrorSize = _mirrorFbo->size;
|
||||
_mirrorFbo->Bound(Framebuffer::Target::Read, [&] {
|
||||
Context::BlitFramebuffer(
|
||||
0, mirrorSize.y, mirrorSize.x, 0,
|
||||
0, 0, windowSize.x, windowSize.y,
|
||||
BufferSelectBit::ColorBuffer, BlitFilter::Nearest);
|
||||
});
|
||||
|
||||
++_frameIndex;
|
||||
#endif
|
||||
}
|
||||
|
||||
// Pass input events on to the application
|
||||
bool Oculus_0_6_DisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
|
||||
#if (OVR_MAJOR_VERSION == 6)
|
||||
if (event->type() == QEvent::Resize) {
|
||||
QResizeEvent* resizeEvent = static_cast<QResizeEvent*>(event);
|
||||
qDebug() << resizeEvent->size().width() << " x " << resizeEvent->size().height();
|
||||
auto newSize = toGlm(resizeEvent->size());
|
||||
makeCurrent();
|
||||
_mirrorFbo->Resize(newSize);
|
||||
doneCurrent();
|
||||
}
|
||||
#endif
|
||||
return OculusBaseDisplayPlugin::eventFilter(receiver, event);
|
||||
}
|
||||
|
||||
/*
|
||||
The swapbuffer call here is only required if we want to mirror the content to the screen.
|
||||
However, it should only be done if we can reliably disable v-sync on the mirror surface,
|
||||
otherwise the swapbuffer delay will interfere with the framerate of the headset
|
||||
*/
|
||||
void Oculus_0_6_DisplayPlugin::finishFrame() {
|
||||
swapBuffers();
|
||||
doneCurrent();
|
||||
};
|
||||
|
||||
|
||||
#if 0
|
||||
/*
|
||||
An alternative way to render the UI is to pass it specifically as a composition layer to
|
||||
the Oculus SDK which should technically result in higher quality. However, the SDK doesn't
|
||||
have a mechanism to present the image as a sphere section, which is our desired look.
|
||||
*/
|
||||
ovrLayerQuad& uiLayer = getUiLayer();
|
||||
if (nullptr == uiLayer.ColorTexture || overlaySize != _uiFbo->size) {
|
||||
_uiFbo->Resize(overlaySize);
|
||||
uiLayer.ColorTexture = _uiFbo->color;
|
||||
uiLayer.Viewport.Size.w = overlaySize.x;
|
||||
uiLayer.Viewport.Size.h = overlaySize.y;
|
||||
float overlayAspect = aspect(overlaySize);
|
||||
uiLayer.QuadSize.x = 1.0f;
|
||||
uiLayer.QuadSize.y = 1.0f / overlayAspect;
|
||||
}
|
||||
|
||||
_uiFbo->Bound([&] {
|
||||
Q_ASSERT(0 == glGetError());
|
||||
using namespace oglplus;
|
||||
Context::Viewport(_uiFbo->size.x, _uiFbo->size.y);
|
||||
glClearColor(0, 0, 0, 0);
|
||||
Context::Clear().ColorBuffer();
|
||||
|
||||
_program->Bind();
|
||||
glBindTexture(GL_TEXTURE_2D, overlayTexture);
|
||||
_plane->Use();
|
||||
_plane->Draw();
|
||||
Q_ASSERT(0 == glGetError());
|
||||
});
|
||||
#endif
|
|
@ -0,0 +1,41 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "OculusBaseDisplayPlugin.h"
|
||||
|
||||
#include <QTimer>
|
||||
|
||||
class OffscreenGlCanvas;
|
||||
struct SwapFramebufferWrapper;
|
||||
struct MirrorFramebufferWrapper;
|
||||
|
||||
using SwapFboPtr = QSharedPointer<SwapFramebufferWrapper>;
|
||||
using MirrorFboPtr = QSharedPointer<MirrorFramebufferWrapper>;
|
||||
|
||||
class Oculus_0_6_DisplayPlugin : public OculusBaseDisplayPlugin {
|
||||
public:
|
||||
virtual bool isSupported() const override;
|
||||
virtual const QString & getName() const override;
|
||||
|
||||
virtual void activate() override;
|
||||
virtual void deactivate() override;
|
||||
|
||||
|
||||
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
|
||||
|
||||
protected:
|
||||
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
|
||||
virtual void customizeContext() override;
|
||||
// Do not perform swap in finish
|
||||
virtual void finishFrame() override;
|
||||
|
||||
private:
|
||||
static const QString NAME;
|
||||
};
|
||||
|
|
@ -0,0 +1,197 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/12
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "OpenVrDisplayPlugin.h"
|
||||
|
||||
#if defined(Q_OS_WIN)
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include <QMainWindow>
|
||||
#include <QGLWidget>
|
||||
#include <GLMHelpers.h>
|
||||
#include <GlWindow.h>
|
||||
#include <QEvent>
|
||||
#include <QResizeEvent>
|
||||
|
||||
#include <PerfStat.h>
|
||||
#include <plugins/PluginContainer.h>
|
||||
#include <ViewFrustum.h>
|
||||
|
||||
#include "OpenVrHelpers.h"
|
||||
#include "GLMHelpers.h"
|
||||
|
||||
#include <QLoggingCategory>
|
||||
Q_DECLARE_LOGGING_CATEGORY(displayplugins)
|
||||
Q_LOGGING_CATEGORY(displayplugins, "hifi.displayplugins")
|
||||
|
||||
const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
|
||||
|
||||
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probably shouldn't be hardcoded here
|
||||
|
||||
const QString & OpenVrDisplayPlugin::getName() const {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
vr::IVRSystem* _hmd{ nullptr };
|
||||
int hmdRefCount = 0;
|
||||
static vr::IVRCompositor* _compositor{ nullptr };
|
||||
vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
|
||||
mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
|
||||
static mat4 _sensorResetMat;
|
||||
static uvec2 _windowSize;
|
||||
static ivec2 _windowPosition;
|
||||
static uvec2 _renderTargetSize;
|
||||
|
||||
struct PerEyeData {
|
||||
uvec2 _viewportOrigin;
|
||||
uvec2 _viewportSize;
|
||||
mat4 _projectionMatrix;
|
||||
mat4 _eyeOffset;
|
||||
mat4 _pose;
|
||||
};
|
||||
|
||||
static PerEyeData _eyesData[2];
|
||||
|
||||
|
||||
template<typename F>
|
||||
void openvr_for_each_eye(F f) {
|
||||
f(vr::Hmd_Eye::Eye_Left);
|
||||
f(vr::Hmd_Eye::Eye_Right);
|
||||
}
|
||||
|
||||
mat4 toGlm(const vr::HmdMatrix44_t& m) {
|
||||
return glm::transpose(glm::make_mat4(&m.m[0][0]));
|
||||
}
|
||||
|
||||
mat4 toGlm(const vr::HmdMatrix34_t& m) {
|
||||
mat4 result = mat4(
|
||||
m.m[0][0], m.m[1][0], m.m[2][0], 0.0,
|
||||
m.m[0][1], m.m[1][1], m.m[2][1], 0.0,
|
||||
m.m[0][2], m.m[1][2], m.m[2][2], 0.0,
|
||||
m.m[0][3], m.m[1][3], m.m[2][3], 1.0f);
|
||||
return result;
|
||||
}
|
||||
|
||||
bool OpenVrDisplayPlugin::isSupported() const {
|
||||
bool success = vr::VR_IsHmdPresent();
|
||||
if (success) {
|
||||
vr::HmdError eError = vr::HmdError_None;
|
||||
auto hmd = vr::VR_Init(&eError);
|
||||
success = (hmd != nullptr);
|
||||
vr::VR_Shutdown();
|
||||
}
|
||||
return success;
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::activate() {
|
||||
CONTAINER->setIsOptionChecked(StandingHMDSensorMode, true);
|
||||
|
||||
hmdRefCount++;
|
||||
vr::HmdError eError = vr::HmdError_None;
|
||||
if (!_hmd) {
|
||||
_hmd = vr::VR_Init(&eError);
|
||||
Q_ASSERT(eError == vr::HmdError_None);
|
||||
}
|
||||
Q_ASSERT(_hmd);
|
||||
|
||||
_hmd->GetWindowBounds(&_windowPosition.x, &_windowPosition.y, &_windowSize.x, &_windowSize.y);
|
||||
_hmd->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
|
||||
// Recommended render target size is per-eye, so double the X size for
|
||||
// left + right eyes
|
||||
_renderTargetSize.x *= 2;
|
||||
openvr_for_each_eye([&](vr::Hmd_Eye eye) {
|
||||
PerEyeData& eyeData = _eyesData[eye];
|
||||
_hmd->GetEyeOutputViewport(eye,
|
||||
&eyeData._viewportOrigin.x, &eyeData._viewportOrigin.y,
|
||||
&eyeData._viewportSize.x, &eyeData._viewportSize.y);
|
||||
eyeData._projectionMatrix = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
|
||||
eyeData._eyeOffset = toGlm(_hmd->GetEyeToHeadTransform(eye));
|
||||
});
|
||||
|
||||
|
||||
_compositor = (vr::IVRCompositor*)vr::VR_GetGenericInterface(vr::IVRCompositor_Version, &eError);
|
||||
Q_ASSERT(eError == vr::HmdError_None);
|
||||
Q_ASSERT(_compositor);
|
||||
|
||||
_compositor->SetGraphicsDevice(vr::Compositor_DeviceType_OpenGL, NULL);
|
||||
|
||||
uint32_t unSize = _compositor->GetLastError(NULL, 0);
|
||||
if (unSize > 1) {
|
||||
char* buffer = new char[unSize];
|
||||
_compositor->GetLastError(buffer, unSize);
|
||||
printf("Compositor - %s\n", buffer);
|
||||
delete[] buffer;
|
||||
}
|
||||
Q_ASSERT(unSize <= 1);
|
||||
MainWindowOpenGLDisplayPlugin::activate();
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::deactivate() {
|
||||
CONTAINER->setIsOptionChecked(StandingHMDSensorMode, false);
|
||||
|
||||
hmdRefCount--;
|
||||
|
||||
if (hmdRefCount == 0 && _hmd) {
|
||||
vr::VR_Shutdown();
|
||||
_hmd = nullptr;
|
||||
}
|
||||
_compositor = nullptr;
|
||||
MainWindowOpenGLDisplayPlugin::deactivate();
|
||||
}
|
||||
|
||||
uvec2 OpenVrDisplayPlugin::getRecommendedRenderSize() const {
|
||||
return _renderTargetSize;
|
||||
}
|
||||
|
||||
mat4 OpenVrDisplayPlugin::getProjection(Eye eye, const mat4& baseProjection) const {
|
||||
return _eyesData[eye]._projectionMatrix;
|
||||
}
|
||||
|
||||
glm::mat4 OpenVrDisplayPlugin::getModelview(Eye eye, const mat4& baseModelview) const {
|
||||
return baseModelview * getEyePose(eye);
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::resetSensors() {
|
||||
_sensorResetMat = glm::inverse(cancelOutRollAndPitch(_trackedDevicePoseMat4[0]));
|
||||
}
|
||||
|
||||
glm::mat4 OpenVrDisplayPlugin::getEyePose(Eye eye) const {
|
||||
return getHeadPose() * _eyesData[eye]._eyeOffset;
|
||||
}
|
||||
|
||||
glm::mat4 OpenVrDisplayPlugin::getHeadPose() const {
|
||||
return _trackedDevicePoseMat4[0];
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::customizeContext() {
|
||||
MainWindowOpenGLDisplayPlugin::customizeContext();
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
|
||||
// Flip y-axis since GL UV coords are backwards.
|
||||
static vr::Compositor_TextureBounds leftBounds{ 0, 1, 0.5f, 0 };
|
||||
static vr::Compositor_TextureBounds rightBounds{ 0.5f, 1, 1, 0 };
|
||||
_compositor->Submit(vr::Eye_Left, (void*)finalTexture, &leftBounds);
|
||||
_compositor->Submit(vr::Eye_Right, (void*)finalTexture, &rightBounds);
|
||||
glFinish();
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::finishFrame() {
|
||||
// swapBuffers();
|
||||
doneCurrent();
|
||||
_compositor->WaitGetPoses(_trackedDevicePose, vr::k_unMaxTrackedDeviceCount);
|
||||
for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
|
||||
_trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
|
||||
}
|
||||
openvr_for_each_eye([&](vr::Hmd_Eye eye) {
|
||||
_eyesData[eye]._pose = _trackedDevicePoseMat4[0];
|
||||
});
|
||||
};
|
||||
|
||||
#endif
|
||||
|
|
@ -0,0 +1,47 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/06/12
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include <QtGlobal>
|
||||
|
||||
#if defined(Q_OS_WIN)
|
||||
|
||||
#include "../MainWindowOpenGLDisplayPlugin.h"
|
||||
|
||||
class OpenVrDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
|
||||
public:
|
||||
virtual bool isSupported() const override;
|
||||
virtual const QString & getName() const override;
|
||||
virtual bool isHmd() const override { return true; }
|
||||
|
||||
virtual void activate() override;
|
||||
virtual void deactivate() override;
|
||||
|
||||
virtual glm::uvec2 getRecommendedRenderSize() const override;
|
||||
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
|
||||
|
||||
// Stereo specific methods
|
||||
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
|
||||
virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const override;
|
||||
virtual void resetSensors() override;
|
||||
|
||||
virtual glm::mat4 getEyePose(Eye eye) const override;
|
||||
virtual glm::mat4 getHeadPose() const override;
|
||||
|
||||
protected:
|
||||
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
|
||||
virtual void customizeContext() override;
|
||||
// Do not perform swap in finish
|
||||
virtual void finishFrame() override;
|
||||
|
||||
private:
|
||||
static const QString NAME;
|
||||
};
|
||||
|
||||
#endif
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/06/12
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#if defined(Q_OS_WIN)
|
||||
#include <openvr.h>
|
||||
#include <GLMHelpers.h>
|
||||
#include <glm/gtc/type_ptr.hpp>
|
||||
#include <glm/gtc/matrix_transform.hpp>
|
||||
#endif
|
||||
|
|
@ -0,0 +1,33 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "InterleavedStereoDisplayPlugin.h"
|
||||
|
||||
#include <QApplication>
|
||||
#include <QDesktopWidget>
|
||||
|
||||
#include <GlWindow.h>
|
||||
#include <ViewFrustum.h>
|
||||
#include <MatrixStack.h>
|
||||
|
||||
#include <gpu/GLBackend.h>
|
||||
|
||||
const QString InterleavedStereoDisplayPlugin::NAME("Interleaved Stereo Display");
|
||||
|
||||
const QString & InterleavedStereoDisplayPlugin::getName() const {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
InterleavedStereoDisplayPlugin::InterleavedStereoDisplayPlugin() {
|
||||
}
|
||||
|
||||
void InterleavedStereoDisplayPlugin::customizeContext() {
|
||||
StereoDisplayPlugin::customizeContext();
|
||||
// Set up the stencil buffers? Or use a custom shader?
|
||||
|
||||
}
|
|
@ -0,0 +1,23 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "StereoDisplayPlugin.h"
|
||||
|
||||
class InterleavedStereoDisplayPlugin : public StereoDisplayPlugin {
|
||||
Q_OBJECT
|
||||
public:
|
||||
InterleavedStereoDisplayPlugin();
|
||||
virtual const QString & getName() const override;
|
||||
|
||||
// initialize OpenGL context settings needed by the plugin
|
||||
virtual void customizeContext() override;
|
||||
|
||||
private:
|
||||
static const QString NAME;
|
||||
};
|
|
@ -0,0 +1,28 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "SideBySideStereoDisplayPlugin.h"
|
||||
|
||||
#include <QApplication>
|
||||
#include <QDesktopWidget>
|
||||
|
||||
#include <GlWindow.h>
|
||||
#include <ViewFrustum.h>
|
||||
#include <MatrixStack.h>
|
||||
|
||||
#include <gpu/GLBackend.h>
|
||||
|
||||
const QString SideBySideStereoDisplayPlugin::NAME("SBS Stereo Display");
|
||||
|
||||
const QString & SideBySideStereoDisplayPlugin::getName() const {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
SideBySideStereoDisplayPlugin::SideBySideStereoDisplayPlugin() {
|
||||
}
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "StereoDisplayPlugin.h"
|
||||
|
||||
class SideBySideStereoDisplayPlugin : public StereoDisplayPlugin {
|
||||
Q_OBJECT
|
||||
public:
|
||||
SideBySideStereoDisplayPlugin();
|
||||
virtual const QString & getName() const override;
|
||||
private:
|
||||
static const QString NAME;
|
||||
};
|
|
@ -0,0 +1,57 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "StereoDisplayPlugin.h"
|
||||
|
||||
#include <QApplication>
|
||||
#include <QDesktopWidget>
|
||||
|
||||
#include <gpu/GLBackend.h>
|
||||
#include <ViewFrustum.h>
|
||||
#include <MatrixStack.h>
|
||||
#include <plugins/PluginContainer.h>
|
||||
|
||||
StereoDisplayPlugin::StereoDisplayPlugin() {
|
||||
}
|
||||
|
||||
bool StereoDisplayPlugin::isSupported() const {
|
||||
// FIXME this should attempt to do a scan for supported 3D output
|
||||
return true;
|
||||
}
|
||||
|
||||
// FIXME make this into a setting that can be adjusted
|
||||
const float DEFAULT_IPD = 0.064f;
|
||||
const float HALF_DEFAULT_IPD = DEFAULT_IPD / 2.0f;
|
||||
|
||||
glm::mat4 StereoDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
|
||||
// Refer to http://www.nvidia.com/content/gtc-2010/pdfs/2010_gtc2010.pdf on creating
|
||||
// stereo projection matrices. Do NOT use "toe-in", use translation.
|
||||
|
||||
float nearZ = DEFAULT_NEAR_CLIP; // near clipping plane
|
||||
float screenZ = 0.25f; // screen projection plane
|
||||
// FIXME verify this is the right calculation
|
||||
float frustumshift = HALF_DEFAULT_IPD * nearZ / screenZ;
|
||||
if (eye == Right) {
|
||||
frustumshift = -frustumshift;
|
||||
}
|
||||
return glm::translate(baseProjection, vec3(frustumshift, 0, 0));
|
||||
}
|
||||
|
||||
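To make the frustum shift computed in getProjection above concrete, here is the arithmetic under an assumed near clipping plane of 0.1 m; DEFAULT_NEAR_CLIP is defined elsewhere in the tree, so the exact value is illustrative only:

// Illustrative numbers; only DEFAULT_IPD comes from this file
float nearZ = 0.1f;                               // assumed near clipping plane
float screenZ = 0.25f;                            // screen projection plane, as above
float shift = HALF_DEFAULT_IPD * nearZ / screenZ; // 0.032 * 0.1 / 0.25 = 0.0128 m
// getProjection() translates baseProjection by +shift for the left eye and -shift for the right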
glm::mat4 StereoDisplayPlugin::getModelview(Eye eye, const glm::mat4& baseModelview) const {
|
||||
float modelviewShift = HALF_DEFAULT_IPD;
|
||||
if (eye == Left) {
|
||||
modelviewShift = -modelviewShift;
|
||||
}
|
||||
return baseModelview * glm::translate(mat4(), vec3(modelviewShift, 0, 0));
|
||||
}
|
||||
|
||||
void StereoDisplayPlugin::activate() {
|
||||
WindowOpenGLDisplayPlugin::activate();
|
||||
CONTAINER->setFullscreen(qApp->primaryScreen());
|
||||
// FIXME Add menu items
|
||||
}
|
|
@ -0,0 +1,24 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/29
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
|
||||
#include "../MainWindowOpenGLDisplayPlugin.h"
|
||||
|
||||
class StereoDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
|
||||
Q_OBJECT
|
||||
public:
|
||||
StereoDisplayPlugin();
|
||||
virtual bool isStereo() const override final { return true; }
|
||||
virtual bool isSupported() const override final;
|
||||
|
||||
virtual void activate() override;
|
||||
|
||||
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
|
||||
virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const override;
|
||||
|
||||
};
|
78
libraries/entities-renderer/src/DeferredBufferWrite.slh
Executable file
|
@ -0,0 +1,78 @@
|
|||
<!
|
||||
// DeferredBufferWrite.slh
|
||||
// libraries/render-utils/src
|
||||
//
|
||||
// Created by Sam Gateau on 1/12/15.
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
!>
|
||||
<@if not DEFERRED_BUFFER_WRITE_SLH@>
|
||||
<@def DEFERRED_BUFFER_WRITE_SLH@>
|
||||
|
||||
layout(location = 0) out vec4 _fragColor0;
|
||||
layout(location = 1) out vec4 _fragColor1;
|
||||
layout(location = 2) out vec4 _fragColor2;
|
||||
|
||||
// the glow intensity
|
||||
uniform float glowIntensity;
|
||||
|
||||
// the alpha threshold
|
||||
uniform float alphaThreshold;
|
||||
|
||||
uniform sampler2D normalFittingMap;
|
||||
|
||||
vec3 bestFitNormal(vec3 normal) {
|
||||
vec3 absNorm = abs(normal);
|
||||
float maxNAbs = max(absNorm.z, max(absNorm.x, absNorm.y));
|
||||
|
||||
vec2 texcoord = (absNorm.z < maxNAbs ?
|
||||
(absNorm.y < maxNAbs ? absNorm.yz : absNorm.xz) :
|
||||
absNorm.xy);
|
||||
texcoord = (texcoord.x < texcoord.y ? texcoord.yx : texcoord.xy);
|
||||
texcoord.y /= texcoord.x;
|
||||
vec3 cN = normal / maxNAbs;
|
||||
float fittingScale = texture(normalFittingMap, texcoord).a;
|
||||
cN *= fittingScale;
|
||||
return (cN * 0.5 + 0.5);
|
||||
}
|
||||
|
||||
float evalOpaqueFinalAlpha(float alpha, float mapAlpha) {
|
||||
return mix(alpha * glowIntensity, 1.0 - alpha * glowIntensity, step(mapAlpha, alphaThreshold));
|
||||
}
|
||||
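A C++ mimic of the branchless selection above may make the step/mix combination easier to read; this is only an illustration of the same arithmetic, not part of the shader:

// step(edge, x) is 0 when x < edge and 1 otherwise; mix(a, b, t) is a linear blend
float stepf(float edge, float x)        { return x < edge ? 0.0f : 1.0f; }
float mixf (float a, float b, float t)  { return a * (1.0f - t) + b * t; }

float evalOpaqueFinalAlpha(float alpha, float mapAlpha, float glowIntensity, float alphaThreshold) {
    // picks alpha * glowIntensity when the sampled map alpha is above the threshold,
    // and 1 - alpha * glowIntensity when it is at or below it
    return mixf(alpha * glowIntensity, 1.0f - alpha * glowIntensity, stepf(mapAlpha, alphaThreshold));
}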
|
||||
const vec3 DEFAULT_SPECULAR = vec3(0.1);
|
||||
const float DEFAULT_SHININESS = 10;
|
||||
|
||||
void packDeferredFragment(vec3 normal, float alpha, vec3 diffuse, vec3 specular, float shininess) {
|
||||
if (alpha != glowIntensity) {
|
||||
discard;
|
||||
}
|
||||
_fragColor0 = vec4(diffuse.rgb, alpha);
|
||||
_fragColor1 = vec4(bestFitNormal(normal), 1.0);
|
||||
_fragColor2 = vec4(specular, shininess / 128.0);
|
||||
}
|
||||
|
||||
void packDeferredFragmentLightmap(vec3 normal, float alpha, vec3 diffuse, vec3 specular, float shininess, vec3 emissive) {
|
||||
if (alpha != glowIntensity) {
|
||||
discard;
|
||||
}
|
||||
|
||||
_fragColor0 = vec4(diffuse.rgb, alpha);
|
||||
//_fragColor1 = vec4(normal, 0.0) * 0.5 + vec4(0.5, 0.5, 0.5, 1.0);
|
||||
_fragColor1 = vec4(bestFitNormal(normal), 0.5);
|
||||
_fragColor2 = vec4(emissive, shininess / 128.0);
|
||||
}
|
||||
|
||||
void packDeferredFragmentTranslucent(vec3 normal, float alpha, vec3 diffuse, vec3 specular, float shininess) {
|
||||
if (alpha <= alphaThreshold) {
|
||||
discard;
|
||||
}
|
||||
|
||||
_fragColor0 = vec4(diffuse.rgb, alpha);
|
||||
// _fragColor1 = vec4(normal, 0.0) * 0.5 + vec4(0.5, 0.5, 0.5, 1.0);
|
||||
// _fragColor2 = vec4(specular, shininess / 128.0);
|
||||
}
|
||||
|
||||
<@endif@>
|
|
@ -38,6 +38,7 @@
|
|||
#include "RenderableZoneEntityItem.h"
|
||||
#include "RenderableLineEntityItem.h"
|
||||
#include "RenderablePolyVoxEntityItem.h"
|
||||
#include "RenderablePolyLineEntityItem.h"
|
||||
#include "EntitiesRendererLogging.h"
|
||||
#include "AddressManager.h"
|
||||
#include "EntityRig.h"
|
||||
|
@ -66,6 +67,7 @@ EntityTreeRenderer::EntityTreeRenderer(bool wantScripts, AbstractViewStateInterf
|
|||
REGISTER_ENTITY_TYPE_WITH_FACTORY(Zone, RenderableZoneEntityItem::factory)
|
||||
REGISTER_ENTITY_TYPE_WITH_FACTORY(Line, RenderableLineEntityItem::factory)
|
||||
REGISTER_ENTITY_TYPE_WITH_FACTORY(PolyVox, RenderablePolyVoxEntityItem::factory)
|
||||
REGISTER_ENTITY_TYPE_WITH_FACTORY(PolyLine, RenderablePolyLineEntityItem::factory)
|
||||
|
||||
_currentHoverOverEntityID = UNKNOWN_ENTITY_ID;
|
||||
_currentClickingOnEntityID = UNKNOWN_ENTITY_ID;
|
||||
|
@ -94,7 +96,6 @@ void EntityTreeRenderer::clear() {
|
|||
|
||||
auto scene = _viewState->getMain3DScene();
|
||||
render::PendingChanges pendingChanges;
|
||||
|
||||
foreach(auto entity, _entitiesInScene) {
|
||||
entity->removeFromScene(entity, scene, pendingChanges);
|
||||
}
|
||||
|
|
|
@ -18,6 +18,9 @@ namespace render {
|
|||
if (payload->entity->getType() == EntityTypes::Light) {
|
||||
return ItemKey::Builder::light();
|
||||
}
|
||||
if (payload && payload->entity->getType() == EntityTypes::PolyLine) {
|
||||
return ItemKey::Builder::transparentShape();
|
||||
}
|
||||
}
|
||||
return ItemKey::Builder::opaqueShape();
|
||||
}
|
||||
|
|
|
@ -35,6 +35,9 @@ void RenderableLineEntityItem::updateGeometry() {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
void RenderableLineEntityItem::render(RenderArgs* args) {
|
||||
PerformanceTimer perfTimer("RenderableLineEntityItem::render");
|
||||
Q_ASSERT(getType() == EntityTypes::Line);
|
||||
|
|