Merge branch 'master' of https://github.com/highfidelity/hifi into walk-1.25

DaveDubUK 2015-06-25 12:15:07 +07:00
commit ececf68f06
162 changed files with 4410 additions and 4125 deletions

@ -21,7 +21,11 @@ if (WIN32)
# FIXME need to account for different architectures
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/LibOVR/Include CACHE TYPE INTERNAL)
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/LibOVR/Lib/Windows/Win32/Release/VS2013/LibOVR.lib CACHE TYPE INTERNAL)
if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/LibOVR/Lib/Windows/x64/Release/VS2013/LibOVR.lib CACHE TYPE INTERNAL)
else()
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/LibOVR/Lib/Windows/Win32/Release/VS2013/LibOVR.lib CACHE TYPE INTERNAL)
endif()
elseif(APPLE)

@ -16,11 +16,16 @@ if (WIN32)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/include CACHE PATH "List of glew include directories")
set(_LIB_DIR ${SOURCE_DIR}/lib/Release/Win32)
if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set(_LIB_DIR ${SOURCE_DIR}/lib/Release/x64)
set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${SOURCE_DIR}/bin/Release/x64 CACHE FILEPATH "Location of GLEW DLL")
else()
set(_LIB_DIR ${SOURCE_DIR}/lib/Release/Win32)
set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${SOURCE_DIR}/bin/Release/Win32 CACHE FILEPATH "Location of GLEW DLL")
endif()
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${_LIB_DIR}/glew32.lib CACHE FILEPATH "Location of GLEW release library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG "" CACHE FILEPATH "Location of GLEW debug library")
set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${SOURCE_DIR}/bin/Release/Win32 CACHE FILEPATH "Location of GLEW DLL")
endif ()

@ -4,8 +4,8 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
GIT_REPOSITORY https://github.com/jherico/oglplus.git
GIT_TAG 470d8e56fd6bf3913ceec03d82f42d3bafab2cbe
GIT_REPOSITORY https://github.com/matus-chochlik/oglplus.git
GIT_TAG a2681383928b1166f176512cbe0f95e96fe68d08
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""

@ -73,9 +73,14 @@ if (APPLE)
)
elseif (WIN32)
set(_TBB_LIB_DIR "${SOURCE_DIR}/lib/ia32/vc12")
if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set(_TBB_LIB_DIR "${SOURCE_DIR}/lib/intel64/vc12")
set(${EXTERNAL_NAME_UPPER}_DLL_PATH "${SOURCE_DIR}/bin/intel64/vc12" CACHE PATH "Path to TBB DLLs")
else()
set(_TBB_LIB_DIR "${SOURCE_DIR}/lib/ia32/vc12")
set(${EXTERNAL_NAME_UPPER}_DLL_PATH "${SOURCE_DIR}/bin/ia32/vc12" CACHE PATH "Path to TBB DLLs")
endif()
set(_LIB_EXT "lib")
set(${EXTERNAL_NAME_UPPER}_DLL_PATH "${SOURCE_DIR}/bin/ia32/vc12" CACHE PATH "Path to TBB DLLs")
elseif (ANDROID)
set(_TBB_LIB_DIR "${SOURCE_DIR}/lib")
set(_LIB_PREFIX "lib")

@ -28,7 +28,11 @@ if (APPLE)
elseif (UNIX)
set(ARCH_DIR "UNIX")
elseif (WIN32)
set(ARCH_DIR "Win32")
if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set(ARCH_DIR "x64")
else()
set(ARCH_DIR "Win32")
endif()
endif ()
find_library(FACESHIFT_LIBRARY_RELEASE NAME faceshift PATH_SUFFIXES lib/${ARCH_DIR} HINTS ${FACESHIFT_SEARCH_DIRS})

@ -16,7 +16,16 @@
#
if (WIN32)
if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set(ARCH_DIR "x64")
set(ARCH_NAME "64")
else()
set(ARCH_DIR "Win32")
set(ARCH_NAME "32")
endif()
find_path(NSIGHT_INCLUDE_DIRS
NAMES
nvToolsExt.h
@ -25,17 +34,18 @@ if (WIN32)
PATHS
"C:/Program Files/NVIDIA Corporation/NvToolsExt")
find_library(NSIGHT_LIBRARY_RELEASE nvToolsExt32_1
find_library(NSIGHT_LIBRARY_RELEASE "nvToolsExt${ARCH_NAME}_1"
PATH_SUFFIXES
"lib/Win32" "lib"
"lib/${ARCH_DIR}" "lib"
PATHS
"C:/Program Files/NVIDIA Corporation/NvToolsExt")
find_library(NSIGHT_LIBRARY_DEBUG nvToolsExt32_1
find_library(NSIGHT_LIBRARY_DEBUG "nvToolsExt${ARCH_NAME}_1"
PATH_SUFFIXES
"lib/Win32" "lib"
"lib/${ARCH_DIR}" "lib"
PATHS
"C:/Program Files/NVIDIA Corporation/NvToolsExt")
add_paths_to_fixup_libs("C:/Program Files/NVIDIA Corporation/NvToolsExt/bin/${ARCH_DIR}")
include(SelectLibraryConfigurations)
select_library_configurations(NSIGHT)
endif ()

@ -30,12 +30,20 @@ elseif (UNIX)
find_library(SIXENSE_LIBRARY_RELEASE lib/linux_x64/release/libsixense_x64.so HINTS ${SIXENSE_SEARCH_DIRS})
# find_library(SIXENSE_LIBRARY_DEBUG lib/linux_x64/debug/libsixensed_x64.so HINTS ${SIXENSE_SEARCH_DIRS})
elseif (WIN32)
find_library(SIXENSE_LIBRARY_RELEASE lib/win32/release_dll/sixense.lib HINTS ${SIXENSE_SEARCH_DIRS})
find_library(SIXENSE_LIBRARY_DEBUG lib/win32/debug_dll/sixensed.lib HINTS ${SIXENSE_SEARCH_DIRS})
if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set(ARCH_DIR "x64")
set(ARCH_SUFFIX "_x64")
else()
set(ARCH_DIR "Win32")
set(ARCH_SUFFIX "")
endif()
find_library(SIXENSE_LIBRARY_RELEASE "lib/${ARCH_DIR}/release_dll/sixense${ARCH_SUFFIX}.lib" HINTS ${SIXENSE_SEARCH_DIRS})
find_library(SIXENSE_LIBRARY_DEBUG "lib/${ARCH_DIR}/debug_dll/sixensed.lib" HINTS ${SIXENSE_SEARCH_DIRS})
find_path(SIXENSE_DEBUG_DLL_PATH sixensed.dll PATH_SUFFIXES bin/win32/debug_dll HINTS ${SIXENSE_SEARCH_DIRS})
find_path(SIXENSE_RELEASE_DLL_PATH sixense.dll PATH_SUFFIXES bin/win32/release_dll HINTS ${SIXENSE_SEARCH_DIRS})
find_path(SIXENSE_DEVICE_DLL_PATH DeviceDLL.dll PATH_SUFFIXES samples/win32/sixense_simple3d HINTS ${SIXENSE_SEARCH_DIRS})
find_path(SIXENSE_DEBUG_DLL_PATH "sixensed${ARCH_SUFFIX}.dll" PATH_SUFFIXES bin/${ARCH_DIR}/debug_dll HINTS ${SIXENSE_SEARCH_DIRS})
find_path(SIXENSE_RELEASE_DLL_PATH "sixense${ARCH_SUFFIX}.dll" PATH_SUFFIXES bin/${ARCH_DIR}/release_dll HINTS ${SIXENSE_SEARCH_DIRS})
find_path(SIXENSE_DEVICE_DLL_PATH DeviceDLL.dll PATH_SUFFIXES samples/${ARCH_DIR}/sixense_simple3d HINTS ${SIXENSE_SEARCH_DIRS})
endif ()
include(SelectLibraryConfigurations)

@ -49,7 +49,7 @@ elseif (UNIX AND NOT ANDROID)
endif ()
elseif (WIN32)
if (CMAKE_CL_64)
if(CMAKE_SIZEOF_VOID_P EQUAL 8)
set(_TBB_ARCH_DIR "intel64")
else()
set(_TBB_ARCH_DIR "ia32")

@ -123,7 +123,7 @@ var RainSquall = function (properties) {
function setUp() {
if (debug) {
squallCircle = Overlays.addOverlay("circle3d", {
size: { x: 2 * squallRadius, y: 2 * squallRadius },
size: { x: squallRadius, y: squallRadius },
color: SQUALL_CIRCLE_COLOR,
alpha: SQUALL_CIRCLE_ALPHA,
solid: true,

@ -100,12 +100,12 @@ var NUM_BUTTONS = 3;
var screenSize = Controller.getViewportDimensions();
var startX = screenSize.x / 2 - (NUM_BUTTONS * (BUTTON_SIZE + PADDING)) / 2;
Script.include(["../../libraries/toolBars.js"]);
const persistKey = "highfidelity.gun.toolbar.position";
var toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL);
toolBar.save = function () {
Settings.setValue(persistKey, JSON.stringify([toolBar.x, toolBar.y]));
};
var old = JSON.parse(Settings.getValue(persistKey) || '0');
var toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL, "highfidelity.gun.toolbar", function (screenSize) {
return {
x: startX,
y: (screenSize.y - (BUTTON_SIZE + PADDING)),
};
});
var reticle = Overlays.addOverlay("image", {
x: screenSize.x / 2 - (BUTTON_SIZE / 2),
y: screenSize.y / 2 - (BUTTON_SIZE / 2),
@ -116,8 +116,6 @@ var reticle = Overlays.addOverlay("image", {
});
var offButton = toolBar.addOverlay("image", {
x: old ? old[0] : startX,
y: old ? old[1] : (screenSize.y - (BUTTON_SIZE + PADDING)),
width: BUTTON_SIZE,
height: BUTTON_SIZE,
imageURL: HIFI_PUBLIC_BUCKET + "images/gun/close.svg",
@ -181,7 +179,7 @@ function entityCollisionWithEntity(entity1, entity2, collision) {
}
function shootBullet(position, velocity, grenade) {
var BULLET_SIZE = 0.10;
var BULLET_SIZE = .09;
var BULLET_LIFETIME = 10.0;
var BULLET_GRAVITY = -0.25;
var GRENADE_VELOCITY = 15.0;

@ -18,4 +18,5 @@ Script.load("users.js");
Script.load("grab.js");
Script.load("directory.js");
Script.load("mouseLook.js");
Script.load("hmdControls.js");
Script.load("dialTone.js");

@ -33,15 +33,13 @@ var BUTTON_SIZE = 32;
var PADDING = 3;
Script.include(["libraries/toolBars.js"]);
var toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL);
const persistKey = "highfidelity.dice.toolbar.position";
toolBar.save = function () {
Settings.setValue(persistKey, JSON.stringify([toolBar.x, toolBar.y]));
};
var old = JSON.parse(Settings.getValue(persistKey) || '0');
var toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL, "highfidelity.dice.toolbar", function (screenSize) {
return {
x: (screenSize.x / 2 - BUTTON_SIZE * 2 + PADDING),
y: (screenSize.y - (BUTTON_SIZE + PADDING))
};
});
var offButton = toolBar.addOverlay("image", {
x: old ? old[0] : (screenSize.x / 2 - BUTTON_SIZE * 2 + PADDING),
y: old ? old[1] : (screenSize.y - (BUTTON_SIZE + PADDING)),
width: BUTTON_SIZE,
height: BUTTON_SIZE,
imageURL: HIFI_PUBLIC_BUCKET + "images/close.png",

@ -49,7 +49,6 @@ selectionManager.addEventListener(function() {
lightOverlayManager.updatePositions();
});
var windowDimensions = Controller.getViewportDimensions();
var toolIconUrl = HIFI_PUBLIC_BUCKET + "images/tools/";
var toolHeight = 50;
var toolWidth = 50;
@ -143,7 +142,12 @@ var toolBar = (function () {
browseMarketplaceButton;
function initialize() {
toolBar = new ToolBar(0, 0, ToolBar.VERTICAL);
toolBar = new ToolBar(0, 0, ToolBar.VERTICAL, "highfidelity.edit.toolbar", function (windowDimensions, toolbar) {
return {
x: windowDimensions.x - 8 - toolbar.width,
y: (windowDimensions.y - toolbar.height) / 2
};
});
browseMarketplaceButton = toolBar.addTool({
imageURL: toolIconUrl + "marketplace.svg",
@ -321,38 +325,6 @@ var toolBar = (function () {
}
}
const persistKey = "highfidelity.edit.toolbar.position";
that.move = function () {
var newViewPort,
toolsX,
toolsY;
newViewPort = Controller.getViewportDimensions();
if (toolBar === undefined) {
initialize();
toolBar.save = function () {
Settings.setValue(persistKey, JSON.stringify([toolBar.x, toolBar.y]));
};
var old = JSON.parse(Settings.getValue(persistKey) || '0');
if (old) {
windowDimensions = newViewPort;
toolBar.move(old[0], old[1]);
return;
}
} else if (windowDimensions.x === newViewPort.x &&
windowDimensions.y === newViewPort.y) {
return;
}
windowDimensions = newViewPort;
toolsX = windowDimensions.x - 8 - toolBar.width;
toolsY = (windowDimensions.y - toolBar.height) / 2;
toolBar.move(toolsX, toolsY);
};
var newModelButtonDown = false;
var browseMarketplaceButtonDown = false;
that.mousePressEvent = function (event) {
@ -552,6 +524,7 @@ var toolBar = (function () {
toolBar.cleanup();
};
initialize();
return that;
}());
@ -931,7 +904,6 @@ var lastPosition = null;
// Do some stuff regularly, like check for placement of various overlays
Script.update.connect(function (deltaTime) {
toolBar.move();
progressDialog.move();
selectionDisplay.checkMove();
var dOrientation = Math.abs(Quat.dot(Camera.orientation, lastOrientation) - 1);

examples/hmdControls.js (new file, 285 lines)

@ -0,0 +1,285 @@
//
// hmdControls.js
// examples
//
// Created by Sam Gondelman on 6/17/15
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var MOVE_DISTANCE = 10.0;
var PITCH_INCREMENT = 0.5; // degrees
var pitchChange = 0; // degrees
var YAW_INCREMENT = 0.5; // degrees
var VR_YAW_INCREMENT = 15.0; // degrees
var yawChange = 0;
var BOOM_SPEED = 0.5;
var THRESHOLD = 0.2;
var CAMERA_UPDATE_TIME = 0.5;
var yawTimer = CAMERA_UPDATE_TIME;
var shifted = false;
var SHIFT_UPDATE_TIME = 0.5;
var shiftTimer = SHIFT_UPDATE_TIME;
var SHIFT_MAG = 4.0;
var warpActive = false;
var WARP_UPDATE_TIME = .5;
var warpTimer = WARP_UPDATE_TIME;
var warpPosition = { x: 0, y: 0, z: 0 };
var WARP_SPHERE_SIZE = 1;
var warpSphere = Overlays.addOverlay("sphere", {
position: { x: 0, y: 0, z: 0 },
size: WARP_SPHERE_SIZE,
color: { red: 0, green: 255, blue: 0 },
alpha: 1.0,
solid: true,
visible: false,
});
var WARP_LINE_HEIGHT = 10;
var warpLine = Overlays.addOverlay("line3d", {
start: { x: 0, y: 0, z:0 },
end: { x: 0, y: 0, z: 0 },
color: { red: 0, green: 255, blue: 255},
alpha: 1,
lineWidth: 5,
visible: false,
});
var velocity = { x: 0, y: 0, z: 0 };
var VERY_LONG_TIME = 1000000.0;
var active = Menu.isOptionChecked("Enable VR Mode");
var prevVRMode = Menu.isOptionChecked("Enable VR Mode");
var hmdControls = (function () {
function onKeyPressEvent(event) {
if (event.text == 'g' && event.isMeta) {
active = !active;
}
}
function findAction(name) {
var actions = Controller.getAllActions();
for (var i = 0; i < actions.length; i++) {
if (actions[i].actionName == name) {
return i;
}
}
// If the action isn't found, it will default to the first available action
return 0;
}
function onActionEvent(action, state) {
if (!active) {
return;
}
if (state < THRESHOLD) {
if (action == findAction("YAW_LEFT") || action == findAction("YAW_RIGHT")) {
yawTimer = CAMERA_UPDATE_TIME;
} else if (action == findAction("PITCH_UP") || action == findAction("PITCH_DOWN")) {
pitchTimer = CAMERA_UPDATE_TIME;
}
return;
}
switch (action) {
case findAction("LONGITUDINAL_BACKWARD"):
var direction = {x: 0.0, y: 0.0, z:1.0};
direction = Vec3.multiply(Vec3.normalize(direction), shifted ? SHIFT_MAG * MOVE_DISTANCE : MOVE_DISTANCE);
velocity = Vec3.sum(velocity, direction);
break;
case findAction("LONGITUDINAL_FORWARD"):
var direction = {x: 0.0, y: 0.0, z:-1.0};
direction = Vec3.multiply(Vec3.normalize(direction), shifted ? SHIFT_MAG * MOVE_DISTANCE : MOVE_DISTANCE);
velocity = Vec3.sum(velocity, direction);
break;
case findAction("LATERAL_LEFT"):
var direction = {x:-1.0, y: 0.0, z: 0.0}
direction = Vec3.multiply(Vec3.normalize(direction), shifted ? SHIFT_MAG * MOVE_DISTANCE : MOVE_DISTANCE);
velocity = Vec3.sum(velocity, direction);
break;
case findAction("LATERAL_RIGHT"):
var direction = {x:1.0, y: 0.0, z: 0.0};
direction = Vec3.multiply(Vec3.normalize(direction), shifted ? SHIFT_MAG * MOVE_DISTANCE : MOVE_DISTANCE);
velocity = Vec3.sum(velocity, direction);
break;
case findAction("VERTICAL_DOWN"):
var direction = {x: 0.0, y: -1.0, z: 0.0};
direction = Vec3.multiply(Vec3.normalize(direction), shifted ? SHIFT_MAG * MOVE_DISTANCE : MOVE_DISTANCE);
velocity = Vec3.sum(velocity, direction);
break;
case findAction("VERTICAL_UP"):
var direction = {x: 0.0, y: 1.0, z: 0.0};
direction = Vec3.multiply(Vec3.normalize(direction), shifted ? SHIFT_MAG * MOVE_DISTANCE : MOVE_DISTANCE);
velocity = Vec3.sum(velocity, direction);
break;
case findAction("YAW_LEFT"):
if (yawTimer < 0.0 && Menu.isOptionChecked("Enable VR Mode")) {
yawChange = yawChange + (shifted ? SHIFT_MAG * VR_YAW_INCREMENT : VR_YAW_INCREMENT);
yawTimer = CAMERA_UPDATE_TIME;
} else if (!Menu.isOptionChecked("Enable VR Mode")) {
yawChange = yawChange + (shifted ? SHIFT_MAG * YAW_INCREMENT : YAW_INCREMENT);
}
break;
case findAction("YAW_RIGHT"):
if (yawTimer < 0.0 && Menu.isOptionChecked("Enable VR Mode")) {
yawChange = yawChange - (shifted ? SHIFT_MAG * VR_YAW_INCREMENT : VR_YAW_INCREMENT);
yawTimer = CAMERA_UPDATE_TIME;
} else if (!Menu.isOptionChecked("Enable VR Mode")) {
yawChange = yawChange - (shifted ? SHIFT_MAG * YAW_INCREMENT : YAW_INCREMENT);
}
break;
case findAction("PITCH_DOWN"):
if (!Menu.isOptionChecked("Enable VR Mode")) {
pitchChange = pitchChange - (shifted ? SHIFT_MAG * PITCH_INCREMENT : PITCH_INCREMENT);
}
break;
case findAction("PITCH_UP"):
if (!Menu.isOptionChecked("Enable VR Mode")) {
pitchChange = pitchChange + (shifted ? SHIFT_MAG * PITCH_INCREMENT : PITCH_INCREMENT);
}
break;
case findAction("SHIFT"): // speed up
if (shiftTimer < 0.0) {
shifted = !shifted;
shiftTimer = SHIFT_UPDATE_TIME;
}
break;
case findAction("ACTION1"): // start/end warp
if (warpTimer < 0.0) {
warpActive = !warpActive;
if (!warpActive) {
finishWarp();
}
warpTimer = WARP_UPDATE_TIME;
}
break;
case findAction("ACTION2"): // cancel warp
warpActive = false;
Overlays.editOverlay(warpSphere, {
visible: false,
});
Overlays.editOverlay(warpLine, {
visible: false,
});
default:
break;
}
}
function update(dt) {
if (prevVRMode != Menu.isOptionChecked("Enable VR Mode")) {
active = Menu.isOptionChecked("Enable VR Mode");
prevVRMode = Menu.isOptionChecked("Enable VR Mode");
}
if (yawTimer >= 0.0) {
yawTimer = yawTimer - dt;
}
if (shiftTimer >= 0.0) {
shiftTimer = shiftTimer - dt;
}
if (warpTimer >= 0.0) {
warpTimer = warpTimer - dt;
}
if (warpActive) {
updateWarp();
}
if (active) {
Controller.captureActionEvents();
MyAvatar.bodyYaw = MyAvatar.bodyYaw + yawChange;
MyAvatar.headPitch = Math.max(-180, Math.min(180, MyAvatar.headPitch + pitchChange));
yawChange = 0;
pitchChange = 0;
MyAvatar.motorVelocity = velocity;
MyAvatar.motorTimescale = 0.0;
velocity = { x: 0, y: 0, z: 0 };
} else {
Controller.releaseActionEvents();
yawChange = 0;
pitchChange = 0;
MyAvatar.motorVelocity = {x:0.0, y:0.0, z:0.0}
MyAvatar.motorTimescale = VERY_LONG_TIME;
}
}
function updateWarp() {
var look = Quat.getFront(Camera.getOrientation());
var pitch = Math.asin(look.y);
// Get relative to looking straight down
pitch += Math.PI / 2;
// Scale up
pitch *= 2;
var distance = pitch * pitch * pitch;
var warpDirection = Vec3.normalize({ x: look.x, y: 0, z: look.z });
warpPosition = Vec3.multiply(warpDirection, distance);
warpPosition = Vec3.sum(MyAvatar.position, warpPosition);
// Commented out until ray picking can be fixed
// var pickRay = {
// origin: Vec3.sum(warpPosition, WARP_PICK_OFFSET),
// direction: { x: 0, y: -1, z: 0 }
// };
// var intersection = Entities.findRayIntersection(pickRay);
// if (intersection.intersects && intersection.distance < WARP_PICK_MAX_DISTANCE) {
// // Warp 1 meter above the object - this is an approximation
// // TODO Get the actual offset to the Avatar's feet and plant them to
// // the object.
// warpPosition = Vec3.sum(intersection.intersection, { x: 0, y: 1, z:0 });
// }
// Adjust overlays to match warp position
Overlays.editOverlay(warpSphere, {
position: warpPosition,
visible: true,
});
Overlays.editOverlay(warpLine, {
start: warpPosition,
end: Vec3.sum(warpPosition, { x: 0, y: WARP_LINE_HEIGHT, z: 0 }),
visible: true,
});
}
function finishWarp() {
Overlays.editOverlay(warpSphere, {
visible: false,
});
Overlays.editOverlay(warpLine, {
visible: false,
});
MyAvatar.position = warpPosition;
}
function setUp() {
Controller.keyPressEvent.connect(onKeyPressEvent);
Controller.actionEvent.connect(onActionEvent);
Script.update.connect(update);
}
function tearDown() {
Controller.releaseActionEvents();
MyAvatar.motorVelocity = {x:0.0, y:0.0, z:0.0}
MyAvatar.motorTimescale = VERY_LONG_TIME;
}
setUp();
Script.scriptEnding.connect(tearDown);
}());

@ -13,4 +13,5 @@ Script.load("progress.js");
Script.load("lobby.js");
Script.load("notifications.js");
Script.load("controllers/oculus/goTo.js");
Script.load("hmdControls.js");
//Script.load("scripts.js"); // Not created yet

@ -932,6 +932,7 @@
</script>
</head>
<body class="properties" onload='loaded();'>
<div id="properties-list">
<div id="type" class="property">
@ -939,9 +940,12 @@
<label>Type: </label><span id="property-type"></span>
</div>
</div>
<div class="property">
<div id="id" class="property">
<span class="label" style="float: left; margin-right: 6px">
<label>ID: </label>
</span>
<div class="value">
<label id="property-id" class="selectable"></label>
<span id="property-id" class="selectable"></span>
</div>
</div>
<div class="property">
@ -951,17 +955,6 @@
</div>
</div>
<div class="property">
<div class="label">Hyperlink</div>
<div class="input-area">Href<br></div>
<div class="value">
<input id="property-hyperlink-href" class="url"></input>
</div>
<div class="input-area">Description<br></div> <div class="value">
<input id="property-hyperlink-description" class="url"></input>
</div>
</div>
<div class="property">
<span class="label">Locked</span>
<span class="value">
@ -976,6 +969,36 @@
</span>
</div>
<div class="property">
<div class="label">User Data</div>
<div class="value">
<textarea id="property-user-data"></textarea>
</div>
</div>
<div class="sub-section-header">
<label>Hyperlink</label>
</div>
<div class="property">
<div class="label">Href</div>
<div class="value">
<input id="property-hyperlink-href" class="url"></input>
</div>
</div>
<div class="property">
<div class="label">Description</div>
<div class="value">
<input id="property-hyperlink-description" class="url"></input>
</div>
</div>
<div class="section-header">
<label>Spatial Properties</label>
</div>
<div class="property">
<div class="label">Position</div>
<div class="value">
@ -1043,6 +1066,11 @@
</div>
</div>
<div class="section-header">
<label>Physical Properties</label>
</div>
<div class="property">
<div class="label">Linear Velocity</div>
<div class="value">
@ -1109,6 +1137,21 @@
</div>
</div>
<div id="color-section" class="property">
<div class="label">Color</div>
<div class="value">
<div id="property-color" class='color-picker'></div>
<div class="input-area">R <input class="coord" type='number' id="property-color-red"></input></div>
<div class="input-area">G <input class="coord" type='number' id="property-color-green"></input></div>
<div class="input-area">B <input class="coord" type='number' id="property-color-blue"></input></div>
</div>
</div>
<div class="section-header">
<label>Behavior</label>
</div>
<div class="property">
<span class="label">Ignore For Collisions</span>
<span class="value">
@ -1144,74 +1187,9 @@
</div>
</div>
<div class="property">
<div class="label">User Data</div>
<div class="value">
<textarea id="property-user-data"></textarea>
</div>
</div>
<div id="color-section" class="property">
<div class="label">Color</div>
<div class="value">
<div id="property-color" class='color-picker'></div>
<div class="input-area">R <input class="coord" type='number' id="property-color-red"></input></div>
<div class="input-area">G <input class="coord" type='number' id="property-color-green"></input></div>
<div class="input-area">B <input class="coord" type='number' id="property-color-blue"></input></div>
</div>
</div>
<div class="web-section property">
<div class="label">Source URL</div>
<div class="value">
<input type="text" id="property-web-source-url" class="url"></input>
</div>
</div>
<div class="particle-section property">
<div class="label">Max Particles</div>
<div class="value">
<input type='number' id="property-particle-maxparticles" min="0" max="2048" step="1"></input>
</div>
</div>
<div class="particle-section property">
<div class="label">Particle Life Span</div>
<div class="value">
<input type='number' id="property-particle-lifespan" min="0" step="0.1"></input>
</div>
</div>
<div class="particle-section property">
<div class="label">Particle Emission Rate</div>
<div class="value">
<input type='number' id="property-particle-emit-rate" min="0" step="0.5"></input>
</div>
</div>
<div class="particle-section property">
<div class="label">Particle Emission Direction</div>
<div class="value">
<div class="input-area">X <input class="coord" type='number' id="property-particle-emit-direction-x"></input></div>
<div class="input-area">Y <input class="coord" type='number' id="property-particle-emit-direction-y"></input></div>
<div class="input-area">Z <input class="coord" type='number' id="property-particle-emit-direction-z"></input></div>
</div>
</div>
<div class="particle-section property">
<div class="label">Particle Emission Strength</div>
<div class="value">
<input type='number' id="property-particle-emit-strength" min="0" step="0.1"></input>
</div>
</div>
<div class="particle-section property">
<div class="label">Particle Local Gravity</div>
<div class="value">
<input class="coord" type='number' id="property-particle-localgravity" step="0.05"></input>
</div>
</div>
<div class="particle-section property">
<div class="label">Particle Radius</div>
<div class="value">
<input class="coord" type='number' id="property-particle-radius" min="0" step="0.005"></input>
</div>
<div class="section-header model-section zone-section">
<label>Model</label>
</div>
<div class="model-section property">
@ -1220,6 +1198,7 @@
<input type="text" id="property-model-url" class="url"></input>
</div>
</div>
<div class="model-section zone-section property">
<div class="label">Shape Type</div>
<div class="value">
@ -1280,8 +1259,75 @@
</div>
</div>
<div class="section-header web-section">
<label>Web</label>
</div>
<div class="web-section property">
<div class="label">Source URL</div>
<div class="value">
<input type="text" id="property-web-source-url" class="url"></input>
</div>
</div>
<div class="section-header particle-section">
<label>Particle</label>
</div>
<div class="particle-section property">
<div class="label">Max Particles</div>
<div class="value">
<input type='number' id="property-particle-maxparticles" min="0" max="2048" step="1"></input>
</div>
</div>
<div class="particle-section property">
<div class="label">Particle Life Span</div>
<div class="value">
<input type='number' id="property-particle-lifespan" min="0" step="0.1"></input>
</div>
</div>
<div class="particle-section property">
<div class="label">Particle Emission Rate</div>
<div class="value">
<input type='number' id="property-particle-emit-rate" min="0" step="0.5"></input>
</div>
</div>
<div class="particle-section property">
<div class="label">Particle Emission Direction</div>
<div class="value">
<div class="input-area">X <input class="coord" type='number' id="property-particle-emit-direction-x"></input></div>
<div class="input-area">Y <input class="coord" type='number' id="property-particle-emit-direction-y"></input></div>
<div class="input-area">Z <input class="coord" type='number' id="property-particle-emit-direction-z"></input></div>
</div>
</div>
<div class="particle-section property">
<div class="label">Particle Emission Strength</div>
<div class="value">
<input type='number' id="property-particle-emit-strength" min="0" step="0.1"></input>
</div>
</div>
<div class="particle-section property">
<div class="label">Particle Local Gravity</div>
<div class="value">
<input class="coord" type='number' id="property-particle-localgravity" step="0.05"></input>
</div>
</div>
<div class="particle-section property">
<div class="label">Particle Radius</div>
<div class="value">
<input class="coord" type='number' id="property-particle-radius" min="0" step="0.005"></input>
</div>
</div>
<div class="section-header text-section">
<label>Text</label>
</div>
<div class="text-section property">
<div class="label">Text</div>
<div class="label">Text Content</div>
<div class="value">
<input type="text" id="property-text-text"></input>
</div>
@ -1311,6 +1357,11 @@
</div>
</div>
<div class="section-header light-section">
<label>Light</label>
</div>
<div class="light-section property">
<span class="label">Spot Light</span>
<span class="value">
@ -1345,6 +1396,11 @@
</div>
</div>
<div class="section-header zone-section">
<label>Zone</label>
</div>
<div class="zone-section property">
<span class="label">Stage Sun Model Enabled</span>
<span class="value">
@ -1431,6 +1487,12 @@
</select>
</div>
</div>
<div class="sub-section-header zone-section skybox-section">
<label>Skybox</label>
</div>
<div class="zone-section skybox-section property">
<div class="label">Skybox Color</div>
<div class="value">
@ -1446,6 +1508,12 @@
<input type="text" id="property-zone-skybox-url" class="url"></input>
</div>
</div>
<div class="sub-section-header zone-section atmosphere-section">
<label>Atmosphere</label>
</div>
<div class="zone-section atmosphere-section property">
<div class="label">Atmosphere Center</div>
<div class="value">
@ -1495,8 +1563,6 @@
<input type='checkbox' id="property-zone-atmosphere-has-stars">
</span>
</div>
</div>
</body>
</html>

@ -8,7 +8,7 @@ body {
background-color: rgb(76, 76, 76);
color: rgb(204, 204, 204);
font-family: Arial;
font-size: 8.25pt;
font-size: 9.0pt;
-webkit-touch-callout: none;
-webkit-user-select: none;
@ -44,20 +44,31 @@ body {
border: 1.5pt solid black;
}
.section-header {
.section-header, .sub-section-header {
background: #AAA;
border-bottom: 0.75pt solid #CCC;
background-color: #333333;
color: #999;
padding: 3pt;
padding-top: 6pt;
}
.section-header label {
.section-header label, .sub-section-header label {
font-weight: bold;
font-size: 11pt;
}
.sub-section-header {
padding-top: 4px;
}
.sub-section-header label {
font-size: 9pt;
}
.multi-property-section {
}
.property-section {
display: block;
margin: 10 10;
@ -132,7 +143,7 @@ input.no-spin::-webkit-inner-spin-button {
table#entity-table {
border-collapse: collapse;
font-family: Sans-Serif;
font-size: 7.5pt;
font-size: 9pt;
width: 100%;
}
@ -156,7 +167,7 @@ table#entity-table {
}
#entity-table td {
font-size: 8.25pt;
font-size: 9.0pt;
border: 0pt black solid;
word-wrap: nowrap;
white-space: nowrap;
@ -176,29 +187,29 @@ th#entity-type {
div.input-area {
display: inline-block;
font-size: 7.5pt;
font-size: 9pt;
}
input {
}
#type {
font-size: 10.5pt;
#type, #id {
font-size: 9.0pt;
}
#type label {
#type label, #id label {
color: rgb(150, 150, 150);
}
input, textarea {
background-color: rgb(102, 102, 102);
color: rgb(204, 204, 204);
background-color: rgb(63, 63, 63);
color: rgb(255, 255, 255);
border: none;
font-size: 7.5pt;
font-size: 9pt;
}
input:disabled, textarea:disabled {
background-color: rgb(102, 102, 102);
background-color: rgb(63, 63, 63);
color: rgb(160, 160, 160);
}
@ -224,7 +235,7 @@ input:disabled, textarea:disabled {
#properties-list .property {
padding: 4pt;
border-bottom: 0.75pt solid rgb(63, 63, 63);
min-height: 1em;
min-height: 12pt;
}
@ -245,7 +256,6 @@ table#properties-list {
font-weight: bold;
overflow: hidden;
text-overflow: ellipsis;
vertical-align: middle;
height: 1.2em;
}
@ -270,9 +280,7 @@ div.inner {
}
td {
vertical-align: top;
vertical-align: top;
}
#no-entities {

@ -392,7 +392,8 @@ SelectionDisplay = (function () {
var grabberSpotLightCircle = Overlays.addOverlay("circle3d", {
color: lightOverlayColor,
isSolid: false
isSolid: false,
visible: false
});
var grabberSpotLightLineT = Overlays.addOverlay("line3d", spotLightLineProperties);
var grabberSpotLightLineB = Overlays.addOverlay("line3d", spotLightLineProperties);
@ -409,17 +410,20 @@ SelectionDisplay = (function () {
var grabberPointLightCircleX = Overlays.addOverlay("circle3d", {
rotation: Quat.fromPitchYawRollDegrees(0, 90, 0),
color: lightOverlayColor,
isSolid: false
isSolid: false,
visible: false
});
var grabberPointLightCircleY = Overlays.addOverlay("circle3d", {
rotation: Quat.fromPitchYawRollDegrees(90, 0, 0),
color: lightOverlayColor,
isSolid: false
isSolid: false,
visible: false
});
var grabberPointLightCircleZ = Overlays.addOverlay("circle3d", {
rotation: Quat.fromPitchYawRollDegrees(0, 0, 0),
color: lightOverlayColor,
isSolid: false
isSolid: false,
visible: false
});
var grabberPointLightT = Overlays.addOverlay("cube", grabberPropertiesEdge);
var grabberPointLightB = Overlays.addOverlay("cube", grabberPropertiesEdge);
@ -546,7 +550,7 @@ SelectionDisplay = (function () {
var rotateOverlayTarget = Overlays.addOverlay("circle3d", {
position: { x:0, y: 0, z: 0},
size: rotateOverlayTargetSize * 2,
size: rotateOverlayTargetSize,
color: { red: 0, green: 0, blue: 0 },
alpha: 0.0,
solid: true,
@ -1186,7 +1190,7 @@ SelectionDisplay = (function () {
});
Overlays.editOverlay(grabberSpotLightCircle, {
position: NEAR,
dimensions: { x: distance * 2, y: distance * 2, z: 1 },
dimensions: { x: distance, y: distance, z: 1 },
lineWidth: 1.5,
rotation: rotation,
visible: true,
@ -1258,19 +1262,19 @@ SelectionDisplay = (function () {
Overlays.editOverlay(grabberPointLightCircleX, {
position: position,
rotation: Quat.multiply(rotation, Quat.fromPitchYawRollDegrees(0, 90, 0)),
dimensions: { x: properties.dimensions.z, y: properties.dimensions.z, z: 1 },
dimensions: { x: properties.dimensions.z / 2.0, y: properties.dimensions.z / 2.0, z: 1 },
visible: true,
});
Overlays.editOverlay(grabberPointLightCircleY, {
position: position,
rotation: Quat.multiply(rotation, Quat.fromPitchYawRollDegrees(90, 0, 0)),
dimensions: { x: properties.dimensions.z, y: properties.dimensions.z, z: 1 },
dimensions: { x: properties.dimensions.z / 2.0, y: properties.dimensions.z / 2.0, z: 1 },
visible: true,
});
Overlays.editOverlay(grabberPointLightCircleZ, {
position: position,
rotation: rotation,
dimensions: { x: properties.dimensions.z, y: properties.dimensions.z, z: 1 },
dimensions: { x: properties.dimensions.z / 2.0, y: properties.dimensions.z / 2.0, z: 1 },
visible: true,
});
@ -1966,7 +1970,7 @@ SelectionDisplay = (function () {
Overlays.editOverlay(rotateOverlayInner,
{
visible: true,
size: innerRadius * 2,
size: innerRadius,
innerRadius: 0.9,
startAt: 0,
endAt: 360,
@ -1976,7 +1980,7 @@ SelectionDisplay = (function () {
Overlays.editOverlay(rotateOverlayOuter,
{
visible: true,
size: outerRadius * 2,
size: outerRadius,
innerRadius: 0.9,
startAt: 0,
endAt: 360,
@ -1986,7 +1990,7 @@ SelectionDisplay = (function () {
Overlays.editOverlay(rotateOverlayCurrent,
{
visible: true,
size: outerRadius * 2,
size: outerRadius,
startAt: 0,
endAt: 0,
innerRadius: 0.9,
@ -2064,13 +2068,13 @@ SelectionDisplay = (function () {
if (snapToInner) {
Overlays.editOverlay(rotateOverlayOuter, { startAt: 0, endAt: 360 });
Overlays.editOverlay(rotateOverlayInner, { startAt: startAtRemainder, endAt: endAtRemainder });
Overlays.editOverlay(rotateOverlayCurrent, { startAt: startAtCurrent, endAt: endAtCurrent, size: innerRadius * 2,
Overlays.editOverlay(rotateOverlayCurrent, { startAt: startAtCurrent, endAt: endAtCurrent, size: innerRadius,
majorTickMarksAngle: innerSnapAngle, minorTickMarksAngle: 0,
majorTickMarksLength: -0.25, minorTickMarksLength: 0, });
} else {
Overlays.editOverlay(rotateOverlayInner, { startAt: 0, endAt: 360 });
Overlays.editOverlay(rotateOverlayOuter, { startAt: startAtRemainder, endAt: endAtRemainder });
Overlays.editOverlay(rotateOverlayCurrent, { startAt: startAtCurrent, endAt: endAtCurrent, size: outerRadius * 2,
Overlays.editOverlay(rotateOverlayCurrent, { startAt: startAtCurrent, endAt: endAtCurrent, size: outerRadius,
majorTickMarksAngle: 45.0, minorTickMarksAngle: 5,
majorTickMarksLength: 0.25, minorTickMarksLength: 0.1, });
}
@ -2095,7 +2099,7 @@ SelectionDisplay = (function () {
Overlays.editOverlay(rotateOverlayInner,
{
visible: true,
size: innerRadius * 2,
size: innerRadius,
innerRadius: 0.9,
startAt: 0,
endAt: 360,
@ -2105,7 +2109,7 @@ SelectionDisplay = (function () {
Overlays.editOverlay(rotateOverlayOuter,
{
visible: true,
size: outerRadius * 2,
size: outerRadius,
innerRadius: 0.9,
startAt: 0,
endAt: 360,
@ -2115,7 +2119,7 @@ SelectionDisplay = (function () {
Overlays.editOverlay(rotateOverlayCurrent,
{
visible: true,
size: outerRadius * 2,
size: outerRadius,
startAt: 0,
endAt: 0,
innerRadius: 0.9,
@ -2186,13 +2190,13 @@ SelectionDisplay = (function () {
if (snapToInner) {
Overlays.editOverlay(rotateOverlayOuter, { startAt: 0, endAt: 360 });
Overlays.editOverlay(rotateOverlayInner, { startAt: startAtRemainder, endAt: endAtRemainder });
Overlays.editOverlay(rotateOverlayCurrent, { startAt: startAtCurrent, endAt: endAtCurrent, size: innerRadius * 2,
Overlays.editOverlay(rotateOverlayCurrent, { startAt: startAtCurrent, endAt: endAtCurrent, size: innerRadius,
majorTickMarksAngle: innerSnapAngle, minorTickMarksAngle: 0,
majorTickMarksLength: -0.25, minorTickMarksLength: 0, });
} else {
Overlays.editOverlay(rotateOverlayInner, { startAt: 0, endAt: 360 });
Overlays.editOverlay(rotateOverlayOuter, { startAt: startAtRemainder, endAt: endAtRemainder });
Overlays.editOverlay(rotateOverlayCurrent, { startAt: startAtCurrent, endAt: endAtCurrent, size: outerRadius * 2,
Overlays.editOverlay(rotateOverlayCurrent, { startAt: startAtCurrent, endAt: endAtCurrent, size: outerRadius,
majorTickMarksAngle: 45.0, minorTickMarksAngle: 5,
majorTickMarksLength: 0.25, minorTickMarksLength: 0.1, });
}
@ -2216,7 +2220,7 @@ SelectionDisplay = (function () {
Overlays.editOverlay(rotateOverlayInner,
{
visible: true,
size: innerRadius * 2,
size: innerRadius,
innerRadius: 0.9,
startAt: 0,
endAt: 360,
@ -2226,7 +2230,7 @@ SelectionDisplay = (function () {
Overlays.editOverlay(rotateOverlayOuter,
{
visible: true,
size: outerRadius * 2,
size: outerRadius,
innerRadius: 0.9,
startAt: 0,
endAt: 360,
@ -2236,7 +2240,7 @@ SelectionDisplay = (function () {
Overlays.editOverlay(rotateOverlayCurrent,
{
visible: true,
size: outerRadius * 2,
size: outerRadius,
startAt: 0,
endAt: 0,
innerRadius: 0.9,
@ -2306,13 +2310,13 @@ SelectionDisplay = (function () {
if (snapToInner) {
Overlays.editOverlay(rotateOverlayOuter, { startAt: 0, endAt: 360 });
Overlays.editOverlay(rotateOverlayInner, { startAt: startAtRemainder, endAt: endAtRemainder });
Overlays.editOverlay(rotateOverlayCurrent, { startAt: startAtCurrent, endAt: endAtCurrent, size: innerRadius * 2,
Overlays.editOverlay(rotateOverlayCurrent, { startAt: startAtCurrent, endAt: endAtCurrent, size: innerRadius,
majorTickMarksAngle: innerSnapAngle, minorTickMarksAngle: 0,
majorTickMarksLength: -0.25, minorTickMarksLength: 0, });
} else {
Overlays.editOverlay(rotateOverlayInner, { startAt: 0, endAt: 360 });
Overlays.editOverlay(rotateOverlayOuter, { startAt: startAtRemainder, endAt: endAtRemainder });
Overlays.editOverlay(rotateOverlayCurrent, { startAt: startAtCurrent, endAt: endAtCurrent, size: outerRadius * 2,
Overlays.editOverlay(rotateOverlayCurrent, { startAt: startAtCurrent, endAt: endAtCurrent, size: outerRadius,
majorTickMarksAngle: 45.0, minorTickMarksAngle: 5,
majorTickMarksLength: 0.25, minorTickMarksLength: 0.1, });
}

@ -126,12 +126,12 @@ Tool.prototype = new Overlay2D;
Tool.IMAGE_HEIGHT = 50;
Tool.IMAGE_WIDTH = 50;
ToolBar = function(x, y, direction) {
ToolBar = function(x, y, direction, optionalPersistenceKey, optionalInitialPositionFunction) {
this.tools = new Array();
this.x = x;
this.y = y;
this.width = 0;
this.height = 0;
this.height = ToolBar.TITLE_BAR_HEIGHT;
this.back = this.back = Overlays.addOverlay("text", {
backgroundColor: { red: 255, green: 255, blue: 255 },
x: this.x,
@ -336,7 +336,7 @@ ToolBar = function(x, y, direction) {
return (that.x <= x) && (x <= (that.x + that.width)) &&
(that.y <= y) && (y <= (that.y + that.height));
}
that.hover = function (enable) {
that.hover = function (enable) { // Can be overridden or extended by clients.
that.isHovering = enable;
if (that.back) {
if (enable) {
@ -348,16 +348,36 @@ ToolBar = function(x, y, direction) {
});
}
};
that.windowDimensions = Controller.getViewportDimensions();
// Maybe fixme: Keeping the same percent of the window size isn't always the right thing.
// For example, maybe we want "keep the same percentage to whatever two edges are closest to the edge of screen".
// If we change that, the places to do so are onResizeViewport, save (maybe), and the initial move based on Settings, below.
that.onResizeViewport = function (newSize) { // Can be overridden or extended by clients.
var fractionX = that.x / that.windowDimensions.x;
var fractionY = that.y / that.windowDimensions.y;
that.windowDimensions = newSize || Controller.getViewportDimensions();
that.move(fractionX * that.windowDimensions.x, fractionY * that.windowDimensions.y);
};
if (optionalPersistenceKey) {
this.fractionKey = optionalPersistenceKey + '.fraction';
this.save = function () {
var screenSize = Controller.getViewportDimensions();
var fraction = {x: that.x / screenSize.x, y: that.y / screenSize.y};
Settings.setValue(this.fractionKey, JSON.stringify(fraction));
}
} else {
this.save = function () { }; // Called on move. Can be overridden or extended by clients.
}
// These are currently only doing that which is necessary for toolbar hover and toolbar drag.
// They have not yet been extended to tool hover/click/release, etc.
this.mousePressEvent = function (event) {
if (!that.contains(event)) {
if (Overlays.getOverlayAtPoint({ x: event.x, y: event.y }) == that.back) {
that.mightBeDragging = true;
that.dragOffsetX = that.x - event.x;
that.dragOffsetY = that.y - event.y;
} else {
that.mightBeDragging = false;
return;
}
that.mightBeDragging = true;
that.dragOffsetX = that.x - event.x;
that.dragOffsetY = that.y - event.y;
};
this.mouseMove = function (event) {
if (!that.mightBeDragging || !event.isLeftButton) {
@ -375,15 +395,19 @@ ToolBar = function(x, y, direction) {
}
that.move(that.dragOffsetX + event.x, that.dragOffsetY + event.y);
};
that.checkResize = function () { // Can be overridden or extended, but usually not. See onResizeViewport.
var currentWindowSize = Controller.getViewportDimensions();
if ((currentWindowSize.x !== that.windowDimensions.x) || (currentWindowSize.y !== that.windowDimensions.y)) {
that.onResizeViewport(currentWindowSize);
}
};
Controller.mousePressEvent.connect(this.mousePressEvent);
Controller.mouseMoveEvent.connect(this.mouseMove);
// Called on move. A different approach would be to have all this on the prototype,
// and let apps extend where needed. Ex. app defines its toolbar.move() to call this.__proto__.move and then save.
this.save = function () { };
Script.update.connect(that.checkResize);
// This compatibility hack breaks the model, but makes converting existing scripts easier:
this.addOverlay = function (ignored, oldSchoolProperties) {
var properties = JSON.parse(JSON.stringify(oldSchoolProperties)); // a copy
if (that.numberOfTools() === 0) {
if ((that.numberOfTools() === 0) && (properties.x != undefined) && (properties.y != undefined)) {
that.move(properties.x, properties.y);
}
delete properties.x;
@ -392,7 +416,25 @@ ToolBar = function(x, y, direction) {
var id = that.tools[index].overlay();
return id;
}
if (this.fractionKey || optionalInitialPositionFunction) {
var savedFraction = JSON.parse(Settings.getValue(this.fractionKey) || '0'); // getValue can answer empty string
var screenSize = Controller.getViewportDimensions();
if (savedFraction) {
// If we have saved data, keep the toolbar at the same proportion of the screen width/height.
that.move(savedFraction.x * screenSize.x, savedFraction.y * screenSize.y);
} else if (!optionalInitialPositionFunction) {
print("No initPosition(screenSize, intializedToolbar) specified for ToolBar");
} else {
// Call the optionalInitialPositionFunctinon() AFTER the client has had a chance to set up.
var that = this;
Script.setTimeout(function () {
var position = optionalInitialPositionFunction(screenSize, that);
that.move(position.x, position.y);
}, 0);
}
}
}
ToolBar.SPACING = 4;
ToolBar.VERTICAL = 0;
ToolBar.HORIZONTAL = 1;
ToolBar.TITLE_BAR_HEIGHT = 10;

@ -10,17 +10,16 @@
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include('lineRider.js')
var MAX_POINTS_PER_LINE = 30;
var LINE_LIFETIME = 60 * 5 //5 minute lifetime
var LINE_DIMENSIONS = 5;
var LIFETIME = 6000;
var colorPalette = [{
red: 236,
green: 208,
blue: 120
}, {
red: 217,
red: 214,
green: 91,
blue: 67
}, {
@ -40,15 +39,6 @@ var colorPalette = [{
var currentColorIndex = 0;
var currentColor = colorPalette[currentColorIndex];
if (hydraCheck() === true) {
HydraPaint();
} else {
MousePaint();
}
function cycleColor() {
currentColor = colorPalette[++currentColorIndex];
if (currentColorIndex === colorPalette.length - 1) {
@ -57,42 +47,17 @@ function cycleColor() {
}
function hydraCheck() {
var numberOfButtons = Controller.getNumberOfButtons();
var numberOfTriggers = Controller.getNumberOfTriggers();
var numberOfSpatialControls = Controller.getNumberOfSpatialControls();
var controllersPerTrigger = numberOfSpatialControls / numberOfTriggers;
hydrasConnected = (numberOfButtons == 12 && numberOfTriggers == 2 && controllersPerTrigger == 2);
return hydrasConnected; //hydrasConnected;
}
//************ Mouse Paint **************************
MousePaint();
function MousePaint() {
var DRAWING_DISTANCE = 2;
var DRAWING_DISTANCE = 5;
var lines = [];
var deletedLines = [];
var isDrawing = false;
var path = [];
var lineRider = new LineRider();
lineRider.addStartHandler(function() {
var points = [];
//create points array from list of all points in path
path.forEach(function(point) {
points.push(point);
});
lineRider.setPath(points);
});
var LINE_WIDTH = 7;
var line;
var points = [];
var line, linePosition;
var BRUSH_SIZE = 0.02;
var BRUSH_SIZE = .05;
var brush = Entities.addEntity({
type: 'Sphere',
@ -110,51 +75,42 @@ function MousePaint() {
});
function newLine(point) {
function newLine(position) {
linePosition = position;
line = Entities.addEntity({
position: MyAvatar.position,
position: position,
type: "Line",
color: currentColor,
dimensions: {
x: 10,
y: 10,
z: 10
x: LINE_DIMENSIONS,
y: LINE_DIMENSIONS,
z: LINE_DIMENSIONS
},
linePoints: [],
lineWidth: LINE_WIDTH,
lifetime: LINE_LIFETIME
lifetime: LIFETIME
});
points = [];
if (point) {
points.push(point);
path.push(point);
}
lines.push(line);
}
function mouseMoveEvent(event) {
var worldPoint = computeWorldPoint(event);
Entities.editEntity(brush, {
position: worldPoint
});
if (!isDrawing) {
return;
}
var pickRay = Camera.computePickRay(event.x, event.y);
var addVector = Vec3.multiply(Vec3.normalize(pickRay.direction), DRAWING_DISTANCE);
var point = Vec3.sum(Camera.getPosition(), addVector);
points.push(point);
path.push(point);
Entities.editEntity(line, {
linePoints: points
});
Entities.editEntity(brush, {
position: point
});
var localPoint = computeLocalPoint(event)
var success = Entities.appendPoint(line, localPoint);
if (points.length === MAX_POINTS_PER_LINE) {
//We need to start a new line!
newLine(point);
if (!success) {
newLine(worldPoint);
Entities.appendPoint(line, computeLocalPoint(event));
}
}
@ -171,17 +127,26 @@ function MousePaint() {
lines.push(restoredLine);
}
function computeWorldPoint(event) {
var pickRay = Camera.computePickRay(event.x, event.y);
var addVector = Vec3.multiply(Vec3.normalize(pickRay.direction), DRAWING_DISTANCE);
return Vec3.sum(Camera.getPosition(), addVector);
}
function computeLocalPoint(event) {
var localPoint = Vec3.subtract(computeWorldPoint(event), linePosition);
return localPoint;
}
function mousePressEvent(event) {
if(!event.isLeftButton) {
if (!event.isLeftButton) {
isDrawing = false;
return;
}
lineRider.mousePressEvent(event);
path = [];
newLine();
newLine(computeWorldPoint(event));
isDrawing = true;
}
function mouseReleaseEvent() {
@ -198,21 +163,21 @@ function MousePaint() {
if (event.text === "z") {
undoStroke();
}
if(event.text === "x") {
if (event.text === "x") {
redoStroke();
}
}
function cleanup() {
lines.forEach(function(line) {
Entities.deleteEntity(line);
// Entities.deleteEntity(line);
});
Entities.deleteEntity(brush);
lineRider.cleanup();
}
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Controller.mouseMoveEvent.connect(mouseMoveEvent);
@ -222,266 +187,6 @@ function MousePaint() {
}
//*****************HYDRA PAINT *******************************************
function HydraPaint() {
var lineRider = new LineRider();
lineRider.addStartHandler(function() {
var points = [];
//create points array from list of all points in path
rightController.path.forEach(function(point) {
points.push(point);
});
lineRider.setPath(points);
});
var LEFT = 0;
var RIGHT = 1;
var currentTime = 0;
var minBrushSize = .02;
var maxBrushSize = .04
var minLineWidth = 5;
var maxLineWidth = 10;
var currentLineWidth = minLineWidth;
var MIN_PAINT_TRIGGER_THRESHOLD = .01;
var COLOR_CHANGE_TIME_FACTOR = 0.1;
var RIGHT_BUTTON_1 = 7
var RIGHT_BUTTON_2 = 8
var RIGHT_BUTTON_3 = 9;
var RIGHT_BUTTON_4 = 10
var LEFT_BUTTON_1 = 1;
var LEFT_BUTTON_2 = 2;
var LEFT_BUTTON_3 = 3;
var LEFT_BUTTON_4 = 4;
var STROKE_SMOOTH_FACTOR = 1;
var MIN_DRAW_DISTANCE = 0.2;
var MAX_DRAW_DISTANCE = 0.4;
function controller(side, undoButton, redoButton, cycleColorButton, startRideButton) {
this.triggerHeld = false;
this.triggerThreshold = 0.9;
this.side = side;
this.palm = 2 * side;
this.tip = 2 * side + 1;
this.trigger = side;
this.lines = [];
this.deletedLines = [] //just an array of properties objects
this.isPainting = false;
this.undoButton = undoButton;
this.undoButtonPressed = false;
this.prevUndoButtonPressed = false;
this.redoButton = redoButton;
this.redoButtonPressed = false;
this.prevRedoButtonPressed = false;
this.cycleColorButton = cycleColorButton;
this.cycleColorButtonPressed = false;
this.prevColorCycleButtonPressed = false;
this.startRideButton = startRideButton;
this.startRideButtonPressed = false;
this.prevStartRideButtonPressed = false;
this.strokeCount = 0;
this.currentBrushSize = minBrushSize;
this.points = [];
this.path = [];
this.brush = Entities.addEntity({
type: 'Sphere',
position: {
x: 0,
y: 0,
z: 0
},
color: currentColor,
dimensions: {
x: minBrushSize,
y: minBrushSize,
z: minBrushSize
}
});
this.newLine = function(point) {
this.line = Entities.addEntity({
position: MyAvatar.position,
type: "Line",
color: currentColor,
dimensions: {
x: 10,
y: 10,
z: 10
},
lineWidth: 5,
lifetime: LINE_LIFETIME
});
this.points = [];
if (point) {
this.points.push(point);
this.path.push(point);
}
this.lines.push(this.line);
}
this.update = function(deltaTime) {
this.updateControllerState();
this.avatarPalmOffset = Vec3.subtract(this.palmPosition, MyAvatar.position);
this.projectedForwardDistance = Vec3.dot(Quat.getFront(Camera.getOrientation()), this.avatarPalmOffset);
this.mappedPalmOffset = map(this.projectedForwardDistance, -.5, .5, MIN_DRAW_DISTANCE, MAX_DRAW_DISTANCE);
this.tipDirection = Vec3.normalize(Vec3.subtract(this.tipPosition, this.palmPosition));
this.offsetVector = Vec3.multiply(this.mappedPalmOffset, this.tipDirection);
this.drawPoint = Vec3.sum(this.palmPosition, this.offsetVector);
this.currentBrushSize = map(this.triggerValue, 0, 1, minBrushSize, maxBrushSize);
Entities.editEntity(this.brush, {
position: this.drawPoint,
dimensions: {
x: this.currentBrushSize,
y: this.currentBrushSize,
z: this.currentBrushSize
},
color: currentColor
});
if (this.triggerValue > MIN_PAINT_TRIGGER_THRESHOLD) {
if (!this.isPainting) {
this.isPainting = true;
this.newLine();
this.path = [];
}
if (this.strokeCount % STROKE_SMOOTH_FACTOR === 0) {
this.paint(this.drawPoint);
}
this.strokeCount++;
} else if (this.triggerValue < MIN_PAINT_TRIGGER_THRESHOLD && this.isPainting) {
this.releaseTrigger();
}
this.oldPalmPosition = this.palmPosition;
this.oldTipPosition = this.tipPosition;
}
this.releaseTrigger = function() {
this.isPainting = false;
}
this.updateControllerState = function() {
this.undoButtonPressed = Controller.isButtonPressed(this.undoButton);
this.redoButtonPressed = Controller.isButtonPressed(this.redoButton);
this.cycleColorButtonPressed = Controller.isButtonPressed(this.cycleColorButton);
this.startRideButtonPressed = Controller.isButtonPressed(this.startRideButton);
//This logic gives us button release
if (this.prevUndoButtonPressed === true && this.undoButtonPressed === false) {
//User released undo button, so undo
this.undoStroke();
}
if (this.prevRedoButtonPressed === true && this.redoButtonPressed === false) {
this.redoStroke();
}
if (this.prevCycleColorButtonPressed === true && this.cycleColorButtonPressed === false) {
cycleColor();
Entities.editEntity(this.brush, {
color: currentColor
});
}
if (this.prevStartRideButtonPressed === true && this.startRideButtonPressed === false) {
lineRider.toggleRide();
}
this.prevRedoButtonPressed = this.redoButtonPressed;
this.prevUndoButtonPressed = this.undoButtonPressed;
this.prevCycleColorButtonPressed = this.cycleColorButtonPressed;
this.prevStartRideButtonPressed = this.startRideButtonPressed;
this.palmPosition = Controller.getSpatialControlPosition(this.palm);
this.tipPosition = Controller.getSpatialControlPosition(this.tip);
this.triggerValue = Controller.getTriggerValue(this.trigger);
}
this.undoStroke = function() {
var deletedLine = this.lines.pop();
var deletedLineProps = Entities.getEntityProperties(deletedLine);
this.deletedLines.push(deletedLineProps);
Entities.deleteEntity(deletedLine);
}
this.redoStroke = function() {
var restoredLine = Entities.addEntity(this.deletedLines.pop());
Entities.addEntity(restoredLine);
this.lines.push(restoredLine);
}
this.paint = function(point) {
currentLineWidth = map(this.triggerValue, 0, 1, minLineWidth, maxLineWidth);
this.points.push(point);
this.path.push(point);
Entities.editEntity(this.line, {
linePoints: this.points,
lineWidth: currentLineWidth,
});
if (this.points.length > MAX_POINTS_PER_LINE) {
this.newLine(point);
}
}
this.cleanup = function() {
Entities.deleteEntity(this.brush);
this.lines.forEach(function(line) {
Entities.deleteEntity(line);
});
}
}
function update(deltaTime) {
rightController.update(deltaTime);
leftController.update(deltaTime);
currentTime += deltaTime;
}
function cleanup() {
rightController.cleanup();
leftController.cleanup();
lineRider.cleanup();
}
function mousePressEvent(event) {
lineRider.mousePressEvent(event);
}
function vectorIsZero(v) {
return v.x === 0 && v.y === 0 && v.z === 0;
}
var rightController = new controller(RIGHT, RIGHT_BUTTON_3, RIGHT_BUTTON_4, RIGHT_BUTTON_1, RIGHT_BUTTON_2);
var leftController = new controller(LEFT, LEFT_BUTTON_3, LEFT_BUTTON_4, LEFT_BUTTON_1, LEFT_BUTTON_2);
Script.update.connect(update);
Script.scriptEnding.connect(cleanup);
Controller.mousePressEvent.connect(mousePressEvent);
}
function randFloat(low, high) {
return low + Math.random() * (high - low);
}

@ -29,19 +29,13 @@ var buttonOnColor = {
};
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var screenSize = Controller.getViewportDimensions();
var userCanPoint = false;
Script.include(["libraries/toolBars.js"]);
const persistKey = "highfidelity.pointer.toolbar.position";
var toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL);
toolBar.save = function () {
Settings.setValue(persistKey, JSON.stringify([toolBar.x, toolBar.y]));
};
var old = JSON.parse(Settings.getValue(persistKey) || '0');
var toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL, "highfidelity.pointer.toolbar", function (screenSize) {
return {x: screenSize.x / 2 - BUTTON_SIZE * 2 + PADDING, y: screenSize.y - (BUTTON_SIZE + PADDING)};
});
var pointerButton = toolBar.addOverlay("image", {
x: old ? old[0] : screenSize.x / 2 - BUTTON_SIZE * 2 + PADDING,
y: old ? old[1] : screenSize.y - (BUTTON_SIZE + PADDING),
width: BUTTON_SIZE,
height: BUTTON_SIZE,
imageURL: HIFI_PUBLIC_BUCKET + "images/laser.png",

@ -9,48 +9,52 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
function hslToRgb(hslColor) {
var h = hslColor.hue;
var s = hslColor.sat;
var l = hslColor.light;
var r, g, b;
if (s == 0) {
r = g = b = l; // achromatic
} else {
var hue2rgb = function hue2rgb(p, q, t) {
if (t < 0) t += 1;
if (t > 1) t -= 1;
if (t < 1 / 6) return p + (q - p) * 6 * t;
if (t < 1 / 2) return q;
if (t < 2 / 3) return p + (q - p) * (2 / 3 - t) * 6;
return p;
map = function(value, min1, max1, min2, max2) {
return min2 + (max2 - min2) * ((value - min1) / (max1 - min1));
}
hslToRgb = function(hslColor) {
var h = hslColor.hue;
var s = hslColor.sat;
var l = hslColor.light;
var r, g, b;
if (s == 0) {
r = g = b = l; // achromatic
} else {
var hue2rgb = function hue2rgb(p, q, t) {
if (t < 0) t += 1;
if (t > 1) t -= 1;
if (t < 1 / 6) return p + (q - p) * 6 * t;
if (t < 1 / 2) return q;
if (t < 2 / 3) return p + (q - p) * (2 / 3 - t) * 6;
return p;
}
var q = l < 0.5 ? l * (1 + s) : l + s - l * s;
var p = 2 * l - q;
r = hue2rgb(p, q, h + 1 / 3);
g = hue2rgb(p, q, h);
b = hue2rgb(p, q, h - 1 / 3);
}
var q = l < 0.5 ? l * (1 + s) : l + s - l * s;
var p = 2 * l - q;
r = hue2rgb(p, q, h + 1 / 3);
g = hue2rgb(p, q, h);
b = hue2rgb(p, q, h - 1 / 3);
}
return {
red: Math.round(r * 255),
green: Math.round(g * 255),
blue: Math.round(b * 255)
};
return {
red: Math.round(r * 255),
green: Math.round(g * 255),
blue: Math.round(b * 255)
};
}
function map(value, min1, max1, min2, max2) {
return min2 + (max2 - min2) * ((value - min1) / (max1 - min1));
}
function randFloat(low, high) {
return low + Math.random() * (high - low);
randFloat = function(low, high) {
return low + Math.random() * (high - low);
}
function randInt(low, high) {
return Math.floor(randFloat(low, high));
randInt = function(low, high) {
return Math.floor(randFloat(low, high));
}


@ -0,0 +1,50 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Generator: Adobe Illustrator 18.1.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
version="1.1"
id="Layer_1"
x="0px"
y="0px"
viewBox="133.1 714.2 21.3 33.4"
enable-background="new 133.1 714.2 21.3 33.4"
xml:space="preserve"
inkscape:version="0.91 r13725"
sodipodi:docname="left-arrow.svg"><metadata
id="metadata13"><rdf:RDF><cc:Work
rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" /></cc:Work></rdf:RDF></metadata><defs
id="defs11" /><sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="1440"
inkscape:window-height="775"
id="namedview9"
showgrid="false"
inkscape:zoom="14.131736"
inkscape:cx="12.772881"
inkscape:cy="16.700001"
inkscape:window-x="0"
inkscape:window-y="0"
inkscape:window-maximized="1"
inkscape:current-layer="g5" /><g
id="g3"
transform="matrix(-1,0,0,1,287.5,0)"><g
id="g5"><path
d="m 133.1,714.2 21.3,16.7 -21.3,16.7 0,-33.4 z"
id="path7"
inkscape:connector-curvature="0"
style="fill:#cccccc;fill-opacity:1" /></g></g></svg>


Binary file not shown (before: 369 B).

View file

@ -1,11 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 18.1.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns:xl="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="133.1 714.2 21.3 33.4"
enable-background="new 133.1 714.2 21.3 33.4" xml:space="preserve">
<g>
<g>
<path fill="#7E7E7E" d="M133.1,714.2l21.3,16.7l-21.3,16.7V714.2z"/>
</g>
</g>
</svg>


View file

@ -1,11 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 18.1.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns:xl="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="133.1 714.2 21.3 33.4"
enable-background="new 133.1 714.2 21.3 33.4" xml:space="preserve">
<g>
<g>
<path fill="#FF5353" d="M133.1,714.2l21.3,16.7l-21.3,16.7V714.2z"/>
</g>
</g>
</svg>


View file

@ -1,11 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 18.1.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns:xl="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="225.8 714.2 326.5 512"
enable-background="new 225.8 714.2 326.5 512" xml:space="preserve">
<g>
<g>
<path fill="#FF5353" d="M552.4,1226.2l-326.5-256l326.5-256V1226.2z"/>
</g>
</g>
</svg>


View file

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 18.1.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns:xl="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="133.1 714.2 21.3 33.4"
enable-background="new 133.1 714.2 21.3 33.4" xml:space="preserve">
<g>
<g>
<path fill="#CCCCCC" d="M133.1,714.2l21.3,16.7l-21.3,16.7V714.2z"/>
</g>
</g>
</svg>


View file

Image changed (before: 501 B, after: 501 B); contents not shown.

Binary file not shown (before: 127 B).

View file

@ -1,3 +0,0 @@
<?xml version="1.0"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xl="http://www.w3.org/1999/xlink" version="1.1" viewBox="344 454 26 74" width="26pt" height="74pt"><metadata xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:date>2015-06-12 18:23Z</dc:date><!-- Produced by OmniGraffle Professional 5.4.4 --></metadata><defs></defs><g stroke="none" stroke-opacity="1" stroke-dasharray="none" fill="none" fill-opacity="1"><title>Canvas 1</title><rect fill="white" width="1728" height="1466"/><g><title> Navi Bar</title><line x1="356.58927" y1="466.42861" x2="356.58927" y2="515.4286" stroke="#b3b3b3" stroke-linecap="butt" stroke-linejoin="miter" stroke-width="3"/></g></g></svg>


View file

@ -46,10 +46,40 @@ DialogContainer {
property int inputAreaHeight: 56.0 * root.scale // Height of the background's input area
property int inputAreaStep: (height - inputAreaHeight) / 2
MouseArea {
// Drag the icon
width: parent.height
height: parent.height
x: 0
y: 0
drag {
target: root
minimumX: -parent.inputAreaStep
minimumY: -parent.inputAreaStep
maximumX: root.parent ? root.maximumX : 0
maximumY: root.parent ? root.maximumY + parent.inputAreaStep : 0
}
}
MouseArea {
// Drag the input rectangle
width: parent.width - parent.height
height: parent.inputAreaHeight
x: parent.height
y: parent.inputAreaStep
drag {
target: root
minimumX: -parent.inputAreaStep
minimumY: -parent.inputAreaStep
maximumX: root.parent ? root.maximumX : 0
maximumY: root.parent ? root.maximumY + parent.inputAreaStep : 0
}
}
Image {
id: backArrow
source: addressBarDialog.backEnabled ? "../images/left-arrow.svg" : "../images/redarrow_reversed.svg"
source: addressBarDialog.backEnabled ? "../images/left-arrow.svg" : "../images/left-arrow-disabled.svg"
anchors {
fill: parent
@ -71,7 +101,7 @@ DialogContainer {
Image {
id: forwardArrow
source: addressBarDialog.forwardEnabled ? "../images/darkgreyarrow.svg" : "../images/redarrow.svg"
source: addressBarDialog.forwardEnabled ? "../images/right-arrow.svg" : "../images/right-arrow-disabled.svg"
anchors {
fill: parent
@ -111,38 +141,7 @@ DialogContainer {
addressBarDialog.loadAddress(addressLine.text)
}
}
MouseArea {
// Drag the icon
width: parent.height
height: parent.height
x: 0
y: 0
drag {
target: root
minimumX: -parent.inputAreaStep
minimumY: -parent.inputAreaStep
maximumX: root.parent ? root.maximumX : 0
maximumY: root.parent ? root.maximumY + parent.inputAreaStep : 0
}
}
// Add this code to make text bar draggable
/*
MouseArea {
// Drag the input rectangle
width: parent.width - parent.height
height: parent.inputAreaHeight
x: parent.height
y: parent.inputAreaStep
drag {
target: root
minimumX: -parent.inputAreaStep
minimumY: -parent.inputAreaStep
maximumX: root.parent ? root.maximumX : 0
maximumY: root.parent ? root.maximumY + parent.inputAreaStep : 0
}
}*/
}
}

View file

@ -0,0 +1,177 @@
//
// Created by Bradley Austin Davis on 2015/06/19
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import Hifi 1.0 as Hifi
import QtQuick 2.4
import QtQuick.Controls 1.3
import QtGraphicalEffects 1.0
Hifi.AvatarInputs {
id: root
objectName: "AvatarInputs"
anchors.fill: parent
// width: 800
// height: 600
// color: "black"
readonly property int iconPadding: 5
readonly property int mirrorHeight: 215
readonly property int mirrorWidth: 265
readonly property int mirrorTopPad: iconPadding
readonly property int mirrorLeftPad: 10
readonly property int iconSize: 24
Item {
id: mirror
width: root.mirrorWidth
height: root.mirrorVisible ? root.mirrorHeight : 0
visible: root.mirrorVisible
anchors.left: parent.left
anchors.leftMargin: root.mirrorLeftPad
anchors.top: parent.top
anchors.topMargin: root.mirrorTopPad
clip: true
MouseArea {
id: hover
anchors.fill: parent
hoverEnabled: true
propagateComposedEvents: true
}
Image {
id: closeMirror
visible: hover.containsMouse
width: root.iconSize
height: root.iconSize
anchors.top: parent.top
anchors.topMargin: root.iconPadding
anchors.left: parent.left
anchors.leftMargin: root.iconPadding
source: "../images/close.svg"
MouseArea {
anchors.fill: parent
onClicked: {
root.closeMirror();
}
}
}
Image {
id: zoomIn
visible: hover.containsMouse
width: root.iconSize
height: root.iconSize
anchors.bottom: parent.bottom
anchors.bottomMargin: root.iconPadding
anchors.left: parent.left
anchors.leftMargin: root.iconPadding
source: root.mirrorZoomed ? "../images/minus.svg" : "../images/plus.svg"
MouseArea {
anchors.fill: parent
onClicked: {
root.toggleZoom();
}
}
}
}
Item {
width: root.mirrorWidth
height: 44
x: root.mirrorLeftPad
y: root.mirrorVisible ? root.mirrorTopPad + root.mirrorHeight : 5
Rectangle {
anchors.fill: parent
color: root.mirrorVisible ? (root.audioClipping ? "red" : "#696969") : "#00000000"
Image {
id: faceMute
width: root.iconSize
height: root.iconSize
visible: root.cameraEnabled
anchors.left: parent.left
anchors.leftMargin: root.iconPadding
anchors.verticalCenter: parent.verticalCenter
source: root.cameraMuted ? "../images/face-mute.svg" : "../images/face.svg"
MouseArea {
anchors.fill: parent
onClicked: {
root.toggleCameraMute()
}
onDoubleClicked: {
root.resetSensors();
}
}
}
Image {
id: micMute
width: root.iconSize
height: root.iconSize
anchors.left: root.cameraEnabled ? faceMute.right : parent.left
anchors.leftMargin: root.iconPadding
anchors.verticalCenter: parent.verticalCenter
source: root.audioMuted ? "../images/mic-mute.svg" : "../images/mic.svg"
MouseArea {
anchors.fill: parent
onClicked: {
root.toggleAudioMute()
}
}
}
Item {
id: audioMeter
anchors.verticalCenter: parent.verticalCenter
anchors.left: micMute.right
anchors.leftMargin: root.iconPadding
anchors.right: parent.right
anchors.rightMargin: root.iconPadding
height: 8
Rectangle {
id: blueRect
color: "blue"
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.left: parent.left
width: parent.width / 4
}
Rectangle {
id: greenRect
color: "green"
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.left: blueRect.right
anchors.right: redRect.left
}
Rectangle {
id: redRect
color: "red"
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.right: parent.right
width: parent.width / 5
}
Rectangle {
z: 100
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.right: parent.right
width: (1.0 - root.audioLevel) * parent.width
color: "black"
}
}
}
}
}

View file

@ -0,0 +1,249 @@
import Hifi 1.0 as Hifi
import QtQuick 2.3
import QtQuick.Controls 1.2
Item {
anchors.fill: parent
anchors.leftMargin: 300
Hifi.Stats {
id: root
objectName: "Stats"
implicitHeight: row.height
implicitWidth: row.width
anchors.horizontalCenter: parent.horizontalCenter
readonly property int fontSize: 12
readonly property string fontColor: "white"
readonly property string bgColor: "#99333333"
Row {
id: row
spacing: 8
Rectangle {
width: generalCol.width + 8;
height: generalCol.height + 8;
color: root.bgColor;
MouseArea {
anchors.fill: parent
onClicked: { root.expanded = !root.expanded; }
}
Column {
id: generalCol
spacing: 4; x: 4; y: 4;
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Servers: " + root.serverCount
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Avatars: " + root.avatarCount
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Framerate: " + root.framerate
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Packets In/Out: " + root.packetInCount + "/" + root.packetOutCount
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Mbps In/Out: " + root.mbpsIn.toFixed(2) + "/" + root.mbpsOut.toFixed(2)
}
}
}
Rectangle {
width: pingCol.width + 8
height: pingCol.height + 8
color: root.bgColor;
visible: root.audioPing != -2
MouseArea {
anchors.fill: parent
onClicked: { root.expanded = !root.expanded; }
}
Column {
id: pingCol
spacing: 4; x: 4; y: 4;
Text {
color: root.fontColor
font.pixelSize: root.fontSize
text: "Audio ping: " + root.audioPing
}
Text {
color: root.fontColor
font.pixelSize: root.fontSize
text: "Avatar ping: " + root.avatarPing
}
Text {
color: root.fontColor
font.pixelSize: root.fontSize
text: "Entities avg ping: " + root.entitiesPing
}
Text {
color: root.fontColor
font.pixelSize: root.fontSize
visible: root.expanded;
text: "Voxel max ping: " + 0
}
}
}
Rectangle {
width: geoCol.width + 8
height: geoCol.height + 8
color: root.bgColor;
MouseArea {
anchors.fill: parent
onClicked: { root.expanded = !root.expanded; }
}
Column {
id: geoCol
spacing: 4; x: 4; y: 4;
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Position: " + root.position.x.toFixed(1) + ", " +
root.position.y.toFixed(1) + ", " + root.position.z.toFixed(1)
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Velocity: " + root.velocity.toFixed(1)
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Yaw: " + root.yaw.toFixed(1)
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded;
text: "Avatar Mixer: " + root.avatarMixerKbps + " kbps, " +
root.avatarMixerPps + "pps";
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded;
text: "Downloads: ";
}
}
}
Rectangle {
width: octreeCol.width + 8
height: octreeCol.height + 8
color: root.bgColor;
MouseArea {
anchors.fill: parent
onClicked: { root.expanded = !root.expanded; }
}
Column {
id: octreeCol
spacing: 4; x: 4; y: 4;
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Triangles: " + root.triangles +
" / Quads: " + root.quads + " / Material Switches: " + root.materialSwitches
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded;
text: "\tMesh Parts Rendered Opaque: " + root.meshOpaque +
" / Translucent: " + root.meshTranslucent;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded;
text: "\tOpaque considered: " + root.opaqueConsidered +
" / Out of view: " + root.opaqueOutOfView + " / Too small: " + root.opaqueTooSmall;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: !root.expanded
text: "Octree Elements Server: " + root.serverElements +
" Local: " + root.localElements;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded
text: "Octree Sending Mode: " + root.sendingMode;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded
text: "Octree Packets to Process: " + root.packetStats;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded
text: "Octree Elements - ";
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded
text: "\tServer: " + root.serverElements +
" Internal: " + root.serverInternal +
" Leaves: " + root.serverLeaves;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded
text: "\tLocal: " + root.localElements +
" Internal: " + root.localInternal +
" Leaves: " + root.localLeaves;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded
text: "LOD: " + root.lodStatus;
}
}
}
}
Rectangle {
y: 250
visible: root.timingExpanded
width: perfText.width + 8
height: perfText.height + 8
color: root.bgColor;
Text {
x: 4; y: 4
id: perfText
color: root.fontColor
font.family: root.monospaceFont
font.pixelSize: 12
text: "------------------------------------------ Function " +
"--------------------------------------- --msecs- -calls--\n" +
root.timingStats;
}
}
Connections {
target: root.parent
onWidthChanged: {
root.x = root.parent.width - root.width;
}
}
}
}

View file

@ -0,0 +1,31 @@
import Hifi 1.0 as Hifi
import QtQuick 2.3 as Original
import "controls"
import "styles"
Hifi.Tooltip {
id: root
HifiConstants { id: hifi }
// FIXME adjust position based on the edges of the screen
x: (lastMousePosition.x > surfaceSize.width/2) ? lastMousePosition.x - 140 : lastMousePosition.x + 20
//y: lastMousePosition.y + 5
y: (lastMousePosition.y > surfaceSize.height/2) ? lastMousePosition.y - 70 : lastMousePosition.y + 5
implicitWidth: border.implicitWidth
implicitHeight: border.implicitHeight
Border {
id: border
anchors.fill: parent
implicitWidth: text.implicitWidth
implicitHeight: Math.max(text.implicitHeight, 64)
Text {
id: text
anchors.fill: parent
anchors.margins: 16
font.pixelSize: hifi.fonts.pixelSize / 2
text: root.text
wrapMode: Original.Text.WordWrap
}
}
}

View file

@ -66,6 +66,9 @@
#include <EntityScriptingInterface.h>
#include <ErrorDialog.h>
#include <GlowEffect.h>
#include <gpu/Batch.h>
#include <gpu/Context.h>
#include <gpu/GLBackend.h>
#include <HFActionEvent.h>
#include <HFBackEvent.h>
#include <InfoView.h>
@ -83,12 +86,14 @@
#include <PerfStat.h>
#include <PhysicsEngine.h>
#include <ProgramObject.h>
#include <RenderDeferredTask.h>
#include <ResourceCache.h>
#include <SceneScriptingInterface.h>
#include <ScriptCache.h>
#include <SettingHandle.h>
#include <SoundCache.h>
#include <TextRenderer.h>
#include <Tooltip.h>
#include <UserActivityLogger.h>
#include <UUID.h>
#include <VrMenu.h>
@ -106,11 +111,9 @@
#include "avatar/AvatarManager.h"
#include "audio/AudioToolBox.h"
#include "audio/AudioIOStatsRenderer.h"
#include "audio/AudioScope.h"
#include "devices/CameraToolBox.h"
#include "devices/DdeFaceTracker.h"
#include "devices/Faceshift.h"
#include "devices/Leapmotion.h"
@ -120,12 +123,6 @@
#include "devices/OculusManager.h"
#include "devices/TV3DManager.h"
#include "gpu/Batch.h"
#include "gpu/Context.h"
#include "gpu/GLBackend.h"
#include "RenderDeferredTask.h"
#include "scripting/AccountScriptingInterface.h"
#include "scripting/AudioDeviceScriptingInterface.h"
#include "scripting/ClipboardScriptingInterface.h"
@ -141,6 +138,7 @@
#include "SpeechRecognizer.h"
#endif
#include "ui/AvatarInputs.h"
#include "ui/DataWebDialog.h"
#include "ui/DialogsManager.h"
#include "ui/LoginDialog.h"
@ -281,8 +279,6 @@ bool setupEssentials(int& argc, char** argv) {
auto animationCache = DependencyManager::set<AnimationCache>();
auto ddeFaceTracker = DependencyManager::set<DdeFaceTracker>();
auto modelBlender = DependencyManager::set<ModelBlender>();
auto audioToolBox = DependencyManager::set<AudioToolBox>();
auto cameraToolBox = DependencyManager::set<CameraToolBox>();
auto avatarManager = DependencyManager::set<AvatarManager>();
auto lodManager = DependencyManager::set<LODManager>();
auto jsConsole = DependencyManager::set<StandAloneJSConsole>();
@ -335,8 +331,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_cursorVisible(true),
_lastMouseMove(usecTimestampNow()),
_lastMouseMoveWasSimulated(false),
_touchAvgX(0.0f),
_touchAvgY(0.0f),
_isTouchPressed(false),
_mousePressed(false),
_enableProcessOctreeThread(true),
@ -831,6 +825,7 @@ void Application::initializeUi() {
LoginDialog::registerType();
MessageDialog::registerType();
VrMenu::registerType();
Tooltip::registerType();
UpdateDialog::registerType();
auto offscreenUi = DependencyManager::get<OffscreenUi>();
@ -844,7 +839,7 @@ void Application::initializeUi() {
VrMenu::executeQueuedLambdas();
offscreenUi->setMouseTranslator([this](const QPointF& p){
if (OculusManager::isConnected()) {
glm::vec2 pos = _applicationOverlay.screenToOverlay(toGlm(p));
glm::vec2 pos = _compositor.screenToOverlay(toGlm(p));
return QPointF(pos.x, pos.y);
}
return QPointF(p);
@ -877,6 +872,7 @@ void Application::paintGL() {
OculusManager::beginFrameTiming();
}
PerformanceWarning::setSuppressShortTimings(Menu::getInstance()->isOptionChecked(MenuOption::SuppressShortTimings));
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::paintGL()");
@ -884,6 +880,12 @@ void Application::paintGL() {
{
PerformanceTimer perfTimer("renderOverlay");
/*
gpu::Context context(new gpu::GLBackend());
RenderArgs renderArgs(&context, nullptr, getViewFrustum(), lodManager->getOctreeSizeScale(),
lodManager->getBoundaryLevelAdjust(), RenderArgs::DEFAULT_RENDER_MODE,
RenderArgs::MONO, RenderArgs::RENDER_DEBUG_NONE);
*/
_applicationOverlay.renderOverlay(&renderArgs);
}
@ -969,9 +971,7 @@ void Application::paintGL() {
glPopMatrix();
renderArgs._renderMode = RenderArgs::MIRROR_RENDER_MODE;
if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
_rearMirrorTools->render(&renderArgs, true, _glWidget->mapFromGlobal(QCursor::pos()));
} else if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
renderRearViewMirror(&renderArgs, _mirrorViewRect);
}
@ -979,16 +979,18 @@ void Application::paintGL() {
auto finalFbo = DependencyManager::get<GlowEffect>()->render(&renderArgs);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
glBlitFramebuffer(0, 0, _renderResolution.x, _renderResolution.y,
0, 0, _glWidget->getDeviceSize().width(), _glWidget->getDeviceSize().height(),
GL_COLOR_BUFFER_BIT, GL_NEAREST);
GL_COLOR_BUFFER_BIT, GL_LINEAR);
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
_applicationOverlay.displayOverlayTexture();
_compositor.displayOverlayTexture(&renderArgs);
}
if (!OculusManager::isConnected() || OculusManager::allowSwap()) {
_glWidget->swapBuffers();
}
@ -1046,27 +1048,24 @@ void Application::resizeGL() {
} else {
renderSize = _glWidget->getDeviceSize() * getRenderResolutionScale();
}
if (_renderResolution == toGlm(renderSize)) {
return;
if (_renderResolution != toGlm(renderSize)) {
_renderResolution = toGlm(renderSize);
DependencyManager::get<TextureCache>()->setFrameBufferSize(renderSize);
resetCamerasOnResizeGL(_myCamera, _renderResolution);
glViewport(0, 0, _renderResolution.x, _renderResolution.y); // shouldn't this account for the menu???
updateProjectionMatrix();
glLoadIdentity();
}
_renderResolution = toGlm(renderSize);
DependencyManager::get<TextureCache>()->setFrameBufferSize(renderSize);
resetCamerasOnResizeGL(_myCamera, _renderResolution);
glViewport(0, 0, _renderResolution.x, _renderResolution.y); // shouldn't this account for the menu???
updateProjectionMatrix();
glLoadIdentity();
auto offscreenUi = DependencyManager::get<OffscreenUi>();
offscreenUi->resize(_glWidget->size());
auto canvasSize = _glWidget->size();
offscreenUi->resize(canvasSize);
_glWidget->makeCurrent();
// update Stats width
// let's set horizontal offset to give stats some margin to mirror
int horizontalOffset = MIRROR_VIEW_WIDTH + MIRROR_VIEW_LEFT_PADDING * 2;
Stats::getInstance()->resetWidth(_renderResolution.x, horizontalOffset);
}
void Application::updateProjectionMatrix() {
@ -1564,27 +1563,9 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
_keyboardMouseDevice.mousePressEvent(event);
if (event->button() == Qt::LeftButton) {
_mouseDragStartedX = getTrueMouseX();
_mouseDragStartedY = getTrueMouseY();
_mouseDragStarted = getTrueMouse();
_mousePressed = true;
if (mouseOnScreen()) {
if (DependencyManager::get<AudioToolBox>()->mousePressEvent(getMouseX(), getMouseY())) {
// stop propagation
return;
}
if (DependencyManager::get<CameraToolBox>()->mousePressEvent(getMouseX(), getMouseY())) {
// stop propagation
return;
}
if (_rearMirrorTools->mousePressEvent(getMouseX(), getMouseY())) {
// stop propagation
return;
}
}
// nobody handled this - make it an action event on the _window object
HFActionEvent actionEvent(HFActionEvent::startType(),
computePickRay(event->x(), event->y()));
@ -1603,17 +1584,6 @@ void Application::mouseDoublePressEvent(QMouseEvent* event, unsigned int deviceI
}
_controllerScriptingInterface.emitMouseDoublePressEvent(event);
if (activeWindow() == _window) {
if (event->button() == Qt::LeftButton) {
if (mouseOnScreen()) {
if (DependencyManager::get<CameraToolBox>()->mouseDoublePressEvent(getMouseX(), getMouseY())) {
// stop propagation
return;
}
}
}
}
}
void Application::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {
@ -1635,13 +1605,6 @@ void Application::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {
if (event->button() == Qt::LeftButton) {
_mousePressed = false;
if (Menu::getInstance()->isOptionChecked(MenuOption::Stats) && mouseOnScreen()) {
// let's set horizontal offset to give stats some margin to mirror
int horizontalOffset = MIRROR_VIEW_WIDTH;
Stats::getInstance()->checkClick(getMouseX(), getMouseY(),
getMouseDragStartedX(), getMouseDragStartedY(), horizontalOffset);
}
// fire an action end event
HFActionEvent actionEvent(HFActionEvent::endType(),
computePickRay(event->x(), event->y()));
@ -1669,22 +1632,18 @@ void Application::touchUpdateEvent(QTouchEvent* event) {
bool validTouch = false;
if (activeWindow() == _window) {
const QList<QTouchEvent::TouchPoint>& tPoints = event->touchPoints();
_touchAvgX = 0.0f;
_touchAvgY = 0.0f;
_touchAvg = vec2();
int numTouches = tPoints.count();
if (numTouches > 1) {
for (int i = 0; i < numTouches; ++i) {
_touchAvgX += (float)tPoints[i].pos().x();
_touchAvgY += (float)tPoints[i].pos().y();
_touchAvg += toGlm(tPoints[i].pos());
}
_touchAvgX /= (float)(numTouches);
_touchAvgY /= (float)(numTouches);
_touchAvg /= (float)(numTouches);
validTouch = true;
}
}
if (!_isTouchPressed) {
_touchDragStartedAvgX = _touchAvgX;
_touchDragStartedAvgY = _touchAvgY;
_touchDragStartedAvg = _touchAvg;
}
_isTouchPressed = validTouch;
}
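The touch handling above replaces the separate _touchAvgX/_touchAvgY floats with a single vec2 accumulated via toGlm(); the resulting value is simply the centroid of the active touch points. A minimal standalone sketch of the same arithmetic (function name hypothetical, not from this commit):

    #include <vector>
    #include <glm/glm.hpp>

    // Average a set of touch positions into one point, as the loop above does for _touchAvg.
    glm::vec2 averageTouchPoints(const std::vector<glm::vec2>& points) {
        glm::vec2 avg(0.0f);
        for (const auto& p : points) {
            avg += p;
        }
        if (!points.empty()) {
            avg /= (float)points.size();
        }
        return avg;
    }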
@ -1720,8 +1679,7 @@ void Application::touchEndEvent(QTouchEvent* event) {
_keyboardMouseDevice.touchEndEvent(event);
// put any application specific touch behavior below here..
_touchDragStartedAvgX = _touchAvgX;
_touchDragStartedAvgY = _touchAvgY;
_touchDragStartedAvg = _touchAvg;
_isTouchPressed = false;
}
@ -1870,9 +1828,9 @@ void Application::setFullscreen(bool fullscreen) {
Menu::getInstance()->getActionForOption(MenuOption::Fullscreen)->setChecked(fullscreen);
}
// The following code block is useful on platforms that can have a visible
// app menu in a fullscreen window. However the OSX mechanism hides the
// application menu for fullscreen apps, so the check is not required.
#ifndef Q_OS_MAC
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode)) {
if (fullscreen) {
@ -1946,11 +1904,6 @@ void Application::setEnableVRMode(bool enableVRMode) {
} else {
OculusManager::abandonCalibration();
OculusManager::disconnect();
_mirrorCamera.setHmdPosition(glm::vec3());
_mirrorCamera.setHmdRotation(glm::quat());
_myCamera.setHmdPosition(glm::vec3());
_myCamera.setHmdRotation(glm::quat());
}
resizeGL();
@ -1962,44 +1915,34 @@ void Application::setLowVelocityFilter(bool lowVelocityFilter) {
bool Application::mouseOnScreen() const {
if (OculusManager::isConnected()) {
return getMouseX() >= 0 && getMouseX() <= _glWidget->getDeviceWidth() &&
getMouseY() >= 0 && getMouseY() <= _glWidget->getDeviceHeight();
ivec2 mouse = getMouse();
if (!glm::all(glm::greaterThanEqual(mouse, ivec2()))) {
return false;
}
ivec2 size = toGlm(_glWidget->getDeviceSize());
if (!glm::all(glm::lessThanEqual(mouse, size))) {
return false;
}
}
return true;
}
int Application::getMouseX() const {
ivec2 Application::getMouse() const {
if (OculusManager::isConnected()) {
glm::vec2 pos = _applicationOverlay.screenToOverlay(glm::vec2(getTrueMouseX(), getTrueMouseY()));
return pos.x;
return _compositor.screenToOverlay(getTrueMouse());
}
return getTrueMouseX();
return getTrueMouse();
}
int Application::getMouseY() const {
ivec2 Application::getMouseDragStarted() const {
if (OculusManager::isConnected()) {
glm::vec2 pos = _applicationOverlay.screenToOverlay(glm::vec2(getTrueMouseX(), getTrueMouseY()));
return pos.y;
return _compositor.screenToOverlay(getTrueMouseDragStarted());
}
return getTrueMouseY();
return getTrueMouseDragStarted();
}
int Application::getMouseDragStartedX() const {
if (OculusManager::isConnected()) {
glm::vec2 pos = _applicationOverlay.screenToOverlay(glm::vec2(getTrueMouseDragStartedX(),
getTrueMouseDragStartedY()));
return pos.x;
}
return getTrueMouseDragStartedX();
}
int Application::getMouseDragStartedY() const {
if (OculusManager::isConnected()) {
glm::vec2 pos = _applicationOverlay.screenToOverlay(glm::vec2(getTrueMouseDragStartedX(),
getTrueMouseDragStartedY()));
return pos.y;
}
return getTrueMouseDragStartedY();
ivec2 Application::getTrueMouseDragStarted() const {
return _mouseDragStarted;
}
FaceTracker* Application::getActiveFaceTracker() {
@ -2244,13 +2187,6 @@ void Application::init() {
_entityClipboardRenderer.setViewFrustum(getViewFrustum());
_entityClipboardRenderer.setTree(&_entityClipboard);
_rearMirrorTools = new RearMirrorTools(_mirrorViewRect);
connect(_rearMirrorTools, SIGNAL(closeView()), SLOT(closeMirrorView()));
connect(_rearMirrorTools, SIGNAL(restoreView()), SLOT(restoreMirrorView()));
connect(_rearMirrorTools, SIGNAL(shrinkView()), SLOT(shrinkMirrorView()));
connect(_rearMirrorTools, SIGNAL(resetView()), SLOT(resetSensors()));
// initialize the GlowEffect with our widget
bool glow = Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect);
DependencyManager::get<GlowEffect>()->init(glow);
@ -2515,16 +2451,18 @@ void Application::update(float deltaTime) {
// Transfer the user inputs to the driveKeys
_myAvatar->clearDriveKeys();
_myAvatar->setDriveKeys(FWD, _userInputMapper.getActionState(UserInputMapper::LONGITUDINAL_FORWARD));
_myAvatar->setDriveKeys(BACK, _userInputMapper.getActionState(UserInputMapper::LONGITUDINAL_BACKWARD));
_myAvatar->setDriveKeys(UP, _userInputMapper.getActionState(UserInputMapper::VERTICAL_UP));
_myAvatar->setDriveKeys(DOWN, _userInputMapper.getActionState(UserInputMapper::VERTICAL_DOWN));
_myAvatar->setDriveKeys(LEFT, _userInputMapper.getActionState(UserInputMapper::LATERAL_LEFT));
_myAvatar->setDriveKeys(RIGHT, _userInputMapper.getActionState(UserInputMapper::LATERAL_RIGHT));
_myAvatar->setDriveKeys(ROT_UP, _userInputMapper.getActionState(UserInputMapper::PITCH_UP));
_myAvatar->setDriveKeys(ROT_DOWN, _userInputMapper.getActionState(UserInputMapper::PITCH_DOWN));
_myAvatar->setDriveKeys(ROT_LEFT, _userInputMapper.getActionState(UserInputMapper::YAW_LEFT));
_myAvatar->setDriveKeys(ROT_RIGHT, _userInputMapper.getActionState(UserInputMapper::YAW_RIGHT));
if (!_controllerScriptingInterface.areActionsCaptured()) {
_myAvatar->setDriveKeys(FWD, _userInputMapper.getActionState(UserInputMapper::LONGITUDINAL_FORWARD));
_myAvatar->setDriveKeys(BACK, _userInputMapper.getActionState(UserInputMapper::LONGITUDINAL_BACKWARD));
_myAvatar->setDriveKeys(UP, _userInputMapper.getActionState(UserInputMapper::VERTICAL_UP));
_myAvatar->setDriveKeys(DOWN, _userInputMapper.getActionState(UserInputMapper::VERTICAL_DOWN));
_myAvatar->setDriveKeys(LEFT, _userInputMapper.getActionState(UserInputMapper::LATERAL_LEFT));
_myAvatar->setDriveKeys(RIGHT, _userInputMapper.getActionState(UserInputMapper::LATERAL_RIGHT));
_myAvatar->setDriveKeys(ROT_UP, _userInputMapper.getActionState(UserInputMapper::PITCH_UP));
_myAvatar->setDriveKeys(ROT_DOWN, _userInputMapper.getActionState(UserInputMapper::PITCH_DOWN));
_myAvatar->setDriveKeys(ROT_LEFT, _userInputMapper.getActionState(UserInputMapper::YAW_LEFT));
_myAvatar->setDriveKeys(ROT_RIGHT, _userInputMapper.getActionState(UserInputMapper::YAW_RIGHT));
}
_myAvatar->setDriveKeys(BOOM_IN, _userInputMapper.getActionState(UserInputMapper::BOOM_IN));
_myAvatar->setDriveKeys(BOOM_OUT, _userInputMapper.getActionState(UserInputMapper::BOOM_OUT));
@ -3150,7 +3088,7 @@ PickRay Application::computePickRay(float x, float y) const {
y /= size.y;
PickRay result;
if (isHMDMode()) {
getApplicationOverlay().computeHmdPickRay(glm::vec2(x, y), result.origin, result.direction);
getApplicationCompositor().computeHmdPickRay(glm::vec2(x, y), result.origin, result.direction);
} else {
if (QThread::currentThread() == activeRenderingThread) {
getDisplayViewFrustum()->computePickRay(x, y, result.origin, result.direction);
@ -3179,13 +3117,11 @@ QImage Application::renderAvatarBillboard(RenderArgs* renderArgs) {
renderRearViewMirror(renderArgs, QRect(0, _glWidget->getDeviceHeight() - BILLBOARD_SIZE,
BILLBOARD_SIZE, BILLBOARD_SIZE),
true);
QImage image(BILLBOARD_SIZE, BILLBOARD_SIZE, QImage::Format_ARGB32);
glReadPixels(0, 0, BILLBOARD_SIZE, BILLBOARD_SIZE, GL_BGRA, GL_UNSIGNED_BYTE, image.bits());
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
return image;
}
@ -3370,6 +3306,11 @@ namespace render {
void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool selfAvatarOnly, bool billboard) {
// FIXME: This preRender call is temporary until we create a separate render::scene for the mirror rendering.
// Then we can move this logic into the Avatar::simulate call.
_myAvatar->preRender(renderArgs);
activeRenderingThread = QThread::currentThread();
PROFILE_RANGE(__FUNCTION__);
PerformanceTimer perfTimer("display");
@ -3381,9 +3322,8 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
// flip x if in mirror mode (also requires reversing winding order for backface culling)
if (theCamera.getMode() == CAMERA_MODE_MIRROR) {
glScalef(-1.0f, 1.0f, 1.0f);
glFrontFace(GL_CW);
//glScalef(-1.0f, 1.0f, 1.0f);
//glFrontFace(GL_CW);
} else {
glFrontFace(GL_CCW);
}
@ -3406,7 +3346,7 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
viewTransform.setTranslation(theCamera.getPosition());
viewTransform.setRotation(rotation);
if (theCamera.getMode() == CAMERA_MODE_MIRROR) {
viewTransform.setScale(Transform::Vec3(-1.0f, 1.0f, 1.0f));
// viewTransform.setScale(Transform::Vec3(-1.0f, 1.0f, 1.0f));
}
if (renderArgs->_renderSide != RenderArgs::MONO) {
glm::mat4 invView = glm::inverse(_untranslatedViewMatrix);
@ -3689,7 +3629,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
_mirrorCamera.setPosition(_myAvatar->getPosition() +
_myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f) * BILLBOARD_DISTANCE * _myAvatar->getScale());
} else if (RearMirrorTools::rearViewZoomLevel.get() == BODY) {
} else if (!AvatarInputs::getInstance()->mirrorZoomed()) {
_mirrorCamera.setPosition(_myAvatar->getChestPosition() +
_myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_REARVIEW_BODY_DISTANCE * _myAvatar->getScale());
@ -3739,10 +3679,6 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
displaySide(renderArgs, _mirrorCamera, true, billboard);
glPopMatrix();
if (!billboard) {
_rearMirrorTools->render(renderArgs, false, _glWidget->mapFromGlobal(QCursor::pos()));
}
// reset Viewport and projection matrix
glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);
glDisable(GL_SCISSOR_TEST);
@ -4749,11 +4685,9 @@ void Application::postLambdaEvent(std::function<void()> f) {
}
void Application::initPlugins() {
OculusManager::init();
}
void Application::shutdownPlugins() {
OculusManager::deinit();
}
glm::vec3 Application::getHeadPosition() const {
@ -4773,12 +4707,8 @@ QSize Application::getDeviceSize() const {
return _glWidget->getDeviceSize();
}
int Application::getTrueMouseX() const {
return _glWidget->mapFromGlobal(QCursor::pos()).x();
}
int Application::getTrueMouseY() const {
return _glWidget->mapFromGlobal(QCursor::pos()).y();
ivec2 Application::getTrueMouse() const {
return toGlm(_glWidget->mapFromGlobal(QCursor::pos()));
}
bool Application::isThrottleRendering() const {
@ -4808,3 +4738,25 @@ qreal Application::getDevicePixelRatio() {
return _window ? _window->windowHandle()->devicePixelRatio() : 1.0;
}
mat4 Application::getEyeProjection(int eye) const {
if (isHMDMode()) {
return OculusManager::getEyeProjection(eye);
}
return _viewFrustum.getProjection();
}
mat4 Application::getEyePose(int eye) const {
if (isHMDMode()) {
return OculusManager::getEyePose(eye);
}
return mat4();
}
mat4 Application::getHeadPose() const {
if (isHMDMode()) {
return OculusManager::getHeadPose();
}
return mat4();
}
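The new getEyeProjection/getEyePose/getHeadPose accessors give callers one place to ask for per-eye matrices whether or not an HMD is connected. A hypothetical caller might gather them like this (the struct, the inverse-pose-as-view convention, and the two-eye loop are illustrative assumptions, not code from this commit):

    #include <glm/glm.hpp>
    #include "Application.h"  // assumed include for the Application class shown above

    struct StereoFrame {
        glm::mat4 projection[2];
        glm::mat4 view[2];
    };

    // Collect per-eye matrices; with no HMD, getEyeProjection() falls back to the
    // mono view frustum and getEyePose() returns identity, so both eyes coincide.
    StereoFrame gatherEyeMatrices(Application* app) {
        StereoFrame frame;
        for (int eye = 0; eye < 2; ++eye) {
            frame.projection[eye] = app->getEyeProjection(eye);
            frame.view[eye] = glm::inverse(app->getEyePose(eye));
        }
        return frame;
    }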

View file

@ -62,12 +62,12 @@
#include "ui/ModelsBrowser.h"
#include "ui/NodeBounds.h"
#include "ui/OctreeStatsDialog.h"
#include "ui/RearMirrorTools.h"
#include "ui/SnapshotShareDialog.h"
#include "ui/LodToolsDialog.h"
#include "ui/LogDialog.h"
#include "ui/overlays/Overlays.h"
#include "ui/ApplicationOverlay.h"
#include "ui/ApplicationCompositor.h"
#include "ui/RunningScriptsWidget.h"
#include "ui/ToolWindow.h"
#include "ui/UserInputMapper.h"
@ -221,15 +221,21 @@ public:
const glm::vec3& getMouseRayOrigin() const { return _mouseRayOrigin; }
const glm::vec3& getMouseRayDirection() const { return _mouseRayDirection; }
bool mouseOnScreen() const;
int getMouseX() const;
int getMouseY() const;
glm::ivec2 getTrueMousePosition() const;
int getTrueMouseX() const;
int getTrueMouseY() const;
int getMouseDragStartedX() const;
int getMouseDragStartedY() const;
int getTrueMouseDragStartedX() const { return _mouseDragStartedX; }
int getTrueMouseDragStartedY() const { return _mouseDragStartedY; }
ivec2 getMouse() const;
ivec2 getTrueMouse() const;
ivec2 getMouseDragStarted() const;
ivec2 getTrueMouseDragStarted() const;
// TODO get rid of these and use glm types directly
int getMouseX() const { return getMouse().x; }
int getMouseY() const { return getMouse().y; }
int getTrueMouseX() const { return getTrueMouse().x; }
int getTrueMouseY() const { return getTrueMouse().y; }
int getMouseDragStartedX() const { return getMouseDragStarted().x; }
int getMouseDragStartedY() const { return getMouseDragStarted().y; }
int getTrueMouseDragStartedX() const { return getTrueMouseDragStarted().x; }
int getTrueMouseDragStartedY() const { return getTrueMouseDragStarted().y; }
bool getLastMouseMoveWasSimulated() const { return _lastMouseMoveWasSimulated; }
FaceTracker* getActiveFaceTracker();
@ -238,6 +244,8 @@ public:
QSystemTrayIcon* getTrayIcon() { return _trayIcon; }
ApplicationOverlay& getApplicationOverlay() { return _applicationOverlay; }
const ApplicationOverlay& getApplicationOverlay() const { return _applicationOverlay; }
ApplicationCompositor& getApplicationCompositor() { return _compositor; }
const ApplicationCompositor& getApplicationCompositor() const { return _compositor; }
Overlays& getOverlays() { return _overlays; }
float getFps() const { return _fps; }
@ -330,6 +338,9 @@ public:
bool isHMDMode() const;
glm::quat getHeadOrientation() const;
glm::vec3 getHeadPosition() const;
glm::mat4 getHeadPose() const;
glm::mat4 getEyePose(int eye) const;
glm::mat4 getEyeProjection(int eye) const;
QRect getDesirableApplicationGeometry();
RunningScriptsWidget* getRunningScriptsWidget() { return _runningScriptsWidget; }
@ -548,21 +559,17 @@ private:
OctreeQuery _octreeQuery; // NodeData derived class for querying octee cells from octree servers
KeyboardMouseDevice _keyboardMouseDevice; // Default input device, the good old keyboard mouse and maybe touchpad
UserInputMapper _userInputMapper; // User input mapper allowing to mapp different real devices to the action channels that the application has to offer
MyAvatar* _myAvatar; // TODO: move this and relevant code to AvatarManager (or MyAvatar as the case may be)
Camera _myCamera; // My view onto the world
Camera _mirrorCamera; // Cammera for mirror view
QRect _mirrorViewRect;
RearMirrorTools* _rearMirrorTools;
Setting::Handle<bool> _firstRun;
Setting::Handle<QString> _previousScriptLocation;
Setting::Handle<QString> _scriptsLocationHandle;
Setting::Handle<float> _fieldOfView;
Transform _viewTransform;
glm::mat4 _untranslatedViewMatrix;
@ -580,18 +587,17 @@ private:
Environment _environment;
bool _cursorVisible;
int _mouseDragStartedX;
int _mouseDragStartedY;
ivec2 _mouseDragStarted;
quint64 _lastMouseMove;
bool _lastMouseMoveWasSimulated;
glm::vec3 _mouseRayOrigin;
glm::vec3 _mouseRayDirection;
float _touchAvgX;
float _touchAvgY;
float _touchDragStartedAvgX;
float _touchDragStartedAvgY;
vec2 _touchAvg;
vec2 _touchDragStartedAvg;
bool _isTouchPressed; // true if multitouch has been pressed (clear when finished)
bool _mousePressed; // true if mouse has been pressed (clear when finished)
@ -669,6 +675,7 @@ private:
Overlays _overlays;
ApplicationOverlay _applicationOverlay;
ApplicationCompositor _compositor;
};
#endif // hifi_Application_h

View file

@ -49,8 +49,6 @@ Camera::Camera() :
_mode(CAMERA_MODE_THIRD_PERSON),
_position(0.0f, 0.0f, 0.0f),
_projection(glm::perspective(glm::radians(DEFAULT_FIELD_OF_VIEW_DEGREES), 16.0f/9.0f, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP)),
_hmdPosition(),
_hmdRotation(),
_isKeepLookingAt(false),
_lookingAt(0.0f, 0.0f, 0.0f)
{
@ -74,20 +72,6 @@ void Camera::setRotation(const glm::quat& rotation) {
}
}
void Camera::setHmdPosition(const glm::vec3& hmdPosition) {
_hmdPosition = hmdPosition;
if (_isKeepLookingAt) {
lookAt(_lookingAt);
}
}
void Camera::setHmdRotation(const glm::quat& hmdRotation) {
_hmdRotation = hmdRotation;
if (_isKeepLookingAt) {
lookAt(_lookingAt);
}
}
void Camera::setMode(CameraMode mode) {
_mode = mode;
emit modeUpdated(modeToString(mode));

View file

@ -45,26 +45,22 @@ public:
void setRotation(const glm::quat& rotation);
void setProjection(const glm::mat4 & projection);
void setHmdPosition(const glm::vec3& hmdPosition);
void setHmdRotation(const glm::quat& hmdRotation);
void setMode(CameraMode m);
glm::quat getRotation() const { return _rotation * _hmdRotation; }
glm::quat getRotation() const { return _rotation; }
const glm::mat4& getProjection() const { return _projection; }
const glm::vec3& getHmdPosition() const { return _hmdPosition; }
const glm::quat& getHmdRotation() const { return _hmdRotation; }
CameraMode getMode() const { return _mode; }
public slots:
QString getModeString() const;
void setModeString(const QString& mode);
glm::vec3 getPosition() const { return _position + _hmdPosition; }
glm::vec3 getPosition() const { return _position; }
void setPosition(const glm::vec3& position);
void setOrientation(const glm::quat& orientation) { setRotation(orientation); }
glm::quat getOrientation() const { return getRotation(); }
void setOrientation(const glm::quat& orientation) { setRotation(orientation); }
PickRay computePickRay(float x, float y);
// These only work on independent cameras
@ -86,8 +82,6 @@ private:
glm::vec3 _position;
glm::quat _rotation;
glm::mat4 _projection;
glm::vec3 _hmdPosition;
glm::quat _hmdRotation;
bool _isKeepLookingAt;
glm::vec3 _lookingAt;
};

View file

@ -1,114 +0,0 @@
//
// AudioToolBox.cpp
// interface/src/audio
//
// Created by Stephen Birarda on 2014-12-16.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InterfaceConfig.h"
#include <AudioClient.h>
#include <GLCanvas.h>
#include <PathUtils.h>
#include <GeometryCache.h>
#include <gpu/GLBackend.h>
#include "Application.h"
#include "AudioToolBox.h"
// Mute icon configration
const int MUTE_ICON_SIZE = 24;
AudioToolBox::AudioToolBox() :
_iconPulseTimeReference(usecTimestampNow())
{
}
bool AudioToolBox::mousePressEvent(int x, int y) {
if (_iconBounds.contains(x, y)) {
DependencyManager::get<AudioClient>()->toggleMute();
return true;
}
return false;
}
void AudioToolBox::render(int x, int y, int padding, bool boxed) {
glEnable(GL_TEXTURE_2D);
if (!_micTexture) {
_micTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/mic.svg");
}
if (!_muteTexture) {
_muteTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/mic-mute.svg");
}
if (_boxTexture) {
_boxTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/audio-box.svg");
}
auto audioIO = DependencyManager::get<AudioClient>();
if (boxed) {
bool isClipping = ((audioIO->getTimeSinceLastClip() > 0.0f) && (audioIO->getTimeSinceLastClip() < 1.0f));
const int BOX_LEFT_PADDING = 5;
const int BOX_TOP_PADDING = 10;
const int BOX_WIDTH = 266;
const int BOX_HEIGHT = 44;
QRect boxBounds = QRect(x - BOX_LEFT_PADDING, y - BOX_TOP_PADDING, BOX_WIDTH, BOX_HEIGHT);
glm::vec4 quadColor;
if (isClipping) {
quadColor = glm::vec4(1.0f, 0.0f, 0.0f, 1.0f);
} else {
quadColor = glm::vec4(0.41f, 0.41f, 0.41f, 1.0f);
}
glm::vec2 topLeft(boxBounds.left(), boxBounds.top());
glm::vec2 bottomRight(boxBounds.right(), boxBounds.bottom());
static const glm::vec2 texCoordTopLeft(1,1);
static const glm::vec2 texCoordBottomRight(0, 0);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_boxTexture));
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor);
}
float iconColor = 1.0f;
_iconBounds = QRect(x + padding, y, MUTE_ICON_SIZE, MUTE_ICON_SIZE);
if (!audioIO->isMuted()) {
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_micTexture));
iconColor = 1.0f;
} else {
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_muteTexture));
// Make muted icon pulsate
static const float PULSE_MIN = 0.4f;
static const float PULSE_MAX = 1.0f;
static const float PULSE_FREQUENCY = 1.0f; // in Hz
qint64 now = usecTimestampNow();
if (now - _iconPulseTimeReference > (qint64)USECS_PER_SECOND) {
// Prevents t from getting too big, which would diminish glm::cos precision
_iconPulseTimeReference = now - ((now - _iconPulseTimeReference) % USECS_PER_SECOND);
}
float t = (float)(now - _iconPulseTimeReference) / (float)USECS_PER_SECOND;
float pulseFactor = (glm::cos(t * PULSE_FREQUENCY * 2.0f * PI) + 1.0f) / 2.0f;
iconColor = PULSE_MIN + (PULSE_MAX - PULSE_MIN) * pulseFactor;
}
glm::vec4 quadColor(iconColor, iconColor, iconColor, 1.0f);
glm::vec2 topLeft(_iconBounds.left(), _iconBounds.top());
glm::vec2 bottomRight(_iconBounds.right(), _iconBounds.bottom());
glm::vec2 texCoordTopLeft(1,1);
glm::vec2 texCoordBottomRight(0,0);
if (_boxQuadID == GeometryCache::UNKNOWN_ID) {
_boxQuadID = DependencyManager::get<GeometryCache>()->allocateID();
}
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor, _boxQuadID);
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
}

View file

@ -1,36 +0,0 @@
//
// AudioToolBox.h
// interface/src/audio
//
// Created by Stephen Birarda on 2014-12-16.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AudioToolBox_h
#define hifi_AudioToolBox_h
#include <DependencyManager.h>
#include <GeometryCache.h>
#include <QOpenGLTexture>
class AudioToolBox : public Dependency {
SINGLETON_DEPENDENCY
public:
void render(int x, int y, int padding, bool boxed);
bool mousePressEvent(int x, int y);
protected:
AudioToolBox();
private:
gpu::TexturePointer _micTexture;
gpu::TexturePointer _muteTexture;
gpu::TexturePointer _boxTexture;
int _boxQuadID = GeometryCache::UNKNOWN_ID;
QRect _iconBounds;
qint64 _iconPulseTimeReference = 0;
};
#endif // hifi_AudioToolBox_h

View file

@ -107,6 +107,7 @@ Avatar::Avatar() :
}
Avatar::~Avatar() {
assert(_motionState == nullptr);
for(auto attachment : _unusedAttachments) {
delete attachment;
}
@ -444,10 +445,10 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, boo
_skeletonModel.renderJointCollisionShapes(0.7f);
}
if (renderHead && shouldRenderHead(renderArgs, cameraPosition)) {
if (renderHead && shouldRenderHead(renderArgs)) {
getHead()->getFaceModel().renderJointCollisionShapes(0.7f);
}
if (renderBounding && shouldRenderHead(renderArgs, cameraPosition)) {
if (renderBounding && shouldRenderHead(renderArgs)) {
_skeletonModel.renderBoundingCollisionShapes(0.7f);
}
@ -532,6 +533,7 @@ glm::quat Avatar::computeRotationFromBodyToWorldUp(float proportion) const {
}
void Avatar::fixupModelsInScene() {
// check to see if when we added our models to the scene they were ready, if they were not ready, then
// fix them up in the scene
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
@ -580,7 +582,7 @@ void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool
getHead()->render(renderArgs, 1.0f, renderFrustum, postLighting);
}
bool Avatar::shouldRenderHead(const RenderArgs* renderArgs, const glm::vec3& cameraPosition) const {
bool Avatar::shouldRenderHead(const RenderArgs* renderArgs) const {
return true;
}

View file

@ -237,7 +237,7 @@ protected:
Transform calculateDisplayNameTransform(const ViewFrustum& frustum, float fontSize) const;
void renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum) const;
virtual void renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel = 0.0f);
virtual bool shouldRenderHead(const RenderArgs* renderArgs, const glm::vec3& cameraPosition) const;
virtual bool shouldRenderHead(const RenderArgs* renderArgs) const;
virtual void fixupModelsInScene();
void simulateAttachments(float deltaTime);

View file

@ -114,6 +114,7 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
// DO NOT update or fade out uninitialized Avatars
++avatarIterator;
} else if (avatar->shouldDie()) {
removeAvatarMotionState(avatar);
_avatarFades.push_back(avatarIterator.value());
avatarIterator = _avatarHash.erase(avatarIterator);
} else {
@ -163,12 +164,13 @@ AvatarSharedPointer AvatarManager::addAvatar(const QUuid& sessionUUID, const QWe
}
// protected
void AvatarManager::removeAvatarMotionState(Avatar* avatar) {
AvatarMotionState* motionState= avatar->_motionState;
void AvatarManager::removeAvatarMotionState(AvatarSharedPointer avatar) {
auto rawPointer = std::static_pointer_cast<Avatar>(avatar);
AvatarMotionState* motionState= rawPointer->_motionState;
if (motionState) {
// clean up physics stuff
motionState->clearObjectBackPointer();
avatar->_motionState = nullptr;
rawPointer->_motionState = nullptr;
_avatarMotionStates.remove(motionState);
_motionStatesToAdd.remove(motionState);
_motionStatesToDelete.push_back(motionState);
@ -181,7 +183,7 @@ void AvatarManager::removeAvatar(const QUuid& sessionUUID) {
if (avatarIterator != _avatarHash.end()) {
std::shared_ptr<Avatar> avatar = std::dynamic_pointer_cast<Avatar>(avatarIterator.value());
if (avatar != _myAvatar && avatar->isInitialized()) {
removeAvatarMotionState(avatar.get());
removeAvatarMotionState(avatar);
_avatarFades.push_back(avatarIterator.value());
_avatarHash.erase(avatarIterator);
}
@ -197,7 +199,7 @@ void AvatarManager::clearOtherAvatars() {
// don't remove myAvatar or uninitialized avatars from the list
++avatarIterator;
} else {
removeAvatarMotionState(avatar.get());
removeAvatarMotionState(avatar);
_avatarFades.push_back(avatarIterator.value());
avatarIterator = _avatarHash.erase(avatarIterator);
}

View file

@ -73,7 +73,7 @@ private:
// virtual overrides
virtual AvatarSharedPointer newSharedAvatar();
virtual AvatarSharedPointer addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer);
void removeAvatarMotionState(Avatar* avatar);
void removeAvatarMotionState(AvatarSharedPointer avatar);
virtual void removeAvatar(const QUuid& sessionUUID);
QVector<AvatarSharedPointer> _avatarFades;

View file

@ -124,18 +124,25 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
if (!(_isFaceTrackerConnected || billboard)) {
// Update eye saccades
const float AVERAGE_MICROSACCADE_INTERVAL = 0.50f;
const float AVERAGE_SACCADE_INTERVAL = 4.0f;
const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
const float AVERAGE_SACCADE_INTERVAL = 6.0f;
const float MICROSACCADE_MAGNITUDE = 0.002f;
const float SACCADE_MAGNITUDE = 0.04f;
const float MAXIMUM_SACCADE_SPEED = 0.8f;
if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
_saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
} else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
_saccadeTarget = SACCADE_MAGNITUDE * randVector();
}
_saccade += (_saccadeTarget - _saccade) * 0.50f;
glm::vec3 saccadeDelta = (_saccadeTarget - _saccade) * 0.5f;
float speed = glm::length(saccadeDelta) / deltaTime;
if (speed > MAXIMUM_SACCADE_SPEED) {
saccadeDelta = saccadeDelta * MAXIMUM_SACCADE_SPEED / speed;
}
_saccade += saccadeDelta;
// Detect transition from talking to not; force blink after that and a delay
bool forceBlink = false;
const float TALKING_LOUDNESS = 100.0f;
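The saccade change above caps how fast the eye offset may move each frame: the step toward the target is scaled down whenever its implied speed exceeds MAXIMUM_SACCADE_SPEED, so a large jump is spread over several frames instead of snapping in one. A standalone numeric check of that clamp (illustrative only, not part of this commit):

    #include <glm/glm.hpp>
    #include <cstdio>

    int main() {
        const float MAXIMUM_SACCADE_SPEED = 0.8f;
        const float deltaTime = 1.0f / 60.0f;                       // one 60 Hz frame
        glm::vec3 saccade(0.0f);
        glm::vec3 saccadeTarget(0.04f, 0.0f, 0.0f);                 // a full SACCADE_MAGNITUDE jump

        glm::vec3 saccadeDelta = (saccadeTarget - saccade) * 0.5f;  // 0.02 in x
        float speed = glm::length(saccadeDelta) / deltaTime;        // 1.2 units/s, above the cap
        if (speed > MAXIMUM_SACCADE_SPEED) {
            saccadeDelta = saccadeDelta * MAXIMUM_SACCADE_SPEED / speed;
        }
        saccade += saccadeDelta;
        printf("step = %.4f\n", saccade.x);                         // ~0.0133 instead of 0.02
        return 0;
    }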

View file

@ -27,6 +27,7 @@
#include <GeometryUtil.h>
#include <NodeList.h>
#include <PacketHeaders.h>
#include <PathUtils.h>
#include <PerfStat.h>
#include <ShapeCollider.h>
#include <SharedUtil.h>
@ -76,7 +77,6 @@ const float MyAvatar::ZOOM_DEFAULT = 1.5f;
MyAvatar::MyAvatar() :
Avatar(),
_turningKeyPressTime(0.0f),
_gravity(0.0f, 0.0f, 0.0f),
_wasPushing(false),
_isPushing(false),
@ -97,8 +97,12 @@ MyAvatar::MyAvatar() :
_feetTouchFloor(true),
_isLookingAtLeftEye(true),
_realWorldFieldOfView("realWorldFieldOfView",
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES)
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
_firstPersonSkeletonModel(this),
_prevShouldDrawHead(true)
{
_firstPersonSkeletonModel.setIsFirstPerson(true);
ShapeCollider::initDispatchTable();
for (int i = 0; i < MAX_DRIVE_KEYS; i++) {
_driveKeys[i] = 0.0f;
@ -132,6 +136,7 @@ QByteArray MyAvatar::toByteArray() {
void MyAvatar::reset() {
_skeletonModel.reset();
_firstPersonSkeletonModel.reset();
getHead()->reset();
_targetVelocity = glm::vec3(0.0f);
@ -190,6 +195,7 @@ void MyAvatar::simulate(float deltaTime) {
{
PerformanceTimer perfTimer("skeleton");
_skeletonModel.simulate(deltaTime);
_firstPersonSkeletonModel.simulate(deltaTime);
}
if (!_skeletonModel.hasSkeleton()) {
@ -987,16 +993,35 @@ QString MyAvatar::getModelDescription() const {
}
void MyAvatar::setFaceModelURL(const QUrl& faceModelURL) {
Avatar::setFaceModelURL(faceModelURL);
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
getHead()->getFaceModel().setVisibleInScene(_prevShouldDrawHead, scene);
_billboardValid = false;
}
void MyAvatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
Avatar::setSkeletonModelURL(skeletonModelURL);
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
_billboardValid = false;
if (_useFullAvatar) {
_skeletonModel.setVisibleInScene(_prevShouldDrawHead, scene);
const QUrl DEFAULT_SKELETON_MODEL_URL = QUrl::fromLocalFile(PathUtils::resourcesPath() + "meshes/defaultAvatar_body.fst");
_firstPersonSkeletonModel.setURL(_skeletonModelURL, DEFAULT_SKELETON_MODEL_URL, true, !isMyAvatar());
_firstPersonSkeletonModel.setVisibleInScene(!_prevShouldDrawHead, scene);
} else {
_skeletonModel.setVisibleInScene(true, scene);
_firstPersonSkeletonModel.setVisibleInScene(false, scene);
_firstPersonSkeletonModel.reset();
}
}
void MyAvatar::useFullAvatarURL(const QUrl& fullAvatarURL, const QString& modelName) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "useFullAvatarURL", Qt::BlockingQueuedConnection,
Q_ARG(const QUrl&, fullAvatarURL),
@ -1178,17 +1203,13 @@ void MyAvatar::attach(const QString& modelURL, const QString& jointName, const g
void MyAvatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel) {
if (!(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
return; // wait until both models are loaded
return; // wait until all models are loaded
}
// check to see if when we added our models to the scene they were ready, if they were not ready, then
// fix them up in the scene
fixupModelsInScene();
const glm::vec3 cameraPos = Application::getInstance()->getCamera()->getPosition();
// Render head so long as the camera isn't inside it
if (shouldRenderHead(renderArgs, cameraPos)) {
if (shouldRenderHead(renderArgs)) {
getHead()->render(renderArgs, 1.0f, renderFrustum, postLighting);
}
if (postLighting) {
@ -1196,37 +1217,57 @@ void MyAvatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bo
}
}
void MyAvatar::setVisibleInSceneIfReady(Model* model, render::ScenePointer scene, bool visible) {
if (model->isActive() && model->isRenderable()) {
model->setVisibleInScene(visible, scene);
}
}
void MyAvatar::preRender(RenderArgs* renderArgs) {
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
const bool shouldDrawHead = shouldRenderHead(renderArgs);
_skeletonModel.initWhenReady(scene);
if (_useFullAvatar) {
_firstPersonSkeletonModel.initWhenReady(scene);
}
if (shouldDrawHead != _prevShouldDrawHead) {
if (_useFullAvatar) {
if (shouldDrawHead) {
_skeletonModel.setVisibleInScene(true, scene);
_firstPersonSkeletonModel.setVisibleInScene(false, scene);
} else {
_skeletonModel.setVisibleInScene(false, scene);
_firstPersonSkeletonModel.setVisibleInScene(true, scene);
}
} else {
getHead()->getFaceModel().setVisibleInScene(shouldDrawHead, scene);
}
}
_prevShouldDrawHead = shouldDrawHead;
}
const float RENDER_HEAD_CUTOFF_DISTANCE = 0.50f;
bool MyAvatar::shouldRenderHead(const RenderArgs* renderArgs, const glm::vec3& cameraPosition) const {
bool MyAvatar::cameraInsideHead() const {
const Head* head = getHead();
return (renderArgs->_renderMode != RenderArgs::NORMAL_RENDER_MODE) || (Application::getInstance()->getCamera()->getMode() != CAMERA_MODE_FIRST_PERSON) ||
(glm::length(cameraPosition - head->getEyePosition()) > RENDER_HEAD_CUTOFF_DISTANCE * _scale);
const glm::vec3 cameraPosition = Application::getInstance()->getCamera()->getPosition();
return glm::length(cameraPosition - head->getEyePosition()) < (RENDER_HEAD_CUTOFF_DISTANCE * _scale);
}
bool MyAvatar::shouldRenderHead(const RenderArgs* renderArgs) const {
return ((renderArgs->_renderMode != RenderArgs::DEFAULT_RENDER_MODE) ||
(Application::getInstance()->getCamera()->getMode() != CAMERA_MODE_FIRST_PERSON) ||
!cameraInsideHead());
}
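The head is now hidden only when the camera is physically inside it. A minimal standalone sketch of that distance test, with the render-mode and camera-mode checks collapsed into a single hypothetical firstPerson flag (not the repo's RenderArgs/Camera types):

#include <glm/glm.hpp>

// Same cutoff constant as above, scaled by the avatar's overall scale.
bool isCameraInsideHead(const glm::vec3& cameraPos, const glm::vec3& eyePos, float avatarScale) {
    const float RENDER_HEAD_CUTOFF_DISTANCE = 0.50f; // meters
    return glm::length(cameraPos - eyePos) < RENDER_HEAD_CUTOFF_DISTANCE * avatarScale;
}

// Render the head unless we are in first-person view AND the camera is inside it.
bool shouldRenderHead(bool firstPerson, const glm::vec3& cameraPos, const glm::vec3& eyePos, float scale) {
    return !firstPerson || !isCameraInsideHead(cameraPos, eyePos, scale);
}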
void MyAvatar::updateOrientation(float deltaTime) {
// Gather rotation information from keyboard
const float TIME_BETWEEN_HMD_TURNS = 0.5f;
const float HMD_TURN_DEGREES = 22.5f;
if (!qApp->isHMDMode()) {
// Smoothly rotate body with arrow keys if not in HMD
_bodyYawDelta -= _driveKeys[ROT_RIGHT] * YAW_SPEED * deltaTime;
_bodyYawDelta += _driveKeys[ROT_LEFT] * YAW_SPEED * deltaTime;
} else {
// Jump turns if in HMD
if (_driveKeys[ROT_RIGHT] || _driveKeys[ROT_LEFT]) {
if (_turningKeyPressTime == 0.0f) {
setOrientation(getOrientation() *
glm::quat(glm::radians(glm::vec3(0.f, _driveKeys[ROT_LEFT] ? HMD_TURN_DEGREES : -HMD_TURN_DEGREES, 0.0f))));
}
_turningKeyPressTime += deltaTime;
if (_turningKeyPressTime > TIME_BETWEEN_HMD_TURNS) {
_turningKeyPressTime = 0.0f;
}
} else {
_turningKeyPressTime = 0.0f;
}
}
// Smoothly rotate body with arrow keys
_bodyYawDelta -= _driveKeys[ROT_RIGHT] * YAW_SPEED * deltaTime;
_bodyYawDelta += _driveKeys[ROT_LEFT] * YAW_SPEED * deltaTime;
getHead()->setBasePitch(getHead()->getBasePitch() + (_driveKeys[ROT_UP] - _driveKeys[ROT_DOWN]) * PITCH_SPEED * deltaTime);
// update body orientation by movement inputs
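The HMD branch above implements snap ("jump") turns: while a turn key is held, the body yaws by a fixed 22.5 degree step at most once every 0.5 s instead of rotating smoothly. A standalone sketch of that timing logic, with a hypothetical applyYaw callback standing in for setOrientation:

#include <functional>

struct SnapTurner {
    float timeBetweenTurns = 0.5f;   // mirrors TIME_BETWEEN_HMD_TURNS (seconds)
    float turnDegrees = 22.5f;       // mirrors HMD_TURN_DEGREES
    float keyPressTime = 0.0f;       // accumulates while a turn key is held

    // direction: +1 for left, -1 for right, 0 when no turn key is held.
    void update(float deltaTime, int direction, const std::function<void(float)>& applyYaw) {
        if (direction == 0) {
            keyPressTime = 0.0f;                 // releasing the key re-arms the snap
            return;
        }
        if (keyPressTime == 0.0f) {
            applyYaw(direction * turnDegrees);   // snap exactly once per interval
        }
        keyPressTime += deltaTime;
        if (keyPressTime > timeBetweenTurns) {
            keyPressTime = 0.0f;                 // allow another snap while held
        }
    }
};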
@ -1468,7 +1509,7 @@ void MyAvatar::maybeUpdateBillboard() {
}
gpu::Context context(new gpu::GLBackend());
RenderArgs renderArgs(&context);
QImage image = Application::getInstance()->renderAvatarBillboard(&renderArgs);
QImage image = qApp->renderAvatarBillboard(&renderArgs);
_billboard.clear();
QBuffer buffer(&_billboard);
buffer.open(QIODevice::WriteOnly);
@ -1567,7 +1608,6 @@ void MyAvatar::renderLaserPointers() {
//Gets the tip position for the laser pointer
glm::vec3 MyAvatar::getLaserPointerTipPosition(const PalmData* palm) {
const ApplicationOverlay& applicationOverlay = Application::getInstance()->getApplicationOverlay();
glm::vec3 direction = glm::normalize(palm->getTipPosition() - palm->getPosition());
glm::vec3 position = palm->getPosition();
@ -1576,7 +1616,8 @@ glm::vec3 MyAvatar::getLaserPointerTipPosition(const PalmData* palm) {
glm::vec3 result;
if (applicationOverlay.calculateRayUICollisionPoint(position, direction, result)) {
const auto& compositor = qApp->getApplicationCompositor();
if (compositor.calculateRayUICollisionPoint(position, direction, result)) {
return result;
}

View file

@ -35,11 +35,12 @@ public:
void reset();
void update(float deltaTime);
void simulate(float deltaTime);
void preRender(RenderArgs* renderArgs);
void updateFromTrackers(float deltaTime);
virtual void render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, bool postLighting = false) override;
virtual void renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel = 0.0f) override;
virtual bool shouldRenderHead(const RenderArgs* renderArgs, const glm::vec3& cameraPosition) const override;
virtual bool shouldRenderHead(const RenderArgs* renderArgs) const override;
void renderDebugBodyPoints();
// setters
@ -206,11 +207,14 @@ signals:
private:
bool cameraInsideHead() const;
// These are made private for MyAvatar so that you will use the "use" methods instead
virtual void setFaceModelURL(const QUrl& faceModelURL);
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL);
float _turningKeyPressTime;
void setVisibleInSceneIfReady(Model* model, render::ScenePointer scene, bool visiblity);
glm::vec3 _gravity;
float _driveKeys[MAX_DRIVE_KEYS];
@ -266,6 +270,10 @@ private:
QString _headModelName;
QString _bodyModelName;
QString _fullAvatarModelName;
// used for rendering when in first person view or when in an HMD.
SkeletonModel _firstPersonSkeletonModel;
bool _prevShouldDrawHead;
};
#endif // hifi_MyAvatar_h

View file

@ -21,6 +21,7 @@
#include "Menu.h"
#include "SkeletonModel.h"
#include "Util.h"
#include "InterfaceLogging.h"
enum StandingFootState {
LEFT_FOOT,
@ -38,7 +39,8 @@ SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent) :
_standingFoot(NO_FOOT),
_standingOffset(0.0f),
_clampedFootPosition(0.0f),
_headClipDistance(DEFAULT_NEAR_CLIP)
_headClipDistance(DEFAULT_NEAR_CLIP),
_isFirstPerson(false)
{
assert(_owningAvatar);
_enableShapes = true;
@ -98,7 +100,7 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
setRotation(_owningAvatar->getOrientation() * refOrientation);
setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningAvatar->getScale());
setBlendshapeCoefficients(_owningAvatar->getHead()->getBlendshapeCoefficients());
Model::simulate(deltaTime, fullUpdate);
if (!isActive() || !_owningAvatar->isMyAvatar()) {
@ -140,6 +142,11 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
applyPalmData(geometry.rightHandJointIndex, hand->getPalms()[rightPalmIndex]);
}
if (_isFirstPerson) {
cauterizeHead();
updateClusterMatrices();
}
_boundingShape.setTranslation(_translation + _rotation * _boundingShapeLocalOffset);
_boundingShape.setRotation(_rotation);
}
@ -806,3 +813,57 @@ void SkeletonModel::renderBoundingCollisionShapes(float alpha) {
bool SkeletonModel::hasSkeleton() {
return isActive() ? _geometry->getFBXGeometry().rootJointIndex != -1 : false;
}
void SkeletonModel::initHeadBones() {
_headBones.clear();
const FBXGeometry& fbxGeometry = _geometry->getFBXGeometry();
const int neckJointIndex = fbxGeometry.neckJointIndex;
std::queue<int> q;
q.push(neckJointIndex);
_headBones.push_back(neckJointIndex);
// fbxJoints only hold links to parents not children, so we have to do a bit of extra work here.
while (q.size() > 0) {
int jointIndex = q.front();
for (int i = 0; i < fbxGeometry.joints.size(); i++) {
const FBXJoint& fbxJoint = fbxGeometry.joints[i];
if (jointIndex == fbxJoint.parentIndex) {
_headBones.push_back(i);
q.push(i);
}
}
q.pop();
}
}
void SkeletonModel::invalidateHeadBones() {
_headBones.clear();
}
void SkeletonModel::cauterizeHead() {
if (isActive()) {
const FBXGeometry& geometry = _geometry->getFBXGeometry();
const int neckJointIndex = geometry.neckJointIndex;
if (neckJointIndex > 0 && neckJointIndex < _jointStates.size()) {
// lazy init of headBones
if (_headBones.size() == 0) {
initHeadBones();
}
// preserve the translation for the neck
glm::vec4 trans = _jointStates[neckJointIndex].getTransform()[3];
glm::vec4 zero(0, 0, 0, 0);
for (const int &i : _headBones) {
JointState& joint = _jointStates[i];
glm::mat4 newXform(zero, zero, zero, trans);
joint.setTransform(newXform);
joint.setVisibleTransform(newXform);
}
}
}
}
void SkeletonModel::onInvalidate() {
invalidateHeadBones();
}
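The cauterization above first collects every joint below the neck by walking the parent-only joint table, then collapses those bones to a zero basis while keeping the neck translation, so the head mesh vanishes in first person without moving the neck pivot. A simplified, self-contained sketch of the same idea (hypothetical Joint struct, not the repo's JointState):

#include <vector>
#include <queue>
#include <glm/glm.hpp>

struct Joint {
    int parentIndex;      // -1 for the root; children are not stored directly
    glm::mat4 transform;
};

// Breadth-first walk from the neck: any joint whose parent is already in the
// head set is itself part of the head.
std::vector<int> collectHeadBones(const std::vector<Joint>& joints, int neckIndex) {
    std::vector<int> headBones{ neckIndex };
    std::queue<int> pending;
    pending.push(neckIndex);
    while (!pending.empty()) {
        int current = pending.front();
        pending.pop();
        for (int i = 0; i < (int)joints.size(); ++i) {
            if (joints[i].parentIndex == current) {
                headBones.push_back(i);
                pending.push(i);
            }
        }
    }
    return headBones;
}

// Zero every basis column of each head bone but keep the neck translation,
// collapsing the head geometry to a point at the neck.
void cauterize(std::vector<Joint>& joints, const std::vector<int>& headBones, int neckIndex) {
    glm::vec4 neckTranslation = joints[neckIndex].transform[3];
    glm::mat4 collapsed(glm::vec4(0.0f), glm::vec4(0.0f), glm::vec4(0.0f), neckTranslation);
    for (int i : headBones) {
        joints[i].transform = collapsed;
    }
}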

View file

@ -112,6 +112,11 @@ public:
float getHeadClipDistance() const { return _headClipDistance; }
void setIsFirstPerson(bool value) { _isFirstPerson = value; }
bool getIsFirstPerson() const { return _isFirstPerson; }
virtual void onInvalidate() override;
signals:
void skeletonLoaded();
@ -132,7 +137,11 @@ protected:
void maybeUpdateLeanRotation(const JointState& parentState, JointState& state);
void maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
void maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
void cauterizeHead();
void initHeadBones();
void invalidateHeadBones();
private:
void renderJointConstraints(int jointIndex);
@ -164,6 +173,9 @@ private:
glm::vec3 _clampedFootPosition;
float _headClipDistance; // Near clip distance to use if no separate head model
bool _isFirstPerson;
std::vector<int> _headBones;
};
#endif // hifi_SkeletonModel_h

View file

@ -1,121 +0,0 @@
//
// CameraToolBox.cpp
// interface/src/devices
//
// Created by David Rowe on 30 Apr 2015.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InterfaceConfig.h"
#include <GLCanvas.h>
#include <PathUtils.h>
#include "gpu/GLBackend.h"
#include "Application.h"
#include "CameraToolBox.h"
#include "FaceTracker.h"
CameraToolBox::CameraToolBox() :
_iconPulseTimeReference(usecTimestampNow()),
_doubleClickTimer(NULL)
{
}
CameraToolBox::~CameraToolBox() {
if (_doubleClickTimer) {
_doubleClickTimer->stop();
delete _doubleClickTimer;
}
}
bool CameraToolBox::mousePressEvent(int x, int y) {
if (_iconBounds.contains(x, y)) {
if (!_doubleClickTimer) {
// Toggle mute after waiting to check that it's not a double-click.
const int DOUBLE_CLICK_WAIT = 200; // ms
_doubleClickTimer = new QTimer(this);
connect(_doubleClickTimer, SIGNAL(timeout()), this, SLOT(toggleMute()));
_doubleClickTimer->setSingleShot(true);
_doubleClickTimer->setInterval(DOUBLE_CLICK_WAIT);
_doubleClickTimer->start();
}
return true;
}
return false;
}
bool CameraToolBox::mouseDoublePressEvent(int x, int y) {
if (_iconBounds.contains(x, y)) {
if (_doubleClickTimer) {
_doubleClickTimer->stop();
delete _doubleClickTimer;
_doubleClickTimer = NULL;
}
Application::getInstance()->resetSensors();
return true;
}
return false;
}
void CameraToolBox::toggleMute() {
delete _doubleClickTimer;
_doubleClickTimer = NULL;
FaceTracker* faceTracker = Application::getInstance()->getSelectedFaceTracker();
if (faceTracker) {
faceTracker->toggleMute();
}
}
void CameraToolBox::render(int x, int y, bool boxed) {
glEnable(GL_TEXTURE_2D);
if (!_enabledTexture) {
_enabledTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/face.svg");
}
if (!_mutedTexture) {
_mutedTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/face-mute.svg");
}
const int MUTE_ICON_SIZE = 24;
_iconBounds = QRect(x, y, MUTE_ICON_SIZE, MUTE_ICON_SIZE);
float iconColor = 1.0f;
if (!Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking)) {
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_enabledTexture));
} else {
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_mutedTexture));
// Make muted icon pulsate
static const float PULSE_MIN = 0.4f;
static const float PULSE_MAX = 1.0f;
static const float PULSE_FREQUENCY = 1.0f; // in Hz
qint64 now = usecTimestampNow();
if (now - _iconPulseTimeReference > (qint64)USECS_PER_SECOND) {
// Prevents t from getting too big, which would diminish glm::cos precision
_iconPulseTimeReference = now - ((now - _iconPulseTimeReference) % USECS_PER_SECOND);
}
float t = (float)(now - _iconPulseTimeReference) / (float)USECS_PER_SECOND;
float pulseFactor = (glm::cos(t * PULSE_FREQUENCY * 2.0f * PI) + 1.0f) / 2.0f;
iconColor = PULSE_MIN + (PULSE_MAX - PULSE_MIN) * pulseFactor;
}
glm::vec4 quadColor(iconColor, iconColor, iconColor, 1.0f);
glm::vec2 topLeft(_iconBounds.left(), _iconBounds.top());
glm::vec2 bottomRight(_iconBounds.right(), _iconBounds.bottom());
glm::vec2 texCoordTopLeft(1,1);
glm::vec2 texCoordBottomRight(0,0);
if (_boxQuadID == GeometryCache::UNKNOWN_ID) {
_boxQuadID = DependencyManager::get<GeometryCache>()->allocateID();
}
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor, _boxQuadID);
glDisable(GL_TEXTURE_2D);
}

View file

@ -1,45 +0,0 @@
//
// CameraToolBox.h
// interface/src/devices
//
// Created by David Rowe on 30 Apr 2015.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_CameraToolBox_h
#define hifi_CameraToolBox_h
#include <QObject>
#include <DependencyManager.h>
#include <GeometryCache.h>
class CameraToolBox : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
public:
void render(int x, int y, bool boxed);
bool mousePressEvent(int x, int y);
bool mouseDoublePressEvent(int x, int y);
protected:
CameraToolBox();
~CameraToolBox();
private slots:
void toggleMute();
private:
gpu::TexturePointer _enabledTexture;
gpu::TexturePointer _mutedTexture;
int _boxQuadID = GeometryCache::UNKNOWN_ID;
QRect _iconBounds;
qint64 _iconPulseTimeReference = 0;
QTimer* _doubleClickTimer;
};
#endif // hifi_CameraToolBox_h

View file

@ -17,7 +17,7 @@
#include "Joystick.h"
const float CONTROLLER_THRESHOLD = 0.25f;
const float CONTROLLER_THRESHOLD = 0.3f;
#ifdef HAVE_SDL2
const float MAX_AXIS = 32768.0f;
@ -173,9 +173,7 @@ void Joystick::assignDefaultInputMapping(UserInputMapper& mapper) {
// Button controls
mapper.addInputChannel(UserInputMapper::VERTICAL_UP, makeInput(SDL_CONTROLLER_BUTTON_Y), DPAD_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::VERTICAL_DOWN, makeInput(SDL_CONTROLLER_BUTTON_A), DPAD_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::YAW_LEFT, makeInput(SDL_CONTROLLER_BUTTON_X), JOYSTICK_YAW_SPEED);
mapper.addInputChannel(UserInputMapper::YAW_RIGHT, makeInput(SDL_CONTROLLER_BUTTON_B), JOYSTICK_YAW_SPEED);
mapper.addInputChannel(UserInputMapper::VERTICAL_DOWN, makeInput(SDL_CONTROLLER_BUTTON_X), DPAD_MOVE_SPEED);
// Zoom
mapper.addInputChannel(UserInputMapper::BOOM_IN, makeInput(RIGHT_SHOULDER), BOOM_SPEED);
@ -203,13 +201,16 @@ void Joystick::assignDefaultInputMapping(UserInputMapper& mapper) {
// Button controls
mapper.addInputChannel(UserInputMapper::VERTICAL_UP, makeInput(SDL_CONTROLLER_BUTTON_Y), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), DPAD_MOVE_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::VERTICAL_DOWN, makeInput(SDL_CONTROLLER_BUTTON_A), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), DPAD_MOVE_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::YAW_LEFT, makeInput(SDL_CONTROLLER_BUTTON_X), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), JOYSTICK_YAW_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::YAW_RIGHT, makeInput(SDL_CONTROLLER_BUTTON_B), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), JOYSTICK_YAW_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::VERTICAL_DOWN, makeInput(SDL_CONTROLLER_BUTTON_X), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), DPAD_MOVE_SPEED/2.0f);
// Zoom
mapper.addInputChannel(UserInputMapper::BOOM_IN, makeInput(RIGHT_SHOULDER), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), BOOM_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::BOOM_OUT, makeInput(LEFT_SHOULDER), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), BOOM_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::SHIFT, makeInput(SDL_CONTROLLER_BUTTON_LEFTSHOULDER));
mapper.addInputChannel(UserInputMapper::ACTION1, makeInput(SDL_CONTROLLER_BUTTON_B));
mapper.addInputChannel(UserInputMapper::ACTION2, makeInput(SDL_CONTROLLER_BUTTON_A));
#endif
}

View file

@ -276,7 +276,10 @@ void KeyboardMouseDevice::assignDefaultInputMapping(UserInputMapper& mapper) {
mapper.addInputChannel(UserInputMapper::LATERAL_RIGHT, makeInput(MOUSE_AXIS_WHEEL_X_POS), BUTTON_YAW_SPEED);
#endif
mapper.addInputChannel(UserInputMapper::SHIFT, makeInput(Qt::Key_Space));
mapper.addInputChannel(UserInputMapper::ACTION1, makeInput(Qt::Key_R));
mapper.addInputChannel(UserInputMapper::ACTION2, makeInput(Qt::Key_T));
}
float KeyboardMouseDevice::getButton(int channel) const {

View file

@ -11,10 +11,10 @@
//
#include "InterfaceConfig.h"
#include "OculusManager.h"
#include "ui/overlays/Text3DOverlay.h"
#include <CursorManager.h>
#include <QDesktopWidget>
#include <QGuiApplication>
#include <QScreen>
@ -37,7 +37,6 @@
#include "InterfaceLogging.h"
#include "Application.h"
template <typename Function>
void for_each_eye(Function function) {
for (ovrEyeType eye = ovrEyeType::ovrEye_Left;
@ -54,6 +53,71 @@ void for_each_eye(const ovrHmd & hmd, Function function) {
function(eye);
}
}
enum CalibrationState {
UNCALIBRATED,
WAITING_FOR_DELTA,
WAITING_FOR_ZERO,
WAITING_FOR_ZERO_HELD,
CALIBRATED
};
inline glm::mat4 toGlm(const ovrMatrix4f & om) {
return glm::transpose(glm::make_mat4(&om.M[0][0]));
}
inline glm::mat4 toGlm(const ovrFovPort & fovport, float nearPlane = 0.01f, float farPlane = 10000.0f) {
return toGlm(ovrMatrix4f_Projection(fovport, nearPlane, farPlane, true));
}
inline glm::vec3 toGlm(const ovrVector3f & ov) {
return glm::make_vec3(&ov.x);
}
inline glm::vec2 toGlm(const ovrVector2f & ov) {
return glm::make_vec2(&ov.x);
}
inline glm::ivec2 toGlm(const ovrVector2i & ov) {
return glm::ivec2(ov.x, ov.y);
}
inline glm::uvec2 toGlm(const ovrSizei & ov) {
return glm::uvec2(ov.w, ov.h);
}
inline glm::quat toGlm(const ovrQuatf & oq) {
return glm::make_quat(&oq.x);
}
inline glm::mat4 toGlm(const ovrPosef & op) {
glm::mat4 orientation = glm::mat4_cast(toGlm(op.Orientation));
glm::mat4 translation = glm::translate(glm::mat4(), toGlm(op.Position));
return translation * orientation;
}
inline ovrMatrix4f ovrFromGlm(const glm::mat4 & m) {
ovrMatrix4f result;
glm::mat4 transposed(glm::transpose(m));
memcpy(result.M, &(transposed[0][0]), sizeof(float) * 16);
return result;
}
inline ovrVector3f ovrFromGlm(const glm::vec3 & v) {
return{ v.x, v.y, v.z };
}
inline ovrVector2f ovrFromGlm(const glm::vec2 & v) {
return{ v.x, v.y };
}
inline ovrSizei ovrFromGlm(const glm::uvec2 & v) {
return{ (int)v.x, (int)v.y };
}
inline ovrQuatf ovrFromGlm(const glm::quat & q) {
return{ q.x, q.y, q.z, q.w };
}
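The matrix helpers above exist mainly because OVR stores matrices row-major while GLM stores them column-major, so both directions go through a transpose. A standalone illustration of just that layout flip, using plain arrays instead of the ovrMatrix4f type:

#include <cstring>
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>

// OVR-style M[row][col] -> glm::mat4 (columns): load then transpose.
glm::mat4 rowMajorToGlm(const float m[4][4]) {
    return glm::transpose(glm::make_mat4(&m[0][0]));
}

// glm::mat4 -> OVR-style row-major array: transpose then copy the 16 floats.
void glmToRowMajor(const glm::mat4& m, float out[4][4]) {
    glm::mat4 transposed = glm::transpose(m);
    std::memcpy(out, glm::value_ptr(transposed), sizeof(float) * 16);
}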
#ifdef Q_OS_WIN
@ -171,54 +235,54 @@ private:
}
};
SwapFramebufferWrapper* OculusManager::_swapFbo{ nullptr };
MirrorFramebufferWrapper* OculusManager::_mirrorFbo{ nullptr };
ovrLayerEyeFov OculusManager::_sceneLayer;
static SwapFramebufferWrapper* _swapFbo{ nullptr };
static MirrorFramebufferWrapper* _mirrorFbo{ nullptr };
static ovrLayerEyeFov _sceneLayer;
#else
ovrTexture OculusManager::_eyeTextures[ovrEye_Count];
GlWindow* OculusManager::_outputWindow{ nullptr };
static ovrTexture _eyeTextures[ovrEye_Count];
static GlWindow* _outputWindow{ nullptr };
#endif
bool OculusManager::_isConnected = false;
ovrHmd OculusManager::_ovrHmd;
ovrFovPort OculusManager::_eyeFov[ovrEye_Count];
ovrVector3f OculusManager::_eyeOffset[ovrEye_Count];
ovrEyeRenderDesc OculusManager::_eyeRenderDesc[ovrEye_Count];
ovrSizei OculusManager::_renderTargetSize;
glm::mat4 OculusManager::_eyeProjection[ovrEye_Count];
unsigned int OculusManager::_frameIndex = 0;
bool OculusManager::_frameTimingActive = false;
Camera* OculusManager::_camera = NULL;
ovrEyeType OculusManager::_activeEye = ovrEye_Count;
bool OculusManager::_hswDismissed = false;
static bool _isConnected = false;
static ovrHmd _ovrHmd;
static ovrFovPort _eyeFov[ovrEye_Count];
static ovrVector3f _eyeOffset[ovrEye_Count];
static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count];
static ovrSizei _renderTargetSize;
static glm::mat4 _eyeProjection[ovrEye_Count];
static unsigned int _frameIndex = 0;
static bool _frameTimingActive = false;
static Camera* _camera = NULL;
static ovrEyeType _activeEye = ovrEye_Count;
static bool _hswDismissed = false;
float OculusManager::CALIBRATION_DELTA_MINIMUM_LENGTH = 0.02f;
float OculusManager::CALIBRATION_DELTA_MINIMUM_ANGLE = 5.0f * RADIANS_PER_DEGREE;
float OculusManager::CALIBRATION_ZERO_MAXIMUM_LENGTH = 0.01f;
float OculusManager::CALIBRATION_ZERO_MAXIMUM_ANGLE = 2.0f * RADIANS_PER_DEGREE;
quint64 OculusManager::CALIBRATION_ZERO_HOLD_TIME = 3000000; // usec
float OculusManager::CALIBRATION_MESSAGE_DISTANCE = 2.5f;
OculusManager::CalibrationState OculusManager::_calibrationState;
glm::vec3 OculusManager::_calibrationPosition;
glm::quat OculusManager::_calibrationOrientation;
quint64 OculusManager::_calibrationStartTime;
int OculusManager::_calibrationMessage = 0;
glm::vec3 OculusManager::_eyePositions[ovrEye_Count];
static const float CALIBRATION_DELTA_MINIMUM_LENGTH = 0.02f;
static const float CALIBRATION_DELTA_MINIMUM_ANGLE = 5.0f * RADIANS_PER_DEGREE;
static const float CALIBRATION_ZERO_MAXIMUM_LENGTH = 0.01f;
static const float CALIBRATION_ZERO_MAXIMUM_ANGLE = 2.0f * RADIANS_PER_DEGREE;
static const quint64 CALIBRATION_ZERO_HOLD_TIME = 3000000; // usec
static const float CALIBRATION_MESSAGE_DISTANCE = 2.5f;
static CalibrationState _calibrationState;
static glm::vec3 _calibrationPosition;
static glm::quat _calibrationOrientation;
static quint64 _calibrationStartTime;
static int _calibrationMessage = 0;
static glm::vec3 _eyePositions[ovrEye_Count];
// TODO expose this as a developer toggle
bool OculusManager::_eyePerFrameMode = false;
ovrEyeType OculusManager::_lastEyeRendered = ovrEye_Count;
ovrSizei OculusManager::_recommendedTexSize = { 0, 0 };
float OculusManager::_offscreenRenderScale = 1.0;
ovrRecti OculusManager::_eyeViewports[ovrEye_Count];
static bool _eyePerFrameMode = false;
static ovrEyeType _lastEyeRendered = ovrEye_Count;
static ovrSizei _recommendedTexSize = { 0, 0 };
static float _offscreenRenderScale = 1.0;
static glm::mat4 _combinedProjection;
static ovrPosef _eyeRenderPoses[ovrEye_Count];
static ovrRecti _eyeViewports[ovrEye_Count];
static ovrVector3f _eyeOffsets[ovrEye_Count];
void OculusManager::init() {
}
void OculusManager::deinit() {
}
glm::vec3 OculusManager::getLeftEyePosition() { return _eyePositions[ovrEye_Left]; }
glm::vec3 OculusManager::getRightEyePosition() { return _eyePositions[ovrEye_Right]; }
void OculusManager::connect(QOpenGLContext* shareContext) {
qCDebug(interfaceapp) << "Oculus SDK" << OVR_VERSION_STRING;
@ -269,7 +333,15 @@ void OculusManager::connect(QOpenGLContext* shareContext) {
for_each_eye([&](ovrEyeType eye) {
_eyeFov[eye] = _ovrHmd->DefaultEyeFov[eye];
_eyeProjection[eye] = toGlm(ovrMatrix4f_Projection(_eyeFov[eye],
DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
ovrEyeRenderDesc erd = ovrHmd_GetRenderDesc(_ovrHmd, eye, _eyeFov[eye]);
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
});
ovrFovPort combinedFov = _ovrHmd->MaxEyeFov[0];
combinedFov.RightTan = _ovrHmd->MaxEyeFov[1].RightTan;
_combinedProjection = toGlm(ovrMatrix4f_Projection(combinedFov,
DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
_recommendedTexSize = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left, _eyeFov[ovrEye_Left], 1.0f);
_renderTargetSize = { _recommendedTexSize.w * 2, _recommendedTexSize.h };
@ -387,7 +459,7 @@ void OculusManager::disconnect() {
}
}
void OculusManager::positionCalibrationBillboard(Text3DOverlay* billboard) {
void positionCalibrationBillboard(Text3DOverlay* billboard) {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::quat headOrientation = myAvatar->getHeadOrientation();
headOrientation.x = 0;
@ -398,7 +470,7 @@ void OculusManager::positionCalibrationBillboard(Text3DOverlay* billboard) {
billboard->setRotation(headOrientation);
}
void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) {
void calibrate(const glm::vec3& position, const glm::quat& orientation) {
static QString instructionMessage = "Hold still to calibrate";
static QString progressMessage;
static Text3DOverlay* billboard;
@ -528,18 +600,12 @@ void OculusManager::endFrameTiming() {
//Sets the camera FoV and aspect ratio
void OculusManager::configureCamera(Camera& camera) {
ovrFovPort fov;
if (_activeEye == ovrEye_Count) {
// When not rendering, provide a FOV encompassing both eyes
fov = _eyeFov[0];
fov.RightTan = _eyeFov[1].RightTan;
} else {
// When rendering, provide the exact FOV
fov = _eyeFov[_activeEye];
}
// Convert the FOV to the correct projection matrix
glm::mat4 projection = toGlm(ovrMatrix4f_Projection(fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
camera.setProjection(projection);
camera.setProjection(_combinedProjection);
return;
}
camera.setProjection(_eyeProjection[_activeEye]);
}
//Displays everything for the oculus, frame timing must be active
@ -609,13 +675,11 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
}
trackerPosition = bodyOrientation * trackerPosition;
static ovrVector3f eyeOffsets[2] = { { 0, 0, 0 }, { 0, 0, 0 } };
ovrPosef eyePoses[ovrEye_Count];
ovrHmd_GetEyePoses(_ovrHmd, _frameIndex, eyeOffsets, eyePoses, nullptr);
ovrHmd_GetEyePoses(_ovrHmd, _frameIndex, _eyeOffsets, eyePoses, nullptr);
#ifndef Q_OS_WIN
ovrHmd_BeginFrame(_ovrHmd, _frameIndex);
#endif
static ovrPosef eyeRenderPose[ovrEye_Count];
//Render each eye into an fbo
for_each_eye(_ovrHmd, [&](ovrEyeType eye){
// If we're in eye-per-frame mode, only render one eye
@ -625,29 +689,17 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
return;
}
_lastEyeRendered = _activeEye = eye;
eyeRenderPose[eye] = eyePoses[eye];
_eyeRenderPoses[eye] = eyePoses[eye];
// Set the camera rotation for this eye
orientation.x = eyeRenderPose[eye].Orientation.x;
orientation.y = eyeRenderPose[eye].Orientation.y;
orientation.z = eyeRenderPose[eye].Orientation.z;
orientation.w = eyeRenderPose[eye].Orientation.w;
// Update the application camera with the latest HMD position
whichCamera.setHmdPosition(trackerPosition);
whichCamera.setHmdRotation(orientation);
vec3 eyePosition = toGlm(_eyeRenderPoses[eye].Position);
eyePosition = whichCamera.getRotation() * eyePosition;
quat eyeRotation = toGlm(_eyeRenderPoses[eye].Orientation);
// Update our camera to what the application camera is doing
_camera->setRotation(whichCamera.getRotation());
_camera->setPosition(whichCamera.getPosition());
_camera->setRotation(whichCamera.getRotation() * eyeRotation);
_camera->setPosition(whichCamera.getPosition() + eyePosition);
configureCamera(*_camera);
// Store the latest left and right eye render locations for things that need to know
glm::vec3 thisEyePosition = position + trackerPosition +
(bodyOrientation * glm::quat(orientation.x, orientation.y, orientation.z, orientation.w) *
glm::vec3(_eyeRenderDesc[eye].HmdToEyeViewOffset.x, _eyeRenderDesc[eye].HmdToEyeViewOffset.y, _eyeRenderDesc[eye].HmdToEyeViewOffset.z));
_eyePositions[eye] = thisEyePosition;
_camera->update(1.0f / Application::getInstance()->getFps());
glMatrixMode(GL_PROJECTION);
@ -662,8 +714,8 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
renderArgs->_renderSide = RenderArgs::MONO;
qApp->displaySide(renderArgs, *_camera, false);
qApp->getApplicationOverlay().displayOverlayTextureHmd(*_camera);
qApp->displaySide(renderArgs, *_camera);
qApp->getApplicationCompositor().displayOverlayTextureHmd(renderArgs, eye);
});
_activeEye = ovrEye_Count;
@ -709,7 +761,7 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
// Submit the frame to the Oculus SDK for timewarp and distortion
for_each_eye([&](ovrEyeType eye) {
_sceneLayer.RenderPose[eye] = eyeRenderPose[eye];
_sceneLayer.RenderPose[eye] = _eyeRenderPoses[eye];
});
auto header = &_sceneLayer.Header;
ovrResult res = ovrHmd_SubmitFrame(_ovrHmd, _frameIndex, nullptr, &header, 1);
@ -725,6 +777,7 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
// rendering to complete before it starts the distortion rendering,
// but without triggering a CPU/GPU synchronization
glWaitSync(syncObject, 0, GL_TIMEOUT_IGNORED);
glDeleteSync(syncObject);
GLuint textureId = gpu::GLBackend::getTextureID(finalFbo->getRenderBuffer(0));
for_each_eye([&](ovrEyeType eye) {
@ -734,10 +787,21 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
});
// restore our normal viewport
ovrHmd_EndFrame(_ovrHmd, eyeRenderPose, _eyeTextures);
ovrHmd_EndFrame(_ovrHmd, _eyeRenderPoses, _eyeTextures);
glCanvas->makeCurrent();
#endif
// in order to account for changes in the pick ray caused by head movement,
// we need to force a mouse move event on every frame (perhaps we could change this
// to be based on the head moving a minimum distance from the last position we
// sent?)
{
QMouseEvent mouseEvent(QEvent::MouseMove, glCanvas->mapFromGlobal(QCursor::pos()),
Qt::NoButton, Qt::NoButton, 0);
qApp->mouseMoveEvent(&mouseEvent, 0);
}
}
@ -835,3 +899,15 @@ int OculusManager::getHMDScreen() {
#endif
}
mat4 OculusManager::getEyeProjection(int eye) {
return _eyeProjection[eye];
}
mat4 OculusManager::getEyePose(int eye) {
return toGlm(_eyeRenderPoses[eye]);
}
mat4 OculusManager::getHeadPose() {
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
return toGlm(ts.HeadPose.ThePose);
}

View file

@ -13,8 +13,6 @@
#ifndef hifi_OculusManager_h
#define hifi_OculusManager_h
#include <OVR_CAPI.h>
#include <ProgramObject.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
@ -23,23 +21,11 @@
#include "RenderArgs.h"
class QOpenGLContext;
class Camera;
class GlWindow;
class PalmData;
class Text3DOverlay;
#ifdef Q_OS_WIN
struct SwapFramebufferWrapper;
struct MirrorFramebufferWrapper;
#endif
/// Handles interaction with the Oculus Rift.
class OculusManager {
public:
static void init();
static void deinit();
static void connect(QOpenGLContext* shareContext);
static void disconnect();
static bool isConnected();
@ -59,121 +45,14 @@ public:
static void overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane);
static glm::vec3 getLeftEyePosition() { return _eyePositions[ovrEye_Left]; }
static glm::vec3 getRightEyePosition() { return _eyePositions[ovrEye_Right]; }
static glm::vec3 getLeftEyePosition();
static glm::vec3 getRightEyePosition();
static int getHMDScreen();
private:
static void initSdk();
static void shutdownSdk();
static bool _isConnected;
static glm::vec3 _eyePositions[ovrEye_Count];
static ovrHmd _ovrHmd;
static ovrFovPort _eyeFov[ovrEye_Count];
static ovrVector3f _eyeOffset[ovrEye_Count];
static glm::mat4 _eyeProjection[ovrEye_Count];
static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count];
static ovrRecti _eyeViewports[ovrEye_Count];
static ovrSizei _renderTargetSize;
static unsigned int _frameIndex;
static bool _frameTimingActive;
static Camera* _camera;
static ovrEyeType _activeEye;
static bool _hswDismissed;
static void calibrate(const glm::vec3 position, const glm::quat orientation);
enum CalibrationState {
UNCALIBRATED,
WAITING_FOR_DELTA,
WAITING_FOR_ZERO,
WAITING_FOR_ZERO_HELD,
CALIBRATED
};
static void positionCalibrationBillboard(Text3DOverlay* message);
static float CALIBRATION_DELTA_MINIMUM_LENGTH;
static float CALIBRATION_DELTA_MINIMUM_ANGLE;
static float CALIBRATION_ZERO_MAXIMUM_LENGTH;
static float CALIBRATION_ZERO_MAXIMUM_ANGLE;
static quint64 CALIBRATION_ZERO_HOLD_TIME;
static float CALIBRATION_MESSAGE_DISTANCE;
static CalibrationState _calibrationState;
static glm::vec3 _calibrationPosition;
static glm::quat _calibrationOrientation;
static quint64 _calibrationStartTime;
static int _calibrationMessage;
// TODO drop this variable and use the existing 'Developer | Render | Scale Resolution' value
static ovrSizei _recommendedTexSize;
static float _offscreenRenderScale;
static bool _eyePerFrameMode;
static ovrEyeType _lastEyeRendered;
#ifdef Q_OS_WIN
static SwapFramebufferWrapper* _swapFbo;
static MirrorFramebufferWrapper* _mirrorFbo;
static ovrLayerEyeFov _sceneLayer;
#else
static ovrTexture _eyeTextures[ovrEye_Count];
static GlWindow* _outputWindow;
#endif
static glm::mat4 getEyeProjection(int eye);
static glm::mat4 getEyePose(int eye);
static glm::mat4 getHeadPose();
};
inline glm::mat4 toGlm(const ovrMatrix4f & om) {
return glm::transpose(glm::make_mat4(&om.M[0][0]));
}
inline glm::mat4 toGlm(const ovrFovPort & fovport, float nearPlane = 0.01f, float farPlane = 10000.0f) {
return toGlm(ovrMatrix4f_Projection(fovport, nearPlane, farPlane, true));
}
inline glm::vec3 toGlm(const ovrVector3f & ov) {
return glm::make_vec3(&ov.x);
}
inline glm::vec2 toGlm(const ovrVector2f & ov) {
return glm::make_vec2(&ov.x);
}
inline glm::ivec2 toGlm(const ovrVector2i & ov) {
return glm::ivec2(ov.x, ov.y);
}
inline glm::uvec2 toGlm(const ovrSizei & ov) {
return glm::uvec2(ov.w, ov.h);
}
inline glm::quat toGlm(const ovrQuatf & oq) {
return glm::make_quat(&oq.x);
}
inline glm::mat4 toGlm(const ovrPosef & op) {
glm::mat4 orientation = glm::mat4_cast(toGlm(op.Orientation));
glm::mat4 translation = glm::translate(glm::mat4(), toGlm(op.Position));
return translation * orientation;
}
inline ovrMatrix4f ovrFromGlm(const glm::mat4 & m) {
ovrMatrix4f result;
glm::mat4 transposed(glm::transpose(m));
memcpy(result.M, &(transposed[0][0]), sizeof(float) * 16);
return result;
}
inline ovrVector3f ovrFromGlm(const glm::vec3 & v) {
return{ v.x, v.y, v.z };
}
inline ovrVector2f ovrFromGlm(const glm::vec2 & v) {
return{ v.x, v.y };
}
inline ovrSizei ovrFromGlm(const glm::uvec2 & v) {
return{ (int)v.x, (int)v.y };
}
inline ovrQuatf ovrFromGlm(const glm::quat & q) {
return{ q.x, q.y, q.z, q.w };
}
#endif // hifi_OculusManager_h

View file

@ -36,6 +36,8 @@ const float NECK_X = 0.25f; // meters
const float NECK_Y = 0.3f; // meters
const float NECK_Z = 0.3f; // meters
const float CONTROLLER_THRESHOLD = 0.35f;
#ifdef __APPLE__
typedef int (*SixenseBaseFunction)();
typedef int (*SixenseTakeIntFunction)(int);
@ -326,6 +328,12 @@ void SixenseManager::update(float deltaTime) {
}
_controllersAtBase = (numControllersAtBase == 2);
}
for (auto axisState : _axisStateMap) {
if (fabsf(axisState.second) < CONTROLLER_THRESHOLD) {
_axisStateMap[axisState.first] = 0.0f;
}
}
#endif // HAVE_SIXENSE
}
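The new loop over _axisStateMap is a simple dead-zone filter: any axis whose magnitude stays under CONTROLLER_THRESHOLD (0.35 here) is forced to zero so drifting analog sticks do not move the avatar. A minimal standalone version of the same filter, with std::map standing in for the repo's axis-state container:

#include <map>
#include <cmath>

// Zero out any axis whose deflection is within the dead zone.
void applyDeadZone(std::map<int, float>& axisState, float threshold = 0.35f) {
    for (auto& axis : axisState) {
        if (std::fabs(axis.second) < threshold) {
            axis.second = 0.0f;
        }
    }
}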
@ -512,7 +520,7 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseLasers)
|| Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode)) {
pos = qApp->getApplicationOverlay().getPalmClickLocation(palm);
pos = qApp->getApplicationCompositor().getPalmClickLocation(palm);
} else {
// Get direction relative to avatar orientation
glm::vec3 direction = glm::inverse(avatar->getOrientation()) * palm->getFingerDirection();
@ -724,6 +732,13 @@ void SixenseManager::assignDefaultInputMapping(UserInputMapper& mapper) {
mapper.addInputChannel(UserInputMapper::VERTICAL_UP, makeInput(BUTTON_3, 1), BUTTON_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::VERTICAL_DOWN, makeInput(BUTTON_1, 1), BUTTON_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::SHIFT, makeInput(BUTTON_2, 0));
mapper.addInputChannel(UserInputMapper::SHIFT, makeInput(BUTTON_2, 1));
mapper.addInputChannel(UserInputMapper::ACTION1, makeInput(BUTTON_4, 0));
mapper.addInputChannel(UserInputMapper::ACTION2, makeInput(BUTTON_4, 1));
}
float SixenseManager::getButton(int channel) const {

View file

@ -12,6 +12,7 @@
#include "InterfaceConfig.h"
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <GlowEffect.h>
#include "gpu/GLBackend.h"
@ -106,21 +107,20 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
_activeEye = &eye;
glViewport(portalX, portalY, portalW, portalH);
glScissor(portalX, portalY, portalW, portalH);
glm::mat4 projection = glm::frustum<float>(eye.left, eye.right, eye.bottom, eye.top, nearZ, farZ);
float fov = atan(1.0f / projection[1][1]);
projection = glm::translate(projection, vec3(eye.modelTranslation, 0, 0));
eyeCamera.setProjection(projection);
glMatrixMode(GL_PROJECTION);
glLoadIdentity(); // reset projection matrix
glFrustum(eye.left, eye.right, eye.bottom, eye.top, nearZ, farZ); // set left view frustum
GLfloat p[4][4];
// Really?
glGetFloatv(GL_PROJECTION_MATRIX, &(p[0][0]));
float cotangent = p[1][1];
GLfloat fov = atan(1.0f / cotangent);
glTranslatef(eye.modelTranslation, 0.0, 0.0); // translate to cancel parallax
glLoadMatrixf(glm::value_ptr(projection));
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
renderArgs->_renderSide = RenderArgs::MONO;
qApp->displaySide(renderArgs, eyeCamera, false);
qApp->getApplicationOverlay().displayOverlayTextureStereo(whichCamera, _aspect, fov);
qApp->getApplicationCompositor().displayOverlayTexture(renderArgs);
_activeEye = NULL;
}, [&]{
// render right side view

View file

@ -23,7 +23,7 @@
#ifdef Q_OS_WIN
static BOOL CALLBACK enumWindowsCallback(HWND hWnd, LPARAM lParam) {
const UINT TIMEOUT = 200; // ms
DWORD response;
DWORD_PTR response;
LRESULT result = SendMessageTimeout(hWnd, UWM_IDENTIFY_INSTANCES, 0, 0, SMTO_BLOCK | SMTO_ABORTIFHUNG, TIMEOUT, &response);
if (result == 0) { // Timeout; continue search.
return TRUE;

View file

@ -28,6 +28,7 @@ ControllerScriptingInterface::ControllerScriptingInterface() :
{
}
static int actionMetaTypeId = qRegisterMetaType<UserInputMapper::Action>();
static int inputChannelMetaTypeId = qRegisterMetaType<UserInputMapper::InputChannel>();
static int inputMetaTypeId = qRegisterMetaType<UserInputMapper::Input>();

View file

@ -79,6 +79,7 @@ public:
bool isMouseCaptured() const { return _mouseCaptured; }
bool isTouchCaptured() const { return _touchCaptured; }
bool isWheelCaptured() const { return _wheelCaptured; }
bool areActionsCaptured() const { return _actionsCaptured; }
bool isJoystickCaptured(int joystickIndex) const;
void updateInputControllers();
@ -123,6 +124,9 @@ public slots:
virtual void captureWheelEvents() { _wheelCaptured = true; }
virtual void releaseWheelEvents() { _wheelCaptured = false; }
virtual void captureActionEvents() { _actionsCaptured = true; }
virtual void releaseActionEvents() { _actionsCaptured = false; }
virtual void captureJoystick(int joystickIndex);
virtual void releaseJoystick(int joystickIndex);
@ -143,6 +147,7 @@ private:
bool _mouseCaptured;
bool _touchCaptured;
bool _wheelCaptured;
bool _actionsCaptured;
QMultiMap<int,KeyEvent> _capturedKeys;
QSet<int> _capturedJoysticks;

View file

@ -25,9 +25,9 @@ bool HMDScriptingInterface::getHUDLookAtPosition3D(glm::vec3& result) const {
glm::vec3 direction = orientation * glm::vec3(0.0f, 0.0f, -1.0f);
ApplicationOverlay& applicationOverlay = Application::getInstance()->getApplicationOverlay();
const auto& compositor = Application::getInstance()->getApplicationCompositor();
return applicationOverlay.calculateRayUICollisionPoint(position, direction, result);
return compositor.calculateRayUICollisionPoint(position, direction, result);
}
QScriptValue HMDScriptingInterface::getHUDLookAtPosition2D(QScriptContext* context, QScriptEngine* engine) {
@ -40,7 +40,7 @@ QScriptValue HMDScriptingInterface::getHUDLookAtPosition2D(QScriptContext* conte
glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * (hudIntersection - sphereCenter);
glm::quat rotation = ::rotationBetween(glm::vec3(0.0f, 0.0f, -1.0f), direction);
glm::vec3 eulers = ::safeEulerAngles(rotation);
return qScriptValueFromValue<glm::vec2>(engine, Application::getInstance()->getApplicationOverlay()
return qScriptValueFromValue<glm::vec2>(engine, Application::getInstance()->getApplicationCompositor()
.sphericalToOverlay(glm::vec2(eulers.y, -eulers.x)));
}
return QScriptValue::NullValue;

View file

@ -27,11 +27,11 @@ public:
static QScriptValue getHUDLookAtPosition3D(QScriptContext* context, QScriptEngine* engine);
public slots:
void toggleMagnifier() { Application::getInstance()->getApplicationOverlay().toggleMagnifier(); };
void toggleMagnifier() { Application::getInstance()->getApplicationCompositor().toggleMagnifier(); };
private:
HMDScriptingInterface() {};
bool getMagnifier() const { return Application::getInstance()->getApplicationOverlay().hasMagnifier(); };
bool getMagnifier() const { return Application::getInstance()->getApplicationCompositor().hasMagnifier(); };
bool isHMDMode() const { return Application::getInstance()->isHMDMode(); }
bool getHUDLookAtPosition3D(glm::vec3& result) const;

View file

@ -11,6 +11,7 @@
//
#include "starfield/renderer/Renderer.h"
#include "Application.h"
using namespace starfield;
@ -52,6 +53,10 @@ void Renderer::render(float perspective, float aspect, mat4 const& orientation,
matrix[3][1] = 0.0f;
matrix[3][2] = 0.0f;
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadMatrixf(glm::value_ptr(qApp->getDisplayViewFrustum()->getProjection()));
glMatrixMode(GL_MODELVIEW);
// extract local z vector
vec3 ahead = vec3(matrix[2]);
@ -74,6 +79,10 @@ void Renderer::render(float perspective, float aspect, mat4 const& orientation,
floodFill(cursor, TileSelection(*this, _tileArray, _tileArray + _tiling.getTileCount(), (TileSelection::Cursor*) _batchCountArray));
this->glBatch(glm::value_ptr(matrix), prepareBatch((unsigned*) _batchOffs, _outIndexPos), alpha);
glMatrixMode(GL_PROJECTION);
glPopMatrix();
glMatrixMode(GL_MODELVIEW);
}
// renderer construction

View file

@ -0,0 +1,763 @@
//
// ApplicationCompositor.cpp
// interface/src/ui/overlays
//
// Created by Benjamin Arnold on 5/27/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InterfaceConfig.h"
#include "ApplicationCompositor.h"
#include <glm/gtc/type_ptr.hpp>
#include <avatar/AvatarManager.h>
#include <gpu/GLBackend.h>
#include <CursorManager.h>
#include <Tooltip.h>
#include "Application.h"
// Used to animate the magnification windows
static const float MAG_SPEED = 0.08f;
static const quint64 MSECS_TO_USECS = 1000ULL;
static const quint64 TOOLTIP_DELAY = 500 * MSECS_TO_USECS;
static const float WHITE_TEXT[] = { 0.93f, 0.93f, 0.93f };
static const float RETICLE_COLOR[] = { 0.0f, 198.0f / 255.0f, 244.0f / 255.0f };
static const float reticleSize = TWO_PI / 100.0f;
static const float CONNECTION_STATUS_BORDER_COLOR[] = { 1.0f, 0.0f, 0.0f };
static const float CONNECTION_STATUS_BORDER_LINE_WIDTH = 4.0f;
static const float CURSOR_PIXEL_SIZE = 32.0f;
static const float MOUSE_PITCH_RANGE = 1.0f * PI;
static const float MOUSE_YAW_RANGE = 0.5f * TWO_PI;
static const glm::vec2 MOUSE_RANGE(MOUSE_YAW_RANGE, MOUSE_PITCH_RANGE);
static gpu::BufferPointer _hemiVertices;
static gpu::BufferPointer _hemiIndices;
static int _hemiIndexCount{ 0 };
EntityItemID ApplicationCompositor::_noItemId;
static QString _tooltipId;
// Return a point's cartesian coordinates on a sphere from pitch and yaw
glm::vec3 getPoint(float yaw, float pitch) {
return glm::vec3(glm::cos(-pitch) * (-glm::sin(yaw)),
glm::sin(-pitch),
glm::cos(-pitch) * (-glm::cos(yaw)));
}
//Checks if the given ray intersects the sphere at the origin. result will store a multiplier that should
//be multiplied by dir and added to origin to get the location of the collision
bool raySphereIntersect(const glm::vec3 &dir, const glm::vec3 &origin, float r, float* result)
{
//Source: http://wiki.cgsociety.org/index.php/Ray_Sphere_Intersection
//Compute A, B and C coefficients
float a = glm::dot(dir, dir);
float b = 2 * glm::dot(dir, origin);
float c = glm::dot(origin, origin) - (r * r);
//Find discriminant
float disc = b * b - 4 * a * c;
// if discriminant is negative there are no real roots, so return
// false as ray misses sphere
if (disc < 0) {
return false;
}
// compute q as described above
float distSqrt = sqrtf(disc);
float q;
if (b < 0) {
q = (-b - distSqrt) / 2.0f;
} else {
q = (-b + distSqrt) / 2.0f;
}
// compute t0 and t1
float t0 = q / a;
float t1 = c / q;
// make sure t0 is smaller than t1
if (t0 > t1) {
// if t0 is bigger than t1 swap them around
float temp = t0;
t0 = t1;
t1 = temp;
}
// if t1 is less than zero, the object is in the ray's negative direction
// and consequently the ray misses the sphere
if (t1 < 0) {
return false;
}
// if t0 is less than zero, the intersection point is at t1
if (t0 < 0) {
*result = t1;
return true;
} else { // else the intersection point is at t0
*result = t0;
return true;
}
}
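raySphereIntersect above solves the quadratic |origin + t*dir|^2 = r^2 and returns the nearer non-negative root (using the numerically friendlier q-form). A small self-contained check of the same math, written directly from the plain quadratic formula rather than the q-form, with a known hit distance asserted in main():

#include <cmath>
#include <cassert>

// Smallest non-negative t with |origin + t*dir| == r, or -1 if the ray misses.
float raySphereHit(const float dir[3], const float origin[3], float r) {
    float a = dir[0]*dir[0] + dir[1]*dir[1] + dir[2]*dir[2];
    float b = 2.0f * (dir[0]*origin[0] + dir[1]*origin[1] + dir[2]*origin[2]);
    float c = origin[0]*origin[0] + origin[1]*origin[1] + origin[2]*origin[2] - r*r;
    float disc = b*b - 4.0f*a*c;
    if (disc < 0.0f) {
        return -1.0f;                       // no real roots: the ray misses the sphere
    }
    float t0 = (-b - std::sqrt(disc)) / (2.0f * a);
    float t1 = (-b + std::sqrt(disc)) / (2.0f * a);
    if (t1 < 0.0f) {
        return -1.0f;                       // sphere is entirely behind the ray
    }
    return (t0 >= 0.0f) ? t0 : t1;          // nearest hit in front of the origin
}

int main() {
    float dir[3] = { 0.0f, 0.0f, -1.0f };
    float origin[3] = { 0.0f, 0.0f, 5.0f };
    // A unit sphere at the origin is first hit 4 units down this ray.
    assert(std::fabs(raySphereHit(dir, origin, 1.0f) - 4.0f) < 1e-5f);
    return 0;
}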
ApplicationCompositor::ApplicationCompositor() {
memset(_reticleActive, 0, sizeof(_reticleActive));
memset(_magActive, 0, sizeof(_reticleActive));
memset(_magSizeMult, 0, sizeof(_magSizeMult));
auto geometryCache = DependencyManager::get<GeometryCache>();
_reticleQuad = geometryCache->allocateID();
_magnifierQuad = geometryCache->allocateID();
_magnifierBorder = geometryCache->allocateID();
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
connect(entityScriptingInterface.data(), &EntityScriptingInterface::hoverEnterEntity, [=](const EntityItemID& entityItemID, const MouseEvent& event) {
if (_hoverItemId != entityItemID) {
_hoverItemId = entityItemID;
_hoverItemEnterUsecs = usecTimestampNow();
auto properties = entityScriptingInterface->getEntityProperties(_hoverItemId);
_hoverItemHref = properties.getHref();
auto cursor = Cursor::Manager::instance().getCursor();
if (!_hoverItemHref.isEmpty()) {
cursor->setIcon(Cursor::Icon::LINK);
} else {
cursor->setIcon(Cursor::Icon::DEFAULT);
}
}
});
connect(entityScriptingInterface.data(), &EntityScriptingInterface::hoverLeaveEntity, [=](const EntityItemID& entityItemID, const MouseEvent& event) {
if (_hoverItemId == entityItemID) {
_hoverItemId = _noItemId;
_hoverItemHref.clear();
auto cursor = Cursor::Manager::instance().getCursor();
cursor->setIcon(Cursor::Icon::DEFAULT);
if (!_tooltipId.isEmpty()) {
qDebug() << "Closing tooltip " << _tooltipId;
Tooltip::closeTip(_tooltipId);
_tooltipId.clear();
}
}
});
}
ApplicationCompositor::~ApplicationCompositor() {
}
void ApplicationCompositor::bindCursorTexture(gpu::Batch& batch, uint8_t cursorIndex) {
auto& cursorManager = Cursor::Manager::instance();
auto cursor = cursorManager.getCursor(cursorIndex);
auto iconId = cursor->getIcon();
if (!_cursors.count(iconId)) {
auto iconPath = cursorManager.getIconImage(cursor->getIcon());
_cursors[iconId] = DependencyManager::get<TextureCache>()->
getImageTexture(iconPath);
}
batch.setUniformTexture(0, _cursors[iconId]);
}
// Draws the FBO texture for the screen
void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
if (_alpha == 0.0f) {
return;
}
GLuint texture = qApp->getApplicationOverlay().getOverlayTexture();
if (!texture) {
return;
}
updateTooltips();
auto deviceSize = qApp->getDeviceSize();
glViewport(0, 0, deviceSize.width(), deviceSize.height());
//Handle fading and deactivation/activation of UI
gpu::Batch batch;
renderArgs->_context->syncCache();
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->useSimpleDrawPipeline(batch);
batch.setModelTransform(Transform());
batch.setViewTransform(Transform());
batch.setProjectionTransform(mat4());
batch._glBindTexture(GL_TEXTURE_2D, texture);
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
geometryCache->renderUnitQuad(batch, vec4(vec3(1), _alpha));
// Doesn't actually render
renderPointers(batch);
//draw the mouse pointer
// Get the mouse coordinates and convert to NDC [-1, 1]
vec2 canvasSize = qApp->getCanvasSize();
vec2 mousePosition = toNormalizedDeviceScale(vec2(qApp->getMouse()), canvasSize);
// Invert the Y axis
mousePosition.y *= -1.0f;
Transform model;
model.setTranslation(vec3(mousePosition, 0));
vec2 mouseSize = CURSOR_PIXEL_SIZE / canvasSize;
model.setScale(vec3(mouseSize, 1.0f));
batch.setModelTransform(model);
bindCursorTexture(batch);
vec4 reticleColor = { RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], 1.0f };
geometryCache->renderUnitQuad(batch, vec4(1));
renderArgs->_context->render(batch);
}
vec2 getPolarCoordinates(const PalmData& palm) {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
auto avatarOrientation = myAvatar->getOrientation();
auto eyePos = myAvatar->getDefaultEyePosition();
glm::vec3 tip = myAvatar->getLaserPointerTipPosition(&palm);
// Direction of the tip relative to the eye
glm::vec3 tipDirection = tip - eyePos;
// orient into avatar space
tipDirection = glm::inverse(avatarOrientation) * tipDirection;
// Normalize for trig functions
tipDirection = glm::normalize(tipDirection);
// Convert to polar coordinates
glm::vec2 polar(glm::atan(tipDirection.x, -tipDirection.z), glm::asin(tipDirection.y));
return polar;
}
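getPolarCoordinates reduces a normalized view-space direction to a (yaw, pitch) pair with atan2/asin, and getPoint above maps (yaw, pitch) back onto the unit sphere with related trigonometry (the sign conventions differ, which is why callers negate the polar components). A standalone sketch of just that final trig step, with the signs simplified so the two mappings here are exact inverses:

#include <cmath>
#include <cassert>
#include <glm/glm.hpp>

// Direction -> (yaw, pitch), same atan2/asin form as getPolarCoordinates.
glm::vec2 directionToPolar(const glm::vec3& dir) {
    return glm::vec2(std::atan2(dir.x, -dir.z), std::asin(dir.y));
}

// Exact inverse: (yaw, pitch) -> unit direction.
glm::vec3 polarToDirection(float yaw, float pitch) {
    return glm::vec3(std::cos(pitch) * std::sin(yaw),
                     std::sin(pitch),
                     -std::cos(pitch) * std::cos(yaw));
}

int main() {
    glm::vec3 dir = glm::normalize(glm::vec3(0.3f, 0.2f, -0.9f));
    glm::vec2 polar = directionToPolar(dir);
    glm::vec3 roundTrip = polarToDirection(polar.x, polar.y);
    assert(glm::length(roundTrip - dir) < 1e-5f);  // round trip recovers the direction
    return 0;
}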
// Draws the FBO texture for Oculus rift.
void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int eye) {
if (_alpha == 0.0f) {
return;
}
GLuint texture = qApp->getApplicationOverlay().getOverlayTexture();
if (!texture) {
return;
}
updateTooltips();
vec2 canvasSize = qApp->getCanvasSize();
_textureAspectRatio = aspect(canvasSize);
renderArgs->_context->syncCache();
auto geometryCache = DependencyManager::get<GeometryCache>();
gpu::Batch batch;
geometryCache->useSimpleDrawPipeline(batch);
batch._glDisable(GL_DEPTH_TEST);
batch._glDisable(GL_CULL_FACE);
batch._glBindTexture(GL_TEXTURE_2D, texture);
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
batch.setViewTransform(Transform());
batch.setProjectionTransform(qApp->getEyeProjection(eye));
mat4 eyePose = qApp->getEyePose(eye);
glm::mat4 overlayXfm = glm::inverse(eyePose);
#ifdef DEBUG_OVERLAY
{
batch.setModelTransform(glm::translate(mat4(), vec3(0, 0, -2)));
geometryCache->renderUnitQuad(batch, glm::vec4(1));
}
#else
{
batch.setModelTransform(overlayXfm);
drawSphereSection(batch);
}
#endif
// Doesn't actually render
renderPointers(batch);
vec3 reticleScale = vec3(Cursor::Manager::instance().getScale() * reticleSize);
bindCursorTexture(batch);
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
//Controller Pointers
for (int i = 0; i < (int)myAvatar->getHand()->getNumPalms(); i++) {
PalmData& palm = myAvatar->getHand()->getPalms()[i];
if (palm.isActive()) {
glm::vec2 polar = getPolarCoordinates(palm);
// Convert to quaternion
mat4 pointerXfm = glm::mat4_cast(quat(vec3(polar.y, -polar.x, 0.0f))) * glm::translate(mat4(), vec3(0, 0, -1));
mat4 reticleXfm = overlayXfm * pointerXfm;
reticleXfm = glm::scale(reticleXfm, reticleScale);
batch.setModelTransform(reticleXfm);
// Render reticle at location
geometryCache->renderUnitQuad(batch, glm::vec4(1), _reticleQuad);
}
}
//Mouse Pointer
if (_reticleActive[MOUSE]) {
glm::vec2 projection = screenToSpherical(glm::vec2(_reticlePosition[MOUSE].x(),
_reticlePosition[MOUSE].y()));
mat4 pointerXfm = glm::mat4_cast(quat(vec3(-projection.y, projection.x, 0.0f))) * glm::translate(mat4(), vec3(0, 0, -1));
mat4 reticleXfm = overlayXfm * pointerXfm;
reticleXfm = glm::scale(reticleXfm, reticleScale);
batch.setModelTransform(reticleXfm);
geometryCache->renderUnitQuad(batch, glm::vec4(1), _reticleQuad);
}
renderArgs->_context->render(batch);
}
void ApplicationCompositor::computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& origin, glm::vec3& direction) const {
cursorPos *= qApp->getCanvasSize();
const glm::vec2 projection = screenToSpherical(cursorPos);
// The overlay space orientation of the mouse coordinates
const glm::quat orientation(glm::vec3(-projection.y, projection.x, 0.0f));
// FIXME We now have the direction of the ray FROM THE DEFAULT HEAD POSE.
// Now we need to account for the actual camera position relative to the overlay
glm::vec3 overlaySpaceDirection = glm::normalize(orientation * IDENTITY_FRONT);
// We need the RAW camera orientation and position, because this is what the overlay is
// rendered relative to
const glm::vec3 overlayPosition = qApp->getCamera()->getPosition();
const glm::quat overlayOrientation = qApp->getCamera()->getRotation();
// Intersection UI overlay space
glm::vec3 worldSpaceDirection = overlayOrientation * overlaySpaceDirection;
glm::vec3 worldSpaceIntersection = (glm::normalize(worldSpaceDirection) * _oculusUIRadius) + overlayPosition;
glm::vec3 worldSpaceHeadPosition = (overlayOrientation * glm::vec3(qApp->getHeadPose()[3])) + overlayPosition;
// Intersection in world space
origin = worldSpaceHeadPosition;
direction = glm::normalize(worldSpaceIntersection - worldSpaceHeadPosition);
}
//Calculate the click location using one of the sixense controllers. Scale is not applied
QPoint ApplicationCompositor::getPalmClickLocation(const PalmData *palm) const {
QPoint rv;
auto canvasSize = qApp->getCanvasSize();
if (qApp->isHMDMode()) {
glm::vec2 polar = getPolarCoordinates(*palm);
glm::vec2 point = sphericalToScreen(-polar);
rv.rx() = point.x;
rv.ry() = point.y;
} else {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::dmat4 projection;
qApp->getProjectionMatrix(&projection);
glm::quat invOrientation = glm::inverse(myAvatar->getOrientation());
glm::vec3 eyePos = myAvatar->getDefaultEyePosition();
glm::vec3 tip = myAvatar->getLaserPointerTipPosition(palm);
glm::vec3 tipPos = invOrientation * (tip - eyePos);
glm::vec4 clipSpacePos = glm::vec4(projection * glm::dvec4(tipPos, 1.0));
glm::vec3 ndcSpacePos;
if (clipSpacePos.w != 0) {
ndcSpacePos = glm::vec3(clipSpacePos) / clipSpacePos.w;
}
rv.setX(((ndcSpacePos.x + 1.0) / 2.0) * canvasSize.x);
rv.setY((1.0 - ((ndcSpacePos.y + 1.0) / 2.0)) * canvasSize.y);
}
return rv;
}
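The non-HMD branch above is a standard perspective projection to screen pixels: the laser tip is projected to clip space, divided by w to get normalized device coordinates, then x is mapped from [-1, 1] to [0, width] and y is flipped because pixel coordinates grow downward. A minimal standalone version of that last step:

#include <glm/glm.hpp>

// Map a clip-space position to pixel coordinates on a canvas of the given size.
glm::vec2 clipToScreen(const glm::vec4& clipPos, const glm::vec2& canvasSize) {
    glm::vec3 ndc(0.0f);
    if (clipPos.w != 0.0f) {
        ndc = glm::vec3(clipPos) / clipPos.w;                 // perspective divide -> [-1, 1]
    }
    float x = (ndc.x + 1.0f) * 0.5f * canvasSize.x;           // [-1, 1] -> [0, width]
    float y = (1.0f - (ndc.y + 1.0f) * 0.5f) * canvasSize.y;  // flip: +y NDC is up, +y pixels is down
    return glm::vec2(x, y);
}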
//Finds the collision point of a world space ray
bool ApplicationCompositor::calculateRayUICollisionPoint(const glm::vec3& position, const glm::vec3& direction, glm::vec3& result) const {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::quat inverseOrientation = glm::inverse(myAvatar->getOrientation());
glm::vec3 relativePosition = inverseOrientation * (position - myAvatar->getDefaultEyePosition());
glm::vec3 relativeDirection = glm::normalize(inverseOrientation * direction);
float t;
if (raySphereIntersect(relativeDirection, relativePosition, _oculusUIRadius * myAvatar->getScale(), &t)){
result = position + direction * t;
return true;
}
return false;
}
//Renders optional pointers
void ApplicationCompositor::renderPointers(gpu::Batch& batch) {
if (qApp->isHMDMode() && !qApp->getLastMouseMoveWasSimulated() && !qApp->isMouseHidden()) {
//If we are in oculus, render reticle later
QPoint position = QPoint(qApp->getTrueMouseX(), qApp->getTrueMouseY());
_reticlePosition[MOUSE] = position;
_reticleActive[MOUSE] = true;
_magActive[MOUSE] = _magnifier;
_reticleActive[LEFT_CONTROLLER] = false;
_reticleActive[RIGHT_CONTROLLER] = false;
} else if (qApp->getLastMouseMoveWasSimulated() && Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput)) {
//only render controller pointer if we aren't already rendering a mouse pointer
_reticleActive[MOUSE] = false;
_magActive[MOUSE] = false;
renderControllerPointers(batch);
}
}
void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
//Static variables used for storing controller state
static quint64 pressedTime[NUMBER_OF_RETICLES] = { 0ULL, 0ULL, 0ULL };
static bool isPressed[NUMBER_OF_RETICLES] = { false, false, false };
static bool stateWhenPressed[NUMBER_OF_RETICLES] = { false, false, false };
const HandData* handData = DependencyManager::get<AvatarManager>()->getMyAvatar()->getHandData();
for (unsigned int palmIndex = 2; palmIndex < 4; palmIndex++) {
const int index = palmIndex - 1;
const PalmData* palmData = NULL;
if (palmIndex >= handData->getPalms().size()) {
return;
}
if (handData->getPalms()[palmIndex].isActive()) {
palmData = &handData->getPalms()[palmIndex];
} else {
continue;
}
int controllerButtons = palmData->getControllerButtons();
//Check whether we should toggle or drag the magnification window
if (controllerButtons & BUTTON_3) {
if (isPressed[index] == false) {
//We are now dragging the window
isPressed[index] = true;
//set the pressed time in us
pressedTime[index] = usecTimestampNow();
stateWhenPressed[index] = _magActive[index];
}
} else if (isPressed[index]) {
isPressed[index] = false;
//If the button was only pressed for < 250 ms
//then disable it.
const int MAX_BUTTON_PRESS_TIME = 250 * MSECS_TO_USECS;
if (usecTimestampNow() < pressedTime[index] + MAX_BUTTON_PRESS_TIME) {
_magActive[index] = !stateWhenPressed[index];
}
}
//if we have the oculus, we should make the cursor smaller since it will be
//magnified
if (qApp->isHMDMode()) {
QPoint point = getPalmClickLocation(palmData);
_reticlePosition[index] = point;
//When button 2 is pressed we drag the mag window
if (isPressed[index]) {
_magActive[index] = true;
}
// If oculus is enabled, we draw the crosshairs later
continue;
}
auto canvasSize = qApp->getCanvasSize();
int mouseX, mouseY;
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseLasers)) {
QPoint res = getPalmClickLocation(palmData);
mouseX = res.x();
mouseY = res.y();
} else {
// Get direction relative to avatar orientation
glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * palmData->getFingerDirection();
// Get the angles, scaled between (-0.5,0.5)
float xAngle = (atan2(direction.z, direction.x) + M_PI_2);
float yAngle = 0.5f - ((atan2(direction.z, direction.y) + M_PI_2));
// Get the pixel range over which the xAngle and yAngle are scaled
float cursorRange = canvasSize.x * SixenseManager::getInstance().getCursorPixelRangeMult();
mouseX = (canvasSize.x / 2.0f + cursorRange * xAngle);
mouseY = (canvasSize.y / 2.0f + cursorRange * yAngle);
}
//If the cursor is out of the screen then don't render it
if (mouseX < 0 || mouseX >= (int)canvasSize.x || mouseY < 0 || mouseY >= (int)canvasSize.y) {
_reticleActive[index] = false;
continue;
}
_reticleActive[index] = true;
const float reticleSize = 40.0f;
mouseX -= reticleSize / 2.0f;
mouseY += reticleSize / 2.0f;
glm::vec2 topLeft(mouseX, mouseY);
glm::vec2 bottomRight(mouseX + reticleSize, mouseY - reticleSize);
glm::vec2 texCoordTopLeft(0.0f, 0.0f);
glm::vec2 texCoordBottomRight(1.0f, 1.0f);
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight,
glm::vec4(RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], 1.0f));
}
}
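// Worked example of the mapping above (assumed values): with canvasSize = (1920, 1080) and a
// cursor range multiplier of 1.0, cursorRange = 1920. A palm direction giving xAngle = 0.1 and
// yAngle = -0.05 then lands at mouseX = 960 + 1920 * 0.1 = 1152 and mouseY = 540 + 1920 * -0.05 = 444,
// which is kept only if it falls inside the canvas.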
//Renders a small magnification of the currently bound texture at the coordinates
void ApplicationCompositor::renderMagnifier(gpu::Batch& batch, const glm::vec2& magPos, float sizeMult, bool showBorder) {
if (!_magnifier) {
return;
}
auto canvasSize = qApp->getCanvasSize();
const int widgetWidth = canvasSize.x;
const int widgetHeight = canvasSize.y;
const float halfWidth = (MAGNIFY_WIDTH / _textureAspectRatio) * sizeMult / 2.0f;
const float halfHeight = MAGNIFY_HEIGHT * sizeMult / 2.0f;
// Magnification Texture Coordinates
const float magnifyULeft = (magPos.x - halfWidth) / (float)widgetWidth;
const float magnifyURight = (magPos.x + halfWidth) / (float)widgetWidth;
const float magnifyVTop = 1.0f - (magPos.y - halfHeight) / (float)widgetHeight;
const float magnifyVBottom = 1.0f - (magPos.y + halfHeight) / (float)widgetHeight;
const float newHalfWidth = halfWidth * MAGNIFY_MULT;
const float newHalfHeight = halfHeight * MAGNIFY_MULT;
//Get yaw / pitch value for the corners
const glm::vec2 topLeftYawPitch = overlayToSpherical(glm::vec2(magPos.x - newHalfWidth,
magPos.y - newHalfHeight));
const glm::vec2 bottomRightYawPitch = overlayToSpherical(glm::vec2(magPos.x + newHalfWidth,
magPos.y + newHalfHeight));
const glm::vec3 bottomLeft = getPoint(topLeftYawPitch.x, bottomRightYawPitch.y);
const glm::vec3 bottomRight = getPoint(bottomRightYawPitch.x, bottomRightYawPitch.y);
const glm::vec3 topLeft = getPoint(topLeftYawPitch.x, topLeftYawPitch.y);
const glm::vec3 topRight = getPoint(bottomRightYawPitch.x, topLeftYawPitch.y);
auto geometryCache = DependencyManager::get<GeometryCache>();
if (bottomLeft != _previousMagnifierBottomLeft || bottomRight != _previousMagnifierBottomRight
|| topLeft != _previousMagnifierTopLeft || topRight != _previousMagnifierTopRight) {
QVector<glm::vec3> border;
border << topLeft;
border << bottomLeft;
border << bottomRight;
border << topRight;
border << topLeft;
geometryCache->updateVertices(_magnifierBorder, border, glm::vec4(1.0f, 0.0f, 0.0f, _alpha));
_previousMagnifierBottomLeft = bottomLeft;
_previousMagnifierBottomRight = bottomRight;
_previousMagnifierTopLeft = topLeft;
_previousMagnifierTopRight = topRight;
}
glPushMatrix(); {
if (showBorder) {
glDisable(GL_TEXTURE_2D);
glLineWidth(1.0f);
//Outer Line
geometryCache->renderVertices(gpu::LINE_STRIP, _magnifierBorder);
glEnable(GL_TEXTURE_2D);
}
glm::vec4 magnifierColor = { 1.0f, 1.0f, 1.0f, _alpha };
DependencyManager::get<GeometryCache>()->renderQuad(bottomLeft, bottomRight, topRight, topLeft,
glm::vec2(magnifyULeft, magnifyVBottom),
glm::vec2(magnifyURight, magnifyVBottom),
glm::vec2(magnifyURight, magnifyVTop),
glm::vec2(magnifyULeft, magnifyVTop),
magnifierColor, _magnifierQuad);
} glPopMatrix();
}
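// Worked example of the texture-coordinate math above (assumed values): on a 1920x1080 canvas with
// magPos = (960, 540), sizeMult = 1.0 and _textureAspectRatio = 1.0, halfWidth = 110 and halfHeight = 50,
// so the magnifier samples u in [850/1920, 1070/1920] = [0.443, 0.557] and
// v in [1 - 590/1080, 1 - 490/1080] = [0.454, 0.546], while the on-screen quad spans the doubled
// extents (newHalfWidth = 220, newHalfHeight = 100, since MAGNIFY_MULT = 2).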
void ApplicationCompositor::buildHemiVertices(
const float fov, const float aspectRatio, const int slices, const int stacks) {
static float textureFOV = 0.0f, textureAspectRatio = 1.0f;
if (textureFOV == fov && textureAspectRatio == aspectRatio) {
return;
}
textureFOV = fov;
textureAspectRatio = aspectRatio;
auto geometryCache = DependencyManager::get<GeometryCache>();
_hemiVertices = gpu::BufferPointer(new gpu::Buffer());
_hemiIndices = gpu::BufferPointer(new gpu::Buffer());
if (fov >= PI) {
qDebug() << "TexturedHemisphere::buildVBO(): FOV greater or equal than Pi will create issues";
}
//UV mapping source: http://www.mvps.org/directx/articles/spheremap.htm
vec3 pos;
vec2 uv;
// Compute vertices positions and texture UV coordinate
// Create and write to buffer
for (int i = 0; i < stacks; i++) {
uv.y = (float)i / (float)(stacks - 1); // First stack is 0.0f, last stack is 1.0f
// abs(theta) <= fov / 2.0f
float pitch = -fov * (uv.y - 0.5f);
for (int j = 0; j < slices; j++) {
uv.x = (float)j / (float)(slices - 1); // First slice is 0.0f, last slice is 1.0f
// abs(phi) <= fov * aspectRatio / 2.0f
float yaw = -fov * aspectRatio * (uv.x - 0.5f);
pos = getPoint(yaw, pitch);
static const vec4 color(1);
_hemiVertices->append(sizeof(pos), (gpu::Byte*)&pos);
_hemiVertices->append(sizeof(vec2), (gpu::Byte*)&uv);
_hemiVertices->append(sizeof(vec4), (gpu::Byte*)&color);
}
}
// Compute number of indices needed
static const int VERTEX_PER_TRIANGLE = 3;
static const int TRIANGLE_PER_RECTANGLE = 2;
int numberOfRectangles = (slices - 1) * (stacks - 1);
_hemiIndexCount = numberOfRectangles * TRIANGLE_PER_RECTANGLE * VERTEX_PER_TRIANGLE;
// Compute indices order
std::vector<GLushort> indices;
for (int i = 0; i < stacks - 1; i++) {
for (int j = 0; j < slices - 1; j++) {
GLushort bottomLeftIndex = i * slices + j;
GLushort bottomRightIndex = bottomLeftIndex + 1;
GLushort topLeftIndex = bottomLeftIndex + slices;
GLushort topRightIndex = topLeftIndex + 1;
// FIXME make a z-order curve for better vertex cache locality
indices.push_back(topLeftIndex);
indices.push_back(bottomLeftIndex);
indices.push_back(topRightIndex);
indices.push_back(topRightIndex);
indices.push_back(bottomLeftIndex);
indices.push_back(bottomRightIndex);
}
}
_hemiIndices->append(sizeof(GLushort) * indices.size(), (gpu::Byte*)&indices[0]);
}
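// Worked example of the index generation above (assumed tiny grid): with slices = 3 and stacks = 2
// the vertex grid is
//   3 4 5   (stack 1)
//   0 1 2   (stack 0)
// giving (3 - 1) * (2 - 1) = 2 rectangles and 12 indices. The loop emits the triangles
// (3,0,4) (4,0,1) for the first cell and (4,1,5) (5,1,2) for the second.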
void ApplicationCompositor::drawSphereSection(gpu::Batch& batch) {
buildHemiVertices(_textureFov, _textureAspectRatio, 80, 80);
static const int VERTEX_DATA_SLOT = 0;
static const int TEXTURE_DATA_SLOT = 1;
static const int COLOR_DATA_SLOT = 2;
gpu::Stream::FormatPointer streamFormat(new gpu::Stream::Format()); // 1 for everyone
streamFormat->setAttribute(gpu::Stream::POSITION, VERTEX_DATA_SLOT, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), 0);
streamFormat->setAttribute(gpu::Stream::TEXCOORD, TEXTURE_DATA_SLOT, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::UV));
streamFormat->setAttribute(gpu::Stream::COLOR, COLOR_DATA_SLOT, gpu::Element(gpu::VEC4, gpu::FLOAT, gpu::RGBA));
batch.setInputFormat(streamFormat);
static const int VERTEX_STRIDE = sizeof(vec3) + sizeof(vec2) + sizeof(vec4);
gpu::BufferView posView(_hemiVertices, 0, _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::POSITION)._element);
gpu::BufferView uvView(_hemiVertices, sizeof(vec3), _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::TEXCOORD)._element);
gpu::BufferView colView(_hemiVertices, sizeof(vec3) + sizeof(vec2), _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::COLOR)._element);
batch.setInputBuffer(VERTEX_DATA_SLOT, posView);
batch.setInputBuffer(TEXTURE_DATA_SLOT, uvView);
batch.setInputBuffer(COLOR_DATA_SLOT, colView);
batch.setIndexBuffer(gpu::UINT16, _hemiIndices, 0);
batch.drawIndexed(gpu::TRIANGLES, _hemiIndexCount);
}
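// For reference, the interleaved layout used above packs each vertex as
//   position (vec3, 12 bytes) | texcoord (vec2, 8 bytes) | color (vec4, 16 bytes)
// for a 36-byte stride, which is why the three BufferViews share the same stride and use
// offsets 0, sizeof(vec3) = 12 and sizeof(vec3) + sizeof(vec2) = 20.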
glm::vec2 ApplicationCompositor::directionToSpherical(const glm::vec3& direction) {
glm::vec2 result;
// Compute yaw
glm::vec3 normalProjection = glm::normalize(glm::vec3(direction.x, 0.0f, direction.z));
result.x = glm::acos(glm::dot(IDENTITY_FRONT, normalProjection));
if (glm::dot(IDENTITY_RIGHT, normalProjection) > 0.0f) {
result.x = -glm::abs(result.x);
} else {
result.x = glm::abs(result.x);
}
// Compute pitch
result.y = angleBetween(IDENTITY_UP, direction) - PI_OVER_TWO;
return result;
}
glm::vec3 ApplicationCompositor::sphericalToDirection(const glm::vec2& sphericalPos) {
glm::quat rotation(glm::vec3(sphericalPos.y, sphericalPos.x, 0.0f));
return rotation * IDENTITY_FRONT;
}
glm::vec2 ApplicationCompositor::screenToSpherical(const glm::vec2& screenPos) {
auto screenSize = qApp->getCanvasSize();
glm::vec2 result;
result.x = -(screenPos.x / screenSize.x - 0.5f);
result.y = (screenPos.y / screenSize.y - 0.5f);
result.x *= MOUSE_YAW_RANGE;
result.y *= MOUSE_PITCH_RANGE;
return result;
}
glm::vec2 ApplicationCompositor::sphericalToScreen(const glm::vec2& sphericalPos) {
glm::vec2 result = sphericalPos;
result.x *= -1.0;
result /= MOUSE_RANGE;
result += 0.5f;
result *= qApp->getCanvasSize();
return result;
}
glm::vec2 ApplicationCompositor::sphericalToOverlay(const glm::vec2& sphericalPos) const {
glm::vec2 result = sphericalPos;
result.x *= -1.0;
result /= _textureFov;
result.x /= _textureAspectRatio;
result += 0.5f;
result *= qApp->getCanvasSize();
return result;
}
glm::vec2 ApplicationCompositor::overlayToSpherical(const glm::vec2& overlayPos) const {
glm::vec2 result = overlayPos;
result /= qApp->getCanvasSize();
result -= 0.5f;
result *= _textureFov;
result.x *= _textureAspectRatio;
result.x *= -1.0f;
return result;
}
glm::vec2 ApplicationCompositor::screenToOverlay(const glm::vec2& screenPos) const {
return sphericalToOverlay(screenToSpherical(screenPos));
}
glm::vec2 ApplicationCompositor::overlayToScreen(const glm::vec2& overlayPos) const {
return sphericalToScreen(overlayToSpherical(overlayPos));
}
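// Illustrative sketch (hypothetical helper, same conventions as the converters above): pushing a
// screen-space cursor through the overlay and back should land on approximately the same pixel,
// since each pair of conversions is the other's inverse.
static void sketchCursorRoundTrip(const ApplicationCompositor& compositor, const glm::vec2& screenPos) {
    glm::vec2 spherical = ApplicationCompositor::screenToSpherical(screenPos); // yaw/pitch in radians
    glm::vec2 overlayPos = compositor.sphericalToOverlay(spherical);           // pixels on the overlay
    glm::vec2 backOnScreen = compositor.overlayToScreen(overlayPos);           // back to screen pixels
    qDebug() << "screen" << screenPos.x << screenPos.y
             << "-> overlay" << overlayPos.x << overlayPos.y
             << "-> screen" << backOnScreen.x << backOnScreen.y;
}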
void ApplicationCompositor::updateTooltips() {
if (_hoverItemId != _noItemId) {
quint64 hoverDuration = usecTimestampNow() - _hoverItemEnterUsecs;
if (_hoverItemEnterUsecs != UINT64_MAX && !_hoverItemHref.isEmpty() && hoverDuration > TOOLTIP_DELAY) {
// TODO Enable and position the tooltip
_hoverItemEnterUsecs = UINT64_MAX;
_tooltipId = Tooltip::showTip("URL: " + _hoverItemHref);
}
}
}

View file

@ -0,0 +1,120 @@
//
// Created by Bradley Austin Davis Arnold on 2015/06/13
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_ApplicationCompositor_h
#define hifi_ApplicationCompositor_h
#include <QObject>
#include <cstdint>
#include <EntityItemID.h>
#include <GeometryCache.h>
#include <GLMHelpers.h>
#include <gpu/Batch.h>
#include <gpu/Texture.h>
class Camera;
class PalmData;
class RenderArgs;
const float MAGNIFY_WIDTH = 220.0f;
const float MAGNIFY_HEIGHT = 100.0f;
const float MAGNIFY_MULT = 2.0f;
const float DEFAULT_HMD_UI_ANGULAR_SIZE = 72.0f;
// Handles the drawing of the overlays to the screen
// TODO: divide up the rendering, displaying and input handling
// facilities of this class
class ApplicationCompositor : public QObject {
Q_OBJECT
public:
ApplicationCompositor();
~ApplicationCompositor();
void displayOverlayTexture(RenderArgs* renderArgs);
void displayOverlayTextureHmd(RenderArgs* renderArgs, int eye);
QPoint getPalmClickLocation(const PalmData *palm) const;
bool calculateRayUICollisionPoint(const glm::vec3& position, const glm::vec3& direction, glm::vec3& result) const;
bool hasMagnifier() const { return _magnifier; }
void toggleMagnifier() { _magnifier = !_magnifier; }
float getHmdUIAngularSize() const { return _hmdUIAngularSize; }
void setHmdUIAngularSize(float hmdUIAngularSize) { _hmdUIAngularSize = hmdUIAngularSize; }
// Converter from one frame of reference to another.
// Frame of reference:
// Direction: Ray that represents the spherical values
// Screen: Position on the screen (x,y)
// Spherical: Pitch and yaw that gives the position on the sphere we project on (yaw,pitch)
// Overlay: Position on the overlay (x,y)
// (x,y) in Overlay are similar to (x,y) in Screen, except they can be outside the bounds of the screen.
// This allows for picking outside of the screen projection in 3D.
glm::vec2 sphericalToOverlay(const glm::vec2 & sphericalPos) const;
glm::vec2 overlayToSpherical(const glm::vec2 & overlayPos) const;
glm::vec2 screenToOverlay(const glm::vec2 & screenPos) const;
glm::vec2 overlayToScreen(const glm::vec2 & overlayPos) const;
void computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& origin, glm::vec3& direction) const;
GLuint getOverlayTexture() const;
static glm::vec2 directionToSpherical(const glm::vec3 & direction);
static glm::vec3 sphericalToDirection(const glm::vec2 & sphericalPos);
static glm::vec2 screenToSpherical(const glm::vec2 & screenPos);
static glm::vec2 sphericalToScreen(const glm::vec2 & sphericalPos);
private:
void displayOverlayTextureStereo(RenderArgs* renderArgs, float aspectRatio, float fov);
void bindCursorTexture(gpu::Batch& batch, uint8_t cursorId = 0);
void buildHemiVertices(const float fov, const float aspectRatio, const int slices, const int stacks);
void drawSphereSection(gpu::Batch& batch);
void updateTooltips();
void renderPointers(gpu::Batch& batch);
void renderMagnifier(gpu::Batch& batch, const glm::vec2& magPos, float sizeMult, bool showBorder);
void renderControllerPointers(gpu::Batch& batch);
void renderPointersOculus(gpu::Batch& batch);
// Support for hovering and tooltips
static EntityItemID _noItemId;
EntityItemID _hoverItemId{ _noItemId };
QString _hoverItemHref;
quint64 _hoverItemEnterUsecs{ 0 };
float _hmdUIAngularSize = DEFAULT_HMD_UI_ANGULAR_SIZE;
float _textureFov{ glm::radians(DEFAULT_HMD_UI_ANGULAR_SIZE) };
float _textureAspectRatio{ 1.0f };
int _hemiVerticesID{ GeometryCache::UNKNOWN_ID };
enum Reticles { MOUSE, LEFT_CONTROLLER, RIGHT_CONTROLLER, NUMBER_OF_RETICLES };
bool _reticleActive[NUMBER_OF_RETICLES];
QPoint _reticlePosition[NUMBER_OF_RETICLES];
bool _magActive[NUMBER_OF_RETICLES];
float _magSizeMult[NUMBER_OF_RETICLES];
bool _magnifier{ true };
float _alpha{ 1.0f };
float _oculusUIRadius{ 1.0f };
QMap<uint16_t, gpu::TexturePointer> _cursors;
int _reticleQuad;
int _magnifierQuad;
int _magnifierBorder;
int _previousBorderWidth{ -1 };
int _previousBorderHeight{ -1 };
glm::vec3 _previousMagnifierBottomLeft;
glm::vec3 _previousMagnifierBottomRight;
glm::vec3 _previousMagnifierTopLeft;
glm::vec3 _previousMagnifierTopRight;
};
#endif // hifi_ApplicationCompositor_h

File diff suppressed because it is too large

View file

@ -13,16 +13,8 @@
#define hifi_ApplicationOverlay_h
#include <gpu/Texture.h>
class Camera;
class Overlays;
class QOpenGLFramebufferObject;
const float MAGNIFY_WIDTH = 220.0f;
const float MAGNIFY_HEIGHT = 100.0f;
const float MAGNIFY_MULT = 2.0f;
const float DEFAULT_HMD_UI_ANGULAR_SIZE = 72.0f;
// Handles the drawing of the overlays to the screen
// TODO: divide up the rendering, displaying and input handling
// facilities of this class
@ -33,122 +25,26 @@ public:
~ApplicationOverlay();
void renderOverlay(RenderArgs* renderArgs);
void displayOverlayTexture();
void displayOverlayTextureStereo(Camera& whichCamera, float aspectRatio, float fov);
void displayOverlayTextureHmd(Camera& whichCamera);
GLuint getOverlayTexture();
QPoint getPalmClickLocation(const PalmData *palm) const;
bool calculateRayUICollisionPoint(const glm::vec3& position, const glm::vec3& direction, glm::vec3& result) const;
bool hasMagnifier() const { return _magnifier; }
void toggleMagnifier() { _magnifier = !_magnifier; }
float getHmdUIAngularSize() const { return _hmdUIAngularSize; }
void setHmdUIAngularSize(float hmdUIAngularSize) { _hmdUIAngularSize = hmdUIAngularSize; }
// Converter from one frame of reference to another.
// Frame of reference:
// Direction: Ray that represents the spherical values
// Screen: Position on the screen (x,y)
// Spherical: Pitch and yaw that gives the position on the sphere we project on (yaw,pitch)
// Overlay: Position on the overlay (x,y)
// (x,y) in Overlay are similar to (x,y) in Screen, except they can be outside the bounds of the screen.
// This allows for picking outside of the screen projection in 3D.
glm::vec2 sphericalToOverlay(const glm::vec2 & sphericalPos) const;
glm::vec2 overlayToSpherical(const glm::vec2 & overlayPos) const;
glm::vec2 screenToOverlay(const glm::vec2 & screenPos) const;
glm::vec2 overlayToScreen(const glm::vec2 & overlayPos) const;
void computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& origin, glm::vec3& direction) const;
static glm::vec2 directionToSpherical(const glm::vec3 & direction);
static glm::vec3 sphericalToDirection(const glm::vec2 & sphericalPos);
static glm::vec2 screenToSpherical(const glm::vec2 & screenPos);
static glm::vec2 sphericalToScreen(const glm::vec2 & sphericalPos);
private:
// Interleaved vertex data
struct TextureVertex {
glm::vec3 position;
glm::vec2 uv;
};
void renderStatsAndLogs(RenderArgs* renderArgs);
void renderDomainConnectionStatusBorder(RenderArgs* renderArgs);
void renderRearViewToFbo(RenderArgs* renderArgs);
void renderRearView(RenderArgs* renderArgs);
void renderQmlUi(RenderArgs* renderArgs);
void renderOverlays(RenderArgs* renderArgs);
void buildFramebufferObject();
typedef QPair<GLuint, GLuint> VerticesIndices;
class TexturedHemisphere {
public:
TexturedHemisphere();
~TexturedHemisphere();
void bind();
void release();
GLuint getTexture();
void buildFramebufferObject();
void buildVBO(const float fov, const float aspectRatio, const int slices, const int stacks);
void render();
private:
void cleanupVBO();
GLuint _vertices;
GLuint _indices;
QOpenGLFramebufferObject* _framebufferObject;
VerticesIndices _vbo;
};
float _hmdUIAngularSize = DEFAULT_HMD_UI_ANGULAR_SIZE;
void renderReticle(glm::quat orientation, float alpha);
void renderPointers();
void renderMagnifier(glm::vec2 magPos, float sizeMult, bool showBorder);
void renderControllerPointers();
void renderPointersOculus();
void renderAudioMeter();
void renderCameraToggle();
void renderStatsAndLogs();
void renderDomainConnectionStatusBorder();
void bindCursorTexture(gpu::Batch& batch, uint8_t cursorId = 0);
float _alpha{ 1.0f };
float _trailingAudioLoudness{ 0.0f };
GLuint _uiTexture{ 0 };
TexturedHemisphere _overlays;
float _textureFov;
float _textureAspectRatio;
enum Reticles { MOUSE, LEFT_CONTROLLER, RIGHT_CONTROLLER, NUMBER_OF_RETICLES };
bool _reticleActive[NUMBER_OF_RETICLES];
QPoint _reticlePosition[NUMBER_OF_RETICLES];
bool _magActive[NUMBER_OF_RETICLES];
float _magSizeMult[NUMBER_OF_RETICLES];
quint64 _lastMouseMove;
bool _magnifier;
float _alpha = 1.0f;
float _oculusUIRadius;
float _trailingAudioLoudness;
gpu::TexturePointer _crosshairTexture;
QMap<uint16_t, gpu::TexturePointer> _cursors;
GLuint _newUiTexture{ 0 };
int _reticleQuad;
int _magnifierQuad;
int _audioRedQuad;
int _audioGreenQuad;
int _audioBlueQuad;
int _domainStatusBorder;
int _magnifierBorder;
int _previousBorderWidth;
int _previousBorderHeight;
glm::vec3 _previousMagnifierBottomLeft;
glm::vec3 _previousMagnifierBottomRight;
glm::vec3 _previousMagnifierTopLeft;
glm::vec3 _previousMagnifierTopRight;
ivec2 _previousBorderSize{ -1 };
QOpenGLFramebufferObject* _overlayFramebuffer{ nullptr };
};
#endif // hifi_ApplicationOverlay_h

View file

@ -0,0 +1,134 @@
//
// Created by Bradley Austin Davis 2015/06/19
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Application.h"
#include "AvatarInputs.h"
#include <SettingHandle.h>
#include "Menu.h"
#include "devices/FaceTracker.h"
HIFI_QML_DEF(AvatarInputs)
static AvatarInputs* INSTANCE{ nullptr };
static const char SETTINGS_GROUP_NAME[] = "Rear View Tools";
static const char ZOOM_LEVEL_SETTINGS[] = "ZoomLevel";
static Setting::Handle<int> rearViewZoomLevel(QStringList() << SETTINGS_GROUP_NAME << ZOOM_LEVEL_SETTINGS, 0);
AvatarInputs* AvatarInputs::getInstance() {
if (!INSTANCE) {
AvatarInputs::registerType();
AvatarInputs::show();
Q_ASSERT(INSTANCE);
}
return INSTANCE;
}
AvatarInputs::AvatarInputs(QQuickItem* parent) : QQuickItem(parent) {
INSTANCE = this;
int zoomSetting = rearViewZoomLevel.get();
_mirrorZoomed = zoomSetting == 0;
}
#define AI_UPDATE(name, src) \
{ \
auto val = src; \
if (_##name != val) { \
_##name = val; \
emit name##Changed(); \
} \
}
#define AI_UPDATE_FLOAT(name, src, epsilon) \
{ \
float val = src; \
if (fabs(_##name - val) >= epsilon) { \
_##name = val; \
emit name##Changed(); \
} \
}
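// For reference, a call such as AI_UPDATE(audioMuted, audioIO->isMuted()) expands to roughly:
// {
//     auto val = audioIO->isMuted();
//     if (_audioMuted != val) {
//         _audioMuted = val;
//         emit audioMutedChanged();
//     }
// }
// so each NOTIFY signal only fires when the cached property value actually changes.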
void AvatarInputs::update() {
if (!Menu::getInstance()) {
return;
}
AI_UPDATE(mirrorVisible, Menu::getInstance()->isOptionChecked(MenuOption::Mirror) && !qApp->isHMDMode()
&& !Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror));
AI_UPDATE(cameraEnabled, !Menu::getInstance()->isOptionChecked(MenuOption::NoFaceTracking));
AI_UPDATE(cameraMuted, Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking));
auto audioIO = DependencyManager::get<AudioClient>();
const float CLIPPING_INDICATOR_TIME = 1.0f;
const float AUDIO_METER_AVERAGING = 0.5;
const float LOG2 = log(2.0f);
const float METER_LOUDNESS_SCALE = 2.8f / 5.0f;
const float LOG2_LOUDNESS_FLOOR = 11.0f;
float audioLevel = 0.0f;
auto audio = DependencyManager::get<AudioClient>();
float loudness = audio->getLastInputLoudness() + 1.0f;
_trailingAudioLoudness = AUDIO_METER_AVERAGING * _trailingAudioLoudness + (1.0f - AUDIO_METER_AVERAGING) * loudness;
float log2loudness = logf(_trailingAudioLoudness) / LOG2;
if (log2loudness <= LOG2_LOUDNESS_FLOOR) {
audioLevel = (log2loudness / LOG2_LOUDNESS_FLOOR) * METER_LOUDNESS_SCALE;
} else {
audioLevel = (log2loudness - (LOG2_LOUDNESS_FLOOR - 1.0f)) * METER_LOUDNESS_SCALE;
}
if (audioLevel > 1.0) {
audioLevel = 1.0;
}
AI_UPDATE_FLOAT(audioLevel, audioLevel, 0.01);
AI_UPDATE(audioClipping, ((audioIO->getTimeSinceLastClip() > 0.0f) && (audioIO->getTimeSinceLastClip() < 1.0f)));
AI_UPDATE(audioMuted, audioIO->isMuted());
//// Make muted icon pulsate
//static const float PULSE_MIN = 0.4f;
//static const float PULSE_MAX = 1.0f;
//static const float PULSE_FREQUENCY = 1.0f; // in Hz
//qint64 now = usecTimestampNow();
//if (now - _iconPulseTimeReference > (qint64)USECS_PER_SECOND) {
// // Prevents t from getting too big, which would diminish glm::cos precision
// _iconPulseTimeReference = now - ((now - _iconPulseTimeReference) % USECS_PER_SECOND);
//}
//float t = (float)(now - _iconPulseTimeReference) / (float)USECS_PER_SECOND;
//float pulseFactor = (glm::cos(t * PULSE_FREQUENCY * 2.0f * PI) + 1.0f) / 2.0f;
//iconColor = PULSE_MIN + (PULSE_MAX - PULSE_MIN) * pulseFactor;
}
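// Worked example of the meter mapping above (assumed trailing loudness values): a trailing loudness
// of 2048 gives log2loudness = 11 (the floor), so audioLevel = (11 / 11) * 0.56 = 0.56; a trailing
// loudness of 4096 gives log2loudness = 12, so audioLevel = (12 - 10) * 0.56 = 1.12, which is then
// clamped to 1.0. METER_LOUDNESS_SCALE = 2.8 / 5 = 0.56.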
void AvatarInputs::toggleCameraMute() {
FaceTracker* faceTracker = Application::getInstance()->getSelectedFaceTracker();
if (faceTracker) {
faceTracker->toggleMute();
}
}
void AvatarInputs::toggleAudioMute() {
DependencyManager::get<AudioClient>()->toggleMute();
}
void AvatarInputs::resetSensors() {
qApp->resetSensors();
}
void AvatarInputs::toggleZoom() {
_mirrorZoomed = !_mirrorZoomed;
rearViewZoomLevel.set(_mirrorZoomed ? 0 : 1);
emit mirrorZoomedChanged();
}
void AvatarInputs::closeMirror() {
if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
Menu::getInstance()->triggerOption(MenuOption::Mirror);
}
}

View file

@ -0,0 +1,59 @@
//
// Created by Bradley Austin Davis 2015/06/19
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AvatarInputs_h
#define hifi_AvatarInputs_h
#include <QQuickItem>
#include <OffscreenUi.h>
#define AI_PROPERTY(type, name, initialValue) \
Q_PROPERTY(type name READ name NOTIFY name##Changed) \
public: \
type name() { return _##name; }; \
private: \
type _##name{ initialValue };
class AvatarInputs : public QQuickItem {
Q_OBJECT
HIFI_QML_DECL
AI_PROPERTY(bool, cameraEnabled, false)
AI_PROPERTY(bool, cameraMuted, false)
AI_PROPERTY(bool, audioMuted, false)
AI_PROPERTY(bool, audioClipping, false)
AI_PROPERTY(float, audioLevel, 0)
AI_PROPERTY(bool, mirrorVisible, false)
AI_PROPERTY(bool, mirrorZoomed, true)
public:
static AvatarInputs* getInstance();
AvatarInputs(QQuickItem* parent = nullptr);
void update();
signals:
void cameraEnabledChanged();
void cameraMutedChanged();
void audioMutedChanged();
void audioClippingChanged();
void audioLevelChanged();
void mirrorVisibleChanged();
void mirrorZoomedChanged();
protected:
Q_INVOKABLE void resetSensors();
Q_INVOKABLE void toggleCameraMute();
Q_INVOKABLE void toggleAudioMute();
Q_INVOKABLE void toggleZoom();
Q_INVOKABLE void closeMirror();
private:
float _trailingAudioLoudness{ 0 };
};
#endif // hifi_AvatarInputs_h

View file

@ -175,7 +175,7 @@ void PreferencesDialog::loadPreferences() {
ui.maxOctreePPSSpin->setValue(qApp->getMaxOctreePacketsPerSecond());
ui.oculusUIAngularSizeSpin->setValue(qApp->getApplicationOverlay().getHmdUIAngularSize());
ui.oculusUIAngularSizeSpin->setValue(qApp->getApplicationCompositor().getHmdUIAngularSize());
SixenseManager& sixense = SixenseManager::getInstance();
ui.sixenseReticleMoveSpeedSpin->setValue(sixense.getReticleMoveSpeed());
@ -239,7 +239,7 @@ void PreferencesDialog::savePreferences() {
qApp->setMaxOctreePacketsPerSecond(ui.maxOctreePPSSpin->value());
qApp->getApplicationOverlay().setHmdUIAngularSize(ui.oculusUIAngularSizeSpin->value());
qApp->getApplicationCompositor().setHmdUIAngularSize(ui.oculusUIAngularSizeSpin->value());
SixenseManager& sixense = SixenseManager::getInstance();
sixense.setReticleMoveSpeed(ui.sixenseReticleMoveSpeedSpin->value());

View file

@ -1,134 +0,0 @@
//
// RearMirrorTools.cpp
// interface/src/ui
//
// Created by Stojce Slavkovski on 10/23/2013.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InterfaceConfig.h"
#include <QMouseEvent>
#include <PathUtils.h>
#include <SharedUtil.h>
#include <gpu/GLBackend.h>
#include "Application.h"
#include "RearMirrorTools.h"
#include "Util.h"
const int ICON_SIZE = 24;
const int ICON_PADDING = 5;
const char SETTINGS_GROUP_NAME[] = "Rear View Tools";
const char ZOOM_LEVEL_SETTINGS[] = "ZoomLevel";
Setting::Handle<int> RearMirrorTools::rearViewZoomLevel(QStringList() << SETTINGS_GROUP_NAME << ZOOM_LEVEL_SETTINGS,
ZoomLevel::HEAD);
RearMirrorTools::RearMirrorTools(QRect& bounds) :
_bounds(bounds),
_windowed(false),
_fullScreen(false)
{
_closeTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/close.svg");
_zoomHeadTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/plus.svg");
_zoomBodyTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/minus.svg");
_shrinkIconRect = QRect(ICON_PADDING, ICON_PADDING, ICON_SIZE, ICON_SIZE);
_closeIconRect = QRect(_bounds.left() + ICON_PADDING, _bounds.top() + ICON_PADDING, ICON_SIZE, ICON_SIZE);
_resetIconRect = QRect(_bounds.width() - ICON_SIZE - ICON_PADDING, _bounds.top() + ICON_PADDING, ICON_SIZE, ICON_SIZE);
_bodyZoomIconRect = QRect(_bounds.width() - ICON_SIZE - ICON_PADDING, _bounds.bottom() - ICON_PADDING - ICON_SIZE, ICON_SIZE, ICON_SIZE);
_headZoomIconRect = QRect(_bounds.left() + ICON_PADDING, _bounds.bottom() - ICON_PADDING - ICON_SIZE, ICON_SIZE, ICON_SIZE);
}
void RearMirrorTools::render(RenderArgs* renderArgs, bool fullScreen, const QPoint & mousePosition) {
if (fullScreen) {
_fullScreen = true;
displayIcon(QRect(QPoint(), qApp->getDeviceSize()), _shrinkIconRect, _closeTexture);
} else {
// render rear view tools if mouse is in the bounds
_windowed = _bounds.contains(mousePosition);
if (_windowed) {
displayIcon(_bounds, _closeIconRect, _closeTexture);
ZoomLevel zoomLevel = (ZoomLevel)rearViewZoomLevel.get();
displayIcon(_bounds, _headZoomIconRect, _zoomHeadTexture, zoomLevel == HEAD);
displayIcon(_bounds, _bodyZoomIconRect, _zoomBodyTexture, zoomLevel == BODY);
}
}
}
bool RearMirrorTools::mousePressEvent(int x, int y) {
if (_windowed) {
if (_closeIconRect.contains(x, y)) {
_windowed = false;
emit closeView();
return true;
}
if (_headZoomIconRect.contains(x, y)) {
rearViewZoomLevel.set(HEAD);
return true;
}
if (_bodyZoomIconRect.contains(x, y)) {
rearViewZoomLevel.set(BODY);
return true;
}
if (_bounds.contains(x, y)) {
_windowed = false;
emit restoreView();
return true;
}
}
if (_fullScreen) {
if (_shrinkIconRect.contains(x, y)) {
_fullScreen = false;
emit shrinkView();
return true;
}
}
return false;
}
void RearMirrorTools::displayIcon(QRect bounds, QRect iconBounds, const gpu::TexturePointer& texture, bool selected) {
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadIdentity();
glOrtho(bounds.left(), bounds.right(), bounds.bottom(), bounds.top(), -1.0, 1.0);
glDisable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
glEnable(GL_TEXTURE_2D);
glm::vec4 quadColor;
if (selected) {
quadColor = glm::vec4(.5f, .5f, .5f, 1.0f);
} else {
quadColor = glm::vec4(1, 1, 1, 1);
}
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(texture));
glm::vec2 topLeft(iconBounds.left(), iconBounds.top());
glm::vec2 bottomRight(iconBounds.right(), iconBounds.bottom());
static const glm::vec2 texCoordTopLeft(0.0f, 1.0f);
static const glm::vec2 texCoordBottomRight(1.0f, 0.0f);
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor);
glPopMatrix();
glMatrixMode(GL_MODELVIEW);
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
}

View file

@ -1,56 +0,0 @@
//
// RearMirrorTools.h
// interface/src/ui
//
// Created by Stojce Slavkovski on 10/23/2013.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_RearMirrorTools_h
#define hifi_RearMirrorTools_h
#include <gpu/Texture.h>
#include <SettingHandle.h>
enum ZoomLevel {
HEAD = 0,
BODY = 1
};
class RearMirrorTools : public QObject {
Q_OBJECT
public:
RearMirrorTools(QRect& bounds);
void render(RenderArgs* renderArgs, bool fullScreen, const QPoint & mousePos);
bool mousePressEvent(int x, int y);
static Setting::Handle<int> rearViewZoomLevel;
signals:
void closeView();
void shrinkView();
void resetView();
void restoreView();
private:
QRect _bounds;
gpu::TexturePointer _closeTexture;
gpu::TexturePointer _zoomBodyTexture;
gpu::TexturePointer _zoomHeadTexture;
QRect _closeIconRect;
QRect _resetIconRect;
QRect _shrinkIconRect;
QRect _headZoomIconRect;
QRect _bodyZoomIconRect;
bool _windowed;
bool _fullScreen;
void displayIcon(QRect bounds, QRect iconBounds, const gpu::TexturePointer& texture, bool selected = false);
};
#endif // hifi_RearMirrorTools_h

View file

@ -1,17 +1,17 @@
//
// Stats.cpp
// interface/src/ui
//
// Created by Lucas Crisman on 22/03/14.
// Created by Bradley Austin Davis 2015/06/17
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <sstream>
#include <gpu/GPUConfig.h>
#include <stdlib.h>
#include "Stats.h"
#include <sstream>
#include <QFontDatabase>
#include <glm/glm.hpp>
#include <glm/gtx/component_wise.hpp>
@ -25,147 +25,31 @@
#include <LODManager.h>
#include <PerfStat.h>
#include "Stats.h"
#include "BandwidthRecorder.h"
#include "InterfaceConfig.h"
#include "Menu.h"
#include "Util.h"
#include "SequenceNumberStats.h"
HIFI_QML_DEF(Stats)
using namespace std;
const int STATS_PELS_PER_LINE = 16;
const int STATS_PELS_INITIALOFFSET = 12;
const int STATS_GENERAL_MIN_WIDTH = 165;
const int STATS_PING_MIN_WIDTH = 190;
const int STATS_GEO_MIN_WIDTH = 240;
const int STATS_OCTREE_MIN_WIDTH = 410;
static Stats* INSTANCE{ nullptr };
Stats* Stats::getInstance() {
static Stats stats;
return &stats;
if (!INSTANCE) {
Stats::registerType();
Stats::show();
Q_ASSERT(INSTANCE);
}
return INSTANCE;
}
Stats::Stats():
_expanded(false),
_recentMaxPackets(0),
_resetRecentMaxPacketsSoon(true),
_generalStatsWidth(STATS_GENERAL_MIN_WIDTH),
_pingStatsWidth(STATS_PING_MIN_WIDTH),
_geoStatsWidth(STATS_GEO_MIN_WIDTH),
_octreeStatsWidth(STATS_OCTREE_MIN_WIDTH),
_lastHorizontalOffset(0)
{
auto canvasSize = Application::getInstance()->getCanvasSize();
resetWidth(canvasSize.x, 0);
}
void Stats::toggleExpanded() {
_expanded = !_expanded;
}
// called on mouse click release
// check for clicks over stats in order to expand or contract them
void Stats::checkClick(int mouseX, int mouseY, int mouseDragStartedX, int mouseDragStartedY, int horizontalOffset) {
auto canvasSize = Application::getInstance()->getCanvasSize();
if (0 != glm::compMax(glm::abs(glm::ivec2(mouseX - mouseDragStartedX, mouseY - mouseDragStartedY)))) {
// not worried about dragging on stats
return;
}
int statsHeight = 0,
statsWidth = 0,
statsX = 0,
statsY = 0,
lines = 0;
statsX = horizontalOffset;
// top-left stats click
lines = _expanded ? 5 : 3;
statsHeight = lines * STATS_PELS_PER_LINE + 10;
if (mouseX > statsX && mouseX < statsX + _generalStatsWidth && mouseY > statsY && mouseY < statsY + statsHeight) {
toggleExpanded();
return;
}
statsX += _generalStatsWidth;
// ping stats click
if (Menu::getInstance()->isOptionChecked(MenuOption::TestPing)) {
lines = _expanded ? 4 : 3;
statsHeight = lines * STATS_PELS_PER_LINE + 10;
if (mouseX > statsX && mouseX < statsX + _pingStatsWidth && mouseY > statsY && mouseY < statsY + statsHeight) {
toggleExpanded();
return;
}
statsX += _pingStatsWidth;
}
// geo stats panel click
lines = _expanded ? 4 : 3;
statsHeight = lines * STATS_PELS_PER_LINE + 10;
if (mouseX > statsX && mouseX < statsX + _geoStatsWidth && mouseY > statsY && mouseY < statsY + statsHeight) {
toggleExpanded();
return;
}
statsX += _geoStatsWidth;
// top-right stats click
lines = _expanded ? 11 : 3;
statsHeight = lines * STATS_PELS_PER_LINE + 10;
statsWidth = canvasSize.x - statsX;
if (mouseX > statsX && mouseX < statsX + statsWidth && mouseY > statsY && mouseY < statsY + statsHeight) {
toggleExpanded();
return;
}
}
void Stats::resetWidth(int width, int horizontalOffset) {
auto canvasSize = Application::getInstance()->getCanvasSize();
int extraSpace = canvasSize.x - horizontalOffset - 2
- STATS_GENERAL_MIN_WIDTH
- (Menu::getInstance()->isOptionChecked(MenuOption::TestPing) ? STATS_PING_MIN_WIDTH -1 : 0)
- STATS_GEO_MIN_WIDTH
- STATS_OCTREE_MIN_WIDTH;
int panels = 4;
_generalStatsWidth = STATS_GENERAL_MIN_WIDTH;
if (Menu::getInstance()->isOptionChecked(MenuOption::TestPing)) {
_pingStatsWidth = STATS_PING_MIN_WIDTH;
} else {
_pingStatsWidth = 0;
panels = 3;
}
_geoStatsWidth = STATS_GEO_MIN_WIDTH;
_octreeStatsWidth = STATS_OCTREE_MIN_WIDTH;
if (extraSpace > panels) {
_generalStatsWidth += (int) extraSpace / panels;
if (Menu::getInstance()->isOptionChecked(MenuOption::TestPing)) {
_pingStatsWidth += (int) extraSpace / panels;
}
_geoStatsWidth += (int) extraSpace / panels;
_octreeStatsWidth += canvasSize.x -
(_generalStatsWidth + _pingStatsWidth + _geoStatsWidth + 3);
}
}
// translucent background box that makes stats more readable
void Stats::drawBackground(unsigned int rgba, int x, int y, int width, int height) {
glm::vec4 color(((rgba >> 24) & 0xff) / 255.0f,
((rgba >> 16) & 0xff) / 255.0f,
((rgba >> 8) & 0xff) / 255.0f,
(rgba & 0xff) / 255.0f);
// FIX ME: is this correct? It seems to work to fix textures bleeding into us...
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
DependencyManager::get<GeometryCache>()->renderQuad(x, y, width, height, color);
Stats::Stats(QQuickItem* parent) : QQuickItem(parent) {
INSTANCE = this;
const QFont font = QFontDatabase::systemFont(QFontDatabase::FixedFont);
_monospaceFont = font.family();
}
bool Stats::includeTimingRecord(const QString& name) {
@ -190,107 +74,240 @@ bool Stats::includeTimingRecord(const QString& name) {
return false;
}
// display expanded or contracted stats
void Stats::display(
const float* color,
int horizontalOffset,
float fps,
int inPacketsPerSecond,
int outPacketsPerSecond,
int inKbitsPerSecond,
int outKbitsPerSecond,
int voxelPacketsToProcess)
{
auto canvasSize = Application::getInstance()->getCanvasSize();
unsigned int backgroundColor = 0x33333399;
int verticalOffset = 0, lines = 0;
float scale = 0.10f;
float rotation = 0.0f;
int font = 2;
QLocale locale(QLocale::English);
std::stringstream octreeStats;
QSharedPointer<BandwidthRecorder> bandwidthRecorder = DependencyManager::get<BandwidthRecorder>();
if (_lastHorizontalOffset != horizontalOffset) {
resetWidth(canvasSize.x, horizontalOffset);
_lastHorizontalOffset = horizontalOffset;
#define STAT_UPDATE(name, src) \
{ \
auto val = src; \
if (_##name != val) { \
_##name = val; \
emit name##Changed(); \
} \
}
glPointSize(1.0f);
#define STAT_UPDATE_FLOAT(name, src, epsilon) \
{ \
float val = src; \
if (fabs(_##name - val) >= epsilon) { \
_##name = val; \
emit name##Changed(); \
} \
}
void Stats::updateStats() {
if (!Menu::getInstance()->isOptionChecked(MenuOption::Stats)) {
if (isVisible()) {
setVisible(false);
}
return;
} else {
if (!isVisible()) {
setVisible(true);
}
}
bool shouldDisplayTimingDetail = Menu::getInstance()->isOptionChecked(MenuOption::DisplayDebugTimingDetails) &&
Menu::getInstance()->isOptionChecked(MenuOption::Stats) && isExpanded();
if (shouldDisplayTimingDetail != PerformanceTimer::isActive()) {
PerformanceTimer::setActive(shouldDisplayTimingDetail);
}
auto nodeList = DependencyManager::get<NodeList>();
auto avatarManager = DependencyManager::get<AvatarManager>();
// we need to take one avatar out so we don't include ourselves
int totalAvatars = DependencyManager::get<AvatarManager>()->size() - 1;
int totalServers = DependencyManager::get<NodeList>()->size();
STAT_UPDATE(avatarCount, avatarManager->size() - 1);
STAT_UPDATE(serverCount, nodeList->size());
STAT_UPDATE(framerate, (int)qApp->getFps());
lines = 5;
int columnOneWidth = _generalStatsWidth;
auto bandwidthRecorder = DependencyManager::get<BandwidthRecorder>();
STAT_UPDATE(packetInCount, bandwidthRecorder->getCachedTotalAverageInputPacketsPerSecond());
STAT_UPDATE(packetOutCount, bandwidthRecorder->getCachedTotalAverageOutputPacketsPerSecond());
STAT_UPDATE_FLOAT(mbpsIn, (float)bandwidthRecorder->getCachedTotalAverageInputKilobitsPerSecond() / 1000.0f, 0.01f);
STAT_UPDATE_FLOAT(mbpsOut, (float)bandwidthRecorder->getCachedTotalAverageOutputKilobitsPerSecond() / 1000.0f, 0.01f);
// Second column: ping
if (Menu::getInstance()->isOptionChecked(MenuOption::TestPing)) {
SharedNodePointer audioMixerNode = nodeList->soloNodeOfType(NodeType::AudioMixer);
SharedNodePointer avatarMixerNode = nodeList->soloNodeOfType(NodeType::AvatarMixer);
STAT_UPDATE(audioPing, audioMixerNode ? audioMixerNode->getPingMs() : -1);
STAT_UPDATE(avatarPing, avatarMixerNode ? avatarMixerNode->getPingMs() : -1);
//// Now handle voxel servers, since there could be more than one, we average their ping times
unsigned long totalPingOctree = 0;
int octreeServerCount = 0;
int pingOctreeMax = 0;
int pingVoxel;
nodeList->eachNode([&](const SharedNodePointer& node) {
// TODO: this should also support entities
if (node->getType() == NodeType::EntityServer) {
totalPingOctree += node->getPingMs();
octreeServerCount++;
if (pingOctreeMax < node->getPingMs()) {
pingOctreeMax = node->getPingMs();
}
}
});
if (octreeServerCount) {
pingVoxel = totalPingOctree / octreeServerCount;
}
//STAT_UPDATE(entitiesPing, pingVoxel);
//if (_expanded) {
// QString voxelMaxPing;
// if (pingVoxel >= 0) { // Average is only meaningful if pingVoxel is valid.
// voxelMaxPing = QString("Voxel max ping: %1").arg(pingOctreeMax);
// } else {
// voxelMaxPing = QString("Voxel max ping: --");
// }
} else {
// -2 causes the QML to hide the ping column
STAT_UPDATE(audioPing, -2);
}
// Third column, avatar stats
MyAvatar* myAvatar = avatarManager->getMyAvatar();
glm::vec3 avatarPos = myAvatar->getPosition();
STAT_UPDATE(position, QVector3D(avatarPos.x, avatarPos.y, avatarPos.z));
STAT_UPDATE_FLOAT(velocity, glm::length(myAvatar->getVelocity()), 0.1f);
STAT_UPDATE_FLOAT(yaw, myAvatar->getBodyYaw(), 0.1f);
if (_expanded) {
SharedNodePointer avatarMixer = nodeList->soloNodeOfType(NodeType::AvatarMixer);
if (avatarMixer) {
STAT_UPDATE(avatarMixerKbps, roundf(
bandwidthRecorder->getAverageInputKilobitsPerSecond(NodeType::AvatarMixer) +
bandwidthRecorder->getAverageOutputKilobitsPerSecond(NodeType::AvatarMixer)));
STAT_UPDATE(avatarMixerPps, roundf(
bandwidthRecorder->getAverageInputPacketsPerSecond(NodeType::AvatarMixer) +
bandwidthRecorder->getAverageOutputPacketsPerSecond(NodeType::AvatarMixer)));
} else {
STAT_UPDATE(avatarMixerKbps, -1);
STAT_UPDATE(avatarMixerPps, -1);
}
SharedNodePointer audioMixerNode = nodeList->soloNodeOfType(NodeType::AudioMixer);
if (audioMixerNode) {
STAT_UPDATE(audioMixerKbps, roundf(
bandwidthRecorder->getAverageInputKilobitsPerSecond(NodeType::AudioMixer) +
bandwidthRecorder->getAverageOutputKilobitsPerSecond(NodeType::AudioMixer)));
STAT_UPDATE(audioMixerPps, roundf(
bandwidthRecorder->getAverageInputPacketsPerSecond(NodeType::AudioMixer) +
bandwidthRecorder->getAverageOutputPacketsPerSecond(NodeType::AudioMixer)));
} else {
STAT_UPDATE(audioMixerKbps, -1);
STAT_UPDATE(audioMixerPps, -1);
}
STAT_UPDATE(downloads, ResourceCache::getLoadingRequests().size());
STAT_UPDATE(downloadsPending, ResourceCache::getPendingRequestCount());
// TODO fix to match original behavior
//stringstream downloads;
//downloads << "Downloads: ";
//foreach(Resource* resource, ) {
// downloads << (int)(resource->getProgress() * 100.0f) << "% ";
//}
//downloads << "(" << << " pending)";
} // expanded avatar column
// Fourth column, octree stats
int serverCount = 0;
int movingServerCount = 0;
unsigned long totalNodes = 0;
unsigned long totalInternal = 0;
unsigned long totalLeaves = 0;
std::stringstream sendingModeStream("");
sendingModeStream << "[";
NodeToOctreeSceneStats* octreeServerSceneStats = Application::getInstance()->getOcteeSceneStats();
for (NodeToOctreeSceneStatsIterator i = octreeServerSceneStats->begin(); i != octreeServerSceneStats->end(); i++) {
//const QUuid& uuid = i->first;
OctreeSceneStats& stats = i->second;
serverCount++;
if (_expanded) {
if (serverCount > 1) {
sendingModeStream << ",";
}
if (stats.isMoving()) {
sendingModeStream << "M";
movingServerCount++;
} else {
sendingModeStream << "S";
}
}
// calculate server node totals
totalNodes += stats.getTotalElements();
if (_expanded) {
totalInternal += stats.getTotalInternal();
totalLeaves += stats.getTotalLeaves();
}
}
if (_expanded) {
if (serverCount == 0) {
sendingModeStream << "---";
}
sendingModeStream << "] " << serverCount << " servers";
if (movingServerCount > 0) {
sendingModeStream << " <SCENE NOT STABLE>";
} else {
sendingModeStream << " <SCENE STABLE>";
}
QString sendingModeResult = sendingModeStream.str().c_str();
STAT_UPDATE(sendingMode, sendingModeResult);
}
// Incoming packets
QLocale locale(QLocale::English);
auto voxelPacketsToProcess = qApp->getOctreePacketProcessor().packetsToProcessCount();
if (_expanded) {
std::stringstream octreeStats;
QString packetsString = locale.toString((int)voxelPacketsToProcess);
QString maxString = locale.toString((int)_recentMaxPackets);
octreeStats << "Octree Packets to Process: " << qPrintable(packetsString)
<< " [Recent Max: " << qPrintable(maxString) << "]";
QString str = octreeStats.str().c_str();
STAT_UPDATE(packetStats, str);
// drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
}
if (_resetRecentMaxPacketsSoon && voxelPacketsToProcess > 0) {
_recentMaxPackets = 0;
_resetRecentMaxPacketsSoon = false;
}
if (voxelPacketsToProcess == 0) {
_resetRecentMaxPacketsSoon = true;
} else if (voxelPacketsToProcess > _recentMaxPackets) {
_recentMaxPackets = voxelPacketsToProcess;
}
// Server Octree Elements
STAT_UPDATE(serverElements, totalNodes);
STAT_UPDATE(localElements, OctreeElement::getNodeCount());
if (_expanded) {
STAT_UPDATE(serverInternal, totalInternal);
STAT_UPDATE(serverLeaves, totalLeaves);
// Local Voxels
STAT_UPDATE(localInternal, OctreeElement::getInternalNodeCount());
STAT_UPDATE(localLeaves, OctreeElement::getLeafNodeCount());
// LOD Details
STAT_UPDATE(lodStatus, "You can see " + DependencyManager::get<LODManager>()->getLODFeedbackText());
}
bool performanceTimerIsActive = PerformanceTimer::isActive();
bool displayPerf = _expanded && Menu::getInstance()->isOptionChecked(MenuOption::DisplayDebugTimingDetails);
if (displayPerf && performanceTimerIsActive) {
if (!_timingExpanded) {
_timingExpanded = true;
emit timingExpandedChanged();
}
PerformanceTimer::tallyAllTimerRecords(); // do this even if we're not displaying them, so they don't stack up
columnOneWidth = _generalStatsWidth + _pingStatsWidth + _geoStatsWidth; // 3 columns wide...
// we will also include room for 1 line per timing record and a header of 4 lines
lines += 4;
const QMap<QString, PerformanceTimerRecord>& allRecords = PerformanceTimer::getAllTimerRecords();
QMapIterator<QString, PerformanceTimerRecord> i(allRecords);
bool onlyDisplayTopTen = Menu::getInstance()->isOptionChecked(MenuOption::OnlyDisplayTopTen);
int statsLines = 0;
while (i.hasNext()) {
i.next();
if (includeTimingRecord(i.key())) {
lines++;
statsLines++;
if (onlyDisplayTopTen && statsLines == 10) {
break;
}
}
}
}
drawBackground(backgroundColor, horizontalOffset, 0, columnOneWidth, (lines + 1) * STATS_PELS_PER_LINE);
horizontalOffset += 5;
int columnOneHorizontalOffset = horizontalOffset;
QString serverNodes = QString("Servers: %1").arg(totalServers);
QString avatarNodes = QString("Avatars: %1").arg(totalAvatars);
QString framesPerSecond = QString("Framerate: %1 FPS").arg(fps, 3, 'f', 0);
verticalOffset = STATS_PELS_INITIALOFFSET; // first one is offset by less than a line
drawText(horizontalOffset, verticalOffset, scale, rotation, font, serverNodes.toUtf8().constData(), color);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, avatarNodes.toUtf8().constData(), color);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, framesPerSecond.toUtf8().constData(), color);
QString packetsPerSecondString = QString("Packets In/Out: %1/%2").arg(inPacketsPerSecond).arg(outPacketsPerSecond);
QString averageMegabitsPerSecond = QString("Mbps In/Out: %1/%2").
arg((float)inKbitsPerSecond * 1.0f / 1000.0f).
arg((float)outKbitsPerSecond * 1.0f / 1000.0f);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, packetsPerSecondString.toUtf8().constData(), color);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, averageMegabitsPerSecond.toUtf8().constData(), color);
// TODO: the display of these timing details should all be moved to JavaScript
if (displayPerf && performanceTimerIsActive) {
bool onlyDisplayTopTen = Menu::getInstance()->isOptionChecked(MenuOption::OnlyDisplayTopTen);
// Timing details...
verticalOffset += STATS_PELS_PER_LINE * 4; // skip 3 lines to be under the other columns
drawText(columnOneHorizontalOffset, verticalOffset, scale, rotation, font,
"-------------------------------------------------------- Function "
"------------------------------------------------------- --msecs- -calls--", color);
// First iterate all the records, and for the ones that should be included, insert them into
// a new Map sorted by average time...
bool onlyDisplayTopTen = Menu::getInstance()->isOptionChecked(MenuOption::OnlyDisplayTopTen);
QMap<float, QString> sortedRecords;
const QMap<QString, PerformanceTimerRecord>& allRecords = PerformanceTimer::getAllTimerRecords();
QMapIterator<QString, PerformanceTimerRecord> i(allRecords);
@ -306,316 +323,54 @@ void Stats::display(
int linesDisplayed = 0;
QMapIterator<float, QString> j(sortedRecords);
j.toBack();
QString perfLines;
while (j.hasPrevious()) {
j.previous();
QChar noBreakingSpace = QChar::Nbsp;
QString functionName = j.value();
static const QChar noBreakingSpace = QChar::Nbsp;
QString functionName = j.value();
const PerformanceTimerRecord& record = allRecords.value(functionName);
QString perfLine = QString("%1: %2 [%3]").
arg(QString(qPrintable(functionName)), 120, noBreakingSpace).
perfLines += QString("%1: %2 [%3]\n").
arg(QString(qPrintable(functionName)), 90, noBreakingSpace).
arg((float)record.getMovingAverage() / (float)USECS_PER_MSEC, 8, 'f', 3, noBreakingSpace).
arg((int)record.getCount(), 6, 10, noBreakingSpace);
verticalOffset += STATS_PELS_PER_LINE;
drawText(columnOneHorizontalOffset, verticalOffset, scale, rotation, font, perfLine.toUtf8().constData(), color);
linesDisplayed++;
if (onlyDisplayTopTen && linesDisplayed == 10) {
break;
}
}
_timingStats = perfLines;
emit timingStatsChanged();
} else if (_timingExpanded) {
_timingExpanded = false;
emit timingExpandedChanged();
}
verticalOffset = STATS_PELS_INITIALOFFSET;
horizontalOffset = _lastHorizontalOffset + _generalStatsWidth + 1;
if (Menu::getInstance()->isOptionChecked(MenuOption::TestPing)) {
int pingAudio = -1, pingAvatar = -1, pingVoxel = -1, pingOctreeMax = -1;
auto nodeList = DependencyManager::get<NodeList>();
SharedNodePointer audioMixerNode = nodeList->soloNodeOfType(NodeType::AudioMixer);
SharedNodePointer avatarMixerNode = nodeList->soloNodeOfType(NodeType::AvatarMixer);
pingAudio = audioMixerNode ? audioMixerNode->getPingMs() : -1;
pingAvatar = avatarMixerNode ? avatarMixerNode->getPingMs() : -1;
// Now handle voxel servers, since there could be more than one, we average their ping times
unsigned long totalPingOctree = 0;
int octreeServerCount = 0;
nodeList->eachNode([&totalPingOctree, &pingOctreeMax, &octreeServerCount](const SharedNodePointer& node){
// TODO: this should also support entities
if (node->getType() == NodeType::EntityServer) {
totalPingOctree += node->getPingMs();
octreeServerCount++;
if (pingOctreeMax < node->getPingMs()) {
pingOctreeMax = node->getPingMs();
}
}
});
if (octreeServerCount) {
pingVoxel = totalPingOctree / octreeServerCount;
}
lines = _expanded ? 4 : 3;
// only draw our background if column one didn't draw a wide background
if (columnOneWidth == _generalStatsWidth) {
drawBackground(backgroundColor, horizontalOffset, 0, _pingStatsWidth, (lines + 1) * STATS_PELS_PER_LINE);
}
horizontalOffset += 5;
QString audioPing;
if (pingAudio >= 0) {
audioPing = QString("Audio ping: %1").arg(pingAudio);
} else {
audioPing = QString("Audio ping: --");
}
QString avatarPing;
if (pingAvatar >= 0) {
avatarPing = QString("Avatar ping: %1").arg(pingAvatar);
} else {
avatarPing = QString("Avatar ping: --");
}
QString voxelAvgPing;
if (pingVoxel >= 0) {
voxelAvgPing = QString("Entities avg ping: %1").arg(pingVoxel);
} else {
voxelAvgPing = QString("Entities avg ping: --");
}
drawText(horizontalOffset, verticalOffset, scale, rotation, font, audioPing.toUtf8().constData(), color);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, avatarPing.toUtf8().constData(), color);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, voxelAvgPing.toUtf8().constData(), color);
if (_expanded) {
QString voxelMaxPing;
if (pingVoxel >= 0) { // Average is only meaningful if pingVoxel is valid.
voxelMaxPing = QString("Voxel max ping: %1").arg(pingOctreeMax);
} else {
voxelMaxPing = QString("Voxel max ping: --");
}
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, voxelMaxPing.toUtf8().constData(), color);
}
verticalOffset = STATS_PELS_INITIALOFFSET;
horizontalOffset = _lastHorizontalOffset + _generalStatsWidth + _pingStatsWidth + 2;
}
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::vec3 avatarPos = myAvatar->getPosition();
lines = _expanded ? 7 : 3;
if (columnOneWidth == _generalStatsWidth) {
drawBackground(backgroundColor, horizontalOffset, 0, _geoStatsWidth, (lines + 1) * STATS_PELS_PER_LINE);
}
horizontalOffset += 5;
QString avatarPosition = QString("Position: %1, %2, %3").
arg(avatarPos.x, -1, 'f', 1).
arg(avatarPos.y, -1, 'f', 1).
arg(avatarPos.z, -1, 'f', 1);
QString avatarVelocity = QString("Velocity: %1").arg(glm::length(myAvatar->getVelocity()), -1, 'f', 1);
QString avatarBodyYaw = QString("Yaw: %1").arg(myAvatar->getBodyYaw(), -1, 'f', 1);
QString avatarMixerStats;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, avatarPosition.toUtf8().constData(), color);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, avatarVelocity.toUtf8().constData(), color);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, avatarBodyYaw.toUtf8().constData(), color);
if (_expanded) {
SharedNodePointer avatarMixer = DependencyManager::get<NodeList>()->soloNodeOfType(NodeType::AvatarMixer);
if (avatarMixer) {
avatarMixerStats = QString("Avatar Mixer: %1 kbps, %2 pps").
arg(roundf(bandwidthRecorder->getAverageInputKilobitsPerSecond(NodeType::AudioMixer) +
bandwidthRecorder->getAverageOutputKilobitsPerSecond(NodeType::AudioMixer))).
arg(roundf(bandwidthRecorder->getAverageInputPacketsPerSecond(NodeType::AudioMixer) +
bandwidthRecorder->getAverageOutputPacketsPerSecond(NodeType::AudioMixer)));
} else {
avatarMixerStats = QString("No Avatar Mixer");
}
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, avatarMixerStats.toUtf8().constData(), color);
stringstream downloads;
downloads << "Downloads: ";
foreach (Resource* resource, ResourceCache::getLoadingRequests()) {
downloads << (int)(resource->getProgress() * 100.0f) << "% ";
}
downloads << "(" << ResourceCache::getPendingRequestCount() << " pending)";
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, downloads.str().c_str(), color);
}
verticalOffset = STATS_PELS_INITIALOFFSET;
horizontalOffset = _lastHorizontalOffset + _generalStatsWidth + _pingStatsWidth + _geoStatsWidth + 3;
lines = _expanded ? 10 : 3;
drawBackground(backgroundColor, horizontalOffset, 0, canvasSize.x - horizontalOffset,
(lines + 1) * STATS_PELS_PER_LINE);
horizontalOffset += 5;
// Model/Entity render details
octreeStats.str("");
octreeStats << "Triangles: " << _renderDetails._trianglesRendered
<< " / Quads:" << _renderDetails._quadsRendered
<< " / Material Switches:" << _renderDetails._materialSwitches;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
if (_expanded) {
octreeStats.str("");
octreeStats << " Mesh Parts Rendered Opaque: " << _renderDetails._opaque._rendered
<< " / Translucent:" << _renderDetails._translucent._rendered;
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
octreeStats.str("");
octreeStats << " Opaque considered: " << _renderDetails._opaque._considered
<< " / Out of view:" << _renderDetails._opaque._outOfView
<< " / Too small:" << _renderDetails._opaque._tooSmall;
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
octreeStats.str("");
octreeStats << " Translucent considered: " << _renderDetails._translucent._considered
<< " / Out of view:" << _renderDetails._translucent._outOfView
<< " / Too small:" << _renderDetails._translucent._tooSmall;
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
}
// iterate all the current voxel stats, and list their sending modes, and total voxel counts
std::stringstream sendingMode("");
sendingMode << "Octree Sending Mode: [";
int serverCount = 0;
int movingServerCount = 0;
unsigned long totalNodes = 0;
unsigned long totalInternal = 0;
unsigned long totalLeaves = 0;
NodeToOctreeSceneStats* octreeServerSceneStats = Application::getInstance()->getOcteeSceneStats();
for(NodeToOctreeSceneStatsIterator i = octreeServerSceneStats->begin(); i != octreeServerSceneStats->end(); i++) {
//const QUuid& uuid = i->first;
OctreeSceneStats& stats = i->second;
serverCount++;
if (_expanded) {
if (serverCount > 1) {
sendingMode << ",";
}
if (stats.isMoving()) {
sendingMode << "M";
movingServerCount++;
} else {
sendingMode << "S";
}
}
// calculate server node totals
totalNodes += stats.getTotalElements();
if (_expanded) {
totalInternal += stats.getTotalInternal();
totalLeaves += stats.getTotalLeaves();
}
}
if (_expanded) {
if (serverCount == 0) {
sendingMode << "---";
}
sendingMode << "] " << serverCount << " servers";
if (movingServerCount > 0) {
sendingMode << " <SCENE NOT STABLE>";
} else {
sendingMode << " <SCENE STABLE>";
}
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)sendingMode.str().c_str(), color);
}
// Incoming packets
if (_expanded) {
octreeStats.str("");
QString packetsString = locale.toString((int)voxelPacketsToProcess);
QString maxString = locale.toString((int)_recentMaxPackets);
octreeStats << "Octree Packets to Process: " << qPrintable(packetsString)
<< " [Recent Max: " << qPrintable(maxString) << "]";
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
}
if (_resetRecentMaxPacketsSoon && voxelPacketsToProcess > 0) {
_recentMaxPackets = 0;
_resetRecentMaxPacketsSoon = false;
}
if (voxelPacketsToProcess == 0) {
_resetRecentMaxPacketsSoon = true;
} else {
if (voxelPacketsToProcess > _recentMaxPackets) {
_recentMaxPackets = voxelPacketsToProcess;
}
}
QString serversTotalString = locale.toString((uint)totalNodes); // consider adding: .rightJustified(10, ' ');
unsigned long localTotal = OctreeElement::getNodeCount();
QString localTotalString = locale.toString((uint)localTotal); // consider adding: .rightJustified(10, ' ');
// Server Octree Elements
if (!_expanded) {
octreeStats.str("");
octreeStats << "Octree Elements Server: " << qPrintable(serversTotalString)
<< " Local:" << qPrintable(localTotalString);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
}
if (_expanded) {
octreeStats.str("");
octreeStats << "Octree Elements -";
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
QString serversInternalString = locale.toString((uint)totalInternal);
QString serversLeavesString = locale.toString((uint)totalLeaves);
octreeStats.str("");
octreeStats << " Server: " << qPrintable(serversTotalString) <<
" Internal: " << qPrintable(serversInternalString) <<
" Leaves: " << qPrintable(serversLeavesString);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
// Local Voxels
unsigned long localInternal = OctreeElement::getInternalNodeCount();
unsigned long localLeaves = OctreeElement::getLeafNodeCount();
QString localInternalString = locale.toString((uint)localInternal);
QString localLeavesString = locale.toString((uint)localLeaves);
octreeStats.str("");
octreeStats << " Local: " << qPrintable(serversTotalString) <<
" Internal: " << qPrintable(localInternalString) <<
" Leaves: " << qPrintable(localLeavesString) << "";
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
}
// LOD Details
octreeStats.str("");
QString displayLODDetails = DependencyManager::get<LODManager>()->getLODFeedbackText();
octreeStats << "LOD: You can see " << qPrintable(displayLODDetails.trimmed());
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
}
void Stats::setRenderDetails(const RenderDetails& details) {
STAT_UPDATE(triangles, details._trianglesRendered);
STAT_UPDATE(quads, details._quadsRendered);
STAT_UPDATE(materialSwitches, details._materialSwitches);
if (_expanded) {
STAT_UPDATE(meshOpaque, details._opaque._rendered);
STAT_UPDATE(meshTranslucent, details._translucent._rendered);
STAT_UPDATE(opaqueConsidered, details._opaque._considered);
STAT_UPDATE(opaqueOutOfView, details._opaque._outOfView);
STAT_UPDATE(opaqueTooSmall, details._opaque._tooSmall);
STAT_UPDATE(translucentConsidered, details._translucent._considered);
STAT_UPDATE(translucentOutOfView, details._translucent._outOfView);
STAT_UPDATE(translucentTooSmall, details._translucent._tooSmall);
}
}
/*
// display expanded or contracted stats
void Stats::display(
int voxelPacketsToProcess)
{
// iterate all the current voxel stats, and list their sending modes, and total voxel counts
}
*/
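Aside: the octree packet bookkeeping above (the _recentMaxPackets / _resetRecentMaxPacketsSoon pair) only clears the recorded maximum once the incoming queue has fully drained and then started filling again. A minimal standalone sketch of that pattern, with hypothetical names and no Qt or interface dependencies:

#include <algorithm>
#include <iostream>

// Illustrative only -- not the interface code. Tracks the largest queue depth
// seen since the queue last drained to zero.
struct RecentMaxTracker {
    int recentMax = 0;
    bool resetSoon = true;

    void update(int pending) {
        if (resetSoon && pending > 0) {
            // queue drained earlier and is filling again: start a new "recent" window
            recentMax = 0;
            resetSoon = false;
        }
        if (pending == 0) {
            resetSoon = true;                       // arm the reset for the next burst
        } else {
            recentMax = std::max(recentMax, pending);
        }
    }
};

int main() {
    RecentMaxTracker tracker;
    const int samples[] = {3, 7, 2, 0, 4, 1};
    for (int pending : samples) {
        tracker.update(pending);
        std::cout << "pending=" << pending << " recentMax=" << tracker.recentMax << "\n";
    }
}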

View file

@ -1,8 +1,5 @@
//
// Stats.h
// interface/src/ui
//
// Created by Lucas Crisman on 22/03/14.
// Created by Bradley Austin Davis 2015/06/17
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
@ -13,47 +10,139 @@
#define hifi_Stats_h
#include <QObject>
#include <QQuickItem>
#include <QVector3D>
#include <OffscreenUi.h>
#include <RenderArgs.h>
class Stats: public QObject {
#define STATS_PROPERTY(type, name, initialValue) \
Q_PROPERTY(type name READ name NOTIFY name##Changed) \
public: \
type name() { return _##name; }; \
private: \
type _##name{ initialValue };
class Stats : public QQuickItem {
Q_OBJECT
HIFI_QML_DECL
Q_PROPERTY(bool expanded READ isExpanded WRITE setExpanded NOTIFY expandedChanged)
Q_PROPERTY(bool timingExpanded READ isTimingExpanded NOTIFY timingExpandedChanged)
Q_PROPERTY(QString monospaceFont READ monospaceFont CONSTANT)
STATS_PROPERTY(int, serverCount, 0)
STATS_PROPERTY(int, framerate, 0)
STATS_PROPERTY(int, avatarCount, 0)
STATS_PROPERTY(int, packetInCount, 0)
STATS_PROPERTY(int, packetOutCount, 0)
STATS_PROPERTY(float, mbpsIn, 0)
STATS_PROPERTY(float, mbpsOut, 0)
STATS_PROPERTY(int, audioPing, 0)
STATS_PROPERTY(int, avatarPing, 0)
STATS_PROPERTY(int, entitiesPing, 0)
STATS_PROPERTY(QVector3D, position, QVector3D(0, 0, 0))
STATS_PROPERTY(float, velocity, 0)
STATS_PROPERTY(float, yaw, 0)
STATS_PROPERTY(int, avatarMixerKbps, 0)
STATS_PROPERTY(int, avatarMixerPps, 0)
STATS_PROPERTY(int, audioMixerKbps, 0)
STATS_PROPERTY(int, audioMixerPps, 0)
STATS_PROPERTY(int, downloads, 0)
STATS_PROPERTY(int, downloadsPending, 0)
STATS_PROPERTY(int, triangles, 0)
STATS_PROPERTY(int, quads, 0)
STATS_PROPERTY(int, materialSwitches, 0)
STATS_PROPERTY(int, meshOpaque, 0)
STATS_PROPERTY(int, meshTranslucent, 0)
STATS_PROPERTY(int, opaqueConsidered, 0)
STATS_PROPERTY(int, opaqueOutOfView, 0)
STATS_PROPERTY(int, opaqueTooSmall, 0)
STATS_PROPERTY(int, translucentConsidered, 0)
STATS_PROPERTY(int, translucentOutOfView, 0)
STATS_PROPERTY(int, translucentTooSmall, 0)
STATS_PROPERTY(QString, sendingMode, QString())
STATS_PROPERTY(QString, packetStats, QString())
STATS_PROPERTY(QString, lodStatus, QString())
STATS_PROPERTY(QString, timingStats, QString())
STATS_PROPERTY(int, serverElements, 0)
STATS_PROPERTY(int, serverInternal, 0)
STATS_PROPERTY(int, serverLeaves, 0)
STATS_PROPERTY(int, localElements, 0)
STATS_PROPERTY(int, localInternal, 0)
STATS_PROPERTY(int, localLeaves, 0)
public:
static Stats* getInstance();
Stats();
static void drawBackground(unsigned int rgba, int x, int y, int width, int height);
void toggleExpanded();
bool isExpanded() { return _expanded; }
void checkClick(int mouseX, int mouseY, int mouseDragStartedX, int mouseDragStartedY, int horizontalOffset);
void resetWidth(int width, int horizontalOffset);
void display(const float* color, int horizontalOffset, float fps, int inPacketsPerSecond, int outPacketsPerSecond,
int inKbitsPerSecond, int outKbitsPerSecond, int voxelPacketsToProcess);
Stats(QQuickItem* parent = nullptr);
bool includeTimingRecord(const QString& name);
void setRenderDetails(const RenderDetails& details) { _renderDetails = details; }
void setRenderDetails(const RenderDetails& details);
const QString& monospaceFont() {
return _monospaceFont;
}
void updateStats();
bool isExpanded() { return _expanded; }
bool isTimingExpanded() { return _timingExpanded; }
void setExpanded(bool expanded) {
if (_expanded != expanded) {
_expanded = expanded;
emit expandedChanged();
}
}
signals:
void expandedChanged();
void timingExpandedChanged();
void serverCountChanged();
void framerateChanged();
void avatarCountChanged();
void packetInCountChanged();
void packetOutCountChanged();
void mbpsInChanged();
void mbpsOutChanged();
void audioPingChanged();
void avatarPingChanged();
void entitiesPingChanged();
void positionChanged();
void velocityChanged();
void yawChanged();
void avatarMixerKbpsChanged();
void avatarMixerPpsChanged();
void audioMixerKbpsChanged();
void audioMixerPpsChanged();
void downloadsChanged();
void downloadsPendingChanged();
void trianglesChanged();
void quadsChanged();
void materialSwitchesChanged();
void meshOpaqueChanged();
void meshTranslucentChanged();
void opaqueConsideredChanged();
void opaqueOutOfViewChanged();
void opaqueTooSmallChanged();
void translucentConsideredChanged();
void translucentOutOfViewChanged();
void translucentTooSmallChanged();
void sendingModeChanged();
void packetStatsChanged();
void lodStatusChanged();
void serverElementsChanged();
void serverInternalChanged();
void serverLeavesChanged();
void localElementsChanged();
void localInternalChanged();
void localLeavesChanged();
void timingStatsChanged();
private:
static Stats* _sharedInstance;
bool _expanded;
int _recentMaxPackets; // recent max incoming voxel packets to process
bool _resetRecentMaxPacketsSoon;
int _generalStatsWidth;
int _bandwidthStatsWidth;
int _pingStatsWidth;
int _geoStatsWidth;
int _octreeStatsWidth;
int _lastHorizontalOffset;
RenderDetails _renderDetails;
int _recentMaxPackets{ 0 }; // recent max incoming voxel packets to process
bool _resetRecentMaxPacketsSoon{ true };
bool _expanded{ false };
bool _timingExpanded{ false };
QString _monospaceFont;
};
#endif // hifi_Stats_h
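Aside: the STATS_PROPERTY macro above stamps out a Q_PROPERTY, a read accessor, and a backing field for every statistic so the QML overlay can bind to them. The real macro depends on Qt's moc (Q_PROPERTY / NOTIFY signals); as a rough, framework-free sketch of the same boilerplate-reduction idea, with a plain callback standing in for the NOTIFY signal (all names here are illustrative, not the interface API):

#include <functional>
#include <iostream>
#include <string>

// Illustrative stand-in for the Qt-based STATS_PROPERTY macro: declares a
// backing field, a getter, and a setter that fires a change callback.
#define SKETCH_PROPERTY(type, name, initialValue)                       \
public:                                                                 \
    type name() const { return _##name; }                               \
    void set_##name(type value) {                                       \
        if (_##name != value) { _##name = value; onChanged(#name); }    \
    }                                                                   \
private:                                                                \
    type _##name{ initialValue };

class StatsSketch {
    SKETCH_PROPERTY(int, framerate, 0)
    SKETCH_PROPERTY(int, avatarCount, 0)
public:
    std::function<void(const std::string&)> onChanged = [](const std::string&) {};
};

int main() {
    StatsSketch stats;
    stats.onChanged = [](const std::string& name) { std::cout << name << " changed\n"; };
    stats.set_framerate(60);                       // prints "framerate changed"
    std::cout << stats.framerate() << "\n";        // 60
}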

View file

@ -8,9 +8,12 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "UserInputMapper.h"
#include <algorithm>
#include "Application.h"
#include "UserInputMapper.h"
// UserInputMapper Class
@ -207,6 +210,9 @@ void UserInputMapper::update(float deltaTime) {
// Scale all the channel step with the scale
for (auto i = 0; i < NUM_ACTIONS; i++) {
_actionStates[i] *= _actionScales[i];
if (_actionStates[i] > 0) {
emit Application::getInstance()->getControllerScriptingInterface()->actionEvent(i, _actionStates[i]);
}
}
}
@ -241,6 +247,9 @@ void UserInputMapper::assignDefaulActionScales() {
_actionScales[PITCH_UP] = 1.0f; // 1 degree per unit
_actionScales[BOOM_IN] = 1.0f; // 1m per unit
_actionScales[BOOM_OUT] = 1.0f; // 1m per unit
_actionStates[SHIFT] = 1.0f; // on
_actionStates[ACTION1] = 1.0f; // default
_actionStates[ACTION2] = 1.0f; // default
}
// This is only necessary as long as the actions are hardcoded
@ -258,4 +267,7 @@ void UserInputMapper::createActionNames() {
_actionNames[PITCH_UP] = "PITCH_UP";
_actionNames[BOOM_IN] = "BOOM_IN";
_actionNames[BOOM_OUT] = "BOOM_OUT";
_actionNames[SHIFT] = "SHIFT";
_actionNames[ACTION1] = "ACTION1";
_actionNames[ACTION2] = "ACTION2";
}
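Aside: in the update() hunk above, each accumulated action value is multiplied by its per-action scale and, when non-zero, surfaced as an event to the scripting layer. A bare-bones sketch of that scale-then-notify loop, with a std::function standing in for the Qt signal and hypothetical action indices:

#include <array>
#include <functional>
#include <iostream>

// Illustrative only: mirrors "state *= scale; if (state > 0) emit event".
constexpr int NUM_ACTIONS = 3;

struct ActionMapperSketch {
    std::array<float, NUM_ACTIONS> actionStates{};   // accumulated raw input per action
    std::array<float, NUM_ACTIONS> actionScales{};   // per-action scale factors
    std::function<void(int, float)> actionEvent = [](int, float) {};

    void update() {
        for (int i = 0; i < NUM_ACTIONS; i++) {
            actionStates[i] *= actionScales[i];
            if (actionStates[i] > 0.0f) {
                actionEvent(i, actionStates[i]);     // notify scripts of the scaled value
            }
        }
    }
};

int main() {
    ActionMapperSketch mapper;
    mapper.actionScales = {1.0f, 0.5f, 2.0f};
    mapper.actionStates = {0.0f, 1.0f, 0.25f};
    mapper.actionEvent = [](int action, float value) {
        std::cout << "action " << action << " -> " << value << "\n";
    };
    mapper.update();   // reports actions 1 and 2, each with value 0.5
}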

View file

@ -139,6 +139,11 @@ public:
BOOM_IN,
BOOM_OUT,
SHIFT,
ACTION1,
ACTION2,
NUM_ACTIONS,
};

View file

@ -16,15 +16,12 @@
#include "Application.h"
#include "Base3DOverlay.h"
const glm::vec3 DEFAULT_POSITION = glm::vec3(0.0f, 0.0f, 0.0f);
const float DEFAULT_LINE_WIDTH = 1.0f;
const bool DEFAULT_IS_SOLID = false;
const bool DEFAULT_IS_DASHED_LINE = false;
Base3DOverlay::Base3DOverlay() :
_position(DEFAULT_POSITION),
_lineWidth(DEFAULT_LINE_WIDTH),
_rotation(),
_isSolid(DEFAULT_IS_SOLID),
_isDashedLine(DEFAULT_IS_DASHED_LINE),
_ignoreRayIntersection(false),
@ -35,9 +32,8 @@ Base3DOverlay::Base3DOverlay() :
Base3DOverlay::Base3DOverlay(const Base3DOverlay* base3DOverlay) :
Overlay(base3DOverlay),
_position(base3DOverlay->_position),
_transform(base3DOverlay->_transform),
_lineWidth(base3DOverlay->_lineWidth),
_rotation(base3DOverlay->_rotation),
_isSolid(base3DOverlay->_isSolid),
_isDashedLine(base3DOverlay->_isDashedLine),
_ignoreRayIntersection(base3DOverlay->_ignoreRayIntersection),
@ -46,14 +42,6 @@ Base3DOverlay::Base3DOverlay(const Base3DOverlay* base3DOverlay) :
{
}
Base3DOverlay::~Base3DOverlay() {
}
// TODO: Implement accurate getBounds() implementations
AABox Base3DOverlay::getBounds() const {
return AABox(_position, glm::vec3(1.0f));
}
void Base3DOverlay::setProperties(const QScriptValue& properties) {
Overlay::setProperties(properties);
@ -151,13 +139,13 @@ void Base3DOverlay::setProperties(const QScriptValue& properties) {
QScriptValue Base3DOverlay::getProperty(const QString& property) {
if (property == "position" || property == "start" || property == "p1" || property == "point") {
return vec3toScriptValue(_scriptEngine, _position);
return vec3toScriptValue(_scriptEngine, getPosition());
}
if (property == "lineWidth") {
return _lineWidth;
}
if (property == "rotation") {
return quatToScriptValue(_scriptEngine, _rotation);
return quatToScriptValue(_scriptEngine, getRotation());
}
if (property == "isSolid" || property == "isFilled" || property == "solid" || property == "filed") {
return _isSolid;

View file

@ -11,10 +11,7 @@
#ifndef hifi_Base3DOverlay_h
#define hifi_Base3DOverlay_h
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <BoxBase.h>
#include <Transform.h>
#include "Overlay.h"
@ -24,32 +21,38 @@ class Base3DOverlay : public Overlay {
public:
Base3DOverlay();
Base3DOverlay(const Base3DOverlay* base3DOverlay);
~Base3DOverlay();
// getters
virtual bool is3D() const { return true; }
const glm::vec3& getPosition() const { return _position; }
const glm::vec3& getCenter() const { return _position; } // TODO: consider implementing registration points in this class
const glm::vec3& getPosition() const { return _transform.getTranslation(); }
const glm::quat& getRotation() const { return _transform.getRotation(); }
const glm::vec3& getScale() const { return _transform.getScale(); }
// TODO: consider implementing registration points in this class
const glm::vec3& getCenter() const { return getPosition(); }
float getLineWidth() const { return _lineWidth; }
bool getIsSolid() const { return _isSolid; }
bool getIsDashedLine() const { return _isDashedLine; }
bool getIsSolidLine() const { return !_isDashedLine; }
const glm::quat& getRotation() const { return _rotation; }
bool getIgnoreRayIntersection() const { return _ignoreRayIntersection; }
bool getDrawInFront() const { return _drawInFront; }
bool getDrawOnHUD() const { return _drawOnHUD; }
// setters
void setPosition(const glm::vec3& position) { _position = position; }
void setPosition(const glm::vec3& value) { _transform.setTranslation(value); }
void setRotation(const glm::quat& value) { _transform.setRotation(value); }
void setScale(float value) { _transform.setScale(value); }
void setScale(const glm::vec3& value) { _transform.setScale(value); }
void setLineWidth(float lineWidth) { _lineWidth = lineWidth; }
void setIsSolid(bool isSolid) { _isSolid = isSolid; }
void setIsDashedLine(bool isDashedLine) { _isDashedLine = isDashedLine; }
void setRotation(const glm::quat& value) { _rotation = value; }
void setIgnoreRayIntersection(bool value) { _ignoreRayIntersection = value; }
void setDrawInFront(bool value) { _drawInFront = value; }
void setDrawOnHUD(bool value) { _drawOnHUD = value; }
virtual AABox getBounds() const;
virtual AABox getBounds() const = 0;
virtual void setProperties(const QScriptValue& properties);
virtual QScriptValue getProperty(const QString& property);
@ -62,9 +65,9 @@ public:
}
protected:
glm::vec3 _position;
Transform _transform;
float _lineWidth;
glm::quat _rotation;
bool _isSolid;
bool _isDashedLine;
bool _ignoreRayIntersection;
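Aside: a recurring change across the overlay classes in this diff is collapsing the separate _position/_rotation (and per-class scale) members into a single _transform, with the old getters and setters becoming thin wrappers. A self-contained sketch of that shape, assuming GLM is available and using a hypothetical MiniTransform rather than the engine's Transform class:

// Sketch only -- MiniTransform is a stand-in, not the library's real API.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

struct MiniTransform {
    glm::vec3 translation = glm::vec3(0.0f);
    glm::quat rotation = glm::quat(1.0f, 0.0f, 0.0f, 0.0f);   // identity (w, x, y, z)
    glm::vec3 scale = glm::vec3(1.0f);
};

class OverlaySketch {
public:
    // getters delegate to the single transform instead of separate members
    const glm::vec3& getPosition() const { return _transform.translation; }
    const glm::quat& getRotation() const { return _transform.rotation; }
    const glm::vec3& getScale() const    { return _transform.scale; }

    // setters likewise write through to the transform
    void setPosition(const glm::vec3& value) { _transform.translation = value; }
    void setRotation(const glm::quat& value) { _transform.rotation = value; }
    void setScale(float value)               { _transform.scale = glm::vec3(value); }
    void setScale(const glm::vec3& value)    { _transform.scale = value; }

protected:
    MiniTransform _transform;
};

int main() {
    OverlaySketch overlay;
    overlay.setPosition({1.0f, 2.0f, 3.0f});
    overlay.setScale(0.5f);
    return overlay.getPosition().x > 0.0f ? 0 : 1;
}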

View file

@ -9,26 +9,20 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Application.h"
#include "GeometryUtil.h"
#include "PlaneShape.h"
#include "BillboardOverlay.h"
BillboardOverlay::BillboardOverlay() :
_fromImage(),
_scale(1.0f),
_isFacingAvatar(true)
{
#include "Application.h"
#include "GeometryUtil.h"
BillboardOverlay::BillboardOverlay() {
_isLoaded = false;
}
BillboardOverlay::BillboardOverlay(const BillboardOverlay* billboardOverlay) :
Base3DOverlay(billboardOverlay),
Planar3DOverlay(billboardOverlay),
_url(billboardOverlay->_url),
_texture(billboardOverlay->_texture),
_fromImage(billboardOverlay->_fromImage),
_scale(billboardOverlay->_scale),
_isFacingAvatar(billboardOverlay->_isFacingAvatar)
{
}
@ -46,8 +40,8 @@ void BillboardOverlay::render(RenderArgs* args) {
glm::quat rotation;
if (_isFacingAvatar) {
// rotate about vertical to face the camera
rotation = Application::getInstance()->getCamera()->getRotation();
rotation *= glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));
rotation = args->_viewFrustum->getOrientation();
rotation *= glm::angleAxis(glm::pi<float>(), IDENTITY_UP);
rotation *= getRotation();
} else {
rotation = getRotation();
@ -89,11 +83,9 @@ void BillboardOverlay::render(RenderArgs* args) {
auto batch = args->_batch;
if (batch) {
Transform transform;
transform.setTranslation(_position);
transform.setRotation(rotation);
transform.setScale(_scale);
Transform transform = _transform;
transform.postScale(glm::vec3(getDimensions(), 1.0f));
batch->setModelTransform(transform);
batch->setUniformTexture(0, _texture->getGPUTexture());
@ -111,10 +103,10 @@ void BillboardOverlay::render(RenderArgs* args) {
glBindTexture(GL_TEXTURE_2D, _texture->getID());
glPushMatrix(); {
glTranslatef(_position.x, _position.y, _position.z);
glTranslatef(getPosition().x, getPosition().y, getPosition().z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glScalef(_scale, _scale, _scale);
glScalef(_dimensions.x, _dimensions.y, 1.0f);
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight,
glm::vec4(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha));
@ -130,7 +122,7 @@ void BillboardOverlay::render(RenderArgs* args) {
}
void BillboardOverlay::setProperties(const QScriptValue &properties) {
Base3DOverlay::setProperties(properties);
Planar3DOverlay::setProperties(properties);
QScriptValue urlValue = properties.property("url");
if (urlValue.isValid()) {
@ -171,11 +163,6 @@ void BillboardOverlay::setProperties(const QScriptValue &properties) {
}
}
QScriptValue scaleValue = properties.property("scale");
if (scaleValue.isValid()) {
_scale = scaleValue.toVariant().toFloat();
}
QScriptValue isFacingAvatarValue = properties.property("isFacingAvatar");
if (isFacingAvatarValue.isValid()) {
_isFacingAvatar = isFacingAvatarValue.toVariant().toBool();
@ -189,14 +176,11 @@ QScriptValue BillboardOverlay::getProperty(const QString& property) {
if (property == "subImage") {
return qRectToScriptValue(_scriptEngine, _fromImage);
}
if (property == "scale") {
return _scale;
}
if (property == "isFacingAvatar") {
return _isFacingAvatar;
}
return Base3DOverlay::getProperty(property);
return Planar3DOverlay::getProperty(property);
}
void BillboardOverlay::setURL(const QString& url) {
@ -212,13 +196,11 @@ bool BillboardOverlay::findRayIntersection(const glm::vec3& origin, const glm::v
float& distance, BoxFace& face) {
if (_texture) {
glm::quat rotation;
glm::quat rotation = getRotation();
if (_isFacingAvatar) {
// rotate about vertical to face the camera
rotation = Application::getInstance()->getCamera()->getRotation();
rotation *= glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));
} else {
rotation = _rotation;
}
// Produce the dimensions of the billboard based on the image's aspect ratio and the overlay's scale.
@ -226,9 +208,9 @@ bool BillboardOverlay::findRayIntersection(const glm::vec3& origin, const glm::v
float width = isNull ? _texture->getWidth() : _fromImage.width();
float height = isNull ? _texture->getHeight() : _fromImage.height();
float maxSize = glm::max(width, height);
glm::vec2 dimensions = _scale * glm::vec2(width / maxSize, height / maxSize);
glm::vec2 dimensions = _dimensions * glm::vec2(width / maxSize, height / maxSize);
return findRayRectangleIntersection(origin, direction, rotation, _position, dimensions, distance);
return findRayRectangleIntersection(origin, direction, rotation, getPosition(), dimensions, distance);
}
return false;
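Aside: the render() and findRayIntersection() hunks above both build the billboard's orientation by taking the viewer's orientation, flipping it 180 degrees about the vertical axis so the quad's front faces the camera, and then applying the overlay's own rotation. A small GLM-only sketch of that composition (IDENTITY_UP is assumed to be +Y; the inputs are hypothetical):

// Sketch only -- assumes GLM; cameraOrientation/localRotation are illustrative inputs.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtc/constants.hpp>

glm::quat billboardRotation(const glm::quat& cameraOrientation,
                            const glm::quat& localRotation,
                            bool facingAvatar) {
    if (!facingAvatar) {
        return localRotation;                           // plain fixed orientation
    }
    const glm::vec3 up(0.0f, 1.0f, 0.0f);               // stand-in for IDENTITY_UP
    glm::quat rotation = cameraOrientation;
    rotation *= glm::angleAxis(glm::pi<float>(), up);   // turn the quad to face the viewer
    rotation *= localRotation;                          // keep any authored offset
    return rotation;
}

int main() {
    glm::quat camera = glm::angleAxis(glm::radians(30.0f), glm::vec3(0.0f, 1.0f, 0.0f));
    glm::quat facing = billboardRotation(camera, glm::quat(1.0f, 0.0f, 0.0f, 0.0f), true);
    return facing.w < 1.0f ? 0 : 1;   // rotated away from identity
}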

View file

@ -12,14 +12,11 @@
#ifndef hifi_BillboardOverlay_h
#define hifi_BillboardOverlay_h
#include <QScopedPointer>
#include <QUrl>
#include <TextureCache.h>
#include "Base3DOverlay.h"
#include "Planar3DOverlay.h"
class BillboardOverlay : public Base3DOverlay {
class BillboardOverlay : public Planar3DOverlay {
Q_OBJECT
public:
BillboardOverlay();
@ -29,7 +26,6 @@ public:
// setters
void setURL(const QString& url);
void setScale(float scale) { _scale = scale; }
void setIsFacingAvatar(bool isFacingAvatar) { _isFacingAvatar = isFacingAvatar; }
virtual void setProperties(const QScriptValue& properties);
@ -48,8 +44,7 @@ private:
QRect _fromImage; // where from in the image to sample
float _scale;
bool _isFacingAvatar;
bool _isFacingAvatar = true;
};
#endif // hifi_BillboardOverlay_h

View file

@ -8,16 +8,12 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include "Circle3DOverlay.h"
#include <DeferredLightingEffect.h>
#include <GeometryCache.h>
#include <GlowEffect.h>
#include <SharedUtil.h>
#include <StreamUtils.h>
#include <RegisteredMetaTypes.h>
#include "Circle3DOverlay.h"
Circle3DOverlay::Circle3DOverlay() :
_startAt(0.0f),
@ -66,9 +62,6 @@ Circle3DOverlay::Circle3DOverlay(const Circle3DOverlay* circle3DOverlay) :
{
}
Circle3DOverlay::~Circle3DOverlay() {
}
void Circle3DOverlay::render(RenderArgs* args) {
if (!_visible) {
return; // do nothing if we're not visible
@ -103,15 +96,13 @@ void Circle3DOverlay::render(RenderArgs* args) {
_lastColor = colorX;
auto geometryCache = DependencyManager::get<GeometryCache>();
Transform transform;
transform.setTranslation(getCenter());
transform.setRotation(getRotation());
transform.setScale(glm::vec3(getDimensions(), 0.01f));
Q_ASSERT(args->_batch);
auto& batch = *args->_batch;
batch._glLineWidth(_lineWidth);
auto transform = _transform;
transform.postScale(glm::vec3(getDimensions(), 1.0f));
batch.setModelTransform(transform);
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, false, false);
@ -402,12 +393,12 @@ bool Circle3DOverlay::findRayIntersection(const glm::vec3& origin,
if (intersects) {
glm::vec3 hitPosition = origin + (distance * direction);
glm::vec3 localHitPosition = glm::inverse(_rotation) * (hitPosition - _position);
localHitPosition.y = localHitPosition.y * _dimensions.x / _dimensions.y; // Scale to make circular
glm::vec3 localHitPosition = glm::inverse(getRotation()) * (hitPosition - getPosition());
localHitPosition.y = localHitPosition.y * getDimensions().x / getDimensions().y; // Scale to make circular
float distanceToHit = glm::length(localHitPosition);
float innerRadius = _dimensions.x / 2.0f * _innerRadius;
float outerRadius = _dimensions.x / 2.0f * _outerRadius;
float innerRadius = getDimensions().x / 2.0f * _innerRadius;
float outerRadius = getDimensions().x / 2.0f * _outerRadius;
intersects = innerRadius <= distanceToHit && distanceToHit <= outerRadius;
}
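Aside: the findRayIntersection() change above tests the plane hit point by moving it into the overlay's local frame, stretching the Y coordinate so an elliptical circle overlay is evaluated as round, and then comparing the distance from the centre against the inner and outer radii. A standalone GLM sketch of that containment test (names and the sample hit point are illustrative):

// Sketch only -- assumes GLM; mirrors the inner/outer radius test above.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

bool hitsRing(const glm::vec3& hitPosition,
              const glm::vec3& overlayPosition, const glm::quat& overlayRotation,
              const glm::vec2& dimensions, float innerFraction, float outerFraction) {
    // move the hit into the overlay's local frame
    glm::vec3 local = glm::inverse(overlayRotation) * (hitPosition - overlayPosition);
    // scale Y so an ellipse is evaluated as a circle
    local.y *= dimensions.x / dimensions.y;
    float distanceToHit = glm::length(local);
    float innerRadius = dimensions.x * 0.5f * innerFraction;
    float outerRadius = dimensions.x * 0.5f * outerFraction;
    return innerRadius <= distanceToHit && distanceToHit <= outerRadius;
}

int main() {
    bool hit = hitsRing({0.4f, 0.0f, 0.0f}, {0.0f, 0.0f, 0.0f},
                        glm::quat(1.0f, 0.0f, 0.0f, 0.0f), {1.0f, 1.0f}, 0.5f, 1.0f);
    return hit ? 0 : 1;   // 0.4 lies between inner radius 0.25 and outer radius 0.5
}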

View file

@ -11,6 +11,9 @@
#ifndef hifi_Circle3DOverlay_h
#define hifi_Circle3DOverlay_h
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include "Planar3DOverlay.h"
class Circle3DOverlay : public Planar3DOverlay {
@ -19,7 +22,7 @@ class Circle3DOverlay : public Planar3DOverlay {
public:
Circle3DOverlay();
Circle3DOverlay(const Circle3DOverlay* circle3DOverlay);
~Circle3DOverlay();
virtual void render(RenderArgs* args);
virtual void setProperties(const QScriptValue& properties);
virtual QScriptValue getProperty(const QString& property);

View file

@ -19,17 +19,11 @@
#include "Application.h"
#include "Cube3DOverlay.h"
Cube3DOverlay::Cube3DOverlay() : _borderSize(0) {
}
Cube3DOverlay::Cube3DOverlay(const Cube3DOverlay* cube3DOverlay) :
Volume3DOverlay(cube3DOverlay)
{
}
Cube3DOverlay::~Cube3DOverlay() {
}
void Cube3DOverlay::render(RenderArgs* args) {
if (!_visible) {
return; // do nothing if we're not visible

View file

@ -17,9 +17,9 @@ class Cube3DOverlay : public Volume3DOverlay {
Q_OBJECT
public:
Cube3DOverlay();
Cube3DOverlay() {}
Cube3DOverlay(const Cube3DOverlay* cube3DOverlay);
~Cube3DOverlay();
virtual void render(RenderArgs* args);
virtual Cube3DOverlay* createClone() const;

View file

@ -9,28 +9,29 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include "Grid3DOverlay.h"
#include <PathUtils.h>
#include "Application.h"
#include "Grid3DOverlay.h"
ProgramObject Grid3DOverlay::_gridProgram;
Grid3DOverlay::Grid3DOverlay() : Base3DOverlay(),
Grid3DOverlay::Grid3DOverlay() :
_minorGridWidth(1.0),
_majorGridEvery(5) {
}
Grid3DOverlay::Grid3DOverlay(const Grid3DOverlay* grid3DOverlay) :
Base3DOverlay(grid3DOverlay),
Planar3DOverlay(grid3DOverlay),
_minorGridWidth(grid3DOverlay->_minorGridWidth),
_majorGridEvery(grid3DOverlay->_majorGridEvery)
{
}
Grid3DOverlay::~Grid3DOverlay() {
}
void Grid3DOverlay::render(RenderArgs* args) {
if (!_visible) {
return; // do nothing if we're not visible
@ -41,7 +42,7 @@ void Grid3DOverlay::render(RenderArgs* args) {
const float MAX_COLOR = 255.0f;
// center the grid around the camera position on the plane
glm::vec3 rotated = glm::inverse(_rotation) * Application::getInstance()->getCamera()->getPosition();
glm::vec3 rotated = glm::inverse(getRotation()) * Application::getInstance()->getCamera()->getPosition();
float spacing = _minorGridWidth;
@ -53,7 +54,7 @@ void Grid3DOverlay::render(RenderArgs* args) {
if (batch) {
Transform transform;
transform.setRotation(_rotation);
transform.setRotation(getRotation());
// Minor grid
@ -61,7 +62,7 @@ void Grid3DOverlay::render(RenderArgs* args) {
batch->_glLineWidth(1.0f);
auto position = glm::vec3(_minorGridWidth * (floorf(rotated.x / spacing) - MINOR_GRID_DIVISIONS / 2),
spacing * (floorf(rotated.y / spacing) - MINOR_GRID_DIVISIONS / 2),
_position.z);
getPosition().z);
float scale = MINOR_GRID_DIVISIONS * spacing;
transform.setTranslation(position);
@ -78,7 +79,7 @@ void Grid3DOverlay::render(RenderArgs* args) {
spacing *= _majorGridEvery;
auto position = glm::vec3(spacing * (floorf(rotated.x / spacing) - MAJOR_GRID_DIVISIONS / 2),
spacing * (floorf(rotated.y / spacing) - MAJOR_GRID_DIVISIONS / 2),
_position.z);
getPosition().z);
float scale = MAJOR_GRID_DIVISIONS * spacing;
transform.setTranslation(position);
@ -161,7 +162,7 @@ void Grid3DOverlay::render(RenderArgs* args) {
}
void Grid3DOverlay::setProperties(const QScriptValue& properties) {
Base3DOverlay::setProperties(properties);
Planar3DOverlay::setProperties(properties);
if (properties.property("minorGridWidth").isValid()) {
_minorGridWidth = properties.property("minorGridWidth").toVariant().toFloat();
@ -180,7 +181,7 @@ QScriptValue Grid3DOverlay::getProperty(const QString& property) {
return _majorGridEvery;
}
return Base3DOverlay::getProperty(property);
return Planar3DOverlay::getProperty(property);
}
Grid3DOverlay* Grid3DOverlay::createClone() const {

View file

@ -15,20 +15,16 @@
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include <glm/glm.hpp>
#include <ProgramObject.h>
#include <SharedUtil.h>
#include "Base3DOverlay.h"
#include "Planar3DOverlay.h"
class Grid3DOverlay : public Base3DOverlay {
class Grid3DOverlay : public Planar3DOverlay {
Q_OBJECT
public:
Grid3DOverlay();
Grid3DOverlay(const Grid3DOverlay* grid3DOverlay);
~Grid3DOverlay();
virtual void render(RenderArgs* args);
virtual void setProperties(const QScriptValue& properties);

View file

@ -8,17 +8,11 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include <QPainter>
#include <QSvgRenderer>
#include "ImageOverlay.h"
#include <DependencyManager.h>
#include <GeometryCache.h>
#include <SharedUtil.h>
#include "ImageOverlay.h"
#include <RegisteredMetaTypes.h>
ImageOverlay::ImageOverlay() :
_imageURL(),
@ -38,9 +32,6 @@ ImageOverlay::ImageOverlay(const ImageOverlay* imageOverlay) :
{
}
ImageOverlay::~ImageOverlay() {
}
// TODO: handle setting image multiple times, how do we manage releasing the bound texture?
void ImageOverlay::setImageURL(const QUrl& url) {
_imageURL = url;

View file

@ -15,17 +15,11 @@
#include "InterfaceConfig.h"
#include <QImage>
#include <QNetworkReply>
#include <QRect>
#include <QScriptValue>
#include <QString>
#include <QUrl>
#include <NetworkAccessManager.h>
#include <SharedUtil.h>
#include <TextureCache.h>
#include "Overlay.h"
#include "Overlay2D.h"
class ImageOverlay : public Overlay2D {
@ -34,7 +28,7 @@ class ImageOverlay : public Overlay2D {
public:
ImageOverlay();
ImageOverlay(const ImageOverlay* imageOverlay);
~ImageOverlay();
virtual void render(RenderArgs* args);
// getters

View file

@ -13,6 +13,7 @@
#include <GlowEffect.h>
#include <GeometryCache.h>
#include <RegisteredMetaTypes.h>
#include "Line3DOverlay.h"
@ -33,13 +34,12 @@ Line3DOverlay::~Line3DOverlay() {
}
AABox Line3DOverlay::getBounds() const {
auto start = _position + _start;
auto end = _position + _end;
auto min = glm::min(start, end);
auto max = glm::max(start, end);
return AABox(min, max - min);
auto extents = Extents{};
extents.addPoint(_start);
extents.addPoint(_end);
extents.transform(_transform);
return AABox(extents);
}
void Line3DOverlay::render(RenderArgs* args) {
@ -55,14 +55,11 @@ void Line3DOverlay::render(RenderArgs* args) {
auto batch = args->_batch;
if (batch) {
Transform transform;
transform.setTranslation(_position);
transform.setRotation(_rotation);
batch->setModelTransform(transform);
batch->setModelTransform(_transform);
if (getIsDashedLine()) {
// TODO: add support for color to renderDashedLine()
DependencyManager::get<GeometryCache>()->renderDashedLine(*batch, _position, _end, colorv4, _geometryCacheID);
DependencyManager::get<GeometryCache>()->renderDashedLine(*batch, _start, _end, colorv4, _geometryCacheID);
} else {
DependencyManager::get<GeometryCache>()->renderLine(*batch, _start, _end, colorv4, _geometryCacheID);
}
@ -87,7 +84,7 @@ void Line3DOverlay::render(RenderArgs* args) {
if (getIsDashedLine()) {
// TODO: add support for color to renderDashedLine()
DependencyManager::get<GeometryCache>()->renderDashedLine(_position, _end, colorv4, _geometryCacheID);
DependencyManager::get<GeometryCache>()->renderDashedLine(_start, _end, colorv4, _geometryCacheID);
} else {
DependencyManager::get<GeometryCache>()->renderLine(_start, _end, colorv4, _geometryCacheID);
}
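Aside: the new getBounds() above derives the line's bounding box by adding both endpoints to an Extents, transforming it, and converting the result to an AABox. A GLM-only sketch of the same idea, treating the transform as translation plus rotation only (the engine's Extents/AABox types are not reproduced here):

// Sketch only -- assumes GLM; Box is a stand-in for AABox (corner + size).
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

struct Box {
    glm::vec3 corner;
    glm::vec3 size;
};

Box lineBounds(const glm::vec3& start, const glm::vec3& end,
               const glm::vec3& translation, const glm::quat& rotation) {
    // transform both endpoints into world space
    glm::vec3 a = translation + rotation * start;
    glm::vec3 b = translation + rotation * end;
    // axis-aligned box spanning the two transformed points
    glm::vec3 minPoint = glm::min(a, b);
    glm::vec3 maxPoint = glm::max(a, b);
    return { minPoint, maxPoint - minPoint };
}

int main() {
    Box box = lineBounds({0.0f, 0.0f, 0.0f}, {1.0f, 2.0f, 3.0f},
                         {10.0f, 0.0f, 0.0f}, glm::quat(1.0f, 0.0f, 0.0f, 0.0f));
    return box.size.y == 2.0f ? 0 : 1;
}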

View file

@ -21,7 +21,7 @@ public:
Line3DOverlay(const Line3DOverlay* line3DOverlay);
~Line3DOverlay();
virtual void render(RenderArgs* args);
virtual AABox getBounds() const override;
virtual AABox getBounds() const;
// getters
const glm::vec3& getStart() const { return _start; }

View file

@ -24,10 +24,6 @@ LocalModelsOverlay::LocalModelsOverlay(const LocalModelsOverlay* localModelsOver
Volume3DOverlay(localModelsOverlay),
_entityTreeRenderer(localModelsOverlay->_entityTreeRenderer)
{
}
LocalModelsOverlay::~LocalModelsOverlay() {
}
void LocalModelsOverlay::update(float deltatime) {
@ -46,7 +42,7 @@ void LocalModelsOverlay::render(RenderArgs* args) {
glPushMatrix(); {
Application* app = Application::getInstance();
glm::vec3 oldTranslation = app->getViewMatrixTranslation();
app->setViewMatrixTranslation(oldTranslation + _position);
app->setViewMatrixTranslation(oldTranslation + getPosition());
_entityTreeRenderer->render(args);
Application::getInstance()->setViewMatrixTranslation(oldTranslation);
} glPopMatrix();

View file

@ -21,7 +21,6 @@ class LocalModelsOverlay : public Volume3DOverlay {
public:
LocalModelsOverlay(EntityTreeRenderer* entityTreeRenderer);
LocalModelsOverlay(const LocalModelsOverlay* localModelsOverlay);
~LocalModelsOverlay();
virtual void update(float deltatime);
virtual void render(RenderArgs* args);

View file

@ -9,15 +9,15 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "ModelOverlay.h"
#include <Application.h>
#include <GlowEffect.h>
#include "ModelOverlay.h"
ModelOverlay::ModelOverlay()
: _model(),
_modelTextures(QVariantMap()),
_scale(1.0f),
_updateModel(false)
{
_model.init();
@ -25,12 +25,10 @@ ModelOverlay::ModelOverlay()
}
ModelOverlay::ModelOverlay(const ModelOverlay* modelOverlay) :
Base3DOverlay(modelOverlay),
Volume3DOverlay(modelOverlay),
_model(),
_modelTextures(QVariantMap()),
_url(modelOverlay->_url),
_rotation(modelOverlay->_rotation),
_scale(modelOverlay->_scale),
_updateModel(false)
{
_model.init();
@ -45,8 +43,9 @@ void ModelOverlay::update(float deltatime) {
_updateModel = false;
_model.setSnapModelToCenter(true);
_model.setRotation(_rotation);
_model.setTranslation(_position);
_model.setScale(getScale());
_model.setRotation(getRotation());
_model.setTranslation(getPosition());
_model.setURL(_url);
_model.simulate(deltatime, true);
} else {
@ -56,13 +55,13 @@ void ModelOverlay::update(float deltatime) {
}
bool ModelOverlay::addToScene(Overlay::Pointer overlay, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
Base3DOverlay::addToScene(overlay, scene, pendingChanges);
Volume3DOverlay::addToScene(overlay, scene, pendingChanges);
_model.addToScene(scene, pendingChanges);
return true;
}
void ModelOverlay::removeFromScene(Overlay::Pointer overlay, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
Base3DOverlay::removeFromScene(overlay, scene, pendingChanges);
Volume3DOverlay::removeFromScene(overlay, scene, pendingChanges);
_model.removeFromScene(scene, pendingChanges);
}
@ -100,54 +99,26 @@ void ModelOverlay::render(RenderArgs* args) {
}
void ModelOverlay::setProperties(const QScriptValue &properties) {
Base3DOverlay::setProperties(properties);
auto position = getPosition();
auto rotation = getRotation();
auto scale = getDimensions();
Volume3DOverlay::setProperties(properties);
if (position != getPosition() || rotation != getRotation() || scale != getDimensions()) {
_model.setScaleToFit(true, getScale());
_updateModel = true;
}
QScriptValue urlValue = properties.property("url");
if (urlValue.isValid()) {
_url = urlValue.toVariant().toString();
if (urlValue.isValid() && urlValue.isString()) {
_url = urlValue.toString();
_updateModel = true;
_isLoaded = false;
}
QScriptValue scaleValue = properties.property("scale");
if (scaleValue.isValid()) {
_scale = scaleValue.toVariant().toFloat();
_model.setScaleToFit(true, _scale);
_updateModel = true;
}
QScriptValue rotationValue = properties.property("rotation");
if (rotationValue.isValid()) {
QScriptValue x = rotationValue.property("x");
QScriptValue y = rotationValue.property("y");
QScriptValue z = rotationValue.property("z");
QScriptValue w = rotationValue.property("w");
if (x.isValid() && y.isValid() && z.isValid() && w.isValid()) {
_rotation.x = x.toVariant().toFloat();
_rotation.y = y.toVariant().toFloat();
_rotation.z = z.toVariant().toFloat();
_rotation.w = w.toVariant().toFloat();
}
_updateModel = true;
}
QScriptValue dimensionsValue = properties.property("dimensions");
if (dimensionsValue.isValid()) {
QScriptValue x = dimensionsValue.property("x");
QScriptValue y = dimensionsValue.property("y");
QScriptValue z = dimensionsValue.property("z");
if (x.isValid() && y.isValid() && z.isValid()) {
glm::vec3 dimensions;
dimensions.x = x.toVariant().toFloat();
dimensions.y = y.toVariant().toFloat();
dimensions.z = z.toVariant().toFloat();
_model.setScaleToFit(true, dimensions);
}
_updateModel = true;
}
QScriptValue texturesValue = properties.property("textures");
if (texturesValue.isValid()) {
if (texturesValue.isValid() && texturesValue.toVariant().canConvert(QVariant::Map)) {
QVariantMap textureMap = texturesValue.toVariant().toMap();
foreach(const QString& key, textureMap.keys()) {
@ -161,22 +132,12 @@ void ModelOverlay::setProperties(const QScriptValue &properties) {
_modelTextures[key] = newTextureURL; // Keep local track of textures for getProperty()
}
}
if (properties.property("position").isValid()) {
_updateModel = true;
}
}
QScriptValue ModelOverlay::getProperty(const QString& property) {
if (property == "url") {
return _url.toString();
}
if (property == "scale") {
return _scale;
}
if (property == "rotation") {
return quatToScriptValue(_scriptEngine, _rotation);
}
if (property == "dimensions") {
return vec3toScriptValue(_scriptEngine, _model.getScaleToFitDimensions());
}
@ -192,7 +153,7 @@ QScriptValue ModelOverlay::getProperty(const QString& property) {
}
}
return Base3DOverlay::getProperty(property);
return Volume3DOverlay::getProperty(property);
}
bool ModelOverlay::findRayIntersection(const glm::vec3& origin, const glm::vec3& direction,

View file

@ -14,9 +14,9 @@
#include <Model.h>
#include "Base3DOverlay.h"
#include "Volume3DOverlay.h"
class ModelOverlay : public Base3DOverlay {
class ModelOverlay : public Volume3DOverlay {
Q_OBJECT
public:
ModelOverlay();
@ -41,9 +41,6 @@ private:
QVariantMap _modelTextures;
QUrl _url;
glm::quat _rotation;
float _scale;
bool _updateModel;
};

View file

@ -8,12 +8,13 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include "Overlay.h"
#include <NumericalConstants.h>
#include <RegisteredMetaTypes.h>
static const xColor DEFAULT_OVERLAY_COLOR = { 255, 255, 255 };
static const float DEFAULT_ALPHA = 0.7f;
Overlay::Overlay() :
_renderItemID(render::Item::INVALID_ITEM_ID),

View file

@ -14,18 +14,11 @@
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include <QRect>
#include <QScriptValue>
#include <QString>
#include <RegisteredMetaTypes.h>
#include <SharedUtil.h> // for xColor
#include <RenderArgs.h>
#include <AABox.h>
#include <render/Scene.h>
const xColor DEFAULT_OVERLAY_COLOR = { 255, 255, 255 };
const float DEFAULT_ALPHA = 0.7f;
class QScriptEngine;
class QScriptValue;
class Overlay : public QObject {
Q_OBJECT
@ -37,7 +30,6 @@ public:
};
typedef std::shared_ptr<Overlay> Pointer;
typedef render::Payload<Overlay> Payload;
typedef std::shared_ptr<render::Item::PayloadInterface> PayloadPointer;
@ -47,6 +39,8 @@ public:
void init(QScriptEngine* scriptEngine);
virtual void update(float deltatime) {}
virtual void render(RenderArgs* args) = 0;
virtual AABox getBounds() const = 0;
virtual bool addToScene(Overlay::Pointer overlay, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);
virtual void removeFromScene(Overlay::Pointer overlay, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);

View file

@ -8,16 +8,9 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include <SharedUtil.h>
#include "Overlay2D.h"
Overlay2D::Overlay2D() {
}
#include <RegisteredMetaTypes.h>
Overlay2D::Overlay2D(const Overlay2D* overlay2D) :
Overlay(overlay2D),
@ -25,7 +18,9 @@ Overlay2D::Overlay2D(const Overlay2D* overlay2D) :
{
}
Overlay2D::~Overlay2D() {
AABox Overlay2D::getBounds() const {
return AABox(glm::vec3(_bounds.x(), _bounds.y(), 0.0f),
glm::vec3(_bounds.width(), _bounds.height(), 0.01f));
}
void Overlay2D::setProperties(const QScriptValue& properties) {
@ -40,7 +35,7 @@ void Overlay2D::setProperties(const QScriptValue& properties) {
boundsRect.setHeight(bounds.property("height").toVariant().toInt());
setBounds(boundsRect);
} else {
QRect oldBounds = getBounds();
QRect oldBounds = _bounds;
QRect newBounds = oldBounds;
if (properties.property("x").isValid()) {

Some files were not shown because too many files have changed in this diff.