diff --git a/.gitignore b/.gitignore index 62686287bc..087e24208d 100644 --- a/.gitignore +++ b/.gitignore @@ -39,5 +39,11 @@ interface/external/Leap/util/ interface/external/Sixense/include/ interface/external/Sixense/lib/ +# Ignore Visage +interface/external/visage/dependencies/ +interface/external/visage/include/ +interface/external/visage/lib/ +interface/resources/visage/ + # Ignore interfaceCache for Linux users interface/interfaceCache/ diff --git a/cmake/modules/FindVisage.cmake b/cmake/modules/FindVisage.cmake new file mode 100644 index 0000000000..84f672525d --- /dev/null +++ b/cmake/modules/FindVisage.cmake @@ -0,0 +1,76 @@ +# Try to find the Visage controller library +# +# You must provide a VISAGE_ROOT_DIR which contains lib and include directories +# +# Once done this will define +# +# VISAGE_FOUND - system found Visage +# VISAGE_INCLUDE_DIRS - the Visage include directory +# VISAGE_LIBRARIES - Link this to use Visage +# +# Created on 2/11/2014 by Andrzej Kapolka +# Copyright (c) 2014 High Fidelity +# + +if (VISAGE_LIBRARIES AND VISAGE_INCLUDE_DIRS) + # in cache already + set(VISAGE_FOUND TRUE) +else (VISAGE_LIBRARIES AND VISAGE_INCLUDE_DIRS) + find_path(VISAGE_INCLUDE_DIR VisageTracker2.h ${VISAGE_ROOT_DIR}/include) + + if (APPLE) + find_path(VISAGE_XML_INCLUDE_DIR libxml/xmlreader.h /usr/include/libxml2) + find_path(VISAGE_OPENCV_INCLUDE_DIR cv.h ${VISAGE_ROOT_DIR}/dependencies/OpenCV_MacOSX/include) + find_path(VISAGE_OPENCV2_INCLUDE_DIR opencv.hpp ${VISAGE_ROOT_DIR}/dependencies/OpenCV_MacOSX/include/opencv2) + if (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR) + set(VISAGE_INCLUDE_DIRS + "${VISAGE_INCLUDE_DIR};${VISAGE_XML_INCLUDE_DIR};${VISAGE_OPENCV_INCLUDE_DIR};${VISAGE_OPENCV2_INCLUDE_DIR}" + CACHE INTERNAL "Visage include dirs") + endif (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR) + + find_library(VISAGE_CORE_LIBRARY libvscore.a ${VISAGE_ROOT_DIR}/lib) + find_library(VISAGE_VISION_LIBRARY libvsvision.a ${VISAGE_ROOT_DIR}/lib) + find_library(VISAGE_OPENCV_LIBRARY libOpenCV.a ${VISAGE_ROOT_DIR}/dependencies/OpenCV_MacOSX/lib) + if (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND VISAGE_OPENCV_LIBRARY) + set(VISAGE_LIBRARIES "${VISAGE_CORE_LIBRARY};${VISAGE_VISION_LIBRARY};${VISAGE_OPENCV_LIBRARY}" + CACHE INTERNAL "Visage libraries") + endif (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND VISAGE_OPENCV_LIBRARY) + + elseif (WIN32) + find_path(VISAGE_XML_INCLUDE_DIR libxml/xmlreader.h ${VISAGE_ROOT_DIR}/dependencies/libxml2/include) + find_path(VISAGE_OPENCV_INCLUDE_DIR opencv/cv.h ${VISAGE_ROOT_DIR}/dependencies/OpenCV/include) + find_path(VISAGE_OPENCV2_INCLUDE_DIR cv.h ${VISAGE_ROOT_DIR}/dependencies/OpenCV/include/opencv) + if (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR) + set(VISAGE_INCLUDE_DIRS + "${VISAGE_INCLUDE_DIR};${VISAGE_XML_INCLUDE_DIR};${VISAGE_OPENCV_INCLUDE_DIR};${VISAGE_OPENCV2_INCLUDE_DIR}" + CACHE INTERNAL "Visage include dirs") + endif (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR) + + find_library(VISAGE_CORE_LIBRARY vscore.lib ${VISAGE_ROOT_DIR}/lib) + find_library(VISAGE_VISION_LIBRARY vsvision.lib ${VISAGE_ROOT_DIR}/lib) + find_library(VISAGE_OPENCV_LIBRARY opencv_core243.lib ${VISAGE_ROOT_DIR}/dependencies/OpenCV/lib) + if (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND 
VISAGE_OPENCV_LIBRARY) + set(VISAGE_LIBRARIES "${VISAGE_CORE_LIBRARY};${VISAGE_VISION_LIBRARY};${VISAGE_OPENCV_LIBRARY}" + CACHE INTERNAL "Visage libraries") + endif (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND VISAGE_OPENCV_LIBRARY) + + endif () + + if (VISAGE_INCLUDE_DIRS AND VISAGE_LIBRARIES) + set(VISAGE_FOUND TRUE) + endif (VISAGE_INCLUDE_DIRS AND VISAGE_LIBRARIES) + + if (VISAGE_FOUND) + if (NOT VISAGE_FIND_QUIETLY) + message(STATUS "Found Visage: ${VISAGE_LIBRARIES}") + endif (NOT VISAGE_FIND_QUIETLY) + else (VISAGE_FOUND) + if (VISAGE_FIND_REQUIRED) + message(FATAL_ERROR "Could not find Visage") + endif (VISAGE_FIND_REQUIRED) + endif (VISAGE_FOUND) + + # show the VISAGE_INCLUDE_DIRS and VISAGE_LIBRARIES variables only in the advanced view + mark_as_advanced(VISAGE_INCLUDE_DIRS VISAGE_LIBRARIES) + +endif (VISAGE_LIBRARIES AND VISAGE_INCLUDE_DIRS) diff --git a/examples/editVoxels.js b/examples/editVoxels.js index c1f0c8dc49..c67ff0dcfa 100644 --- a/examples/editVoxels.js +++ b/examples/editVoxels.js @@ -16,14 +16,7 @@ // Click and drag to create more new voxels in the same direction // -function vLength(v) { - return Math.sqrt(v.x * v.x + v.y * v.y + v.z * v.z); -} - -function vMinus(a, b) { - var rval = { x: a.x - b.x, y: a.y - b.y, z: a.z - b.z }; - return rval; -} +var windowDimensions = Controller.getViewportDimensions(); var NEW_VOXEL_SIZE = 1.0; var NEW_VOXEL_DISTANCE_FROM_CAMERA = 3.0; @@ -76,6 +69,52 @@ var clickSound = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-publ var audioOptions = new AudioInjectionOptions();
 audioOptions.volume = 0.5; +var editToolsOn = false; // starts out off + + +var voxelPreview = Overlays.addOverlay("cube", { + position: { x: 0, y: 0, z: 0}, + size: 1, + color: { red: 255, green: 0, blue: 0}, + alpha: 1, + solid: false, + visible: false, + lineWidth: 4 + }); + + +// These will be our "overlay IDs" +var swatches = new Array(); +var swatchHeight = 54; +var swatchWidth = 31; +var swatchesWidth = swatchWidth * numColors; +var swatchesX = (windowDimensions.x - swatchesWidth) / 2; +var swatchesY = windowDimensions.y - swatchHeight; + +// create the overlays, position them in a row, set their colors, and for the selected one, use a different source image +// location so that it displays the "selected" marker +for (s = 0; s < numColors; s++) { + var imageFromX = 12 + (s * 27); + var imageFromY = 0; + if (s == whichColor) { + imageFromY = 55; + } + var swatchX = swatchesX + (30 * s); + + swatches[s] = Overlays.addOverlay("image", { + x: swatchX, + y: swatchesY, + width: swatchWidth, + height: swatchHeight, + subImage: { x: imageFromX, y: imageFromY, width: (swatchWidth - 1), height: swatchHeight }, + imageURL: "http://highfidelity-public.s3-us-west-1.amazonaws.com/images/testing-swatches.svg", + color: colors[s], + alpha: 1, + visible: editToolsOn + }); +} + + function setAudioPosition() { var camera = Camera.getPosition(); var forwardVector = Quat.getFront(MyAvatar.orientation); @@ -101,7 +140,141 @@ function fixEulerAngles(eulers) { return rVal; } +var trackLastMouseX = 0; +var trackLastMouseY = 0; +var trackAsDelete = false; +var trackAsRecolor = false; + +function showPreviewVoxel() { + if (editToolsOn) { + var voxelColor; + + var pickRay = Camera.computePickRay(trackLastMouseX, trackLastMouseY); + var intersection = Voxels.findRayIntersection(pickRay); + + if (whichColor == -1) { + // Copy mode - use clicked voxel color + voxelColor = { red: intersection.voxel.red, + green: intersection.voxel.green, + blue: intersection.voxel.blue }; + } else { + voxelColor = { red: colors[whichColor].red, + green: colors[whichColor].green, + blue: colors[whichColor].blue }; + } + + var guidePosition; + + if (trackAsDelete) { + guidePosition = { x: intersection.voxel.x, + y: intersection.voxel.y, + z: intersection.voxel.z }; + Overlays.editOverlay(voxelPreview, { + position: guidePosition, + size: intersection.voxel.s, + visible: true, + color: { red: 255, green: 0, blue: 0 }, + solid: false, + alpha: 1 + }); + } else if (trackAsRecolor) { + guidePosition = { x: intersection.voxel.x - 0.001, + y: intersection.voxel.y - 0.001, + z: intersection.voxel.z - 0.001 }; + + Overlays.editOverlay(voxelPreview, { + position: guidePosition, + size: intersection.voxel.s + 0.002, + visible: true, + color: voxelColor, + solid: true, + alpha: 0.8 + }); + + } else if (!isExtruding) { + guidePosition = { x: intersection.voxel.x, + y: intersection.voxel.y, + z: intersection.voxel.z }; + + if (intersection.face == "MIN_X_FACE") { + guidePosition.x -= intersection.voxel.s; + } else if (intersection.face == "MAX_X_FACE") { + guidePosition.x += intersection.voxel.s; + } else if (intersection.face == "MIN_Y_FACE") { + guidePosition.y -= intersection.voxel.s; + } else if (intersection.face == "MAX_Y_FACE") { + guidePosition.y += intersection.voxel.s; + } else if (intersection.face == "MIN_Z_FACE") { + guidePosition.z -= intersection.voxel.s; + } else if (intersection.face == "MAX_Z_FACE") { + guidePosition.z += intersection.voxel.s; + } + + Overlays.editOverlay(voxelPreview, { + position: guidePosition, + size: 
intersection.voxel.s, + visible: true, + color: voxelColor, + solid: true, + alpha: 0.7 + }); + } else if (isExtruding) { + Overlays.editOverlay(voxelPreview, { visible: false }); + } + } else { + Overlays.editOverlay(voxelPreview, { visible: false }); + } +} + +function trackMouseEvent(event) { + trackLastMouseX = event.x; + trackLastMouseY = event.y; + trackAsDelete = event.isControl; + trackAsRecolor = event.isShifted; + showPreviewVoxel(); +} + +function trackKeyPressEvent(event) { + if (event.text == "CONTROL") { + trackAsDelete = true; + showPreviewVoxel(); + } + if (event.text == "SHIFT") { + trackAsRecolor = true; + } + showPreviewVoxel(); +} + +function trackKeyReleaseEvent(event) { + if (event.text == "CONTROL") { + trackAsDelete = false; + showPreviewVoxel(); + } + if (event.text == "SHIFT") { + trackAsRecolor = false; + } + + // on TAB release, toggle our tool state + if (event.text == "TAB") { + editToolsOn = !editToolsOn; + moveTools(); + Audio.playSound(clickSound, audioOptions); + } + showPreviewVoxel(); +} + function mousePressEvent(event) { + + // if our tools are off, then don't do anything + if (!editToolsOn) { + return; + } + + if (event.isRightButton) { + // debugging of right button click on mac... + print(">>>> RIGHT BUTTON <<<<<"); + } + trackMouseEvent(event); // used by preview support mouseX = event.x; mouseY = event.y; var pickRay = Camera.computePickRay(event.x, event.y); @@ -118,16 +291,17 @@ function mousePressEvent(event) { // get position for initial azimuth, elevation orbitCenter = intersection.intersection; var orbitVector = Vec3.subtract(cameraPosition, orbitCenter); - orbitRadius = vLength(orbitVector); + orbitRadius = Vec3.length(orbitVector); orbitAzimuth = Math.atan2(orbitVector.z, orbitVector.x); orbitAltitude = Math.asin(orbitVector.y / Vec3.length(orbitVector)); - } else if (event.isRightButton || event.isControl) { + } else if (trackAsDelete || event.isRightButton) { // Delete voxel Voxels.eraseVoxel(intersection.voxel.x, intersection.voxel.y, intersection.voxel.z, intersection.voxel.s); Audio.playSound(deleteSound, audioOptions); + Overlays.editOverlay(voxelPreview, { visible: false }); - } else if (event.isShifted) { + } else if (trackAsRecolor) { // Recolor Voxel Voxels.setVoxel(intersection.voxel.x, intersection.voxel.y, @@ -135,6 +309,7 @@ function mousePressEvent(event) { intersection.voxel.s, colors[whichColor].red, colors[whichColor].green, colors[whichColor].blue); Audio.playSound(changeColorSound, audioOptions); + Overlays.editOverlay(voxelPreview, { visible: false }); } else { // Add voxel on face if (whichColor == -1) { @@ -178,6 +353,7 @@ function mousePressEvent(event) { lastVoxelScale = newVoxel.s; Audio.playSound(addSound, audioOptions); + Overlays.editOverlay(voxelPreview, { visible: false }); dragStart = { x: event.x, y: event.y }; isAdding = true; } @@ -185,42 +361,52 @@ function mousePressEvent(event) { } function keyPressEvent(event) { - key_alt = event.isAlt; - key_shift = event.isShifted; - var nVal = parseInt(event.text); - if (event.text == "0") { - print("Color = Copy"); - whichColor = -1; - Audio.playSound(clickSound, audioOptions); - } else if ((nVal > 0) && (nVal <= numColors)) { - whichColor = nVal - 1; - print("Color = " + (whichColor + 1)); - Audio.playSound(clickSound, audioOptions); - } else if (event.text == "9") { - // Create a brand new 1 meter voxel in front of your avatar - var color = whichColor; - if (color == -1) color = 0; - var newPosition = getNewVoxelPosition(); - var newVoxel = { - x: newPosition.x, 
- y: newPosition.y , - z: newPosition.z, - s: NEW_VOXEL_SIZE, - red: colors[color].red, - green: colors[color].green, - blue: colors[color].blue }; - Voxels.setVoxel(newVoxel.x, newVoxel.y, newVoxel.z, newVoxel.s, newVoxel.red, newVoxel.green, newVoxel.blue); - setAudioPosition(); - Audio.playSound(addSound, audioOptions); - } else if (event.text == " ") { + // if our tools are off, then don't do anything + if (editToolsOn) { + key_alt = event.isAlt; + key_shift = event.isShifted; + var nVal = parseInt(event.text); + if (event.text == "0") { + print("Color = Copy"); + whichColor = -1; + Audio.playSound(clickSound, audioOptions); + moveTools(); + } else if ((nVal > 0) && (nVal <= numColors)) { + whichColor = nVal - 1; + print("Color = " + (whichColor + 1)); + Audio.playSound(clickSound, audioOptions); + moveTools(); + } else if (event.text == "9") { + // Create a brand new 1 meter voxel in front of your avatar + var color = whichColor; + if (color == -1) color = 0; + var newPosition = getNewVoxelPosition(); + var newVoxel = { + x: newPosition.x, + y: newPosition.y , + z: newPosition.z, + s: NEW_VOXEL_SIZE, + red: colors[color].red, + green: colors[color].green, + blue: colors[color].blue }; + Voxels.setVoxel(newVoxel.x, newVoxel.y, newVoxel.z, newVoxel.s, newVoxel.red, newVoxel.green, newVoxel.blue); + setAudioPosition(); + Audio.playSound(addSound, audioOptions); + } + } + + // do this even if not in edit tools + if (event.text == " ") { // Reset my orientation! var orientation = { x:0, y:0, z:0, w:1 }; Camera.setOrientation(orientation); MyAvatar.orientation = orientation; } + trackKeyPressEvent(event); // used by preview support } function keyReleaseEvent(event) { + trackKeyReleaseEvent(event); // used by preview support key_alt = false; key_shift = false; } @@ -248,7 +434,7 @@ function mouseMoveEvent(event) { var lastVoxelDistance = { x: pickRay.origin.x - lastVoxelPosition.x, y: pickRay.origin.y - lastVoxelPosition.y, z: pickRay.origin.z - lastVoxelPosition.z }; - var distance = vLength(lastVoxelDistance); + var distance = Vec3.length(lastVoxelDistance); var mouseSpot = { x: pickRay.direction.x * distance, y: pickRay.direction.y * distance, z: pickRay.direction.z * distance }; mouseSpot.x += pickRay.origin.x; mouseSpot.y += pickRay.origin.y; @@ -279,9 +465,17 @@ function mouseMoveEvent(event) { } } } + + // update the add voxel/delete voxel overlay preview + trackMouseEvent(event); } function mouseReleaseEvent(event) { + // if our tools are off, then don't do anything + if (!editToolsOn) { + return; + } + if (isOrbiting) { var cameraOrientation = Camera.getOrientation(); var eulers = Quat.safeEulerAngles(cameraOrientation); @@ -296,6 +490,41 @@ function mouseReleaseEvent(event) { isExtruding = false; } +function moveTools() { + swatchesX = (windowDimensions.x - swatchesWidth) / 2; + swatchesY = windowDimensions.y - swatchHeight; + + // create the overlays, position them in a row, set their colors, and for the selected one, use a different source image + // location so that it displays the "selected" marker + for (s = 0; s < numColors; s++) { + var imageFromX = 12 + (s * 27); + var imageFromY = 0; + if (s == whichColor) { + imageFromY = 55; + } + var swatchX = swatchesX + ((swatchWidth - 1) * s); + + Overlays.editOverlay(swatches[s], { + x: swatchX, + y: swatchesY, + subImage: { x: imageFromX, y: imageFromY, width: (swatchWidth - 1), height: swatchHeight }, + color: colors[s], + alpha: 1, + visible: editToolsOn + }); + } +} + + +function update() { + var newWindowDimensions = 
Controller.getViewportDimensions(); + if (newWindowDimensions.x != windowDimensions.x || newWindowDimensions.y != windowDimensions.y) { + windowDimensions = newWindowDimensions; + print("window resized..."); + moveTools(); + } +} + Controller.mousePressEvent.connect(mousePressEvent); Controller.mouseReleaseEvent.connect(mouseReleaseEvent); Controller.mouseMoveEvent.connect(mouseMoveEvent); @@ -303,5 +532,15 @@ Controller.keyPressEvent.connect(keyPressEvent); Controller.keyReleaseEvent.connect(keyReleaseEvent); function scriptEnding() { + Overlays.deleteOverlay(voxelPreview); + for (s = 0; s < numColors; s++) { + Overlays.deleteOverlay(swatches[s]); + } } Script.scriptEnding.connect(scriptEnding); + + +Script.willSendVisualDataCallback.connect(update); + + + diff --git a/interface/CMakeLists.txt b/interface/CMakeLists.txt index 8e96006828..a0a9033187 100644 --- a/interface/CMakeLists.txt +++ b/interface/CMakeLists.txt @@ -11,6 +11,7 @@ set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/../cmake set(FACESHIFT_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/external/faceshift) set(LIBOVR_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/external/LibOVR) set(SIXENSE_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/external/Sixense) +set(VISAGE_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/external/visage) if (DEFINED ENV{JOB_ID}) set(BUILD_SEQ $ENV{JOB_ID}) @@ -138,9 +139,10 @@ find_package(Faceshift) find_package(GLM REQUIRED) find_package(LibOVR) find_package(Sixense) +find_package(Visage) find_package(ZLIB) -# likewise with Sixense library for Razer Hydra +# include the Sixense library for Razer Hydra if available if (SIXENSE_FOUND AND NOT DISABLE_SIXENSE) add_definitions(-DHAVE_SIXENSE) include_directories(SYSTEM ${SIXENSE_INCLUDE_DIRS}) @@ -150,6 +152,21 @@ if (SIXENSE_FOUND AND NOT DISABLE_SIXENSE) target_link_libraries(${TARGET_NAME} ${SIXENSE_LIBRARIES}) endif (SIXENSE_FOUND AND NOT DISABLE_SIXENSE) +# likewise with Visage library for webcam feature tracking +if (VISAGE_FOUND AND NOT DISABLE_VISAGE) + add_definitions(-DHAVE_VISAGE -DVISAGE_STATIC) + include_directories(SYSTEM ${VISAGE_INCLUDE_DIRS}) + if (APPLE) + add_definitions(-DMAC_OS_X) + SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-comment -isystem ${VISAGE_INCLUDE_DIRS}") + find_library(AVFoundation AVFoundation) + find_library(CoreMedia CoreMedia) + find_library(NEW_STD_LIBRARY libc++.dylib /usr/lib/) + target_link_libraries(${TARGET_NAME} ${AVFoundation} ${CoreMedia} ${NEW_STD_LIBRARY}) + endif (APPLE) + target_link_libraries(${TARGET_NAME} ${VISAGE_LIBRARIES}) +endif (VISAGE_FOUND AND NOT DISABLE_VISAGE) + # and with LibOVR for Oculus Rift if (LIBOVR_FOUND AND NOT DISABLE_LIBOVR) add_definitions(-DHAVE_LIBOVR) diff --git a/interface/external/visage/readme.txt b/interface/external/visage/readme.txt new file mode 100644 index 0000000000..8aa28f81c4 --- /dev/null +++ b/interface/external/visage/readme.txt @@ -0,0 +1,14 @@ + +Instructions for adding the Visage driver to Interface +Andrzej Kapolka, February 11, 2014 + +1. Copy the Visage sdk folders (lib, include, dependencies) into the interface/external/visage folder. + This readme.txt should be there as well. + +2. Copy the Visage configuration data folder (visageSDK-MacOS/Samples/MacOSX/data/) to interface/resources/visage + (i.e., so that interface/resources/visage/candide3.wfm is accessible) + +3. Copy the Visage license file to interface/resources/visage/license.vlc. + +4. Delete your build directory, run cmake and build, and you should be all set. 
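For reference, the four steps above correspond roughly to the following shell commands. This is only a hedged sketch, not part of the patch: it assumes the Visage SDK was unpacked to ~/visageSDK-MacOS, that the commands are run from the repository root, that the license file sits in ~/Downloads, and that the build directory is named "build" (adjust all of these to your setup).

    # step 0: make sure the target folders exist
    mkdir -p interface/external/visage interface/resources/visage
    # step 1: copy the SDK's lib, include, and dependencies folders next to this readme
    cp -r ~/visageSDK-MacOS/lib ~/visageSDK-MacOS/include ~/visageSDK-MacOS/dependencies interface/external/visage/
    # step 2: copy the configuration data so interface/resources/visage/candide3.wfm is accessible
    cp -r ~/visageSDK-MacOS/Samples/MacOSX/data/* interface/resources/visage/
    # step 3: copy the license file (source location here is hypothetical)
    cp ~/Downloads/license.vlc interface/resources/visage/license.vlc
    # step 4: rebuild from a clean build directory
    rm -rf build && mkdir build && cd build && cmake .. && make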
+ diff --git a/interface/src/Application.cpp b/interface/src/Application.cpp index af8920c9d6..ac45bf39d8 100644 --- a/interface/src/Application.cpp +++ b/interface/src/Application.cpp @@ -450,22 +450,22 @@ void Application::paintGL() { _myCamera.setUpShift(0.0f); _myCamera.setDistance(0.0f); _myCamera.setTightness(0.0f); // Camera is directly connected to head without smoothing - _myCamera.setTargetPosition(_myAvatar->getHead().calculateAverageEyePosition()); - _myCamera.setTargetRotation(_myAvatar->getHead().getOrientation()); + _myCamera.setTargetPosition(_myAvatar->getHead()->calculateAverageEyePosition()); + _myCamera.setTargetRotation(_myAvatar->getHead()->getOrientation()); } else if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON) { _myCamera.setTightness(0.0f); // In first person, camera follows (untweaked) head exactly without delay - _myCamera.setTargetPosition(_myAvatar->getHead().calculateAverageEyePosition()); - _myCamera.setTargetRotation(_myAvatar->getHead().getCameraOrientation()); + _myCamera.setTargetPosition(_myAvatar->getHead()->calculateAverageEyePosition()); + _myCamera.setTargetRotation(_myAvatar->getHead()->getCameraOrientation()); } else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) { _myCamera.setTightness(0.0f); // Camera is directly connected to head without smoothing _myCamera.setTargetPosition(_myAvatar->getUprightHeadPosition()); - _myCamera.setTargetRotation(_myAvatar->getHead().getCameraOrientation()); + _myCamera.setTargetRotation(_myAvatar->getHead()->getCameraOrientation()); } else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) { _myCamera.setTightness(0.0f); - float headHeight = _myAvatar->getHead().calculateAverageEyePosition().y - _myAvatar->getPosition().y; + float headHeight = _myAvatar->getHead()->calculateAverageEyePosition().y - _myAvatar->getPosition().y; _myCamera.setDistance(MIRROR_FULLSCREEN_DISTANCE * _myAvatar->getScale()); _myCamera.setTargetPosition(_myAvatar->getPosition() + glm::vec3(0, headHeight, 0)); _myCamera.setTargetRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PIf, 0.0f))); @@ -529,14 +529,14 @@ void Application::paintGL() { _mirrorCamera.setTargetPosition(_myAvatar->getChestPosition()); } else { // HEAD zoom level _mirrorCamera.setDistance(MIRROR_REARVIEW_DISTANCE * _myAvatar->getScale()); - if (_myAvatar->getSkeletonModel().isActive() && _myAvatar->getHead().getFaceModel().isActive()) { + if (_myAvatar->getSkeletonModel().isActive() && _myAvatar->getHead()->getFaceModel().isActive()) { // as a hack until we have a better way of dealing with coordinate precision issues, reposition the // face/body so that the average eye position lies at the origin eyeRelativeCamera = true; _mirrorCamera.setTargetPosition(glm::vec3()); } else { - _mirrorCamera.setTargetPosition(_myAvatar->getHead().calculateAverageEyePosition()); + _mirrorCamera.setTargetPosition(_myAvatar->getHead()->calculateAverageEyePosition()); } } @@ -558,26 +558,26 @@ void Application::paintGL() { if (eyeRelativeCamera) { // save absolute translations glm::vec3 absoluteSkeletonTranslation = _myAvatar->getSkeletonModel().getTranslation(); - glm::vec3 absoluteFaceTranslation = _myAvatar->getHead().getFaceModel().getTranslation(); + glm::vec3 absoluteFaceTranslation = _myAvatar->getHead()->getFaceModel().getTranslation(); // get the eye positions relative to the neck and use them to set the face translation glm::vec3 leftEyePosition, rightEyePosition; - _myAvatar->getHead().getFaceModel().setTranslation(glm::vec3()); - 
_myAvatar->getHead().getFaceModel().getEyePositions(leftEyePosition, rightEyePosition); - _myAvatar->getHead().getFaceModel().setTranslation((leftEyePosition + rightEyePosition) * -0.5f); + _myAvatar->getHead()->getFaceModel().setTranslation(glm::vec3()); + _myAvatar->getHead()->getFaceModel().getEyePositions(leftEyePosition, rightEyePosition); + _myAvatar->getHead()->getFaceModel().setTranslation((leftEyePosition + rightEyePosition) * -0.5f); // get the neck position relative to the body and use it to set the skeleton translation glm::vec3 neckPosition; _myAvatar->getSkeletonModel().setTranslation(glm::vec3()); _myAvatar->getSkeletonModel().getNeckPosition(neckPosition); - _myAvatar->getSkeletonModel().setTranslation(_myAvatar->getHead().getFaceModel().getTranslation() - + _myAvatar->getSkeletonModel().setTranslation(_myAvatar->getHead()->getFaceModel().getTranslation() - neckPosition); displaySide(_mirrorCamera, true); // restore absolute translations _myAvatar->getSkeletonModel().setTranslation(absoluteSkeletonTranslation); - _myAvatar->getHead().getFaceModel().setTranslation(absoluteFaceTranslation); + _myAvatar->getHead()->getFaceModel().setTranslation(absoluteFaceTranslation); } else { displaySide(_mirrorCamera, true); } @@ -1867,7 +1867,7 @@ void Application::init() { // TODO: move _myAvatar out of Application. Move relevant code to MyAvataar or AvatarManager _avatarManager.init(); _myCamera.setMode(CAMERA_MODE_FIRST_PERSON); - _myCamera.setModeShiftRate(1.0f); + _myCamera.setModeShiftPeriod(1.0f); _mirrorCamera.setMode(CAMERA_MODE_MIRROR); _mirrorCamera.setAspectRatio((float)MIRROR_VIEW_WIDTH / (float)MIRROR_VIEW_HEIGHT); @@ -1978,8 +1978,8 @@ const float MAX_VOXEL_EDIT_DISTANCE = 50.0f; const float HEAD_SPHERE_RADIUS = 0.07f; bool Application::isLookingAtMyAvatar(Avatar* avatar) { - glm::vec3 theirLookat = avatar->getHead().getLookAtPosition(); - glm::vec3 myHeadPosition = _myAvatar->getHead().getPosition(); + glm::vec3 theirLookat = avatar->getHead()->getLookAtPosition(); + glm::vec3 myHeadPosition = _myAvatar->getHead()->getPosition(); if (pointInSphere(theirLookat, myHeadPosition, HEAD_SPHERE_RADIUS * _myAvatar->getScale())) { return true; @@ -2040,10 +2040,19 @@ void Application::updateFaceshift() { // Copy angular velocity if measured by faceshift, to the head if (_faceshift.isActive()) { - _myAvatar->getHead().setAngularVelocity(_faceshift.getHeadAngularVelocity()); + _myAvatar->getHead()->setAngularVelocity(_faceshift.getHeadAngularVelocity()); } } +void Application::updateVisage() { + + bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings); + PerformanceWarning warn(showWarnings, "Application::updateVisage()"); + + // Update Visage + _visage.update(); +} + void Application::updateMyAvatarLookAtPosition(glm::vec3& lookAtSpot) { bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings); @@ -2057,23 +2066,35 @@ void Application::updateMyAvatarLookAtPosition(glm::vec3& lookAtSpot) { float distance = TREE_SCALE; if (_myAvatar->getLookAtTargetAvatar()) { distance = glm::distance(_mouseRayOrigin, - static_cast(_myAvatar->getLookAtTargetAvatar())->getHead().calculateAverageEyePosition()); + static_cast(_myAvatar->getLookAtTargetAvatar())->getHead()->calculateAverageEyePosition()); } else if (_isHoverVoxel) { distance = glm::distance(_mouseRayOrigin, getMouseVoxelWorldCoordinates(_hoverVoxel)); } lookAtSpot = _mouseRayOrigin + _mouseRayDirection * distance; } + bool trackerActive = false; + float eyePitch, eyeYaw; if 
(_faceshift.isActive()) { + eyePitch = _faceshift.getEstimatedEyePitch(); + eyeYaw = _faceshift.getEstimatedEyeYaw(); + trackerActive = true; + + } else if (_visage.isActive()) { + eyePitch = _visage.getEstimatedEyePitch(); + eyeYaw = _visage.getEstimatedEyeYaw(); + trackerActive = true; + } + if (trackerActive) { // deflect using Faceshift gaze data - glm::vec3 origin = _myAvatar->getHead().calculateAverageEyePosition(); + glm::vec3 origin = _myAvatar->getHead()->calculateAverageEyePosition(); float pitchSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? -1.0f : 1.0f; float deflection = Menu::getInstance()->getFaceshiftEyeDeflection(); lookAtSpot = origin + _myCamera.getRotation() * glm::quat(glm::radians(glm::vec3( - _faceshift.getEstimatedEyePitch() * pitchSign * deflection, _faceshift.getEstimatedEyeYaw() * deflection, 0.0f))) * + eyePitch * pitchSign * deflection, eyeYaw * deflection, 0.0f))) * glm::inverse(_myCamera.getRotation()) * (lookAtSpot - origin); } - _myAvatar->getHead().setLookAtPosition(lookAtSpot); + _myAvatar->getHead()->setLookAtPosition(lookAtSpot); } void Application::updateHoverVoxels(float deltaTime, float& distance, BoxFace& face) { @@ -2229,17 +2250,17 @@ void Application::cameraMenuChanged() { if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) { if (_myCamera.getMode() != CAMERA_MODE_MIRROR) { _myCamera.setMode(CAMERA_MODE_MIRROR); - _myCamera.setModeShiftRate(100.0f); + _myCamera.setModeShiftPeriod(0.00f); } } else if (Menu::getInstance()->isOptionChecked(MenuOption::FirstPerson)) { if (_myCamera.getMode() != CAMERA_MODE_FIRST_PERSON) { _myCamera.setMode(CAMERA_MODE_FIRST_PERSON); - _myCamera.setModeShiftRate(1.0f); + _myCamera.setModeShiftPeriod(1.0f); } } else { if (_myCamera.getMode() != CAMERA_MODE_THIRD_PERSON) { _myCamera.setMode(CAMERA_MODE_THIRD_PERSON); - _myCamera.setModeShiftRate(1.0f); + _myCamera.setModeShiftPeriod(1.0f); } } } @@ -2321,6 +2342,7 @@ void Application::update(float deltaTime) { glm::vec3 lookAtSpot; updateFaceshift(); + updateVisage(); _myAvatar->updateLookAtTargetAvatar(lookAtSpot); updateMyAvatarLookAtPosition(lookAtSpot); @@ -3833,6 +3855,7 @@ void Application::resetSensors() { _mouseY = _glWidget->height() / 2; _faceshift.reset(); + _visage.reset(); if (OculusManager::isConnected()) { OculusManager::reset(); diff --git a/interface/src/Application.h b/interface/src/Application.h index 530b09ed63..d80cd6b661 100644 --- a/interface/src/Application.h +++ b/interface/src/Application.h @@ -54,6 +54,7 @@ #include "avatar/Profile.h" #include "devices/Faceshift.h" #include "devices/SixenseManager.h" +#include "devices/Visage.h" #include "renderer/AmbientOcclusionEffect.h" #include "renderer/GeometryCache.h" #include "renderer/GlowEffect.h" @@ -160,6 +161,7 @@ public: const glm::vec3& getMouseRayOrigin() const { return _mouseRayOrigin; } const glm::vec3& getMouseRayDirection() const { return _mouseRayDirection; } Faceshift* getFaceshift() { return &_faceshift; } + Visage* getVisage() { return &_visage; } SixenseManager* getSixenseManager() { return &_sixenseManager; } BandwidthMeter* getBandwidthMeter() { return &_bandwidthMeter; } QSettings* getSettings() { return _settings; } @@ -286,6 +288,7 @@ private: // Various helper functions called during update() void updateMouseRay(); void updateFaceshift(); + void updateVisage(); void updateMyAvatarLookAtPosition(glm::vec3& lookAtSpot); void updateHoverVoxels(float deltaTime, float& distance, BoxFace& face); void updateMouseVoxels(float deltaTime, float& distance, BoxFace& 
face); @@ -385,6 +388,7 @@ private: Profile _profile; // The data-server linked profile for this user Faceshift _faceshift; + Visage _visage; SixenseManager _sixenseManager; QStringList _activeScripts; diff --git a/interface/src/Audio.cpp b/interface/src/Audio.cpp index 0cf67be2bf..22148609c8 100644 --- a/interface/src/Audio.cpp +++ b/interface/src/Audio.cpp @@ -467,8 +467,8 @@ void Audio::handleAudioInput() { if (audioMixer && audioMixer->getActiveSocket()) { MyAvatar* interfaceAvatar = Application::getInstance()->getAvatar(); - glm::vec3 headPosition = interfaceAvatar->getHead().getPosition(); - glm::quat headOrientation = interfaceAvatar->getHead().getOrientation(); + glm::vec3 headPosition = interfaceAvatar->getHead()->getPosition(); + glm::quat headOrientation = interfaceAvatar->getHead()->getOrientation(); // we need the amount of bytes in the buffer + 1 for type // + 12 for 3 floats for position + float for bearing + 1 attenuation byte diff --git a/interface/src/Camera.cpp b/interface/src/Camera.cpp index 694fd30f50..8729ef58b6 100644 --- a/interface/src/Camera.cpp +++ b/interface/src/Camera.cpp @@ -15,8 +15,6 @@ #include "Menu.h" #include "Util.h" -const float CAMERA_MINIMUM_MODE_SHIFT_RATE = 0.5f; - const float CAMERA_FIRST_PERSON_MODE_UP_SHIFT = 0.0f; const float CAMERA_FIRST_PERSON_MODE_DISTANCE = 0.0f; const float CAMERA_FIRST_PERSON_MODE_TIGHTNESS = 100.0f; @@ -57,7 +55,7 @@ Camera::Camera() : _newTightness(0.0f), _modeShift(1.0f), _linearModeShift(0.0f), - _modeShiftRate(1.0f), + _modeShiftPeriod(1.0f), _scale(1.0f), _lookingAt(0.0f, 0.0f, 0.0f), _isKeepLookingAt(false) @@ -75,18 +73,18 @@ void Camera::update(float deltaTime) { // use iterative forces to keep the camera at the desired position and angle void Camera::updateFollowMode(float deltaTime) { if (_linearModeShift < 1.0f) { - _linearModeShift += _modeShiftRate * deltaTime; - _modeShift = ONE_HALF - ONE_HALF * cosf(_linearModeShift * PIE ); - _upShift = _previousUpShift * (1.0f - _modeShift) + _newUpShift * _modeShift; - _distance = _previousDistance * (1.0f - _modeShift) + _newDistance * _modeShift; - _tightness = _previousTightness * (1.0f - _modeShift) + _newTightness * _modeShift; - + _linearModeShift += deltaTime / _modeShiftPeriod; if (_needsToInitialize || _linearModeShift > 1.0f) { _linearModeShift = 1.0f; _modeShift = 1.0f; _upShift = _newUpShift; _distance = _newDistance; _tightness = _newTightness; + } else { + _modeShift = ONE_HALF - ONE_HALF * cosf(_linearModeShift * PIE ); + _upShift = _previousUpShift * (1.0f - _modeShift) + _newUpShift * _modeShift; + _distance = _previousDistance * (1.0f - _modeShift) + _newDistance * _modeShift; + _tightness = _previousTightness * (1.0f - _modeShift) + _newTightness * _modeShift; } } @@ -121,13 +119,10 @@ float Camera::getFarClip() const { : std::numeric_limits::max() - 1; } -void Camera::setModeShiftRate ( float rate ) { - - _modeShiftRate = rate; - - if (_modeShiftRate < CAMERA_MINIMUM_MODE_SHIFT_RATE ) { - _modeShiftRate = CAMERA_MINIMUM_MODE_SHIFT_RATE; - } +void Camera::setModeShiftPeriod (float period) { + const float MIN_PERIOD = 0.001f; + const float MAX_PERIOD = 3.0f; + _modeShiftPeriod = glm::clamp(period, MIN_PERIOD, MAX_PERIOD); } void Camera::setMode(CameraMode m) { @@ -307,7 +302,8 @@ void CameraScriptableObject::setMode(const QString& mode) { } if (currentMode != targetMode) { _camera->setMode(targetMode); - _camera->setModeShiftRate(10.0f); + const float DEFAULT_MODE_SHIFT_PERIOD = 0.5f; // half second + 
_camera->setModeShiftPeriod(DEFAULT_MODE_SHIFT_PERIOD); } } diff --git a/interface/src/Camera.h b/interface/src/Camera.h index b4ba3dbe05..7b95ce97f1 100644 --- a/interface/src/Camera.h +++ b/interface/src/Camera.h @@ -43,7 +43,7 @@ public: void setTargetRotation(const glm::quat& rotation); void setMode(CameraMode m); - void setModeShiftRate(float r); + void setModeShiftPeriod(float r); void setFieldOfView(float f); void setAspectRatio(float a); void setNearClip(float n); @@ -109,7 +109,7 @@ private: float _newTightness; float _modeShift; float _linearModeShift; - float _modeShiftRate; + float _modeShiftPeriod; float _scale; glm::vec3 _lookingAt; diff --git a/interface/src/ControllerScriptingInterface.cpp b/interface/src/ControllerScriptingInterface.cpp index b3d6170bff..b60615f124 100644 --- a/interface/src/ControllerScriptingInterface.cpp +++ b/interface/src/ControllerScriptingInterface.cpp @@ -250,3 +250,7 @@ void ControllerScriptingInterface::releaseJoystick(int joystickIndex) { } } +glm::vec2 ControllerScriptingInterface::getViewportDimensions() const { + QGLWidget* widget = Application::getInstance()->getGLWidget(); + return glm::vec2(widget->width(), widget->height()); +} diff --git a/interface/src/ControllerScriptingInterface.h b/interface/src/ControllerScriptingInterface.h index f0a50559f9..6fe5a60fa4 100644 --- a/interface/src/ControllerScriptingInterface.h +++ b/interface/src/ControllerScriptingInterface.h @@ -74,6 +74,8 @@ public slots: virtual void captureJoystick(int joystickIndex); virtual void releaseJoystick(int joystickIndex); + virtual glm::vec2 getViewportDimensions() const; + private: const PalmData* getPrimaryPalm() const; const PalmData* getPalm(int palmIndex) const; diff --git a/interface/src/Menu.cpp b/interface/src/Menu.cpp index cc1c815ca6..327f905194 100644 --- a/interface/src/Menu.cpp +++ b/interface/src/Menu.cpp @@ -327,7 +327,8 @@ Menu::Menu() : QMenu* avatarOptionsMenu = developerMenu->addMenu("Avatar Options"); addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::Avatars, 0, true); - addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::CollisionProxies); + addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::RenderSkeletonCollisionProxies); + addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::RenderHeadCollisionProxies); addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::LookAtVectors, 0, false); addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, @@ -772,7 +773,7 @@ void Menu::editPreferences() { QFormLayout* form = new QFormLayout(); layout->addLayout(form, 1); - QString faceURLString = applicationInstance->getAvatar()->getHead().getFaceModel().getURL().toString(); + QString faceURLString = applicationInstance->getAvatar()->getHead()->getFaceModel().getURL().toString(); QLineEdit* faceURLEdit = new QLineEdit(faceURLString); faceURLEdit->setMinimumWidth(QLINE_MINIMUM_WIDTH); faceURLEdit->setPlaceholderText(DEFAULT_HEAD_MODEL_URL.toString()); @@ -790,7 +791,7 @@ void Menu::editPreferences() { form->addRow("Display name:", displayNameEdit); QSlider* pupilDilation = new QSlider(Qt::Horizontal); - pupilDilation->setValue(applicationInstance->getAvatar()->getHead().getPupilDilation() * pupilDilation->maximum()); + pupilDilation->setValue(applicationInstance->getAvatar()->getHead()->getPupilDilation() * pupilDilation->maximum()); form->addRow("Pupil Dilation:", pupilDilation); QSlider* faceshiftEyeDeflection = new QSlider(Qt::Horizontal); @@ -873,7 +874,7 @@ void 
Menu::editPreferences() { applicationInstance->getAvatar()->sendIdentityPacket(); } - applicationInstance->getAvatar()->getHead().setPupilDilation(pupilDilation->value() / (float)pupilDilation->maximum()); + applicationInstance->getAvatar()->getHead()->setPupilDilation(pupilDilation->value() / (float)pupilDilation->maximum()); _maxVoxels = maxVoxels->value(); applicationInstance->getVoxels()->setMaxVoxels(_maxVoxels); diff --git a/interface/src/Menu.h b/interface/src/Menu.h index efe668b793..bd9cdc523e 100644 --- a/interface/src/Menu.h +++ b/interface/src/Menu.h @@ -185,7 +185,6 @@ namespace MenuOption { const QString Bandwidth = "Bandwidth Display"; const QString BandwidthDetails = "Bandwidth Details"; const QString ChatCircling = "Chat Circling"; - const QString CollisionProxies = "Collision Proxies"; const QString Collisions = "Collisions"; const QString CollideWithAvatars = "Collide With Avatars"; const QString CollideWithParticles = "Collide With Particles"; @@ -267,6 +266,8 @@ namespace MenuOption { const QString Preferences = "Preferences..."; const QString RandomizeVoxelColors = "Randomize Voxel TRUE Colors"; const QString ReloadAllScripts = "Reload All Scripts"; + const QString RenderSkeletonCollisionProxies = "Skeleton Collision Proxies"; + const QString RenderHeadCollisionProxies = "Head Collision Proxies"; const QString ResetAvatarSize = "Reset Avatar Size"; const QString ResetSwatchColors = "Reset Swatch Colors"; const QString RunTimingTests = "Run Timing Tests"; diff --git a/interface/src/VoxelSystem.cpp b/interface/src/VoxelSystem.cpp index a3352f36e7..693a010182 100644 --- a/interface/src/VoxelSystem.cpp +++ b/interface/src/VoxelSystem.cpp @@ -2105,7 +2105,7 @@ void VoxelSystem::hideOutOfView(bool forceFullFrustum) { bool VoxelSystem::hideAllSubTreeOperation(OctreeElement* element, void* extraData) { VoxelTreeElement* voxel = (VoxelTreeElement*)element; hideOutOfViewArgs* args = (hideOutOfViewArgs*)extraData; - + // If we've culled at least once, then we will use the status of this voxel in the last culled frustum to determine // how to proceed. If we've never culled, then we just consider all these voxels to be UNKNOWN so that we will not // consider that case. @@ -2141,7 +2141,7 @@ bool VoxelSystem::hideAllSubTreeOperation(OctreeElement* element, void* extraDat bool VoxelSystem::showAllSubTreeOperation(OctreeElement* element, void* extraData) { VoxelTreeElement* voxel = (VoxelTreeElement*)element; hideOutOfViewArgs* args = (hideOutOfViewArgs*)extraData; - + // If we've culled at least once, then we will use the status of this voxel in the last culled frustum to determine // how to proceed. If we've never culled, then we just consider all these voxels to be UNKNOWN so that we will not // consider that case. @@ -2184,7 +2184,7 @@ bool VoxelSystem::showAllSubTreeOperation(OctreeElement* element, void* extraDat bool VoxelSystem::hideOutOfViewOperation(OctreeElement* element, void* extraData) { VoxelTreeElement* voxel = (VoxelTreeElement*)element; hideOutOfViewArgs* args = (hideOutOfViewArgs*)extraData; - + // If we're still recursing the tree using this operator, then we don't know if we're inside or outside... // so before we move forward we need to determine our frustum location ViewFrustum::location inFrustum = voxel->inFrustum(args->thisViewFrustum); @@ -2201,7 +2201,6 @@ bool VoxelSystem::hideOutOfViewOperation(OctreeElement* element, void* extraData // ok, now do some processing for this node... 
switch (inFrustum) { case ViewFrustum::OUTSIDE: { - // If this node is outside the current view, then we might want to hide it... unless it was previously OUTSIDE, // if it was previously outside, then we can safely assume it's already hidden, and we can also safely assume // that all of it's children are outside both of our views, in which case we can just stop recursing... @@ -2215,12 +2214,10 @@ bool VoxelSystem::hideOutOfViewOperation(OctreeElement* element, void* extraData // we need to hide it. Additionally we know that ALL of it's children are also fully OUTSIDE so we can recurse // the children and simply mark them as hidden args->tree->recurseNodeWithOperation(voxel, hideAllSubTreeOperation, args ); - return false; } break; case ViewFrustum::INSIDE: { - // If this node is INSIDE the current view, then we might want to show it... unless it was previously INSIDE, // if it was previously INSIDE, then we can safely assume it's already shown, and we can also safely assume // that all of it's children are INSIDE both of our views, in which case we can just stop recursing... @@ -2234,12 +2231,10 @@ bool VoxelSystem::hideOutOfViewOperation(OctreeElement* element, void* extraData // we need to show it. Additionally we know that ALL of it's children are also fully INSIDE so we can recurse // the children and simply mark them as visible (as appropriate based on LOD) args->tree->recurseNodeWithOperation(voxel, showAllSubTreeOperation, args); - return false; } break; case ViewFrustum::INTERSECT: { args->nodesScanned++; - // If this node INTERSECTS the current view, then we might want to show it... unless it was previously INSIDE // the last known view, in which case it will already be visible, and we know that all it's children are also // previously INSIDE and visible. So in this case stop recursing @@ -2253,8 +2248,15 @@ bool VoxelSystem::hideOutOfViewOperation(OctreeElement* element, void* extraData // if the child node INTERSECTs the view, then we want to check to see if it thinks it should render // if it should render but is missing it's VBO index, then we want to flip it on, and we can stop recursing from // here because we know will block any children anyway + + float voxelSizeScale = Menu::getInstance()->getVoxelSizeScale(); + int boundaryLevelAdjust = Menu::getInstance()->getBoundaryLevelAdjust(); + bool shouldRender = voxel->calculateShouldRender(&args->thisViewFrustum, voxelSizeScale, boundaryLevelAdjust); + voxel->setShouldRender(shouldRender); + if (voxel->getShouldRender() && !voxel->isKnownBufferIndex()) { voxel->setDirtyBit(); // will this make it draw? + voxel->markWithChangedTime(); // both are needed to force redraw args->nodesShown++; return false; } @@ -2267,7 +2269,6 @@ bool VoxelSystem::hideOutOfViewOperation(OctreeElement* element, void* extraData } break; } // switch - return true; // keep going! 
} diff --git a/interface/src/avatar/Avatar.cpp b/interface/src/avatar/Avatar.cpp index f0dbcc8dbc..9eb5e0d577 100644 --- a/interface/src/avatar/Avatar.cpp +++ b/interface/src/avatar/Avatar.cpp @@ -62,8 +62,6 @@ const float DISPLAYNAME_ALPHA = 0.95f; Avatar::Avatar() : AvatarData(), - _head(this), - _hand(this), _skeletonModel(this), _bodyYawDelta(0.0f), _mode(AVATAR_MODE_STANDING), @@ -84,18 +82,16 @@ Avatar::Avatar() : moveToThread(Application::getInstance()->thread()); // give the pointer to our head to inherited _headData variable from AvatarData - _headData = &_head; - _handData = &_hand; + _headData = static_cast(new Head(this)); + _handData = static_cast(new Hand(this)); } Avatar::~Avatar() { - _headData = NULL; - _handData = NULL; } void Avatar::init() { - _head.init(); - _hand.init(); + getHead()->init(); + getHand()->init(); _skeletonModel.init(); _initialized = true; } @@ -118,16 +114,17 @@ void Avatar::simulate(float deltaTime) { // copy velocity so we can use it later for acceleration glm::vec3 oldVelocity = getVelocity(); - _hand.simulate(deltaTime, false); + getHand()->simulate(deltaTime, false); _skeletonModel.simulate(deltaTime); - _head.setBodyRotation(glm::vec3(_bodyPitch, _bodyYaw, _bodyRoll)); + Head* head = getHead(); + head->setBodyRotation(glm::vec3(_bodyPitch, _bodyYaw, _bodyRoll)); glm::vec3 headPosition; if (!_skeletonModel.getHeadPosition(headPosition)) { headPosition = _position; } - _head.setPosition(headPosition); - _head.setScale(_scale); - _head.simulate(deltaTime, false); + head->setPosition(headPosition); + head->setScale(_scale); + getHead()->simulate(deltaTime, false); // use speed and angular velocity to determine walking vs. standing if (_speed + fabs(_bodyYawDelta) > 0.2) { @@ -195,11 +192,12 @@ void Avatar::render(bool forceRenderHead) { Glower glower(_moving && lengthToTarget > GLOW_DISTANCE ? 
1.0f : 0.0f); // render body - if (Menu::getInstance()->isOptionChecked(MenuOption::CollisionProxies)) { - _skeletonModel.renderCollisionProxies(1.f); - //_head.getFaceModel().renderCollisionProxies(0.5f); + if (Menu::getInstance()->isOptionChecked(MenuOption::RenderSkeletonCollisionProxies)) { + _skeletonModel.renderCollisionProxies(0.7f); + } + if (Menu::getInstance()->isOptionChecked(MenuOption::RenderHeadCollisionProxies)) { + getHead()->getFaceModel().renderCollisionProxies(0.7f); } - if (Menu::getInstance()->isOptionChecked(MenuOption::Avatars)) { renderBody(forceRenderHead); } @@ -207,7 +205,7 @@ void Avatar::render(bool forceRenderHead) { // render sphere when far away const float MAX_ANGLE = 10.f; float height = getSkeletonHeight(); - glm::vec3 delta = height * (_head.getCameraOrientation() * IDENTITY_UP) / 2.f; + glm::vec3 delta = height * (getHead()->getCameraOrientation() * IDENTITY_UP) / 2.f; float angle = abs(angleBetween(toTarget + delta, toTarget - delta)); if (angle < MAX_ANGLE) { @@ -215,7 +213,7 @@ void Avatar::render(bool forceRenderHead) { glPushMatrix(); glTranslatef(_position.x, _position.y, _position.z); glScalef(height / 2.f, height / 2.f, height / 2.f); - glutSolidSphere(1.2f + _head.getAverageLoudness() * .0005f, 20, 20); + glutSolidSphere(1.2f + getHead()->getAverageLoudness() * .0005f, 20, 20); glPopMatrix(); } } @@ -231,7 +229,7 @@ void Avatar::render(bool forceRenderHead) { } glPushMatrix(); - glm::vec3 chatPosition = getHead().getEyePosition() + getBodyUpDirection() * CHAT_MESSAGE_HEIGHT * _scale; + glm::vec3 chatPosition = getHead()->getEyePosition() + getBodyUpDirection() * CHAT_MESSAGE_HEIGHT * _scale; glTranslatef(chatPosition.x, chatPosition.y, chatPosition.z); glm::quat chatRotation = Application::getInstance()->getCamera()->getRotation(); glm::vec3 chatAxis = glm::axis(chatRotation); @@ -288,9 +286,9 @@ void Avatar::renderBody(bool forceRenderHead) { //printf("Render other at %.3f, %.2f, %.2f\n", pos.x, pos.y, pos.z); _skeletonModel.render(1.0f); if (forceRenderHead) { - _head.render(1.0f); + getHead()->render(1.0f); } - _hand.render(false); + getHand()->render(false); } void Avatar::renderDisplayName() { @@ -386,7 +384,7 @@ bool Avatar::findRayIntersection(const glm::vec3& origin, const glm::vec3& direc if (_skeletonModel.findRayIntersection(origin, direction, modelDistance)) { minDistance = qMin(minDistance, modelDistance); } - if (_head.getFaceModel().findRayIntersection(origin, direction, modelDistance)) { + if (getHead()->getFaceModel().findRayIntersection(origin, direction, modelDistance)) { minDistance = qMin(minDistance, modelDistance); } if (minDistance < FLT_MAX) { @@ -401,7 +399,7 @@ bool Avatar::findSphereCollisions(const glm::vec3& penetratorCenter, float penet // Temporarily disabling collisions against the skeleton because the collision proxies up // near the neck are bad and prevent the hand from hitting the face. 
//return _skeletonModel.findSphereCollisions(penetratorCenter, penetratorRadius, collisions, 1.0f, skeletonSkipIndex); - return _head.getFaceModel().findSphereCollisions(penetratorCenter, penetratorRadius, collisions); + return getHead()->getFaceModel().findSphereCollisions(penetratorCenter, penetratorRadius, collisions); } bool Avatar::findParticleCollisions(const glm::vec3& particleCenter, float particleRadius, CollisionList& collisions) { @@ -480,7 +478,7 @@ bool Avatar::findParticleCollisions(const glm::vec3& particleCenter, float parti void Avatar::setFaceModelURL(const QUrl &faceModelURL) { AvatarData::setFaceModelURL(faceModelURL); const QUrl DEFAULT_FACE_MODEL_URL = QUrl::fromLocalFile("resources/meshes/defaultAvatar_head.fst"); - _head.getFaceModel().setURL(_faceModelURL, DEFAULT_FACE_MODEL_URL); + getHead()->getFaceModel().setURL(_faceModelURL, DEFAULT_FACE_MODEL_URL); } void Avatar::setSkeletonModelURL(const QUrl &skeletonModelURL) { @@ -605,7 +603,7 @@ bool Avatar::collisionWouldMoveAvatar(CollisionInfo& collision) const { return false; //return _skeletonModel.collisionHitsMoveableJoint(collision); } - if (model == &(_head.getFaceModel())) { + if (model == &(getHead()->getFaceModel())) { // ATM we always handle MODEL_COLLISIONS against the face. return true; } @@ -618,8 +616,8 @@ void Avatar::applyCollision(CollisionInfo& collision) { } // TODO: make skeleton also respond to collisions Model* model = static_cast(collision._data); - if (model == &(_head.getFaceModel())) { - _head.applyCollision(collision); + if (model == &(getHead()->getFaceModel())) { + getHead()->applyCollision(collision); } } @@ -628,7 +626,7 @@ float Avatar::getPelvisFloatingHeight() const { } float Avatar::getPelvisToHeadLength() const { - return glm::distance(_position, _head.getPosition()); + return glm::distance(_position, getHead()->getPosition()); } void Avatar::setShowDisplayName(bool showDisplayName) { diff --git a/interface/src/avatar/Avatar.h b/interface/src/avatar/Avatar.h index c7b52301e3..6fa0f203e9 100755 --- a/interface/src/avatar/Avatar.h +++ b/interface/src/avatar/Avatar.h @@ -74,7 +74,7 @@ public: void render(bool forceRenderHead); //setters - void setDisplayingLookatVectors(bool displayingLookatVectors) { _head.setRenderLookatVectors(displayingLookatVectors); } + void setDisplayingLookatVectors(bool displayingLookatVectors) { getHead()->setRenderLookatVectors(displayingLookatVectors); } void setMouseRay(const glm::vec3 &origin, const glm::vec3 &direction); //getters @@ -83,8 +83,9 @@ public: glm::vec3 getChestPosition() const; float getScale() const { return _scale; } const glm::vec3& getVelocity() const { return _velocity; } - Head& getHead() { return _head; } - Hand& getHand() { return _hand; } + const Head* getHead() const { return static_cast(_headData); } + Head* getHead() { return static_cast(_headData); } + Hand* getHand() { return static_cast(_handData); } glm::quat getWorldAlignedOrientation() const; Node* getOwningAvatarMixer() { return _owningAvatarMixer.data(); } @@ -134,8 +135,6 @@ public slots: void updateCollisionFlags(); protected: - Head _head; - Hand _hand; SkeletonModel _skeletonModel; float _bodyYawDelta; AvatarMode _mode; diff --git a/interface/src/avatar/FaceModel.cpp b/interface/src/avatar/FaceModel.cpp index c9d2565cee..2a1593b776 100644 --- a/interface/src/avatar/FaceModel.cpp +++ b/interface/src/avatar/FaceModel.cpp @@ -48,9 +48,6 @@ bool FaceModel::render(float alpha) { if (!Model::render(alpha)) { return false; } - if 
(Menu::getInstance()->isOptionChecked(MenuOption::CollisionProxies)) { - renderCollisionProxies(alpha); - } return true; } diff --git a/interface/src/avatar/Hand.cpp b/interface/src/avatar/Hand.cpp index 7c9905557e..efbdfa2438 100644 --- a/interface/src/avatar/Hand.cpp +++ b/interface/src/avatar/Hand.cpp @@ -125,6 +125,41 @@ void Hand::simulate(float deltaTime, bool isMine) { } } +void Hand::playSlaps(PalmData& palm, Avatar* avatar) +{ + // Check for palm collisions + glm::vec3 myPalmPosition = palm.getPosition(); + float palmCollisionDistance = 0.1f; + bool wasColliding = palm.getIsCollidingWithPalm(); + palm.setIsCollidingWithPalm(false); + // If 'Play Slaps' is enabled, look for palm-to-palm collisions and make sound + for (size_t j = 0; j < avatar->getHand()->getNumPalms(); j++) { + PalmData& otherPalm = avatar->getHand()->getPalms()[j]; + if (!otherPalm.isActive()) { + continue; + } + glm::vec3 otherPalmPosition = otherPalm.getPosition(); + if (glm::length(otherPalmPosition - myPalmPosition) < palmCollisionDistance) { + palm.setIsCollidingWithPalm(true); + if (!wasColliding) { + const float PALM_COLLIDE_VOLUME = 1.f; + const float PALM_COLLIDE_FREQUENCY = 1000.f; + const float PALM_COLLIDE_DURATION_MAX = 0.75f; + const float PALM_COLLIDE_DECAY_PER_SAMPLE = 0.01f; + Application::getInstance()->getAudio()->startDrumSound(PALM_COLLIDE_VOLUME, + PALM_COLLIDE_FREQUENCY, + PALM_COLLIDE_DURATION_MAX, + PALM_COLLIDE_DECAY_PER_SAMPLE); + // If the other person's palm is in motion, move mine downward to show I was hit + const float MIN_VELOCITY_FOR_SLAP = 0.05f; + if (glm::length(otherPalm.getVelocity()) > MIN_VELOCITY_FOR_SLAP) { + // add slapback here + } + } + } + } +} + // We create a static CollisionList that is recycled for each collision test. const float MAX_COLLISIONS_PER_AVATAR = 32; static CollisionList handCollisions(MAX_COLLISIONS_PER_AVATAR); @@ -139,41 +174,12 @@ void Hand::collideAgainstAvatar(Avatar* avatar, bool isMyHand) { PalmData& palm = getPalms()[i]; if (!palm.isActive()) { continue; - } - glm::vec3 totalPenetration; - if (isMyHand && Menu::getInstance()->isOptionChecked(MenuOption::PlaySlaps)) { - // Check for palm collisions - glm::vec3 myPalmPosition = palm.getPosition(); - float palmCollisionDistance = 0.1f; - bool wasColliding = palm.getIsCollidingWithPalm(); - palm.setIsCollidingWithPalm(false); - // If 'Play Slaps' is enabled, look for palm-to-palm collisions and make sound - for (size_t j = 0; j < avatar->getHand().getNumPalms(); j++) { - PalmData& otherPalm = avatar->getHand().getPalms()[j]; - if (!otherPalm.isActive()) { - continue; - } - glm::vec3 otherPalmPosition = otherPalm.getPosition(); - if (glm::length(otherPalmPosition - myPalmPosition) < palmCollisionDistance) { - palm.setIsCollidingWithPalm(true); - if (!wasColliding) { - const float PALM_COLLIDE_VOLUME = 1.f; - const float PALM_COLLIDE_FREQUENCY = 1000.f; - const float PALM_COLLIDE_DURATION_MAX = 0.75f; - const float PALM_COLLIDE_DECAY_PER_SAMPLE = 0.01f; - Application::getInstance()->getAudio()->startDrumSound(PALM_COLLIDE_VOLUME, - PALM_COLLIDE_FREQUENCY, - PALM_COLLIDE_DURATION_MAX, - PALM_COLLIDE_DECAY_PER_SAMPLE); - // If the other person's palm is in motion, move mine downward to show I was hit - const float MIN_VELOCITY_FOR_SLAP = 0.05f; - if (glm::length(otherPalm.getVelocity()) > MIN_VELOCITY_FOR_SLAP) { - // add slapback here - } - } - } - } } + if (isMyHand && Menu::getInstance()->isOptionChecked(MenuOption::PlaySlaps)) { + playSlaps(palm, avatar); + } + + glm::vec3 totalPenetration; 
handCollisions.clear(); if (avatar->findSphereCollisions(palm.getPosition(), scaledPalmRadius, handCollisions)) { for (int j = 0; j < handCollisions.size(); ++j) { @@ -307,7 +313,8 @@ void Hand::render(bool isMine) { _buckyBalls.render(); } - if (Menu::getInstance()->isOptionChecked(MenuOption::CollisionProxies)) { + if (Menu::getInstance()->isOptionChecked(MenuOption::RenderSkeletonCollisionProxies)) { + // draw a green sphere at hand joint location, which is actually near the wrist) for (size_t i = 0; i < getNumPalms(); i++) { PalmData& palm = getPalms()[i]; if (!palm.isActive()) { diff --git a/interface/src/avatar/Hand.h b/interface/src/avatar/Hand.h index 5a423630b4..9888c9f054 100755 --- a/interface/src/avatar/Hand.h +++ b/interface/src/avatar/Hand.h @@ -93,6 +93,8 @@ private: void calculateGeometry(); void handleVoxelCollision(PalmData* palm, const glm::vec3& fingerTipPosition, VoxelTreeElement* voxel, float deltaTime); + + void playSlaps(PalmData& palm, Avatar* avatar); }; #endif diff --git a/interface/src/avatar/Head.cpp b/interface/src/avatar/Head.cpp index ddb0660364..5c6100764a 100644 --- a/interface/src/avatar/Head.cpp +++ b/interface/src/avatar/Head.cpp @@ -62,24 +62,20 @@ void Head::simulate(float deltaTime, bool isMine) { // Update audio trailing average for rendering facial animations Faceshift* faceshift = Application::getInstance()->getFaceshift(); + Visage* visage = Application::getInstance()->getVisage(); if (isMine) { - _isFaceshiftConnected = faceshift->isActive(); + _isFaceshiftConnected = false; + if (faceshift->isActive()) { + _blendshapeCoefficients = faceshift->getBlendshapeCoefficients(); + _isFaceshiftConnected = true; + + } else if (visage->isActive()) { + _blendshapeCoefficients = visage->getBlendshapeCoefficients(); + _isFaceshiftConnected = true; + } } - if (isMine && faceshift->isActive()) { - const float EYE_OPEN_SCALE = 0.5f; - _leftEyeBlink = faceshift->getLeftBlink() - EYE_OPEN_SCALE * faceshift->getLeftEyeOpen(); - _rightEyeBlink = faceshift->getRightBlink() - EYE_OPEN_SCALE * faceshift->getRightEyeOpen(); - - // set these values based on how they'll be used. if we use faceshift in the long term, we'll want a complete - // mapping between their blendshape coefficients and our avatar features - const float MOUTH_SIZE_SCALE = 2500.0f; - _averageLoudness = faceshift->getMouthSize() * faceshift->getMouthSize() * MOUTH_SIZE_SCALE; - const float BROW_HEIGHT_SCALE = 0.005f; - _browAudioLift = faceshift->getBrowUpCenter() * BROW_HEIGHT_SCALE; - _blendshapeCoefficients = faceshift->getBlendshapeCoefficients(); - - } else if (!_isFaceshiftConnected) { + if (!_isFaceshiftConnected) { // Update eye saccades const float AVERAGE_MICROSACCADE_INTERVAL = 0.50f; const float AVERAGE_SACCADE_INTERVAL = 4.0f; diff --git a/interface/src/avatar/MyAvatar.cpp b/interface/src/avatar/MyAvatar.cpp index 1b30b3d524..016159f415 100644 --- a/interface/src/avatar/MyAvatar.cpp +++ b/interface/src/avatar/MyAvatar.cpp @@ -53,7 +53,6 @@ MyAvatar::MyAvatar() : _elapsedTimeSinceCollision(0.0f), _lastCollisionPosition(0, 0, 0), _speedBrakes(false), - _isCollisionsOn(true), _isThrustOn(false), _thrustMultiplier(1.0f), _moveTarget(0,0,0), @@ -73,8 +72,8 @@ void MyAvatar::reset() { // TODO? resurrect headMouse stuff? 
//_headMouseX = _glWidget->width() / 2; //_headMouseY = _glWidget->height() / 2; - _head.reset(); - _hand.reset(); + getHead()->reset(); + getHand()->reset(); setVelocity(glm::vec3(0,0,0)); setThrust(glm::vec3(0,0,0)); @@ -131,19 +130,20 @@ void MyAvatar::update(float deltaTime) { //_headMouseY = glm::clamp(_headMouseY, 0, _glWidget->height()); } + Head* head = getHead(); if (OculusManager::isConnected()) { float yaw, pitch, roll; OculusManager::getEulerAngles(yaw, pitch, roll); - _head.setYaw(yaw); - _head.setPitch(pitch); - _head.setRoll(roll); + head->setYaw(yaw); + head->setPitch(pitch); + head->setRoll(roll); } // Get audio loudness data from audio input device Audio* audio = Application::getInstance()->getAudio(); - _head.setAudioLoudness(audio->getLastInputLoudness()); - _head.setAudioAverageLoudness(audio->getAudioAverageInputLoudness()); + head->setAudioLoudness(audio->getLastInputLoudness()); + head->setAudioAverageLoudness(audio->getAudioAverageInputLoudness()); if (Menu::getInstance()->isOptionChecked(MenuOption::Gravity)) { setGravity(Application::getInstance()->getEnvironment()->getGravity(getPosition())); @@ -267,7 +267,7 @@ void MyAvatar::simulate(float deltaTime) { if (!Application::getInstance()->getFaceshift()->isActive() && OculusManager::isConnected() && fabsf(forwardAcceleration) > OCULUS_ACCELERATION_PULL_THRESHOLD && - fabs(_head.getYaw()) > OCULUS_YAW_OFFSET_THRESHOLD) { + fabs(getHead()->getYaw()) > OCULUS_YAW_OFFSET_THRESHOLD) { // if we're wearing the oculus // and this acceleration is above the pull threshold @@ -277,7 +277,7 @@ void MyAvatar::simulate(float deltaTime) { _bodyYaw = getAbsoluteHeadYaw(); // set the head yaw to zero for this draw - _head.setYaw(0); + getHead()->setYaw(0); // correct the oculus yaw offset OculusManager::updateYawOffset(); @@ -315,17 +315,20 @@ void MyAvatar::simulate(float deltaTime) { _position += _velocity * deltaTime; // update avatar skeleton and simulate hand and head - _hand.collideAgainstOurself(); - _hand.simulate(deltaTime, true); + getHand()->collideAgainstOurself(); + getHand()->simulate(deltaTime, true); + _skeletonModel.simulate(deltaTime); - _head.setBodyRotation(glm::vec3(_bodyPitch, _bodyYaw, _bodyRoll)); + + Head* head = getHead(); + head->setBodyRotation(glm::vec3(_bodyPitch, _bodyYaw, _bodyRoll)); glm::vec3 headPosition; if (!_skeletonModel.getHeadPosition(headPosition)) { headPosition = _position; } - _head.setPosition(headPosition); - _head.setScale(_scale); - _head.simulate(deltaTime, true); + head->setPosition(headPosition); + head->setScale(_scale); + head->simulate(deltaTime, true); // Zero thrust out now that we've added it to velocity in this frame _thrust = glm::vec3(0, 0, 0); @@ -337,22 +340,34 @@ const float MAX_PITCH = 90.0f; // Update avatar head rotation with sensor data void MyAvatar::updateFromGyros(float deltaTime) { Faceshift* faceshift = Application::getInstance()->getFaceshift(); + Visage* visage = Application::getInstance()->getVisage(); glm::vec3 estimatedPosition, estimatedRotation; + bool trackerActive = false; if (faceshift->isActive()) { estimatedPosition = faceshift->getHeadTranslation(); estimatedRotation = safeEulerAngles(faceshift->getHeadRotation()); + trackerActive = true; + + } else if (visage->isActive()) { + estimatedPosition = visage->getHeadTranslation(); + estimatedRotation = safeEulerAngles(visage->getHeadRotation()); + trackerActive = true; + } + + Head* head = getHead(); + if (trackerActive) { // Rotate the body if the head is turned beyond the screen if 
(Menu::getInstance()->isOptionChecked(MenuOption::TurnWithHead)) { - const float FACESHIFT_YAW_TURN_SENSITIVITY = 0.5f; - const float FACESHIFT_MIN_YAW_TURN = 15.f; - const float FACESHIFT_MAX_YAW_TURN = 50.f; - if ( (fabs(estimatedRotation.y) > FACESHIFT_MIN_YAW_TURN) && - (fabs(estimatedRotation.y) < FACESHIFT_MAX_YAW_TURN) ) { + const float TRACKER_YAW_TURN_SENSITIVITY = 0.5f; + const float TRACKER_MIN_YAW_TURN = 15.f; + const float TRACKER_MAX_YAW_TURN = 50.f; + if ( (fabs(estimatedRotation.y) > TRACKER_MIN_YAW_TURN) && + (fabs(estimatedRotation.y) < TRACKER_MAX_YAW_TURN) ) { if (estimatedRotation.y > 0.f) { - _bodyYawDelta += (estimatedRotation.y - FACESHIFT_MIN_YAW_TURN) * FACESHIFT_YAW_TURN_SENSITIVITY; + _bodyYawDelta += (estimatedRotation.y - TRACKER_MIN_YAW_TURN) * TRACKER_YAW_TURN_SENSITIVITY; } else { - _bodyYawDelta += (estimatedRotation.y + FACESHIFT_MIN_YAW_TURN) * FACESHIFT_YAW_TURN_SENSITIVITY; + _bodyYawDelta += (estimatedRotation.y + TRACKER_MIN_YAW_TURN) * TRACKER_YAW_TURN_SENSITIVITY; } } } @@ -360,10 +375,10 @@ void MyAvatar::updateFromGyros(float deltaTime) { // restore rotation, lean to neutral positions const float RESTORE_PERIOD = 1.f; // seconds float restorePercentage = glm::clamp(deltaTime/RESTORE_PERIOD, 0.f, 1.f); - _head.setYaw(glm::mix(_head.getYaw(), 0.0f, restorePercentage)); - _head.setRoll(glm::mix(_head.getRoll(), 0.0f, restorePercentage)); - _head.setLeanSideways(glm::mix(_head.getLeanSideways(), 0.0f, restorePercentage)); - _head.setLeanForward(glm::mix(_head.getLeanForward(), 0.0f, restorePercentage)); + head->setYaw(glm::mix(head->getYaw(), 0.0f, restorePercentage)); + head->setRoll(glm::mix(head->getRoll(), 0.0f, restorePercentage)); + head->setLeanSideways(glm::mix(head->getLeanSideways(), 0.0f, restorePercentage)); + head->setLeanForward(glm::mix(head->getLeanForward(), 0.0f, restorePercentage)); return; } @@ -372,17 +387,17 @@ void MyAvatar::updateFromGyros(float deltaTime) { const float AVATAR_HEAD_PITCH_MAGNIFY = 1.0f; const float AVATAR_HEAD_YAW_MAGNIFY = 1.0f; const float AVATAR_HEAD_ROLL_MAGNIFY = 1.0f; - _head.tweakPitch(estimatedRotation.x * AVATAR_HEAD_PITCH_MAGNIFY); - _head.tweakYaw(estimatedRotation.y * AVATAR_HEAD_YAW_MAGNIFY); - _head.tweakRoll(estimatedRotation.z * AVATAR_HEAD_ROLL_MAGNIFY); + head->tweakPitch(estimatedRotation.x * AVATAR_HEAD_PITCH_MAGNIFY); + head->tweakYaw(estimatedRotation.y * AVATAR_HEAD_YAW_MAGNIFY); + head->tweakRoll(estimatedRotation.z * AVATAR_HEAD_ROLL_MAGNIFY); // Update torso lean distance based on accelerometer data const float TORSO_LENGTH = 0.5f; glm::vec3 relativePosition = estimatedPosition - glm::vec3(0.0f, -TORSO_LENGTH, 0.0f); const float MAX_LEAN = 45.0f; - _head.setLeanSideways(glm::clamp(glm::degrees(atanf(relativePosition.x * _leanScale / TORSO_LENGTH)), + head->setLeanSideways(glm::clamp(glm::degrees(atanf(relativePosition.x * _leanScale / TORSO_LENGTH)), -MAX_LEAN, MAX_LEAN)); - _head.setLeanForward(glm::clamp(glm::degrees(atanf(relativePosition.z * _leanScale / TORSO_LENGTH)), + head->setLeanForward(glm::clamp(glm::degrees(atanf(relativePosition.z * _leanScale / TORSO_LENGTH)), -MAX_LEAN, MAX_LEAN)); // if Faceshift drive is enabled, set the avatar drive based on the head position @@ -391,11 +406,11 @@ void MyAvatar::updateFromGyros(float deltaTime) { } // Move with Lean by applying thrust proportional to leaning - glm::quat orientation = _head.getCameraOrientation(); + glm::quat orientation = head->getCameraOrientation(); glm::vec3 front = orientation * IDENTITY_FRONT; glm::vec3 right = 
orientation * IDENTITY_RIGHT; - float leanForward = _head.getLeanForward(); - float leanSideways = _head.getLeanSideways(); + float leanForward = head->getLeanForward(); + float leanSideways = head->getLeanSideways(); // Degrees of 'dead zone' when leaning, and amount of acceleration to apply to lean angle const float LEAN_FWD_DEAD_ZONE = 15.f; @@ -426,7 +441,7 @@ static TextRenderer* textRenderer() { void MyAvatar::renderDebugBodyPoints() { glm::vec3 torsoPosition(getPosition()); - glm::vec3 headPosition(getHead().getEyePosition()); + glm::vec3 headPosition(getHead()->getEyePosition()); float torsoToHead = glm::length(headPosition - torsoPosition); glm::vec3 position; printf("head-above-torso %.2f, scale = %0.2f\n", torsoToHead, getScale()); @@ -452,7 +467,10 @@ void MyAvatar::renderDebugBodyPoints() { void MyAvatar::render(bool forceRenderHead) { // render body - if (Menu::getInstance()->isOptionChecked(MenuOption::CollisionProxies)) { + if (Menu::getInstance()->isOptionChecked(MenuOption::RenderSkeletonCollisionProxies)) { + _skeletonModel.renderCollisionProxies(1.f); + } + if (Menu::getInstance()->isOptionChecked(MenuOption::RenderHeadCollisionProxies)) { _skeletonModel.renderCollisionProxies(1.f); } if (Menu::getInstance()->isOptionChecked(MenuOption::Avatars)) { @@ -469,7 +487,7 @@ void MyAvatar::render(bool forceRenderHead) { } glPushMatrix(); - glm::vec3 chatPosition = getHead().getEyePosition() + getBodyUpDirection() * CHAT_MESSAGE_HEIGHT * _scale; + glm::vec3 chatPosition = getHead()->getEyePosition() + getBodyUpDirection() * CHAT_MESSAGE_HEIGHT * _scale; glTranslatef(chatPosition.x, chatPosition.y, chatPosition.z); glm::quat chatRotation = Application::getInstance()->getCamera()->getRotation(); glm::vec3 chatAxis = glm::axis(chatRotation); @@ -579,13 +597,13 @@ void MyAvatar::saveData(QSettings* settings) { settings->setValue("bodyPitch", _bodyPitch); settings->setValue("bodyRoll", _bodyRoll); - settings->setValue("headPitch", _head.getPitch()); + settings->setValue("headPitch", getHead()->getPitch()); settings->setValue("position_x", _position.x); settings->setValue("position_y", _position.y); settings->setValue("position_z", _position.z); - settings->setValue("pupilDilation", _head.getPupilDilation()); + settings->setValue("pupilDilation", getHead()->getPupilDilation()); settings->setValue("leanScale", _leanScale); settings->setValue("scale", _targetScale); @@ -605,13 +623,13 @@ void MyAvatar::loadData(QSettings* settings) { _bodyPitch = loadSetting(settings, "bodyPitch", 0.0f); _bodyRoll = loadSetting(settings, "bodyRoll", 0.0f); - _head.setPitch(loadSetting(settings, "headPitch", 0.0f)); + getHead()->setPitch(loadSetting(settings, "headPitch", 0.0f)); _position.x = loadSetting(settings, "position_x", 0.0f); _position.y = loadSetting(settings, "position_y", 0.0f); _position.z = loadSetting(settings, "position_z", 0.0f); - _head.setPupilDilation(loadSetting(settings, "pupilDilation", 0.0f)); + getHead()->setPupilDilation(loadSetting(settings, "pupilDilation", 0.0f)); _leanScale = loadSetting(settings, "leanScale", 0.05f); _targetScale = loadSetting(settings, "scale", 1.0f); @@ -647,9 +665,9 @@ void MyAvatar::orbit(const glm::vec3& position, int deltaX, int deltaY) { setOrientation(orientation); // then vertically - float oldPitch = _head.getPitch(); - _head.setPitch(oldPitch + deltaY * -ANGULAR_SCALE); - rotation = glm::angleAxis(_head.getPitch() - oldPitch, orientation * IDENTITY_RIGHT); + float oldPitch = getHead()->getPitch(); + getHead()->setPitch(oldPitch + deltaY * 
-ANGULAR_SCALE); + rotation = glm::angleAxis(getHead()->getPitch() - oldPitch, orientation * IDENTITY_RIGHT); setPosition(position + rotation * (getPosition() - position)); } @@ -669,8 +687,8 @@ void MyAvatar::updateLookAtTargetAvatar(glm::vec3 &eyePosition) { float distance; if (avatar->findRayIntersection(mouseOrigin, mouseDirection, distance)) { // rescale to compensate for head embiggening - eyePosition = (avatar->getHead().calculateAverageEyePosition() - avatar->getHead().getScalePivot()) * - (avatar->getScale() / avatar->getHead().getScale()) + avatar->getHead().getScalePivot(); + eyePosition = (avatar->getHead()->calculateAverageEyePosition() - avatar->getHead()->getScalePivot()) * + (avatar->getScale() / avatar->getHead()->getScale()) + avatar->getHead()->getScalePivot(); _lookAtTargetAvatar = avatarPointer; return; } else { @@ -686,7 +704,8 @@ void MyAvatar::clearLookAtTargetAvatar() { } float MyAvatar::getAbsoluteHeadYaw() const { - return glm::yaw(_head.getOrientation()); + const Head* head = static_cast<const Head*>(_headData); + return glm::yaw(head->getOrientation()); } glm::vec3 MyAvatar::getUprightHeadPosition() const { @@ -700,17 +719,17 @@ void MyAvatar::renderBody(bool forceRenderHead) { // Render head so long as the camera isn't inside it const float RENDER_HEAD_CUTOFF_DISTANCE = 0.10f; Camera* myCamera = Application::getInstance()->getCamera(); - if (forceRenderHead || (glm::length(myCamera->getPosition() - _head.calculateAverageEyePosition()) > RENDER_HEAD_CUTOFF_DISTANCE)) { - _head.render(1.0f); + if (forceRenderHead || (glm::length(myCamera->getPosition() - getHead()->calculateAverageEyePosition()) > RENDER_HEAD_CUTOFF_DISTANCE)) { + getHead()->render(1.0f); } - _hand.render(true); + getHand()->render(true); } void MyAvatar::updateThrust(float deltaTime) { // // Gather thrust information from keyboard and sensors to apply to avatar motion // - glm::quat orientation = getHead().getCameraOrientation(); + glm::quat orientation = getHead()->getCameraOrientation(); glm::vec3 front = orientation * IDENTITY_FRONT; glm::vec3 right = orientation * IDENTITY_RIGHT; glm::vec3 up = orientation * IDENTITY_UP; @@ -731,7 +750,7 @@ void MyAvatar::updateThrust(float deltaTime) { _thrust -= _driveKeys[DOWN] * _scale * THRUST_MAG_DOWN * _thrustMultiplier * deltaTime * up; _bodyYawDelta -= _driveKeys[ROT_RIGHT] * YAW_MAG * deltaTime; _bodyYawDelta += _driveKeys[ROT_LEFT] * YAW_MAG * deltaTime; - _head.setPitch(_head.getPitch() + (_driveKeys[ROT_UP] - _driveKeys[ROT_DOWN]) * PITCH_MAG * deltaTime); + getHead()->setPitch(getHead()->getPitch() + (_driveKeys[ROT_UP] - _driveKeys[ROT_DOWN]) * PITCH_MAG * deltaTime); // If thrust keys are being held down, slowly increase thrust to allow reaching great speeds if (_driveKeys[FWD] || _driveKeys[BACK] || _driveKeys[RIGHT] || _driveKeys[LEFT] || _driveKeys[UP] || _driveKeys[DOWN]) { @@ -996,10 +1015,10 @@ void MyAvatar::updateCollisionWithAvatars(float deltaTime) { } // collide our hands against them - _hand.collideAgainstAvatar(avatar, true); + getHand()->collideAgainstAvatar(avatar, true); // collide their hands against us - avatar->getHand().collideAgainstAvatar(this, false); + avatar->getHand()->collideAgainstAvatar(this, false); } } } @@ -1114,7 +1133,7 @@ void MyAvatar::updateChatCircle(float deltaTime) { void MyAvatar::setGravity(glm::vec3 gravity) { _gravity = gravity; - _head.setGravity(_gravity); + getHead()->setGravity(_gravity); // use the gravity to determine the new world up direction, if possible float gravityLength = glm::length(gravity); diff
--git a/interface/src/avatar/MyAvatar.h b/interface/src/avatar/MyAvatar.h index d8cb4c05aa..1bc5de204b 100644 --- a/interface/src/avatar/MyAvatar.h +++ b/interface/src/avatar/MyAvatar.h @@ -86,7 +86,6 @@ public: public slots: void goHome(); - void setWantCollisionsOn(bool wantCollisionsOn) { _isCollisionsOn = wantCollisionsOn; } void increaseSize(); void decreaseSize(); void resetSize(); @@ -110,7 +109,6 @@ private: float _elapsedTimeSinceCollision; glm::vec3 _lastCollisionPosition; bool _speedBrakes; - bool _isCollisionsOn; bool _isThrustOn; float _thrustMultiplier; glm::vec3 _moveTarget; diff --git a/interface/src/avatar/SkeletonModel.cpp b/interface/src/avatar/SkeletonModel.cpp index 9bf2e0f727..0396b80e58 100644 --- a/interface/src/avatar/SkeletonModel.cpp +++ b/interface/src/avatar/SkeletonModel.cpp @@ -8,10 +8,9 @@ #include -#include - #include "Application.h" #include "Avatar.h" +#include "Hand.h" #include "Menu.h" #include "SkeletonModel.h" @@ -33,8 +32,8 @@ void SkeletonModel::simulate(float deltaTime) { // find the left and rightmost active Leap palms int leftPalmIndex, rightPalmIndex; - HandData& hand = _owningAvatar->getHand(); - hand.getLeftRightPalmIndices(leftPalmIndex, rightPalmIndex); + Hand* hand = _owningAvatar->getHand(); + hand->getLeftRightPalmIndices(leftPalmIndex, rightPalmIndex); const float HAND_RESTORATION_PERIOD = 1.f; // seconds float handRestorePercent = glm::clamp(deltaTime / HAND_RESTORATION_PERIOD, 0.f, 1.f); @@ -52,14 +51,14 @@ void SkeletonModel::simulate(float deltaTime) { } else if (leftPalmIndex == rightPalmIndex) { // right hand only applyPalmData(geometry.rightHandJointIndex, geometry.rightFingerJointIndices, geometry.rightFingertipJointIndices, - hand.getPalms()[leftPalmIndex]); + hand->getPalms()[leftPalmIndex]); restoreLeftHandPosition(handRestorePercent); } else { applyPalmData(geometry.leftHandJointIndex, geometry.leftFingerJointIndices, geometry.leftFingertipJointIndices, - hand.getPalms()[leftPalmIndex]); + hand->getPalms()[leftPalmIndex]); applyPalmData(geometry.rightHandJointIndex, geometry.rightFingerJointIndices, geometry.rightFingertipJointIndices, - hand.getPalms()[rightPalmIndex]); + hand->getPalms()[rightPalmIndex]); } } @@ -182,8 +181,8 @@ void SkeletonModel::maybeUpdateLeanRotation(const JointState& parentState, const glm::mat3 axes = glm::mat3_cast(_rotation); glm::mat3 inverse = glm::mat3(glm::inverse(parentState.transform * glm::translate(state.translation) * joint.preTransform * glm::mat4_cast(joint.preRotation * joint.rotation))); - state.rotation = glm::angleAxis(-_owningAvatar->getHead().getLeanSideways(), glm::normalize(inverse * axes[2])) * - glm::angleAxis(-_owningAvatar->getHead().getLeanForward(), glm::normalize(inverse * axes[0])) * joint.rotation; + state.rotation = glm::angleAxis(-_owningAvatar->getHead()->getLeanSideways(), glm::normalize(inverse * axes[2])) * + glm::angleAxis(-_owningAvatar->getHead()->getLeanForward(), glm::normalize(inverse * axes[0])) * joint.rotation; } void SkeletonModel::stretchArm(int jointIndex, const glm::vec3& position) { diff --git a/interface/src/devices/SixenseManager.cpp b/interface/src/devices/SixenseManager.cpp index 79feb5eb3f..9ff34e698e 100644 --- a/interface/src/devices/SixenseManager.cpp +++ b/interface/src/devices/SixenseManager.cpp @@ -45,7 +45,7 @@ void SixenseManager::update(float deltaTime) { return; } MyAvatar* avatar = Application::getInstance()->getAvatar(); - Hand& hand = avatar->getHand(); + Hand* hand = avatar->getHand(); int maxControllers = 
sixenseGetMaxControllers(); for (int i = 0; i < maxControllers; i++) { @@ -60,16 +60,16 @@ void SixenseManager::update(float deltaTime) { // Either find a palm matching the sixense controller, or make a new one PalmData* palm; bool foundHand = false; - for (int j = 0; j < hand.getNumPalms(); j++) { - if (hand.getPalms()[j].getSixenseID() == data.controller_index) { - palm = &hand.getPalms()[j]; + for (int j = 0; j < hand->getNumPalms(); j++) { + if (hand->getPalms()[j].getSixenseID() == data.controller_index) { + palm = &(hand->getPalms()[j]); foundHand = true; } } if (!foundHand) { - PalmData newPalm(&hand); - hand.getPalms().push_back(newPalm); - palm = &hand.getPalms()[hand.getNumPalms() - 1]; + PalmData newPalm(hand); + hand->getPalms().push_back(newPalm); + palm = &(hand->getPalms()[hand->getNumPalms() - 1]); palm->setSixenseID(data.controller_index); printf("Found new Sixense controller, ID %i\n", data.controller_index); } @@ -107,7 +107,7 @@ void SixenseManager::update(float deltaTime) { } // initialize the "finger" based on the direction - FingerData finger(palm, &hand); + FingerData finger(palm, hand); finger.setActive(true); finger.setRawRootPosition(position); const float FINGER_LENGTH = 300.0f; // Millimeters @@ -130,7 +130,7 @@ void SixenseManager::update(float deltaTime) { // if the controllers haven't been moved in a while, disable const int MOVEMENT_DISABLE_DURATION = 30 * 1000 * 1000; if (usecTimestampNow() - _lastMovement > MOVEMENT_DISABLE_DURATION) { - for (vector<PalmData>::iterator it = hand.getPalms().begin(); it != hand.getPalms().end(); it++) { + for (vector<PalmData>::iterator it = hand->getPalms().begin(); it != hand->getPalms().end(); it++) { it->setActive(false); } } diff --git a/interface/src/devices/Visage.cpp b/interface/src/devices/Visage.cpp new file mode 100644 index 0000000000..c3dfeab6b2 --- /dev/null +++ b/interface/src/devices/Visage.cpp @@ -0,0 +1,166 @@ +// +// Visage.cpp +// interface +// +// Created by Andrzej Kapolka on 2/11/14. +// Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
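The SixenseManager::update() hunk above either locates the palm whose stored ID matches the reporting controller or appends a new one and then takes its address. A sketch of the same find-or-create step that hands back an index instead of a raw pointer, which stays valid even if the palm vector reallocates on a later call; the Palm struct here is a stand-in for PalmData.

    #include <cstddef>
    #include <vector>

    struct Palm {
        int sixenseID = -1;
        bool active = false;
    };

    // Find the palm bound to controllerIndex, creating one the first time the
    // controller is seen. Returning an index avoids holding a pointer into a
    // vector that may grow (and therefore reallocate) later.
    size_t findOrCreatePalm(std::vector<Palm>& palms, int controllerIndex) {
        for (size_t i = 0; i < palms.size(); i++) {
            if (palms[i].sixenseID == controllerIndex) {
                return i;
            }
        }
        Palm newPalm;
        newPalm.sixenseID = controllerIndex;
        palms.push_back(newPalm);
        return palms.size() - 1;
    }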
+// + +#include + +#include + +#ifdef HAVE_VISAGE +#include +#endif + +#include "Visage.h" +#include "renderer/FBXReader.h" + +namespace VisageSDK { +#ifdef WIN32 + void __declspec(dllimport) initializeLicenseManager(char* licenseKeyFileName); +#else + void initializeLicenseManager(char* licenseKeyFileName); +#endif +} + +using namespace VisageSDK; + +const glm::vec3 DEFAULT_HEAD_ORIGIN(0.0f, 0.0f, 0.7f); + +Visage::Visage() : +#ifdef HAVE_VISAGE + _leftInnerBrowIndex(0), + _rightInnerBrowIndex(0), +#endif + _active(false), + _headOrigin(DEFAULT_HEAD_ORIGIN), + _estimatedEyePitch(0.0f), + _estimatedEyeYaw(0.0f) { + +#ifdef HAVE_VISAGE + switchToResourcesParentIfRequired(); + QByteArray licensePath = "resources/visage/license.vlc"; + initializeLicenseManager(licensePath.data()); + _tracker = new VisageTracker2("resources/visage/Facial Features Tracker - Asymmetric.cfg"); + if (_tracker->trackFromCam()) { + _data = new FaceData(); + + } else { + delete _tracker; + _tracker = NULL; + } +#endif +} + +Visage::~Visage() { +#ifdef HAVE_VISAGE + if (_tracker) { + _tracker->stop(); + delete _tracker; + delete _data; + } +#endif +} + +#ifdef HAVE_VISAGE +static int leftEyeBlinkIndex = 0; +static int rightEyeBlinkIndex = 1; +static int centerBrowIndex = 16; + +static QHash<QByteArray, int> createBlendshapeIndices() { + QHash<QByteArray, QByteArray> blendshapeMap; + blendshapeMap.insert("Sneer", "au_nose_wrinkler"); + blendshapeMap.insert("JawFwd", "au_jaw_z_push"); + blendshapeMap.insert("JawLeft", "au_jaw_x_push"); + blendshapeMap.insert("JawOpen", "au_jaw_drop"); + blendshapeMap.insert("LipsLowerDown", "au_lower_lip_drop"); + blendshapeMap.insert("LipsUpperUp", "au_upper_lip_raiser"); + blendshapeMap.insert("LipsStretch_R", "au_lip_stretcher_left"); + blendshapeMap.insert("BrowsU_R", "au_left_outer_brow_raiser"); + blendshapeMap.insert("BrowsD_R", "au_left_brow_lowerer"); + blendshapeMap.insert("LipsStretch_L", "au_lip_stretcher_right"); + blendshapeMap.insert("BrowsU_L", "au_right_outer_brow_raiser"); + blendshapeMap.insert("BrowsD_L", "au_right_brow_lowerer"); + + QHash<QByteArray, int> blendshapeIndices; + for (int i = 0;; i++) { + QByteArray blendshape = FACESHIFT_BLENDSHAPES[i]; + if (blendshape.isEmpty()) { + break; + } + if (blendshape == "EyeBlink_L") { + leftEyeBlinkIndex = i; + + } else if (blendshape == "EyeBlink_R") { + rightEyeBlinkIndex = i; + + } else if (blendshape == "BrowsU_C") { + centerBrowIndex = i; + } + QByteArray mapping = blendshapeMap.value(blendshape); + if (!mapping.isEmpty()) { + blendshapeIndices.insert(mapping, i + 1); + } + } + + return blendshapeIndices; +} + +static const QHash<QByteArray, int>& getBlendshapeIndices() { + static QHash<QByteArray, int> blendshapeIndices = createBlendshapeIndices(); + return blendshapeIndices; +} +#endif + +const float TRANSLATION_SCALE = 20.0f; + +void Visage::update() { +#ifdef HAVE_VISAGE + _active = (_tracker && _tracker->getTrackingData(_data) == TRACK_STAT_OK); + if (!_active) { + return; + } + _headRotation = glm::quat(glm::vec3(-_data->faceRotation[0], -_data->faceRotation[1], _data->faceRotation[2])); + _headTranslation = (glm::vec3(_data->faceTranslation[0], _data->faceTranslation[1], _data->faceTranslation[2]) - + _headOrigin) * TRANSLATION_SCALE; + _estimatedEyePitch = glm::degrees(-_data->gazeDirection[1]); + _estimatedEyeYaw = glm::degrees(-_data->gazeDirection[0]); + + if (_blendshapeIndices.isEmpty()) { + _blendshapeIndices.resize(_data->actionUnitCount); + int maxIndex = -1; + for (int i = 0; i < _data->actionUnitCount; i++) { + QByteArray name = _data->actionUnitsNames[i]; + if (name ==
"au_left_inner_brow_raiser") { + _leftInnerBrowIndex = i; + } else if (name == "au_right_inner_brow_raiser") { + _rightInnerBrowIndex = i; + } + int index = getBlendshapeIndices().value(name) - 1; + maxIndex = qMax(maxIndex, _blendshapeIndices[i] = index); + } + _blendshapeCoefficients.resize(maxIndex + 1); + } + + qFill(_blendshapeCoefficients.begin(), _blendshapeCoefficients.end(), 0.0f); + for (int i = 0; i < _data->actionUnitCount; i++) { + if (!_data->actionUnitsUsed[i]) { + continue; + } + int index = _blendshapeIndices.at(i); + if (index != -1) { + _blendshapeCoefficients[index] = _data->actionUnits[i]; + } + } + _blendshapeCoefficients[leftEyeBlinkIndex] = 1.0f - _data->eyeClosure[1]; + _blendshapeCoefficients[rightEyeBlinkIndex] = 1.0f - _data->eyeClosure[0]; + _blendshapeCoefficients[centerBrowIndex] = (_data->actionUnits[_leftInnerBrowIndex] + + _data->actionUnits[_rightInnerBrowIndex]) * 0.5f; +#endif +} + +void Visage::reset() { + _headOrigin += _headTranslation / TRANSLATION_SCALE; +} diff --git a/interface/src/devices/Visage.h b/interface/src/devices/Visage.h new file mode 100644 index 0000000000..a5c826d1bf --- /dev/null +++ b/interface/src/devices/Visage.h @@ -0,0 +1,66 @@ +// +// Visage.h +// interface +// +// Created by Andrzej Kapolka on 2/11/14. +// Copyright (c) 2014 High Fidelity, Inc. All rights reserved. +// + +#ifndef __interface__Visage__ +#define __interface__Visage__ + +#include + +#include + +#include +#include + +namespace VisageSDK { + class VisageTracker2; + struct FaceData; +} + +/// Handles input from the Visage webcam feature tracking software. +class Visage { +public: + + Visage(); + ~Visage(); + + bool isActive() const { return _active; } + + const glm::quat& getHeadRotation() const { return _headRotation; } + const glm::vec3& getHeadTranslation() const { return _headTranslation; } + + float getEstimatedEyePitch() const { return _estimatedEyePitch; } + float getEstimatedEyeYaw() const { return _estimatedEyeYaw; } + + const std::vector& getBlendshapeCoefficients() const { return _blendshapeCoefficients; } + + void update(); + void reset(); + +private: + +#ifdef HAVE_VISAGE + VisageSDK::VisageTracker2* _tracker; + VisageSDK::FaceData* _data; + int _leftInnerBrowIndex; + int _rightInnerBrowIndex; + QVector _blendshapeIndices; +#endif + + bool _active; + glm::quat _headRotation; + glm::vec3 _headTranslation; + + glm::vec3 _headOrigin; + + float _estimatedEyePitch; + float _estimatedEyeYaw; + + std::vector _blendshapeCoefficients; +}; + +#endif /* defined(__interface__Visage__) */ diff --git a/interface/src/renderer/FBXReader.cpp b/interface/src/renderer/FBXReader.cpp index 35512d88da..8b881940ca 100644 --- a/interface/src/renderer/FBXReader.cpp +++ b/interface/src/renderer/FBXReader.cpp @@ -1405,6 +1405,11 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping) cluster.jointIndex = 0; } extracted.mesh.clusters.append(cluster); + // BUG: joints that fall into this context do not get their bindTransform and + // inverseBindRotation data members properly set. This causes bad boneRadius + // and boneLength calculations for collision proxies. Affected joints are usually: + // hair, teeth, tongue. I tried to figure out how to fix this but was going + // crosseyed trying to understand FBX so I gave up for the time being -- Andrew. 
} // whether we're skinned depends on how many clusters are attached diff --git a/interface/src/renderer/FBXReader.h b/interface/src/renderer/FBXReader.h index 8efb23c98c..b89d0954b4 100644 --- a/interface/src/renderer/FBXReader.h +++ b/interface/src/renderer/FBXReader.h @@ -21,6 +21,9 @@ class FBXNode; typedef QList<FBXNode> FBXNodeList; +/// The names of the blendshapes expected by Faceshift, terminated with an empty string. +extern const char* FACESHIFT_BLENDSHAPES[]; + /// A node within an FBX document. class FBXNode { public: diff --git a/interface/src/renderer/Model.cpp b/interface/src/renderer/Model.cpp index 48e1d0f70c..f1916db4d1 100644 --- a/interface/src/renderer/Model.cpp +++ b/interface/src/renderer/Model.cpp @@ -421,6 +421,7 @@ void Model::setURL(const QUrl& url, const QUrl& fallback) { _dilatedTextures.clear(); _lodHysteresis = NetworkGeometry::NO_HYSTERESIS; + // we retain a reference to the base geometry so that its reference count doesn't fall to zero _baseGeometry = _geometry = Application::getInstance()->getGeometryCache()->getGeometry(url, fallback); } diff --git a/interface/src/renderer/Model.h b/interface/src/renderer/Model.h index 1d1cdc22a7..28189d0379 100644 --- a/interface/src/renderer/Model.h +++ b/interface/src/renderer/Model.h @@ -227,7 +227,7 @@ private: void deleteGeometry(); void renderMeshes(float alpha, bool translucent); - QSharedPointer<NetworkGeometry> _baseGeometry; + QSharedPointer<NetworkGeometry> _baseGeometry; ///< reference required to prevent collection of base float _lodHysteresis; float _pupilDilation; diff --git a/interface/src/ui/Snapshot.cpp b/interface/src/ui/Snapshot.cpp index e16b0c570d..6ffaf23564 100644 --- a/interface/src/ui/Snapshot.cpp +++ b/interface/src/ui/Snapshot.cpp @@ -64,7 +64,7 @@ void Snapshot::saveSnapshot(QGLWidget* widget, Profile* profile, Avatar* avatar) QImage shot = widget->grabFrameBuffer(); glm::vec3 location = avatar->getPosition(); - glm::quat orientation = avatar->getHead().getOrientation(); + glm::quat orientation = avatar->getHead()->getOrientation(); // add metadata shot.setText(LOCATION_X, QString::number(location.x)); diff --git a/libraries/avatars/src/AvatarData.cpp b/libraries/avatars/src/AvatarData.cpp index a49bd83df6..8e9eb430e8 100644 --- a/libraries/avatars/src/AvatarData.cpp +++ b/libraries/avatars/src/AvatarData.cpp @@ -66,10 +66,6 @@ QByteArray AvatarData::toByteArray() { if (!_headData) { _headData = new HeadData(this); } - // lazily allocate memory for HandData in case we're not an Avatar instance - if (!_handData) { - _handData = new HandData(this); - } QByteArray avatarDataByteArray; avatarDataByteArray.resize(MAX_PACKET_SIZE); @@ -155,8 +151,8 @@ QByteArray AvatarData::toByteArray() { // pupil dilation destinationBuffer += packFloatToByte(destinationBuffer, _headData->_pupilDilation, 1.0f); - // leap hand data - destinationBuffer += _handData->encodeRemoteData(destinationBuffer); + // hand data + destinationBuffer += HandData::encodeData(_handData, destinationBuffer); return avatarDataByteArray.left(destinationBuffer - startPosition); } @@ -262,7 +258,7 @@ int AvatarData::parseData(const QByteArray& packet) { // pupil dilation sourceBuffer += unpackFloatFromByte(sourceBuffer, _headData->_pupilDilation, 1.0f); - // leap hand data + // hand data if (sourceBuffer - startPosition < packet.size()) { // check passed, bytes match sourceBuffer += _handData->decodeRemoteData(packet.mid(sourceBuffer - startPosition)); diff --git a/libraries/avatars/src/AvatarData.h b/libraries/avatars/src/AvatarData.h index 48cfd12940..a889b52bd0 100755 ---
a/libraries/avatars/src/AvatarData.h +++ b/libraries/avatars/src/AvatarData.h @@ -139,9 +139,6 @@ public: const HeadData* getHeadData() const { return _headData; } const HandData* getHandData() const { return _handData; } - void setHeadData(HeadData* headData) { _headData = headData; } - void setHandData(HandData* handData) { _handData = handData; } - virtual const glm::vec3& getVelocity() const { return vec3Zero; } virtual bool findParticleCollisions(const glm::vec3& particleCenter, float particleRadius, CollisionList& collisions) { diff --git a/libraries/avatars/src/HandData.cpp b/libraries/avatars/src/HandData.cpp index 5a923eea93..e4bb187f28 100644 --- a/libraries/avatars/src/HandData.cpp +++ b/libraries/avatars/src/HandData.cpp @@ -113,17 +113,30 @@ _owningHandData(owningHandData) setTrailLength(standardTrailLength); } +// static +int HandData::encodeData(HandData* hand, unsigned char* destinationBuffer) { + if (hand) { + return hand->encodeRemoteData(destinationBuffer); + } + // else encode empty data: + // One byte for zero hands + // One byte for error checking. + *destinationBuffer = 0; + *(destinationBuffer + 1) = 1; + return 2; +} + int HandData::encodeRemoteData(unsigned char* destinationBuffer) { const unsigned char* startPosition = destinationBuffer; - unsigned int numHands = 0; + unsigned int numPalms = 0; for (unsigned int handIndex = 0; handIndex < getNumPalms(); ++handIndex) { PalmData& palm = getPalms()[handIndex]; if (palm.isActive()) { - numHands++; + numPalms++; } } - *destinationBuffer++ = numHands; + *destinationBuffer++ = numPalms; for (unsigned int handIndex = 0; handIndex < getNumPalms(); ++handIndex) { PalmData& palm = getPalms()[handIndex]; @@ -162,9 +175,9 @@ int HandData::encodeRemoteData(unsigned char* destinationBuffer) { int HandData::decodeRemoteData(const QByteArray& dataByteArray) { const unsigned char* startPosition; const unsigned char* sourceBuffer = startPosition = reinterpret_cast<const unsigned char*>(dataByteArray.data()); - unsigned int numHands = *sourceBuffer++; + unsigned int numPalms = *sourceBuffer++; - for (unsigned int handIndex = 0; handIndex < numHands; ++handIndex) { + for (unsigned int handIndex = 0; handIndex < numPalms; ++handIndex) { if (handIndex >= getNumPalms()) addNewPalm(); PalmData& palm = getPalms()[handIndex]; @@ -203,7 +216,7 @@ int HandData::decodeRemoteData(const QByteArray& dataByteArray) { } } // Turn off any hands which weren't used.
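HandData::encodeData() above also covers the case where an AvatarData instance has no HandData at all: it emits a zero palm count plus one check byte so the stream the decoder sees stays well-formed. A minimal sketch of just that empty-hand framing; the meaning of the check byte is taken from the comment above, and the constants are illustrative rather than a full description of the wire format.

    #include <cstddef>
    #include <cstdint>

    // Empty-hand encoding: one byte for "zero palms", one trailing check byte.
    size_t encodeEmptyHandData(uint8_t* destinationBuffer) {
        destinationBuffer[0] = 0; // no active palms follow
        destinationBuffer[1] = 1; // check byte, mirroring HandData::encodeData() above
        return 2;                 // bytes written
    }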
- for (unsigned int handIndex = numHands; handIndex < getNumPalms(); ++handIndex) { + for (unsigned int handIndex = numPalms; handIndex < getNumPalms(); ++handIndex) { PalmData& palm = getPalms()[handIndex]; palm.setActive(false); } diff --git a/libraries/avatars/src/HandData.h b/libraries/avatars/src/HandData.h index 550c62e829..5f7a49e0a2 100755 --- a/libraries/avatars/src/HandData.h +++ b/libraries/avatars/src/HandData.h @@ -71,6 +71,8 @@ public: void setFingerTrailLength(unsigned int length); void updateFingerTrails(); + static int encodeData(HandData* hand, unsigned char* destinationBuffer); + // Use these for sending and receiving hand data int encodeRemoteData(unsigned char* destinationBuffer); int decodeRemoteData(const QByteArray& dataByteArray); diff --git a/libraries/script-engine/src/AbstractControllerScriptingInterface.h b/libraries/script-engine/src/AbstractControllerScriptingInterface.h index d9878d0b71..1878edd4d6 100644 --- a/libraries/script-engine/src/AbstractControllerScriptingInterface.h +++ b/libraries/script-engine/src/AbstractControllerScriptingInterface.h @@ -52,6 +52,10 @@ public slots: virtual void captureWheelEvents() = 0; virtual void releaseWheelEvents() = 0; + virtual void captureJoystick(int joystickIndex) = 0; + virtual void releaseJoystick(int joystickIndex) = 0; + + virtual glm::vec2 getViewportDimensions() const = 0; signals: void keyPressEvent(const KeyEvent& event); diff --git a/libraries/script-engine/src/EventTypes.cpp b/libraries/script-engine/src/EventTypes.cpp index 963912fd34..d0ba160add 100644 --- a/libraries/script-engine/src/EventTypes.cpp +++ b/libraries/script-engine/src/EventTypes.cpp @@ -102,6 +102,8 @@ KeyEvent::KeyEvent(const QKeyEvent& event) { text = "END"; } else if (key == Qt::Key_Help) { text = "HELP"; + } else if (key == Qt::Key_CapsLock) { + text = "CAPS LOCK"; } } @@ -208,6 +210,8 @@ void keyEventFromScriptValue(const QScriptValue& object, KeyEvent& event) { event.key = Qt::Key_End; } else if (event.text.toUpper() == "HELP") { event.key = Qt::Key_Help; + } else if (event.text.toUpper() == "CAPS LOCK") { + event.key = Qt::Key_CapsLock; } else { event.key = event.text.at(0).unicode(); } @@ -258,10 +262,17 @@ MouseEvent::MouseEvent() : }; -MouseEvent::MouseEvent(const QMouseEvent& event) { - x = event.x(); - y = event.y(); - +MouseEvent::MouseEvent(const QMouseEvent& event) : + x(event.x()), + y(event.y()), + isLeftButton(event.buttons().testFlag(Qt::LeftButton)), + isRightButton(event.buttons().testFlag(Qt::RightButton)), + isMiddleButton(event.buttons().testFlag(Qt::MiddleButton)), + isShifted(event.modifiers().testFlag(Qt::ShiftModifier)), + isControl(event.modifiers().testFlag(Qt::ControlModifier)), + isMeta(event.modifiers().testFlag(Qt::MetaModifier)), + isAlt(event.modifiers().testFlag(Qt::AltModifier)) +{ // single button that caused the event switch (event.button()) { case Qt::LeftButton: @@ -280,16 +291,6 @@ MouseEvent::MouseEvent(const QMouseEvent& event) { button = "NONE"; break; } - // button pressed state - isLeftButton = isLeftButton || (event.buttons().testFlag(Qt::LeftButton)); - isRightButton = isRightButton || (event.buttons().testFlag(Qt::RightButton)); - isMiddleButton = isMiddleButton || (event.buttons().testFlag(Qt::MiddleButton)); - - // keyboard modifiers - isShifted = event.modifiers().testFlag(Qt::ShiftModifier); - isMeta = event.modifiers().testFlag(Qt::MetaModifier); - isControl = event.modifiers().testFlag(Qt::ControlModifier); - isAlt = event.modifiers().testFlag(Qt::AltModifier); } QScriptValue 
mouseEventToScriptValue(QScriptEngine* engine, const MouseEvent& event) {
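The reworked MouseEvent constructor at the end of this patch moves the button and modifier flags into the member-initializer list; the replaced version OR'd each flag into a member the constructor had not yet set. A small sketch of the same pattern with a generic flags word instead of Qt's QMouseEvent; the names here are hypothetical.

    #include <cstdint>

    enum ButtonFlag : uint32_t {
        LEFT_BUTTON   = 1u << 0,
        RIGHT_BUTTON  = 1u << 1,
        MIDDLE_BUTTON = 1u << 2
    };

    struct PointerEvent {
        int x;
        int y;
        bool isLeftButton;
        bool isRightButton;
        bool isMiddleButton;

        // Every member gets a value in the initializer list, so nothing is read
        // before it has been initialized.
        PointerEvent(int eventX, int eventY, uint32_t buttons) :
            x(eventX),
            y(eventY),
            isLeftButton(buttons & LEFT_BUTTON),
            isRightButton(buttons & RIGHT_BUTTON),
            isMiddleButton(buttons & MIDDLE_BUTTON) {
        }
    };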