Mirror of https://github.com/overte-org/overte.git (synced 2025-04-10 16:12:28 +02:00)

Commit 3c4d2aa199: Merge branch 'master' of https://github.com/highfidelity/hifi into metavoxels

29 changed files with 939 additions and 100 deletions

6  .gitignore  (vendored)

@@ -39,5 +39,11 @@ interface/external/Leap/util/
interface/external/Sixense/include/
interface/external/Sixense/lib/

# Ignore Visage
interface/external/visage/dependencies/
interface/external/visage/include/
interface/external/visage/lib/
interface/resources/visage/

# Ignore interfaceCache for Linux users
interface/interfaceCache/

76  cmake/modules/FindVisage.cmake  (new file)

@@ -0,0 +1,76 @@
# Try to find the Visage face tracking library
#
# You must provide a VISAGE_ROOT_DIR which contains lib and include directories
#
# Once done this will define
#
# VISAGE_FOUND - system found Visage
# VISAGE_INCLUDE_DIRS - the Visage include directory
# VISAGE_LIBRARIES - Link this to use Visage
#
# Created on 2/11/2014 by Andrzej Kapolka
# Copyright (c) 2014 High Fidelity
#

if (VISAGE_LIBRARIES AND VISAGE_INCLUDE_DIRS)
    # in cache already
    set(VISAGE_FOUND TRUE)
else (VISAGE_LIBRARIES AND VISAGE_INCLUDE_DIRS)
    find_path(VISAGE_INCLUDE_DIR VisageTracker2.h ${VISAGE_ROOT_DIR}/include)

    if (APPLE)
        find_path(VISAGE_XML_INCLUDE_DIR libxml/xmlreader.h /usr/include/libxml2)
        find_path(VISAGE_OPENCV_INCLUDE_DIR cv.h ${VISAGE_ROOT_DIR}/dependencies/OpenCV_MacOSX/include)
        find_path(VISAGE_OPENCV2_INCLUDE_DIR opencv.hpp ${VISAGE_ROOT_DIR}/dependencies/OpenCV_MacOSX/include/opencv2)
        if (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR)
            set(VISAGE_INCLUDE_DIRS
                "${VISAGE_INCLUDE_DIR};${VISAGE_XML_INCLUDE_DIR};${VISAGE_OPENCV_INCLUDE_DIR};${VISAGE_OPENCV2_INCLUDE_DIR}"
                CACHE INTERNAL "Visage include dirs")
        endif (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR)

        find_library(VISAGE_CORE_LIBRARY libvscore.a ${VISAGE_ROOT_DIR}/lib)
        find_library(VISAGE_VISION_LIBRARY libvsvision.a ${VISAGE_ROOT_DIR}/lib)
        find_library(VISAGE_OPENCV_LIBRARY libOpenCV.a ${VISAGE_ROOT_DIR}/dependencies/OpenCV_MacOSX/lib)
        if (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND VISAGE_OPENCV_LIBRARY)
            set(VISAGE_LIBRARIES "${VISAGE_CORE_LIBRARY};${VISAGE_VISION_LIBRARY};${VISAGE_OPENCV_LIBRARY}"
                CACHE INTERNAL "Visage libraries")
        endif (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND VISAGE_OPENCV_LIBRARY)

    elseif (WIN32)
        find_path(VISAGE_XML_INCLUDE_DIR libxml/xmlreader.h ${VISAGE_ROOT_DIR}/dependencies/libxml2/include)
        find_path(VISAGE_OPENCV_INCLUDE_DIR opencv/cv.h ${VISAGE_ROOT_DIR}/dependencies/OpenCV/include)
        find_path(VISAGE_OPENCV2_INCLUDE_DIR cv.h ${VISAGE_ROOT_DIR}/dependencies/OpenCV/include/opencv)
        if (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR)
            set(VISAGE_INCLUDE_DIRS
                "${VISAGE_INCLUDE_DIR};${VISAGE_XML_INCLUDE_DIR};${VISAGE_OPENCV_INCLUDE_DIR};${VISAGE_OPENCV2_INCLUDE_DIR}"
                CACHE INTERNAL "Visage include dirs")
        endif (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR)

        find_library(VISAGE_CORE_LIBRARY vscore.lib ${VISAGE_ROOT_DIR}/lib)
        find_library(VISAGE_VISION_LIBRARY vsvision.lib ${VISAGE_ROOT_DIR}/lib)
        find_library(VISAGE_OPENCV_LIBRARY opencv_core243.lib ${VISAGE_ROOT_DIR}/dependencies/OpenCV/lib)
        if (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND VISAGE_OPENCV_LIBRARY)
            set(VISAGE_LIBRARIES "${VISAGE_CORE_LIBRARY};${VISAGE_VISION_LIBRARY};${VISAGE_OPENCV_LIBRARY}"
                CACHE INTERNAL "Visage libraries")
        endif (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND VISAGE_OPENCV_LIBRARY)

    endif ()

    if (VISAGE_INCLUDE_DIRS AND VISAGE_LIBRARIES)
        set(VISAGE_FOUND TRUE)
    endif (VISAGE_INCLUDE_DIRS AND VISAGE_LIBRARIES)

    if (VISAGE_FOUND)
        if (NOT VISAGE_FIND_QUIETLY)
            message(STATUS "Found Visage: ${VISAGE_LIBRARIES}")
        endif (NOT VISAGE_FIND_QUIETLY)
    else (VISAGE_FOUND)
        if (VISAGE_FIND_REQUIRED)
            message(FATAL_ERROR "Could not find Visage")
        endif (VISAGE_FIND_REQUIRED)
    endif (VISAGE_FOUND)

    # show the VISAGE_INCLUDE_DIRS and VISAGE_LIBRARIES variables only in the advanced view
    mark_as_advanced(VISAGE_INCLUDE_DIRS VISAGE_LIBRARIES)

endif (VISAGE_LIBRARIES AND VISAGE_INCLUDE_DIRS)

68  examples/audioBall.js  (new file)

@@ -0,0 +1,68 @@
//
//  audioBall.js
//  hifi
//
//  Created by Athanasios Gaitatzes on 2/10/14.
//  Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
//
//  This script creates a particle that stays in front of the user's avatar
//  as they move, and animates its radius and color in response to the audio intensity.
//

var sound = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/mexicanWhipoorwill.raw");
var CHANCE_OF_PLAYING_SOUND = 0.01;

var FACTOR = 0.75;

var countParticles = 0; // the first time around we want to create the particle and thereafter to modify it
var particleID;

function updateParticle() {
    // the particle should be placed in front of the user's avatar
    var avatarFront = Quat.getFront(MyAvatar.orientation);

    // move the particle three units in front of the avatar
    var particlePosition = Vec3.sum(MyAvatar.position, Vec3.multiply(avatarFront, 3));

    if (Math.random() < CHANCE_OF_PLAYING_SOUND) {
        // play a sound at the location of the particle
        var options = new AudioInjectionOptions();
        options.position = particlePosition;
        options.volume = 0.75;
        Audio.playSound(sound, options);
    }

    var audioAverageLoudness = MyAvatar.audioAverageLoudness * FACTOR;
    //print("Audio Loudness = " + MyAvatar.audioLoudness + " -- Audio Average Loudness = " + MyAvatar.audioAverageLoudness);

    if (countParticles < 1) {
        var particleProperties = {
            position: particlePosition // the particle should stay in front of the user's avatar as they move
            , color: { red: 0, green: 255, blue: 0 }
            , radius: audioAverageLoudness
            , velocity: { x: 0.0, y: 0.0, z: 0.0 }
            , gravity: { x: 0.0, y: 0.0, z: 0.0 }
            , damping: 0.0
        };

        particleID = Particles.addParticle(particleProperties);
        countParticles++;
    }
    else {
        // animate the particle's radius and color in response to the changing audio intensity
        var newProperties = {
            position: particlePosition // the particle should stay in front of the user's avatar as they move
            , color: { red: 0, green: 255 * audioAverageLoudness, blue: 0 }
            , radius: audioAverageLoudness
        };

        Particles.editParticle(particleID, newProperties);
    }
}

// register the callback so it fires before each data send
Script.willSendVisualDataCallback.connect(updateParticle);

// register our scriptEnding callback
Script.scriptEnding.connect(function scriptEnding() {});

@@ -16,14 +16,7 @@
//  Click and drag to create more new voxels in the same direction
//

function vLength(v) {
    return Math.sqrt(v.x * v.x + v.y * v.y + v.z * v.z);
}

function vMinus(a, b) {
    var rval = { x: a.x - b.x, y: a.y - b.y, z: a.z - b.z };
    return rval;
}
var windowDimensions = Controller.getViewportDimensions();

var NEW_VOXEL_SIZE = 1.0;
var NEW_VOXEL_DISTANCE_FROM_CAMERA = 3.0;

@@ -76,6 +69,52 @@ var clickSound = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-publ
var audioOptions = new AudioInjectionOptions();
audioOptions.volume = 0.5;

var editToolsOn = false; // starts out off

var voxelPreview = Overlays.addOverlay("cube", {
    position: { x: 0, y: 0, z: 0 },
    size: 1,
    color: { red: 255, green: 0, blue: 0 },
    alpha: 1,
    solid: false,
    visible: false,
    lineWidth: 4
});

// These will be our "overlay IDs"
var swatches = new Array();
var swatchHeight = 54;
var swatchWidth = 31;
var swatchesWidth = swatchWidth * numColors;
var swatchesX = (windowDimensions.x - swatchesWidth) / 2;
var swatchesY = windowDimensions.y - swatchHeight;

// create the overlays, position them in a row, set their colors, and for the selected one, use a different source image
// location so that it displays the "selected" marker
for (var s = 0; s < numColors; s++) {
    var imageFromX = 12 + (s * 27);
    var imageFromY = 0;
    if (s == whichColor) {
        imageFromY = 55;
    }
    var swatchX = swatchesX + (30 * s);

    swatches[s] = Overlays.addOverlay("image", {
        x: swatchX,
        y: swatchesY,
        width: swatchWidth,
        height: swatchHeight,
        subImage: { x: imageFromX, y: imageFromY, width: (swatchWidth - 1), height: swatchHeight },
        imageURL: "http://highfidelity-public.s3-us-west-1.amazonaws.com/images/testing-swatches.svg",
        color: colors[s],
        alpha: 1,
        visible: editToolsOn
    });
}

function setAudioPosition() {
    var camera = Camera.getPosition();
    var forwardVector = Quat.getFront(MyAvatar.orientation);

@@ -101,7 +140,141 @@ function fixEulerAngles(eulers) {
    return rVal;
}

var trackLastMouseX = 0;
var trackLastMouseY = 0;
var trackAsDelete = false;
var trackAsRecolor = false;

function showPreviewVoxel() {
    if (editToolsOn) {
        var voxelColor;

        var pickRay = Camera.computePickRay(trackLastMouseX, trackLastMouseY);
        var intersection = Voxels.findRayIntersection(pickRay);

        if (whichColor == -1) {
            // Copy mode - use clicked voxel color
            voxelColor = { red: intersection.voxel.red,
                           green: intersection.voxel.green,
                           blue: intersection.voxel.blue };
        } else {
            voxelColor = { red: colors[whichColor].red,
                           green: colors[whichColor].green,
                           blue: colors[whichColor].blue };
        }

        var guidePosition;

        if (trackAsDelete) {
            guidePosition = { x: intersection.voxel.x,
                              y: intersection.voxel.y,
                              z: intersection.voxel.z };
            Overlays.editOverlay(voxelPreview, {
                position: guidePosition,
                size: intersection.voxel.s,
                visible: true,
                color: { red: 255, green: 0, blue: 0 },
                solid: false,
                alpha: 1
            });
        } else if (trackAsRecolor) {
            guidePosition = { x: intersection.voxel.x - 0.001,
                              y: intersection.voxel.y - 0.001,
                              z: intersection.voxel.z - 0.001 };

            Overlays.editOverlay(voxelPreview, {
                position: guidePosition,
                size: intersection.voxel.s + 0.002,
                visible: true,
                color: voxelColor,
                solid: true,
                alpha: 0.8
            });

        } else if (!isExtruding) {
            guidePosition = { x: intersection.voxel.x,
                              y: intersection.voxel.y,
                              z: intersection.voxel.z };

            if (intersection.face == "MIN_X_FACE") {
                guidePosition.x -= intersection.voxel.s;
            } else if (intersection.face == "MAX_X_FACE") {
                guidePosition.x += intersection.voxel.s;
            } else if (intersection.face == "MIN_Y_FACE") {
                guidePosition.y -= intersection.voxel.s;
            } else if (intersection.face == "MAX_Y_FACE") {
                guidePosition.y += intersection.voxel.s;
            } else if (intersection.face == "MIN_Z_FACE") {
                guidePosition.z -= intersection.voxel.s;
            } else if (intersection.face == "MAX_Z_FACE") {
                guidePosition.z += intersection.voxel.s;
            }

            Overlays.editOverlay(voxelPreview, {
                position: guidePosition,
                size: intersection.voxel.s,
                visible: true,
                color: voxelColor,
                solid: true,
                alpha: 0.7
            });
        } else if (isExtruding) {
            Overlays.editOverlay(voxelPreview, { visible: false });
        }
    } else {
        Overlays.editOverlay(voxelPreview, { visible: false });
    }
}

function trackMouseEvent(event) {
    trackLastMouseX = event.x;
    trackLastMouseY = event.y;
    trackAsDelete = event.isControl;
    trackAsRecolor = event.isShifted;
    showPreviewVoxel();
}

function trackKeyPressEvent(event) {
    if (event.text == "CONTROL") {
        trackAsDelete = true;
        showPreviewVoxel();
    }
    if (event.text == "SHIFT") {
        trackAsRecolor = true;
    }
    showPreviewVoxel();
}

function trackKeyReleaseEvent(event) {
    if (event.text == "CONTROL") {
        trackAsDelete = false;
        showPreviewVoxel();
    }
    if (event.text == "SHIFT") {
        trackAsRecolor = false;
    }

    // on TAB release, toggle our tool state
    if (event.text == "TAB") {
        editToolsOn = !editToolsOn;
        moveTools();
        Audio.playSound(clickSound, audioOptions);
    }
    showPreviewVoxel();
}

function mousePressEvent(event) {

    // if our tools are off, then don't do anything
    if (!editToolsOn) {
        return;
    }

    if (event.isRightButton) {
        // debugging of right button click on mac...
        print(">>>> RIGHT BUTTON <<<<<");
    }
    trackMouseEvent(event); // used by preview support
    mouseX = event.x;
    mouseY = event.y;
    var pickRay = Camera.computePickRay(event.x, event.y);

@@ -118,16 +291,17 @@ function mousePressEvent(event) {
        // get position for initial azimuth, elevation
        orbitCenter = intersection.intersection;
        var orbitVector = Vec3.subtract(cameraPosition, orbitCenter);
        orbitRadius = vLength(orbitVector);
        orbitRadius = Vec3.length(orbitVector);
        orbitAzimuth = Math.atan2(orbitVector.z, orbitVector.x);
        orbitAltitude = Math.asin(orbitVector.y / Vec3.length(orbitVector));

    } else if (event.isRightButton || event.isControl) {
    } else if (trackAsDelete || event.isRightButton) {
        // Delete voxel
        Voxels.eraseVoxel(intersection.voxel.x, intersection.voxel.y, intersection.voxel.z, intersection.voxel.s);
        Audio.playSound(deleteSound, audioOptions);
        Overlays.editOverlay(voxelPreview, { visible: false });

    } else if (event.isShifted) {
    } else if (trackAsRecolor) {
        // Recolor Voxel
        Voxels.setVoxel(intersection.voxel.x,
                        intersection.voxel.y,

@@ -135,6 +309,7 @@ function mousePressEvent(event) {
                        intersection.voxel.s,
                        colors[whichColor].red, colors[whichColor].green, colors[whichColor].blue);
        Audio.playSound(changeColorSound, audioOptions);
        Overlays.editOverlay(voxelPreview, { visible: false });
    } else {
        // Add voxel on face
        if (whichColor == -1) {

@@ -178,6 +353,7 @@ function mousePressEvent(event) {
        lastVoxelScale = newVoxel.s;

        Audio.playSound(addSound, audioOptions);
        Overlays.editOverlay(voxelPreview, { visible: false });
        dragStart = { x: event.x, y: event.y };
        isAdding = true;
    }

@@ -185,42 +361,52 @@ function mousePressEvent(event) {
}

function keyPressEvent(event) {
    key_alt = event.isAlt;
    key_shift = event.isShifted;
    var nVal = parseInt(event.text);
    if (event.text == "0") {
        print("Color = Copy");
        whichColor = -1;
        Audio.playSound(clickSound, audioOptions);
    } else if ((nVal > 0) && (nVal <= numColors)) {
        whichColor = nVal - 1;
        print("Color = " + (whichColor + 1));
        Audio.playSound(clickSound, audioOptions);
    } else if (event.text == "9") {
        // Create a brand new 1 meter voxel in front of your avatar
        var color = whichColor;
        if (color == -1) color = 0;
        var newPosition = getNewVoxelPosition();
        var newVoxel = {
            x: newPosition.x,
            y: newPosition.y,
            z: newPosition.z,
            s: NEW_VOXEL_SIZE,
            red: colors[color].red,
            green: colors[color].green,
            blue: colors[color].blue };
        Voxels.setVoxel(newVoxel.x, newVoxel.y, newVoxel.z, newVoxel.s, newVoxel.red, newVoxel.green, newVoxel.blue);
        setAudioPosition();
        Audio.playSound(addSound, audioOptions);
    } else if (event.text == " ") {
    // if our tools are off, then don't do anything
    if (editToolsOn) {
        key_alt = event.isAlt;
        key_shift = event.isShifted;
        var nVal = parseInt(event.text);
        if (event.text == "0") {
            print("Color = Copy");
            whichColor = -1;
            Audio.playSound(clickSound, audioOptions);
            moveTools();
        } else if ((nVal > 0) && (nVal <= numColors)) {
            whichColor = nVal - 1;
            print("Color = " + (whichColor + 1));
            Audio.playSound(clickSound, audioOptions);
            moveTools();
        } else if (event.text == "9") {
            // Create a brand new 1 meter voxel in front of your avatar
            var color = whichColor;
            if (color == -1) color = 0;
            var newPosition = getNewVoxelPosition();
            var newVoxel = {
                x: newPosition.x,
                y: newPosition.y,
                z: newPosition.z,
                s: NEW_VOXEL_SIZE,
                red: colors[color].red,
                green: colors[color].green,
                blue: colors[color].blue };
            Voxels.setVoxel(newVoxel.x, newVoxel.y, newVoxel.z, newVoxel.s, newVoxel.red, newVoxel.green, newVoxel.blue);
            setAudioPosition();
            Audio.playSound(addSound, audioOptions);
        }
    }

    // do this even if not in edit tools
    if (event.text == " ") {
        // Reset my orientation!
        var orientation = { x: 0, y: 0, z: 0, w: 1 };
        Camera.setOrientation(orientation);
        MyAvatar.orientation = orientation;
    }
    trackKeyPressEvent(event); // used by preview support
}

function keyReleaseEvent(event) {
    trackKeyReleaseEvent(event); // used by preview support
    key_alt = false;
    key_shift = false;
}

@@ -248,7 +434,7 @@ function mouseMoveEvent(event) {
    var lastVoxelDistance = { x: pickRay.origin.x - lastVoxelPosition.x,
                              y: pickRay.origin.y - lastVoxelPosition.y,
                              z: pickRay.origin.z - lastVoxelPosition.z };
    var distance = vLength(lastVoxelDistance);
    var distance = Vec3.length(lastVoxelDistance);
    var mouseSpot = { x: pickRay.direction.x * distance, y: pickRay.direction.y * distance, z: pickRay.direction.z * distance };
    mouseSpot.x += pickRay.origin.x;
    mouseSpot.y += pickRay.origin.y;

@@ -279,9 +465,17 @@ function mouseMoveEvent(event) {
            }
        }
    }

    // update the add voxel/delete voxel overlay preview
    trackMouseEvent(event);
}

function mouseReleaseEvent(event) {
    // if our tools are off, then don't do anything
    if (!editToolsOn) {
        return;
    }

    if (isOrbiting) {
        var cameraOrientation = Camera.getOrientation();
        var eulers = Quat.safeEulerAngles(cameraOrientation);

@@ -296,6 +490,41 @@ function mouseReleaseEvent(event) {
    isExtruding = false;
}

function moveTools() {
    swatchesX = (windowDimensions.x - swatchesWidth) / 2;
    swatchesY = windowDimensions.y - swatchHeight;

    // position the overlays in a row, set their colors, and for the selected one, use a different source image
    // location so that it displays the "selected" marker
    for (var s = 0; s < numColors; s++) {
        var imageFromX = 12 + (s * 27);
        var imageFromY = 0;
        if (s == whichColor) {
            imageFromY = 55;
        }
        var swatchX = swatchesX + ((swatchWidth - 1) * s);

        Overlays.editOverlay(swatches[s], {
            x: swatchX,
            y: swatchesY,
            subImage: { x: imageFromX, y: imageFromY, width: (swatchWidth - 1), height: swatchHeight },
            color: colors[s],
            alpha: 1,
            visible: editToolsOn
        });
    }
}

function update() {
    var newWindowDimensions = Controller.getViewportDimensions();
    if (newWindowDimensions.x != windowDimensions.x || newWindowDimensions.y != windowDimensions.y) {
        windowDimensions = newWindowDimensions;
        print("window resized...");
        moveTools();
    }
}

Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Controller.mouseMoveEvent.connect(mouseMoveEvent);

@@ -303,5 +532,15 @@ Controller.keyPressEvent.connect(keyPressEvent);
Controller.keyReleaseEvent.connect(keyReleaseEvent);

function scriptEnding() {
    Overlays.deleteOverlay(voxelPreview);
    for (var s = 0; s < numColors; s++) {
        Overlays.deleteOverlay(swatches[s]);
    }
}
Script.scriptEnding.connect(scriptEnding);

Script.willSendVisualDataCallback.connect(update);

@@ -11,6 +11,7 @@ set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/../cmake
set(FACESHIFT_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/external/faceshift)
set(LIBOVR_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/external/LibOVR)
set(SIXENSE_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/external/Sixense)
set(VISAGE_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/external/visage)

if (DEFINED ENV{JOB_ID})
    set(BUILD_SEQ $ENV{JOB_ID})

@@ -138,9 +139,10 @@ find_package(Faceshift)
find_package(GLM REQUIRED)
find_package(LibOVR)
find_package(Sixense)
find_package(Visage)
find_package(ZLIB)

# likewise with Sixense library for Razer Hydra
# include the Sixense library for Razer Hydra if available
if (SIXENSE_FOUND AND NOT DISABLE_SIXENSE)
    add_definitions(-DHAVE_SIXENSE)
    include_directories(SYSTEM ${SIXENSE_INCLUDE_DIRS})

@@ -150,6 +152,21 @@ if (SIXENSE_FOUND AND NOT DISABLE_SIXENSE)
    target_link_libraries(${TARGET_NAME} ${SIXENSE_LIBRARIES})
endif (SIXENSE_FOUND AND NOT DISABLE_SIXENSE)

# likewise with Visage library for webcam feature tracking
if (VISAGE_FOUND AND NOT DISABLE_VISAGE)
    add_definitions(-DHAVE_VISAGE -DVISAGE_STATIC)
    include_directories(SYSTEM ${VISAGE_INCLUDE_DIRS})
    if (APPLE)
        add_definitions(-DMAC_OS_X)
        SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-comment -isystem ${VISAGE_INCLUDE_DIRS}")
        find_library(AVFoundation AVFoundation)
        find_library(CoreMedia CoreMedia)
        find_library(NEW_STD_LIBRARY libc++abi.dylib /usr/lib/)
        target_link_libraries(${TARGET_NAME} ${AVFoundation} ${CoreMedia} ${NEW_STD_LIBRARY})
    endif (APPLE)
    target_link_libraries(${TARGET_NAME} ${VISAGE_LIBRARIES})
endif (VISAGE_FOUND AND NOT DISABLE_VISAGE)

# and with LibOVR for Oculus Rift
if (LIBOVR_FOUND AND NOT DISABLE_LIBOVR)
    add_definitions(-DHAVE_LIBOVR)

14  interface/external/visage/readme.txt  (vendored, new file)

@@ -0,0 +1,14 @@

Instructions for adding the Visage driver to Interface
Andrzej Kapolka, February 11, 2014

1. Copy the Visage SDK folders (lib, include, dependencies) into the interface/external/visage folder.
   This readme.txt should be there as well.

2. Copy the Visage configuration data folder (visageSDK-MacOS/Samples/MacOSX/data/) to interface/resources/visage
   (i.e., so that interface/resources/visage/candide3.wfm is accessible).

3. Copy the Visage license file to interface/resources/visage/license.vlc.

4. Delete your build directory, run cmake and build, and you should be all set.

@@ -48,6 +48,8 @@
#include <QXmlStreamReader>
#include <QXmlStreamAttributes>
#include <QMediaPlayer>
#include <QMimeData>
#include <QMessageBox>

#include <AudioInjector.h>
#include <Logging.h>

@@ -199,7 +201,7 @@ Application::Application(int& argc, char** argv, timeval &startup_time) :
    connect(audioThread, SIGNAL(started()), &_audio, SLOT(start()));

    audioThread->start();

    connect(nodeList, SIGNAL(domainChanged(const QString&)), SLOT(domainChanged(const QString&)));
    connect(nodeList, &NodeList::nodeAdded, this, &Application::nodeAdded);
    connect(nodeList, &NodeList::nodeKilled, this, &Application::nodeKilled);

@@ -1415,6 +1417,32 @@ void Application::wheelEvent(QWheelEvent* event) {
    }
}

void Application::dropEvent(QDropEvent *event) {
    QString snapshotPath;
    const QMimeData *mimeData = event->mimeData();
    foreach (QUrl url, mimeData->urls()) {
        if (url.url().toLower().endsWith(SNAPSHOT_EXTENSION)) {
            snapshotPath = url.url().remove("file://");
            break;
        }
    }

    SnapshotMetaData* snapshotData = Snapshot::parseSnapshotData(snapshotPath);
    if (snapshotData != NULL) {
        if (!snapshotData->getDomain().isEmpty()) {
            Menu::getInstance()->goToDomain(snapshotData->getDomain());
        }

        _myAvatar->setPosition(snapshotData->getLocation());
        _myAvatar->setOrientation(snapshotData->getOrientation());
    } else {
        QMessageBox msgBox;
        msgBox.setText("No location details were found in this JPG, try dragging in an authentic Hifi snapshot.");
        msgBox.setStandardButtons(QMessageBox::Ok);
        msgBox.exec();
    }
}

void Application::sendPingPackets() {
    QByteArray pingPacket = NodeList::getInstance()->constructPingPacket();
    controlledBroadcastToNodes(pingPacket, NodeSet() << NodeType::VoxelServer

@@ -2013,6 +2041,15 @@ void Application::updateFaceshift() {
    }
}

void Application::updateVisage() {

    bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
    PerformanceWarning warn(showWarnings, "Application::updateVisage()");

    // Update Visage
    _visage.update();
}

void Application::updateMyAvatarLookAtPosition(glm::vec3& lookAtSpot) {

    bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);

@@ -2033,13 +2070,25 @@ void Application::updateMyAvatarLookAtPosition(glm::vec3& lookAtSpot) {
        }
        lookAtSpot = _mouseRayOrigin + _mouseRayDirection * distance;
    }
    bool trackerActive = false;
    float eyePitch, eyeYaw;
    if (_faceshift.isActive()) {
        eyePitch = _faceshift.getEstimatedEyePitch();
        eyeYaw = _faceshift.getEstimatedEyeYaw();
        trackerActive = true;

    } else if (_visage.isActive()) {
        eyePitch = _visage.getEstimatedEyePitch();
        eyeYaw = _visage.getEstimatedEyeYaw();
        trackerActive = true;
    }
    if (trackerActive) {
        // deflect using Faceshift gaze data
        glm::vec3 origin = _myAvatar->getHead().calculateAverageEyePosition();
        float pitchSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? -1.0f : 1.0f;
        float deflection = Menu::getInstance()->getFaceshiftEyeDeflection();
        lookAtSpot = origin + _myCamera.getRotation() * glm::quat(glm::radians(glm::vec3(
            _faceshift.getEstimatedEyePitch() * pitchSign * deflection, _faceshift.getEstimatedEyeYaw() * deflection, 0.0f))) *
            eyePitch * pitchSign * deflection, eyeYaw * deflection, 0.0f))) *
            glm::inverse(_myCamera.getRotation()) * (lookAtSpot - origin);
    }
    _myAvatar->getHead().setLookAtPosition(lookAtSpot);

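The deflection step above moves the look-at target into eye-relative space, rotates it by the tracked eye pitch and yaw scaled by the menu's deflection factor, and moves it back. A standalone restatement of that math with GLM follows; the free-function form and its name are illustrative, not part of the codebase:

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    // Sketch of the gaze-deflection math from updateMyAvatarLookAtPosition():
    // rotate the origin-to-target vector by the tracked eye angles (degrees),
    // expressed in camera space. pitchSign is -1 in mirror mode, +1 otherwise.
    glm::vec3 deflectGaze(const glm::vec3& lookAtSpot, const glm::vec3& origin,
                          const glm::quat& cameraRotation, float eyePitch, float eyeYaw,
                          float deflection, float pitchSign) {
        glm::quat eyeRotation(glm::radians(glm::vec3(
            eyePitch * pitchSign * deflection, eyeYaw * deflection, 0.0f)));
        return origin + cameraRotation * eyeRotation * glm::inverse(cameraRotation)
            * (lookAtSpot - origin);
    }
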
@@ -2290,6 +2339,7 @@ void Application::update(float deltaTime) {
    glm::vec3 lookAtSpot;

    updateFaceshift();
    updateVisage();
    _myAvatar->updateLookAtTargetAvatar(lookAtSpot);
    updateMyAvatarLookAtPosition(lookAtSpot);

@@ -3796,6 +3846,7 @@ void Application::resetSensors() {
    _mouseY = _glWidget->height() / 2;

    _faceshift.reset();
    _visage.reset();

    if (OculusManager::isConnected()) {
        OculusManager::reset();

@@ -3833,7 +3884,7 @@ void Application::updateWindowTitle(){

    QString title = QString() + _profile.getUsername() + " " + nodeList->getSessionUUID().toString()
        + " @ " + nodeList->getDomainHostname() + buildVersion;

    qDebug("Application title set to: %s", title.toStdString().c_str());
    _window->setWindowTitle(title);
}

@@ -4237,6 +4288,6 @@ void Application::takeSnapshot() {
    player->setMedia(QUrl::fromLocalFile(inf.absoluteFilePath()));
    player->play();

    Snapshot::saveSnapshot(_glWidget, _profile.getUsername(), _myAvatar->getPosition());
    Snapshot::saveSnapshot(_glWidget, &_profile, _myAvatar);
}

@@ -54,6 +54,7 @@
#include "avatar/Profile.h"
#include "devices/Faceshift.h"
#include "devices/SixenseManager.h"
#include "devices/Visage.h"
#include "renderer/AmbientOcclusionEffect.h"
#include "renderer/GeometryCache.h"
#include "renderer/GlowEffect.h"

@@ -93,6 +94,8 @@ static const float NODE_KILLED_RED = 1.0f;
static const float NODE_KILLED_GREEN = 0.0f;
static const float NODE_KILLED_BLUE = 0.0f;

static const QString SNAPSHOT_EXTENSION = ".jpg";

class Application : public QApplication {
    Q_OBJECT

@@ -127,6 +130,7 @@ public:
    void touchUpdateEvent(QTouchEvent* event);

    void wheelEvent(QWheelEvent* event);
    void dropEvent(QDropEvent *event);

    bool event(QEvent* event);

@@ -157,6 +161,7 @@ public:
    const glm::vec3& getMouseRayOrigin() const { return _mouseRayOrigin; }
    const glm::vec3& getMouseRayDirection() const { return _mouseRayDirection; }
    Faceshift* getFaceshift() { return &_faceshift; }
    Visage* getVisage() { return &_visage; }
    SixenseManager* getSixenseManager() { return &_sixenseManager; }
    BandwidthMeter* getBandwidthMeter() { return &_bandwidthMeter; }
    QSettings* getSettings() { return _settings; }

@@ -283,6 +288,7 @@ private:
    // Various helper functions called during update()
    void updateMouseRay();
    void updateFaceshift();
    void updateVisage();
    void updateMyAvatarLookAtPosition(glm::vec3& lookAtSpot);
    void updateHoverVoxels(float deltaTime, float& distance, BoxFace& face);
    void updateMouseVoxels(float deltaTime, float& distance, BoxFace& face);

@@ -382,6 +388,7 @@ private:
    Profile _profile; // The data-server linked profile for this user

    Faceshift _faceshift;
    Visage _visage;

    SixenseManager _sixenseManager;
    QStringList _activeScripts;

@@ -46,7 +46,8 @@ public:
    void render(int screenWidth, int screenHeight);

    float getLastInputLoudness() const { return glm::max(_lastInputLoudness - _noiseGateMeasuredFloor, 0.f); }

    float getAudioAverageInputLoudness() const { return _lastInputLoudness; }

    void setNoiseGateEnabled(bool noiseGateEnabled) { _noiseGateEnabled = noiseGateEnabled; }

    void setLastAcceleration(const glm::vec3 lastAcceleration) { _lastAcceleration = lastAcceleration; }

@@ -250,3 +250,7 @@ void ControllerScriptingInterface::releaseJoystick(int joystickIndex) {
    }
}

glm::vec2 ControllerScriptingInterface::getViewportDimensions() const {
    QGLWidget* widget = Application::getInstance()->getGLWidget();
    return glm::vec2(widget->width(), widget->height());
}

@@ -74,6 +74,8 @@ public slots:
    virtual void captureJoystick(int joystickIndex);
    virtual void releaseJoystick(int joystickIndex);

    virtual glm::vec2 getViewportDimensions() const;

private:
    const PalmData* getPrimaryPalm() const;
    const PalmData* getPalm(int palmIndex) const;

@@ -9,6 +9,8 @@
#include "Application.h"

#include "GLCanvas.h"
#include <QMimeData>
#include <QUrl>

GLCanvas::GLCanvas() : QGLWidget(QGLFormat(QGL::NoDepthBuffer, QGL::NoStencilBuffer)) {
}

@@ -16,6 +18,7 @@ GLCanvas::GLCanvas() : QGLWidget(QGLFormat(QGL::NoDepthBuffer, QGL::NoStencilBuf
void GLCanvas::initializeGL() {
    Application::getInstance()->initializeGL();
    setAttribute(Qt::WA_AcceptTouchEvents);
    setAcceptDrops(true);
}

void GLCanvas::paintGL() {

@@ -67,4 +70,18 @@ bool GLCanvas::event(QEvent* event) {

void GLCanvas::wheelEvent(QWheelEvent* event) {
    Application::getInstance()->wheelEvent(event);
}

void GLCanvas::dragEnterEvent(QDragEnterEvent* event) {
    const QMimeData *mimeData = event->mimeData();
    foreach (QUrl url, mimeData->urls()) {
        if (url.url().toLower().endsWith(SNAPSHOT_EXTENSION)) {
            event->acceptProposedAction();
            break;
        }
    }
}

void GLCanvas::dropEvent(QDropEvent* event) {
    Application::getInstance()->dropEvent(event);
}

@@ -31,6 +31,9 @@ protected:
    virtual bool event(QEvent* event);

    virtual void wheelEvent(QWheelEvent* event);

    virtual void dragEnterEvent(QDragEnterEvent *event);
    virtual void dropEvent(QDropEvent* event);
};

#endif /* defined(__hifi__GLCanvas__) */

@@ -112,7 +112,7 @@ Menu::Menu() :
                                  MenuOption::GoToDomain,
                                  Qt::CTRL | Qt::Key_D,
                                  this,
                                  SLOT(goToDomain()));
                                  SLOT(goToDomainDialog()));
    addActionToQMenuAndActionHash(fileMenu,
                                  MenuOption::GoToLocation,
                                  Qt::CTRL | Qt::SHIFT | Qt::Key_L,

@@ -897,7 +897,7 @@ void Menu::goToDomain(const QString newDomain) {
    }
}

void Menu::goToDomain() {
void Menu::goToDomainDialog() {

    QString currentDomainHostname = NodeList::getInstance()->getDomainHostname();

@@ -116,7 +116,7 @@ private slots:
    void aboutApp();
    void login();
    void editPreferences();
    void goToDomain();
    void goToDomainDialog();
    void goToLocation();
    void bandwidthDetailsClosed();
    void voxelStatsDetailsClosed();

@@ -2105,7 +2105,7 @@ void VoxelSystem::hideOutOfView(bool forceFullFrustum) {
bool VoxelSystem::hideAllSubTreeOperation(OctreeElement* element, void* extraData) {
    VoxelTreeElement* voxel = (VoxelTreeElement*)element;
    hideOutOfViewArgs* args = (hideOutOfViewArgs*)extraData;

    // If we've culled at least once, then we will use the status of this voxel in the last culled frustum to determine
    // how to proceed. If we've never culled, then we just consider all these voxels to be UNKNOWN so that we will not
    // consider that case.

@@ -2141,7 +2141,7 @@ bool VoxelSystem::hideAllSubTreeOperation(OctreeElement* element, void* extraDat
bool VoxelSystem::showAllSubTreeOperation(OctreeElement* element, void* extraData) {
    VoxelTreeElement* voxel = (VoxelTreeElement*)element;
    hideOutOfViewArgs* args = (hideOutOfViewArgs*)extraData;

    // If we've culled at least once, then we will use the status of this voxel in the last culled frustum to determine
    // how to proceed. If we've never culled, then we just consider all these voxels to be UNKNOWN so that we will not
    // consider that case.

@@ -2184,7 +2184,7 @@ bool VoxelSystem::showAllSubTreeOperation(OctreeElement* element, void* extraDat
bool VoxelSystem::hideOutOfViewOperation(OctreeElement* element, void* extraData) {
    VoxelTreeElement* voxel = (VoxelTreeElement*)element;
    hideOutOfViewArgs* args = (hideOutOfViewArgs*)extraData;

    // If we're still recursing the tree using this operator, then we don't know if we're inside or outside...
    // so before we move forward we need to determine our frustum location
    ViewFrustum::location inFrustum = voxel->inFrustum(args->thisViewFrustum);

@@ -2201,7 +2201,6 @@ bool VoxelSystem::hideOutOfViewOperation(OctreeElement* element, void* extraData
    // ok, now do some processing for this node...
    switch (inFrustum) {
        case ViewFrustum::OUTSIDE: {

            // If this node is outside the current view, then we might want to hide it... unless it was previously OUTSIDE;
            // if it was previously outside, then we can safely assume it's already hidden, and we can also safely assume
            // that all of its children are outside both of our views, in which case we can just stop recursing...

@@ -2215,12 +2214,10 @@ bool VoxelSystem::hideOutOfViewOperation(OctreeElement* element, void* extraData
            // we need to hide it. Additionally we know that ALL of its children are also fully OUTSIDE, so we can recurse
            // the children and simply mark them as hidden
            args->tree->recurseNodeWithOperation(voxel, hideAllSubTreeOperation, args);

            return false;

        } break;
        case ViewFrustum::INSIDE: {

            // If this node is INSIDE the current view, then we might want to show it... unless it was previously INSIDE;
            // if it was previously INSIDE, then we can safely assume it's already shown, and we can also safely assume
            // that all of its children are INSIDE both of our views, in which case we can just stop recursing...

@@ -2234,12 +2231,10 @@ bool VoxelSystem::hideOutOfViewOperation(OctreeElement* element, void* extraData
            // we need to show it. Additionally we know that ALL of its children are also fully INSIDE, so we can recurse
            // the children and simply mark them as visible (as appropriate based on LOD)
            args->tree->recurseNodeWithOperation(voxel, showAllSubTreeOperation, args);

            return false;
        } break;
        case ViewFrustum::INTERSECT: {
            args->nodesScanned++;

            // If this node INTERSECTS the current view, then we might want to show it... unless it was previously INSIDE
            // the last known view, in which case it will already be visible, and we know that all its children are also
            // previously INSIDE and visible. So in this case stop recursing

@@ -2253,8 +2248,15 @@ bool VoxelSystem::hideOutOfViewOperation(OctreeElement* element, void* extraData
            // if the child node INTERSECTs the view, then we want to check to see if it thinks it should render;
            // if it should render but is missing its VBO index, then we want to flip it on, and we can stop recursing from
            // here because we know it will block any children anyway

            float voxelSizeScale = Menu::getInstance()->getVoxelSizeScale();
            int boundaryLevelAdjust = Menu::getInstance()->getBoundaryLevelAdjust();
            bool shouldRender = voxel->calculateShouldRender(&args->thisViewFrustum, voxelSizeScale, boundaryLevelAdjust);
            voxel->setShouldRender(shouldRender);

            if (voxel->getShouldRender() && !voxel->isKnownBufferIndex()) {
                voxel->setDirtyBit(); // will this make it draw?
                voxel->markWithChangedTime(); // both are needed to force redraw
                args->nodesShown++;
                return false;
            }

@@ -2267,7 +2269,6 @@ bool VoxelSystem::hideOutOfViewOperation(OctreeElement* element, void* extraData
        } break;
    } // switch

    return true; // keep going!
}

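The comment blocks in these hunks amount to a small decision table: the action taken for an element depends on where it sits in the current frustum versus where it sat in the last culled frustum. The following condensed sketch restates that logic; the enum, stubs, and function name are paraphrases for illustration, not the engine's actual types:

    #include <cstdio>

    // Condensed sketch of the culling decision table described above.
    enum Location { OUTSIDE, INSIDE, INTERSECT, UNKNOWN };

    static void hideSubtree() { std::printf("hide subtree\n"); }
    static void showSubtree() { std::printf("show subtree\n"); }

    // Returns true when the traversal should keep recursing into children.
    bool decideVisibility(Location current, Location previous) {
        switch (current) {
        case OUTSIDE:
            if (previous == OUTSIDE) {
                return false;       // already hidden, children too
            }
            hideSubtree();          // newly outside: hide element and children
            return false;
        case INSIDE:
            if (previous == INSIDE) {
                return false;       // already shown, children too
            }
            showSubtree();          // newly inside: show per LOD rules
            return false;
        case INTERSECT:
            if (previous == INSIDE) {
                return false;       // was fully inside: already visible
            }
            return true;            // examine children individually
        default:
            return true;            // never culled: keep recursing
        }
    }
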
@@ -62,24 +62,20 @@ void Head::simulate(float deltaTime, bool isMine) {

    // Update audio trailing average for rendering facial animations
    Faceshift* faceshift = Application::getInstance()->getFaceshift();
    Visage* visage = Application::getInstance()->getVisage();
    if (isMine) {
        _isFaceshiftConnected = faceshift->isActive();
        _isFaceshiftConnected = false;
        if (faceshift->isActive()) {
            _blendshapeCoefficients = faceshift->getBlendshapeCoefficients();
            _isFaceshiftConnected = true;

        } else if (visage->isActive()) {
            _blendshapeCoefficients = visage->getBlendshapeCoefficients();
            _isFaceshiftConnected = true;
        }
    }

    if (isMine && faceshift->isActive()) {
        const float EYE_OPEN_SCALE = 0.5f;
        _leftEyeBlink = faceshift->getLeftBlink() - EYE_OPEN_SCALE * faceshift->getLeftEyeOpen();
        _rightEyeBlink = faceshift->getRightBlink() - EYE_OPEN_SCALE * faceshift->getRightEyeOpen();

        // set these values based on how they'll be used. if we use faceshift in the long term, we'll want a complete
        // mapping between their blendshape coefficients and our avatar features
        const float MOUTH_SIZE_SCALE = 2500.0f;
        _averageLoudness = faceshift->getMouthSize() * faceshift->getMouthSize() * MOUTH_SIZE_SCALE;
        const float BROW_HEIGHT_SCALE = 0.005f;
        _browAudioLift = faceshift->getBrowUpCenter() * BROW_HEIGHT_SCALE;
        _blendshapeCoefficients = faceshift->getBlendshapeCoefficients();

    } else if (!_isFaceshiftConnected) {
    if (!_isFaceshiftConnected) {
        // Update eye saccades
        const float AVERAGE_MICROSACCADE_INTERVAL = 0.50f;
        const float AVERAGE_SACCADE_INTERVAL = 4.0f;

@@ -141,7 +141,9 @@ void MyAvatar::update(float deltaTime) {
    }

    // Get audio loudness data from audio input device
    _head.setAudioLoudness(Application::getInstance()->getAudio()->getLastInputLoudness());
    Audio* audio = Application::getInstance()->getAudio();
    _head.setAudioLoudness(audio->getLastInputLoudness());
    _head.setAudioAverageLoudness(audio->getAudioAverageInputLoudness());

    if (Menu::getInstance()->isOptionChecked(MenuOption::Gravity)) {
        setGravity(Application::getInstance()->getEnvironment()->getGravity(getPosition()));

@@ -335,22 +337,32 @@ const float MAX_PITCH = 90.0f;
// Update avatar head rotation with sensor data
void MyAvatar::updateFromGyros(float deltaTime) {
    Faceshift* faceshift = Application::getInstance()->getFaceshift();
    Visage* visage = Application::getInstance()->getVisage();
    glm::vec3 estimatedPosition, estimatedRotation;

    bool trackerActive = false;
    if (faceshift->isActive()) {
        estimatedPosition = faceshift->getHeadTranslation();
        estimatedRotation = safeEulerAngles(faceshift->getHeadRotation());
        trackerActive = true;

    } else if (visage->isActive()) {
        estimatedPosition = visage->getHeadTranslation();
        estimatedRotation = safeEulerAngles(visage->getHeadRotation());
        trackerActive = true;
    }
    if (trackerActive) {
        // Rotate the body if the head is turned beyond the screen
        if (Menu::getInstance()->isOptionChecked(MenuOption::TurnWithHead)) {
            const float FACESHIFT_YAW_TURN_SENSITIVITY = 0.5f;
            const float FACESHIFT_MIN_YAW_TURN = 15.f;
            const float FACESHIFT_MAX_YAW_TURN = 50.f;
            if ( (fabs(estimatedRotation.y) > FACESHIFT_MIN_YAW_TURN) &&
                 (fabs(estimatedRotation.y) < FACESHIFT_MAX_YAW_TURN) ) {
            const float TRACKER_YAW_TURN_SENSITIVITY = 0.5f;
            const float TRACKER_MIN_YAW_TURN = 15.f;
            const float TRACKER_MAX_YAW_TURN = 50.f;
            if ( (fabs(estimatedRotation.y) > TRACKER_MIN_YAW_TURN) &&
                 (fabs(estimatedRotation.y) < TRACKER_MAX_YAW_TURN) ) {
                if (estimatedRotation.y > 0.f) {
                    _bodyYawDelta += (estimatedRotation.y - FACESHIFT_MIN_YAW_TURN) * FACESHIFT_YAW_TURN_SENSITIVITY;
                    _bodyYawDelta += (estimatedRotation.y - TRACKER_MIN_YAW_TURN) * TRACKER_YAW_TURN_SENSITIVITY;
                } else {
                    _bodyYawDelta += (estimatedRotation.y + FACESHIFT_MIN_YAW_TURN) * FACESHIFT_YAW_TURN_SENSITIVITY;
                    _bodyYawDelta += (estimatedRotation.y + TRACKER_MIN_YAW_TURN) * TRACKER_YAW_TURN_SENSITIVITY;
                }
            }
        }

@@ -608,7 +620,7 @@ void MyAvatar::loadData(QSettings* settings) {
    _position.y = loadSetting(settings, "position_y", 0.0f);
    _position.z = loadSetting(settings, "position_z", 0.0f);

    _head.setPupilDilation(settings->value("pupilDilation", 0.0f).toFloat());
    _head.setPupilDilation(loadSetting(settings, "pupilDilation", 0.0f));

    _leanScale = loadSetting(settings, "leanScale", 0.05f);
    _targetScale = loadSetting(settings, "scale", 1.0f);

@@ -45,5 +45,6 @@ private:
    TouchState _touchState;
    timeval* _lastReceivedPacket;

#endif /* defined(__hifi__Transmitter__) */
};

#endif /* defined(__hifi__Transmitter__) */

164  interface/src/devices/Visage.cpp  (new file)

@@ -0,0 +1,164 @@
//
//  Visage.cpp
//  interface
//
//  Created by Andrzej Kapolka on 2/11/14.
//  Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
//

#include <QHash>

#include <SharedUtil.h>

#ifdef HAVE_VISAGE
#include <VisageTracker2.h>
#endif

#include "Visage.h"
#include "renderer/FBXReader.h"

namespace VisageSDK {
#ifdef WIN32
    void __declspec(dllimport) initializeLicenseManager(char* licenseKeyFileName);
#else
    void initializeLicenseManager(char* licenseKeyFileName);
#endif
}

using namespace VisageSDK;

const glm::vec3 DEFAULT_HEAD_ORIGIN(0.0f, 0.0f, 0.7f);

Visage::Visage() :
    _active(false),
    _headOrigin(DEFAULT_HEAD_ORIGIN),
    _estimatedEyePitch(0.0f),
    _estimatedEyeYaw(0.0f),
    _leftInnerBrowIndex(0),
    _rightInnerBrowIndex(0) {

#ifdef HAVE_VISAGE
    switchToResourcesParentIfRequired();
    QByteArray licensePath = "resources/visage/license.vlc";
    initializeLicenseManager(licensePath.data());
    _tracker = new VisageTracker2("resources/visage/Facial Features Tracker - Asymmetric.cfg");
    if (_tracker->trackFromCam()) {
        _data = new FaceData();

    } else {
        delete _tracker;
        _tracker = NULL;
    }
#endif
}

Visage::~Visage() {
#ifdef HAVE_VISAGE
    if (_tracker) {
        _tracker->stop();
        delete _tracker;
        delete _data;
    }
#endif
}

static int leftEyeBlinkIndex = 0;
static int rightEyeBlinkIndex = 1;
static int centerBrowIndex = 16;

static QHash<QByteArray, int> createBlendshapeIndices() {
    QHash<QByteArray, QByteArray> blendshapeMap;
    blendshapeMap.insert("Sneer", "au_nose_wrinkler");
    blendshapeMap.insert("JawFwd", "au_jaw_z_push");
    blendshapeMap.insert("JawLeft", "au_jaw_x_push");
    blendshapeMap.insert("JawOpen", "au_jaw_drop");
    blendshapeMap.insert("LipsLowerDown", "au_lower_lip_drop");
    blendshapeMap.insert("LipsUpperUp", "au_upper_lip_raiser");
    blendshapeMap.insert("LipsStretch_L", "au_lip_stretcher_left");
    blendshapeMap.insert("BrowsU_L", "au_left_outer_brow_raiser");
    blendshapeMap.insert("BrowsU_C", "au_left_inner_brow_raiser");
    blendshapeMap.insert("BrowsD_L", "au_left_brow_lowerer");
    blendshapeMap.insert("LipsStretch_R", "au_lip_stretcher_right");
    blendshapeMap.insert("BrowsU_R", "au_right_outer_brow_raiser");
    blendshapeMap.insert("BrowsU_C", "au_right_inner_brow_raiser");
    blendshapeMap.insert("BrowsD_R", "au_right_brow_lowerer");

    QHash<QByteArray, int> blendshapeIndices;
    for (int i = 0;; i++) {
        QByteArray blendshape = FACESHIFT_BLENDSHAPES[i];
        if (blendshape.isEmpty()) {
            break;
        }
        if (blendshape == "EyeBlink_L") {
            leftEyeBlinkIndex = i;

        } else if (blendshape == "EyeBlink_R") {
            rightEyeBlinkIndex = i;

        } else if (blendshape == "BrowsU_C") {
            centerBrowIndex = i;
        }
        QByteArray mapping = blendshapeMap.value(blendshape);
        if (!mapping.isEmpty()) {
            blendshapeIndices.insert(mapping, i + 1);
        }
    }

    return blendshapeIndices;
}

static const QHash<QByteArray, int>& getBlendshapeIndices() {
    static QHash<QByteArray, int> blendshapeIndices = createBlendshapeIndices();
    return blendshapeIndices;
}

const float TRANSLATION_SCALE = 20.0f;

void Visage::update() {
#ifdef HAVE_VISAGE
    _active = (_tracker && _tracker->getTrackingData(_data) == TRACK_STAT_OK);
    if (!_active) {
        return;
    }
    _headRotation = glm::quat(glm::vec3(-_data->faceRotation[0], -_data->faceRotation[1], _data->faceRotation[2]));
    _headTranslation = (glm::vec3(_data->faceTranslation[0], _data->faceTranslation[1], _data->faceTranslation[2]) -
        _headOrigin) * TRANSLATION_SCALE;
    _estimatedEyePitch = glm::degrees(-_data->gazeDirection[1]);
    _estimatedEyeYaw = glm::degrees(-_data->gazeDirection[0]);

    if (_blendshapeIndices.isEmpty()) {
        _blendshapeIndices.resize(_data->actionUnitCount);
        int maxIndex = -1;
        for (int i = 0; i < _data->actionUnitCount; i++) {
            QByteArray name = _data->actionUnitsNames[i];
            if (name == "au_left_inner_brow_raiser") {
                _leftInnerBrowIndex = i;
            } else if (name == "au_right_inner_brow_raiser") {
                _rightInnerBrowIndex = i;
            }
            int index = getBlendshapeIndices().value(name) - 1;
            maxIndex = qMax(maxIndex, _blendshapeIndices[i] = index);
        }
        _blendshapeCoefficients.resize(maxIndex + 1);
    }

    qFill(_blendshapeCoefficients.begin(), _blendshapeCoefficients.end(), 0.0f);
    for (int i = 0; i < _data->actionUnitCount; i++) {
        if (!_data->actionUnitsUsed[i]) {
            continue;
        }
        int index = _blendshapeIndices.at(i);
        if (index != -1) {
            _blendshapeCoefficients[index] = _data->actionUnits[i];
        }
    }
    _blendshapeCoefficients[leftEyeBlinkIndex] = 1.0f - _data->eyeClosure[1];
    _blendshapeCoefficients[rightEyeBlinkIndex] = 1.0f - _data->eyeClosure[0];
    _blendshapeCoefficients[centerBrowIndex] = (_data->actionUnits[_leftInnerBrowIndex] +
        _data->actionUnits[_rightInnerBrowIndex]) * 0.5f;
#endif
}

void Visage::reset() {
    _headOrigin += _headTranslation / TRANSLATION_SCALE;
}

65  interface/src/devices/Visage.h  (new file)

@@ -0,0 +1,65 @@
//
//  Visage.h
//  interface
//
//  Created by Andrzej Kapolka on 2/11/14.
//  Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
//

#ifndef __interface__Visage__
#define __interface__Visage__

#include <vector>

#include <QVector>

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

namespace VisageSDK {
    class VisageTracker2;
    struct FaceData;
}

/// Handles input from the Visage webcam feature tracking software.
class Visage {
public:

    Visage();
    ~Visage();

    bool isActive() const { return _active; }

    const glm::quat& getHeadRotation() const { return _headRotation; }
    const glm::vec3& getHeadTranslation() const { return _headTranslation; }

    float getEstimatedEyePitch() const { return _estimatedEyePitch; }
    float getEstimatedEyeYaw() const { return _estimatedEyeYaw; }

    const std::vector<float>& getBlendshapeCoefficients() const { return _blendshapeCoefficients; }

    void update();
    void reset();

private:

    VisageSDK::VisageTracker2* _tracker;
    VisageSDK::FaceData* _data;

    bool _active;
    glm::quat _headRotation;
    glm::vec3 _headTranslation;

    glm::vec3 _headOrigin;

    float _estimatedEyePitch;
    float _estimatedEyeYaw;

    std::vector<float> _blendshapeCoefficients;

    QVector<int> _blendshapeIndices;
    int _leftInnerBrowIndex;
    int _rightInnerBrowIndex;
};

#endif /* defined(__interface__Visage__) */

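Taken together with the Application and Head changes above, the intended usage is to call update() once per frame and consume the pose and blendshape data only while isActive() reports successful tracking. A minimal sketch of such a per-frame driver follows; the function and its printout are illustrative, and assume a build with HAVE_VISAGE defined:

    #include <cstdio>
    #include "devices/Visage.h"

    // Hypothetical per-frame driver; mirrors how Application::updateVisage()
    // and Head::simulate() use the tracker in the diff above.
    void driveHeadFromVisage(Visage& visage) {
        visage.update();                  // polls the tracker and sets the active flag
        if (!visage.isActive()) {
            return;                       // no face this frame: leave the head alone
        }
        const glm::quat& rotation = visage.getHeadRotation();
        const glm::vec3& translation = visage.getHeadTranslation();
        const std::vector<float>& coefficients = visage.getBlendshapeCoefficients();
        std::printf("head at (%.2f, %.2f, %.2f), %zu blendshapes\n",
                    translation.x, translation.y, translation.z, coefficients.size());
        (void)rotation;                   // an avatar would apply this to its head joint
    }
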
@@ -21,6 +21,9 @@ class FBXNode;

typedef QList<FBXNode> FBXNodeList;

/// The names of the blendshapes expected by Faceshift, terminated with an empty string.
extern const char* FACESHIFT_BLENDSHAPES[];

/// A node within an FBX document.
class FBXNode {
public:

@@ -12,7 +12,6 @@

#include <QDateTime>
#include <QFileInfo>
#include <QDebug>

// filename format: hifi-snap-by-%username%-on-%date%_%time%_@-%location%.jpg
// %1 <= username, %2 <= date and time, %3 <= current location

@@ -21,18 +20,69 @@ const QString FILENAME_PATH_FORMAT = "hifi-snap-by-%1-on-%2@%3.jpg";
const QString DATETIME_FORMAT = "yyyy-MM-dd_hh-mm-ss";
const QString SNAPSHOTS_DIRECTORY = "Snapshots";

void Snapshot::saveSnapshot(QGLWidget* widget, QString username, glm::vec3 location) {
    QImage shot = widget->grabFrameBuffer();
const QString LOCATION_X = "location-x";
const QString LOCATION_Y = "location-y";
const QString LOCATION_Z = "location-z";

const QString ORIENTATION_X = "orientation-x";
const QString ORIENTATION_Y = "orientation-y";
const QString ORIENTATION_Z = "orientation-z";
const QString ORIENTATION_W = "orientation-w";

const QString DOMAIN_KEY = "domain";

SnapshotMetaData* Snapshot::parseSnapshotData(QString snapshotPath) {

    if (!QFile(snapshotPath).exists()) {
        return NULL;
    }

    QImage shot(snapshotPath);

    // no location data stored
    if (shot.text(LOCATION_X).isEmpty() || shot.text(LOCATION_Y).isEmpty() || shot.text(LOCATION_Z).isEmpty()) {
        return NULL;
    }

    SnapshotMetaData* data = new SnapshotMetaData();
    data->setLocation(glm::vec3(shot.text(LOCATION_X).toFloat(),
                                shot.text(LOCATION_Y).toFloat(),
                                shot.text(LOCATION_Z).toFloat()));

    data->setOrientation(glm::quat(shot.text(ORIENTATION_W).toFloat(),
                                   shot.text(ORIENTATION_X).toFloat(),
                                   shot.text(ORIENTATION_Y).toFloat(),
                                   shot.text(ORIENTATION_Z).toFloat()));

    data->setDomain(shot.text(DOMAIN_KEY));

    return data;
}

void Snapshot::saveSnapshot(QGLWidget* widget, Profile* profile, Avatar* avatar) {
    QImage shot = widget->grabFrameBuffer();

    glm::vec3 location = avatar->getPosition();
    glm::quat orientation = avatar->getHead().getOrientation();

    // add metadata
    shot.setText("location-x", QString::number(location.x));
    shot.setText("location-y", QString::number(location.y));
    shot.setText("location-z", QString::number(location.z));
    shot.setText(LOCATION_X, QString::number(location.x));
    shot.setText(LOCATION_Y, QString::number(location.y));
    shot.setText(LOCATION_Z, QString::number(location.z));

    shot.setText(ORIENTATION_X, QString::number(orientation.x));
    shot.setText(ORIENTATION_Y, QString::number(orientation.y));
    shot.setText(ORIENTATION_Z, QString::number(orientation.z));
    shot.setText(ORIENTATION_W, QString::number(orientation.w));

    shot.setText(DOMAIN_KEY, profile->getLastDomain());

    QString formattedLocation = QString("%1_%2_%3").arg(location.x).arg(location.y).arg(location.z);
    // replace decimal . with '-'
    formattedLocation.replace('.', '-');

    QString username = profile->getUsername();
    // normalize username, replace all non-alphanumeric characters with '-'
    username.replace(QRegExp("[^A-Za-z0-9_]"), "-");

@@ -9,19 +9,38 @@
#ifndef __hifi__Snapshot__
#define __hifi__Snapshot__

#include "InterfaceConfig.h"

#include <QString>
#include <QImage>
#include <QGLWidget>

#include <glm/glm.hpp>
#include "avatar/Avatar.h"
#include "avatar/Profile.h"

class SnapshotMetaData {
public:

    QString getDomain() { return _domain; }
    void setDomain(QString domain) { _domain = domain; }

    glm::vec3 getLocation() { return _location; }
    void setLocation(glm::vec3 location) { _location = location; }

    glm::quat getOrientation() { return _orientation; }
    void setOrientation(glm::quat orientation) { _orientation = orientation; }

private:
    QString _domain;
    glm::vec3 _location;
    glm::quat _orientation;
};

class Snapshot {

public:
    static void saveSnapshot(QGLWidget* widget, QString username, glm::vec3 location);

private:
    QString _username;
    static void saveSnapshot(QGLWidget* widget, Profile* profile, Avatar* avatar);
    static SnapshotMetaData* parseSnapshotData(QString snapshotPath);
};

#endif /* defined(__hifi__Snapshot__) */

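The snapshot round trip rests on Qt's per-image text keys: QImage::setText() embeds key/value strings when the image is saved, and QImage::text() reads them back from a loaded file. A self-contained sketch of that round trip using the same keys as the diff; the file name and domain value are made up, and PNG is used here since its text chunks reliably preserve the keys:

    #include <QImage>
    #include <QString>
    #include <cstdio>

    int main() {
        // Write an image carrying the same metadata keys the diff uses.
        QImage shot(64, 64, QImage::Format_RGB32);
        shot.fill(Qt::black);
        shot.setText("location-x", QString::number(1.5f));
        shot.setText("location-y", QString::number(0.0f));
        shot.setText("location-z", QString::number(-3.25f));
        shot.setText("domain", "example.domain");   // hypothetical domain name
        shot.save("snapshot-test.png");             // PNG preserves text keys

        // Read it back the way parseSnapshotData() does.
        QImage loaded("snapshot-test.png");
        if (!loaded.text("location-x").isEmpty()) {
            std::printf("x = %f, domain = %s\n",
                        loaded.text("location-x").toFloat(),
                        qPrintable(loaded.text("domain")));
        }
        return 0;
    }
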
@@ -76,7 +76,10 @@ class AvatarData : public NodeData {

    Q_PROPERTY(glm::quat orientation READ getOrientation WRITE setOrientation)
    Q_PROPERTY(float headPitch READ getHeadPitch WRITE setHeadPitch)

    Q_PROPERTY(float audioLoudness READ getAudioLoudness WRITE setAudioLoudness)
    Q_PROPERTY(float audioAverageLoudness READ getAudioAverageLoudness WRITE setAudioAverageLoudness)

    Q_PROPERTY(QUrl faceModelURL READ getFaceModelURL WRITE setFaceModelURL)
    Q_PROPERTY(QUrl skeletonModelURL READ getSkeletonModelURL WRITE setSkeletonModelURL)
public:

@@ -107,6 +110,12 @@ public:
    float getHeadPitch() const { return _headData->getPitch(); }
    void setHeadPitch(float value) { _headData->setPitch(value); };

    // access to Head().set/getAverageLoudness
    float getAudioLoudness() const { return _headData->getAudioLoudness(); }
    void setAudioLoudness(float value) { _headData->setAudioLoudness(value); }
    float getAudioAverageLoudness() const { return _headData->getAudioAverageLoudness(); }
    void setAudioAverageLoudness(float value) { _headData->setAudioAverageLoudness(value); }

    // Scale
    float getTargetScale() const { return _targetScale; }
    void setTargetScale(float targetScale) { _targetScale = targetScale; }

@@ -41,9 +41,13 @@ public:

    float getRoll() const { return _roll; }
    void setRoll(float roll) { _roll = glm::clamp(roll, MIN_HEAD_ROLL, MAX_HEAD_ROLL); }

    void setAudioLoudness(float audioLoudness) { _audioLoudness = audioLoudness; }

    float getAudioLoudness() const { return _audioLoudness; }
    void setAudioLoudness(float audioLoudness) { _audioLoudness = audioLoudness; }

    float getAudioAverageLoudness() const { return _audioAverageLoudness; }
    void setAudioAverageLoudness(float audioAverageLoudness) { _audioAverageLoudness = audioAverageLoudness; }

    const std::vector<float>& getBlendshapeCoefficients() const { return _blendshapeCoefficients; }

    float getPupilDilation() const { return _pupilDilation; }

@@ -72,6 +76,7 @@ protected:
    float _rightEyeBlink;
    float _averageLoudness;
    float _browAudioLift;
    float _audioAverageLoudness;
    std::vector<float> _blendshapeCoefficients;
    float _pupilDilation;
    AvatarData* _owningAvatar;

@@ -52,6 +52,10 @@ public slots:
    virtual void captureWheelEvents() = 0;
    virtual void releaseWheelEvents() = 0;

    virtual void captureJoystick(int joystickIndex) = 0;
    virtual void releaseJoystick(int joystickIndex) = 0;

    virtual glm::vec2 getViewportDimensions() const = 0;

signals:
    void keyPressEvent(const KeyEvent& event);

@@ -102,6 +102,8 @@ KeyEvent::KeyEvent(const QKeyEvent& event) {
        text = "END";
    } else if (key == Qt::Key_Help) {
        text = "HELP";
    } else if (key == Qt::Key_CapsLock) {
        text = "CAPS LOCK";
    }
}

@@ -208,6 +210,8 @@ void keyEventFromScriptValue(const QScriptValue& object, KeyEvent& event) {
        event.key = Qt::Key_End;
    } else if (event.text.toUpper() == "HELP") {
        event.key = Qt::Key_Help;
    } else if (event.text.toUpper() == "CAPS LOCK") {
        event.key = Qt::Key_CapsLock;
    } else {
        event.key = event.text.at(0).unicode();
    }

@@ -807,7 +807,8 @@ void NodeList::loadData(QSettings *settings) {
    } else {
        _domainHostname = DEFAULT_DOMAIN_HOSTNAME;
    }

    emit domainChanged(_domainHostname);
    settings->endGroup();
}