Merge remote-tracking branch 'upstream/master' into quick_audio_PR

wangyix 2014-07-10 11:56:35 -07:00
commit 35419eb939
62 changed files with 1136 additions and 349 deletions

View file

@ -24,9 +24,12 @@ In order for CMake to find the Qt5 find modules, you will need to set an ENV var
For example, a Qt5 5.2.0 installation to /usr/local/qt5 would require that QT_CMAKE_PREFIX_PATH be set with the following command. This can either be entered directly into your shell session before you build or in your shell profile (e.g.: ~/.bash_profile, ~/.bashrc, ~/.zshrc - this depends on your shell and environment).
export QT_CMAKE_PREFIX_PATH=/usr/local/qt/5.2.0/clang_64/lib/cmake/
The path it needs to be set to will depend on where and how Qt5 was installed. e.g.
export QT_CMAKE_PREFIX_PATH=/usr/local/qt/5.2.0/clang_64/lib/cmake/
export QT_CMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.2.1/lib/cmake
export QT_CMAKE_PREFIX_PATH=/usr/local/opt/qt5/lib/cmake
The path it needs to be set to will depend on where and how Qt5 was installed.
#### Generating build files
Create a build directory in the root of your checkout and then run the CMake build from there. This will keep the rest of the directory clean.
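For example, from the root of your checkout (a minimal sketch; add your usual generator or CMake options as needed):
mkdir build
cd build
cmake ..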

View file

@ -32,14 +32,6 @@ set(CMAKE_INCLUDE_CURRENT_DIR ON)
# Instruct CMake to run moc automatically when needed.
set(CMAKE_AUTOMOC ON)
if (APPLE)
exec_program(uname ARGS -v OUTPUT_VARIABLE DARWIN_VERSION)
string(REGEX MATCH "[0-9]+" DARWIN_VERSION ${DARWIN_VERSION})
if (DARWIN_VERSION GREATER 12)
set(CMAKE_CXX_FLAGS "-stdlib=libstdc++")
endif (DARWIN_VERSION GREATER 12)
endif (APPLE)
# targets not supported on windows
if (NOT WIN32)
add_subdirectory(animation-server)

View file

@ -87,13 +87,14 @@ void MetavoxelServer::sendDeltas() {
int elapsed = now - _lastSend;
_lastSend = now;
_sendTimer.start(qMax(0, 2 * SEND_INTERVAL - elapsed));
_sendTimer.start(qMax(0, 2 * SEND_INTERVAL - qMax(elapsed, SEND_INTERVAL)));
}
MetavoxelSession::MetavoxelSession(const SharedNodePointer& node, MetavoxelServer* server) :
Endpoint(node, new PacketRecord(), NULL),
_server(server),
_reliableDeltaChannel(NULL) {
_reliableDeltaChannel(NULL),
_reliableDeltaID(0) {
connect(&_sequencer, SIGNAL(receivedHighPriorityMessage(const QVariant&)), SLOT(handleMessage(const QVariant&)));
connect(&_sequencer, SIGNAL(sendAcknowledged(int)), SLOT(checkReliableDeltaReceived()));
@ -108,9 +109,7 @@ void MetavoxelSession::update() {
}
// if we're sending a reliable delta, wait until it's acknowledged
if (_reliableDeltaChannel) {
Bitstream& out = _sequencer.startPacket();
out << QVariant::fromValue(MetavoxelDeltaPendingMessage());
_sequencer.endPacket();
sendPacketGroup();
return;
}
Bitstream& out = _sequencer.startPacket();
@ -134,12 +133,16 @@ void MetavoxelSession::update() {
// go back to the beginning with the current packet and note that there's a delta pending
_sequencer.getOutputStream().getUnderlying().device()->seek(start);
out << QVariant::fromValue(MetavoxelDeltaPendingMessage());
MetavoxelDeltaPendingMessage msg = { ++_reliableDeltaID };
out << QVariant::fromValue(msg);
_sequencer.endPacket();
} else {
_sequencer.endPacket();
}
// perhaps send additional packets to fill out the group
sendPacketGroup(1);
}
void MetavoxelSession::handleMessage(const QVariant& message, Bitstream& in) {
@ -176,3 +179,17 @@ void MetavoxelSession::checkReliableDeltaReceived() {
_reliableDeltaData = MetavoxelData();
_reliableDeltaChannel = NULL;
}
void MetavoxelSession::sendPacketGroup(int alreadySent) {
int additionalPackets = _sequencer.notePacketGroup() - alreadySent;
for (int i = 0; i < additionalPackets; i++) {
Bitstream& out = _sequencer.startPacket();
if (_reliableDeltaChannel) {
MetavoxelDeltaPendingMessage msg = { _reliableDeltaID };
out << QVariant::fromValue(msg);
} else {
out << QVariant();
}
_sequencer.endPacket();
}
}

View file

@ -74,6 +74,8 @@ private slots:
private:
void sendPacketGroup(int alreadySent = 0);
MetavoxelServer* _server;
MetavoxelLOD _lod;
@ -83,6 +85,7 @@ private:
MetavoxelData _reliableDeltaData;
MetavoxelLOD _reliableDeltaLOD;
Bitstream::WriteMappings _reliableDeltaWriteMappings;
int _reliableDeltaID;
};
#endif // hifi_MetavoxelServer_h

View file

@ -26,8 +26,8 @@ else ()
set(RTMIDI_SEARCH_DIRS "${RTMIDI_ROOT_DIR}" "$ENV{HIFI_LIB_DIR}/rtmidi")
find_path(RTMIDI_INCLUDE_DIR RtMidi.h PATH_SUFFIXES include HINTS ${RTMIDI_SEARCH_DIRS})
find_file(RTMIDI_CPP NAMES RtMidi.cpp PATH_SUFFIXES src HINTS ${RTMIDI_SEARCH_DIRS})
find_library(RTMIDI_LIBRARY NAMES rtmidi PATH_SUFFIXES lib HINTS ${RTMIDI_SEARCH_DIRS})
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(RTMIDI DEFAULT_MSG RTMIDI_INCLUDE_DIR RTMIDI_CPP)
find_package_handle_standard_args(RTMIDI DEFAULT_MSG RTMIDI_INCLUDE_DIR RTMIDI_LIBRARY)
endif ()

View file

@ -0,0 +1,126 @@
//
// avatarLocalLight.js
//
// Created by Tony Peng on July 2nd, 2014
// Copyright 2014 High Fidelity, Inc.
//
// Set the local light direction and color on the avatar
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var localLightDirections = [ {x: 1.0, y:0.0, z: 0.0}, {x: 0.0, y:1.0, z: 1.0}, {x: 0.0, y:0.0, z: 1.0}, {x: 1.0, y:1.0, z: 1.0} ];
var localLightColors = [ {x: 0.0, y:0.0, z: 0.0}, {x: 0.0, y:0.0, z: 0.0}, {x: 0.0, y:0.0, z: 0.0}, {x: 0.0, y:0.0, z: 0.0} ];
var currentSelection = 0;
var currentNumLights = 1;
var maxNumLights = 2;
function keyPressEvent(event) {
var choice = parseInt(event.text);
if (event.text == "1") {
currentSelection = 0;
print("light election = " + currentSelection);
}
else if (event.text == "2" ) {
currentSelection = 1;
print("light selection = " + currentSelection);
}
else if (event.text == "3" ) {
currentSelection = 2;
print("light selection = " + currentSelection);
}
else if (event.text == "4" ) {
currentSelection = 3;
print("light selection = " + currentSelection);
}
else if (event.text == "5" ) {
localLightColors[currentSelection].x += 0.01;
if ( localLightColors[currentSelection].x > 1.0) {
localLightColors[currentSelection].x = 0.0;
}
MyAvatar.setLocalLightColor(localLightColors[currentSelection], currentSelection);
}
else if (event.text == "6" ) {
localLightColors[currentSelection].y += 0.01;
if ( localLightColors[currentSelection].y > 1.0) {
localLightColors[currentSelection].y = 0.0;
}
MyAvatar.setLocalLightColor(localLightColors[currentSelection], currentSelection);
}
else if (event.text == "7" ) {
localLightColors[currentSelection].z += 0.01;
if ( localLightColors[currentSelection].z > 1.0) {
localLightColors[currentSelection].z = 0.0;
}
MyAvatar.setLocalLightColor(localLightColors[currentSelection], currentSelection);
}
else if (event.text == "8" ) {
localLightDirections[currentSelection].x += 0.01;
if (localLightDirections[currentSelection].x > 1.0) {
localLightDirections[currentSelection].x = -1.0;
}
MyAvatar.setLocalLightDirection(localLightDirections[currentSelection], currentSelection);
}
else if (event.text == "9" ) {
localLightDirections[currentSelection].x -= 0.01;
if (localLightDirections[currentSelection].x < -1.0) {
localLightDirections[currentSelection].x = 1.0;
}
MyAvatar.setLocalLightDirection(localLightDirections[currentSelection], currentSelection);
}
else if (event.text == "[" ) {
localLightDirections[currentSelection].y += 0.01;
if (localLightDirections[currentSelection].y > 1.0) {
localLightDirections[currentSelection].y = -1.0;
}
MyAvatar.setLocalLightDirection(localLightDirections[currentSelection], currentSelection);
}
else if (event.text == "]" ) {
localLightDirections[currentSelection].y -= 0.01;
if (localLightDirections[currentSelection].y < -1.0) {
localLightDirections[currentSelection].y = 1.0;
}
MyAvatar.setLocalLightDirection(localLightDirections[currentSelection], currentSelection);
}
else if (event.text == "," ) {
if (currentNumLights + 1 <= maxNumLights) {
var darkGrayColor = {x:0.3, y:0.3, z:0.3};
// default light
localLightColors[currentNumLights].x = darkGrayColor.x;
localLightColors[currentNumLights].y = darkGrayColor.y;
localLightColors[currentNumLights].z = darkGrayColor.z;
MyAvatar.addLocalLight();
MyAvatar.setLocalLightColor(localLightColors[currentNumLights], currentNumLights);
MyAvatar.setLocalLightDirection(localLightDirections[currentNumLights], currentNumLights);
++currentNumLights;
}
}
else if (event.text == "." ) {
if (currentNumLights - 1 >= 0 ) {
// no light contribution
localLightColors[currentNumLights - 1].x = 0.0;
localLightColors[currentNumLights - 1].y = 0.0;
localLightColors[currentNumLights - 1].z = 0.0;
MyAvatar.removeLocalLight();
--currentNumLights;
}
}
}
Controller.keyPressEvent.connect(keyPressEvent);

View file

@ -1,72 +1,147 @@
//
// cameraExample.js
// clap.js
// examples
//
// Copyright 2014 High Fidelity, Inc.
//
// This sample script watches your hydra hands and makes clapping sound when they come close together fast
// This sample script watches your hydra hands and makes clapping sound when they come close together fast,
// and also watches for the 'shift' key and claps when that key is pressed. Clapping multiple times by pressing
// the shift key again makes the animation and sound match your pace of clapping.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
function length(v) {
return Math.sqrt(v.x * v.x + v.y * v.y + v.z * v.z);
}
var clapAnimation = "https://s3-us-west-1.amazonaws.com/highfidelity-public/animations/ClapAnimations/ClapHands_Standing.fbx";
var ANIMATION_FRAMES_PER_CLAP = 10.0;
var startEndFrames = [];
startEndFrames.push({ start: 0, end: 10});
startEndFrames.push({ start: 10, end: 20});
startEndFrames.push({ start: 20, end: 30});
startEndFrames.push({ start: 30, end: 40});
startEndFrames.push({ start: 41, end: 51});
startEndFrames.push({ start: 53, end: 0});
var lastClapFrame = 0;
var lastAnimFrame = 0;
function printVector(v) {
print(v.x + ", " + v.y + ", " + v.z + "\n");
}
var claps = [];
claps.push(new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/claps/BClap1Rvb.wav"));
claps.push(new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/claps/BClap2Rvb.wav"));
claps.push(new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/claps/BClap3Rvb.wav"));
claps.push(new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/claps/BClap4Rvb.wav"));
claps.push(new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/claps/BClap5Rvb.wav"));
claps.push(new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/claps/BClap6Rvb.wav"));
claps.push(new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/claps/BClap7Rvb.wav"));
claps.push(new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/claps/BClap8Rvb.wav"));
claps.push(new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/claps/BClap9Rvb.wav"));
claps.push(new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/claps/BClap10Rvb.wav"));
var numberOfSounds = claps.length;
function vMinus(a, b) {
var rval = { x: a.x - b.x, y: a.y - b.y, z: a.z - b.z };
return rval;
}
var clappingNow = false;
var collectedClicks = 0;
// First, load the clap sound from a URL
var clap1 = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/claps/clap1.raw");
var clap2 = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/claps/clap2.raw");
var clap3 = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/claps/clap3.raw");
var clap4 = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/claps/clap4.raw");
var clap5 = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/claps/clap5.raw");
var clap6 = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/claps/clap6.raw");
var clapping = new Array();
clapping[0] = false;
clapping[1] = false;
var clickStartTime, clickEndTime;
var clickClappingNow = false;
var CLAP_START_RATE = 15.0;
var clapRate = CLAP_START_RATE;
var startedTimer = false;
function maybePlaySound(deltaTime) {
// Set the location and other info for the sound to play
var palm1Position = Controller.getSpatialControlPosition(0);
var palm2Position = Controller.getSpatialControlPosition(2);
var distanceBetween = length(vMinus(palm1Position, palm2Position));
for (var palm = 0; palm < 2; palm++) {
var palmVelocity = Controller.getSpatialControlVelocity(palm * 2 + 1);
var speed = length(palmVelocity);
const CLAP_SPEED = 0.2;
const CLAP_DISTANCE = 0.2;
var animationDetails = MyAvatar.getAnimationDetails(clapAnimation);
if (!clapping[palm] && (distanceBetween < CLAP_DISTANCE) && (speed > CLAP_SPEED)) {
var options = new AudioInjectionOptions();
options.position = palm1Position;
options.volume = speed / 2.0;
if (options.volume > 1.0) options.volume = 1.0;
which = Math.floor((Math.random() * 6) + 1);
if (which == 1) { Audio.playSound(clap1, options); }
else if (which == 2) { Audio.playSound(clap2, options); }
else if (which == 3) { Audio.playSound(clap3, options); }
else if (which == 4) { Audio.playSound(clap4, options); }
else if (which == 5) { Audio.playSound(clap5, options); }
else { Audio.playSound(clap6, options); }
Audio.playSound(clap, options);
clapping[palm] = true;
} else if (clapping[palm] && (speed < (CLAP_SPEED / 4.0))) {
clapping[palm] = false;
}
var frame = Math.floor(animationDetails.frameIndex);
if (frame != lastAnimFrame) {
lastAnimFrame = frame;
}
for (var i = 0; i < startEndFrames.length; i++) {
if (frame == startEndFrames[i].start && (frame != lastClapFrame)) {
playClap(1.0, Camera.getPosition());
lastClapFrame = frame;
}
}
var palm1Position = MyAvatar.getLeftPalmPosition();
var palm2Position = MyAvatar.getRightPalmPosition();
var distanceBetween = Vec3.length(Vec3.subtract(palm1Position, palm2Position));
var palm1Velocity = Controller.getSpatialControlVelocity(1);
var palm2Velocity = Controller.getSpatialControlVelocity(3);
var closingVelocity = Vec3.length(Vec3.subtract(palm1Velocity, palm2Velocity));
const CLAP_SPEED = 0.7;
const CLAP_DISTANCE = 0.15;
if ((closingVelocity > CLAP_SPEED) && (distanceBetween < CLAP_DISTANCE) && !clappingNow) {
var volume = closingVelocity / 2.0;
if (volume > 1.0) volume = 1.0;
playClap(volume, palm1Position);
clappingNow = true;
} else if (clappingNow && (distanceBetween > CLAP_DISTANCE * 1.2)) {
clappingNow = false;
}
}
function playClap(volume, position) {
var options = new AudioInjectionOptions();
options.position = position;
options.volume = 1.0;
var clip = Math.floor(Math.random() * numberOfSounds);
Audio.playSound(claps[clip], options);
}
var FASTEST_CLAP_INTERVAL = 100.0;
var SLOWEST_CLAP_INTERVAL = 2000.0;
Controller.keyPressEvent.connect(function(event) {
if(event.text == "SHIFT") {
if (!clickClappingNow) {
clickClappingNow = true;
clickStartTime = new Date();
playClap(1.0, Camera.getPosition());
lastClapFrame = 0;
MyAvatar.startAnimation(clapAnimation, clapRate, 1.0, true, false);
} else {
// Adjust animation speed for measured clicking interval
clickEndTime = new Date();
var milliseconds = clickEndTime - clickStartTime;
clickStartTime = new Date();
if ((milliseconds < SLOWEST_CLAP_INTERVAL) && (milliseconds > FASTEST_CLAP_INTERVAL)) {
clapRate = ANIMATION_FRAMES_PER_CLAP * (1000.0 / milliseconds);
playClap(1.0, Camera.getPosition());
MyAvatar.stopAnimation(clapAnimation);
MyAvatar.startAnimation(clapAnimation, clapRate, 1.0, true, false);
}
collectedClicks = collectedClicks + 1;
}
}
});
var CLAP_END_WAIT_MSECS = 300;
Controller.keyReleaseEvent.connect(function(event) {
if (event.text == "SHIFT") {
collectedClicks = 0;
if (!startedTimer) {
collectedClicks = 0;
Script.setTimeout(stopClapping, CLAP_END_WAIT_MSECS);
startedTimer = true;
}
}
});
function stopClapping() {
if (collectedClicks == 0) {
startedTimer = false;
MyAvatar.stopAnimation(clapAnimation);
clapRate = CLAP_START_RATE;
clickClappingNow = false;
} else {
startedTimer = false;
}
}
// Connect a call back that happens every frame

View file

@ -0,0 +1,72 @@
//
// concertCamera.js
//
// Created by Philip Rosedale on June 24, 2014
// Copyright 2014 High Fidelity, Inc.
//
// Move a camera through a series of pre-set locations by pressing number keys
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var oldMode;
var avatarPosition;
var cameraNumber = 0;
var freeCamera = false;
var cameraLocations = [ {x: 8027.5, y: 237.5, z: 7305.7}, {x: 8027.5, y: 237.5, z: 7306.6}, {x: 8027.5, y: 237.5, z: 7308.0}, {x: 8027.5, y: 237.5, z: 7303.0}, {x: 8030.8, y: 238.6, z: 7311.4}, {x: 8030.9, y: 237.1, z: 7308.0} ];
var cameraLookAts = [ {x: 8027.5, y: 237.5, z: 7304.0}, {x: 8027.5, y: 237.5, z: 7305.7}, {x: 8027.5, y: 237.5, z: 7304.0}, {x: 8027.5, y: 237.5, z: 7304.0}, {x: 8027.5, y: 237.5, z: 7304.0}, {x: 8027.5, y: 237.5, z: 7304.0} ];
function saveCameraState() {
oldMode = Camera.getMode();
avatarPosition = MyAvatar.position;
Camera.setModeShiftPeriod(0.0);
Camera.setMode("independent");
}
function restoreCameraState() {
Camera.stopLooking();
Camera.setMode(oldMode);
}
function update(deltaTime) {
if (freeCamera) {
var delta = Vec3.subtract(MyAvatar.position, avatarPosition);
if (Vec3.length(delta) > 0.05) {
cameraNumber = 0;
freeCamera = false;
restoreCameraState();
}
}
}
function keyPressEvent(event) {
var choice = parseInt(event.text);
if ((choice > 0) && (choice <= cameraLocations.length)) {
print("camera " + choice);
if (!freeCamera) {
saveCameraState();
freeCamera = true;
}
Camera.setMode("independent");
Camera.setPosition(cameraLocations[choice - 1]);
Camera.keepLookingAt(cameraLookAts[choice - 1]);
}
if (event.text == "ESC") {
cameraNumber = 0;
freeCamera = false;
restoreCameraState();
}
if (event.text == "0") {
// Show camera location in log
var cameraLocation = Camera.getPosition();
print(cameraLocation.x + ", " + cameraLocation.y + ", " + cameraLocation.z);
}
}
Script.update.connect(update);
Controller.keyPressEvent.connect(keyPressEvent);

View file

@ -0,0 +1,72 @@
//
// concertCamera.js
//
// Created by Philip Rosedale on June 24, 2014
// Copyright 2014 High Fidelity, Inc.
//
// Move a camera through a series of pre-set locations by pressing number keys
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var oldMode;
var avatarPosition;
var cameraNumber = 0;
var freeCamera = false;
var cameraLocations = [ {x: 2921.5, y: 251.3, z: 8254.8}, {x: 2921.5, y: 251.3, z: 8254.4}, {x: 2921.5, y: 251.3, z: 8252.2}, {x: 2921.5, y: 251.3, z: 8247.2}, {x: 2921.4, y: 251.3, z: 8255.7} ];
var cameraLookAts = [ {x: 2921.5, y: 251.3, z: 8255.7}, {x: 2921.5, y: 251.3, z: 8255.7}, {x: 2921.5, y: 251.3, z: 8255.7}, {x: 2921.5, y: 251.3, z: 8255.7}, {x: 2921.4 , y: 251.3, z: 8255.1} ];
function saveCameraState() {
oldMode = Camera.getMode();
avatarPosition = MyAvatar.position;
Camera.setModeShiftPeriod(0.0);
Camera.setMode("independent");
}
function restoreCameraState() {
Camera.stopLooking();
Camera.setMode(oldMode);
}
function update(deltaTime) {
if (freeCamera) {
var delta = Vec3.subtract(MyAvatar.position, avatarPosition);
if (Vec3.length(delta) > 0.05) {
cameraNumber = 0;
freeCamera = false;
restoreCameraState();
}
}
}
function keyPressEvent(event) {
var choice = parseInt(event.text);
if ((choice > 0) && (choice <= cameraLocations.length)) {
print("camera " + choice);
if (!freeCamera) {
saveCameraState();
freeCamera = true;
}
Camera.setMode("independent");
Camera.setPosition(cameraLocations[choice - 1]);
Camera.keepLookingAt(cameraLookAts[choice - 1]);
}
if (event.text == "ESC") {
cameraNumber = 0;
freeCamera = false;
restoreCameraState();
}
if (event.text == "0") {
// Show camera location in log
var cameraLocation = Camera.getPosition();
print(cameraLocation.x + ", " + cameraLocation.y + ", " + cameraLocation.z);
}
}
Script.update.connect(update);
Controller.keyPressEvent.connect(keyPressEvent);

View file

@ -18,13 +18,16 @@ var RIGHT = 1;
var lastLeftFrame = 0;
var lastRightFrame = 0;
var LAST_FRAME = 11.0; // What is the number of the last frame we want to use in the animation?
var SMOOTH_FACTOR = 0.80;
var leftDirection = true;
var rightDirection = true;
var LAST_FRAME = 15.0; // What is the number of the last frame we want to use in the animation?
var SMOOTH_FACTOR = 0.0;
var MAX_FRAMES = 30.0;
Script.update.connect(function(deltaTime) {
var leftTriggerValue = Math.sqrt(Controller.getTriggerValue(LEFT));
var rightTriggerValue = Math.sqrt(Controller.getTriggerValue(RIGHT));
var leftTriggerValue = Controller.getTriggerValue(LEFT);
var rightTriggerValue = Controller.getTriggerValue(RIGHT);
var leftFrame, rightFrame;
@ -32,10 +35,31 @@ Script.update.connect(function(deltaTime) {
leftFrame = (leftTriggerValue * LAST_FRAME) * (1.0 - SMOOTH_FACTOR) + lastLeftFrame * SMOOTH_FACTOR;
rightFrame = (rightTriggerValue * LAST_FRAME) * (1.0 - SMOOTH_FACTOR) + lastRightFrame * SMOOTH_FACTOR;
if (!leftDirection) {
leftFrame = MAX_FRAMES - leftFrame;
}
if (!rightDirection) {
rightFrame = MAX_FRAMES - rightFrame;
}
if ((leftTriggerValue == 1.0) && (leftDirection == true)) {
leftDirection = false;
lastLeftFrame = MAX_FRAMES - leftFrame;
} else if ((leftTriggerValue == 0.0) && (leftDirection == false)) {
leftDirection = true;
lastLeftFrame = leftFrame;
}
if ((rightTriggerValue == 1.0) && (rightDirection == true)) {
rightDirection = false;
lastRightFrame = MAX_FRAMES - rightFrame;
} else if ((rightTriggerValue == 0.0) && (rightDirection == false)) {
rightDirection = true;
lastRightFrame = rightFrame;
}
if ((leftFrame != lastLeftFrame) && leftHandAnimation.length){
MyAvatar.stopAnimation(leftHandAnimation);
MyAvatar.startAnimation(leftHandAnimation, 30.0, 1.0, false, true, leftFrame, leftFrame);
MyAvatar.startAnimation(leftHandAnimation, 30.0, 1.0, false, true, leftFrame, leftFrame);
}
if ((rightFrame != lastRightFrame) && rightHandAnimation.length) {
MyAvatar.stopAnimation(rightHandAnimation);

View file

@ -111,16 +111,6 @@ if (APPLE)
SET(INTERFACE_SRCS ${INTERFACE_SRCS} "${CMAKE_CURRENT_SOURCE_DIR}/interface.icns")
endif()
# RtMidi for scripted MIDI control
find_package(RtMidi)
if (RTMIDI_FOUND AND NOT DISABLE_RTMIDI)
add_definitions(-DHAVE_RTMIDI)
include_directories(SYSTEM ${RTMIDI_INCLUDE_DIR})
set(INTERFACE_SRCS ${INTERFACE_SRCS} "${RTMIDI_CPP}")
endif ()
# create the executable, make it a bundle on OS X
add_executable(${TARGET_NAME} MACOSX_BUNDLE ${INTERFACE_SRCS} ${QM})
@ -151,6 +141,7 @@ find_package(Sixense)
find_package(Visage)
find_package(ZLIB)
find_package(Qxmpp)
find_package(RtMidi)
# include the Sixense library for Razer Hydra if available
if (SIXENSE_FOUND AND NOT DISABLE_SIXENSE)
@ -223,11 +214,18 @@ if (QXMPP_FOUND AND NOT DISABLE_QXMPP)
target_link_libraries(${TARGET_NAME} "${QXMPP_LIBRARY}")
endif (QXMPP_FOUND AND NOT DISABLE_QXMPP)
# link CoreMIDI if we're using RtMidi
if (RTMIDI_FOUND AND APPLE)
find_library(CoreMIDI CoreMIDI)
add_definitions(-D__MACOSX_CORE__)
target_link_libraries(${TARGET_NAME} ${CoreMIDI})
# and with RtMidi for RtMidi control
if (RTMIDI_FOUND AND NOT DISABLE_RTMIDI)
add_definitions(-DHAVE_RTMIDI)
include_directories(SYSTEM ${RTMIDI_INCLUDE_DIR})
target_link_libraries(${TARGET_NAME} "${RTMIDI_LIBRARY}")
if (APPLE)
find_library(CoreMIDI CoreMIDI)
add_definitions(-D__MACOSX_CORE__)
target_link_libraries(${TARGET_NAME} ${CoreMIDI})
endif()
endif()
# include headers for interface and InterfaceConfig.

View file

@ -7,7 +7,9 @@ Stephen Birarda, June 30, 2014
2. Copy RtMidi.h to externals/rtmidi/include.
3. Copy RtMidi.cpp to externals/rtmidi/src.
4. Compile the RtMidi library (example commands below).
5. Copy either librtmidi.dylib (dynamic) or librtmidi.a (static) to externals/rtmidi/lib.
6. Delete your build directory, run cmake and build, and you should be all set.
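For reference, a minimal sketch of the compile-and-copy steps on OS X with the CoreMIDI backend (the exact flags and the <your-checkout> path are illustrative assumptions, not part of the official instructions):
g++ -O2 -c RtMidi.cpp -D__MACOSX_CORE__ -o RtMidi.o
ar rcs librtmidi.a RtMidi.o
cp librtmidi.a <your-checkout>/externals/rtmidi/lib/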

View file

@ -11,9 +11,16 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the diffuse texture
uniform sampler2D diffuseMap;
// local lights
const int MAX_LOCAL_LIGHTS = 2; // 2 lights for now, will probably need more later on
uniform int numLocalLights;
uniform vec3 localLightDirections[MAX_LOCAL_LIGHTS];
uniform vec3 localLightColors[MAX_LOCAL_LIGHTS];
// the interpolated position
varying vec4 position;
@ -25,8 +32,19 @@ void main(void) {
vec4 normalizedNormal = normalize(normal);
float diffuse = dot(normalizedNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse);
// the local light that is always present
vec4 totalLocalLight = vec4(0.0, 0.0, 0.0, 1.0);
for (int i = 0; i < numLocalLights; i++) {
float localDiffuse = dot(normalizedNormal, vec4(localLightDirections[i], 1.0));
float localLight = step(0.0, localDiffuse);
float localLightVal = localDiffuse * localLight;
totalLocalLight += (localLightVal * vec4( localLightColors[i], 0.0));
}
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + totalLocalLight);
// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position - normalize(vec4(position.xyz, 0.0))),

View file

@ -11,6 +11,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
const int MAX_LOCAL_LIGHTS = 4;
// the interpolated position
varying vec4 position;
@ -37,3 +39,4 @@ void main(void) {
// use standard pipeline transform
gl_Position = ftransform();
}

View file

@ -37,9 +37,14 @@ void main(void) {
normalizedBitangent * localNormal.y + normalizedNormal * localNormal.z, 0.0);
float diffuse = dot(viewNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse);
float localDiffuse = dot(viewNormal, gl_LightSource[1].position);
float localLight = step(0.0, localDiffuse);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + gl_FrontLightProduct[1].diffuse * (localDiffuse * localLight));
// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position -
normalize(vec4(vec3(interpolatedPosition), 0.0))), viewNormal));

View file

@ -10,6 +10,11 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
const int MAX_LOCAL_LIGHTS = 2;
uniform int numLocalLights;
uniform vec3 localLightDirections[MAX_LOCAL_LIGHTS];
uniform vec3 localLightColors[MAX_LOCAL_LIGHTS];
// the diffuse texture
uniform sampler2D diffuseMap;
@ -28,8 +33,19 @@ void main(void) {
vec4 normalizedNormal = normalize(normal);
float diffuse = dot(normalizedNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse);
// the local light that is always present
vec4 totalLocalLight = vec4(0.0, 0.0, 0.0, 1.0);
for (int i = 0; i < numLocalLights; i++) {
float localDiffuse = dot(normalizedNormal, vec4(localLightDirections[i], 1.0));
float localLight = step(0.0, localDiffuse);
float localLightVal = localDiffuse * localLight;
totalLocalLight += (localLightVal * vec4( localLightColors[i], 0.0));
}
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight) + totalLocalLight);
// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position - normalize(vec4(position.xyz, 0.0))),
@ -38,4 +54,5 @@ void main(void) {
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) + vec4(pow(specular, gl_FrontMaterial.shininess) *
gl_FrontLightProduct[0].specular.rgb * texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
}

View file

@ -34,6 +34,7 @@ void main(void) {
position += clusterMatrix * gl_Vertex * clusterWeight;
normal += clusterMatrix * vec4(gl_Normal, 0.0) * clusterWeight;
}
position = gl_ModelViewMatrix * position;
normal = normalize(gl_ModelViewMatrix * normal);

View file

@ -354,6 +354,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
// Set the sixense filtering
_sixenseManager.setFilter(Menu::getInstance()->isOptionChecked(MenuOption::FilterSixense));
// Set hand controller velocity filtering
_sixenseManager.setLowVelocityFilter(Menu::getInstance()->isOptionChecked(MenuOption::LowVelocityFilter));
checkVersion();
@ -601,9 +604,19 @@ void Application::paintGL() {
} else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
_myCamera.setTightness(0.0f);
_myCamera.setDistance(MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
_myCamera.setTargetRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
_myCamera.setTargetPosition(_myAvatar->getHead()->calculateAverageEyePosition() + glm::vec3(0, _raiseMirror * _myAvatar->getScale(), 0));
//Only behave like a true mirror when in the OR
if (OculusManager::isConnected()) {
_myCamera.setDistance(MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
_myCamera.setTargetRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
_myCamera.setTargetPosition(_myAvatar->getHead()->calculateAverageEyePosition() + glm::vec3(0, _raiseMirror * _myAvatar->getScale(), 0));
} else {
_myCamera.setTightness(0.0f);
glm::vec3 eyePosition = _myAvatar->getHead()->calculateAverageEyePosition();
float headHeight = eyePosition.y - _myAvatar->getPosition().y;
_myCamera.setDistance(MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
_myCamera.setTargetPosition(_myAvatar->getPosition() + glm::vec3(0, headHeight + (_raiseMirror * _myAvatar->getScale()), 0));
_myCamera.setTargetRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
}
}
// Update camera position
@ -682,13 +695,10 @@ void Application::paintGL() {
}
{
PerformanceTimer perfTimer("paintGL/renderOverlay");
//If alpha is 1, we can render directly to the screen.
if (_applicationOverlay.getAlpha() == 1.0f) {
_applicationOverlay.renderOverlay();
} else {
//Render to to texture so we can fade it
_applicationOverlay.renderOverlay(true);
PerformanceTimer perfTimer("renderOverlay");
// PrioVR will only work if renderOverlay is called, calibration is connected to Application::renderingOverlay()
_applicationOverlay.renderOverlay(true);
if (Menu::getInstance()->isOptionChecked(MenuOption::UserInterface)) {
_applicationOverlay.displayOverlayTexture();
}
}
@ -713,6 +723,7 @@ void Application::resizeGL(int width, int height) {
resetCamerasOnResizeGL(_myCamera, width, height);
glViewport(0, 0, width, height); // shouldn't this account for the menu???
_applicationOverlay.resize();
updateProjectionMatrix();
glLoadIdentity();
@ -1010,6 +1021,9 @@ void Application::keyPressEvent(QKeyEvent* event) {
Menu::getInstance()->triggerOption(MenuOption::FullscreenMirror);
}
break;
case Qt::Key_Slash:
Menu::getInstance()->triggerOption(MenuOption::UserInterface);
break;
case Qt::Key_F:
if (isShifted) {
Menu::getInstance()->triggerOption(MenuOption::DisplayFrustum);
@ -1029,7 +1043,7 @@ void Application::keyPressEvent(QKeyEvent* event) {
}
break;
break;
case Qt::Key_Slash:
case Qt::Key_Percent:
Menu::getInstance()->triggerOption(MenuOption::Stats);
break;
case Qt::Key_Plus:
@ -1355,18 +1369,18 @@ void Application::idle() {
if (timeSinceLastUpdate > IDLE_SIMULATE_MSECS) {
_lastTimeUpdated.start();
{
PerformanceTimer perfTimer("idle/update");
PerformanceTimer perfTimer("update");
PerformanceWarning warn(showWarnings, "Application::idle()... update()");
const float BIGGEST_DELTA_TIME_SECS = 0.25f;
update(glm::clamp((float)timeSinceLastUpdate / 1000.f, 0.f, BIGGEST_DELTA_TIME_SECS));
}
{
PerformanceTimer perfTimer("idle/updateGL");
PerformanceTimer perfTimer("updateGL");
PerformanceWarning warn(showWarnings, "Application::idle()... updateGL()");
_glWidget->updateGL();
}
{
PerformanceTimer perfTimer("idle/rest");
PerformanceTimer perfTimer("rest");
PerformanceWarning warn(showWarnings, "Application::idle()... rest of it");
_idleLoopStdev.addValue(timeSinceLastUpdate);
@ -1378,7 +1392,7 @@ void Application::idle() {
}
if (Menu::getInstance()->isOptionChecked(MenuOption::BuckyBalls)) {
PerformanceTimer perfTimer("idle/rest/_buckyBalls");
PerformanceTimer perfTimer("buckyBalls");
_buckyBalls.simulate(timeSinceLastUpdate / 1000.f, Application::getInstance()->getAvatar()->getHandData());
}
@ -1426,6 +1440,10 @@ void Application::setRenderVoxels(bool voxelRender) {
}
}
void Application::setLowVelocityFilter(bool lowVelocityFilter) {
getSixenseManager()->setLowVelocityFilter(lowVelocityFilter);
}
void Application::doKillLocalVoxels() {
_wantToKillLocalVoxels = true;
}
@ -1792,7 +1810,7 @@ bool Application::isLookingAtMyAvatar(Avatar* avatar) {
}
void Application::updateLOD() {
PerformanceTimer perfTimer("idle/update/updateLOD");
PerformanceTimer perfTimer("LOD");
// adjust it unless we were asked to disable this feature, or if we're currently in throttleRendering mode
if (!Menu::getInstance()->isOptionChecked(MenuOption::DisableAutoAdjustLOD) && !isThrottleRendering()) {
Menu::getInstance()->autoAdjustLOD(_fps);
@ -1802,7 +1820,7 @@ void Application::updateLOD() {
}
void Application::updateMouseRay() {
PerformanceTimer perfTimer("idle/update/updateMouseRay");
PerformanceTimer perfTimer("mouseRay");
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::updateMouseRay()");
@ -1835,8 +1853,6 @@ void Application::updateMouseRay() {
}
void Application::updateFaceshift() {
PerformanceTimer perfTimer("idle/update/updateFaceshift");
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::updateFaceshift()");
@ -1850,8 +1866,6 @@ void Application::updateFaceshift() {
}
void Application::updateVisage() {
PerformanceTimer perfTimer("idle/update/updateVisage");
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::updateVisage()");
@ -1860,11 +1874,11 @@ void Application::updateVisage() {
}
void Application::updateMyAvatarLookAtPosition() {
PerformanceTimer perfTimer("idle/update/updateMyAvatarLookAtPosition");
PerformanceTimer perfTimer("lookAt");
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::updateMyAvatarLookAtPosition()");
_myAvatar->updateLookAtTargetAvatar();
FaceTracker* tracker = getActiveFaceTracker();
bool isLookingAtSomeone = false;
@ -1927,7 +1941,7 @@ void Application::updateMyAvatarLookAtPosition() {
}
void Application::updateThreads(float deltaTime) {
PerformanceTimer perfTimer("idle/update/updateThreads");
PerformanceTimer perfTimer("updateThreads");
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::updateThreads()");
@ -1942,7 +1956,7 @@ void Application::updateThreads(float deltaTime) {
}
void Application::updateMetavoxels(float deltaTime) {
PerformanceTimer perfTimer("idle/update/updateMetavoxels");
PerformanceTimer perfTimer("updateMetavoxels");
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::updateMetavoxels()");
@ -1972,7 +1986,7 @@ void Application::cameraMenuChanged() {
}
void Application::updateCamera(float deltaTime) {
PerformanceTimer perfTimer("idle/update/updateCamera");
PerformanceTimer perfTimer("updateCamera");
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::updateCamera()");
@ -1990,7 +2004,7 @@ void Application::updateCamera(float deltaTime) {
}
void Application::updateDialogs(float deltaTime) {
PerformanceTimer perfTimer("idle/update/updateDialogs");
PerformanceTimer perfTimer("updateDialogs");
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::updateDialogs()");
@ -2007,7 +2021,7 @@ void Application::updateDialogs(float deltaTime) {
}
void Application::updateCursor(float deltaTime) {
PerformanceTimer perfTimer("idle/update/updateCursor");
PerformanceTimer perfTimer("updateCursor");
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::updateCursor()");
@ -2032,83 +2046,69 @@ void Application::updateCursor(float deltaTime) {
}
void Application::update(float deltaTime) {
//PerformanceTimer perfTimer("idle/update"); // NOTE: we track this above in Application::idle()
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::update()");
updateLOD();
updateMouseRay(); // check what's under the mouse and update the mouse voxel
updateFaceshift();
updateVisage();
{
PerformanceTimer perfTimer("idle/update/updateLookAtTargetAvatar");
_myAvatar->updateLookAtTargetAvatar();
}
updateMyAvatarLookAtPosition();
{
PerformanceTimer perfTimer("idle/update/sixense,joystick,prioVR");
PerformanceTimer perfTimer("devices");
updateFaceshift();
updateVisage();
_sixenseManager.update(deltaTime);
_joystickManager.update();
_prioVR.update(deltaTime);
}
{
PerformanceTimer perfTimer("idle/update/updateMyAvatar");
PerformanceTimer perfTimer("myAvatar");
updateMyAvatarLookAtPosition();
updateMyAvatar(deltaTime); // Sample hardware, update view frustum if needed, and send avatar data to mixer/nodes
}
updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...
{
PerformanceTimer perfTimer("idle/update/_avatarManager");
_avatarManager.updateOtherAvatars(deltaTime); //loop through all the other avatars and simulate them...
}
_avatarManager.updateOtherAvatars(deltaTime); //loop through all the other avatars and simulate them...
updateMetavoxels(deltaTime); // update metavoxels
updateCamera(deltaTime); // handle various camera tweaks like off axis projection
updateDialogs(deltaTime); // update various stats dialogs if present
updateCursor(deltaTime); // Handle cursor updates
{
PerformanceTimer perfTimer("idle/update/_particles");
PerformanceTimer perfTimer("particles");
_particles.update(); // update the particles...
}
{
PerformanceTimer perfTimer("idle/update/_particleCollisionSystem");
_particleCollisionSystem.update(); // collide the particles...
{
PerformanceTimer perfTimer("collisions");
_particleCollisionSystem.update(); // collide the particles...
}
}
{
PerformanceTimer perfTimer("idle/update/_models");
PerformanceTimer perfTimer("models");
_models.update(); // update the models...
}
{
PerformanceTimer perfTimer("idle/update/_overlays");
PerformanceTimer perfTimer("overlays");
_overlays.update(deltaTime);
}
{
PerformanceTimer perfTimer("idle/update/emit simulating");
PerformanceTimer perfTimer("emitSimulating");
// let external parties know we're updating
emit simulating(deltaTime);
}
}
void Application::updateMyAvatar(float deltaTime) {
PerformanceTimer perfTimer("updateMyAvatar");
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::updateMyAvatar()");
{
PerformanceTimer perfTimer("updateMyAvatar/_myAvatar->update()");
_myAvatar->update(deltaTime);
}
_myAvatar->update(deltaTime);
{
// send head/hand data to the avatar mixer and voxel server
PerformanceTimer perfTimer("updateMyAvatar/sendToAvatarMixer");
PerformanceTimer perfTimer("send");
QByteArray packet = byteArrayWithPopulatedHeader(PacketTypeAvatarData);
packet.append(_myAvatar->toByteArray());
controlledBroadcastToNodes(packet, NodeSet() << NodeType::AvatarMixer);
@ -2121,13 +2121,13 @@ void Application::updateMyAvatar(float deltaTime) {
// actually need to calculate the view frustum planes to send these details
// to the server.
{
PerformanceTimer perfTimer("updateMyAvatar/loadViewFrustum");
PerformanceTimer perfTimer("loadViewFrustum");
loadViewFrustum(_myCamera, _viewFrustum);
}
// Update my voxel servers with my current voxel query...
{
PerformanceTimer perfTimer("updateMyAvatar/queryOctree");
PerformanceTimer perfTimer("queryOctree");
quint64 now = usecTimestampNow();
quint64 sinceLastQuery = now - _lastQueriedTime;
const quint64 TOO_LONG_SINCE_LAST_QUERY = 3 * USECS_PER_SECOND;
@ -2460,7 +2460,7 @@ glm::vec3 Application::getSunDirection() {
}
void Application::updateShadowMap() {
PerformanceTimer perfTimer("paintGL/updateShadowMap");
PerformanceTimer perfTimer("shadowMap");
QOpenGLFramebufferObject* fbo = _textureCache.getShadowFramebufferObject();
fbo->bind();
glEnable(GL_DEPTH_TEST);
@ -2622,7 +2622,7 @@ QImage Application::renderAvatarBillboard() {
}
void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
PerformanceTimer perfTimer("paintGL/displaySide");
PerformanceTimer perfTimer("display");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "Application::displaySide()");
// transform by eye offset
@ -2656,7 +2656,7 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
// Setup 3D lights (after the camera transform, so that they are positioned in world space)
{
PerformanceTimer perfTimer("paintGL/displaySide/setupWorldLight");
PerformanceTimer perfTimer("lights");
setupWorldLight();
}
@ -2675,7 +2675,7 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
}
if (!selfAvatarOnly && Menu::getInstance()->isOptionChecked(MenuOption::Stars)) {
PerformanceTimer perfTimer("paintGL/displaySide/stars");
PerformanceTimer perfTimer("stars");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::displaySide() ... stars...");
if (!_stars.isStarsLoaded()) {
@ -2704,7 +2704,7 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
// draw the sky dome
if (!selfAvatarOnly && Menu::getInstance()->isOptionChecked(MenuOption::Atmosphere)) {
PerformanceTimer perfTimer("paintGL/displaySide/atmosphere");
PerformanceTimer perfTimer("atmosphere");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::displaySide() ... atmosphere...");
_environment.renderAtmospheres(whichCamera);
@ -2725,13 +2725,13 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
// draw the audio reflector overlay
{
PerformanceTimer perfTimer("paintGL/displaySide/audioReflector");
PerformanceTimer perfTimer("audio");
_audioReflector.render();
}
// Draw voxels
if (Menu::getInstance()->isOptionChecked(MenuOption::Voxels)) {
PerformanceTimer perfTimer("paintGL/displaySide/voxels");
PerformanceTimer perfTimer("voxels");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::displaySide() ... voxels...");
_voxels.render();
@ -2739,14 +2739,14 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
// also, metavoxels
if (Menu::getInstance()->isOptionChecked(MenuOption::Metavoxels)) {
PerformanceTimer perfTimer("paintGL/displaySide/metavoxels");
PerformanceTimer perfTimer("metavoxels");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::displaySide() ... metavoxels...");
_metavoxels.render();
}
if (Menu::getInstance()->isOptionChecked(MenuOption::BuckyBalls)) {
PerformanceTimer perfTimer("paintGL/displaySide/buckyBalls");
PerformanceTimer perfTimer("buckyBalls");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::displaySide() ... bucky balls...");
_buckyBalls.render();
@ -2754,7 +2754,7 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
// render particles...
if (Menu::getInstance()->isOptionChecked(MenuOption::Particles)) {
PerformanceTimer perfTimer("paintGL/displaySide/particles");
PerformanceTimer perfTimer("particles");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::displaySide() ... particles...");
_particles.render();
@ -2762,7 +2762,7 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
// render models...
if (Menu::getInstance()->isOptionChecked(MenuOption::Models)) {
PerformanceTimer perfTimer("paintGL/displaySide/models");
PerformanceTimer perfTimer("models");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::displaySide() ... models...");
_models.render();
@ -2770,7 +2770,7 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
// render the ambient occlusion effect if enabled
if (Menu::getInstance()->isOptionChecked(MenuOption::AmbientOcclusion)) {
PerformanceTimer perfTimer("paintGL/displaySide/AmbientOcclusion");
PerformanceTimer perfTimer("ambientOcclusion");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::displaySide() ... AmbientOcclusion...");
_ambientOcclusionEffect.render();
@ -2785,20 +2785,21 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
bool mirrorMode = (whichCamera.getInterpolatedMode() == CAMERA_MODE_MIRROR);
{
PerformanceTimer perfTimer("paintGL/displaySide/renderAvatars");
PerformanceTimer perfTimer("avatars");
_avatarManager.renderAvatars(mirrorMode ? Avatar::MIRROR_RENDER_MODE : Avatar::NORMAL_RENDER_MODE, selfAvatarOnly);
}
if (!selfAvatarOnly) {
// Render the world box
if (whichCamera.getMode() != CAMERA_MODE_MIRROR && Menu::getInstance()->isOptionChecked(MenuOption::Stats)) {
PerformanceTimer perfTimer("paintGL/displaySide/renderWorldBox");
if (whichCamera.getMode() != CAMERA_MODE_MIRROR && Menu::getInstance()->isOptionChecked(MenuOption::Stats) &&
Menu::getInstance()->isOptionChecked(MenuOption::UserInterface)) {
PerformanceTimer perfTimer("worldBox");
renderWorldBox();
}
// view frustum for debugging
if (Menu::getInstance()->isOptionChecked(MenuOption::DisplayFrustum) && whichCamera.getMode() != CAMERA_MODE_MIRROR) {
PerformanceTimer perfTimer("paintGL/displaySide/ViewFrustum");
PerformanceTimer perfTimer("viewFrustum");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::displaySide() ... renderViewFrustum...");
renderViewFrustum(_viewFrustum);
@ -2806,7 +2807,7 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
// render voxel fades if they exist
if (_voxelFades.size() > 0) {
PerformanceTimer perfTimer("paintGL/displaySide/voxel fades");
PerformanceTimer perfTimer("voxelFades");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::displaySide() ... voxel fades...");
_voxelFadesLock.lockForWrite();
@ -2823,13 +2824,13 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
// give external parties a chance to hook in
{
PerformanceTimer perfTimer("paintGL/displaySide/inWorldInterface");
PerformanceTimer perfTimer("inWorldInterface");
emit renderingInWorldInterface();
}
// render JS/scriptable overlays
{
PerformanceTimer perfTimer("paintGL/displaySide/3dOverlays");
PerformanceTimer perfTimer("3dOverlays");
_overlays.render3D();
}
}

View file

@ -317,6 +317,7 @@ public slots:
void nudgeVoxelsByVector(const VoxelDetail& sourceVoxel, const glm::vec3& nudgeVec);
void setRenderVoxels(bool renderVoxels);
void setLowVelocityFilter(bool lowVelocityFilter);
void doKillLocalVoxels();
void loadDialog();
void loadScriptURLDialog();

interface/src/Hair.cpp
View file

@ -0,0 +1,35 @@
//
// Hair.cpp
// interface/src
//
// Created by Philip on June 26, 2014
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Creates single flexible Verlet-integrated strands that can be used for hair/fur/grass
#include "Hair.h"
#include "Util.h"
#include "world.h"
Hair::Hair() {
qDebug() << "Creating Hair";
}
void Hair::simulate(float deltaTime) {
}
void Hair::render() {
//
// Before calling this function, translate/rotate to the origin of the owning object
glPushMatrix();
glColor3f(1.0f, 1.0f, 0.0f);
glutSolidSphere(1.0f, 15, 15);
glPopMatrix();
}
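Hair::simulate() is still an empty stub; the following is a minimal sketch of the Verlet position-update step such a strand simulation typically uses (HairVertex, GRAVITY, and DAMPING are illustrative assumptions here, not members of Hair.cpp):
#include <vector>
#include <glm/glm.hpp>
struct HairVertex {
    glm::vec3 position;      // position this frame
    glm::vec3 lastPosition;  // position last frame (encodes the implicit velocity)
};
void verletStep(std::vector<HairVertex>& strand, float deltaTime) {
    const glm::vec3 GRAVITY(0.0f, -9.8f, 0.0f);
    const float DAMPING = 0.98f;
    for (int i = 0; i < (int)strand.size(); i++) {
        HairVertex& v = strand[i];
        // x(t + dt) = x(t) + damping * (x(t) - x(t - dt)) + a * dt * dt
        glm::vec3 delta = (v.position - v.lastPosition) * DAMPING;
        v.lastPosition = v.position;
        v.position += delta + GRAVITY * deltaTime * deltaTime;
    }
    // A distance-constraint relaxation pass to keep segment lengths fixed would follow here.
}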

interface/src/Hair.h
View file

@ -0,0 +1,35 @@
//
// Hair.h
// interface/src
//
// Created by Philip on June 26, 2014
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_Hair_h
#define hifi_Hair_h
#include <iostream>
#include <glm/glm.hpp>
#include <SharedUtil.h>
#include "GeometryUtil.h"
#include "InterfaceConfig.h"
#include "Util.h"
class Hair {
public:
Hair();
void simulate(float deltaTime);
void render();
private:
};
#endif // hifi_Hair_h

View file

@ -276,6 +276,7 @@ Menu::Menu() :
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Mirror, Qt::SHIFT | Qt::Key_H, true);
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::FullscreenMirror, Qt::Key_H, false,
appInstance, SLOT(cameraMenuChanged()));
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::UserInterface, Qt::Key_Slash, true);
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::EnableVRMode, 0,
false,
@ -326,7 +327,7 @@ Menu::Menu() :
addDisabledActionAndSeparator(viewMenu, "Stats");
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Stats, Qt::Key_Slash);
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Stats, Qt::Key_Percent);
addActionToQMenuAndActionHash(viewMenu, MenuOption::Log, Qt::CTRL | Qt::Key_L, appInstance, SLOT(toggleLogDialog()));
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Bandwidth, 0, true);
addActionToQMenuAndActionHash(viewMenu, MenuOption::BandwidthDetails, 0, this, SLOT(bandwidthDetails()));
@ -406,9 +407,7 @@ Menu::Menu() :
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::GlowWhenSpeaking, 0, true);
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::ChatCircling, 0, false);
QMenu* oculusOptionsMenu = developerMenu->addMenu("Oculus Options");
addCheckableActionToQMenuAndActionHash(oculusOptionsMenu, MenuOption::DisplayOculusOverlays, 0, true);
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::FocusIndicators, 0, false);
QMenu* sixenseOptionsMenu = developerMenu->addMenu("Sixense Options");
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::SixenseMouseInput, 0, true);
@ -421,6 +420,13 @@ Menu::Menu() :
true,
appInstance->getSixenseManager(),
SLOT(setFilter(bool)));
addCheckableActionToQMenuAndActionHash(handOptionsMenu,
MenuOption::LowVelocityFilter,
0,
true,
appInstance,
SLOT(setLowVelocityFilter(bool)));
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHands, 0, true);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHandTargets, 0, false);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::HandsCollideWithSelf, 0, false);

View file

@ -350,7 +350,6 @@ namespace MenuOption {
const QString DisplayModelBounds = "Display Model Bounds";
const QString DisplayModelElementProxy = "Display Model Element Bounds";
const QString DisplayModelElementChildProxies = "Display Model Element Children";
const QString DisplayOculusOverlays = "Display Oculus Overlays";
const QString DisplayTimingDetails = "Display Timing Details";
const QString DontFadeOnVoxelServerChanges = "Don't Fade In/Out on Voxel Server Changes";
const QString EchoLocalAudio = "Echo Local Audio";
@ -368,7 +367,9 @@ namespace MenuOption {
const QString Faceplus = "Faceplus";
const QString Faceshift = "Faceshift";
const QString FilterSixense = "Smooth Sixense Movement";
const QString LowVelocityFilter = "Low Velocity Filter";
const QString FirstPerson = "First Person";
const QString FocusIndicators = "Focus Indicators";
const QString FrameTimer = "Show Timer";
const QString FrustumRenderMode = "Render Mode";
const QString Fullscreen = "Fullscreen";
@ -439,6 +440,7 @@ namespace MenuOption {
const QString UploadAttachment = "Upload Attachment Model";
const QString UploadHead = "Upload Head Model";
const QString UploadSkeleton = "Upload Skeleton Model";
const QString UserInterface = "User Interface";
const QString Visage = "Visage";
const QString VoxelMode = "Cycle Voxel Mode";
const QString Voxels = "Voxels";

View file

@ -14,12 +14,13 @@
#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>
#include <glm/gtx/vector_angle.hpp>
#include <NodeList.h>
#include <PacketHeaders.h>
#include <SharedUtil.h>
#include <glm/gtc/type_ptr.hpp>
#include <GeometryUtil.h>
#include <NodeList.h>
#include <PacketHeaders.h>
#include <PerfStat.h>
#include <SharedUtil.h>
#include "Application.h"
#include "Avatar.h"
@ -60,7 +61,8 @@ Avatar::Avatar() :
_moving(false),
_collisionGroups(0),
_initialized(false),
_shouldRenderBillboard(true)
_shouldRenderBillboard(true),
_numLocalLights(1)
{
// we may have been created in the network thread, but we live in the main thread
moveToThread(Application::getInstance()->thread());
@ -81,6 +83,23 @@ void Avatar::init() {
_initialized = true;
_shouldRenderBillboard = (getLODDistance() >= BILLBOARD_LOD_DISTANCE);
initializeHair();
for (int i = 0; i < MAX_LOCAL_LIGHTS; i++) {
_localLightColors[i] = glm::vec3(0.0f, 0.0f, 0.0f);
_localLightDirections[i] = glm::vec3(0.0f, 0.0f, 0.0f);
}
glm::vec3 darkGrayColor(0.3f, 0.3f, 0.3f);
glm::vec3 greenColor(0.0f, 1.0f, 0.0f);
glm::vec3 directionX(1.0f, 0.0f, 0.0f);
glm::vec3 directionY(0.0f, 1.0f, 0.0f);
// initialize local lights
_localLightColors[0] = darkGrayColor;
_localLightColors[1] = greenColor;
_localLightDirections[0] = directionX;
_localLightDirections[1] = directionY;
}
glm::vec3 Avatar::getChestPosition() const {
@ -99,6 +118,7 @@ float Avatar::getLODDistance() const {
}
void Avatar::simulate(float deltaTime) {
PerformanceTimer perfTimer("simulate");
if (_scale != _targetScale) {
setScale(_targetScale);
}
@ -118,31 +138,43 @@ void Avatar::simulate(float deltaTime) {
bool inViewFrustum = Application::getInstance()->getViewFrustum()->sphereInFrustum(_position, boundingRadius) !=
ViewFrustum::OUTSIDE;
getHand()->simulate(deltaTime, false);
{
PerformanceTimer perfTimer("hand");
getHand()->simulate(deltaTime, false);
}
_skeletonModel.setLODDistance(getLODDistance());
if (!_shouldRenderBillboard && inViewFrustum) {
if (_hasNewJointRotations) {
PerformanceTimer perfTimer("skeleton");
for (int i = 0; i < _jointData.size(); i++) {
const JointData& data = _jointData.at(i);
_skeletonModel.setJointState(i, data.valid, data.rotation);
}
_skeletonModel.simulate(deltaTime);
}
_skeletonModel.simulate(deltaTime, _hasNewJointRotations);
simulateAttachments(deltaTime);
_hasNewJointRotations = false;
{
PerformanceTimer perfTimer("head");
_skeletonModel.simulate(deltaTime, _hasNewJointRotations);
simulateAttachments(deltaTime);
_hasNewJointRotations = false;
glm::vec3 headPosition = _position;
_skeletonModel.getHeadPosition(headPosition);
Head* head = getHead();
head->setPosition(headPosition);
head->setScale(_scale);
head->simulate(deltaTime, false, _shouldRenderBillboard);
glm::vec3 headPosition = _position;
_skeletonModel.getHeadPosition(headPosition);
Head* head = getHead();
head->setPosition(headPosition);
head->setScale(_scale);
head->simulate(deltaTime, false, _shouldRenderBillboard);
}
if (Menu::getInstance()->isOptionChecked(MenuOption::StringHair)) {
PerformanceTimer perfTimer("hair");
simulateHair(deltaTime);
}
foreach (Hair* hair, _hairs) {
hair->simulate(deltaTime);
}
}
// update position by velocity, and subtract the change added earlier for gravity
@ -219,7 +251,7 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode) {
const float GLOW_DISTANCE = 20.0f;
const float GLOW_MAX_LOUDNESS = 2500.0f;
const float MAX_GLOW = 0.5f;
float GLOW_FROM_AVERAGE_LOUDNESS = ((this == Application::getInstance()->getAvatar())
? 0.0f
: MAX_GLOW * getHeadData()->getAudioLoudness() / GLOW_MAX_LOUDNESS);
@ -230,7 +262,23 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode) {
float glowLevel = _moving && distanceToTarget > GLOW_DISTANCE && renderMode == NORMAL_RENDER_MODE
? 1.0f
: GLOW_FROM_AVERAGE_LOUDNESS;
// local lights directions and colors
getSkeletonModel().setNumLocalLights(_numLocalLights);
getHead()->getFaceModel().setNumLocalLights(_numLocalLights);
for (int i = 0; i < MAX_LOCAL_LIGHTS; i++) {
glm::vec3 normalized = glm::normalize(_localLightDirections[i]);
// body
getSkeletonModel().setLocalLightColor(_localLightColors[i], i);
getSkeletonModel().setLocalLightDirection(normalized, i);
// head
getHead()->getFaceModel().setLocalLightColor(_localLightColors[i], i);
getHead()->getFaceModel().setLocalLightDirection(normalized, i);
}
// render body
if (Menu::getInstance()->isOptionChecked(MenuOption::Avatars)) {
renderBody(renderMode, glowLevel);
@ -252,7 +300,7 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode) {
}
// If this is the avatar being looked at, render a little ball above their head
if (_isLookAtTarget) {
if (_isLookAtTarget && Menu::getInstance()->isOptionChecked(MenuOption::FocusIndicators)) {
const float LOOK_AT_INDICATOR_RADIUS = 0.03f;
const float LOOK_AT_INDICATOR_OFFSET = 0.22f;
const float LOOK_AT_INDICATOR_COLOR[] = { 0.8f, 0.0f, 0.0f, 0.75f };
@ -380,6 +428,9 @@ void Avatar::renderBody(RenderMode renderMode, float glowLevel) {
getHead()->render(1.0f, modelRenderMode);
if (Menu::getInstance()->isOptionChecked(MenuOption::StringHair)) {
renderHair();
foreach (Hair* hair, _hairs) {
hair->render();
}
}
}
@ -603,7 +654,6 @@ void Avatar::initializeHair() {
}
}
qDebug() << "Initialize Hair";
}
bool Avatar::shouldRenderHead(const glm::vec3& cameraPosition, RenderMode renderMode) const {
@ -1107,3 +1157,29 @@ void Avatar::setShowDisplayName(bool showDisplayName) {
}
void Avatar::setLocalLightDirection(const glm::vec3& direction, int lightIndex) {
_localLightDirections[lightIndex] = direction;
qDebug( "set light %d direction ( %f, %f, %f )\n", lightIndex, direction.x, direction.y, direction.z );
}
void Avatar::setLocalLightColor(const glm::vec3& color, int lightIndex) {
_localLightColors[lightIndex] = color;
qDebug( "set light %d color ( %f, %f, %f )\n", lightIndex, color.x, color.y, color.z );
}
void Avatar::addLocalLight() {
if (_numLocalLights + 1 <= MAX_LOCAL_LIGHTS) {
++_numLocalLights;
}
qDebug("ADD LOCAL LIGHT (numLocalLights = %d)\n", _numLocalLights);
}
void Avatar::removeLocalLight() {
if (_numLocalLights - 1 >= 0) {
--_numLocalLights;
}
qDebug("REMOVE LOCAL LIGHT (numLocalLights = %d)\n", _numLocalLights);
}

View file

@ -19,6 +19,7 @@
#include <AvatarData.h>
#include "Hair.h"
#include "Hand.h"
#include "Head.h"
#include "InterfaceConfig.h"
@ -154,11 +155,16 @@ public:
public slots:
void updateCollisionGroups();
void setLocalLightDirection(const glm::vec3& direction, int lightIndex);
void setLocalLightColor(const glm::vec3& color, int lightIndex);
void addLocalLight();
void removeLocalLight();
signals:
void collisionWithAvatar(const QUuid& myUUID, const QUuid& theirUUID, const CollisionInfo& collision);
protected:
QVector<Hair*> _hairs;
SkeletonModel _skeletonModel;
QVector<Model*> _attachmentModels;
float _bodyYawDelta;
@ -174,9 +180,14 @@ protected:
glm::vec3 _mouseRayDirection;
float _stringLength;
bool _moving; ///< set when position is changing
quint32 _collisionGroups;
// always-present local lighting for the avatar
glm::vec3 _localLightDirections[MAX_LOCAL_LIGHTS];
glm::vec3 _localLightColors[MAX_LOCAL_LIGHTS];
int _numLocalLights;
// protected methods...
glm::vec3 getBodyRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }
glm::vec3 getBodyUpDirection() const { return getOrientation() * IDENTITY_UP; }

View file

@ -41,9 +41,13 @@ void AvatarManager::init() {
}
void AvatarManager::updateOtherAvatars(float deltaTime) {
if (_avatarHash.size() < 2) {
return;
}
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::updateAvatars()");
PerformanceTimer perfTimer("otherAvatars");
Application* applicationInstance = Application::getInstance();
glm::vec3 mouseOrigin = applicationInstance->getMouseRayOrigin();
glm::vec3 mouseDirection = applicationInstance->getMouseRayDirection();

View file

@ -49,9 +49,9 @@ void FaceModel::simulate(float deltaTime, bool fullUpdate) {
void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
// get the rotation axes in joint space and use them to adjust the rotation
glm::mat3 axes = glm::mat3_cast(glm::quat());
glm::mat3 inverse = glm::mat3(glm::inverse(parentState.getTransform() * glm::translate(state.getDefaultTranslationInParentFrame()) *
glm::mat3 inverse = glm::mat3(glm::inverse(parentState.getTransform() * glm::translate(state.getDefaultTranslationInConstrainedFrame()) *
joint.preTransform * glm::mat4_cast(joint.preRotation)));
state.setRotationInParentFrame(glm::angleAxis(- RADIANS_PER_DEGREE * _owningHead->getFinalRoll(), glm::normalize(inverse * axes[2]))
state.setRotationInConstrainedFrame(glm::angleAxis(- RADIANS_PER_DEGREE * _owningHead->getFinalRoll(), glm::normalize(inverse * axes[2]))
* glm::angleAxis(RADIANS_PER_DEGREE * _owningHead->getFinalYaw(), glm::normalize(inverse * axes[1]))
* glm::angleAxis(- RADIANS_PER_DEGREE * _owningHead->getFinalPitch(), glm::normalize(inverse * axes[0]))
* joint.rotation);
@ -61,14 +61,14 @@ void FaceModel::maybeUpdateEyeRotation(const JointState& parentState, const FBXJ
// likewise with the eye joints
// NOTE: at the moment we do the math in the world-frame, hence the inverse transform is more complex than usual.
glm::mat4 inverse = glm::inverse(glm::mat4_cast(_rotation) * parentState.getTransform() *
glm::translate(state.getDefaultTranslationInParentFrame()) *
glm::translate(state.getDefaultTranslationInConstrainedFrame()) *
joint.preTransform * glm::mat4_cast(joint.preRotation * joint.rotation));
glm::vec3 front = glm::vec3(inverse * glm::vec4(_owningHead->getFinalOrientationInWorldFrame() * IDENTITY_FRONT, 0.0f));
glm::vec3 lookAt = glm::vec3(inverse * glm::vec4(_owningHead->getLookAtPosition() +
_owningHead->getSaccade() - _translation, 1.0f));
glm::quat between = rotationBetween(front, lookAt);
const float MAX_ANGLE = 30.0f * RADIANS_PER_DEGREE;
state.setRotationInParentFrame(glm::angleAxis(glm::clamp(glm::angle(between), -MAX_ANGLE, MAX_ANGLE), glm::axis(between)) *
state.setRotationInConstrainedFrame(glm::angleAxis(glm::clamp(glm::angle(between), -MAX_ANGLE, MAX_ANGLE), glm::axis(between)) *
joint.rotation);
}

View file

@ -108,15 +108,10 @@ void MyAvatar::reset() {
}
void MyAvatar::update(float deltaTime) {
PerformanceTimer perfTimer("MyAvatar::update/");
Head* head = getHead();
head->relaxLean(deltaTime);
{
PerformanceTimer perfTimer("MyAvatar::update/updateFromTrackers");
updateFromTrackers(deltaTime);
}
updateFromTrackers(deltaTime);
if (Menu::getInstance()->isOptionChecked(MenuOption::MoveWithLean)) {
PerformanceTimer perfTimer("MyAvatar::update/moveWithLean");
// Faceshift drive is enabled, set the avatar drive based on the head position
moveWithLean();
}
@ -127,19 +122,14 @@ void MyAvatar::update(float deltaTime) {
head->setAudioAverageLoudness(audio->getAudioAverageInputLoudness());
if (_motionBehaviors & AVATAR_MOTION_OBEY_ENVIRONMENTAL_GRAVITY) {
PerformanceTimer perfTimer("MyAvatar::update/gravityWork");
setGravity(Application::getInstance()->getEnvironment()->getGravity(getPosition()));
}
{
PerformanceTimer perfTimer("MyAvatar::update/simulate");
simulate(deltaTime);
}
simulate(deltaTime);
}
void MyAvatar::simulate(float deltaTime) {
PerformanceTimer perfTimer("MyAvatar::simulate");
PerformanceTimer perfTimer("simulate");
if (_scale != _targetScale) {
float scale = (1.0f - SMOOTHING_RATIO) * _scale + SMOOTHING_RATIO * _targetScale;
setScale(scale);
@ -150,31 +140,28 @@ void MyAvatar::simulate(float deltaTime) {
_handState = HAND_STATE_NULL;
{
PerformanceTimer perfTimer("MyAvatar::simulate/updateOrientation");
PerformanceTimer perfTimer("transform");
updateOrientation(deltaTime);
}
{
PerformanceTimer perfTimer("MyAvatar::simulate/updatePosition");
updatePosition(deltaTime);
}
{
PerformanceTimer perfTimer("MyAvatar::simulate/hand Collision,simulate");
PerformanceTimer perfTimer("hand");
// update avatar skeleton and simulate hand and head
getHand()->simulate(deltaTime, true);
}
{
PerformanceTimer perfTimer("MyAvatar::simulate/_skeletonModel.simulate()");
PerformanceTimer perfTimer("skeleton");
_skeletonModel.simulate(deltaTime);
}
{
PerformanceTimer perfTimer("MyAvatar::simulate/simulateAttachments");
PerformanceTimer perfTimer("attachments");
simulateAttachments(deltaTime);
}
{
PerformanceTimer perfTimer("MyAvatar::simulate/copy joints");
PerformanceTimer perfTimer("joints");
// copy out the skeleton joints from the model
_jointData.resize(_skeletonModel.getJointStateCount());
for (int i = 0; i < _jointData.size(); i++) {
@ -184,7 +171,7 @@ void MyAvatar::simulate(float deltaTime) {
}
{
PerformanceTimer perfTimer("MyAvatar::simulate/head Simulate");
PerformanceTimer perfTimer("head");
Head* head = getHead();
glm::vec3 headPosition;
if (!_skeletonModel.getHeadPosition(headPosition)) {
@ -196,14 +183,17 @@ void MyAvatar::simulate(float deltaTime) {
}
{
PerformanceTimer perfTimer("MyAvatar::simulate/hair Simulate");
PerformanceTimer perfTimer("hair");
if (Menu::getInstance()->isOptionChecked(MenuOption::StringHair)) {
simulateHair(deltaTime);
foreach (Hair* hair, _hairs) {
hair->simulate(deltaTime);
}
}
}
{
PerformanceTimer perfTimer("MyAvatar::simulate/ragdoll");
PerformanceTimer perfTimer("ragdoll");
if (Menu::getInstance()->isOptionChecked(MenuOption::CollideAsRagdoll)) {
const float minError = 0.01f;
const int maxIterations = 10;
@ -216,7 +206,7 @@ void MyAvatar::simulate(float deltaTime) {
// now that we're done stepping the avatar forward in time, compute new collisions
if (_collisionGroups != 0) {
PerformanceTimer perfTimer("MyAvatar::simulate/_collisionGroups");
PerformanceTimer perfTimer("collisions");
Camera* myCamera = Application::getInstance()->getCamera();
float radius = getSkeletonHeight() * COLLISION_RADIUS_SCALE;
@ -225,18 +215,18 @@ void MyAvatar::simulate(float deltaTime) {
radius *= COLLISION_RADIUS_SCALAR;
}
if (_collisionGroups & COLLISION_GROUP_ENVIRONMENT) {
PerformanceTimer perfTimer("MyAvatar::simulate/updateCollisionWithEnvironment");
PerformanceTimer perfTimer("environment");
updateCollisionWithEnvironment(deltaTime, radius);
}
if (_collisionGroups & COLLISION_GROUP_VOXELS) {
PerformanceTimer perfTimer("MyAvatar::simulate/updateCollisionWithVoxels");
PerformanceTimer perfTimer("voxels");
updateCollisionWithVoxels(deltaTime, radius);
} else {
_trapDuration = 0.0f;
}
/* TODO: Andrew to make this work
if (_collisionGroups & COLLISION_GROUP_AVATARS) {
PerformanceTimer perfTimer("MyAvatar::simulate/updateCollisionWithAvatars");
PerformanceTimer perfTimer("avatars");
updateCollisionWithAvatars(deltaTime);
}
*/
@ -896,6 +886,9 @@ void MyAvatar::renderBody(RenderMode renderMode, float glowLevel) {
getHead()->render(1.0f, modelRenderMode);
if (Menu::getInstance()->isOptionChecked(MenuOption::StringHair)) {
renderHair();
foreach (Hair* hair, _hairs) {
hair->render();
}
}
}
getHand()->render(true, modelRenderMode);
@ -910,7 +903,6 @@ bool MyAvatar::shouldRenderHead(const glm::vec3& cameraPosition, RenderMode rend
}
float MyAvatar::computeDistanceToFloor(const glm::vec3& startPoint) {
PerformanceTimer perfTimer("MyAvatar::computeDistanceToFloor()");
glm::vec3 direction = -_worldUpDirection;
OctreeElement* elementHit; // output from findRayIntersection
float distance = FLT_MAX; // output from findRayIntersection
@ -976,7 +968,6 @@ void MyAvatar::updateOrientation(float deltaTime) {
const float NEARBY_FLOOR_THRESHOLD = 5.0f;
void MyAvatar::updatePosition(float deltaTime) {
PerformanceTimer perfTimer("MyAvatar::updatePosition");
float keyboardInput = fabsf(_driveKeys[FWD] - _driveKeys[BACK]) +
fabsf(_driveKeys[RIGHT] - _driveKeys[LEFT]) +
fabsf(_driveKeys[UP] - _driveKeys[DOWN]);

View file

@ -219,7 +219,7 @@ void SkeletonModel::applyPalmData(int jointIndex, PalmData& palm) {
JointState& parentState = _jointStates[parentJointIndex];
parentState.setRotationFromBindFrame(palmRotation, PALM_PRIORITY);
// lock hand to forearm by slamming its rotation (in parent-frame) to identity
_jointStates[jointIndex].setRotationInParentFrame(glm::quat());
_jointStates[jointIndex].setRotationInConstrainedFrame(glm::quat());
} else {
inverseKinematics(jointIndex, palmPosition, palmRotation, PALM_PRIORITY);
}
@ -255,9 +255,9 @@ void SkeletonModel::maybeUpdateLeanRotation(const JointState& parentState, const
}
// get the rotation axes in joint space and use them to adjust the rotation
glm::mat3 axes = glm::mat3_cast(glm::quat());
glm::mat3 inverse = glm::mat3(glm::inverse(parentState.getTransform() * glm::translate(state.getDefaultTranslationInParentFrame()) *
glm::mat3 inverse = glm::mat3(glm::inverse(parentState.getTransform() * glm::translate(state.getDefaultTranslationInConstrainedFrame()) *
joint.preTransform * glm::mat4_cast(joint.preRotation * joint.rotation)));
state.setRotationInParentFrame(glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanSideways(),
state.setRotationInConstrainedFrame(glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanSideways(),
glm::normalize(inverse * axes[2])) * glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanForward(),
glm::normalize(inverse * axes[0])) * joint.rotation);
}

View file

@ -109,6 +109,7 @@ public:
void resetShapePositionsToDefaultPose(); // DEBUG method
void renderRagdoll();
protected:
// virtual overrides from Ragdoll

View file

@ -11,6 +11,7 @@
#include <QTimer>
#include <PerfStat.h>
#include <SharedUtil.h>
#include "Application.h"
@ -75,6 +76,7 @@ void Faceshift::update() {
if (!isActive()) {
return;
}
PerformanceTimer perfTimer("faceshift");
// get the euler angles relative to the window
glm::vec3 eulers = glm::degrees(safeEulerAngles(_headRotation * glm::quat(glm::radians(glm::vec3(
(_eyeGazeLeftPitch + _eyeGazeRightPitch) / 2.0f, (_eyeGazeLeftYaw + _eyeGazeRightYaw) / 2.0f, 0.0f)))));

View file

@ -12,9 +12,10 @@
#include <limits>
#include <QtDebug>
#include <glm/glm.hpp>
#include <PerfStat.h>
#include "JoystickManager.h"
using namespace std;
@ -46,6 +47,7 @@ JoystickManager::~JoystickManager() {
void JoystickManager::update() {
#ifdef HAVE_SDL
PerformanceTimer perfTimer("joystick");
SDL_JoystickUpdate();
for (int i = 0; i < _joystickStates.size(); i++) {

View file

@ -269,7 +269,7 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
// We only need to render the overlays to a texture once, then we just render the texture on the hemisphere
// PrioVR will only work if renderOverlay is called, calibration is connected to Application::renderingOverlay()
applicationOverlay.renderOverlay(true);
const bool displayOverlays = Menu::getInstance()->isOptionChecked(MenuOption::DisplayOculusOverlays);
const bool displayOverlays = Menu::getInstance()->isOptionChecked(MenuOption::UserInterface);
//Bind our framebuffer object. If we are rendering the glow effect, we let the glow effect shader take care of it
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {

View file

@ -13,6 +13,7 @@
#include <QtDebug>
#include <FBXReader.h>
#include <PerfStat.h>
#include "Application.h"
#include "PrioVR.h"
@ -166,6 +167,7 @@ void PrioVR::update(float deltaTime) {
if (!_skeletalDevice) {
return;
}
PerformanceTimer perfTimer("PrioVR");
unsigned int timestamp;
yei_getLastStreamDataAll(_skeletalDevice, (char*)_jointRotations.data(),
_jointRotations.size() * sizeof(glm::quat), &timestamp);

View file

@ -11,6 +11,8 @@
#include <vector>
#include <PerfStat.h>
#include "Application.h"
#include "SixenseManager.h"
#include "UserActivityLogger.h"
@ -32,6 +34,7 @@ SixenseManager::SixenseManager() {
#ifdef HAVE_SIXENSE
_lastMovement = 0;
_amountMoved = glm::vec3(0.0f);
_lowVelocityFilter = false;
_calibrationState = CALIBRATION_STATE_IDLE;
// By default we assume the _neckBase (in orb frame) is as high above the orb
@ -60,10 +63,8 @@ SixenseManager::~SixenseManager() {
void SixenseManager::setFilter(bool filter) {
#ifdef HAVE_SIXENSE
if (filter) {
qDebug("Sixense Filter ON");
sixenseSetFilterEnabled(1);
} else {
qDebug("Sixense Filter OFF");
sixenseSetFilterEnabled(0);
}
#endif
@ -84,7 +85,10 @@ void SixenseManager::update(float deltaTime) {
if (sixenseGetNumActiveControllers() == 0) {
_hydrasConnected = false;
return;
} else if (!_hydrasConnected) {
}
PerformanceTimer perfTimer("sixense");
if (!_hydrasConnected) {
_hydrasConnected = true;
UserActivityLogger::getInstance().connectedDevice("spatial_controller", "hydra");
}
@ -160,17 +164,21 @@ void SixenseManager::update(float deltaTime) {
}
palm->setRawVelocity(rawVelocity); // meters/sec
// Use a velocity sensitive filter to damp small motions and preserve large ones with
// no latency.
float velocityFilter = glm::clamp(1.0f - glm::length(rawVelocity), 0.0f, 1.0f);
palm->setRawPosition(palm->getRawPosition() * velocityFilter + position * (1.0f - velocityFilter));
// adjustment for hydra controllers fit into hands
float sign = (i == 0) ? -1.0f : 1.0f;
rotation *= glm::angleAxis(sign * PI/4.0f, glm::vec3(0.0f, 0.0f, 1.0f));
palm->setRawRotation(safeMix(palm->getRawRotation(), rotation, 1.0f - velocityFilter));
if (_lowVelocityFilter) {
// Use a velocity sensitive filter to damp small motions and preserve large ones with
// no latency.
float velocityFilter = glm::clamp(1.0f - glm::length(rawVelocity), 0.0f, 1.0f);
palm->setRawPosition(palm->getRawPosition() * velocityFilter + position * (1.0f - velocityFilter));
palm->setRawRotation(safeMix(palm->getRawRotation(), rotation, 1.0f - velocityFilter));
} else {
palm->setRawPosition(position);
palm->setRawRotation(rotation);
}
// use the velocity to determine whether there's any movement (if the hand isn't new)
const float MOVEMENT_DISTANCE_THRESHOLD = 0.003f;
_amountMoved += rawVelocity * deltaTime;
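The optional low-velocity filter above blends each new Hydra sample against the previous palm pose with a weight that falls off with speed: a nearly still hand keeps most of its old pose (heavy smoothing), while fast motion passes through with no added latency. A minimal sketch of that blend, with previousPosition/newPosition as stand-in names:

    // k ~ 1 when the hand is nearly still, k = 0 at or above 1 m/s
    float k = glm::clamp(1.0f - glm::length(rawVelocity), 0.0f, 1.0f);
    glm::vec3 filteredPos = previousPosition * k + newPosition * (1.0f - k);
    glm::quat filteredRot = safeMix(previousRotation, newRotation, 1.0f - k);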

View file

@ -47,6 +47,7 @@ public:
public slots:
void setFilter(bool filter);
void setLowVelocityFilter(bool lowVelocityFilter) { _lowVelocityFilter = lowVelocityFilter; };
private:
#ifdef HAVE_SIXENSE
@ -80,6 +81,8 @@ private:
bool _bumperPressed[2];
int _oldX[2];
int _oldY[2];
bool _lowVelocityFilter;
};
#endif // hifi_SixenseManager_h

View file

@ -100,6 +100,7 @@ void TV3DManager::display(Camera& whichCamera) {
// We only need to render the overlays to a texture once, then we just render the texture as a quad
// PrioVR will only work if renderOverlay is called, calibration is connected to Application::renderingOverlay()
applicationOverlay.renderOverlay(true);
const bool displayOverlays = Menu::getInstance()->isOptionChecked(MenuOption::UserInterface);
if (glowEnabled) {
Application::getInstance()->getGlowEffect()->prepare();
@ -128,7 +129,9 @@ void TV3DManager::display(Camera& whichCamera) {
glLoadIdentity();
Application::getInstance()->displaySide(whichCamera);
applicationOverlay.displayOverlayTexture3DTV(whichCamera, _aspect, fov);
if (displayOverlays) {
applicationOverlay.displayOverlayTexture3DTV(whichCamera, _aspect, fov);
}
}
glPopMatrix();
glDisable(GL_SCISSOR_TEST);
@ -154,7 +157,9 @@ void TV3DManager::display(Camera& whichCamera) {
glLoadIdentity();
Application::getInstance()->displaySide(whichCamera);
applicationOverlay.displayOverlayTexture3DTV(whichCamera, _aspect, fov);
if (displayOverlays) {
applicationOverlay.displayOverlayTexture3DTV(whichCamera, _aspect, fov);
}
}
glPopMatrix();
glDisable(GL_SCISSOR_TEST);

View file

@ -11,6 +11,7 @@
#include <QHash>
#include <PerfStat.h>
#include <SharedUtil.h>
#include <FBXReader.h>
@ -128,6 +129,7 @@ void Visage::update() {
if (!_active) {
return;
}
PerformanceTimer perfTimer("visage");
_headRotation = glm::quat(glm::vec3(-_data->faceRotation[0], -_data->faceRotation[1], _data->faceRotation[2]));
_headTranslation = (glm::vec3(_data->faceTranslation[0], _data->faceTranslation[1], _data->faceTranslation[2]) -
_headOrigin) * TRANSLATION_SCALE;

View file

@ -121,7 +121,7 @@ static void maybeRelease(QOpenGLFramebufferObject* fbo) {
}
QOpenGLFramebufferObject* GlowEffect::render(bool toTexture) {
PerformanceTimer perfTimer("paintGL/glowEffect");
PerformanceTimer perfTimer("glowEffect");
QOpenGLFramebufferObject* primaryFBO = Application::getInstance()->getTextureCache()->getPrimaryFramebufferObject();
primaryFBO->release();

View file

@ -26,7 +26,7 @@ JointState::JointState() :
JointState::JointState(const JointState& other) : _constraint(NULL) {
_transform = other._transform;
_rotation = other._rotation;
_rotationInParentFrame = other._rotationInParentFrame;
_rotationInConstrainedFrame = other._rotationInConstrainedFrame;
_animationPriority = other._animationPriority;
_fbxJoint = other._fbxJoint;
// DO NOT copy _constraint
@ -43,7 +43,7 @@ JointState::~JointState() {
void JointState::setFBXJoint(const FBXJoint* joint) {
assert(joint != NULL);
_rotationInParentFrame = joint->rotation;
_rotationInConstrainedFrame = joint->rotation;
// NOTE: JointState does not own the FBXJoint to which it points.
_fbxJoint = joint;
if (_constraint) {
@ -68,24 +68,24 @@ void JointState::copyState(const JointState& state) {
_animationPriority = state._animationPriority;
_transform = state._transform;
_rotation = extractRotation(_transform);
_rotationInParentFrame = state._rotationInParentFrame;
_rotationInConstrainedFrame = state._rotationInConstrainedFrame;
_visibleTransform = state._visibleTransform;
_visibleRotation = extractRotation(_visibleTransform);
_visibleRotationInParentFrame = state._visibleRotationInParentFrame;
_visibleRotationInConstrainedFrame = state._visibleRotationInConstrainedFrame;
// DO NOT copy _fbxJoint or _constraint
}
void JointState::computeTransform(const glm::mat4& parentTransform) {
glm::quat modifiedRotation = _fbxJoint->preRotation * _rotationInParentFrame * _fbxJoint->postRotation;
glm::mat4 modifiedTransform = _fbxJoint->preTransform * glm::mat4_cast(modifiedRotation) * _fbxJoint->postTransform;
glm::quat rotationInConstrainedFrame = _fbxJoint->preRotation * _rotationInConstrainedFrame * _fbxJoint->postRotation;
glm::mat4 modifiedTransform = _fbxJoint->preTransform * glm::mat4_cast(rotationInConstrainedFrame) * _fbxJoint->postTransform;
_transform = parentTransform * glm::translate(_fbxJoint->translation) * modifiedTransform;
_rotation = extractRotation(_transform);
}
void JointState::computeVisibleTransform(const glm::mat4& parentTransform) {
glm::quat modifiedRotation = _fbxJoint->preRotation * _visibleRotationInParentFrame * _fbxJoint->postRotation;
glm::mat4 modifiedTransform = _fbxJoint->preTransform * glm::mat4_cast(modifiedRotation) * _fbxJoint->postTransform;
glm::quat rotationInConstrainedFrame = _fbxJoint->preRotation * _visibleRotationInConstrainedFrame * _fbxJoint->postRotation;
glm::mat4 modifiedTransform = _fbxJoint->preTransform * glm::mat4_cast(rotationInConstrainedFrame) * _fbxJoint->postTransform;
_visibleTransform = parentTransform * glm::translate(_fbxJoint->translation) * modifiedTransform;
_visibleRotation = extractRotation(_visibleTransform);
}
@ -97,7 +97,7 @@ glm::quat JointState::getRotationFromBindToModelFrame() const {
void JointState::restoreRotation(float fraction, float priority) {
assert(_fbxJoint != NULL);
if (priority == _animationPriority || _animationPriority == 0.0f) {
setRotationInParentFrame(safeMix(_rotationInParentFrame, _fbxJoint->rotation, fraction));
setRotationInConstrainedFrame(safeMix(_rotationInConstrainedFrame, _fbxJoint->rotation, fraction));
_animationPriority = 0.0f;
}
}
@ -106,11 +106,11 @@ void JointState::setRotationFromBindFrame(const glm::quat& rotation, float prior
// rotation is from bind- to model-frame
assert(_fbxJoint != NULL);
if (priority >= _animationPriority) {
glm::quat targetRotation = _rotationInParentFrame * glm::inverse(_rotation) * rotation * glm::inverse(_fbxJoint->inverseBindRotation);
glm::quat targetRotation = _rotationInConstrainedFrame * glm::inverse(_rotation) * rotation * glm::inverse(_fbxJoint->inverseBindRotation);
if (constrain && _constraint) {
_constraint->softClamp(targetRotation, _rotationInParentFrame, 0.5f);
_constraint->softClamp(targetRotation, _rotationInConstrainedFrame, 0.5f);
}
setRotationInParentFrame(targetRotation);
setRotationInConstrainedFrame(targetRotation);
_animationPriority = priority;
}
}
@ -137,12 +137,12 @@ void JointState::applyRotationDelta(const glm::quat& delta, bool constrain, floa
_animationPriority = priority;
if (!constrain || _constraint == NULL) {
// no constraints
_rotationInParentFrame = _rotationInParentFrame * glm::inverse(_rotation) * delta * _rotation;
_rotationInConstrainedFrame = _rotationInConstrainedFrame * glm::inverse(_rotation) * delta * _rotation;
_rotation = delta * _rotation;
return;
}
glm::quat targetRotation = _rotationInParentFrame * glm::inverse(_rotation) * delta * _rotation;
setRotationInParentFrame(targetRotation);
glm::quat targetRotation = _rotationInConstrainedFrame * glm::inverse(_rotation) * delta * _rotation;
setRotationInConstrainedFrame(targetRotation);
}
/// Applies delta rotation to joint but mixes a little bit of the default pose as well.
@ -154,30 +154,30 @@ void JointState::mixRotationDelta(const glm::quat& delta, float mixFactor, float
return;
}
_animationPriority = priority;
glm::quat targetRotation = _rotationInParentFrame * glm::inverse(_rotation) * delta * _rotation;
glm::quat targetRotation = _rotationInConstrainedFrame * glm::inverse(_rotation) * delta * _rotation;
if (mixFactor > 0.0f && mixFactor <= 1.0f) {
targetRotation = safeMix(targetRotation, _fbxJoint->rotation, mixFactor);
}
if (_constraint) {
_constraint->softClamp(targetRotation, _rotationInParentFrame, 0.5f);
_constraint->softClamp(targetRotation, _rotationInConstrainedFrame, 0.5f);
}
setRotationInParentFrame(targetRotation);
setRotationInConstrainedFrame(targetRotation);
}
glm::quat JointState::computeParentRotation() const {
// R = Rp * Rpre * r * Rpost
// Rp = R * (Rpre * r * Rpost)^
return _rotation * glm::inverse(_fbxJoint->preRotation * _rotationInParentFrame * _fbxJoint->postRotation);
return _rotation * glm::inverse(_fbxJoint->preRotation * _rotationInConstrainedFrame * _fbxJoint->postRotation);
}
void JointState::setRotationInParentFrame(const glm::quat& targetRotation) {
void JointState::setRotationInConstrainedFrame(const glm::quat& targetRotation) {
glm::quat parentRotation = computeParentRotation();
_rotationInParentFrame = targetRotation;
_rotationInConstrainedFrame = targetRotation;
// R' = Rp * Rpre * r' * Rpost
_rotation = parentRotation * _fbxJoint->preRotation * _rotationInParentFrame * _fbxJoint->postRotation;
_rotation = parentRotation * _fbxJoint->preRotation * _rotationInConstrainedFrame * _fbxJoint->postRotation;
}
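For reference, the two functions above follow the factorization already noted in the comments (same notation, with '^' meaning inverse):

    // R  = Rp * Rpre * r  * Rpost        full joint-to-model rotation
    // Rp = R * (Rpre * r * Rpost)^       computeParentRotation()
    // R' = Rp * Rpre * r' * Rpost        setRotationInConstrainedFrame(r')
    // Swapping r for r' changes only the constrained-frame factor; the parent
    // contribution Rp recovered from the old rotation is preserved.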
const glm::vec3& JointState::getDefaultTranslationInParentFrame() const {
const glm::vec3& JointState::getDefaultTranslationInConstrainedFrame() const {
assert(_fbxJoint != NULL);
return _fbxJoint->translation;
}
@ -185,5 +185,5 @@ const glm::vec3& JointState::getDefaultTranslationInParentFrame() const {
void JointState::slaveVisibleTransform() {
_visibleTransform = _transform;
_visibleRotation = _rotation;
_visibleRotationInParentFrame = _rotationInParentFrame;
_visibleRotationInConstrainedFrame = _rotationInConstrainedFrame;
}

View file

@ -66,14 +66,14 @@ public:
void restoreRotation(float fraction, float priority);
/// \param rotation is from bind- to model-frame
/// computes and sets new _rotationInParentFrame
/// computes and sets new _rotationInConstrainedFrame
/// NOTE: the JointState's model-frame transform/rotation are NOT updated!
void setRotationFromBindFrame(const glm::quat& rotation, float priority, bool constrain = false);
void setRotationInParentFrame(const glm::quat& targetRotation);
const glm::quat& getRotationInParentFrame() const { return _rotationInParentFrame; }
void setRotationInConstrainedFrame(const glm::quat& targetRotation);
const glm::quat& getRotationInConstrainedFrame() const { return _rotationInConstrainedFrame; }
const glm::vec3& getDefaultTranslationInParentFrame() const;
const glm::vec3& getDefaultTranslationInConstrainedFrame() const;
void clearTransformTranslation();
@ -92,11 +92,11 @@ private:
glm::mat4 _transform; // joint- to model-frame
glm::quat _rotation; // joint- to model-frame
glm::quat _rotationInParentFrame; // joint- to parentJoint-frame
glm::quat _rotationInConstrainedFrame; // rotation in frame where angular constraints would be applied
glm::mat4 _visibleTransform;
glm::quat _visibleRotation;
glm::quat _visibleRotationInParentFrame;
glm::quat _visibleRotationInConstrainedFrame;
const FBXJoint* _fbxJoint; // JointState does NOT own its FBXJoint
AngularConstraint* _constraint; // JointState owns its AngularConstraint

View file

@ -460,7 +460,7 @@ void Model::reset() {
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
for (int i = 0; i < _jointStates.size(); i++) {
_jointStates[i].setRotationInParentFrame(geometry.joints.at(i).rotation);
_jointStates[i].setRotationInConstrainedFrame(geometry.joints.at(i).rotation);
}
}
@ -688,7 +688,7 @@ bool Model::getJointState(int index, glm::quat& rotation) const {
if (index == -1 || index >= _jointStates.size()) {
return false;
}
rotation = _jointStates.at(index).getRotationInParentFrame();
rotation = _jointStates.at(index).getRotationInConstrainedFrame();
const glm::quat& defaultRotation = _geometry->getFBXGeometry().joints.at(index).rotation;
return glm::abs(rotation.x - defaultRotation.x) >= EPSILON ||
glm::abs(rotation.y - defaultRotation.y) >= EPSILON ||
@ -701,7 +701,7 @@ void Model::setJointState(int index, bool valid, const glm::quat& rotation, floa
JointState& state = _jointStates[index];
if (priority >= state._animationPriority) {
if (valid) {
state.setRotationInParentFrame(rotation);
state.setRotationInConstrainedFrame(rotation);
state._animationPriority = priority;
} else {
state.restoreRotation(1.0f, priority);
@ -1488,14 +1488,19 @@ void Model::renderMeshes(float alpha, RenderMode mode, bool translucent, bool re
if (cascadedShadows) {
program->setUniform(skinLocations->shadowDistances, Application::getInstance()->getShadowDistances());
}
} else {
// local light uniforms
skinProgram->setUniformValue("numLocalLights", _numLocalLights);
skinProgram->setUniformArray("localLightDirections", _localLightDirections, MAX_LOCAL_LIGHTS);
skinProgram->setUniformArray("localLightColors", _localLightColors, MAX_LOCAL_LIGHTS);
} else {
glMultMatrixf((const GLfloat*)&state.clusterMatrices[0]);
program->bind();
if (cascadedShadows) {
program->setUniform(shadowDistancesLocation, Application::getInstance()->getShadowDistances());
}
}
if (mesh.blendshapes.isEmpty()) {
if (!(mesh.tangents.isEmpty() || mode == SHADOW_RENDER_MODE)) {
activeProgram->setAttributeBuffer(tangentLocation, GL_FLOAT, vertexCount * 2 * sizeof(glm::vec3), 3);
@ -1622,6 +1627,20 @@ void Model::renderMeshes(float alpha, RenderMode mode, bool translucent, bool re
}
}
void Model::setLocalLightDirection(const glm::vec3& direction, int lightIndex) {
assert(lightIndex >= 0 && lightIndex < MAX_LOCAL_LIGHTS);
_localLightDirections[lightIndex] = direction;
}
void Model::setLocalLightColor(const glm::vec3& color, int lightIndex) {
assert(lightIndex >= 0 && lightIndex < MAX_LOCAL_LIGHTS);
_localLightColors[lightIndex] = color;
}
void Model::setNumLocalLights(int numLocalLights) {
_numLocalLights = numLocalLights;
}
void AnimationHandle::setURL(const QUrl& url) {
if (_url != url) {
_animation = Application::getInstance()->getAnimationCache()->getAnimation(_url = url);
@ -1768,7 +1787,7 @@ void AnimationHandle::applyFrame(float frameIndex) {
if (mapping != -1) {
JointState& state = _model->_jointStates[mapping];
if (_priority >= state._animationPriority) {
state.setRotationInParentFrame(safeMix(floorFrame.rotations.at(i), ceilFrame.rotations.at(i), frameFraction));
state.setRotationInConstrainedFrame(safeMix(floorFrame.rotations.at(i), ceilFrame.rotations.at(i), frameFraction));
state._animationPriority = _priority;
}
}

View file

@ -32,6 +32,8 @@ class Shape;
typedef QSharedPointer<AnimationHandle> AnimationHandlePointer;
typedef QWeakPointer<AnimationHandle> WeakAnimationHandlePointer;
const int MAX_LOCAL_LIGHTS = 2;
/// A generic 3D model displaying geometry loaded from a URL.
class Model : public QObject, public PhysicsEntity {
Q_OBJECT
@ -143,6 +145,10 @@ public:
/// Sets blended vertices computed in a separate thread.
void setBlendedVertices(const QVector<glm::vec3>& vertices, const QVector<glm::vec3>& normals);
void setLocalLightDirection(const glm::vec3& direction, int lightIndex);
void setLocalLightColor(const glm::vec3& color, int lightIndex);
void setNumLocalLights(int numLocalLights);
protected:
QSharedPointer<NetworkGeometry> _geometry;
@ -158,6 +164,10 @@ protected:
bool _showTrueJointTransforms;
int _rootIndex;
glm::vec3 _localLightDirections[MAX_LOCAL_LIGHTS];
glm::vec3 _localLightColors[MAX_LOCAL_LIGHTS];
int _numLocalLights;
QVector<JointState> _jointStates;
class MeshState {

View file

@ -10,6 +10,7 @@
//
#include "ProgramObject.h"
#include <glm/gtc/type_ptr.hpp>
ProgramObject::ProgramObject(QObject* parent) : QGLShaderProgram(parent) {
}
@ -22,3 +23,17 @@ void ProgramObject::setUniform(const char* name, const glm::vec3& value) {
setUniformValue(name, value.x, value.y, value.z);
}
void ProgramObject::setUniformArray(const char* name, const glm::vec3* values, int count) {
GLfloat* floatVal = new GLfloat[count*3];
int index = 0;
for (int i = 0; i < count; i++) {
assert(index < count*3);
const float* valPtr = glm::value_ptr(values[i]);
floatVal[index++] = valPtr[0];
floatVal[index++] = valPtr[1];
floatVal[index++] = valPtr[2];
}
setUniformValueArray(name, floatVal, count, 3);
delete[] floatVal;
}
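setUniformArray packs the glm::vec3 array into a flat GLfloat buffer and forwards it to QGLShaderProgram::setUniformValueArray with a tuple size of 3. An equivalent sketch using QVector, which avoids the manual new/delete (an alternative for illustration, not part of this change):

    void ProgramObject::setUniformArray(const char* name, const glm::vec3* values, int count) {
        QVector<GLfloat> floatVal;
        floatVal.reserve(count * 3);
        for (int i = 0; i < count; i++) {
            floatVal << values[i].x << values[i].y << values[i].z;
        }
        setUniformValueArray(name, floatVal.constData(), count, 3);
    }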

View file

@ -23,6 +23,7 @@ public:
void setUniform(int location, const glm::vec3& value);
void setUniform(const char* name, const glm::vec3& value);
void setUniformArray(const char* name, const glm::vec3* values, int count);
};
#endif // hifi_ProgramObject_h

View file

@ -40,7 +40,6 @@ ApplicationOverlay::ApplicationOverlay() :
_framebufferObject(NULL),
_textureFov(DEFAULT_OCULUS_UI_ANGULAR_SIZE * RADIANS_PER_DEGREE),
_alpha(1.0f),
_active(true),
_crosshairTexture(0) {
memset(_reticleActive, 0, sizeof(_reticleActive));
@ -70,8 +69,8 @@ void ApplicationOverlay::renderOverlay(bool renderToTexture) {
QGLWidget* glWidget = application->getGLWidget();
MyAvatar* myAvatar = application->getAvatar();
//Handle fadeing and deactivation/activation of UI
if (_active) {
//Handle fading and deactivation/activation of UI
if (Menu::getInstance()->isOptionChecked(MenuOption::UserInterface)) {
_alpha += FADE_SPEED;
if (_alpha > 1.0f) {
_alpha = 1.0f;
@ -485,7 +484,8 @@ void ApplicationOverlay::renderControllerPointers() {
if (palmData->getTrigger() == 1.0f) {
if (!triggerPressed[index]) {
if (bumperPressed[index]) {
_active = !_active;
Menu::getInstance()->setIsOptionChecked(MenuOption::UserInterface,
!Menu::getInstance()->isOptionChecked(MenuOption::UserInterface));
}
triggerPressed[index] = true;
}
@ -495,7 +495,8 @@ void ApplicationOverlay::renderControllerPointers() {
if ((controllerButtons & BUTTON_FWD)) {
if (!bumperPressed[index]) {
if (triggerPressed[index]) {
_active = !_active;
Menu::getInstance()->setIsOptionChecked(MenuOption::UserInterface,
!Menu::getInstance()->isOptionChecked(MenuOption::UserInterface));
}
bumperPressed[index] = true;
}
@ -998,6 +999,14 @@ void ApplicationOverlay::renderTexturedHemisphere() {
}
void ApplicationOverlay::resize() {
if (_framebufferObject != NULL) {
delete _framebufferObject;
_framebufferObject = NULL;
}
// _framebufferObject is recreated at the correct size the next time it is accessed via getFramebufferObject().
}
QOpenGLFramebufferObject* ApplicationOverlay::getFramebufferObject() {
if (!_framebufferObject) {
_framebufferObject = new QOpenGLFramebufferObject(Application::getInstance()->getGLWidget()->size());

View file

@ -32,6 +32,7 @@ public:
void displayOverlayTexture3DTV(Camera& whichCamera, float aspectRatio, float fov);
void computeOculusPickRay(float x, float y, glm::vec3& direction) const;
void getClickLocation(int &x, int &y) const;
void resize();
// Getters
QOpenGLFramebufferObject* getFramebufferObject();
@ -68,7 +69,6 @@ private:
float _magSizeMult[NUMBER_OF_MAGNIFIERS];
float _alpha;
bool _active;
GLuint _crosshairTexture;
};

View file

@ -665,6 +665,8 @@ void Stats::display(
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)voxelStats.str().c_str(), color);
}
PerformanceTimer::tallyAllTimerRecords();
// TODO: the display of these timing details should all be moved to JavaScript
if (_expanded && Menu::getInstance()->isOptionChecked(MenuOption::DisplayTimingDetails)) {
// Timing details...

View file

@ -21,6 +21,8 @@
#include <SharedUtil.h>
#include <NodeList.h>
#include <glm/gtc/type_ptr.hpp>
#include "Application.h"
#include "InterfaceConfig.h"
#include "Menu.h"
@ -57,6 +59,8 @@ GLubyte identityIndicesRight[] = { 1, 2, 6, 1, 6, 5 };
GLubyte identityIndicesFront[] = { 0, 2, 1, 0, 3, 2 };
GLubyte identityIndicesBack[] = { 4, 5, 6, 4, 6, 7 };
static glm::vec3 grayColor = glm::vec3(0.3f, 0.3f, 0.3f);
VoxelSystem::VoxelSystem(float treeScale, int maxVoxels, VoxelTree* tree)
: NodeData(),
_treeScale(treeScale),
@ -67,7 +71,10 @@ VoxelSystem::VoxelSystem(float treeScale, int maxVoxels, VoxelTree* tree)
_inOcclusions(false),
_showCulledSharedFaces(false),
_usePrimitiveRenderer(false),
_renderer(0)
_renderer(0),
_drawHaze(false),
_farHazeDistance(300.0f),
_hazeColor(grayColor)
{
_voxelsInReadArrays = _voxelsInWriteArrays = _voxelsUpdated = 0;
@ -373,6 +380,7 @@ void VoxelSystem::cleanupVoxelMemory() {
delete[] _readVoxelDirtyArray;
_writeVoxelDirtyArray = _readVoxelDirtyArray = NULL;
_readArraysLock.unlock();
}
}
@ -454,6 +462,7 @@ void VoxelSystem::initVoxelMemory() {
_readVoxelShaderData = new VoxelShaderVBOData[_maxVoxels];
_memoryUsageRAM += (sizeof(VoxelShaderVBOData) * _maxVoxels);
} else {
// Global Normals mode uses a technique of not including normals on any voxel vertices, and instead
@ -521,13 +530,23 @@ void VoxelSystem::initVoxelMemory() {
_shadowDistancesLocation = _cascadedShadowMapProgram.uniformLocation("shadowDistances");
_cascadedShadowMapProgram.release();
}
}
_renderer = new PrimitiveRenderer(_maxVoxels);
_initialized = true;
_writeArraysLock.unlock();
_readArraysLock.unlock();
// fog for haze
if (_drawHaze) {
GLfloat fogColor[] = {_hazeColor.x, _hazeColor.y, _hazeColor.z, 1.0f};
glFogi(GL_FOG_MODE, GL_LINEAR);
glFogfv(GL_FOG_COLOR, fogColor);
glFogf(GL_FOG_START, 0.0f);
glFogf(GL_FOG_END, _farHazeDistance);
}
}
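With GL_LINEAR fog the fixed-function pipeline computes the blend from the start/end values set above, so voxels fade completely into _hazeColor by _farHazeDistance (300 m by default):

    // f = (GL_FOG_END - z) / (GL_FOG_END - GL_FOG_START)     per-fragment fog factor
    // color = f * fragColor + (1.0 - f) * fogColor           with START = 0, END = _farHazeDistance
    // GL_FOG itself is only enabled around the voxel draw calls in render() when _drawHaze is set.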
int VoxelSystem::parseData(const QByteArray& packet) {
@ -1114,6 +1133,7 @@ int VoxelSystem::updateNodeInArrays(VoxelTreeElement* node, bool reuseIndex, boo
node->setBufferIndex(nodeIndex);
node->setVoxelSystem(this);
}
// populate the array with points for the 8 vertices and RGB color for each added vertex
updateArraysDetails(nodeIndex, startVertex, voxelScale, node->getColor());
}
@ -1131,11 +1151,13 @@ int VoxelSystem::updateNodeInArrays(VoxelTreeElement* node, bool reuseIndex, boo
void VoxelSystem::updateArraysDetails(glBufferIndex nodeIndex, const glm::vec3& startVertex,
float voxelScale, const nodeColor& color) {
if (_initialized && nodeIndex <= _maxVoxels) {
_writeVoxelDirtyArray[nodeIndex] = true;
if (_useVoxelShader) {
// write in position, scale, and color for the voxel
if (_writeVoxelShaderData) {
VoxelShaderVBOData* writeVerticesAt = &_writeVoxelShaderData[nodeIndex];
writeVerticesAt->x = startVertex.x * TREE_SCALE;
@ -1157,6 +1179,7 @@ void VoxelSystem::updateArraysDetails(glBufferIndex nodeIndex, const glm::vec3&
}
}
}
}
}
@ -1407,6 +1430,10 @@ void VoxelSystem::render() {
}
} else
if (!_usePrimitiveRenderer) {
if (_drawHaze) {
glEnable(GL_FOG);
}
PerformanceWarning warn(showWarnings, "render().. TRIANGLES...");
{
@ -1478,6 +1505,10 @@ void VoxelSystem::render() {
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}
if (_drawHaze) {
glDisable(GL_FOG);
}
}
else {
applyScaleAndBindProgram(texture);

View file

@ -273,7 +273,11 @@ private:
static unsigned short _sSwizzledOcclusionBits[64]; ///< Swizzle value of bit pairs of the value of index
static unsigned char _sOctantIndexToBitMask[8]; ///< Map octant index to partition mask
static unsigned char _sOctantIndexToSharedBitMask[8][8]; ///< Map octant indices to shared partition mask
// haze
bool _drawHaze;
float _farHazeDistance;
glm::vec3 _hazeColor;
};
#endif // hifi_VoxelSystem_h

View file

@ -79,7 +79,7 @@ ReliableChannel* DatagramSequencer::getReliableInputChannel(int index) {
return channel;
}
int DatagramSequencer::startPacketGroup(int desiredPackets) {
int DatagramSequencer::notePacketGroup(int desiredPackets) {
// figure out how much data we have enqueued and increase the number of packets desired
int totalAvailable = 0;
foreach (ReliableChannel* channel, _reliableOutputChannels) {

View file

@ -108,10 +108,10 @@ public:
/// Returns the input channel at the specified index, creating it if necessary.
ReliableChannel* getReliableInputChannel(int index = 0);
/// Starts a packet group.
/// Notes that we're sending a group of packets.
/// \param desiredPackets the number of packets we'd like to write in the group
/// \return the number of packets to write in the group
int startPacketGroup(int desiredPackets = 1);
int notePacketGroup(int desiredPackets = 1);
/// Starts a new packet for transmission.
/// \return a reference to the Bitstream to use for writing to the packet

View file

@ -39,9 +39,12 @@ Endpoint::~Endpoint() {
}
void Endpoint::update() {
Bitstream& out = _sequencer.startPacket();
writeUpdateMessage(out);
_sequencer.endPacket();
int packetsToSend = _sequencer.notePacketGroup();
for (int i = 0; i < packetsToSend; i++) {
Bitstream& out = _sequencer.startPacket();
writeUpdateMessage(out);
_sequencer.endPacket();
}
}
int Endpoint::parseData(const QByteArray& packet) {

View file

@ -87,7 +87,8 @@ void MetavoxelClientManager::updateClient(MetavoxelClient* client) {
MetavoxelClient::MetavoxelClient(const SharedNodePointer& node, MetavoxelClientManager* manager) :
Endpoint(node, new PacketRecord(), new PacketRecord()),
_manager(manager),
_reliableDeltaChannel(NULL) {
_reliableDeltaChannel(NULL),
_reliableDeltaID(0) {
connect(_sequencer.getReliableInputChannel(RELIABLE_DELTA_CHANNEL_INDEX),
SIGNAL(receivedMessage(const QVariant&, Bitstream&)), SLOT(handleMessage(const QVariant&, Bitstream&)));
@ -139,10 +140,16 @@ void MetavoxelClient::handleMessage(const QVariant& message, Bitstream& in) {
}
}
} else if (userType == MetavoxelDeltaPendingMessage::Type) {
if (!_reliableDeltaChannel) {
// check the id to make sure this is not a delta we've already processed
int id = message.value<MetavoxelDeltaPendingMessage>().id;
if (id > _reliableDeltaID) {
_reliableDeltaID = id;
_reliableDeltaChannel = _sequencer.getReliableInputChannel(RELIABLE_DELTA_CHANNEL_INDEX);
_reliableDeltaChannel->getBitstream().copyPersistentMappings(_sequencer.getInputStream());
_reliableDeltaLOD = getLastAcknowledgedSendRecord()->getLOD();
PacketRecord* receiveRecord = getLastAcknowledgedReceiveRecord();
_remoteDataLOD = receiveRecord->getLOD();
_remoteData = receiveRecord->getData();
}
} else {
Endpoint::handleMessage(message, in);

View file

@ -74,6 +74,7 @@ private:
ReliableChannel* _reliableDeltaChannel;
MetavoxelLOD _reliableDeltaLOD;
int _reliableDeltaID;
};
#endif // hifi_MetavoxelClientManager_h

View file

@ -64,6 +64,10 @@ DECLARE_STREAMABLE_METATYPE(MetavoxelDeltaMessage)
/// A message indicating that metavoxel delta information is being sent on a reliable channel.
class MetavoxelDeltaPendingMessage {
STREAMABLE
public:
STREAM int id;
};
DECLARE_STREAMABLE_METATYPE(MetavoxelDeltaPendingMessage)
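The new id field lets receivers tell a fresh pending delta from retransmissions of one they are already reading, since the message is resent every frame until the reliable delta is acknowledged. Both sides of the pattern, as used elsewhere in this change:

    // sender: bump once per new reliable delta, repeat the same id while waiting
    MetavoxelDeltaPendingMessage msg = { ++_reliableDeltaID };
    out << QVariant::fromValue(msg);

    // receiver: only attach to the reliable channel when the id advances
    int id = message.value<MetavoxelDeltaPendingMessage>().id;
    if (id > _reliableDeltaID) {
        _reliableDeltaID = id;
        // ... switch to the RELIABLE_DELTA_CHANNEL_INDEX input channel ...
    }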

View file

@ -515,8 +515,6 @@ void ScriptEngine::run() {
qint64 now = usecTimestampNow();
float deltaTime = (float) (now - lastUpdate) / (float) USECS_PER_SECOND;
emit update(deltaTime);
lastUpdate = now;
if (_engine.hasUncaughtException()) {
int line = _engine.uncaughtExceptionLineNumber();
@ -524,6 +522,9 @@ void ScriptEngine::run() {
emit errorMessage("Uncaught exception at (" + _fileNameString + ") line" + QString::number(line) + ":" + _engine.uncaughtException().toString());
_engine.clearExceptions();
}
emit update(deltaTime);
lastUpdate = now;
}
emit scriptEnding();

View file

@ -17,6 +17,12 @@
#include "PerfStat.h"
#include "SharedUtil.h"
// ----------------------------------------------------------------------------
// PerformanceWarning
// ----------------------------------------------------------------------------
// Static class members initialization here!
bool PerformanceWarning::_suppressShortTimings = false;
@ -52,14 +58,50 @@ PerformanceWarning::~PerformanceWarning() {
}
};
// ----------------------------------------------------------------------------
// PerformanceTimerRecord
// ----------------------------------------------------------------------------
const quint64 STALE_STAT_PERIOD = 4 * USECS_PER_SECOND;
void PerformanceTimerRecord::tallyResult(const quint64& now) {
if (_numAccumulations > 0) {
_numTallies++;
_movingAverage.updateAverage(_runningTotal - _lastTotal);
_lastTotal = _runningTotal;
_numAccumulations = 0;
_expiry = now + STALE_STAT_PERIOD;
}
}
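accumulateResult and tallyResult split timing into two phases: every timer destructor accumulates into its record during the frame, and Stats::display folds the frame total into the moving average once via tallyAllTimerRecords. A sketch of one frame with two "hand" timers (the microsecond values are illustrative):

    // dtor #1: accumulateResult(120)  -> _runningTotal = 120, _numAccumulations = 1
    // dtor #2: accumulateResult(80)   -> _runningTotal = 200, _numAccumulations = 2
    // tallyResult(now): _movingAverage.updateAverage(200 - _lastTotal);
    //                   _lastTotal = 200; _numAccumulations = 0; _expiry = now + STALE_STAT_PERIOD;
    // The moving average therefore tracks time per frame rather than per invocation.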
// ----------------------------------------------------------------------------
// PerformanceTimer
// ----------------------------------------------------------------------------
QString PerformanceTimer::_fullName;
QMap<QString, PerformanceTimerRecord> PerformanceTimer::_records;
PerformanceTimer::~PerformanceTimer() {
quint64 end = usecTimestampNow();
quint64 elapsedusec = (end - _start);
PerformanceTimerRecord& namedRecord = _records[_name];
namedRecord.recordResult(elapsedusec);
quint64 elapsedusec = (usecTimestampNow() - _start);
PerformanceTimerRecord& namedRecord = _records[_fullName];
namedRecord.accumulateResult(elapsedusec);
_fullName.resize(_fullName.size() - (_name.size() + 1));
}
// static
void PerformanceTimer::tallyAllTimerRecords() {
QMap<QString, PerformanceTimerRecord>::iterator recordsItr = _records.begin();
QMap<QString, PerformanceTimerRecord>::const_iterator recordsEnd = _records.end();
quint64 now = usecTimestampNow();
while (recordsItr != recordsEnd) {
recordsItr.value().tallyResult(now);
if (recordsItr.value().isStale(now)) {
// purge stale records
recordsItr = _records.erase(recordsItr);
} else {
++recordsItr;
}
}
}
void PerformanceTimer::dumpAllTimerRecords() {

View file

@ -25,13 +25,13 @@
class PerformanceWarning {
private:
quint64 _start;
const char* _message;
bool _renderWarningsOn;
bool _alwaysDisplay;
quint64* _runningTotal;
quint64* _totalCalls;
static bool _suppressShortTimings;
quint64 _start;
const char* _message;
bool _renderWarningsOn;
bool _alwaysDisplay;
quint64* _runningTotal;
quint64* _totalCalls;
static bool _suppressShortTimings;
public:
PerformanceWarning(bool renderWarnings, const char* message, bool alwaysDisplay = false,
@ -52,38 +52,47 @@ public:
class PerformanceTimerRecord {
public:
PerformanceTimerRecord() : _runningTotal(0), _totalCalls(0) {}
PerformanceTimerRecord() : _runningTotal(0), _lastTotal(0), _numAccumulations(0), _numTallies(0), _expiry(0) {}
void recordResult(quint64 elapsed) { _runningTotal += elapsed; _totalCalls++; _movingAverage.updateAverage(elapsed); }
quint64 getAverage() const { return (_totalCalls == 0) ? 0 : _runningTotal / _totalCalls; }
quint64 getMovingAverage() const { return (_totalCalls == 0) ? 0 : _movingAverage.getAverage(); }
quint64 getCount() const { return _totalCalls; }
void accumulateResult(const quint64& elapsed) { _runningTotal += elapsed; ++_numAccumulations; }
void tallyResult(const quint64& now);
bool isStale(const quint64& now) const { return now > _expiry; }
quint64 getAverage() const { return (_numTallies == 0) ? 0 : _runningTotal / _numTallies; }
quint64 getMovingAverage() const { return (_numTallies == 0) ? 0 : _movingAverage.getAverage(); }
quint64 getCount() const { return _numTallies; }
private:
quint64 _runningTotal;
quint64 _totalCalls;
SimpleMovingAverage _movingAverage;
quint64 _runningTotal;
quint64 _lastTotal;
quint64 _numAccumulations;
quint64 _numTallies;
quint64 _expiry;
SimpleMovingAverage _movingAverage;
};
class PerformanceTimer {
public:
PerformanceTimer(const QString& name) :
_start(usecTimestampNow()),
_name(name) { }
_start(0),
_name(name) {
_fullName.append("/");
_fullName.append(_name);
_start = usecTimestampNow();
}
quint64 elapsed() const { return (usecTimestampNow() - _start); };
~PerformanceTimer();
static const PerformanceTimerRecord& getTimerRecord(const QString& name) { return _records[name]; };
static const QMap<QString, PerformanceTimerRecord>& getAllTimerRecords() { return _records; };
static void tallyAllTimerRecords();
static void dumpAllTimerRecords();
private:
quint64 _start;
QString _name;
static QMap<QString, PerformanceTimerRecord> _records;
quint64 _start;
QString _name;
static QString _fullName;
static QMap<QString, PerformanceTimerRecord> _records;
};
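The static _fullName acts as a path: each constructor appends "/name" before recording its start time, and each destructor trims it back after accumulating, so nested timers produce hierarchical record keys. A minimal sketch of the resulting names (note the static member assumes timers are created and destroyed on a single thread):

    {
        PerformanceTimer simulateTimer("simulate");   // _fullName == "/simulate"
        {
            PerformanceTimer handTimer("hand");       // _fullName == "/simulate/hand"
            // ... hand work ...
        }   // accumulates into _records["/simulate/hand"], _fullName back to "/simulate"
    }       // accumulates into _records["/simulate"], _fullName back to ""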

View file

@ -14,8 +14,8 @@
SimpleMovingAverage::SimpleMovingAverage(int numSamplesToAverage) :
_numSamples(0),
_average(0),
_eventDeltaAverage(0),
_average(0.0f),
_eventDeltaAverage(0.0f),
WEIGHTING(1.0f / numSamplesToAverage),
ONE_MINUS_WEIGHTING(1 - WEIGHTING) {
@ -45,8 +45,8 @@ int SimpleMovingAverage::updateAverage(float sample) {
void SimpleMovingAverage::reset() {
_numSamples = 0;
_average = 0;
_eventDeltaAverage = 0;
_average = 0.0f;
_eventDeltaAverage = 0.0f;
}
float SimpleMovingAverage::getEventDeltaAverage() const {
@ -55,5 +55,5 @@ float SimpleMovingAverage::getEventDeltaAverage() const {
}
float SimpleMovingAverage::getAverageSampleValuePerSecond() const {
return _average * (1 / getEventDeltaAverage());
return _average * (1.0f / getEventDeltaAverage());
}

View file

@ -647,7 +647,8 @@ TestEndpoint::TestEndpoint(Mode mode) :
_mode(mode),
_highPriorityMessagesToSend(0.0f),
_reliableMessagesToSend(0.0f),
_reliableDeltaChannel(NULL) {
_reliableDeltaChannel(NULL),
_reliableDeltaID(0) {
connect(&_sequencer, SIGNAL(receivedHighPriorityMessage(const QVariant&)),
SLOT(handleHighPriorityMessage(const QVariant&)));
@ -858,7 +859,7 @@ bool TestEndpoint::simulate(int iterationNumber) {
bytesReceived += datagram.size();
_remainingPipelineCapacity += datagram.size();
}
int packetCount = _sequencer.startPacketGroup();
int packetCount = _sequencer.notePacketGroup();
groupsSent++;
maxPacketsPerGroup = qMax(maxPacketsPerGroup, packetCount);
for (int i = 0; i < packetCount; i++) {
@ -908,7 +909,8 @@ bool TestEndpoint::simulate(int iterationNumber) {
// if we're sending a reliable delta, wait until it's acknowledged
if (_reliableDeltaChannel) {
Bitstream& out = _sequencer.startPacket();
out << QVariant::fromValue(MetavoxelDeltaPendingMessage());
MetavoxelDeltaPendingMessage msg = { _reliableDeltaID };
out << QVariant::fromValue(msg);
_sequencer.endPacket();
return false;
}
@ -932,7 +934,8 @@ bool TestEndpoint::simulate(int iterationNumber) {
_reliableDeltaLOD = _lod;
_sequencer.getOutputStream().getUnderlying().device()->seek(start);
out << QVariant::fromValue(MetavoxelDeltaPendingMessage());
MetavoxelDeltaPendingMessage msg = { ++_reliableDeltaID };
out << QVariant::fromValue(msg);
_sequencer.endPacket();
} else {
@ -1081,15 +1084,22 @@ void TestEndpoint::handleMessage(const QVariant& message, Bitstream& in) {
} else if (userType == MetavoxelDeltaMessage::Type) {
PacketRecord* receiveRecord = getLastAcknowledgedReceiveRecord();
_data.readDelta(receiveRecord->getData(), receiveRecord->getLOD(), in,
_dataLOD = getLastAcknowledgedSendRecord()->getLOD());
_remoteData.readDelta(receiveRecord->getData(), receiveRecord->getLOD(), in,
_remoteDataLOD = getLastAcknowledgedSendRecord()->getLOD());
in.reset();
_data = _remoteData;
compareMetavoxelData();
} else if (userType == MetavoxelDeltaPendingMessage::Type) {
if (!_reliableDeltaChannel) {
int id = message.value<MetavoxelDeltaPendingMessage>().id;
if (id > _reliableDeltaID) {
_reliableDeltaID = id;
_reliableDeltaChannel = _sequencer.getReliableInputChannel(RELIABLE_DELTA_CHANNEL_INDEX);
_reliableDeltaChannel->getBitstream().copyPersistentMappings(_sequencer.getInputStream());
_reliableDeltaLOD = getLastAcknowledgedSendRecord()->getLOD();
PacketRecord* receiveRecord = getLastAcknowledgedReceiveRecord();
_remoteDataLOD = receiveRecord->getLOD();
_remoteData = receiveRecord->getData();
}
} else if (userType == QMetaType::QVariantList) {
foreach (const QVariant& element, message.toList()) {
@ -1107,7 +1117,7 @@ PacketRecord* TestEndpoint::maybeCreateSendRecord() const {
}
PacketRecord* TestEndpoint::maybeCreateReceiveRecord() const {
return new TestReceiveRecord(_dataLOD, (_mode == METAVOXEL_SERVER_MODE) ? MetavoxelData() : _data, _remoteState);
return new TestReceiveRecord(_remoteDataLOD, _remoteData, _remoteState);
}
void TestEndpoint::handleHighPriorityMessage(const QVariant& message) {
@ -1127,9 +1137,10 @@ void TestEndpoint::handleHighPriorityMessage(const QVariant& message) {
void TestEndpoint::handleReliableMessage(const QVariant& message, Bitstream& in) {
if (message.userType() == MetavoxelDeltaMessage::Type) {
PacketRecord* receiveRecord = getLastAcknowledgedReceiveRecord();
_data.readDelta(receiveRecord->getData(), receiveRecord->getLOD(), in, _dataLOD = _reliableDeltaLOD);
_remoteData.readDelta(receiveRecord->getData(), receiveRecord->getLOD(), in, _remoteDataLOD = _reliableDeltaLOD);
_sequencer.getInputStream().persistReadMappings(in.getAndResetReadMappings());
in.clearPersistentMappings();
_data = _remoteData;
compareMetavoxelData();
_reliableDeltaChannel = NULL;
return;

View file

@ -79,6 +79,8 @@ private:
MetavoxelData _data;
MetavoxelLOD _dataLOD;
MetavoxelData _remoteData;
MetavoxelLOD _remoteDataLOD;
MetavoxelLOD _lod;
SharedObjectPointer _sphere;
@ -104,6 +106,7 @@ private:
MetavoxelData _reliableDeltaData;
MetavoxelLOD _reliableDeltaLOD;
Bitstream::WriteMappings _reliableDeltaWriteMappings;
int _reliableDeltaID;
};
/// A simple shared object.