Mirror of https://github.com/overte-org/overte.git

Commit 326d17072e: merge seth/dice-on-table-1 into andrew/nova
29 changed files with 520 additions and 83 deletions
@@ -12,7 +12,6 @@ Script.load("progress.js");
Script.load("edit.js");
Script.load("selectAudioDevice.js");
Script.load("controllers/hydra/hydraMove.js");
Script.load("headMove.js");
Script.load("inspect.js");
Script.load("lobby.js");
Script.load("notifications.js");

@@ -13,7 +13,8 @@
//

var isDice = false;
var NUMBER_OF_DICE = 2;
var NUMBER_OF_DICE = 4;
var LIFETIME = 10000; // Dice will live for about 3 hours
var dice = [];
var DIE_SIZE = 0.20;

@@ -50,7 +51,7 @@ var diceButton = Overlays.addOverlay("image", {
});

var GRAVITY = -3.5;
var LIFETIME = 300;

// NOTE: angularVelocity is in radians/sec
var MAX_ANGULAR_SPEED = Math.PI;

@@ -105,6 +106,7 @@ function mousePressEvent(event) {
var clickedText = false;
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
if (clickedOverlay == offButton) {
deleteDice();
Script.stop();
} else if (clickedOverlay == diceButton) {
var HOW_HARD = 2.0;

@@ -116,10 +118,8 @@ function mousePressEvent(event) {
}

function scriptEnding() {
deleteDice();
Overlays.deleteOverlay(offButton);
Overlays.deleteOverlay(diceButton);
}

Entities.entityCollisionWithEntity.connect(entityCollisionWithEntity);
examples/harmonicOscillator.js (new file, 65 lines)

@@ -0,0 +1,65 @@
// harmonicOscillator.js
//
// Created by Philip Rosedale on May 5, 2015
// Copyright 2015 High Fidelity, Inc.
//
// An object moves around the edge of a disc while
// changing color. The script is continuously updating
// position, velocity, rotation, and color. The movement
// should appear perfectly smooth to someone else,
// provided their network connection is good.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

var ball, disc;
var time = 0.0;
var range = 1.0;
var speed = 0.5;

var basePosition = Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()));

ball = Entities.addEntity(
    { type: "Box",
      position: basePosition,
      dimensions: { x: 0.1, y: 0.1, z: 0.1 },
      color: { red: 255, green: 0, blue: 255 }
    });

disc = Entities.addEntity(
    { type: "Sphere",
      position: basePosition,
      dimensions: { x: range, y: range / 20.0, z: range },
      color: { red: 128, green: 128, blue: 128 }
    });

function update(deltaTime) {
    time += deltaTime * speed;
    if (!ball.isKnownID) {
        ball = Entities.identifyEntity(ball);
    }
    rotation = Quat.angleAxis(time/Math.PI * 180.0, { x: 0, y: 1, z: 0 });
    Entities.editEntity(ball,
        {
          color: { red: 255 * (Math.sin(time)/2.0 + 0.5),
                   green: 255 - 255 * (Math.sin(time)/2.0 + 0.5),
                   blue: 0 },
          position: { x: basePosition.x + Math.sin(time) / 2.0 * range,
                      y: basePosition.y,
                      z: basePosition.z + Math.cos(time) / 2.0 * range },
          velocity: { x: Math.cos(time)/2.0 * range,
                      y: 0.0,
                      z: -Math.sin(time)/2.0 * range },
          rotation: rotation
        });
}

function scriptEnding() {
    Entities.deleteEntity(ball);
    Entities.deleteEntity(disc);
}

Script.scriptEnding.connect(scriptEnding);
Script.update.connect(update);
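The new example runs through the same Script.load entry point that the defaultScripts.js hunk at the top of this diff exercises. A minimal, hypothetical way to try it alongside the default scripts (the path is illustrative; point it at wherever the examples directory is installed):

    Script.load("harmonicOscillator.js"); // hypothetical path to the examples/ copy of the script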
interface/resources/images/face-mute.svg (new image, 32 lines, 3.5 KiB)

@@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg width="45px" height="45px" viewBox="0 0 45 45" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:sketch="http://www.bohemiancoding.com/sketch/ns">
    <!-- Generator: Sketch 3.3.1 (12005) - http://www.bohemiancoding.com/sketch -->
    <title>Slice 1</title>
    <desc>Created with Sketch.</desc>
    <defs></defs>
    <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" sketch:type="MSPage">
        <g id="mic-mute" sketch:type="MSLayerGroup" transform="translate(-3.000000, 0.000000)">
            <g id="Layer_3" transform="translate(3.420508, 0.842347)" stroke-width="2.10526286" stroke="#000000" fill="#FF0000" sketch:type="MSShapeGroup">
                <g id="Layer_2">
                    <path d="M43.5266358,21.6577606 C43.5266358,33.3277189 33.9360211,43.0001168 22.0267963,43.1052516 C10.2229629,43.1052516 0.63234822,33.5379884 0.63234822,21.6577606 C0.63234822,9.77753268 10.2229629,0.21026952 22.0267963,0.21026952 C33.8306298,0.21026952 43.5266358,9.88266744 43.5266358,21.6577606 L43.5266358,21.6577606 Z" id="path6"></path>
                    <path d="M7.48278769,5.99268111 L37.7301094,36.1663595" id="line8"></path>
                </g>
            </g>
            <g id="Layer_1" transform="translate(0.752809, 0.984270)" fill="#000000" sketch:type="MSShapeGroup">
                <g id="Page-1_2_" transform="translate(1.000000, 0.000000)">
                    <g id="mic-tool_1_" transform="translate(1.000000, 0.000000)">
                        <g id="Page-1_3_" transform="translate(1.000000, 0.000000)">
                            <g id="Group_1_" transform="translate(1.000000, 1.000000)">
                                <g id="mic_1_" transform="translate(1.000000, 0.000000)">
                                    <path d="M16.6868483,13.1449797 C16.6868483,16.2551904 14.4163295,17.579132 11.6131391,17.579132 C8.81632746,17.579132 6.5454869,16.2555462 6.5454869,13.1449797 C6.5454869,10.031224 8.81600655,5.5 11.6131391,5.5 C14.4163295,5.5 16.6868483,10.0312249 16.6868483,13.1449797 L16.6868483,13.1449797 Z" id="Shape"></path>
                                    <path d="M32.9863177,13.1449797 C32.9863177,16.2551904 30.7158014,17.579132 27.9158039,17.579132 C25.1189922,17.579132 22.8484683,16.2555462 22.8484683,13.1449797 C22.8484683,10.031224 25.1189922,5.5 27.9158039,5.5 C30.7158023,5.5 32.9863177,10.0312249 32.9863177,13.1449797 L32.9863177,13.1449797 Z" id="Shape"></path>
                                    <path d="M19.4246955,22.8995505 C8.59074225,22.8995505 3.97133194,18.0445183 3.97133194,24.0594622 C3.97133194,30.0829058 10.8899328,37.3346368 19.4246955,37.3346368 C27.9594572,37.3346368 34.8809223,30.3131889 34.8809223,24.2961184 C34.8809223,18.2776321 30.5208332,22.8995513 19.4246955,22.8995513 L19.4246955,22.8995505 Z M19.4246955,35.5515526 C17.3227023,35.5515526 15.3309396,35.0148194 13.5175822,34.1790777 C13.7995276,32.7577127 15.4529546,31.6435062 17.5390188,31.6435062 C18.2312941,31.6435062 18.8515667,31.7972603 19.4246955,32.0055779 C19.9978148,31.7969079 20.617776,31.6435062 21.3100529,31.6435062 C23.40822,31.6435062 25.0715291,32.76515 25.3378606,34.2003348 C23.5238656,35.0215518 21.5292319,35.5515534 19.4246955,35.5515534 L19.4246955,35.5515526 Z" id="Shape"></path>
                                </g>
                            </g>
                        </g>
                    </g>
                </g>
            </g>
        </g>
    </g>
</svg>
interface/resources/images/face.svg (new image, 31 lines, 3.4 KiB)

@@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg width="45px" height="45px" viewBox="0 0 45 45" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:sketch="http://www.bohemiancoding.com/sketch/ns">
    <!-- Generator: Sketch 3.3.1 (12005) - http://www.bohemiancoding.com/sketch -->
    <title>Slice 1</title>
    <desc>Created with Sketch.</desc>
    <defs></defs>
    <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" sketch:type="MSPage">
        <g id="mic-mute" sketch:type="MSLayerGroup" transform="translate(-3.000000, 0.000000)">
            <g id="Layer_3" transform="translate(2.420508, 0.842347)" stroke="#000000" stroke-width="2.10526286" fill="#FFFFFF" sketch:type="MSShapeGroup">
                <g id="Layer_2" transform="translate(1.000000, 0.000000)">
                    <path d="M43.5266358,21.6577606 C43.5266358,33.3277189 33.9360211,43.0001168 22.0267963,43.1052516 C10.2229629,43.1052516 0.63234822,33.5379884 0.63234822,21.6577606 C0.63234822,9.77753268 10.2229629,0.21026952 22.0267963,0.21026952 C33.8306298,0.21026952 43.5266358,9.88266744 43.5266358,21.6577606 L43.5266358,21.6577606 Z" id="path6"></path>
                </g>
            </g>
            <g id="Layer_1" transform="translate(0.752809, 0.984270)" fill="#000000" sketch:type="MSShapeGroup">
                <g id="Page-1_2_" transform="translate(1.000000, 0.000000)">
                    <g id="mic-tool_1_" transform="translate(1.000000, 0.000000)">
                        <g id="Page-1_3_" transform="translate(1.000000, 0.000000)">
                            <g id="Group_1_" transform="translate(1.000000, 1.000000)">
                                <g id="mic_1_" transform="translate(1.000000, 0.000000)">
                                    <path d="M16.6868483,13.1449797 C16.6868483,16.2551904 14.4163295,17.579132 11.6131391,17.579132 C8.81632746,17.579132 6.5454869,16.2555462 6.5454869,13.1449797 C6.5454869,10.031224 8.81600655,5.5 11.6131391,5.5 C14.4163295,5.5 16.6868483,10.0312249 16.6868483,13.1449797 L16.6868483,13.1449797 Z" id="Shape"></path>
                                    <path d="M32.9863177,13.1449797 C32.9863177,16.2551904 30.7158014,17.579132 27.9158039,17.579132 C25.1189922,17.579132 22.8484683,16.2555462 22.8484683,13.1449797 C22.8484683,10.031224 25.1189922,5.5 27.9158039,5.5 C30.7158023,5.5 32.9863177,10.0312249 32.9863177,13.1449797 L32.9863177,13.1449797 Z" id="Shape"></path>
                                    <path d="M19.4246955,22.8995505 C8.59074225,22.8995505 3.97133194,18.0445183 3.97133194,24.0594622 C3.97133194,30.0829058 10.8899328,37.3346368 19.4246955,37.3346368 C27.9594572,37.3346368 34.8809223,30.3131889 34.8809223,24.2961184 C34.8809223,18.2776321 30.5208332,22.8995513 19.4246955,22.8995513 L19.4246955,22.8995505 Z M19.4246955,35.5515526 C17.3227023,35.5515526 15.3309396,35.0148194 13.5175822,34.1790777 C13.7995276,32.7577127 15.4529546,31.6435062 17.5390188,31.6435062 C18.2312941,31.6435062 18.8515667,31.7972603 19.4246955,32.0055779 C19.9978148,31.7969079 20.617776,31.6435062 21.3100529,31.6435062 C23.40822,31.6435062 25.0715291,32.76515 25.3378606,34.2003348 C23.5238656,35.0215518 21.5292319,35.5515534 19.4246955,35.5515534 L19.4246955,35.5515526 Z" id="Shape"></path>
                                </g>
                            </g>
                        </g>
                    </g>
                </g>
            </g>
        </g>
    </g>
</svg>
@@ -104,6 +104,7 @@
#include "audio/AudioIOStatsRenderer.h"
#include "audio/AudioScope.h"

#include "devices/CameraToolBox.h"
#include "devices/DdeFaceTracker.h"
#include "devices/Faceshift.h"
#include "devices/Leapmotion.h"

@@ -267,6 +268,7 @@ bool setupEssentials(int& argc, char** argv) {
auto ddeFaceTracker = DependencyManager::set<DdeFaceTracker>();
auto modelBlender = DependencyManager::set<ModelBlender>();
auto audioToolBox = DependencyManager::set<AudioToolBox>();
auto cameraToolBox = DependencyManager::set<CameraToolBox>();
auto avatarManager = DependencyManager::set<AvatarManager>();
auto lodManager = DependencyManager::set<LODManager>();
auto jsConsole = DependencyManager::set<StandAloneJSConsole>();

@@ -592,6 +594,14 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
// The offscreen UI needs to intercept the mouse and keyboard
// events coming from the onscreen window
_glWidget->installEventFilter(DependencyManager::get<OffscreenUi>().data());

// initialize our face trackers after loading the menu settings
auto faceshiftTracker = DependencyManager::get<Faceshift>();
faceshiftTracker->init();
connect(faceshiftTracker.data(), &FaceTracker::muteToggled, this, &Application::faceTrackerMuteToggled);
auto ddeTracker = DependencyManager::get<DdeFaceTracker>();
ddeTracker->init();
connect(ddeTracker.data(), &FaceTracker::muteToggled, this, &Application::faceTrackerMuteToggled);
}

@@ -923,6 +933,14 @@ void Application::audioMuteToggled() {
muteAction->setChecked(DependencyManager::get<AudioClient>()->isMuted());
}

void Application::faceTrackerMuteToggled() {
QAction* muteAction = Menu::getInstance()->getActionForOption(MenuOption::MuteFaceTracking);
Q_CHECK_PTR(muteAction);
bool isMuted = getSelectedFaceTracker()->isMuted();
muteAction->setChecked(isMuted);
getSelectedFaceTracker()->setEnabled(!isMuted);
}

void Application::aboutApp() {
InfoView::forcedShow(INFO_HELP_PATH);
}

@@ -1013,6 +1031,9 @@ bool Application::event(QEvent* event) {
case QEvent::MouseButtonPress:
mousePressEvent((QMouseEvent*)event);
return true;
case QEvent::MouseButtonDblClick:
mouseDoublePressEvent((QMouseEvent*)event);
return true;
case QEvent::MouseButtonRelease:
mouseReleaseEvent((QMouseEvent*)event);
return true;

@@ -1434,7 +1455,12 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
// stop propagation
return;
}

if (DependencyManager::get<CameraToolBox>()->mousePressEvent(getMouseX(), getMouseY())) {
// stop propagation
return;
}

if (_rearMirrorTools->mousePressEvent(getMouseX(), getMouseY())) {
// stop propagation
return;

@@ -1452,6 +1478,24 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
}
}

void Application::mouseDoublePressEvent(QMouseEvent* event, unsigned int deviceID) {
// if one of our scripts have asked to capture this event, then stop processing it
if (_controllerScriptingInterface.isMouseCaptured()) {
return;
}

if (activeWindow() == _window) {
if (event->button() == Qt::LeftButton) {
if (mouseOnScreen()) {
if (DependencyManager::get<CameraToolBox>()->mouseDoublePressEvent(getMouseX(), getMouseY())) {
// stop propagation
return;
}
}
}
}
}

void Application::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {

if (!_aboutToQuit) {

@@ -1847,18 +1891,45 @@ FaceTracker* Application::getActiveFaceTracker() {
(faceshift->isActive() ? static_cast<FaceTracker*>(faceshift.data()) : NULL));
}

void Application::setActiveFaceTracker() {
FaceTracker* Application::getSelectedFaceTracker() {
FaceTracker* faceTracker = NULL;
#ifdef HAVE_FACESHIFT
DependencyManager::get<Faceshift>()->setTCPEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift));
if (Menu::getInstance()->isOptionChecked(MenuOption::Faceshift)) {
faceTracker = DependencyManager::get<Faceshift>().data();
}
#endif
#ifdef HAVE_DDE
if (Menu::getInstance()->isOptionChecked(MenuOption::UseCamera)) {
faceTracker = DependencyManager::get<DdeFaceTracker>().data();
}
#endif
return faceTracker;
}

void Application::setActiveFaceTracker() {
bool isMuted = Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking);
#ifdef HAVE_FACESHIFT
auto faceshiftTracker = DependencyManager::get<Faceshift>();
faceshiftTracker->setIsMuted(isMuted);
faceshiftTracker->setEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift) && !isMuted);
#endif
#ifdef HAVE_DDE
bool isUsingDDE = Menu::getInstance()->isOptionChecked(MenuOption::UseCamera);
Menu::getInstance()->getActionForOption(MenuOption::UseAudioForMouth)->setVisible(isUsingDDE);
Menu::getInstance()->getActionForOption(MenuOption::VelocityFilter)->setVisible(isUsingDDE);
DependencyManager::get<DdeFaceTracker>()->setEnabled(isUsingDDE);
auto ddeTracker = DependencyManager::get<DdeFaceTracker>();
ddeTracker->setIsMuted(isMuted);
ddeTracker->setEnabled(isUsingDDE && !isMuted);
#endif
}

void Application::toggleFaceTrackerMute() {
FaceTracker* faceTracker = getSelectedFaceTracker();
if (faceTracker) {
faceTracker->toggleMute();
}
}

bool Application::exportEntities(const QString& filename, const QVector<EntityItemID>& entityIDs) {
QVector<EntityItem*> entities;

@@ -2027,10 +2098,6 @@ void Application::init() {
SixenseManager::getInstance().toggleSixense(true);
#endif

// initialize our face trackers after loading the menu settings
DependencyManager::get<Faceshift>()->init();
DependencyManager::get<DdeFaceTracker>()->init();

Leapmotion::init();
RealSense::init();

@@ -2169,7 +2236,7 @@ void Application::updateMyAvatarLookAtPosition() {

isLookingAtSomeone = true;
// If I am looking at someone else, look directly at one of their eyes
if (tracker) {
if (tracker && !tracker->isMuted()) {
// If a face tracker is active, look at the eye for the side my gaze is biased toward
if (tracker->getEstimatedEyeYaw() > _myAvatar->getHead()->getFinalYaw()) {
// Look at their right eye

@@ -2195,7 +2262,7 @@ void Application::updateMyAvatarLookAtPosition() {
//
// Deflect the eyes a bit to match the detected Gaze from 3D camera if active
//
if (tracker) {
if (tracker && !tracker->isMuted()) {
float eyePitch = tracker->getEstimatedEyePitch();
float eyeYaw = tracker->getEstimatedEyeYaw();
const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f;

@@ -2250,7 +2317,7 @@ void Application::updateCamera(float deltaTime) {
if (!OculusManager::isConnected() && !TV3DManager::isConnected() &&
Menu::getInstance()->isOptionChecked(MenuOption::OffAxisProjection)) {
FaceTracker* tracker = getActiveFaceTracker();
if (tracker) {
if (tracker && !tracker->isMuted()) {
const float EYE_OFFSET_SCALE = 0.025f;
glm::vec3 position = tracker->getHeadTranslation() * EYE_OFFSET_SCALE;
float xSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? 1.0f : -1.0f;

@@ -2314,7 +2381,7 @@ void Application::update(float deltaTime) {
PerformanceTimer perfTimer("devices");
DeviceTracker::updateAll();
FaceTracker* tracker = getActiveFaceTracker();
if (tracker) {
if (tracker && !tracker->isMuted()) {
tracker->update(deltaTime);
}
SixenseManager::getInstance().update(deltaTime);

@@ -3254,6 +3321,7 @@ void Application::displaySide(Camera& theCamera, bool selfAvatarOnly, RenderArgs
{
PerformanceTimer perfTimer("3dOverlaysFront");
glClear(GL_DEPTH_BUFFER_BIT);
Glower glower; // Sets alpha to 1.0
_overlays.renderWorld(true);
}
activeRenderingThread = nullptr;
@@ -171,6 +171,7 @@ public:

void mouseMoveEvent(QMouseEvent* event, unsigned int deviceID = 0);
void mousePressEvent(QMouseEvent* event, unsigned int deviceID = 0);
void mouseDoublePressEvent(QMouseEvent* event, unsigned int deviceID = 0);
void mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID = 0);

void touchBeginEvent(QTouchEvent* event);

@@ -220,6 +221,7 @@ public:
bool getLastMouseMoveWasSimulated() const { return _lastMouseMoveWasSimulated; }

FaceTracker* getActiveFaceTracker();
FaceTracker* getSelectedFaceTracker();

QSystemTrayIcon* getTrayIcon() { return _trayIcon; }
ApplicationOverlay& getApplicationOverlay() { return _applicationOverlay; }

@@ -399,6 +401,7 @@ public slots:

void resetSensors();
void setActiveFaceTracker();
void toggleFaceTrackerMute();

void aboutApp();
void showEditEntitiesHelp();

@@ -440,6 +443,7 @@ private slots:
void runTests();

void audioMuteToggled();
void faceTrackerMuteToggled();

void setCursorVisible(bool visible);
@@ -131,6 +131,7 @@ bool GLCanvas::event(QEvent* event) {
case QEvent::MouseMove:
case QEvent::MouseButtonPress:
case QEvent::MouseButtonRelease:
case QEvent::MouseButtonDblClick:
case QEvent::KeyPress:
case QEvent::KeyRelease:
case QEvent::FocusIn:
@@ -394,6 +394,12 @@ Menu::Menu() {
QAction* ddeFiltering = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::VelocityFilter, 0, true);
ddeFiltering->setVisible(false);
#endif
#if defined(HAVE_FACESHIFT) || defined(HAVE_DDE)
faceTrackingMenu->addSeparator();
addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::MuteFaceTracking,
0, false,
qApp, SLOT(toggleFaceTrackerMute()));
#endif

auto avatarManager = DependencyManager::get<AvatarManager>();
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AvatarReceiveStats, 0, false,
@@ -211,6 +211,7 @@ namespace MenuOption {
const QString Mirror = "Mirror";
const QString MuteAudio = "Mute Microphone";
const QString MuteEnvironment = "Mute Environment";
const QString MuteFaceTracking = "Mute Face Tracking";
const QString NoFaceTracking = "None";
const QString OctreeStats = "Entity Statistics";
const QString OffAxisProjection = "Off-Axis Projection";
@@ -35,7 +35,7 @@ bool AudioToolBox::mousePressEvent(int x, int y) {
return false;
}

void AudioToolBox::render(int x, int y, bool boxed) {
void AudioToolBox::render(int x, int y, int padding, bool boxed) {
glEnable(GL_TEXTURE_2D);

auto glCanvas = Application::getInstance()->getGLWidget();

@@ -79,7 +79,7 @@ void AudioToolBox::render(int x, int y, bool boxed) {

float iconColor = 1.0f;

_iconBounds = QRect(x, y, MUTE_ICON_SIZE, MUTE_ICON_SIZE);
_iconBounds = QRect(x + padding, y, MUTE_ICON_SIZE, MUTE_ICON_SIZE);
if (!audioIO->isMuted()) {
glBindTexture(GL_TEXTURE_2D, _micTextureId);
iconColor = 1.0f;
@@ -18,7 +18,7 @@
class AudioToolBox : public Dependency {
SINGLETON_DEPENDENCY
public:
void render(int x, int y, bool boxed);
void render(int x, int y, int padding, bool boxed);

bool mousePressEvent(int x, int y);
protected:
@@ -90,7 +90,7 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
// Only use face trackers when not playing back a recording.
if (!myAvatar->isPlaying()) {
FaceTracker* faceTracker = Application::getInstance()->getActiveFaceTracker();
_isFaceTrackerConnected = faceTracker != NULL;
_isFaceTrackerConnected = faceTracker != NULL && !faceTracker->isMuted();
if (_isFaceTrackerConnected) {
_blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
@@ -243,7 +243,7 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
estimatedPosition /= OCULUS_LEAN_SCALE;
} else {
FaceTracker* tracker = Application::getInstance()->getActiveFaceTracker();
if (tracker) {
if (tracker && !tracker->isMuted()) {
estimatedPosition = tracker->getHeadTranslation();
_trackedHeadPosition = estimatedPosition;
estimatedRotation = glm::degrees(safeEulerAngles(tracker->getHeadRotation()));

@@ -1177,16 +1177,8 @@ void MyAvatar::renderBody(ViewFrustum* renderFrustum, RenderArgs::RenderMode ren
renderFrustum->setNearClip(DEFAULT_NEAR_CLIP);
} else {
float clipDistance = _skeletonModel.getHeadClipDistance();
if (OculusManager::isConnected()) {
// If avatar is horizontally in front of camera, increase clip distance by the amount it is in front.
glm::vec3 cameraToAvatar = _position - cameraPos;
cameraToAvatar.y = 0.0f;
glm::vec3 cameraLookAt = camera->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f);
float headOffset = glm::dot(cameraLookAt, cameraToAvatar);
if (headOffset > 0) {
clipDistance += headOffset;
}
}
clipDistance = glm::length(getEyePosition()
+ camera->getOrientation() * glm::vec3(0.0f, 0.0f, -clipDistance) - cameraPos);
renderFrustum->setNearClip(clipDistance);
}
}
interface/src/devices/CameraToolBox.cpp (new file, 121 lines)

@@ -0,0 +1,121 @@
//
//  CameraToolBox.cpp
//  interface/src/devices
//
//  Created by David Rowe on 30 Apr 2015.
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "InterfaceConfig.h"

#include <GLCanvas.h>
#include <PathUtils.h>

#include "Application.h"
#include "CameraToolBox.h"
#include "FaceTracker.h"

CameraToolBox::CameraToolBox() :
    _iconPulseTimeReference(usecTimestampNow()),
    _doubleClickTimer(NULL)
{
}

CameraToolBox::~CameraToolBox() {
    if (_doubleClickTimer) {
        _doubleClickTimer->stop();
        delete _doubleClickTimer;
    }
}

bool CameraToolBox::mousePressEvent(int x, int y) {
    if (_iconBounds.contains(x, y)) {
        if (!_doubleClickTimer) {
            // Toggle mute after waiting to check that it's not a double-click.
            const int DOUBLE_CLICK_WAIT = 200;  // ms
            _doubleClickTimer = new QTimer(this);
            connect(_doubleClickTimer, SIGNAL(timeout()), this, SLOT(toggleMute()));
            _doubleClickTimer->setSingleShot(true);
            _doubleClickTimer->setInterval(DOUBLE_CLICK_WAIT);
            _doubleClickTimer->start();
        }
        return true;
    }
    return false;
}

bool CameraToolBox::mouseDoublePressEvent(int x, int y) {
    if (_iconBounds.contains(x, y)) {
        if (_doubleClickTimer) {
            _doubleClickTimer->stop();
            delete _doubleClickTimer;
            _doubleClickTimer = NULL;
        }
        Application::getInstance()->resetSensors();
        return true;
    }
    return false;
}

void CameraToolBox::toggleMute() {
    delete _doubleClickTimer;
    _doubleClickTimer = NULL;

    FaceTracker* faceTracker = Application::getInstance()->getSelectedFaceTracker();
    if (faceTracker) {
        faceTracker->toggleMute();
    }
}

void CameraToolBox::render(int x, int y, bool boxed) {
    glEnable(GL_TEXTURE_2D);

    auto glCanvas = Application::getInstance()->getGLWidget();
    if (_enabledTextureId == 0) {
        _enabledTextureId = glCanvas->bindTexture(QImage(PathUtils::resourcesPath() + "images/face.svg"));
    }
    if (_mutedTextureId == 0) {
        _mutedTextureId = glCanvas->bindTexture(QImage(PathUtils::resourcesPath() + "images/face-mute.svg"));
    }

    const int MUTE_ICON_SIZE = 24;
    _iconBounds = QRect(x, y, MUTE_ICON_SIZE, MUTE_ICON_SIZE);
    float iconColor = 1.0f;
    if (!Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking)) {
        glBindTexture(GL_TEXTURE_2D, _enabledTextureId);
    } else {
        glBindTexture(GL_TEXTURE_2D, _mutedTextureId);

        // Make muted icon pulsate
        static const float PULSE_MIN = 0.4f;
        static const float PULSE_MAX = 1.0f;
        static const float PULSE_FREQUENCY = 1.0f; // in Hz
        qint64 now = usecTimestampNow();
        if (now - _iconPulseTimeReference > (qint64)USECS_PER_SECOND) {
            // Prevents t from getting too big, which would diminish glm::cos precision
            _iconPulseTimeReference = now - ((now - _iconPulseTimeReference) % USECS_PER_SECOND);
        }
        float t = (float)(now - _iconPulseTimeReference) / (float)USECS_PER_SECOND;
        float pulseFactor = (glm::cos(t * PULSE_FREQUENCY * 2.0f * PI) + 1.0f) / 2.0f;
        iconColor = PULSE_MIN + (PULSE_MAX - PULSE_MIN) * pulseFactor;
    }

    glm::vec4 quadColor(iconColor, iconColor, iconColor, 1.0f);

    glm::vec2 topLeft(_iconBounds.left(), _iconBounds.top());
    glm::vec2 bottomRight(_iconBounds.right(), _iconBounds.bottom());
    glm::vec2 texCoordTopLeft(1,1);
    glm::vec2 texCoordBottomRight(0,0);

    if (_boxQuadID == GeometryCache::UNKNOWN_ID) {
        _boxQuadID = DependencyManager::get<GeometryCache>()->allocateID();
    }

    DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor, _boxQuadID);

    glDisable(GL_TEXTURE_2D);
}
interface/src/devices/CameraToolBox.h (new file, 45 lines)

@@ -0,0 +1,45 @@
//
//  CameraToolBox.h
//  interface/src/devices
//
//  Created by David Rowe on 30 Apr 2015.
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_CameraToolBox_h
#define hifi_CameraToolBox_h

#include <QObject>

#include <DependencyManager.h>
#include <GeometryCache.h>

class CameraToolBox : public QObject, public Dependency {
    Q_OBJECT
    SINGLETON_DEPENDENCY

public:
    void render(int x, int y, bool boxed);
    bool mousePressEvent(int x, int y);
    bool mouseDoublePressEvent(int x, int y);

protected:
    CameraToolBox();
    ~CameraToolBox();

private slots:
    void toggleMute();

private:
    GLuint _enabledTextureId = 0;
    GLuint _mutedTextureId = 0;
    int _boxQuadID = GeometryCache::UNKNOWN_ID;
    QRect _iconBounds;
    qint64 _iconPulseTimeReference = 0;
    QTimer* _doubleClickTimer;
};

#endif // hifi_CameraToolBox_h
@@ -178,9 +178,7 @@ DdeFaceTracker::DdeFaceTracker(const QHostAddress& host, quint16 serverPort, qui
_filteredBrowUp(0.0f),
_lastEyeBlinks(),
_filteredEyeBlinks(),
_lastEyeCoefficients(),
_isCalculatingFPS(false),
_frameCount(0)
_lastEyeCoefficients()
{
_coefficients.resize(NUM_FACESHIFT_BLENDSHAPES);

@@ -203,7 +201,16 @@ DdeFaceTracker::~DdeFaceTracker() {
#pragma warning(default:4351)
#endif

void DdeFaceTracker::init() {
FaceTracker::init();
setEnabled(Menu::getInstance()->isOptionChecked(MenuOption::UseCamera) && !_isMuted);
}

void DdeFaceTracker::setEnabled(bool enabled) {
if (!_isInitialized) {
// Don't enable until have explicitly initialized
return;
}
#ifdef HAVE_DDE
// isOpen() does not work as one might expect on QUdpSocket; don't test isOpen() before closing socket.
_udpSocket.close();

@@ -314,7 +321,9 @@ float DdeFaceTracker::getBlendshapeCoefficient(int index) const {
}

void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
if(buffer.size() > MIN_PACKET_SIZE) {
_lastReceiveTimestamp = usecTimestampNow();

if (buffer.size() > MIN_PACKET_SIZE) {
bool isFiltering = Menu::getInstance()->isOptionChecked(MenuOption::VelocityFilter);

Packet packet;

@@ -326,7 +335,7 @@ void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
memcpy(&translation, packet.translation, sizeof(packet.translation));
glm::quat rotation;
memcpy(&rotation, &packet.rotation, sizeof(packet.rotation));
if (_reset || (_lastReceiveTimestamp == 0)) {
if (_reset || (_lastMessageReceived == 0)) {
memcpy(&_referenceTranslation, &translation, sizeof(glm::vec3));
memcpy(&_referenceRotation, &rotation, sizeof(glm::quat));
_reset = false;

@@ -501,5 +510,4 @@ void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
} else {
qCWarning(interfaceapp) << "DDE Face Tracker: Decode error";
}
_lastReceiveTimestamp = usecTimestampNow();
}
@@ -28,6 +28,7 @@ class DdeFaceTracker : public FaceTracker, public Dependency {
SINGLETON_DEPENDENCY

public:
virtual void init();
virtual void reset();

virtual bool isActive() const;

@@ -120,9 +121,6 @@ private:
float _lastEyeBlinks[2];
float _filteredEyeBlinks[2];
float _lastEyeCoefficients[2];

bool _isCalculatingFPS;
int _frameCount;
};

#endif // hifi_DdeFaceTracker_h
@@ -15,14 +15,14 @@

#include "FaceTracker.h"
#include "InterfaceLogging.h"
#include "Menu.h"

const int FPS_TIMER_DELAY = 2000; // ms
const int FPS_TIMER_DURATION = 2000; // ms

FaceTracker::FaceTracker() :
_isCalculatingFPS(false),
_frameCount(0)
{
void FaceTracker::init() {
_isMuted = Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking);
_isInitialized = true; // FaceTracker can be used now
}

inline float FaceTracker::getBlendshapeCoefficient(int index) const {

@@ -101,3 +101,8 @@ void FaceTracker::finishFPSTimer() {
qCDebug(interfaceapp) << "Face tracker FPS =" << (float)_frameCount / ((float)FPS_TIMER_DURATION / 1000.0f);
_isCalculatingFPS = false;
}

void FaceTracker::toggleMute() {
_isMuted = !_isMuted;
emit muteToggled();
}
@@ -26,7 +26,7 @@ public:
virtual bool isActive() const { return false; }
virtual bool isTracking() const { return false; }

virtual void init() {}
virtual void init();
virtual void update(float deltaTime);
virtual void reset();

@@ -42,11 +42,23 @@ public:
bool isValidBlendshapeIndex(int index) const { return index >= 0 && index < getNumBlendshapes(); }
const QVector<float>& getBlendshapeCoefficients() const;
float getBlendshapeCoefficient(int index) const;

bool isMuted() const { return _isMuted; }
void setIsMuted(bool isMuted) { _isMuted = isMuted; }
void toggleMute();

signals:
void muteToggled();

public slots:
virtual void setEnabled(bool enabled) = 0;

protected:
FaceTracker();
virtual ~FaceTracker() {};

bool _isInitialized = false;
bool _isMuted = true;

glm::vec3 _headTranslation = glm::vec3(0.0f);
glm::quat _headRotation = glm::quat();
float _estimatedEyePitch = 0.0f;

@@ -63,8 +75,8 @@ private slots:
void finishFPSTimer();

private:
bool _isCalculatingFPS;
int _frameCount;
bool _isCalculatingFPS = false;
int _frameCount = 0;
};

#endif // hifi_FaceTracker_h
@@ -49,7 +49,8 @@ Faceshift::Faceshift() :

#ifdef HAVE_FACESHIFT
void Faceshift::init() {
setTCPEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift));
FaceTracker::init();
setEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift) && !_isMuted);
}

void Faceshift::update(float deltaTime) {

@@ -92,7 +93,7 @@ void Faceshift::reset() {

bool Faceshift::isActive() const {
const quint64 ACTIVE_TIMEOUT_USECS = 1000000;
return (usecTimestampNow() - _lastTrackingStateReceived) < ACTIVE_TIMEOUT_USECS;
return (usecTimestampNow() - _lastReceiveTimestamp) < ACTIVE_TIMEOUT_USECS;
}

bool Faceshift::isTracking() const {

@@ -127,7 +128,11 @@ void Faceshift::updateFakeCoefficients(float leftBlink, float rightBlink, float
coefficients[FUNNEL_BLENDSHAPE] = mouth3;
}

void Faceshift::setTCPEnabled(bool enabled) {
void Faceshift::setEnabled(bool enabled) {
// Don't enable until have explicitly initialized
if (!_isInitialized) {
return;
}
#ifdef HAVE_FACESHIFT
if ((_tcpEnabled = enabled)) {
connectSocket();

@@ -196,6 +201,8 @@ void Faceshift::send(const std::string& message) {

void Faceshift::receive(const QByteArray& buffer) {
#ifdef HAVE_FACESHIFT
_lastReceiveTimestamp = usecTimestampNow();

_stream.received(buffer.size(), buffer.constData());
fsMsgPtr msg;
for (fsMsgPtr msg; (msg = _stream.get_message()); ) {

@@ -240,11 +247,11 @@ void Faceshift::receive(const QByteArray& buffer) {

const float FRAME_AVERAGING_FACTOR = 0.99f;
quint64 usecsNow = usecTimestampNow();
if (_lastTrackingStateReceived != 0) {
if (_lastMessageReceived != 0) {
_averageFrameTime = FRAME_AVERAGING_FACTOR * _averageFrameTime +
(1.0f - FRAME_AVERAGING_FACTOR) * (float)(usecsNow - _lastTrackingStateReceived) / 1000000.0f;
(1.0f - FRAME_AVERAGING_FACTOR) * (float)(usecsNow - _lastMessageReceived) / 1000000.0f;
}
_lastTrackingStateReceived = usecsNow;
_lastMessageReceived = usecsNow;
}
break;
}
@@ -87,7 +87,7 @@ signals:
void connectionStateChanged();

public slots:
void setTCPEnabled(bool enabled);
void setEnabled(bool enabled);

private slots:
void connectSocket();

@@ -114,7 +114,8 @@ private:
bool _tcpEnabled = true;
int _tcpRetryCount = 0;
bool _tracking = false;
quint64 _lastTrackingStateReceived = 0;
quint64 _lastReceiveTimestamp = 0;
quint64 _lastMessageReceived = 0;
float _averageFrameTime = STARTING_FACESHIFT_FRAME_TIME;

glm::vec3 _headAngularVelocity = glm::vec3(0.0f);
@@ -25,6 +25,7 @@
#include "audio/AudioToolBox.h"
#include "Application.h"
#include "ApplicationOverlay.h"
#include "devices/CameraToolBox.h"
#include "devices/OculusManager.h"

#include "Util.h"

@@ -211,6 +212,7 @@ void ApplicationOverlay::renderOverlay() {
glMatrixMode(GL_MODELVIEW);

renderAudioMeter();
renderCameraToggle();

renderStatsAndLogs();

@@ -808,18 +810,46 @@ void ApplicationOverlay::renderMagnifier(glm::vec2 magPos, float sizeMult, bool
} glPopMatrix();
}

const int AUDIO_METER_GAP = 5;
const int MUTE_ICON_PADDING = 10;

void ApplicationOverlay::renderCameraToggle() {
if (Menu::getInstance()->isOptionChecked(MenuOption::NoFaceTracking)) {
return;
}

int audioMeterY;
bool smallMirrorVisible = Menu::getInstance()->isOptionChecked(MenuOption::Mirror) && !OculusManager::isConnected();
bool boxed = smallMirrorVisible &&
!Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror);
if (boxed) {
audioMeterY = MIRROR_VIEW_HEIGHT + AUDIO_METER_GAP + MUTE_ICON_PADDING;
} else {
audioMeterY = AUDIO_METER_GAP + MUTE_ICON_PADDING;
}

DependencyManager::get<CameraToolBox>()->render(MIRROR_VIEW_LEFT_PADDING + AUDIO_METER_GAP, audioMeterY, boxed);
}

void ApplicationOverlay::renderAudioMeter() {
auto glCanvas = Application::getInstance()->getGLWidget();
auto audio = DependencyManager::get<AudioClient>();

// Audio VU Meter and Mute Icon
const int MUTE_ICON_SIZE = 24;
const int MUTE_ICON_PADDING = 10;
const int AUDIO_METER_WIDTH = MIRROR_VIEW_WIDTH - MUTE_ICON_SIZE - MUTE_ICON_PADDING;
const int AUDIO_METER_SCALE_WIDTH = AUDIO_METER_WIDTH - 2 ;
const int AUDIO_METER_HEIGHT = 8;
const int AUDIO_METER_GAP = 5;
const int AUDIO_METER_X = MIRROR_VIEW_LEFT_PADDING + MUTE_ICON_SIZE + AUDIO_METER_GAP;
const int INTER_ICON_GAP = 2;

int cameraSpace = 0;
int audioMeterWidth = MIRROR_VIEW_WIDTH - MUTE_ICON_SIZE - MUTE_ICON_PADDING;
int audioMeterScaleWidth = audioMeterWidth - 2;
int audioMeterX = MIRROR_VIEW_LEFT_PADDING + MUTE_ICON_SIZE + AUDIO_METER_GAP;
if (!Menu::getInstance()->isOptionChecked(MenuOption::NoFaceTracking)) {
cameraSpace = MUTE_ICON_SIZE + INTER_ICON_GAP;
audioMeterWidth -= cameraSpace;
audioMeterScaleWidth -= cameraSpace;
audioMeterX += cameraSpace;
}

int audioMeterY;
bool smallMirrorVisible = Menu::getInstance()->isOptionChecked(MenuOption::Mirror) && !OculusManager::isConnected();

@@ -834,13 +864,13 @@ void ApplicationOverlay::renderAudioMeter() {
const glm::vec4 AUDIO_METER_BLUE = { 0.0, 0.0, 1.0, 1.0 };
const glm::vec4 AUDIO_METER_GREEN = { 0.0, 1.0, 0.0, 1.0 };
const glm::vec4 AUDIO_METER_RED = { 1.0, 0.0, 0.0, 1.0 };
const float AUDIO_GREEN_START = 0.25 * AUDIO_METER_SCALE_WIDTH;
const float AUDIO_RED_START = 0.80 * AUDIO_METER_SCALE_WIDTH;
const float CLIPPING_INDICATOR_TIME = 1.0f;
const float AUDIO_METER_AVERAGING = 0.5;
const float LOG2 = log(2.0f);
const float METER_LOUDNESS_SCALE = 2.8f / 5.0f;
const float LOG2_LOUDNESS_FLOOR = 11.0f;
float audioGreenStart = 0.25f * audioMeterScaleWidth;
float audioRedStart = 0.8f * audioMeterScaleWidth;
float audioLevel = 0.0f;
float loudness = audio->getLastInputLoudness() + 1.0f;

@@ -848,12 +878,12 @@ void ApplicationOverlay::renderAudioMeter() {
float log2loudness = log(_trailingAudioLoudness) / LOG2;

if (log2loudness <= LOG2_LOUDNESS_FLOOR) {
audioLevel = (log2loudness / LOG2_LOUDNESS_FLOOR) * METER_LOUDNESS_SCALE * AUDIO_METER_SCALE_WIDTH;
audioLevel = (log2loudness / LOG2_LOUDNESS_FLOOR) * METER_LOUDNESS_SCALE * audioMeterScaleWidth;
} else {
audioLevel = (log2loudness - (LOG2_LOUDNESS_FLOOR - 1.0f)) * METER_LOUDNESS_SCALE * AUDIO_METER_SCALE_WIDTH;
audioLevel = (log2loudness - (LOG2_LOUDNESS_FLOOR - 1.0f)) * METER_LOUDNESS_SCALE * audioMeterScaleWidth;
}
if (audioLevel > AUDIO_METER_SCALE_WIDTH) {
audioLevel = AUDIO_METER_SCALE_WIDTH;
if (audioLevel > audioMeterScaleWidth) {
audioLevel = audioMeterScaleWidth;
}
bool isClipping = ((audio->getTimeSinceLastClip() > 0.0f) && (audio->getTimeSinceLastClip() < CLIPPING_INDICATOR_TIME));

@@ -863,7 +893,7 @@ void ApplicationOverlay::renderAudioMeter() {
renderCollisionOverlay(glCanvas->width(), glCanvas->height(), magnitude, 1.0f);
}

DependencyManager::get<AudioToolBox>()->render(MIRROR_VIEW_LEFT_PADDING + AUDIO_METER_GAP, audioMeterY, boxed);
DependencyManager::get<AudioToolBox>()->render(MIRROR_VIEW_LEFT_PADDING + AUDIO_METER_GAP, audioMeterY, cameraSpace, boxed);

DependencyManager::get<AudioScope>()->render(glCanvas->width(), glCanvas->height());
DependencyManager::get<AudioIOStatsRenderer>()->render(WHITE_TEXT, glCanvas->width(), glCanvas->height());

@@ -871,10 +901,10 @@ void ApplicationOverlay::renderAudioMeter() {
audioMeterY += AUDIO_METER_HEIGHT;

// Draw audio meter background Quad
DependencyManager::get<GeometryCache>()->renderQuad(AUDIO_METER_X, audioMeterY, AUDIO_METER_WIDTH, AUDIO_METER_HEIGHT,
DependencyManager::get<GeometryCache>()->renderQuad(audioMeterX, audioMeterY, audioMeterWidth, AUDIO_METER_HEIGHT,
glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));

if (audioLevel > AUDIO_RED_START) {
if (audioLevel > audioRedStart) {
glm::vec4 quadColor;
if (!isClipping) {
quadColor = AUDIO_METER_RED;

@@ -882,16 +912,16 @@ void ApplicationOverlay::renderAudioMeter() {
quadColor = glm::vec4(1, 1, 1, 1);
}
// Draw Red Quad
DependencyManager::get<GeometryCache>()->renderQuad(AUDIO_METER_X + AUDIO_RED_START,
DependencyManager::get<GeometryCache>()->renderQuad(audioMeterX + audioRedStart,
audioMeterY,
audioLevel - AUDIO_RED_START,
audioLevel - audioRedStart,
AUDIO_METER_HEIGHT, quadColor,
_audioRedQuad);

audioLevel = AUDIO_RED_START;
audioLevel = audioRedStart;
}

if (audioLevel > AUDIO_GREEN_START) {
if (audioLevel > audioGreenStart) {
glm::vec4 quadColor;
if (!isClipping) {
quadColor = AUDIO_METER_GREEN;

@@ -899,13 +929,13 @@ void ApplicationOverlay::renderAudioMeter() {
quadColor = glm::vec4(1, 1, 1, 1);
}
// Draw Green Quad
DependencyManager::get<GeometryCache>()->renderQuad(AUDIO_METER_X + AUDIO_GREEN_START,
DependencyManager::get<GeometryCache>()->renderQuad(audioMeterX + audioGreenStart,
audioMeterY,
audioLevel - AUDIO_GREEN_START,
audioLevel - audioGreenStart,
AUDIO_METER_HEIGHT, quadColor,
_audioGreenQuad);

audioLevel = AUDIO_GREEN_START;
audioLevel = audioGreenStart;
}

if (audioLevel >= 0) {

@@ -916,7 +946,7 @@ void ApplicationOverlay::renderAudioMeter() {
quadColor = glm::vec4(1, 1, 1, 1);
}
// Draw Blue (low level) quad
DependencyManager::get<GeometryCache>()->renderQuad(AUDIO_METER_X,
DependencyManager::get<GeometryCache>()->renderQuad(audioMeterX,
audioMeterY,
audioLevel, AUDIO_METER_HEIGHT, quadColor,
_audioBlueQuad);
@@ -101,6 +101,7 @@ private:
void renderPointersOculus(const glm::vec3& eyePos);

void renderAudioMeter();
void renderCameraToggle();
void renderStatsAndLogs();
void renderDomainConnectionStatusBorder();
@@ -162,11 +162,16 @@ void HMDToolsDialog::enterHDMMode() {
close();
}

Application::getInstance()->setFullscreen(true);
Application::getInstance()->setEnableVRMode(true);

const int SLIGHT_DELAY = 500;
QTimer::singleShot(SLIGHT_DELAY, this, SLOT(activateWindowAfterEnterMode()));
// If we go to fullscreen immediately, it ends up on the primary monitor,
// even though we've already moved the window. By adding this delay, the
// fullscreen target screen ends up correct.
QTimer::singleShot(SLIGHT_DELAY, this, [&]{
Application::getInstance()->setFullscreen(true);
activateWindowAfterEnterMode();
});

_inHDMMode = true;
}
@@ -159,6 +159,7 @@ void ObjectMotionState::handleHardAndEasyChanges(uint32_t flags, PhysicsEngine*
computeObjectShapeInfo(shapeInfo);
btCollisionShape* newShape = getShapeManager()->getShape(shapeInfo);
if (!newShape) {
qCDebug(physics) << "Warning: failed to generate new shape!";
// failed to generate new shape! --> keep old shape and remove shape-change flag
flags &= ~EntityItem::DIRTY_SHAPE;
// TODO: force this object out of PhysicsEngine rather than just use the old shape
@@ -153,7 +153,7 @@ VectorOfMotionStates& PhysicalEntitySimulation::getObjectsToAdd() {
while (entityItr != _pendingAdds.end()) {
EntityItem* entity = *entityItr;
assert(!entity->getPhysicsInfo());
if (entity->getIgnoreForCollisions()) {
if (entity->getShapeType() == SHAPE_TYPE_NONE || entity->getIgnoreForCollisions()) {
// this entity should no longer be on the internal _pendingAdds
entityItr = _pendingAdds.erase(entityItr);
} else if (entity->isReadyToComputeShape()) {

@@ -168,6 +168,7 @@ VectorOfMotionStates& PhysicalEntitySimulation::getObjectsToAdd() {
_tempVector.push_back(motionState);
entityItr = _pendingAdds.erase(entityItr);
} else {
qDebug() << "Warning! Failed to generate new shape for entity." << entity->getName();
++entityItr;
}
} else {
@@ -126,6 +126,8 @@ void PhysicsEngine::addObject(ObjectMotionState* motionState) {
motionState->updateBodyMaterialProperties();

_dynamicsWorld->addRigidBody(body);

motionState->getAndClearIncomingDirtyFlags();
}

void PhysicsEngine::removeObject(ObjectMotionState* object) {
@@ -162,6 +162,7 @@ public:
Mirror,
MuteAudio,
MuteEnvironment,
MuteFaceTracking,
NoFaceTracking,
NoShadows,
OctreeStats,