Mirror of https://github.com/HifiExperiments/overte.git (synced 2025-08-06 07:23:08 +02:00)
Merge branch 'master' of https://github.com/highfidelity/hifi into skyboxPropertiesInZones
This commit is contained in: commit edb84a1a14
27 changed files with 516 additions and 73 deletions
|
@ -12,7 +12,6 @@ Script.load("progress.js");
|
||||||
Script.load("edit.js");
|
Script.load("edit.js");
|
||||||
Script.load("selectAudioDevice.js");
|
Script.load("selectAudioDevice.js");
|
||||||
Script.load("controllers/hydra/hydraMove.js");
|
Script.load("controllers/hydra/hydraMove.js");
|
||||||
Script.load("headMove.js");
|
|
||||||
Script.load("inspect.js");
|
Script.load("inspect.js");
|
||||||
Script.load("lobby.js");
|
Script.load("lobby.js");
|
||||||
Script.load("notifications.js");
|
Script.load("notifications.js");
|
||||||
|
|
|
@ -13,7 +13,8 @@
|
||||||
//
|
//
|
||||||
|
|
||||||
var isDice = false;
|
var isDice = false;
|
||||||
var NUMBER_OF_DICE = 2;
|
var NUMBER_OF_DICE = 4;
|
||||||
|
var LIFETIME = 10000; // Dice will live for about 3 hours
|
||||||
var dice = [];
|
var dice = [];
|
||||||
var DIE_SIZE = 0.20;
|
var DIE_SIZE = 0.20;
|
||||||
|
|
||||||
|
@ -50,7 +51,7 @@ var diceButton = Overlays.addOverlay("image", {
|
||||||
});
|
});
|
||||||
|
|
||||||
var GRAVITY = -3.5;
|
var GRAVITY = -3.5;
|
||||||
var LIFETIME = 300;
|
|
||||||
// NOTE: angularVelocity is in radians/sec
|
// NOTE: angularVelocity is in radians/sec
|
||||||
var MAX_ANGULAR_SPEED = Math.PI;
|
var MAX_ANGULAR_SPEED = Math.PI;
|
||||||
|
|
||||||
|
@ -105,6 +106,7 @@ function mousePressEvent(event) {
|
||||||
var clickedText = false;
|
var clickedText = false;
|
||||||
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
|
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
|
||||||
if (clickedOverlay == offButton) {
|
if (clickedOverlay == offButton) {
|
||||||
|
deleteDice();
|
||||||
Script.stop();
|
Script.stop();
|
||||||
} else if (clickedOverlay == diceButton) {
|
} else if (clickedOverlay == diceButton) {
|
||||||
var HOW_HARD = 2.0;
|
var HOW_HARD = 2.0;
|
||||||
|
@ -116,10 +118,8 @@ function mousePressEvent(event) {
|
||||||
}
|
}
|
||||||
|
|
||||||
function scriptEnding() {
|
function scriptEnding() {
|
||||||
deleteDice();
|
|
||||||
Overlays.deleteOverlay(offButton);
|
Overlays.deleteOverlay(offButton);
|
||||||
Overlays.deleteOverlay(diceButton);
|
Overlays.deleteOverlay(diceButton);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Entities.entityCollisionWithEntity.connect(entityCollisionWithEntity);
|
Entities.entityCollisionWithEntity.connect(entityCollisionWithEntity);
|
||||||
|
|
examples/harmonicOscillator.js (new file, 65 lines)
|
@ -0,0 +1,65 @@
|
||||||
|
// harmonicOscillator.js
|
||||||
|
//
|
||||||
|
// Created by Philip Rosedale on May 5, 2015
|
||||||
|
// Copyright 2015 High Fidelity, Inc.
|
||||||
|
//
|
||||||
|
// An object moves around the edge of a disc while
|
||||||
|
// changing color. The script is continuously updating
|
||||||
|
// position, velocity, rotation, and color. The movement
|
||||||
|
// should appear perfectly smooth to someone else,
|
||||||
|
// provided their network connection is good.
|
||||||
|
//
|
||||||
|
// Distributed under the Apache License, Version 2.0.
|
||||||
|
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||||
|
//
|
||||||
|
|
||||||
|
var ball, disc;
|
||||||
|
var time = 0.0;
|
||||||
|
var range = 1.0;
|
||||||
|
var speed = 0.5;
|
||||||
|
|
||||||
|
|
||||||
|
var basePosition = Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()));
|
||||||
|
|
||||||
|
ball = Entities.addEntity(
|
||||||
|
{ type: "Box",
|
||||||
|
position: basePosition,
|
||||||
|
dimensions: { x: 0.1, y: 0.1, z: 0.1 },
|
||||||
|
color: { red: 255, green: 0, blue: 255 }
|
||||||
|
});
|
||||||
|
|
||||||
|
disc = Entities.addEntity(
|
||||||
|
{ type: "Sphere",
|
||||||
|
position: basePosition,
|
||||||
|
dimensions: { x: range, y: range / 20.0, z: range },
|
||||||
|
color: { red: 128, green: 128, blue: 128 }
|
||||||
|
});
|
||||||
|
|
||||||
|
function update(deltaTime) {
|
||||||
|
time += deltaTime * speed;
|
||||||
|
if (!ball.isKnownID) {
|
||||||
|
ball = Entities.identifyEntity(ball);
|
||||||
|
}
|
||||||
|
rotation = Quat.angleAxis(time/Math.PI * 180.0, { x: 0, y: 1, z: 0 });
|
||||||
|
Entities.editEntity(ball,
|
||||||
|
{
|
||||||
|
color: { red: 255 * (Math.sin(time)/2.0 + 0.5),
|
||||||
|
green: 255 - 255 * (Math.sin(time)/2.0 + 0.5),
|
||||||
|
blue: 0 },
|
||||||
|
position: { x: basePosition.x + Math.sin(time) / 2.0 * range,
|
||||||
|
y: basePosition.y,
|
||||||
|
z: basePosition.z + Math.cos(time) / 2.0 * range },
|
||||||
|
velocity: { x: Math.cos(time)/2.0 * range,
|
||||||
|
y: 0.0,
|
||||||
|
z: -Math.sin(time)/2.0 * range },
|
||||||
|
rotation: rotation
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function scriptEnding() {
|
||||||
|
Entities.deleteEntity(ball);
|
||||||
|
Entities.deleteEntity(disc);
|
||||||
|
}
|
||||||
|
|
||||||
|
Script.scriptEnding.connect(scriptEnding);
|
||||||
|
Script.update.connect(update);
|
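For reference, the motion harmonicOscillator.js describes above is plain parametric circular motion: the position traces a circle of radius range/2 in the XZ plane around the base position, and the velocity is the tangent of that circle. A minimal C++ sketch of just that math (the Vec3 struct and the fixed 60 Hz step are illustrative; the entity API is omitted):

// Sketch only (not part of the commit): the parametric math used by harmonicOscillator.js.
#include <cmath>
#include <cstdio>

struct Vec3 { float x, y, z; };

int main() {
    const float range = 1.0f;   // matches the script
    const float speed = 0.5f;   // matches the script
    const Vec3 base = { 0.0f, 0.0f, 0.0f };  // stands in for the camera-relative basePosition
    float time = 0.0f;

    for (int frame = 0; frame < 5; ++frame) {
        const float deltaTime = 1.0f / 60.0f;  // illustrative fixed step
        time += deltaTime * speed;
        // Position traces a circle of radius range/2 in the XZ plane around the base.
        Vec3 position = { base.x + std::sin(time) / 2.0f * range,
                          base.y,
                          base.z + std::cos(time) / 2.0f * range };
        // Velocity is the tangent of that circle (the derivative with respect to 'time').
        Vec3 velocity = { std::cos(time) / 2.0f * range,
                          0.0f,
                          -std::sin(time) / 2.0f * range };
        std::printf("time=%.4f pos=(%.3f, %.3f, %.3f) vel=(%.3f, %.3f, %.3f)\n",
                    time, position.x, position.y, position.z,
                    velocity.x, velocity.y, velocity.z);
    }
    return 0;
}

Because the remote clients receive both the position and the matching velocity every update, they can interpolate between updates and the motion appears smooth, as the script's header comment notes.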
interface/resources/images/face-mute.svg (new file, 32 lines)
|
@ -0,0 +1,32 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<svg width="45px" height="45px" viewBox="0 0 45 45" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:sketch="http://www.bohemiancoding.com/sketch/ns">
|
||||||
|
<!-- Generator: Sketch 3.3.1 (12005) - http://www.bohemiancoding.com/sketch -->
|
||||||
|
<title>Slice 1</title>
|
||||||
|
<desc>Created with Sketch.</desc>
|
||||||
|
<defs></defs>
|
||||||
|
<g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" sketch:type="MSPage">
|
||||||
|
<g id="mic-mute" sketch:type="MSLayerGroup" transform="translate(-3.000000, 0.000000)">
|
||||||
|
<g id="Layer_3" transform="translate(3.420508, 0.842347)" stroke-width="2.10526286" stroke="#000000" fill="#FF0000" sketch:type="MSShapeGroup">
|
||||||
|
<g id="Layer_2">
|
||||||
|
<path d="M43.5266358,21.6577606 C43.5266358,33.3277189 33.9360211,43.0001168 22.0267963,43.1052516 C10.2229629,43.1052516 0.63234822,33.5379884 0.63234822,21.6577606 C0.63234822,9.77753268 10.2229629,0.21026952 22.0267963,0.21026952 C33.8306298,0.21026952 43.5266358,9.88266744 43.5266358,21.6577606 L43.5266358,21.6577606 Z" id="path6"></path>
|
||||||
|
<path d="M7.48278769,5.99268111 L37.7301094,36.1663595" id="line8"></path>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
<g id="Layer_1" transform="translate(0.752809, 0.984270)" fill="#000000" sketch:type="MSShapeGroup">
|
||||||
|
<g id="Page-1_2_" transform="translate(1.000000, 0.000000)">
|
||||||
|
<g id="mic-tool_1_" transform="translate(1.000000, 0.000000)">
|
||||||
|
<g id="Page-1_3_" transform="translate(1.000000, 0.000000)">
|
||||||
|
<g id="Group_1_" transform="translate(1.000000, 1.000000)">
|
||||||
|
<g id="mic_1_" transform="translate(1.000000, 0.000000)">
|
||||||
|
<path d="M16.6868483,13.1449797 C16.6868483,16.2551904 14.4163295,17.579132 11.6131391,17.579132 C8.81632746,17.579132 6.5454869,16.2555462 6.5454869,13.1449797 C6.5454869,10.031224 8.81600655,5.5 11.6131391,5.5 C14.4163295,5.5 16.6868483,10.0312249 16.6868483,13.1449797 L16.6868483,13.1449797 Z" id="Shape"></path>
|
||||||
|
<path d="M32.9863177,13.1449797 C32.9863177,16.2551904 30.7158014,17.579132 27.9158039,17.579132 C25.1189922,17.579132 22.8484683,16.2555462 22.8484683,13.1449797 C22.8484683,10.031224 25.1189922,5.5 27.9158039,5.5 C30.7158023,5.5 32.9863177,10.0312249 32.9863177,13.1449797 L32.9863177,13.1449797 Z" id="Shape"></path>
|
||||||
|
<path d="M19.4246955,22.8995505 C8.59074225,22.8995505 3.97133194,18.0445183 3.97133194,24.0594622 C3.97133194,30.0829058 10.8899328,37.3346368 19.4246955,37.3346368 C27.9594572,37.3346368 34.8809223,30.3131889 34.8809223,24.2961184 C34.8809223,18.2776321 30.5208332,22.8995513 19.4246955,22.8995513 L19.4246955,22.8995505 Z M19.4246955,35.5515526 C17.3227023,35.5515526 15.3309396,35.0148194 13.5175822,34.1790777 C13.7995276,32.7577127 15.4529546,31.6435062 17.5390188,31.6435062 C18.2312941,31.6435062 18.8515667,31.7972603 19.4246955,32.0055779 C19.9978148,31.7969079 20.617776,31.6435062 21.3100529,31.6435062 C23.40822,31.6435062 25.0715291,32.76515 25.3378606,34.2003348 C23.5238656,35.0215518 21.5292319,35.5515534 19.4246955,35.5515534 L19.4246955,35.5515526 Z" id="Shape"></path>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</svg>
|
After: size 3.5 KiB
interface/resources/images/face.svg (new file, 31 lines)
|
@ -0,0 +1,31 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<svg width="45px" height="45px" viewBox="0 0 45 45" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:sketch="http://www.bohemiancoding.com/sketch/ns">
|
||||||
|
<!-- Generator: Sketch 3.3.1 (12005) - http://www.bohemiancoding.com/sketch -->
|
||||||
|
<title>Slice 1</title>
|
||||||
|
<desc>Created with Sketch.</desc>
|
||||||
|
<defs></defs>
|
||||||
|
<g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd" sketch:type="MSPage">
|
||||||
|
<g id="mic-mute" sketch:type="MSLayerGroup" transform="translate(-3.000000, 0.000000)">
|
||||||
|
<g id="Layer_3" transform="translate(2.420508, 0.842347)" stroke="#000000" stroke-width="2.10526286" fill="#FFFFFF" sketch:type="MSShapeGroup">
|
||||||
|
<g id="Layer_2" transform="translate(1.000000, 0.000000)">
|
||||||
|
<path d="M43.5266358,21.6577606 C43.5266358,33.3277189 33.9360211,43.0001168 22.0267963,43.1052516 C10.2229629,43.1052516 0.63234822,33.5379884 0.63234822,21.6577606 C0.63234822,9.77753268 10.2229629,0.21026952 22.0267963,0.21026952 C33.8306298,0.21026952 43.5266358,9.88266744 43.5266358,21.6577606 L43.5266358,21.6577606 Z" id="path6"></path>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
<g id="Layer_1" transform="translate(0.752809, 0.984270)" fill="#000000" sketch:type="MSShapeGroup">
|
||||||
|
<g id="Page-1_2_" transform="translate(1.000000, 0.000000)">
|
||||||
|
<g id="mic-tool_1_" transform="translate(1.000000, 0.000000)">
|
||||||
|
<g id="Page-1_3_" transform="translate(1.000000, 0.000000)">
|
||||||
|
<g id="Group_1_" transform="translate(1.000000, 1.000000)">
|
||||||
|
<g id="mic_1_" transform="translate(1.000000, 0.000000)">
|
||||||
|
<path d="M16.6868483,13.1449797 C16.6868483,16.2551904 14.4163295,17.579132 11.6131391,17.579132 C8.81632746,17.579132 6.5454869,16.2555462 6.5454869,13.1449797 C6.5454869,10.031224 8.81600655,5.5 11.6131391,5.5 C14.4163295,5.5 16.6868483,10.0312249 16.6868483,13.1449797 L16.6868483,13.1449797 Z" id="Shape"></path>
|
||||||
|
<path d="M32.9863177,13.1449797 C32.9863177,16.2551904 30.7158014,17.579132 27.9158039,17.579132 C25.1189922,17.579132 22.8484683,16.2555462 22.8484683,13.1449797 C22.8484683,10.031224 25.1189922,5.5 27.9158039,5.5 C30.7158023,5.5 32.9863177,10.0312249 32.9863177,13.1449797 L32.9863177,13.1449797 Z" id="Shape"></path>
|
||||||
|
<path d="M19.4246955,22.8995505 C8.59074225,22.8995505 3.97133194,18.0445183 3.97133194,24.0594622 C3.97133194,30.0829058 10.8899328,37.3346368 19.4246955,37.3346368 C27.9594572,37.3346368 34.8809223,30.3131889 34.8809223,24.2961184 C34.8809223,18.2776321 30.5208332,22.8995513 19.4246955,22.8995513 L19.4246955,22.8995505 Z M19.4246955,35.5515526 C17.3227023,35.5515526 15.3309396,35.0148194 13.5175822,34.1790777 C13.7995276,32.7577127 15.4529546,31.6435062 17.5390188,31.6435062 C18.2312941,31.6435062 18.8515667,31.7972603 19.4246955,32.0055779 C19.9978148,31.7969079 20.617776,31.6435062 21.3100529,31.6435062 C23.40822,31.6435062 25.0715291,32.76515 25.3378606,34.2003348 C23.5238656,35.0215518 21.5292319,35.5515534 19.4246955,35.5515534 L19.4246955,35.5515526 Z" id="Shape"></path>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</svg>
|
After: size 3.4 KiB
|
@ -103,6 +103,7 @@
|
||||||
#include "audio/AudioIOStatsRenderer.h"
|
#include "audio/AudioIOStatsRenderer.h"
|
||||||
#include "audio/AudioScope.h"
|
#include "audio/AudioScope.h"
|
||||||
|
|
||||||
|
#include "devices/CameraToolBox.h"
|
||||||
#include "devices/DdeFaceTracker.h"
|
#include "devices/DdeFaceTracker.h"
|
||||||
#include "devices/Faceshift.h"
|
#include "devices/Faceshift.h"
|
||||||
#include "devices/Leapmotion.h"
|
#include "devices/Leapmotion.h"
|
||||||
|
@ -266,6 +267,7 @@ bool setupEssentials(int& argc, char** argv) {
|
||||||
auto ddeFaceTracker = DependencyManager::set<DdeFaceTracker>();
|
auto ddeFaceTracker = DependencyManager::set<DdeFaceTracker>();
|
||||||
auto modelBlender = DependencyManager::set<ModelBlender>();
|
auto modelBlender = DependencyManager::set<ModelBlender>();
|
||||||
auto audioToolBox = DependencyManager::set<AudioToolBox>();
|
auto audioToolBox = DependencyManager::set<AudioToolBox>();
|
||||||
|
auto cameraToolBox = DependencyManager::set<CameraToolBox>();
|
||||||
auto avatarManager = DependencyManager::set<AvatarManager>();
|
auto avatarManager = DependencyManager::set<AvatarManager>();
|
||||||
auto lodManager = DependencyManager::set<LODManager>();
|
auto lodManager = DependencyManager::set<LODManager>();
|
||||||
auto jsConsole = DependencyManager::set<StandAloneJSConsole>();
|
auto jsConsole = DependencyManager::set<StandAloneJSConsole>();
|
||||||
|
@ -590,6 +592,14 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
|
||||||
// The offscreen UI needs to intercept the mouse and keyboard
|
// The offscreen UI needs to intercept the mouse and keyboard
|
||||||
// events coming from the onscreen window
|
// events coming from the onscreen window
|
||||||
_glWidget->installEventFilter(DependencyManager::get<OffscreenUi>().data());
|
_glWidget->installEventFilter(DependencyManager::get<OffscreenUi>().data());
|
||||||
|
|
||||||
|
// initialize our face trackers after loading the menu settings
|
||||||
|
auto faceshiftTracker = DependencyManager::get<Faceshift>();
|
||||||
|
faceshiftTracker->init();
|
||||||
|
connect(faceshiftTracker.data(), &FaceTracker::muteToggled, this, &Application::faceTrackerMuteToggled);
|
||||||
|
auto ddeTracker = DependencyManager::get<DdeFaceTracker>();
|
||||||
|
ddeTracker->init();
|
||||||
|
connect(ddeTracker.data(), &FaceTracker::muteToggled, this, &Application::faceTrackerMuteToggled);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -921,6 +931,14 @@ void Application::audioMuteToggled() {
|
||||||
muteAction->setChecked(DependencyManager::get<AudioClient>()->isMuted());
|
muteAction->setChecked(DependencyManager::get<AudioClient>()->isMuted());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void Application::faceTrackerMuteToggled() {
|
||||||
|
QAction* muteAction = Menu::getInstance()->getActionForOption(MenuOption::MuteFaceTracking);
|
||||||
|
Q_CHECK_PTR(muteAction);
|
||||||
|
bool isMuted = getSelectedFaceTracker()->isMuted();
|
||||||
|
muteAction->setChecked(isMuted);
|
||||||
|
getSelectedFaceTracker()->setEnabled(!isMuted);
|
||||||
|
}
|
||||||
|
|
||||||
void Application::aboutApp() {
|
void Application::aboutApp() {
|
||||||
InfoView::forcedShow(INFO_HELP_PATH);
|
InfoView::forcedShow(INFO_HELP_PATH);
|
||||||
}
|
}
|
||||||
|
@ -1011,6 +1029,9 @@ bool Application::event(QEvent* event) {
|
||||||
case QEvent::MouseButtonPress:
|
case QEvent::MouseButtonPress:
|
||||||
mousePressEvent((QMouseEvent*)event);
|
mousePressEvent((QMouseEvent*)event);
|
||||||
return true;
|
return true;
|
||||||
|
case QEvent::MouseButtonDblClick:
|
||||||
|
mouseDoublePressEvent((QMouseEvent*)event);
|
||||||
|
return true;
|
||||||
case QEvent::MouseButtonRelease:
|
case QEvent::MouseButtonRelease:
|
||||||
mouseReleaseEvent((QMouseEvent*)event);
|
mouseReleaseEvent((QMouseEvent*)event);
|
||||||
return true;
|
return true;
|
||||||
|
@ -1433,6 +1454,11 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (DependencyManager::get<CameraToolBox>()->mousePressEvent(getMouseX(), getMouseY())) {
|
||||||
|
// stop propagation
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
if (_rearMirrorTools->mousePressEvent(getMouseX(), getMouseY())) {
|
if (_rearMirrorTools->mousePressEvent(getMouseX(), getMouseY())) {
|
||||||
// stop propagation
|
// stop propagation
|
||||||
return;
|
return;
|
||||||
|
@ -1450,6 +1476,24 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void Application::mouseDoublePressEvent(QMouseEvent* event, unsigned int deviceID) {
|
||||||
|
// if one of our scripts have asked to capture this event, then stop processing it
|
||||||
|
if (_controllerScriptingInterface.isMouseCaptured()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (activeWindow() == _window) {
|
||||||
|
if (event->button() == Qt::LeftButton) {
|
||||||
|
if (mouseOnScreen()) {
|
||||||
|
if (DependencyManager::get<CameraToolBox>()->mouseDoublePressEvent(getMouseX(), getMouseY())) {
|
||||||
|
// stop propagation
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
void Application::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {
|
void Application::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {
|
||||||
|
|
||||||
if (!_aboutToQuit) {
|
if (!_aboutToQuit) {
|
||||||
|
@ -1845,18 +1889,45 @@ FaceTracker* Application::getActiveFaceTracker() {
|
||||||
(faceshift->isActive() ? static_cast<FaceTracker*>(faceshift.data()) : NULL));
|
(faceshift->isActive() ? static_cast<FaceTracker*>(faceshift.data()) : NULL));
|
||||||
}
|
}
|
||||||
|
|
||||||
void Application::setActiveFaceTracker() {
|
FaceTracker* Application::getSelectedFaceTracker() {
|
||||||
|
FaceTracker* faceTracker = NULL;
|
||||||
#ifdef HAVE_FACESHIFT
|
#ifdef HAVE_FACESHIFT
|
||||||
DependencyManager::get<Faceshift>()->setTCPEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift));
|
if (Menu::getInstance()->isOptionChecked(MenuOption::Faceshift)) {
|
||||||
|
faceTracker = DependencyManager::get<Faceshift>().data();
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
#ifdef HAVE_DDE
|
||||||
|
if (Menu::getInstance()->isOptionChecked(MenuOption::UseCamera)) {
|
||||||
|
faceTracker = DependencyManager::get<DdeFaceTracker>().data();
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
return faceTracker;
|
||||||
|
}
|
||||||
|
|
||||||
|
void Application::setActiveFaceTracker() {
|
||||||
|
bool isMuted = Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking);
|
||||||
|
#ifdef HAVE_FACESHIFT
|
||||||
|
auto faceshiftTracker = DependencyManager::get<Faceshift>();
|
||||||
|
faceshiftTracker->setIsMuted(isMuted);
|
||||||
|
faceshiftTracker->setEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift) && !isMuted);
|
||||||
#endif
|
#endif
|
||||||
#ifdef HAVE_DDE
|
#ifdef HAVE_DDE
|
||||||
bool isUsingDDE = Menu::getInstance()->isOptionChecked(MenuOption::UseCamera);
|
bool isUsingDDE = Menu::getInstance()->isOptionChecked(MenuOption::UseCamera);
|
||||||
Menu::getInstance()->getActionForOption(MenuOption::UseAudioForMouth)->setVisible(isUsingDDE);
|
Menu::getInstance()->getActionForOption(MenuOption::UseAudioForMouth)->setVisible(isUsingDDE);
|
||||||
Menu::getInstance()->getActionForOption(MenuOption::VelocityFilter)->setVisible(isUsingDDE);
|
Menu::getInstance()->getActionForOption(MenuOption::VelocityFilter)->setVisible(isUsingDDE);
|
||||||
DependencyManager::get<DdeFaceTracker>()->setEnabled(isUsingDDE);
|
auto ddeTracker = DependencyManager::get<DdeFaceTracker>();
|
||||||
|
ddeTracker->setIsMuted(isMuted);
|
||||||
|
ddeTracker->setEnabled(isUsingDDE && !isMuted);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void Application::toggleFaceTrackerMute() {
|
||||||
|
FaceTracker* faceTracker = getSelectedFaceTracker();
|
||||||
|
if (faceTracker) {
|
||||||
|
faceTracker->toggleMute();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
bool Application::exportEntities(const QString& filename, const QVector<EntityItemID>& entityIDs) {
|
bool Application::exportEntities(const QString& filename, const QVector<EntityItemID>& entityIDs) {
|
||||||
QVector<EntityItem*> entities;
|
QVector<EntityItem*> entities;
|
||||||
|
|
||||||
|
@ -2025,10 +2096,6 @@ void Application::init() {
|
||||||
SixenseManager::getInstance().toggleSixense(true);
|
SixenseManager::getInstance().toggleSixense(true);
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
// initialize our face trackers after loading the menu settings
|
|
||||||
DependencyManager::get<Faceshift>()->init();
|
|
||||||
DependencyManager::get<DdeFaceTracker>()->init();
|
|
||||||
|
|
||||||
Leapmotion::init();
|
Leapmotion::init();
|
||||||
RealSense::init();
|
RealSense::init();
|
||||||
|
|
||||||
|
@ -2166,7 +2233,7 @@ void Application::updateMyAvatarLookAtPosition() {
|
||||||
|
|
||||||
isLookingAtSomeone = true;
|
isLookingAtSomeone = true;
|
||||||
// If I am looking at someone else, look directly at one of their eyes
|
// If I am looking at someone else, look directly at one of their eyes
|
||||||
if (tracker) {
|
if (tracker && !tracker->isMuted()) {
|
||||||
// If a face tracker is active, look at the eye for the side my gaze is biased toward
|
// If a face tracker is active, look at the eye for the side my gaze is biased toward
|
||||||
if (tracker->getEstimatedEyeYaw() > _myAvatar->getHead()->getFinalYaw()) {
|
if (tracker->getEstimatedEyeYaw() > _myAvatar->getHead()->getFinalYaw()) {
|
||||||
// Look at their right eye
|
// Look at their right eye
|
||||||
|
@ -2192,7 +2259,7 @@ void Application::updateMyAvatarLookAtPosition() {
|
||||||
//
|
//
|
||||||
// Deflect the eyes a bit to match the detected Gaze from 3D camera if active
|
// Deflect the eyes a bit to match the detected Gaze from 3D camera if active
|
||||||
//
|
//
|
||||||
if (tracker) {
|
if (tracker && !tracker->isMuted()) {
|
||||||
float eyePitch = tracker->getEstimatedEyePitch();
|
float eyePitch = tracker->getEstimatedEyePitch();
|
||||||
float eyeYaw = tracker->getEstimatedEyeYaw();
|
float eyeYaw = tracker->getEstimatedEyeYaw();
|
||||||
const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f;
|
const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f;
|
||||||
|
@ -2247,7 +2314,7 @@ void Application::updateCamera(float deltaTime) {
|
||||||
if (!OculusManager::isConnected() && !TV3DManager::isConnected() &&
|
if (!OculusManager::isConnected() && !TV3DManager::isConnected() &&
|
||||||
Menu::getInstance()->isOptionChecked(MenuOption::OffAxisProjection)) {
|
Menu::getInstance()->isOptionChecked(MenuOption::OffAxisProjection)) {
|
||||||
FaceTracker* tracker = getActiveFaceTracker();
|
FaceTracker* tracker = getActiveFaceTracker();
|
||||||
if (tracker) {
|
if (tracker && !tracker->isMuted()) {
|
||||||
const float EYE_OFFSET_SCALE = 0.025f;
|
const float EYE_OFFSET_SCALE = 0.025f;
|
||||||
glm::vec3 position = tracker->getHeadTranslation() * EYE_OFFSET_SCALE;
|
glm::vec3 position = tracker->getHeadTranslation() * EYE_OFFSET_SCALE;
|
||||||
float xSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? 1.0f : -1.0f;
|
float xSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? 1.0f : -1.0f;
|
||||||
|
@ -2311,7 +2378,7 @@ void Application::update(float deltaTime) {
|
||||||
PerformanceTimer perfTimer("devices");
|
PerformanceTimer perfTimer("devices");
|
||||||
DeviceTracker::updateAll();
|
DeviceTracker::updateAll();
|
||||||
FaceTracker* tracker = getActiveFaceTracker();
|
FaceTracker* tracker = getActiveFaceTracker();
|
||||||
if (tracker) {
|
if (tracker && !tracker->isMuted()) {
|
||||||
tracker->update(deltaTime);
|
tracker->update(deltaTime);
|
||||||
}
|
}
|
||||||
SixenseManager::getInstance().update(deltaTime);
|
SixenseManager::getInstance().update(deltaTime);
|
||||||
|
@ -3241,6 +3308,7 @@ void Application::displaySide(Camera& theCamera, bool selfAvatarOnly, RenderArgs
|
||||||
{
|
{
|
||||||
PerformanceTimer perfTimer("3dOverlaysFront");
|
PerformanceTimer perfTimer("3dOverlaysFront");
|
||||||
glClear(GL_DEPTH_BUFFER_BIT);
|
glClear(GL_DEPTH_BUFFER_BIT);
|
||||||
|
Glower glower; // Sets alpha to 1.0
|
||||||
_overlays.renderWorld(true);
|
_overlays.renderWorld(true);
|
||||||
}
|
}
|
||||||
activeRenderingThread = nullptr;
|
activeRenderingThread = nullptr;
|
||||||
|
|
|
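The Application.cpp changes above boil down to two small decisions: which tracker the menu currently selects, and whether that tracker should run given the new MuteFaceTracking option. A minimal sketch of that logic with the Menu and DependencyManager plumbing replaced by plain booleans (the booleans and the FaceTracker struct here are illustrative; only the function names follow the diff):

// Sketch only: the selection/enable logic from getSelectedFaceTracker() and
// setActiveFaceTracker(), with menu state reduced to plain booleans.
#include <cstdio>

struct FaceTracker {
    const char* name;
    bool muted = false;
    bool enabled = false;
    void setIsMuted(bool isMuted) { muted = isMuted; }
    void setEnabled(bool isEnabled) { enabled = isEnabled; }
};

// Stand-ins for Menu::isOptionChecked() on the three options involved.
static bool faceshiftChecked = false;        // MenuOption::Faceshift
static bool useCameraChecked = true;         // MenuOption::UseCamera (DDE tracker)
static bool muteFaceTrackingChecked = false; // MenuOption::MuteFaceTracking

static FaceTracker faceshift{ "Faceshift" };
static FaceTracker dde{ "DDE" };

// Mirrors Application::getSelectedFaceTracker(): the tracker the menu selects, or none.
FaceTracker* getSelectedFaceTracker() {
    FaceTracker* selected = nullptr;
    if (faceshiftChecked) { selected = &faceshift; }
    if (useCameraChecked) { selected = &dde; }   // camera (DDE) wins if both are checked
    return selected;
}

// Mirrors Application::setActiveFaceTracker(): a tracker runs only if selected and not muted.
void setActiveFaceTracker() {
    faceshift.setIsMuted(muteFaceTrackingChecked);
    faceshift.setEnabled(faceshiftChecked && !muteFaceTrackingChecked);
    dde.setIsMuted(muteFaceTrackingChecked);
    dde.setEnabled(useCameraChecked && !muteFaceTrackingChecked);
}

int main() {
    setActiveFaceTracker();
    FaceTracker* selected = getSelectedFaceTracker();
    std::printf("selected=%s enabled=%d\n",
                selected ? selected->name : "none",
                selected ? (int)selected->enabled : 0);
    return 0;
}

The same predicate, tracker && !tracker->isMuted(), is what the later hunks add in front of every per-frame use of the active tracker.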
@ -169,6 +169,7 @@ public:
|
||||||
|
|
||||||
void mouseMoveEvent(QMouseEvent* event, unsigned int deviceID = 0);
|
void mouseMoveEvent(QMouseEvent* event, unsigned int deviceID = 0);
|
||||||
void mousePressEvent(QMouseEvent* event, unsigned int deviceID = 0);
|
void mousePressEvent(QMouseEvent* event, unsigned int deviceID = 0);
|
||||||
|
void mouseDoublePressEvent(QMouseEvent* event, unsigned int deviceID = 0);
|
||||||
void mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID = 0);
|
void mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID = 0);
|
||||||
|
|
||||||
void touchBeginEvent(QTouchEvent* event);
|
void touchBeginEvent(QTouchEvent* event);
|
||||||
|
@ -218,6 +219,7 @@ public:
|
||||||
bool getLastMouseMoveWasSimulated() const { return _lastMouseMoveWasSimulated; }
|
bool getLastMouseMoveWasSimulated() const { return _lastMouseMoveWasSimulated; }
|
||||||
|
|
||||||
FaceTracker* getActiveFaceTracker();
|
FaceTracker* getActiveFaceTracker();
|
||||||
|
FaceTracker* getSelectedFaceTracker();
|
||||||
|
|
||||||
QSystemTrayIcon* getTrayIcon() { return _trayIcon; }
|
QSystemTrayIcon* getTrayIcon() { return _trayIcon; }
|
||||||
ApplicationOverlay& getApplicationOverlay() { return _applicationOverlay; }
|
ApplicationOverlay& getApplicationOverlay() { return _applicationOverlay; }
|
||||||
|
@ -396,6 +398,7 @@ public slots:
|
||||||
|
|
||||||
void resetSensors();
|
void resetSensors();
|
||||||
void setActiveFaceTracker();
|
void setActiveFaceTracker();
|
||||||
|
void toggleFaceTrackerMute();
|
||||||
|
|
||||||
void aboutApp();
|
void aboutApp();
|
||||||
void showEditEntitiesHelp();
|
void showEditEntitiesHelp();
|
||||||
|
@ -437,6 +440,7 @@ private slots:
|
||||||
void runTests();
|
void runTests();
|
||||||
|
|
||||||
void audioMuteToggled();
|
void audioMuteToggled();
|
||||||
|
void faceTrackerMuteToggled();
|
||||||
|
|
||||||
void setCursorVisible(bool visible);
|
void setCursorVisible(bool visible);
|
||||||
|
|
||||||
|
|
|
@ -131,6 +131,7 @@ bool GLCanvas::event(QEvent* event) {
|
||||||
case QEvent::MouseMove:
|
case QEvent::MouseMove:
|
||||||
case QEvent::MouseButtonPress:
|
case QEvent::MouseButtonPress:
|
||||||
case QEvent::MouseButtonRelease:
|
case QEvent::MouseButtonRelease:
|
||||||
|
case QEvent::MouseButtonDblClick:
|
||||||
case QEvent::KeyPress:
|
case QEvent::KeyPress:
|
||||||
case QEvent::KeyRelease:
|
case QEvent::KeyRelease:
|
||||||
case QEvent::FocusIn:
|
case QEvent::FocusIn:
|
||||||
|
|
|
@ -394,6 +394,12 @@ Menu::Menu() {
|
||||||
QAction* ddeFiltering = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::VelocityFilter, 0, true);
|
QAction* ddeFiltering = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::VelocityFilter, 0, true);
|
||||||
ddeFiltering->setVisible(false);
|
ddeFiltering->setVisible(false);
|
||||||
#endif
|
#endif
|
||||||
|
#if defined(HAVE_FACESHIFT) || defined(HAVE_DDE)
|
||||||
|
faceTrackingMenu->addSeparator();
|
||||||
|
addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::MuteFaceTracking,
|
||||||
|
0, false,
|
||||||
|
qApp, SLOT(toggleFaceTrackerMute()));
|
||||||
|
#endif
|
||||||
|
|
||||||
auto avatarManager = DependencyManager::get<AvatarManager>();
|
auto avatarManager = DependencyManager::get<AvatarManager>();
|
||||||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AvatarReceiveStats, 0, false,
|
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AvatarReceiveStats, 0, false,
|
||||||
|
|
|
@ -211,6 +211,7 @@ namespace MenuOption {
|
||||||
const QString Mirror = "Mirror";
|
const QString Mirror = "Mirror";
|
||||||
const QString MuteAudio = "Mute Microphone";
|
const QString MuteAudio = "Mute Microphone";
|
||||||
const QString MuteEnvironment = "Mute Environment";
|
const QString MuteEnvironment = "Mute Environment";
|
||||||
|
const QString MuteFaceTracking = "Mute Face Tracking";
|
||||||
const QString NoFaceTracking = "None";
|
const QString NoFaceTracking = "None";
|
||||||
const QString OctreeStats = "Entity Statistics";
|
const QString OctreeStats = "Entity Statistics";
|
||||||
const QString OffAxisProjection = "Off-Axis Projection";
|
const QString OffAxisProjection = "Off-Axis Projection";
|
||||||
|
|
|
@ -35,7 +35,7 @@ bool AudioToolBox::mousePressEvent(int x, int y) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
void AudioToolBox::render(int x, int y, bool boxed) {
|
void AudioToolBox::render(int x, int y, int padding, bool boxed) {
|
||||||
glEnable(GL_TEXTURE_2D);
|
glEnable(GL_TEXTURE_2D);
|
||||||
|
|
||||||
auto glCanvas = Application::getInstance()->getGLWidget();
|
auto glCanvas = Application::getInstance()->getGLWidget();
|
||||||
|
@ -79,7 +79,7 @@ void AudioToolBox::render(int x, int y, bool boxed) {
|
||||||
|
|
||||||
float iconColor = 1.0f;
|
float iconColor = 1.0f;
|
||||||
|
|
||||||
_iconBounds = QRect(x, y, MUTE_ICON_SIZE, MUTE_ICON_SIZE);
|
_iconBounds = QRect(x + padding, y, MUTE_ICON_SIZE, MUTE_ICON_SIZE);
|
||||||
if (!audioIO->isMuted()) {
|
if (!audioIO->isMuted()) {
|
||||||
glBindTexture(GL_TEXTURE_2D, _micTextureId);
|
glBindTexture(GL_TEXTURE_2D, _micTextureId);
|
||||||
iconColor = 1.0f;
|
iconColor = 1.0f;
|
||||||
|
|
|
@ -18,7 +18,7 @@
|
||||||
class AudioToolBox : public Dependency {
|
class AudioToolBox : public Dependency {
|
||||||
SINGLETON_DEPENDENCY
|
SINGLETON_DEPENDENCY
|
||||||
public:
|
public:
|
||||||
void render(int x, int y, bool boxed);
|
void render(int x, int y, int padding, bool boxed);
|
||||||
|
|
||||||
bool mousePressEvent(int x, int y);
|
bool mousePressEvent(int x, int y);
|
||||||
protected:
|
protected:
|
||||||
|
|
|
@ -90,7 +90,7 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
|
||||||
// Only use face trackers when not playing back a recording.
|
// Only use face trackers when not playing back a recording.
|
||||||
if (!myAvatar->isPlaying()) {
|
if (!myAvatar->isPlaying()) {
|
||||||
FaceTracker* faceTracker = Application::getInstance()->getActiveFaceTracker();
|
FaceTracker* faceTracker = Application::getInstance()->getActiveFaceTracker();
|
||||||
_isFaceTrackerConnected = faceTracker != NULL;
|
_isFaceTrackerConnected = faceTracker != NULL && !faceTracker->isMuted();
|
||||||
if (_isFaceTrackerConnected) {
|
if (_isFaceTrackerConnected) {
|
||||||
_blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
|
_blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
|
||||||
|
|
||||||
|
|
|
@ -243,7 +243,7 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
|
||||||
estimatedPosition /= OCULUS_LEAN_SCALE;
|
estimatedPosition /= OCULUS_LEAN_SCALE;
|
||||||
} else {
|
} else {
|
||||||
FaceTracker* tracker = Application::getInstance()->getActiveFaceTracker();
|
FaceTracker* tracker = Application::getInstance()->getActiveFaceTracker();
|
||||||
if (tracker) {
|
if (tracker && !tracker->isMuted()) {
|
||||||
estimatedPosition = tracker->getHeadTranslation();
|
estimatedPosition = tracker->getHeadTranslation();
|
||||||
_trackedHeadPosition = estimatedPosition;
|
_trackedHeadPosition = estimatedPosition;
|
||||||
estimatedRotation = glm::degrees(safeEulerAngles(tracker->getHeadRotation()));
|
estimatedRotation = glm::degrees(safeEulerAngles(tracker->getHeadRotation()));
|
||||||
|
|
interface/src/devices/CameraToolBox.cpp (new file, 121 lines)
|
@ -0,0 +1,121 @@
|
||||||
|
//
|
||||||
|
// CameraToolBox.cpp
|
||||||
|
// interface/src/devices
|
||||||
|
//
|
||||||
|
// Created by David Rowe on 30 Apr 2015.
|
||||||
|
// Copyright 2015 High Fidelity, Inc.
|
||||||
|
//
|
||||||
|
// Distributed under the Apache License, Version 2.0.
|
||||||
|
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||||
|
//
|
||||||
|
|
||||||
|
#include "InterfaceConfig.h"
|
||||||
|
|
||||||
|
#include <GLCanvas.h>
|
||||||
|
#include <PathUtils.h>
|
||||||
|
|
||||||
|
#include "Application.h"
|
||||||
|
#include "CameraToolBox.h"
|
||||||
|
#include "FaceTracker.h"
|
||||||
|
|
||||||
|
|
||||||
|
CameraToolBox::CameraToolBox() :
|
||||||
|
_iconPulseTimeReference(usecTimestampNow()),
|
||||||
|
_doubleClickTimer(NULL)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
CameraToolBox::~CameraToolBox() {
|
||||||
|
if (_doubleClickTimer) {
|
||||||
|
_doubleClickTimer->stop();
|
||||||
|
delete _doubleClickTimer;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
bool CameraToolBox::mousePressEvent(int x, int y) {
|
||||||
|
if (_iconBounds.contains(x, y)) {
|
||||||
|
if (!_doubleClickTimer) {
|
||||||
|
// Toggle mute after waiting to check that it's not a double-click.
|
||||||
|
const int DOUBLE_CLICK_WAIT = 200; // ms
|
||||||
|
_doubleClickTimer = new QTimer(this);
|
||||||
|
connect(_doubleClickTimer, SIGNAL(timeout()), this, SLOT(toggleMute()));
|
||||||
|
_doubleClickTimer->setSingleShot(true);
|
||||||
|
_doubleClickTimer->setInterval(DOUBLE_CLICK_WAIT);
|
||||||
|
_doubleClickTimer->start();
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool CameraToolBox::mouseDoublePressEvent(int x, int y) {
|
||||||
|
if (_iconBounds.contains(x, y)) {
|
||||||
|
if (_doubleClickTimer) {
|
||||||
|
_doubleClickTimer->stop();
|
||||||
|
delete _doubleClickTimer;
|
||||||
|
_doubleClickTimer = NULL;
|
||||||
|
}
|
||||||
|
Application::getInstance()->resetSensors();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
void CameraToolBox::toggleMute() {
|
||||||
|
delete _doubleClickTimer;
|
||||||
|
_doubleClickTimer = NULL;
|
||||||
|
|
||||||
|
FaceTracker* faceTracker = Application::getInstance()->getSelectedFaceTracker();
|
||||||
|
if (faceTracker) {
|
||||||
|
faceTracker->toggleMute();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void CameraToolBox::render(int x, int y, bool boxed) {
|
||||||
|
glEnable(GL_TEXTURE_2D);
|
||||||
|
|
||||||
|
auto glCanvas = Application::getInstance()->getGLWidget();
|
||||||
|
if (_enabledTextureId == 0) {
|
||||||
|
_enabledTextureId = glCanvas->bindTexture(QImage(PathUtils::resourcesPath() + "images/face.svg"));
|
||||||
|
}
|
||||||
|
if (_mutedTextureId == 0) {
|
||||||
|
_mutedTextureId = glCanvas->bindTexture(QImage(PathUtils::resourcesPath() + "images/face-mute.svg"));
|
||||||
|
}
|
||||||
|
|
||||||
|
const int MUTE_ICON_SIZE = 24;
|
||||||
|
_iconBounds = QRect(x, y, MUTE_ICON_SIZE, MUTE_ICON_SIZE);
|
||||||
|
float iconColor = 1.0f;
|
||||||
|
if (!Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking)) {
|
||||||
|
glBindTexture(GL_TEXTURE_2D, _enabledTextureId);
|
||||||
|
} else {
|
||||||
|
glBindTexture(GL_TEXTURE_2D, _mutedTextureId);
|
||||||
|
|
||||||
|
// Make muted icon pulsate
|
||||||
|
static const float PULSE_MIN = 0.4f;
|
||||||
|
static const float PULSE_MAX = 1.0f;
|
||||||
|
static const float PULSE_FREQUENCY = 1.0f; // in Hz
|
||||||
|
qint64 now = usecTimestampNow();
|
||||||
|
if (now - _iconPulseTimeReference > (qint64)USECS_PER_SECOND) {
|
||||||
|
// Prevents t from getting too big, which would diminish glm::cos precision
|
||||||
|
_iconPulseTimeReference = now - ((now - _iconPulseTimeReference) % USECS_PER_SECOND);
|
||||||
|
}
|
||||||
|
float t = (float)(now - _iconPulseTimeReference) / (float)USECS_PER_SECOND;
|
||||||
|
float pulseFactor = (glm::cos(t * PULSE_FREQUENCY * 2.0f * PI) + 1.0f) / 2.0f;
|
||||||
|
iconColor = PULSE_MIN + (PULSE_MAX - PULSE_MIN) * pulseFactor;
|
||||||
|
}
|
||||||
|
|
||||||
|
glm::vec4 quadColor(iconColor, iconColor, iconColor, 1.0f);
|
||||||
|
|
||||||
|
glm::vec2 topLeft(_iconBounds.left(), _iconBounds.top());
|
||||||
|
glm::vec2 bottomRight(_iconBounds.right(), _iconBounds.bottom());
|
||||||
|
glm::vec2 texCoordTopLeft(1,1);
|
||||||
|
glm::vec2 texCoordBottomRight(0,0);
|
||||||
|
|
||||||
|
if (_boxQuadID == GeometryCache::UNKNOWN_ID) {
|
||||||
|
_boxQuadID = DependencyManager::get<GeometryCache>()->allocateID();
|
||||||
|
}
|
||||||
|
|
||||||
|
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor, _boxQuadID);
|
||||||
|
|
||||||
|
glDisable(GL_TEXTURE_2D);
|
||||||
|
}
|
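CameraToolBox.cpp above toggles mute on a single click but resets the sensors on a double click, so the single-click action is deferred through a single-shot QTimer that a later double click can cancel. A self-contained sketch of that pattern, assuming Qt 5 and with the two click handlers reduced to lambdas (names and the shorter delay are illustrative):

// Sketch only: single-click vs. double-click disambiguation with a single-shot QTimer,
// the same shape as CameraToolBox::mousePressEvent()/mouseDoublePressEvent().
#include <QCoreApplication>
#include <QDebug>
#include <QTimer>

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);

    QTimer* pendingClick = nullptr;
    const int DOUBLE_CLICK_WAIT = 200; // ms

    // First click: arm a single-shot timer instead of acting immediately.
    auto onPress = [&]() {
        if (!pendingClick) {
            pendingClick = new QTimer(&app);
            pendingClick->setSingleShot(true);
            QObject::connect(pendingClick, &QTimer::timeout, [&]() {
                qDebug() << "single click confirmed: toggle mute";
                pendingClick->deleteLater();
                pendingClick = nullptr;
            });
            pendingClick->start(DOUBLE_CLICK_WAIT);
        }
    };

    // Second click inside the wait window: cancel the pending toggle and reset instead.
    auto onDoublePress = [&]() {
        if (pendingClick) {
            pendingClick->stop();
            pendingClick->deleteLater();
            pendingClick = nullptr;
        }
        qDebug() << "double click: reset sensors";
    };

    onPress();          // simulate a click...
    onDoublePress();    // ...immediately followed by the double-click event; the toggle never fires

    QTimer::singleShot(300, &app, [&app]() { app.quit(); });
    return app.exec();
}

The key design point is that the single-click action waits DOUBLE_CLICK_WAIT before running, which keeps a double click from muting the tracker on its first press.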
interface/src/devices/CameraToolBox.h (new file, 45 lines)
|
@ -0,0 +1,45 @@
|
||||||
|
//
|
||||||
|
// CameraToolBox.h
|
||||||
|
// interface/src/devices
|
||||||
|
//
|
||||||
|
// Created by David Rowe on 30 Apr 2015.
|
||||||
|
// Copyright 2015 High Fidelity, Inc.
|
||||||
|
//
|
||||||
|
// Distributed under the Apache License, Version 2.0.
|
||||||
|
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||||
|
//
|
||||||
|
|
||||||
|
#ifndef hifi_CameraToolBox_h
|
||||||
|
#define hifi_CameraToolBox_h
|
||||||
|
|
||||||
|
#include <QObject>
|
||||||
|
|
||||||
|
#include <DependencyManager.h>
|
||||||
|
#include <GeometryCache.h>
|
||||||
|
|
||||||
|
class CameraToolBox : public QObject, public Dependency {
|
||||||
|
Q_OBJECT
|
||||||
|
SINGLETON_DEPENDENCY
|
||||||
|
|
||||||
|
public:
|
||||||
|
void render(int x, int y, bool boxed);
|
||||||
|
bool mousePressEvent(int x, int y);
|
||||||
|
bool mouseDoublePressEvent(int x, int y);
|
||||||
|
|
||||||
|
protected:
|
||||||
|
CameraToolBox();
|
||||||
|
~CameraToolBox();
|
||||||
|
|
||||||
|
private slots:
|
||||||
|
void toggleMute();
|
||||||
|
|
||||||
|
private:
|
||||||
|
GLuint _enabledTextureId = 0;
|
||||||
|
GLuint _mutedTextureId = 0;
|
||||||
|
int _boxQuadID = GeometryCache::UNKNOWN_ID;
|
||||||
|
QRect _iconBounds;
|
||||||
|
qint64 _iconPulseTimeReference = 0;
|
||||||
|
QTimer* _doubleClickTimer;
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif // hifi_CameraToolBox_h
|
|
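The muted-icon pulse in CameraToolBox::render() above also wraps its time reference every second so the argument handed to cos() stays small. A standalone sketch of that computation, using <cmath> instead of glm and a fabricated microsecond clock (the loop values are illustrative):

// Sketch only: the pulse computation from CameraToolBox::render(). The wrap of the time
// reference keeps the cosine argument in [0, 1) seconds so float precision does not
// degrade as the clock grows over hours of uptime.
#include <cmath>
#include <cstdint>
#include <cstdio>

static const int64_t USECS_PER_SECOND = 1000000;
static const float PI = 3.14159265f;

float pulseIntensity(int64_t now, int64_t& iconPulseTimeReference) {
    static const float PULSE_MIN = 0.4f;
    static const float PULSE_MAX = 1.0f;
    static const float PULSE_FREQUENCY = 1.0f; // Hz

    if (now - iconPulseTimeReference > USECS_PER_SECOND) {
        // Advance the reference by whole periods only, so the phase is preserved.
        iconPulseTimeReference = now - ((now - iconPulseTimeReference) % USECS_PER_SECOND);
    }
    float t = (float)(now - iconPulseTimeReference) / (float)USECS_PER_SECOND;
    float pulseFactor = (std::cos(t * PULSE_FREQUENCY * 2.0f * PI) + 1.0f) / 2.0f;
    return PULSE_MIN + (PULSE_MAX - PULSE_MIN) * pulseFactor;
}

int main() {
    int64_t reference = 0;
    for (int64_t usecs = 0; usecs <= 2 * USECS_PER_SECOND; usecs += USECS_PER_SECOND / 4) {
        std::printf("t=%.2fs intensity=%.2f\n",
                    usecs / (float)USECS_PER_SECOND, pulseIntensity(usecs, reference));
    }
    return 0;
}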
@ -178,9 +178,7 @@ DdeFaceTracker::DdeFaceTracker(const QHostAddress& host, quint16 serverPort, qui
|
||||||
_filteredBrowUp(0.0f),
|
_filteredBrowUp(0.0f),
|
||||||
_lastEyeBlinks(),
|
_lastEyeBlinks(),
|
||||||
_filteredEyeBlinks(),
|
_filteredEyeBlinks(),
|
||||||
_lastEyeCoefficients(),
|
_lastEyeCoefficients()
|
||||||
_isCalculatingFPS(false),
|
|
||||||
_frameCount(0)
|
|
||||||
{
|
{
|
||||||
_coefficients.resize(NUM_FACESHIFT_BLENDSHAPES);
|
_coefficients.resize(NUM_FACESHIFT_BLENDSHAPES);
|
||||||
|
|
||||||
|
@ -203,7 +201,16 @@ DdeFaceTracker::~DdeFaceTracker() {
|
||||||
#pragma warning(default:4351)
|
#pragma warning(default:4351)
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
void DdeFaceTracker::init() {
|
||||||
|
FaceTracker::init();
|
||||||
|
setEnabled(Menu::getInstance()->isOptionChecked(MenuOption::UseCamera) && !_isMuted);
|
||||||
|
}
|
||||||
|
|
||||||
void DdeFaceTracker::setEnabled(bool enabled) {
|
void DdeFaceTracker::setEnabled(bool enabled) {
|
||||||
|
if (!_isInitialized) {
|
||||||
|
// Don't enable until have explicitly initialized
|
||||||
|
return;
|
||||||
|
}
|
||||||
#ifdef HAVE_DDE
|
#ifdef HAVE_DDE
|
||||||
// isOpen() does not work as one might expect on QUdpSocket; don't test isOpen() before closing socket.
|
// isOpen() does not work as one might expect on QUdpSocket; don't test isOpen() before closing socket.
|
||||||
_udpSocket.close();
|
_udpSocket.close();
|
||||||
|
@ -314,7 +321,9 @@ float DdeFaceTracker::getBlendshapeCoefficient(int index) const {
|
||||||
}
|
}
|
||||||
|
|
||||||
void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
|
void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
|
||||||
if(buffer.size() > MIN_PACKET_SIZE) {
|
_lastReceiveTimestamp = usecTimestampNow();
|
||||||
|
|
||||||
|
if (buffer.size() > MIN_PACKET_SIZE) {
|
||||||
bool isFiltering = Menu::getInstance()->isOptionChecked(MenuOption::VelocityFilter);
|
bool isFiltering = Menu::getInstance()->isOptionChecked(MenuOption::VelocityFilter);
|
||||||
|
|
||||||
Packet packet;
|
Packet packet;
|
||||||
|
@ -326,7 +335,7 @@ void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
|
||||||
memcpy(&translation, packet.translation, sizeof(packet.translation));
|
memcpy(&translation, packet.translation, sizeof(packet.translation));
|
||||||
glm::quat rotation;
|
glm::quat rotation;
|
||||||
memcpy(&rotation, &packet.rotation, sizeof(packet.rotation));
|
memcpy(&rotation, &packet.rotation, sizeof(packet.rotation));
|
||||||
if (_reset || (_lastReceiveTimestamp == 0)) {
|
if (_reset || (_lastMessageReceived == 0)) {
|
||||||
memcpy(&_referenceTranslation, &translation, sizeof(glm::vec3));
|
memcpy(&_referenceTranslation, &translation, sizeof(glm::vec3));
|
||||||
memcpy(&_referenceRotation, &rotation, sizeof(glm::quat));
|
memcpy(&_referenceRotation, &rotation, sizeof(glm::quat));
|
||||||
_reset = false;
|
_reset = false;
|
||||||
|
@ -501,5 +510,4 @@ void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
|
||||||
} else {
|
} else {
|
||||||
qCWarning(interfaceapp) << "DDE Face Tracker: Decode error";
|
qCWarning(interfaceapp) << "DDE Face Tracker: Decode error";
|
||||||
}
|
}
|
||||||
_lastReceiveTimestamp = usecTimestampNow();
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -28,6 +28,7 @@ class DdeFaceTracker : public FaceTracker, public Dependency {
|
||||||
SINGLETON_DEPENDENCY
|
SINGLETON_DEPENDENCY
|
||||||
|
|
||||||
public:
|
public:
|
||||||
|
virtual void init();
|
||||||
virtual void reset();
|
virtual void reset();
|
||||||
|
|
||||||
virtual bool isActive() const;
|
virtual bool isActive() const;
|
||||||
|
@ -120,9 +121,6 @@ private:
|
||||||
float _lastEyeBlinks[2];
|
float _lastEyeBlinks[2];
|
||||||
float _filteredEyeBlinks[2];
|
float _filteredEyeBlinks[2];
|
||||||
float _lastEyeCoefficients[2];
|
float _lastEyeCoefficients[2];
|
||||||
|
|
||||||
bool _isCalculatingFPS;
|
|
||||||
int _frameCount;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
#endif // hifi_DdeFaceTracker_h
|
#endif // hifi_DdeFaceTracker_h
|
||||||
|
|
|
@ -15,14 +15,14 @@
|
||||||
|
|
||||||
#include "FaceTracker.h"
|
#include "FaceTracker.h"
|
||||||
#include "InterfaceLogging.h"
|
#include "InterfaceLogging.h"
|
||||||
|
#include "Menu.h"
|
||||||
|
|
||||||
const int FPS_TIMER_DELAY = 2000; // ms
|
const int FPS_TIMER_DELAY = 2000; // ms
|
||||||
const int FPS_TIMER_DURATION = 2000; // ms
|
const int FPS_TIMER_DURATION = 2000; // ms
|
||||||
|
|
||||||
FaceTracker::FaceTracker() :
|
void FaceTracker::init() {
|
||||||
_isCalculatingFPS(false),
|
_isMuted = Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking);
|
||||||
_frameCount(0)
|
_isInitialized = true; // FaceTracker can be used now
|
||||||
{
|
|
||||||
}
|
}
|
||||||
|
|
||||||
inline float FaceTracker::getBlendshapeCoefficient(int index) const {
|
inline float FaceTracker::getBlendshapeCoefficient(int index) const {
|
||||||
|
@ -101,3 +101,8 @@ void FaceTracker::finishFPSTimer() {
|
||||||
qCDebug(interfaceapp) << "Face tracker FPS =" << (float)_frameCount / ((float)FPS_TIMER_DURATION / 1000.0f);
|
qCDebug(interfaceapp) << "Face tracker FPS =" << (float)_frameCount / ((float)FPS_TIMER_DURATION / 1000.0f);
|
||||||
_isCalculatingFPS = false;
|
_isCalculatingFPS = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void FaceTracker::toggleMute() {
|
||||||
|
_isMuted = !_isMuted;
|
||||||
|
emit muteToggled();
|
||||||
|
}
|
||||||
|
|
|
@ -26,7 +26,7 @@ public:
|
||||||
virtual bool isActive() const { return false; }
|
virtual bool isActive() const { return false; }
|
||||||
virtual bool isTracking() const { return false; }
|
virtual bool isTracking() const { return false; }
|
||||||
|
|
||||||
virtual void init() {}
|
virtual void init();
|
||||||
virtual void update(float deltaTime);
|
virtual void update(float deltaTime);
|
||||||
virtual void reset();
|
virtual void reset();
|
||||||
|
|
||||||
|
@ -43,10 +43,22 @@ public:
|
||||||
const QVector<float>& getBlendshapeCoefficients() const;
|
const QVector<float>& getBlendshapeCoefficients() const;
|
||||||
float getBlendshapeCoefficient(int index) const;
|
float getBlendshapeCoefficient(int index) const;
|
||||||
|
|
||||||
|
bool isMuted() const { return _isMuted; }
|
||||||
|
void setIsMuted(bool isMuted) { _isMuted = isMuted; }
|
||||||
|
void toggleMute();
|
||||||
|
|
||||||
|
signals:
|
||||||
|
void muteToggled();
|
||||||
|
|
||||||
|
public slots:
|
||||||
|
virtual void setEnabled(bool enabled) = 0;
|
||||||
|
|
||||||
protected:
|
protected:
|
||||||
FaceTracker();
|
|
||||||
virtual ~FaceTracker() {};
|
virtual ~FaceTracker() {};
|
||||||
|
|
||||||
|
bool _isInitialized = false;
|
||||||
|
bool _isMuted = true;
|
||||||
|
|
||||||
glm::vec3 _headTranslation = glm::vec3(0.0f);
|
glm::vec3 _headTranslation = glm::vec3(0.0f);
|
||||||
glm::quat _headRotation = glm::quat();
|
glm::quat _headRotation = glm::quat();
|
||||||
float _estimatedEyePitch = 0.0f;
|
float _estimatedEyePitch = 0.0f;
|
||||||
|
@ -63,8 +75,8 @@ private slots:
|
||||||
void finishFPSTimer();
|
void finishFPSTimer();
|
||||||
|
|
||||||
private:
|
private:
|
||||||
bool _isCalculatingFPS;
|
bool _isCalculatingFPS = false;
|
||||||
int _frameCount;
|
int _frameCount = 0;
|
||||||
};
|
};
|
||||||
|
|
||||||
#endif // hifi_FaceTracker_h
|
#endif // hifi_FaceTracker_h
|
||||||
|
|
|
@ -49,7 +49,8 @@ Faceshift::Faceshift() :
|
||||||
|
|
||||||
#ifdef HAVE_FACESHIFT
|
#ifdef HAVE_FACESHIFT
|
||||||
void Faceshift::init() {
|
void Faceshift::init() {
|
||||||
setTCPEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift));
|
FaceTracker::init();
|
||||||
|
setEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift) && !_isMuted);
|
||||||
}
|
}
|
||||||
|
|
||||||
void Faceshift::update(float deltaTime) {
|
void Faceshift::update(float deltaTime) {
|
||||||
|
@ -92,7 +93,7 @@ void Faceshift::reset() {
|
||||||
|
|
||||||
bool Faceshift::isActive() const {
|
bool Faceshift::isActive() const {
|
||||||
const quint64 ACTIVE_TIMEOUT_USECS = 1000000;
|
const quint64 ACTIVE_TIMEOUT_USECS = 1000000;
|
||||||
return (usecTimestampNow() - _lastTrackingStateReceived) < ACTIVE_TIMEOUT_USECS;
|
return (usecTimestampNow() - _lastReceiveTimestamp) < ACTIVE_TIMEOUT_USECS;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool Faceshift::isTracking() const {
|
bool Faceshift::isTracking() const {
|
||||||
|
@ -127,7 +128,11 @@ void Faceshift::updateFakeCoefficients(float leftBlink, float rightBlink, float
|
||||||
coefficients[FUNNEL_BLENDSHAPE] = mouth3;
|
coefficients[FUNNEL_BLENDSHAPE] = mouth3;
|
||||||
}
|
}
|
||||||
|
|
||||||
void Faceshift::setTCPEnabled(bool enabled) {
|
void Faceshift::setEnabled(bool enabled) {
|
||||||
|
// Don't enable until have explicitly initialized
|
||||||
|
if (!_isInitialized) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
#ifdef HAVE_FACESHIFT
|
#ifdef HAVE_FACESHIFT
|
||||||
if ((_tcpEnabled = enabled)) {
|
if ((_tcpEnabled = enabled)) {
|
||||||
connectSocket();
|
connectSocket();
|
||||||
|
@ -196,6 +201,8 @@ void Faceshift::send(const std::string& message) {
|
||||||
|
|
||||||
void Faceshift::receive(const QByteArray& buffer) {
|
void Faceshift::receive(const QByteArray& buffer) {
|
||||||
#ifdef HAVE_FACESHIFT
|
#ifdef HAVE_FACESHIFT
|
||||||
|
_lastReceiveTimestamp = usecTimestampNow();
|
||||||
|
|
||||||
_stream.received(buffer.size(), buffer.constData());
|
_stream.received(buffer.size(), buffer.constData());
|
||||||
fsMsgPtr msg;
|
fsMsgPtr msg;
|
||||||
for (fsMsgPtr msg; (msg = _stream.get_message()); ) {
|
for (fsMsgPtr msg; (msg = _stream.get_message()); ) {
|
||||||
|
@ -240,11 +247,11 @@ void Faceshift::receive(const QByteArray& buffer) {
|
||||||
|
|
||||||
const float FRAME_AVERAGING_FACTOR = 0.99f;
|
const float FRAME_AVERAGING_FACTOR = 0.99f;
|
||||||
quint64 usecsNow = usecTimestampNow();
|
quint64 usecsNow = usecTimestampNow();
|
||||||
if (_lastTrackingStateReceived != 0) {
|
if (_lastMessageReceived != 0) {
|
||||||
_averageFrameTime = FRAME_AVERAGING_FACTOR * _averageFrameTime +
|
_averageFrameTime = FRAME_AVERAGING_FACTOR * _averageFrameTime +
|
||||||
(1.0f - FRAME_AVERAGING_FACTOR) * (float)(usecsNow - _lastTrackingStateReceived) / 1000000.0f;
|
(1.0f - FRAME_AVERAGING_FACTOR) * (float)(usecsNow - _lastMessageReceived) / 1000000.0f;
|
||||||
}
|
}
|
||||||
_lastTrackingStateReceived = usecsNow;
|
_lastMessageReceived = usecsNow;
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
|
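Faceshift::isActive() above now treats the tracker as active only if a packet arrived within ACTIVE_TIMEOUT_USECS (one second) of the newly split _lastReceiveTimestamp. A small sketch of that freshness test written against std::chrono instead of usecTimestampNow() (the TrackerActivity type is illustrative):

// Sketch only: the "recently heard from" test used by Faceshift::isActive().
#include <chrono>
#include <cstdio>

using Clock = std::chrono::steady_clock;

struct TrackerActivity {
    Clock::time_point lastReceive{};   // default-constructed: never received

    void onPacketReceived() { lastReceive = Clock::now(); }

    bool isActive() const {
        // Mirrors ACTIVE_TIMEOUT_USECS = 1000000: active if data arrived within one second.
        if (lastReceive == Clock::time_point{}) {
            return false;
        }
        return (Clock::now() - lastReceive) < std::chrono::seconds(1);
    }
};

int main() {
    TrackerActivity tracker;
    std::printf("before any packet: %d\n", (int)tracker.isActive());
    tracker.onPacketReceived();
    std::printf("right after a packet: %d\n", (int)tracker.isActive());
    return 0;
}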
@ -87,7 +87,7 @@ signals:
|
||||||
void connectionStateChanged();
|
void connectionStateChanged();
|
||||||
|
|
||||||
public slots:
|
public slots:
|
||||||
void setTCPEnabled(bool enabled);
|
void setEnabled(bool enabled);
|
||||||
|
|
||||||
private slots:
|
private slots:
|
||||||
void connectSocket();
|
void connectSocket();
|
||||||
|
@ -114,7 +114,8 @@ private:
|
||||||
bool _tcpEnabled = true;
|
bool _tcpEnabled = true;
|
||||||
int _tcpRetryCount = 0;
|
int _tcpRetryCount = 0;
|
||||||
bool _tracking = false;
|
bool _tracking = false;
|
||||||
quint64 _lastTrackingStateReceived = 0;
|
quint64 _lastReceiveTimestamp = 0;
|
||||||
|
quint64 _lastMessageReceived = 0;
|
||||||
float _averageFrameTime = STARTING_FACESHIFT_FRAME_TIME;
|
float _averageFrameTime = STARTING_FACESHIFT_FRAME_TIME;
|
||||||
|
|
||||||
glm::vec3 _headAngularVelocity = glm::vec3(0.0f);
|
glm::vec3 _headAngularVelocity = glm::vec3(0.0f);
|
||||||
|
|
|
@ -25,6 +25,7 @@
|
||||||
#include "audio/AudioToolBox.h"
|
#include "audio/AudioToolBox.h"
|
||||||
#include "Application.h"
|
#include "Application.h"
|
||||||
#include "ApplicationOverlay.h"
|
#include "ApplicationOverlay.h"
|
||||||
|
#include "devices/CameraToolBox.h"
|
||||||
#include "devices/OculusManager.h"
|
#include "devices/OculusManager.h"
|
||||||
|
|
||||||
#include "Util.h"
|
#include "Util.h"
|
||||||
|
@ -211,6 +212,7 @@ void ApplicationOverlay::renderOverlay() {
|
||||||
glMatrixMode(GL_MODELVIEW);
|
glMatrixMode(GL_MODELVIEW);
|
||||||
|
|
||||||
renderAudioMeter();
|
renderAudioMeter();
|
||||||
|
renderCameraToggle();
|
||||||
|
|
||||||
renderStatsAndLogs();
|
renderStatsAndLogs();
|
||||||
|
|
||||||
|
@ -808,18 +810,46 @@ void ApplicationOverlay::renderMagnifier(glm::vec2 magPos, float sizeMult, bool
|
||||||
} glPopMatrix();
|
} glPopMatrix();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const int AUDIO_METER_GAP = 5;
|
||||||
|
const int MUTE_ICON_PADDING = 10;
|
||||||
|
|
||||||
|
void ApplicationOverlay::renderCameraToggle() {
|
||||||
|
if (Menu::getInstance()->isOptionChecked(MenuOption::NoFaceTracking)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
int audioMeterY;
|
||||||
|
bool smallMirrorVisible = Menu::getInstance()->isOptionChecked(MenuOption::Mirror) && !OculusManager::isConnected();
|
||||||
|
bool boxed = smallMirrorVisible &&
|
||||||
|
!Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror);
|
||||||
|
if (boxed) {
|
||||||
|
audioMeterY = MIRROR_VIEW_HEIGHT + AUDIO_METER_GAP + MUTE_ICON_PADDING;
|
||||||
|
} else {
|
||||||
|
audioMeterY = AUDIO_METER_GAP + MUTE_ICON_PADDING;
|
||||||
|
}
|
||||||
|
|
||||||
|
DependencyManager::get<CameraToolBox>()->render(MIRROR_VIEW_LEFT_PADDING + AUDIO_METER_GAP, audioMeterY, boxed);
|
||||||
|
}
|
||||||
|
|
||||||
void ApplicationOverlay::renderAudioMeter() {
|
void ApplicationOverlay::renderAudioMeter() {
|
||||||
auto glCanvas = Application::getInstance()->getGLWidget();
|
auto glCanvas = Application::getInstance()->getGLWidget();
|
||||||
auto audio = DependencyManager::get<AudioClient>();
|
auto audio = DependencyManager::get<AudioClient>();
|
||||||
|
|
||||||
// Audio VU Meter and Mute Icon
|
// Audio VU Meter and Mute Icon
|
||||||
const int MUTE_ICON_SIZE = 24;
|
const int MUTE_ICON_SIZE = 24;
|
||||||
const int MUTE_ICON_PADDING = 10;
|
|
||||||
const int AUDIO_METER_WIDTH = MIRROR_VIEW_WIDTH - MUTE_ICON_SIZE - MUTE_ICON_PADDING;
|
|
||||||
const int AUDIO_METER_SCALE_WIDTH = AUDIO_METER_WIDTH - 2 ;
|
|
||||||
const int AUDIO_METER_HEIGHT = 8;
|
const int AUDIO_METER_HEIGHT = 8;
|
||||||
const int AUDIO_METER_GAP = 5;
|
const int INTER_ICON_GAP = 2;
|
||||||
const int AUDIO_METER_X = MIRROR_VIEW_LEFT_PADDING + MUTE_ICON_SIZE + AUDIO_METER_GAP;
|
|
||||||
|
int cameraSpace = 0;
|
||||||
|
int audioMeterWidth = MIRROR_VIEW_WIDTH - MUTE_ICON_SIZE - MUTE_ICON_PADDING;
|
||||||
|
int audioMeterScaleWidth = audioMeterWidth - 2;
|
||||||
|
int audioMeterX = MIRROR_VIEW_LEFT_PADDING + MUTE_ICON_SIZE + AUDIO_METER_GAP;
|
||||||
|
if (!Menu::getInstance()->isOptionChecked(MenuOption::NoFaceTracking)) {
|
||||||
|
cameraSpace = MUTE_ICON_SIZE + INTER_ICON_GAP;
|
||||||
|
audioMeterWidth -= cameraSpace;
|
||||||
|
audioMeterScaleWidth -= cameraSpace;
|
||||||
|
audioMeterX += cameraSpace;
|
||||||
|
}
|
||||||
|
|
||||||
int audioMeterY;
|
int audioMeterY;
|
||||||
bool smallMirrorVisible = Menu::getInstance()->isOptionChecked(MenuOption::Mirror) && !OculusManager::isConnected();
|
bool smallMirrorVisible = Menu::getInstance()->isOptionChecked(MenuOption::Mirror) && !OculusManager::isConnected();
|
||||||
|
@ -834,13 +864,13 @@ void ApplicationOverlay::renderAudioMeter() {
|
||||||
const glm::vec4 AUDIO_METER_BLUE = { 0.0, 0.0, 1.0, 1.0 };
|
const glm::vec4 AUDIO_METER_BLUE = { 0.0, 0.0, 1.0, 1.0 };
|
||||||
const glm::vec4 AUDIO_METER_GREEN = { 0.0, 1.0, 0.0, 1.0 };
|
const glm::vec4 AUDIO_METER_GREEN = { 0.0, 1.0, 0.0, 1.0 };
|
||||||
const glm::vec4 AUDIO_METER_RED = { 1.0, 0.0, 0.0, 1.0 };
|
const glm::vec4 AUDIO_METER_RED = { 1.0, 0.0, 0.0, 1.0 };
|
||||||
const float AUDIO_GREEN_START = 0.25 * AUDIO_METER_SCALE_WIDTH;
|
|
||||||
const float AUDIO_RED_START = 0.80 * AUDIO_METER_SCALE_WIDTH;
|
|
||||||
const float CLIPPING_INDICATOR_TIME = 1.0f;
|
const float CLIPPING_INDICATOR_TIME = 1.0f;
|
||||||
const float AUDIO_METER_AVERAGING = 0.5;
|
const float AUDIO_METER_AVERAGING = 0.5;
|
||||||
const float LOG2 = log(2.0f);
|
const float LOG2 = log(2.0f);
|
||||||
const float METER_LOUDNESS_SCALE = 2.8f / 5.0f;
|
const float METER_LOUDNESS_SCALE = 2.8f / 5.0f;
|
||||||
const float LOG2_LOUDNESS_FLOOR = 11.0f;
|
const float LOG2_LOUDNESS_FLOOR = 11.0f;
|
||||||
|
float audioGreenStart = 0.25f * audioMeterScaleWidth;
|
||||||
|
float audioRedStart = 0.8f * audioMeterScaleWidth;
|
||||||
float audioLevel = 0.0f;
|
float audioLevel = 0.0f;
|
||||||
float loudness = audio->getLastInputLoudness() + 1.0f;
|
float loudness = audio->getLastInputLoudness() + 1.0f;
|
||||||
|
|
||||||
|
@ -848,12 +878,12 @@ void ApplicationOverlay::renderAudioMeter() {
|
||||||
float log2loudness = log(_trailingAudioLoudness) / LOG2;
|
float log2loudness = log(_trailingAudioLoudness) / LOG2;
|
||||||
|
|
||||||
if (log2loudness <= LOG2_LOUDNESS_FLOOR) {
|
if (log2loudness <= LOG2_LOUDNESS_FLOOR) {
|
||||||
audioLevel = (log2loudness / LOG2_LOUDNESS_FLOOR) * METER_LOUDNESS_SCALE * AUDIO_METER_SCALE_WIDTH;
|
audioLevel = (log2loudness / LOG2_LOUDNESS_FLOOR) * METER_LOUDNESS_SCALE * audioMeterScaleWidth;
|
||||||
} else {
|
} else {
|
||||||
audioLevel = (log2loudness - (LOG2_LOUDNESS_FLOOR - 1.0f)) * METER_LOUDNESS_SCALE * AUDIO_METER_SCALE_WIDTH;
|
audioLevel = (log2loudness - (LOG2_LOUDNESS_FLOOR - 1.0f)) * METER_LOUDNESS_SCALE * audioMeterScaleWidth;
|
||||||
}
|
}
|
||||||
if (audioLevel > AUDIO_METER_SCALE_WIDTH) {
|
if (audioLevel > audioMeterScaleWidth) {
|
||||||
audioLevel = AUDIO_METER_SCALE_WIDTH;
|
audioLevel = audioMeterScaleWidth;
|
||||||
}
|
}
|
||||||
bool isClipping = ((audio->getTimeSinceLastClip() > 0.0f) && (audio->getTimeSinceLastClip() < CLIPPING_INDICATOR_TIME));
|
bool isClipping = ((audio->getTimeSinceLastClip() > 0.0f) && (audio->getTimeSinceLastClip() < CLIPPING_INDICATOR_TIME));
|
||||||
|
|
||||||
|
@ -863,7 +893,7 @@ void ApplicationOverlay::renderAudioMeter() {
|
||||||
renderCollisionOverlay(glCanvas->width(), glCanvas->height(), magnitude, 1.0f);
|
renderCollisionOverlay(glCanvas->width(), glCanvas->height(), magnitude, 1.0f);
|
||||||
}
|
}
|
||||||
|
|
||||||
DependencyManager::get<AudioToolBox>()->render(MIRROR_VIEW_LEFT_PADDING + AUDIO_METER_GAP, audioMeterY, boxed);
|
DependencyManager::get<AudioToolBox>()->render(MIRROR_VIEW_LEFT_PADDING + AUDIO_METER_GAP, audioMeterY, cameraSpace, boxed);
|
||||||
|
|
||||||
DependencyManager::get<AudioScope>()->render(glCanvas->width(), glCanvas->height());
|
DependencyManager::get<AudioScope>()->render(glCanvas->width(), glCanvas->height());
|
||||||
DependencyManager::get<AudioIOStatsRenderer>()->render(WHITE_TEXT, glCanvas->width(), glCanvas->height());
|
DependencyManager::get<AudioIOStatsRenderer>()->render(WHITE_TEXT, glCanvas->width(), glCanvas->height());
|
||||||
|
@ -871,10 +901,10 @@ void ApplicationOverlay::renderAudioMeter() {
|
||||||
audioMeterY += AUDIO_METER_HEIGHT;
|
audioMeterY += AUDIO_METER_HEIGHT;
|
||||||
|
|
||||||
// Draw audio meter background Quad
|
// Draw audio meter background Quad
|
||||||
DependencyManager::get<GeometryCache>()->renderQuad(AUDIO_METER_X, audioMeterY, AUDIO_METER_WIDTH, AUDIO_METER_HEIGHT,
|
DependencyManager::get<GeometryCache>()->renderQuad(audioMeterX, audioMeterY, audioMeterWidth, AUDIO_METER_HEIGHT,
|
||||||
glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
|
glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
|
||||||
|
|
||||||
if (audioLevel > AUDIO_RED_START) {
|
if (audioLevel > audioRedStart) {
|
||||||
glm::vec4 quadColor;
|
glm::vec4 quadColor;
|
||||||
if (!isClipping) {
|
if (!isClipping) {
|
||||||
quadColor = AUDIO_METER_RED;
|
quadColor = AUDIO_METER_RED;
|
||||||
|
@ -882,16 +912,16 @@ void ApplicationOverlay::renderAudioMeter() {
|
||||||
quadColor = glm::vec4(1, 1, 1, 1);
|
quadColor = glm::vec4(1, 1, 1, 1);
|
||||||
}
|
}
|
||||||
// Draw Red Quad
|
// Draw Red Quad
|
||||||
DependencyManager::get<GeometryCache>()->renderQuad(AUDIO_METER_X + AUDIO_RED_START,
|
DependencyManager::get<GeometryCache>()->renderQuad(audioMeterX + audioRedStart,
|
||||||
audioMeterY,
|
audioMeterY,
|
||||||
audioLevel - AUDIO_RED_START,
|
audioLevel - audioRedStart,
|
||||||
AUDIO_METER_HEIGHT, quadColor,
|
AUDIO_METER_HEIGHT, quadColor,
|
||||||
_audioRedQuad);
|
_audioRedQuad);
|
||||||
|
|
||||||
audioLevel = AUDIO_RED_START;
|
audioLevel = audioRedStart;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (audioLevel > AUDIO_GREEN_START) {
|
if (audioLevel > audioGreenStart) {
|
||||||
glm::vec4 quadColor;
|
glm::vec4 quadColor;
|
||||||
if (!isClipping) {
|
if (!isClipping) {
|
||||||
quadColor = AUDIO_METER_GREEN;
|
quadColor = AUDIO_METER_GREEN;
|
||||||
|
@ -899,13 +929,13 @@ void ApplicationOverlay::renderAudioMeter() {
|
||||||
quadColor = glm::vec4(1, 1, 1, 1);
|
quadColor = glm::vec4(1, 1, 1, 1);
|
||||||
}
|
}
|
||||||
// Draw Green Quad
|
// Draw Green Quad
|
||||||
DependencyManager::get<GeometryCache>()->renderQuad(AUDIO_METER_X + AUDIO_GREEN_START,
|
DependencyManager::get<GeometryCache>()->renderQuad(audioMeterX + audioGreenStart,
|
||||||
audioMeterY,
|
audioMeterY,
|
||||||
audioLevel - AUDIO_GREEN_START,
|
audioLevel - audioGreenStart,
|
||||||
AUDIO_METER_HEIGHT, quadColor,
|
AUDIO_METER_HEIGHT, quadColor,
|
||||||
_audioGreenQuad);
|
_audioGreenQuad);
|
||||||
|
|
||||||
audioLevel = AUDIO_GREEN_START;
|
audioLevel = audioGreenStart;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (audioLevel >= 0) {
|
if (audioLevel >= 0) {
|
||||||
|
@ -916,7 +946,7 @@ void ApplicationOverlay::renderAudioMeter() {
|
||||||
quadColor = glm::vec4(1, 1, 1, 1);
|
quadColor = glm::vec4(1, 1, 1, 1);
|
||||||
}
|
}
|
||||||
// Draw Blue (low level) quad
|
// Draw Blue (low level) quad
|
||||||
DependencyManager::get<GeometryCache>()->renderQuad(AUDIO_METER_X,
|
DependencyManager::get<GeometryCache>()->renderQuad(audioMeterX,
|
||||||
audioMeterY,
|
audioMeterY,
|
||||||
audioLevel, AUDIO_METER_HEIGHT, quadColor,
|
audioLevel, AUDIO_METER_HEIGHT, quadColor,
|
||||||
_audioBlueQuad);
|
_audioBlueQuad);
|
||||||
|
|
|
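The audio meter changes above replace the fixed AUDIO_METER_* constants with values that shrink and shift right when the camera (face tracking) icon is visible. A tiny sketch of that layout arithmetic (the mirror-view constants here are illustrative, not taken from the diff):

// Sketch only: the overlay layout arithmetic from ApplicationOverlay::renderAudioMeter().
#include <cstdio>

int main() {
    const int MIRROR_VIEW_WIDTH = 265;        // illustrative value, not from the diff
    const int MIRROR_VIEW_LEFT_PADDING = 10;  // illustrative value, not from the diff
    const int MUTE_ICON_SIZE = 24;
    const int MUTE_ICON_PADDING = 10;
    const int AUDIO_METER_GAP = 5;
    const int INTER_ICON_GAP = 2;

    for (int faceTrackingVisible = 0; faceTrackingVisible <= 1; ++faceTrackingVisible) {
        int cameraSpace = 0;
        int audioMeterWidth = MIRROR_VIEW_WIDTH - MUTE_ICON_SIZE - MUTE_ICON_PADDING;
        int audioMeterScaleWidth = audioMeterWidth - 2;
        int audioMeterX = MIRROR_VIEW_LEFT_PADDING + MUTE_ICON_SIZE + AUDIO_METER_GAP;
        if (faceTrackingVisible) {
            cameraSpace = MUTE_ICON_SIZE + INTER_ICON_GAP;  // reserve a slot for the camera icon
            audioMeterWidth -= cameraSpace;
            audioMeterScaleWidth -= cameraSpace;
            audioMeterX += cameraSpace;
        }
        std::printf("camera icon %s: x=%d width=%d scaleWidth=%d\n",
                    faceTrackingVisible ? "shown" : "hidden",
                    audioMeterX, audioMeterWidth, audioMeterScaleWidth);
    }
    return 0;
}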
@ -101,6 +101,7 @@ private:
|
||||||
void renderPointersOculus(const glm::vec3& eyePos);
|
void renderPointersOculus(const glm::vec3& eyePos);
|
||||||
|
|
||||||
void renderAudioMeter();
|
void renderAudioMeter();
|
||||||
|
void renderCameraToggle();
|
||||||
void renderStatsAndLogs();
|
void renderStatsAndLogs();
|
||||||
void renderDomainConnectionStatusBorder();
|
void renderDomainConnectionStatusBorder();
|
||||||
|
|
||||||
|
|
|
@ -162,11 +162,16 @@ void HMDToolsDialog::enterHDMMode() {
|
||||||
close();
|
close();
|
||||||
}
|
}
|
||||||
|
|
||||||
Application::getInstance()->setFullscreen(true);
|
|
||||||
Application::getInstance()->setEnableVRMode(true);
|
Application::getInstance()->setEnableVRMode(true);
|
||||||
|
|
||||||
const int SLIGHT_DELAY = 500;
|
const int SLIGHT_DELAY = 500;
|
||||||
QTimer::singleShot(SLIGHT_DELAY, this, SLOT(activateWindowAfterEnterMode()));
|
// If we go to fullscreen immediately, it ends up on the primary monitor,
|
||||||
|
// even though we've already moved the window. By adding this delay, the
|
||||||
|
// fullscreen target screen ends up correct.
|
||||||
|
QTimer::singleShot(SLIGHT_DELAY, this, [&]{
|
||||||
|
Application::getInstance()->setFullscreen(true);
|
||||||
|
activateWindowAfterEnterMode();
|
||||||
|
});
|
||||||
|
|
||||||
_inHDMMode = true;
|
_inHDMMode = true;
|
||||||
}
|
}
|
||||||
|
|
|
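The HMDToolsDialog change above stops calling setFullscreen() immediately and instead defers it, as the new comment explains, so the fullscreen request targets the screen the window was just moved to. A minimal sketch of the same deferral with QTimer::singleShot and a lambda, assuming Qt 5.4+ and using a plain QWidget in place of the interface window:

// Sketch only: deferring a window-state change with a single-shot timer and a lambda.
#include <QApplication>
#include <QTimer>
#include <QWidget>

int main(int argc, char** argv) {
    QApplication app(argc, argv);

    QWidget window;
    window.show();   // stand-in for the window that was just moved to the HMD screen

    // Going fullscreen immediately can land on the primary monitor even though the window
    // has already been moved; a short delay lets the request target the window's screen.
    const int SLIGHT_DELAY = 500; // ms
    QTimer::singleShot(SLIGHT_DELAY, &window, [&window]() {
        window.showFullScreen();
        window.activateWindow();
    });

    return app.exec();
}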
@ -57,7 +57,9 @@ public:
|
||||||
bool isAxis() const { return getType() == ChannelType::AXIS; }
|
bool isAxis() const { return getType() == ChannelType::AXIS; }
|
||||||
bool isJoint() const { return getType() == ChannelType::JOINT; }
|
bool isJoint() const { return getType() == ChannelType::JOINT; }
|
||||||
|
|
||||||
explicit Input() {}
|
// WORKAROUND: the explicit initializer here avoids a bug in GCC-4.8.2 (but not found in 4.9.2)
|
||||||
|
// where the default initializer (a C++-11ism) for the union data above is not applied.
|
||||||
|
explicit Input() : _id(0) {}
|
||||||
explicit Input(uint32 id) : _id(id) {}
|
explicit Input(uint32 id) : _id(id) {}
|
||||||
explicit Input(uint16 device, uint16 channel, ChannelType type) : _device(device), _channel(channel), _type(uint16(type)) {}
|
explicit Input(uint16 device, uint16 channel, ChannelType type) : _device(device), _channel(channel), _type(uint16(type)) {}
|
||||||
Input(const Input& src) : _id(src._id) {}
|
Input(const Input& src) : _id(src._id) {}
|
||||||
|
|
|
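The comment added to Input.h above records why the default constructor now initializes the union member explicitly rather than relying on the in-class default initializer under GCC 4.8.2. A stripped-down illustration of the same pattern (the PackedId class is hypothetical; it only copies the anonymous-union layout idea from controllers' Input, and the anonymous struct is the same compiler extension the original relies on):

// Sketch only: explicit initialization of a union data member in the default constructor.
#include <cstdint>
#include <cstdio>

class PackedId {
public:
    // The explicit _id(0) is the point: with a union member, do not count on the compiler
    // value-initializing it for you on every toolchain.
    PackedId() : _id(0) {}
    explicit PackedId(uint32_t id) : _id(id) {}

    uint32_t id() const { return _id; }
    void setParts(uint16_t device, uint16_t channel) {
        _device = device;    // one 16-bit half of the packed id
        _channel = channel;  // the other 16-bit half
    }

private:
    union {
        struct {
            uint16_t _device;
            uint16_t _channel;
        };
        uint32_t _id;        // both halves viewed as one 32-bit value
    };
};

int main() {
    PackedId empty;              // _id == 0 because of the explicit initializer
    PackedId combined;
    combined.setParts(7, 3);
    std::printf("empty=%u combined=0x%08x\n",
                (unsigned)empty.id(), (unsigned)combined.id());
    return 0;
}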
@ -162,6 +162,7 @@ public:
|
||||||
Mirror,
|
Mirror,
|
||||||
MuteAudio,
|
MuteAudio,
|
||||||
MuteEnvironment,
|
MuteEnvironment,
|
||||||
|
MuteFaceTracking,
|
||||||
NoFaceTracking,
|
NoFaceTracking,
|
||||||
NoShadows,
|
NoShadows,
|
||||||
OctreeStats,
|
OctreeStats,
|
||||||
|
|