Merge branch 'master' of https://github.com/highfidelity/hifi into refactor-playSound

Howard Stearns 2015-06-22 14:10:13 -07:00
commit 3746231d75
64 changed files with 2770 additions and 2748 deletions

View file

@ -4,8 +4,8 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
GIT_REPOSITORY https://github.com/jherico/oglplus.git
GIT_TAG 470d8e56fd6bf3913ceec03d82f42d3bafab2cbe
GIT_REPOSITORY https://github.com/matus-chochlik/oglplus.git
GIT_TAG a2681383928b1166f176512cbe0f95e96fe68d08
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""

View file

@ -8,7 +8,7 @@ body {
background-color: rgb(76, 76, 76);
color: rgb(204, 204, 204);
font-family: Arial;
font-size: 8.25pt;
font-size: 9.0pt;
-webkit-touch-callout: none;
-webkit-user-select: none;
@ -58,6 +58,7 @@ body {
.multi-property-section {
}
.property-section {
display: block;
margin: 10 10;
@ -132,7 +133,7 @@ input.no-spin::-webkit-inner-spin-button {
table#entity-table {
border-collapse: collapse;
font-family: Sans-Serif;
font-size: 7.5pt;
font-size: 9pt;
width: 100%;
}
@ -156,7 +157,7 @@ table#entity-table {
}
#entity-table td {
font-size: 8.25pt;
font-size: 9.0pt;
border: 0pt black solid;
word-wrap: nowrap;
white-space: nowrap;
@ -176,14 +177,14 @@ th#entity-type {
div.input-area {
display: inline-block;
font-size: 7.5pt;
font-size: 9pt;
}
input {
}
#type {
font-size: 10.5pt;
font-size: 9.0pt;
}
#type label {
@ -191,14 +192,14 @@ input {
}
input, textarea {
background-color: rgb(102, 102, 102);
color: rgb(204, 204, 204);
background-color: rgb(63, 63, 63);
color: rgb(255, 255, 255);
border: none;
font-size: 7.5pt;
font-size: 9pt;
}
input:disabled, textarea:disabled {
background-color: rgb(102, 102, 102);
background-color: rgb(63, 63, 63);
color: rgb(160, 160, 160);
}
@ -224,7 +225,7 @@ input:disabled, textarea:disabled {
#properties-list .property {
padding: 4pt;
border-bottom: 0.75pt solid rgb(63, 63, 63);
min-height: 1em;
min-height: 12pt;
}
@ -245,7 +246,6 @@ table#properties-list {
font-weight: bold;
overflow: hidden;
text-overflow: ellipsis;
vertical-align: middle;
height: 1.2em;
}
@ -270,9 +270,7 @@ div.inner {
}
td {
vertical-align: top;
vertical-align: top;
}
#no-entities {

View file

@ -9,48 +9,52 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
function hslToRgb(hslColor) {
var h = hslColor.hue;
var s = hslColor.sat;
var l = hslColor.light;
var r, g, b;
if (s == 0) {
r = g = b = l; // achromatic
} else {
var hue2rgb = function hue2rgb(p, q, t) {
if (t < 0) t += 1;
if (t > 1) t -= 1;
if (t < 1 / 6) return p + (q - p) * 6 * t;
if (t < 1 / 2) return q;
if (t < 2 / 3) return p + (q - p) * (2 / 3 - t) * 6;
return p;
map = function(value, min1, max1, min2, max2) {
return min2 + (max2 - min2) * ((value - min1) / (max1 - min1));
}
hslToRgb = function(hslColor) {
var h = hslColor.hue;
var s = hslColor.sat;
var l = hslColor.light;
var r, g, b;
if (s == 0) {
r = g = b = l; // achromatic
} else {
var hue2rgb = function hue2rgb(p, q, t) {
if (t < 0) t += 1;
if (t > 1) t -= 1;
if (t < 1 / 6) return p + (q - p) * 6 * t;
if (t < 1 / 2) return q;
if (t < 2 / 3) return p + (q - p) * (2 / 3 - t) * 6;
return p;
}
var q = l < 0.5 ? l * (1 + s) : l + s - l * s;
var p = 2 * l - q;
r = hue2rgb(p, q, h + 1 / 3);
g = hue2rgb(p, q, h);
b = hue2rgb(p, q, h - 1 / 3);
}
var q = l < 0.5 ? l * (1 + s) : l + s - l * s;
var p = 2 * l - q;
r = hue2rgb(p, q, h + 1 / 3);
g = hue2rgb(p, q, h);
b = hue2rgb(p, q, h - 1 / 3);
}
return {
red: Math.round(r * 255),
green: Math.round(g * 255),
blue: Math.round(b * 255)
};
return {
red: Math.round(r * 255),
green: Math.round(g * 255),
blue: Math.round(b * 255)
};
}
function map(value, min1, max1, min2, max2) {
return min2 + (max2 - min2) * ((value - min1) / (max1 - min1));
}
function randFloat(low, high) {
return low + Math.random() * (high - low);
randFloat = function(low, high) {
return low + Math.random() * (high - low);
}
function randInt(low, high) {
return Math.floor(randFloat(low, high));
randInt = function(low, high) {
return Math.floor(randFloat(low, high));
}
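A minimal usage sketch of the helpers defined above (the Script.include line, the file name, and print() as the console logger are assumptions for illustration; the expected values follow directly from the HSL and linear-remap math in this file):
// Script.include("utils.js");  // assumed load path -- adjust to the actual file name
var teal = hslToRgb({ hue: 0.5, sat: 1.0, light: 0.5 });
print(JSON.stringify(teal));   // { red: 0, green: 255, blue: 255 }
print(map(5, 0, 10, 0, 100));  // 50: remaps 5 from [0, 10] onto [0, 100]
print(randInt(0, 10));         // integer in [0, 10)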

Binary image added, not shown (3.5 KiB).

Binary image removed, not shown (369 B).

Binary image removed, not shown (369 B).

View file

@ -1,11 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 18.1.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns:xl="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="133.1 714.2 21.3 33.4"
enable-background="new 133.1 714.2 21.3 33.4" xml:space="preserve">
<g>
<g>
<path fill="#7E7E7E" d="M133.1,714.2l21.3,16.7l-21.3,16.7V714.2z"/>
</g>
</g>
</svg>

Image size before removal: 501 B.

View file

Image file: 501 B before, 501 B after (no content diff rendered).

Binary image removed, not shown (127 B).

View file

@ -1,3 +0,0 @@
<?xml version="1.0"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xl="http://www.w3.org/1999/xlink" version="1.1" viewBox="344 454 26 74" width="26pt" height="74pt"><metadata xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:date>2015-06-12 18:23Z</dc:date><!-- Produced by OmniGraffle Professional 5.4.4 --></metadata><defs></defs><g stroke="none" stroke-opacity="1" stroke-dasharray="none" fill="none" fill-opacity="1"><title>Canvas 1</title><rect fill="white" width="1728" height="1466"/><g><title> Navi Bar</title><line x1="356.58927" y1="466.42861" x2="356.58927" y2="515.4286" stroke="#b3b3b3" stroke-linecap="butt" stroke-linejoin="miter" stroke-width="3"/></g></g></svg>

Image size before removal: 778 B.

View file

@ -46,6 +46,36 @@ DialogContainer {
property int inputAreaHeight: 56.0 * root.scale // Height of the background's input area
property int inputAreaStep: (height - inputAreaHeight) / 2
MouseArea {
// Drag the icon
width: parent.height
height: parent.height
x: 0
y: 0
drag {
target: root
minimumX: -parent.inputAreaStep
minimumY: -parent.inputAreaStep
maximumX: root.parent ? root.maximumX : 0
maximumY: root.parent ? root.maximumY + parent.inputAreaStep : 0
}
}
MouseArea {
// Drag the input rectangle
width: parent.width - parent.height
height: parent.inputAreaHeight
x: parent.height
y: parent.inputAreaStep
drag {
target: root
minimumX: -parent.inputAreaStep
minimumY: -parent.inputAreaStep
maximumX: root.parent ? root.maximumX : 0
maximumY: root.parent ? root.maximumY + parent.inputAreaStep : 0
}
}
Image {
id: backArrow
@ -71,7 +101,7 @@ DialogContainer {
Image {
id: forwardArrow
source: addressBarDialog.forwardEnabled ? "../images/darkgreyarrow.svg" : "../images/redarrow.svg"
source: addressBarDialog.forwardEnabled ? "../images/right-arrow.svg" : "../images/redarrow.svg"
anchors {
fill: parent
@ -111,38 +141,7 @@ DialogContainer {
addressBarDialog.loadAddress(addressLine.text)
}
}
MouseArea {
// Drag the icon
width: parent.height
height: parent.height
x: 0
y: 0
drag {
target: root
minimumX: -parent.inputAreaStep
minimumY: -parent.inputAreaStep
maximumX: root.parent ? root.maximumX : 0
maximumY: root.parent ? root.maximumY + parent.inputAreaStep : 0
}
}
// Add this code to make text bar draggable
/*
MouseArea {
// Drag the input rectangle
width: parent.width - parent.height
height: parent.inputAreaHeight
x: parent.height
y: parent.inputAreaStep
drag {
target: root
minimumX: -parent.inputAreaStep
minimumY: -parent.inputAreaStep
maximumX: root.parent ? root.maximumX : 0
maximumY: root.parent ? root.maximumY + parent.inputAreaStep : 0
}
}*/
}
}

View file

@ -0,0 +1,177 @@
//
// Created by Bradley Austin Davis on 2015/06/19
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import Hifi 1.0 as Hifi
import QtQuick 2.4
import QtQuick.Controls 1.3
import QtGraphicalEffects 1.0
Hifi.AvatarInputs {
id: root
objectName: "AvatarInputs"
anchors.fill: parent
// width: 800
// height: 600
// color: "black"
readonly property int iconPadding: 5
readonly property int mirrorHeight: 215
readonly property int mirrorWidth: 265
readonly property int mirrorTopPad: iconPadding
readonly property int mirrorLeftPad: 10
readonly property int iconSize: 24
Item {
id: mirror
width: root.mirrorWidth
height: root.mirrorVisible ? root.mirrorHeight : 0
visible: root.mirrorVisible
anchors.left: parent.left
anchors.leftMargin: root.mirrorLeftPad
anchors.top: parent.top
anchors.topMargin: root.mirrorTopPad
clip: true
MouseArea {
id: hover
anchors.fill: parent
hoverEnabled: true
propagateComposedEvents: true
}
Image {
id: closeMirror
visible: hover.containsMouse
width: root.iconSize
height: root.iconSize
anchors.top: parent.top
anchors.topMargin: root.iconPadding
anchors.left: parent.left
anchors.leftMargin: root.iconPadding
source: "../images/close.svg"
MouseArea {
anchors.fill: parent
onClicked: {
root.closeMirror();
}
}
}
Image {
id: zoomIn
visible: hover.containsMouse
width: root.iconSize
height: root.iconSize
anchors.bottom: parent.bottom
anchors.bottomMargin: root.iconPadding
anchors.left: parent.left
anchors.leftMargin: root.iconPadding
source: root.mirrorZoomed ? "../images/minus.svg" : "../images/plus.svg"
MouseArea {
anchors.fill: parent
onClicked: {
root.toggleZoom();
}
}
}
}
Item {
width: root.mirrorWidth
height: 44
x: root.mirrorLeftPad
y: root.mirrorVisible ? root.mirrorTopPad + root.mirrorHeight : 5
Rectangle {
anchors.fill: parent
color: root.mirrorVisible ? (root.audioClipping ? "red" : "#696969") : "#00000000"
Image {
id: faceMute
width: root.iconSize
height: root.iconSize
visible: root.cameraEnabled
anchors.left: parent.left
anchors.leftMargin: root.iconPadding
anchors.verticalCenter: parent.verticalCenter
source: root.cameraMuted ? "../images/face-mute.svg" : "../images/face.svg"
MouseArea {
anchors.fill: parent
onClicked: {
root.toggleCameraMute()
}
onDoubleClicked: {
root.resetSensors();
}
}
}
Image {
id: micMute
width: root.iconSize
height: root.iconSize
anchors.left: root.cameraEnabled ? faceMute.right : parent.left
anchors.leftMargin: root.iconPadding
anchors.verticalCenter: parent.verticalCenter
source: root.audioMuted ? "../images/mic-mute.svg" : "../images/mic.svg"
MouseArea {
anchors.fill: parent
onClicked: {
root.toggleAudioMute()
}
}
}
Item {
id: audioMeter
anchors.verticalCenter: parent.verticalCenter
anchors.left: micMute.right
anchors.leftMargin: root.iconPadding
anchors.right: parent.right
anchors.rightMargin: root.iconPadding
height: 8
Rectangle {
id: blueRect
color: "blue"
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.left: parent.left
width: parent.width / 4
}
Rectangle {
id: greenRect
color: "green"
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.left: blueRect.right
anchors.right: redRect.left
}
Rectangle {
id: redRect
color: "red"
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.right: parent.right
width: parent.width / 5
}
Rectangle {
z: 100
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.right: parent.right
width: (1.0 - root.audioLevel) * parent.width
color: "black"
}
}
}
}
}

View file

@ -0,0 +1,258 @@
import Hifi 1.0 as Hifi
import QtQuick 2.3
import QtQuick.Controls 1.2
Item {
anchors.fill: parent
anchors.leftMargin: 300
Hifi.Stats {
id: root
objectName: "Stats"
implicitHeight: row.height
implicitWidth: row.width
anchors.horizontalCenter: parent.horizontalCenter
readonly property int sTATS_GENERAL_MIN_WIDTH: 165
readonly property int sTATS_PING_MIN_WIDTH: 190
readonly property int sTATS_GEO_MIN_WIDTH: 240
readonly property int sTATS_OCTREE_MIN_WIDTH: 410
readonly property int fontSize: 12
readonly property string fontColor: "white"
readonly property string bgColor: "#99333333"
Row {
id: row
spacing: 8
Rectangle {
width: generalCol.width + 8;
height: generalCol.height + 8;
color: root.bgColor;
MouseArea {
anchors.fill: parent
onClicked: { root.expanded = !root.expanded; }
}
Column {
id: generalCol
spacing: 4; x: 4; y: 4;
width: sTATS_GENERAL_MIN_WIDTH
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Servers: " + root.serverCount
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Avatars: " + root.avatarCount
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Framerate: " + root.framerate
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Packets In/Out: " + root.packetInCount + "/" + root.packetOutCount
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Mbps In/Out: " + root.mbpsIn.toFixed(2) + "/" + root.mbpsOut.toFixed(2)
}
}
}
Rectangle {
width: pingCol.width + 8
height: pingCol.height + 8
color: root.bgColor;
visible: root.audioPing != -2
MouseArea {
anchors.fill: parent
onClicked: { root.expanded = !root.expanded; }
}
Column {
id: pingCol
spacing: 4; x: 4; y: 4;
width: sTATS_PING_MIN_WIDTH
Text {
color: root.fontColor
font.pixelSize: root.fontSize
text: "Audio ping: " + root.audioPing
}
Text {
color: root.fontColor
font.pixelSize: root.fontSize
text: "Avatar ping: " + root.avatarPing
}
Text {
color: root.fontColor
font.pixelSize: root.fontSize
text: "Entities avg ping: " + root.entitiesPing
}
Text {
color: root.fontColor
font.pixelSize: root.fontSize
visible: root.expanded;
text: "Voxel max ping: " + 0
}
}
}
Rectangle {
width: geoCol.width + 8
height: geoCol.height + 8
color: root.bgColor;
MouseArea {
anchors.fill: parent
onClicked: { root.expanded = !root.expanded; }
}
Column {
id: geoCol
spacing: 4; x: 4; y: 4;
width: sTATS_GEO_MIN_WIDTH
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Position: " + root.position.x.toFixed(1) + ", " +
root.position.y.toFixed(1) + ", " + root.position.z.toFixed(1)
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Velocity: " + root.velocity.toFixed(1)
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Yaw: " + root.yaw.toFixed(1)
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded;
text: "Avatar Mixer: " + root.avatarMixerKbps + " kbps, " +
root.avatarMixerPps + "pps";
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded;
text: "Downloads: ";
}
}
}
Rectangle {
width: octreeCol.width + 8
height: octreeCol.height + 8
color: root.bgColor;
MouseArea {
anchors.fill: parent
onClicked: { root.expanded = !root.expanded; }
}
Column {
id: octreeCol
spacing: 4; x: 4; y: 4;
width: sTATS_OCTREE_MIN_WIDTH
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Triangles: " + root.triangles +
" / Quads: " + root.quads + " / Material Switches: " + root.materialSwitches
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded;
text: "\tMesh Parts Rendered Opaque: " + root.meshOpaque +
" / Translucent: " + root.meshTranslucent;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded;
text: "\tOpaque considered: " + root.opaqueConsidered +
" / Out of view: " + root.opaqueOutOfView + " / Too small: " + root.opaqueTooSmall;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: !root.expanded
text: "Octree Elements Server: " + root.serverElements +
" Local: " + root.localElements;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded
text: "Octree Sending Mode: " + root.sendingMode;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded
text: "Octree Packets to Process: " + root.packetStats;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded
text: "Octree Elements - ";
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded
text: "\tServer: " + root.serverElements +
" Internal: " + root.serverInternal +
" Leaves: " + root.serverLeaves;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded
text: "\tLocal: " + root.localElements +
" Internal: " + root.localInternal +
" Leaves: " + root.localLeaves;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded
text: "LOD: " + root.lodStatus;
}
}
}
}
Rectangle {
y: 250
visible: root.timingExpanded
width: perfText.width + 8
height: perfText.height + 8
color: root.bgColor;
Text {
x: 4; y: 4
id: perfText
color: root.fontColor
font.family: root.monospaceFont
font.pixelSize: 12
text: "------------------------------------------ Function " +
"--------------------------------------- --msecs- -calls--\n" +
root.timingStats;
}
}
Connections {
target: root.parent
onWidthChanged: {
root.x = root.parent.width - root.width;
}
}
}
}

View file

@ -0,0 +1,31 @@
import Hifi 1.0 as Hifi
import QtQuick 2.3 as Original
import "controls"
import "styles"
Hifi.Tooltip {
id: root
HifiConstants { id: hifi }
// FIXME adjust position based on the edges of the screen
x: (lastMousePosition.x > surfaceSize.width/2) ? lastMousePosition.x - 140 : lastMousePosition.x + 20
//y: lastMousePosition.y + 5
y: (lastMousePosition.y > surfaceSize.height/2) ? lastMousePosition.y - 70 : lastMousePosition.y + 5
implicitWidth: border.implicitWidth
implicitHeight: border.implicitHeight
Border {
id: border
anchors.fill: parent
implicitWidth: text.implicitWidth
implicitHeight: Math.max(text.implicitHeight, 64)
Text {
id: text
anchors.fill: parent
anchors.margins: 16
font.pixelSize: hifi.fonts.pixelSize / 2
text: root.text
wrapMode: Original.Text.WordWrap
}
}
}

View file

@ -66,6 +66,9 @@
#include <EntityScriptingInterface.h>
#include <ErrorDialog.h>
#include <GlowEffect.h>
#include <gpu/Batch.h>
#include <gpu/Context.h>
#include <gpu/GLBackend.h>
#include <HFActionEvent.h>
#include <HFBackEvent.h>
#include <InfoView.h>
@ -83,12 +86,14 @@
#include <PerfStat.h>
#include <PhysicsEngine.h>
#include <ProgramObject.h>
#include <RenderDeferredTask.h>
#include <ResourceCache.h>
#include <SceneScriptingInterface.h>
#include <ScriptCache.h>
#include <SettingHandle.h>
#include <SoundCache.h>
#include <TextRenderer.h>
#include <Tooltip.h>
#include <UserActivityLogger.h>
#include <UUID.h>
#include <VrMenu.h>
@ -106,11 +111,9 @@
#include "avatar/AvatarManager.h"
#include "audio/AudioToolBox.h"
#include "audio/AudioIOStatsRenderer.h"
#include "audio/AudioScope.h"
#include "devices/CameraToolBox.h"
#include "devices/DdeFaceTracker.h"
#include "devices/Faceshift.h"
#include "devices/Leapmotion.h"
@ -120,12 +123,6 @@
#include "devices/OculusManager.h"
#include "devices/TV3DManager.h"
#include "gpu/Batch.h"
#include "gpu/Context.h"
#include "gpu/GLBackend.h"
#include "RenderDeferredTask.h"
#include "scripting/AccountScriptingInterface.h"
#include "scripting/AudioDeviceScriptingInterface.h"
#include "scripting/ClipboardScriptingInterface.h"
@ -141,6 +138,7 @@
#include "SpeechRecognizer.h"
#endif
#include "ui/AvatarInputs.h"
#include "ui/DataWebDialog.h"
#include "ui/DialogsManager.h"
#include "ui/LoginDialog.h"
@ -281,8 +279,6 @@ bool setupEssentials(int& argc, char** argv) {
auto animationCache = DependencyManager::set<AnimationCache>();
auto ddeFaceTracker = DependencyManager::set<DdeFaceTracker>();
auto modelBlender = DependencyManager::set<ModelBlender>();
auto audioToolBox = DependencyManager::set<AudioToolBox>();
auto cameraToolBox = DependencyManager::set<CameraToolBox>();
auto avatarManager = DependencyManager::set<AvatarManager>();
auto lodManager = DependencyManager::set<LODManager>();
auto jsConsole = DependencyManager::set<StandAloneJSConsole>();
@ -335,8 +331,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_cursorVisible(true),
_lastMouseMove(usecTimestampNow()),
_lastMouseMoveWasSimulated(false),
_touchAvgX(0.0f),
_touchAvgY(0.0f),
_isTouchPressed(false),
_mousePressed(false),
_enableProcessOctreeThread(true),
@ -831,6 +825,7 @@ void Application::initializeUi() {
LoginDialog::registerType();
MessageDialog::registerType();
VrMenu::registerType();
Tooltip::registerType();
UpdateDialog::registerType();
auto offscreenUi = DependencyManager::get<OffscreenUi>();
@ -844,7 +839,7 @@ void Application::initializeUi() {
VrMenu::executeQueuedLambdas();
offscreenUi->setMouseTranslator([this](const QPointF& p){
if (OculusManager::isConnected()) {
glm::vec2 pos = _applicationOverlay.screenToOverlay(toGlm(p));
glm::vec2 pos = _compositor.screenToOverlay(toGlm(p));
return QPointF(pos.x, pos.y);
}
return QPointF(p);
@ -877,6 +872,7 @@ void Application::paintGL() {
OculusManager::beginFrameTiming();
}
PerformanceWarning::setSuppressShortTimings(Menu::getInstance()->isOptionChecked(MenuOption::SuppressShortTimings));
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::paintGL()");
@ -884,6 +880,12 @@ void Application::paintGL() {
{
PerformanceTimer perfTimer("renderOverlay");
/*
gpu::Context context(new gpu::GLBackend());
RenderArgs renderArgs(&context, nullptr, getViewFrustum(), lodManager->getOctreeSizeScale(),
lodManager->getBoundaryLevelAdjust(), RenderArgs::DEFAULT_RENDER_MODE,
RenderArgs::MONO, RenderArgs::RENDER_DEBUG_NONE);
*/
_applicationOverlay.renderOverlay(&renderArgs);
}
@ -969,9 +971,7 @@ void Application::paintGL() {
glPopMatrix();
renderArgs._renderMode = RenderArgs::MIRROR_RENDER_MODE;
if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
_rearMirrorTools->render(&renderArgs, true, _glWidget->mapFromGlobal(QCursor::pos()));
} else if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
renderRearViewMirror(&renderArgs, _mirrorViewRect);
}
@ -979,6 +979,7 @@ void Application::paintGL() {
auto finalFbo = DependencyManager::get<GlowEffect>()->render(&renderArgs);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
glBlitFramebuffer(0, 0, _renderResolution.x, _renderResolution.y,
@ -986,9 +987,10 @@ void Application::paintGL() {
GL_COLOR_BUFFER_BIT, GL_NEAREST);
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
_applicationOverlay.displayOverlayTexture();
_compositor.displayOverlayTexture(&renderArgs);
}
if (!OculusManager::isConnected() || OculusManager::allowSwap()) {
_glWidget->swapBuffers();
}
@ -1046,27 +1048,26 @@ void Application::resizeGL() {
} else {
renderSize = _glWidget->getDeviceSize() * getRenderResolutionScale();
}
if (_renderResolution == toGlm(renderSize)) {
return;
if (_renderResolution != toGlm(renderSize)) {
_renderResolution = toGlm(renderSize);
DependencyManager::get<TextureCache>()->setFrameBufferSize(renderSize);
resetCamerasOnResizeGL(_myCamera, _renderResolution);
glViewport(0, 0, _renderResolution.x, _renderResolution.y); // shouldn't this account for the menu???
updateProjectionMatrix();
glLoadIdentity();
}
_renderResolution = toGlm(renderSize);
DependencyManager::get<TextureCache>()->setFrameBufferSize(renderSize);
resetCamerasOnResizeGL(_myCamera, _renderResolution);
glViewport(0, 0, _renderResolution.x, _renderResolution.y); // shouldn't this account for the menu???
updateProjectionMatrix();
glLoadIdentity();
auto offscreenUi = DependencyManager::get<OffscreenUi>();
offscreenUi->resize(_glWidget->size());
auto canvasSize = _glWidget->size();
if (canvasSize != offscreenUi->getWindow()->size()) {
offscreenUi->resize(canvasSize);
}
_glWidget->makeCurrent();
// update Stats width
// let's set horizontal offset to give stats some margin to mirror
int horizontalOffset = MIRROR_VIEW_WIDTH + MIRROR_VIEW_LEFT_PADDING * 2;
Stats::getInstance()->resetWidth(_renderResolution.x, horizontalOffset);
}
void Application::updateProjectionMatrix() {
@ -1564,27 +1565,9 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
_keyboardMouseDevice.mousePressEvent(event);
if (event->button() == Qt::LeftButton) {
_mouseDragStartedX = getTrueMouseX();
_mouseDragStartedY = getTrueMouseY();
_mouseDragStarted = getTrueMouse();
_mousePressed = true;
if (mouseOnScreen()) {
if (DependencyManager::get<AudioToolBox>()->mousePressEvent(getMouseX(), getMouseY())) {
// stop propagation
return;
}
if (DependencyManager::get<CameraToolBox>()->mousePressEvent(getMouseX(), getMouseY())) {
// stop propagation
return;
}
if (_rearMirrorTools->mousePressEvent(getMouseX(), getMouseY())) {
// stop propagation
return;
}
}
// nobody handled this - make it an action event on the _window object
HFActionEvent actionEvent(HFActionEvent::startType(),
computePickRay(event->x(), event->y()));
@ -1603,17 +1586,6 @@ void Application::mouseDoublePressEvent(QMouseEvent* event, unsigned int deviceI
}
_controllerScriptingInterface.emitMouseDoublePressEvent(event);
if (activeWindow() == _window) {
if (event->button() == Qt::LeftButton) {
if (mouseOnScreen()) {
if (DependencyManager::get<CameraToolBox>()->mouseDoublePressEvent(getMouseX(), getMouseY())) {
// stop propagation
return;
}
}
}
}
}
void Application::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {
@ -1635,13 +1607,6 @@ void Application::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {
if (event->button() == Qt::LeftButton) {
_mousePressed = false;
if (Menu::getInstance()->isOptionChecked(MenuOption::Stats) && mouseOnScreen()) {
// let's set horizontal offset to give stats some margin to mirror
int horizontalOffset = MIRROR_VIEW_WIDTH;
Stats::getInstance()->checkClick(getMouseX(), getMouseY(),
getMouseDragStartedX(), getMouseDragStartedY(), horizontalOffset);
}
// fire an action end event
HFActionEvent actionEvent(HFActionEvent::endType(),
computePickRay(event->x(), event->y()));
@ -1669,22 +1634,18 @@ void Application::touchUpdateEvent(QTouchEvent* event) {
bool validTouch = false;
if (activeWindow() == _window) {
const QList<QTouchEvent::TouchPoint>& tPoints = event->touchPoints();
_touchAvgX = 0.0f;
_touchAvgY = 0.0f;
_touchAvg = vec2();
int numTouches = tPoints.count();
if (numTouches > 1) {
for (int i = 0; i < numTouches; ++i) {
_touchAvgX += (float)tPoints[i].pos().x();
_touchAvgY += (float)tPoints[i].pos().y();
_touchAvg += toGlm(tPoints[i].pos());
}
_touchAvgX /= (float)(numTouches);
_touchAvgY /= (float)(numTouches);
_touchAvg /= (float)(numTouches);
validTouch = true;
}
}
if (!_isTouchPressed) {
_touchDragStartedAvgX = _touchAvgX;
_touchDragStartedAvgY = _touchAvgY;
_touchDragStartedAvg = _touchAvg;
}
_isTouchPressed = validTouch;
}
@ -1720,8 +1681,7 @@ void Application::touchEndEvent(QTouchEvent* event) {
_keyboardMouseDevice.touchEndEvent(event);
// put any application specific touch behavior below here..
_touchDragStartedAvgX = _touchAvgX;
_touchDragStartedAvgY = _touchAvgY;
_touchDragStartedAvg = _touchAvg;
_isTouchPressed = false;
}
@ -1870,9 +1830,9 @@ void Application::setFullscreen(bool fullscreen) {
Menu::getInstance()->getActionForOption(MenuOption::Fullscreen)->setChecked(fullscreen);
}
// The following code block is useful on platforms that can have a visible
// app menu in a fullscreen window. However the OSX mechanism hides the
// application menu for fullscreen apps, so the check is not required.
// The following code block is useful on platforms that can have a visible
// app menu in a fullscreen window. However the OSX mechanism hides the
// application menu for fullscreen apps, so the check is not required.
#ifndef Q_OS_MAC
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode)) {
if (fullscreen) {
@ -1946,11 +1906,6 @@ void Application::setEnableVRMode(bool enableVRMode) {
} else {
OculusManager::abandonCalibration();
OculusManager::disconnect();
_mirrorCamera.setHmdPosition(glm::vec3());
_mirrorCamera.setHmdRotation(glm::quat());
_myCamera.setHmdPosition(glm::vec3());
_myCamera.setHmdRotation(glm::quat());
}
resizeGL();
@ -1962,44 +1917,34 @@ void Application::setLowVelocityFilter(bool lowVelocityFilter) {
bool Application::mouseOnScreen() const {
if (OculusManager::isConnected()) {
return getMouseX() >= 0 && getMouseX() <= _glWidget->getDeviceWidth() &&
getMouseY() >= 0 && getMouseY() <= _glWidget->getDeviceHeight();
ivec2 mouse = getMouse();
if (!glm::all(glm::greaterThanEqual(mouse, ivec2()))) {
return false;
}
ivec2 size = toGlm(_glWidget->getDeviceSize());
if (!glm::all(glm::lessThanEqual(mouse, size))) {
return false;
}
}
return true;
}
int Application::getMouseX() const {
ivec2 Application::getMouse() const {
if (OculusManager::isConnected()) {
glm::vec2 pos = _applicationOverlay.screenToOverlay(glm::vec2(getTrueMouseX(), getTrueMouseY()));
return pos.x;
return _compositor.screenToOverlay(getTrueMouse());
}
return getTrueMouseX();
return getTrueMouse();
}
int Application::getMouseY() const {
ivec2 Application::getMouseDragStarted() const {
if (OculusManager::isConnected()) {
glm::vec2 pos = _applicationOverlay.screenToOverlay(glm::vec2(getTrueMouseX(), getTrueMouseY()));
return pos.y;
return _compositor.screenToOverlay(getTrueMouseDragStarted());
}
return getTrueMouseY();
return getTrueMouseDragStarted();
}
int Application::getMouseDragStartedX() const {
if (OculusManager::isConnected()) {
glm::vec2 pos = _applicationOverlay.screenToOverlay(glm::vec2(getTrueMouseDragStartedX(),
getTrueMouseDragStartedY()));
return pos.x;
}
return getTrueMouseDragStartedX();
}
int Application::getMouseDragStartedY() const {
if (OculusManager::isConnected()) {
glm::vec2 pos = _applicationOverlay.screenToOverlay(glm::vec2(getTrueMouseDragStartedX(),
getTrueMouseDragStartedY()));
return pos.y;
}
return getTrueMouseDragStartedY();
ivec2 Application::getTrueMouseDragStarted() const {
return _mouseDragStarted;
}
FaceTracker* Application::getActiveFaceTracker() {
@ -2244,13 +2189,6 @@ void Application::init() {
_entityClipboardRenderer.setViewFrustum(getViewFrustum());
_entityClipboardRenderer.setTree(&_entityClipboard);
_rearMirrorTools = new RearMirrorTools(_mirrorViewRect);
connect(_rearMirrorTools, SIGNAL(closeView()), SLOT(closeMirrorView()));
connect(_rearMirrorTools, SIGNAL(restoreView()), SLOT(restoreMirrorView()));
connect(_rearMirrorTools, SIGNAL(shrinkView()), SLOT(shrinkMirrorView()));
connect(_rearMirrorTools, SIGNAL(resetView()), SLOT(resetSensors()));
// initialize the GlowEffect with our widget
bool glow = Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect);
DependencyManager::get<GlowEffect>()->init(glow);
@ -3150,7 +3088,7 @@ PickRay Application::computePickRay(float x, float y) const {
y /= size.y;
PickRay result;
if (isHMDMode()) {
getApplicationOverlay().computeHmdPickRay(glm::vec2(x, y), result.origin, result.direction);
getApplicationCompositor().computeHmdPickRay(glm::vec2(x, y), result.origin, result.direction);
} else {
if (QThread::currentThread() == activeRenderingThread) {
getDisplayViewFrustum()->computePickRay(x, y, result.origin, result.direction);
@ -3179,13 +3117,11 @@ QImage Application::renderAvatarBillboard(RenderArgs* renderArgs) {
renderRearViewMirror(renderArgs, QRect(0, _glWidget->getDeviceHeight() - BILLBOARD_SIZE,
BILLBOARD_SIZE, BILLBOARD_SIZE),
true);
QImage image(BILLBOARD_SIZE, BILLBOARD_SIZE, QImage::Format_ARGB32);
glReadPixels(0, 0, BILLBOARD_SIZE, BILLBOARD_SIZE, GL_BGRA, GL_UNSIGNED_BYTE, image.bits());
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
return image;
}
@ -3381,9 +3317,8 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
// flip x if in mirror mode (also requires reversing winding order for backface culling)
if (theCamera.getMode() == CAMERA_MODE_MIRROR) {
glScalef(-1.0f, 1.0f, 1.0f);
glFrontFace(GL_CW);
//glScalef(-1.0f, 1.0f, 1.0f);
//glFrontFace(GL_CW);
} else {
glFrontFace(GL_CCW);
}
@ -3406,7 +3341,7 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
viewTransform.setTranslation(theCamera.getPosition());
viewTransform.setRotation(rotation);
if (theCamera.getMode() == CAMERA_MODE_MIRROR) {
viewTransform.setScale(Transform::Vec3(-1.0f, 1.0f, 1.0f));
// viewTransform.setScale(Transform::Vec3(-1.0f, 1.0f, 1.0f));
}
if (renderArgs->_renderSide != RenderArgs::MONO) {
glm::mat4 invView = glm::inverse(_untranslatedViewMatrix);
@ -3689,7 +3624,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
_mirrorCamera.setPosition(_myAvatar->getPosition() +
_myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f) * BILLBOARD_DISTANCE * _myAvatar->getScale());
} else if (RearMirrorTools::rearViewZoomLevel.get() == BODY) {
} else if (!AvatarInputs::getInstance()->mirrorZoomed()) {
_mirrorCamera.setPosition(_myAvatar->getChestPosition() +
_myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_REARVIEW_BODY_DISTANCE * _myAvatar->getScale());
@ -3739,10 +3674,6 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
displaySide(renderArgs, _mirrorCamera, true, billboard);
glPopMatrix();
if (!billboard) {
_rearMirrorTools->render(renderArgs, false, _glWidget->mapFromGlobal(QCursor::pos()));
}
// reset Viewport and projection matrix
glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);
glDisable(GL_SCISSOR_TEST);
@ -4749,11 +4680,9 @@ void Application::postLambdaEvent(std::function<void()> f) {
}
void Application::initPlugins() {
OculusManager::init();
}
void Application::shutdownPlugins() {
OculusManager::deinit();
}
glm::vec3 Application::getHeadPosition() const {
@ -4773,12 +4702,8 @@ QSize Application::getDeviceSize() const {
return _glWidget->getDeviceSize();
}
int Application::getTrueMouseX() const {
return _glWidget->mapFromGlobal(QCursor::pos()).x();
}
int Application::getTrueMouseY() const {
return _glWidget->mapFromGlobal(QCursor::pos()).y();
ivec2 Application::getTrueMouse() const {
return toGlm(_glWidget->mapFromGlobal(QCursor::pos()));
}
bool Application::isThrottleRendering() const {
@ -4808,3 +4733,18 @@ qreal Application::getDevicePixelRatio() {
return _window ? _window->windowHandle()->devicePixelRatio() : 1.0;
}
mat4 Application::getEyeProjection(int eye) const {
if (isHMDMode()) {
return OculusManager::getEyeProjection(eye);
}
return _viewFrustum.getProjection();
}
mat4 Application::getEyePose(int eye) const {
if (isHMDMode()) {
return OculusManager::getEyePose(eye);
}
return mat4();
}

View file

@ -62,12 +62,12 @@
#include "ui/ModelsBrowser.h"
#include "ui/NodeBounds.h"
#include "ui/OctreeStatsDialog.h"
#include "ui/RearMirrorTools.h"
#include "ui/SnapshotShareDialog.h"
#include "ui/LodToolsDialog.h"
#include "ui/LogDialog.h"
#include "ui/overlays/Overlays.h"
#include "ui/ApplicationOverlay.h"
#include "ui/ApplicationCompositor.h"
#include "ui/RunningScriptsWidget.h"
#include "ui/ToolWindow.h"
#include "ui/UserInputMapper.h"
@ -221,15 +221,21 @@ public:
const glm::vec3& getMouseRayOrigin() const { return _mouseRayOrigin; }
const glm::vec3& getMouseRayDirection() const { return _mouseRayDirection; }
bool mouseOnScreen() const;
int getMouseX() const;
int getMouseY() const;
glm::ivec2 getTrueMousePosition() const;
int getTrueMouseX() const;
int getTrueMouseY() const;
int getMouseDragStartedX() const;
int getMouseDragStartedY() const;
int getTrueMouseDragStartedX() const { return _mouseDragStartedX; }
int getTrueMouseDragStartedY() const { return _mouseDragStartedY; }
ivec2 getMouse() const;
ivec2 getTrueMouse() const;
ivec2 getMouseDragStarted() const;
ivec2 getTrueMouseDragStarted() const;
// TODO get rid of these and use glm types directly
int getMouseX() const { return getMouse().x; }
int getMouseY() const { return getMouse().y; }
int getTrueMouseX() const { return getTrueMouse().x; }
int getTrueMouseY() const { return getTrueMouse().y; }
int getMouseDragStartedX() const { return getMouseDragStarted().x; }
int getMouseDragStartedY() const { return getMouseDragStarted().y; }
int getTrueMouseDragStartedX() const { return getTrueMouseDragStarted().x; }
int getTrueMouseDragStartedY() const { return getTrueMouseDragStarted().y; }
bool getLastMouseMoveWasSimulated() const { return _lastMouseMoveWasSimulated; }
FaceTracker* getActiveFaceTracker();
@ -238,6 +244,8 @@ public:
QSystemTrayIcon* getTrayIcon() { return _trayIcon; }
ApplicationOverlay& getApplicationOverlay() { return _applicationOverlay; }
const ApplicationOverlay& getApplicationOverlay() const { return _applicationOverlay; }
ApplicationCompositor& getApplicationCompositor() { return _compositor; }
const ApplicationCompositor& getApplicationCompositor() const { return _compositor; }
Overlays& getOverlays() { return _overlays; }
float getFps() const { return _fps; }
@ -330,6 +338,9 @@ public:
bool isHMDMode() const;
glm::quat getHeadOrientation() const;
glm::vec3 getHeadPosition() const;
glm::mat4 getHeadPose() const;
glm::mat4 getEyePose(int eye) const;
glm::mat4 getEyeProjection(int eye) const;
QRect getDesirableApplicationGeometry();
RunningScriptsWidget* getRunningScriptsWidget() { return _runningScriptsWidget; }
@ -548,21 +559,17 @@ private:
OctreeQuery _octreeQuery; // NodeData derived class for querying octee cells from octree servers
KeyboardMouseDevice _keyboardMouseDevice; // Default input device, the good old keyboard mouse and maybe touchpad
UserInputMapper _userInputMapper; // User input mapper allowing to mapp different real devices to the action channels that the application has to offer
MyAvatar* _myAvatar; // TODO: move this and relevant code to AvatarManager (or MyAvatar as the case may be)
Camera _myCamera; // My view onto the world
KeyboardMouseDevice _keyboardMouseDevice; // Default input device, the good old keyboard mouse and maybe touchpad
UserInputMapper _userInputMapper; // User input mapper allowing to mapp different real devices to the action channels that the application has to offer
MyAvatar* _myAvatar; // TODO: move this and relevant code to AvatarManager (or MyAvatar as the case may be)
Camera _myCamera; // My view onto the world
Camera _mirrorCamera; // Cammera for mirror view
QRect _mirrorViewRect;
RearMirrorTools* _rearMirrorTools;
Setting::Handle<bool> _firstRun;
Setting::Handle<QString> _previousScriptLocation;
Setting::Handle<QString> _scriptsLocationHandle;
Setting::Handle<float> _fieldOfView;
Setting::Handle<bool> _firstRun;
Setting::Handle<QString> _previousScriptLocation;
Setting::Handle<QString> _scriptsLocationHandle;
Setting::Handle<float> _fieldOfView;
Transform _viewTransform;
glm::mat4 _untranslatedViewMatrix;
@ -580,18 +587,17 @@ private:
Environment _environment;
bool _cursorVisible;
int _mouseDragStartedX;
int _mouseDragStartedY;
ivec2 _mouseDragStarted;
quint64 _lastMouseMove;
bool _lastMouseMoveWasSimulated;
glm::vec3 _mouseRayOrigin;
glm::vec3 _mouseRayDirection;
float _touchAvgX;
float _touchAvgY;
float _touchDragStartedAvgX;
float _touchDragStartedAvgY;
vec2 _touchAvg;
vec2 _touchDragStartedAvg;
bool _isTouchPressed; // true if multitouch has been pressed (clear when finished)
bool _mousePressed; // true if mouse has been pressed (clear when finished)
@ -669,6 +675,7 @@ private:
Overlays _overlays;
ApplicationOverlay _applicationOverlay;
ApplicationCompositor _compositor;
};
#endif // hifi_Application_h

View file

@ -49,8 +49,6 @@ Camera::Camera() :
_mode(CAMERA_MODE_THIRD_PERSON),
_position(0.0f, 0.0f, 0.0f),
_projection(glm::perspective(glm::radians(DEFAULT_FIELD_OF_VIEW_DEGREES), 16.0f/9.0f, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP)),
_hmdPosition(),
_hmdRotation(),
_isKeepLookingAt(false),
_lookingAt(0.0f, 0.0f, 0.0f)
{
@ -74,20 +72,6 @@ void Camera::setRotation(const glm::quat& rotation) {
}
}
void Camera::setHmdPosition(const glm::vec3& hmdPosition) {
_hmdPosition = hmdPosition;
if (_isKeepLookingAt) {
lookAt(_lookingAt);
}
}
void Camera::setHmdRotation(const glm::quat& hmdRotation) {
_hmdRotation = hmdRotation;
if (_isKeepLookingAt) {
lookAt(_lookingAt);
}
}
void Camera::setMode(CameraMode mode) {
_mode = mode;
emit modeUpdated(modeToString(mode));

View file

@ -45,26 +45,22 @@ public:
void setRotation(const glm::quat& rotation);
void setProjection(const glm::mat4 & projection);
void setHmdPosition(const glm::vec3& hmdPosition);
void setHmdRotation(const glm::quat& hmdRotation);
void setMode(CameraMode m);
glm::quat getRotation() const { return _rotation * _hmdRotation; }
glm::quat getRotation() const { return _rotation; }
const glm::mat4& getProjection() const { return _projection; }
const glm::vec3& getHmdPosition() const { return _hmdPosition; }
const glm::quat& getHmdRotation() const { return _hmdRotation; }
CameraMode getMode() const { return _mode; }
public slots:
QString getModeString() const;
void setModeString(const QString& mode);
glm::vec3 getPosition() const { return _position + _hmdPosition; }
glm::vec3 getPosition() const { return _position; }
void setPosition(const glm::vec3& position);
void setOrientation(const glm::quat& orientation) { setRotation(orientation); }
glm::quat getOrientation() const { return getRotation(); }
void setOrientation(const glm::quat& orientation) { setRotation(orientation); }
PickRay computePickRay(float x, float y);
// These only work on independent cameras
@ -86,8 +82,6 @@ private:
glm::vec3 _position;
glm::quat _rotation;
glm::mat4 _projection;
glm::vec3 _hmdPosition;
glm::quat _hmdRotation;
bool _isKeepLookingAt;
glm::vec3 _lookingAt;
};

View file

@ -1,114 +0,0 @@
//
// AudioToolBox.cpp
// interface/src/audio
//
// Created by Stephen Birarda on 2014-12-16.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InterfaceConfig.h"
#include <AudioClient.h>
#include <GLCanvas.h>
#include <PathUtils.h>
#include <GeometryCache.h>
#include <gpu/GLBackend.h>
#include "Application.h"
#include "AudioToolBox.h"
// Mute icon configration
const int MUTE_ICON_SIZE = 24;
AudioToolBox::AudioToolBox() :
_iconPulseTimeReference(usecTimestampNow())
{
}
bool AudioToolBox::mousePressEvent(int x, int y) {
if (_iconBounds.contains(x, y)) {
DependencyManager::get<AudioClient>()->toggleMute();
return true;
}
return false;
}
void AudioToolBox::render(int x, int y, int padding, bool boxed) {
glEnable(GL_TEXTURE_2D);
if (!_micTexture) {
_micTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/mic.svg");
}
if (!_muteTexture) {
_muteTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/mic-mute.svg");
}
if (_boxTexture) {
_boxTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/audio-box.svg");
}
auto audioIO = DependencyManager::get<AudioClient>();
if (boxed) {
bool isClipping = ((audioIO->getTimeSinceLastClip() > 0.0f) && (audioIO->getTimeSinceLastClip() < 1.0f));
const int BOX_LEFT_PADDING = 5;
const int BOX_TOP_PADDING = 10;
const int BOX_WIDTH = 266;
const int BOX_HEIGHT = 44;
QRect boxBounds = QRect(x - BOX_LEFT_PADDING, y - BOX_TOP_PADDING, BOX_WIDTH, BOX_HEIGHT);
glm::vec4 quadColor;
if (isClipping) {
quadColor = glm::vec4(1.0f, 0.0f, 0.0f, 1.0f);
} else {
quadColor = glm::vec4(0.41f, 0.41f, 0.41f, 1.0f);
}
glm::vec2 topLeft(boxBounds.left(), boxBounds.top());
glm::vec2 bottomRight(boxBounds.right(), boxBounds.bottom());
static const glm::vec2 texCoordTopLeft(1,1);
static const glm::vec2 texCoordBottomRight(0, 0);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_boxTexture));
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor);
}
float iconColor = 1.0f;
_iconBounds = QRect(x + padding, y, MUTE_ICON_SIZE, MUTE_ICON_SIZE);
if (!audioIO->isMuted()) {
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_micTexture));
iconColor = 1.0f;
} else {
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_muteTexture));
// Make muted icon pulsate
static const float PULSE_MIN = 0.4f;
static const float PULSE_MAX = 1.0f;
static const float PULSE_FREQUENCY = 1.0f; // in Hz
qint64 now = usecTimestampNow();
if (now - _iconPulseTimeReference > (qint64)USECS_PER_SECOND) {
// Prevents t from getting too big, which would diminish glm::cos precision
_iconPulseTimeReference = now - ((now - _iconPulseTimeReference) % USECS_PER_SECOND);
}
float t = (float)(now - _iconPulseTimeReference) / (float)USECS_PER_SECOND;
float pulseFactor = (glm::cos(t * PULSE_FREQUENCY * 2.0f * PI) + 1.0f) / 2.0f;
iconColor = PULSE_MIN + (PULSE_MAX - PULSE_MIN) * pulseFactor;
}
glm::vec4 quadColor(iconColor, iconColor, iconColor, 1.0f);
glm::vec2 topLeft(_iconBounds.left(), _iconBounds.top());
glm::vec2 bottomRight(_iconBounds.right(), _iconBounds.bottom());
glm::vec2 texCoordTopLeft(1,1);
glm::vec2 texCoordBottomRight(0,0);
if (_boxQuadID == GeometryCache::UNKNOWN_ID) {
_boxQuadID = DependencyManager::get<GeometryCache>()->allocateID();
}
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor, _boxQuadID);
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
}

View file

@ -1,36 +0,0 @@
//
// AudioToolBox.h
// interface/src/audio
//
// Created by Stephen Birarda on 2014-12-16.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AudioToolBox_h
#define hifi_AudioToolBox_h
#include <DependencyManager.h>
#include <GeometryCache.h>
#include <QOpenGLTexture>
class AudioToolBox : public Dependency {
SINGLETON_DEPENDENCY
public:
void render(int x, int y, int padding, bool boxed);
bool mousePressEvent(int x, int y);
protected:
AudioToolBox();
private:
gpu::TexturePointer _micTexture;
gpu::TexturePointer _muteTexture;
gpu::TexturePointer _boxTexture;
int _boxQuadID = GeometryCache::UNKNOWN_ID;
QRect _iconBounds;
qint64 _iconPulseTimeReference = 0;
};
#endif // hifi_AudioToolBox_h

View file

@ -124,18 +124,25 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
if (!(_isFaceTrackerConnected || billboard)) {
// Update eye saccades
const float AVERAGE_MICROSACCADE_INTERVAL = 0.50f;
const float AVERAGE_SACCADE_INTERVAL = 4.0f;
const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
const float AVERAGE_SACCADE_INTERVAL = 6.0f;
const float MICROSACCADE_MAGNITUDE = 0.002f;
const float SACCADE_MAGNITUDE = 0.04f;
const float MAXIMUM_SACCADE_SPEED = 0.8f;
if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
_saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
} else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
_saccadeTarget = SACCADE_MAGNITUDE * randVector();
}
_saccade += (_saccadeTarget - _saccade) * 0.50f;
glm::vec3 saccadeDelta = (_saccadeTarget - _saccade) * 0.5f;
float speed = glm::length(saccadeDelta) / deltaTime;
if (speed > MAXIMUM_SACCADE_SPEED) {
saccadeDelta = saccadeDelta * MAXIMUM_SACCADE_SPEED / speed;
}
_saccade += saccadeDelta;
// Detect transition from talking to not; force blink after that and a delay
bool forceBlink = false;
const float TALKING_LOUDNESS = 100.0f;

View file

@ -1468,7 +1468,7 @@ void MyAvatar::maybeUpdateBillboard() {
}
gpu::Context context(new gpu::GLBackend());
RenderArgs renderArgs(&context);
QImage image = Application::getInstance()->renderAvatarBillboard(&renderArgs);
QImage image = qApp->renderAvatarBillboard(&renderArgs);
_billboard.clear();
QBuffer buffer(&_billboard);
buffer.open(QIODevice::WriteOnly);
@ -1567,7 +1567,6 @@ void MyAvatar::renderLaserPointers() {
//Gets the tip position for the laser pointer
glm::vec3 MyAvatar::getLaserPointerTipPosition(const PalmData* palm) {
const ApplicationOverlay& applicationOverlay = Application::getInstance()->getApplicationOverlay();
glm::vec3 direction = glm::normalize(palm->getTipPosition() - palm->getPosition());
glm::vec3 position = palm->getPosition();
@ -1576,7 +1575,8 @@ glm::vec3 MyAvatar::getLaserPointerTipPosition(const PalmData* palm) {
glm::vec3 result;
if (applicationOverlay.calculateRayUICollisionPoint(position, direction, result)) {
const auto& compositor = qApp->getApplicationCompositor();
if (compositor.calculateRayUICollisionPoint(position, direction, result)) {
return result;
}

View file

@ -1,121 +0,0 @@
//
// CameraToolBox.cpp
// interface/src/devices
//
// Created by David Rowe on 30 Apr 2015.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InterfaceConfig.h"
#include <GLCanvas.h>
#include <PathUtils.h>
#include "gpu/GLBackend.h"
#include "Application.h"
#include "CameraToolBox.h"
#include "FaceTracker.h"
CameraToolBox::CameraToolBox() :
_iconPulseTimeReference(usecTimestampNow()),
_doubleClickTimer(NULL)
{
}
CameraToolBox::~CameraToolBox() {
if (_doubleClickTimer) {
_doubleClickTimer->stop();
delete _doubleClickTimer;
}
}
bool CameraToolBox::mousePressEvent(int x, int y) {
if (_iconBounds.contains(x, y)) {
if (!_doubleClickTimer) {
// Toggle mute after waiting to check that it's not a double-click.
const int DOUBLE_CLICK_WAIT = 200; // ms
_doubleClickTimer = new QTimer(this);
connect(_doubleClickTimer, SIGNAL(timeout()), this, SLOT(toggleMute()));
_doubleClickTimer->setSingleShot(true);
_doubleClickTimer->setInterval(DOUBLE_CLICK_WAIT);
_doubleClickTimer->start();
}
return true;
}
return false;
}
bool CameraToolBox::mouseDoublePressEvent(int x, int y) {
if (_iconBounds.contains(x, y)) {
if (_doubleClickTimer) {
_doubleClickTimer->stop();
delete _doubleClickTimer;
_doubleClickTimer = NULL;
}
Application::getInstance()->resetSensors();
return true;
}
return false;
}
void CameraToolBox::toggleMute() {
delete _doubleClickTimer;
_doubleClickTimer = NULL;
FaceTracker* faceTracker = Application::getInstance()->getSelectedFaceTracker();
if (faceTracker) {
faceTracker->toggleMute();
}
}
void CameraToolBox::render(int x, int y, bool boxed) {
glEnable(GL_TEXTURE_2D);
if (!_enabledTexture) {
_enabledTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/face.svg");
}
if (!_mutedTexture) {
_mutedTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/face-mute.svg");
}
const int MUTE_ICON_SIZE = 24;
_iconBounds = QRect(x, y, MUTE_ICON_SIZE, MUTE_ICON_SIZE);
float iconColor = 1.0f;
if (!Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking)) {
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_enabledTexture));
} else {
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_mutedTexture));
// Make muted icon pulsate
static const float PULSE_MIN = 0.4f;
static const float PULSE_MAX = 1.0f;
static const float PULSE_FREQUENCY = 1.0f; // in Hz
qint64 now = usecTimestampNow();
if (now - _iconPulseTimeReference > (qint64)USECS_PER_SECOND) {
// Prevents t from getting too big, which would diminish glm::cos precision
_iconPulseTimeReference = now - ((now - _iconPulseTimeReference) % USECS_PER_SECOND);
}
float t = (float)(now - _iconPulseTimeReference) / (float)USECS_PER_SECOND;
float pulseFactor = (glm::cos(t * PULSE_FREQUENCY * 2.0f * PI) + 1.0f) / 2.0f;
iconColor = PULSE_MIN + (PULSE_MAX - PULSE_MIN) * pulseFactor;
}
glm::vec4 quadColor(iconColor, iconColor, iconColor, 1.0f);
glm::vec2 topLeft(_iconBounds.left(), _iconBounds.top());
glm::vec2 bottomRight(_iconBounds.right(), _iconBounds.bottom());
glm::vec2 texCoordTopLeft(1,1);
glm::vec2 texCoordBottomRight(0,0);
if (_boxQuadID == GeometryCache::UNKNOWN_ID) {
_boxQuadID = DependencyManager::get<GeometryCache>()->allocateID();
}
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor, _boxQuadID);
glDisable(GL_TEXTURE_2D);
}

View file

@ -1,45 +0,0 @@
//
// CameraToolBox.h
// interface/src/devices
//
// Created by David Rowe on 30 Apr 2015.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_CameraToolBox_h
#define hifi_CameraToolBox_h
#include <QObject>
#include <DependencyManager.h>
#include <GeometryCache.h>
class CameraToolBox : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
public:
void render(int x, int y, bool boxed);
bool mousePressEvent(int x, int y);
bool mouseDoublePressEvent(int x, int y);
protected:
CameraToolBox();
~CameraToolBox();
private slots:
void toggleMute();
private:
gpu::TexturePointer _enabledTexture;
gpu::TexturePointer _mutedTexture;
int _boxQuadID = GeometryCache::UNKNOWN_ID;
QRect _iconBounds;
qint64 _iconPulseTimeReference = 0;
QTimer* _doubleClickTimer;
};
#endif // hifi_CameraToolBox_h

View file

@ -212,14 +212,10 @@ bool OculusManager::_eyePerFrameMode = false;
ovrEyeType OculusManager::_lastEyeRendered = ovrEye_Count;
ovrSizei OculusManager::_recommendedTexSize = { 0, 0 };
float OculusManager::_offscreenRenderScale = 1.0;
static glm::mat4 _combinedProjection;
static ovrPosef _eyeRenderPoses[ovrEye_Count];
ovrRecti OculusManager::_eyeViewports[ovrEye_Count];
void OculusManager::init() {
}
void OculusManager::deinit() {
}
void OculusManager::connect(QOpenGLContext* shareContext) {
qCDebug(interfaceapp) << "Oculus SDK" << OVR_VERSION_STRING;
@ -269,7 +265,13 @@ void OculusManager::connect(QOpenGLContext* shareContext) {
for_each_eye([&](ovrEyeType eye) {
_eyeFov[eye] = _ovrHmd->DefaultEyeFov[eye];
_eyeProjection[eye] = toGlm(ovrMatrix4f_Projection(_eyeFov[eye],
DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
});
ovrFovPort combinedFov = _ovrHmd->MaxEyeFov[0];
combinedFov.RightTan = _ovrHmd->MaxEyeFov[1].RightTan;
_combinedProjection = toGlm(ovrMatrix4f_Projection(combinedFov,
DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
_recommendedTexSize = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left, _eyeFov[ovrEye_Left], 1.0f);
_renderTargetSize = { _recommendedTexSize.w * 2, _recommendedTexSize.h };
@ -528,18 +530,12 @@ void OculusManager::endFrameTiming() {
//Sets the camera FoV and aspect ratio
void OculusManager::configureCamera(Camera& camera) {
ovrFovPort fov;
if (_activeEye == ovrEye_Count) {
// When not rendering, provide a FOV encompasing both eyes
fov = _eyeFov[0];
fov.RightTan = _eyeFov[1].RightTan;
} else {
// When rendering, provide the exact FOV
fov = _eyeFov[_activeEye];
}
// Convert the FOV to the correct projection matrix
glm::mat4 projection = toGlm(ovrMatrix4f_Projection(fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
camera.setProjection(projection);
camera.setProjection(_combinedProjection);
return;
}
camera.setProjection(_eyeProjection[_activeEye]);
}
//Displays everything for the oculus, frame timing must be active
@ -615,7 +611,6 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
#ifndef Q_OS_WIN
ovrHmd_BeginFrame(_ovrHmd, _frameIndex);
#endif
static ovrPosef eyeRenderPose[ovrEye_Count];
//Render each eye into an fbo
for_each_eye(_ovrHmd, [&](ovrEyeType eye){
// If we're in eye-per-frame mode, only render one eye
@ -625,29 +620,17 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
return;
}
_lastEyeRendered = _activeEye = eye;
eyeRenderPose[eye] = eyePoses[eye];
_eyeRenderPoses[eye] = eyePoses[eye];
// Set the camera rotation for this eye
orientation.x = eyeRenderPose[eye].Orientation.x;
orientation.y = eyeRenderPose[eye].Orientation.y;
orientation.z = eyeRenderPose[eye].Orientation.z;
orientation.w = eyeRenderPose[eye].Orientation.w;
// Update the application camera with the latest HMD position
whichCamera.setHmdPosition(trackerPosition);
whichCamera.setHmdRotation(orientation);
vec3 eyePosition = toGlm(_eyeRenderPoses[eye].Position);
eyePosition = whichCamera.getRotation() * eyePosition;
quat eyeRotation = toGlm(_eyeRenderPoses[eye].Orientation);
// Update our camera to what the application camera is doing
_camera->setRotation(whichCamera.getRotation());
_camera->setPosition(whichCamera.getPosition());
_camera->setRotation(whichCamera.getRotation() * eyeRotation);
_camera->setPosition(whichCamera.getPosition() + eyePosition);
configureCamera(*_camera);
// Store the latest left and right eye render locations for things that need to know
glm::vec3 thisEyePosition = position + trackerPosition +
(bodyOrientation * glm::quat(orientation.x, orientation.y, orientation.z, orientation.w) *
glm::vec3(_eyeRenderDesc[eye].HmdToEyeViewOffset.x, _eyeRenderDesc[eye].HmdToEyeViewOffset.y, _eyeRenderDesc[eye].HmdToEyeViewOffset.z));
_eyePositions[eye] = thisEyePosition;
_camera->update(1.0f / Application::getInstance()->getFps());
glMatrixMode(GL_PROJECTION);
@ -662,8 +645,8 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
renderArgs->_renderSide = RenderArgs::MONO;
qApp->displaySide(renderArgs, *_camera, false);
qApp->getApplicationOverlay().displayOverlayTextureHmd(*_camera);
qApp->displaySide(renderArgs, *_camera);
qApp->getApplicationCompositor().displayOverlayTextureHmd(renderArgs, eye);
});
_activeEye = ovrEye_Count;
@@ -709,7 +692,7 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
// Submit the frame to the Oculus SDK for timewarp and distortion
for_each_eye([&](ovrEyeType eye) {
_sceneLayer.RenderPose[eye] = eyeRenderPose[eye];
_sceneLayer.RenderPose[eye] = _eyeRenderPoses[eye];
});
auto header = &_sceneLayer.Header;
ovrResult res = ovrHmd_SubmitFrame(_ovrHmd, _frameIndex, nullptr, &header, 1);
@@ -725,6 +708,7 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
// rendering to complete before it starts the distortion rendering,
// but without triggering a CPU/GPU synchronization
glWaitSync(syncObject, 0, GL_TIMEOUT_IGNORED);
glDeleteSync(syncObject);
GLuint textureId = gpu::GLBackend::getTextureID(finalFbo->getRenderBuffer(0));
for_each_eye([&](ovrEyeType eye) {
@@ -734,7 +718,7 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
});
// restore our normal viewport
ovrHmd_EndFrame(_ovrHmd, eyeRenderPose, _eyeTextures);
ovrHmd_EndFrame(_ovrHmd, _eyeRenderPoses, _eyeTextures);
glCanvas->makeCurrent();
#endif
@@ -835,3 +819,10 @@ int OculusManager::getHMDScreen() {
#endif
}
mat4 OculusManager::getEyeProjection(int eye) {
return _eyeProjection[eye];
}
mat4 OculusManager::getEyePose(int eye) {
return toGlm(_eyeRenderPoses[eye]);
}

View file

@@ -38,8 +38,6 @@ struct MirrorFramebufferWrapper;
/// Handles interaction with the Oculus Rift.
class OculusManager {
public:
static void init();
static void deinit();
static void connect(QOpenGLContext* shareContext);
static void disconnect();
static bool isConnected();
@@ -63,6 +61,9 @@ public:
static glm::vec3 getRightEyePosition() { return _eyePositions[ovrEye_Right]; }
static int getHMDScreen();
static glm::mat4 getEyeProjection(int eye);
static glm::mat4 getEyePose(int eye);
private:
static void initSdk();

View file

@@ -512,7 +512,7 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseLasers)
|| Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode)) {
pos = qApp->getApplicationOverlay().getPalmClickLocation(palm);
pos = qApp->getApplicationCompositor().getPalmClickLocation(palm);
} else {
// Get direction relative to avatar orientation
glm::vec3 direction = glm::inverse(avatar->getOrientation()) * palm->getFingerDirection();

View file

@@ -12,6 +12,7 @@
#include "InterfaceConfig.h"
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <GlowEffect.h>
#include "gpu/GLBackend.h"
@@ -106,21 +107,20 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
_activeEye = &eye;
glViewport(portalX, portalY, portalW, portalH);
glScissor(portalX, portalY, portalW, portalH);
glm::mat4 projection = glm::frustum<float>(eye.left, eye.right, eye.bottom, eye.top, nearZ, farZ);
float fov = atan(1.0f / projection[1][1]);
projection = glm::translate(projection, vec3(eye.modelTranslation, 0, 0));
eyeCamera.setProjection(projection);
glMatrixMode(GL_PROJECTION);
glLoadIdentity(); // reset projection matrix
glFrustum(eye.left, eye.right, eye.bottom, eye.top, nearZ, farZ); // set left view frustum
GLfloat p[4][4];
// Really?
glGetFloatv(GL_PROJECTION_MATRIX, &(p[0][0]));
float cotangent = p[1][1];
GLfloat fov = atan(1.0f / cotangent);
glTranslatef(eye.modelTranslation, 0.0, 0.0); // translate to cancel parallax
glLoadMatrixf(glm::value_ptr(projection));
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
renderArgs->_renderSide = RenderArgs::MONO;
qApp->displaySide(renderArgs, eyeCamera, false);
qApp->getApplicationOverlay().displayOverlayTextureStereo(whichCamera, _aspect, fov);
qApp->getApplicationCompositor().displayOverlayTexture(renderArgs);
_activeEye = NULL;
}, [&]{
// render right side view

View file

@@ -25,9 +25,9 @@ bool HMDScriptingInterface::getHUDLookAtPosition3D(glm::vec3& result) const {
glm::vec3 direction = orientation * glm::vec3(0.0f, 0.0f, -1.0f);
ApplicationOverlay& applicationOverlay = Application::getInstance()->getApplicationOverlay();
const auto& compositor = Application::getInstance()->getApplicationCompositor();
return applicationOverlay.calculateRayUICollisionPoint(position, direction, result);
return compositor.calculateRayUICollisionPoint(position, direction, result);
}
QScriptValue HMDScriptingInterface::getHUDLookAtPosition2D(QScriptContext* context, QScriptEngine* engine) {
@@ -40,7 +40,7 @@ QScriptValue HMDScriptingInterface::getHUDLookAtPosition2D(QScriptContext* conte
glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * (hudIntersection - sphereCenter);
glm::quat rotation = ::rotationBetween(glm::vec3(0.0f, 0.0f, -1.0f), direction);
glm::vec3 eulers = ::safeEulerAngles(rotation);
return qScriptValueFromValue<glm::vec2>(engine, Application::getInstance()->getApplicationOverlay()
return qScriptValueFromValue<glm::vec2>(engine, Application::getInstance()->getApplicationCompositor()
.sphericalToOverlay(glm::vec2(eulers.y, -eulers.x)));
}
return QScriptValue::NullValue;

View file

@@ -27,11 +27,11 @@ public:
static QScriptValue getHUDLookAtPosition3D(QScriptContext* context, QScriptEngine* engine);
public slots:
void toggleMagnifier() { Application::getInstance()->getApplicationOverlay().toggleMagnifier(); };
void toggleMagnifier() { Application::getInstance()->getApplicationCompositor().toggleMagnifier(); };
private:
HMDScriptingInterface() {};
bool getMagnifier() const { return Application::getInstance()->getApplicationOverlay().hasMagnifier(); };
bool getMagnifier() const { return Application::getInstance()->getApplicationCompositor().hasMagnifier(); };
bool isHMDMode() const { return Application::getInstance()->isHMDMode(); }
bool getHUDLookAtPosition3D(glm::vec3& result) const;

View file

@@ -11,6 +11,7 @@
//
#include "starfield/renderer/Renderer.h"
#include "Application.h"
using namespace starfield;
@@ -52,6 +53,10 @@ void Renderer::render(float perspective, float aspect, mat4 const& orientation,
matrix[3][1] = 0.0f;
matrix[3][2] = 0.0f;
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadMatrixf(glm::value_ptr(qApp->getDisplayViewFrustum()->getProjection()));
glMatrixMode(GL_MODELVIEW);
// extract local z vector
vec3 ahead = vec3(matrix[2]);
@@ -74,6 +79,10 @@ void Renderer::render(float perspective, float aspect, mat4 const& orientation,
floodFill(cursor, TileSelection(*this, _tileArray, _tileArray + _tiling.getTileCount(), (TileSelection::Cursor*) _batchCountArray));
this->glBatch(glm::value_ptr(matrix), prepareBatch((unsigned*) _batchOffs, _outIndexPos), alpha);
glMatrixMode(GL_PROJECTION);
glPopMatrix();
glMatrixMode(GL_MODELVIEW);
}
// renderer construction

View file

@@ -0,0 +1,790 @@
//
// ApplicationCompositor.cpp
// interface/src/ui/overlays
//
// Created by Benjamin Arnold on 5/27/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InterfaceConfig.h"
#include "ApplicationCompositor.h"
#include <glm/gtc/type_ptr.hpp>
#include <avatar/AvatarManager.h>
#include <gpu/GLBackend.h>
#include <CursorManager.h>
#include <Tooltip.h>
#include "Application.h"
// Used to animate the magnification windows
static const float MAG_SPEED = 0.08f;
static const quint64 MSECS_TO_USECS = 1000ULL;
static const quint64 TOOLTIP_DELAY = 500 * MSECS_TO_USECS;
static const float WHITE_TEXT[] = { 0.93f, 0.93f, 0.93f };
static const float RETICLE_COLOR[] = { 0.0f, 198.0f / 255.0f, 244.0f / 255.0f };
static const float reticleSize = TWO_PI / 100.0f;
static const float CONNECTION_STATUS_BORDER_COLOR[] = { 1.0f, 0.0f, 0.0f };
static const float CONNECTION_STATUS_BORDER_LINE_WIDTH = 4.0f;
static const float CURSOR_PIXEL_SIZE = 32.0f;
static const float MOUSE_PITCH_RANGE = 1.0f * PI;
static const float MOUSE_YAW_RANGE = 0.5f * TWO_PI;
static const glm::vec2 MOUSE_RANGE(MOUSE_YAW_RANGE, MOUSE_PITCH_RANGE);
static gpu::BufferPointer _hemiVertices;
static gpu::BufferPointer _hemiIndices;
static int _hemiIndexCount{ 0 };
EntityItemID ApplicationCompositor::_noItemId;
static QString _tooltipId;
// Return a point's cartesian coordinates on a sphere from pitch and yaw
glm::vec3 getPoint(float yaw, float pitch) {
return glm::vec3(glm::cos(-pitch) * (-glm::sin(yaw)),
glm::sin(-pitch),
glm::cos(-pitch) * (-glm::cos(yaw)));
}
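A quick illustrative check of the convention above (editorial sketch, not part of this change; PI is the same constant already used elsewhere in this file):
//   getPoint(0.0f, 0.0f)        == (0, 0, -1)   straight ahead
//   getPoint(-PI / 2.0f, 0.0f)  == (1, 0,  0)   a quarter turn toward +X under this sign convention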
//Checks if the given ray intersects the sphere at the origin. result will store a multiplier that should
//be multiplied by dir and added to origin to get the location of the collision
bool raySphereIntersect(const glm::vec3 &dir, const glm::vec3 &origin, float r, float* result)
{
//Source: http://wiki.cgsociety.org/index.php/Ray_Sphere_Intersection
//Compute A, B and C coefficients
float a = glm::dot(dir, dir);
float b = 2 * glm::dot(dir, origin);
float c = glm::dot(origin, origin) - (r * r);
//Find discriminant
float disc = b * b - 4 * a * c;
// if discriminant is negative there are no real roots, so return
// false as ray misses sphere
if (disc < 0) {
return false;
}
// compute q as described above
float distSqrt = sqrtf(disc);
float q;
if (b < 0) {
q = (-b - distSqrt) / 2.0f;
} else {
q = (-b + distSqrt) / 2.0f;
}
// compute t0 and t1
float t0 = q / a;
float t1 = c / q;
// make sure t0 is smaller than t1
if (t0 > t1) {
// if t0 is bigger than t1 swap them around
float temp = t0;
t0 = t1;
t1 = temp;
}
// if t1 is less than zero, the object is in the ray's negative direction
// and consequently the ray misses the sphere
if (t1 < 0) {
return false;
}
// if t0 is less than zero, the intersection point is at t1
if (t0 < 0) {
*result = t1;
return true;
} else { // else the intersection point is at t0
*result = t0;
return true;
}
}
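A minimal usage sketch for the helper above (illustrative only, not part of this change; it assumes GLM and the function as written). With a unit-length direction the returned multiplier is a plain distance:
void raySphereIntersectExample() {
    // Ray from (0, 0, 5) looking down -Z at a unit sphere centered on the origin:
    // a = 1, b = -10, c = 24, disc = 4, so the near hit is at t = 4.
    glm::vec3 origin(0.0f, 0.0f, 5.0f);
    glm::vec3 dir(0.0f, 0.0f, -1.0f);
    float t = 0.0f;
    if (raySphereIntersect(dir, origin, 1.0f, &t)) {
        glm::vec3 hit = origin + dir * t; // (0, 0, 1), the near side of the sphere
        (void)hit;
    }
}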
ApplicationCompositor::ApplicationCompositor() {
memset(_reticleActive, 0, sizeof(_reticleActive));
memset(_magActive, 0, sizeof(_magActive));
memset(_magSizeMult, 0, sizeof(_magSizeMult));
auto geometryCache = DependencyManager::get<GeometryCache>();
_reticleQuad = geometryCache->allocateID();
_magnifierQuad = geometryCache->allocateID();
_magnifierBorder = geometryCache->allocateID();
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
connect(entityScriptingInterface.data(), &EntityScriptingInterface::hoverEnterEntity, [=](const EntityItemID& entityItemID, const MouseEvent& event) {
if (_hoverItemId != entityItemID) {
_hoverItemId = entityItemID;
_hoverItemEnterUsecs = usecTimestampNow();
auto properties = entityScriptingInterface->getEntityProperties(_hoverItemId);
_hoverItemHref = properties.getHref();
auto cursor = Cursor::Manager::instance().getCursor();
if (!_hoverItemHref.isEmpty()) {
cursor->setIcon(Cursor::Icon::LINK);
} else {
cursor->setIcon(Cursor::Icon::DEFAULT);
}
}
});
connect(entityScriptingInterface.data(), &EntityScriptingInterface::hoverLeaveEntity, [=](const EntityItemID& entityItemID, const MouseEvent& event) {
if (_hoverItemId == entityItemID) {
_hoverItemId = _noItemId;
_hoverItemHref.clear();
auto cursor = Cursor::Manager::instance().getCursor();
cursor->setIcon(Cursor::Icon::DEFAULT);
if (!_tooltipId.isEmpty()) {
qDebug() << "Closing tooltip " << _tooltipId;
Tooltip::closeTip(_tooltipId);
_tooltipId.clear();
}
}
});
}
ApplicationCompositor::~ApplicationCompositor() {
}
void ApplicationCompositor::bindCursorTexture(gpu::Batch& batch, uint8_t cursorIndex) {
auto& cursorManager = Cursor::Manager::instance();
auto cursor = cursorManager.getCursor(cursorIndex);
auto iconId = cursor->getIcon();
if (!_cursors.count(iconId)) {
auto iconPath = cursorManager.getIconImage(cursor->getIcon());
_cursors[iconId] = DependencyManager::get<TextureCache>()->
getImageTexture(iconPath);
}
batch.setUniformTexture(0, _cursors[iconId]);
}
// Draws the FBO texture for the screen
void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
if (_alpha == 0.0f) {
return;
}
GLuint texture = qApp->getApplicationOverlay().getOverlayTexture();
if (!texture) {
return;
}
updateTooltips();
vec2 canvasSize = qApp->getCanvasSize();
//Handle fading and deactivation/activation of UI
gpu::Batch batch;
renderArgs->_context->syncCache();
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->useSimpleDrawPipeline(batch);
batch.setModelTransform(Transform());
batch.setViewTransform(Transform());
batch.setProjectionTransform(mat4());
batch._glBindTexture(GL_TEXTURE_2D, texture);
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
geometryCache->renderUnitQuad(batch, vec4(vec3(1), _alpha));
// Doesn't actually render
renderPointers(batch);
//draw the mouse pointer
// Get the mouse coordinates and convert to NDC [-1, 1]
vec2 mousePosition = toNormalizedDeviceScale(vec2(qApp->getMouse()), canvasSize);
// Invert the Y axis
mousePosition.y *= -1.0f;
Transform model;
model.setTranslation(vec3(mousePosition, 0));
vec2 mouseSize = CURSOR_PIXEL_SIZE / canvasSize;
model.setScale(vec3(mouseSize, 1.0f));
batch.setModelTransform(model);
bindCursorTexture(batch);
vec4 reticleColor = { RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], 1.0f };
geometryCache->renderUnitQuad(batch, vec4(1));
renderArgs->_context->render(batch);
}
vec2 getPolarCoordinates(const PalmData& palm) {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
auto avatarOrientation = myAvatar->getOrientation();
auto eyePos = myAvatar->getDefaultEyePosition();
glm::vec3 tip = myAvatar->getLaserPointerTipPosition(&palm);
// Direction of the tip relative to the eye
glm::vec3 tipDirection = tip - eyePos;
// orient into avatar space
tipDirection = glm::inverse(avatarOrientation) * tipDirection;
// Normalize for trig functions
tipDirection = glm::normalize(tipDirection);
// Convert to polar coordinates
glm::vec2 polar(glm::atan(tipDirection.x, -tipDirection.z), glm::asin(tipDirection.y));
return polar;
}
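Two illustrative values for the mapping above (not part of this change), for already-normalized tip directions in avatar space:
//   tip straight ahead of the eye, (0, 0, -1)            ->  polar (0, 0)
//   tip level with the eye, off to the right, (1, 0, 0)  ->  polar (+PI / 2, 0)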
// Draws the FBO texture for Oculus rift.
void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int eye) {
if (_alpha == 0.0f) {
return;
}
GLuint texture = qApp->getApplicationOverlay().getOverlayTexture();
if (!texture) {
return;
}
updateTooltips();
vec2 canvasSize = qApp->getCanvasSize();
_textureAspectRatio = aspect(canvasSize);
renderArgs->_context->syncCache();
auto geometryCache = DependencyManager::get<GeometryCache>();
gpu::Batch batch;
geometryCache->useSimpleDrawPipeline(batch);
batch._glDisable(GL_DEPTH_TEST);
batch._glDisable(GL_CULL_FACE);
batch._glBindTexture(GL_TEXTURE_2D, texture);
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
batch.setViewTransform(Transform());
batch.setProjectionTransform(qApp->getEyeProjection(eye));
mat4 eyePose = qApp->getEyePose(eye);
glm::mat4 overlayXfm = glm::inverse(eyePose);
#ifdef DEBUG_OVERLAY
{
batch.setModelTransform(glm::translate(mat4(), vec3(0, 0, -2)));
geometryCache->renderUnitQuad(batch, glm::vec4(1));
}
#else
{
batch.setModelTransform(overlayXfm);
drawSphereSection(batch);
}
#endif
// Doesn't actually render
renderPointers(batch);
vec3 reticleScale = vec3(Cursor::Manager::instance().getScale() * reticleSize);
bindCursorTexture(batch);
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
//Controller Pointers
for (int i = 0; i < (int)myAvatar->getHand()->getNumPalms(); i++) {
PalmData& palm = myAvatar->getHand()->getPalms()[i];
if (palm.isActive()) {
glm::vec2 polar = getPolarCoordinates(palm);
// Convert to quaternion
mat4 pointerXfm = glm::mat4_cast(quat(vec3(polar.y, -polar.x, 0.0f))) * glm::translate(mat4(), vec3(0, 0, -1));
mat4 reticleXfm = overlayXfm * pointerXfm;
reticleXfm = glm::scale(reticleXfm, reticleScale);
batch.setModelTransform(reticleXfm);
// Render reticle at location
geometryCache->renderUnitQuad(batch, glm::vec4(1), _reticleQuad);
}
}
//Mouse Pointer
if (_reticleActive[MOUSE]) {
glm::vec2 projection = screenToSpherical(glm::vec2(_reticlePosition[MOUSE].x(),
_reticlePosition[MOUSE].y()));
mat4 pointerXfm = glm::mat4_cast(quat(vec3(-projection.y, projection.x, 0.0f))) * glm::translate(mat4(), vec3(0, 0, -1));
mat4 reticleXfm = overlayXfm * pointerXfm;
reticleXfm = glm::scale(reticleXfm, reticleScale);
batch.setModelTransform(reticleXfm);
geometryCache->renderUnitQuad(batch, glm::vec4(1), _reticleQuad);
}
renderArgs->_context->render(batch);
}
void ApplicationCompositor::computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& origin, glm::vec3& direction) const {
cursorPos *= qApp->getCanvasSize();
const glm::vec2 projection = screenToSpherical(cursorPos);
// The overlay space orientation of the mouse coordinates
const glm::quat orientation(glm::vec3(-projection.y, projection.x, 0.0f));
// FIXME We now have the direction of the ray FROM THE DEFAULT HEAD POSE.
// Now we need to account for the actual camera position relative to the overlay
glm::vec3 overlaySpaceDirection = glm::normalize(orientation * IDENTITY_FRONT);
// We need the RAW camera orientation and position, because this is what the overlay is
// rendered relative to
const glm::vec3 overlayPosition = qApp->getCamera()->getPosition();
const glm::quat overlayOrientation = qApp->getCamera()->getRotation();
// Intersection UI overlay space
glm::vec3 worldSpaceDirection = overlayOrientation * overlaySpaceDirection;
glm::vec3 intersectionWithUi = glm::normalize(worldSpaceDirection) * _oculusUIRadius;
intersectionWithUi += overlayPosition;
// Intersection in world space
origin = overlayPosition;
direction = glm::normalize(intersectionWithUi - origin);
}
//Calculate the click location using one of the Sixense controllers. Scale is not applied
QPoint ApplicationCompositor::getPalmClickLocation(const PalmData *palm) const {
QPoint rv;
auto canvasSize = qApp->getCanvasSize();
if (qApp->isHMDMode()) {
glm::vec2 polar = getPolarCoordinates(*palm);
glm::vec2 point = sphericalToScreen(-polar);
rv.rx() = point.x;
rv.ry() = point.y;
} else {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::dmat4 projection;
qApp->getProjectionMatrix(&projection);
glm::quat invOrientation = glm::inverse(myAvatar->getOrientation());
glm::vec3 eyePos = myAvatar->getDefaultEyePosition();
glm::vec3 tip = myAvatar->getLaserPointerTipPosition(palm);
glm::vec3 tipPos = invOrientation * (tip - eyePos);
glm::vec4 clipSpacePos = glm::vec4(projection * glm::dvec4(tipPos, 1.0));
glm::vec3 ndcSpacePos;
if (clipSpacePos.w != 0) {
ndcSpacePos = glm::vec3(clipSpacePos) / clipSpacePos.w;
}
rv.setX(((ndcSpacePos.x + 1.0) / 2.0) * canvasSize.x);
rv.setY((1.0 - ((ndcSpacePos.y + 1.0) / 2.0)) * canvasSize.y);
}
return rv;
}
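For the non-HMD branch above, the NDC-to-widget mapping works out as follows on a hypothetical 1920 x 1080 canvas (illustrative only, not part of this change):
//   NDC ( 0,  0)  ->  ( 960,  540)   canvas center
//   NDC ( 1,  1)  ->  (1920,    0)   top-right (screen y grows downward, NDC y grows upward)
//   NDC (-1, -1)  ->  (   0, 1080)   bottom-left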
//Finds the collision point of a world space ray
bool ApplicationCompositor::calculateRayUICollisionPoint(const glm::vec3& position, const glm::vec3& direction, glm::vec3& result) const {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::quat inverseOrientation = glm::inverse(myAvatar->getOrientation());
glm::vec3 relativePosition = inverseOrientation * (position - myAvatar->getDefaultEyePosition());
glm::vec3 relativeDirection = glm::normalize(inverseOrientation * direction);
float t;
if (raySphereIntersect(relativeDirection, relativePosition, _oculusUIRadius * myAvatar->getScale(), &t)){
result = position + direction * t;
return true;
}
return false;
}
//Renders optional pointers
void ApplicationCompositor::renderPointers(gpu::Batch& batch) {
if (qApp->isHMDMode() && !qApp->getLastMouseMoveWasSimulated() && !qApp->isMouseHidden()) {
//If we are in oculus, render reticle later
if (_lastMouseMove == 0) {
_lastMouseMove = usecTimestampNow();
}
QPoint position = QPoint(qApp->getTrueMouseX(), qApp->getTrueMouseY());
static const int MAX_IDLE_TIME = 3;
if (_reticlePosition[MOUSE] != position) {
_lastMouseMove = usecTimestampNow();
} else if (usecTimestampNow() - _lastMouseMove > MAX_IDLE_TIME * USECS_PER_SECOND) {
//float pitch = 0.0f, yaw = 0.0f, roll = 0.0f; // radians
//OculusManager::getEulerAngles(yaw, pitch, roll);
glm::quat orientation = qApp->getHeadOrientation(); // (glm::vec3(pitch, yaw, roll));
glm::vec3 result;
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
if (calculateRayUICollisionPoint(myAvatar->getEyePosition(),
myAvatar->getOrientation() * orientation * IDENTITY_FRONT,
result)) {
glm::vec3 lookAtDirection = glm::inverse(myAvatar->getOrientation()) * (result - myAvatar->getDefaultEyePosition());
glm::vec2 sphericalPos = directionToSpherical(glm::normalize(lookAtDirection));
glm::vec2 screenPos = sphericalToScreen(sphericalPos);
position = QPoint(screenPos.x, screenPos.y);
// FIXME
//glCanvas->cursor().setPos(glCanvas->mapToGlobal(position));
} else {
qDebug() << "No collision point";
}
}
_reticlePosition[MOUSE] = position;
_reticleActive[MOUSE] = true;
_magActive[MOUSE] = _magnifier;
_reticleActive[LEFT_CONTROLLER] = false;
_reticleActive[RIGHT_CONTROLLER] = false;
} else if (qApp->getLastMouseMoveWasSimulated() && Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput)) {
_lastMouseMove = 0;
//only render controller pointer if we aren't already rendering a mouse pointer
_reticleActive[MOUSE] = false;
_magActive[MOUSE] = false;
renderControllerPointers(batch);
}
}
void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
//Static variables used for storing controller state
static quint64 pressedTime[NUMBER_OF_RETICLES] = { 0ULL, 0ULL, 0ULL };
static bool isPressed[NUMBER_OF_RETICLES] = { false, false, false };
static bool stateWhenPressed[NUMBER_OF_RETICLES] = { false, false, false };
const HandData* handData = DependencyManager::get<AvatarManager>()->getMyAvatar()->getHandData();
for (unsigned int palmIndex = 2; palmIndex < 4; palmIndex++) {
const int index = palmIndex - 1;
const PalmData* palmData = NULL;
if (palmIndex >= handData->getPalms().size()) {
return;
}
if (handData->getPalms()[palmIndex].isActive()) {
palmData = &handData->getPalms()[palmIndex];
} else {
continue;
}
int controllerButtons = palmData->getControllerButtons();
//Check whether we should toggle or drag the magnification window
if (controllerButtons & BUTTON_3) {
if (isPressed[index] == false) {
//We are now dragging the window
isPressed[index] = true;
//set the pressed time in us
pressedTime[index] = usecTimestampNow();
stateWhenPressed[index] = _magActive[index];
}
} else if (isPressed[index]) {
isPressed[index] = false;
//If the button was only pressed for < 250 ms
//then disable it.
const int MAX_BUTTON_PRESS_TIME = 250 * MSECS_TO_USECS;
if (usecTimestampNow() < pressedTime[index] + MAX_BUTTON_PRESS_TIME) {
_magActive[index] = !stateWhenPressed[index];
}
}
//if we have the oculus, we should make the cursor smaller since it will be
//magnified
if (qApp->isHMDMode()) {
QPoint point = getPalmClickLocation(palmData);
_reticlePosition[index] = point;
//When button 2 is pressed we drag the mag window
if (isPressed[index]) {
_magActive[index] = true;
}
// If oculus is enabled, we draw the crosshairs later
continue;
}
auto canvasSize = qApp->getCanvasSize();
int mouseX, mouseY;
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseLasers)) {
QPoint res = getPalmClickLocation(palmData);
mouseX = res.x();
mouseY = res.y();
} else {
// Get direction relative to avatar orientation
glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * palmData->getFingerDirection();
// Get the angles, scaled between (-0.5,0.5)
float xAngle = (atan2(direction.z, direction.x) + M_PI_2);
float yAngle = 0.5f - ((atan2(direction.z, direction.y) + M_PI_2));
// Get the pixel range over which the xAngle and yAngle are scaled
float cursorRange = canvasSize.x * SixenseManager::getInstance().getCursorPixelRangeMult();
mouseX = (canvasSize.x / 2.0f + cursorRange * xAngle);
mouseY = (canvasSize.y / 2.0f + cursorRange * yAngle);
}
//If the cursor is out of the screen then don't render it
if (mouseX < 0 || mouseX >= (int)canvasSize.x || mouseY < 0 || mouseY >= (int)canvasSize.y) {
_reticleActive[index] = false;
continue;
}
_reticleActive[index] = true;
const float reticleSize = 40.0f;
mouseX -= reticleSize / 2.0f;
mouseY += reticleSize / 2.0f;
glm::vec2 topLeft(mouseX, mouseY);
glm::vec2 bottomRight(mouseX + reticleSize, mouseY - reticleSize);
glm::vec2 texCoordTopLeft(0.0f, 0.0f);
glm::vec2 texCoordBottomRight(1.0f, 1.0f);
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight,
glm::vec4(RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], 1.0f));
}
}
//Renders a small magnification of the currently bound texture at the coordinates
void ApplicationCompositor::renderMagnifier(gpu::Batch& batch, const glm::vec2& magPos, float sizeMult, bool showBorder) {
if (!_magnifier) {
return;
}
auto canvasSize = qApp->getCanvasSize();
const int widgetWidth = canvasSize.x;
const int widgetHeight = canvasSize.y;
const float halfWidth = (MAGNIFY_WIDTH / _textureAspectRatio) * sizeMult / 2.0f;
const float halfHeight = MAGNIFY_HEIGHT * sizeMult / 2.0f;
// Magnification Texture Coordinates
const float magnifyULeft = (magPos.x - halfWidth) / (float)widgetWidth;
const float magnifyURight = (magPos.x + halfWidth) / (float)widgetWidth;
const float magnifyVTop = 1.0f - (magPos.y - halfHeight) / (float)widgetHeight;
const float magnifyVBottom = 1.0f - (magPos.y + halfHeight) / (float)widgetHeight;
const float newHalfWidth = halfWidth * MAGNIFY_MULT;
const float newHalfHeight = halfHeight * MAGNIFY_MULT;
//Get yaw / pitch value for the corners
const glm::vec2 topLeftYawPitch = overlayToSpherical(glm::vec2(magPos.x - newHalfWidth,
magPos.y - newHalfHeight));
const glm::vec2 bottomRightYawPitch = overlayToSpherical(glm::vec2(magPos.x + newHalfWidth,
magPos.y + newHalfHeight));
const glm::vec3 bottomLeft = getPoint(topLeftYawPitch.x, bottomRightYawPitch.y);
const glm::vec3 bottomRight = getPoint(bottomRightYawPitch.x, bottomRightYawPitch.y);
const glm::vec3 topLeft = getPoint(topLeftYawPitch.x, topLeftYawPitch.y);
const glm::vec3 topRight = getPoint(bottomRightYawPitch.x, topLeftYawPitch.y);
auto geometryCache = DependencyManager::get<GeometryCache>();
if (bottomLeft != _previousMagnifierBottomLeft || bottomRight != _previousMagnifierBottomRight
|| topLeft != _previousMagnifierTopLeft || topRight != _previousMagnifierTopRight) {
QVector<glm::vec3> border;
border << topLeft;
border << bottomLeft;
border << bottomRight;
border << topRight;
border << topLeft;
geometryCache->updateVertices(_magnifierBorder, border, glm::vec4(1.0f, 0.0f, 0.0f, _alpha));
_previousMagnifierBottomLeft = bottomLeft;
_previousMagnifierBottomRight = bottomRight;
_previousMagnifierTopLeft = topLeft;
_previousMagnifierTopRight = topRight;
}
glPushMatrix(); {
if (showBorder) {
glDisable(GL_TEXTURE_2D);
glLineWidth(1.0f);
//Outer Line
geometryCache->renderVertices(gpu::LINE_STRIP, _magnifierBorder);
glEnable(GL_TEXTURE_2D);
}
glm::vec4 magnifierColor = { 1.0f, 1.0f, 1.0f, _alpha };
DependencyManager::get<GeometryCache>()->renderQuad(bottomLeft, bottomRight, topRight, topLeft,
glm::vec2(magnifyULeft, magnifyVBottom),
glm::vec2(magnifyURight, magnifyVBottom),
glm::vec2(magnifyURight, magnifyVTop),
glm::vec2(magnifyULeft, magnifyVTop),
magnifierColor, _magnifierQuad);
} glPopMatrix();
}
void ApplicationCompositor::buildHemiVertices(
const float fov, const float aspectRatio, const int slices, const int stacks) {
static float textureFOV = 0.0f, textureAspectRatio = 1.0f;
if (textureFOV == fov && textureAspectRatio == aspectRatio) {
return;
}
textureFOV = fov;
textureAspectRatio = aspectRatio;
auto geometryCache = DependencyManager::get<GeometryCache>();
_hemiVertices = gpu::BufferPointer(new gpu::Buffer());
_hemiIndices = gpu::BufferPointer(new gpu::Buffer());
if (fov >= PI) {
qDebug() << "TexturedHemisphere::buildVBO(): FOV greater or equal than Pi will create issues";
}
//UV mapping source: http://www.mvps.org/directx/articles/spheremap.htm
vec3 pos;
vec2 uv;
// Compute vertices positions and texture UV coordinate
// Create and write to buffer
for (int i = 0; i < stacks; i++) {
uv.y = (float)i / (float)(stacks - 1); // First stack is 0.0f, last stack is 1.0f
// abs(theta) <= fov / 2.0f
float pitch = -fov * (uv.y - 0.5f);
for (int j = 0; j < slices; j++) {
uv.x = (float)j / (float)(slices - 1); // First slice is 0.0f, last slice is 1.0f
// abs(phi) <= fov * aspectRatio / 2.0f
float yaw = -fov * aspectRatio * (uv.x - 0.5f);
pos = getPoint(yaw, pitch);
static const vec4 color(1);
_hemiVertices->append(sizeof(pos), (gpu::Byte*)&pos);
_hemiVertices->append(sizeof(vec2), (gpu::Byte*)&uv);
_hemiVertices->append(sizeof(vec4), (gpu::Byte*)&color);
}
}
// Compute number of indices needed
static const int VERTEX_PER_TRIANGLE = 3;
static const int TRIANGLE_PER_RECTANGLE = 2;
int numberOfRectangles = (slices - 1) * (stacks - 1);
_hemiIndexCount = numberOfRectangles * TRIANGLE_PER_RECTANGLE * VERTEX_PER_TRIANGLE;
// Compute indices order
std::vector<GLushort> indices;
for (int i = 0; i < stacks - 1; i++) {
for (int j = 0; j < slices - 1; j++) {
GLushort bottomLeftIndex = i * slices + j;
GLushort bottomRightIndex = bottomLeftIndex + 1;
GLushort topLeftIndex = bottomLeftIndex + slices;
GLushort topRightIndex = topLeftIndex + 1;
// FIXME make a z-order curve for better vertex cache locality
indices.push_back(topLeftIndex);
indices.push_back(bottomLeftIndex);
indices.push_back(topRightIndex);
indices.push_back(topRightIndex);
indices.push_back(bottomLeftIndex);
indices.push_back(bottomRightIndex);
}
}
_hemiIndices->append(sizeof(GLushort) * indices.size(), (gpu::Byte*)&indices[0]);
}
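For the 80 x 80 grid that drawSphereSection() requests below, the index math above works out like this (illustrative arithmetic, not part of this change):
const int slices = 80, stacks = 80;
const int rectangles = (slices - 1) * (stacks - 1);         // 79 * 79 = 6241
const int indexCount = rectangles * 2 /* triangles */ * 3;  // 6241 * 6 = 37446 GLushort indices
// The largest vertex index referenced is slices * stacks - 1 = 6399, well inside GLushort range.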
void ApplicationCompositor::drawSphereSection(gpu::Batch& batch) {
buildHemiVertices(_textureFov, _textureAspectRatio, 80, 80);
static const int VERTEX_DATA_SLOT = 0;
static const int TEXTURE_DATA_SLOT = 1;
static const int COLOR_DATA_SLOT = 2;
gpu::Stream::FormatPointer streamFormat(new gpu::Stream::Format()); // 1 for everyone
streamFormat->setAttribute(gpu::Stream::POSITION, VERTEX_DATA_SLOT, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), 0);
streamFormat->setAttribute(gpu::Stream::TEXCOORD, TEXTURE_DATA_SLOT, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::UV));
streamFormat->setAttribute(gpu::Stream::COLOR, COLOR_DATA_SLOT, gpu::Element(gpu::VEC4, gpu::FLOAT, gpu::RGBA));
batch.setInputFormat(streamFormat);
static const int VERTEX_STRIDE = sizeof(vec3) + sizeof(vec2) + sizeof(vec4);
gpu::BufferView posView(_hemiVertices, 0, _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::POSITION)._element);
gpu::BufferView uvView(_hemiVertices, sizeof(vec3), _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::TEXCOORD)._element);
gpu::BufferView colView(_hemiVertices, sizeof(vec3) + sizeof(vec2), _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::COLOR)._element);
batch.setInputBuffer(VERTEX_DATA_SLOT, posView);
batch.setInputBuffer(TEXTURE_DATA_SLOT, uvView);
batch.setInputBuffer(COLOR_DATA_SLOT, colView);
batch.setIndexBuffer(gpu::UINT16, _hemiIndices, 0);
batch.drawIndexed(gpu::TRIANGLES, _hemiIndexCount);
}
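For reference, the three buffer views above carve the following interleaved layout out of _hemiVertices (illustrative only, not part of this change):
//   bytes  0..11   position (vec3)  ->  posView, offset 0
//   bytes 12..19   texcoord (vec2)  ->  uvView,  offset sizeof(vec3) = 12
//   bytes 20..35   color    (vec4)  ->  colView, offset sizeof(vec3) + sizeof(vec2) = 20
//   VERTEX_STRIDE = 12 + 8 + 16 = 36 bytes per vertex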
glm::vec2 ApplicationCompositor::directionToSpherical(const glm::vec3& direction) {
glm::vec2 result;
// Compute yaw
glm::vec3 normalProjection = glm::normalize(glm::vec3(direction.x, 0.0f, direction.z));
result.x = glm::acos(glm::dot(IDENTITY_FRONT, normalProjection));
if (glm::dot(IDENTITY_RIGHT, normalProjection) > 0.0f) {
result.x = -glm::abs(result.x);
} else {
result.x = glm::abs(result.x);
}
// Compute pitch
result.y = angleBetween(IDENTITY_UP, direction) - PI_OVER_TWO;
return result;
}
glm::vec3 ApplicationCompositor::sphericalToDirection(const glm::vec2& sphericalPos) {
glm::quat rotation(glm::vec3(sphericalPos.y, sphericalPos.x, 0.0f));
return rotation * IDENTITY_FRONT;
}
glm::vec2 ApplicationCompositor::screenToSpherical(const glm::vec2& screenPos) {
auto screenSize = qApp->getCanvasSize();
glm::vec2 result;
result.x = -(screenPos.x / screenSize.x - 0.5f);
result.y = (screenPos.y / screenSize.y - 0.5f);
result.x *= MOUSE_YAW_RANGE;
result.y *= MOUSE_PITCH_RANGE;
return result;
}
glm::vec2 ApplicationCompositor::sphericalToScreen(const glm::vec2& sphericalPos) {
glm::vec2 result = sphericalPos;
result.x *= -1.0;
result /= MOUSE_RANGE;
result += 0.5f;
result *= qApp->getCanvasSize();
return result;
}
glm::vec2 ApplicationCompositor::sphericalToOverlay(const glm::vec2& sphericalPos) const {
glm::vec2 result = sphericalPos;
result.x *= -1.0;
result /= _textureFov;
result.x /= _textureAspectRatio;
result += 0.5f;
result *= qApp->getCanvasSize();
return result;
}
glm::vec2 ApplicationCompositor::overlayToSpherical(const glm::vec2& overlayPos) const {
glm::vec2 result = overlayPos;
result /= qApp->getCanvasSize();
result -= 0.5f;
result *= _textureFov;
result.x *= _textureAspectRatio;
result.x *= -1.0f;
return result;
}
glm::vec2 ApplicationCompositor::screenToOverlay(const glm::vec2& screenPos) const {
return sphericalToOverlay(screenToSpherical(screenPos));
}
glm::vec2 ApplicationCompositor::overlayToScreen(const glm::vec2& overlayPos) const {
return sphericalToScreen(overlayToSpherical(overlayPos));
}
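A worked example of the screen/spherical mapping above, assuming a hypothetical 1920 x 1080 canvas (illustrative only, not part of this change):
//   screenToSpherical({  960,  540 })  ==  {       0,      0 }   canvas center
//   screenToSpherical({ 1920,  540 })  ==  { -PI / 2,      0 }   right edge (yaw is negated)
//   screenToSpherical({  960, 1080 })  ==  {       0, PI / 2 }   bottom edge (pitch grows downward)
// sphericalToScreen() applies the exact inverse, so sphericalToScreen(screenToSpherical(p)) == p
// up to floating-point error; the overlay variants behave the same way with _textureFov and
// _textureAspectRatio in place of the mouse ranges.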
void ApplicationCompositor::updateTooltips() {
if (_hoverItemId != _noItemId) {
quint64 hoverDuration = usecTimestampNow() - _hoverItemEnterUsecs;
if (_hoverItemEnterUsecs != UINT64_MAX && !_hoverItemHref.isEmpty() && hoverDuration > TOOLTIP_DELAY) {
// TODO Enable and position the tooltip
_hoverItemEnterUsecs = UINT64_MAX;
_tooltipId = Tooltip::showTip("URL: " + _hoverItemHref);
}
}
}

View file

@@ -0,0 +1,121 @@
//
// Created by Bradley Austin Davis Arnold on 2015/06/13
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_ApplicationCompositor_h
#define hifi_ApplicationCompositor_h
#include <QObject>
#include <cstdint>
#include <EntityItemID.h>
#include <GeometryCache.h>
#include <GLMHelpers.h>
#include <gpu/Batch.h>
#include <gpu/Texture.h>
class Camera;
class PalmData;
class RenderArgs;
const float MAGNIFY_WIDTH = 220.0f;
const float MAGNIFY_HEIGHT = 100.0f;
const float MAGNIFY_MULT = 2.0f;
const float DEFAULT_HMD_UI_ANGULAR_SIZE = 72.0f;
// Handles the drawing of the overlays to the screen
// TODO: move / divide up the rendering, displaying and input handling
// facilities of this class
class ApplicationCompositor : public QObject {
Q_OBJECT
public:
ApplicationCompositor();
~ApplicationCompositor();
void displayOverlayTexture(RenderArgs* renderArgs);
void displayOverlayTextureHmd(RenderArgs* renderArgs, int eye);
QPoint getPalmClickLocation(const PalmData *palm) const;
bool calculateRayUICollisionPoint(const glm::vec3& position, const glm::vec3& direction, glm::vec3& result) const;
bool hasMagnifier() const { return _magnifier; }
void toggleMagnifier() { _magnifier = !_magnifier; }
float getHmdUIAngularSize() const { return _hmdUIAngularSize; }
void setHmdUIAngularSize(float hmdUIAngularSize) { _hmdUIAngularSize = hmdUIAngularSize; }
// Converter from one frame of reference to another.
// Frame of reference:
// Direction: Ray that represents the spherical values
// Screen: Position on the screen (x,y)
// Spherical: Pitch and yaw that gives the position on the sphere we project on (yaw,pitch)
// Overlay: Position on the overlay (x,y)
// (x,y) in Overlay are similar to (x,y) in Screen except that they can be outside the bounds of the screen.
// This allows for picking outside of the screen projection in 3D.
glm::vec2 sphericalToOverlay(const glm::vec2 & sphericalPos) const;
glm::vec2 overlayToSpherical(const glm::vec2 & overlayPos) const;
glm::vec2 screenToOverlay(const glm::vec2 & screenPos) const;
glm::vec2 overlayToScreen(const glm::vec2 & overlayPos) const;
void computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& origin, glm::vec3& direction) const;
GLuint getOverlayTexture() const;
static glm::vec2 directionToSpherical(const glm::vec3 & direction);
static glm::vec3 sphericalToDirection(const glm::vec2 & sphericalPos);
static glm::vec2 screenToSpherical(const glm::vec2 & screenPos);
static glm::vec2 sphericalToScreen(const glm::vec2 & sphericalPos);
private:
void displayOverlayTextureStereo(RenderArgs* renderArgs, float aspectRatio, float fov);
void bindCursorTexture(gpu::Batch& batch, uint8_t cursorId = 0);
void buildHemiVertices(const float fov, const float aspectRatio, const int slices, const int stacks);
void drawSphereSection(gpu::Batch& batch);
void updateTooltips();
void renderPointers(gpu::Batch& batch);
void renderMagnifier(gpu::Batch& batch, const glm::vec2& magPos, float sizeMult, bool showBorder);
void renderControllerPointers(gpu::Batch& batch);
void renderPointersOculus(gpu::Batch& batch);
// Support for hovering and tooltips
static EntityItemID _noItemId;
EntityItemID _hoverItemId{ _noItemId };
QString _hoverItemHref;
quint64 _hoverItemEnterUsecs{ 0 };
float _hmdUIAngularSize = DEFAULT_HMD_UI_ANGULAR_SIZE;
float _textureFov{ glm::radians(DEFAULT_HMD_UI_ANGULAR_SIZE) };
float _textureAspectRatio{ 1.0f };
int _hemiVerticesID{ GeometryCache::UNKNOWN_ID };
enum Reticles { MOUSE, LEFT_CONTROLLER, RIGHT_CONTROLLER, NUMBER_OF_RETICLES };
bool _reticleActive[NUMBER_OF_RETICLES];
QPoint _reticlePosition[NUMBER_OF_RETICLES];
bool _magActive[NUMBER_OF_RETICLES];
float _magSizeMult[NUMBER_OF_RETICLES];
quint64 _lastMouseMove{ 0 };
bool _magnifier{ true };
float _alpha{ 1.0f };
float _oculusUIRadius{ 1.0f };
QMap<uint16_t, gpu::TexturePointer> _cursors;
int _reticleQuad;
int _magnifierQuad;
int _magnifierBorder;
int _previousBorderWidth{ -1 };
int _previousBorderHeight{ -1 };
glm::vec3 _previousMagnifierBottomLeft;
glm::vec3 _previousMagnifierBottomRight;
glm::vec3 _previousMagnifierTopLeft;
glm::vec3 _previousMagnifierTopRight;
};
#endif // hifi_ApplicationCompositor_h

File diff suppressed because it is too large

View file

@@ -13,16 +13,8 @@
#define hifi_ApplicationOverlay_h
#include <gpu/Texture.h>
class Camera;
class Overlays;
class QOpenGLFramebufferObject;
const float MAGNIFY_WIDTH = 220.0f;
const float MAGNIFY_HEIGHT = 100.0f;
const float MAGNIFY_MULT = 2.0f;
const float DEFAULT_HMD_UI_ANGULAR_SIZE = 72.0f;
// Handles the drawing of the overlays to the screen
// TODO: move / divide up the rendering, displaying and input handling
// facilities of this class
@@ -33,122 +25,26 @@ public:
~ApplicationOverlay();
void renderOverlay(RenderArgs* renderArgs);
void displayOverlayTexture();
void displayOverlayTextureStereo(Camera& whichCamera, float aspectRatio, float fov);
void displayOverlayTextureHmd(Camera& whichCamera);
GLuint getOverlayTexture();
QPoint getPalmClickLocation(const PalmData *palm) const;
bool calculateRayUICollisionPoint(const glm::vec3& position, const glm::vec3& direction, glm::vec3& result) const;
bool hasMagnifier() const { return _magnifier; }
void toggleMagnifier() { _magnifier = !_magnifier; }
float getHmdUIAngularSize() const { return _hmdUIAngularSize; }
void setHmdUIAngularSize(float hmdUIAngularSize) { _hmdUIAngularSize = hmdUIAngularSize; }
// Converter from one frame of reference to another.
// Frame of reference:
// Direction: Ray that represents the spherical values
// Screen: Position on the screen (x,y)
// Spherical: Pitch and yaw that gives the position on the sphere we project on (yaw,pitch)
// Overlay: Position on the overlay (x,y)
// (x,y) in Overlay are similar to (x,y) in Screen except that they can be outside the bounds of the screen.
// This allows for picking outside of the screen projection in 3D.
glm::vec2 sphericalToOverlay(const glm::vec2 & sphericalPos) const;
glm::vec2 overlayToSpherical(const glm::vec2 & overlayPos) const;
glm::vec2 screenToOverlay(const glm::vec2 & screenPos) const;
glm::vec2 overlayToScreen(const glm::vec2 & overlayPos) const;
void computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& origin, glm::vec3& direction) const;
static glm::vec2 directionToSpherical(const glm::vec3 & direction);
static glm::vec3 sphericalToDirection(const glm::vec2 & sphericalPos);
static glm::vec2 screenToSpherical(const glm::vec2 & screenPos);
static glm::vec2 sphericalToScreen(const glm::vec2 & sphericalPos);
private:
// Interleaved vertex data
struct TextureVertex {
glm::vec3 position;
glm::vec2 uv;
};
void renderStatsAndLogs(RenderArgs* renderArgs);
void renderDomainConnectionStatusBorder(RenderArgs* renderArgs);
void renderRearViewToFbo(RenderArgs* renderArgs);
void renderRearView(RenderArgs* renderArgs);
void renderQmlUi(RenderArgs* renderArgs);
void renderOverlays(RenderArgs* renderArgs);
void buildFramebufferObject();
typedef QPair<GLuint, GLuint> VerticesIndices;
class TexturedHemisphere {
public:
TexturedHemisphere();
~TexturedHemisphere();
void bind();
void release();
GLuint getTexture();
void buildFramebufferObject();
void buildVBO(const float fov, const float aspectRatio, const int slices, const int stacks);
void render();
private:
void cleanupVBO();
GLuint _vertices;
GLuint _indices;
QOpenGLFramebufferObject* _framebufferObject;
VerticesIndices _vbo;
};
float _hmdUIAngularSize = DEFAULT_HMD_UI_ANGULAR_SIZE;
void renderReticle(glm::quat orientation, float alpha);
void renderPointers();
void renderMagnifier(glm::vec2 magPos, float sizeMult, bool showBorder);
void renderControllerPointers();
void renderPointersOculus();
void renderAudioMeter();
void renderCameraToggle();
void renderStatsAndLogs();
void renderDomainConnectionStatusBorder();
void bindCursorTexture(gpu::Batch& batch, uint8_t cursorId = 0);
float _alpha{ 1.0f };
float _trailingAudioLoudness{ 0.0f };
GLuint _uiTexture{ 0 };
TexturedHemisphere _overlays;
float _textureFov;
float _textureAspectRatio;
enum Reticles { MOUSE, LEFT_CONTROLLER, RIGHT_CONTROLLER, NUMBER_OF_RETICLES };
bool _reticleActive[NUMBER_OF_RETICLES];
QPoint _reticlePosition[NUMBER_OF_RETICLES];
bool _magActive[NUMBER_OF_RETICLES];
float _magSizeMult[NUMBER_OF_RETICLES];
quint64 _lastMouseMove;
bool _magnifier;
float _alpha = 1.0f;
float _oculusUIRadius;
float _trailingAudioLoudness;
gpu::TexturePointer _crosshairTexture;
QMap<uint16_t, gpu::TexturePointer> _cursors;
GLuint _newUiTexture{ 0 };
int _reticleQuad;
int _magnifierQuad;
int _audioRedQuad;
int _audioGreenQuad;
int _audioBlueQuad;
int _domainStatusBorder;
int _magnifierBorder;
int _previousBorderWidth;
int _previousBorderHeight;
glm::vec3 _previousMagnifierBottomLeft;
glm::vec3 _previousMagnifierBottomRight;
glm::vec3 _previousMagnifierTopLeft;
glm::vec3 _previousMagnifierTopRight;
ivec2 _previousBorderSize{ -1 };
QOpenGLFramebufferObject* _overlayFramebuffer{ nullptr };
};
#endif // hifi_ApplicationOverlay_h

View file

@@ -0,0 +1,133 @@
//
// Created by Bradley Austin Davis 2015/06/19
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Application.h"
#include "AvatarInputs.h"
#include <SettingHandle.h>
#include "Menu.h"
#include "devices/FaceTracker.h"
HIFI_QML_DEF(AvatarInputs)
static AvatarInputs* INSTANCE{ nullptr };
static const char SETTINGS_GROUP_NAME[] = "Rear View Tools";
static const char ZOOM_LEVEL_SETTINGS[] = "ZoomLevel";
static Setting::Handle<int> rearViewZoomLevel(QStringList() << SETTINGS_GROUP_NAME << ZOOM_LEVEL_SETTINGS, 0);
AvatarInputs* AvatarInputs::getInstance() {
if (!INSTANCE) {
AvatarInputs::registerType();
AvatarInputs::show();
Q_ASSERT(INSTANCE);
}
return INSTANCE;
}
AvatarInputs::AvatarInputs(QQuickItem* parent) : QQuickItem(parent) {
INSTANCE = this;
_mirrorZoomed = rearViewZoomLevel.get() != 0;
}
#define AI_UPDATE(name, src) \
{ \
auto val = src; \
if (_##name != val) { \
_##name = val; \
emit name##Changed(); \
} \
}
#define AI_UPDATE_FLOAT(name, src, epsilon) \
{ \
float val = src; \
if (abs(_##name - val) >= epsilon) { \
_##name = val; \
emit name##Changed(); \
} \
}
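For reference, AI_UPDATE(audioMuted, audioIO->isMuted()) in update() below expands to roughly the following (illustrative expansion, not part of this change):
{
    auto val = audioIO->isMuted();
    if (_audioMuted != val) {
        _audioMuted = val;
        emit audioMutedChanged();
    }
}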
void AvatarInputs::update() {
if (!Menu::getInstance()) {
return;
}
AI_UPDATE(mirrorVisible, Menu::getInstance()->isOptionChecked(MenuOption::Mirror) && !qApp->isHMDMode()
&& !Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror));
AI_UPDATE(cameraEnabled, !Menu::getInstance()->isOptionChecked(MenuOption::NoFaceTracking));
AI_UPDATE(cameraMuted, Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking));
auto audioIO = DependencyManager::get<AudioClient>();
const float CLIPPING_INDICATOR_TIME = 1.0f;
const float AUDIO_METER_AVERAGING = 0.5;
const float LOG2 = log(2.0f);
const float METER_LOUDNESS_SCALE = 2.8f / 5.0f;
const float LOG2_LOUDNESS_FLOOR = 11.0f;
float audioLevel = 0.0f;
auto audio = DependencyManager::get<AudioClient>();
float loudness = audio->getLastInputLoudness() + 1.0f;
_trailingAudioLoudness = AUDIO_METER_AVERAGING * _trailingAudioLoudness + (1.0f - AUDIO_METER_AVERAGING) * loudness;
float log2loudness = logf(_trailingAudioLoudness) / LOG2;
if (log2loudness <= LOG2_LOUDNESS_FLOOR) {
audioLevel = (log2loudness / LOG2_LOUDNESS_FLOOR) * METER_LOUDNESS_SCALE;
} else {
audioLevel = (log2loudness - (LOG2_LOUDNESS_FLOOR - 1.0f)) * METER_LOUDNESS_SCALE;
}
if (audioLevel > 1.0) {
audioLevel = 1.0;
}
AI_UPDATE_FLOAT(audioLevel, audioLevel, 0.01);
AI_UPDATE(audioClipping, ((audioIO->getTimeSinceLastClip() > 0.0f) && (audioIO->getTimeSinceLastClip() < 1.0f)));
AI_UPDATE(audioMuted, audioIO->isMuted());
//// Make muted icon pulsate
//static const float PULSE_MIN = 0.4f;
//static const float PULSE_MAX = 1.0f;
//static const float PULSE_FREQUENCY = 1.0f; // in Hz
//qint64 now = usecTimestampNow();
//if (now - _iconPulseTimeReference > (qint64)USECS_PER_SECOND) {
// // Prevents t from getting too big, which would diminish glm::cos precision
// _iconPulseTimeReference = now - ((now - _iconPulseTimeReference) % USECS_PER_SECOND);
//}
//float t = (float)(now - _iconPulseTimeReference) / (float)USECS_PER_SECOND;
//float pulseFactor = (glm::cos(t * PULSE_FREQUENCY * 2.0f * PI) + 1.0f) / 2.0f;
//iconColor = PULSE_MIN + (PULSE_MAX - PULSE_MIN) * pulseFactor;
}
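Two worked points on the meter curve in update() above (illustrative only, not part of this change):
//   _trailingAudioLoudness = 2048  ->  log2loudness = 11 (== LOG2_LOUDNESS_FLOOR)
//                                  ->  audioLevel = (11 / 11) * (2.8 / 5) = 0.56
//   _trailingAudioLoudness = 4096  ->  log2loudness = 12
//                                  ->  audioLevel = (12 - 10) * 0.56 = 1.12, clamped to 1.0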
void AvatarInputs::toggleCameraMute() {
FaceTracker* faceTracker = Application::getInstance()->getSelectedFaceTracker();
if (faceTracker) {
faceTracker->toggleMute();
}
}
void AvatarInputs::toggleAudioMute() {
DependencyManager::get<AudioClient>()->toggleMute();
}
void AvatarInputs::resetSensors() {
qApp->resetSensors();
}
void AvatarInputs::toggleZoom() {
_mirrorZoomed = !_mirrorZoomed;
rearViewZoomLevel.set(_mirrorZoomed ? 0 : 1);
emit mirrorZoomedChanged();
}
void AvatarInputs::closeMirror() {
if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
Menu::getInstance()->triggerOption(MenuOption::Mirror);
}
}

View file

@@ -0,0 +1,59 @@
//
// Created by Bradley Austin Davis 2015/06/19
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AvatarInputs_h
#define hifi_AvatarInputs_h
#include <QQuickItem>
#include <OffscreenUi.h>
#define AI_PROPERTY(type, name, initialValue) \
Q_PROPERTY(type name READ name NOTIFY name##Changed) \
public: \
type name() { return _##name; }; \
private: \
type _##name{ initialValue };
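For reference, AI_PROPERTY(bool, audioMuted, false) below expands to roughly the following (illustrative expansion, not part of this change):
Q_PROPERTY(bool audioMuted READ audioMuted NOTIFY audioMutedChanged)
public:
    bool audioMuted() { return _audioMuted; };
private:
    bool _audioMuted{ false };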
class AvatarInputs : public QQuickItem {
Q_OBJECT
HIFI_QML_DECL
AI_PROPERTY(bool, cameraEnabled, false)
AI_PROPERTY(bool, cameraMuted, false)
AI_PROPERTY(bool, audioMuted, false)
AI_PROPERTY(bool, audioClipping, false)
AI_PROPERTY(float, audioLevel, 0)
AI_PROPERTY(bool, mirrorVisible, false)
AI_PROPERTY(bool, mirrorZoomed, true)
public:
static AvatarInputs* getInstance();
AvatarInputs(QQuickItem* parent = nullptr);
void update();
signals:
void cameraEnabledChanged();
void cameraMutedChanged();
void audioMutedChanged();
void audioClippingChanged();
void audioLevelChanged();
void mirrorVisibleChanged();
void mirrorZoomedChanged();
protected:
Q_INVOKABLE void resetSensors();
Q_INVOKABLE void toggleCameraMute();
Q_INVOKABLE void toggleAudioMute();
Q_INVOKABLE void toggleZoom();
Q_INVOKABLE void closeMirror();
private:
float _trailingAudioLoudness{ 0 };
};
#endif // hifi_AvatarInputs_h

View file

@@ -175,7 +175,7 @@ void PreferencesDialog::loadPreferences() {
ui.maxOctreePPSSpin->setValue(qApp->getMaxOctreePacketsPerSecond());
ui.oculusUIAngularSizeSpin->setValue(qApp->getApplicationOverlay().getHmdUIAngularSize());
ui.oculusUIAngularSizeSpin->setValue(qApp->getApplicationCompositor().getHmdUIAngularSize());
SixenseManager& sixense = SixenseManager::getInstance();
ui.sixenseReticleMoveSpeedSpin->setValue(sixense.getReticleMoveSpeed());
@@ -239,7 +239,7 @@ void PreferencesDialog::savePreferences() {
qApp->setMaxOctreePacketsPerSecond(ui.maxOctreePPSSpin->value());
qApp->getApplicationOverlay().setHmdUIAngularSize(ui.oculusUIAngularSizeSpin->value());
qApp->getApplicationCompositor().setHmdUIAngularSize(ui.oculusUIAngularSizeSpin->value());
SixenseManager& sixense = SixenseManager::getInstance();
sixense.setReticleMoveSpeed(ui.sixenseReticleMoveSpeedSpin->value());

View file

@@ -1,134 +0,0 @@
//
// RearMirrorTools.cpp
// interface/src/ui
//
// Created by Stojce Slavkovski on 10/23/2013.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InterfaceConfig.h"
#include <QMouseEvent>
#include <PathUtils.h>
#include <SharedUtil.h>
#include <gpu/GLBackend.h>
#include "Application.h"
#include "RearMirrorTools.h"
#include "Util.h"
const int ICON_SIZE = 24;
const int ICON_PADDING = 5;
const char SETTINGS_GROUP_NAME[] = "Rear View Tools";
const char ZOOM_LEVEL_SETTINGS[] = "ZoomLevel";
Setting::Handle<int> RearMirrorTools::rearViewZoomLevel(QStringList() << SETTINGS_GROUP_NAME << ZOOM_LEVEL_SETTINGS,
ZoomLevel::HEAD);
RearMirrorTools::RearMirrorTools(QRect& bounds) :
_bounds(bounds),
_windowed(false),
_fullScreen(false)
{
_closeTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/close.svg");
_zoomHeadTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/plus.svg");
_zoomBodyTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/minus.svg");
_shrinkIconRect = QRect(ICON_PADDING, ICON_PADDING, ICON_SIZE, ICON_SIZE);
_closeIconRect = QRect(_bounds.left() + ICON_PADDING, _bounds.top() + ICON_PADDING, ICON_SIZE, ICON_SIZE);
_resetIconRect = QRect(_bounds.width() - ICON_SIZE - ICON_PADDING, _bounds.top() + ICON_PADDING, ICON_SIZE, ICON_SIZE);
_bodyZoomIconRect = QRect(_bounds.width() - ICON_SIZE - ICON_PADDING, _bounds.bottom() - ICON_PADDING - ICON_SIZE, ICON_SIZE, ICON_SIZE);
_headZoomIconRect = QRect(_bounds.left() + ICON_PADDING, _bounds.bottom() - ICON_PADDING - ICON_SIZE, ICON_SIZE, ICON_SIZE);
}
void RearMirrorTools::render(RenderArgs* renderArgs, bool fullScreen, const QPoint & mousePosition) {
if (fullScreen) {
_fullScreen = true;
displayIcon(QRect(QPoint(), qApp->getDeviceSize()), _shrinkIconRect, _closeTexture);
} else {
// render rear view tools if mouse is in the bounds
_windowed = _bounds.contains(mousePosition);
if (_windowed) {
displayIcon(_bounds, _closeIconRect, _closeTexture);
ZoomLevel zoomLevel = (ZoomLevel)rearViewZoomLevel.get();
displayIcon(_bounds, _headZoomIconRect, _zoomHeadTexture, zoomLevel == HEAD);
displayIcon(_bounds, _bodyZoomIconRect, _zoomBodyTexture, zoomLevel == BODY);
}
}
}
bool RearMirrorTools::mousePressEvent(int x, int y) {
if (_windowed) {
if (_closeIconRect.contains(x, y)) {
_windowed = false;
emit closeView();
return true;
}
if (_headZoomIconRect.contains(x, y)) {
rearViewZoomLevel.set(HEAD);
return true;
}
if (_bodyZoomIconRect.contains(x, y)) {
rearViewZoomLevel.set(BODY);
return true;
}
if (_bounds.contains(x, y)) {
_windowed = false;
emit restoreView();
return true;
}
}
if (_fullScreen) {
if (_shrinkIconRect.contains(x, y)) {
_fullScreen = false;
emit shrinkView();
return true;
}
}
return false;
}
void RearMirrorTools::displayIcon(QRect bounds, QRect iconBounds, const gpu::TexturePointer& texture, bool selected) {
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadIdentity();
glOrtho(bounds.left(), bounds.right(), bounds.bottom(), bounds.top(), -1.0, 1.0);
glDisable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
glEnable(GL_TEXTURE_2D);
glm::vec4 quadColor;
if (selected) {
quadColor = glm::vec4(.5f, .5f, .5f, 1.0f);
} else {
quadColor = glm::vec4(1, 1, 1, 1);
}
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(texture));
glm::vec2 topLeft(iconBounds.left(), iconBounds.top());
glm::vec2 bottomRight(iconBounds.right(), iconBounds.bottom());
static const glm::vec2 texCoordTopLeft(0.0f, 1.0f);
static const glm::vec2 texCoordBottomRight(1.0f, 0.0f);
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor);
glPopMatrix();
glMatrixMode(GL_MODELVIEW);
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
}

View file

@@ -1,56 +0,0 @@
//
// RearMirrorTools.h
// interface/src/ui
//
// Created by Stojce Slavkovski on 10/23/2013.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_RearMirrorTools_h
#define hifi_RearMirrorTools_h
#include <gpu/Texture.h>
#include <SettingHandle.h>
enum ZoomLevel {
HEAD = 0,
BODY = 1
};
class RearMirrorTools : public QObject {
Q_OBJECT
public:
RearMirrorTools(QRect& bounds);
void render(RenderArgs* renderArgs, bool fullScreen, const QPoint & mousePos);
bool mousePressEvent(int x, int y);
static Setting::Handle<int> rearViewZoomLevel;
signals:
void closeView();
void shrinkView();
void resetView();
void restoreView();
private:
QRect _bounds;
gpu::TexturePointer _closeTexture;
gpu::TexturePointer _zoomBodyTexture;
gpu::TexturePointer _zoomHeadTexture;
QRect _closeIconRect;
QRect _resetIconRect;
QRect _shrinkIconRect;
QRect _headZoomIconRect;
QRect _bodyZoomIconRect;
bool _windowed;
bool _fullScreen;
void displayIcon(QRect bounds, QRect iconBounds, const gpu::TexturePointer& texture, bool selected = false);
};
#endif // hifi_RearMirrorTools_h

View file

@@ -1,17 +1,17 @@
//
// Stats.cpp
// interface/src/ui
//
// Created by Lucas Crisman on 22/03/14.
// Created by Bradley Austin Davis 2015/06/17
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <sstream>
#include <gpu/GPUConfig.h>
#include <stdlib.h>
#include "Stats.h"
#include <sstream>
#include <QFontDatabase>
#include <glm/glm.hpp>
#include <glm/gtx/component_wise.hpp>
@@ -25,147 +25,31 @@
#include <LODManager.h>
#include <PerfStat.h>
#include "Stats.h"
#include "BandwidthRecorder.h"
#include "InterfaceConfig.h"
#include "Menu.h"
#include "Util.h"
#include "SequenceNumberStats.h"
HIFI_QML_DEF(Stats)
using namespace std;
const int STATS_PELS_PER_LINE = 16;
const int STATS_PELS_INITIALOFFSET = 12;
const int STATS_GENERAL_MIN_WIDTH = 165;
const int STATS_PING_MIN_WIDTH = 190;
const int STATS_GEO_MIN_WIDTH = 240;
const int STATS_OCTREE_MIN_WIDTH = 410;
static Stats* INSTANCE{ nullptr };
Stats* Stats::getInstance() {
static Stats stats;
return &stats;
if (!INSTANCE) {
Stats::registerType();
Stats::show();
Q_ASSERT(INSTANCE);
}
return INSTANCE;
}
Stats::Stats():
_expanded(false),
_recentMaxPackets(0),
_resetRecentMaxPacketsSoon(true),
_generalStatsWidth(STATS_GENERAL_MIN_WIDTH),
_pingStatsWidth(STATS_PING_MIN_WIDTH),
_geoStatsWidth(STATS_GEO_MIN_WIDTH),
_octreeStatsWidth(STATS_OCTREE_MIN_WIDTH),
_lastHorizontalOffset(0)
{
auto canvasSize = Application::getInstance()->getCanvasSize();
resetWidth(canvasSize.x, 0);
}
void Stats::toggleExpanded() {
_expanded = !_expanded;
}
// called on mouse click release
// check for clicks over stats in order to expand or contract them
void Stats::checkClick(int mouseX, int mouseY, int mouseDragStartedX, int mouseDragStartedY, int horizontalOffset) {
auto canvasSize = Application::getInstance()->getCanvasSize();
if (0 != glm::compMax(glm::abs(glm::ivec2(mouseX - mouseDragStartedX, mouseY - mouseDragStartedY)))) {
// not worried about dragging on stats
return;
}
int statsHeight = 0,
statsWidth = 0,
statsX = 0,
statsY = 0,
lines = 0;
statsX = horizontalOffset;
// top-left stats click
lines = _expanded ? 5 : 3;
statsHeight = lines * STATS_PELS_PER_LINE + 10;
if (mouseX > statsX && mouseX < statsX + _generalStatsWidth && mouseY > statsY && mouseY < statsY + statsHeight) {
toggleExpanded();
return;
}
statsX += _generalStatsWidth;
// ping stats click
if (Menu::getInstance()->isOptionChecked(MenuOption::TestPing)) {
lines = _expanded ? 4 : 3;
statsHeight = lines * STATS_PELS_PER_LINE + 10;
if (mouseX > statsX && mouseX < statsX + _pingStatsWidth && mouseY > statsY && mouseY < statsY + statsHeight) {
toggleExpanded();
return;
}
statsX += _pingStatsWidth;
}
// geo stats panel click
lines = _expanded ? 4 : 3;
statsHeight = lines * STATS_PELS_PER_LINE + 10;
if (mouseX > statsX && mouseX < statsX + _geoStatsWidth && mouseY > statsY && mouseY < statsY + statsHeight) {
toggleExpanded();
return;
}
statsX += _geoStatsWidth;
// top-right stats click
lines = _expanded ? 11 : 3;
statsHeight = lines * STATS_PELS_PER_LINE + 10;
statsWidth = canvasSize.x - statsX;
if (mouseX > statsX && mouseX < statsX + statsWidth && mouseY > statsY && mouseY < statsY + statsHeight) {
toggleExpanded();
return;
}
}
void Stats::resetWidth(int width, int horizontalOffset) {
auto canvasSize = Application::getInstance()->getCanvasSize();
int extraSpace = canvasSize.x - horizontalOffset - 2
- STATS_GENERAL_MIN_WIDTH
- (Menu::getInstance()->isOptionChecked(MenuOption::TestPing) ? STATS_PING_MIN_WIDTH -1 : 0)
- STATS_GEO_MIN_WIDTH
- STATS_OCTREE_MIN_WIDTH;
int panels = 4;
_generalStatsWidth = STATS_GENERAL_MIN_WIDTH;
if (Menu::getInstance()->isOptionChecked(MenuOption::TestPing)) {
_pingStatsWidth = STATS_PING_MIN_WIDTH;
} else {
_pingStatsWidth = 0;
panels = 3;
}
_geoStatsWidth = STATS_GEO_MIN_WIDTH;
_octreeStatsWidth = STATS_OCTREE_MIN_WIDTH;
if (extraSpace > panels) {
_generalStatsWidth += (int) extraSpace / panels;
if (Menu::getInstance()->isOptionChecked(MenuOption::TestPing)) {
_pingStatsWidth += (int) extraSpace / panels;
}
_geoStatsWidth += (int) extraSpace / panels;
_octreeStatsWidth += canvasSize.x -
(_generalStatsWidth + _pingStatsWidth + _geoStatsWidth + 3);
}
}
// translucent background box that makes stats more readable
void Stats::drawBackground(unsigned int rgba, int x, int y, int width, int height) {
glm::vec4 color(((rgba >> 24) & 0xff) / 255.0f,
((rgba >> 16) & 0xff) / 255.0f,
((rgba >> 8) & 0xff) / 255.0f,
(rgba & 0xff) / 255.0f);
// FIX ME: is this correct? It seems to work to fix textures bleeding into us...
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
DependencyManager::get<GeometryCache>()->renderQuad(x, y, width, height, color);
Stats::Stats(QQuickItem* parent) : QQuickItem(parent) {
INSTANCE = this;
const QFont font = QFontDatabase::systemFont(QFontDatabase::FixedFont);
_monospaceFont = font.family();
}
bool Stats::includeTimingRecord(const QString& name) {
@ -190,107 +74,240 @@ bool Stats::includeTimingRecord(const QString& name) {
return false;
}
// display expanded or contracted stats
void Stats::display(
const float* color,
int horizontalOffset,
float fps,
int inPacketsPerSecond,
int outPacketsPerSecond,
int inKbitsPerSecond,
int outKbitsPerSecond,
int voxelPacketsToProcess)
{
auto canvasSize = Application::getInstance()->getCanvasSize();
unsigned int backgroundColor = 0x33333399;
int verticalOffset = 0, lines = 0;
float scale = 0.10f;
float rotation = 0.0f;
int font = 2;
QLocale locale(QLocale::English);
std::stringstream octreeStats;
QSharedPointer<BandwidthRecorder> bandwidthRecorder = DependencyManager::get<BandwidthRecorder>();
if (_lastHorizontalOffset != horizontalOffset) {
resetWidth(canvasSize.x, horizontalOffset);
_lastHorizontalOffset = horizontalOffset;
#define STAT_UPDATE(name, src) \
{ \
auto val = src; \
if (_##name != val) { \
_##name = val; \
emit name##Changed(); \
} \
}
glPointSize(1.0f);
#define STAT_UPDATE_FLOAT(name, src, epsilon) \
{ \
float val = src; \
if (abs(_##name - val) >= epsilon) { \
_##name = val; \
emit name##Changed(); \
} \
}
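The STAT_UPDATE helpers write the cached backing field and emit the matching NOTIFY signal only when the value actually changes (STAT_UPDATE_FLOAT adds an epsilon), so QML bindings on the Stats item are not re-evaluated every frame. As an illustration, a call such as STAT_UPDATE(framerate, (int)qApp->getFps()) used further down expands to roughly this:
// Hand-expanded sketch of STAT_UPDATE(framerate, (int)qApp->getFps());
{
    auto val = (int)qApp->getFps();
    if (_framerate != val) {
        _framerate = val;          // update the cached backing field
        emit framerateChanged();   // notify any QML binding on Stats.framerate
    }
}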
void Stats::updateStats() {
if (!Menu::getInstance()->isOptionChecked(MenuOption::Stats)) {
if (isVisible()) {
setVisible(false);
}
return;
} else {
if (!isVisible()) {
setVisible(true);
}
}
bool shouldDisplayTimingDetail = Menu::getInstance()->isOptionChecked(MenuOption::DisplayDebugTimingDetails) &&
Menu::getInstance()->isOptionChecked(MenuOption::Stats) && isExpanded();
if (shouldDisplayTimingDetail != PerformanceTimer::isActive()) {
PerformanceTimer::setActive(shouldDisplayTimingDetail);
}
auto nodeList = DependencyManager::get<NodeList>();
auto avatarManager = DependencyManager::get<AvatarManager>();
// we need to take one avatar out so we don't include ourselves
int totalAvatars = DependencyManager::get<AvatarManager>()->size() - 1;
int totalServers = DependencyManager::get<NodeList>()->size();
STAT_UPDATE(avatarCount, avatarManager->size() - 1);
STAT_UPDATE(serverCount, nodeList->size());
STAT_UPDATE(framerate, (int)qApp->getFps());
lines = 5;
int columnOneWidth = _generalStatsWidth;
auto bandwidthRecorder = DependencyManager::get<BandwidthRecorder>();
STAT_UPDATE(packetInCount, bandwidthRecorder->getCachedTotalAverageInputPacketsPerSecond());
STAT_UPDATE(packetOutCount, bandwidthRecorder->getCachedTotalAverageOutputPacketsPerSecond());
STAT_UPDATE_FLOAT(mbpsIn, (float)bandwidthRecorder->getCachedTotalAverageInputKilobitsPerSecond() / 1000.0f, 0.01f);
STAT_UPDATE_FLOAT(mbpsOut, (float)bandwidthRecorder->getCachedTotalAverageOutputKilobitsPerSecond() / 1000.0f, 0.01f);
// Second column: ping
if (Menu::getInstance()->isOptionChecked(MenuOption::TestPing)) {
SharedNodePointer audioMixerNode = nodeList->soloNodeOfType(NodeType::AudioMixer);
SharedNodePointer avatarMixerNode = nodeList->soloNodeOfType(NodeType::AvatarMixer);
STAT_UPDATE(audioPing, audioMixerNode ? audioMixerNode->getPingMs() : -1);
STAT_UPDATE(avatarPing, avatarMixerNode ? avatarMixerNode->getPingMs() : -1);
// Now handle voxel servers, since there could be more than one, we average their ping times
unsigned long totalPingOctree = 0;
int octreeServerCount = 0;
int pingOctreeMax = 0;
int pingVoxel = -1;
nodeList->eachNode([&](const SharedNodePointer& node) {
// TODO: this should also support entities
if (node->getType() == NodeType::EntityServer) {
totalPingOctree += node->getPingMs();
octreeServerCount++;
if (pingOctreeMax < node->getPingMs()) {
pingOctreeMax = node->getPingMs();
}
}
});
if (octreeServerCount) {
pingVoxel = totalPingOctree / octreeServerCount;
}
//STAT_UPDATE(entitiesPing, pingVoxel);
//if (_expanded) {
// QString voxelMaxPing;
// if (pingVoxel >= 0) { // Average is only meaningful if pingVoxel is valid.
// voxelMaxPing = QString("Voxel max ping: %1").arg(pingOctreeMax);
// } else {
// voxelMaxPing = QString("Voxel max ping: --");
// }
} else {
// -2 causes the QML to hide the ping column
STAT_UPDATE(audioPing, -2);
}
// Third column, avatar stats
MyAvatar* myAvatar = avatarManager->getMyAvatar();
glm::vec3 avatarPos = myAvatar->getPosition();
STAT_UPDATE(position, QVector3D(avatarPos.x, avatarPos.y, avatarPos.z));
STAT_UPDATE_FLOAT(velocity, glm::length(myAvatar->getVelocity()), 0.1f);
STAT_UPDATE_FLOAT(yaw, myAvatar->getBodyYaw(), 0.1f);
if (_expanded) {
SharedNodePointer avatarMixer = nodeList->soloNodeOfType(NodeType::AvatarMixer);
if (avatarMixer) {
STAT_UPDATE(avatarMixerKbps, roundf(
bandwidthRecorder->getAverageInputKilobitsPerSecond(NodeType::AvatarMixer) +
bandwidthRecorder->getAverageOutputKilobitsPerSecond(NodeType::AvatarMixer)));
STAT_UPDATE(avatarMixerPps, roundf(
bandwidthRecorder->getAverageInputPacketsPerSecond(NodeType::AvatarMixer) +
bandwidthRecorder->getAverageOutputPacketsPerSecond(NodeType::AvatarMixer)));
} else {
STAT_UPDATE(avatarMixerKbps, -1);
STAT_UPDATE(avatarMixerPps, -1);
}
SharedNodePointer audioMixerNode = nodeList->soloNodeOfType(NodeType::AudioMixer);
if (audioMixerNode) {
STAT_UPDATE(audioMixerKbps, roundf(
bandwidthRecorder->getAverageInputKilobitsPerSecond(NodeType::AudioMixer) +
bandwidthRecorder->getAverageOutputKilobitsPerSecond(NodeType::AudioMixer)));
STAT_UPDATE(audioMixerPps, roundf(
bandwidthRecorder->getAverageInputPacketsPerSecond(NodeType::AudioMixer) +
bandwidthRecorder->getAverageOutputPacketsPerSecond(NodeType::AudioMixer)));
} else {
STAT_UPDATE(audioMixerKbps, -1);
STAT_UPDATE(audioMixerPps, -1);
}
STAT_UPDATE(downloads, ResourceCache::getLoadingRequests().size());
STAT_UPDATE(downloadsPending, ResourceCache::getPendingRequestCount());
// TODO fix to match original behavior
//stringstream downloads;
//downloads << "Downloads: ";
//foreach(Resource* resource, ) {
// downloads << (int)(resource->getProgress() * 100.0f) << "% ";
//}
//downloads << "(" << << " pending)";
} // expanded avatar column
// Fourth column, octree stats
int serverCount = 0;
int movingServerCount = 0;
unsigned long totalNodes = 0;
unsigned long totalInternal = 0;
unsigned long totalLeaves = 0;
std::stringstream sendingModeStream("");
sendingModeStream << "[";
NodeToOctreeSceneStats* octreeServerSceneStats = Application::getInstance()->getOcteeSceneStats();
for (NodeToOctreeSceneStatsIterator i = octreeServerSceneStats->begin(); i != octreeServerSceneStats->end(); i++) {
//const QUuid& uuid = i->first;
OctreeSceneStats& stats = i->second;
serverCount++;
if (_expanded) {
if (serverCount > 1) {
sendingModeStream << ",";
}
if (stats.isMoving()) {
sendingModeStream << "M";
movingServerCount++;
} else {
sendingModeStream << "S";
}
}
// calculate server node totals
totalNodes += stats.getTotalElements();
if (_expanded) {
totalInternal += stats.getTotalInternal();
totalLeaves += stats.getTotalLeaves();
}
}
if (_expanded) {
if (serverCount == 0) {
sendingModeStream << "---";
}
sendingModeStream << "] " << serverCount << " servers";
if (movingServerCount > 0) {
sendingModeStream << " <SCENE NOT STABLE>";
} else {
sendingModeStream << " <SCENE STABLE>";
}
QString sendingModeResult = sendingModeStream.str().c_str();
STAT_UPDATE(sendingMode, sendingModeResult);
}
// Incoming packets
QLocale locale(QLocale::English);
auto voxelPacketsToProcess = qApp->getOctreePacketProcessor().packetsToProcessCount();
if (_expanded) {
std::stringstream octreeStats;
QString packetsString = locale.toString((int)voxelPacketsToProcess);
QString maxString = locale.toString((int)_recentMaxPackets);
octreeStats << "Octree Packets to Process: " << qPrintable(packetsString)
<< " [Recent Max: " << qPrintable(maxString) << "]";
QString str = octreeStats.str().c_str();
STAT_UPDATE(packetStats, str);
// drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
}
if (_resetRecentMaxPacketsSoon && voxelPacketsToProcess > 0) {
_recentMaxPackets = 0;
_resetRecentMaxPacketsSoon = false;
}
if (voxelPacketsToProcess == 0) {
_resetRecentMaxPacketsSoon = true;
} else if (voxelPacketsToProcess > _recentMaxPackets) {
_recentMaxPackets = voxelPacketsToProcess;
}
// Server Octree Elements
STAT_UPDATE(serverElements, totalNodes);
STAT_UPDATE(localElements, OctreeElement::getNodeCount());
if (_expanded) {
STAT_UPDATE(serverInternal, totalInternal);
STAT_UPDATE(serverLeaves, totalLeaves);
// Local Voxels
STAT_UPDATE(localInternal, OctreeElement::getInternalNodeCount());
STAT_UPDATE(localLeaves, OctreeElement::getLeafNodeCount());
// LOD Details
STAT_UPDATE(lodStatus, "You can see " + DependencyManager::get<LODManager>()->getLODFeedbackText());
}
bool performanceTimerIsActive = PerformanceTimer::isActive();
bool displayPerf = _expanded && Menu::getInstance()->isOptionChecked(MenuOption::DisplayDebugTimingDetails);
if (displayPerf && performanceTimerIsActive) {
if (!_timingExpanded) {
_timingExpanded = true;
emit timingExpandedChanged();
}
PerformanceTimer::tallyAllTimerRecords(); // do this even if we're not displaying them, so they don't stack up
columnOneWidth = _generalStatsWidth + _pingStatsWidth + _geoStatsWidth; // 3 columns wide...
// we will also include room for 1 line per timing record and a header of 4 lines
lines += 4;
const QMap<QString, PerformanceTimerRecord>& allRecords = PerformanceTimer::getAllTimerRecords();
QMapIterator<QString, PerformanceTimerRecord> i(allRecords);
bool onlyDisplayTopTen = Menu::getInstance()->isOptionChecked(MenuOption::OnlyDisplayTopTen);
int statsLines = 0;
while (i.hasNext()) {
i.next();
if (includeTimingRecord(i.key())) {
lines++;
statsLines++;
if (onlyDisplayTopTen && statsLines == 10) {
break;
}
}
}
}
drawBackground(backgroundColor, horizontalOffset, 0, columnOneWidth, (lines + 1) * STATS_PELS_PER_LINE);
horizontalOffset += 5;
int columnOneHorizontalOffset = horizontalOffset;
QString serverNodes = QString("Servers: %1").arg(totalServers);
QString avatarNodes = QString("Avatars: %1").arg(totalAvatars);
QString framesPerSecond = QString("Framerate: %1 FPS").arg(fps, 3, 'f', 0);
verticalOffset = STATS_PELS_INITIALOFFSET; // first one is offset by less than a line
drawText(horizontalOffset, verticalOffset, scale, rotation, font, serverNodes.toUtf8().constData(), color);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, avatarNodes.toUtf8().constData(), color);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, framesPerSecond.toUtf8().constData(), color);
QString packetsPerSecondString = QString("Packets In/Out: %1/%2").arg(inPacketsPerSecond).arg(outPacketsPerSecond);
QString averageMegabitsPerSecond = QString("Mbps In/Out: %1/%2").
arg((float)inKbitsPerSecond * 1.0f / 1000.0f).
arg((float)outKbitsPerSecond * 1.0f / 1000.0f);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, packetsPerSecondString.toUtf8().constData(), color);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, averageMegabitsPerSecond.toUtf8().constData(), color);
// TODO: the display of these timing details should all be moved to JavaScript
if (displayPerf && performanceTimerIsActive) {
bool onlyDisplayTopTen = Menu::getInstance()->isOptionChecked(MenuOption::OnlyDisplayTopTen);
// Timing details...
verticalOffset += STATS_PELS_PER_LINE * 4; // skip 3 lines to be under the other columns
drawText(columnOneHorizontalOffset, verticalOffset, scale, rotation, font,
"-------------------------------------------------------- Function "
"------------------------------------------------------- --msecs- -calls--", color);
// First iterate all the records, and for the ones that should be included, insert them into
// a new Map sorted by average time...
bool onlyDisplayTopTen = Menu::getInstance()->isOptionChecked(MenuOption::OnlyDisplayTopTen);
QMap<float, QString> sortedRecords;
const QMap<QString, PerformanceTimerRecord>& allRecords = PerformanceTimer::getAllTimerRecords();
QMapIterator<QString, PerformanceTimerRecord> i(allRecords);
@ -306,316 +323,54 @@ void Stats::display(
int linesDisplayed = 0;
QMapIterator<float, QString> j(sortedRecords);
j.toBack();
QString perfLines;
while (j.hasPrevious()) {
j.previous();
QChar noBreakingSpace = QChar::Nbsp;
QString functionName = j.value();
static const QChar noBreakingSpace = QChar::Nbsp;
QString functionName = j.value();
const PerformanceTimerRecord& record = allRecords.value(functionName);
QString perfLine = QString("%1: %2 [%3]").
arg(QString(qPrintable(functionName)), 120, noBreakingSpace).
perfLines += QString("%1: %2 [%3]\n").
arg(QString(qPrintable(functionName)), 90, noBreakingSpace).
arg((float)record.getMovingAverage() / (float)USECS_PER_MSEC, 8, 'f', 3, noBreakingSpace).
arg((int)record.getCount(), 6, 10, noBreakingSpace);
verticalOffset += STATS_PELS_PER_LINE;
drawText(columnOneHorizontalOffset, verticalOffset, scale, rotation, font, perfLine.toUtf8().constData(), color);
linesDisplayed++;
if (onlyDisplayTopTen && linesDisplayed == 10) {
break;
}
}
_timingStats = perfLines;
emit timingStatsChanged();
} else if (_timingExpanded) {
_timingExpanded = false;
emit timingExpandedChanged();
}
verticalOffset = STATS_PELS_INITIALOFFSET;
horizontalOffset = _lastHorizontalOffset + _generalStatsWidth + 1;
if (Menu::getInstance()->isOptionChecked(MenuOption::TestPing)) {
int pingAudio = -1, pingAvatar = -1, pingVoxel = -1, pingOctreeMax = -1;
auto nodeList = DependencyManager::get<NodeList>();
SharedNodePointer audioMixerNode = nodeList->soloNodeOfType(NodeType::AudioMixer);
SharedNodePointer avatarMixerNode = nodeList->soloNodeOfType(NodeType::AvatarMixer);
pingAudio = audioMixerNode ? audioMixerNode->getPingMs() : -1;
pingAvatar = avatarMixerNode ? avatarMixerNode->getPingMs() : -1;
// Now handle voxel servers, since there could be more than one, we average their ping times
unsigned long totalPingOctree = 0;
int octreeServerCount = 0;
nodeList->eachNode([&totalPingOctree, &pingOctreeMax, &octreeServerCount](const SharedNodePointer& node){
// TODO: this should also support entities
if (node->getType() == NodeType::EntityServer) {
totalPingOctree += node->getPingMs();
octreeServerCount++;
if (pingOctreeMax < node->getPingMs()) {
pingOctreeMax = node->getPingMs();
}
}
});
if (octreeServerCount) {
pingVoxel = totalPingOctree / octreeServerCount;
}
lines = _expanded ? 4 : 3;
// only draw our background if column one didn't draw a wide background
if (columnOneWidth == _generalStatsWidth) {
drawBackground(backgroundColor, horizontalOffset, 0, _pingStatsWidth, (lines + 1) * STATS_PELS_PER_LINE);
}
horizontalOffset += 5;
QString audioPing;
if (pingAudio >= 0) {
audioPing = QString("Audio ping: %1").arg(pingAudio);
} else {
audioPing = QString("Audio ping: --");
}
QString avatarPing;
if (pingAvatar >= 0) {
avatarPing = QString("Avatar ping: %1").arg(pingAvatar);
} else {
avatarPing = QString("Avatar ping: --");
}
QString voxelAvgPing;
if (pingVoxel >= 0) {
voxelAvgPing = QString("Entities avg ping: %1").arg(pingVoxel);
} else {
voxelAvgPing = QString("Entities avg ping: --");
}
drawText(horizontalOffset, verticalOffset, scale, rotation, font, audioPing.toUtf8().constData(), color);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, avatarPing.toUtf8().constData(), color);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, voxelAvgPing.toUtf8().constData(), color);
if (_expanded) {
QString voxelMaxPing;
if (pingVoxel >= 0) { // Average is only meaningful if pingVoxel is valid.
voxelMaxPing = QString("Voxel max ping: %1").arg(pingOctreeMax);
} else {
voxelMaxPing = QString("Voxel max ping: --");
}
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, voxelMaxPing.toUtf8().constData(), color);
}
verticalOffset = STATS_PELS_INITIALOFFSET;
horizontalOffset = _lastHorizontalOffset + _generalStatsWidth + _pingStatsWidth + 2;
}
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::vec3 avatarPos = myAvatar->getPosition();
lines = _expanded ? 7 : 3;
if (columnOneWidth == _generalStatsWidth) {
drawBackground(backgroundColor, horizontalOffset, 0, _geoStatsWidth, (lines + 1) * STATS_PELS_PER_LINE);
}
horizontalOffset += 5;
QString avatarPosition = QString("Position: %1, %2, %3").
arg(avatarPos.x, -1, 'f', 1).
arg(avatarPos.y, -1, 'f', 1).
arg(avatarPos.z, -1, 'f', 1);
QString avatarVelocity = QString("Velocity: %1").arg(glm::length(myAvatar->getVelocity()), -1, 'f', 1);
QString avatarBodyYaw = QString("Yaw: %1").arg(myAvatar->getBodyYaw(), -1, 'f', 1);
QString avatarMixerStats;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, avatarPosition.toUtf8().constData(), color);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, avatarVelocity.toUtf8().constData(), color);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, avatarBodyYaw.toUtf8().constData(), color);
if (_expanded) {
SharedNodePointer avatarMixer = DependencyManager::get<NodeList>()->soloNodeOfType(NodeType::AvatarMixer);
if (avatarMixer) {
avatarMixerStats = QString("Avatar Mixer: %1 kbps, %2 pps").
arg(roundf(bandwidthRecorder->getAverageInputKilobitsPerSecond(NodeType::AvatarMixer) +
bandwidthRecorder->getAverageOutputKilobitsPerSecond(NodeType::AvatarMixer))).
arg(roundf(bandwidthRecorder->getAverageInputPacketsPerSecond(NodeType::AvatarMixer) +
bandwidthRecorder->getAverageOutputPacketsPerSecond(NodeType::AvatarMixer)));
} else {
avatarMixerStats = QString("No Avatar Mixer");
}
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, avatarMixerStats.toUtf8().constData(), color);
stringstream downloads;
downloads << "Downloads: ";
foreach (Resource* resource, ResourceCache::getLoadingRequests()) {
downloads << (int)(resource->getProgress() * 100.0f) << "% ";
}
downloads << "(" << ResourceCache::getPendingRequestCount() << " pending)";
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, downloads.str().c_str(), color);
}
verticalOffset = STATS_PELS_INITIALOFFSET;
horizontalOffset = _lastHorizontalOffset + _generalStatsWidth + _pingStatsWidth + _geoStatsWidth + 3;
lines = _expanded ? 10 : 3;
drawBackground(backgroundColor, horizontalOffset, 0, canvasSize.x - horizontalOffset,
(lines + 1) * STATS_PELS_PER_LINE);
horizontalOffset += 5;
// Model/Entity render details
octreeStats.str("");
octreeStats << "Triangles: " << _renderDetails._trianglesRendered
<< " / Quads:" << _renderDetails._quadsRendered
<< " / Material Switches:" << _renderDetails._materialSwitches;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
if (_expanded) {
octreeStats.str("");
octreeStats << " Mesh Parts Rendered Opaque: " << _renderDetails._opaque._rendered
<< " / Translucent:" << _renderDetails._translucent._rendered;
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
octreeStats.str("");
octreeStats << " Opaque considered: " << _renderDetails._opaque._considered
<< " / Out of view:" << _renderDetails._opaque._outOfView
<< " / Too small:" << _renderDetails._opaque._tooSmall;
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
octreeStats.str("");
octreeStats << " Translucent considered: " << _renderDetails._translucent._considered
<< " / Out of view:" << _renderDetails._translucent._outOfView
<< " / Too small:" << _renderDetails._translucent._tooSmall;
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
}
// iterate all the current voxel stats, and list their sending modes, and total voxel counts
std::stringstream sendingMode("");
sendingMode << "Octree Sending Mode: [";
int serverCount = 0;
int movingServerCount = 0;
unsigned long totalNodes = 0;
unsigned long totalInternal = 0;
unsigned long totalLeaves = 0;
NodeToOctreeSceneStats* octreeServerSceneStats = Application::getInstance()->getOcteeSceneStats();
for(NodeToOctreeSceneStatsIterator i = octreeServerSceneStats->begin(); i != octreeServerSceneStats->end(); i++) {
//const QUuid& uuid = i->first;
OctreeSceneStats& stats = i->second;
serverCount++;
if (_expanded) {
if (serverCount > 1) {
sendingMode << ",";
}
if (stats.isMoving()) {
sendingMode << "M";
movingServerCount++;
} else {
sendingMode << "S";
}
}
// calculate server node totals
totalNodes += stats.getTotalElements();
if (_expanded) {
totalInternal += stats.getTotalInternal();
totalLeaves += stats.getTotalLeaves();
}
}
if (_expanded) {
if (serverCount == 0) {
sendingMode << "---";
}
sendingMode << "] " << serverCount << " servers";
if (movingServerCount > 0) {
sendingMode << " <SCENE NOT STABLE>";
} else {
sendingMode << " <SCENE STABLE>";
}
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)sendingMode.str().c_str(), color);
}
// Incoming packets
if (_expanded) {
octreeStats.str("");
QString packetsString = locale.toString((int)voxelPacketsToProcess);
QString maxString = locale.toString((int)_recentMaxPackets);
octreeStats << "Octree Packets to Process: " << qPrintable(packetsString)
<< " [Recent Max: " << qPrintable(maxString) << "]";
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
}
if (_resetRecentMaxPacketsSoon && voxelPacketsToProcess > 0) {
_recentMaxPackets = 0;
_resetRecentMaxPacketsSoon = false;
}
if (voxelPacketsToProcess == 0) {
_resetRecentMaxPacketsSoon = true;
} else {
if (voxelPacketsToProcess > _recentMaxPackets) {
_recentMaxPackets = voxelPacketsToProcess;
}
}
QString serversTotalString = locale.toString((uint)totalNodes); // consider adding: .rightJustified(10, ' ');
unsigned long localTotal = OctreeElement::getNodeCount();
QString localTotalString = locale.toString((uint)localTotal); // consider adding: .rightJustified(10, ' ');
// Server Octree Elements
if (!_expanded) {
octreeStats.str("");
octreeStats << "Octree Elements Server: " << qPrintable(serversTotalString)
<< " Local:" << qPrintable(localTotalString);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
}
if (_expanded) {
octreeStats.str("");
octreeStats << "Octree Elements -";
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
QString serversInternalString = locale.toString((uint)totalInternal);
QString serversLeavesString = locale.toString((uint)totalLeaves);
octreeStats.str("");
octreeStats << " Server: " << qPrintable(serversTotalString) <<
" Internal: " << qPrintable(serversInternalString) <<
" Leaves: " << qPrintable(serversLeavesString);
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
// Local Voxels
unsigned long localInternal = OctreeElement::getInternalNodeCount();
unsigned long localLeaves = OctreeElement::getLeafNodeCount();
QString localInternalString = locale.toString((uint)localInternal);
QString localLeavesString = locale.toString((uint)localLeaves);
octreeStats.str("");
octreeStats << " Local: " << qPrintable(serversTotalString) <<
" Internal: " << qPrintable(localInternalString) <<
" Leaves: " << qPrintable(localLeavesString) << "";
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
}
// LOD Details
octreeStats.str("");
QString displayLODDetails = DependencyManager::get<LODManager>()->getLODFeedbackText();
octreeStats << "LOD: You can see " << qPrintable(displayLODDetails.trimmed());
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
}
void Stats::setRenderDetails(const RenderDetails& details) {
STAT_UPDATE(triangles, details._trianglesRendered);
STAT_UPDATE(quads, details._quadsRendered);
STAT_UPDATE(materialSwitches, details._materialSwitches);
if (_expanded) {
STAT_UPDATE(meshOpaque, details._opaque._rendered);
STAT_UPDATE(meshTranslucent, details._translucent._rendered);
STAT_UPDATE(opaqueConsidered, details._opaque._considered);
STAT_UPDATE(opaqueOutOfView, details._opaque._outOfView);
STAT_UPDATE(opaqueTooSmall, details._opaque._tooSmall);
STAT_UPDATE(translucentConsidered, details._translucent._considered);
STAT_UPDATE(translucentOutOfView, details._translucent._outOfView);
STAT_UPDATE(translucentTooSmall, details._translucent._tooSmall);
}
}
/*
// display expanded or contracted stats
void Stats::display(
int voxelPacketsToProcess)
{
// iterate all the current voxel stats, and list their sending modes, and total voxel counts
}
*/

View file

@ -1,8 +1,5 @@
//
// Stats.h
// interface/src/ui
//
// Created by Lucas Crisman on 22/03/14.
// Created by Bradley Austin Davis 2015/06/17
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
@ -13,47 +10,139 @@
#define hifi_Stats_h
#include <QObject>
#include <QQuickItem>
#include <QVector3D>
#include <OffscreenUi.h>
#include <RenderArgs.h>
class Stats: public QObject {
#define STATS_PROPERTY(type, name, initialValue) \
Q_PROPERTY(type name READ name NOTIFY name##Changed) \
public: \
type name() { return _##name; }; \
private: \
type _##name{ initialValue };
class Stats : public QQuickItem {
Q_OBJECT
HIFI_QML_DECL
Q_PROPERTY(bool expanded READ isExpanded WRITE setExpanded NOTIFY expandedChanged)
Q_PROPERTY(bool timingExpanded READ isTimingExpanded NOTIFY timingExpandedChanged)
Q_PROPERTY(QString monospaceFont READ monospaceFont)
STATS_PROPERTY(int, serverCount, 0)
STATS_PROPERTY(int, framerate, 0)
STATS_PROPERTY(int, avatarCount, 0)
STATS_PROPERTY(int, packetInCount, 0)
STATS_PROPERTY(int, packetOutCount, 0)
STATS_PROPERTY(float, mbpsIn, 0)
STATS_PROPERTY(float, mbpsOut, 0)
STATS_PROPERTY(int, audioPing, 0)
STATS_PROPERTY(int, avatarPing, 0)
STATS_PROPERTY(int, entitiesPing, 0)
STATS_PROPERTY(QVector3D, position, QVector3D(0, 0, 0) )
STATS_PROPERTY(float, velocity, 0)
STATS_PROPERTY(float, yaw, 0)
STATS_PROPERTY(int, avatarMixerKbps, 0)
STATS_PROPERTY(int, avatarMixerPps, 0)
STATS_PROPERTY(int, audioMixerKbps, 0)
STATS_PROPERTY(int, audioMixerPps, 0)
STATS_PROPERTY(int, downloads, 0)
STATS_PROPERTY(int, downloadsPending, 0)
STATS_PROPERTY(int, triangles, 0)
STATS_PROPERTY(int, quads, 0)
STATS_PROPERTY(int, materialSwitches, 0)
STATS_PROPERTY(int, meshOpaque, 0)
STATS_PROPERTY(int, meshTranslucent, 0)
STATS_PROPERTY(int, opaqueConsidered, 0)
STATS_PROPERTY(int, opaqueOutOfView, 0)
STATS_PROPERTY(int, opaqueTooSmall, 0)
STATS_PROPERTY(int, translucentConsidered, 0)
STATS_PROPERTY(int, translucentOutOfView, 0)
STATS_PROPERTY(int, translucentTooSmall, 0)
STATS_PROPERTY(QString, sendingMode, QString())
STATS_PROPERTY(QString, packetStats, QString())
STATS_PROPERTY(QString, lodStatus, QString())
STATS_PROPERTY(QString, timingStats, QString())
STATS_PROPERTY(int, serverElements, 0)
STATS_PROPERTY(int, serverInternal, 0)
STATS_PROPERTY(int, serverLeaves, 0)
STATS_PROPERTY(int, localElements, 0)
STATS_PROPERTY(int, localInternal, 0)
STATS_PROPERTY(int, localLeaves, 0)
public:
static Stats* getInstance();
Stats();
static void drawBackground(unsigned int rgba, int x, int y, int width, int height);
void toggleExpanded();
bool isExpanded() { return _expanded; }
void checkClick(int mouseX, int mouseY, int mouseDragStartedX, int mouseDragStartedY, int horizontalOffset);
void resetWidth(int width, int horizontalOffset);
void display(const float* color, int horizontalOffset, float fps, int inPacketsPerSecond, int outPacketsPerSecond,
int inKbitsPerSecond, int outKbitsPerSecond, int voxelPacketsToProcess);
Stats(QQuickItem* parent = nullptr);
bool includeTimingRecord(const QString& name);
void setRenderDetails(const RenderDetails& details) { _renderDetails = details; }
void setRenderDetails(const RenderDetails& details);
const QString& monospaceFont() {
return _monospaceFont;
}
void updateStats();
bool isExpanded() { return _expanded; }
bool isTimingExpanded() { return _timingExpanded; }
void setExpanded(bool expanded) {
if (_expanded != expanded) {
_expanded = expanded;
emit expandedChanged();
}
}
signals:
void expandedChanged();
void timingExpandedChanged();
void serverCountChanged();
void framerateChanged();
void avatarCountChanged();
void packetInCountChanged();
void packetOutCountChanged();
void mbpsInChanged();
void mbpsOutChanged();
void audioPingChanged();
void avatarPingChanged();
void entitiesPingChanged();
void positionChanged();
void velocityChanged();
void yawChanged();
void avatarMixerKbpsChanged();
void avatarMixerPpsChanged();
void audioMixerKbpsChanged();
void audioMixerPpsChanged();
void downloadsChanged();
void downloadsPendingChanged();
void trianglesChanged();
void quadsChanged();
void materialSwitchesChanged();
void meshOpaqueChanged();
void meshTranslucentChanged();
void opaqueConsideredChanged();
void opaqueOutOfViewChanged();
void opaqueTooSmallChanged();
void translucentConsideredChanged();
void translucentOutOfViewChanged();
void translucentTooSmallChanged();
void sendingModeChanged();
void packetStatsChanged();
void lodStatusChanged();
void serverElementsChanged();
void serverInternalChanged();
void serverLeavesChanged();
void localElementsChanged();
void localInternalChanged();
void localLeavesChanged();
void timingStatsChanged();
private:
static Stats* _sharedInstance;
bool _expanded;
int _recentMaxPackets; // recent max incoming voxel packets to process
bool _resetRecentMaxPacketsSoon;
int _generalStatsWidth;
int _bandwidthStatsWidth;
int _pingStatsWidth;
int _geoStatsWidth;
int _octreeStatsWidth;
int _lastHorizontalOffset;
RenderDetails _renderDetails;
int _recentMaxPackets{ 0 } ; // recent max incoming voxel packets to process
bool _resetRecentMaxPacketsSoon{ true };
bool _expanded{ false };
bool _timingExpanded{ false };
QString _monospaceFont;
};
#endif // hifi_Stats_h

View file

@ -258,7 +258,7 @@ void Overlays::deleteOverlay(unsigned int id) {
unsigned int Overlays::getOverlayAtPoint(const glm::vec2& point) {
glm::vec2 pointCopy = point;
if (qApp->isHMDMode()) {
pointCopy = qApp->getApplicationOverlay().screenToOverlay(point);
pointCopy = qApp->getApplicationCompositor().screenToOverlay(point);
}
QReadLocker lock(&_lock);

View file

@ -795,6 +795,11 @@ void AudioClient::handleAudioInput() {
delete[] inputAudioSamples;
// Remove DC offset
if (!_isStereoInput && !_audioSourceInjectEnabled) {
_inputGate.removeDCOffset(networkAudioSamples, numNetworkSamples);
}
// only impose the noise gate and perform tone injection if we are sending mono audio
if (!_isStereoInput && !_audioSourceInjectEnabled && _isNoiseGateEnabled) {
_inputGate.gateSamples(networkAudioSamples, numNetworkSamples);

View file

@ -33,6 +33,32 @@ AudioNoiseGate::AudioNoiseGate() :
}
void AudioNoiseGate::removeDCOffset(int16_t* samples, int numSamples) {
//
// DC Offset correction
//
// Measure the DC offset over a trailing number of frames, and remove it from the input signal.
// This causes the noise background measurements and server muting to be more accurate. Many off-board
// ADCs have a noticeable DC offset.
//
const float DC_OFFSET_AVERAGING = 0.99f;
float measuredDcOffset = 0.0f;
// Remove trailing DC offset from samples
for (int i = 0; i < numSamples; i++) {
measuredDcOffset += samples[i];
samples[i] -= (int16_t) _dcOffset;
}
// Update measured DC offset
measuredDcOffset /= numSamples;
if (_dcOffset == 0.0f) {
// On first frame, copy over measured offset
_dcOffset = measuredDcOffset;
} else {
_dcOffset = DC_OFFSET_AVERAGING * _dcOffset + (1.0f - DC_OFFSET_AVERAGING) * measuredDcOffset;
}
}
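The comment above describes a one-pole low-pass filter: each frame's measured mean is blended into the running estimate with weight (1 - DC_OFFSET_AVERAGING), so the estimate follows slow drift in the ADC while ignoring per-frame noise. A minimal standalone sketch of that smoothing, using made-up per-frame means:
// Illustrative sketch of the trailing DC estimate (sample means are hypothetical).
#include <cstdio>

int main() {
    const float DC_OFFSET_AVERAGING = 0.99f;
    const float frameMeans[] = { 120.0f, 118.0f, 122.0f, 119.0f };
    float dcOffset = 0.0f;
    for (float measured : frameMeans) {
        if (dcOffset == 0.0f) {
            dcOffset = measured;   // first frame: adopt the measurement directly
        } else {
            // otherwise fold 1% of the new mean into the running estimate
            dcOffset = DC_OFFSET_AVERAGING * dcOffset + (1.0f - DC_OFFSET_AVERAGING) * measured;
        }
        std::printf("dcOffset = %.3f\n", dcOffset);
    }
    return 0;
}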
void AudioNoiseGate::gateSamples(int16_t* samples, int numSamples) {
//
// Impose Noise Gate
@ -61,17 +87,12 @@ void AudioNoiseGate::gateSamples(int16_t* samples, int numSamples) {
const int NOISE_GATE_WIDTH = 5;
const int NOISE_GATE_CLOSE_FRAME_DELAY = 5;
const int NOISE_GATE_FRAMES_TO_AVERAGE = 5;
const float DC_OFFSET_AVERAGING = 0.99f;
// Check clipping, adjust DC offset, and check if should open noise gate
float measuredDcOffset = 0.0f;
// Check clipping, and check if should open noise gate
_didClipInLastFrame = false;
for (int i = 0; i < numSamples; i++) {
measuredDcOffset += samples[i];
samples[i] -= (int16_t) _dcOffset;
thisSample = std::abs(samples[i]);
if (thisSample >= ((float) AudioConstants::MAX_SAMPLE_VALUE * CLIPPING_THRESHOLD)) {
_didClipInLastFrame = true;
}
@ -83,14 +104,6 @@ void AudioNoiseGate::gateSamples(int16_t* samples, int numSamples) {
}
}
measuredDcOffset /= numSamples;
if (_dcOffset == 0.0f) {
// On first frame, copy over measured offset
_dcOffset = measuredDcOffset;
} else {
_dcOffset = DC_OFFSET_AVERAGING * _dcOffset + (1.0f - DC_OFFSET_AVERAGING) * measuredDcOffset;
}
_lastLoudness = fabs(loudness / numSamples);
if (_quietestFrame > _lastLoudness) {

View file

@ -21,6 +21,7 @@ public:
AudioNoiseGate();
void gateSamples(int16_t* samples, int numSamples);
void removeDCOffset(int16_t* samples, int numSamples);
bool clippedInLastFrame() const { return _didClipInLastFrame; }
float getMeasuredFloor() const { return _measuredFloor; }

View file

@ -44,9 +44,6 @@
#include "RenderablePolyVoxEntityItem.h"
#include "EntitiesRendererLogging.h"
#include "DependencyManager.h"
#include "AddressManager.h"
EntityTreeRenderer::EntityTreeRenderer(bool wantScripts, AbstractViewStateInterface* viewState,
AbstractScriptingServicesInterface* scriptingServices) :
OctreeRenderer(),

View file

@ -14,7 +14,7 @@
#include <stdint.h>
#include <QDebug>
#include <QObject>
#include <QHash>
#include <QScriptEngine>

View file

@ -12,6 +12,8 @@
#include "GLBackendShared.h"
#include <glm/gtc/type_ptr.hpp>
using namespace gpu;
GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
{
(&::gpu::GLBackend::do_draw),
@ -142,6 +144,14 @@ bool GLBackend::checkGLError(const char* name) {
}
}
bool GLBackend::checkGLErrorDebug(const char* name) {
#ifdef DEBUG
return checkGLError(name);
#else
Q_UNUSED(name);
return false;
#endif
}
void GLBackend::syncCache() {
syncTransformStateCache();
@ -687,4 +697,20 @@ void GLBackend::fetchMatrix(GLenum target, glm::mat4 & m) {
glGetFloatv(target, glm::value_ptr(m));
}
void GLBackend::checkGLStackStable(std::function<void()> f) {
#ifdef DEBUG
GLint mvDepth, prDepth;
glGetIntegerv(GL_MODELVIEW_STACK_DEPTH, &mvDepth);
glGetIntegerv(GL_PROJECTION_STACK_DEPTH, &prDepth);
#endif
f();
#ifdef DEBUG
GLint mvDepthFinal, prDepthFinal;
glGetIntegerv(GL_MODELVIEW_STACK_DEPTH, &mvDepthFinal);
glGetIntegerv(GL_PROJECTION_STACK_DEPTH, &prDepthFinal);
Q_ASSERT(mvDepth == mvDepthFinal);
Q_ASSERT(prDepth == prDepthFinal);
#endif
}
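checkGLStackStable is a debug-only guard: it snapshots the fixed-function modelview and projection stack depths, runs the callback, and asserts that both depths are unchanged, so an unbalanced glPushMatrix/glPopMatrix inside the lambda fails fast in DEBUG builds. A usage sketch (the drawing inside the lambda is hypothetical):
// Hypothetical caller: the lambda must leave both matrix stacks as it found them.
gpu::GLBackend::checkGLStackStable([&] {
    glPushMatrix();
    // ... legacy fixed-function drawing ...
    glPopMatrix();   // omitting this pop would trip the Q_ASSERTs above in DEBUG builds
});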

View file

@ -12,11 +12,13 @@
#define hifi_gpu_GLBackend_h
#include <assert.h>
#include <functional>
#include <bitset>
#include "GPUConfig.h"
#include "Context.h"
#include "Batch.h"
#include <bitset>
namespace gpu {
@ -47,6 +49,11 @@ public:
static bool checkGLError(const char* name = nullptr);
// Only checks in debug builds
static bool checkGLErrorDebug(const char* name = nullptr);
static void checkGLStackStable(std::function<void()> f);
static bool makeProgram(Shader& shader, const Shader::BindingSet& bindings = Shader::BindingSet());

View file

@ -11,6 +11,7 @@
#include "GPULogging.h"
#include "GLBackendShared.h"
using namespace gpu;
GLBackend::GLFramebuffer::GLFramebuffer() {}

View file

@ -17,9 +17,7 @@
#include "Batch.h"
using namespace gpu;
static const GLenum _primitiveToGLmode[NUM_PRIMITIVES] = {
static const GLenum _primitiveToGLmode[gpu::NUM_PRIMITIVES] = {
GL_POINTS,
GL_LINES,
GL_LINE_STRIP,
@ -30,7 +28,7 @@ static const GLenum _primitiveToGLmode[NUM_PRIMITIVES] = {
GL_QUAD_STRIP,
};
static const GLenum _elementTypeToGLType[NUM_TYPES]= {
static const GLenum _elementTypeToGLType[gpu::NUM_TYPES] = {
GL_FLOAT,
GL_INT,
GL_UNSIGNED_INT,
@ -49,10 +47,12 @@ static const GLenum _elementTypeToGLType[NUM_TYPES]= {
GL_UNSIGNED_BYTE
};
#if _DEBUG
#define CHECK_GL_ERROR() ::gpu::GLBackend::checkGLError(__FUNCTION__)
#else
#define CHECK_GL_ERROR() false
#endif
// Stupid preprocessor trick to turn the line macro into a string
#define CHECK_GL_ERROR_HELPER(x) #x
// FIXME doesn't build on Linux or Mac. Hmmmm
// #define CHECK_GL_ERROR() gpu::GLBackend::checkGLErrorDebug(__FUNCTION__ ":" CHECK_GL_ERROR_HELPER(__LINE__))
#define CHECK_GL_ERROR() gpu::GLBackend::checkGLErrorDebug(__FUNCTION__)
#define CHECK_GL_STACK_STABLE(f) gpu::GLBackend::checkGLStackStable(f)
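Stringizing __LINE__ reliably needs two macro layers, because the # operator suppresses expansion of its argument; the extra indirection lets __LINE__ expand to its number first. A hedged sketch of that pattern (the GL_CALL_TAG name is invented here); the combined function-and-line form above stays disabled because __FUNCTION__ concatenates as a string literal on MSVC but not on GCC/Clang.
// Illustrative only: the usual two-level stringize idiom.
#define STRINGIZE_2(x) #x            // second level: x has already been expanded
#define STRINGIZE(x) STRINGIZE_2(x)  // first level: forces __LINE__ to expand to its number
// Hypothetical tag combining file and line, e.g. "GLBackendShared.h:52":
#define GL_CALL_TAG __FILE__ ":" STRINGIZE(__LINE__)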
#endif

View file

@ -11,6 +11,7 @@
#include "GPULogging.h"
#include "GLBackendShared.h"
using namespace gpu;
GLBackend::GLTexture::GLTexture() :
_storageStamp(0),

View file

@ -308,6 +308,9 @@ void AddressManager::goToAddressFromObject(const QVariantMap& dataObject, const
handlePath(returnedPath, trigger);
}
} else {
// we're going to hit the index path, set that as the _newHostLookupPath
_newHostLookupPath = INDEX_PATH;
// we didn't override the path or get one back - ask the DS for the viewpoint of its index path
// which we will jump to if it exists
emit pathChangeRequired(INDEX_PATH);
@ -479,6 +482,7 @@ bool AddressManager::handleViewpoint(const QString& viewpointString, bool should
// We use _newHostLookupPath to determine if the client has already stored its last address
// before moving to a new host thanks to the information in the same lookup URL.
if (definitelyPathOnly || (!pathString.isEmpty() && pathString != _newHostLookupPath)) {
addCurrentAddressToHistory(LookupTrigger::UserInput);
}

View file

@ -65,9 +65,10 @@ void ViewFrustum::setProjection(const glm::mat4& projection) {
_farClip = -_corners[BOTTOM_LEFT_FAR].z;
_aspectRatio = (_corners[TOP_RIGHT_NEAR].x - _corners[BOTTOM_LEFT_NEAR].x) /
(_corners[TOP_RIGHT_NEAR].y - _corners[BOTTOM_LEFT_NEAR].y);
glm::vec3 right = glm::normalize(glm::vec3(_corners[TOP_RIGHT_NEAR]));
glm::vec3 left = glm::normalize(glm::vec3(_corners[TOP_LEFT_NEAR]));
_fieldOfView = abs(glm::degrees(glm::angle(right, left)));
glm::vec4 top = _inverseProjection * vec4(0, 1, -1, 1);
top /= top.w;
_fieldOfView = abs(glm::degrees(2.0f * abs(glm::angle(vec3(0, 0, -1), glm::normalize(vec3(top))))));
}
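The replacement code recovers the vertical field of view by unprojecting the clip-space point (0, 1, -1, 1), the middle of the top edge of the near plane, dividing by w, and doubling the angle between that direction and the forward axis (0, 0, -1). A standalone check of the same math, assuming a radians-based glm::perspective and the glm angle helper already used above:
// Standalone check of the FOV recovery above, with a known 90-degree projection.
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtx/vector_angle.hpp>
#include <cstdio>

int main() {
    glm::mat4 projection = glm::perspective(glm::radians(90.0f), 4.0f / 3.0f, 0.1f, 100.0f);
    glm::mat4 inverseProjection = glm::inverse(projection);
    glm::vec4 top = inverseProjection * glm::vec4(0, 1, -1, 1); // top of the near plane
    top /= top.w;
    float fov = glm::degrees(2.0f * glm::angle(glm::vec3(0, 0, -1),
                                               glm::normalize(glm::vec3(top))));
    std::printf("recovered vertical FOV: %.1f\n", fov); // prints ~90.0
    return 0;
}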
// ViewFrustum::calculateViewFrustum()

View file

@ -28,6 +28,9 @@
#include "RenderUtilsLogging.h"
#include "GeometryCache.h"
#include "standardTransformPNTC_vert.h"
#include "standardDrawTexture_frag.h"
//#define WANT_DEBUG
const int GeometryCache::UNKNOWN_ID = -1;
@ -1182,6 +1185,26 @@ void GeometryCache::renderQuad(gpu::Batch& batch, const glm::vec2& minCorner, co
batch.draw(gpu::QUADS, 4, 0);
}
void GeometryCache::renderUnitCube(gpu::Batch& batch) {
static const glm::vec4 color(1);
renderSolidCube(batch, 1, color);
}
void GeometryCache::renderUnitQuad(const glm::vec4& color, int id) {
gpu::Batch batch;
renderUnitQuad(batch, color, id);
gpu::GLBackend::renderBatch(batch);
}
void GeometryCache::renderUnitQuad(gpu::Batch& batch, const glm::vec4& color, int id) {
static const glm::vec2 topLeft(-1, 1);
static const glm::vec2 bottomRight(1, -1);
static const glm::vec2 texCoordTopLeft(0.0f, 1.0f);
static const glm::vec2 texCoordBottomRight(1.0f, 0.0f);
renderQuad(batch, topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, color, id);
}
void GeometryCache::renderQuad(const glm::vec2& minCorner, const glm::vec2& maxCorner,
const glm::vec2& texCoordMinCorner, const glm::vec2& texCoordMaxCorner,
const glm::vec4& color, int id) {
@ -1805,6 +1828,23 @@ QSharedPointer<Resource> GeometryCache::createResource(const QUrl& url, const QS
return geometry.staticCast<Resource>();
}
void GeometryCache::useSimpleDrawPipeline(gpu::Batch& batch) {
if (!_standardDrawPipeline) {
auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(standardTransformPNTC_vert)));
auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(standardDrawTexture_frag)));
auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));
gpu::Shader::makeProgram((*program));
auto state = gpu::StatePointer(new gpu::State());
// enable decal blend
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA);
_standardDrawPipeline.reset(gpu::Pipeline::create(program, state));
}
batch.setPipeline(_standardDrawPipeline);
}
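useSimpleDrawPipeline lazily compiles the standardTransformPNTC / standardDrawTexture program, enables src-alpha over blending ("decal blend") on its state, and binds the resulting pipeline to the batch. A hedged sketch of a caller, using only calls that appear elsewhere in this change (texture binding omitted):
// Hypothetical caller: bind the simple pipeline, then draw a unit quad through it.
gpu::Batch batch;
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->useSimpleDrawPipeline(batch);           // sets _standardDrawPipeline on the batch
geometryCache->renderUnitQuad(batch, glm::vec4(1.0f)); // queues the textured unit quad
gpu::GLBackend::renderBatch(batch);                    // submit, as the non-batch overloads do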
const float NetworkGeometry::NO_HYSTERESIS = -1.0f;
NetworkGeometry::NetworkGeometry(const QUrl& url, const QSharedPointer<NetworkGeometry>& fallback, bool delayLoad,

View file

@ -155,6 +155,10 @@ public:
void renderBevelCornersRect(int x, int y, int width, int height, int bevelDistance, const glm::vec4& color, int id = UNKNOWN_ID);
void renderBevelCornersRect(gpu::Batch& batch, int x, int y, int width, int height, int bevelDistance, const glm::vec4& color, int id = UNKNOWN_ID);
void renderUnitCube(gpu::Batch& batch);
void renderUnitQuad(const glm::vec4& color = glm::vec4(1), int id = UNKNOWN_ID);
void renderUnitQuad(gpu::Batch& batch, const glm::vec4& color = glm::vec4(1), int id = UNKNOWN_ID);
void renderQuad(int x, int y, int width, int height, const glm::vec4& color, int id = UNKNOWN_ID)
{ renderQuad(glm::vec2(x,y), glm::vec2(x + width, y + height), color, id); }
void renderQuad(gpu::Batch& batch, int x, int y, int width, int height, const glm::vec4& color, int id = UNKNOWN_ID)
@ -249,6 +253,9 @@ public:
/// \param delayLoad if true, don't load the geometry immediately; wait until load is first requested
QSharedPointer<NetworkGeometry> getGeometry(const QUrl& url, const QUrl& fallback = QUrl(), bool delayLoad = false);
/// Set a batch to the simple pipeline
void useSimpleDrawPipeline(gpu::Batch& batch);
protected:
virtual QSharedPointer<Resource> createResource(const QUrl& url,
@ -266,6 +273,7 @@ private:
int vertexSize;
};
gpu::PipelinePointer _standardDrawPipeline;
QHash<float, gpu::BufferPointer> _cubeVerticies;
QHash<Vec2Pair, gpu::BufferPointer> _cubeColors;
gpu::BufferPointer _wireCubeIndexBuffer;
@ -273,7 +281,7 @@ private:
QHash<float, gpu::BufferPointer> _solidCubeVertices;
QHash<Vec2Pair, gpu::BufferPointer> _solidCubeColors;
gpu::BufferPointer _solidCubeIndexBuffer;
class BatchItemDetails {
public:
static int population;

View file

@ -11,6 +11,9 @@
#include <QOpenGLDebugLogger>
#include <QGLWidget>
#include <QtQml>
#include <PerfStat.h>
#include "AbstractViewStateInterface.h"
Q_DECLARE_LOGGING_CATEGORY(offscreenFocus)
@ -91,6 +94,7 @@ void OffscreenQmlSurface::resize(const QSize& newSize) {
// Qt bug in 5.4 forces this check of pixel ratio,
// even though we're rendering offscreen.
qreal pixelRatio = 1.0;
_qmlEngine->rootContext()->setContextProperty("surfaceSize", newSize);
if (_renderControl && _renderControl->_renderWindow) {
pixelRatio = _renderControl->_renderWindow->devicePixelRatio();
} else {
@ -111,7 +115,6 @@ void OffscreenQmlSurface::resize(const QSize& newSize) {
_quickWindow->contentItem()->setSize(newSize);
}
// Update our members
if (_rootItem) {
_rootItem->setSize(newSize);
@ -212,6 +215,7 @@ QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QOb
void OffscreenQmlSurface::updateQuick() {
PerformanceTimer perfTimer("qmlUpdate");
if (_paused) {
return;
}
@ -376,4 +380,4 @@ void OffscreenQmlSurface::setProxyWindow(QWindow* window) {
QQuickWindow* OffscreenQmlSurface::getWindow() {
return _quickWindow;
}
}

View file

@ -0,0 +1,24 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// standardDrawTexture.frag
// fragment shader
//
// Created by Sam Gateau on 6/10/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the texture
uniform sampler2D colorMap;
varying vec2 varTexcoord;
varying vec4 varColor;
void main(void) {
vec4 color = texture2D(colorMap, varTexcoord);
gl_FragColor = color * varColor;
}

View file

@ -0,0 +1,35 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// standardTransformPNTC.slv
// vertex shader
//
// Created by Sam Gateau on 6/10/2015.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
varying vec3 varPosition;
varying vec3 varNormal;
varying vec2 varTexcoord;
varying vec4 varColor;
void main(void) {
varTexcoord = gl_MultiTexCoord0.xy;
varColor = gl_Color;
// standard transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
<$transformModelToClipPos(cam, obj, gl_Vertex, gl_Position)$>
<$transformModelToEyeDir(cam, obj, gl_Normal, varNormal)$>
varNormal = normalize(varNormal);
varPosition = gl_Vertex.xyz;
}

View file

@ -0,0 +1,76 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// stars.frag
// fragment shader
//
// Created by Bradley Austin Davis on 2015/06/19
varying vec2 varTexcoord;
varying vec3 varNormal;
varying vec3 varPosition;
const int star_iterations = 14;
const float time_scale = 0.2;
const vec3 col_star = vec3( 1.0, 0.7, 0.5 );
float hash( float n ) { return fract(sin(n)*123.456789); }
vec2 rotate( in vec2 uv, float a)
{
float c = cos( a );
float s = sin( a );
return vec2( c * uv.x - s * uv.y, s * uv.x + c * uv.y );
}
float noise( in vec3 p )
{
vec3 fl = floor( p );
vec3 fr = fract( p );
fr = fr * fr * ( 3.0 - 2.0 * fr );
float n = fl.x + fl.y * 157.0 + 113.0 * fl.z;
return mix( mix( mix( hash( n + 0.0), hash( n + 1.0 ), fr.x ),
mix( hash( n + 157.0), hash( n + 158.0 ), fr.x ), fr.y ),
mix( mix( hash( n + 113.0), hash( n + 114.0 ), fr.x ),
mix( hash( n + 270.0), hash( n + 271.0 ), fr.x ), fr.y ), fr.z );
}
float fbm( in vec2 p, float t )
{
float f;
f = 0.5000 * noise( vec3( p, t ) ); p *= 2.1;
f += 0.2500 * noise( vec3( p, t ) ); p *= 2.2;
f += 0.1250 * noise( vec3( p, t ) ); p *= 2.3;
f += 0.0625 * noise( vec3( p, t ) );
return f;
}
vec3 doBackgroundStars( in vec3 dir )
{
vec3 n = abs( dir );
vec2 uv = ( n.x > n.y && n.x > n.z ) ? dir.yz / dir.x:
( n.y > n.x && n.y > n.z ) ? dir.zx / dir.y:
dir.xy / dir.z;
float f = 0.0;
for( int i = 0 ; i < star_iterations; ++i )
{
uv = rotate( 1.07 * uv + vec2( 0.7 ), 0.5 );
float t = 10. * uv.x * uv.y;
vec2 u = cos( 100. * uv ) * fbm( 10. * uv, 0.0 );
f += smoothstep( 0.5, 0.55, u.x * u.y ) * ( 0.25 * sin( t ) + 0.75 );
}
return f * col_star;
}
void main(void) {
vec3 c = doBackgroundStars( normalize(varPosition) );
c = pow( c, vec3( 0.4545 ) );
gl_FragColor = vec4( c, 1.0 );
}

View file

@ -131,6 +131,21 @@ float aspect(const T& t) {
return (float)t.x / (float)t.y;
}
// Take values in an arbitrary range [0, size] and convert them to the range [0, 1]
template <typename T>
T toUnitScale(const T& value, const T& size) {
return value / size;
}
// Take values in an arbitrary range [0, size] and convert them to the range [-1, 1]
template <typename T>
T toNormalizedDeviceScale(const T& value, const T& size) {
T result = toUnitScale(value, size);
result *= 2.0f;
result -= 1.0f;
return result;
}
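A quick usage sketch for the two helpers above, mapping a hypothetical mouse position on a 640x480 canvas into unit scale and normalized device coordinates (expected results shown in the comments):
// Sketch only; the input values are hypothetical.
glm::vec2 canvasSize(640.0f, 480.0f);
glm::vec2 mousePos(320.0f, 120.0f);
glm::vec2 unit = toUnitScale(mousePos, canvasSize);            // (0.5, 0.25)
glm::vec2 ndc = toNormalizedDeviceScale(mousePos, canvasSize); // (0.0, -0.5)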
#define YAW(euler) euler.y
#define PITCH(euler) euler.x
#define ROLL(euler) euler.z

View file

@ -0,0 +1,51 @@
//
// Tooltip.cpp
//
// Created by Bradley Austin Davis on 2015/04/14
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Tooltip.h"
#include <QUuid>
HIFI_QML_DEF(Tooltip)
Tooltip::Tooltip(QQuickItem* parent) : QQuickItem(parent) {
}
Tooltip::~Tooltip() {
}
QString Tooltip::text() const {
return _text;
}
void Tooltip::setText(const QString& arg) {
if (arg != _text) {
_text = arg;
emit textChanged();
}
}
void Tooltip::setVisible(bool visible) {
QQuickItem::setVisible(visible);
}
QString Tooltip::showTip(const QString& text) {
const QString newTipId = QUuid::createUuid().toString();
Tooltip::show([&](QQmlContext*, QObject* object) {
object->setObjectName(newTipId);
object->setProperty("text", text);
});
return newTipId;
}
void Tooltip::closeTip(const QString& tipId) {
auto rootItem = DependencyManager::get<OffscreenUi>()->getRootItem();
QQuickItem* that = rootItem->findChild<QQuickItem*>(tipId);
if (that) {
that->deleteLater();
}
}
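showTip names the freshly created QML item with a new UUID string and returns that name, so a caller can later look the item up and delete exactly that tip. A usage sketch (the caller code is hypothetical):
// Hypothetical caller code.
QString tipId = Tooltip::showTip("Teleport to this location");
// ... later, when the hover or focus ends ...
Tooltip::closeTip(tipId);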

View file

@ -0,0 +1,45 @@
//
// Tooltip.h
//
// Created by Bradley Austin Davis on 2015/04/14
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Tooltip_h
#define hifi_Tooltip_h
#include "OffscreenQmlDialog.h"
class Tooltip : public QQuickItem
{
Q_OBJECT
HIFI_QML_DECL
private:
Q_PROPERTY(QString text READ text WRITE setText NOTIFY textChanged)
public:
Tooltip(QQuickItem* parent = 0);
virtual ~Tooltip();
QString text() const;
static QString showTip(const QString& text);
static void closeTip(const QString& tipId);
public slots:
virtual void setVisible(bool v);
void setText(const QString& arg);
signals:
void textChanged();
private:
QString _text;
};
#endif // hifi_Tooltip_h