Merge branch 'master' of https://github.com/highfidelity/hifi into yellow

Sam Gateau 2015-07-08 14:25:41 -07:00
commit 4c44eb63da
24 changed files with 1193 additions and 789 deletions

View file

@ -4,8 +4,8 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
 include(ExternalProject)
 ExternalProject_Add(
   ${EXTERNAL_NAME}
-  GIT_REPOSITORY https://github.com/matus-chochlik/oglplus.git
-  GIT_TAG a2681383928b1166f176512cbe0f95e96fe68d08
+  URL http://iweb.dl.sourceforge.net/project/oglplus/oglplus-0.63.x/oglplus-0.63.0.zip
+  URL_MD5 de984ab245b185b45c87415c0e052135
   CONFIGURE_COMMAND ""
   BUILD_COMMAND ""
   INSTALL_COMMAND ""

View file

@ -12,7 +12,6 @@ Script.load("progress.js");
 Script.load("edit.js");
 Script.load("selectAudioDevice.js");
 Script.load("inspect.js");
-Script.load("lobby.js");
 Script.load("notifications.js");
 Script.load("users.js");
 Script.load("grab.js");

View file

@ -0,0 +1,70 @@
//
// make-dummy.js
// examples
//
// Created by Seth Alves on 2015-6-10
// Copyright 2015 High Fidelity, Inc.
//
// Makes a boxing-dummy that responds to collisions.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
//
"use strict";
/*jslint vars: true*/
var Overlays, Entities, Controller, Script, MyAvatar, Vec3; // Referenced globals provided by High Fidelity.
var HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var rezButton = Overlays.addOverlay("image", {
x: 100,
y: 350,
width: 32,
height: 32,
imageURL: HIFI_PUBLIC_BUCKET + "images/close.png",
color: {
red: 255,
green: 255,
blue: 255
},
alpha: 1
});
function mousePressEvent(event) {
var clickedOverlay = Overlays.getOverlayAtPoint({
x: event.x,
y: event.y
});
if (clickedOverlay === rezButton) {
var boxId;
var position = Vec3.sum(MyAvatar.position, {x: 1.0, y: 0.4, z: 0.0});
boxId = Entities.addEntity({
type: "Box",
name: "dummy",
position: position,
dimensions: {x: 0.3, y: 0.7, z: 0.3},
gravity: {x: 0.0, y: -3.0, z: 0.0},
damping: 0.2,
collisionsWillMove: true
});
var pointToOffsetFrom = Vec3.sum(position, {x: 0.0, y: 2.0, z: 0.0});
Entities.addAction("offset", boxId, {pointToOffsetFrom: pointToOffsetFrom,
linearDistance: 2.0,
// linearTimeScale: 0.005
linearTimeScale: 0.1
});
}
}
function scriptEnding() {
Overlays.deleteOverlay(rezButton);
}
Controller.mousePressEvent.connect(mousePressEvent);
Script.scriptEnding.connect(scriptEnding);
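The "offset" action above is what makes the dummy spring back: it constrains the box to stay linearDistance (2.0 m) away from pointToOffsetFrom, a point two metres above the spawn position, and linearTimeScale controls how aggressively that constraint is enforced (the commented-out 0.005 would be far stiffer than 0.1). A minimal sketch of re-tuning the action at runtime, assuming the id returned by Entities.addAction is kept (the script above discards it; stick.js keeps its equivalent):

```javascript
// Illustrative only: retune an existing "offset" action on the dummy.
// boxId and actionId are assumed to have been captured from Entities.addEntity / Entities.addAction.
function setTetherStiffness(boxId, actionId, timeScale) {
    // A smaller linearTimeScale corrects distance errors faster, i.e. a stiffer tether.
    Entities.updateAction(boxId, actionId, { linearTimeScale: timeScale });
}

// setTetherStiffness(boxId, actionId, 0.005); // snappy, like the commented-out value
// setTetherStiffness(boxId, actionId, 0.5);   // loose; the dummy sways longer after a hit
```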

View file

@ -0,0 +1,195 @@
// stick.js
// examples
//
// Created by Seth Alves on 2015-6-10
// Copyright 2015 High Fidelity, Inc.
//
// Allow avatar to hold a stick
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
"use strict";
/*jslint vars: true*/
var Script, Entities, MyAvatar, Window, Overlays, Controller, Vec3, Quat, print; // Referenced globals provided by High Fidelity.
var hand = "right";
var nullActionID = "00000000-0000-0000-0000-000000000000";
var controllerID;
var controllerActive;
var stickID = null;
var actionID = nullActionID;
var dimensions = { x: 0.3, y: 0.1, z: 2.0 };
var AWAY_ORIENTATION = Quat.fromPitchYawRollDegrees(-90, 0, 0);
var stickModel = "https://hifi-public.s3.amazonaws.com/eric/models/stick.fbx";
var swordModel = "https://hifi-public.s3.amazonaws.com/ozan/props/sword/sword.fbx";
var whichModel = "sword";
var rezButton = Overlays.addOverlay("image", {
x: 100,
y: 380,
width: 32,
height: 32,
imageURL: "http://s3.amazonaws.com/hifi-public/images/delete.png",
color: {
red: 255,
green: 255,
blue: 255
},
alpha: 1
});
var health = 100;
var display;
var isAway = false;
function updateDisplay() {
var text = health.toString();
if (!display) {
health = 100;
display = Overlays.addOverlay("text", {
text: text,
font: { size: 20 },
color: {red: 0, green: 255, blue: 0},
backgroundColor: {red: 100, green: 100, blue: 100}, // Why doesn't this and the next work?
backgroundAlpha: 0.9,
x: Window.innerWidth - 50,
y: 50
});
} else {
Overlays.editOverlay(display, {text: text});
}
}
function removeDisplay() {
if (display) {
Overlays.deleteOverlay(display);
display = null;
}
}
function cleanUp() {
if (stickID) {
Entities.deleteAction(stickID, actionID);
Entities.deleteEntity(stickID);
stickID = null;
actionID = null;
}
removeDisplay();
Overlays.deleteOverlay(rezButton);
}
function computeEnergy(collision, entityID) {
var id = entityID || collision.idA || collision.idB;
var entity = id && Entities.getEntityProperties(id);
var mass = entity ? (entity.density * entity.dimensions.x * entity.dimensions.y * entity.dimensions.z) : 1;
var linearVelocityChange = Vec3.length(collision.velocityChange);
var energy = 0.5 * mass * linearVelocityChange * linearVelocityChange;
return Math.min(Math.max(1.0, Math.round(energy)), 20);
}
function gotHit(collision) {
if (isAway) { return; }
var energy = computeEnergy(collision);
health -= energy;
updateDisplay();
}
function scoreHit(idA, idB, collision) {
if (isAway) { return; }
var energy = computeEnergy(collision, idA);
health += energy;
updateDisplay();
}
function positionStick(stickOrientation) {
var baseOffset = {x: 0.3, y: 0.0, z: -dimensions.z / 2}; // FIXME: don't move yourself by colliding with your own capsule. Fudge of 0.3 in x.
var offset = Vec3.multiplyQbyV(stickOrientation, baseOffset);
Entities.updateAction(stickID, actionID, {relativePosition: offset,
relativeRotation: stickOrientation});
}
function mouseMoveEvent(event) {
if (!stickID || actionID === nullActionID || isAway) {
return;
}
var windowCenterX = Window.innerWidth / 2;
var windowCenterY = Window.innerHeight / 2;
var mouseXCenterOffset = event.x - windowCenterX;
var mouseYCenterOffset = event.y - windowCenterY;
var mouseXRatio = mouseXCenterOffset / windowCenterX;
var mouseYRatio = mouseYCenterOffset / windowCenterY;
var stickOrientation = Quat.fromPitchYawRollDegrees(mouseYRatio * -90, mouseXRatio * -90, 0);
positionStick(stickOrientation);
}
function initControls() {
if (hand === "right") {
controllerID = 3; // right handed
} else {
controllerID = 4; // left handed
}
}
function update() {
var palmPosition = Controller.getSpatialControlPosition(controllerID);
controllerActive = (Vec3.length(palmPosition) > 0);
if (!controllerActive) {
return;
}
var stickOrientation = Controller.getSpatialControlRawRotation(controllerID);
var adjustment = Quat.fromPitchYawRollDegrees(180, 0, 0);
stickOrientation = Quat.multiply(stickOrientation, adjustment);
positionStick(stickOrientation);
}
function toggleAway() {
isAway = !isAway;
if (isAway) {
positionStick(AWAY_ORIENTATION);
removeDisplay();
} else {
updateDisplay();
}
}
function onClick(event) {
switch (Overlays.getOverlayAtPoint({x: event.x, y: event.y})) {
case rezButton:
if (!stickID) {
stickID = Entities.addEntity({
type: "Model",
modelURL: (whichModel === "sword") ? swordModel : stickModel,
//compoundShapeURL: "https://hifi-public.s3.amazonaws.com/eric/models/stick.obj",
shapeType: "box",
dimensions: dimensions,
position: MyAvatar.getRightPalmPosition(), // initial position doesn't matter, as long as it's close
rotation: MyAvatar.orientation,
damping: 0.1,
collisionSoundURL: "http://public.highfidelity.io/sounds/Collisions-hitsandslaps/swordStrike1.wav",
restitution: 0.01,
collisionsWillMove: true
});
actionID = Entities.addAction("hold", stickID, {relativePosition: {x: 0.0, y: 0.0, z: -dimensions.z / 2},
hand: hand,
timeScale: 0.15});
if (actionID === nullActionID) {
print('*** FAILED TO MAKE SWORD ACTION ***');
cleanUp();
}
Script.addEventHandler(stickID, 'collisionWithEntity', scoreHit);
updateDisplay();
} else {
toggleAway();
}
break;
}
}
Script.scriptEnding.connect(cleanUp);
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mousePressEvent.connect(onClick);
Script.update.connect(update);
MyAvatar.collisionWithEntity.connect(gotHit);
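computeEnergy() above turns a collision into a rough kinetic-energy score, E = ½·m·|Δv|², with the mass taken from the entity's density times its bounding-box volume (defaulting to 1 when no entity is available) and the result rounded and clamped to the 1–20 range used for health changes. A self-contained sketch with illustrative numbers (plain JS, no Interface APIs):

```javascript
// Standalone version of the energy estimate used by computeEnergy() above.
function collisionEnergy(density, dimensions, velocityChange) {
    var mass = density * dimensions.x * dimensions.y * dimensions.z;      // density * volume
    var dv = Math.sqrt(velocityChange.x * velocityChange.x +
                       velocityChange.y * velocityChange.y +
                       velocityChange.z * velocityChange.z);               // |delta-v| in m/s
    var energy = 0.5 * mass * dv * dv;                                     // 1/2 * m * |delta-v|^2
    return Math.min(Math.max(1.0, Math.round(energy)), 20);                // clamp to [1, 20] points
}

// Example: the 0.3 x 0.1 x 2.0 m sword at an assumed density of 1000 kg/m^3 and |delta-v| = 0.5 m/s:
// mass = 60 kg, energy = 0.5 * 60 * 0.25 = 7.5, which rounds to 8 health points.
console.log(collisionEnergy(1000, { x: 0.3, y: 0.1, z: 2.0 }, { x: 0.5, y: 0.0, z: 0.0 }));
```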

examples/utilities/tools/cookies.js Executable file → Normal file
View file

@ -1,7 +1,7 @@
 //
 // cookies.js
 //
-// version 1.0
+// version 2.0
 //
 // Created by Sam Gateau, 4/1/2015
 // A simple UI panel that presents a list of properties and the proper widget to edit each one
@ -9,11 +9,10 @@
// Distributed under the Apache License, Version 2.0. // Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// //
// The Slider class // The Slider class
Slider = function(x,y,width,thumbSize) { Slider = function(x,y,width,thumbSize) {
this.background = Overlays.addOverlay("text", { this.background = Overlays.addOverlay("text", {
backgroundColor: { red: 125, green: 125, blue: 255 }, backgroundColor: { red: 200, green: 200, blue: 255 },
x: x, x: x,
y: y, y: y,
width: width, width: width,
@ -93,8 +92,8 @@ Slider = function(x,y,width,thumbSize) {
this.isMoving = false; this.isMoving = false;
}; };
// Public members:
// Public members:
this.setNormalizedValue = function(value) { this.setNormalizedValue = function(value) {
if (value < 0.0) { if (value < 0.0) {
this.thumbX = this.minThumbX; this.thumbX = this.minThumbX;
@ -120,23 +119,6 @@ Slider = function(x,y,width,thumbSize) {
this.onValueChanged = function(value) {}; this.onValueChanged = function(value) {};
this.setMaxValue = function(maxValue) {
if (this.maxValue == maxValue) {
return;
}
var currentVal = this.getValue();
this.maxValue = maxValue;
this.setValue(currentVal);
}
this.setMinValue = function(minValue) {
if (this.minValue == minValue) {
return;
}
var currentVal = this.getValue();
this.minValue = minValue;
this.setValue(currentVal);
}
this.destroy = function() { this.destroy = function() {
Overlays.deleteOverlay(this.background); Overlays.deleteOverlay(this.background);
Overlays.deleteOverlay(this.thumb); Overlays.deleteOverlay(this.thumb);
@ -151,10 +133,22 @@ Slider = function(x,y,width,thumbSize) {
} }
// The Checkbox class // The Checkbox class
Checkbox = function(x,y,thumbSize) { Checkbox = function(x,y,width,thumbSize) {
this.background = Overlays.addOverlay("text", {
backgroundColor: { red: 125, green: 125, blue: 255 },
x: x,
y: y,
width: width,
height: thumbSize,
alpha: 1.0,
backgroundAlpha: 0.5,
visible: true
});
this.thumb = Overlays.addOverlay("text", { this.thumb = Overlays.addOverlay("text", {
backgroundColor: { red: 255, green: 255, blue: 255 }, backgroundColor: { red: 255, green: 255, blue: 255 },
textFontSize: 10,
x: x, x: x,
y: y, y: y,
width: thumbSize, width: thumbSize,
@ -163,99 +157,83 @@ Checkbox = function(x,y,thumbSize) {
backgroundAlpha: 1.0, backgroundAlpha: 1.0,
visible: true visible: true
}); });
this.background = Overlays.addOverlay("text", {
backgroundColor: { red: 125, green: 125, blue: 255 },
x: x, this.thumbSize = thumbSize;
y: y, var checkX = x + (0.25 * thumbSize);
width: thumbSize * 2, var checkY = y + (0.25 * thumbSize);
height: thumbSize,
var checkMark = Overlays.addOverlay("text", {
backgroundColor: { red: 0, green: 255, blue: 0 },
x: checkX,
y: checkY,
width: thumbSize / 2.0,
height: thumbSize / 2.0,
alpha: 1.0, alpha: 1.0,
backgroundAlpha: 0.5,
visible: true visible: true
}); });
this.thumbSize = thumbSize; var unCheckMark = Overlays.addOverlay("image", {
this.thumbHalfSize = 0.5 * thumbSize; backgroundColor: { red: 255, green: 255, blue: 255 },
x: checkX + 1.0,
y: checkY + 1.0,
width: thumbSize / 2.5,
height: thumbSize / 2.5,
alpha: 1.0,
visible: boxCheckStatus
});
this.minThumbX = x + this.thumbHalfSize;
this.maxThumbX = x + thumbSize * 2 - this.thumbHalfSize;
this.thumbX = this.minThumbX;
this.minValue = 0.0; var boxCheckStatus;
this.maxValue = 1.0; var clickedBox = false;
this.clickOffsetX = 0;
this.isMoving = false;
this.updateThumb = function() { this.updateThumb = function() {
thumbTruePos = this.thumbX - 0.5 * this.thumbSize; if (clickedBox) {
Overlays.editOverlay(this.thumb, { x: thumbTruePos } ); boxCheckStatus = !boxCheckStatus;
if (boxCheckStatus) {
Overlays.editOverlay(unCheckMark, { visible: false });
} else {
Overlays.editOverlay(unCheckMark, { visible: true });
}
}
}; };
this.isClickableOverlayItem = function(item) { this.isClickableOverlayItem = function(item) {
return item == this.background; return (item == this.thumb) || (item == checkMark) || (item == unCheckMark);
};
this.onMouseMoveEvent = function(event) {
if (this.isMoving) {
newThumbX = event.x - this.clickOffsetX;
if (newThumbX < this.minThumbX) {
newThumbX = this.minThumbX;
}
if (newThumbX > this.maxThumbX) {
newThumbX = this.maxThumbX;
}
this.thumbX = newThumbX;
this.updateThumb();
this.onValueChanged(this.getValue());
}
}; };
this.onMousePressEvent = function(event, clickedOverlay) { this.onMousePressEvent = function(event, clickedOverlay) {
if (this.background != clickedOverlay) { if (!this.isClickableOverlayItem(clickedOverlay)) {
this.isMoving = false; this.isMoving = false;
clickedBox = false;
return; return;
} }
this.isMoving = true;
var clickOffset = event.x - this.thumbX; clickedBox = true;
if ((clickOffset > -this.thumbHalfSize) && (clickOffset < this.thumbHalfSize)) {
this.clickOffsetX = clickOffset;
} else {
this.clickOffsetX = 0;
this.thumbX = event.x;
this.updateThumb(); this.updateThumb();
this.onValueChanged(this.getValue()); this.onValueChanged(this.getValue());
}
}; };
this.onMouseReleaseEvent = function(event) { this.onMouseReleaseEvent = function(event) {
this.isMoving = false; this.clickedBox = false;
}; };
// Public members: // Public members:
this.setNormalizedValue = function(value) { this.setNormalizedValue = function(value) {
if (value < 0.0) { boxCheckStatus = value;
this.thumbX = this.minThumbX;
} else if (value > 1.0) {
this.thumbX = this.maxThumbX;
} else {
this.thumbX = value * (this.maxThumbX - this.minThumbX) + this.minThumbX;
}
this.updateThumb();
}; };
this.getNormalizedValue = function() { this.getNormalizedValue = function() {
return (this.thumbX - this.minThumbX) / (this.maxThumbX - this.minThumbX); return boxCheckStatus;
}; };
this.setValue = function(value) { this.setValue = function(value) {
var normValue = (value - this.minValue) / (this.maxValue - this.minValue); this.setNormalizedValue(value);
this.setNormalizedValue(normValue);
}; };
this.getValue = function() { this.getValue = function() {
return this.getNormalizedValue() * (this.maxValue - this.minValue) + this.minValue; return boxCheckStatus;
}; };
this.onValueChanged = function(value) {}; this.onValueChanged = function(value) {};
@ -263,7 +241,17 @@ Checkbox = function(x,y,thumbSize) {
this.destroy = function() { this.destroy = function() {
Overlays.deleteOverlay(this.background); Overlays.deleteOverlay(this.background);
Overlays.deleteOverlay(this.thumb); Overlays.deleteOverlay(this.thumb);
Overlays.deleteOverlay(checkMark);
Overlays.deleteOverlay(unCheckMark);
}; };
this.setThumbColor = function(color) {
Overlays.editOverlay(this.thumb, { red: 255, green: 255, blue: 255 } );
};
this.setBackgroundColor = function(color) {
Overlays.editOverlay(this.background, { red: 125, green: 125, blue: 255 });
};
} }
// The ColorBox class // The ColorBox class
@ -428,13 +416,21 @@ DirectionBox = function(x,y,width,thumbSize) {
this.onValueChanged = function(value) {}; this.onValueChanged = function(value) {};
} }
var textFontSize = 16; var textFontSize = 12;
function PanelItem(name, setter, getter, displayer, x, y, textWidth, valueWidth, height) { function PanelItem(name, setter, getter, displayer, x, y, textWidth, valueWidth, height) {
//print("creating panel item: " + name);
this.name = name; this.name = name;
this.displayer = typeof displayer !== 'undefined' ? displayer : function(value) {
this.displayer = typeof displayer !== 'undefined' ? displayer : function(value) { return value.toFixed(2); }; if(value == true) {
return "On";
} else if (value == false) {
return "Off";
}
return value.toFixed(2);
};
var topMargin = (height - textFontSize); var topMargin = (height - textFontSize);
this.title = Overlays.addOverlay("text", { this.title = Overlays.addOverlay("text", {
@ -463,21 +459,28 @@ function PanelItem(name, setter, getter, displayer, x, y, textWidth, valueWidth,
text: this.displayer(getter()), text: this.displayer(getter()),
font: {size: textFontSize}, font: {size: textFontSize},
topMargin: topMargin topMargin: topMargin
}); });
this.getter = getter; this.getter = getter;
this.resetValue = getter();
this.setter = function(value) { this.setter = function(value) {
setter(value); setter(value);
Overlays.editOverlay(this.value, {text: this.displayer(getter())}); Overlays.editOverlay(this.value, {text: this.displayer(getter())});
if (this.widget) { if (this.widget) {
this.widget.setValue(value); this.widget.setValue(value);
} }
//print("successfully set value of widget to " + value);
}; };
this.setterFromWidget = function(value) { this.setterFromWidget = function(value) {
setter(value); setter(value);
// ANd loop back the value after the final setter has been called // ANd loop back the value after the final setter has been called
var value = getter(); var value = getter();
if (this.widget) { if (this.widget) {
this.widget.setValue(value); this.widget.setValue(value);
} }
@ -519,7 +522,6 @@ Panel = function(x, y) {
} }
}; };
// we also handle click detection in our mousePressEvent()
this.mousePressEvent = function(event) { this.mousePressEvent = function(event) {
// Make sure we quitted previous widget // Make sure we quitted previous widget
if (this.activeWidget) { if (this.activeWidget) {
@ -536,12 +538,28 @@ Panel = function(x, y) {
if (widget.isClickableOverlayItem(clickedOverlay)) { if (widget.isClickableOverlayItem(clickedOverlay)) {
this.activeWidget = widget; this.activeWidget = widget;
this.activeWidget.onMousePressEvent(event, clickedOverlay); this.activeWidget.onMousePressEvent(event, clickedOverlay);
// print("clicked... widget=" + i);
break; break;
} }
} }
}; };
// Reset panel item upon double-clicking
this.mouseDoublePressEvent = function(event) {
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
for (var i in this.items) {
var item = this.items[i];
var widget = item.widget;
if (item.title == clickedOverlay || item.value == clickedOverlay) {
widget.updateThumb();
widget.onValueChanged(item.resetValue);
break;
}
}
}
this.mouseReleaseEvent = function(event) { this.mouseReleaseEvent = function(event) {
if (this.activeWidget) { if (this.activeWidget) {
this.activeWidget.onMouseReleaseEvent(event); this.activeWidget.onMouseReleaseEvent(event);
@ -557,27 +575,31 @@ Panel = function(x, y) {
slider.minValue = minValue; slider.minValue = minValue;
slider.maxValue = maxValue; slider.maxValue = maxValue;
item.widget = slider; item.widget = slider;
item.widget.onValueChanged = function(value) { item.setterFromWidget(value); }; item.widget.onValueChanged = function(value) { item.setterFromWidget(value); };
item.setter(getValue()); item.setter(getValue());
this.items[name] = item; this.items[name] = item;
this.nextY += rawYDelta; this.nextY += rawYDelta;
// print("created Item... slider=" + name);
}; };
this.newCheckbox = function(name, setValue, getValue, displayValue) { this.newCheckbox = function(name, setValue, getValue, displayValue) {
var display;
if (displayValue == true) {
display = function() {return "On";};
} else if (displayValue == false) {
display = function() {return "Off";};
}
var item = new PanelItem(name, setValue, getValue, displayValue, this.x, this.nextY, textWidth, valueWidth, rawHeight); var item = new PanelItem(name, setValue, getValue, display, this.x, this.nextY, textWidth, valueWidth, rawHeight);
var checkbox = new Checkbox(this.widgetX, this.nextY, rawHeight); var checkbox = new Checkbox(this.widgetX, this.nextY, widgetWidth, rawHeight);
item.widget = checkbox; item.widget = checkbox;
item.widget.onValueChanged = function(value) { item.setterFromWidget(value); }; item.widget.onValueChanged = function(value) { item.setterFromWidget(value); };
item.setter(getValue()); item.setter(getValue());
this.items[name] = item; this.items[name] = item;
this.nextY += rawYDelta; this.nextY += rawYDelta;
// print("created Item... slider=" + name); //print("created Item... checkbox=" + name);
}; };
this.newColorBox = function(name, setValue, getValue, displayValue) { this.newColorBox = function(name, setValue, getValue, displayValue) {
@ -630,14 +652,6 @@ Panel = function(x, y) {
return null; return null;
} }
this.getWidget = function(name) {
var item = this.items[name];
if (item != null) {
return item.widget;
}
return null;
}
this.update = function(name) { this.update = function(name) {
var item = this.items[name]; var item = this.items[name];
if (item != null) { if (item != null) {
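The reworked Checkbox in the diff above is no longer a miniature slider: it keeps a boolean boxCheckStatus, draws a check-mark overlay plus an "uncheck" overlay, and flips the mark's visibility whenever one of its overlays is clicked; getValue()/setValue() now pass that boolean straight through. A stripped-down sketch of the same toggle pattern, runnable inside Interface (positions, sizes, and colours are illustrative, not the ones cookies.js uses):

```javascript
// Minimal click-to-toggle checkbox built from two overlays.
var checked = false;
var box = Overlays.addOverlay("text", {
    x: 50, y: 50, width: 20, height: 20,
    backgroundColor: { red: 125, green: 125, blue: 255 },
    backgroundAlpha: 1.0, visible: true
});
var mark = Overlays.addOverlay("text", {
    x: 55, y: 55, width: 10, height: 10,
    backgroundColor: { red: 0, green: 255, blue: 0 },
    backgroundAlpha: 1.0, visible: checked
});

Controller.mousePressEvent.connect(function (event) {
    var hit = Overlays.getOverlayAtPoint({ x: event.x, y: event.y });
    if (hit === box || hit === mark) {
        checked = !checked;                                // toggle the stored value
        Overlays.editOverlay(mark, { visible: checked });  // show or hide the check mark
    }
});

Script.scriptEnding.connect(function () {
    Overlays.deleteOverlay(box);
    Overlays.deleteOverlay(mark);
});
```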

View file

@ -2304,38 +2304,42 @@ void Application::updateMyAvatarLookAtPosition() {
bool isLookingAtSomeone = false; bool isLookingAtSomeone = false;
glm::vec3 lookAtSpot; glm::vec3 lookAtSpot;
if (_myCamera.getMode() == CAMERA_MODE_MIRROR) { if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
// When I am in mirror mode, just look right at the camera (myself) // When I am in mirror mode, just look right at the camera (myself); don't switch gaze points because when physically
// looking in a mirror one's eyes appear steady.
if (!OculusManager::isConnected()) { if (!OculusManager::isConnected()) {
lookAtSpot = _myCamera.getPosition(); lookAtSpot = _myCamera.getPosition();
} else { } else {
if (_myAvatar->isLookingAtLeftEye()) { lookAtSpot = _myCamera.getPosition() + OculusManager::getMidEyePosition();
lookAtSpot = OculusManager::getLeftEyePosition();
} else {
lookAtSpot = OculusManager::getRightEyePosition();
} }
}
} else { } else {
AvatarSharedPointer lookingAt = _myAvatar->getLookAtTargetAvatar().lock(); AvatarSharedPointer lookingAt = _myAvatar->getLookAtTargetAvatar().lock();
if (lookingAt && _myAvatar != lookingAt.get()) { if (lookingAt && _myAvatar != lookingAt.get()) {
isLookingAtSomeone = true;
// If I am looking at someone else, look directly at one of their eyes // If I am looking at someone else, look directly at one of their eyes
if (tracker && !tracker->isMuted()) { isLookingAtSomeone = true;
// If a face tracker is active, look at the eye for the side my gaze is biased toward Head* lookingAtHead = static_cast<Avatar*>(lookingAt.get())->getHead();
if (tracker->getEstimatedEyeYaw() > _myAvatar->getHead()->getFinalYaw()) {
// Look at their right eye const float MAXIMUM_FACE_ANGLE = 65.0f * RADIANS_PER_DEGREE;
lookAtSpot = static_cast<Avatar*>(lookingAt.get())->getHead()->getRightEyePosition(); glm::vec3 lookingAtFaceOrientation = lookingAtHead->getFinalOrientationInWorldFrame() * IDENTITY_FRONT;
} else { glm::vec3 fromLookingAtToMe = glm::normalize(_myAvatar->getHead()->getEyePosition()
// Look at their left eye - lookingAtHead->getEyePosition());
lookAtSpot = static_cast<Avatar*>(lookingAt.get())->getHead()->getLeftEyePosition(); float faceAngle = glm::angle(lookingAtFaceOrientation, fromLookingAtToMe);
if (faceAngle < MAXIMUM_FACE_ANGLE) {
// Randomly look back and forth between look targets
switch (_myAvatar->getEyeContactTarget()) {
case LEFT_EYE:
lookAtSpot = lookingAtHead->getLeftEyePosition();
break;
case RIGHT_EYE:
lookAtSpot = lookingAtHead->getRightEyePosition();
break;
case MOUTH:
lookAtSpot = lookingAtHead->getMouthPosition();
break;
} }
} else { } else {
// Need to add randomly looking back and forth between left and right eye for case with no tracker // Just look at their head (mid point between eyes)
if (_myAvatar->isLookingAtLeftEye()) { lookAtSpot = lookingAtHead->getEyePosition();
lookAtSpot = static_cast<Avatar*>(lookingAt.get())->getHead()->getLeftEyePosition();
} else {
lookAtSpot = static_cast<Avatar*>(lookingAt.get())->getHead()->getRightEyePosition();
}
} }
} else { } else {
// I am not looking at anyone else, so just look forward // I am not looking at anyone else, so just look forward
@ -2343,14 +2347,13 @@ void Application::updateMyAvatarLookAtPosition() {
(_myAvatar->getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.0f, 0.0f, -TREE_SCALE)); (_myAvatar->getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
} }
} }
//
// Deflect the eyes a bit to match the detected Gaze from 3D camera if active // Deflect the eyes a bit to match the detected gaze from Faceshift if active.
// // DDE doesn't track eyes.
if (tracker && !tracker->isMuted()) { if (tracker && typeid(*tracker) == typeid(Faceshift) && !tracker->isMuted()) {
float eyePitch = tracker->getEstimatedEyePitch(); float eyePitch = tracker->getEstimatedEyePitch();
float eyeYaw = tracker->getEstimatedEyeYaw(); float eyeYaw = tracker->getEstimatedEyeYaw();
const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f; const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f;
// deflect using Faceshift gaze data
glm::vec3 origin = _myAvatar->getHead()->getEyePosition(); glm::vec3 origin = _myAvatar->getHead()->getEyePosition();
float pitchSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? -1.0f : 1.0f; float pitchSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? -1.0f : 1.0f;
float deflection = DependencyManager::get<Faceshift>()->getEyeDeflection(); float deflection = DependencyManager::get<Faceshift>()->getEyeDeflection();
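The reworked look-at code only aims at a specific eye or the mouth when the other avatar's face is actually turned toward mine: it takes the direction their head is facing and the normalized direction from their eyes to my eyes, and requires the angle between the two to be under MAXIMUM_FACE_ANGLE (65°); otherwise it falls back to their mid-eye point. A self-contained sketch of that gate using plain vectors (no engine types):

```javascript
// Standalone sketch of the 65-degree "is their face turned toward me?" test above.
var MAXIMUM_FACE_ANGLE = 65.0 * Math.PI / 180.0;

function normalize(v) {
    var len = Math.sqrt(v.x * v.x + v.y * v.y + v.z * v.z);
    return { x: v.x / len, y: v.y / len, z: v.z / len };
}
function angleBetween(a, b) {                                   // a and b must be normalized
    var dot = a.x * b.x + a.y * b.y + a.z * b.z;
    return Math.acos(Math.min(1, Math.max(-1, dot)));           // radians
}

function shouldTargetEyesOrMouth(theirFaceDirection, theirEyePosition, myEyePosition) {
    var fromThemToMe = normalize({ x: myEyePosition.x - theirEyePosition.x,
                                   y: myEyePosition.y - theirEyePosition.y,
                                   z: myEyePosition.z - theirEyePosition.z });
    return angleBetween(normalize(theirFaceDirection), fromThemToMe) < MAXIMUM_FACE_ANGLE;
}

// Facing me head-on passes the gate; facing 90 degrees away does not.
console.log(shouldTargetEyesOrMouth({ x: 0, y: 0, z: -1 }, { x: 0, y: 0, z: 0 }, { x: 0, y: 0, z: -2 })); // true
console.log(shouldTargetEyesOrMouth({ x: 1, y: 0, z: 0 },  { x: 0, y: 0, z: 0 }, { x: 0, y: 0, z: -2 })); // false
```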

View file

@ -750,7 +750,7 @@ void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum) co
const int text_y = -nameDynamicRect.height() / 2; const int text_y = -nameDynamicRect.height() / 2;
// Compute background position/size // Compute background position/size
static const float SLIGHTLY_BEHIND = -0.05f; static const float SLIGHTLY_IN_FRONT = 0.1f;
const int border = 0.1f * nameDynamicRect.height(); const int border = 0.1f * nameDynamicRect.height();
const int left = text_x - border; const int left = text_x - border;
const int bottom = text_y - border; const int bottom = text_y - border;
@ -765,16 +765,16 @@ void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum) co
// Compute display name transform // Compute display name transform
auto textTransform = calculateDisplayNameTransform(frustum, renderer->getFontSize()); auto textTransform = calculateDisplayNameTransform(frustum, renderer->getFontSize());
batch.setModelTransform(textTransform);
// Render background slightly behind to avoid z-fighting DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, false, true, true, true);
auto backgroundTransform = textTransform;
backgroundTransform.postTranslate(glm::vec3(0.0f, 0.0f, SLIGHTLY_BEHIND));
batch.setModelTransform(backgroundTransform);
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch);
DependencyManager::get<GeometryCache>()->renderBevelCornersRect(batch, left, bottom, width, height, DependencyManager::get<GeometryCache>()->renderBevelCornersRect(batch, left, bottom, width, height,
bevelDistance, backgroundColor); bevelDistance, backgroundColor);
// Render actual name // Render actual name
QByteArray nameUTF8 = renderedDisplayName.toLocal8Bit(); QByteArray nameUTF8 = renderedDisplayName.toLocal8Bit();
// Render text slightly in front to avoid z-fighting
textTransform.postTranslate(glm::vec3(0.0f, 0.0f, SLIGHTLY_IN_FRONT * renderer->getFontSize()));
batch.setModelTransform(textTransform); batch.setModelTransform(textTransform);
renderer->draw(batch, text_x, -text_y, nameUTF8.data(), textColor); renderer->draw(batch, text_x, -text_y, nameUTF8.data(), textColor);
} }

View file

@ -256,7 +256,6 @@ void AvatarManager::handleOutgoingChanges(VectorOfMotionStates& motionStates) {
 }

 void AvatarManager::handleCollisionEvents(CollisionEvents& collisionEvents) {
-    // TODO: expose avatar collision events to JS
     for (Collision collision : collisionEvents) {
         // TODO: Current physics uses null idA or idB for non-entities. The plan is to handle MOTIONSTATE_TYPE_AVATAR,
         // and then MOTIONSTATE_TYPE_MYAVATAR. As it is, this code only covers the case of my avatar (in which case one
@ -285,6 +284,7 @@ void AvatarManager::handleCollisionEvents(CollisionEvents& collisionEvents) {
             const float AVATAR_STRETCH_FACTOR = 1.0f;
             AudioInjector::playSound(collisionSoundURL, energyFactorOfFull, AVATAR_STRETCH_FACTOR, myAvatar->getPosition());
+            myAvatar->collisionWithEntity(collision);
         }
     }
 }

View file

@ -22,11 +22,6 @@
 #include "InterfaceConfig.h"
 #include "world.h"

-enum eyeContactTargets {
-    LEFT_EYE,
-    RIGHT_EYE,
-    MOUTH
-};

 const float EYE_EAR_GAP = 0.08f;
@ -77,6 +72,7 @@ public:
     const glm::vec3& getLeftEyePosition() const { return _leftEyePosition; }
     glm::vec3 getRightEarPosition() const { return _rightEyePosition + (getRightDirection() * EYE_EAR_GAP) + (getFrontDirection() * -EYE_EAR_GAP); }
     glm::vec3 getLeftEarPosition() const { return _leftEyePosition + (getRightDirection() * -EYE_EAR_GAP) + (getFrontDirection() * -EYE_EAR_GAP); }
+    glm::vec3 getMouthPosition() const { return _eyePosition - getUpDirection() * glm::length(_rightEyePosition - _leftEyePosition); }

     FaceModel& getFaceModel() { return _faceModel; }
     const FaceModel& getFaceModel() const { return _faceModel; }
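The new getMouthPosition() is a geometric estimate rather than a face-model lookup: start from the head's eye position and move down along the up direction by one eye-separation. A tiny sketch of that estimate with plain vectors, assuming the head's _eyePosition is the point midway between the eyes (illustrative values only):

```javascript
// Sketch of the mouth estimate: (mid-eye point) - up * (distance between the eyes).
function estimateMouthPosition(leftEye, rightEye, up) {
    var dx = rightEye.x - leftEye.x, dy = rightEye.y - leftEye.y, dz = rightEye.z - leftEye.z;
    var eyeSeparation = Math.sqrt(dx * dx + dy * dy + dz * dz);
    return {
        x: (leftEye.x + rightEye.x) / 2 - up.x * eyeSeparation,
        y: (leftEye.y + rightEye.y) / 2 - up.y * eyeSeparation,
        z: (leftEye.z + rightEye.z) / 2 - up.z * eyeSeparation
    };
}

// Eyes 6.5 cm apart at y = 1.70 m: the estimated mouth sits ~6.5 cm below the eye line.
console.log(estimateMouthPosition({ x: -0.0325, y: 1.70, z: 0 }, { x: 0.0325, y: 1.70, z: 0 }, { x: 0, y: 1, z: 0 }));
```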

View file

@ -34,6 +34,9 @@
#include <TextRenderer.h> #include <TextRenderer.h>
#include <UserActivityLogger.h> #include <UserActivityLogger.h>
#include "devices/Faceshift.h"
#include "devices/OculusManager.h"
#include "Application.h" #include "Application.h"
#include "AvatarManager.h" #include "AvatarManager.h"
#include "Environment.h" #include "Environment.h"
@ -42,7 +45,6 @@
#include "MyAvatar.h" #include "MyAvatar.h"
#include "Physics.h" #include "Physics.h"
#include "Recorder.h" #include "Recorder.h"
#include "devices/Faceshift.h"
#include "Util.h" #include "Util.h"
#include "InterfaceLogging.h" #include "InterfaceLogging.h"
@ -97,7 +99,7 @@ MyAvatar::MyAvatar() :
_shouldRender(true), _shouldRender(true),
_billboardValid(false), _billboardValid(false),
_feetTouchFloor(true), _feetTouchFloor(true),
_isLookingAtLeftEye(true), _eyeContactTarget(LEFT_EYE),
_realWorldFieldOfView("realWorldFieldOfView", _realWorldFieldOfView("realWorldFieldOfView",
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES), DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
_firstPersonSkeletonModel(this), _firstPersonSkeletonModel(this),
@ -884,7 +886,6 @@ void MyAvatar::updateLookAtTargetAvatar() {
const float KEEP_LOOKING_AT_CURRENT_ANGLE_FACTOR = 1.3f; const float KEEP_LOOKING_AT_CURRENT_ANGLE_FACTOR = 1.3f;
const float GREATEST_LOOKING_AT_DISTANCE = 10.0f; const float GREATEST_LOOKING_AT_DISTANCE = 10.0f;
int howManyLookingAtMe = 0;
foreach (const AvatarSharedPointer& avatarPointer, DependencyManager::get<AvatarManager>()->getAvatarHash()) { foreach (const AvatarSharedPointer& avatarPointer, DependencyManager::get<AvatarManager>()->getAvatarHash()) {
Avatar* avatar = static_cast<Avatar*>(avatarPointer.get()); Avatar* avatar = static_cast<Avatar*>(avatarPointer.get());
bool isCurrentTarget = avatar->getIsLookAtTarget(); bool isCurrentTarget = avatar->getIsLookAtTarget();
@ -897,17 +898,22 @@ void MyAvatar::updateLookAtTargetAvatar() {
_targetAvatarPosition = avatarPointer->getPosition(); _targetAvatarPosition = avatarPointer->getPosition();
smallestAngleTo = angleTo; smallestAngleTo = angleTo;
} }
// Check if this avatar is looking at me, and fix their gaze on my camera if so
if (Application::getInstance()->isLookingAtMyAvatar(avatar)) { if (Application::getInstance()->isLookingAtMyAvatar(avatar)) {
howManyLookingAtMe++; // Alter their gaze to look directly at my camera; this looks more natural than looking at my avatar's face.
// Have that avatar look directly at my camera // Offset their gaze according to whether they're looking at one of my eyes or my mouth.
// Philip TODO: correct to look at left/right eye glm::vec3 gazeOffset = avatar->getHead()->getLookAtPosition() - getHead()->getEyePosition();
if (qApp->isHMDMode()) { const float HUMAN_EYE_SEPARATION = 0.065f;
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()); float myEyeSeparation = glm::length(getHead()->getLeftEyePosition() - getHead()->getRightEyePosition());
// FIXME what is the point of this? gazeOffset = gazeOffset * HUMAN_EYE_SEPARATION / myEyeSeparation;
// avatar->getHead()->setCorrectedLookAtPosition(OculusManager::getLeftEyePosition());
if (Application::getInstance()->isHMDMode()) {
//avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getCamera()->getPosition()
// + OculusManager::getMidEyePosition() + gazeOffset);
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+ OculusManager::getMidEyePosition() + gazeOffset);
} else { } else {
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()); avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+ gazeOffset);
} }
} else { } else {
avatar->getHead()->clearCorrectedLookAtPosition(); avatar->getHead()->clearCorrectedLookAtPosition();
@ -924,12 +930,24 @@ void MyAvatar::clearLookAtTargetAvatar() {
_lookAtTargetAvatar.reset(); _lookAtTargetAvatar.reset();
} }
bool MyAvatar::isLookingAtLeftEye() { eyeContactTarget MyAvatar::getEyeContactTarget() {
float const CHANCE_OF_CHANGING_EYE = 0.01f; float const CHANCE_OF_CHANGING_TARGET = 0.01f;
if (randFloat() < CHANCE_OF_CHANGING_EYE) { if (randFloat() < CHANCE_OF_CHANGING_TARGET) {
_isLookingAtLeftEye = !_isLookingAtLeftEye; float const FIFTY_FIFTY_CHANCE = 0.5f;
switch (_eyeContactTarget) {
case LEFT_EYE:
_eyeContactTarget = (randFloat() < FIFTY_FIFTY_CHANCE) ? MOUTH : RIGHT_EYE;
break;
case RIGHT_EYE:
_eyeContactTarget = (randFloat() < FIFTY_FIFTY_CHANCE) ? LEFT_EYE : MOUTH;
break;
case MOUTH:
_eyeContactTarget = (randFloat() < FIFTY_FIFTY_CHANCE) ? RIGHT_EYE : LEFT_EYE;
break;
} }
return _isLookingAtLeftEye; }
return _eyeContactTarget;
} }
glm::vec3 MyAvatar::getDefaultEyePosition() const { glm::vec3 MyAvatar::getDefaultEyePosition() const {
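getEyeContactTarget() replaces the old two-state left/right flip: on roughly 1% of calls it leaves the current target (left eye, right eye, or mouth) and jumps to one of the other two with a 50/50 pick, so gaze wanders among three spots. A standalone sketch of that rotation:

```javascript
// Standalone sketch of the target rotation in MyAvatar::getEyeContactTarget().
var LEFT_EYE = 0, RIGHT_EYE = 1, MOUTH = 2;
var CHANCE_OF_CHANGING_TARGET = 0.01;   // about one switch per hundred queries
var eyeContactTarget = LEFT_EYE;

function getEyeContactTarget() {
    if (Math.random() < CHANCE_OF_CHANGING_TARGET) {
        var fiftyFifty = Math.random() < 0.5;
        switch (eyeContactTarget) {
        case LEFT_EYE:  eyeContactTarget = fiftyFifty ? MOUTH : RIGHT_EYE;    break;
        case RIGHT_EYE: eyeContactTarget = fiftyFifty ? LEFT_EYE : MOUTH;     break;
        case MOUTH:     eyeContactTarget = fiftyFifty ? RIGHT_EYE : LEFT_EYE; break;
        }
    }
    return eyeContactTarget;
}

// Queried once per frame, the target drifts among all three points instead of
// ping-ponging between two eyes, which reads as more natural eye contact.
for (var i = 0; i < 1000; i++) { getEyeContactTarget(); }
console.log(eyeContactTarget);
```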

View file

@ -19,6 +19,12 @@
class ModelItemID; class ModelItemID;
enum eyeContactTarget {
LEFT_EYE,
RIGHT_EYE,
MOUTH
};
class MyAvatar : public Avatar { class MyAvatar : public Avatar {
Q_OBJECT Q_OBJECT
Q_PROPERTY(bool shouldRenderLocally READ getShouldRenderLocally WRITE setShouldRenderLocally) Q_PROPERTY(bool shouldRenderLocally READ getShouldRenderLocally WRITE setShouldRenderLocally)
@ -94,7 +100,7 @@ public:
bool isMyAvatar() const { return true; } bool isMyAvatar() const { return true; }
bool isLookingAtLeftEye(); eyeContactTarget getEyeContactTarget();
virtual int parseDataAtOffset(const QByteArray& packet, int offset); virtual int parseDataAtOffset(const QByteArray& packet, int offset);
@ -209,6 +215,7 @@ public slots:
signals: signals:
void transformChanged(); void transformChanged();
void newCollisionSoundURL(const QUrl& url); void newCollisionSoundURL(const QUrl& url);
void collisionWithEntity(const Collision& collision);
private: private:
@ -251,7 +258,7 @@ private:
QList<AnimationHandlePointer> _animationHandles; QList<AnimationHandlePointer> _animationHandles;
bool _feetTouchFloor; bool _feetTouchFloor;
bool _isLookingAtLeftEye; eyeContactTarget _eyeContactTarget;
RecorderPointer _recorder; RecorderPointer _recorder;

View file

@ -283,6 +283,7 @@ static ovrVector3f _eyeOffsets[ovrEye_Count];
 glm::vec3 OculusManager::getLeftEyePosition() { return _eyePositions[ovrEye_Left]; }
 glm::vec3 OculusManager::getRightEyePosition() { return _eyePositions[ovrEye_Right]; }
+glm::vec3 OculusManager::getMidEyePosition() { return (_eyePositions[ovrEye_Left] + _eyePositions[ovrEye_Right]) / 2.0f; }

 void OculusManager::connect(QOpenGLContext* shareContext) {
     qCDebug(interfaceapp) << "Oculus SDK" << OVR_VERSION_STRING;
@ -692,13 +693,13 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
     _eyeRenderPoses[eye] = eyePoses[eye];
     // Set the camera rotation for this eye
-    vec3 eyePosition = toGlm(_eyeRenderPoses[eye].Position);
-    eyePosition = whichCamera.getRotation() * eyePosition;
+    _eyePositions[eye] = toGlm(_eyeRenderPoses[eye].Position);
+    _eyePositions[eye] = whichCamera.getRotation() * _eyePositions[eye];
     quat eyeRotation = toGlm(_eyeRenderPoses[eye].Orientation);

     // Update our camera to what the application camera is doing
     _camera->setRotation(whichCamera.getRotation() * eyeRotation);
-    _camera->setPosition(whichCamera.getPosition() + eyePosition);
+    _camera->setPosition(whichCamera.getPosition() + _eyePositions[eye]);
     configureCamera(*_camera);

     _camera->update(1.0f / Application::getInstance()->getFps());
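OculusManager now stores the per-eye positions as each eye is rendered and exposes getMidEyePosition(), simply the average of the two; the mirror-mode and look-at changes above aim the gaze at that midpoint rather than at a single eye. The formula, as a one-line sketch:

```javascript
// Mid-eye point = average of the two eye positions, as in OculusManager::getMidEyePosition().
function midEyePosition(leftEye, rightEye) {
    return { x: (leftEye.x + rightEye.x) / 2,
             y: (leftEye.y + rightEye.y) / 2,
             z: (leftEye.z + rightEye.z) / 2 };
}
console.log(midEyePosition({ x: -0.032, y: 0, z: 0 }, { x: 0.032, y: 0, z: 0 })); // { x: 0, y: 0, z: 0 }
```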

View file

@ -47,6 +47,7 @@ public:
static glm::vec3 getLeftEyePosition(); static glm::vec3 getLeftEyePosition();
static glm::vec3 getRightEyePosition(); static glm::vec3 getRightEyePosition();
static glm::vec3 getMidEyePosition();
static int getHMDScreen(); static int getHMDScreen();

View file

@ -21,6 +21,7 @@
#include "Application.h" #include "Application.h"
#include "../octree/OctreePacketProcessor.h"
#include "ui/OctreeStatsDialog.h" #include "ui/OctreeStatsDialog.h"
OctreeStatsDialog::OctreeStatsDialog(QWidget* parent, NodeToOctreeSceneStats* model) : OctreeStatsDialog::OctreeStatsDialog(QWidget* parent, NodeToOctreeSceneStats* model) :
@ -53,7 +54,7 @@ OctreeStatsDialog::OctreeStatsDialog(QWidget* parent, NodeToOctreeSceneStats* mo
_localElementsMemory = AddStatItem("Elements Memory"); _localElementsMemory = AddStatItem("Elements Memory");
_sendingMode = AddStatItem("Sending Mode"); _sendingMode = AddStatItem("Sending Mode");
_processedPackets = AddStatItem("Processed Packets"); _processedPackets = AddStatItem("Entity Packets");
_processedPacketsElements = AddStatItem("Processed Packets Elements"); _processedPacketsElements = AddStatItem("Processed Packets Elements");
_processedPacketsEntities = AddStatItem("Processed Packets Entities"); _processedPacketsEntities = AddStatItem("Processed Packets Entities");
_processedPacketsTiming = AddStatItem("Processed Packets Timing"); _processedPacketsTiming = AddStatItem("Processed Packets Timing");
@ -155,6 +156,8 @@ void OctreeStatsDialog::paintEvent(QPaintEvent* event) {
if (sinceLastRefresh < REFRESH_AFTER) { if (sinceLastRefresh < REFRESH_AFTER) {
return QDialog::paintEvent(event); return QDialog::paintEvent(event);
} }
const int FLOATING_POINT_PRECISION = 3;
_lastRefresh = now; _lastRefresh = now;
// Update labels // Update labels
@ -245,7 +248,6 @@ void OctreeStatsDialog::paintEvent(QPaintEvent* event) {
auto averageElementsPerPacket = entities->getAverageElementsPerPacket(); auto averageElementsPerPacket = entities->getAverageElementsPerPacket();
auto averageEntitiesPerPacket = entities->getAverageEntitiesPerPacket(); auto averageEntitiesPerPacket = entities->getAverageEntitiesPerPacket();
auto averagePacketsPerSecond = entities->getAveragePacketsPerSecond();
auto averageElementsPerSecond = entities->getAverageElementsPerSecond(); auto averageElementsPerSecond = entities->getAverageElementsPerSecond();
auto averageEntitiesPerSecond = entities->getAverageEntitiesPerSecond(); auto averageEntitiesPerSecond = entities->getAverageEntitiesPerSecond();
@ -253,21 +255,32 @@ void OctreeStatsDialog::paintEvent(QPaintEvent* event) {
auto averageUncompressPerPacket = entities->getAverageUncompressPerPacket(); auto averageUncompressPerPacket = entities->getAverageUncompressPerPacket();
auto averageReadBitstreamPerPacket = entities->getAverageReadBitstreamPerPacket(); auto averageReadBitstreamPerPacket = entities->getAverageReadBitstreamPerPacket();
QString averageElementsPerPacketString = locale.toString(averageElementsPerPacket); QString averageElementsPerPacketString = locale.toString(averageElementsPerPacket, 'f', FLOATING_POINT_PRECISION);
QString averageEntitiesPerPacketString = locale.toString(averageEntitiesPerPacket); QString averageEntitiesPerPacketString = locale.toString(averageEntitiesPerPacket, 'f', FLOATING_POINT_PRECISION);
QString averagePacketsPerSecondString = locale.toString(averagePacketsPerSecond); QString averageElementsPerSecondString = locale.toString(averageElementsPerSecond, 'f', FLOATING_POINT_PRECISION);
QString averageElementsPerSecondString = locale.toString(averageElementsPerSecond); QString averageEntitiesPerSecondString = locale.toString(averageEntitiesPerSecond, 'f', FLOATING_POINT_PRECISION);
QString averageEntitiesPerSecondString = locale.toString(averageEntitiesPerSecond);
QString averageWaitLockPerPacketString = locale.toString(averageWaitLockPerPacket); QString averageWaitLockPerPacketString = locale.toString(averageWaitLockPerPacket);
QString averageUncompressPerPacketString = locale.toString(averageUncompressPerPacket); QString averageUncompressPerPacketString = locale.toString(averageUncompressPerPacket);
QString averageReadBitstreamPerPacketString = locale.toString(averageReadBitstreamPerPacket); QString averageReadBitstreamPerPacketString = locale.toString(averageReadBitstreamPerPacket);
label = _labels[_processedPackets]; label = _labels[_processedPackets];
const OctreePacketProcessor& entitiesPacketProcessor = Application::getInstance()->getOctreePacketProcessor();
auto incomingPPS = entitiesPacketProcessor.getIncomingPPS();
auto processedPPS = entitiesPacketProcessor.getProcessedPPS();
auto treeProcessedPPS = entities->getAveragePacketsPerSecond();
QString incomingPPSString = locale.toString(incomingPPS, 'f', FLOATING_POINT_PRECISION);
QString processedPPSString = locale.toString(processedPPS, 'f', FLOATING_POINT_PRECISION);
QString treeProcessedPPSString = locale.toString(treeProcessedPPS, 'f', FLOATING_POINT_PRECISION);
statsValue.str(""); statsValue.str("");
statsValue << statsValue <<
"" << qPrintable(averagePacketsPerSecondString) << " per second"; "Network IN: " << qPrintable(incomingPPSString) << " PPS / " <<
"Queue OUT: " << qPrintable(processedPPSString) << " PPS / " <<
"Tree IN: " << qPrintable(treeProcessedPPSString) << " PPS";
label->setText(statsValue.str().c_str()); label->setText(statsValue.str().c_str());
@ -321,7 +334,7 @@ void OctreeStatsDialog::paintEvent(QPaintEvent* event) {
} }
QString totalTrackedEditsString = locale.toString((uint)totalTrackedEdits); QString totalTrackedEditsString = locale.toString((uint)totalTrackedEdits);
QString updatesPerSecondString = locale.toString(updatesPerSecond); QString updatesPerSecondString = locale.toString(updatesPerSecond, 'f', FLOATING_POINT_PRECISION);
QString bytesPerEditString = locale.toString(bytesPerEdit); QString bytesPerEditString = locale.toString(bytesPerEdit);
statsValue.str(""); statsValue.str("");

View file

@ -13,6 +13,7 @@
 #include "Text3DOverlay.h"

+#include <DeferredLightingEffect.h>
 #include <RenderDeferredTask.h>
 #include <TextRenderer3D.h>
@ -114,6 +115,7 @@ void Text3DOverlay::render(RenderArgs* args) {
     glm::vec3 topLeft(-halfDimensions.x, -halfDimensions.y, SLIGHTLY_BEHIND);
     glm::vec3 bottomRight(halfDimensions.x, halfDimensions.y, SLIGHTLY_BEHIND);
+    DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, false, true, false, true);
     DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, quadColor);

     // Same font properties as textSize()

View file

@ -36,10 +36,6 @@ void RenderableTextEntityItem::render(RenderArgs* args) {
glm::vec4 backgroundColor = glm::vec4(toGlm(getBackgroundColorX()), 1.0f); glm::vec4 backgroundColor = glm::vec4(toGlm(getBackgroundColorX()), 1.0f);
glm::vec3 dimensions = getDimensions(); glm::vec3 dimensions = getDimensions();
Transform transformToTopLeft = getTransformToCenter();
transformToTopLeft.postTranslate(glm::vec3(-0.5f, 0.5f, 0.0f)); // Go to the top left
transformToTopLeft.setScale(1.0f); // Use a scale of one so that the text is not deformed
// Render background // Render background
glm::vec3 minCorner = glm::vec3(0.0f, -dimensions.y, SLIGHTLY_BEHIND); glm::vec3 minCorner = glm::vec3(0.0f, -dimensions.y, SLIGHTLY_BEHIND);
glm::vec3 maxCorner = glm::vec3(dimensions.x, 0.0f, SLIGHTLY_BEHIND); glm::vec3 maxCorner = glm::vec3(dimensions.x, 0.0f, SLIGHTLY_BEHIND);
@ -48,15 +44,22 @@ void RenderableTextEntityItem::render(RenderArgs* args) {
// Batch render calls // Batch render calls
Q_ASSERT(args->_batch); Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch; gpu::Batch& batch = *args->_batch;
batch.setModelTransform(transformToTopLeft);
//rotate about vertical to face the camera Transform transformToTopLeft = getTransformToCenter();
if (getFaceCamera()) { if (getFaceCamera()) {
transformToTopLeft.postRotate(args->_viewFrustum->getOrientation()); //rotate about vertical to face the camera
batch.setModelTransform(transformToTopLeft); glm::vec3 dPosition = args->_viewFrustum->getPosition() - getPosition();
// If x and z are 0, atan(x, z) is undefined, so default to 0 degrees
float yawRotation = dPosition.x == 0.0f && dPosition.z == 0.0f ? 0.0f : glm::atan(dPosition.x, dPosition.z);
glm::quat orientation = glm::quat(glm::vec3(0.0f, yawRotation, 0.0f));
transformToTopLeft.setRotation(orientation);
} }
transformToTopLeft.postTranslate(glm::vec3(-0.5f, 0.5f, 0.0f)); // Go to the top left
transformToTopLeft.setScale(1.0f); // Use a scale of one so that the text is not deformed
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, false, false); batch.setModelTransform(transformToTopLeft);
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, false, false, false, true);
DependencyManager::get<GeometryCache>()->renderQuad(batch, minCorner, maxCorner, backgroundColor); DependencyManager::get<GeometryCache>()->renderQuad(batch, minCorner, maxCorner, backgroundColor);
float scale = _lineHeight / _textRenderer->getFontSize(); float scale = _lineHeight / _textRenderer->getFontSize();
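The faceCamera path for text entities no longer copies the whole camera orientation; it derives a yaw-only rotation from the horizontal offset between viewer and entity, glm::atan(dPosition.x, dPosition.z), guarding the case where both components are zero, so the text turns toward the camera without tilting. A standalone sketch of that yaw computation:

```javascript
// Yaw-only billboard: rotate about the vertical axis toward the viewer. Returns radians.
function yawTowardViewer(entityPosition, viewerPosition) {
    var dx = viewerPosition.x - entityPosition.x;
    var dz = viewerPosition.z - entityPosition.z;
    // When the viewer is directly above or below, atan(x, z) is ill-defined; default to 0.
    return (dx === 0 && dz === 0) ? 0 : Math.atan2(dx, dz);
}

console.log(yawTowardViewer({ x: 0, y: 0, z: 0 }, { x: 0, y: 0, z: 5 })); // 0: viewer straight ahead on +z
console.log(yawTowardViewer({ x: 0, y: 0, z: 0 }, { x: 5, y: 0, z: 0 })); // ~1.5708: viewer off to +x
```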

View file

@ -92,13 +92,11 @@ void EntityTree::postAddEntity(EntityItemPointer entity) {
bool EntityTree::updateEntity(const EntityItemID& entityID, const EntityItemProperties& properties, const SharedNodePointer& senderNode) { bool EntityTree::updateEntity(const EntityItemID& entityID, const EntityItemProperties& properties, const SharedNodePointer& senderNode) {
EntityTreeElement* containingElement = getContainingElement(entityID); EntityTreeElement* containingElement = getContainingElement(entityID);
if (!containingElement) { if (!containingElement) {
qCDebug(entities) << "UNEXPECTED!!!! EntityTree::updateEntity() entityID doesn't exist!!! entityID=" << entityID;
return false; return false;
} }
EntityItemPointer existingEntity = containingElement->getEntityWithEntityItemID(entityID); EntityItemPointer existingEntity = containingElement->getEntityWithEntityItemID(entityID);
if (!existingEntity) { if (!existingEntity) {
qCDebug(entities) << "UNEXPECTED!!!! don't call updateEntity() on entity items that don't exist. entityID=" << entityID;
return false; return false;
} }
@ -108,8 +106,6 @@ bool EntityTree::updateEntity(const EntityItemID& entityID, const EntityItemProp
bool EntityTree::updateEntity(EntityItemPointer entity, const EntityItemProperties& properties, const SharedNodePointer& senderNode) { bool EntityTree::updateEntity(EntityItemPointer entity, const EntityItemProperties& properties, const SharedNodePointer& senderNode) {
EntityTreeElement* containingElement = getContainingElement(entity->getEntityItemID()); EntityTreeElement* containingElement = getContainingElement(entity->getEntityItemID());
if (!containingElement) { if (!containingElement) {
qCDebug(entities) << "UNEXPECTED!!!! EntityTree::updateEntity() entity-->element lookup failed!!! entityID="
<< entity->getEntityItemID();
return false; return false;
} }
return updateEntityWithElement(entity, properties, containingElement, senderNode); return updateEntityWithElement(entity, properties, containingElement, senderNode);

View file

@ -589,7 +589,7 @@ void GLBackend::do_setStateAntialiasedLineEnable(bool enable) {
 void GLBackend::do_setStateDepthBias(Vec2 bias) {
     if ( (bias.x != _pipeline._stateCache.depthBias) || (bias.y != _pipeline._stateCache.depthBiasSlopeScale)) {
-        if ((bias.x != 0.f) || (bias.y != 0.f)) {
+        if ((bias.x != 0.0f) || (bias.y != 0.0f)) {
             glEnable(GL_POLYGON_OFFSET_FILL);
             glEnable(GL_POLYGON_OFFSET_LINE);
             glEnable(GL_POLYGON_OFFSET_POINT);

View file

@ -9,10 +9,17 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// //
#include <NumericalConstants.h>
#include "NodeList.h" #include "NodeList.h"
#include "ReceivedPacketProcessor.h" #include "ReceivedPacketProcessor.h"
#include "SharedUtil.h" #include "SharedUtil.h"
ReceivedPacketProcessor::ReceivedPacketProcessor() {
_lastWindowAt = usecTimestampNow();
}
void ReceivedPacketProcessor::terminating() { void ReceivedPacketProcessor::terminating() {
_hasPackets.wakeAll(); _hasPackets.wakeAll();
} }
@ -25,6 +32,7 @@ void ReceivedPacketProcessor::queueReceivedPacket(const SharedNodePointer& sendi
lock(); lock();
_packets.push_back(networkPacket); _packets.push_back(networkPacket);
_nodePacketCounts[sendingNode->getUUID()]++; _nodePacketCounts[sendingNode->getUUID()]++;
_lastWindowIncomingPackets++;
unlock(); unlock();
// Make sure to wake our actual processing thread because we now have packets for it to process. // Make sure to wake our actual processing thread because we now have packets for it to process.
@ -32,6 +40,24 @@ void ReceivedPacketProcessor::queueReceivedPacket(const SharedNodePointer& sendi
} }
bool ReceivedPacketProcessor::process() { bool ReceivedPacketProcessor::process() {
quint64 now = usecTimestampNow();
quint64 sinceLastWindow = now - _lastWindowAt;
if (sinceLastWindow > USECS_PER_SECOND) {
lock();
float secondsSinceLastWindow = sinceLastWindow / USECS_PER_SECOND;
float incomingPacketsPerSecondInWindow = (float)_lastWindowIncomingPackets / secondsSinceLastWindow;
_incomingPPS.updateAverage(incomingPacketsPerSecondInWindow);
float processedPacketsPerSecondInWindow = (float)_lastWindowIncomingPackets / secondsSinceLastWindow;
_processedPPS.updateAverage(processedPacketsPerSecondInWindow);
_lastWindowAt = now;
_lastWindowIncomingPackets = 0;
_lastWindowProcessedPackets = 0;
unlock();
}
if (_packets.size() == 0) { if (_packets.size() == 0) {
_waitingOnPacketsMutex.lock(); _waitingOnPacketsMutex.lock();
@ -51,6 +77,7 @@ bool ReceivedPacketProcessor::process() {
foreach(auto& packet, currentPackets) { foreach(auto& packet, currentPackets) {
processPacket(packet.getNode(), packet.getByteArray()); processPacket(packet.getNode(), packet.getByteArray());
_lastWindowProcessedPackets++;
midProcess(); midProcess();
} }
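The packet processor now counts packets as they are queued and processed and, once more than a second has elapsed, converts those counts into per-second rates and feeds them into moving averages exposed as getIncomingPPS()/getProcessedPPS() for the octree stats dialog. A self-contained sketch of that windowing (plain JS, with Date.now() standing in for usecTimestampNow() and exponential smoothing standing in for SimpleMovingAverage; the hunk above derives both rates from the incoming counter, so this sketch assumes the processed rate is meant to use the processed counter):

```javascript
// One-second rate window: count events, then fold the window into a smoothed rate.
var lastWindowAt = Date.now();
var windowIncoming = 0, windowProcessed = 0;
var incomingPPS = 0, processedPPS = 0;
var SMOOTHING = 0.1;                                  // stand-in for SimpleMovingAverage

function onPacketQueued()    { windowIncoming++; }    // called when a packet arrives
function onPacketProcessed() { windowProcessed++; }   // called after a packet is handled

function maybeRollWindow() {                          // called at the top of each process() pass
    var now = Date.now();
    var elapsedSeconds = (now - lastWindowAt) / 1000;
    if (elapsedSeconds < 1) { return; }
    incomingPPS  += SMOOTHING * (windowIncoming  / elapsedSeconds - incomingPPS);
    processedPPS += SMOOTHING * (windowProcessed / elapsedSeconds - processedPPS);
    lastWindowAt = now;
    windowIncoming = 0;
    windowProcessed = 0;
}
```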

View file

@ -21,7 +21,7 @@
class ReceivedPacketProcessor : public GenericThread { class ReceivedPacketProcessor : public GenericThread {
Q_OBJECT Q_OBJECT
public: public:
ReceivedPacketProcessor() { } ReceivedPacketProcessor();
/// Add packet from network receive thread to the processing queue. /// Add packet from network receive thread to the processing queue.
void queueReceivedPacket(const SharedNodePointer& sendingNode, const QByteArray& packet); void queueReceivedPacket(const SharedNodePointer& sendingNode, const QByteArray& packet);
@ -47,6 +47,9 @@ public:
/// How many received packets waiting are to be processed /// How many received packets waiting are to be processed
int packetsToProcessCount() const { return _packets.size(); } int packetsToProcessCount() const { return _packets.size(); }
float getIncomingPPS() const { return _incomingPPS.getAverage(); }
float getProcessedPPS() const { return _processedPPS.getAverage(); }
virtual void terminating(); virtual void terminating();
public slots: public slots:
@ -80,6 +83,12 @@ protected:
QWaitCondition _hasPackets; QWaitCondition _hasPackets;
QMutex _waitingOnPacketsMutex; QMutex _waitingOnPacketsMutex;
quint64 _lastWindowAt = 0;
int _lastWindowIncomingPackets = 0;
int _lastWindowProcessedPackets = 0;
SimpleMovingAverage _incomingPPS;
SimpleMovingAverage _processedPPS;
}; };
#endif // hifi_ReceivedPacketProcessor_h #endif // hifi_ReceivedPacketProcessor_h

View file

@ -50,37 +50,44 @@
static const std::string glowIntensityShaderHandle = "glowIntensity"; static const std::string glowIntensityShaderHandle = "glowIntensity";
gpu::PipelinePointer DeferredLightingEffect::getPipeline(SimpleProgramKey config) {
auto it = _simplePrograms.find(config);
if (it != _simplePrograms.end()) {
return it.value();
}
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
if (config.isCulled()) {
state->setCullMode(gpu::State::CULL_BACK);
} else {
state->setCullMode(gpu::State::CULL_NONE);
}
state->setDepthTest(true, true, gpu::LESS_EQUAL);
if (config.hasDepthBias()) {
state->setDepthBias(1.0f);
state->setDepthBiasSlopeScale(1.0f);
}
state->setBlendFunction(false,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
gpu::ShaderPointer program = (config.isEmissive()) ? _emissiveShader : _simpleShader;
gpu::PipelinePointer pipeline = gpu::PipelinePointer(gpu::Pipeline::create(program, state));
_simplePrograms.insert(config, pipeline);
return pipeline;
}
void DeferredLightingEffect::init(AbstractViewStateInterface* viewState) { void DeferredLightingEffect::init(AbstractViewStateInterface* viewState) {
auto VS = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(simple_vert))); auto VS = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(simple_vert)));
auto PS = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(simple_textured_frag))); auto PS = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(simple_textured_frag)));
auto PSEmissive = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(simple_textured_emisive_frag))); auto PSEmissive = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(simple_textured_emisive_frag)));
gpu::ShaderPointer program = gpu::ShaderPointer(gpu::Shader::createProgram(VS, PS)); _simpleShader = gpu::ShaderPointer(gpu::Shader::createProgram(VS, PS));
gpu::ShaderPointer programEmissive = gpu::ShaderPointer(gpu::Shader::createProgram(VS, PSEmissive)); _emissiveShader = gpu::ShaderPointer(gpu::Shader::createProgram(VS, PSEmissive));
gpu::Shader::BindingSet slotBindings; gpu::Shader::BindingSet slotBindings;
gpu::Shader::makeProgram(*program, slotBindings); gpu::Shader::makeProgram(*_simpleShader, slotBindings);
gpu::Shader::makeProgram(*programEmissive, slotBindings); gpu::Shader::makeProgram(*_emissiveShader, slotBindings);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, true, gpu::LESS_EQUAL);
state->setBlendFunction(false,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
gpu::StatePointer stateCullNone = gpu::StatePointer(new gpu::State());
stateCullNone->setCullMode(gpu::State::CULL_NONE);
stateCullNone->setDepthTest(true, true, gpu::LESS_EQUAL);
stateCullNone->setBlendFunction(false,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
_simpleProgram = gpu::PipelinePointer(gpu::Pipeline::create(program, state));
_simpleProgramCullNone = gpu::PipelinePointer(gpu::Pipeline::create(program, stateCullNone));
_simpleProgramEmissive = gpu::PipelinePointer(gpu::Pipeline::create(programEmissive, state));
_simpleProgramEmissiveCullNone = gpu::PipelinePointer(gpu::Pipeline::create(programEmissive, stateCullNone));
_viewState = viewState; _viewState = viewState;
loadLightProgram(directional_light_frag, false, _directionalLight, _directionalLightLocations); loadLightProgram(directional_light_frag, false, _directionalLight, _directionalLightLocations);
@ -117,21 +124,12 @@ void DeferredLightingEffect::init(AbstractViewStateInterface* viewState) {
lp->setAmbientSpherePreset(gpu::SphericalHarmonics::Preset(_ambientLightMode % gpu::SphericalHarmonics::NUM_PRESET)); lp->setAmbientSpherePreset(gpu::SphericalHarmonics::Preset(_ambientLightMode % gpu::SphericalHarmonics::NUM_PRESET));
} }
void DeferredLightingEffect::bindSimpleProgram(gpu::Batch& batch, bool textured, bool culled, bool emmisive) { void DeferredLightingEffect::bindSimpleProgram(gpu::Batch& batch, bool textured, bool culled,
if (emmisive) { bool emmisive, bool depthBias) {
if (culled) { SimpleProgramKey config{textured, culled, emmisive, depthBias};
batch.setPipeline(_simpleProgramEmissive); batch.setPipeline(getPipeline(config));
} else {
batch.setPipeline(_simpleProgramEmissiveCullNone); if (!config.isTextured()) {
}
} else {
if (culled) {
batch.setPipeline(_simpleProgram);
} else {
batch.setPipeline(_simpleProgramCullNone);
}
}
if (!textured) {
// If it is not textured, bind white texture and keep using textured pipeline // If it is not textured, bind white texture and keep using textured pipeline
batch.setUniformTexture(0, DependencyManager::get<TextureCache>()->getWhiteTexture()); batch.setUniformTexture(0, DependencyManager::get<TextureCache>()->getWhiteTexture());
} }

View file

@ -24,6 +24,7 @@
class AbstractViewStateInterface; class AbstractViewStateInterface;
class RenderArgs; class RenderArgs;
class SimpleProgramKey;
/// Handles deferred lighting for the bits that require it (voxels...) /// Handles deferred lighting for the bits that require it (voxels...)
class DeferredLightingEffect : public Dependency { class DeferredLightingEffect : public Dependency {
@ -34,7 +35,8 @@ public:
void init(AbstractViewStateInterface* viewState); void init(AbstractViewStateInterface* viewState);
/// Sets up the state necessary to render static untextured geometry with the simple program. /// Sets up the state necessary to render static untextured geometry with the simple program.
void bindSimpleProgram(gpu::Batch& batch, bool textured = false, bool culled = true, bool emmisive = false); void bindSimpleProgram(gpu::Batch& batch, bool textured = false, bool culled = true,
bool emmisive = false, bool depthBias = false);
//// Renders a solid sphere with the simple program. //// Renders a solid sphere with the simple program.
void renderSolidSphere(gpu::Batch& batch, float radius, int slices, int stacks, const glm::vec4& color); void renderSolidSphere(gpu::Batch& batch, float radius, int slices, int stacks, const glm::vec4& color);
@ -95,11 +97,11 @@ private:
}; };
static void loadLightProgram(const char* fragSource, bool limited, ProgramObject& program, LightLocations& locations); static void loadLightProgram(const char* fragSource, bool limited, ProgramObject& program, LightLocations& locations);
gpu::PipelinePointer getPipeline(SimpleProgramKey config);
gpu::PipelinePointer _simpleProgram; gpu::ShaderPointer _simpleShader;
gpu::PipelinePointer _simpleProgramCullNone; gpu::ShaderPointer _emissiveShader;
gpu::PipelinePointer _simpleProgramEmissive; QHash<SimpleProgramKey, gpu::PipelinePointer> _simplePrograms;
gpu::PipelinePointer _simpleProgramEmissiveCullNone;
ProgramObject _directionalSkyboxLight; ProgramObject _directionalSkyboxLight;
LightLocations _directionalSkyboxLightLocations; LightLocations _directionalSkyboxLightLocations;
@ -160,4 +162,53 @@ private:
model::SkyboxPointer _skybox; model::SkyboxPointer _skybox;
}; };
class SimpleProgramKey {
public:
enum FlagBit {
IS_TEXTURED_FLAG = 0,
IS_CULLED_FLAG,
IS_EMISSIVE_FLAG,
HAS_DEPTH_BIAS_FLAG,
NUM_FLAGS,
};
enum Flag {
IS_TEXTURED = (1 << IS_TEXTURED_FLAG),
IS_CULLED = (1 << IS_CULLED_FLAG),
IS_EMISSIVE = (1 << IS_EMISSIVE_FLAG),
HAS_DEPTH_BIAS = (1 << HAS_DEPTH_BIAS_FLAG),
};
typedef unsigned short Flags;
bool isFlag(short flagNum) const { return bool((_flags & flagNum) != 0); }
bool isTextured() const { return isFlag(IS_TEXTURED); }
bool isCulled() const { return isFlag(IS_CULLED); }
bool isEmissive() const { return isFlag(IS_EMISSIVE); }
bool hasDepthBias() const { return isFlag(HAS_DEPTH_BIAS); }
Flags _flags = 0;
short _spare = 0;
int getRaw() const { return *reinterpret_cast<const int*>(this); }
SimpleProgramKey(bool textured = false, bool culled = true,
bool emissive = false, bool depthBias = false) {
_flags = (textured ? IS_TEXTURED : 0) | (culled ? IS_CULLED : 0) |
(emissive ? IS_EMISSIVE : 0) | (depthBias ? HAS_DEPTH_BIAS : 0);
}
SimpleProgramKey(int bitmask) : _flags(bitmask) {}
};
inline uint qHash(const SimpleProgramKey& key, uint seed) {
return qHash(key.getRaw(), seed);
}
inline bool operator==(const SimpleProgramKey& a, const SimpleProgramKey& b) {
return a.getRaw() == b.getRaw();
}
#endif // hifi_DeferredLightingEffect_h #endif // hifi_DeferredLightingEffect_h
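bindSimpleProgram() no longer chooses among four pre-built member pipelines; it packs the textured/culled/emissive/depth-bias flags into a SimpleProgramKey bitmask and looks the pipeline up in a QHash, building and caching it on first use, so new combinations (such as the depth-biased variants used by the text rendering above) do not multiply member variables. A sketch of the same keyed-cache pattern in plain JS; the flag names mirror the header, but the pipeline object here is a stand-in, not the gpu code:

```javascript
// Flag-bitmask key plus a lazily filled cache, mirroring SimpleProgramKey / getPipeline().
var IS_TEXTURED = 1 << 0, IS_CULLED = 1 << 1, IS_EMISSIVE = 1 << 2, HAS_DEPTH_BIAS = 1 << 3;

function makeKey(textured, culled, emissive, depthBias) {
    return (textured ? IS_TEXTURED : 0) | (culled ? IS_CULLED : 0) |
           (emissive ? IS_EMISSIVE : 0) | (depthBias ? HAS_DEPTH_BIAS : 0);
}

var pipelines = {};                           // key -> pipeline, like the QHash in the header

function getPipeline(key) {
    if (pipelines[key] !== undefined) {
        return pipelines[key];                // cache hit: reuse the previously built state
    }
    var pipeline = {                          // stand-in for gpu::Pipeline::create(shader, state)
        shader:    (key & IS_EMISSIVE) ? "emissive" : "simple",
        cullBack:  (key & IS_CULLED) !== 0,
        depthBias: (key & HAS_DEPTH_BIAS) !== 0
    };
    pipelines[key] = pipeline;
    return pipeline;
}

// The Text3DOverlay call bindSimpleProgram(batch, false, true, true, true) maps to:
console.log(getPipeline(makeKey(false, true, true, true)));
```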

View file

@ -229,6 +229,7 @@ QScriptValue collisionToScriptValue(QScriptEngine* engine, const Collision& coll
     obj.setProperty("idB", quuidToScriptValue(engine, collision.idB));
     obj.setProperty("penetration", vec3toScriptValue(engine, collision.penetration));
     obj.setProperty("contactPoint", vec3toScriptValue(engine, collision.contactPoint));
+    obj.setProperty("velocityChange", vec3toScriptValue(engine, collision.velocityChange));
     return obj;
 }
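With velocityChange now copied onto the scripted Collision object and MyAvatar emitting collisionWithEntity (the AvatarManager change above), scripts such as stick.js can read hit strength directly. A minimal handler, runnable inside Interface:

```javascript
// Minimal script-side consumer of the new collision.velocityChange field.
function onAvatarHit(collision) {
    var dv = Vec3.length(collision.velocityChange);   // magnitude of the velocity change, m/s
    print("hit by " + (collision.idA || collision.idB) + ", |dv| = " + dv.toFixed(3));
}

MyAvatar.collisionWithEntity.connect(onAvatarHit);
Script.scriptEnding.connect(function () {
    MyAvatar.collisionWithEntity.disconnect(onAvatarHit);
});
```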