Merge branch 'master' of https://github.com/highfidelity/hifi into yellow

Sam Gateau 2015-07-08 14:25:41 -07:00
commit 4c44eb63da
24 changed files with 1193 additions and 789 deletions

View file

@@ -4,8 +4,8 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
GIT_REPOSITORY https://github.com/matus-chochlik/oglplus.git
GIT_TAG a2681383928b1166f176512cbe0f95e96fe68d08
URL http://iweb.dl.sourceforge.net/project/oglplus/oglplus-0.63.x/oglplus-0.63.0.zip
URL_MD5 de984ab245b185b45c87415c0e052135
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""

View file

@@ -12,7 +12,6 @@ Script.load("progress.js");
Script.load("edit.js");
Script.load("selectAudioDevice.js");
Script.load("inspect.js");
Script.load("lobby.js");
Script.load("notifications.js");
Script.load("users.js");
Script.load("grab.js");

View file

@@ -0,0 +1,70 @@
//
// make-dummy.js
// examples
//
// Created by Seth Alves on 2015-6-10
// Copyright 2015 High Fidelity, Inc.
//
// Makes a boxing-dummy that responds to collisions.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
//
"use strict";
/*jslint vars: true*/
var Overlays, Entities, Controller, Script, MyAvatar, Vec3; // Referenced globals provided by High Fidelity.
var HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var rezButton = Overlays.addOverlay("image", {
x: 100,
y: 350,
width: 32,
height: 32,
imageURL: HIFI_PUBLIC_BUCKET + "images/close.png",
color: {
red: 255,
green: 255,
blue: 255
},
alpha: 1
});
function mousePressEvent(event) {
var clickedOverlay = Overlays.getOverlayAtPoint({
x: event.x,
y: event.y
});
if (clickedOverlay === rezButton) {
var boxId;
var position = Vec3.sum(MyAvatar.position, {x: 1.0, y: 0.4, z: 0.0});
boxId = Entities.addEntity({
type: "Box",
name: "dummy",
position: position,
dimensions: {x: 0.3, y: 0.7, z: 0.3},
gravity: {x: 0.0, y: -3.0, z: 0.0},
damping: 0.2,
collisionsWillMove: true
});
var pointToOffsetFrom = Vec3.sum(position, {x: 0.0, y: 2.0, z: 0.0});
Entities.addAction("offset", boxId, {pointToOffsetFrom: pointToOffsetFrom,
linearDistance: 2.0,
// linearTimeScale: 0.005
linearTimeScale: 0.1
});
}
}
function scriptEnding() {
Overlays.deleteOverlay(rezButton);
}
Controller.mousePressEvent.connect(mousePressEvent);
Script.scriptEnding.connect(scriptEnding);
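Entities.addAction returns an ID for the new action; the script above discards it, but capturing it lets the tether be re-tuned or removed later. A minimal sketch, assuming the same "offset" action parameters used above and reusing boxId and pointToOffsetFrom from mousePressEvent:

// Hypothetical extension: keep the action ID so the tether stays adjustable.
var tetherID = Entities.addAction("offset", boxId, {
    pointToOffsetFrom: pointToOffsetFrom,
    linearDistance: 2.0, // hold the dummy 2 m from the anchor point
    linearTimeScale: 0.1 // smaller values spring back more stiffly
});
// Later: soften the spring, or delete the action to cut the dummy loose.
Entities.updateAction(boxId, tetherID, {linearTimeScale: 0.5});
Entities.deleteAction(boxId, tetherID);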

View file

@@ -0,0 +1,195 @@
// stick.js
// examples
//
// Created by Seth Alves on 2015-6-10
// Copyright 2015 High Fidelity, Inc.
//
// Allows the avatar to hold a stick
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
"use strict";
/*jslint vars: true*/
var Script, Entities, MyAvatar, Window, Overlays, Controller, Vec3, Quat, print; // Referenced globals provided by High Fidelity.
var hand = "right";
var nullActionID = "00000000-0000-0000-0000-000000000000";
var controllerID;
var controllerActive;
var stickID = null;
var actionID = nullActionID;
var dimensions = { x: 0.3, y: 0.1, z: 2.0 };
var AWAY_ORIENTATION = Quat.fromPitchYawRollDegrees(-90, 0, 0);
var stickModel = "https://hifi-public.s3.amazonaws.com/eric/models/stick.fbx";
var swordModel = "https://hifi-public.s3.amazonaws.com/ozan/props/sword/sword.fbx";
var whichModel = "sword";
var rezButton = Overlays.addOverlay("image", {
x: 100,
y: 380,
width: 32,
height: 32,
imageURL: "http://s3.amazonaws.com/hifi-public/images/delete.png",
color: {
red: 255,
green: 255,
blue: 255
},
alpha: 1
});
var health = 100;
var display;
var isAway = false;
function updateDisplay() {
var text = health.toString();
if (!display) {
health = 100;
display = Overlays.addOverlay("text", {
text: text,
font: { size: 20 },
color: {red: 0, green: 255, blue: 0},
backgroundColor: {red: 100, green: 100, blue: 100}, // Why don't this and the next property work?
backgroundAlpha: 0.9,
x: Window.innerWidth - 50,
y: 50
});
} else {
Overlays.editOverlay(display, {text: text});
}
}
function removeDisplay() {
if (display) {
Overlays.deleteOverlay(display);
display = null;
}
}
function cleanUp() {
if (stickID) {
Entities.deleteAction(stickID, actionID);
Entities.deleteEntity(stickID);
stickID = null;
actionID = null;
}
removeDisplay();
Overlays.deleteOverlay(rezButton);
}
function computeEnergy(collision, entityID) {
var id = entityID || collision.idA || collision.idB;
var entity = id && Entities.getEntityProperties(id);
var mass = entity ? (entity.density * entity.dimensions.x * entity.dimensions.y * entity.dimensions.z) : 1;
var linearVelocityChange = Vec3.length(collision.velocityChange);
var energy = 0.5 * mass * linearVelocityChange * linearVelocityChange;
return Math.min(Math.max(1.0, Math.round(energy)), 20);
}
function gotHit(collision) {
if (isAway) { return; }
var energy = computeEnergy(collision);
health -= energy;
updateDisplay();
}
function scoreHit(idA, idB, collision) {
if (isAway) { return; }
var energy = computeEnergy(collision, idA);
health += energy;
updateDisplay();
}
function positionStick(stickOrientation) {
var baseOffset = {x: 0.3, y: 0.0, z: -dimensions.z / 2}; // FIXME: don't move yourself by colliding with your own capsule. Fudge of 0.3 in x.
var offset = Vec3.multiplyQbyV(stickOrientation, baseOffset);
Entities.updateAction(stickID, actionID, {relativePosition: offset,
relativeRotation: stickOrientation});
}
function mouseMoveEvent(event) {
if (!stickID || actionID === nullActionID || isAway) {
return;
}
var windowCenterX = Window.innerWidth / 2;
var windowCenterY = Window.innerHeight / 2;
var mouseXCenterOffset = event.x - windowCenterX;
var mouseYCenterOffset = event.y - windowCenterY;
var mouseXRatio = mouseXCenterOffset / windowCenterX;
var mouseYRatio = mouseYCenterOffset / windowCenterY;
var stickOrientation = Quat.fromPitchYawRollDegrees(mouseYRatio * -90, mouseXRatio * -90, 0);
positionStick(stickOrientation);
}
function initControls() {
if (hand === "right") {
controllerID = 3; // right handed
} else {
controllerID = 4; // left handed
}
}
function update() {
var palmPosition = Controller.getSpatialControlPosition(controllerID);
controllerActive = (Vec3.length(palmPosition) > 0);
if (!controllerActive) {
return;
}
var stickOrientation = Controller.getSpatialControlRawRotation(controllerID);
var adjustment = Quat.fromPitchYawRollDegrees(180, 0, 0);
stickOrientation = Quat.multiply(stickOrientation, adjustment);
positionStick(stickOrientation);
}
function toggleAway() {
isAway = !isAway;
if (isAway) {
positionStick(AWAY_ORIENTATION);
removeDisplay();
} else {
updateDisplay();
}
}
function onClick(event) {
switch (Overlays.getOverlayAtPoint({x: event.x, y: event.y})) {
case rezButton:
if (!stickID) {
stickID = Entities.addEntity({
type: "Model",
modelURL: (whichModel === "sword") ? swordModel : stickModel,
//compoundShapeURL: "https://hifi-public.s3.amazonaws.com/eric/models/stick.obj",
shapeType: "box",
dimensions: dimensions,
position: MyAvatar.getRightPalmPosition(), // initial position doesn't matter, as long as it's close
rotation: MyAvatar.orientation,
damping: 0.1,
collisionSoundURL: "http://public.highfidelity.io/sounds/Collisions-hitsandslaps/swordStrike1.wav",
restitution: 0.01,
collisionsWillMove: true
});
actionID = Entities.addAction("hold", stickID, {relativePosition: {x: 0.0, y: 0.0, z: -dimensions.z / 2},
hand: hand,
timeScale: 0.15});
if (actionID === nullActionID) {
print('*** FAILED TO MAKE SWORD ACTION ***');
cleanUp();
}
Script.addEventHandler(stickID, 'collisionWithEntity', scoreHit);
updateDisplay();
} else {
toggleAway();
}
break;
}
}
Script.scriptEnding.connect(cleanUp);
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mousePressEvent.connect(onClick);
Script.update.connect(update);
MyAvatar.collisionWithEntity.connect(gotHit);
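Stripped of the overlay, health display, and mouse aiming, the core pattern in stick.js is: rez a model near the palm, attach it with a "hold" action, and delete both on shutdown. A condensed sketch of that skeleton, reusing stickModel and dimensions from above (values are illustrative):

// Minimal "hold" skeleton distilled from the script above.
var toolID = Entities.addEntity({
    type: "Model",
    modelURL: stickModel,
    shapeType: "box",
    dimensions: dimensions,
    position: MyAvatar.getRightPalmPosition(), // rough starting spot; the action takes over
    collisionsWillMove: true
});
var holdID = Entities.addAction("hold", toolID, {
    relativePosition: {x: 0.0, y: 0.0, z: -dimensions.z / 2}, // grip one end
    hand: "right",
    timeScale: 0.15
});
Script.scriptEnding.connect(function () {
    Entities.deleteAction(toolID, holdID);
    Entities.deleteEntity(toolID);
});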

examples/utilities/tools/cookies.js (1316 changed lines) Executable file → Normal file

File diff suppressed because it is too large

View file

@@ -2304,38 +2304,42 @@ void Application::updateMyAvatarLookAtPosition() {
bool isLookingAtSomeone = false;
glm::vec3 lookAtSpot;
if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
// When I am in mirror mode, just look right at the camera (myself)
// When I am in mirror mode, just look right at the camera (myself); don't switch gaze points because when physically
// looking in a mirror one's eyes appear steady.
if (!OculusManager::isConnected()) {
lookAtSpot = _myCamera.getPosition();
} else {
if (_myAvatar->isLookingAtLeftEye()) {
lookAtSpot = OculusManager::getLeftEyePosition();
} else {
lookAtSpot = OculusManager::getRightEyePosition();
}
lookAtSpot = _myCamera.getPosition() + OculusManager::getMidEyePosition();
}
} else {
AvatarSharedPointer lookingAt = _myAvatar->getLookAtTargetAvatar().lock();
if (lookingAt && _myAvatar != lookingAt.get()) {
isLookingAtSomeone = true;
// If I am looking at someone else, look directly at one of their eyes
if (tracker && !tracker->isMuted()) {
// If a face tracker is active, look at the eye for the side my gaze is biased toward
if (tracker->getEstimatedEyeYaw() > _myAvatar->getHead()->getFinalYaw()) {
// Look at their right eye
lookAtSpot = static_cast<Avatar*>(lookingAt.get())->getHead()->getRightEyePosition();
} else {
// Look at their left eye
lookAtSpot = static_cast<Avatar*>(lookingAt.get())->getHead()->getLeftEyePosition();
isLookingAtSomeone = true;
Head* lookingAtHead = static_cast<Avatar*>(lookingAt.get())->getHead();
const float MAXIMUM_FACE_ANGLE = 65.0f * RADIANS_PER_DEGREE;
glm::vec3 lookingAtFaceOrientation = lookingAtHead->getFinalOrientationInWorldFrame() * IDENTITY_FRONT;
glm::vec3 fromLookingAtToMe = glm::normalize(_myAvatar->getHead()->getEyePosition()
- lookingAtHead->getEyePosition());
float faceAngle = glm::angle(lookingAtFaceOrientation, fromLookingAtToMe);
if (faceAngle < MAXIMUM_FACE_ANGLE) {
// Randomly look back and forth between look targets
switch (_myAvatar->getEyeContactTarget()) {
case LEFT_EYE:
lookAtSpot = lookingAtHead->getLeftEyePosition();
break;
case RIGHT_EYE:
lookAtSpot = lookingAtHead->getRightEyePosition();
break;
case MOUTH:
lookAtSpot = lookingAtHead->getMouthPosition();
break;
}
} else {
// Need to add randomly looking back and forth between left and right eye for case with no tracker
if (_myAvatar->isLookingAtLeftEye()) {
lookAtSpot = static_cast<Avatar*>(lookingAt.get())->getHead()->getLeftEyePosition();
} else {
lookAtSpot = static_cast<Avatar*>(lookingAt.get())->getHead()->getRightEyePosition();
}
// Just look at their head (midpoint between the eyes)
lookAtSpot = lookingAtHead->getEyePosition();
}
} else {
// I am not looking at anyone else, so just look forward
@@ -2343,14 +2347,13 @@ void Application::updateMyAvatarLookAtPosition() {
(_myAvatar->getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
}
}
//
// Deflect the eyes a bit to match the detected Gaze from 3D camera if active
//
if (tracker && !tracker->isMuted()) {
// Deflect the eyes a bit to match the detected gaze from Faceshift if active.
// DDE doesn't track eyes.
if (tracker && typeid(*tracker) == typeid(Faceshift) && !tracker->isMuted()) {
float eyePitch = tracker->getEstimatedEyePitch();
float eyeYaw = tracker->getEstimatedEyeYaw();
const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f;
// deflect using Faceshift gaze data
glm::vec3 origin = _myAvatar->getHead()->getEyePosition();
float pitchSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? -1.0f : 1.0f;
float deflection = DependencyManager::get<Faceshift>()->getEyeDeflection();

View file

@@ -750,7 +750,7 @@ void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum) co
const int text_y = -nameDynamicRect.height() / 2;
// Compute background position/size
static const float SLIGHTLY_BEHIND = -0.05f;
static const float SLIGHTLY_IN_FRONT = 0.1f;
const int border = 0.1f * nameDynamicRect.height();
const int left = text_x - border;
const int bottom = text_y - border;
@@ -765,16 +765,16 @@ void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum) co
// Compute display name transform
auto textTransform = calculateDisplayNameTransform(frustum, renderer->getFontSize());
// Render background slightly behind to avoid z-fighting
auto backgroundTransform = textTransform;
backgroundTransform.postTranslate(glm::vec3(0.0f, 0.0f, SLIGHTLY_BEHIND));
batch.setModelTransform(backgroundTransform);
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch);
batch.setModelTransform(textTransform);
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, false, true, true, true);
DependencyManager::get<GeometryCache>()->renderBevelCornersRect(batch, left, bottom, width, height,
bevelDistance, backgroundColor);
// Render actual name
QByteArray nameUTF8 = renderedDisplayName.toLocal8Bit();
// Render text slightly in front to avoid z-fighting
textTransform.postTranslate(glm::vec3(0.0f, 0.0f, SLIGHTLY_IN_FRONT * renderer->getFontSize()));
batch.setModelTransform(textTransform);
renderer->draw(batch, text_x, -text_y, nameUTF8.data(), textColor);
}

View file

@@ -256,7 +256,6 @@ void AvatarManager::handleOutgoingChanges(VectorOfMotionStates& motionStates) {
}
void AvatarManager::handleCollisionEvents(CollisionEvents& collisionEvents) {
// TODO: expose avatar collision events to JS
for (Collision collision : collisionEvents) {
// TODO: Current physics uses null idA or idB for non-entities. The plan is to handle MOTIONSTATE_TYPE_AVATAR,
// and then MOTIONSTATE_TYPE_MYAVATAR. As it is, this code only covers the case of my avatar (in which case one
@@ -285,6 +284,7 @@ void AvatarManager::handleCollisionEvents(CollisionEvents& collisionEvents) {
const float AVATAR_STRETCH_FACTOR = 1.0f;
AudioInjector::playSound(collisionSoundURL, energyFactorOfFull, AVATAR_STRETCH_FACTOR, myAvatar->getPosition());
myAvatar->collisionWithEntity(collision);
}
}
}
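The myAvatar->collisionWithEntity(collision) call above is the C++ side of the MyAvatar.collisionWithEntity signal that stick.js connects to. A minimal script-side consumer, assuming the Collision fields the scripts above already read:

// Sketch: react to the avatar-entity collision events emitted above.
MyAvatar.collisionWithEntity.connect(function (collision) {
    var impulse = Vec3.length(collision.velocityChange);
    print("hit entity " + (collision.idA || collision.idB) + ", |velocityChange| = " + impulse);
});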

View file

@@ -296,7 +296,7 @@ void Head::relaxLean(float deltaTime) {
void Head::render(RenderArgs* renderArgs, float alpha, ViewFrustum* renderFrustum, bool postLighting) {
if (_renderLookatVectors) {
renderLookatVectors(renderArgs, _leftEyePosition, _rightEyePosition, getCorrectedLookAtPosition());
}
}
@@ -324,7 +324,7 @@ glm::vec3 Head::getCorrectedLookAtPosition() {
}
void Head::setCorrectedLookAtPosition(glm::vec3 correctedLookAtPosition) {
_isLookingAtMe = true;
_correctedLookAtPosition = correctedLookAtPosition;
}

View file

@@ -22,11 +22,6 @@
#include "InterfaceConfig.h"
#include "world.h"
enum eyeContactTargets {
LEFT_EYE,
RIGHT_EYE,
MOUTH
};
const float EYE_EAR_GAP = 0.08f;
@@ -77,6 +72,7 @@ public:
const glm::vec3& getLeftEyePosition() const { return _leftEyePosition; }
glm::vec3 getRightEarPosition() const { return _rightEyePosition + (getRightDirection() * EYE_EAR_GAP) + (getFrontDirection() * -EYE_EAR_GAP); }
glm::vec3 getLeftEarPosition() const { return _leftEyePosition + (getRightDirection() * -EYE_EAR_GAP) + (getFrontDirection() * -EYE_EAR_GAP); }
glm::vec3 getMouthPosition() const { return _eyePosition - getUpDirection() * glm::length(_rightEyePosition - _leftEyePosition); }
FaceModel& getFaceModel() { return _faceModel; }
const FaceModel& getFaceModel() const { return _faceModel; }
@@ -148,7 +144,7 @@ private:
FaceModel _faceModel;
glm::vec3 _correctedLookAtPosition;
int _leftEyeLookAtID;
int _rightEyeLookAtID;

View file

@@ -34,6 +34,9 @@
#include <TextRenderer.h>
#include <UserActivityLogger.h>
#include "devices/Faceshift.h"
#include "devices/OculusManager.h"
#include "Application.h"
#include "AvatarManager.h"
#include "Environment.h"
@@ -42,7 +45,6 @@
#include "MyAvatar.h"
#include "Physics.h"
#include "Recorder.h"
#include "devices/Faceshift.h"
#include "Util.h"
#include "InterfaceLogging.h"
@@ -97,7 +99,7 @@ MyAvatar::MyAvatar() :
_shouldRender(true),
_billboardValid(false),
_feetTouchFloor(true),
_isLookingAtLeftEye(true),
_eyeContactTarget(LEFT_EYE),
_realWorldFieldOfView("realWorldFieldOfView",
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
_firstPersonSkeletonModel(this),
@@ -884,7 +886,6 @@ void MyAvatar::updateLookAtTargetAvatar() {
const float KEEP_LOOKING_AT_CURRENT_ANGLE_FACTOR = 1.3f;
const float GREATEST_LOOKING_AT_DISTANCE = 10.0f;
int howManyLookingAtMe = 0;
foreach (const AvatarSharedPointer& avatarPointer, DependencyManager::get<AvatarManager>()->getAvatarHash()) {
Avatar* avatar = static_cast<Avatar*>(avatarPointer.get());
bool isCurrentTarget = avatar->getIsLookAtTarget();
@@ -897,17 +898,22 @@ void MyAvatar::updateLookAtTargetAvatar() {
_targetAvatarPosition = avatarPointer->getPosition();
smallestAngleTo = angleTo;
}
// Check if this avatar is looking at me, and fix their gaze on my camera if so
if (Application::getInstance()->isLookingAtMyAvatar(avatar)) {
howManyLookingAtMe++;
// Have that avatar look directly at my camera
// Philip TODO: correct to look at left/right eye
if (qApp->isHMDMode()) {
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition());
// FIXME what is the point of this?
// avatar->getHead()->setCorrectedLookAtPosition(OculusManager::getLeftEyePosition());
// Alter their gaze to look directly at my camera; this looks more natural than looking at my avatar's face.
// Offset their gaze according to whether they're looking at one of my eyes or my mouth.
glm::vec3 gazeOffset = avatar->getHead()->getLookAtPosition() - getHead()->getEyePosition();
const float HUMAN_EYE_SEPARATION = 0.065f;
float myEyeSeparation = glm::length(getHead()->getLeftEyePosition() - getHead()->getRightEyePosition());
gazeOffset = gazeOffset * HUMAN_EYE_SEPARATION / myEyeSeparation;
if (Application::getInstance()->isHMDMode()) {
//avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getCamera()->getPosition()
// + OculusManager::getMidEyePosition() + gazeOffset);
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+ OculusManager::getMidEyePosition() + gazeOffset);
} else {
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition());
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+ gazeOffset);
}
} else {
avatar->getHead()->clearCorrectedLookAtPosition();
@@ -924,12 +930,24 @@ void MyAvatar::clearLookAtTargetAvatar() {
_lookAtTargetAvatar.reset();
}
bool MyAvatar::isLookingAtLeftEye() {
float const CHANCE_OF_CHANGING_EYE = 0.01f;
if (randFloat() < CHANCE_OF_CHANGING_EYE) {
_isLookingAtLeftEye = !_isLookingAtLeftEye;
eyeContactTarget MyAvatar::getEyeContactTarget() {
float const CHANCE_OF_CHANGING_TARGET = 0.01f;
if (randFloat() < CHANCE_OF_CHANGING_TARGET) {
float const FIFTY_FIFTY_CHANCE = 0.5f;
switch (_eyeContactTarget) {
case LEFT_EYE:
_eyeContactTarget = (randFloat() < FIFTY_FIFTY_CHANCE) ? MOUTH : RIGHT_EYE;
break;
case RIGHT_EYE:
_eyeContactTarget = (randFloat() < FIFTY_FIFTY_CHANCE) ? LEFT_EYE : MOUTH;
break;
case MOUTH:
_eyeContactTarget = (randFloat() < FIFTY_FIFTY_CHANCE) ? RIGHT_EYE : LEFT_EYE;
break;
}
}
return _isLookingAtLeftEye;
return _eyeContactTarget;
}
glm::vec3 MyAvatar::getDefaultEyePosition() const {

View file

@@ -19,6 +19,12 @@
class ModelItemID;
enum eyeContactTarget {
LEFT_EYE,
RIGHT_EYE,
MOUTH
};
class MyAvatar : public Avatar {
Q_OBJECT
Q_PROPERTY(bool shouldRenderLocally READ getShouldRenderLocally WRITE setShouldRenderLocally)
@@ -94,7 +100,7 @@ public:
bool isMyAvatar() const { return true; }
bool isLookingAtLeftEye();
eyeContactTarget getEyeContactTarget();
virtual int parseDataAtOffset(const QByteArray& packet, int offset);
@@ -209,6 +215,7 @@ public slots:
signals:
void transformChanged();
void newCollisionSoundURL(const QUrl& url);
void collisionWithEntity(const Collision& collision);
private:
@@ -251,7 +258,7 @@ private:
QList<AnimationHandlePointer> _animationHandles;
bool _feetTouchFloor;
bool _isLookingAtLeftEye;
eyeContactTarget _eyeContactTarget;
RecorderPointer _recorder;

View file

@@ -283,6 +283,7 @@ static ovrVector3f _eyeOffsets[ovrEye_Count];
glm::vec3 OculusManager::getLeftEyePosition() { return _eyePositions[ovrEye_Left]; }
glm::vec3 OculusManager::getRightEyePosition() { return _eyePositions[ovrEye_Right]; }
glm::vec3 OculusManager::getMidEyePosition() { return (_eyePositions[ovrEye_Left] + _eyePositions[ovrEye_Right]) / 2.0f; }
void OculusManager::connect(QOpenGLContext* shareContext) {
qCDebug(interfaceapp) << "Oculus SDK" << OVR_VERSION_STRING;
@@ -692,13 +693,13 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
_eyeRenderPoses[eye] = eyePoses[eye];
// Set the camera rotation for this eye
vec3 eyePosition = toGlm(_eyeRenderPoses[eye].Position);
eyePosition = whichCamera.getRotation() * eyePosition;
_eyePositions[eye] = toGlm(_eyeRenderPoses[eye].Position);
_eyePositions[eye] = whichCamera.getRotation() * _eyePositions[eye];
quat eyeRotation = toGlm(_eyeRenderPoses[eye].Orientation);
// Update our camera to what the application camera is doing
_camera->setRotation(whichCamera.getRotation() * eyeRotation);
_camera->setPosition(whichCamera.getPosition() + eyePosition);
_camera->setPosition(whichCamera.getPosition() + _eyePositions[eye]);
configureCamera(*_camera);
_camera->update(1.0f / Application::getInstance()->getFps());

View file

@@ -47,6 +47,7 @@ public:
static glm::vec3 getLeftEyePosition();
static glm::vec3 getRightEyePosition();
static glm::vec3 getMidEyePosition();
static int getHMDScreen();

View file

@@ -21,6 +21,7 @@
#include "Application.h"
#include "../octree/OctreePacketProcessor.h"
#include "ui/OctreeStatsDialog.h"
OctreeStatsDialog::OctreeStatsDialog(QWidget* parent, NodeToOctreeSceneStats* model) :
@@ -53,7 +54,7 @@ OctreeStatsDialog::OctreeStatsDialog(QWidget* parent, NodeToOctreeSceneStats* mo
_localElementsMemory = AddStatItem("Elements Memory");
_sendingMode = AddStatItem("Sending Mode");
_processedPackets = AddStatItem("Processed Packets");
_processedPackets = AddStatItem("Entity Packets");
_processedPacketsElements = AddStatItem("Processed Packets Elements");
_processedPacketsEntities = AddStatItem("Processed Packets Entities");
_processedPacketsTiming = AddStatItem("Processed Packets Timing");
@@ -155,6 +156,8 @@ void OctreeStatsDialog::paintEvent(QPaintEvent* event) {
if (sinceLastRefresh < REFRESH_AFTER) {
return QDialog::paintEvent(event);
}
const int FLOATING_POINT_PRECISION = 3;
_lastRefresh = now;
// Update labels
@@ -245,7 +248,6 @@ void OctreeStatsDialog::paintEvent(QPaintEvent* event) {
auto averageElementsPerPacket = entities->getAverageElementsPerPacket();
auto averageEntitiesPerPacket = entities->getAverageEntitiesPerPacket();
auto averagePacketsPerSecond = entities->getAveragePacketsPerSecond();
auto averageElementsPerSecond = entities->getAverageElementsPerSecond();
auto averageEntitiesPerSecond = entities->getAverageEntitiesPerSecond();
@@ -253,21 +255,32 @@ void OctreeStatsDialog::paintEvent(QPaintEvent* event) {
auto averageUncompressPerPacket = entities->getAverageUncompressPerPacket();
auto averageReadBitstreamPerPacket = entities->getAverageReadBitstreamPerPacket();
QString averageElementsPerPacketString = locale.toString(averageElementsPerPacket);
QString averageEntitiesPerPacketString = locale.toString(averageEntitiesPerPacket);
QString averageElementsPerPacketString = locale.toString(averageElementsPerPacket, 'f', FLOATING_POINT_PRECISION);
QString averageEntitiesPerPacketString = locale.toString(averageEntitiesPerPacket, 'f', FLOATING_POINT_PRECISION);
QString averagePacketsPerSecondString = locale.toString(averagePacketsPerSecond);
QString averageElementsPerSecondString = locale.toString(averageElementsPerSecond);
QString averageEntitiesPerSecondString = locale.toString(averageEntitiesPerSecond);
QString averageElementsPerSecondString = locale.toString(averageElementsPerSecond, 'f', FLOATING_POINT_PRECISION);
QString averageEntitiesPerSecondString = locale.toString(averageEntitiesPerSecond, 'f', FLOATING_POINT_PRECISION);
QString averageWaitLockPerPacketString = locale.toString(averageWaitLockPerPacket);
QString averageUncompressPerPacketString = locale.toString(averageUncompressPerPacket);
QString averageReadBitstreamPerPacketString = locale.toString(averageReadBitstreamPerPacket);
label = _labels[_processedPackets];
const OctreePacketProcessor& entitiesPacketProcessor = Application::getInstance()->getOctreePacketProcessor();
auto incomingPPS = entitiesPacketProcessor.getIncomingPPS();
auto processedPPS = entitiesPacketProcessor.getProcessedPPS();
auto treeProcessedPPS = entities->getAveragePacketsPerSecond();
QString incomingPPSString = locale.toString(incomingPPS, 'f', FLOATING_POINT_PRECISION);
QString processedPPSString = locale.toString(processedPPS, 'f', FLOATING_POINT_PRECISION);
QString treeProcessedPPSString = locale.toString(treeProcessedPPS, 'f', FLOATING_POINT_PRECISION);
statsValue.str("");
statsValue <<
"" << qPrintable(averagePacketsPerSecondString) << " per second";
"Network IN: " << qPrintable(incomingPPSString) << " PPS / " <<
"Queue OUT: " << qPrintable(processedPPSString) << " PPS / " <<
"Tree IN: " << qPrintable(treeProcessedPPSString) << " PPS";
label->setText(statsValue.str().c_str());
@@ -321,7 +334,7 @@ void OctreeStatsDialog::paintEvent(QPaintEvent* event) {
}
QString totalTrackedEditsString = locale.toString((uint)totalTrackedEdits);
QString updatesPerSecondString = locale.toString(updatesPerSecond);
QString updatesPerSecondString = locale.toString(updatesPerSecond, 'f', FLOATING_POINT_PRECISION);
QString bytesPerEditString = locale.toString(bytesPerEdit);
statsValue.str("");

View file

@@ -13,6 +13,7 @@
#include "Text3DOverlay.h"
#include <DeferredLightingEffect.h>
#include <RenderDeferredTask.h>
#include <TextRenderer3D.h>
@@ -114,6 +115,7 @@ void Text3DOverlay::render(RenderArgs* args) {
glm::vec3 topLeft(-halfDimensions.x, -halfDimensions.y, SLIGHTLY_BEHIND);
glm::vec3 bottomRight(halfDimensions.x, halfDimensions.y, SLIGHTLY_BEHIND);
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, false, true, false, true);
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, quadColor);
// Same font properties as textSize()

View file

@@ -36,10 +36,6 @@ void RenderableTextEntityItem::render(RenderArgs* args) {
glm::vec4 backgroundColor = glm::vec4(toGlm(getBackgroundColorX()), 1.0f);
glm::vec3 dimensions = getDimensions();
Transform transformToTopLeft = getTransformToCenter();
transformToTopLeft.postTranslate(glm::vec3(-0.5f, 0.5f, 0.0f)); // Go to the top left
transformToTopLeft.setScale(1.0f); // Use a scale of one so that the text is not deformed
// Render background
glm::vec3 minCorner = glm::vec3(0.0f, -dimensions.y, SLIGHTLY_BEHIND);
glm::vec3 maxCorner = glm::vec3(dimensions.x, 0.0f, SLIGHTLY_BEHIND);
@@ -48,15 +44,22 @@ void RenderableTextEntityItem::render(RenderArgs* args) {
// Batch render calls
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
Transform transformToTopLeft = getTransformToCenter();
if (getFaceCamera()) {
//rotate about vertical to face the camera
glm::vec3 dPosition = args->_viewFrustum->getPosition() - getPosition();
// If x and z are 0, atan(x, z) is undefined, so default to 0 degrees
float yawRotation = dPosition.x == 0.0f && dPosition.z == 0.0f ? 0.0f : glm::atan(dPosition.x, dPosition.z);
glm::quat orientation = glm::quat(glm::vec3(0.0f, yawRotation, 0.0f));
transformToTopLeft.setRotation(orientation);
}
transformToTopLeft.postTranslate(glm::vec3(-0.5f, 0.5f, 0.0f)); // Go to the top left
transformToTopLeft.setScale(1.0f); // Use a scale of one so that the text is not deformed
batch.setModelTransform(transformToTopLeft);
//rotate about vertical to face the camera
if (getFaceCamera()) {
transformToTopLeft.postRotate(args->_viewFrustum->getOrientation());
batch.setModelTransform(transformToTopLeft);
}
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, false, false);
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, false, false, false, true);
DependencyManager::get<GeometryCache>()->renderQuad(batch, minCorner, maxCorner, backgroundColor);
float scale = _lineHeight / _textRenderer->getFontSize();
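The getFaceCamera() branch above rotates the text about the vertical axis toward the viewer instead of copying the full camera orientation. On the script side this is driven by a text-entity property; a hedged sketch, assuming the scriptable property is named faceCamera (inferred from the getFaceCamera() accessor above) along with standard Text entity properties:

// Hypothetical example: a text label that yaws to face the viewer.
// Property names are assumptions inferred from the C++ accessors above.
var labelID = Entities.addEntity({
    type: "Text",
    text: "Hello",
    lineHeight: 0.1,
    dimensions: {x: 1.0, y: 0.5, z: 0.01},
    position: Vec3.sum(MyAvatar.position, {x: 0.0, y: 1.0, z: -2.0}),
    faceCamera: true
});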

View file

@@ -92,13 +92,11 @@ void EntityTree::postAddEntity(EntityItemPointer entity) {
bool EntityTree::updateEntity(const EntityItemID& entityID, const EntityItemProperties& properties, const SharedNodePointer& senderNode) {
EntityTreeElement* containingElement = getContainingElement(entityID);
if (!containingElement) {
qCDebug(entities) << "UNEXPECTED!!!! EntityTree::updateEntity() entityID doesn't exist!!! entityID=" << entityID;
return false;
}
EntityItemPointer existingEntity = containingElement->getEntityWithEntityItemID(entityID);
if (!existingEntity) {
qCDebug(entities) << "UNEXPECTED!!!! don't call updateEntity() on entity items that don't exist. entityID=" << entityID;
return false;
}
@@ -108,8 +106,6 @@ bool EntityTree::updateEntity(EntityItemPointer entity, const EntityItemProp
bool EntityTree::updateEntity(EntityItemPointer entity, const EntityItemProperties& properties, const SharedNodePointer& senderNode) {
EntityTreeElement* containingElement = getContainingElement(entity->getEntityItemID());
if (!containingElement) {
qCDebug(entities) << "UNEXPECTED!!!! EntityTree::updateEntity() entity-->element lookup failed!!! entityID="
<< entity->getEntityItemID();
return false;
}
return updateEntityWithElement(entity, properties, containingElement, senderNode);

View file

@@ -589,7 +589,7 @@ void GLBackend::do_setStateAntialiasedLineEnable(bool enable) {
void GLBackend::do_setStateDepthBias(Vec2 bias) {
if ( (bias.x != _pipeline._stateCache.depthBias) || (bias.y != _pipeline._stateCache.depthBiasSlopeScale)) {
if ((bias.x != 0.f) || (bias.y != 0.f)) {
if ((bias.x != 0.0f) || (bias.y != 0.0f)) {
glEnable(GL_POLYGON_OFFSET_FILL);
glEnable(GL_POLYGON_OFFSET_LINE);
glEnable(GL_POLYGON_OFFSET_POINT);

View file

@@ -9,10 +9,17 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <NumericalConstants.h>
#include "NodeList.h"
#include "ReceivedPacketProcessor.h"
#include "SharedUtil.h"
ReceivedPacketProcessor::ReceivedPacketProcessor() {
_lastWindowAt = usecTimestampNow();
}
void ReceivedPacketProcessor::terminating() {
_hasPackets.wakeAll();
}
@@ -25,6 +32,7 @@ void ReceivedPacketProcessor::queueReceivedPacket(const SharedNodePointer& sendi
lock();
_packets.push_back(networkPacket);
_nodePacketCounts[sendingNode->getUUID()]++;
_lastWindowIncomingPackets++;
unlock();
// Make sure to wake our actual processing thread because we now have packets for it to process.
@@ -32,6 +40,24 @@ void ReceivedPacketProcessor::queueReceivedPacket(const SharedNodePointer& sendi
}
bool ReceivedPacketProcessor::process() {
quint64 now = usecTimestampNow();
quint64 sinceLastWindow = now - _lastWindowAt;
if (sinceLastWindow > USECS_PER_SECOND) {
lock();
float secondsSinceLastWindow = (float)sinceLastWindow / (float)USECS_PER_SECOND;
float incomingPacketsPerSecondInWindow = (float)_lastWindowIncomingPackets / secondsSinceLastWindow;
_incomingPPS.updateAverage(incomingPacketsPerSecondInWindow);
float processedPacketsPerSecondInWindow = (float)_lastWindowProcessedPackets / secondsSinceLastWindow;
_processedPPS.updateAverage(processedPacketsPerSecondInWindow);
_lastWindowAt = now;
_lastWindowIncomingPackets = 0;
_lastWindowProcessedPackets = 0;
unlock();
}
if (_packets.size() == 0) {
_waitingOnPacketsMutex.lock();
@@ -51,6 +77,7 @@ bool ReceivedPacketProcessor::process() {
foreach(auto& packet, currentPackets) {
processPacket(packet.getNode(), packet.getByteArray());
_lastWindowProcessedPackets++;
midProcess();
}

View file

@@ -21,7 +21,7 @@
class ReceivedPacketProcessor : public GenericThread {
Q_OBJECT
public:
ReceivedPacketProcessor() { }
ReceivedPacketProcessor();
/// Add packet from network receive thread to the processing queue.
void queueReceivedPacket(const SharedNodePointer& sendingNode, const QByteArray& packet);
@@ -47,6 +47,9 @@ public:
/// How many received packets are waiting to be processed
int packetsToProcessCount() const { return _packets.size(); }
float getIncomingPPS() const { return _incomingPPS.getAverage(); }
float getProcessedPPS() const { return _processedPPS.getAverage(); }
virtual void terminating();
public slots:
@@ -80,6 +83,12 @@ protected:
QWaitCondition _hasPackets;
QMutex _waitingOnPacketsMutex;
quint64 _lastWindowAt = 0;
int _lastWindowIncomingPackets = 0;
int _lastWindowProcessedPackets = 0;
SimpleMovingAverage _incomingPPS;
SimpleMovingAverage _processedPPS;
};
#endif // hifi_ReceivedPacketProcessor_h

View file

@@ -50,37 +50,44 @@
static const std::string glowIntensityShaderHandle = "glowIntensity";
gpu::PipelinePointer DeferredLightingEffect::getPipeline(SimpleProgramKey config) {
auto it = _simplePrograms.find(config);
if (it != _simplePrograms.end()) {
return it.value();
}
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
if (config.isCulled()) {
state->setCullMode(gpu::State::CULL_BACK);
} else {
state->setCullMode(gpu::State::CULL_NONE);
}
state->setDepthTest(true, true, gpu::LESS_EQUAL);
if (config.hasDepthBias()) {
state->setDepthBias(1.0f);
state->setDepthBiasSlopeScale(1.0f);
}
state->setBlendFunction(false,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
gpu::ShaderPointer program = (config.isEmissive()) ? _emissiveShader : _simpleShader;
gpu::PipelinePointer pipeline = gpu::PipelinePointer(gpu::Pipeline::create(program, state));
_simplePrograms.insert(config, pipeline);
return pipeline;
}
void DeferredLightingEffect::init(AbstractViewStateInterface* viewState) {
auto VS = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(simple_vert)));
auto PS = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(simple_textured_frag)));
auto PSEmissive = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(simple_textured_emisive_frag)));
gpu::ShaderPointer program = gpu::ShaderPointer(gpu::Shader::createProgram(VS, PS));
gpu::ShaderPointer programEmissive = gpu::ShaderPointer(gpu::Shader::createProgram(VS, PSEmissive));
_simpleShader = gpu::ShaderPointer(gpu::Shader::createProgram(VS, PS));
_emissiveShader = gpu::ShaderPointer(gpu::Shader::createProgram(VS, PSEmissive));
gpu::Shader::BindingSet slotBindings;
gpu::Shader::makeProgram(*program, slotBindings);
gpu::Shader::makeProgram(*programEmissive, slotBindings);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, true, gpu::LESS_EQUAL);
state->setBlendFunction(false,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
gpu::StatePointer stateCullNone = gpu::StatePointer(new gpu::State());
stateCullNone->setCullMode(gpu::State::CULL_NONE);
stateCullNone->setDepthTest(true, true, gpu::LESS_EQUAL);
stateCullNone->setBlendFunction(false,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
_simpleProgram = gpu::PipelinePointer(gpu::Pipeline::create(program, state));
_simpleProgramCullNone = gpu::PipelinePointer(gpu::Pipeline::create(program, stateCullNone));
_simpleProgramEmissive = gpu::PipelinePointer(gpu::Pipeline::create(programEmissive, state));
_simpleProgramEmissiveCullNone = gpu::PipelinePointer(gpu::Pipeline::create(programEmissive, stateCullNone));
gpu::Shader::makeProgram(*_simpleShader, slotBindings);
gpu::Shader::makeProgram(*_emissiveShader, slotBindings);
_viewState = viewState;
loadLightProgram(directional_light_frag, false, _directionalLight, _directionalLightLocations);
@@ -117,21 +124,12 @@ void DeferredLightingEffect::init(AbstractViewStateInterface* viewState) {
lp->setAmbientSpherePreset(gpu::SphericalHarmonics::Preset(_ambientLightMode % gpu::SphericalHarmonics::NUM_PRESET));
}
void DeferredLightingEffect::bindSimpleProgram(gpu::Batch& batch, bool textured, bool culled, bool emmisive) {
if (emmisive) {
if (culled) {
batch.setPipeline(_simpleProgramEmissive);
} else {
batch.setPipeline(_simpleProgramEmissiveCullNone);
}
} else {
if (culled) {
batch.setPipeline(_simpleProgram);
} else {
batch.setPipeline(_simpleProgramCullNone);
}
}
if (!textured) {
void DeferredLightingEffect::bindSimpleProgram(gpu::Batch& batch, bool textured, bool culled,
bool emmisive, bool depthBias) {
SimpleProgramKey config{textured, culled, emmisive, depthBias};
batch.setPipeline(getPipeline(config));
if (!config.isTextured()) {
// If it is not textured, bind white texture and keep using textured pipeline
batch.setUniformTexture(0, DependencyManager::get<TextureCache>()->getWhiteTexture());
}

View file

@@ -24,6 +24,7 @@
class AbstractViewStateInterface;
class RenderArgs;
class SimpleProgramKey;
/// Handles deferred lighting for the bits that require it (voxels...)
class DeferredLightingEffect : public Dependency {
@@ -34,7 +35,8 @@ public:
void init(AbstractViewStateInterface* viewState);
/// Sets up the state necessary to render static untextured geometry with the simple program.
void bindSimpleProgram(gpu::Batch& batch, bool textured = false, bool culled = true, bool emmisive = false);
void bindSimpleProgram(gpu::Batch& batch, bool textured = false, bool culled = true,
bool emmisive = false, bool depthBias = false);
//// Renders a solid sphere with the simple program.
void renderSolidSphere(gpu::Batch& batch, float radius, int slices, int stacks, const glm::vec4& color);
@@ -95,11 +97,11 @@ private:
};
static void loadLightProgram(const char* fragSource, bool limited, ProgramObject& program, LightLocations& locations);
gpu::PipelinePointer getPipeline(SimpleProgramKey config);
gpu::PipelinePointer _simpleProgram;
gpu::PipelinePointer _simpleProgramCullNone;
gpu::PipelinePointer _simpleProgramEmissive;
gpu::PipelinePointer _simpleProgramEmissiveCullNone;
gpu::ShaderPointer _simpleShader;
gpu::ShaderPointer _emissiveShader;
QHash<SimpleProgramKey, gpu::PipelinePointer> _simplePrograms;
ProgramObject _directionalSkyboxLight;
LightLocations _directionalSkyboxLightLocations;
@@ -160,4 +162,53 @@ private:
model::SkyboxPointer _skybox;
};
class SimpleProgramKey {
public:
enum FlagBit {
IS_TEXTURED_FLAG = 0,
IS_CULLED_FLAG,
IS_EMISSIVE_FLAG,
HAS_DEPTH_BIAS_FLAG,
NUM_FLAGS,
};
enum Flag {
IS_TEXTURED = (1 << IS_TEXTURED_FLAG),
IS_CULLED = (1 << IS_CULLED_FLAG),
IS_EMISSIVE = (1 << IS_EMISSIVE_FLAG),
HAS_DEPTH_BIAS = (1 << HAS_DEPTH_BIAS_FLAG),
};
typedef unsigned short Flags;
bool isFlag(short flagNum) const { return bool((_flags & flagNum) != 0); }
bool isTextured() const { return isFlag(IS_TEXTURED); }
bool isCulled() const { return isFlag(IS_CULLED); }
bool isEmissive() const { return isFlag(IS_EMISSIVE); }
bool hasDepthBias() const { return isFlag(HAS_DEPTH_BIAS); }
Flags _flags = 0;
short _spare = 0;
int getRaw() const { return *reinterpret_cast<const int*>(this); }
SimpleProgramKey(bool textured = false, bool culled = true,
bool emissive = false, bool depthBias = false) {
_flags = (textured ? IS_TEXTURED : 0) | (culled ? IS_CULLED : 0) |
(emissive ? IS_EMISSIVE : 0) | (depthBias ? HAS_DEPTH_BIAS : 0);
}
SimpleProgramKey(int bitmask) : _flags(bitmask) {}
};
inline uint qHash(const SimpleProgramKey& key, uint seed) {
return qHash(key.getRaw(), seed);
}
inline bool operator==(const SimpleProgramKey& a, const SimpleProgramKey& b) {
return a.getRaw() == b.getRaw();
}
#endif // hifi_DeferredLightingEffect_h

View file

@@ -229,6 +229,7 @@ QScriptValue collisionToScriptValue(QScriptEngine* engine, const Collision& coll
obj.setProperty("idB", quuidToScriptValue(engine, collision.idB));
obj.setProperty("penetration", vec3toScriptValue(engine, collision.penetration));
obj.setProperty("contactPoint", vec3toScriptValue(engine, collision.contactPoint));
obj.setProperty("velocityChange", vec3toScriptValue(engine, collision.velocityChange));
return obj;
}
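With velocityChange now copied into the script value, entity collision handlers can weight hits by impulse exactly as stick.js's computeEnergy does. A minimal sketch using the Script.addEventHandler registration shown earlier (myEntityID is a placeholder for an entity you created):

// Sketch: read the newly exposed velocityChange field in a collision handler.
function onCollision(idA, idB, collision) {
    var impulse = Vec3.length(collision.velocityChange);
    print("collision of " + idA + " with " + idB + ", |velocityChange| = " + impulse);
}
Script.addEventHandler(myEntityID, "collisionWithEntity", onCollision); // myEntityID: hypothetical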