Merge branch 'master' into plugins

Conflicts:
	interface/resources/qml/AddressBarDialog.qml
	interface/src/Application.cpp
	interface/src/Application.h
	interface/src/Environment.cpp
	interface/src/Environment.h
	interface/src/devices/TV3DManager.cpp
	interface/src/ui/ApplicationOverlay.h
	interface/src/ui/overlays/LocalModelsOverlay.cpp
Brad Davis 2015-06-10 14:21:48 -07:00
commit 7f6a49688a
192 changed files with 7651 additions and 3127 deletions
examples
interface
libraries

examples/dialTone.js (new file, 23 lines)

@ -0,0 +1,23 @@
//
// dialTone.js
// examples
//
// Created by Stephen Birarda on 06/08/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// set up the local sound we're going to use
var connectSound = SoundCache.getSound("file://" + Paths.resources + "sounds/short1.wav");
// set up the options needed for that sound
var connectSoundOptions = {
localOnly: true
}
// play the sound locally once we get the first audio packet from a mixer
Audio.receivedFirstPacket.connect(function(){
Audio.playSound(connectSound, connectSoundOptions);
});
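The localOnly option above keeps playback on this interface only. The same Audio.playSound call also accepts position and volume injector options, as seen in the commented-out sound code in grab.js later in this commit; a minimal sketch (not part of dialTone.js) that would play the connect sound in-world instead:

// Sketch only: spatialized playback at the avatar's position; 0.5 is an illustrative volume.
var spatialOptions = {
    position: MyAvatar.position,
    volume: 0.5
};
Audio.playSound(connectSound, spatialOptions);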


@ -0,0 +1,89 @@
// dynamicLandscape.js
// examples
//
// Created by Eric Levin on June 8
// Copyright 2015 High Fidelity, Inc.
//
// Meditative ocean landscape
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
Script.include(HIFI_PUBLIC_BUCKET + 'scripts/utilities.js')
var NUM_ROWS = 10;
var CUBE_SIZE = 1;
var cubes = [];
var cubesSettings = [];
var time = 0;
var OMEGA = 2.0 * Math.PI/8;
var RANGE = CUBE_SIZE/2;
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(CUBE_SIZE* 10, Quat.getFront(Camera.getOrientation())));
for (var x = 0, rowIndex = 0; x < NUM_ROWS * CUBE_SIZE; x += CUBE_SIZE, rowIndex++) {
for (var z = 0, columnIndex = 0; z < NUM_ROWS * CUBE_SIZE; z += CUBE_SIZE, columnIndex++) {
var baseHeight = map( columnIndex + 1, 1, NUM_ROWS, -CUBE_SIZE * 2, -CUBE_SIZE);
var relativePosition = {
x: x,
y: baseHeight,
z: z
};
var position = Vec3.sum(center, relativePosition);
cubes.push(Entities.addEntity({
type: 'Box',
position: position,
dimensions: {
x: CUBE_SIZE,
y: CUBE_SIZE,
z: CUBE_SIZE
}
}));
var phase = map( (columnIndex + 1) * (rowIndex + 1), 2, NUM_ROWS * NUM_ROWS, Math.PI * 2, Math.PI * 4);
cubesSettings.push({
baseHeight: center.y + baseHeight,
phase: phase
})
}
}
function update(deltaTime) {
time += deltaTime;
for (var i = 0; i < cubes.length; i++) {
var phase = cubesSettings[i].phase;
var props = Entities.getEntityProperties(cubes[i]);
var newHeight = Math.sin(time * OMEGA + phase) / 2.0;
var hue = map(newHeight, -0.5, 0.5, 0.5, 0.7);
var light = map(newHeight, -0.5, 0.5, 0.4, 0.6);
newHeight = cubesSettings[i].baseHeight + (newHeight * RANGE);
var newVelocityY = Math.cos(time * OMEGA + phase) / 2.0 * RANGE * OMEGA;
var newPosition = props.position;
var newVelocity = props.velocity;
newPosition.y = newHeight;
newVelocity.y = newVelocityY;
Entities.editEntity(cubes[i], {
position: newPosition,
velocity: newVelocity,
color: hslToRgb({hue: hue, sat: 0.7, light: light})
});
}
}
function cleanup() {
cubes.forEach(function(cube) {
Entities.deleteEntity(cube);
})
}
Script.update.connect(update);
Script.scriptEnding.connect(cleanup)
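In update() above, the height offset and vertical velocity of each cube form a function/derivative pair, y(t) = (RANGE / 2) * sin(OMEGA * t + phase) and dy/dt = (RANGE / 2) * OMEGA * cos(OMEGA * t + phase), so the velocity handed to Entities.editEntity is consistent with how the position changes. A small check with the script's own constants (illustrative values, not part of the commit):

// With OMEGA = PI / 4 and RANGE = 0.5: at t = 2 seconds and phase = 2 * PI the cube
// sits at the crest of its wave, so the offset is the full amplitude and the speed is ~0.
var t = 2.0;
var phase = Math.PI * 2;
var y = (RANGE / 2) * Math.sin(OMEGA * t + phase);            // 0.25
var dydt = (RANGE / 2) * OMEGA * Math.cos(OMEGA * t + phase); // ~0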


@ -1,4 +1,3 @@
// grab.js
// examples
//
@ -11,247 +10,335 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var isGrabbing = false;
var grabbedEntity = null;
var actionID = null;
var prevMouse = {};
var deltaMouse = {
z: 0
}
var entityProps;
var moveUpDown = false;
var MOVE_TIMESCALE = 0.1;
var INV_MOVE_TIMESCALE = 1.0 / MOVE_TIMESCALE;
var MAX_SOLID_ANGLE = 0.01; // objects that appear smaller than this can't be grabbed
var CLOSE_ENOUGH = 0.001;
var FULL_STRENGTH = 0.11;
var SPRING_RATE = 1.5;
var DAMPING_RATE = 0.80;
var ZERO_VEC3 = { x: 0, y: 0, z: 0 };
var ANGULAR_DAMPING_RATE = 0.40;
var SCREEN_TO_METERS = 0.001;
var currentPosition, currentVelocity, cameraEntityDistance, currentRotation;
var velocityTowardTarget, desiredVelocity, addedVelocity, newVelocity, dPosition, camYaw, distanceToTarget, targetPosition;
var originalGravity = {x: 0, y: 0, z: 0};
var shouldRotate = false;
var dQ, theta, axisAngle, dT;
var angularVelocity = {
x: 0,
y: 0,
z: 0
// NOTE: to improve readability global variable names start with 'g'
var gIsGrabbing = false;
var gGrabbedEntity = null;
var gPrevMouse = {x: 0, y: 0};
var gEntityProperties;
var gStartPosition;
var gStartRotation;
var gCurrentPosition;
var gOriginalGravity = ZERO_VEC3;
var gPlaneNormal = ZERO_VEC3;
// gMaxGrabDistance is a function of the size of the object.
var gMaxGrabDistance;
// gGrabMode defines the degrees of freedom of the grab target positions
// relative to gGrabStartPosition. Options include:
// xzPlane (default)
// verticalCylinder (SHIFT)
// rotate (CONTROL)
// Modes to eventually support?:
// xyPlane
// yzPlane
// polar
// elevationAzimuth
var gGrabMode = "xzPlane";
// gGrabOffset allows the user to grab an object off-center. It points from the ray's intersection
// with the move-plane to the object's center (at the moment the grab is initiated). Future target
// positions are offset from the ray's intersection by the same amount.
var gGrabOffset = { x: 0, y: 0, z: 0 };
var gTargetPosition;
var gTargetRotation;
var gLiftKey = false; // SHIFT
var gRotateKey = false; // CONTROL
var gPreviousMouse = { x: 0, y: 0 };
var gMouseCursorLocation = { x: 0, y: 0 };
var gMouseAtRotateStart = { x: 0, y: 0 };
var gBeaconHeight = 0.10;
var gAngularVelocity = ZERO_VEC3;
// TODO: play sounds again when we aren't leaking AudioInjector threads
// var grabSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/CloseClamp.wav");
// var releaseSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/ReleaseClamp.wav");
// var VOLUME = 0.0;
var gBeaconHeight = 0.10;
var BEACON_COLOR = {
red: 200,
green: 200,
blue: 200
};
var BEACON_WIDTH = 2;
var DROP_DISTANCE = 5.0;
var DROP_COLOR = {
red: 200,
green: 200,
blue: 200
};
var DROP_WIDTH = 2;
var dropLine = Overlays.addOverlay("line3d", {
color: DROP_COLOR,
alpha: 1,
visible: false,
lineWidth: DROP_WIDTH
var gBeacon = Overlays.addOverlay("line3d", {
color: BEACON_COLOR,
alpha: 1,
visible: false,
lineWidth: BEACON_WIDTH
});
function vectorIsZero(v) {
return v.x == 0 && v.y == 0 && v.z == 0;
function updateDropLine(position) {
Overlays.editOverlay(gBeacon, {
visible: true,
start: {
x: position.x,
y: position.y + gBeaconHeight,
z: position.z
},
end: {
x: position.x,
y: position.y - gBeaconHeight,
z: position.z
}
});
}
function nearLinePoint(targetPosition) {
// var handPosition = Vec3.sum(MyAvatar.position, {x:0, y:0.2, z:0});
var handPosition = MyAvatar.getRightPalmPosition();
var along = Vec3.subtract(targetPosition, handPosition);
along = Vec3.normalize(along);
along = Vec3.multiply(along, 0.4);
return Vec3.sum(handPosition, along);
function mouseIntersectionWithPlane(pointOnPlane, planeNormal, event) {
var cameraPosition = Camera.getPosition();
var localPointOnPlane = Vec3.subtract(pointOnPlane, cameraPosition);
var distanceFromPlane = Vec3.dot(localPointOnPlane, planeNormal);
var MIN_DISTANCE_FROM_PLANE = 0.001;
if (Math.abs(distanceFromPlane) < MIN_DISTANCE_FROM_PLANE) {
// camera is touching the plane
return pointOnPlane;
}
var pickRay = Camera.computePickRay(event.x, event.y);
var dirDotNorm = Vec3.dot(pickRay.direction, planeNormal);
var MIN_RAY_PLANE_DOT = 0.00001;
var localIntersection;
var useMaxForwardGrab = false;
if (Math.abs(dirDotNorm) > MIN_RAY_PLANE_DOT) {
var distanceToIntersection = distanceFromPlane / dirDotNorm;
if (distanceToIntersection > 0 && distanceToIntersection < gMaxGrabDistance) {
// ray points into the plane
localIntersection = Vec3.multiply(pickRay.direction, distanceFromPlane / dirDotNorm);
} else {
// ray intersects BEHIND the camera or else very far away
// so we clamp the grab point to be the maximum forward position
useMaxForwardGrab = true;
}
} else {
// ray points perpendicular to grab plane
// so we map the grab point to the maximum forward position
useMaxForwardGrab = true;
}
if (useMaxForwardGrab) {
// we re-route the intersection to be in front at max distance.
var rayDirection = Vec3.subtract(pickRay.direction, Vec3.multiply(planeNormal, dirDotNorm));
rayDirection = Vec3.normalize(rayDirection);
localIntersection = Vec3.multiply(rayDirection, gMaxGrabDistance);
localIntersection = Vec3.sum(localIntersection, Vec3.multiply(planeNormal, distanceFromPlane));
}
var worldIntersection = Vec3.sum(cameraPosition, localIntersection);
return worldIntersection;
}
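// Worked example for mouseIntersectionWithPlane (illustrative numbers, not part of the commit):
// with the camera at the origin, pointOnPlane = {x: 0, y: -2, z: 0}, planeNormal = {x: 0, y: 1, z: 0},
// and a pick ray direction of {x: 0, y: -0.5, z: 0.866}, we get distanceFromPlane = -2 and
// dirDotNorm = -0.5, so the ray reaches the plane distanceFromPlane / dirDotNorm = 4 units along
// its direction and the function returns roughly {x: 0, y: -2, z: 3.46}, a point on the y = -2
// plane in front of the camera.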
function computeNewGrabPlane() {
var maybeResetMousePosition = false;
if (gGrabMode !== "rotate") {
gMouseAtRotateStart = gMouseCursorLocation;
} else {
maybeResetMousePosition = true;
}
gGrabMode = "xzPlane";
gPointOnPlane = gCurrentPosition;
gPlaneNormal = { x: 0, y: 1, z: 0 };
if (gLiftKey) {
if (!gRotateKey) {
gGrabMode = "verticalCylinder";
// a new planeNormal will be computed each move
}
} else if (gRotateKey) {
gGrabMode = "rotate";
}
gPointOnPlane = Vec3.subtract(gCurrentPosition, gGrabOffset);
var xzOffset = Vec3.subtract(gPointOnPlane, Camera.getPosition());
xzOffset.y = 0;
gXzDistanceToGrab = Vec3.length(xzOffset);
if (gGrabMode !== "rotate" && maybeResetMousePosition) {
// we reset the mouse position whenever we stop rotating
Window.setCursorPosition(gMouseAtRotateStart.x, gMouseAtRotateStart.y);
}
}
function mousePressEvent(event) {
if (!event.isLeftButton) {
return;
}
var pickRay = Camera.computePickRay(event.x, event.y);
var intersection = Entities.findRayIntersection(pickRay, true); // accurate picking
if (intersection.intersects && intersection.properties.collisionsWillMove) {
grabbedEntity = intersection.entityID;
var props = Entities.getEntityProperties(grabbedEntity)
isGrabbing = true;
originalGravity = props.gravity;
targetPosition = props.position;
currentPosition = props.position;
currentVelocity = props.velocity;
updateDropLine(targetPosition);
Entities.editEntity(grabbedEntity, {
gravity: {x: 0, y: 0, z: 0}
});
Controller.mouseMoveEvent.connect(mouseMoveEvent);
}
}
function updateDropLine(position) {
Overlays.editOverlay(dropLine, {
visible: true,
start: {
x: position.x,
y: position.y + DROP_DISTANCE,
z: position.z
},
end: {
x: position.x,
y: position.y - DROP_DISTANCE,
z: position.z
if (!event.isLeftButton) {
return;
}
})
}
gPreviousMouse = {x: event.x, y: event.y };
var pickRay = Camera.computePickRay(event.x, event.y);
var pickResults = Entities.findRayIntersection(pickRay, true); // accurate picking
if (!pickResults.intersects) {
// didn't click on anything
return;
}
if (!pickResults.properties.collisionsWillMove) {
// only grab dynamic objects
return;
}
var clickedEntity = pickResults.entityID;
var entityProperties = Entities.getEntityProperties(clickedEntity)
var objectPosition = entityProperties.position;
var cameraPosition = Camera.getPosition();
gBeaconHeight = Vec3.length(entityProperties.dimensions);
gMaxGrabDistance = gBeaconHeight / MAX_SOLID_ANGLE;
if (Vec3.distance(objectPosition, cameraPosition) > gMaxGrabDistance) {
// don't allow grabs of things far away
return;
}
Entities.editEntity(clickedEntity, { gravity: ZERO_VEC3 });
gIsGrabbing = true;
gGrabbedEntity = clickedEntity;
gCurrentPosition = entityProperties.position;
gOriginalGravity = entityProperties.gravity;
gTargetPosition = objectPosition;
// compute the grab point
var nearestPoint = Vec3.subtract(objectPosition, cameraPosition);
var distanceToGrab = Vec3.dot(nearestPoint, pickRay.direction);
nearestPoint = Vec3.multiply(distanceToGrab, pickRay.direction);
gPointOnPlane = Vec3.sum(cameraPosition, nearestPoint);
// compute the grab offset
gGrabOffset = Vec3.subtract(objectPosition, gPointOnPlane);
computeNewGrabPlane();
updateDropLine(objectPosition);
// TODO: play sounds again when we aren't leaking AudioInjector threads
//Audio.playSound(grabSound, { position: entityProperties.position, volume: VOLUME });
}
function mouseReleaseEvent() {
if (isGrabbing) {
Controller.mouseMoveEvent.disconnect(mouseMoveEvent);
isGrabbing = false;
Entities.deleteAction(grabbedEntity, actionID);
actionID = null;
if (gIsGrabbing) {
if (Vec3.length(gOriginalGravity) != 0) {
Entities.editEntity(gGrabbedEntity, { gravity: gOriginalGravity });
}
// only restore the original gravity if it's not zero. This is to avoid...
// 1. interface A grabs an entity and locally saves off its gravity
// 2. interface A sets the entity's gravity to zero
// 3. interface B grabs the entity and saves off its gravity (which is zero)
// 4. interface A releases the entity and puts the original gravity back
// 5. interface B releases the entity and puts the original gravity back (to zero)
if (!vectorIsZero(originalGravity)) {
Entities.editEntity(grabbedEntity, {
gravity: originalGravity
});
gIsGrabbing = false
Overlays.editOverlay(gBeacon, { visible: false });
// TODO: play sounds again when we aren't leaking AudioInjector threads
//Audio.playSound(releaseSound, { position: entityProperties.position, volume: VOLUME });
}
Overlays.editOverlay(dropLine, {
visible: false
});
targetPosition = null;
}
}
function mouseMoveEvent(event) {
if (isGrabbing) {
if (!gIsGrabbing) {
return;
}
// see if something added/restored gravity
var props = Entities.getEntityProperties(grabbedEntity);
if (!vectorIsZero(props.gravity)) {
originalGravity = props.gravity;
var entityProperties = Entities.getEntityProperties(gGrabbedEntity);
if (Vec3.length(entityProperties.gravity) != 0) {
gOriginalGravity = entityProperties.gravity;
}
deltaMouse.x = event.x - prevMouse.x;
if (!moveUpDown) {
deltaMouse.z = event.y - prevMouse.y;
deltaMouse.y = 0;
if (gGrabMode === "rotate") {
var deltaMouse = { x: 0, y: 0 };
var dx = event.x - gPreviousMouse.x;
var dy = event.y - gPreviousMouse.y;
var orientation = Camera.getOrientation();
var dragOffset = Vec3.multiply(dx, Quat.getRight(orientation));
dragOffset = Vec3.sum(dragOffset, Vec3.multiply(-dy, Quat.getUp(orientation)));
var axis = Vec3.cross(dragOffset, Quat.getFront(orientation));
var axis = Vec3.normalize(axis);
var ROTATE_STRENGTH = 8.0; // magic number tuned by hand
gAngularVelocity = Vec3.multiply(ROTATE_STRENGTH, axis);
} else {
deltaMouse.y = (event.y - prevMouse.y) * -1;
deltaMouse.z = 0;
var newTargetPosition;
if (gGrabMode === "verticalCylinder") {
// for this mode we recompute the plane based on current Camera
var planeNormal = Quat.getFront(Camera.getOrientation());
planeNormal.y = 0;
planeNormal = Vec3.normalize(planeNormal);
var pointOnCylinder = Vec3.multiply(planeNormal, gXzDistanceToGrab);
pointOnCylinder = Vec3.sum(Camera.getPosition(), pointOnCylinder);
newTargetPosition = mouseIntersectionWithPlane(pointOnCylinder, planeNormal, event);
} else {
var cameraPosition = Camera.getPosition();
newTargetPosition = mouseIntersectionWithPlane(gPointOnPlane, gPlaneNormal, event);
var relativePosition = Vec3.subtract(newTargetPosition, cameraPosition);
var distance = Vec3.length(relativePosition);
if (distance > gMaxGrabDistance) {
// clamp distance
relativePosition = Vec3.multiply(relativePosition, gMaxGrabDistance / distance);
newTargetPosition = Vec3.sum(relativePosition, cameraPosition);
}
}
gTargetPosition = Vec3.sum(newTargetPosition, gGrabOffset);
}
// Update the target position by the amount the mouse moved
camYaw = Quat.safeEulerAngles(Camera.getOrientation()).y;
dPosition = Vec3.multiplyQbyV(Quat.fromPitchYawRollDegrees(0, camYaw, 0), deltaMouse);
if (!shouldRotate) {
// Adjust target position for the object by the mouse move
cameraEntityDistance = Vec3.distance(Camera.getPosition(), currentPosition);
// Scale distance we want to move by the distance from the camera to the grabbed object
// TODO: Correct SCREEN_TO_METERS to be correct for the actual FOV, resolution
targetPosition = Vec3.sum(targetPosition, Vec3.multiply(dPosition, cameraEntityDistance * SCREEN_TO_METERS));
} else if (shouldRotate) {
var transformedDeltaMouse = {
x: deltaMouse.z,
y: deltaMouse.x,
z: deltaMouse.y
};
transformedDeltaMouse = Vec3.multiplyQbyV(Quat.fromPitchYawRollDegrees(0, camYaw, 0), transformedDeltaMouse);
dQ = Quat.fromVec3Degrees(transformedDeltaMouse);
theta = 2 * Math.acos(dQ.w);
axisAngle = Quat.axis(dQ);
angularVelocity = Vec3.multiply((theta / dT), axisAngle);
}
}
prevMouse.x = event.x;
prevMouse.y = event.y;
gPreviousMouse = { x: event.x, y: event.y };
gMouseCursorLocation = { x: Window.getCursorPositionX(), y: Window.getCursorPositionY() };
}
function keyReleaseEvent(event) {
if (event.text === "SHIFT") {
moveUpDown = false;
}
if (event.text === "SPACE") {
shouldRotate = false;
}
if (event.text === "SHIFT") {
gLiftKey = false;
}
if (event.text === "CONTROL") {
gRotateKey = false;
}
computeNewGrabPlane();
}
function keyPressEvent(event) {
if (event.text === "SHIFT") {
moveUpDown = true;
}
if (event.text === "SPACE") {
shouldRotate = true;
}
if (event.text === "SHIFT") {
gLiftKey = true;
}
if (event.text === "CONTROL") {
gRotateKey = true;
}
computeNewGrabPlane();
}
function update(deltaTime) {
dT = deltaTime;
if (isGrabbing) {
entityProps = Entities.getEntityProperties(grabbedEntity);
currentPosition = entityProps.position;
currentVelocity = entityProps.velocity;
currentRotation = entityProps.rotation;
var dPosition = Vec3.subtract(targetPosition, currentPosition);
distanceToTarget = Vec3.length(dPosition);
if (distanceToTarget > CLOSE_ENOUGH) {
// compute current velocity in the direction we want to move
velocityTowardTarget = Vec3.dot(currentVelocity, Vec3.normalize(dPosition));
velocityTowardTarget = Vec3.multiply(Vec3.normalize(dPosition), velocityTowardTarget);
// compute the speed we would like to be going toward the target position
desiredVelocity = Vec3.multiply(dPosition, (1.0 / deltaTime) * SPRING_RATE);
// compute how much we want to add to the existing velocity
addedVelocity = Vec3.subtract(desiredVelocity, velocityTowardTarget);
// If target is too far, roll off the force as inverse square of distance
if (distanceToTarget / cameraEntityDistance > FULL_STRENGTH) {
addedVelocity = Vec3.multiply(addedVelocity, Math.pow(FULL_STRENGTH / distanceToTarget, 2.0));
}
newVelocity = Vec3.sum(currentVelocity, addedVelocity);
// Add Damping
newVelocity = Vec3.subtract(newVelocity, Vec3.multiply(newVelocity, DAMPING_RATE));
// Update entity
} else {
newVelocity = {x: 0, y: 0, z: 0};
}
if (shouldRotate) {
angularVelocity = Vec3.subtract(angularVelocity, Vec3.multiply(angularVelocity, ANGULAR_DAMPING_RATE));
Entities.editEntity(grabbedEntity, {
rotation: currentRotation,
angularVelocity: angularVelocity
});
} else {
angularVelocity = entityProps.angularVelocity;
if (!gIsGrabbing) {
return;
}
var newSpeed = Vec3.length(newVelocity);
if (!actionID) {
actionID = Entities.addAction("pull-to-point", grabbedEntity, {target: targetPosition, speed: newSpeed});
} else {
Entities.updateAction(grabbedEntity, actionID, {target: targetPosition, speed: newSpeed});
}
var entityProperties = Entities.getEntityProperties(gGrabbedEntity);
gCurrentPosition = entityProperties.position;
if (gGrabMode === "rotate") {
gAngularVelocity = Vec3.subtract(gAngularVelocity, Vec3.multiply(gAngularVelocity, ANGULAR_DAMPING_RATE));
Entities.editEntity(gGrabbedEntity, { angularVelocity: gAngularVelocity, });
}
updateDropLine(targetPosition);
}
// always push toward linear grab position, even when rotating
var newVelocity = ZERO_VEC3;
var dPosition = Vec3.subtract(gTargetPosition, gCurrentPosition);
var delta = Vec3.length(dPosition);
if (delta > CLOSE_ENOUGH) {
var MAX_POSITION_DELTA = 4.0;
if (delta > MAX_POSITION_DELTA) {
dPosition = Vec3.multiply(dPosition, MAX_POSITION_DELTA / delta);
}
// the desired speed is proportional to the displacement, scaled by the inverse of the timescale
// (which approximates critically damped motion)
newVelocity = Vec3.multiply(dPosition, INV_MOVE_TIMESCALE);
}
Entities.editEntity(gGrabbedEntity, { velocity: newVelocity, });
updateDropLine(gTargetPosition);
}
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Controller.keyPressEvent.connect(keyPressEvent);
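The new update() above replaces the old spring and damping constants with a simple velocity servo: each frame the grabbed entity is pushed toward gTargetPosition at a speed equal to the (clamped) remaining displacement divided by MOVE_TIMESCALE. A standalone sketch of that step using the script's constants; computeGrabVelocity is a hypothetical helper name, not part of the commit:

function computeGrabVelocity(currentPosition, targetPosition) {
    var dPosition = Vec3.subtract(targetPosition, currentPosition);
    var delta = Vec3.length(dPosition);
    if (delta <= CLOSE_ENOUGH) {
        return ZERO_VEC3; // close enough: stop pushing
    }
    var MAX_POSITION_DELTA = 4.0;
    if (delta > MAX_POSITION_DELTA) {
        // clamp the displacement so a distant target doesn't produce a huge velocity
        dPosition = Vec3.multiply(dPosition, MAX_POSITION_DELTA / delta);
    }
    // INV_MOVE_TIMESCALE = 1 / MOVE_TIMESCALE = 10 per second
    return Vec3.multiply(dPosition, INV_MOVE_TIMESCALE);
}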


@ -12,7 +12,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include('lineRider.js')
var MAX_POINTS_PER_LINE = 30;
var MAX_POINTS_PER_LINE = 80;
var colorPalette = [{

examples/utilities.js (new file, 56 lines)

@ -0,0 +1,56 @@
// utilities.js
// examples
//
// Created by Eric Levin on June 8
// Copyright 2015 High Fidelity, Inc.
//
// Common utilities
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
function hslToRgb(hslColor) {
var h = hslColor.hue;
var s = hslColor.sat;
var l = hslColor.light;
var r, g, b;
if (s == 0) {
r = g = b = l; // achromatic
} else {
var hue2rgb = function hue2rgb(p, q, t) {
if (t < 0) t += 1;
if (t > 1) t -= 1;
if (t < 1 / 6) return p + (q - p) * 6 * t;
if (t < 1 / 2) return q;
if (t < 2 / 3) return p + (q - p) * (2 / 3 - t) * 6;
return p;
}
var q = l < 0.5 ? l * (1 + s) : l + s - l * s;
var p = 2 * l - q;
r = hue2rgb(p, q, h + 1 / 3);
g = hue2rgb(p, q, h);
b = hue2rgb(p, q, h - 1 / 3);
}
return {
red: Math.round(r * 255),
green: Math.round(g * 255),
blue: Math.round(b * 255)
};
}
function map(value, min1, max1, min2, max2) {
return min2 + (max2 - min2) * ((value - min1) / (max1 - min1));
}
function randFloat(low, high) {
return low + Math.random() * (high - low);
}
function randInt(low, high) {
return Math.floor(randFloat(low, high));
}
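dynamicLandscape.js above uses these helpers together: map() linearly remaps a wave height into a hue range, and hslToRgb() converts that hue into the {red, green, blue} object expected by Entities.editEntity. A short usage sketch (illustrative values, not part of the commit):

var height = 0.25;                                  // a wave height in [-0.5, 0.5]
var hue = map(height, -0.5, 0.5, 0.5, 0.7);         // -> 0.65
var color = hslToRgb({hue: hue, sat: 0.7, light: 0.5});
// -> roughly {red: 38, green: 56, blue: 217}, a saturated blue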


@ -134,7 +134,7 @@ Slider = function(x,y,width,thumbSize) {
}
// The Checkbox class
Checkbox = function(x,y,width,thumbSize) {
Checkbox = function(x,y,thumbSize) {
this.thumb = Overlays.addOverlay("text", {
backgroundColor: { red: 255, green: 255, blue: 255 },
@ -150,7 +150,7 @@ Checkbox = function(x,y,width,thumbSize) {
backgroundColor: { red: 125, green: 125, blue: 255 },
x: x,
y: y,
width: width,
width: thumbSize * 2,
height: thumbSize,
alpha: 1.0,
backgroundAlpha: 0.5,
@ -161,7 +161,7 @@ Checkbox = function(x,y,width,thumbSize) {
this.thumbHalfSize = 0.5 * thumbSize;
this.minThumbX = x + this.thumbHalfSize;
this.maxThumbX = x + width - this.thumbHalfSize;
this.maxThumbX = x + thumbSize * 2 - this.thumbHalfSize;
this.thumbX = this.minThumbX;
this.minValue = 0.0;
@ -553,7 +553,7 @@ Panel = function(x, y) {
var item = new PanelItem(name, setValue, getValue, displayValue, this.x, this.nextY, textWidth, valueWidth, rawHeight);
var checkbox = new Checkbox(this.widgetX, this.nextY, widgetWidth, rawHeight);
var checkbox = new Checkbox(this.widgetX, this.nextY, rawHeight);
item.widget = checkbox;
item.widget.onValueChanged = function(value) { item.setterFromWidget(value); };


@ -0,0 +1,104 @@
//
// SunLightExample.js
// examples
// Sam Gateau
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("cookies.js");
var panel = new Panel(10, 400);
panel.newCheckbox("Enable Cull Opaque",
function(value) { Scene.setEngineCullOpaque((value != 0)); },
function() { return Scene.doEngineCullOpaque(); },
function(value) { return (value); }
);
panel.newCheckbox("Enable Sort Opaque",
function(value) { Scene.setEngineSortOpaque((value != 0)); },
function() { return Scene.doEngineSortOpaque(); },
function(value) { return (value); }
);
panel.newCheckbox("Enable Render Opaque",
function(value) { Scene.setEngineRenderOpaque((value != 0)); },
function() { return Scene.doEngineRenderOpaque(); },
function(value) { return (value); }
);
panel.newSlider("Num Feed Opaques", 0, 1000,
function(value) { },
function() { return Scene.getEngineNumFeedOpaqueItems(); },
function(value) { return (value); }
);
panel.newSlider("Num Drawn Opaques", 0, 1000,
function(value) { },
function() { return Scene.getEngineNumDrawnOpaqueItems(); },
function(value) { return (value); }
);
panel.newSlider("Max Drawn Opaques", -1, 1000,
function(value) { Scene.setEngineMaxDrawnOpaqueItems(value); },
function() { return Scene.getEngineMaxDrawnOpaqueItems(); },
function(value) { return (value); }
);
panel.newCheckbox("Enable Cull Transparent",
function(value) { Scene.setEngineCullTransparent((value != 0)); },
function() { return Scene.doEngineCullTransparent(); },
function(value) { return (value); }
);
panel.newCheckbox("Enable Sort Transparent",
function(value) { Scene.setEngineSortTransparent((value != 0)); },
function() { return Scene.doEngineSortTransparent(); },
function(value) { return (value); }
);
panel.newCheckbox("Enable Render Transparent",
function(value) { Scene.setEngineRenderTransparent((value != 0)); },
function() { return Scene.doEngineRenderTransparent(); },
function(value) { return (value); }
);
panel.newSlider("Num Feed Transparents", 0, 100,
function(value) { },
function() { return Scene.getEngineNumFeedTransparentItems(); },
function(value) { return (value); }
);
panel.newSlider("Num Drawn Transparents", 0, 100,
function(value) { },
function() { return Scene.getEngineNumDrawnTransparentItems(); },
function(value) { return (value); }
);
panel.newSlider("Max Drawn Transparents", -1, 100,
function(value) { Scene.setEngineMaxDrawnTransparentItems(value); },
function() { return Scene.getEngineMaxDrawnTransparentItems(); },
function(value) { return (value); }
);
var tickTackPeriod = 500;
function updateCounters() {
panel.set("Num Feed Opaques", panel.get("Num Feed Opaques"));
panel.set("Num Drawn Opaques", panel.get("Num Drawn Opaques"));
panel.set("Num Feed Transparents", panel.get("Num Feed Transparents"));
panel.set("Num Drawn Transparents", panel.get("Num Drawn Transparents"));
}
Script.setInterval(updateCounters, tickTackPeriod);
Controller.mouseMoveEvent.connect(function panelMouseMoveEvent(event) { return panel.mouseMoveEvent(event); });
Controller.mousePressEvent.connect( function panelMousePressEvent(event) { return panel.mousePressEvent(event); });
Controller.mouseReleaseEvent.connect(function(event) { return panel.mouseReleaseEvent(event); });
function scriptEnding() {
panel.destroy();
}
Script.scriptEnding.connect(scriptEnding);


@ -126,7 +126,7 @@ target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVR_INCLUDE_DIRS})
# link required hifi libraries
link_hifi_libraries(shared octree environment gpu model fbx networking entities avatars
link_hifi_libraries(shared octree environment gpu model render fbx networking entities avatars
audio audio-client animation script-engine physics
render-utils entities-renderer ui plugins display-plugins)


@ -0,0 +1,123 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Generator: Adobe Illustrator 18.1.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
version="1.1"
x="0px"
y="0px"
viewBox="0 0 150 150"
enable-background="new 0 0 512 512"
xml:space="preserve"
id="svg2"
inkscape:version="0.91 r13725"
sodipodi:docname="hifi-logo-blackish.svg"
width="150"
height="150"><metadata
id="metadata57"><rdf:RDF><cc:Work
rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" /><dc:title></dc:title></cc:Work></rdf:RDF></metadata><defs
id="defs55" /><sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="1835"
inkscape:window-height="1057"
id="namedview53"
showgrid="false"
inkscape:zoom="2.6074563"
inkscape:cx="256"
inkscape:cy="67.100056"
inkscape:window-x="77"
inkscape:window-y="-8"
inkscape:window-maximized="1"
inkscape:current-layer="svg2" /><g
id="Layer_1"
transform="translate(-30.500027,-42.2)"><g
id="g5"><g
id="g7"><path
d="M 155.3,67.4 C 141.9,54.3 124.5,47 105.8,47 86.8,47 69.4,54.3 56,67.4 42.9,80.8 35.5,98.2 35.5,117.2 c 0,18.7 7.3,36.4 20.5,49.5 13.2,13.1 30.8,20.5 49.5,20.5 18.7,0 36.4,-7.3 49.5,-20.5 13.1,-13.1 20.5,-30.8 20.5,-49.5 0.3,-18.9 -7,-36.4 -20.2,-49.8 z m -5,94.3 c -11.9,11.9 -27.8,18.4 -44.7,18.4 C 88.7,180.1 73,173.5 60.9,161.7 49,149.8 42.5,133.9 42.5,117 c 0,-16.9 6.6,-32.6 18.4,-44.7 11.9,-11.9 27.8,-18.4 44.7,-18.4 16.9,0 32.6,6.6 44.7,18.4 11.9,11.9 18.4,27.8 18.4,44.7 0,16.9 -6.6,32.8 -18.4,44.7 z"
id="path9"
inkscape:connector-curvature="0"
style="fill:#333333" /><g
id="g11"><path
d="m 86.8,142 c -1.5,0 -3,-1.3 -3,-3 l 0,-54.1 c 0,-1.5 1.3,-3 3,-3 1.5,0 3,1.3 3,3 l 0,54.3 c 0.1,1.5 -1.2,2.8 -3,2.8 z"
id="path13"
inkscape:connector-curvature="0"
style="fill:#333333" /><path
d="m 90.4,83.4 c -2,2 -5.1,2 -6.8,0 -2,-2 -2,-5.1 0,-6.8 2,-2 5.1,-2 6.8,0 2,2 2,5 0,6.8"
id="path15"
inkscape:connector-curvature="0"
style="fill:#333333" /><g
id="g17"><path
d="m 86.8,87.9 c -2,0 -4,-0.8 -5.6,-2.3 -3,-3 -3,-8.1 0,-11.1 1.5,-1.5 3.5,-2.3 5.6,-2.3 2.1,0 4,0.8 5.6,2.3 1.5,1.5 2.3,3.5 2.3,5.6 0,2.1 -0.8,4 -2.3,5.6 -1.5,1.2 -3.3,2.2 -5.6,2.2 z m 0,-9.8 c -0.5,0 -1,0.3 -1.5,0.5 -0.8,0.8 -0.8,2 0,2.8 0.8,0.8 2,0.8 2.8,0 0.3,-0.3 0.5,-0.8 0.5,-1.5 0,-0.5 -0.3,-1 -0.5,-1.5 -0.2,-0.1 -0.7,-0.3 -1.3,-0.3 z"
id="path19"
inkscape:connector-curvature="0"
style="fill:#333333" /></g><path
d="m 83.6,140.5 c 2,-2 5.1,-2 6.8,0 2,2 2,5.1 0,6.8 -2,2 -5.1,2 -6.8,0 -2.1,-1.8 -2.1,-4.8 0,-6.8"
id="path21"
inkscape:connector-curvature="0"
style="fill:#333333" /><g
id="g23"><path
d="m 86.8,151.8 c -2,0 -4,-0.8 -5.6,-2.3 C 79.6,148 79,146 79,144 c 0,-2 0.8,-4 2.3,-5.6 1.5,-1.6 3.5,-2.3 5.6,-2.3 2.1,0 4,0.8 5.6,2.3 1.6,1.5 2.3,3.5 2.3,5.6 0,2.1 -0.8,4 -2.3,5.6 -1.6,1.5 -3.4,2.2 -5.7,2.2 z m 0,-9.8 c -0.5,0 -1,0.3 -1.5,0.5 -0.3,0.3 -0.5,0.8 -0.5,1.5 0,0.5 0.3,1 0.5,1.5 0.8,0.8 2,0.8 2.8,0 0.3,-0.3 0.5,-0.8 0.5,-1.5 0,-0.5 -0.3,-1 -0.5,-1.5 -0.2,-0.3 -0.7,-0.5 -1.3,-0.5 z"
id="path25"
inkscape:connector-curvature="0"
style="fill:#333333" /></g><g
id="g27"><path
d="m 124.5,152.3 c -1.5,0 -3,-1.3 -3,-3 l 0,-54.3 c 0,-1.5 1.3,-3 3,-3 1.5,0 3,1.3 3,3 l 0,54.3 c 0,1.8 -1.5,3 -3,3 z"
id="path29"
inkscape:connector-curvature="0"
style="fill:#333333" /></g><path
d="m 128,93.7 c -2,2 -5.1,2 -6.8,0 -2,-2 -2,-5.1 0,-6.8 2,-2 5.1,-2 6.8,0 1.8,1.8 1.8,4.8 0,6.8"
id="path31"
inkscape:connector-curvature="0"
style="fill:#333333" /><g
id="g33"><path
d="m 124.5,98 c -2,0 -4,-0.8 -5.6,-2.3 -3,-3 -3,-8.1 0,-11.1 1.5,-1.5 3.5,-2.3 5.6,-2.3 2,0 4,0.8 5.6,2.3 1.5,1.5 2.3,3.5 2.3,5.6 0,2 -0.8,4 -2.3,5.6 -1.6,1.5 -3.6,2.2 -5.6,2.2 z m 0,-9.8 c -0.5,0 -1,0.3 -1.5,0.5 -0.8,0.8 -0.8,2 0,2.8 0.8,0.8 2,0.8 2.8,0 0.3,-0.3 0.5,-0.8 0.5,-1.5 0,-0.5 -0.3,-1 -0.5,-1.5 -0.3,-0.1 -0.8,-0.3 -1.3,-0.3 z"
id="path35"
inkscape:connector-curvature="0"
style="fill:#333333" /></g><g
id="g37"><path
d="m 124.5,162.2 c -2,0 -4,-0.8 -5.6,-2.3 -3,-3 -3,-8.1 0,-11.1 1.5,-1.5 3.5,-2.3 5.6,-2.3 2,0 4,0.8 5.6,2.3 1.5,1.5 2.3,3.5 2.3,5.6 0,2.1 -0.8,4 -2.3,5.6 -1.6,1.2 -3.6,2.2 -5.6,2.2 z m 0,-9.9 c -0.5,0 -1,0.3 -1.5,0.5 -0.8,0.8 -0.8,2 0,2.8 0.8,0.8 2,0.8 2.8,0 0.3,-0.3 0.5,-0.8 0.5,-1.5 0,-0.7 -0.3,-1 -0.5,-1.5 -0.3,0 -0.8,-0.3 -1.3,-0.3 z"
id="path39"
inkscape:connector-curvature="0"
style="fill:#333333" /></g><path
d="m 121,150.8 c 2,-2 5.1,-2 6.8,0 2,2 2,5.1 0,6.8 -2,2 -5.1,2 -6.8,0 -1.8,-1.7 -1.8,-5 0,-6.8"
id="path41"
inkscape:connector-curvature="0"
style="fill:#333333" /><g
id="g43"><rect
x="85.099998"
y="113.2"
transform="matrix(-0.9064,-0.4224,0.4224,-0.9064,152.6722,266.0858)"
width="41.400002"
height="5.8000002"
id="rect45"
style="fill:#333333" /></g></g></g></g></g><g
id="Layer_2_copy"
display="none"
style="display:none"
transform="translate(0,-362)"><path
display="inline"
d="m 500,511.5 -488,0 c -6.6,0 -12,-5.4 -12,-12 l 0,-168 c 0,-6.6 5.4,-12 12,-12 l 488,0 c 6.6,0 12,5.4 12,12 l 0,168 c 0,6.6 -5.4,12 -12,12 z"
id="path48"
inkscape:connector-curvature="0"
style="display:inline;fill:#ffffff" /><g
id="Layer_2"
display="inline"
style="display:inline"><path
d="M 500,384 12,384 C 5.4,384 0,378.6 0,372 L 0,12 C 0,5.4 5.4,0 12,0 l 488,0 c 6.6,0 12,5.4 12,12 l 0,360 c 0,6.6 -5.4,12 -12,12 z"
id="path51"
inkscape:connector-curvature="0"
style="fill:#368db0" /></g></g></svg>



@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
version="1.1"
id="pomodoro"
x="0px"
y="0px"
width="100"
height="100"
viewBox="-7.125 -7.328 100 100"
enable-background="new -7.125 -7.328 100 91.57"
xml:space="preserve"
inkscape:version="0.91 r13725"
sodipodi:docname="login-close.svg"><metadata
id="metadata4143"><rdf:RDF><cc:Work
rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" /><dc:title /></cc:Work></rdf:RDF></metadata><defs
id="defs4141" /><sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="1835"
inkscape:window-height="1057"
id="namedview4139"
showgrid="false"
inkscape:zoom="4.8487496"
inkscape:cx="-41.872299"
inkscape:cy="37.53545"
inkscape:window-x="77"
inkscape:window-y="-8"
inkscape:window-maximized="1"
inkscape:current-layer="pomodoro" /><path
d="m 42.8705,-2.3280001 c -24.8688,0 -44.9955,20.1618001 -44.9955,45.0045001 0,24.8733 20.1267,44.9955 44.9955,44.9955 24.8427,0 45.0045,-20.1222 45.0045,-44.9955 0,-24.8427 -20.1618,-45.0045001 -45.0045,-45.0045001 z M 67.1066,57.126 57.329,66.9036 42.6293,52.221 27.9422,66.9036 18.1511,57.126 32.8247,42.4443 18.1511,27.7482 27.9422,17.9796 42.6284,32.6487 57.3281,17.9796 67.1057,27.7482 52.424,42.4434 67.1066,57.126 Z"
id="path4137"
inkscape:connector-curvature="0" /><path
style="fill:#787878;fill-opacity:1;stroke:none;stroke-opacity:1"
d="M 38.400102,87.62655 C 28.705316,86.39839 21.084707,83.18102 13.982682,77.31765 5.5185024,70.329714 -0.09877759,60.244376 -1.7904936,48.998291 -2.1921426,46.328239 -2.2434696,39.677941 -1.8825126,37.07572 0.23131941,21.836625 9.4778634,8.9272213 23.005945,2.3281243 c 9.805646,-4.783264 20.444414,-5.902737 30.964952,-3.25830896 7.357662,1.849413 14.403738,5.75570696 19.976698,11.07495366 7.36697,7.031569 12.03213,16.084669 13.58981,26.37208 0.45133,2.980701 0.44981,9.518147 -0.003,12.481442 -0.72914,4.772737 -2.08456,9.199896 -4.04575,13.214497 -2.40852,4.930297 -4.94684,8.502038 -8.75077,12.313422 -6.78153,6.79482 -14.822805,10.95587 -24.504932,12.68035 -1.787127,0.3183 -3.134188,0.40875 -6.708441,0.45045 -2.459762,0.0287 -4.765789,0.0149 -5.124505,-0.0304 z m -3.02899,-27.869116 7.314939,-7.311007 7.360877,7.35692 7.360872,7.356917 4.983865,-4.982378 4.98386,-4.982378 -7.359111,-7.358686 -7.359105,-7.358687 7.359105,-7.358687 7.359111,-7.358686 -4.98387,-4.982383 -4.983864,-4.982384 -7.407456,7.393329 -7.407456,7.393328 -7.360652,-7.342464 -7.36065,-7.342467 -4.922357,4.916384 -4.922356,4.916381 7.300528,7.417269 7.300528,7.417267 -7.362706,7.362244 -7.362709,7.362244 4.890918,4.889465 c 2.690008,2.689205 4.974582,4.889463 5.076835,4.889463 0.102254,0 3.477639,-3.289951 7.500854,-7.311004 z"
id="path4145"
inkscape:connector-curvature="0" /><path
style="fill:#ffffff;fill-opacity:0.50196081;stroke:none;stroke-linejoin:round;stroke-opacity:1"
d="M 44.534673,94.743449 C 34.891509,93.246309 27.795933,90.11327 20.775164,84.252477 11.886441,76.832357 5.9619527,65.158789 5.1748775,53.513789 c -0.2319637,-3.43197 -0.2319637,-3.580148 0,-7.012117 0.2997941,-4.43554 1.6142349,-10.018379 3.2047936,-13.611757 0.9426814,-2.1297 3.0434359,-6.187162 3.2034079,-6.187162 0.0878,0 0.159635,-0.08223 0.159635,-0.182738 0,-0.184553 1.62407,-2.719865 1.864629,-2.910843 0.07144,-0.05672 0.214767,-0.254527 0.318504,-0.43958 0.444428,-0.792801 4.761858,-5.745943 5.011388,-5.74928 0.06982,-9.28e-4 0.599004,-0.44177 1.175954,-0.979634 0.57695,-0.537865 1.357309,-1.209955 1.73413,-1.493533 0.376821,-0.283579 0.803015,-0.631606 0.947096,-0.773396 0.144082,-0.141789 0.33915,-0.257798 0.433485,-0.257798 0.09433,0 0.421739,-0.232019 0.727566,-0.515597 0.305826,-0.283578 0.652906,-0.515597 0.771287,-0.515597 0.118382,0 0.21524,-0.09281 0.21524,-0.206238 0,-0.113432 0.131477,-0.206239 0.292173,-0.206239 0.160693,0 0.323106,-0.06961 0.360917,-0.154679 0.03781,-0.08507 0.486379,-0.379776 0.99682,-0.654895 0.510441,-0.275119 0.974478,-0.553541 1.031194,-0.618716 0.05672,-0.06518 0.683166,-0.390786 1.392111,-0.723578 0.708946,-0.3327925 1.288992,-0.6962883 1.288992,-0.8077684 0,-0.1114801 0.0579,-0.1447959 0.128656,-0.074035 0.07076,0.070761 0.511596,-0.070816 0.979634,-0.3146147 0.468039,-0.2437989 0.943786,-0.4657735 1.057217,-0.4932771 0.232223,-0.056306 4.161879,-1.4638567 4.640372,-1.6621192 0.170147,-0.0705 0.539063,-0.1662889 0.819815,-0.2128641 0.280751,-0.046575 0.614115,-0.1707121 0.740812,-0.2758597 0.126694,-0.1051476 0.274366,-0.1471661 0.328157,-0.093375 0.05379,0.053792 0.416482,9.591e-4 0.805979,-0.1174068 0.389496,-0.1183654 0.893791,-0.250227 1.120654,-0.2930258 C 41.396501,5.8872092 42.53806,5.68664 43.297241,5.5593565 45.066132,5.2627864 46.714013,5.1673775 50,5.1712806 c 3.805191,0.00452 5.083801,0.101258 7.940191,0.6007452 1.309528,0.2289926 1.540035,0.2798038 2.254967,0.4970667 0.389497,0.1183654 0.752188,0.1711985 0.805979,0.1174068 0.05379,-0.053792 0.201463,-0.011773 0.328157,0.093375 0.126697,0.1051476 0.460061,0.2292845 0.740812,0.2758597 0.280752,0.046575 0.649668,0.1381531 0.819815,0.2035062 0.29917,0.1149109 2.471922,0.8292633 2.681104,0.8814871 0.180626,0.045095 1.113229,0.4463555 2.521561,1.0849238 0.752112,0.341023 1.420515,0.5670004 1.485344,0.502172 0.06483,-0.064829 0.11787,-0.026659 0.11787,0.084821 0,0.1114801 0.580046,0.4749759 1.288992,0.8077684 0.708945,0.332792 1.388624,0.697885 1.510393,0.811316 0.121772,0.113431 0.583276,0.391854 1.025566,0.618716 0.442289,0.226863 0.986609,0.574891 1.209598,0.773396 0.222989,0.198504 0.484793,0.360917 0.581787,0.360917 0.09699,0 0.426572,0.232019 0.732399,0.515597 0.305826,0.283578 0.626601,0.515597 0.712834,0.515597 0.08623,0 0.331378,0.162413 0.544769,0.360918 0.213391,0.198505 0.634947,0.546532 0.936792,0.773395 0.301843,0.226863 0.897759,0.737304 1.324259,1.134313 0.426497,0.39701 1.236446,1.139469 1.799884,1.64991 0.880557,0.797729 1.757212,1.75052 3.54596,3.853913 0.210933,0.248036 0.563341,0.699155 0.78313,1.002487 0.219788,0.303332 0.494773,0.657238 0.611075,0.786459 0.455109,0.505658 1.954048,2.819855 1.954048,3.016835 0,0.114715 0.07659,0.208572 0.170192,0.208572 0.161275,0 2.091162,3.737786 3.194546,6.187162 1.541607,3.422177 2.906394,9.221918 3.203098,13.611757 0.231964,3.431969 0.231964,3.580147 0,7.012117 -0.815165,12.060598 -6.997182,23.808557 -16.608839,31.562504 -6.618801,5.339542 -15.182288,8.893437 -23.343332,9.687615 -2.412178,0.234737 -8.77566,0.222144 
-10.338278,-0.02046 z m 25.10828,-25.193278 5.003096,-4.999972 -7.37222,-7.374353 -7.372221,-7.374355 7.372221,-7.374354 7.37222,-7.374353 -5.053369,-5.050211 -5.053369,-5.050211 -7.48442,7.482314 -7.48442,7.482312 -7.279626,-7.328953 c -4.003795,-4.030924 -7.3403,-7.328953 -7.414457,-7.328953 -0.204703,0 -9.728156,9.56039 -9.728156,9.765886 0,0.09849 3.255479,3.479009 7.234399,7.512255 l 7.234397,7.333175 -7.389076,7.404243 -7.389078,7.404243 4.94973,4.825596 c 2.722351,2.654079 5.078575,4.828707 5.236051,4.83251 0.157477,0.0038 3.547751,-3.239037 7.53394,-7.206305 l 7.247619,-7.213218 7.31416,7.316337 c 4.022788,4.023984 7.360357,7.316336 7.416821,7.316336 0.05646,0 2.354055,-2.249986 5.105758,-4.999969 z"
id="path4134"
inkscape:connector-curvature="0"
transform="translate(-7.125,-7.3280001)" /></svg>



@ -0,0 +1,58 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
version="1.1"
id="Layer_1"
x="0px"
y="0px"
width="100px"
height="100px"
viewBox="0 0 100 100"
enable-background="new 0 0 100 100"
xml:space="preserve"
inkscape:version="0.91 r13725"
sodipodi:docname="login-password.svg"><metadata
id="metadata13"><rdf:RDF><cc:Work
rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" /><dc:title /></cc:Work></rdf:RDF></metadata><defs
id="defs11" /><sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="1017"
inkscape:window-height="605"
id="namedview9"
showgrid="false"
inkscape:zoom="2.36"
inkscape:cx="-22.245763"
inkscape:cy="50"
inkscape:window-x="138"
inkscape:window-y="109"
inkscape:window-maximized="0"
inkscape:current-layer="Layer_1" /><path
stroke-miterlimit="10"
d="m 70.932259,91.378387 -42.84472,0 c -3.950284,0 -7.163638,-3.213354 -7.163638,-7.162414 l 0,-37.131683 c 0,-3.949059 3.213354,-7.162413 7.163638,-7.162413 l 42.84472,0 c 3.950285,0 7.163639,3.213354 7.163639,7.162413 l 0,37.132908 c -0.0012,3.947835 -3.214579,7.161189 -7.163639,7.161189 z M 28.087539,42.00413 c -2.800821,0 -5.077712,2.278115 -5.077712,5.078936 l 0,37.132907 c 0,2.800821 2.276891,5.078936 5.077712,5.078936 l 42.84472,0 c 2.800822,0 5.077712,-2.278115 5.077712,-5.078936 l 0,-37.131683 c 0,-2.800821 -2.27689,-5.078936 -5.077712,-5.078936 l -42.84472,0 z"
id="path3"
inkscape:connector-curvature="0"
style="stroke:#808080;stroke-width:1.5;stroke-miterlimit:10;stroke-dasharray:none;stroke-opacity:1;fill:#808080;fill-opacity:1" /><path
stroke-miterlimit="10"
d="m 53.812733,79.237417 -8.601995,0 c -1.695428,0 -3.075027,-1.377152 -3.075027,-3.073802 0,-0.09181 0.01224,-0.182397 0.0355,-0.271759 l 2.407874,-8.927616 c -2.105512,-1.555875 -3.340664,-3.983335 -3.340664,-6.632363 0,-4.559902 3.710353,-8.270254 8.271478,-8.270254 4.561127,0 8.269033,3.710352 8.269033,8.270254 0,2.649028 -1.235154,5.075264 -3.340665,6.631139 l 2.410321,8.92884 c 0.02448,0.08813 0.03551,0.178724 0.03551,0.271759 0.0012,1.69665 -1.378375,3.073802 -3.071353,3.073802 z m -9.587424,-2.959958 c 0.05753,0.493326 0.476188,0.877704 0.985429,0.877704 l 8.601995,0 c 0.506792,0 0.927895,-0.38193 0.985429,-0.877704 l -2.563339,-9.489494 c -0.121188,-0.449258 0.071,-0.924222 0.468844,-1.161704 1.87415,-1.133549 2.993011,-3.112975 2.993011,-5.291934 0,-3.411665 -2.775115,-6.189227 -6.186779,-6.189227 -3.412887,0 -6.189226,2.777562 -6.189226,6.189227 0,2.178959 1.11886,4.158385 2.99301,5.291934 0.397844,0.239931 0.587585,0.714895 0.468843,1.161704 l -2.557217,9.489494 z"
id="path5"
inkscape:connector-curvature="0"
style="stroke:#808080;stroke-width:1.5;stroke-miterlimit:10;stroke-dasharray:none;stroke-opacity:1;fill:#808080;fill-opacity:1" /><path
stroke-miterlimit="10"
d="m 67.973525,36.897039 c -0.575343,0 -1.041738,-0.466395 -1.041738,-1.041738 l 0,-8.3486 c 0,-8.992495 -7.316654,-16.309149 -16.306701,-16.309149 l -2.230374,0 c -8.992495,0 -16.306701,7.316654 -16.306701,16.309149 l 0,8.3486 c 0,0.575343 -0.466395,1.041738 -1.041738,1.041738 -0.575343,0 -1.04174,-0.466395 -1.04174,-1.041738 l 0,-8.3486 c 0,-10.140733 8.248221,-18.391402 18.387731,-18.391402 l 2.230374,0 c 10.139509,0 18.38773,8.250669 18.38773,18.391402 l 0,8.3486 c 0.0037,0.575343 -0.462723,1.041738 -1.036843,1.041738 z"
id="path7"
inkscape:connector-curvature="0"
style="stroke:#808080;stroke-width:1.5;stroke-miterlimit:10;stroke-dasharray:none;stroke-opacity:1;fill:#808080;fill-opacity:1" /></svg>



@ -0,0 +1,51 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
version="1.1"
id="Your_Icon"
x="0px"
y="0px"
width="100px"
height="100px"
viewBox="0 0 100 100"
enable-background="new 0 0 100 100"
xml:space="preserve"
inkscape:version="0.91 r13725"
sodipodi:docname="login-username.svg"><metadata
id="metadata11"><rdf:RDF><cc:Work
rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" /></cc:Work></rdf:RDF></metadata><defs
id="defs9" /><sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="1105"
inkscape:window-height="796"
id="namedview7"
showgrid="false"
inkscape:zoom="2.36"
inkscape:cx="-40.889831"
inkscape:cy="50"
inkscape:window-x="685"
inkscape:window-y="76"
inkscape:window-maximized="0"
inkscape:current-layer="Your_Icon" /><path
d="m 49.245763,52.491929 c 19.609799,0 35.508071,15.898272 35.508071,35.508071 l 3.737691,0 c 0,-21.674874 -17.570888,-39.245763 -39.245762,-39.245763 C 27.570889,48.754237 10,66.325126 10,88 l 3.737692,0 c 0,-19.609799 15.897337,-35.508071 35.508071,-35.508071 z"
id="path3"
inkscape:connector-curvature="0"
style="fill:#808080;fill-opacity:1" /><path
d="m 49.245763,9.508475 c -11.869975,0 -21.491727,9.621752 -21.491727,21.491727 0,11.869974 9.621752,21.491727 21.491727,21.491727 11.869974,0 21.491727,-9.621753 21.491727,-21.491727 0,-11.869975 -9.621753,-21.491727 -21.491727,-21.491727 z m 0,39.245762 c -9.805834,0 -17.754036,-7.948201 -17.754036,-17.754035 0,-9.805834 7.948202,-17.754036 17.754036,-17.754036 9.805834,0 17.754035,7.948202 17.754035,17.754036 0,9.805834 -7.948201,17.754035 -17.754035,17.754035 z"
id="path5"
inkscape:connector-curvature="0"
style="fill:#808080;fill-opacity:1" /></svg>



@ -9,18 +9,16 @@
//
import Hifi 1.0
import QtQuick 2.3
import QtQuick.Controls 1.2
import QtQuick 2.4
import "controls"
import "styles"
Item {
DialogContainer {
id: root
HifiConstants { id: hifi }
objectName: "AddressBarDialog"
property int animationDuration: hifi.effects.fadeInDuration
property bool destroyOnInvisible: false
property real scale: 1.25 // Make this dialog a little larger than normal
@ -101,57 +99,18 @@ Item {
}
}
// The UI enables an object, rather than manipulating its visibility, so that we can do animations in both directions.
// Because visibility and enabled are booleans, they cannot be animated. So when enabled is changed, we modify a property
// that can be animated, like scale or opacity, and then when the target animation value is reached, we can modify the
// visibility.
enabled: false
opacity: 0.0
onEnabledChanged: {
opacity = enabled ? 1.0 : 0.0
if (enabled) {
addressLine.forceActiveFocus();
addressLine.forceActiveFocus()
}
}
onParentChanged: {
if (enabled && visible) {
addressLine.forceActiveFocus();
}
}
Behavior on opacity {
// Animate opacity.
NumberAnimation {
duration: animationDuration
easing.type: Easing.OutCubic
}
}
onOpacityChanged: {
// Once we're transparent, disable the dialog's visibility.
visible = (opacity != 0.0)
}
onVisibleChanged: {
if (!visible) {
reset()
// Some dialogs should be destroyed when they become invisible.
if (destroyOnInvisible) {
destroy()
}
} else {
addressLine.forceActiveFocus();
addressLine.text = ""
}
}
function reset() {
addressLine.text = ""
}
function toggleOrGo() {
if (addressLine.text == "") {
enabled = false
@ -160,21 +119,18 @@ Item {
}
}
Keys.onEscapePressed: {
enabled = false
}
Keys.onPressed: {
switch(event.key) {
case Qt.Key_W:
if (event.modifiers == Qt.ControlModifier) {
event.accepted = true
enabled = false
}
switch (event.key) {
case Qt.Key_Escape:
case Qt.Key_Back:
enabled = false
event.accepted = true
break
case Qt.Key_Enter:
case Qt.Key_Return:
toggleOrGo()
event.accepted = true
break
}
}
Keys.onReturnPressed: toggleOrGo()
Keys.onEnterPressed: toggleOrGo()
}


@ -4,7 +4,7 @@ import QtWebKit 3.0
import "controls"
import "styles"
Dialog {
VrDialog {
id: root
HifiConstants { id: hifi }
title: "Browser"


@ -8,27 +8,22 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import Hifi 1.0 as Hifi
import QtQuick 2.3
import QtQuick.Controls 1.2
import QtQuick.Dialogs 1.2
import Hifi 1.0
import QtQuick 2.4
import "controls"
import "styles"
Item {
DialogContainer {
id: root
HifiConstants { id: hifi }
property int animationDuration: hifi.effects.fadeInDuration
property bool destroyOnInvisible: true
Component.onCompleted: {
enabled = true
}
onParentChanged: {
if (visible && enabled) {
forceActiveFocus();
forceActiveFocus()
}
}
@ -38,7 +33,7 @@ Item {
x: parent ? parent.width / 2 - width / 2 : 0
y: parent ? parent.height / 2 - height / 2 : 0
Hifi.ErrorDialog {
ErrorDialog {
id: content
implicitWidth: box.width
@ -69,7 +64,7 @@ Item {
Text {
id: messageText
font.pointSize: 10
font.pixelSize: hifi.fonts.pixelSize * 0.6
font.weight: Font.Bold
anchors {
@ -91,55 +86,17 @@ Item {
}
MouseArea {
anchors.fill: parent
cursorShape: "PointingHandCursor"
onClicked: {
content.accept();
content.accept()
}
}
}
}
}
// The UI enables an object, rather than manipulating its visibility, so that we can do animations in both directions.
// Because visibility and enabled are booleans, they cannot be animated. So when enabled is changed, we modify a property
// that can be animated, like scale or opacity, and then when the target animation value is reached, we can modify the
// visibility.
enabled: false
opacity: 0.0
onEnabledChanged: {
opacity = enabled ? 1.0 : 0.0
}
Behavior on opacity {
// Animate opacity.
NumberAnimation {
duration: animationDuration
easing.type: Easing.OutCubic
}
}
onOpacityChanged: {
// Once we're transparent, disable the dialog's visibility.
visible = (opacity != 0.0)
}
onVisibleChanged: {
if (!visible) {
// Some dialogs should be destroyed when they become invisible.
if (destroyOnInvisible) {
destroy()
}
}
}
Keys.onPressed: {
if (event.modifiers === Qt.ControlModifier)
switch (event.key) {
case Qt.Key_W:
event.accepted = true
content.accept()
break
} else switch (event.key) {
switch (event.key) {
case Qt.Key_Escape:
case Qt.Key_Back:
case Qt.Key_Enter:


@ -5,7 +5,7 @@ import QtQuick.Controls.Styles 1.3
import QtWebKit 3.0
import "controls"
Dialog {
VrDialog {
id: root
width: 800
height: 800


@ -1,78 +1,173 @@
//
// LoginDialog.qml
//
// Created by David Rowe on 3 Jun 2015
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import Hifi 1.0
import QtQuick 2.3
import QtQuick.Controls.Styles 1.3
import QtQuick 2.4
import "controls"
import "styles"
Dialog {
DialogContainer {
id: root
HifiConstants { id: hifi }
title: "Login"
objectName: "LoginDialog"
height: 512
width: 384
onVisibleChanged: {
if (!visible) {
reset()
}
}
property bool destroyOnInvisible: false
onEnabledChanged: {
if (enabled) {
username.forceActiveFocus();
}
}
implicitWidth: loginDialog.implicitWidth
implicitHeight: loginDialog.implicitHeight
function reset() {
username.text = ""
password.text = ""
loginDialog.statusText = ""
}
x: parent ? parent.width / 2 - width / 2 : 0
y: parent ? parent.height / 2 - height / 2 : 0
property int maximumX: parent ? parent.width - width : 0
property int maximumY: parent ? parent.height - height : 0
LoginDialog {
id: loginDialog
anchors.fill: parent
anchors.margins: parent.margins
anchors.topMargin: parent.topMargin
Column {
anchors.topMargin: 8
anchors.right: parent.right
anchors.rightMargin: 0
anchors.left: parent.left
anchors.top: parent.top
spacing: 8
Image {
height: 64
anchors.horizontalCenter: parent.horizontalCenter
width: 64
source: "../images/hifi-logo.svg"
implicitWidth: backgroundRectangle.width
implicitHeight: backgroundRectangle.height
readonly property int inputWidth: 500
readonly property int inputHeight: 60
readonly property int borderWidth: 30
readonly property int closeMargin: 16
readonly property real tan30: 0.577 // tan(30°)
readonly property int inputSpacing: 16
property int maximumX: parent ? parent.width - width : 0
property int maximumY: parent ? parent.height - height : 0
Rectangle {
id: backgroundRectangle
width: loginDialog.inputWidth + loginDialog.borderWidth * 2
height: loginDialog.inputHeight * 6 + loginDialog.closeMargin * 2
radius: loginDialog.closeMargin * 2
color: "#2c86b1"
opacity: 0.85
MouseArea {
width: parent.width
height: parent.height
anchors {
horizontalCenter: parent.horizontalCenter
verticalCenter: parent.verticalCenter
}
drag {
target: root
minimumX: 0
minimumY: 0
maximumX: root.parent ? root.maximumX : 0
maximumY: root.parent ? root.maximumY : 0
}
}
}
Image {
id: closeIcon
source: "../images/login-close.svg"
width: 20
height: 20
anchors {
top: backgroundRectangle.top
right: backgroundRectangle.right
topMargin: loginDialog.closeMargin
rightMargin: loginDialog.closeMargin
}
Border {
width: 304
height: 64
anchors.horizontalCenter: parent.horizontalCenter
MouseArea {
anchors.fill: parent
cursorShape: "PointingHandCursor"
onClicked: {
root.enabled = false
}
}
}
Column {
id: mainContent
width: loginDialog.inputWidth
spacing: loginDialog.inputSpacing
anchors {
horizontalCenter: parent.horizontalCenter
verticalCenter: parent.verticalCenter
}
Item {
// Offset content down a little
width: loginDialog.inputWidth
height: loginDialog.closeMargin
}
Rectangle {
width: loginDialog.inputWidth
height: loginDialog.inputHeight
radius: height / 2
color: "#ebebeb"
Image {
source: "../images/login-username.svg"
width: loginDialog.inputHeight * 0.65
height: width
anchors {
verticalCenter: parent.verticalCenter
left: parent.left
leftMargin: loginDialog.inputHeight / 4
}
}
TextInput {
id: username
anchors.fill: parent
helperText: "Username or Email"
anchors.margins: 8
anchors {
fill: parent
leftMargin: loginDialog.inputHeight
rightMargin: loginDialog.inputHeight / 2
}
helperText: "username or email"
color: hifi.colors.text
KeyNavigation.tab: password
KeyNavigation.backtab: password
}
}
Border {
width: 304
height: 64
anchors.horizontalCenter: parent.horizontalCenter
Rectangle {
width: loginDialog.inputWidth
height: loginDialog.inputHeight
radius: height / 2
color: "#ebebeb"
Image {
source: "../images/login-password.svg"
width: loginDialog.inputHeight * 0.65
height: width
anchors {
verticalCenter: parent.verticalCenter
left: parent.left
leftMargin: loginDialog.inputHeight / 4
}
}
TextInput {
id: password
anchors.fill: parent
anchors {
fill: parent
leftMargin: loginDialog.inputHeight
rightMargin: loginDialog.inputHeight / 2
}
helperText: "password"
echoMode: TextInput.Password
helperText: "Password"
anchors.margins: 8
color: hifi.colors.text
KeyNavigation.tab: username
KeyNavigation.backtab: username
onFocusChanged: {
@ -83,102 +178,176 @@ Dialog {
}
}
Text {
anchors.horizontalCenter: parent.horizontalCenter
textFormat: Text.StyledText
width: parent.width
height: 96
wrapMode: Text.WordWrap
verticalAlignment: Text.AlignVCenter
horizontalAlignment: Text.AlignHCenter
text: loginDialog.statusText
}
}
Item {
width: loginDialog.inputWidth
height: loginDialog.inputHeight / 2
Column {
anchors.bottomMargin: 5
anchors.right: parent.right
anchors.rightMargin: 0
anchors.left: parent.left
anchors.bottom: parent.bottom
Text {
id: messageText
visible: loginDialog.statusText != "" && loginDialog.statusText != "Logging in..."
width: loginDialog.inputWidth
height: loginDialog.inputHeight / 2
horizontalAlignment: Text.AlignHCenter
verticalAlignment: Text.AlignVCenter
text: loginDialog.statusText
color: "white"
}
Row {
id: messageSpinner
visible: loginDialog.statusText == "Logging in..."
onVisibleChanged: visible ? messageSpinnerAnimation.restart() : messageSpinnerAnimation.stop()
spacing: 24
anchors {
verticalCenter: parent.verticalCenter
horizontalCenter: parent.horizontalCenter
}
Rectangle {
id: spinner1
width: 10
height: 10
color: "#ebebeb"
opacity: 0.05
}
Rectangle {
id: spinner2
width: 10
height: 10
color: "#ebebeb"
opacity: 0.05
}
Rectangle {
id: spinner3
width: 10
height: 10
color: "#ebebeb"
opacity: 0.05
}
SequentialAnimation {
id: messageSpinnerAnimation
running: messageSpinner.visible
loops: Animation.Infinite
NumberAnimation { target: spinner1; property: "opacity"; to: 1.0; duration: 1000 }
NumberAnimation { target: spinner2; property: "opacity"; to: 1.0; duration: 1000 }
NumberAnimation { target: spinner3; property: "opacity"; to: 1.0; duration: 1000 }
NumberAnimation { target: spinner1; property: "opacity"; to: 0.05; duration: 0 }
NumberAnimation { target: spinner2; property: "opacity"; to: 0.05; duration: 0 }
NumberAnimation { target: spinner3; property: "opacity"; to: 0.05; duration: 0 }
}
}
}
Rectangle {
width: 192
height: 64
anchors.horizontalCenter: parent.horizontalCenter
color: hifi.colors.hifiBlue
border.width: 0
radius: 10
width: loginDialog.inputWidth
height: loginDialog.inputHeight
radius: height / 2
color: "#353535"
TextInput {
anchors.fill: parent
text: "Login"
color: "white"
horizontalAlignment: Text.AlignHCenter
}
MouseArea {
anchors.bottom: parent.bottom
anchors.bottomMargin: 0
anchors.top: parent.top
anchors.right: parent.right
anchors.left: parent.left
anchors.fill: parent
cursorShape: "PointingHandCursor"
onClicked: {
loginDialog.login(username.text, password.text)
}
}
}
Row {
anchors.centerIn: parent
Row {
anchors.horizontalCenter: parent.horizontalCenter
Text {
text: "Password?"
font.pixelSize: hifi.fonts.pixelSize * 0.8
font.underline: true
color: "#e0e0e0"
width: loginDialog.inputHeight * 4
horizontalAlignment: Text.AlignRight
anchors.verticalCenter: parent.verticalCenter
spacing: 8
MouseArea {
anchors.fill: parent
cursorShape: "PointingHandCursor"
onClicked: {
loginDialog.openUrl(loginDialog.rootUrl + "/users/password/new")
}
}
}
Item {
width: loginDialog.inputHeight + loginDialog.inputSpacing * 2
height: loginDialog.inputHeight
Image {
id: loginIcon
height: 32
width: 32
source: "../images/login.svg"
}
Text {
text: "Login"
color: "white"
width: 64
height: parent.height
id: hifiIcon
source: "../images/hifi-logo-blackish.svg"
width: loginDialog.inputHeight
height: width
anchors {
horizontalCenter: parent.horizontalCenter
verticalCenter: parent.verticalCenter
}
}
}
}
Text {
text: "Register"
font.pixelSize: hifi.fonts.pixelSize * 0.8
font.underline: true
color: "#e0e0e0"
width: loginDialog.inputHeight * 4
horizontalAlignment: Text.AlignLeft
anchors.verticalCenter: parent.verticalCenter
Text {
width: parent.width
height: 24
horizontalAlignment: Text.AlignHCenter
verticalAlignment: Text.AlignVCenter
text:"Create Account"
font.pointSize: 12
font.bold: true
color: hifi.colors.hifiBlue
MouseArea {
anchors.fill: parent
onClicked: {
loginDialog.openUrl(loginDialog.rootUrl + "/signup")
}
}
}
Text {
width: parent.width
height: 24
horizontalAlignment: Text.AlignHCenter
verticalAlignment: Text.AlignVCenter
font.pointSize: 12
text: "Recover Password"
color: hifi.colors.hifiBlue
MouseArea {
anchors.fill: parent
onClicked: {
loginDialog.openUrl(loginDialog.rootUrl + "/users/password/new")
MouseArea {
anchors.fill: parent
cursorShape: "PointingHandCursor"
onClicked: {
loginDialog.openUrl(loginDialog.rootUrl + "/signup")
}
}
}
}
}
}
onOpacityChanged: {
// Set focus once animation is completed so that focus is set at start-up when not logged in
if (opacity == 1.0) {
username.forceActiveFocus()
}
}
onVisibleChanged: {
if (!visible) {
username.text = ""
password.text = ""
loginDialog.statusText = ""
}
}
Keys.onPressed: {
switch(event.key) {
switch (event.key) {
case Qt.Key_Escape:
case Qt.Key_Back:
enabled = false
event.accepted = true
break
case Qt.Key_Enter:
case Qt.Key_Return:
if (username.activeFocus) {
@ -192,7 +361,7 @@ Dialog {
loginDialog.login(username.text, password.text)
}
}
break;
break
}
}
}

View file

@ -5,7 +5,7 @@ import QtQuick.Controls.Styles 1.3
import QtWebKit 3.0
import "controls"
Dialog {
VrDialog {
title: "Test Dlg"
id: testDialog
objectName: "Browser"

View file

@ -5,7 +5,7 @@ import QtQuick.Dialogs 1.2
import "controls"
import "styles"
Dialog {
VrDialog {
id: root
HifiConstants { id: hifi }
property real spacing: hifi.layout.spacing

View file

@ -3,7 +3,7 @@ import QtQuick.Controls 1.2
import QtQuick.Controls.Styles 1.3
import "controls"
Dialog {
VrDialog {
title: "Test Dialog"
id: testDialog
objectName: "TestDialog"

View file

@ -0,0 +1,65 @@
//
// DialogCommon.qml
//
// Created by David Rowe on 3 Jun 2015
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import Hifi 1.0
import QtQuick 2.4
import "../styles"
Item {
id: root
property bool destroyOnInvisible: true
// The UI enables an object, rather than manipulating its visibility, so that we can do animations in both directions.
// Because visibility and enabled are booleans, they cannot be animated. So when enabled is changed, we modify a property
// that can be animated, like scale or opacity, and then when the target animation value is reached, we can modify the
// visibility.
enabled: false
opacity: 0.0
onEnabledChanged: {
opacity = enabled ? 1.0 : 0.0
}
Behavior on opacity {
// Animate opacity.
NumberAnimation {
duration: hifi.effects.fadeInDuration
easing.type: Easing.OutCubic
}
}
onOpacityChanged: {
// Once we're transparent, disable the dialog's visibility.
visible = (opacity != 0.0)
}
onVisibleChanged: {
if (!visible) {
// Some dialogs should be destroyed when they become invisible.
if (destroyOnInvisible) {
destroy()
}
}
}
Keys.onPressed: {
switch(event.key) {
case Qt.Key_W:
if (event.modifiers == Qt.ControlModifier) {
enabled = false
event.accepted = true
}
break
}
}
}

View file

@ -13,10 +13,9 @@ Item {
readonly property color hifiBlue: "#0e7077"
readonly property color window: sysPalette.window
readonly property color dialogBackground: sysPalette.window
//readonly property color dialogBackground: "#00000000"
readonly property color inputBackground: "white"
readonly property color background: sysPalette.dark
readonly property color text: sysPalette.text
readonly property color text: "#202020"
readonly property color disabledText: "gray"
readonly property color hintText: "gray" // A bit darker than sysPalette.dark so that it is visible on the DK2
readonly property color light: sysPalette.light

Binary file not shown.

File diff suppressed because it is too large

View file

@ -75,7 +75,12 @@
#include "octree/OctreeFade.h"
#include "octree/OctreePacketProcessor.h"
#include "UndoStackScriptingInterface.h"
<<<<<<< HEAD
#include "DisplayPlugins.h"
=======
#include "render/Engine.h"
>>>>>>> master
class QGLWidget;
class QKeyEvent;
@ -213,7 +218,6 @@ public:
ViewFrustum* getShadowViewFrustum() { return &_shadowViewFrustum; }
const OctreePacketProcessor& getOctreePacketProcessor() const { return _octreeProcessor; }
EntityTreeRenderer* getEntities() { return &_entities; }
Environment* getEnvironment() { return &_environment; }
QUndoStack* getUndoStack() { return &_undoStack; }
MainWindow* getWindow() { return _window; }
OctreeQuery& getOctreeQuery() { return _octreeQuery; }
@ -281,9 +285,24 @@ public:
virtual void setupWorldLight();
virtual bool shouldRenderMesh(float largestDimension, float distanceToCamera);
QImage renderAvatarBillboard();
QImage renderAvatarBillboard(RenderArgs* renderArgs);
<<<<<<< HEAD
void displaySide(const Camera& camera, bool selfAvatarOnly = false);
=======
void displaySide(RenderArgs* renderArgs, Camera& whichCamera, bool selfAvatarOnly = false, bool billboard = false);
/// Stores the current modelview matrix as the untranslated view matrix to use for transforms and the supplied vector as
/// the view matrix translation.
void updateUntranslatedViewMatrix(const glm::vec3& viewMatrixTranslation = glm::vec3());
const glm::mat4& getUntranslatedViewMatrix() const { return _untranslatedViewMatrix; }
/// Loads a view matrix that incorporates the specified model translation without the precision issues that can
/// result from matrix multiplication at high translation magnitudes.
void loadTranslatedViewMatrix(const glm::vec3& translation);
>>>>>>> master
void getModelViewMatrix(glm::dmat4* modelViewMatrix);
void getProjectionMatrix(glm::dmat4* projectionMatrix);
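The pair of comments above describes a camera-relative trick for dodging single-precision loss: keep the view matrix untranslated, remember the view translation separately, and fold it into each model's translation so the large world-space numbers cancel as plain vectors before any matrix multiply happens. A minimal sketch of the idea, assuming only glm; the helper name is the editor's, not the engine's API:

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

// Hypothetical illustration of what loadTranslatedViewMatrix() describes: translate the
// untranslated view by (model translation + stored view translation). Adding the two
// vectors first keeps intermediate values small, so precision survives far from the origin.
glm::mat4 translatedViewMatrix(const glm::mat4& untranslatedView,
                               const glm::vec3& viewTranslation,   // e.g. -cameraPosition
                               const glm::vec3& modelTranslation) {
    return glm::translate(untranslatedView, modelTranslation + viewTranslation);
}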
@ -353,6 +372,11 @@ public:
void setMaxOctreePacketsPerSecond(int maxOctreePPS);
int getMaxOctreePacketsPerSecond();
render::ScenePointer getMain3DScene() { return _main3DScene; }
render::EnginePointer getRenderEngine() { return _renderEngine; }
render::ScenePointer getMain3DScene() const { return _main3DScene; }
signals:
@ -472,7 +496,9 @@ private slots:
void setCursorVisible(bool visible);
private:
void updateCursorVisibility();
void resetCamerasOnResizeGL(Camera& camera, const glm::uvec2& size);
void updateProjectionMatrix();
void updateProjectionMatrix(Camera& camera, bool updateViewFrustum = true);
void sendPingPackets();
@ -500,8 +526,8 @@ private:
glm::vec3 getSunDirection();
void updateShadowMap();
void renderRearViewMirror(const QRect& region, bool billboard = false);
void updateShadowMap(RenderArgs* renderArgs);
void renderRearViewMirror(RenderArgs* renderArgs, const QRect& region, bool billboard = false);
void setMenuShortcutsEnabled(bool enabled);
static void attachNewHeadToNode(Node *newNode);
@ -534,7 +560,6 @@ private:
QElapsedTimer _timerStart;
QElapsedTimer _lastTimeUpdated;
bool _justStarted;
Stars _stars;
ShapeManager _shapeManager;
PhysicalEntitySimulation _entitySimulation;
@ -637,9 +662,6 @@ private:
TouchEvent _lastTouchEvent;
Overlays _overlays;
ApplicationOverlay _applicationOverlay;
RunningScriptsWidget* _runningScriptsWidget;
QHash<QString, ScriptEngine*> _scriptEnginesHash;
bool _runningScriptsWidgetWasVisible;
@ -675,6 +697,12 @@ private:
int _maxOctreePPS = DEFAULT_MAX_OCTREE_PPS;
quint64 _lastFaceTrackerUpdate;
render::ScenePointer _main3DScene{ new render::Scene() };
render::EnginePointer _renderEngine{ new render::Engine() };
Overlays _overlays;
ApplicationOverlay _applicationOverlay;
};
#endif // hifi_Application_h

View file

@ -19,7 +19,7 @@
#include "EnvironmentData.h"
class Camera;
class ViewFrustum;
class ProgramObject;
class Environment {

View file

@ -217,6 +217,47 @@ QString LODManager::getLODFeedbackText() {
return result;
}
bool LODManager::shouldRender(const RenderArgs* args, const AABox& bounds) {
const float maxScale = (float)TREE_SCALE;
const float octreeToMeshRatio = 4.0f; // must be this many times closer to a mesh than a voxel to see it.
float octreeSizeScale = args->_sizeScale;
int boundaryLevelAdjust = args->_boundaryLevelAdjust;
float visibleDistanceAtMaxScale = boundaryDistanceForRenderLevel(boundaryLevelAdjust, octreeSizeScale) / octreeToMeshRatio;
float distanceToCamera = glm::length(bounds.calcCenter() - args->_viewFrustum->getPosition());
float largestDimension = bounds.getLargestDimension();
static bool shouldRenderTableNeedsBuilding = true;
static QMap<float, float> shouldRenderTable;
if (shouldRenderTableNeedsBuilding) {
float SMALLEST_SCALE_IN_TABLE = 0.001f; // 1mm is plenty small
float scale = maxScale;
float factor = 1.0f;
while (scale > SMALLEST_SCALE_IN_TABLE) {
scale /= 2.0f;
factor /= 2.0f;
shouldRenderTable[scale] = factor;
}
shouldRenderTableNeedsBuilding = false;
}
float closestScale = maxScale;
float visibleDistanceAtClosestScale = visibleDistanceAtMaxScale;
QMap<float, float>::const_iterator lowerBound = shouldRenderTable.lowerBound(largestDimension);
if (lowerBound != shouldRenderTable.constEnd()) {
closestScale = lowerBound.key();
visibleDistanceAtClosestScale = visibleDistanceAtMaxScale * lowerBound.value();
}
if (closestScale < largestDimension) {
visibleDistanceAtClosestScale *= 2.0f;
}
return distanceToCamera <= visibleDistanceAtClosestScale;
};
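shouldRender above boils down to a one-time lookup table: starting from the maximum scale, the object size and the allowed viewing distance are halved together, and an object's largest dimension is snapped to the nearest table entry, so something half as big is culled at half the distance. A standalone sketch of the same lookup using std::map (constants are illustrative, not the engine's):

#include <map>

// Sketch of the halving table in LODManager::shouldRender: returns the distance at
// which an object of the given size stops being rendered.
float visibleDistanceFor(float largestDimension, float maxScale, float distanceAtMaxScale) {
    static std::map<float, float> table;                 // scale -> distance factor
    if (table.empty()) {
        const float smallestScale = 0.001f;              // 1 mm, as in the code above
        float scale = maxScale;
        float factor = 1.0f;
        while (scale > smallestScale) {
            scale /= 2.0f;
            factor /= 2.0f;
            table[scale] = factor;                       // half the size -> half the distance
        }
    }
    float closestScale = maxScale;
    float distance = distanceAtMaxScale;
    auto it = table.lower_bound(largestDimension);       // first table scale >= object size
    if (it != table.end()) {
        closestScale = it->first;
        distance = distanceAtMaxScale * it->second;
    }
    if (closestScale < largestDimension) {
        distance *= 2.0f;                                // larger than any entry: be generous
    }
    return distance;                                     // caller renders if camera distance <= this
}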
// TODO: This is essentially the same logic used to render octree cells, but since models are more detailed than octree cells
// I've added a voxelToModelRatio that adjusts how much closer to a model you have to be to see it.
bool LODManager::shouldRenderMesh(float largestDimension, float distanceToCamera) {

View file

@ -49,6 +49,8 @@ const float ADJUST_LOD_MAX_SIZE_SCALE = DEFAULT_OCTREE_SIZE_SCALE;
// do. But both are still culled using the same angular size logic.
const float AVATAR_TO_ENTITY_RATIO = 2.0f;
class RenderArgs;
class AABox;
class LODManager : public QObject, public Dependency {
Q_OBJECT
@ -79,6 +81,7 @@ public:
Q_INVOKABLE float getLODDecreaseFPS();
Q_INVOKABLE float getLODIncreaseFPS();
static bool shouldRender(const RenderArgs* args, const AABox& bounds);
bool shouldRenderMesh(float largestDimension, float distanceToCamera);
void autoAdjustLOD(float currentFPS);

View file

@ -33,45 +33,44 @@
using namespace std;
void renderWorldBox() {
void renderWorldBox(gpu::Batch& batch) {
auto geometryCache = DependencyManager::get<GeometryCache>();
// Show edge of world
glm::vec3 red(1.0f, 0.0f, 0.0f);
glm::vec3 green(0.0f, 1.0f, 0.0f);
glm::vec3 blue(0.0f, 0.0f, 1.0f);
glm::vec3 grey(0.5f, 0.5f, 0.5f);
glDisable(GL_LIGHTING);
glLineWidth(1.0);
geometryCache->renderLine(glm::vec3(0, 0, 0), glm::vec3(TREE_SCALE, 0, 0), red);
geometryCache->renderLine(glm::vec3(0, 0, 0), glm::vec3(0, TREE_SCALE, 0), green);
geometryCache->renderLine(glm::vec3(0, 0, 0), glm::vec3(0, 0, TREE_SCALE), blue);
geometryCache->renderLine(glm::vec3(0, 0, TREE_SCALE), glm::vec3(TREE_SCALE, 0, TREE_SCALE), grey);
geometryCache->renderLine(glm::vec3(TREE_SCALE, 0, TREE_SCALE), glm::vec3(TREE_SCALE, 0, 0), grey);
static const glm::vec3 red(1.0f, 0.0f, 0.0f);
static const glm::vec3 green(0.0f, 1.0f, 0.0f);
static const glm::vec3 blue(0.0f, 0.0f, 1.0f);
static const glm::vec3 grey(0.5f, 0.5f, 0.5f);
auto transform = Transform{};
batch.setModelTransform(transform);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(TREE_SCALE, 0.0f, 0.0f), red);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, TREE_SCALE, 0.0f), green);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, TREE_SCALE), blue);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, TREE_SCALE), glm::vec3(TREE_SCALE, 0.0f, TREE_SCALE), grey);
geometryCache->renderLine(batch, glm::vec3(TREE_SCALE, 0.0f, TREE_SCALE), glm::vec3(TREE_SCALE, 0.0f, 0.0f), grey);
// Draw meter markers along the 3 axis to help with measuring things
const float MARKER_DISTANCE = 1.0f;
const float MARKER_RADIUS = 0.05f;
glEnable(GL_LIGHTING);
glPushMatrix();
glTranslatef(MARKER_DISTANCE, 0, 0);
geometryCache->renderSphere(MARKER_RADIUS, 10, 10, red);
glPopMatrix();
glPushMatrix();
glTranslatef(0, MARKER_DISTANCE, 0);
geometryCache->renderSphere(MARKER_RADIUS, 10, 10, green);
glPopMatrix();
glPushMatrix();
glTranslatef(0, 0, MARKER_DISTANCE);
geometryCache->renderSphere(MARKER_RADIUS, 10, 10, blue);
glPopMatrix();
glPushMatrix();
glTranslatef(MARKER_DISTANCE, 0, MARKER_DISTANCE);
geometryCache->renderSphere(MARKER_RADIUS, 10, 10, grey);
glPopMatrix();
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, red);
transform.setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, 0.0f));
batch.setModelTransform(transform);
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, red);
transform.setTranslation(glm::vec3(0.0f, MARKER_DISTANCE, 0.0f));
batch.setModelTransform(transform);
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, green);
transform.setTranslation(glm::vec3(0.0f, 0.0f, MARKER_DISTANCE));
batch.setModelTransform(transform);
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, blue);
transform.setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, MARKER_DISTANCE));
batch.setModelTransform(transform);
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, grey);
}
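The hunk above is a good template for the GL-to-batch migration that runs through this merge: glPushMatrix/glTranslatef/glScalef pairs become a Transform set on the batch, and every GeometryCache draw call gains a leading batch argument. A condensed before/after sketch of the pattern, using only calls that appear in this diff (the helper name is hypothetical):

// Before (immediate mode):
//   glPushMatrix();
//   glTranslatef(p.x, p.y, p.z);
//   geometryCache->renderSphere(radius, 15, 15, color);
//   glPopMatrix();
//
// After (batch-based), per renderWorldBox above:
void drawSphereAt(gpu::Batch& batch, const glm::vec3& p, float radius, const glm::vec4& color) {
    auto geometryCache = DependencyManager::get<GeometryCache>();
    Transform transform;
    transform.setTranslation(p);          // replaces glTranslatef
    batch.setModelTransform(transform);   // replaces the matrix stack push/pop
    geometryCache->renderSphere(batch, radius, 15, 15, color);
}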
// Return a random vector of average length 1

View file

@ -16,10 +16,12 @@
#include <glm/gtc/quaternion.hpp>
#include <QSettings>
#include <gpu/Batch.h>
float randFloat();
const glm::vec3 randVector();
void renderWorldBox();
void renderWorldBox(gpu::Batch& batch);
int widthText(float scale, int mono, char const* string);
void drawText(int x, int y, float scale, float radians, int mono,

View file

@ -40,13 +40,13 @@ void AudioToolBox::render(int x, int y, int padding, bool boxed) {
glEnable(GL_TEXTURE_2D);
if (!_micTexture) {
_micTexture = DependencyManager::get<TextureCache>()->getImageTexture(PathUtils::resourcesPath() + "images/mic.svg");
_micTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/mic.svg");
}
if (!_muteTexture) {
_muteTexture = DependencyManager::get<TextureCache>()->getImageTexture(PathUtils::resourcesPath() + "images/mic-mute.svg");
_muteTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/mic-mute.svg");
}
if (_boxTexture) {
_boxTexture = DependencyManager::get<TextureCache>()->getImageTexture(PathUtils::resourcesPath() + "images/audio-box.svg");
_boxTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/audio-box.svg");
}
auto audioIO = DependencyManager::get<AudioClient>();

View file

@ -32,7 +32,7 @@
#include <PathUtils.h>
#include <PerfStat.h>
#include <SharedUtil.h>
#include <TextRenderer.h>
#include <TextRenderer3D.h>
#include <TextureCache.h>
#include "Application.h"
@ -60,6 +60,24 @@ const float DISPLAYNAME_FADE_FACTOR = pow(0.01f, 1.0f / DISPLAYNAME_FADE_TIME);
const float DISPLAYNAME_ALPHA = 0.95f;
const float DISPLAYNAME_BACKGROUND_ALPHA = 0.4f;
namespace render {
template <> const ItemKey payloadGetKey(const AvatarSharedPointer& avatar) {
return ItemKey::Builder::opaqueShape();
}
template <> const Item::Bound payloadGetBound(const AvatarSharedPointer& avatar) {
return static_cast<Avatar*>(avatar.get())->getBounds();
}
template <> void payloadRender(const AvatarSharedPointer& avatar, RenderArgs* args) {
Avatar* avatarPtr = static_cast<Avatar*>(avatar.get());
bool renderLookAtVectors = Menu::getInstance()->isOptionChecked(MenuOption::RenderLookAtVectors);
avatarPtr->setDisplayingLookatVectors(renderLookAtVectors);
if (avatarPtr->isInitialized() && args) {
avatarPtr->render(args, Application::getInstance()->getCamera()->getPosition());
}
}
}
Avatar::Avatar() :
AvatarData(),
_skeletonModel(this),
@ -116,6 +134,10 @@ glm::quat Avatar::getWorldAlignedOrientation () const {
return computeRotationFromBodyToWorldUp() * getOrientation();
}
AABox Avatar::getBounds() const {
return AABox();
}
float Avatar::getLODDistance() const {
return DependencyManager::get<LODManager>()->getAvatarLODDistanceMultiplier() *
glm::distance(qApp->getCamera()->getPosition(), _position) / _scale;
@ -254,10 +276,10 @@ enum TextRendererType {
DISPLAYNAME
};
static TextRenderer* textRenderer(TextRendererType type) {
static TextRenderer* chatRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY, 24, -1,
false, TextRenderer::SHADOW_EFFECT);
static TextRenderer* displayNameRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY, 12);
static TextRenderer3D* textRenderer(TextRendererType type) {
static TextRenderer3D* chatRenderer = TextRenderer3D::getInstance(SANS_FONT_FAMILY, 24, -1,
false, TextRenderer3D::SHADOW_EFFECT);
static TextRenderer3D* displayNameRenderer = TextRenderer3D::getInstance(SANS_FONT_FAMILY, 12);
switch(type) {
case CHAT:
@ -269,11 +291,29 @@ static TextRenderer* textRenderer(TextRendererType type) {
return displayNameRenderer;
}
void Avatar::render(const glm::vec3& cameraPosition, RenderArgs::RenderMode renderMode, bool postLighting) {
bool Avatar::addToScene(AvatarSharedPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
auto avatarPayload = new render::Payload<AvatarData>(self);
auto avatarPayloadPointer = Avatar::PayloadPointer(avatarPayload);
_renderItemID = scene->allocateID();
pendingChanges.resetItem(_renderItemID, avatarPayloadPointer);
_skeletonModel.addToScene(scene, pendingChanges);
getHead()->getFaceModel().addToScene(scene, pendingChanges);
return true;
}
void Avatar::removeFromScene(AvatarSharedPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
pendingChanges.removeItem(_renderItemID);
_skeletonModel.removeFromScene(scene, pendingChanges);
getHead()->getFaceModel().removeFromScene(scene, pendingChanges);
}
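The render::payloadGetKey/payloadGetBound/payloadRender specializations earlier in this file, plus addToScene/removeFromScene here, are the whole recipe this merge uses to hand a drawable to the new render::Scene. A skeletal sketch of that recipe for a made-up type (Foo and its methods are placeholders, not anything in the diff; the real types are Avatar and Model):

// Skeletal sketch only.
using FooPointer = std::shared_ptr<Foo>;

namespace render {
    template <> const ItemKey payloadGetKey(const FooPointer& foo) { return ItemKey::Builder::opaqueShape(); }
    template <> const Item::Bound payloadGetBound(const FooPointer& foo) { return foo->getBounds(); }
    template <> void payloadRender(const FooPointer& foo, RenderArgs* args) { if (args) { foo->render(args); } }
}

render::ItemID addFooToScene(const FooPointer& foo, const render::ScenePointer& scene,
                             render::PendingChanges& pendingChanges) {
    auto payload = new render::Payload<Foo>(foo);
    auto payloadPointer = std::shared_ptr<render::Item::PayloadInterface>(payload);
    render::ItemID itemID = scene->allocateID();       // mirrors Avatar::addToScene above
    pendingChanges.resetItem(itemID, payloadPointer);  // queued; applied via enqueuePendingChanges()
    return itemID;
}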
void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, bool postLighting) {
if (_referential) {
_referential->update();
}
auto batch = renderArgs->_batch;
if (postLighting &&
glm::distance(DependencyManager::get<AvatarManager>()->getMyAvatar()->getPosition(), _position) < 10.0f) {
auto geometryCache = DependencyManager::get<GeometryCache>();
@ -299,15 +339,11 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderArgs::RenderMode rend
}
if (havePosition && haveRotation) {
glPushMatrix(); {
glTranslatef(position.x, position.y, position.z);
float angle = glm::degrees(glm::angle(rotation));
glm::vec3 axis = glm::axis(rotation);
glRotatef(angle, axis.x, axis.y, axis.z);
geometryCache->renderLine(glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, laserLength, 0.0f), laserColor);
} glPopMatrix();
Transform pointerTransform;
pointerTransform.setTranslation(position);
pointerTransform.setRotation(rotation);
batch->setModelTransform(pointerTransform);
geometryCache->renderLine(*batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, laserLength, 0.0f), laserColor);
}
}
@ -325,14 +361,11 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderArgs::RenderMode rend
}
if (havePosition && haveRotation) {
glPushMatrix(); {
glTranslatef(position.x, position.y, position.z);
float angle = glm::degrees(glm::angle(rotation));
glm::vec3 axis = glm::axis(rotation);
glRotatef(angle, axis.x, axis.y, axis.z);
geometryCache->renderLine(glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, laserLength, 0.0f), laserColor);
} glPopMatrix();
Transform pointerTransform;
pointerTransform.setTranslation(position);
pointerTransform.setRotation(rotation);
batch->setModelTransform(pointerTransform);
geometryCache->renderLine(*batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, laserLength, 0.0f), laserColor);
}
}
}
@ -340,7 +373,7 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderArgs::RenderMode rend
// simple frustum check
float boundingRadius = getBillboardSize();
ViewFrustum* frustum = nullptr;
if (renderMode == RenderArgs::SHADOW_RENDER_MODE) {
if (renderArgs->_renderMode == RenderArgs::SHADOW_RENDER_MODE) {
frustum = Application::getInstance()->getShadowViewFrustum();
} else {
frustum = Application::getInstance()->getDisplayViewFrustum();
@ -366,14 +399,14 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderArgs::RenderMode rend
GLOW_FROM_AVERAGE_LOUDNESS = 0.0f;
}
float glowLevel = _moving && distanceToTarget > GLOW_DISTANCE && renderMode == RenderArgs::NORMAL_RENDER_MODE
float glowLevel = _moving && distanceToTarget > GLOW_DISTANCE && renderArgs->_renderMode == RenderArgs::NORMAL_RENDER_MODE
? 1.0f
: GLOW_FROM_AVERAGE_LOUDNESS;
// render body
renderBody(frustum, renderMode, postLighting, glowLevel);
renderBody(renderArgs, frustum, postLighting, glowLevel);
if (!postLighting && renderMode != RenderArgs::SHADOW_RENDER_MODE) {
if (!postLighting && renderArgs->_renderMode != RenderArgs::SHADOW_RENDER_MODE) {
// add local lights
const float BASE_LIGHT_DISTANCE = 2.0f;
const float LIGHT_EXPONENT = 1.0f;
@ -397,10 +430,10 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderArgs::RenderMode rend
_skeletonModel.renderJointCollisionShapes(0.7f);
}
if (renderHead && shouldRenderHead(cameraPosition, renderMode)) {
if (renderHead && shouldRenderHead(renderArgs, cameraPosition)) {
getHead()->getFaceModel().renderJointCollisionShapes(0.7f);
}
if (renderBounding && shouldRenderHead(cameraPosition, renderMode)) {
if (renderBounding && shouldRenderHead(renderArgs, cameraPosition)) {
_skeletonModel.renderBoundingCollisionShapes(0.7f);
}
@ -409,14 +442,16 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderArgs::RenderMode rend
const float LOOK_AT_INDICATOR_RADIUS = 0.03f;
const float LOOK_AT_INDICATOR_OFFSET = 0.22f;
const glm::vec4 LOOK_AT_INDICATOR_COLOR = { 0.8f, 0.0f, 0.0f, 0.75f };
glPushMatrix();
glm::vec3 position;
if (_displayName.isEmpty() || _displayNameAlpha == 0.0f) {
glTranslatef(_position.x, getDisplayNamePosition().y, _position.z);
position = glm::vec3(_position.x, getDisplayNamePosition().y, _position.z);
} else {
glTranslatef(_position.x, getDisplayNamePosition().y + LOOK_AT_INDICATOR_OFFSET, _position.z);
position = glm::vec3(_position.x, getDisplayNamePosition().y + LOOK_AT_INDICATOR_OFFSET, _position.z);
}
DependencyManager::get<GeometryCache>()->renderSphere(LOOK_AT_INDICATOR_RADIUS, 15, 15, LOOK_AT_INDICATOR_COLOR);
glPopMatrix();
Transform transform;
transform.setTranslation(position);
batch->setModelTransform(transform);
DependencyManager::get<GeometryCache>()->renderSphere(*batch, LOOK_AT_INDICATOR_RADIUS, 15, 15, LOOK_AT_INDICATOR_COLOR);
}
}
@ -437,31 +472,33 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderArgs::RenderMode rend
float angle = abs(angleBetween(toTarget + delta, toTarget - delta));
float sphereRadius = getHead()->getAverageLoudness() * SPHERE_LOUDNESS_SCALING;
if (renderMode == RenderArgs::NORMAL_RENDER_MODE && (sphereRadius > MIN_SPHERE_SIZE) &&
if (renderArgs->_renderMode == RenderArgs::NORMAL_RENDER_MODE && (sphereRadius > MIN_SPHERE_SIZE) &&
(angle < MAX_SPHERE_ANGLE) && (angle > MIN_SPHERE_ANGLE)) {
glPushMatrix();
glTranslatef(_position.x, _position.y, _position.z);
glScalef(height, height, height);
Transform transform;
transform.setTranslation(_position);
transform.setScale(height);
batch->setModelTransform(transform);
if (_voiceSphereID == GeometryCache::UNKNOWN_ID) {
_voiceSphereID = DependencyManager::get<GeometryCache>()->allocateID();
}
DependencyManager::get<GeometryCache>()->renderSphere(sphereRadius, 15, 15,
DependencyManager::get<GeometryCache>()->renderSphere(*batch, sphereRadius, 15, 15,
glm::vec4(SPHERE_COLOR[0], SPHERE_COLOR[1], SPHERE_COLOR[2], 1.0f - angle / MAX_SPHERE_ANGLE), true,
_voiceSphereID);
glPopMatrix();
}
}
}
const float DISPLAYNAME_DISTANCE = 20.0f;
setShowDisplayName(renderMode == RenderArgs::NORMAL_RENDER_MODE && distanceToTarget < DISPLAYNAME_DISTANCE);
if (!postLighting || renderMode != RenderArgs::NORMAL_RENDER_MODE || (isMyAvatar() &&
setShowDisplayName(renderArgs->_renderMode == RenderArgs::NORMAL_RENDER_MODE && distanceToTarget < DISPLAYNAME_DISTANCE);
if (renderArgs->_renderMode != RenderArgs::NORMAL_RENDER_MODE || (isMyAvatar() &&
Application::getInstance()->getCamera()->getMode() == CAMERA_MODE_FIRST_PERSON)) {
return;
}
renderDisplayName();
renderDisplayName(renderArgs);
}
glm::quat Avatar::computeRotationFromBodyToWorldUp(float proportion) const {
@ -480,32 +517,44 @@ glm::quat Avatar::computeRotationFromBodyToWorldUp(float proportion) const {
return glm::angleAxis(angle * proportion, axis);
}
void Avatar::renderBody(ViewFrustum* renderFrustum, RenderArgs::RenderMode renderMode, bool postLighting, float glowLevel) {
Model::RenderMode modelRenderMode = renderMode;
void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel) {
// if our models were not yet ready when we added them to the scene,
// fix them up in the scene now
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
if (_skeletonModel.needsFixupInScene()) {
_skeletonModel.removeFromScene(scene, pendingChanges);
_skeletonModel.addToScene(scene, pendingChanges);
}
if (getHead()->getFaceModel().needsFixupInScene()) {
getHead()->getFaceModel().removeFromScene(scene, pendingChanges);
getHead()->getFaceModel().addToScene(scene, pendingChanges);
}
scene->enqueuePendingChanges(pendingChanges);
{
Glower glower(glowLevel);
Glower glower(renderArgs, glowLevel);
if (_shouldRenderBillboard || !(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
if (postLighting || renderMode == RenderArgs::SHADOW_RENDER_MODE) {
if (postLighting || renderArgs->_renderMode == RenderArgs::SHADOW_RENDER_MODE) {
// render the billboard until both models are loaded
renderBillboard();
renderBillboard(renderArgs);
}
return;
}
if (postLighting) {
getHand()->render(false, modelRenderMode);
getHand()->render(renderArgs, false);
} else {
RenderArgs args;
args._viewFrustum = renderFrustum;
_skeletonModel.render(1.0f, modelRenderMode, &args);
renderAttachments(renderMode, &args);
// NOTE: we no longer call this here, because we've added all the model parts as renderable items in the scene
//_skeletonModel.render(renderArgs, 1.0f);
renderAttachments(renderArgs);
}
}
getHead()->render(1.0f, renderFrustum, modelRenderMode, postLighting);
getHead()->render(renderArgs, 1.0f, renderFrustum, postLighting);
}
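The needsFixupInScene() block at the top of renderBody is a pattern this merge repeats (MyAvatar::renderBody below does the same): a model may have been registered with the scene before its geometry finished loading, so each frame checks whether it needs fixing up and, if so, removes and re-adds it inside one PendingChanges batch. A condensed sketch, assuming any object that exposes the three calls used above:

// Generic sketch of the fixup-in-scene pattern used by Avatar::renderBody and MyAvatar::renderBody.
template <typename Renderable>
void fixupInScene(Renderable& renderable, const render::ScenePointer& scene) {
    render::PendingChanges pendingChanges;
    if (renderable.needsFixupInScene()) {
        renderable.removeFromScene(scene, pendingChanges);  // drop the registration made before geometry loaded
        renderable.addToScene(scene, pendingChanges);       // re-add now that it is renderable
    }
    scene->enqueuePendingChanges(pendingChanges);
}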
bool Avatar::shouldRenderHead(const glm::vec3& cameraPosition, RenderArgs::RenderMode renderMode) const {
bool Avatar::shouldRenderHead(const RenderArgs* renderArgs, const glm::vec3& cameraPosition) const {
return true;
}
@ -529,19 +578,21 @@ void Avatar::simulateAttachments(float deltaTime) {
}
}
void Avatar::renderAttachments(RenderArgs::RenderMode renderMode, RenderArgs* args) {
void Avatar::renderAttachments(RenderArgs* args) {
// RenderArgs::RenderMode modelRenderMode = (renderMode == RenderArgs::SHADOW_RENDER_MODE) ?
// RenderArgs::SHADOW_RENDER_MODE : RenderArgs::DEFAULT_RENDER_MODE;
/*
foreach (Model* model, _attachmentModels) {
model->render(1.0f, renderMode, args);
model->render(args, 1.0f);
}
*/
}
void Avatar::updateJointMappings() {
// no-op; joint mappings come from skeleton model
}
void Avatar::renderBillboard() {
void Avatar::renderBillboard(RenderArgs* renderArgs) {
if (_billboard.isEmpty()) {
return;
}
@ -554,44 +605,29 @@ void Avatar::renderBillboard() {
if (!_billboardTexture->isLoaded()) {
return;
}
glEnable(GL_ALPHA_TEST);
glAlphaFunc(GL_GREATER, 0.5f);
glEnable(GL_TEXTURE_2D);
glDisable(GL_LIGHTING);
glBindTexture(GL_TEXTURE_2D, _billboardTexture->getID());
glPushMatrix();
glTranslatef(_position.x, _position.y, _position.z);
// rotate about vertical to face the camera
glm::quat rotation = getOrientation();
glm::vec3 cameraVector = glm::inverse(rotation) * (Application::getInstance()->getCamera()->getPosition() - _position);
rotation = rotation * glm::angleAxis(atan2f(-cameraVector.x, -cameraVector.z), glm::vec3(0.0f, 1.0f, 0.0f));
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
// compute the size from the billboard camera parameters and scale
float size = getBillboardSize();
glScalef(size, size, size);
Transform transform;
transform.setTranslation(_position);
transform.setRotation(rotation);
transform.setScale(size);
glm::vec2 topLeft(-1.0f, -1.0f);
glm::vec2 bottomRight(1.0f, 1.0f);
glm::vec2 texCoordTopLeft(0.0f, 0.0f);
glm::vec2 texCoordBottomRight(1.0f, 1.0f);
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight,
gpu::Batch& batch = *renderArgs->_batch;
batch.setUniformTexture(0, _billboardTexture->getGPUTexture());
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, true);
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight,
glm::vec4(1.0f, 1.0f, 1.0f, 1.0f));
glPopMatrix();
glDisable(GL_TEXTURE_2D);
glEnable(GL_LIGHTING);
glDisable(GL_ALPHA_TEST);
glBindTexture(GL_TEXTURE_2D, 0);
}
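The "rotate about vertical to face the camera" lines above are the heart of renderBillboard: the camera position is brought into the avatar's local frame, and atan2 of the local x/z components gives the yaw needed to point local -Z at the camera. The same computation as a standalone helper, assuming only glm (the function name is the editor's, not the codebase's):

#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Yaw-only billboarding, mirroring Avatar::renderBillboard above.
glm::quat faceCameraYaw(const glm::quat& orientation, const glm::vec3& position, const glm::vec3& cameraPosition) {
    glm::vec3 cameraLocal = glm::inverse(orientation) * (cameraPosition - position);
    float yaw = atan2f(-cameraLocal.x, -cameraLocal.z);                  // angle about +Y toward the camera
    return orientation * glm::angleAxis(yaw, glm::vec3(0.0f, 1.0f, 0.0f));
}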
float Avatar::getBillboardSize() const {
@ -654,9 +690,10 @@ float Avatar::calculateDisplayNameScaleFactor(const glm::vec3& textPosition, boo
return scaleFactor;
}
void Avatar::renderDisplayName() {
void Avatar::renderDisplayName(RenderArgs* renderArgs) {
auto batch = renderArgs->_batch;
bool shouldShowReceiveStats = DependencyManager::get<AvatarManager>()->shouldShowReceiveStats();
bool shouldShowReceiveStats = DependencyManager::get<AvatarManager>()->shouldShowReceiveStats() && !isMyAvatar();
if ((_displayName.isEmpty() && !shouldShowReceiveStats) || _displayNameAlpha == 0.0f) {
return;
@ -665,31 +702,19 @@ void Avatar::renderDisplayName() {
// which viewing mode?
bool inHMD = Application::getInstance()->isHMDMode();
glDisable(GL_LIGHTING);
glPushMatrix();
glm::vec3 textPosition = getDisplayNamePosition();
glTranslatef(textPosition.x, textPosition.y, textPosition.z);
// we need "always facing camera": we must remove the camera rotation from the stack
glm::vec3 frontAxis(0.0f, 0.0f, 1.0f);
if (inHMD) {
glm::vec3 camPosition = Application::getInstance()->getCamera()->getPosition();
frontAxis = camPosition - textPosition;
} else {
glm::quat rotation = Application::getInstance()->getCamera()->getRotation();
frontAxis = glm::rotate(rotation, frontAxis);
}
frontAxis = glm::normalize(glm::vec3(frontAxis.z, 0.0f, -frontAxis.x));
float angle = acos(frontAxis.x) * ((frontAxis.z < 0) ? 1.0f : -1.0f);
glRotatef(glm::degrees(angle), 0.0f, 1.0f, 0.0f);
// we need "always facing camera": we must remove the camera rotation from the stac
glm::quat rotation = Application::getInstance()->getCamera()->getRotation();
// TODO: Fix scaling - at some point this or the text rendering changed in scale.
float scaleFactor = calculateDisplayNameScaleFactor(textPosition, inHMD);
glScalef(scaleFactor, -scaleFactor, scaleFactor); // TextRenderer::draw paints the text upside down in y axis
scaleFactor /= 3.5f;
Transform textTransform;
textTransform.setTranslation(textPosition);
textTransform.setRotation(rotation);
textTransform.setScale(scaleFactor);
// optionally render timing stats for this avatar with the display name
QString renderedDisplayName = _displayName;
@ -699,55 +724,45 @@ void Avatar::renderDisplayName() {
float kilobitsPerSecond = getAverageBytesReceivedPerSecond() / (float) BYTES_PER_KILOBIT;
QString statsFormat = QString("(%1 Kbps, %2 Hz)");
if (!renderedDisplayName.isEmpty()) {
statsFormat.prepend(" - ");
}
QString statsText = statsFormat.arg(QString::number(kilobitsPerSecond, 'f', 2)).arg(getReceiveRate());
glm::vec2 extent = textRenderer(DISPLAYNAME)->computeExtent(statsText);
// add the extent required for the stats to whatever was calculated for the display name
nameDynamicRect.setWidth(nameDynamicRect.width() + extent.x);
if (extent.y > nameDynamicRect.height()) {
nameDynamicRect.setHeight(extent.y);
}
renderedDisplayName += statsText;
glm::vec2 extent = textRenderer(DISPLAYNAME)->computeExtent(renderedDisplayName);
nameDynamicRect = QRect(0, 0, (int)extent.x, (int)extent.y);
}
int text_x = -nameDynamicRect.width() / 2;
int text_y = -nameDynamicRect.height() / 2;
// draw a gray background
int left = text_x + nameDynamicRect.x();
int left = text_x;
int right = left + nameDynamicRect.width();
int bottom = text_y + nameDynamicRect.y();
int bottom = text_y;
int top = bottom + nameDynamicRect.height();
const int border = 8;
bottom -= border;
left -= border;
top += border;
right += border;
// We are drawing coplanar textures with depth: need the polygon offset
glEnable(GL_POLYGON_OFFSET_FILL);
glPolygonOffset(1.0f, 1.0f);
DependencyManager::get<GeometryCache>()->renderBevelCornersRect(left, bottom, right - left, top - bottom, 3,
glm::vec4(0.2f, 0.2f, 0.2f, _displayNameAlpha * DISPLAYNAME_BACKGROUND_ALPHA / DISPLAYNAME_ALPHA));
glm::vec4 color(0.93f, 0.93f, 0.93f, _displayNameAlpha);
glm::vec4 textColor(0.93f, 0.93f, 0.93f, _displayNameAlpha);
glm::vec4 backgroundColor(0.2f, 0.2f, 0.2f,
_displayNameAlpha * DISPLAYNAME_BACKGROUND_ALPHA / DISPLAYNAME_ALPHA);
auto backgroundTransform = textTransform;
backgroundTransform.postTranslate(glm::vec3(0.0f, 0.0f, -0.001f));
batch->setModelTransform(backgroundTransform);
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(*batch);
DependencyManager::get<GeometryCache>()->renderBevelCornersRect(*batch, left, bottom, right - left, top - bottom, 3,
backgroundColor);
QByteArray nameUTF8 = renderedDisplayName.toLocal8Bit();
glDisable(GL_POLYGON_OFFSET_FILL);
textRenderer(DISPLAYNAME)->draw(text_x, text_y, nameUTF8.data(), color);
glPopMatrix();
glEnable(GL_LIGHTING);
batch->setModelTransform(textTransform);
textRenderer(DISPLAYNAME)->draw(*batch, text_x, -text_y, nameUTF8.data(), textColor);
}
bool Avatar::findRayIntersection(RayIntersectionInfo& intersection) const {
@ -1050,13 +1065,13 @@ float Avatar::getSkeletonHeight() const {
float Avatar::getHeadHeight() const {
Extents extents = getHead()->getFaceModel().getMeshExtents();
if (!extents.isEmpty()) {
if (!extents.isEmpty() && extents.isValid()) {
return extents.maximum.y - extents.minimum.y;
}
extents = _skeletonModel.getMeshExtents();
glm::vec3 neckPosition;
if (!extents.isEmpty() && _skeletonModel.getNeckPosition(neckPosition)) {
if (!extents.isEmpty() && extents.isValid() && _skeletonModel.getNeckPosition(neckPosition)) {
return extents.maximum.y / 2.0f - neckPosition.y + _position.y;
}

View file

@ -20,12 +20,20 @@
#include <AvatarData.h>
#include <ShapeInfo.h>
#include <render/Scene.h>
#include "Hand.h"
#include "Head.h"
#include "InterfaceConfig.h"
#include "SkeletonModel.h"
#include "world.h"
namespace render {
template <> const ItemKey payloadGetKey(const AvatarSharedPointer& avatar);
template <> const Item::Bound payloadGetBound(const AvatarSharedPointer& avatar);
template <> void payloadRender(const AvatarSharedPointer& avatar, RenderArgs* args);
}
static const float SCALING_RATIO = .05f;
static const float SMOOTHING_RATIO = .05f; // 0 < ratio < 1
static const float RESCALING_TOLERANCE = .02f;
@ -66,12 +74,21 @@ public:
Avatar();
~Avatar();
typedef render::Payload<AvatarData> Payload;
typedef std::shared_ptr<render::Item::PayloadInterface> PayloadPointer;
void init();
void simulate(float deltaTime);
virtual void render(const glm::vec3& cameraPosition, RenderArgs::RenderMode renderMode = RenderArgs::NORMAL_RENDER_MODE,
virtual void render(RenderArgs* renderArgs, const glm::vec3& cameraPosition,
bool postLighting = false);
bool addToScene(AvatarSharedPointer self, std::shared_ptr<render::Scene> scene,
render::PendingChanges& pendingChanges);
void removeFromScene(AvatarSharedPointer self, std::shared_ptr<render::Scene> scene,
render::PendingChanges& pendingChanges);
//setters
void setDisplayingLookatVectors(bool displayingLookatVectors) { getHead()->setRenderLookatVectors(displayingLookatVectors); }
void setIsLookAtTarget(const bool isLookAtTarget) { _isLookAtTarget = isLookAtTarget; }
@ -87,6 +104,8 @@ public:
Hand* getHand() { return static_cast<Hand*>(_handData); }
glm::quat getWorldAlignedOrientation() const;
AABox getBounds() const;
/// Returns the distance to use as a LOD parameter.
float getLODDistance() const;
@ -212,23 +231,24 @@ protected:
glm::vec3 getDisplayNamePosition();
float calculateDisplayNameScaleFactor(const glm::vec3& textPosition, bool inHMD);
void renderDisplayName();
virtual void renderBody(ViewFrustum* renderFrustum, RenderArgs::RenderMode renderMode, bool postLighting, float glowLevel = 0.0f);
virtual bool shouldRenderHead(const glm::vec3& cameraPosition, RenderArgs::RenderMode renderMode) const;
void renderDisplayName(RenderArgs* renderArgs);
virtual void renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel = 0.0f);
virtual bool shouldRenderHead(const RenderArgs* renderArgs, const glm::vec3& cameraPosition) const;
void simulateAttachments(float deltaTime);
virtual void renderAttachments(RenderArgs::RenderMode renderMode, RenderArgs* args);
virtual void renderAttachments(RenderArgs* args);
virtual void updateJointMappings();
render::ItemID _renderItemID;
private:
bool _initialized;
NetworkTexturePointer _billboardTexture;
bool _shouldRenderBillboard;
bool _isLookAtTarget;
void renderBillboard();
void renderBillboard(RenderArgs* renderArgs);
float getBillboardSize() const;

View file

@ -55,12 +55,17 @@ AvatarManager::AvatarManager(QObject* parent) :
_avatarFades() {
// register a meta type for the weak pointer we'll use for the owning avatar mixer for each avatar
qRegisterMetaType<QWeakPointer<Node> >("NodeWeakPointer");
_myAvatar = QSharedPointer<MyAvatar>(new MyAvatar());
_myAvatar = std::make_shared<MyAvatar>();
}
void AvatarManager::init() {
_myAvatar->init();
_avatarHash.insert(MY_AVATAR_KEY, _myAvatar);
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
_myAvatar->addToScene(_myAvatar, scene, pendingChanges);
scene->enqueuePendingChanges(pendingChanges);
}
void AvatarManager::updateMyAvatar(float deltaTime) {
@ -92,7 +97,7 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
// simulate avatars
AvatarHash::iterator avatarIterator = _avatarHash.begin();
while (avatarIterator != _avatarHash.end()) {
Avatar* avatar = reinterpret_cast<Avatar*>(avatarIterator.value().data());
auto avatar = std::dynamic_pointer_cast<Avatar>(avatarIterator.value());
if (avatar == _myAvatar || !avatar->isInitialized()) {
// DO NOT update _myAvatar! Its update has already been done earlier in the main loop.
@ -111,69 +116,39 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
simulateAvatarFades(deltaTime);
}
void AvatarManager::renderAvatars(RenderArgs::RenderMode renderMode, bool postLighting, bool selfAvatarOnly) {
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::renderAvatars()");
bool renderLookAtVectors = Menu::getInstance()->isOptionChecked(MenuOption::RenderLookAtVectors);
glm::vec3 cameraPosition = Application::getInstance()->getCamera()->getPosition();
if (!selfAvatarOnly) {
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
foreach (const AvatarSharedPointer& avatarPointer, _avatarHash) {
Avatar* avatar = static_cast<Avatar*>(avatarPointer.data());
if (!avatar->isInitialized()) {
continue;
}
avatar->render(cameraPosition, renderMode, postLighting);
avatar->setDisplayingLookatVectors(renderLookAtVectors);
}
renderAvatarFades(cameraPosition, renderMode);
}
} else {
// just render myAvatar
_myAvatar->render(cameraPosition, renderMode, postLighting);
_myAvatar->setDisplayingLookatVectors(renderLookAtVectors);
}
}
void AvatarManager::simulateAvatarFades(float deltaTime) {
QVector<AvatarSharedPointer>::iterator fadingIterator = _avatarFades.begin();
const float SHRINK_RATE = 0.9f;
const float MIN_FADE_SCALE = 0.001f;
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
while (fadingIterator != _avatarFades.end()) {
Avatar* avatar = static_cast<Avatar*>(fadingIterator->data());
auto avatar = std::static_pointer_cast<Avatar>(*fadingIterator);
avatar->setTargetScale(avatar->getScale() * SHRINK_RATE, true);
if (avatar->getTargetScale() < MIN_FADE_SCALE) {
avatar->removeFromScene(*fadingIterator, scene, pendingChanges);
fadingIterator = _avatarFades.erase(fadingIterator);
} else {
avatar->simulate(deltaTime);
++fadingIterator;
}
}
}
void AvatarManager::renderAvatarFades(const glm::vec3& cameraPosition, RenderArgs::RenderMode renderMode) {
// render avatar fades
Glower glower(renderMode == RenderArgs::NORMAL_RENDER_MODE ? 1.0f : 0.0f);
foreach(const AvatarSharedPointer& fadingAvatar, _avatarFades) {
Avatar* avatar = static_cast<Avatar*>(fadingAvatar.data());
if (avatar != static_cast<Avatar*>(_myAvatar.data()) && avatar->isInitialized()) {
avatar->render(cameraPosition, renderMode);
}
}
scene->enqueuePendingChanges(pendingChanges);
}
AvatarSharedPointer AvatarManager::newSharedAvatar() {
return AvatarSharedPointer(new Avatar());
return AvatarSharedPointer(std::make_shared<Avatar>());
}
// virtual
AvatarSharedPointer AvatarManager::addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer) {
AvatarSharedPointer avatar = AvatarHashMap::addAvatar(sessionUUID, mixerWeakPointer);
auto avatar = std::dynamic_pointer_cast<Avatar>(AvatarHashMap::addAvatar(sessionUUID, mixerWeakPointer));
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
avatar->addToScene(avatar, scene, pendingChanges);
scene->enqueuePendingChanges(pendingChanges);
return avatar;
}
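A second thread running through this merge is the move from Qt smart pointers to std ones: QSharedPointer<MyAvatar> becomes std::shared_ptr<MyAvatar>, reinterpret_cast/static_cast on .data() becomes std::dynamic_pointer_cast or std::static_pointer_cast, and .data() becomes .get(). A minimal illustration of the downcast pattern used in the hunks above (Base/Derived are placeholders):

#include <memory>

struct Base { virtual ~Base() = default; };
struct Derived : Base { void derivedOnly() {} };

void example(const std::shared_ptr<Base>& basePtr) {
    // Safe downcast: yields an empty pointer if basePtr does not actually hold a Derived.
    if (auto derived = std::dynamic_pointer_cast<Derived>(basePtr)) {
        derived->derivedOnly();
    }
    // static_pointer_cast skips the runtime check; use it only when the dynamic type is known,
    // as AvatarManager does for entries it created itself.
    auto assumedDerived = std::static_pointer_cast<Derived>(basePtr);
    (void)assumedDerived;
}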
@ -194,10 +169,9 @@ void AvatarManager::removeAvatarMotionState(Avatar* avatar) {
void AvatarManager::removeAvatar(const QUuid& sessionUUID) {
AvatarHash::iterator avatarIterator = _avatarHash.find(sessionUUID);
if (avatarIterator != _avatarHash.end()) {
Avatar* avatar = reinterpret_cast<Avatar*>(avatarIterator.value().data());
std::shared_ptr<Avatar> avatar = std::dynamic_pointer_cast<Avatar>(avatarIterator.value());
if (avatar != _myAvatar && avatar->isInitialized()) {
removeAvatarMotionState(avatar);
removeAvatarMotionState(avatar.get());
_avatarFades.push_back(avatarIterator.value());
_avatarHash.erase(avatarIterator);
}
@ -208,12 +182,12 @@ void AvatarManager::clearOtherAvatars() {
// clear any avatars that came from an avatar-mixer
AvatarHash::iterator avatarIterator = _avatarHash.begin();
while (avatarIterator != _avatarHash.end()) {
Avatar* avatar = reinterpret_cast<Avatar*>(avatarIterator.value().data());
auto avatar = std::static_pointer_cast<Avatar>(avatarIterator.value());
if (avatar == _myAvatar || !avatar->isInitialized()) {
// don't remove myAvatar or uninitialized avatars from the list
++avatarIterator;
} else {
removeAvatarMotionState(avatar);
removeAvatarMotionState(avatar.get());
_avatarFades.push_back(avatarIterator.value());
avatarIterator = _avatarHash.erase(avatarIterator);
}
@ -276,7 +250,7 @@ void AvatarManager::handleCollisionEvents(CollisionEvents& collisionEvents) {
void AvatarManager::updateAvatarPhysicsShape(const QUuid& id) {
AvatarHash::iterator avatarItr = _avatarHash.find(id);
if (avatarItr != _avatarHash.end()) {
Avatar* avatar = static_cast<Avatar*>(avatarItr.value().data());
auto avatar = std::static_pointer_cast<Avatar>(avatarItr.value());
AvatarMotionState* motionState = avatar->_motionState;
if (motionState) {
motionState->addDirtyFlags(EntityItem::DIRTY_SHAPE);
@ -285,7 +259,7 @@ void AvatarManager::updateAvatarPhysicsShape(const QUuid& id) {
avatar->computeShapeInfo(shapeInfo);
btCollisionShape* shape = ObjectMotionState::getShapeManager()->getShape(shapeInfo);
if (shape) {
AvatarMotionState* motionState = new AvatarMotionState(avatar, shape);
AvatarMotionState* motionState = new AvatarMotionState(avatar.get(), shape);
avatar->_motionState = motionState;
_motionStatesToAdd.insert(motionState);
_avatarMotionStates.insert(motionState);

View file

@ -35,11 +35,10 @@ public:
void init();
MyAvatar* getMyAvatar() { return _myAvatar.data(); }
MyAvatar* getMyAvatar() { return _myAvatar.get(); }
void updateMyAvatar(float deltaTime);
void updateOtherAvatars(float deltaTime);
void renderAvatars(RenderArgs::RenderMode renderMode, bool postLighting = false, bool selfAvatarOnly = false);
void clearOtherAvatars();
@ -70,7 +69,6 @@ private:
AvatarManager(const AvatarManager& other);
void simulateAvatarFades(float deltaTime);
void renderAvatarFades(const glm::vec3& cameraPosition, RenderArgs::RenderMode renderMode);
// virtual overrides
virtual AvatarSharedPointer newSharedAvatar();
@ -79,7 +77,7 @@ private:
virtual void removeAvatar(const QUuid& sessionUUID);
QVector<AvatarSharedPointer> _avatarFades;
QSharedPointer<MyAvatar> _myAvatar;
std::shared_ptr<MyAvatar> _myAvatar;
quint64 _lastSendAvatarDataTime = 0; // Controls MyAvatar send data rate.
QVector<AvatarManager::LocalLight> _localLights;

View file

@ -39,7 +39,8 @@ void FaceModel::simulate(float deltaTime, bool fullUpdate) {
setPupilDilation(_owningHead->getPupilDilation());
setBlendshapeCoefficients(_owningHead->getBlendshapeCoefficients());
invalidCalculatedMeshBoxes();
// FIXME - this is very expensive, we shouldn't do it if we don't have to
//invalidCalculatedMeshBoxes();
if (isActive()) {
setOffset(-_geometry->getFBXGeometry().neckPivot);

View file

@ -102,8 +102,8 @@ void Hand::resolvePenetrations() {
}
}
void Hand::render(bool isMine, Model::RenderMode renderMode) {
if (renderMode != RenderArgs::SHADOW_RENDER_MODE &&
void Hand::render(RenderArgs* renderArgs, bool isMine) {
if (renderArgs->_renderMode != RenderArgs::SHADOW_RENDER_MODE &&
Menu::getInstance()->isOptionChecked(MenuOption::RenderSkeletonCollisionShapes)) {
// draw a green sphere at hand joint location, which is actually near the wrist
for (size_t i = 0; i < getNumPalms(); i++) {
@ -119,7 +119,7 @@ void Hand::render(bool isMine, Model::RenderMode renderMode) {
}
}
if (renderMode != RenderArgs::SHADOW_RENDER_MODE && Menu::getInstance()->isOptionChecked(MenuOption::DisplayHands)) {
if (renderArgs->_renderMode != RenderArgs::SHADOW_RENDER_MODE && Menu::getInstance()->isOptionChecked(MenuOption::DisplayHands)) {
renderHandTargets(isMine);
}

View file

@ -40,19 +40,8 @@ class Hand : public HandData {
public:
Hand(Avatar* owningAvatar);
struct HandBall
{
glm::vec3 position; // the actual dynamic position of the ball at any given time
glm::quat rotation; // the rotation of the ball
glm::vec3 velocity; // the velocity of the ball
float radius; // the radius of the ball
bool isCollidable; // whether or not the ball responds to collisions
bool isColliding; // ball is currently colliding
float touchForce; // a scalar determining the amount that the cursor (or hand) is penetrating the ball
};
void simulate(float deltaTime, bool isMine);
void render(bool isMine, Model::RenderMode renderMode = RenderArgs::DEFAULT_RENDER_MODE);
void render(RenderArgs* renderArgs, bool isMine);
void collideAgainstAvatar(Avatar* avatar, bool isMyHand);

View file

@ -292,15 +292,11 @@ void Head::relaxLean(float deltaTime) {
_deltaLeanForward *= relaxationFactor;
}
void Head::render(float alpha, ViewFrustum* renderFrustum, Model::RenderMode mode, bool postLighting) {
void Head::render(RenderArgs* renderArgs, float alpha, ViewFrustum* renderFrustum, bool postLighting) {
if (postLighting) {
if (_renderLookatVectors) {
renderLookatVectors(_leftEyePosition, _rightEyePosition, getCorrectedLookAtPosition());
renderLookatVectors(renderArgs, _leftEyePosition, _rightEyePosition, getCorrectedLookAtPosition());
}
} else {
RenderArgs args;
args._viewFrustum = renderFrustum;
_faceModel.render(alpha, mode, &args);
}
}
@ -383,9 +379,9 @@ void Head::addLeanDeltas(float sideways, float forward) {
_deltaLeanForward += forward;
}
void Head::renderLookatVectors(glm::vec3 leftEyePosition, glm::vec3 rightEyePosition, glm::vec3 lookatPosition) {
void Head::renderLookatVectors(RenderArgs* renderArgs, glm::vec3 leftEyePosition, glm::vec3 rightEyePosition, glm::vec3 lookatPosition) {
auto geometryCache = DependencyManager::get<GeometryCache>();
DependencyManager::get<GlowEffect>()->begin();
DependencyManager::get<GlowEffect>()->begin(renderArgs);
glLineWidth(2.0);
@ -394,7 +390,7 @@ void Head::renderLookatVectors(glm::vec3 leftEyePosition, glm::vec3 rightEyePosi
geometryCache->renderLine(leftEyePosition, lookatPosition, startColor, endColor, _leftEyeLookAtID);
geometryCache->renderLine(rightEyePosition, lookatPosition, startColor, endColor, _rightEyeLookAtID);
DependencyManager::get<GlowEffect>()->end();
DependencyManager::get<GlowEffect>()->end(renderArgs);
}

View file

@ -40,7 +40,7 @@ public:
void init();
void reset();
void simulate(float deltaTime, bool isMine, bool billboard = false);
void render(float alpha, ViewFrustum* renderFrustum, Model::RenderMode mode, bool postLighting);
void render(RenderArgs* renderArgs, float alpha, ViewFrustum* renderFrustum, bool postLighting);
void setScale(float scale);
void setPosition(glm::vec3 position) { _position = position; }
void setAverageLoudness(float averageLoudness) { _averageLoudness = averageLoudness; }
@ -153,7 +153,7 @@ private:
int _rightEyeLookAtID;
// private methods
void renderLookatVectors(glm::vec3 leftEyePosition, glm::vec3 rightEyePosition, glm::vec3 lookatPosition);
void renderLookatVectors(RenderArgs* renderArgs, glm::vec3 leftEyePosition, glm::vec3 rightEyePosition, glm::vec3 lookatPosition);
void calculateMouthShapes();
void applyEyelidOffset(glm::quat headOrientation);

View file

@ -45,6 +45,9 @@
#include "Util.h"
#include "InterfaceLogging.h"
#include "gpu/GLBackend.h"
using namespace std;
const glm::vec3 DEFAULT_UP_DIRECTION(0.0f, 1.0f, 0.0f);
@ -105,7 +108,7 @@ MyAvatar::MyAvatar() :
}
MyAvatar::~MyAvatar() {
_lookAtTargetAvatar.clear();
_lookAtTargetAvatar.reset();
}
QByteArray MyAvatar::toByteArray() {
@ -328,14 +331,14 @@ void MyAvatar::renderDebugBodyPoints() {
}
// virtual
void MyAvatar::render(const glm::vec3& cameraPosition, RenderArgs::RenderMode renderMode, bool postLighting) {
void MyAvatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, bool postLighting) {
// don't render if we've been asked to disable local rendering
if (!_shouldRender) {
return; // exit early
}
Avatar::render(cameraPosition, renderMode, postLighting);
Avatar::render(renderArgs, cameraPosition, postLighting);
// don't display IK constraints in shadow mode
if (Menu::getInstance()->isOptionChecked(MenuOption::ShowIKConstraints) && postLighting) {
_skeletonModel.renderIKConstraints();
@ -856,7 +859,7 @@ void MyAvatar::updateLookAtTargetAvatar() {
//
// Look at the avatar whose eyes are closest to the ray in direction of my avatar's head
//
_lookAtTargetAvatar.clear();
_lookAtTargetAvatar.reset();
_targetAvatarPosition = glm::vec3(0.0f);
glm::vec3 lookForward = getHead()->getFinalOrientationInWorldFrame() * IDENTITY_FRONT;
@ -868,7 +871,7 @@ void MyAvatar::updateLookAtTargetAvatar() {
int howManyLookingAtMe = 0;
foreach (const AvatarSharedPointer& avatarPointer, DependencyManager::get<AvatarManager>()->getAvatarHash()) {
Avatar* avatar = static_cast<Avatar*>(avatarPointer.data());
Avatar* avatar = static_cast<Avatar*>(avatarPointer.get());
bool isCurrentTarget = avatar->getIsLookAtTarget();
float distanceTo = glm::length(avatar->getHead()->getEyePosition() - cameraPosition);
avatar->setIsLookAtTarget(false);
@ -896,13 +899,14 @@ void MyAvatar::updateLookAtTargetAvatar() {
}
}
}
if (_lookAtTargetAvatar) {
static_cast<Avatar*>(_lookAtTargetAvatar.data())->setIsLookAtTarget(true);
auto avatarPointer = _lookAtTargetAvatar.lock();
if (avatarPointer) {
static_cast<Avatar*>(avatarPointer.get())->setIsLookAtTarget(true);
}
}
void MyAvatar::clearLookAtTargetAvatar() {
_lookAtTargetAvatar.clear();
_lookAtTargetAvatar.reset();
}
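The look-at target follows the same migration: the old QWeakPointer boolean test becomes an explicit lock(), which yields a shared_ptr that keeps the avatar alive for the scope of the statement, and clear() becomes reset(). A minimal illustration, assuming AvatarWeakPointer is a std::weak_ptr as the .lock()/.reset() calls above imply:

#include <memory>

void sketchWeakPtrUse(std::weak_ptr<int> weak) {
    if (auto strong = weak.lock()) {       // promotes to shared_ptr, or empty if expired
        *strong += 1;                      // safe to dereference while 'strong' is in scope
    }
    weak.reset();                          // equivalent of the old QWeakPointer::clear()
}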
bool MyAvatar::isLookingAtLeftEye() {
@ -1166,11 +1170,26 @@ void MyAvatar::attach(const QString& modelURL, const QString& jointName, const g
Avatar::attach(modelURL, jointName, translation, rotation, scale, allowDuplicates, useSaved);
}
void MyAvatar::renderBody(ViewFrustum* renderFrustum, RenderArgs::RenderMode renderMode, bool postLighting, float glowLevel) {
void MyAvatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel) {
if (!(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
return; // wait until both models are loaded
}
// if our models were not yet ready when we added them to the scene,
// fix them up in the scene now
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
if (_skeletonModel.needsFixupInScene()) {
_skeletonModel.removeFromScene(scene, pendingChanges);
_skeletonModel.addToScene(scene, pendingChanges);
}
if (getHead()->getFaceModel().needsFixupInScene()) {
getHead()->getFaceModel().removeFromScene(scene, pendingChanges);
getHead()->getFaceModel().addToScene(scene, pendingChanges);
}
scene->enqueuePendingChanges(pendingChanges);
Camera *camera = Application::getInstance()->getCamera();
const glm::vec3 cameraPos = camera->getPosition();
@ -1190,28 +1209,27 @@ void MyAvatar::renderBody(ViewFrustum* renderFrustum, RenderArgs::RenderMode ren
}*/
// Render the body's voxels and head
RenderArgs::RenderMode modelRenderMode = renderMode;
if (!postLighting) {
RenderArgs args;
args._viewFrustum = renderFrustum;
_skeletonModel.render(1.0f, modelRenderMode, &args);
renderAttachments(renderMode, &args);
// NOTE: we no longer call this here, because we've added all the model parts as renderable items in the scene
//_skeletonModel.render(renderArgs, 1.0f);
renderAttachments(renderArgs);
}
// Render head so long as the camera isn't inside it
if (shouldRenderHead(cameraPos, renderMode)) {
getHead()->render(1.0f, renderFrustum, modelRenderMode, postLighting);
if (shouldRenderHead(renderArgs, cameraPos)) {
getHead()->render(renderArgs, 1.0f, renderFrustum, postLighting);
}
if (postLighting) {
getHand()->render(true, modelRenderMode);
getHand()->render(renderArgs, true);
}
}
const float RENDER_HEAD_CUTOFF_DISTANCE = 0.50f;
bool MyAvatar::shouldRenderHead(const glm::vec3& cameraPosition, RenderArgs::RenderMode renderMode) const {
bool MyAvatar::shouldRenderHead(const RenderArgs* renderArgs, const glm::vec3& cameraPosition) const {
const Head* head = getHead();
return (renderMode != RenderArgs::NORMAL_RENDER_MODE) || (Application::getInstance()->getCamera()->getMode() != CAMERA_MODE_FIRST_PERSON) ||
return (renderArgs->_renderMode != RenderArgs::NORMAL_RENDER_MODE) || (Application::getInstance()->getCamera()->getMode() != CAMERA_MODE_FIRST_PERSON) ||
(glm::length(cameraPosition - head->getEyePosition()) > RENDER_HEAD_CUTOFF_DISTANCE * _scale);
}
@ -1474,7 +1492,9 @@ void MyAvatar::maybeUpdateBillboard() {
return;
}
}
QImage image = Application::getInstance()->renderAvatarBillboard();
gpu::Context context(new gpu::GLBackend());
RenderArgs renderArgs(&context);
QImage image = Application::getInstance()->renderAvatarBillboard(&renderArgs);
_billboard.clear();
QBuffer buffer(&_billboard);
buffer.open(QIODevice::WriteOnly);
@ -1551,21 +1571,25 @@ void MyAvatar::updateMotionBehavior() {
_feetTouchFloor = menu->isOptionChecked(MenuOption::ShiftHipsForIdleAnimations);
}
void MyAvatar::renderAttachments(RenderArgs::RenderMode renderMode, RenderArgs* args) {
if (Application::getInstance()->getCamera()->getMode() != CAMERA_MODE_FIRST_PERSON || renderMode == RenderArgs::MIRROR_RENDER_MODE) {
Avatar::renderAttachments(renderMode, args);
void MyAvatar::renderAttachments(RenderArgs* args) {
if (Application::getInstance()->getCamera()->getMode() != CAMERA_MODE_FIRST_PERSON || args->_renderMode == RenderArgs::MIRROR_RENDER_MODE) {
Avatar::renderAttachments(args);
return;
}
const FBXGeometry& geometry = _skeletonModel.getGeometry()->getFBXGeometry();
QString headJointName = (geometry.headJointIndex == -1) ? QString() : geometry.joints.at(geometry.headJointIndex).name;
// RenderArgs::RenderMode modelRenderMode = (renderMode == RenderArgs::SHADOW_RENDER_MODE) ?
// RenderArgs::SHADOW_RENDER_MODE : RenderArgs::DEFAULT_RENDER_MODE;
// FIX ME - attachments need to be added to scene too...
/*
for (int i = 0; i < _attachmentData.size(); i++) {
const QString& jointName = _attachmentData.at(i).jointName;
if (jointName != headJointName && jointName != "Head") {
_attachmentModels.at(i)->render(1.0f, renderMode, args);
_attachmentModels.at(i)->render(args, 1.0f);
}
}
*/
}
//Renders sixense laser pointers for UI selection with controllers
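
The renderBody() changes above replace the direct skeleton-model draw with the scene-graph fix-up pattern: if a model was registered before its resources finished loading, it is removed and re-added so the scene sees valid geometry, and both changes are queued as one batch. A minimal, self-contained sketch of that control flow follows; the Scene, PendingChanges, and Renderable types are hypothetical stand-ins, not the engine's real render::Scene API.

#include <iostream>
#include <vector>

struct PendingChanges {
    std::vector<int> removedItems;
    std::vector<int> addedItems;
};

struct Scene {
    void enqueuePendingChanges(const PendingChanges& changes) {
        std::cout << changes.removedItems.size() << " removals, "
                  << changes.addedItems.size() << " additions applied\n";
    }
};

struct Renderable {
    int itemID;
    bool wasReadyWhenAdded;
    bool needsFixupInScene() const { return !wasReadyWhenAdded; }
    void removeFromScene(Scene&, PendingChanges& changes) { changes.removedItems.push_back(itemID); }
    void addToScene(Scene&, PendingChanges& changes) {
        changes.addedItems.push_back(itemID);
        wasReadyWhenAdded = true;
    }
};

int main() {
    Scene scene;
    Renderable skeleton{1, false};   // added to the scene before its geometry finished loading
    Renderable face{2, true};        // was already ready, nothing to do
    PendingChanges pendingChanges;
    for (Renderable* model : {&skeleton, &face}) {
        if (model->needsFixupInScene()) {
            model->removeFromScene(scene, pendingChanges);
            model->addToScene(scene, pendingChanges);
        }
    }
    scene.enqueuePendingChanges(pendingChanges);   // one batched scene update
    return 0;
}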

View file

@ -37,9 +37,9 @@ public:
void simulate(float deltaTime);
void updateFromTrackers(float deltaTime);
void render(const glm::vec3& cameraPosition, RenderArgs::RenderMode renderMode = RenderArgs::NORMAL_RENDER_MODE, bool postLighting = false);
void renderBody(ViewFrustum* renderFrustum, RenderArgs::RenderMode renderMode, bool postLighting, float glowLevel = 0.0f);
bool shouldRenderHead(const glm::vec3& cameraPosition, RenderArgs::RenderMode renderMode) const;
virtual void render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, bool postLighting = false) override;
virtual void renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel = 0.0f) override;
virtual bool shouldRenderHead(const RenderArgs* renderArgs, const glm::vec3& cameraPosition) const override;
void renderDebugBodyPoints();
// setters
@ -108,7 +108,7 @@ public:
Q_INVOKABLE glm::vec3 getEyePosition() const { return getHead()->getEyePosition(); }
Q_INVOKABLE glm::vec3 getTargetAvatarPosition() const { return _targetAvatarPosition; }
QWeakPointer<AvatarData> getLookAtTargetAvatar() const { return _lookAtTargetAvatar; }
AvatarWeakPointer getLookAtTargetAvatar() const { return _lookAtTargetAvatar; }
void updateLookAtTargetAvatar();
void clearLookAtTargetAvatar();
@ -198,7 +198,7 @@ signals:
void transformChanged();
protected:
virtual void renderAttachments(RenderArgs::RenderMode renderMode, RenderArgs* args);
virtual void renderAttachments(RenderArgs* args);
private:
@ -226,7 +226,7 @@ private:
DynamicCharacterController _characterController;
QWeakPointer<AvatarData> _lookAtTargetAvatar;
AvatarWeakPointer _lookAtTargetAvatar;
glm::vec3 _targetAvatarPosition;
bool _shouldRender;
bool _billboardValid;

View file

@ -255,9 +255,12 @@ void SkeletonModel::applyPalmData(int jointIndex, PalmData& palm) {
}
void SkeletonModel::updateJointState(int index) {
if (index > _jointStates.size()) {
return; // bail
}
JointState& state = _jointStates[index];
const FBXJoint& joint = state.getFBXJoint();
if (joint.parentIndex != -1) {
if (joint.parentIndex != -1 && joint.parentIndex <= _jointStates.size()) {
const JointState& parentState = _jointStates.at(joint.parentIndex);
const FBXGeometry& geometry = _geometry->getFBXGeometry();
if (index == geometry.leanJointIndex) {

View file

@ -76,10 +76,10 @@ void CameraToolBox::render(int x, int y, bool boxed) {
glEnable(GL_TEXTURE_2D);
if (!_enabledTexture) {
_enabledTexture = DependencyManager::get<TextureCache>()->getImageTexture(PathUtils::resourcesPath() + "images/face.svg");
_enabledTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/face.svg");
}
if (!_mutedTexture) {
_mutedTexture = DependencyManager::get<TextureCache>()->getImageTexture(PathUtils::resourcesPath() + "images/face-mute.svg");
_mutedTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/face-mute.svg");
}
const int MUTE_ICON_SIZE = 24;

View file

@ -14,7 +14,18 @@
#include "DeviceTracker.h"
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-compare"
#endif
#include <glm/glm.hpp>
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#include <glm/gtc/quaternion.hpp>
#include <glm/gtc/matrix_transform.hpp>

View file

@ -465,7 +465,7 @@ void OculusManager::configureCamera(Camera& camera) {
}
// Displays everything for the Oculus; frame timing must be active
void OculusManager::display(QGLWidget * glCanvas, const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera) {
void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera) {
#ifdef DEBUG
// Ensure the frame counter always increments by exactly 1
@ -532,7 +532,7 @@ void OculusManager::display(QGLWidget * glCanvas, const glm::quat &bodyOrientati
//Bind our framebuffer object. If we are rendering the glow effect, we let the glow effect shader take care of it
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
DependencyManager::get<GlowEffect>()->prepare();
DependencyManager::get<GlowEffect>()->prepare(renderArgs);
} else {
auto primaryFBO = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(primaryFBO));
@ -613,7 +613,8 @@ void OculusManager::display(QGLWidget * glCanvas, const glm::quat &bodyOrientati
glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
qApp->displaySide(*_camera, false, RenderArgs::MONO);
renderArgs->_renderSide = RenderArgs::MONO;
qApp->displaySide(renderArgs, *_camera, false);
qApp->getApplicationOverlay().displayOverlayTextureHmd(*_camera);
});
_activeEye = ovrEye_Count;
@ -625,7 +626,7 @@ void OculusManager::display(QGLWidget * glCanvas, const glm::quat &bodyOrientati
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
//Full texture viewport for glow effect
glViewport(0, 0, _renderTargetSize.w, _renderTargetSize.h);
finalFbo = DependencyManager::get<GlowEffect>()->render();
finalFbo = DependencyManager::get<GlowEffect>()->render(renderArgs);
} else {
finalFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
glBindFramebuffer(GL_FRAMEBUFFER, 0);

View file

@ -21,6 +21,8 @@
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
#include "RenderArgs.h"
class Camera;
class PalmData;
class Text3DOverlay;
@ -62,7 +64,7 @@ public:
static void endFrameTiming();
static bool allowSwap();
static void configureCamera(Camera& camera);
static void display(QGLWidget * glCanvas, const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera);
static void display(QGLWidget * glCanvas, RenderArgs* renderArgs, const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera);
static void reset();
/// \param[out] yaw yaw in radians

View file

@ -82,7 +82,7 @@ void TV3DManager::configureCamera(Camera& whichCamera_, int screenWidth, int scr
glLoadIdentity();
}
void TV3DManager::display(Camera& whichCamera) {
void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
double nearZ = DEFAULT_NEAR_CLIP; // near clipping plane
double farZ = DEFAULT_FAR_CLIP; // far clipping plane
@ -95,7 +95,7 @@ void TV3DManager::display(Camera& whichCamera) {
int portalH = deviceSize.height();
DependencyManager::get<GlowEffect>()->prepare();
DependencyManager::get<GlowEffect>()->prepare(renderArgs);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
Camera eyeCamera;
@ -120,10 +120,9 @@ void TV3DManager::display(Camera& whichCamera) {
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
qApp->displaySide(eyeCamera, false);
#if 0
renderArgs->_renderSide = RenderArgs::MONO;
qApp->displaySide(renderArgs, eyeCamera, false);
qApp->getApplicationOverlay().displayOverlayTextureStereo(whichCamera, _aspect, fov);
#endif
_activeEye = NULL;
}, [&]{
// render right side view
@ -132,7 +131,7 @@ void TV3DManager::display(Camera& whichCamera) {
glPopMatrix();
glDisable(GL_SCISSOR_TEST);
auto finalFbo = DependencyManager::get<GlowEffect>()->render();
auto finalFbo = DependencyManager::get<GlowEffect>()->render(renderArgs);
auto fboSize = finalFbo->getSize();
// Get the ACTUAL device size for the BLIT
deviceSize = qApp->getDeviceSize();

View file

@ -32,7 +32,7 @@ public:
static void connect();
static bool isConnected();
static void configureCamera(Camera& camera, int screenWidth, int screenHeight);
static void display(Camera& whichCamera);
static void display(RenderArgs* renderArgs, Camera& whichCamera);
static void overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane);
private:

View file

@ -37,8 +37,8 @@ OctreeFade::OctreeFade(FadeDirection direction, float red, float green, float bl
opacity = (direction == FADE_OUT) ? FADE_OUT_START : FADE_IN_START;
}
void OctreeFade::render() {
DependencyManager::get<GlowEffect>()->begin();
void OctreeFade::render(RenderArgs* renderArgs) {
DependencyManager::get<GlowEffect>()->begin(renderArgs);
glDisable(GL_LIGHTING);
glPushMatrix();
@ -53,7 +53,7 @@ void OctreeFade::render() {
glEnable(GL_LIGHTING);
DependencyManager::get<GlowEffect>()->end();
DependencyManager::get<GlowEffect>()->end(renderArgs);
opacity *= (direction == FADE_OUT) ? FADE_OUT_STEP : FADE_IN_STEP;
}

View file

@ -39,7 +39,7 @@ public:
OctreeFade(FadeDirection direction = FADE_OUT, float red = DEFAULT_RED,
float green = DEFAULT_GREEN, float blue = DEFAULT_BLUE);
void render();
void render(RenderArgs* renderArgs);
bool isDone() const;
};

View file

@ -189,7 +189,7 @@ GLuint ApplicationOverlay::getOverlayTexture() {
}
// Renders the overlays either to a texture or to the screen
void ApplicationOverlay::renderOverlay() {
void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "ApplicationOverlay::displayOverlay()");
Overlays& overlays = qApp->getOverlays();
@ -228,7 +228,7 @@ void ApplicationOverlay::renderOverlay() {
// give external parties a chance to hook in
emit qApp->renderingOverlay();
overlays.renderHUD();
overlays.renderHUD(renderArgs);
renderPointers();
@ -297,9 +297,8 @@ void ApplicationOverlay::displayOverlayTexture() {
glLoadIdentity();
glDisable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
if (_alpha < 1.0) {
glEnable(GL_BLEND);
}
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glViewport(0, 0, qApp->getDeviceSize().width(), qApp->getDeviceSize().height());
static const glm::vec2 topLeft(-1, 1);
@ -307,9 +306,38 @@ void ApplicationOverlay::displayOverlayTexture() {
static const glm::vec2 texCoordTopLeft(0.0f, 1.0f);
static const glm::vec2 texCoordBottomRight(1.0f, 0.0f);
with_each_texture(_overlays.getTexture(), _newUiTexture, [&] {
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight,
DependencyManager::get<GeometryCache>()->renderQuad(
topLeft, bottomRight,
texCoordTopLeft, texCoordBottomRight,
glm::vec4(1.0f, 1.0f, 1.0f, _alpha));
});
if (!_crosshairTexture) {
_crosshairTexture = DependencyManager::get<TextureCache>()->
getImageTexture(PathUtils::resourcesPath() + "images/sixense-reticle.png");
}
//draw the mouse pointer
glm::vec2 canvasSize = qApp->getCanvasSize();
glm::vec2 mouseSize = 32.0f / canvasSize;
auto mouseTopLeft = topLeft * mouseSize;
auto mouseBottomRight = bottomRight * mouseSize;
vec2 mousePosition = vec2(qApp->getMouseX(), qApp->getMouseY());
mousePosition /= canvasSize;
mousePosition *= 2.0f;
mousePosition -= 1.0f;
mousePosition.y *= -1.0f;
glEnable(GL_TEXTURE_2D);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_crosshairTexture));
glm::vec4 reticleColor = { RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], 1.0f };
DependencyManager::get<GeometryCache>()->renderQuad(
mouseTopLeft + mousePosition, mouseBottomRight + mousePosition,
texCoordTopLeft, texCoordBottomRight,
reticleColor);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glDisable(GL_TEXTURE_2D);
} glPopMatrix();
}
@ -454,11 +482,14 @@ void ApplicationOverlay::displayOverlayTextureStereo(Camera& whichCamera, float
});
if (!_crosshairTexture) {
_crosshairTexture = DependencyManager::get<TextureCache>()->
getImageTexture(PathUtils::resourcesPath() + "images/sixense-reticle.png");
_crosshairTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() +
"images/sixense-reticle.png");
}
//draw the mouse pointer
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_crosshairTexture));
glm::vec2 canvasSize = qApp->getCanvasSize();
const float reticleSize = 40.0f / canvasSize.x * quadWidth;
@ -592,11 +623,12 @@ bool ApplicationOverlay::calculateRayUICollisionPoint(const glm::vec3& position,
void ApplicationOverlay::renderPointers() {
//lazily load crosshair texture
if (_crosshairTexture == 0) {
_crosshairTexture = DependencyManager::get<TextureCache>()->
getImageTexture(PathUtils::resourcesPath() + "images/sixense-reticle.png");
_crosshairTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/sixense-reticle.png");
}
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_crosshairTexture));
@ -760,8 +792,14 @@ void ApplicationOverlay::renderControllerPointers() {
}
void ApplicationOverlay::renderPointersOculus() {
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_crosshairTexture));
glDisable(GL_DEPTH_TEST);
glMatrixMode(GL_MODELVIEW);
//Controller Pointers
@ -786,6 +824,8 @@ void ApplicationOverlay::renderPointersOculus() {
}
glEnable(GL_DEPTH_TEST);
glDisable(GL_TEXTURE_2D);
glDisable(GL_BLEND);
}
//Renders a small magnification of the currently bound texture at the coordinates
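
The reticle drawing added above in displayOverlayTexture() maps the mouse position from window pixels into normalized device coordinates before rendering the quad. A stand-alone sketch of just that mapping follows; pixelsToNdc is an illustrative helper, not part of ApplicationOverlay.

#include <cstdio>

struct Vec2 { float x; float y; };

// Map a window pixel position (origin top-left, y down) to OpenGL normalized device
// coordinates (origin center, y up, range [-1, 1]): normalize by the canvas size,
// rescale to [-1, 1], then flip the y axis; the same steps as the reticle code above.
Vec2 pixelsToNdc(Vec2 mouse, Vec2 canvas) {
    Vec2 ndc{ mouse.x / canvas.x, mouse.y / canvas.y };  // now in [0, 1]
    ndc.x = ndc.x * 2.0f - 1.0f;
    ndc.y = -(ndc.y * 2.0f - 1.0f);                      // y flipped so up is positive
    return ndc;
}

int main() {
    // On a 1920x1080 canvas, a point a quarter of the way across and three quarters of
    // the way down lands at (-0.5, -0.5); the top-left corner would land at (-1, 1).
    Vec2 ndc = pixelsToNdc(Vec2{480.0f, 810.0f}, Vec2{1920.0f, 1080.0f});
    std::printf("%.2f %.2f\n", ndc.x, ndc.y);  // prints -0.50 -0.50
    return 0;
}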

View file

@ -35,9 +35,9 @@ public:
ApplicationOverlay();
~ApplicationOverlay();
void renderOverlay();
void renderOverlay(RenderArgs* renderArgs);
GLuint getOverlayTexture();
QPoint getPalmClickLocation(const PalmData *palm) const;
bool calculateRayUICollisionPoint(const glm::vec3& position, const glm::vec3& direction, glm::vec3& result) const;

View file

@ -1,6 +1,6 @@
//
//
// LoginDialog.cpp
// interface/src/ui
//
// Created by Bradley Austin Davis on 2015/04/14
// Copyright 2015 High Fidelity, Inc.
@ -8,16 +8,22 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "LoginDialog.h"
#include "DependencyManager.h"
#include "AccountManager.h"
#include "Menu.h"
#include <QDesktopServices>
#include <NetworkingConstants.h>
#include "AccountManager.h"
#include "DependencyManager.h"
#include "Menu.h"
HIFI_QML_DEF(LoginDialog)
LoginDialog::LoginDialog(QQuickItem *parent) : OffscreenQmlDialog(parent), _rootUrl(NetworkingConstants::METAVERSE_SERVER_URL.toString()) {
LoginDialog::LoginDialog(QQuickItem *parent) : OffscreenQmlDialog(parent),
_rootUrl(NetworkingConstants::METAVERSE_SERVER_URL.toString())
{
connect(&AccountManager::getInstance(), &AccountManager::loginComplete,
this, &LoginDialog::handleLoginCompleted);
connect(&AccountManager::getInstance(), &AccountManager::loginFailed,
@ -48,7 +54,7 @@ void LoginDialog::handleLoginCompleted(const QUrl&) {
}
void LoginDialog::handleLoginFailed() {
setStatusText("<font color = \"#267077\">Invalid username or password.< / font>");
setStatusText("Invalid username or password");
}
void LoginDialog::setStatusText(const QString& statusText) {
@ -68,10 +74,11 @@ QString LoginDialog::rootUrl() const {
void LoginDialog::login(const QString& username, const QString& password) {
qDebug() << "Attempting to login " << username;
setStatusText("Authenticating...");
setStatusText("Logging in...");
AccountManager::getInstance().requestAccessToken(username, password);
}
void LoginDialog::openUrl(const QString& url) {
qDebug() << url;
QDesktopServices::openUrl(url);
}

View file

@ -1,5 +1,6 @@
//
// LoginDialog.h
// interface/src/ui
//
// Created by Bradley Austin Davis on 2015/04/14
// Copyright 2015 High Fidelity, Inc.
@ -9,6 +10,7 @@
//
#pragma once
#ifndef hifi_LoginDialog_h
#define hifi_LoginDialog_h

View file

@ -34,11 +34,10 @@ RearMirrorTools::RearMirrorTools(QRect& bounds) :
_windowed(false),
_fullScreen(false)
{
auto textureCache = DependencyManager::get<TextureCache>();
_closeTexture = textureCache->getImageTexture(PathUtils::resourcesPath() + "images/close.svg");
_closeTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/close.svg");
_zoomHeadTexture = textureCache->getImageTexture(PathUtils::resourcesPath() + "images/plus.svg");
_zoomBodyTexture = textureCache->getImageTexture(PathUtils::resourcesPath() + "images/minus.svg");
_zoomHeadTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/plus.svg");
_zoomBodyTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/minus.svg");
_shrinkIconRect = QRect(ICON_PADDING, ICON_PADDING, ICON_SIZE, ICON_SIZE);
_closeIconRect = QRect(_bounds.left() + ICON_PADDING, _bounds.top() + ICON_PADDING, ICON_SIZE, ICON_SIZE);
@ -47,7 +46,7 @@ RearMirrorTools::RearMirrorTools(QRect& bounds) :
_headZoomIconRect = QRect(_bounds.left() + ICON_PADDING, _bounds.bottom() - ICON_PADDING - ICON_SIZE, ICON_SIZE, ICON_SIZE);
}
void RearMirrorTools::render(bool fullScreen, const QPoint & mousePosition) {
void RearMirrorTools::render(RenderArgs* renderArgs, bool fullScreen, const QPoint & mousePosition) {
if (fullScreen) {
_fullScreen = true;
displayIcon(QRect(QPoint(), qApp->getDeviceSize()), _shrinkIconRect, _closeTexture);

View file

@ -24,7 +24,7 @@ class RearMirrorTools : public QObject {
Q_OBJECT
public:
RearMirrorTools(QRect& bounds);
void render(bool fullScreen, const QPoint & mousePos);
void render(RenderArgs* renderArgs, bool fullScreen, const QPoint & mousePos);
bool mousePressEvent(int x, int y);
static Setting::Handle<int> rearViewZoomLevel;

View file

@ -467,31 +467,30 @@ void Stats::display(
horizontalOffset += 5;
// Model/Entity render details
EntityTreeRenderer* entities = Application::getInstance()->getEntities();
octreeStats.str("");
octreeStats << "Entity Items rendered: " << entities->getItemsRendered()
<< " / Out of view:" << entities->getItemsOutOfView()
<< " / Too small:" << entities->getItemsTooSmall();
octreeStats << "Triangles: " << _renderDetails._trianglesRendered
<< " / Quads:" << _renderDetails._quadsRendered
<< " / Material Switches:" << _renderDetails._materialSwitches;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
if (_expanded) {
octreeStats.str("");
octreeStats << " Meshes rendered: " << entities->getMeshesRendered()
<< " / Out of view:" << entities->getMeshesOutOfView()
<< " / Too small:" << entities->getMeshesTooSmall();
octreeStats << " Mesh Parts Rendered Opaque: " << _renderDetails._opaque._rendered
<< " / Translucent:" << _renderDetails._translucent._rendered;
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
octreeStats.str("");
octreeStats << " Triangles: " << entities->getTrianglesRendered()
<< " / Quads:" << entities->getQuadsRendered()
<< " / Material Switches:" << entities->getMaterialSwitches();
octreeStats << " Opaque considered: " << _renderDetails._opaque._considered
<< " / Out of view:" << _renderDetails._opaque._outOfView
<< " / Too small:" << _renderDetails._opaque._tooSmall;
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
octreeStats.str("");
octreeStats << " Mesh Parts Rendered Opaque: " << entities->getOpaqueMeshPartsRendered()
<< " / Translucent:" << entities->getTranslucentMeshPartsRendered();
octreeStats << " Translucent considered: " << _renderDetails._translucent._considered
<< " / Out of view:" << _renderDetails._translucent._outOfView
<< " / Too small:" << _renderDetails._translucent._tooSmall;
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
}

View file

@ -14,7 +14,7 @@
#include <QObject>
#include <NodeList.h>
#include <RenderArgs.h>
class Stats: public QObject {
Q_OBJECT
@ -35,6 +35,8 @@ public:
int inKbitsPerSecond, int outKbitsPerSecond, int voxelPacketsToProcess);
bool includeTimingRecord(const QString& name);
void setRenderDetails(const RenderDetails& details) { _renderDetails = details; }
private:
static Stats* _sharedInstance;
@ -51,6 +53,7 @@ private:
int _lastHorizontalOffset;
RenderDetails _renderDetails;
};
#endif // hifi_Stats_h

View file

@ -49,6 +49,11 @@ Base3DOverlay::Base3DOverlay(const Base3DOverlay* base3DOverlay) :
Base3DOverlay::~Base3DOverlay() {
}
// TODO: Implement accurate getBounds() implementations
AABox Base3DOverlay::getBounds() const {
return AABox(_position, glm::vec3(1.0f));
}
void Base3DOverlay::setProperties(const QScriptValue& properties) {
Overlay::setProperties(properties);

View file

@ -49,6 +49,8 @@ public:
void setDrawInFront(bool value) { _drawInFront = value; }
void setDrawOnHUD(bool value) { _drawOnHUD = value; }
virtual AABox getBounds() const;
virtual void setProperties(const QScriptValue& properties);
virtual QScriptValue getProperty(const QString& property);

View file

@ -126,7 +126,7 @@ void Cube3DOverlay::render(RenderArgs* args) {
} else {
glScalef(dimensions.x, dimensions.y, dimensions.z);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(1.0f, cubeColor);
DependencyManager::get<GeometryCache>()->renderWireCube(1.0f, cubeColor);
}
}
glPopMatrix();

View file

@ -45,10 +45,10 @@ void LocalModelsOverlay::render(RenderArgs* args) {
glPushMatrix(); {
Application* app = Application::getInstance();
Transform originalTransform = qApp->getViewTransform();
qApp->getViewTransform().postTranslate(_position);
_entityTreeRenderer->render();
qApp->setViewTransform(originalTransform);
glm::vec3 oldTranslation = app->getViewMatrixTranslation();
app->setViewMatrixTranslation(oldTranslation + _position);
_entityTreeRenderer->render(args);
Application::getInstance()->setViewMatrixTranslation(oldTranslation);
} glPopMatrix();
}
}

View file

@ -9,6 +9,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <Application.h>
#include <GlowEffect.h>
#include "ModelOverlay.h"
@ -54,11 +55,34 @@ void ModelOverlay::update(float deltatime) {
_isLoaded = _model.isActive();
}
bool ModelOverlay::addToScene(Overlay::Pointer overlay, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
Base3DOverlay::addToScene(overlay, scene, pendingChanges);
_model.addToScene(scene, pendingChanges);
return true;
}
void ModelOverlay::removeFromScene(Overlay::Pointer overlay, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
Base3DOverlay::removeFromScene(overlay, scene, pendingChanges);
_model.removeFromScene(scene, pendingChanges);
}
void ModelOverlay::render(RenderArgs* args) {
// check whether our model was ready when we added it to the scene; if it was not,
// fix it up in the scene now
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
if (_model.needsFixupInScene()) {
_model.removeFromScene(scene, pendingChanges);
_model.addToScene(scene, pendingChanges);
}
scene->enqueuePendingChanges(pendingChanges);
if (!_visible) {
return;
}
/*
if (_model.isActive()) {
if (_model.isRenderable()) {
float glowLevel = getGlowLevel();
@ -66,12 +90,13 @@ void ModelOverlay::render(RenderArgs* args) {
if (glowLevel > 0.0f) {
glower = new Glower(glowLevel);
}
_model.render(getAlpha(), RenderArgs::DEFAULT_RENDER_MODE, args);
_model.render(args, getAlpha());
if (glower) {
delete glower;
}
}
}
*/
}
void ModelOverlay::setProperties(const QScriptValue &properties) {

View file

@ -32,6 +32,9 @@ public:
virtual ModelOverlay* createClone() const;
virtual bool addToScene(Overlay::Pointer overlay, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);
virtual void removeFromScene(Overlay::Pointer overlay, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);
private:
Model _model;

View file

@ -16,6 +16,7 @@
#include <NumericalConstants.h>
Overlay::Overlay() :
_renderItemID(render::Item::INVALID_ITEM_ID),
_isLoaded(true),
_alpha(DEFAULT_ALPHA),
_glowLevel(0.0f),
@ -35,6 +36,7 @@ Overlay::Overlay() :
}
Overlay::Overlay(const Overlay* overlay) :
_renderItemID(render::Item::INVALID_ITEM_ID),
_isLoaded(overlay->_isLoaded),
_alpha(overlay->_alpha),
_glowLevel(overlay->_glowLevel),
@ -225,3 +227,16 @@ float Overlay::updatePulse() {
return _pulse;
}
bool Overlay::addToScene(Overlay::Pointer overlay, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
auto overlayPayload = new Overlay::Payload(overlay);
auto overlayPayloadPointer = Overlay::PayloadPointer(overlayPayload);
_renderItemID = scene->allocateID();
pendingChanges.resetItem(_renderItemID, overlayPayloadPointer);
return true;
}
void Overlay::removeFromScene(Overlay::Pointer overlay, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
pendingChanges.removeItem(_renderItemID);
}

View file

@ -21,6 +21,8 @@
#include <RegisteredMetaTypes.h>
#include <SharedUtil.h> // for xColor
#include <RenderArgs.h>
#include <AABox.h>
#include <render/Scene.h>
const xColor DEFAULT_OVERLAY_COLOR = { 255, 255, 255 };
const float DEFAULT_ALPHA = 0.7f;
@ -33,7 +35,12 @@ public:
NO_ANCHOR,
MY_AVATAR
};
typedef std::shared_ptr<Overlay> Pointer;
typedef render::Payload<Overlay> Payload;
typedef std::shared_ptr<render::Item::PayloadInterface> PayloadPointer;
Overlay();
Overlay(const Overlay* overlay);
~Overlay();
@ -41,6 +48,9 @@ public:
virtual void update(float deltatime) {}
virtual void render(RenderArgs* args) = 0;
virtual bool addToScene(Overlay::Pointer overlay, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);
virtual void removeFromScene(Overlay::Pointer overlay, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);
// getters
virtual bool is3D() const = 0;
bool isLoaded() { return _isLoaded; }
@ -50,7 +60,6 @@ public:
float getGlowLevel();
Anchor getAnchor() const { return _anchor; }
float getPulseMax() const { return _pulseMax; }
float getPulseMin() const { return _pulseMin; }
float getPulsePeriod() const { return _pulsePeriod; }
@ -81,9 +90,14 @@ public:
virtual Overlay* createClone() const = 0;
virtual QScriptValue getProperty(const QString& property);
render::ItemID getRenderItemID() const { return _renderItemID; }
void setRenderItemID(render::ItemID renderItemID) { _renderItemID = renderItemID; }
protected:
float updatePulse();
render::ItemID _renderItemID;
bool _isLoaded;
float _alpha;
float _glowLevel;
@ -106,5 +120,11 @@ protected:
QScriptEngine* _scriptEngine;
};
namespace render {
template <> const ItemKey payloadGetKey(const Overlay::Pointer& overlay);
template <> const Item::Bound payloadGetBound(const Overlay::Pointer& overlay);
template <> void payloadRender(const Overlay::Pointer& overlay, RenderArgs* args);
}
#endif // hifi_Overlay_h

View file

@ -16,6 +16,7 @@
#include <Application.h>
#include <avatar/AvatarManager.h>
#include <LODManager.h>
#include <render/Scene.h>
#include "BillboardOverlay.h"
#include "Circle3DOverlay.h"
@ -31,6 +32,7 @@
#include "TextOverlay.h"
#include "Text3DOverlay.h"
Overlays::Overlays() : _nextOverlayID(1) {
}
@ -38,23 +40,18 @@ Overlays::~Overlays() {
{
QWriteLocker lock(&_lock);
foreach(Overlay* thisOverlay, _overlaysHUD) {
delete thisOverlay;
QWriteLocker deleteLock(&_deleteLock);
foreach(Overlay::Pointer overlay, _overlaysHUD) {
_overlaysToDelete.push_back(overlay);
}
foreach(Overlay::Pointer overlay, _overlaysWorld) {
_overlaysToDelete.push_back(overlay);
}
_overlaysHUD.clear();
foreach(Overlay* thisOverlay, _overlaysWorld) {
delete thisOverlay;
}
_overlaysWorld.clear();
}
if (!_overlaysToDelete.isEmpty()) {
QWriteLocker lock(&_deleteLock);
do {
delete _overlaysToDelete.takeLast();
} while (!_overlaysToDelete.isEmpty());
}
cleanupOverlaysToDelete();
}
void Overlays::init() {
@ -65,100 +62,61 @@ void Overlays::update(float deltatime) {
{
QWriteLocker lock(&_lock);
foreach(Overlay* thisOverlay, _overlaysHUD) {
foreach(Overlay::Pointer thisOverlay, _overlaysHUD) {
thisOverlay->update(deltatime);
}
foreach(Overlay* thisOverlay, _overlaysWorld) {
foreach(Overlay::Pointer thisOverlay, _overlaysWorld) {
thisOverlay->update(deltatime);
}
}
if (!_overlaysToDelete.isEmpty()) {
QWriteLocker lock(&_deleteLock);
do {
delete _overlaysToDelete.takeLast();
} while (!_overlaysToDelete.isEmpty());
}
cleanupOverlaysToDelete();
}
void Overlays::renderHUD() {
void Overlays::cleanupOverlaysToDelete() {
if (!_overlaysToDelete.isEmpty()) {
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
{
QWriteLocker lock(&_deleteLock);
do {
Overlay::Pointer overlay = _overlaysToDelete.takeLast();
auto itemID = overlay->getRenderItemID();
if (itemID != render::Item::INVALID_ITEM_ID) {
overlay->removeFromScene(overlay, scene, pendingChanges);
}
} while (!_overlaysToDelete.isEmpty());
}
if (pendingChanges._removedItems.size() > 0) {
scene->enqueuePendingChanges(pendingChanges);
}
}
}
void Overlays::renderHUD(RenderArgs* renderArgs) {
QReadLocker lock(&_lock);
auto lodManager = DependencyManager::get<LODManager>();
RenderArgs args(NULL, Application::getInstance()->getViewFrustum(),
lodManager->getOctreeSizeScale(),
lodManager->getBoundaryLevelAdjust(),
RenderArgs::DEFAULT_RENDER_MODE, RenderArgs::MONO, RenderArgs::RENDER_DEBUG_NONE);
foreach(Overlay* thisOverlay, _overlaysHUD) {
foreach(Overlay::Pointer thisOverlay, _overlaysHUD) {
if (thisOverlay->is3D()) {
glEnable(GL_DEPTH_TEST);
glEnable(GL_LIGHTING);
thisOverlay->render(&args);
thisOverlay->render(renderArgs);
glDisable(GL_LIGHTING);
glDisable(GL_DEPTH_TEST);
} else{
thisOverlay->render(&args);
} else {
thisOverlay->render(renderArgs);
}
}
}
void Overlays::renderWorld(bool drawFront,
RenderArgs::RenderMode renderMode,
RenderArgs::RenderSide renderSide,
RenderArgs::DebugFlags renderDebugFlags) {
QReadLocker lock(&_lock);
if (_overlaysWorld.size() == 0) {
return;
}
bool myAvatarComputed = false;
MyAvatar* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::quat myAvatarRotation;
glm::vec3 myAvatarPosition(0.0f);
float angle = 0.0f;
glm::vec3 axis(0.0f, 1.0f, 0.0f);
float myAvatarScale = 1.0f;
auto lodManager = DependencyManager::get<LODManager>();
RenderArgs args(NULL, Application::getInstance()->getDisplayViewFrustum(),
lodManager->getOctreeSizeScale(),
lodManager->getBoundaryLevelAdjust(),
renderMode, renderSide, renderDebugFlags);
foreach(Overlay* thisOverlay, _overlaysWorld) {
Base3DOverlay* overlay3D = static_cast<Base3DOverlay*>(thisOverlay);
if (overlay3D->getDrawInFront() != drawFront) {
continue;
}
glPushMatrix();
switch (thisOverlay->getAnchor()) {
case Overlay::MY_AVATAR:
if (!myAvatarComputed) {
myAvatarRotation = avatar->getOrientation();
myAvatarPosition = avatar->getPosition();
angle = glm::degrees(glm::angle(myAvatarRotation));
axis = glm::axis(myAvatarRotation);
myAvatarScale = avatar->getScale();
myAvatarComputed = true;
}
glTranslatef(myAvatarPosition.x, myAvatarPosition.y, myAvatarPosition.z);
glRotatef(angle, axis.x, axis.y, axis.z);
glScalef(myAvatarScale, myAvatarScale, myAvatarScale);
break;
default:
break;
}
thisOverlay->render(&args);
glPopMatrix();
}
}
unsigned int Overlays::addOverlay(const QString& type, const QScriptValue& properties) {
unsigned int thisID = 0;
Overlay* thisOverlay = NULL;
@ -201,6 +159,7 @@ unsigned int Overlays::addOverlay(const QString& type, const QScriptValue& prope
}
unsigned int Overlays::addOverlay(Overlay* overlay) {
Overlay::Pointer overlayPointer(overlay);
overlay->init(_scriptEngine);
QWriteLocker lock(&_lock);
@ -209,19 +168,26 @@ unsigned int Overlays::addOverlay(Overlay* overlay) {
if (overlay->is3D()) {
Base3DOverlay* overlay3D = static_cast<Base3DOverlay*>(overlay);
if (overlay3D->getDrawOnHUD()) {
_overlaysHUD[thisID] = overlay;
_overlaysHUD[thisID] = overlayPointer;
} else {
_overlaysWorld[thisID] = overlay;
_overlaysWorld[thisID] = overlayPointer;
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
overlayPointer->addToScene(overlayPointer, scene, pendingChanges);
scene->enqueuePendingChanges(pendingChanges);
}
} else {
_overlaysHUD[thisID] = overlay;
_overlaysHUD[thisID] = overlayPointer;
}
return thisID;
}
unsigned int Overlays::cloneOverlay(unsigned int id) {
Overlay* thisOverlay = NULL;
Overlay::Pointer thisOverlay = NULL;
if (_overlaysHUD.contains(id)) {
thisOverlay = _overlaysHUD[id];
} else if (_overlaysWorld.contains(id)) {
@ -237,7 +203,7 @@ unsigned int Overlays::cloneOverlay(unsigned int id) {
bool Overlays::editOverlay(unsigned int id, const QScriptValue& properties) {
QWriteLocker lock(&_lock);
Overlay* thisOverlay = NULL;
Overlay::Pointer thisOverlay;
if (_overlaysHUD.contains(id)) {
thisOverlay = _overlaysHUD[id];
@ -247,7 +213,7 @@ bool Overlays::editOverlay(unsigned int id, const QScriptValue& properties) {
if (thisOverlay) {
if (thisOverlay->is3D()) {
Base3DOverlay* overlay3D = static_cast<Base3DOverlay*>(thisOverlay);
Base3DOverlay* overlay3D = static_cast<Base3DOverlay*>(thisOverlay.get());
bool oldDrawOnHUD = overlay3D->getDrawOnHUD();
thisOverlay->setProperties(properties);
@ -272,7 +238,7 @@ bool Overlays::editOverlay(unsigned int id, const QScriptValue& properties) {
}
void Overlays::deleteOverlay(unsigned int id) {
Overlay* overlayToDelete;
Overlay::Pointer overlayToDelete;
{
QWriteLocker lock(&_lock);
@ -296,7 +262,7 @@ unsigned int Overlays::getOverlayAtPoint(const glm::vec2& point) {
}
QReadLocker lock(&_lock);
QMapIterator<unsigned int, Overlay*> i(_overlaysHUD);
QMapIterator<unsigned int, Overlay::Pointer> i(_overlaysHUD);
i.toBack();
const float LARGE_NEGATIVE_FLOAT = -9999999;
@ -309,14 +275,14 @@ unsigned int Overlays::getOverlayAtPoint(const glm::vec2& point) {
i.previous();
unsigned int thisID = i.key();
if (i.value()->is3D()) {
Base3DOverlay* thisOverlay = static_cast<Base3DOverlay*>(i.value());
Base3DOverlay* thisOverlay = static_cast<Base3DOverlay*>(i.value().get());
if (!thisOverlay->getIgnoreRayIntersection()) {
if (thisOverlay->findRayIntersection(origin, direction, distance, thisFace)) {
return thisID;
}
}
} else {
Overlay2D* thisOverlay = static_cast<Overlay2D*>(i.value());
Overlay2D* thisOverlay = static_cast<Overlay2D*>(i.value().get());
if (thisOverlay->getVisible() && thisOverlay->isLoaded() &&
thisOverlay->getBounds().contains(pointCopy.x, pointCopy.y, false)) {
return thisID;
@ -329,7 +295,7 @@ unsigned int Overlays::getOverlayAtPoint(const glm::vec2& point) {
OverlayPropertyResult Overlays::getProperty(unsigned int id, const QString& property) {
OverlayPropertyResult result;
Overlay* thisOverlay = NULL;
Overlay::Pointer thisOverlay;
QReadLocker lock(&_lock);
if (_overlaysHUD.contains(id)) {
thisOverlay = _overlaysHUD[id];
@ -376,12 +342,12 @@ RayToOverlayIntersectionResult Overlays::findRayIntersection(const PickRay& ray)
float bestDistance = std::numeric_limits<float>::max();
bool bestIsFront = false;
RayToOverlayIntersectionResult result;
QMapIterator<unsigned int, Overlay*> i(_overlaysWorld);
QMapIterator<unsigned int, Overlay::Pointer> i(_overlaysWorld);
i.toBack();
while (i.hasPrevious()) {
i.previous();
unsigned int thisID = i.key();
Base3DOverlay* thisOverlay = static_cast<Base3DOverlay*>(i.value());
Base3DOverlay* thisOverlay = static_cast<Base3DOverlay*>(i.value().get());
if (thisOverlay->getVisible() && !thisOverlay->getIgnoreRayIntersection() && thisOverlay->isLoaded()) {
float thisDistance;
BoxFace thisFace;
@ -483,7 +449,7 @@ void RayToOverlayIntersectionResultFromScriptValue(const QScriptValue& object, R
bool Overlays::isLoaded(unsigned int id) {
QReadLocker lock(&_lock);
Overlay* thisOverlay = NULL;
Overlay::Pointer thisOverlay = NULL;
if (_overlaysHUD.contains(id)) {
thisOverlay = _overlaysHUD[id];
} else if (_overlaysWorld.contains(id)) {
@ -495,16 +461,16 @@ bool Overlays::isLoaded(unsigned int id) {
}
QSizeF Overlays::textSize(unsigned int id, const QString& text) const {
Overlay* thisOverlay = _overlaysHUD[id];
Overlay::Pointer thisOverlay = _overlaysHUD[id];
if (thisOverlay) {
if (typeid(*thisOverlay) == typeid(TextOverlay)) {
return static_cast<TextOverlay*>(thisOverlay)->textSize(text);
return static_cast<TextOverlay*>(thisOverlay.get())->textSize(text);
}
} else {
thisOverlay = _overlaysWorld[id];
if (thisOverlay) {
if (typeid(*thisOverlay) == typeid(Text3DOverlay)) {
return static_cast<Text3DOverlay*>(thisOverlay)->textSize(text);
return static_cast<Text3DOverlay*>(thisOverlay.get())->textSize(text);
}
}
}
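
The Overlays changes above move the HUD and world maps to Overlay::Pointer (a shared_ptr) and route deletion through _overlaysToDelete plus cleanupOverlaysToDelete(), so scene removal and teardown happen in one batched place instead of at arbitrary call sites. A compressed, self-contained sketch of that ownership scheme follows; Registry and Item are illustrative names, not the real classes.

#include <iostream>
#include <map>
#include <memory>
#include <mutex>
#include <vector>

struct Item {
    explicit Item(int id) : id(id) {}
    ~Item() { std::cout << "item " << id << " destroyed\n"; }
    int id;
};

class Registry {
public:
    int add(std::shared_ptr<Item> item) {
        std::lock_guard<std::mutex> lock(_lock);
        int id = _nextID++;
        _items[id] = std::move(item);
        return id;
    }
    void remove(int id) {
        std::lock_guard<std::mutex> lock(_lock);
        auto it = _items.find(id);
        if (it == _items.end()) {
            return;
        }
        {
            // park the pointer on the delete list; actual teardown happens in cleanup()
            std::lock_guard<std::mutex> deleteLock(_deleteLock);
            _toDelete.push_back(it->second);
        }
        _items.erase(it);
    }
    void cleanup() {
        // drop the last references in one known place (the real code also issues the
        // scene's PendingChanges removals here before letting the overlays go)
        std::lock_guard<std::mutex> deleteLock(_deleteLock);
        _toDelete.clear();
    }
private:
    std::mutex _lock;
    std::mutex _deleteLock;
    std::map<int, std::shared_ptr<Item>> _items;
    std::vector<std::shared_ptr<Item>> _toDelete;
    int _nextID = 1;
};

int main() {
    Registry overlays;
    int id = overlays.add(std::make_shared<Item>(7));
    overlays.remove(id);   // nothing destroyed yet
    overlays.cleanup();    // prints: item 7 destroyed
    return 0;
}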

View file

@ -53,10 +53,7 @@ public:
~Overlays();
void init();
void update(float deltatime);
void renderWorld(bool drawFront, RenderArgs::RenderMode renderMode = RenderArgs::DEFAULT_RENDER_MODE,
RenderArgs::RenderSide renderSide = RenderArgs::MONO,
RenderArgs::DebugFlags renderDebugFlags = RenderArgs::RENDER_DEBUG_NONE);
void renderHUD();
void renderHUD(RenderArgs* renderArgs);
public slots:
/// adds an overlay with the specific properties
@ -92,9 +89,10 @@ public slots:
QSizeF textSize(unsigned int id, const QString& text) const;
private:
QMap<unsigned int, Overlay*> _overlaysHUD;
QMap<unsigned int, Overlay*> _overlaysWorld;
QList<Overlay*> _overlaysToDelete;
void cleanupOverlaysToDelete();
QMap<unsigned int, Overlay::Pointer> _overlaysHUD;
QMap<unsigned int, Overlay::Pointer> _overlaysWorld;
QList<Overlay::Pointer> _overlaysToDelete;
unsigned int _nextOverlayID;
QReadWriteLock _lock;
QReadWriteLock _deleteLock;

View file

@ -0,0 +1,75 @@
//
// OverlaysPayload.cpp
// interface/src/ui/overlays
//
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QScriptValueIterator>
#include <limits>
#include <typeinfo>
#include <Application.h>
#include <avatar/AvatarManager.h>
#include <LODManager.h>
#include <render/Scene.h>
#include "BillboardOverlay.h"
#include "Circle3DOverlay.h"
#include "Cube3DOverlay.h"
#include "ImageOverlay.h"
#include "Line3DOverlay.h"
#include "LocalModelsOverlay.h"
#include "ModelOverlay.h"
#include "Overlays.h"
#include "Rectangle3DOverlay.h"
#include "Sphere3DOverlay.h"
#include "Grid3DOverlay.h"
#include "TextOverlay.h"
#include "Text3DOverlay.h"
namespace render {
template <> const ItemKey payloadGetKey(const Overlay::Pointer& overlay) {
if (overlay->is3D() && !static_cast<Base3DOverlay*>(overlay.get())->getDrawOnHUD()) {
if (static_cast<Base3DOverlay*>(overlay.get())->getDrawInFront()) {
return ItemKey::Builder().withTypeShape().withNoDepthSort().build();
} else {
return ItemKey::Builder::opaqueShape();
}
} else {
return ItemKey::Builder().withTypeShape().withViewSpace().build();
}
}
template <> const Item::Bound payloadGetBound(const Overlay::Pointer& overlay) {
if (overlay->is3D()) {
return static_cast<Base3DOverlay*>(overlay.get())->getBounds();
} else {
QRect bounds = static_cast<Overlay2D*>(overlay.get())->getBounds();
return AABox(glm::vec3(bounds.x(), bounds.y(), 0.0f), glm::vec3(bounds.width(), bounds.height(), 0.1f));
}
}
template <> void payloadRender(const Overlay::Pointer& overlay, RenderArgs* args) {
if (args) {
glPushMatrix();
if (overlay->getAnchor() == Overlay::MY_AVATAR) {
MyAvatar* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::quat myAvatarRotation = avatar->getOrientation();
glm::vec3 myAvatarPosition = avatar->getPosition();
float angle = glm::degrees(glm::angle(myAvatarRotation));
glm::vec3 axis = glm::axis(myAvatarRotation);
float myAvatarScale = avatar->getScale();
glTranslatef(myAvatarPosition.x, myAvatarPosition.y, myAvatarPosition.z);
glRotatef(angle, axis.x, axis.y, axis.z);
glScalef(myAvatarScale, myAvatarScale, myAvatarScale);
}
overlay->render(args);
glPopMatrix();
}
}
}
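
OverlaysPayload.cpp above hooks overlays into the render scene by specializing the payloadGetKey / payloadGetBound / payloadRender function templates for Overlay::Pointer. The sketch below shows the same mechanism in miniature: generic template declarations, one explicit specialization per payload type, and a type-erased item that dispatches to them. DemoOverlay, Payload, and Item here are illustrative, not the real render library types.

#include <iostream>
#include <memory>
#include <string>

// Generic declarations the "scene" compiles against; each payload type provides
// its own explicit specializations.
template <typename T> std::string payloadKey(const std::shared_ptr<T>& payload);
template <typename T> void payloadRender(const std::shared_ptr<T>& payload);

struct DemoOverlay {
    bool drawOnHUD = false;
};

template <> std::string payloadKey(const std::shared_ptr<DemoOverlay>& overlay) {
    return overlay->drawOnHUD ? "view-space shape" : "opaque shape";
}
template <> void payloadRender(const std::shared_ptr<DemoOverlay>& overlay) {
    std::cout << "rendering overlay as " << payloadKey(overlay) << "\n";
}

// Type-erased wrapper so payloads of different types can live in one scene container.
struct Item {
    virtual ~Item() = default;
    virtual std::string key() const = 0;
    virtual void render() const = 0;
};

template <typename T>
struct Payload : public Item {
    explicit Payload(std::shared_ptr<T> data) : data(std::move(data)) {}
    std::string key() const override { return payloadKey(data); }
    void render() const override { payloadRender(data); }
    std::shared_ptr<T> data;
};

int main() {
    std::unique_ptr<Item> item(new Payload<DemoOverlay>(std::make_shared<DemoOverlay>()));
    item->render();  // prints: rendering overlay as opaque shape
    return 0;
}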

View file

@ -11,15 +11,12 @@
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include <TextRenderer.h>
#include "Application.h"
#include "Text3DOverlay.h"
const xColor DEFAULT_BACKGROUND_COLOR = { 0, 0, 0 };
const float DEFAULT_BACKGROUND_ALPHA = 0.7f;
const float DEFAULT_MARGIN = 0.1f;
const int FIXED_FONT_POINT_SIZE = 40;
const int FIXED_FONT_SCALING_RATIO = FIXED_FONT_POINT_SIZE * 40.0f; // this is a ratio determined through experimentation
const float LINE_SCALE_RATIO = 1.2f;
@ -50,6 +47,7 @@ Text3DOverlay::Text3DOverlay(const Text3DOverlay* text3DOverlay) :
}
Text3DOverlay::~Text3DOverlay() {
delete _textRenderer;
}
xColor Text3DOverlay::getBackgroundColor() {
@ -106,8 +104,7 @@ void Text3DOverlay::render(RenderArgs* args) {
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, quadColor);
// Same font properties as textSize()
TextRenderer* textRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY, FIXED_FONT_POINT_SIZE);
float maxHeight = (float)textRenderer->computeExtent("Xy").y * LINE_SCALE_RATIO;
float maxHeight = (float)_textRenderer->computeExtent("Xy").y * LINE_SCALE_RATIO;
float scaleFactor = (maxHeight / FIXED_FONT_SCALING_RATIO) * _lineHeight;
@ -124,7 +121,7 @@ void Text3DOverlay::render(RenderArgs* args) {
enableClipPlane(GL_CLIP_PLANE3, 0.0f, 1.0f, 0.0f, -clipMinimum.y);
glm::vec4 textColor = { _color.red / MAX_COLOR, _color.green / MAX_COLOR, _color.blue / MAX_COLOR, getAlpha() };
textRenderer->draw(0, 0, _text, textColor);
_textRenderer->draw(0, 0, _text, textColor);
glDisable(GL_CLIP_PLANE0);
glDisable(GL_CLIP_PLANE1);
@ -228,10 +225,9 @@ Text3DOverlay* Text3DOverlay::createClone() const {
}
QSizeF Text3DOverlay::textSize(const QString& text) const {
auto textRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY, FIXED_FONT_POINT_SIZE);
auto extents = textRenderer->computeExtent(text);
auto extents = _textRenderer->computeExtent(text);
float maxHeight = (float)textRenderer->computeExtent("Xy").y * LINE_SCALE_RATIO;
float maxHeight = (float)_textRenderer->computeExtent("Xy").y * LINE_SCALE_RATIO;
float pointToWorldScale = (maxHeight / FIXED_FONT_SCALING_RATIO) * _lineHeight;
return QSizeF(extents.x, extents.y) * pointToWorldScale;

View file

@ -17,8 +17,12 @@
#include <QString>
#include <RenderArgs.h>
#include <TextRenderer.h>
#include "Planar3DOverlay.h"
const int FIXED_FONT_POINT_SIZE = 40;
class Text3DOverlay : public Planar3DOverlay {
Q_OBJECT
@ -58,6 +62,8 @@ public:
private:
void enableClipPlane(GLenum plane, float x, float y, float z, float w);
TextRenderer* _textRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY, FIXED_FONT_POINT_SIZE);
QString _text;
xColor _backgroundColor;
float _backgroundAlpha;

View file

@ -14,7 +14,6 @@
#include <DependencyManager.h>
#include <GeometryCache.h>
#include <SharedUtil.h>
#include <TextRenderer.h>
#include "TextOverlay.h"
@ -25,6 +24,7 @@ TextOverlay::TextOverlay() :
_topMargin(DEFAULT_MARGIN),
_fontSize(DEFAULT_FONTSIZE)
{
_textRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY, _fontSize, DEFAULT_FONT_WEIGHT);
}
TextOverlay::TextOverlay(const TextOverlay* textOverlay) :
@ -36,9 +36,11 @@ TextOverlay::TextOverlay(const TextOverlay* textOverlay) :
_topMargin(textOverlay->_topMargin),
_fontSize(textOverlay->_fontSize)
{
_textRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY, _fontSize, DEFAULT_FONT_WEIGHT);
}
TextOverlay::~TextOverlay() {
delete _textRenderer;
}
xColor TextOverlay::getBackgroundColor() {
@ -79,9 +81,6 @@ void TextOverlay::render(RenderArgs* args) {
glm::vec2 topLeft(left, top);
glm::vec2 bottomRight(right, bottom);
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, quadColor);
// Same font properties as textSize()
TextRenderer* textRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY, _fontSize, DEFAULT_FONT_WEIGHT);
const int leftAdjust = -1; // required to make text render relative to left edge of bounds
const int topAdjust = -2; // required to make text render relative to top edge of bounds
@ -90,7 +89,7 @@ void TextOverlay::render(RenderArgs* args) {
float alpha = getAlpha();
glm::vec4 textColor = {_color.red / MAX_COLOR, _color.green / MAX_COLOR, _color.blue / MAX_COLOR, alpha };
textRenderer->draw(x, y, _text, textColor);
_textRenderer->draw(x, y, _text, textColor);
}
void TextOverlay::setProperties(const QScriptValue& properties) {
@ -163,8 +162,7 @@ QScriptValue TextOverlay::getProperty(const QString& property) {
}
QSizeF TextOverlay::textSize(const QString& text) const {
auto textRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY, _fontSize, DEFAULT_FONT_WEIGHT);
auto extents = textRenderer->computeExtent(text);
auto extents = _textRenderer->computeExtent(text);
return QSizeF(extents.x, extents.y);
}

View file

@ -19,6 +19,7 @@
#include <QString>
#include <SharedUtil.h>
#include <TextRenderer.h>
#include "Overlay.h"
#include "Overlay2D.h"
@ -58,6 +59,9 @@ public:
QSizeF textSize(const QString& text) const; // Pixels
private:
TextRenderer* _textRenderer = nullptr;
QString _text;
xColor _backgroundColor;
float _backgroundAlpha;

View file

@ -153,6 +153,7 @@ void AudioClient::reset() {
}
void AudioClient::audioMixerKilled() {
_hasReceivedFirstPacket = false;
_outgoingAvatarAudioSequenceNumber = 0;
_stats.reset();
}
@ -481,6 +482,7 @@ void AudioClient::start() {
qCDebug(audioclient) << "Unable to set up audio input because of a problem with input format.";
qCDebug(audioclient) << "The closest format available is" << inputDeviceInfo.nearestFormat(_desiredInputFormat);
}
if (!outputFormatSupported) {
qCDebug(audioclient) << "Unable to set up audio output because of a problem with output format.";
qCDebug(audioclient) << "The closest format available is" << outputDeviceInfo.nearestFormat(_desiredOutputFormat);
@ -489,6 +491,7 @@ void AudioClient::start() {
if (_audioInput) {
_inputFrameBuffer.initialize( _inputFormat.channelCount(), _audioInput->bufferSize() * 8 );
}
_inputGain.initialize();
_sourceGain.initialize();
_noiseSource.initialize();
@ -926,6 +929,14 @@ void AudioClient::addReceivedAudioToStream(const QByteArray& audioByteArray) {
DependencyManager::get<NodeList>()->flagTimeForConnectionStep(LimitedNodeList::ConnectionStep::ReceiveFirstAudioPacket);
if (_audioOutput) {
if (!_hasReceivedFirstPacket) {
_hasReceivedFirstPacket = true;
// have the audio scripting interface emit a signal to say we just connected to the mixer
emit receivedFirstPacket();
}
// Audio output must exist and be correctly set up if we're going to process received audio
_receivedAudioStream.parseData(audioByteArray);
}
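
The AudioClient change above adds a once-per-session notification: the first received packet flips _hasReceivedFirstPacket and emits receivedFirstPacket(), and audioMixerKilled() clears the flag so a new mixer connection notifies again. A small Qt-free sketch of that guard follows; the Signal type is a stand-in for the Qt signal/slot plumbing.

#include <functional>
#include <iostream>
#include <vector>

struct Signal {
    std::vector<std::function<void()>> slots;
    void connect(std::function<void()> slot) { slots.push_back(std::move(slot)); }
    void emitSignal() { for (auto& slot : slots) { slot(); } }
};

struct AudioClientSketch {
    Signal receivedFirstPacket;
    bool hasReceivedFirstPacket = false;

    void onPacket() {
        if (!hasReceivedFirstPacket) {
            hasReceivedFirstPacket = true;      // only the first packet of a session notifies
            receivedFirstPacket.emitSignal();
        }
    }
    void audioMixerKilled() { hasReceivedFirstPacket = false; }  // re-arm for the next mixer
};

int main() {
    AudioClientSketch client;
    client.receivedFirstPacket.connect([] { std::cout << "connected to a mixer\n"; });
    client.onPacket();           // prints once
    client.onPacket();           // silent
    client.audioMixerKilled();
    client.onPacket();           // prints again for the new session
    return 0;
}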

View file

@ -37,6 +37,7 @@
#include <MixedProcessedAudioStream.h>
#include <RingBufferHistory.h>
#include <SettingHandle.h>
#include <Sound.h>
#include <StDev.h>
#include "AudioIOStats.h"
@ -57,7 +58,7 @@ static const int NUM_AUDIO_CHANNELS = 2;
static const int DEFAULT_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES = 3;
static const int MIN_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES = 1;
static const int MAX_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES = 20;
#if defined(Q_OS_ANDROID) || defined(Q_OS_WIN)
#if defined(Q_OS_ANDROID) || defined(Q_OS_WIN)
static const int DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_ENABLED = false;
#else
static const int DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_ENABLED = true;
@ -88,14 +89,14 @@ public:
void stop() { close(); }
qint64 readData(char * data, qint64 maxSize);
qint64 writeData(const char * data, qint64 maxSize) { return 0; }
int getRecentUnfulfilledReads() { int unfulfilledReads = _unfulfilledReads; _unfulfilledReads = 0; return unfulfilledReads; }
private:
MixedProcessedAudioStream& _receivedAudioStream;
AudioClient* _audio;
int _unfulfilledReads;
};
const MixedProcessedAudioStream& getReceivedAudioStream() const { return _receivedAudioStream; }
MixedProcessedAudioStream& getReceivedAudioStream() { return _receivedAudioStream; }
@ -105,30 +106,30 @@ public:
float getAudioAverageInputLoudness() const { return _lastInputLoudness; }
int getDesiredJitterBufferFrames() const { return _receivedAudioStream.getDesiredJitterBufferFrames(); }
bool isMuted() { return _muted; }
const AudioIOStats& getStats() const { return _stats; }
float getInputRingBufferMsecsAvailable() const;
float getAudioOutputMsecsUnplayed() const;
int getOutputBufferSize() { return _outputBufferSizeFrames.get(); }
bool getOutputStarveDetectionEnabled() { return _outputStarveDetectionEnabled.get(); }
void setOutputStarveDetectionEnabled(bool enabled) { _outputStarveDetectionEnabled.set(enabled); }
int getOutputStarveDetectionPeriod() { return _outputStarveDetectionPeriodMsec.get(); }
void setOutputStarveDetectionPeriod(int msecs) { _outputStarveDetectionPeriodMsec.set(msecs); }
int getOutputStarveDetectionThreshold() { return _outputStarveDetectionThreshold.get(); }
void setOutputStarveDetectionThreshold(int threshold) { _outputStarveDetectionThreshold.set(threshold); }
void setPositionGetter(AudioPositionGetter positionGetter) { _positionGetter = positionGetter; }
void setOrientationGetter(AudioOrientationGetter orientationGetter) { _orientationGetter = orientationGetter; }
static const float CALLBACK_ACCELERATOR_RATIO;
public slots:
void start();
void stop();
@ -140,7 +141,7 @@ public slots:
void reset();
void audioMixerKilled();
void toggleMute();
virtual void enableAudioSourceInject(bool enable);
virtual void selectAudioSourcePinkNoise();
virtual void selectAudioSourceSine440();
@ -148,10 +149,10 @@ public slots:
virtual void setIsStereoInput(bool stereo);
void toggleAudioNoiseReduction() { _isNoiseGateEnabled = !_isNoiseGateEnabled; }
void toggleLocalEcho() { _shouldEchoLocally = !_shouldEchoLocally; }
void toggleServerEcho() { _shouldEchoToServer = !_shouldEchoToServer; }
void processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer);
void sendMuteEnvironmentPacket();
@ -172,10 +173,10 @@ public slots:
void setReverbOptions(const AudioEffectOptions* options);
void outputNotify();
void loadSettings();
void saveSettings();
signals:
bool muteToggled();
void inputReceived(const QByteArray& inputSamples);
@ -184,14 +185,16 @@ signals:
void deviceChanged();
void receivedFirstPacket();
protected:
AudioClient();
~AudioClient();
virtual void customDeleter() {
deleteLater();
}
private:
void outputFormatChanged();
@ -216,35 +219,35 @@ private:
QString _inputAudioDeviceName;
QString _outputAudioDeviceName;
quint64 _outputStarveDetectionStartTimeMsec;
int _outputStarveDetectionCount;
Setting::Handle<int> _outputBufferSizeFrames;
Setting::Handle<bool> _outputStarveDetectionEnabled;
Setting::Handle<int> _outputStarveDetectionPeriodMsec;
// Maximum number of starves per _outputStarveDetectionPeriod before increasing buffer size
Setting::Handle<int> _outputStarveDetectionThreshold;
StDev _stdev;
QElapsedTimer _timeSinceLastReceived;
float _averagedLatency;
float _lastInputLoudness;
float _timeSinceLastClip;
int _totalInputAudioSamples;
bool _muted;
bool _shouldEchoLocally;
bool _shouldEchoToServer;
bool _isNoiseGateEnabled;
bool _audioSourceInjectEnabled;
bool _reverb;
AudioEffectOptions _scriptReverbOptions;
AudioEffectOptions _zoneReverbOptions;
AudioEffectOptions* _reverbOptions;
ty_gverb* _gverb;
// possible soxr streams needed for resample
soxr* _inputToNetworkResampler;
soxr* _networkToOutputResampler;
@ -268,17 +271,17 @@ private:
// Input framebuffer
AudioBufferFloat32 _inputFrameBuffer;
// Input gain
AudioGain _inputGain;
// Post tone/pink noise generator gain
AudioGain _sourceGain;
// Pink noise source
bool _noiseSourceEnabled;
AudioSourcePinkNoise _noiseSource;
// Tone source
bool _toneSourceEnabled;
AudioSourceTone _toneSource;
@ -286,17 +289,19 @@ private:
quint16 _outgoingAvatarAudioSequenceNumber;
AudioOutputIODevice _audioOutputIODevice;
AudioIOStats _stats;
AudioNoiseGate _inputGate;
AudioPositionGetter _positionGetter;
AudioOrientationGetter _orientationGetter;
QVector<QString> _inputDevices;
QVector<QString> _outputDevices;
void checkDevices();
bool _hasReceivedFirstPacket = false;
};

View file

@ -26,7 +26,7 @@
AudioInjector::AudioInjector(QObject* parent) :
QObject(parent)
{
}
AudioInjector::AudioInjector(Sound* sound, const AudioInjectorOptions& injectorOptions) :
@ -39,24 +39,25 @@ AudioInjector::AudioInjector(const QByteArray& audioData, const AudioInjectorOpt
_audioData(audioData),
_options(injectorOptions)
{
}
void AudioInjector::setIsFinished(bool isFinished) {
_isFinished = isFinished;
// Reset _shouldStop in all paths, regardless of the isFinished argument: restart() passes false to prepare for a new play, and injectToMixer() needs _shouldStop cleared.
_shouldStop = false;
if (_isFinished) {
emit finished();
if (_localBuffer) {
_localBuffer->stop();
_localBuffer->deleteLater();
_localBuffer = NULL;
}
_isStarted = false;
_shouldStop = false;
if (_shouldDeleteAfterFinish) {
// we've been asked to delete after finishing, trigger a queued deleteLater here
qCDebug(audio) << "AudioInjector triggering delete from setIsFinished";
@ -67,18 +68,19 @@ void AudioInjector::setIsFinished(bool isFinished) {
void AudioInjector::injectAudio() {
if (!_isStarted) {
_isStarted = true;
// check if we need to offset the sound by some number of seconds
if (_options.secondOffset > 0.0f) {
// convert the offset into a number of bytes
int byteOffset = (int) floorf(AudioConstants::SAMPLE_RATE * _options.secondOffset * (_options.stereo ? 2.0f : 1.0f));
byteOffset *= sizeof(int16_t);
_currentSendPosition = byteOffset;
} else {
_currentSendPosition = 0;
}
if (_options.localOnly) {
injectLocally();
} else {
@ -86,12 +88,20 @@ void AudioInjector::injectAudio() {
}
} else {
qCDebug(audio) << "AudioInjector::injectAudio called but already started.";
}
}
}
void AudioInjector::restart() {
qCDebug(audio) << "Restarting an AudioInjector by stopping and starting over.";
stop();
connect(this, &AudioInjector::finished, this, &AudioInjector::restartPortionAfterFinished);
if (!_isStarted || _isFinished) {
emit finished();
} else {
stop();
}
}
void AudioInjector::restartPortionAfterFinished() {
disconnect(this, &AudioInjector::finished, this, &AudioInjector::restartPortionAfterFinished);
setIsFinished(false);
QMetaObject::invokeMethod(this, "injectAudio", Qt::QueuedConnection);
}
@ -100,37 +110,37 @@ void AudioInjector::injectLocally() {
bool success = false;
if (_localAudioInterface) {
if (_audioData.size() > 0) {
_localBuffer = new AudioInjectorLocalBuffer(_audioData, this);
_localBuffer->open(QIODevice::ReadOnly);
_localBuffer->setShouldLoop(_options.loop);
_localBuffer->setVolume(_options.volume);
// give our current send position to the local buffer
_localBuffer->setCurrentOffset(_currentSendPosition);
success = _localAudioInterface->outputLocalInjector(_options.stereo, this);
// if we're not looping and the buffer tells us it is empty then emit finished
connect(_localBuffer, &AudioInjectorLocalBuffer::bufferEmpty, this, &AudioInjector::stop);
if (!success) {
qCDebug(audio) << "AudioInjector::injectLocally could not output locally via _localAudioInterface";
}
} else {
qCDebug(audio) << "AudioInjector::injectLocally called without any data in Sound QByteArray";
}
} else {
qCDebug(audio) << "AudioInjector::injectLocally cannot inject locally with no local audio interface present.";
}
if (!success) {
// we never started so we are finished, call our stop method
stop();
}
}
const uchar MAX_INJECTOR_VOLUME = 0xFF;
@ -140,65 +150,65 @@ void AudioInjector::injectToMixer() {
_currentSendPosition >= _audioData.size()) {
_currentSendPosition = 0;
}
auto nodeList = DependencyManager::get<NodeList>();
// make sure we actually have samples downloaded to inject
if (_audioData.size()) {
// setup the packet for injected audio
QByteArray injectAudioPacket = nodeList->byteArrayWithPopulatedHeader(PacketTypeInjectAudio);
QDataStream packetStream(&injectAudioPacket, QIODevice::Append);
// pack some placeholder sequence number for now
int numPreSequenceNumberBytes = injectAudioPacket.size();
packetStream << (quint16)0;
// pack stream identifier (a generated UUID)
packetStream << QUuid::createUuid();
// pack the stereo/mono type of the stream
packetStream << _options.stereo;
// pack the flag for loopback
uchar loopbackFlag = (uchar) true;
packetStream << loopbackFlag;
// pack the position for injected audio
int positionOptionOffset = injectAudioPacket.size();
packetStream.writeRawData(reinterpret_cast<const char*>(&_options.position),
sizeof(_options.position));
// pack our orientation for injected audio
int orientationOptionOffset = injectAudioPacket.size();
packetStream.writeRawData(reinterpret_cast<const char*>(&_options.orientation),
sizeof(_options.orientation));
// pack zero for radius
float radius = 0;
packetStream << radius;
// pack 255 for attenuation byte
int volumeOptionOffset = injectAudioPacket.size();
quint8 volume = MAX_INJECTOR_VOLUME * _options.volume;
packetStream << volume;
packetStream << _options.ignorePenumbra;
QElapsedTimer timer;
timer.start();
int nextFrame = 0;
int numPreAudioDataBytes = injectAudioPacket.size();
bool shouldLoop = _options.loop;
// loop to send off our audio in AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL sized chunks
quint16 outgoingInjectedAudioSequenceNumber = 0;
while (_currentSendPosition < _audioData.size() && !_shouldStop) {
int bytesToCopy = std::min(((_options.stereo) ? 2 : 1) * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL,
_audioData.size() - _currentSendPosition);
// Measure the loudness of this frame
_loudness = 0.0f;
for (int i = 0; i < bytesToCopy; i += sizeof(int16_t)) {
@ -215,45 +225,45 @@ void AudioInjector::injectToMixer() {
sizeof(_options.orientation));
volume = MAX_INJECTOR_VOLUME * _options.volume;
memcpy(injectAudioPacket.data() + volumeOptionOffset, &volume, sizeof(volume));
// resize the QByteArray to the right size
injectAudioPacket.resize(numPreAudioDataBytes + bytesToCopy);
// pack the sequence number
memcpy(injectAudioPacket.data() + numPreSequenceNumberBytes,
&outgoingInjectedAudioSequenceNumber, sizeof(quint16));
// copy the next bytesToCopy bytes of audio data to the packet
memcpy(injectAudioPacket.data() + numPreAudioDataBytes,
_audioData.data() + _currentSendPosition, bytesToCopy);
// grab our audio mixer from the NodeList, if it exists
SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
// send off this audio packet
nodeList->writeDatagram(injectAudioPacket, audioMixer);
outgoingInjectedAudioSequenceNumber++;
_currentSendPosition += bytesToCopy;
// send two packets before the first sleep so the mixer can start playback right away
if (_currentSendPosition != bytesToCopy && _currentSendPosition < _audioData.size()) {
// process events in case we have been told to stop and be deleted
QCoreApplication::processEvents();
if (_shouldStop) {
break;
}
// not the first packet and not done
// sleep for the appropriate time
int usecToSleep = (++nextFrame * AudioConstants::NETWORK_FRAME_USECS) - timer.nsecsElapsed() / 1000;
if (usecToSleep > 0) {
usleep(usecToSleep);
}
}
}
if (shouldLoop && _currentSendPosition >= _audioData.size()) {
@ -261,13 +271,13 @@ void AudioInjector::injectToMixer() {
}
}
}
setIsFinished(true);
}
void AudioInjector::stop() {
_shouldStop = true;
if (_options.localOnly) {
// we're only a local injector, so we can say we are finished right away too
setIsFinished(true);
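For reference, the secondOffset handling earlier in this file turns a start time into a byte offset into the raw 16-bit sample data; a minimal standalone sketch of that arithmetic (the helper name is illustrative, not part of this change):

#include <cmath>
#include <cstdint>
#include <AudioConstants.h>

// Convert a starting offset in seconds to a byte offset into 16-bit PCM data,
// doubling the sample count when the source is stereo.
int byteOffsetForSeconds(float seconds, bool stereo) {
    int offsetSamples = (int) floorf(AudioConstants::SAMPLE_RATE * seconds * (stereo ? 2.0f : 1.0f));
    return offsetSamples * (int) sizeof(int16_t);
}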

View file

@ -59,6 +59,7 @@ public slots:
void setCurrentSendPosition(int currentSendPosition) { _currentSendPosition = currentSendPosition; }
float getLoudness() const { return _loudness; }
bool isPlaying() const { return !_isFinished; }
void restartPortionAfterFinished();
signals:
void finished();

View file

@ -53,21 +53,25 @@ Sound::Sound(const QUrl& url, bool isStereo) :
_isStereo(isStereo),
_isReady(false)
{
}
void Sound::downloadFinished(QNetworkReply* reply) {
// replace our byte array with the downloaded data
QByteArray rawAudioByteArray = reply->readAll();
QString fileName = reply->url().fileName();
if (reply->hasRawHeader("Content-Type")) {
const QString WAV_EXTENSION = ".wav";
if (reply->hasRawHeader("Content-Type") || fileName.endsWith(WAV_EXTENSION)) {
QByteArray headerContentType = reply->rawHeader("Content-Type");
// WAV audio file encountered
if (headerContentType == "audio/x-wav"
|| headerContentType == "audio/wav"
|| headerContentType == "audio/wave") {
|| headerContentType == "audio/wave"
|| fileName.endsWith(WAV_EXTENSION)) {
QByteArray outputAudioByteArray;
@ -80,7 +84,7 @@ void Sound::downloadFinished(QNetworkReply* reply) {
_isStereo = true;
qCDebug(audio) << "Processing sound of" << rawAudioByteArray.size() << "bytes from" << reply->url() << "as stereo audio file.";
}
// Process as RAW file
downSample(rawAudioByteArray);
}
@ -88,7 +92,7 @@ void Sound::downloadFinished(QNetworkReply* reply) {
} else {
qCDebug(audio) << "Network reply without 'Content-Type'.";
}
_isReady = true;
reply->deleteLater();
}
@ -99,16 +103,16 @@ void Sound::downSample(const QByteArray& rawAudioByteArray) {
// we want to convert it to the format that the audio-mixer wants
// which is signed, 16-bit, 24 kHz
int numSourceSamples = rawAudioByteArray.size() / sizeof(AudioConstants::AudioSample);
int numDestinationBytes = rawAudioByteArray.size() / sizeof(AudioConstants::AudioSample);
if (_isStereo && numSourceSamples % 2 != 0) {
numDestinationBytes += sizeof(AudioConstants::AudioSample);
}
_byteArray.resize(numDestinationBytes);
int16_t* sourceSamples = (int16_t*) rawAudioByteArray.data();
int16_t* destinationSamples = (int16_t*) _byteArray.data();
@ -134,22 +138,22 @@ void Sound::downSample(const QByteArray& rawAudioByteArray) {
}
void Sound::trimFrames() {
const uint32_t inputFrameCount = _byteArray.size() / sizeof(int16_t);
const uint32_t trimCount = 1024; // number of leading and trailing frames to trim
if (inputFrameCount <= (2 * trimCount)) {
return;
}
int16_t* inputFrameData = (int16_t*)_byteArray.data();
AudioEditBufferFloat32 editBuffer(1, inputFrameCount);
editBuffer.copyFrames(1, inputFrameCount, inputFrameData, false /*copy in*/);
editBuffer.linearFade(0, trimCount, true);
editBuffer.linearFade(inputFrameCount - trimCount, inputFrameCount, false);
editBuffer.copyFrames(1, inputFrameCount, inputFrameData, true /*copy out*/);
}
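The edit-buffer calls above fade the first and last 1024 frames rather than hard-trimming them; conceptually, the fade-in is just a linear gain ramp, e.g. (a standalone sketch, not the AudioEditBufferFloat32 API):

#include <cstdint>

// Apply a linear fade-in over the first fadeFrames samples of a mono int16_t buffer,
// ramping gain from 0.0 at the first sample up toward 1.0.
void linearFadeIn(int16_t* samples, uint32_t fadeFrames) {
    for (uint32_t i = 0; i < fadeFrames; i++) {
        float gain = (float) i / (float) fadeFrames;
        samples[i] = (int16_t) (samples[i] * gain);
    }
}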
@ -238,7 +242,7 @@ void Sound::interpretAsWav(const QByteArray& inputAudioByteArray, QByteArray& ou
} else if (qFromLittleEndian<quint16>(fileHeader.wave.numChannels) > 2) {
qCDebug(audio) << "Currently not support audio files with more than 2 channels.";
}
if (qFromLittleEndian<quint16>(fileHeader.wave.bitsPerSample) != 16) {
qCDebug(audio) << "Currently not supporting non 16bit audio files.";
return;

View file

@ -13,6 +13,7 @@
#define hifi_AvatarData_h
#include <string>
#include <memory>
/* VS2010 defines stdint.h, but not inttypes.h */
#if defined(_MSC_VER)
typedef signed char int8_t;
@ -57,6 +58,10 @@ typedef unsigned long long quint64;
#include "Recorder.h"
#include "Referential.h"
typedef std::shared_ptr<AvatarData> AvatarSharedPointer;
typedef std::weak_ptr<AvatarData> AvatarWeakPointer;
typedef QHash<QUuid, AvatarSharedPointer> AvatarHash;
// avatar motion behaviors
const quint32 AVATAR_MOTION_KEYBOARD_MOTOR_ENABLED = 1U << 0;
const quint32 AVATAR_MOTION_SCRIPTED_MOTOR_ENABLED = 1U << 1;

View file

@ -16,16 +16,14 @@
#include <QtCore/QSharedPointer>
#include <QtCore/QUuid>
#include <memory>
#include <DependencyManager.h>
#include <Node.h>
#include "AvatarData.h"
#include <glm/glm.hpp>
typedef QSharedPointer<AvatarData> AvatarSharedPointer;
typedef QWeakPointer<AvatarData> AvatarWeakPointer;
typedef QHash<QUuid, AvatarSharedPointer> AvatarHash;
class AvatarHashMap : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY

View file

@ -23,4 +23,4 @@ find_package(Soxr REQUIRED)
target_link_libraries(${TARGET_NAME} ${SOXR_LIBRARIES})
target_include_directories(${TARGET_NAME} SYSTEM PRIVATE ${SOXR_INCLUDE_DIRS})
link_hifi_libraries(shared gpu script-engine render-utils)
link_hifi_libraries(shared gpu script-engine render render-utils)

View file

@ -10,6 +10,7 @@
//
#include <gpu/GPUConfig.h>
#include <gpu/GLBackend.h>
#include <glm/gtx/quaternion.hpp>
@ -30,8 +31,11 @@
#include <soxr.h>
#include <AudioConstants.h>
#include "EntityTreeRenderer.h"
#include "RenderableEntityItem.h"
#include "RenderableBoxEntityItem.h"
#include "RenderableLightEntityItem.h"
#include "RenderableModelEntityItem.h"
@ -95,6 +99,14 @@ void EntityTreeRenderer::clear() {
}
OctreeRenderer::clear();
_entityScripts.clear();
auto scene = _viewState->getMain3DScene();
render::PendingChanges pendingChanges;
foreach(auto entity, _entitiesInScene) {
entity->removeFromScene(entity, scene, pendingChanges);
}
scene->enqueuePendingChanges(pendingChanges);
_entitiesInScene.clear();
}
void EntityTreeRenderer::init() {
@ -395,127 +407,107 @@ void EntityTreeRenderer::leaveAllEntities() {
_lastAvatarPosition = _viewState->getAvatarPosition() + glm::vec3((float)TREE_SCALE);
}
}
void EntityTreeRenderer::render(RenderArgs::RenderMode renderMode,
RenderArgs::RenderSide renderSide,
RenderArgs::DebugFlags renderDebugFlags) {
void EntityTreeRenderer::applyZonePropertiesToScene(std::shared_ptr<ZoneEntityItem> zone) {
QSharedPointer<SceneScriptingInterface> scene = DependencyManager::get<SceneScriptingInterface>();
if (zone) {
if (!_hasPreviousZone) {
_previousKeyLightColor = scene->getKeyLightColor();
_previousKeyLightIntensity = scene->getKeyLightIntensity();
_previousKeyLightAmbientIntensity = scene->getKeyLightAmbientIntensity();
_previousKeyLightDirection = scene->getKeyLightDirection();
_previousStageSunModelEnabled = scene->isStageSunModelEnabled();
_previousStageLongitude = scene->getStageLocationLongitude();
_previousStageLatitude = scene->getStageLocationLatitude();
_previousStageAltitude = scene->getStageLocationAltitude();
_previousStageHour = scene->getStageDayTime();
_previousStageDay = scene->getStageYearTime();
_hasPreviousZone = true;
}
scene->setKeyLightColor(zone->getKeyLightColorVec3());
scene->setKeyLightIntensity(zone->getKeyLightIntensity());
scene->setKeyLightAmbientIntensity(zone->getKeyLightAmbientIntensity());
scene->setKeyLightDirection(zone->getKeyLightDirection());
scene->setStageSunModelEnable(zone->getStageProperties().getSunModelEnabled());
scene->setStageLocation(zone->getStageProperties().getLongitude(), zone->getStageProperties().getLatitude(),
zone->getStageProperties().getAltitude());
scene->setStageDayTime(zone->getStageProperties().calculateHour());
scene->setStageYearTime(zone->getStageProperties().calculateDay());
if (zone->getBackgroundMode() == BACKGROUND_MODE_ATMOSPHERE) {
EnvironmentData data = zone->getEnvironmentData();
glm::vec3 keyLightDirection = scene->getKeyLightDirection();
glm::vec3 inverseKeyLightDirection = keyLightDirection * -1.0f;
// NOTE: is this right? It seems like the "sun" should be based on the center of the
// atmosphere, not where the camera is.
glm::vec3 keyLightLocation = _viewState->getAvatarPosition()
+ (inverseKeyLightDirection * data.getAtmosphereOuterRadius());
data.setSunLocation(keyLightLocation);
const float KEY_LIGHT_INTENSITY_TO_SUN_BRIGHTNESS_RATIO = 20.0f;
float sunBrightness = scene->getKeyLightIntensity() * KEY_LIGHT_INTENSITY_TO_SUN_BRIGHTNESS_RATIO;
data.setSunBrightness(sunBrightness);
_viewState->overrideEnvironmentData(data);
scene->getSkyStage()->setBackgroundMode(model::SunSkyStage::SKY_DOME);
} else {
_viewState->endOverrideEnvironmentData();
auto stage = scene->getSkyStage();
if (zone->getBackgroundMode() == BACKGROUND_MODE_SKYBOX) {
stage->getSkybox()->setColor(zone->getSkyboxProperties().getColorVec3());
if (zone->getSkyboxProperties().getURL().isEmpty()) {
stage->getSkybox()->setCubemap(gpu::TexturePointer());
} else {
// Update the Texture of the Skybox with the one pointed to by this zone
auto cubeMap = DependencyManager::get<TextureCache>()->getTexture(zone->getSkyboxProperties().getURL(), CUBE_TEXTURE);
stage->getSkybox()->setCubemap(cubeMap->getGPUTexture());
}
stage->setBackgroundMode(model::SunSkyStage::SKY_BOX);
} else {
stage->setBackgroundMode(model::SunSkyStage::SKY_DOME); // let the application atmosphere through
}
}
} else {
if (_hasPreviousZone) {
scene->setKeyLightColor(_previousKeyLightColor);
scene->setKeyLightIntensity(_previousKeyLightIntensity);
scene->setKeyLightAmbientIntensity(_previousKeyLightAmbientIntensity);
scene->setKeyLightDirection(_previousKeyLightDirection);
scene->setStageSunModelEnable(_previousStageSunModelEnabled);
scene->setStageLocation(_previousStageLongitude, _previousStageLatitude,
_previousStageAltitude);
scene->setStageDayTime(_previousStageHour);
scene->setStageYearTime(_previousStageDay);
_hasPreviousZone = false;
}
_viewState->endOverrideEnvironmentData();
scene->getSkyStage()->setBackgroundMode(model::SunSkyStage::SKY_DOME); // let the application atmosphere through
}
}
void EntityTreeRenderer::render(RenderArgs* renderArgs) {
if (_tree && !_shuttingDown) {
Model::startScene(renderSide);
ViewFrustum* frustum = (renderMode == RenderArgs::SHADOW_RENDER_MODE) ?
_viewState->getShadowViewFrustum() : _viewState->getCurrentViewFrustum();
RenderArgs args(this, frustum, getSizeScale(), getBoundaryLevelAdjust(),
renderMode, renderSide, renderDebugFlags);
renderArgs->_renderer = this;
_tree->lockForRead();
// Whenever we're in an intersection between zones, we always choose the smallest zone.
_bestZone = NULL;
_bestZone = NULL; // NOTE: Is this what we want?
_bestZoneVolume = std::numeric_limits<float>::max();
_tree->recurseTreeWithOperation(renderOperation, &args);
QSharedPointer<SceneScriptingInterface> scene = DependencyManager::get<SceneScriptingInterface>();
if (_bestZone) {
if (!_hasPreviousZone) {
_previousKeyLightColor = scene->getKeyLightColor();
_previousKeyLightIntensity = scene->getKeyLightIntensity();
_previousKeyLightAmbientIntensity = scene->getKeyLightAmbientIntensity();
_previousKeyLightDirection = scene->getKeyLightDirection();
_previousStageSunModelEnabled = scene->isStageSunModelEnabled();
_previousStageLongitude = scene->getStageLocationLongitude();
_previousStageLatitude = scene->getStageLocationLatitude();
_previousStageAltitude = scene->getStageLocationAltitude();
_previousStageHour = scene->getStageDayTime();
_previousStageDay = scene->getStageYearTime();
_hasPreviousZone = true;
}
scene->setKeyLightColor(_bestZone->getKeyLightColorVec3());
scene->setKeyLightIntensity(_bestZone->getKeyLightIntensity());
scene->setKeyLightAmbientIntensity(_bestZone->getKeyLightAmbientIntensity());
scene->setKeyLightDirection(_bestZone->getKeyLightDirection());
scene->setStageSunModelEnable(_bestZone->getStageProperties().getSunModelEnabled());
scene->setStageLocation(_bestZone->getStageProperties().getLongitude(), _bestZone->getStageProperties().getLatitude(),
_bestZone->getStageProperties().getAltitude());
scene->setStageDayTime(_bestZone->getStageProperties().calculateHour());
scene->setStageYearTime(_bestZone->getStageProperties().calculateDay());
// FIX ME: right now the renderOperation does the following:
// 1) determines the best zone (not really rendering)
// 2) renders the debug cell details
// we should clean this up
_tree->recurseTreeWithOperation(renderOperation, renderArgs);
if (_bestZone->getBackgroundMode() == BACKGROUND_MODE_ATMOSPHERE) {
EnvironmentData data = _bestZone->getEnvironmentData();
glm::vec3 keyLightDirection = scene->getKeyLightDirection();
glm::vec3 inverseKeyLightDirection = keyLightDirection * -1.0f;
// NOTE: is this right? It seems like the "sun" should be based on the center of the
// atmosphere, not where the camera is.
glm::vec3 keyLightLocation = _viewState->getAvatarPosition()
+ (inverseKeyLightDirection * data.getAtmosphereOuterRadius());
data.setSunLocation(keyLightLocation);
applyZonePropertiesToScene(_bestZone);
const float KEY_LIGHT_INTENSITY_TO_SUN_BRIGHTNESS_RATIO = 20.0f;
float sunBrightness = scene->getKeyLightIntensity() * KEY_LIGHT_INTENSITY_TO_SUN_BRIGHTNESS_RATIO;
data.setSunBrightness(sunBrightness);
_viewState->overrideEnvironmentData(data);
scene->getSkyStage()->setBackgroundMode(model::SunSkyStage::SKY_DOME);
} else {
_viewState->endOverrideEnvironmentData();
auto stage = scene->getSkyStage();
if (_bestZone->getBackgroundMode() == BACKGROUND_MODE_SKYBOX) {
stage->getSkybox()->setColor(_bestZone->getSkyboxProperties().getColorVec3());
if (_bestZone->getSkyboxProperties().getURL().isEmpty()) {
stage->getSkybox()->setCubemap(gpu::TexturePointer());
} else {
// Update the Texture of the Skybox with the one pointed to by this zone
auto cubeMap = DependencyManager::get<TextureCache>()->getTexture(_bestZone->getSkyboxProperties().getURL(), CUBE_TEXTURE);
stage->getSkybox()->setCubemap(cubeMap->getGPUTexture());
}
stage->setBackgroundMode(model::SunSkyStage::SKY_BOX);
} else {
stage->setBackgroundMode(model::SunSkyStage::SKY_DOME); // let the application atmosphere through
}
}
} else {
if (_hasPreviousZone) {
scene->setKeyLightColor(_previousKeyLightColor);
scene->setKeyLightIntensity(_previousKeyLightIntensity);
scene->setKeyLightAmbientIntensity(_previousKeyLightAmbientIntensity);
scene->setKeyLightDirection(_previousKeyLightDirection);
scene->setStageSunModelEnable(_previousStageSunModelEnabled);
scene->setStageLocation(_previousStageLongitude, _previousStageLatitude,
_previousStageAltitude);
scene->setStageDayTime(_previousStageHour);
scene->setStageYearTime(_previousStageDay);
_hasPreviousZone = false;
}
_viewState->endOverrideEnvironmentData();
scene->getSkyStage()->setBackgroundMode(model::SunSkyStage::SKY_DOME); // let the application atmosphere through
}
// we must call endScene while we still have the tree locked so that no one deletes a model
// on us while rendering the scene
Model::endScene(renderMode, &args);
_tree->unlock();
// stats...
_meshesConsidered = args._meshesConsidered;
_meshesRendered = args._meshesRendered;
_meshesOutOfView = args._meshesOutOfView;
_meshesTooSmall = args._meshesTooSmall;
_elementsTouched = args._elementsTouched;
_itemsRendered = args._itemsRendered;
_itemsOutOfView = args._itemsOutOfView;
_itemsTooSmall = args._itemsTooSmall;
_materialSwitches = args._materialSwitches;
_trianglesRendered = args._trianglesRendered;
_quadsRendered = args._quadsRendered;
_translucentMeshPartsRendered = args._translucentMeshPartsRendered;
_opaqueMeshPartsRendered = args._opaqueMeshPartsRendered;
}
deleteReleasedModels(); // seems like as good as any other place to do some memory cleanup
}
@ -564,57 +556,36 @@ const FBXGeometry* EntityTreeRenderer::getCollisionGeometryForEntity(EntityItemP
return result;
}
void EntityTreeRenderer::renderElementProxy(EntityTreeElement* entityTreeElement) {
void EntityTreeRenderer::renderElementProxy(EntityTreeElement* entityTreeElement, RenderArgs* args) {
auto deferredLighting = DependencyManager::get<DeferredLightingEffect>();
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
Transform transform;
glm::vec3 elementCenter = entityTreeElement->getAACube().calcCenter();
float elementSize = entityTreeElement->getScale();
glPushMatrix();
glTranslatef(elementCenter.x, elementCenter.y, elementCenter.z);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(elementSize, glm::vec4(1.0f, 0.0f, 0.0f, 1.0f));
glPopMatrix();
auto drawWireCube = [&](glm::vec3 offset, float size, glm::vec4 color) {
transform.setTranslation(elementCenter + offset);
batch.setModelTransform(transform);
deferredLighting->renderWireCube(batch, size, color);
};
drawWireCube(glm::vec3(), elementSize, glm::vec4(1.0f, 0.0f, 0.0f, 1.0f));
if (_displayElementChildProxies) {
// draw the children
float halfSize = elementSize / 2.0f;
float quarterSize = elementSize / 4.0f;
glPushMatrix();
glTranslatef(elementCenter.x - quarterSize, elementCenter.y - quarterSize, elementCenter.z - quarterSize);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(halfSize, glm::vec4(1.0f, 1.0f, 0.0f, 1.0f));
glPopMatrix();
glPushMatrix();
glTranslatef(elementCenter.x + quarterSize, elementCenter.y - quarterSize, elementCenter.z - quarterSize);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(halfSize, glm::vec4(1.0f, 0.0f, 1.0f, 1.0f));
glPopMatrix();
glPushMatrix();
glTranslatef(elementCenter.x - quarterSize, elementCenter.y + quarterSize, elementCenter.z - quarterSize);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(halfSize, glm::vec4(0.0f, 1.0f, 0.0f, 1.0f));
glPopMatrix();
glPushMatrix();
glTranslatef(elementCenter.x - quarterSize, elementCenter.y - quarterSize, elementCenter.z + quarterSize);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(halfSize, glm::vec4(0.0f, 0.0f, 1.0f, 1.0f));
glPopMatrix();
glPushMatrix();
glTranslatef(elementCenter.x + quarterSize, elementCenter.y + quarterSize, elementCenter.z + quarterSize);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(halfSize, glm::vec4(1.0f, 1.0f, 1.0f, 1.0f));
glPopMatrix();
glPushMatrix();
glTranslatef(elementCenter.x - quarterSize, elementCenter.y + quarterSize, elementCenter.z + quarterSize);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(halfSize, glm::vec4(0.0f, 0.5f, 0.5f, 1.0f));
glPopMatrix();
glPushMatrix();
glTranslatef(elementCenter.x + quarterSize, elementCenter.y - quarterSize, elementCenter.z + quarterSize);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(halfSize, glm::vec4(0.5f, 0.0f, 0.0f, 1.0f));
glPopMatrix();
glPushMatrix();
glTranslatef(elementCenter.x + quarterSize, elementCenter.y + quarterSize, elementCenter.z - quarterSize);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(halfSize, glm::vec4(0.0f, 0.5f, 0.0f, 1.0f));
glPopMatrix();
drawWireCube(glm::vec3(-quarterSize, -quarterSize, -quarterSize), halfSize, glm::vec4(1.0f, 1.0f, 0.0f, 1.0f));
drawWireCube(glm::vec3(quarterSize, -quarterSize, -quarterSize), halfSize, glm::vec4(1.0f, 0.0f, 1.0f, 1.0f));
drawWireCube(glm::vec3(-quarterSize, quarterSize, -quarterSize), halfSize, glm::vec4(0.0f, 1.0f, 0.0f, 1.0f));
drawWireCube(glm::vec3(-quarterSize, -quarterSize, quarterSize), halfSize, glm::vec4(0.0f, 0.0f, 1.0f, 1.0f));
drawWireCube(glm::vec3(quarterSize, quarterSize, quarterSize), halfSize, glm::vec4(1.0f, 1.0f, 1.0f, 1.0f));
drawWireCube(glm::vec3(-quarterSize, quarterSize, quarterSize), halfSize, glm::vec4(0.0f, 0.5f, 0.5f, 1.0f));
drawWireCube(glm::vec3(quarterSize, -quarterSize, quarterSize), halfSize, glm::vec4(0.5f, 0.0f, 0.0f, 1.0f));
drawWireCube(glm::vec3(quarterSize, quarterSize, -quarterSize), halfSize, glm::vec4(0.0f, 0.5f, 0.0f, 1.0f));
}
}
@ -631,48 +602,35 @@ void EntityTreeRenderer::renderProxies(EntityItemPointer entity, RenderArgs* arg
glm::vec3 minCenter = minCube.calcCenter();
glm::vec3 entityBoxCenter = entityBox.calcCenter();
glm::vec3 entityBoxScale = entityBox.getScale();
auto deferredLighting = DependencyManager::get<DeferredLightingEffect>();
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
Transform transform;
// draw the max bounding cube
glPushMatrix();
glTranslatef(maxCenter.x, maxCenter.y, maxCenter.z);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(maxCube.getScale(), glm::vec4(1.0f, 1.0f, 0.0f, 1.0f));
glPopMatrix();
transform.setTranslation(maxCenter);
batch.setModelTransform(transform);
deferredLighting->renderWireCube(batch, maxCube.getScale(), glm::vec4(1.0f, 1.0f, 0.0f, 1.0f));
// draw the min bounding cube
glPushMatrix();
glTranslatef(minCenter.x, minCenter.y, minCenter.z);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(minCube.getScale(), glm::vec4(0.0f, 1.0f, 0.0f, 1.0f));
glPopMatrix();
transform.setTranslation(minCenter);
batch.setModelTransform(transform);
deferredLighting->renderWireCube(batch, minCube.getScale(), glm::vec4(0.0f, 1.0f, 0.0f, 1.0f));
// draw the entityBox bounding box
glPushMatrix();
glTranslatef(entityBoxCenter.x, entityBoxCenter.y, entityBoxCenter.z);
glScalef(entityBoxScale.x, entityBoxScale.y, entityBoxScale.z);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(1.0f, glm::vec4(0.0f, 0.0f, 1.0f, 1.0f));
glPopMatrix();
transform.setTranslation(entityBoxCenter);
transform.setScale(entityBoxScale);
batch.setModelTransform(transform);
deferredLighting->renderWireCube(batch, 1.0f, glm::vec4(0.0f, 0.0f, 1.0f, 1.0f));
glm::vec3 position = entity->getPosition();
glm::vec3 center = entity->getCenter();
glm::vec3 dimensions = entity->getDimensions();
glm::quat rotation = entity->getRotation();
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glPushMatrix();
glm::vec3 positionToCenter = center - position;
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
glScalef(dimensions.x, dimensions.y, dimensions.z);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(1.0f, glm::vec4(1.0f, 0.0f, 1.0f, 1.0f));
glPopMatrix();
glPopMatrix();
// Rotated bounding box
batch.setModelTransform(entity->getTransformToCenter());
deferredLighting->renderWireCube(batch, 1.0f, glm::vec4(1.0f, 0.0f, 1.0f, 1.0f));
}
}
void EntityTreeRenderer::renderElement(OctreeElement* element, RenderArgs* args) {
args->_elementsTouched++;
// actually render it here...
// we need to iterate the actual entityItems of the element
EntityTreeElement* entityTreeElement = static_cast<EntityTreeElement*>(element);
@ -685,7 +643,7 @@ void EntityTreeRenderer::renderElement(OctreeElement* element, RenderArgs* args)
bool isShadowMode = args->_renderMode == RenderArgs::SHADOW_RENDER_MODE;
if (!isShadowMode && _displayModelElementProxy && numberOfEntities > 0) {
renderElementProxy(entityTreeElement);
renderElementProxy(entityTreeElement, args);
}
for (uint16_t i = 0; i < numberOfEntities; i++) {
@ -715,36 +673,6 @@ void EntityTreeRenderer::renderElement(OctreeElement* element, RenderArgs* args)
}
}
}
// render entityItem
AABox entityBox = entityItem->getAABox();
// TODO: some entity types (like lights) might want to be rendered even
// when they are outside of the view frustum...
float distance = args->_viewFrustum->distanceToCamera(entityBox.calcCenter());
bool outOfView = args->_viewFrustum->boxInFrustum(entityBox) == ViewFrustum::OUTSIDE;
if (!outOfView) {
bool bigEnoughToRender = _viewState->shouldRenderMesh(entityBox.getLargestDimension(), distance);
if (bigEnoughToRender) {
renderProxies(entityItem, args);
Glower* glower = NULL;
if (entityItem->getGlowLevel() > 0.0f) {
glower = new Glower(entityItem->getGlowLevel());
}
entityItem->render(args);
args->_itemsRendered++;
if (glower) {
delete glower;
}
} else {
args->_itemsTooSmall++;
}
} else {
args->_itemsOutOfView++;
}
}
}
}
@ -1070,12 +998,34 @@ void EntityTreeRenderer::deletingEntity(const EntityItemID& entityID) {
checkAndCallUnload(entityID);
}
_entityScripts.remove(entityID);
// here's where we remove the entity payload from the scene
if (_entitiesInScene.contains(entityID)) {
auto entity = _entitiesInScene.take(entityID);
render::PendingChanges pendingChanges;
auto scene = _viewState->getMain3DScene();
entity->removeFromScene(entity, scene, pendingChanges);
scene->enqueuePendingChanges(pendingChanges);
}
}
void EntityTreeRenderer::addingEntity(const EntityItemID& entityID) {
checkAndCallPreload(entityID);
auto entity = static_cast<EntityTree*>(_tree)->findEntityByID(entityID);
addEntityToScene(entity);
}
void EntityTreeRenderer::addEntityToScene(EntityItemPointer entity) {
// here's where we add the entity payload to the scene
render::PendingChanges pendingChanges;
auto scene = _viewState->getMain3DScene();
if (entity->addToScene(entity, scene, pendingChanges)) {
_entitiesInScene.insert(entity->getEntityItemID(), entity);
}
scene->enqueuePendingChanges(pendingChanges);
}
void EntityTreeRenderer::entitySciptChanging(const EntityItemID& entityID) {
if (_tree && !_shuttingDown) {
checkAndCallUnload(entityID);
@ -1244,4 +1194,3 @@ void EntityTreeRenderer::entityCollisionWithEntity(const EntityItemID& idA, cons
entityScriptB.property("collisionWithEntity").call(entityScriptA, args);
}
}

View file

@ -58,9 +58,7 @@ public:
void processEraseMessage(const QByteArray& dataByteArray, const SharedNodePointer& sourceNode);
virtual void init();
virtual void render(RenderArgs::RenderMode renderMode = RenderArgs::DEFAULT_RENDER_MODE,
RenderArgs::RenderSide renderSide = RenderArgs::MONO,
RenderArgs::DebugFlags renderDebugFlags = RenderArgs::RENDER_DEBUG_NONE);
virtual void render(RenderArgs* renderArgs) override;
virtual const FBXGeometry* getGeometryForEntity(EntityItemPointer entityItem);
virtual const Model* getModelForEntityItem(EntityItemPointer entityItem);
@ -125,7 +123,10 @@ protected:
virtual Octree* createTree() { return new EntityTree(true); }
private:
void renderElementProxy(EntityTreeElement* entityTreeElement);
void addEntityToScene(EntityItemPointer entity);
void applyZonePropertiesToScene(std::shared_ptr<ZoneEntityItem> zone);
void renderElementProxy(EntityTreeElement* entityTreeElement, RenderArgs* args);
void checkAndCallPreload(const EntityItemID& entityID);
void checkAndCallUnload(const EntityItemID& entityID);
@ -187,6 +188,8 @@ private:
float _previousStageHour;
int _previousStageDay;
QHash<EntityItemID, EntityItemPointer> _entitiesInScene;
};
#endif // hifi_EntityTreeRenderer_h

View file

@ -12,6 +12,7 @@
#include <glm/gtx/quaternion.hpp>
#include <gpu/GPUConfig.h>
#include <gpu/Batch.h>
#include <DeferredLightingEffect.h>
#include <PerfStat.h>
@ -24,17 +25,8 @@ EntityItemPointer RenderableBoxEntityItem::factory(const EntityItemID& entityID,
void RenderableBoxEntityItem::render(RenderArgs* args) {
PerformanceTimer perfTimer("RenderableBoxEntityItem::render");
assert(getType() == EntityTypes::Box);
glm::vec3 position = getPosition();
glm::vec3 center = getCenter();
glm::vec3 dimensions = getDimensions();
glm::quat rotation = getRotation();
const float MAX_COLOR = 255.0f;
glm::vec4 cubeColor(getColor()[RED_INDEX] / MAX_COLOR, getColor()[GREEN_INDEX] / MAX_COLOR,
getColor()[BLUE_INDEX] / MAX_COLOR, getLocalRenderAlpha());
Q_ASSERT(getType() == EntityTypes::Box);
glm::vec4 cubeColor(toGlm(getXColor()), getLocalRenderAlpha());
bool debugSimulationOwnership = args->_debugFlags & RenderArgs::RENDER_DEBUG_SIMULATION_OWNERSHIP;
bool highlightSimulationOwnership = false;
@ -43,22 +35,15 @@ void RenderableBoxEntityItem::render(RenderArgs* args) {
const QUuid& myNodeID = nodeList->getSessionUUID();
highlightSimulationOwnership = (getSimulatorID() == myNodeID);
}
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glPushMatrix();
glm::vec3 positionToCenter = center - position;
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
glScalef(dimensions.x, dimensions.y, dimensions.z);
if (highlightSimulationOwnership) {
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(1.0f, cubeColor);
} else {
DependencyManager::get<DeferredLightingEffect>()->renderSolidCube(1.0f, cubeColor);
}
glPopMatrix();
glPopMatrix();
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
batch.setModelTransform(getTransformToCenter());
if (highlightSimulationOwnership) {
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(batch, 1.0f, cubeColor);
} else {
DependencyManager::get<DeferredLightingEffect>()->renderSolidCube(batch, 1.0f, cubeColor);
}
RenderableDebugableEntityItem::render(this, args);
};

View file

@ -14,6 +14,7 @@
#include <BoxEntityItem.h>
#include "RenderableDebugableEntityItem.h"
#include "RenderableEntityItem.h"
class RenderableBoxEntityItem : public BoxEntityItem {
public:
@ -24,6 +25,8 @@ public:
{ }
virtual void render(RenderArgs* args);
SIMPLE_RENDERABLE()
};

View file

@ -12,7 +12,10 @@
#include <glm/gtx/quaternion.hpp>
#include <gpu/GPUConfig.h>
#include <gpu/Batch.h>
#include <DeferredLightingEffect.h>
#include <PhysicsEngine.h>
@ -21,48 +24,24 @@
void RenderableDebugableEntityItem::renderBoundingBox(EntityItem* entity, RenderArgs* args,
float puffedOut, glm::vec4& color) {
glm::vec3 position = entity->getPosition();
glm::vec3 center = entity->getCenter();
glm::vec3 dimensions = entity->getDimensions();
glm::quat rotation = entity->getRotation();
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glPushMatrix();
glm::vec3 positionToCenter = center - position;
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
glScalef(dimensions.x, dimensions.y, dimensions.z);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(1.0f + puffedOut, color);
glPopMatrix();
glPopMatrix();
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
batch.setModelTransform(entity->getTransformToCenter());
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(batch, 1.0f + puffedOut, color);
}
void RenderableDebugableEntityItem::renderHoverDot(EntityItem* entity, RenderArgs* args) {
glm::vec3 position = entity->getPosition();
glm::vec3 center = entity->getCenter();
glm::vec3 dimensions = entity->getDimensions();
glm::quat rotation = entity->getRotation();
glm::vec4 blueColor(0.0f, 0.0f, 1.0f, 1.0f);
const int SLICES = 8, STACKS = 8;
float radius = 0.05f;
glPushMatrix();
glTranslatef(position.x, position.y + dimensions.y + radius, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glPushMatrix();
glm::vec3 positionToCenter = center - position;
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
glScalef(radius, radius, radius);
const int SLICES = 8;
const int STACKS = 8;
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphere(0.5f, SLICES, STACKS, blueColor);
glPopMatrix();
glPopMatrix();
glm::vec4 blueColor(0.0f, 0.0f, 1.0f, 1.0f);
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
Transform transform = entity->getTransformToCenter();
// Cancel true dimensions and set scale to 2 * radius (diameter)
transform.postScale(2.0f * glm::vec3(radius, radius, radius) / entity->getDimensions());
batch.setModelTransform(transform);
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphere(batch, 0.5f, SLICES, STACKS, blueColor);
}
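In other words, because getTransformToCenter() already carries the entity's dimensions as its scale, the postScale(2 * radius / dimensions) above leaves a net scale of 2 * radius; the unit sphere drawn with radius 0.5 therefore ends up with a world radius equal to radius (0.05 m here).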
void RenderableDebugableEntityItem::render(EntityItem* entity, RenderArgs* args) {

View file

@ -0,0 +1,41 @@
//
// RenderableEntityItem.cpp
// interface/src
//
// Created by Brad Hefta-Gaub on 12/6/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "RenderableEntityItem.h"
namespace render {
template <> const ItemKey payloadGetKey(const RenderableEntityItemProxy::Pointer& payload) {
if (payload && payload->entity) {
if (payload->entity->getType() == EntityTypes::Light) {
return ItemKey::Builder::light();
}
}
return ItemKey::Builder::opaqueShape();
}
template <> const Item::Bound payloadGetBound(const RenderableEntityItemProxy::Pointer& payload) {
if (payload && payload->entity) {
return payload->entity->getAABox();
}
return render::Item::Bound();
}
template <> void payloadRender(const RenderableEntityItemProxy::Pointer& payload, RenderArgs* args) {
if (args) {
if (payload && payload->entity && payload->entity->getVisible()) {
payload->entity->render(args);
}
}
}
}

View file

@ -0,0 +1,66 @@
//
// RenderableEntityItem.h
// interface/src
//
// Created by Brad Hefta-Gaub on 12/6/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_RenderableEntityItem_h
#define hifi_RenderableEntityItem_h
#include <render/Scene.h>
#include <EntityItem.h>
class RenderableEntityItemProxy {
public:
RenderableEntityItemProxy(EntityItemPointer entity) : entity(entity) { }
typedef render::Payload<RenderableEntityItemProxy> Payload;
typedef Payload::DataPointer Pointer;
EntityItemPointer entity;
};
namespace render {
template <> const ItemKey payloadGetKey(const RenderableEntityItemProxy::Pointer& payload);
template <> const Item::Bound payloadGetBound(const RenderableEntityItemProxy::Pointer& payload);
template <> void payloadRender(const RenderableEntityItemProxy::Pointer& payload, RenderArgs* args);
}
// Mixin class for implementing basic single item rendering
class SimpleRenderableEntityItem {
public:
bool addToScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
_myItem = scene->allocateID();
auto renderData = RenderableEntityItemProxy::Pointer(new RenderableEntityItemProxy(self));
auto renderPayload = render::PayloadPointer(new RenderableEntityItemProxy::Payload(renderData));
pendingChanges.resetItem(_myItem, renderPayload);
return true;
}
void removeFromScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
pendingChanges.removeItem(_myItem);
}
private:
render::ItemID _myItem;
};
#define SIMPLE_RENDERABLE() \
public: \
virtual bool addToScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) { return _renderHelper.addToScene(self, scene, pendingChanges); } \
virtual void removeFromScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) { _renderHelper.removeFromScene(self, scene, pendingChanges); } \
private: \
SimpleRenderableEntityItem _renderHelper;
#endif // hifi_RenderableEntityItem_h
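To show how the mixin and macro above are meant to be used, here is a hypothetical entity type (ExampleEntityItem stands in for any concrete EntityItem subclass; it is not a class from this commit):

class RenderableExampleEntityItem : public ExampleEntityItem {
public:
    RenderableExampleEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties) :
        ExampleEntityItem(entityItemID, properties)
    { }

    // called by the payloadRender specialization whenever the scene draws this item
    virtual void render(RenderArgs* args);

    // injects addToScene()/removeFromScene() overrides backed by a SimpleRenderableEntityItem helper
    SIMPLE_RENDERABLE();
};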

View file

@ -12,6 +12,7 @@
#include <glm/gtx/quaternion.hpp>
#include <gpu/GPUConfig.h>
#include <gpu/Batch.h>
#include <DeferredLightingEffect.h>
#include <GLMHelpers.h>
@ -31,12 +32,7 @@ void RenderableLightEntityItem::render(RenderArgs* args) {
glm::quat rotation = getRotation();
float largestDiameter = glm::max(dimensions.x, dimensions.y, dimensions.z);
const float MAX_COLOR = 255.0f;
float colorR = getColor()[RED_INDEX] / MAX_COLOR;
float colorG = getColor()[GREEN_INDEX] / MAX_COLOR;
float colorB = getColor()[BLUE_INDEX] / MAX_COLOR;
glm::vec3 color = glm::vec3(colorR, colorG, colorB);
glm::vec3 color = toGlm(getXColor());
float intensity = getIntensity();
float exponent = getExponent();
@ -49,21 +45,12 @@ void RenderableLightEntityItem::render(RenderArgs* args) {
DependencyManager::get<DeferredLightingEffect>()->addPointLight(position, largestDiameter / 2.0f,
color, intensity);
}
#ifdef WANT_DEBUG
glm::vec4 color(diffuseR, diffuseG, diffuseB, 1.0f);
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glPushMatrix();
glm::vec3 positionToCenter = center - position;
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
glScalef(dimensions.x, dimensions.y, dimensions.z);
DependencyManager::get<DeferredLightingEffect>()->renderWireSphere(0.5f, 15, 15, color);
glPopMatrix();
glPopMatrix();
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
batch.setModelTransform(getTransformToCenter());
DependencyManager::get<DeferredLightingEffect>()->renderWireSphere(batch, 0.5f, 15, 15, glm::vec4(color, 1.0f));
#endif
};

View file

@ -13,6 +13,7 @@
#define hifi_RenderableLightEntityItem_h
#include <LightEntityItem.h>
#include "RenderableEntityItem.h"
class RenderableLightEntityItem : public LightEntityItem {
public:
@ -27,6 +28,8 @@ public:
virtual bool findDetailedRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
bool& keepSearching, OctreeElement*& element, float& distance, BoxFace& face,
void** intersectedObject, bool precisionPicking) const;
SIMPLE_RENDERABLE();
};

View file

@ -12,6 +12,7 @@
#include <glm/gtx/quaternion.hpp>
#include <gpu/GPUConfig.h>
#include <gpu/Batch.h>
#include <GeometryCache.h>
#include <DeferredLightingEffect.h>
@ -23,32 +24,34 @@ EntityItemPointer RenderableLineEntityItem::factory(const EntityItemID& entityID
return EntityItemPointer(new RenderableLineEntityItem(entityID, properties));
}
void RenderableLineEntityItem::updateGeometry() {
auto geometryCache = DependencyManager::get<GeometryCache>();
if (_lineVerticesID == GeometryCache::UNKNOWN_ID) {
_lineVerticesID = geometryCache->allocateID();
}
if (_pointsChanged) {
glm::vec4 lineColor(toGlm(getXColor()), getLocalRenderAlpha());
geometryCache->updateVertices(_lineVerticesID, getLinePoints(), lineColor);
_pointsChanged = false;
}
}
void RenderableLineEntityItem::render(RenderArgs* args) {
PerformanceTimer perfTimer("RenderableLineEntityItem::render");
assert(getType() == EntityTypes::Line);
// glm::vec3 position = getPosition();
// glm::vec3 dimensions = getDimensions();
glm::quat rotation = getRotation();
glm::vec4 lineColor(toGlm(getXColor()), getLocalRenderAlpha());
glPushMatrix();
glLineWidth(getLineWidth());
auto geometryCache = DependencyManager::get<GeometryCache>();
if (_lineVerticesID == GeometryCache::UNKNOWN_ID) {
_lineVerticesID = geometryCache->allocateID();
}
// TODO: Figure out a clean, efficient way to do relative line positioning. For now we'll just use absolute positioning.
//glTranslatef(position.x, position.y, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
if (_pointsChanged) {
geometryCache->updateVertices(_lineVerticesID, getLinePoints(), lineColor);
_pointsChanged = false;
}
if (getLinePoints().size() > 1) {
geometryCache->renderVertices(gpu::LINE_STRIP, _lineVerticesID);
}
glPopMatrix();
Q_ASSERT(getType() == EntityTypes::Line);
updateGeometry();
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
// TODO: Figure out a clean, efficient way to do relative line positioning. For now we'll just use absolute positioning.
//batch.setModelTransform(getTransformToCenter());
batch.setModelTransform(Transform());
batch._glLineWidth(getLineWidth());
if (getLinePoints().size() > 1) {
DependencyManager::get<GeometryCache>()->renderVertices(batch, gpu::LINE_STRIP, _lineVerticesID);
}
batch._glLineWidth(1.0f);
RenderableDebugableEntityItem::render(this, args);
};

View file

@ -14,6 +14,7 @@
#include <LineEntityItem.h>
#include "RenderableDebugableEntityItem.h"
#include "RenderableEntityItem.h"
#include <GeometryCache.h>
class RenderableLineEntityItem : public LineEntityItem {
@ -27,7 +28,11 @@ public:
virtual void render(RenderArgs* args);
SIMPLE_RENDERABLE();
protected:
void updateGeometry();
int _lineVerticesID;
};

View file

@ -15,9 +15,11 @@
#include <QJsonDocument>
#include <AbstractViewStateInterface.h>
#include <DeferredLightingEffect.h>
#include <Model.h>
#include <PerfStat.h>
#include <render/Scene.h>
#include "EntityTreeRenderer.h"
#include "EntitiesRendererLogging.h"
@ -108,6 +110,90 @@ void RenderableModelEntityItem::remapTextures() {
_currentTextures = _textures;
}
// TODO: we need a solution for changes to the position/rotation/etc of a model...
// this current code path only addresses that in this setup case... not the changing/moving case
bool RenderableModelEntityItem::readyToAddToScene(RenderArgs* renderArgs) {
if (!_model && renderArgs) {
// TODO: this getModel() appears to be about 3% of model render time. We should optimize
PerformanceTimer perfTimer("getModel");
EntityTreeRenderer* renderer = static_cast<EntityTreeRenderer*>(renderArgs->_renderer);
getModel(renderer);
}
if (renderArgs && _model && _needsInitialSimulation && _model->isActive() && _model->isLoadedWithTextures()) {
_model->setScaleToFit(true, getDimensions());
_model->setSnapModelToRegistrationPoint(true, getRegistrationPoint());
_model->setRotation(getRotation());
_model->setTranslation(getPosition());
// make sure to simulate so everything gets set up correctly for rendering
{
PerformanceTimer perfTimer("_model->simulate");
_model->simulate(0.0f);
}
_needsInitialSimulation = false;
_model->renderSetup(renderArgs);
}
bool ready = !_needsInitialSimulation && _model && _model->readyToAddToScene(renderArgs);
return ready;
}
class RenderableModelEntityItemMeta {
public:
RenderableModelEntityItemMeta(EntityItemPointer entity) : entity(entity){ }
typedef render::Payload<RenderableModelEntityItemMeta> Payload;
typedef Payload::DataPointer Pointer;
EntityItemPointer entity;
};
namespace render {
template <> const ItemKey payloadGetKey(const RenderableModelEntityItemMeta::Pointer& payload) {
return ItemKey::Builder::opaqueShape();
}
template <> const Item::Bound payloadGetBound(const RenderableModelEntityItemMeta::Pointer& payload) {
if (payload && payload->entity) {
return payload->entity->getAABox();
}
return render::Item::Bound();
}
template <> void payloadRender(const RenderableModelEntityItemMeta::Pointer& payload, RenderArgs* args) {
if (args) {
if (payload && payload->entity) {
payload->entity->render(args);
}
}
}
}
bool RenderableModelEntityItem::addToScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene,
render::PendingChanges& pendingChanges) {
_myMetaItem = scene->allocateID();
auto renderData = RenderableModelEntityItemMeta::Pointer(new RenderableModelEntityItemMeta(self));
auto renderPayload = render::PayloadPointer(new RenderableModelEntityItemMeta::Payload(renderData));
pendingChanges.resetItem(_myMetaItem, renderPayload);
if (_model) {
return _model->addToScene(scene, pendingChanges);
}
return true;
}
void RenderableModelEntityItem::removeFromScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene,
render::PendingChanges& pendingChanges) {
pendingChanges.removeItem(_myMetaItem);
if (_model) {
_model->removeFromScene(scene, pendingChanges);
}
}
// NOTE: this only renders the "meta" portion of the Model, namely it renders debugging items, and it handles
// the per-frame simulation/update that might be required if the model's properties changed.
void RenderableModelEntityItem::render(RenderArgs* args) {
PerformanceTimer perfTimer("RMEIrender");
assert(getType() == EntityTypes::Model);
@ -124,10 +210,29 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
}
if (hasModel()) {
if (_model) {
if (QUrl(getModelURL()) != _model->getURL()) {
qDebug() << "Updating model URL: " << getModelURL();
_model->setURL(getModelURL());
}
// check to see if our models were ready when we added them to the scene; if they were not ready, then
// fix them up in the scene
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();
render::PendingChanges pendingChanges;
if (_model->needsFixupInScene()) {
_model->removeFromScene(scene, pendingChanges);
_model->addToScene(scene, pendingChanges);
}
scene->enqueuePendingChanges(pendingChanges);
_model->setVisibleInScene(getVisible(), scene);
}
remapTextures();
glPushMatrix();
{
float alpha = getLocalRenderAlpha();
// float alpha = getLocalRenderAlpha();
if (!_model || _needsModelReload) {
// TODO: this getModel() appears to be about 3% of model render time. We should optimize
@ -167,23 +272,8 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
}
_needsInitialSimulation = false;
}
if (_model->isActive()) {
// TODO: this is the majority of model render time. And rendering of a cube model vs the basic Box render
// is significantly more expensive. Is there a way to call this that doesn't cost us as much?
PerformanceTimer perfTimer("model->render");
// filter out if not needed to render
if (args && (args->_renderMode == RenderArgs::SHADOW_RENDER_MODE)) {
if (movingOrAnimating) {
_model->renderInScene(alpha, args);
}
} else {
_model->renderInScene(alpha, args);
}
}
}
}
glPopMatrix();
if (highlightSimulationOwnership) {
glm::vec4 greenColor(0.0f, 1.0f, 0.0f, 1.0f);
@ -199,6 +289,10 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
Model* RenderableModelEntityItem::getModel(EntityTreeRenderer* renderer) {
Model* result = NULL;
if (!renderer) {
return result;
}
// make sure our renderer is setup
if (!_myRenderer) {
@ -206,7 +300,7 @@ Model* RenderableModelEntityItem::getModel(EntityTreeRenderer* renderer) {
}
assert(_myRenderer == renderer); // you should only ever render on one renderer
if (QThread::currentThread() != _myRenderer->thread()) {
if (!_myRenderer || QThread::currentThread() != _myRenderer->thread()) {
return _model;
}
@ -394,7 +488,7 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& info) {
// to the visual model and apply them to the collision model (without regard for the
// collision model's extents).
glm::vec3 scale = _dimensions / renderGeometry.getUnscaledMeshExtents().size();
glm::vec3 scale = getDimensions() / renderGeometry.getUnscaledMeshExtents().size();
// multiply each point by scale before handing the point-set off to the physics engine.
// also determine the extents of the collision model.
AABox box;

View file

@ -43,6 +43,11 @@ public:
virtual void somethingChangedNotification() { _needsInitialSimulation = true; }
virtual bool readyToAddToScene(RenderArgs* renderArgs = nullptr);
virtual bool addToScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);
virtual void removeFromScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);
virtual void render(RenderArgs* args);
virtual bool supportsDetailedRayIntersection() const { return true; }
virtual bool findDetailedRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
@ -71,6 +76,8 @@ private:
QStringList _originalTextures;
bool _originalTexturesRead;
QVector<QVector<glm::vec3>> _points;
render::ItemID _myMetaItem;
};
#endif // hifi_RenderableModelEntityItem_h

View file

@ -30,8 +30,7 @@ RenderableParticleEffectEntityItem::RenderableParticleEffectEntityItem(const Ent
}
void RenderableParticleEffectEntityItem::render(RenderArgs* args) {
assert(getType() == EntityTypes::ParticleEffect);
Q_ASSERT(getType() == EntityTypes::ParticleEffect);
PerformanceTimer perfTimer("RenderableParticleEffectEntityItem::render");
if (_texturesChangedFlag) {
@ -45,143 +44,67 @@ void RenderableParticleEffectEntityItem::render(RenderArgs* args) {
_texturesChangedFlag = false;
}
if (!_texture) {
renderUntexturedQuads(args);
} else if (_texture && !_texture->isLoaded()) {
renderUntexturedQuads(args);
} else {
renderTexturedQuads(args);
bool textured = _texture && _texture->isLoaded();
updateQuads(args, textured);
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
if (textured) {
batch.setUniformTexture(0, _texture->getGPUTexture());
}
batch.setModelTransform(getTransformToCenter());
DependencyManager::get<GeometryCache>()->renderVertices(batch, gpu::QUADS, _cacheID);
};
void RenderableParticleEffectEntityItem::renderUntexturedQuads(RenderArgs* args) {
float particleRadius = getParticleRadius();
const float MAX_COLOR = 255.0f;
glm::vec4 particleColor(getColor()[RED_INDEX] / MAX_COLOR,
getColor()[GREEN_INDEX] / MAX_COLOR,
getColor()[BLUE_INDEX] / MAX_COLOR,
getLocalRenderAlpha());
glm::vec3 upOffset = args->_viewFrustum->getUp() * particleRadius;
glm::vec3 rightOffset = args->_viewFrustum->getRight() * particleRadius;
QVector<glm::vec3> vertices(getLivingParticleCount() * VERTS_PER_PARTICLE);
quint32 count = 0;
for (quint32 i = _particleHeadIndex; i != _particleTailIndex; i = (i + 1) % _maxParticles) {
glm::vec3 pos = _particlePositions[i];
// generate corners of quad, aligned to face the camera
vertices.append(pos - rightOffset + upOffset);
vertices.append(pos + rightOffset + upOffset);
vertices.append(pos + rightOffset - upOffset);
vertices.append(pos - rightOffset - upOffset);
count++;
}
// just double checking, because if this invariant is false, we might have memory corruption bugs.
assert(count == getLivingParticleCount());
// update geometry cache with all the verts in model coordinates.
DependencyManager::get<GeometryCache>()->updateVertices(_cacheID, vertices, particleColor);
glPushMatrix();
glm::vec3 position = getPosition();
glTranslatef(position.x, position.y, position.z);
glm::quat rotation = getRotation();
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glPushMatrix();
glm::vec3 positionToCenter = getCenter() - position;
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
DependencyManager::get<GeometryCache>()->renderVertices(gpu::QUADS, _cacheID);
glPopMatrix();
glPopMatrix();
}
static glm::vec3 zSortAxis;
static bool zSort(const glm::vec3& rhs, const glm::vec3& lhs) {
return glm::dot(rhs, ::zSortAxis) > glm::dot(lhs, ::zSortAxis);
}
void RenderableParticleEffectEntityItem::renderTexturedQuads(RenderArgs* args) {
void RenderableParticleEffectEntityItem::updateQuads(RenderArgs* args, bool textured) {
float particleRadius = getParticleRadius();
const float MAX_COLOR = 255.0f;
glm::vec4 particleColor(getColor()[RED_INDEX] / MAX_COLOR,
getColor()[GREEN_INDEX] / MAX_COLOR,
getColor()[BLUE_INDEX] / MAX_COLOR,
getLocalRenderAlpha());
QVector<glm::vec3> positions(getLivingParticleCount());
quint32 count = 0;
for (quint32 i = _particleHeadIndex; i != _particleTailIndex; i = (i + 1) % _maxParticles) {
positions.append(_particlePositions[i]);
count++;
}
// just double checking, because if this invariant is false, we might have memory corruption bugs.
assert(count == getLivingParticleCount());
// sort particles back to front
::zSortAxis = args->_viewFrustum->getDirection();
qSort(positions.begin(), positions.end(), zSort);
QVector<glm::vec3> vertices(getLivingParticleCount() * VERTS_PER_PARTICLE);
QVector<glm::vec2> textureCoords(getLivingParticleCount() * VERTS_PER_PARTICLE);
glm::vec4 particleColor(toGlm(getXColor()), getLocalRenderAlpha());
glm::vec3 upOffset = args->_viewFrustum->getUp() * particleRadius;
glm::vec3 rightOffset = args->_viewFrustum->getRight() * particleRadius;
QVector<glm::vec3> vertices;
QVector<glm::vec3> positions;
QVector<glm::vec2> textureCoords;
vertices.reserve(getLivingParticleCount() * VERTS_PER_PARTICLE);
if (textured) {
positions.reserve(getLivingParticleCount());
textureCoords.reserve(getLivingParticleCount() * VERTS_PER_PARTICLE);
for (quint32 i = _particleHeadIndex; i != _particleTailIndex; i = (i + 1) % _maxParticles) {
positions.append(_particlePositions[i]);
textureCoords.append(glm::vec2(0, 1));
textureCoords.append(glm::vec2(1, 1));
textureCoords.append(glm::vec2(1, 0));
textureCoords.append(glm::vec2(0, 0));
}
// sort particles back to front
::zSortAxis = args->_viewFrustum->getDirection();
qSort(positions.begin(), positions.end(), zSort);
}
for (int i = 0; i < positions.size(); i++) {
glm::vec3 pos = positions[i];
glm::vec3 pos = (textured) ? positions[i] : _particlePositions[i];
// generate corners of quad aligned to face the camera.
vertices.append(pos - rightOffset + upOffset);
vertices.append(pos + rightOffset + upOffset);
vertices.append(pos + rightOffset - upOffset);
vertices.append(pos - rightOffset - upOffset);
textureCoords.append(glm::vec2(0, 1));
textureCoords.append(glm::vec2(1, 1));
textureCoords.append(glm::vec2(1, 0));
textureCoords.append(glm::vec2(0, 0));
}
// update geometry cache with all the verts in model coordinates.
DependencyManager::get<GeometryCache>()->updateVertices(_cacheID, vertices, textureCoords, particleColor);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, _texture->getID());
glPushMatrix();
glm::vec3 position = getPosition();
glTranslatef(position.x, position.y, position.z);
glm::quat rotation = getRotation();
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glPushMatrix();
glm::vec3 positionToCenter = getCenter() - position;
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
DependencyManager::get<GeometryCache>()->renderVertices(gpu::QUADS, _cacheID);
glPopMatrix();
glPopMatrix();
glDisable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, 0);
if (textured) {
DependencyManager::get<GeometryCache>()->updateVertices(_cacheID, vertices, textureCoords, particleColor);
} else {
DependencyManager::get<GeometryCache>()->updateVertices(_cacheID, vertices, particleColor);
}
}
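// --- Illustrative sketch, not part of this commit ---
// The loop above builds one camera-facing quad per particle by offsetting the
// particle position along the view frustum's up and right vectors, scaled by the
// particle radius. A minimal, self-contained version of that corner generation
// (makeBillboardQuad is a hypothetical helper, not from this file):
#include <array>
#include <glm/glm.hpp>

std::array<glm::vec3, 4> makeBillboardQuad(const glm::vec3& center,
                                           const glm::vec3& cameraUp,
                                           const glm::vec3& cameraRight,
                                           float radius) {
    // Scale the camera basis by the particle radius, then emit the corners in the
    // same order as the texture coordinates above: (0,1), (1,1), (1,0), (0,0).
    glm::vec3 up = cameraUp * radius;
    glm::vec3 right = cameraRight * radius;
    return { center - right + up,
             center + right + up,
             center + right - up,
             center - right - up };
}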

View file

@@ -13,6 +13,7 @@
#include <ParticleEffectEntityItem.h>
#include <TextureCache.h>
#include "RenderableEntityItem.h"
class RenderableParticleEffectEntityItem : public ParticleEffectEntityItem {
public:
@@ -20,8 +21,9 @@ public:
RenderableParticleEffectEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties);
virtual void render(RenderArgs* args);
void renderUntexturedQuads(RenderArgs* args);
void renderTexturedQuads(RenderArgs* args);
void updateQuads(RenderArgs* args, bool textured);
SIMPLE_RENDERABLE();
protected:

View file

@@ -170,7 +170,7 @@ void RenderablePolyVoxEntityItem::setVoxelData(QByteArray voxelData) {
}
glm::vec3 RenderablePolyVoxEntityItem::getSurfacePositionAdjustment() const {
glm::vec3 scale = _dimensions / _voxelVolumeSize; // meters / voxel-units
glm::vec3 scale = getDimensions() / _voxelVolumeSize; // meters / voxel-units
switch (_voxelSurfaceStyle) {
case PolyVoxEntityItem::SURFACE_MARCHING_CUBES:
return scale / 2.0f;
@@ -183,18 +183,18 @@ glm::vec3 RenderablePolyVoxEntityItem::getSurfacePositionAdjustment() const {
}
glm::mat4 RenderablePolyVoxEntityItem::voxelToLocalMatrix() const {
glm::vec3 scale = _dimensions / _voxelVolumeSize; // meters / voxel-units
glm::vec3 center = getCenter();
glm::vec3 scale = getDimensions() / _voxelVolumeSize; // meters / voxel-units
glm::vec3 center = getCenterPosition();
glm::vec3 position = getPosition();
glm::vec3 positionToCenter = center - position;
positionToCenter -= _dimensions * glm::vec3(0.5f, 0.5f, 0.5f) - getSurfacePositionAdjustment();
positionToCenter -= getDimensions() * glm::vec3(0.5f, 0.5f, 0.5f) - getSurfacePositionAdjustment();
glm::mat4 centerToCorner = glm::translate(glm::mat4(), positionToCenter);
glm::mat4 scaled = glm::scale(centerToCorner, scale);
return scaled;
}
glm::mat4 RenderablePolyVoxEntityItem::voxelToWorldMatrix() const {
glm::mat4 rotation = glm::mat4_cast(_rotation);
glm::mat4 rotation = glm::mat4_cast(getRotation());
glm::mat4 translation = glm::translate(getPosition());
return translation * rotation * voxelToLocalMatrix();
}
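// --- Illustrative sketch, not part of this commit ---
// voxelToWorldMatrix() composes translation * rotation * voxelToLocalMatrix(), so a
// voxel index maps into world space with a single matrix multiply. The reverse
// mapping via glm::inverse() is only an assumption about how worldToVoxelMatrix()
// is implemented; voxelIndexToWorld and worldToVoxelIndex are hypothetical helpers.
#include <glm/glm.hpp>

glm::vec3 voxelIndexToWorld(const glm::mat4& voxelToWorld, int x, int y, int z) {
    // Promote the integer voxel index to a homogeneous point and transform it.
    glm::vec4 world = voxelToWorld * glm::vec4((float)x, (float)y, (float)z, 1.0f);
    return glm::vec3(world);
}

glm::vec3 worldToVoxelIndex(const glm::mat4& voxelToWorld, const glm::vec3& worldPos) {
    // Assumes world-to-voxel is simply the inverse of voxel-to-world.
    glm::vec4 voxel = glm::inverse(voxelToWorld) * glm::vec4(worldPos, 1.0f);
    return glm::vec3(voxel);
}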
@@ -213,7 +213,7 @@ uint8_t RenderablePolyVoxEntityItem::getVoxel(int x, int y, int z) {
// if _voxelSurfaceStyle is SURFACE_EDGED_CUBIC, we maintain an extra layer of
// voxels all around the requested voxel space. Having the empty voxels around
// the edges changes how the surface extractor behaves.
if (_voxelSurfaceStyle == SURFACE_EDGED_CUBIC) {
return _volData->getVoxelAt(x + 1, y + 1, z + 1);
}
@@ -239,7 +239,7 @@ void RenderablePolyVoxEntityItem::updateOnCount(int x, int y, int z, uint8_t toV
if (!inUserBounds(_volData, _voxelSurfaceStyle, x, y, z)) {
return;
}
uint8_t uVoxelValue = getVoxel(x, y, z);
if (toValue != 0) {
if (uVoxelValue == 0) {
@@ -294,7 +294,7 @@ void RenderablePolyVoxEntityItem::setSphereInVolume(glm::vec3 center, float radi
void RenderablePolyVoxEntityItem::setSphere(glm::vec3 centerWorldCoords, float radiusWorldCoords, uint8_t toValue) {
// glm::vec3 centerVoxelCoords = worldToVoxelCoordinates(centerWorldCoords);
glm::vec4 centerVoxelCoords = worldToVoxelMatrix() * glm::vec4(centerWorldCoords, 1.0f);
glm::vec3 scale = _dimensions / _voxelVolumeSize; // meters / voxel-units
glm::vec3 scale = getDimensions() / _voxelVolumeSize; // meters / voxel-units
float scaleY = scale.y;
float radiusVoxelCoords = radiusWorldCoords / scaleY;
setSphereInVolume(glm::vec3(centerVoxelCoords), radiusVoxelCoords, toValue);
@@ -347,8 +347,8 @@ void RenderablePolyVoxEntityItem::getModel() {
sizeof(PolyVox::PositionMaterialNormal),
gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::RAW)));
// auto normalAttrib = mesh->getAttributeBuffer(gpu::Stream::NORMAL);
// for (auto normal = normalAttrib.begin<glm::vec3>(); normal != normalAttrib.end<glm::vec3>(); normal++) {
// (*normal) = -(*normal);
@@ -363,7 +363,7 @@ void RenderablePolyVoxEntityItem::getModel() {
// gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::RAW)));
#ifdef WANT_DEBUG
qDebug() << "---- vecIndices.size() =" << vecIndices.size();
qDebug() << "---- vecVertices.size() =" << vecVertices.size();
@@ -380,22 +380,25 @@ void RenderablePolyVoxEntityItem::render(RenderArgs* args) {
getModel();
}
glPushMatrix();
glm::mat4 m = voxelToWorldMatrix();
glMultMatrixf(&m[0][0]);
Transform transform;
transform.setTranslation(getPosition() - getRegistrationPoint() * getDimensions());
transform.setRotation(getRotation());
transform.setScale(getDimensions() / _voxelVolumeSize);
auto mesh = _modelGeometry.getMesh();
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch);
batch.setModelTransform(transform);
batch.setInputFormat(mesh->getVertexFormat());
batch.setInputBuffer(gpu::Stream::POSITION, mesh->getVertexBuffer());
batch.setInputBuffer(gpu::Stream::NORMAL,
mesh->getVertexBuffer()._buffer,
sizeof(float) * 3,
mesh->getVertexBuffer()._stride);
batch.setIndexBuffer(gpu::UINT32, mesh->getIndexBuffer()._buffer, 0);
batch.drawIndexed(gpu::TRIANGLES, mesh->getNumIndices(), 0);
auto mesh = _modelGeometry.getMesh();
gpu::Batch batch;
batch.setInputFormat(mesh->getVertexFormat());
batch.setInputBuffer(gpu::Stream::POSITION, mesh->getVertexBuffer());
batch.setInputBuffer(gpu::Stream::NORMAL,
mesh->getVertexBuffer()._buffer,
sizeof(float) * 3,
mesh->getVertexBuffer()._stride);
batch.setIndexBuffer(gpu::UINT32, mesh->getIndexBuffer()._buffer, 0);
batch.drawIndexed(gpu::TRIANGLES, mesh->getNumIndices(), 0);
gpu::GLBackend::renderBatch(batch);
glPopMatrix();
RenderableDebugableEntityItem::render(this, args);
}
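// --- Illustrative sketch, not part of this commit ---
// The new render path records work into the frame's gpu::Batch instead of issuing
// immediate-mode GL calls. The minimal pattern for drawing one indexed mesh that
// way, using only calls that appear above (drawIndexedMesh and the MeshPointer
// template parameter are illustrative; gpu::Batch and Transform come from the
// headers already used by this file):
template <typename MeshPointer>
void drawIndexedMesh(gpu::Batch& batch, const Transform& modelTransform, const MeshPointer& mesh) {
    batch.setModelTransform(modelTransform);           // object-to-world transform
    batch.setInputFormat(mesh->getVertexFormat());     // vertex layout
    batch.setInputBuffer(gpu::Stream::POSITION, mesh->getVertexBuffer());
    batch.setIndexBuffer(gpu::UINT32, mesh->getIndexBuffer()._buffer, 0);
    batch.drawIndexed(gpu::TRIANGLES, mesh->getNumIndices(), 0);
}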
@@ -444,17 +447,15 @@ bool RenderablePolyVoxEntityItem::findDetailedRayIntersection(const glm::vec3& o
glm::mat4 wtvMatrix = worldToVoxelMatrix();
glm::vec3 normDirection = glm::normalize(direction);
// set ray cast length to long enough to cover all of the voxel space
float distanceToEntity = glm::distance(origin, _position);
float largestDimension = glm::max(_dimensions.x, _dimensions.y, _dimensions.z) * 2.0f;
// the PolyVox ray intersection code requires a near and far point.
// set ray cast length to long enough to cover all of the voxel space
float distanceToEntity = glm::distance(origin, getPosition());
float largestDimension = glm::max(getDimensions().x, getDimensions().y, getDimensions().z) * 2.0f;
glm::vec3 farPoint = origin + normDirection * (distanceToEntity + largestDimension);
glm::vec4 originInVoxel = wtvMatrix * glm::vec4(origin, 1.0f);
glm::vec4 farInVoxel = wtvMatrix * glm::vec4(farPoint, 1.0f);
PolyVox::Vector3DFloat startPoint(originInVoxel.x, originInVoxel.y, originInVoxel.z);
// PolyVox::Vector3DFloat pvDirection(directionInVoxel.x, directionInVoxel.y, directionInVoxel.z);
PolyVox::Vector3DFloat endPoint(farInVoxel.x, farInVoxel.y, farInVoxel.z);
PolyVox::RaycastResult raycastResult;
@@ -477,7 +478,7 @@ bool RenderablePolyVoxEntityItem::findDetailedRayIntersection(const glm::vec3& o
}
result -= glm::vec4(0.5f, 0.5f, 0.5f, 0.0f);
glm::vec4 intersectedWorldPosition = voxelToWorldMatrix() * result;
distance = glm::distance(glm::vec3(intersectedWorldPosition), origin);
@@ -554,9 +555,9 @@ void RenderablePolyVoxEntityItem::decompressVolumeData() {
<< voxelXSize << voxelYSize << voxelZSize;
return;
}
int rawSize = voxelXSize * voxelYSize * voxelZSize;
QByteArray compressedData;
reader >> compressedData;
QByteArray uncompressedData = qUncompress(compressedData);
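// --- Illustrative sketch, not part of this commit ---
// The read path above pulls a qCompress()'d byte array out of a QDataStream and
// inflates it with qUncompress(). A matching write side might look like this, under
// the assumption that the dimensions travel in the same stream ahead of the
// compressed blob (packVolumeData and the quint16 field widths are illustrative):
#include <QByteArray>
#include <QDataStream>
#include <QIODevice>

QByteArray packVolumeData(quint16 xSize, quint16 ySize, quint16 zSize, const QByteArray& rawVoxels) {
    QByteArray packed;
    QDataStream writer(&packed, QIODevice::WriteOnly);
    writer << xSize << ySize << zSize;   // dimensions first
    writer << qCompress(rawVoxels);      // zlib-compressed voxel bytes
    return packed;
}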
@@ -633,9 +634,6 @@ void RenderablePolyVoxEntityItem::computeShapeInfo(ShapeInfo& info) {
float offL = -0.5f;
float offH = 0.5f;
// float offL = 0.0f;
// float offH = 1.0f;
glm::vec3 p000 = glm::vec3(wToM * glm::vec4(x + offL, y + offL, z + offL, 1.0f));
glm::vec3 p001 = glm::vec3(wToM * glm::vec4(x + offL, y + offL, z + offH, 1.0f));
glm::vec3 p010 = glm::vec3(wToM * glm::vec4(x + offL, y + offH, z + offL, 1.0f));
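// --- Illustrative sketch, not part of this commit ---
// The shape-info code above transforms each corner of a voxel cell (offsets
// offL = -0.5 and offH = +0.5 around the cell center) through the same matrix.
// The full set of eight corners can be enumerated like this (voxelCellCorners is
// a hypothetical helper; wToM stands in for the matrix used above):
#include <vector>
#include <glm/glm.hpp>

std::vector<glm::vec3> voxelCellCorners(const glm::mat4& wToM, float x, float y, float z) {
    std::vector<glm::vec3> corners;
    const float offsets[2] = { -0.5f, 0.5f };
    for (float ox : offsets) {
        for (float oy : offsets) {
            for (float oz : offsets) {
                // Transform each of the 2 x 2 x 2 = 8 corners into model space.
                corners.push_back(glm::vec3(wToM * glm::vec4(x + ox, y + oy, z + oz, 1.0f)));
            }
        }
    }
    return corners;
}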

View file

@@ -16,6 +16,7 @@
#include "PolyVoxEntityItem.h"
#include "RenderableDebugableEntityItem.h"
#include "RenderableEntityItem.h"
class RenderablePolyVoxEntityItem : public PolyVoxEntityItem {
public:
@@ -70,6 +71,8 @@ public:
virtual void setAll(uint8_t toValue);
virtual void setVoxelInVolume(glm::vec3 position, uint8_t toValue);
SIMPLE_RENDERABLE();
private:
// The PolyVoxEntityItem class has _voxelData which contains dimensions and compressed voxel data. The dimensions

Some files were not shown because too many files have changed in this diff.