Merge branch 'master' of https://github.com/highfidelity/hifi into rig

Howard Stearns 2015-07-17 14:53:10 -07:00
commit d8287ac2b4
45 changed files with 1030 additions and 692 deletions


@ -3,8 +3,8 @@ set(EXTERNAL_NAME polyvox)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://hifi-public.s3.amazonaws.com/dependencies/polyvox.zip
URL_MD5 904b840328278c9b36fa7a14be730c34
URL http://hifi-public.s3.amazonaws.com/dependencies/polyvox-master-2015-7-15.zip
URL_MD5 9ec6323b87e849ae36e562ae1c7494a9
CMAKE_ARGS -DENABLE_EXAMPLES=OFF -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
LOG_DOWNLOAD 1
@ -24,7 +24,16 @@ if (APPLE)
${EXTERNAL_NAME}
change-install-name
COMMENT "Calling install_name_tool on libraries to fix install name for dylib linking"
COMMAND ${CMAKE_COMMAND} -DINSTALL_NAME_LIBRARY_DIR=${INSTALL_NAME_LIBRARY_DIR} -P ${EXTERNAL_PROJECT_DIR}/OSXInstallNameChange.cmake
COMMAND ${CMAKE_COMMAND} -DINSTALL_NAME_LIBRARY_DIR=${INSTALL_NAME_LIBRARY_DIR}/Debug -P ${EXTERNAL_PROJECT_DIR}/OSXInstallNameChange.cmake
DEPENDEES install
WORKING_DIRECTORY <SOURCE_DIR>
LOG 1
)
ExternalProject_Add_Step(
${EXTERNAL_NAME}
change-install-name
COMMENT "Calling install_name_tool on libraries to fix install name for dylib linking"
COMMAND ${CMAKE_COMMAND} -DINSTALL_NAME_LIBRARY_DIR=${INSTALL_NAME_LIBRARY_DIR}/Release -P ${EXTERNAL_PROJECT_DIR}/OSXInstallNameChange.cmake
DEPENDEES install
WORKING_DIRECTORY <SOURCE_DIR>
LOG 1
@ -48,12 +57,15 @@ endif ()
if (WIN32)
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY ${INSTALL_DIR}/PolyVoxCore/lib/PolyVoxCore.lib CACHE FILEPATH "polyvox core library")
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_DEBUG ${INSTALL_DIR}/PolyVoxCore/lib/Debug/PolyVoxCore.lib CACHE FILEPATH "polyvox core library")
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_RELEASE ${INSTALL_DIR}/PolyVoxCore/lib/Release/PolyVoxCore.lib CACHE FILEPATH "polyvox core library")
# set(${EXTERNAL_NAME_UPPER}_UTIL_LIBRARY ${INSTALL_DIR}/PolyVoxUtil/lib/PolyVoxUtil.lib CACHE FILEPATH "polyvox util library")
elseif (APPLE)
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY ${INSTALL_DIR}/lib/libPolyVoxCore.dylib CACHE FILEPATH "polyvox core library")
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_DEBUG ${INSTALL_DIR}/lib/Debug/libPolyVoxCore.dylib CACHE FILEPATH "polyvox core library")
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_RELEASE ${INSTALL_DIR}/lib/Release/libPolyVoxCore.dylib CACHE FILEPATH "polyvox core library")
# set(${EXTERNAL_NAME_UPPER}_UTIL_LIBRARY ${INSTALL_DIR}/lib/libPolyVoxUtil.dylib CACHE FILEPATH "polyvox util library")
else ()
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY ${INSTALL_DIR}/lib/libPolyVoxCore.so CACHE FILEPATH "polyvox core library")
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_DEBUG ${INSTALL_DIR}/lib/Debug/libPolyVoxCore.so CACHE FILEPATH "polyvox core library")
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_RELEASE ${INSTALL_DIR}/lib/Release/libPolyVoxCore.so CACHE FILEPATH "polyvox core library")
# set(${EXTERNAL_NAME_UPPER}_UTIL_LIBRARY ${INSTALL_DIR}/lib/libPolyVoxUtil.so CACHE FILEPATH "polyvox util library")
endif ()


@ -24,9 +24,12 @@ hifi_library_search_hints("polyvox")
find_path(POLYVOX_CORE_INCLUDE_DIRS PolyVoxCore/SimpleVolume.h PATH_SUFFIXES include include/PolyVoxCore HINTS ${POLYVOX_SEARCH_DIRS})
# find_path(POLYVOX_UTIL_INCLUDE_DIRS PolyVoxUtil/Serialization.h PATH_SUFFIXES include include/PolyVoxUtil HINTS ${POLYVOX_SEARCH_DIRS})
find_library(POLYVOX_CORE_LIBRARY NAMES PolyVoxCore PATH_SUFFIXES lib HINTS ${POLYVOX_SEARCH_DIRS})
find_library(POLYVOX_CORE_LIBRARY_DEBUG NAMES PolyVoxCore PATH_SUFFIXES lib/Debug HINTS ${POLYVOX_SEARCH_DIRS})
find_library(POLYVOX_CORE_LIBRARY_RELEASE NAMES PolyVoxCore PATH_SUFFIXES lib/Release lib HINTS ${POLYVOX_SEARCH_DIRS})
# find_library(POLYVOX_UTIL_LIBRARY NAMES PolyVoxUtil PATH_SUFFIXES lib HINTS ${POLYVOX_SEARCH_DIRS})
include(SelectLibraryConfigurations)
select_library_configurations(POLYVOX_CORE)
# if (WIN32)
# find_path(POLYVOX_DLL_PATH polyvox.dll PATH_SUFFIXES bin HINTS ${POLYVOX_SEARCH_DIRS})

examples/FlockOfbirds.js

@ -0,0 +1,265 @@
//
// flockOfbirds.js
// examples
//
// Copyright 2014 High Fidelity, Inc.
// Creates a flock of birds that fly around and chirp, staying inside the corners of the box defined
// at the start of the script.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// The area over which the birds will fly
var lowerCorner = { x: 1, y: 1, z: 1 };
var upperCorner = { x: 10, y: 10, z: 10 };
var STARTING_FRACTION = 0.25;
var NUM_BIRDS = 50;
var playSounds = true;
var SOUND_PROBABILITY = 0.001;
var numPlaying = 0;
var BIRD_SIZE = 0.08;
var BIRD_MASTER_VOLUME = 0.1;
var FLAP_PROBABILITY = 0.005;
var RANDOM_FLAP_VELOCITY = 1.0;
var FLAP_UP = 1.0;
var BIRD_GRAVITY = -0.5;
var LINEAR_DAMPING = 0.2;
var FLAP_FALLING_PROBABILITY = 0.025;
var MIN_ALIGNMENT_VELOCITY = 0.0;
var MAX_ALIGNMENT_VELOCITY = 1.0;
var VERTICAL_ALIGNMENT_COUPLING = 0.0;
var ALIGNMENT_FORCE = 1.5;
var COHESION_FORCE = 1.0;
var MAX_COHESION_VELOCITY = 0.5;
var floor = false;
var MAKE_FLOOR = false;
var averageVelocity = { x: 0, y: 0, z: 0 };
var averagePosition = { x: 0, y: 0, z: 0 };
var birdsLoaded = false;
var birds = [];
var playing = [];
function randomVector(scale) {
return { x: Math.random() * scale - scale / 2.0, y: Math.random() * scale - scale / 2.0, z: Math.random() * scale - scale / 2.0 };
}
function updateBirds(deltaTime) {
if (!Entities.serversExist() || !Entities.canRez()) {
return;
}
if (!birdsLoaded) {
loadBirds(NUM_BIRDS);
birdsLoaded = true;
return;
}
var sumVelocity = { x: 0, y: 0, z: 0 };
var sumPosition = { x: 0, y: 0, z: 0 };
var birdPositionsCounted = 0;
var birdVelocitiesCounted = 0;
for (var i = 0; i < birds.length; i++) {
if (birds[i].entityId) {
var properties = Entities.getEntityProperties(birds[i].entityId);
// If Bird has been deleted, bail
if (properties.id != birds[i].entityId) {
birds[i].entityId = false;
return;
}
// Sum up average position and velocity
if (Vec3.length(properties.velocity) > MIN_ALIGNMENT_VELOCITY) {
sumVelocity = Vec3.sum(sumVelocity, properties.velocity);
birdVelocitiesCounted += 1;
}
sumPosition = Vec3.sum(sumPosition, properties.position);
birdPositionsCounted += 1;
var downwardSpeed = (properties.velocity.y < 0) ? -properties.velocity.y : 0.0;
if ((properties.position.y < upperCorner.y) && (Math.random() < (FLAP_PROBABILITY + (downwardSpeed * FLAP_FALLING_PROBABILITY)))) {
// More likely to flap if falling
var randomVelocity = randomVector(RANDOM_FLAP_VELOCITY);
randomVelocity.y = FLAP_UP + Math.random() * FLAP_UP;
// Alignment Velocity
var alignmentVelocityMagnitude = Math.min(MAX_ALIGNMENT_VELOCITY, Vec3.length(Vec3.multiply(ALIGNMENT_FORCE, averageVelocity)));
var alignmentVelocity = Vec3.multiply(alignmentVelocityMagnitude, Vec3.normalize(averageVelocity));
alignmentVelocity.y *= VERTICAL_ALIGNMENT_COUPLING;
// Cohesion
var distanceFromCenter = Vec3.length(Vec3.subtract(averagePosition, properties.position));
var cohesionVelocitySize = Math.min(distanceFromCenter * COHESION_FORCE, MAX_COHESION_VELOCITY);
var cohesionVelocity = Vec3.multiply(cohesionVelocitySize, Vec3.normalize(Vec3.subtract(averagePosition, properties.position)));
var newVelocity = Vec3.sum(randomVelocity, Vec3.sum(alignmentVelocity, cohesionVelocity));
Entities.editEntity(birds[i].entityId, { velocity: Vec3.sum(properties.velocity, newVelocity) });
}
// Check whether to play a chirp
if (playSounds && (!birds[i].audioId || !birds[i].audioId.isPlaying) && (Math.random() < ((numPlaying > 0) ? SOUND_PROBABILITY / numPlaying : SOUND_PROBABILITY))) {
var options = {
position: properties.position,
volume: BIRD_MASTER_VOLUME
};
// Play chirp
if (birds[i].audioId) {
birds[i].audioId.setOptions(options);
birds[i].audioId.restart();
} else {
birds[i].audioId = Audio.playSound(birds[i].sound, options);
}
numPlaying++;
// Change size
Entities.editEntity(birds[i].entityId, { dimensions: Vec3.multiply(1.5, properties.dimensions)});
} else if (birds[i].audioId) {
// If bird is playing a chirp
if (!birds[i].audioId.isPlaying) {
Entities.editEntity(birds[i].entityId, { dimensions: { x: BIRD_SIZE, y: BIRD_SIZE, z: BIRD_SIZE }});
numPlaying--;
}
}
// Keep birds in their 'cage'
var bounce = false;
var newVelocity = properties.velocity;
var newPosition = properties.position;
if (properties.position.x < lowerCorner.x) {
newPosition.x = lowerCorner.x;
newVelocity.x *= -1.0;
bounce = true;
} else if (properties.position.x > upperCorner.x) {
newPosition.x = upperCorner.x;
newVelocity.x *= -1.0;
bounce = true;
}
if (properties.position.y < lowerCorner.y) {
newPosition.y = lowerCorner.y;
newVelocity.y *= -1.0;
bounce = true;
} else if (properties.position.y > upperCorner.y) {
newPosition.y = upperCorner.y;
newVelocity.y *= -1.0;
bounce = true;
}
if (properties.position.z < lowerCorner.z) {
newPosition.z = lowerCorner.z;
newVelocity.z *= -1.0;
bounce = true;
} else if (properties.position.z > upperCorner.z) {
newPosition.z = upperCorner.z;
newVelocity.z *= -1.0;
bounce = true;
}
if (bounce) {
Entities.editEntity(birds[i].entityId, { position: newPosition, velocity: newVelocity });
}
}
}
// Update average velocity and position of flock
if (birdVelocitiesCounted > 0) {
averageVelocity = Vec3.multiply(1.0 / birdVelocitiesCounted, sumVelocity);
//print(Vec3.length(averageVelocity));
}
if (birdPositionsCounted > 0) {
averagePosition = Vec3.multiply(1.0 / birdPositionsCounted, sumPosition);
}
}
// Connect a call back that happens every frame
Script.update.connect(updateBirds);
// Delete our little friends if script is stopped
Script.scriptEnding.connect(function() {
for (var i = 0; i < birds.length; i++) {
Entities.deleteEntity(birds[i].entityId);
}
if (floor) {
Entities.deleteEntity(floor);
}
});
function loadBirds(howMany) {
while (!Entities.serversExist() || !Entities.canRez()) {
}
var sound_filenames = ["bushtit_1.raw", "bushtit_2.raw", "bushtit_3.raw"];
/* Here are more sounds/species you can use
, "mexicanWhipoorwill.raw",
"rosyfacedlovebird.raw", "saysphoebe.raw", "westernscreechowl.raw", "bandtailedpigeon.wav", "bridledtitmouse.wav",
"browncrestedflycatcher.wav", "commonnighthawk.wav", "commonpoorwill.wav", "doublecrestedcormorant.wav",
"gambelsquail.wav", "goldcrownedkinglet.wav", "greaterroadrunner.wav","groovebilledani.wav","hairywoodpecker.wav",
"housewren.wav","hummingbird.wav", "mountainchickadee.wav", "nightjar.wav", "piebilledgrieb.wav", "pygmynuthatch.wav",
"whistlingduck.wav", "woodpecker.wav"];
*/
var colors = [
{ red: 242, green: 207, blue: 13 },
{ red: 238, green: 94, blue: 11 },
{ red: 81, green: 30, blue: 7 },
{ red: 195, green: 176, blue: 81 },
{ red: 235, green: 190, blue: 152 },
{ red: 167, green: 99, blue: 52 },
{ red: 199, green: 122, blue: 108 },
{ red: 246, green: 220, blue: 189 },
{ red: 208, green: 145, blue: 65 },
{ red: 173, green: 120 , blue: 71 },
{ red: 132, green: 147, blue: 174 },
{ red: 164, green: 74, blue: 40 },
{ red: 131, green: 127, blue: 134 },
{ red: 209, green: 157, blue: 117 },
{ red: 205, green: 191, blue: 193 },
{ red: 193, green: 154, blue: 118 },
{ red: 205, green: 190, blue: 169 },
{ red: 199, green: 111, blue: 69 },
{ red: 221, green: 223, blue: 228 },
{ red: 115, green: 92, blue: 87 },
{ red: 214, green: 165, blue: 137 },
{ red: 160, green: 124, blue: 33 },
{ red: 117, green: 91, blue: 86 },
{ red: 113, green: 104, blue: 107 },
{ red: 216, green: 153, blue: 99 },
{ red: 242, green: 226, blue: 64 }
];
var SOUND_BASE_URL = "http://public.highfidelity.io/sounds/Animals/";
for (var i = 0; i < howMany; i++) {
var whichBird = Math.floor(Math.random() * sound_filenames.length);
var position = {
x: lowerCorner.x + (upperCorner.x - lowerCorner.x) / 2.0 + (Math.random() - 0.5) * (upperCorner.x - lowerCorner.x) * STARTING_FRACTION,
y: lowerCorner.y + (upperCorner.y - lowerCorner.y) / 2.0 + (Math.random() - 0.5) * (upperCorner.y - lowerCorner.y) * STARTING_FRACTION,
z: lowerCorner.z + (upperCorner.z - lowerCorner.z) / 2.0 + (Math.random() - 0.5) * (upperCorner.z - lowerCorner.z) * STARTING_FRACTION
};
birds.push({
sound: SoundCache.getSound(SOUND_BASE_URL + sound_filenames[whichBird]),
entityId: Entities.addEntity({
type: "Sphere",
position: position,
dimensions: { x: BIRD_SIZE, y: BIRD_SIZE, z: BIRD_SIZE },
gravity: { x: 0, y: BIRD_GRAVITY, z: 0 },
velocity: { x: 0, y: -0.1, z: 0 },
linearDamping: LINEAR_DAMPING,
collisionsWillMove: true,
color: colors[whichBird]
}),
audioId: false,
isPlaying: false
});
}
if (MAKE_FLOOR) {
var FLOOR_THICKNESS = 0.05;
floor = Entities.addEntity({ type: "Box", position: { x: lowerCorner.x + (upperCorner.x - lowerCorner.x) / 2.0,
y: lowerCorner.y,
z: lowerCorner.z + (upperCorner.z - lowerCorner.z) / 2.0 },
dimensions: { x: (upperCorner.x - lowerCorner.x), y: FLOOR_THICKNESS, z: (upperCorner.z - lowerCorner.z)},
color: {red: 100, green: 100, blue: 100}
});
}
}
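For readers skimming the new script: each frame updateBirds() adds three capped velocity terms to every bird (a random upward-biased flap, an alignment term along the flock's average velocity, and a cohesion term toward the flock's average position), then bounces birds off the walls of the box defined by lowerCorner and upperCorner. The sketch below is a reading aid rather than part of the commit; it simply restates that steering math using the script's own constants, its randomVector() helper, and the Interface Vec3 calls shown above.
// Condensed restatement of the per-bird steering performed in updateBirds().
// Assumes the constants, randomVector(), averageVelocity and averagePosition
// defined by the script, plus the Interface scripting Vec3 API.
function computeSteeringVelocity(properties) {
    // 1. Random flap, biased upward so a flapping bird tends to climb.
    var flap = randomVector(RANDOM_FLAP_VELOCITY);
    flap.y = FLAP_UP + Math.random() * FLAP_UP;
    // 2. Alignment: follow the flock's average velocity, capped at MAX_ALIGNMENT_VELOCITY.
    var alignSpeed = Math.min(MAX_ALIGNMENT_VELOCITY,
        Vec3.length(Vec3.multiply(ALIGNMENT_FORCE, averageVelocity)));
    var alignment = Vec3.multiply(alignSpeed, Vec3.normalize(averageVelocity));
    alignment.y *= VERTICAL_ALIGNMENT_COUPLING;
    // 3. Cohesion: steer toward the flock's average position, capped at MAX_COHESION_VELOCITY.
    var toCenter = Vec3.subtract(averagePosition, properties.position);
    var cohesionSpeed = Math.min(Vec3.length(toCenter) * COHESION_FORCE, MAX_COHESION_VELOCITY);
    var cohesion = Vec3.multiply(cohesionSpeed, Vec3.normalize(toCenter));
    // updateBirds() adds this sum onto the bird's current velocity via Entities.editEntity().
    return Vec3.sum(flap, Vec3.sum(alignment, cohesion));
}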


@ -656,7 +656,9 @@ function mouseMove(event) {
function handleIdleMouse() {
idleMouseTimerId = null;
highlightEntityUnderCursor(lastMousePosition, true);
if (isActive) {
highlightEntityUnderCursor(lastMousePosition, true);
}
}
function highlightEntityUnderCursor(position, accurateRay) {


@ -130,10 +130,10 @@
var others = Entities.findEntities(this.properties.position, this.properties.dimensions.y);
for (var i = 0; i < others.length; i++) {
var piece = others[i];
var pieceID = others[i];
if (piece.id != this.entityID) {
var properties = Entities.getEntityProperties(piece);
if (pieceID != this.entityID) {
var properties = Entities.getEntityProperties(pieceID);
var isWhite = properties.modelURL.search("White") !== -1;
var type = (properties.modelURL.search("King") !== -1) ? 4 :
@ -147,7 +147,7 @@
if (myPos.i === piecePos.i && myPos.j === piecePos.j && type !== -2) {
var position = this.getAbsolutePosition((isWhite) ? { i: type, j: -1 } : { i: 7 - type, j: 8 },
properties.dimensions.y / 2.0);
Entities.editEntity(piece, {
Entities.editEntity(pieceID, {
position: position
});
break;


@ -10,97 +10,13 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var MOVE_TIMESCALE = 0.1;
var INV_MOVE_TIMESCALE = 1.0 / MOVE_TIMESCALE;
var MAX_SOLID_ANGLE = 0.01; // objects that appear smaller than this can't be grabbed
var CLOSE_ENOUGH = 0.001;
var ZERO_VEC3 = { x: 0, y: 0, z: 0 };
var ANGULAR_DAMPING_RATE = 0.40;
// NOTE: to improve readability global variable names start with 'g'
var gIsGrabbing = false;
var gGrabbedEntity = null;
var gActionID = null;
var gEntityProperties;
var gStartPosition;
var gStartRotation;
var gCurrentPosition;
var gOriginalGravity = ZERO_VEC3;
var gPlaneNormal = ZERO_VEC3;
// gMaxGrabDistance is a function of the size of the object.
var gMaxGrabDistance;
// gGrabMode defines the degrees of freedom of the grab target positions
// relative to gGrabStartPosition options include:
// xzPlane (default)
// verticalCylinder (SHIFT)
// rotate (CONTROL)
// Modes to eventually support?:
// xyPlane
// yzPlane
// polar
// elevationAzimuth
var gGrabMode = "xzplane";
// gGrabOffset allows the user to grab an object off-center. It points from the object's center
// to the point where the ray intersects the grab plane (at the moment the grab is initiated).
// Future target positions of the ray intersection are on the same plane, and the offset is subtracted
// to compute the target position of the object's center.
var gGrabOffset = { x: 0, y: 0, z: 0 };
var gTargetPosition;
var gTargetRotation;
var gLiftKey = false; // SHIFT
var gRotateKey = false; // CONTROL
var gInitialMouse = { x: 0, y: 0 };
var gPreviousMouse = { x: 0, y: 0 };
var gMouseCursorLocation = { x: 0, y: 0 };
var gMouseAtRotateStart = { x: 0, y: 0 };
var gBeaconHeight = 0.10;
// var gAngularVelocity = ZERO_VEC3;
// TODO: play sounds again when we aren't leaking AudioInjector threads
// var grabSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/CloseClamp.wav");
// var releaseSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/ReleaseClamp.wav");
// var VOLUME = 0.0;
var gBeaconHeight = 0.10;
var BEACON_COLOR = {
red: 200,
green: 200,
blue: 200
};
var BEACON_WIDTH = 2;
var ZERO_VEC3 = {x: 0, y: 0, z: 0};
var IDENTITY_QUAT = {x: 0, y: 0, z: 0, w: 1};
var gBeacon = Overlays.addOverlay("line3d", {
color: BEACON_COLOR,
alpha: 1,
visible: false,
lineWidth: BEACON_WIDTH
});
function updateDropLine(position) {
Overlays.editOverlay(gBeacon, {
visible: true,
start: {
x: position.x,
y: position.y + gBeaconHeight,
z: position.z
},
end: {
x: position.x,
y: position.y - gBeaconHeight,
z: position.z
}
});
}
function mouseIntersectionWithPlane(pointOnPlane, planeNormal, event) {
// helper function
function mouseIntersectionWithPlane(pointOnPlane, planeNormal, event, maxDistance) {
var cameraPosition = Camera.getPosition();
var localPointOnPlane = Vec3.subtract(pointOnPlane, cameraPosition);
var distanceFromPlane = Vec3.dot(localPointOnPlane, planeNormal);
@ -117,7 +33,7 @@ function mouseIntersectionWithPlane(pointOnPlane, planeNormal, event) {
var useMaxForwardGrab = false;
if (Math.abs(dirDotNorm) > MIN_RAY_PLANE_DOT) {
var distanceToIntersection = distanceFromPlane / dirDotNorm;
if (distanceToIntersection > 0 && distanceToIntersection < gMaxGrabDistance) {
if (distanceToIntersection > 0 && distanceToIntersection < maxDistance) {
// ray points into the plane
localIntersection = Vec3.multiply(pickRay.direction, distanceFromPlane / dirDotNorm);
} else {
@ -134,52 +50,160 @@ function mouseIntersectionWithPlane(pointOnPlane, planeNormal, event) {
// we re-route the intersection to be in front at max distance.
var rayDirection = Vec3.subtract(pickRay.direction, Vec3.multiply(planeNormal, dirDotNorm));
rayDirection = Vec3.normalize(rayDirection);
localIntersection = Vec3.multiply(rayDirection, gMaxGrabDistance);
localIntersection = Vec3.multiply(rayDirection, maxDistance);
localIntersection = Vec3.sum(localIntersection, Vec3.multiply(planeNormal, distanceFromPlane));
}
var worldIntersection = Vec3.sum(cameraPosition, localIntersection);
return worldIntersection;
}
function computeNewGrabPlane() {
if (!gIsGrabbing) {
// Mouse class stores mouse click and drag info
Mouse = function() {
this.current = {x: 0, y: 0 };
this.previous = {x: 0, y: 0 };
this.rotateStart = {x: 0, y: 0 };
this.cursorRestore = {x: 0, y: 0};
}
Mouse.prototype.startDrag = function(position) {
this.current = {x: position.x, y: position.y};
this.startRotateDrag();
}
Mouse.prototype.updateDrag = function(position) {
this.current = {x: position.x, y: position.y };
}
Mouse.prototype.startRotateDrag = function() {
this.previous = {x: this.current.x, y: this.current.y};
this.rotateStart = {x: this.current.x, y: this.current.y};
this.cursorRestore = { x: Window.getCursorPositionX(), y: Window.getCursorPositionY() };
}
Mouse.prototype.getDrag = function() {
var delta = {x: this.current.x - this.previous.x, y: this.current.y - this.previous.y};
this.previous = {x: this.current.x, y: this.current.y};
return delta;
}
Mouse.prototype.restoreRotateCursor = function() {
Window.setCursorPosition(this.cursorRestore.x, this.cursorRestore.y);
this.current = {x: this.rotateStart.x, y: this.rotateStart.y};
}
var mouse = new Mouse();
// Beacon class stores info for drawing a line at object's target position
Beacon = function() {
this.height = 0.10;
this.overlayID = Overlays.addOverlay("line3d", {
color: {red: 200, green: 200, blue: 200},
alpha: 1,
visible: false,
lineWidth: 2
});
}
Beacon.prototype.enable = function() {
Overlays.editOverlay(this.overlayID, { visible: true });
}
Beacon.prototype.disable = function() {
Overlays.editOverlay(this.overlayID, { visible: false });
}
Beacon.prototype.updatePosition = function(position) {
Overlays.editOverlay(this.overlayID, {
visible: true,
start: {
x: position.x,
y: position.y + this.height,
z: position.z
},
end: {
x: position.x,
y: position.y - this.height,
z: position.z
}
});
}
var beacon = new Beacon();
// TODO: play sounds again when we aren't leaking AudioInjector threads
// var grabSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/CloseClamp.wav");
// var releaseSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/ReleaseClamp.wav");
// var VOLUME = 0.0;
// Grabber class stores and computes info for grab behavior
Grabber = function() {
this.isGrabbing = false;
this.entityID = null;
this.actionID = null;
this.startPosition = ZERO_VEC3;
this.lastRotation = IDENTITY_QUAT;
this.currentPosition = ZERO_VEC3;
this.planeNormal = ZERO_VEC3;
this.originalGravity = ZERO_VEC3;
// maxDistance is a function of the size of the object.
this.maxDistance;
// mode defines the degrees of freedom of the grab target positions
// relative to startPosition options include:
// xzPlane (default)
// verticalCylinder (SHIFT)
// rotate (CONTROL)
this.mode = "xzplane";
// offset allows the user to grab an object off-center. It points from the object's center
// to the point where the ray intersects the grab plane (at the moment the grab is initiated).
// Future target positions of the ray intersection are on the same plane, and the offset is subtracted
// to compute the target position of the object's center.
this.offset = {x: 0, y: 0, z: 0 };
this.targetPosition;
this.targetRotation;
this.liftKey = false; // SHIFT
this.rotateKey = false; // CONTROL
}
Grabber.prototype.computeNewGrabPlane = function() {
if (!this.isGrabbing) {
return;
}
var maybeResetMousePosition = false;
if (gGrabMode !== "rotate") {
gMouseAtRotateStart = gMouseCursorLocation;
var modeWasRotate = (this.mode == "rotate");
this.mode = "xzPlane";
this.planeNormal = {x: 0, y: 1, z: 0 };
if (this.rotateKey) {
this.mode = "rotate";
mouse.startRotateDrag();
} else {
maybeResetMousePosition = true;
}
gGrabMode = "xzPlane";
gPlaneNormal = { x: 0, y: 1, z: 0 };
if (gLiftKey) {
if (!gRotateKey) {
gGrabMode = "verticalCylinder";
// a new planeNormal will be computed each move
if (modeWasRotate) {
// we reset the mouse screen position whenever we stop rotating
mouse.restoreRotateCursor();
}
} else if (gRotateKey) {
gGrabMode = "rotate";
}
if (this.liftKey) {
this.mode = "verticalCylinder";
// NOTE: during verticalCylinder mode a new planeNormal will be computed each move
}
}
gPointOnPlane = Vec3.sum(gCurrentPosition, gGrabOffset);
var xzOffset = Vec3.subtract(gPointOnPlane, Camera.getPosition());
this.pointOnPlane = Vec3.sum(this.currentPosition, this.offset);
var xzOffset = Vec3.subtract(this.pointOnPlane, Camera.getPosition());
xzOffset.y = 0;
gXzDistanceToGrab = Vec3.length(xzOffset);
if (gGrabMode !== "rotate" && maybeResetMousePosition) {
// we reset the mouse position whenever we stop rotating
Window.setCursorPosition(gMouseAtRotateStart.x, gMouseAtRotateStart.y);
}
this.xzDistanceToGrab = Vec3.length(xzOffset);
}
function mousePressEvent(event) {
Grabber.prototype.pressEvent = function(event) {
if (!event.isLeftButton) {
return;
}
gInitialMouse = {x: event.x, y: event.y };
gPreviousMouse = {x: event.x, y: event.y };
var pickRay = Camera.computePickRay(event.x, event.y);
var pickResults = Entities.findRayIntersection(pickRay, true); // accurate picking
@ -193,150 +217,172 @@ function mousePressEvent(event) {
return;
}
mouse.startDrag(event);
var clickedEntity = pickResults.entityID;
var entityProperties = Entities.getEntityProperties(clickedEntity)
gStartPosition = entityProperties.position;
gStartRotation = entityProperties.rotation;
this.startPosition = entityProperties.position;
this.lastRotation = entityProperties.rotation;
var cameraPosition = Camera.getPosition();
gBeaconHeight = Vec3.length(entityProperties.dimensions);
gMaxGrabDistance = gBeaconHeight / MAX_SOLID_ANGLE;
if (Vec3.distance(gStartPosition, cameraPosition) > gMaxGrabDistance) {
var objectBoundingDiameter = Vec3.length(entityProperties.dimensions);
beacon.height = objectBoundingDiameter;
this.maxDistance = objectBoundingDiameter / MAX_SOLID_ANGLE;
if (Vec3.distance(this.startPosition, cameraPosition) > this.maxDistance) {
// don't allow grabs of things far away
return;
}
Entities.editEntity(clickedEntity, { gravity: ZERO_VEC3 });
gIsGrabbing = true;
this.isGrabbing = true;
gGrabbedEntity = clickedEntity;
gCurrentPosition = entityProperties.position;
gOriginalGravity = entityProperties.gravity;
gTargetPosition = gStartPosition;
this.entityID = clickedEntity;
this.currentPosition = entityProperties.position;
this.originalGravity = entityProperties.gravity;
this.targetPosition = {x: this.startPosition.x, y: this.startPosition.y, z: this.startPosition.z};
// compute the grab point
var nearestPoint = Vec3.subtract(gStartPosition, cameraPosition);
var nearestPoint = Vec3.subtract(this.startPosition, cameraPosition);
var distanceToGrab = Vec3.dot(nearestPoint, pickRay.direction);
nearestPoint = Vec3.multiply(distanceToGrab, pickRay.direction);
gPointOnPlane = Vec3.sum(cameraPosition, nearestPoint);
this.pointOnPlane = Vec3.sum(cameraPosition, nearestPoint);
// compute the grab offset (points from object center to point of grab)
gGrabOffset = Vec3.subtract(gPointOnPlane, gStartPosition);
this.offset = Vec3.subtract(this.pointOnPlane, this.startPosition);
computeNewGrabPlane();
this.computeNewGrabPlane();
updateDropLine(gStartPosition);
beacon.updatePosition(this.startPosition);
// TODO: play sounds again when we aren't leaking AudioInjector threads
//Audio.playSound(grabSound, { position: entityProperties.position, volume: VOLUME });
}
function mouseReleaseEvent() {
if (gIsGrabbing) {
if (Vec3.length(gOriginalGravity) != 0) {
Entities.editEntity(gGrabbedEntity, { gravity: gOriginalGravity });
Grabber.prototype.releaseEvent = function() {
if (this.isGrabbing) {
if (Vec3.length(this.originalGravity) != 0) {
Entities.editEntity(this.entityID, { gravity: this.originalGravity});
}
gIsGrabbing = false
Entities.deleteAction(gGrabbedEntity, gActionID);
gActionID = null;
this.isGrabbing = false
Entities.deleteAction(this.entityID, this.actionID);
this.actionID = null;
Overlays.editOverlay(gBeacon, { visible: false });
beacon.disable();
// TODO: play sounds again when we aren't leaking AudioInjector threads
//Audio.playSound(releaseSound, { position: entityProperties.position, volume: VOLUME });
}
}
function mouseMoveEvent(event) {
if (!gIsGrabbing) {
Grabber.prototype.moveEvent = function(event) {
if (!this.isGrabbing) {
return;
}
mouse.updateDrag(event);
// see if something added/restored gravity
var entityProperties = Entities.getEntityProperties(gGrabbedEntity);
var entityProperties = Entities.getEntityProperties(this.entityID);
if (Vec3.length(entityProperties.gravity) != 0) {
gOriginalGravity = entityProperties.gravity;
this.originalGravity = entityProperties.gravity;
}
gCurrentPosition = entityProperties.position;
this.currentPosition = entityProperties.position;
var actionArgs = {};
if (gGrabMode === "rotate") {
var deltaMouse = { x: 0, y: 0 };
var dx = event.x - gInitialMouse.x;
var dy = event.y - gInitialMouse.y;
if (this.mode === "rotate") {
var drag = mouse.getDrag();
var orientation = Camera.getOrientation();
var dragOffset = Vec3.multiply(dx, Quat.getRight(orientation));
dragOffset = Vec3.sum(dragOffset, Vec3.multiply(-dy, Quat.getUp(orientation)));
var dragOffset = Vec3.multiply(drag.x, Quat.getRight(orientation));
dragOffset = Vec3.sum(dragOffset, Vec3.multiply(-drag.y, Quat.getUp(orientation)));
var axis = Vec3.cross(dragOffset, Quat.getFront(orientation));
axis = Vec3.normalize(axis);
var ROTATE_STRENGTH = 0.4; // magic number tuned by hand
var angle = ROTATE_STRENGTH * Math.sqrt((dx * dx) + (dy * dy));
var angle = ROTATE_STRENGTH * Math.sqrt((drag.x * drag.x) + (drag.y * drag.y));
var deltaQ = Quat.angleAxis(angle, axis);
// var qZero = entityProperties.rotation;
var qZero = gStartRotation;
var qOne = Quat.multiply(deltaQ, qZero);
actionArgs = {targetRotation: qOne, angularTimeScale: 0.1};
//var qZero = this.lastRotation;
this.lastRotation = Quat.multiply(deltaQ, this.lastRotation);
actionArgs = {targetRotation: this.lastRotation, angularTimeScale: 0.1};
} else {
var newTargetPosition;
if (gGrabMode === "verticalCylinder") {
var newPointOnPlane;
if (this.mode === "verticalCylinder") {
// for this mode we recompute the plane based on current Camera
var planeNormal = Quat.getFront(Camera.getOrientation());
planeNormal.y = 0;
planeNormal = Vec3.normalize(planeNormal);
var pointOnCylinder = Vec3.multiply(planeNormal, gXzDistanceToGrab);
var pointOnCylinder = Vec3.multiply(planeNormal, this.xzDistanceToGrab);
pointOnCylinder = Vec3.sum(Camera.getPosition(), pointOnCylinder);
newTargetPosition = mouseIntersectionWithPlane(pointOnCylinder, planeNormal, event);
gPointOnPlane = Vec3.sum(newTargetPosition, gGrabOffset);
this.pointOnPlane = mouseIntersectionWithPlane(pointOnCylinder, planeNormal, mouse.current, this.maxDistance);
newPointOnPlane = {x: this.pointOnPlane.x, y: this.pointOnPlane.y, z: this.pointOnPlane.z};
} else {
var cameraPosition = Camera.getPosition();
newTargetPosition = mouseIntersectionWithPlane(gPointOnPlane, gPlaneNormal, event);
var relativePosition = Vec3.subtract(newTargetPosition, cameraPosition);
newPointOnPlane = mouseIntersectionWithPlane(this.pointOnPlane, this.planeNormal, mouse.current, this.maxDistance);
var relativePosition = Vec3.subtract(newPointOnPlane, cameraPosition);
var distance = Vec3.length(relativePosition);
if (distance > gMaxGrabDistance) {
if (distance > this.maxDistance) {
// clamp distance
relativePosition = Vec3.multiply(relativePosition, gMaxGrabDistance / distance);
newTargetPosition = Vec3.sum(relativePosition, cameraPosition);
relativePosition = Vec3.multiply(relativePosition, this.maxDistance / distance);
newPointOnPlane = Vec3.sum(relativePosition, cameraPosition);
}
}
gTargetPosition = Vec3.subtract(newTargetPosition, gGrabOffset);
actionArgs = {targetPosition: gTargetPosition, linearTimeScale: 0.1};
}
gPreviousMouse = { x: event.x, y: event.y };
gMouseCursorLocation = { x: Window.getCursorPositionX(), y: Window.getCursorPositionY() };
this.targetPosition = Vec3.subtract(newPointOnPlane, this.offset);
actionArgs = {targetPosition: this.targetPosition, linearTimeScale: 0.1};
if (!gActionID) {
gActionID = Entities.addAction("spring", gGrabbedEntity, actionArgs);
beacon.updatePosition(this.targetPosition);
}
if (!this.actionID) {
this.actionID = Entities.addAction("spring", this.entityID, actionArgs);
} else {
Entities.updateAction(gGrabbedEntity, gActionID, actionArgs);
Entities.updateAction(this.entityID, this.actionID, actionArgs);
}
updateDropLine(gTargetPosition);
}
function keyReleaseEvent(event) {
Grabber.prototype.keyReleaseEvent = function(event) {
if (event.text === "SHIFT") {
gLiftKey = false;
this.liftKey = false;
}
if (event.text === "CONTROL") {
gRotateKey = false;
this.rotateKey = false;
}
computeNewGrabPlane();
this.computeNewGrabPlane();
}
Grabber.prototype.keyPressEvent = function(event) {
if (event.text === "SHIFT") {
this.liftKey = true;
}
if (event.text === "CONTROL") {
this.rotateKey = true;
}
this.computeNewGrabPlane();
}
var grabber = new Grabber();
function pressEvent(event) {
grabber.pressEvent(event);
}
function moveEvent(event) {
grabber.moveEvent(event);
}
function releaseEvent(event) {
grabber.releaseEvent(event);
}
function keyPressEvent(event) {
if (event.text === "SHIFT") {
gLiftKey = true;
}
if (event.text === "CONTROL") {
gRotateKey = true;
}
computeNewGrabPlane();
grabber.keyPressEvent(event);
}
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
function keyReleaseEvent(event) {
grabber.keyReleaseEvent(event);
}
Controller.mousePressEvent.connect(pressEvent);
Controller.mouseMoveEvent.connect(moveEvent);
Controller.mouseReleaseEvent.connect(releaseEvent);
Controller.keyPressEvent.connect(keyPressEvent);
Controller.keyReleaseEvent.connect(keyReleaseEvent);
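The substance of this rewrite is that grab.js now drives the grabbed entity with a physics "spring" action instead of setting its position directly: pressEvent records the grab plane and offset, moveEvent recomputes a target and adds or updates the action, and releaseEvent deletes the action and restores the entity's original gravity. Stripped of the Mouse, Beacon and Grabber plumbing, the cycle reduces to the sketch below, which is a reading aid rather than part of the commit and reuses only the Entities action calls that appear above.
// Minimal sketch of the spring-action cycle Grabber performs, using the same
// Entities.addAction / updateAction / deleteAction calls as the script above.
var springActionID = null;

function startOrRetargetSpring(entityID, targetPosition) {
    var actionArgs = { targetPosition: targetPosition, linearTimeScale: 0.1 };
    if (!springActionID) {
        // First move after a grab: create the spring action once.
        springActionID = Entities.addAction("spring", entityID, actionArgs);
    } else {
        // Every subsequent move: just retarget the existing action.
        Entities.updateAction(entityID, springActionID, actionArgs);
    }
}

function releaseSpring(entityID, originalGravity) {
    // On release, remove the action and restore the gravity saved at grab time.
    Entities.deleteAction(entityID, springActionID);
    springActionID = null;
    Entities.editEntity(entityID, { gravity: originalGravity });
}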


@ -65,7 +65,6 @@
#include <DependencyManager.h>
#include <EntityScriptingInterface.h>
#include <ErrorDialog.h>
#include <GlowEffect.h>
#include <gpu/Batch.h>
#include <gpu/Context.h>
#include <gpu/GLBackend.h>
@ -269,7 +268,6 @@ bool setupEssentials(int& argc, char** argv) {
auto geometryCache = DependencyManager::set<GeometryCache>();
auto scriptCache = DependencyManager::set<ScriptCache>();
auto soundCache = DependencyManager::set<SoundCache>();
auto glowEffect = DependencyManager::set<GlowEffect>();
auto faceshift = DependencyManager::set<Faceshift>();
auto audio = DependencyManager::set<AudioClient>();
auto audioScope = DependencyManager::set<AudioScope>();
@ -974,7 +972,10 @@ void Application::paintGL() {
} else {
PROFILE_RANGE(__FUNCTION__ "/mainRender");
DependencyManager::get<GlowEffect>()->prepare(&renderArgs);
auto primaryFBO = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
GLuint fbo = gpu::GLBackend::getFramebufferID(primaryFBO);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fbo);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Viewport is assigned to the size of the framebuffer
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
@ -994,8 +995,7 @@ void Application::paintGL() {
renderArgs._renderMode = RenderArgs::NORMAL_RENDER_MODE;
auto finalFbo = DependencyManager::get<GlowEffect>()->render(&renderArgs);
auto finalFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
@ -1003,6 +1003,8 @@ void Application::paintGL() {
0, 0, _glWidget->getDeviceSize().width(), _glWidget->getDeviceSize().height(),
GL_COLOR_BUFFER_BIT, GL_LINEAR);
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
glBindTexture(GL_TEXTURE_2D, 0); // ???
_compositor.displayOverlayTexture(&renderArgs);
}
@ -1566,7 +1568,9 @@ void Application::mouseMoveEvent(QMouseEvent* event, unsigned int deviceID) {
return;
}
_keyboardMouseDevice.mouseMoveEvent(event, deviceID);
if (deviceID == 0) {
_keyboardMouseDevice.mouseMoveEvent(event, deviceID);
}
}
@ -1587,7 +1591,9 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
if (activeWindow() == _window) {
_keyboardMouseDevice.mousePressEvent(event);
if (deviceID == 0) {
_keyboardMouseDevice.mousePressEvent(event);
}
if (event->button() == Qt::LeftButton) {
_mouseDragStarted = getTrueMouse();
@ -1627,7 +1633,9 @@ void Application::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {
}
if (activeWindow() == _window) {
_keyboardMouseDevice.mouseReleaseEvent(event);
if (deviceID == 0) {
_keyboardMouseDevice.mouseReleaseEvent(event);
}
if (event->button() == Qt::LeftButton) {
_mousePressed = false;
@ -2231,10 +2239,6 @@ void Application::init() {
_entityClipboardRenderer.setViewFrustum(getViewFrustum());
_entityClipboardRenderer.setTree(&_entityClipboard);
// initialize the GlowEffect with our widget
bool glow = Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect);
DependencyManager::get<GlowEffect>()->init(glow);
// Make sure any new sounds are loaded as soon as know about them.
connect(tree, &EntityTree::newCollisionSoundURL, DependencyManager::get<SoundCache>().data(), &SoundCache::getSound);
connect(_myAvatar, &MyAvatar::newCollisionSoundURL, DependencyManager::get<SoundCache>().data(), &SoundCache::getSound);
@ -2440,6 +2444,12 @@ void Application::updateDialogs(float deltaTime) {
PerformanceWarning warn(showWarnings, "Application::updateDialogs()");
auto dialogsManager = DependencyManager::get<DialogsManager>();
// Update audio stats dialog, if any
AudioStatsDialog* audioStatsDialog = dialogsManager->getAudioStatsDialog();
if(audioStatsDialog) {
audioStatsDialog->update();
}
// Update bandwidth dialog, if any
BandwidthDialog* bandwidthDialog = dialogsManager->getBandwidthDialog();
if (bandwidthDialog) {
@ -3194,9 +3204,6 @@ QImage Application::renderAvatarBillboard(RenderArgs* renderArgs) {
glClear(GL_COLOR_BUFFER_BIT);
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_FALSE);
// the "glow" here causes an alpha of one
Glower glower(renderArgs);
const int BILLBOARD_SIZE = 64;
// TODO: Pass a RenderArgs to renderAvatarBillboard
renderRearViewMirror(renderArgs, QRect(0, _glWidget->getDeviceHeight() - BILLBOARD_SIZE,


@ -57,6 +57,7 @@
#include "devices/SixenseManager.h"
#include "scripting/ControllerScriptingInterface.h"
#include "scripting/WebWindowClass.h"
#include "ui/AudioStatsDialog.h"
#include "ui/BandwidthDialog.h"
#include "ui/HMDToolsDialog.h"
#include "ui/ModelsBrowser.h"


@ -17,8 +17,8 @@
#include <OctreeConstants.h>
#include <SimpleMovingAverage.h>
const float DEFAULT_DESKTOP_LOD_DOWN_FPS = 30.0;
const float DEFAULT_HMD_LOD_DOWN_FPS = 60.0;
const float DEFAULT_DESKTOP_LOD_DOWN_FPS = 15.0;
const float DEFAULT_HMD_LOD_DOWN_FPS = 30.0;
const float MAX_LIKELY_DESKTOP_FPS = 30.0; // this is essentially V-sync - 1 fps
const float MAX_LIKELY_HMD_FPS = 74.0; // this is essentially V-sync - 1 fps
const float INCREASE_LOD_GAP = 15.0f;


@ -9,6 +9,9 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include <QFileDialog>
#include <QMenuBar>
#include <QShortcut>
@ -16,7 +19,6 @@
#include <AddressManager.h>
#include <AudioClient.h>
#include <DependencyManager.h>
#include <GlowEffect.h>
#include <PathUtils.h>
#include <SettingHandle.h>
#include <UserActivityLogger.h>
@ -246,7 +248,6 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ScriptedMotorControl, 0, true,
avatar, SLOT(updateMotionBehavior()));
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::NamesAboveHeads, 0, true);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::GlowWhenSpeaking, 0, true);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::BlueSpeechSphere, 0, true);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::EnableCharacterController, 0, true,
avatar, SLOT(updateMotionBehavior()));
@ -320,6 +321,8 @@ Menu::Menu() {
addActionToQMenuAndActionHash(viewMenu, MenuOption::Log,
Qt::CTRL | Qt::SHIFT | Qt::Key_L,
qApp, SLOT(toggleLogDialog()));
addActionToQMenuAndActionHash(viewMenu, MenuOption::AudioNetworkStats, 0,
dialogsManager.data(), SLOT(audioStatsDetails()));
addActionToQMenuAndActionHash(viewMenu, MenuOption::BandwidthDetails, 0,
dialogsManager.data(), SLOT(bandwidthDetails()));
addActionToQMenuAndActionHash(viewMenu, MenuOption::OctreeStats, 0,
@ -385,8 +388,6 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Stars,
0, // QML Qt::Key_Asterisk,
true);
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::EnableGlowEffect, 0, true,
DependencyManager::get<GlowEffect>().data(), SLOT(toggleGlowEffect(bool)));
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Wireframe, Qt::ALT | Qt::Key_W, false);
addActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::LodTools,
@ -587,15 +588,6 @@ Menu::Menu() {
audioScopeFramesGroup->addAction(fiftyFrames);
}
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioStats,
Qt::CTRL | Qt::SHIFT | Qt::Key_A,
false); //, statsRenderer.data(), SLOT(toggle())); // TODO: convert to dialogbox
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioStatsShowInjectedStreams,
0,
false); //, statsRenderer.data(), SLOT(toggleShowInjectedStreams)); // TODO: convert to dialogbox
MenuWrapper* physicsOptionsMenu = developerMenu->addMenu("Physics");
addCheckableActionToQMenuAndActionHash(physicsOptionsMenu, MenuOption::PhysicsShowOwned);
addCheckableActionToQMenuAndActionHash(physicsOptionsMenu, MenuOption::PhysicsShowHulls);


@ -145,7 +145,7 @@ namespace MenuOption {
const QString AudioScopeFrames = "Display Frames";
const QString AudioScopePause = "Pause Scope";
const QString AudioScopeTwentyFrames = "Twenty";
const QString AudioStats = "Audio Stats";
const QString AudioNetworkStats = "Audio Network Stats";
const QString AudioStatsShowInjectedStreams = "Audio Stats Show Injected Streams";
const QString AutoMuteAudio = "Auto Mute Microphone";
const QString AvatarReceiveStats = "Show Receive Stats";
@ -185,7 +185,6 @@ namespace MenuOption {
const QString EditEntitiesHelp = "Edit Entities Help...";
const QString Enable3DTVMode = "Enable 3DTV Mode";
const QString EnableCharacterController = "Enable avatar collisions";
const QString EnableGlowEffect = "Enable Glow Effect";
const QString EnableVRMode = "Enable VR Mode";
const QString ExpandMyAvatarSimulateTiming = "Expand /myAvatar/simulation";
const QString ExpandMyAvatarTiming = "Expand /myAvatar";
@ -199,7 +198,6 @@ namespace MenuOption {
const QString FrameTimer = "Show Timer";
const QString Fullscreen = "Fullscreen";
const QString FullscreenMirror = "Fullscreen Mirror";
const QString GlowWhenSpeaking = "Glow When Speaking";
const QString HMDTools = "HMD Tools";
const QString IncreaseAvatarSize = "Increase Avatar Size";
const QString IndependentMode = "Independent Mode";


@ -24,7 +24,6 @@
#include <DeferredLightingEffect.h>
#include <GeometryUtil.h>
#include <GlowEffect.h>
#include <LODManager.h>
#include <NodeList.h>
#include <NumericalConstants.h>
@ -410,9 +409,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, boo
float GLOW_FROM_AVERAGE_LOUDNESS = ((this == DependencyManager::get<AvatarManager>()->getMyAvatar())
? 0.0f
: MAX_GLOW * getHeadData()->getAudioLoudness() / GLOW_MAX_LOUDNESS);
if (!Menu::getInstance()->isOptionChecked(MenuOption::GlowWhenSpeaking)) {
GLOW_FROM_AVERAGE_LOUDNESS = 0.0f;
}
GLOW_FROM_AVERAGE_LOUDNESS = 0.0f;
float glowLevel = _moving && distanceToTarget > GLOW_DISTANCE && renderArgs->_renderMode == RenderArgs::NORMAL_RENDER_MODE
? 1.0f
@ -526,7 +523,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, boo
auto cameraMode = Application::getInstance()->getCamera()->getMode();
if (!isMyAvatar() || cameraMode != CAMERA_MODE_FIRST_PERSON) {
renderDisplayName(batch, *renderArgs->_viewFrustum);
renderDisplayName(batch, *renderArgs->_viewFrustum, renderArgs->_viewport);
}
}
@ -579,8 +576,6 @@ void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool
fixupModelsInScene();
{
Glower glower(renderArgs, glowLevel);
if (_shouldRenderBillboard || !(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
if (postLighting || renderArgs->_renderMode == RenderArgs::SHADOW_RENDER_MODE) {
// render the billboard until both models are loaded
@ -679,7 +674,7 @@ glm::vec3 Avatar::getDisplayNamePosition() const {
return namePosition;
}
Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, float fontSize) const {
Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, float fontSize, const glm::ivec4& viewport) const {
Transform result;
// We assume textPosition is within the frustum
glm::vec3 textPosition = getDisplayNamePosition();
@ -698,12 +693,7 @@ Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, floa
glm::vec4 p0 = viewProj * glm::vec4(testPoint0, 1.0);
glm::vec4 p1 = viewProj * glm::vec4(testPoint1, 1.0);
// TODO REMOVE vvv
GLint viewportMatrix[4];
glGetIntegerv(GL_VIEWPORT, viewportMatrix);
glm::dmat4 modelViewMatrix;
float windowSizeY = viewportMatrix[3] - viewportMatrix[1];
// TODO REMOVE ^^^
float windowSizeY = viewport.w;
const float DESIRED_HIGHT_ON_SCREEN = 20; // In pixels (this is double on retinas)
@ -736,7 +726,7 @@ Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, floa
}
void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum) const {
void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum, const glm::ivec4& viewport) const {
bool shouldShowReceiveStats = DependencyManager::get<AvatarManager>()->shouldShowReceiveStats() && !isMyAvatar();
// If we have nothing to draw, or it's totally transparent, return
@ -778,7 +768,7 @@ void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum) co
(_displayNameAlpha / DISPLAYNAME_ALPHA) * DISPLAYNAME_BACKGROUND_ALPHA);
// Compute display name transform
auto textTransform = calculateDisplayNameTransform(frustum, renderer->getFontSize());
auto textTransform = calculateDisplayNameTransform(frustum, renderer->getFontSize(), viewport);
batch.setModelTransform(textTransform);
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, false, true, true, true);


@ -234,8 +234,8 @@ protected:
float getPelvisFloatingHeight() const;
glm::vec3 getDisplayNamePosition() const;
Transform calculateDisplayNameTransform(const ViewFrustum& frustum, float fontSize) const;
void renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum) const;
Transform calculateDisplayNameTransform(const ViewFrustum& frustum, float fontSize, const glm::ivec4& viewport) const;
void renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum, const glm::ivec4& viewport) const;
virtual void renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel = 0.0f);
virtual bool shouldRenderHead(const RenderArgs* renderArgs) const;
virtual void fixupModelsInScene();


@ -25,7 +25,6 @@
#endif
#include <GlowEffect.h>
#include <PerfStat.h>
#include <RegisteredMetaTypes.h>
#include <UUID.h>


@ -120,8 +120,8 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
Hand* hand = _owningAvatar->getHand();
hand->getLeftRightPalmIndices(leftPalmIndex, rightPalmIndex);
const float HAND_RESTORATION_RATE = 0.25f;
if (leftPalmIndex == -1 || rightPalmIndex == -1) {
const float HAND_RESTORATION_RATE = 0.25f;
if (leftPalmIndex == -1 && rightPalmIndex == -1) {
// palms are not yet set, use mouse
if (_owningAvatar->getHandState() == HAND_STATE_NULL) {
restoreRightHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
@ -138,8 +138,16 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
} else {
applyPalmData(geometry.leftHandJointIndex, hand->getPalms()[leftPalmIndex]);
applyPalmData(geometry.rightHandJointIndex, hand->getPalms()[rightPalmIndex]);
if (leftPalmIndex != -1) {
applyPalmData(geometry.leftHandJointIndex, hand->getPalms()[leftPalmIndex]);
} else {
restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
}
if (rightPalmIndex != -1) {
applyPalmData(geometry.rightHandJointIndex, hand->getPalms()[rightPalmIndex]);
} else {
restoreRightHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
}
}
if (_isFirstPerson) {
@ -791,19 +799,24 @@ void SkeletonModel::renderBoundingCollisionShapes(gpu::Batch& batch, float alpha
transform.setTranslation(endPoint);
batch.setModelTransform(transform);
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->renderSphere(batch, _boundingShape.getRadius(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS, glm::vec4(0.6f, 0.6f, 0.8f, alpha));
geometryCache->renderSphere(batch, _boundingShape.getRadius(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS,
glm::vec4(0.6f, 0.6f, 0.8f, alpha));
// draw a yellow sphere at the capsule startpoint
glm::vec3 startPoint;
_boundingShape.getStartPoint(startPoint);
startPoint = startPoint - _translation;
glm::vec3 axis = endPoint - startPoint;
glTranslatef(-axis.x, -axis.y, -axis.z);
geometryCache->renderSphere(_boundingShape.getRadius(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS, glm::vec4(0.8f, 0.8f, 0.6f, alpha));
Transform axisTransform = Transform();
axisTransform.setTranslation(-axis);
batch.setModelTransform(axisTransform);
geometryCache->renderSphere(batch, _boundingShape.getRadius(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS,
glm::vec4(0.8f, 0.8f, 0.6f, alpha));
// draw a green cylinder between the two points
glm::vec3 origin(0.0f);
Avatar::renderJointConnectingCone(batch, origin, axis, _boundingShape.getRadius(), _boundingShape.getRadius(), glm::vec4(0.6f, 0.8f, 0.6f, alpha));
Avatar::renderJointConnectingCone(batch, origin, axis, _boundingShape.getRadius(), _boundingShape.getRadius(),
glm::vec4(0.6f, 0.8f, 0.6f, alpha));
}
bool SkeletonModel::hasSkeleton() {


@ -24,7 +24,6 @@
#include <avatar/AvatarManager.h>
#include <avatar/MyAvatar.h>
#include <GlowEffect.h>
#include <GlWindow.h>
#include <gpu/GLBackend.h>
#include <OglplusHelpers.h>
@ -644,15 +643,9 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
return;
}
//Bind our framebuffer object. If we are rendering the glow effect, we let the glow effect shader take care of it
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
DependencyManager::get<GlowEffect>()->prepare(renderArgs);
} else {
auto primaryFBO = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(primaryFBO));
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
}
auto primaryFBO = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(primaryFBO));
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_PROJECTION);
glPushMatrix();
@ -724,15 +717,8 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
glPopMatrix();
gpu::FramebufferPointer finalFbo;
//Bind the output texture from the glow shader. If glow effect is disabled, we just grab the texture
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
//Full texture viewport for glow effect
glViewport(0, 0, _renderTargetSize.w, _renderTargetSize.h);
finalFbo = DependencyManager::get<GlowEffect>()->render(renderArgs);
} else {
finalFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
glBindFramebuffer(GL_FRAMEBUFFER, 0);
}
finalFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glMatrixMode(GL_PROJECTION);
glPopMatrix();
@ -825,7 +811,6 @@ glm::quat OculusManager::getOrientation() {
return toGlm(trackingState.HeadPose.ThePose.Orientation);
}
//Used to set the size of the glow framebuffers
QSize OculusManager::getRenderTargetSize() {
QSize rv;
rv.setWidth(_renderTargetSize.w);


@ -244,14 +244,6 @@ void SixenseManager::update(float deltaTime) {
palm->setTrigger(data->trigger);
palm->setJoystick(data->joystick_x, data->joystick_y);
handleButtonEvent(data->buttons, numActiveControllers - 1);
handleAxisEvent(data->joystick_x, data->joystick_y, data->trigger, numActiveControllers - 1);
// Emulate the mouse so we can use scripts
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput) && !_controllersAtBase) {
emulateMouse(palm, numActiveControllers - 1);
}
// NOTE: Sixense API returns pos data in millimeters but we IMMEDIATELY convert to meters.
glm::vec3 position(data->pos[0], data->pos[1], data->pos[2]);
position *= METERS_PER_MILLIMETER;
@ -260,6 +252,15 @@ void SixenseManager::update(float deltaTime) {
const float CONTROLLER_AT_BASE_DISTANCE = 0.075f;
if (glm::length(position) < CONTROLLER_AT_BASE_DISTANCE) {
numControllersAtBase++;
palm->setActive(false);
} else {
handleButtonEvent(data->buttons, numActiveControllers - 1);
handleAxisEvent(data->joystick_x, data->joystick_y, data->trigger, numActiveControllers - 1);
// Emulate the mouse so we can use scripts
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput) && !_controllersAtBase) {
emulateMouse(palm, numActiveControllers - 1);
}
}
// Transform the measured position into body frame.


@ -14,7 +14,6 @@
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <GlowEffect.h>
#include "gpu/GLBackend.h"
#include "Application.h"
@ -82,6 +81,9 @@ void TV3DManager::configureCamera(Camera& whichCamera, int screenWidth, int scre
}
void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
#ifdef THIS_CURRENTLY_BROKEN_WAITING_FOR_DISPLAY_PLUGINS
double nearZ = DEFAULT_NEAR_CLIP; // near clipping plane
double farZ = DEFAULT_FAR_CLIP; // far clipping plane
@ -94,6 +96,7 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
int portalH = deviceSize.height();
// FIXME - glow effect is removed, 3D TV mode broken until we get display plugins working
DependencyManager::get<GlowEffect>()->prepare(renderArgs);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
@ -130,6 +133,7 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
glPopMatrix();
glDisable(GL_SCISSOR_TEST);
// FIXME - glow effect is removed, 3D TV mode broken until we get display plugins working
auto finalFbo = DependencyManager::get<GlowEffect>()->render(renderArgs);
auto fboSize = finalFbo->getSize();
// Get the ACTUAL device size for the BLIT
@ -144,6 +148,8 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
// reset the viewport to how we started
glViewport(0, 0, deviceSize.width(), deviceSize.height());
#endif
}
void TV3DManager::overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,


@ -197,7 +197,6 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
updateTooltips();
auto deviceSize = qApp->getDeviceSize();
glViewport(0, 0, deviceSize.width(), deviceSize.height());
//Handle fading and deactivation/activation of UI
gpu::Batch batch;


@ -148,6 +148,7 @@ void ApplicationOverlay::renderRearView(RenderArgs* renderArgs) {
}
void ApplicationOverlay::renderStatsAndLogs(RenderArgs* renderArgs) {
// Display stats and log text onscreen
// Determine whether to compute timing details


@ -0,0 +1,269 @@
//
// AudioStatsDialog.cpp
// interface/src/ui
//
// Created by Bridget Went on 7/9/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <cstdio>
#include "InterfaceConfig.h"
#include <AudioClient.h>
#include <AudioConstants.h>
#include <AudioIOStats.h>
#include <DependencyManager.h>
#include <GeometryCache.h>
#include <NodeList.h>
#include <Util.h>
#include "AudioStatsDialog.h"
const unsigned COLOR0 = 0x33cc99ff;
const unsigned COLOR1 = 0xffef40c0;
const unsigned COLOR2 = 0xd0d0d0a0;
const unsigned COLOR3 = 0x01DD7880;
AudioStatsDisplay::AudioStatsDisplay(QFormLayout* form,
QString text, unsigned colorRGBA) :
_text(text),
_colorRGBA(colorRGBA)
{
_label = new QLabel();
_label->setAlignment(Qt::AlignCenter);
QPalette palette = _label->palette();
unsigned rgb = colorRGBA >> 8;
rgb = ((rgb & 0xfefefeu) >> 1) + ((rgb & 0xf8f8f8) >> 3);
palette.setColor(QPalette::WindowText, QColor::fromRgb(rgb));
_label->setPalette(palette);
form->addRow(_label);
}
void AudioStatsDisplay::paint() {
_label->setText(_strBuf);
}
void AudioStatsDisplay::updatedDisplay(QString str) {
_strBuf = str;
}
AudioStatsDialog::AudioStatsDialog(QWidget* parent) :
QDialog(parent, Qt::Window | Qt::WindowCloseButtonHint | Qt::WindowStaysOnTopHint) {
_shouldShowInjectedStreams = false;
setWindowTitle("Audio Network Statistics");
// Get statistics from the Audio Client
_stats = &DependencyManager::get<AudioClient>()->getStats();
// Create layouter
_form = new QFormLayout();
QDialog::setLayout(_form);
// Load and initialize all channels
renderStats();
_audioDisplayChannels = QVector<QVector<AudioStatsDisplay*>>(1);
_audioMixerID = addChannel(_form, _audioMixerStats, COLOR0);
_upstreamClientID = addChannel(_form, _upstreamClientStats, COLOR1);
_upstreamMixerID = addChannel(_form, _upstreamMixerStats, COLOR2);
_downstreamID = addChannel(_form, _downstreamStats, COLOR3);
_upstreamInjectedID = addChannel(_form, _upstreamInjectedStats, COLOR0);
connect(averageUpdateTimer, SIGNAL(timeout()), this, SLOT(updateTimerTimeout()));
averageUpdateTimer->start(1000);
}
int AudioStatsDialog::addChannel(QFormLayout* form, QVector<QString>& stats, const unsigned color) {
int channelID = _audioDisplayChannels.size() - 1;
for (int i = 0; i < stats.size(); i++)
// Create new display label
_audioDisplayChannels[channelID].push_back(new AudioStatsDisplay(form, stats.at(i), color));
// Expand vector to fit next channel
_audioDisplayChannels.resize(_audioDisplayChannels.size() + 1);
return channelID;
}
void AudioStatsDialog::updateStats(QVector<QString>& stats, int channelID) {
// Update all stat displays at specified channel
for (int i = 0; i < stats.size(); i++)
_audioDisplayChannels[channelID].at(i)->updatedDisplay(stats.at(i));
}
void AudioStatsDialog::renderStats() {
// Clear current stats from all vectors
clearAllChannels();
double audioInputBufferLatency = 0.0,
inputRingBufferLatency = 0.0,
networkRoundtripLatency = 0.0,
mixerRingBufferLatency = 0.0,
outputRingBufferLatency = 0.0,
audioOutputBufferLatency = 0.0;
AudioStreamStats downstreamAudioStreamStats = _stats->getMixerDownstreamStats();
SharedNodePointer audioMixerNodePointer = DependencyManager::get<NodeList>()->soloNodeOfType(NodeType::AudioMixer);
if (!audioMixerNodePointer.isNull()) {
audioInputBufferLatency = (double)_stats->getAudioInputMsecsReadStats().getWindowAverage();
inputRingBufferLatency = (double)_stats->getInputRungBufferMsecsAvailableStats().getWindowAverage();
networkRoundtripLatency = (double) audioMixerNodePointer->getPingMs();
mixerRingBufferLatency = (double)_stats->getMixerAvatarStreamStats()._framesAvailableAverage * AudioConstants::NETWORK_FRAME_MSECS;
outputRingBufferLatency = (double)downstreamAudioStreamStats._framesAvailableAverage * AudioConstants::NETWORK_FRAME_MSECS;
audioOutputBufferLatency = (double)_stats->getAudioOutputMsecsUnplayedStats().getWindowAverage();
}
double totalLatency = audioInputBufferLatency + inputRingBufferLatency + networkRoundtripLatency + mixerRingBufferLatency
+ outputRingBufferLatency + audioOutputBufferLatency;
_audioMixerStats.push_back(QString("Audio input buffer: %1ms").arg(
QString::number(audioInputBufferLatency, 'f', 2)) + QString(" - avg msecs of samples read to the audio input buffer in last 10s"));
_audioMixerStats.push_back(QString("Input ring buffer: %1ms").arg(
QString::number(inputRingBufferLatency, 'f', 2)) + QString(" - avg msecs of samples read to the input ring buffer in last 10s"));
_audioMixerStats.push_back(QString("Network to mixer: %1ms").arg(
QString::number((networkRoundtripLatency / 2.0), 'f', 2)) + QString(" - half of last ping value calculated by the node list"));
_audioMixerStats.push_back(QString("Network to client: %1ms").arg(
QString::number((mixerRingBufferLatency / 2.0),'f', 2)) + QString(" - half of last ping value calculated by the node list"));
_audioMixerStats.push_back(QString("Output ring buffer: %1ms").arg(
QString::number(outputRingBufferLatency,'f', 2)) + QString(" - avg msecs of samples in output ring buffer in last 10s"));
_audioMixerStats.push_back(QString("Audio output buffer: %1ms").arg(
QString::number(mixerRingBufferLatency,'f', 2)) + QString(" - avg msecs of samples in audio output buffer in last 10s"));
_audioMixerStats.push_back(QString("TOTAL: %1ms").arg(
QString::number(totalLatency, 'f', 2)) +QString(" - avg msecs of samples in audio output buffer in last 10s"));
const MovingMinMaxAvg<quint64>& packetSentTimeGaps = _stats->getPacketSentTimeGaps();
_upstreamClientStats.push_back(
QString("\nUpstream Mic Audio Packets Sent Gaps (by client):"));
_upstreamClientStats.push_back(
QString("Inter-packet timegaps (overall) | min: %1, max: %2, avg: %3").arg(formatUsecTime(packetSentTimeGaps.getMin()).toLatin1().data()).arg(formatUsecTime( packetSentTimeGaps.getMax()).toLatin1().data()).arg(formatUsecTime( packetSentTimeGaps.getAverage()).toLatin1().data()));
_upstreamClientStats.push_back(
QString("Inter-packet timegaps (last 30s) | min: %1, max: %2, avg: %3").arg(formatUsecTime(packetSentTimeGaps.getWindowMin()).toLatin1().data()).arg(formatUsecTime(packetSentTimeGaps.getWindowMax()).toLatin1().data()).arg(formatUsecTime(packetSentTimeGaps.getWindowAverage()).toLatin1().data()));
_upstreamMixerStats.push_back(QString("\nUpstream mic audio stats (received and reported by audio-mixer):"));
renderAudioStreamStats(&_stats->getMixerAvatarStreamStats(), &_upstreamMixerStats, true);
_downstreamStats.push_back(QString("\nDownstream mixed audio stats:"));
AudioStreamStats downstreamStats = _stats->getMixerDownstreamStats();
renderAudioStreamStats(&downstreamStats, &_downstreamStats, true);
if (_shouldShowInjectedStreams) {
foreach(const AudioStreamStats& injectedStreamAudioStats, _stats->getMixerInjectedStreamStatsMap()) {
_upstreamInjectedStats.push_back(QString("\nUpstream injected audio stats: stream ID: %1").arg( injectedStreamAudioStats._streamIdentifier.toString().toLatin1().data()));
renderAudioStreamStats(&injectedStreamAudioStats, &_upstreamInjectedStats, true);
}
}
}
void AudioStatsDialog::renderAudioStreamStats(const AudioStreamStats* streamStats, QVector<QString>* audioStreamStats, bool isDownstreamStats) {
audioStreamStats->push_back(
QString("Packet loss | overall: %1% (%2 lost), last_30s: %3% (%4 lost)").arg(QString::number((int)(streamStats->_packetStreamStats.getLostRate() * 100.0f))).arg(QString::number((int)(streamStats->_packetStreamStats._lost))).arg(QString::number((int)(streamStats->_packetStreamWindowStats.getLostRate() * 100.0f))).arg(QString::number((int)(streamStats->_packetStreamWindowStats._lost)))
);
if (isDownstreamStats) {
audioStreamStats->push_back(
QString("Ringbuffer frames | desired: %1, avg_available(10s): %2 + %3, available: %4+%5").arg(QString::number(streamStats->_desiredJitterBufferFrames)).arg(QString::number(streamStats->_framesAvailableAverage)).arg(QString::number((int)((float)_stats->getAudioInputMsecsReadStats().getWindowAverage() / AudioConstants::NETWORK_FRAME_MSECS))).arg(QString::number(streamStats->_framesAvailable)).arg(QString::number((int)(_stats->getAudioOutputMsecsUnplayedStats().getCurrentIntervalLastSample() / AudioConstants::NETWORK_FRAME_MSECS))));
} else {
audioStreamStats->push_back(
QString("Ringbuffer frames | desired: %1, avg_available(10s): %2, available: %3").arg(QString::number(streamStats->_desiredJitterBufferFrames)).arg(QString::number(streamStats->_framesAvailableAverage)).arg(QString::number(streamStats->_framesAvailable)));
}
audioStreamStats->push_back(
QString("Ringbuffer stats | starves: %1, prev_starve_lasted: %2, frames_dropped: %3, overflows: %4").arg(QString::number(streamStats->_starveCount)).arg(QString::number(streamStats->_consecutiveNotMixedCount)).arg(QString::number(streamStats->_framesDropped)).arg(QString::number(streamStats->_overflowCount)));
audioStreamStats->push_back(
QString("Inter-packet timegaps (overall) | min: %1, max: %2, avg: %3").arg(formatUsecTime(streamStats->_timeGapMin).toLatin1().data()).arg(formatUsecTime(streamStats->_timeGapMax).toLatin1().data()).arg(formatUsecTime(streamStats->_timeGapAverage).toLatin1().data()));
audioStreamStats->push_back(
QString("Inter-packet timegaps (last 30s) | min: %1, max: %2, avg: %3").arg(formatUsecTime(streamStats->_timeGapWindowMin).toLatin1().data()).arg(formatUsecTime(streamStats->_timeGapWindowMax).toLatin1().data()).arg(QString::number(streamStats->_timeGapWindowAverage).toLatin1().data()));
}
void AudioStatsDialog::clearAllChannels() {
_audioMixerStats.clear();
_upstreamClientStats.clear();
_upstreamMixerStats.clear();
_downstreamStats.clear();
_upstreamInjectedStats.clear();
}
void AudioStatsDialog::updateTimerTimeout() {
renderStats();
// Update all audio stats
updateStats(_audioMixerStats, _audioMixerID);
updateStats(_upstreamClientStats, _upstreamClientID);
updateStats(_upstreamMixerStats, _upstreamMixerID);
updateStats(_downstreamStats, _downstreamID);
updateStats(_upstreamInjectedStats, _upstreamInjectedID);
}
void AudioStatsDialog::paintEvent(QPaintEvent* event) {
// Repaint each stat in each channel
for (int i = 0; i < _audioDisplayChannels.size(); i++) {
for(int j = 0; j < _audioDisplayChannels[i].size(); j++) {
_audioDisplayChannels[i].at(j)->paint();
}
}
QDialog::paintEvent(event);
setFixedSize(width(), height());
}
void AudioStatsDialog::reject() {
// Just regularly close upon ESC
QDialog::close();
}
void AudioStatsDialog::closeEvent(QCloseEvent* event) {
QDialog::closeEvent(event);
emit closed();
}
AudioStatsDialog::~AudioStatsDialog() {
clearAllChannels();
for (int i = 0; i < _audioDisplayChannels.size(); i++) {
for(int j = 0; j < _audioDisplayChannels[i].size(); j++) {
delete _audioDisplayChannels[i].at(j);
}
// Clear only after the displays have been deleted, otherwise they leak
_audioDisplayChannels[i].clear();
}
}

View file

@ -0,0 +1,113 @@
//
// AudioStatsDialog.h
// hifi
//
// Created by Bridget Went on 7/9/15.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef __hifi__AudioStatsDialog__
#define __hifi__AudioStatsDialog__
#include <stdio.h>
#include <QDialog>
#include <QLabel>
#include <QFormLayout>
#include <QVector>
#include <QTimer>
#include <QString>
#include <QObject>
#include <DependencyManager.h>
class AudioIOStats;
class AudioStreamStats;
// Widget that displays a single audio stat line
class AudioStatsDisplay : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
public:
AudioStatsDisplay(QFormLayout* form, QString text, unsigned colorRGBA);
void updatedDisplay(QString str);
void paint();
private:
QString _strBuf;
QLabel* _label;
QString _text;
unsigned _colorRGBA;
};
// Dialog that groups the stat displays into channels
class AudioStatsDialog : public QDialog {
Q_OBJECT
public:
AudioStatsDialog(QWidget* parent);
~AudioStatsDialog();
void paintEvent(QPaintEvent*);
private:
// audio stats channels and the helpers that render them
QVector<QString> _audioMixerStats;
QVector<QString> _upstreamClientStats;
QVector<QString> _upstreamMixerStats;
QVector<QString> _downstreamStats;
QVector<QString> _upstreamInjectedStats;
int _audioMixerID;
int _upstreamClientID;
int _upstreamMixerID;
int _downstreamID;
int _upstreamInjectedID;
QVector<QVector<AudioStatsDisplay*>> _audioDisplayChannels;
int addChannel(QFormLayout* form, QVector<QString>& stats, const unsigned color);
void updateStats(QVector<QString>& stats, const int channelID);
void renderStats();
void clearAllChannels();
void renderAudioStreamStats(const AudioStreamStats* streamStats, QVector<QString>* audioStreamStats, bool isDownstreamStats);
const AudioIOStats* _stats;
QFormLayout* _form;
bool _isEnabled;
bool _shouldShowInjectedStreams;
signals:
void closed();
public slots:
void reject();
void updateTimerTimeout();
protected:
// Emits a 'closed' signal when this dialog is closed.
void closeEvent(QCloseEvent*);
private:
QTimer* averageUpdateTimer = new QTimer(this);
};
#endif /* defined(__hifi__AudioStatsDialog__) */

View file

@ -114,6 +114,20 @@ void DialogsManager::editAnimations() {
}
}
void DialogsManager::audioStatsDetails() {
if (! _audioStatsDialog) {
_audioStatsDialog = new AudioStatsDialog(qApp->getWindow());
connect(_audioStatsDialog, SIGNAL(closed()), _audioStatsDialog, SLOT(deleteLater()));
if (_hmdToolsDialog) {
_hmdToolsDialog->watchWindow(_audioStatsDialog->windowHandle());
}
_audioStatsDialog->show();
}
_audioStatsDialog->raise();
}
void DialogsManager::bandwidthDetails() {
if (! _bandwidthDialog) {
_bandwidthDialog = new BandwidthDialog(qApp->getWindow());

View file

@ -24,6 +24,7 @@ class QAction;
class AddressBarDialog;
class AnimationsDialog;
class AttachmentsDialog;
class AudioStatsDialog;
class BandwidthDialog;
class CachesSizeDialog;
class DiskCacheEditor;
@ -42,6 +43,7 @@ class DialogsManager : public QObject, public Dependency {
SINGLETON_DEPENDENCY
public:
QPointer<AudioStatsDialog> getAudioStatsDialog() const { return _audioStatsDialog; }
QPointer<BandwidthDialog> getBandwidthDialog() const { return _bandwidthDialog; }
QPointer<HMDToolsDialog> getHMDToolsDialog() const { return _hmdToolsDialog; }
QPointer<LodToolsDialog> getLodToolsDialog() const { return _lodToolsDialog; }
@ -58,6 +60,7 @@ public slots:
void editPreferences();
void editAttachments();
void editAnimations();
void audioStatsDetails();
void bandwidthDetails();
void lodTools();
void hmdTools(bool showTools);
@ -93,6 +96,7 @@ private:
QPointer<AddressBarDialog> _addressBarDialog;
QPointer<AnimationsDialog> _animationsDialog;
QPointer<AttachmentsDialog> _attachmentsDialog;
QPointer<AudioStatsDialog> _audioStatsDialog;
QPointer<BandwidthDialog> _bandwidthDialog;
QPointer<CachesSizeDialog> _cachesSizeDialog;
QPointer<DiskCacheEditor> _diskCacheEditor;

View file

@ -12,7 +12,6 @@
#include "InterfaceConfig.h"
#include <DeferredLightingEffect.h>
#include <GlowEffect.h>
#include <SharedUtil.h>
#include <StreamUtils.h>
@ -34,8 +33,6 @@ void Cube3DOverlay::render(RenderArgs* args) {
const float MAX_COLOR = 255.0f;
glm::vec4 cubeColor(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
//glDisable(GL_LIGHTING);
// TODO: handle registration point??
glm::vec3 position = getPosition();
glm::vec3 center = getCenter();

View file

@ -11,7 +11,6 @@
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include <GlowEffect.h>
#include <GeometryCache.h>
#include <RegisteredMetaTypes.h>

View file

@ -9,8 +9,6 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <GlowEffect.h>
#include "Application.h"
#include "LocalModelsOverlay.h"
@ -32,11 +30,7 @@ void LocalModelsOverlay::update(float deltatime) {
void LocalModelsOverlay::render(RenderArgs* args) {
if (_visible) {
float glowLevel = getGlowLevel();
Glower* glower = NULL;
if (glowLevel > 0.0f) {
glower = new Glower(glowLevel);
}
float glowLevel = getGlowLevel(); // FIXME, glowing removed for now
auto batch = args->_batch;
Application* app = Application::getInstance();
@ -47,10 +41,6 @@ void LocalModelsOverlay::render(RenderArgs* args) {
_entityTreeRenderer->render(args);
transform.setTranslation(oldTranslation);
batch->setViewTransform(transform);
if (glower) {
delete glower;
}
}
}

View file

@ -12,8 +12,6 @@
#include "ModelOverlay.h"
#include <Application.h>
#include <GlowEffect.h>
ModelOverlay::ModelOverlay()
: _model(),

View file

@ -14,7 +14,6 @@
#include "Rectangle3DOverlay.h"
#include <GeometryCache.h>
#include <GlowEffect.h>
#include <SharedUtil.h>
Rectangle3DOverlay::Rectangle3DOverlay() :

View file

@ -11,7 +11,6 @@
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include <GlowEffect.h>
#include <SharedUtil.h>
#include "Sphere3DOverlay.h"

View file

@ -20,7 +20,6 @@
#include <AbstractScriptingServicesInterface.h>
#include <AbstractViewStateInterface.h>
#include <DeferredLightingEffect.h>
#include <GlowEffect.h>
#include <Model.h>
#include <NetworkAccessManager.h>
#include <PerfStat.h>

View file

@ -14,7 +14,6 @@
#include <gpu/GPUConfig.h>
#include <GlowEffect.h>
#include <DeferredLightingEffect.h>
#include <GeometryCache.h>
#include <PerfStat.h>
@ -169,7 +168,6 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
_webSurface->resize(QSize(dims.x, dims.y));
currentContext->makeCurrent(currentSurface);
Glower glow(0.0f);
PerformanceTimer perfTimer("RenderableWebEntityItem::render");
Q_ASSERT(getType() == EntityTypes::Web);
static const glm::vec2 texMin(0.0f), texMax(1.0f), topLeft(-0.5f), bottomRight(0.5f);

View file

@ -86,7 +86,7 @@ public:
// Then by the inverse of the ViewTransform from world space to eye space
// finally projected into the clip space by the projection transform
// WARNING: ViewTransform transforms from eye space to world space; its inverse is composed
// with the ModelTransformu to create the equivalent of the glModelViewMatrix
// with the ModelTransform to create the equivalent of the gl ModelViewMatrix
void setModelTransform(const Transform& model);
void setViewTransform(const Transform& view);
void setProjectionTransform(const Mat4& proj);
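For illustration, the composition the comment above describes can be sketched with glm; the matrix names below are placeholders, not engine code.
#include <glm/glm.hpp>
// The backend derives the classic fixed-function modelview from the two transforms:
// ViewTransform maps eye space to world space, so its inverse composed with the
// model transform reproduces the old glModelViewMatrix.
glm::mat4 equivalentModelView(const glm::mat4& modelMat, const glm::mat4& viewMat) {
    return glm::inverse(viewMat) * modelMat;
}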
@ -114,7 +114,7 @@ public:
// TODO: As long as we have gl calls explicitly issued from interface
// code, we need to be able to record and batch these calls. The long
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API
// For now, instead of calling the raw glCall, use the equivalent call on the batch so the call is being recorded
// For now, instead of calling the raw gl Call, use the equivalent call on the batch so the call is being recorded
// The implementation of these functions is in GLBackend.cpp
void _glEnable(GLenum cap);
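A minimal sketch of the pattern the comment above describes; the function name is illustrative.
// Nothing touches the GL context here; GLBackend replays the recorded call later.
void recordBlendEnable(gpu::Batch& batch) {
    batch._glEnable(GL_BLEND);  // recorded on the batch instead of issued immediately
}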

View file

@ -148,7 +148,7 @@ public:
//
// As of now (03/2015), the call to makeProgram is in fact calling gpu::Context::makeProgram and does rely
// on the underlying gpu::Context::Backend being available. Since we only support glsl, this means that it relies
// on a glContext and the driver to compile the glsl shader.
// on a gl Context and the driver to compile the glsl shader.
// Hopefully in a few years the shader compilation will be completely abstracted in a separate shader compiler library
// independent of the graphics api in use underneath (looking at you opengl & vulkan).
static bool makeProgram(Shader& shader, const Shader::BindingSet& bindings = Shader::BindingSet());
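A rough usage sketch of the flow described above; the shader source strings and the "diffuseMap" slot are placeholders, not taken from this diff.
// Builds and links a program through makeProgram; vertexSource/pixelSource stand in
// for the generated *_vert.h / *_frag.h shader strings.
gpu::ShaderPointer buildProgram(const std::string& vertexSource, const std::string& pixelSource) {
    auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(vertexSource));
    auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(pixelSource));
    auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));
    gpu::Shader::BindingSet bindings;
    bindings.insert(gpu::Shader::Binding(std::string("diffuseMap"), 0)); // example slot binding
    gpu::Shader::makeProgram(*program, bindings); // compiles/links via the gl context, as noted above
    return program;
}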

View file

@ -21,7 +21,6 @@
#include "AbstractViewStateInterface.h"
#include "AmbientOcclusionEffect.h"
#include "GlowEffect.h"
#include "ProgramObject.h"
#include "RenderUtil.h"
#include "TextureCache.h"
@ -107,7 +106,7 @@ void AmbientOcclusionEffect::render() {
glBindTexture(GL_TEXTURE_2D, _rotationTextureID);
// render with the occlusion shader to the secondary/tertiary buffer
auto freeFramebuffer = DependencyManager::get<GlowEffect>()->getFreeFramebuffer();
auto freeFramebuffer = nullptr; // DependencyManager::get<GlowEffect>()->getFreeFramebuffer(); // FIXME
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(freeFramebuffer));
float left, right, bottom, top, nearVal, farVal;
@ -150,7 +149,7 @@ void AmbientOcclusionEffect::render() {
glEnable(GL_BLEND);
glBlendFuncSeparate(GL_ZERO, GL_SRC_COLOR, GL_ZERO, GL_ONE);
auto freeFramebufferTexture = freeFramebuffer->getRenderBuffer(0);
auto freeFramebufferTexture = nullptr; // freeFramebuffer->getRenderBuffer(0); // FIXME
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(freeFramebufferTexture));
_blurProgram->bind();

View file

@ -18,7 +18,6 @@
#include "AbstractViewStateInterface.h"
#include "DeferredLightingEffect.h"
#include "GeometryCache.h"
#include "GlowEffect.h"
#include "RenderUtil.h"
#include "TextureCache.h"
@ -238,7 +237,7 @@ void DeferredLightingEffect::render(RenderArgs* args) {
QSize framebufferSize = textureCache->getFrameBufferSize();
// binding the first framebuffer
auto freeFBO = DependencyManager::get<GlowEffect>()->getFreeFramebuffer();
auto freeFBO = DependencyManager::get<TextureCache>()->getSecondaryFramebuffer();
batch.setFramebuffer(freeFBO);
batch.setViewportTransform(args->_viewport);
@ -541,7 +540,7 @@ void DeferredLightingEffect::copyBack(RenderArgs* args) {
auto textureCache = DependencyManager::get<TextureCache>();
QSize framebufferSize = textureCache->getFrameBufferSize();
auto freeFBO = DependencyManager::get<GlowEffect>()->getFreeFramebuffer();
auto freeFBO = DependencyManager::get<TextureCache>()->getSecondaryFramebuffer();
batch.setFramebuffer(textureCache->getPrimaryFramebuffer());
batch.setPipeline(_blitLightBuffer);

View file

@ -433,8 +433,6 @@ void GeometryCache::renderGrid(gpu::Batch& batch, int x, int y, int width, int h
}
// Draw vertical grid lines
for (int i = cols + 1; --i >= 0; ) {
//glVertex2i(tx, y);
//glVertex2i(tx, y + height);
*(vertex++) = tx;
*(vertex++) = y;

View file

@ -1,225 +0,0 @@
//
// GlowEffect.cpp
// interface/src/renderer
//
// Created by Andrzej Kapolka on 8/7/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// include this before QOpenGLFramebufferObject, which includes an earlier version of OpenGL
#include <gpu/GPUConfig.h>
#include <QOpenGLFramebufferObject>
#include <QWindow>
#include <PathUtils.h>
#include <PerfStat.h>
#include "GlowEffect.h"
#include "ProgramObject.h"
#include "RenderUtil.h"
#include "TextureCache.h"
#include "RenderUtilsLogging.h"
#include "gpu/GLBackend.h"
GlowEffect::GlowEffect()
: _initialized(false),
_isOddFrame(false),
_isFirstFrame(true),
_intensity(0.0f),
_enabled(false) {
}
GlowEffect::~GlowEffect() {
if (_initialized) {
delete _addProgram;
delete _horizontalBlurProgram;
delete _verticalBlurAddProgram;
delete _verticalBlurProgram;
delete _addSeparateProgram;
delete _diffuseProgram;
}
}
gpu::FramebufferPointer GlowEffect::getFreeFramebuffer() const {
return (_isOddFrame ?
DependencyManager::get<TextureCache>()->getSecondaryFramebuffer():
DependencyManager::get<TextureCache>()->getTertiaryFramebuffer());
}
static ProgramObject* createProgram(const QString& name) {
ProgramObject* program = new ProgramObject();
program->addShaderFromSourceFile(QGLShader::Fragment, PathUtils::resourcesPath() + "shaders/" + name + ".frag");
program->link();
program->bind();
program->setUniformValue("originalTexture", 0);
program->release();
return program;
}
void GlowEffect::init(bool enabled) {
if (_initialized) {
qCDebug(renderutils, "[ERROR] GlowEffect is already initialized.");
return;
}
_addProgram = createProgram("glow_add");
_horizontalBlurProgram = createProgram("horizontal_blur");
_verticalBlurAddProgram = createProgram("vertical_blur_add");
_verticalBlurProgram = createProgram("vertical_blur");
_addSeparateProgram = createProgram("glow_add_separate");
_diffuseProgram = createProgram("diffuse");
_verticalBlurAddProgram->bind();
_verticalBlurAddProgram->setUniformValue("horizontallyBlurredTexture", 1);
_verticalBlurAddProgram->release();
_addSeparateProgram->bind();
_addSeparateProgram->setUniformValue("blurredTexture", 1);
_addSeparateProgram->release();
_diffuseProgram->bind();
_diffuseProgram->setUniformValue("diffusedTexture", 1);
_diffuseProgram->release();
_diffusionScaleLocation = _diffuseProgram->uniformLocation("diffusionScale");
_initialized = true;
_enabled = enabled;
}
void GlowEffect::prepare(RenderArgs* renderArgs) {
auto primaryFBO = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
GLuint fbo = gpu::GLBackend::getFramebufferID(primaryFBO);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fbo);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
_isEmpty = true;
_isOddFrame = !_isOddFrame;
}
void GlowEffect::begin(RenderArgs* renderArgs, float intensity) {
// store the current intensity and add the new amount
_intensityStack.push(_intensity);
glBlendColor(0.0f, 0.0f, 0.0f, _intensity += intensity);
_isEmpty &= (_intensity == 0.0f);
}
void GlowEffect::end(RenderArgs* renderArgs) {
// restore the saved intensity
glBlendColor(0.0f, 0.0f, 0.0f, _intensity = _intensityStack.pop());
}
gpu::FramebufferPointer GlowEffect::render(RenderArgs* renderArgs) {
PerformanceTimer perfTimer("glowEffect");
auto textureCache = DependencyManager::get<TextureCache>();
auto primaryFBO = gpu::GLBackend::getFramebufferID(textureCache->getPrimaryFramebuffer());
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindTexture(GL_TEXTURE_2D, textureCache->getPrimaryColorTextureID());
auto framebufferSize = textureCache->getFrameBufferSize();
glPushMatrix();
glLoadIdentity();
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadIdentity();
glDisable(GL_BLEND);
glDisable(GL_DEPTH_TEST);
glDepthMask(GL_FALSE);
gpu::FramebufferPointer destFBO = textureCache->getSecondaryFramebuffer();
if (!_enabled || _isEmpty) {
// copy the primary to the screen
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(destFBO));
glBindFramebuffer(GL_READ_FRAMEBUFFER, primaryFBO);
glBlitFramebuffer(
0, 0, framebufferSize.width(), framebufferSize.height(),
0, 0, framebufferSize.width(), framebufferSize.height(),
GL_COLOR_BUFFER_BIT, GL_NEAREST);
} else {
// diffuse into the secondary/tertiary (alternating between frames)
auto oldDiffusedFBO =
textureCache->getSecondaryFramebuffer();
auto newDiffusedFBO =
textureCache->getTertiaryFramebuffer();
if (_isOddFrame) {
qSwap(oldDiffusedFBO, newDiffusedFBO);
}
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(newDiffusedFBO));
if (_isFirstFrame) {
glClear(GL_COLOR_BUFFER_BIT);
} else {
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(oldDiffusedFBO->getRenderBuffer(0)));
_diffuseProgram->bind();
_diffuseProgram->setUniformValue(_diffusionScaleLocation, 1.0f / framebufferSize.width(), 1.0f / framebufferSize.height());
renderFullscreenQuad();
_diffuseProgram->release();
}
destFBO = oldDiffusedFBO;
glBindFramebuffer(GL_FRAMEBUFFER, 0);
// add diffused texture to the primary
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(newDiffusedFBO->getRenderBuffer(0)));
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(destFBO));
glViewport(0, 0, framebufferSize.width(), framebufferSize.height());
_addSeparateProgram->bind();
renderFullscreenQuad();
_addSeparateProgram->release();
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindTexture(GL_TEXTURE_2D, 0);
glActiveTexture(GL_TEXTURE0);
}
glPopMatrix();
glMatrixMode(GL_MODELVIEW);
glPopMatrix();
glEnable(GL_BLEND);
glEnable(GL_DEPTH_TEST);
glDepthMask(GL_TRUE);
glBindTexture(GL_TEXTURE_2D, 0);
_isFirstFrame = false;
return destFBO;
}
void GlowEffect::toggleGlowEffect(bool enabled) {
_enabled = enabled;
}
Glower::Glower(float amount) {
RenderArgs renderArgs;
DependencyManager::get<GlowEffect>()->begin(&renderArgs, amount);
}
Glower::Glower(RenderArgs* renderArgs, float amount) : _renderArgs(renderArgs) {
DependencyManager::get<GlowEffect>()->begin(_renderArgs, amount);
}
Glower::~Glower() {
DependencyManager::get<GlowEffect>()->end(_renderArgs);
}

View file

@ -1,97 +0,0 @@
//
// GlowEffect.h
// interface/src/renderer
//
// Created by Andrzej Kapolka on 8/7/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_GlowEffect_h
#define hifi_GlowEffect_h
#include <gpu/GPUConfig.h>
#include <gpu/Framebuffer.h>
#include "RenderArgs.h"
#include <QObject>
#include <QGLWidget>
#include <QStack>
#include <DependencyManager.h>
class ProgramObject;
/// A generic full screen glow effect.
class GlowEffect : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
public:
/// Returns a pointer to the framebuffer object that the glow effect is *not* using for persistent state
/// (either the secondary or the tertiary).
gpu::FramebufferPointer getFreeFramebuffer() const;
void init(bool enabled);
/// Prepares the glow effect for rendering the current frame. To be called before rendering the scene.
void prepare(RenderArgs* renderArgs);
/// Starts using the glow effect.
/// \param intensity the desired glow intensity, from zero to one
void begin(RenderArgs* renderArgs, float intensity = 1.0f);
/// Stops using the glow effect.
void end(RenderArgs* renderArgs);
/// Returns the current glow intensity.
float getIntensity() const { return _intensity; }
/// Renders the glow effect. To be called after rendering the scene.
/// \param toTexture whether to render to a texture, rather than to the frame buffer
/// \return the framebuffer object to which we rendered, or NULL if to the frame buffer
gpu::FramebufferPointer render(RenderArgs* renderArgs);
public slots:
void toggleGlowEffect(bool enabled);
private:
GlowEffect();
virtual ~GlowEffect();
bool _initialized;
ProgramObject* _addProgram;
ProgramObject* _horizontalBlurProgram;
ProgramObject* _verticalBlurAddProgram;
ProgramObject* _verticalBlurProgram;
ProgramObject* _addSeparateProgram;
ProgramObject* _diffuseProgram;
int _diffusionScaleLocation;
bool _isEmpty; ///< set when nothing in the scene is currently glowing
bool _isOddFrame; ///< controls the alternation between texture targets in diffuse add mode
bool _isFirstFrame; ///< for persistent modes, notes whether this is the first frame rendered
float _intensity;
QStack<float> _intensityStack;
bool _enabled;
};
/// RAII-style glow handler. Applies glow when in scope.
class Glower {
public:
Glower(float amount = 1.0f);
Glower(RenderArgs* renderArgs, float amount = 1.0f);
~Glower();
private:
RenderArgs* _renderArgs;
};
#endif // hifi_GlowEffect_h
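Since this header is removed by the commit, here is a brief reference sketch of the lifecycle its comments describe; renderArgs and the 0.5f intensity are placeholders.
// Per-frame flow of the removed glow API (reference only).
void glowFrame(RenderArgs* renderArgs) {
    auto glowEffect = DependencyManager::get<GlowEffect>();
    glowEffect->prepare(renderArgs);        // before rendering the scene
    {
        Glower glower(renderArgs, 0.5f);    // begin() at intensity 0.5
        // ... draw the glowing geometry ...
    }                                       // ~Glower() calls end(), restoring the previous intensity
    gpu::FramebufferPointer rendered = glowEffect->render(renderArgs); // after the scene; the FBO actually drawn to
    (void)rendered;
}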

View file

@ -32,7 +32,6 @@
#include "AbstractViewStateInterface.h"
#include "AnimationHandle.h"
#include "DeferredLightingEffect.h"
#include "GlowEffect.h"
#include "Model.h"
#include "RenderUtilsLogging.h"
@ -56,10 +55,6 @@
#include "model_lightmap_specular_map_frag.h"
#include "model_translucent_frag.h"
#define GLBATCH( call ) batch._##call
//#define GLBATCH( call ) call
using namespace std;
static int modelPointerTypeId = qRegisterMetaType<QPointer<Model> >();
@ -1851,22 +1846,6 @@ void Model::deleteGeometry() {
_blendedBlendshapeCoefficients.clear();
}
void Model::setupBatchTransform(gpu::Batch& batch, RenderArgs* args) {
// Capture the view matrix once for the rendering of this model
if (_transforms.empty()) {
_transforms.push_back(Transform());
}
// We should be able to use the Frustum viewpoint instead of the "viewTransform"
// but it's still buggy in some cases, so let's wait and fix it...
_transforms[0] = _viewState->getViewTransform();
_transforms[0].preTranslate(-_translation);
batch.setViewTransform(_transforms[0]);
}
AABox Model::getPartBounds(int meshIndex, int partIndex) {
if (meshIndex < _meshStates.size()) {
@ -2001,7 +1980,7 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
}
if (isSkinned) {
GLBATCH(glUniformMatrix4fv)(locations->clusterMatrices, state.clusterMatrices.size(), false,
batch._glUniformMatrix4fv(locations->clusterMatrices, state.clusterMatrices.size(), false,
(const float*)state.clusterMatrices.constData());
_transforms[0] = Transform();
_transforms[0].preTranslate(_translation);
@ -2022,7 +2001,7 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
}
if (mesh.colors.isEmpty()) {
GLBATCH(glColor4f)(1.0f, 1.0f, 1.0f, 1.0f);
batch._glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
}
// guard against partially loaded meshes
@ -2078,7 +2057,7 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
if (!part.emissiveTexture.transform.isIdentity()) {
part.emissiveTexture.transform.getMatrix(texcoordTransform[1]);
}
GLBATCH(glUniformMatrix4fv)(locations->texcoordMatrices, 2, false, (const float*) &texcoordTransform);
batch._glUniformMatrix4fv(locations->texcoordMatrices, 2, false, (const float*) &texcoordTransform);
}
if (!mesh.tangents.isEmpty()) {
@ -2103,7 +2082,7 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
// assert(locations->emissiveParams >= 0); // we should have the emissiveParams defined in the shader
float emissiveOffset = part.emissiveParams.x;
float emissiveScale = part.emissiveParams.y;
GLBATCH(glUniform2f)(locations->emissiveParams, emissiveOffset, emissiveScale);
batch._glUniform2f(locations->emissiveParams, emissiveOffset, emissiveScale);
NetworkTexture* emissiveMap = networkPart.emissiveTexture.data();
batch.setResourceTexture(locations->emissiveTextureUnit, (!emissiveMap || !emissiveMap->isLoaded()) ?
@ -2211,11 +2190,12 @@ void Model::pickPrograms(gpu::Batch& batch, RenderMode mode, bool translucent, f
batch.setPipeline((*pipeline).second._pipeline);
if ((locations->alphaThreshold > -1) && (mode != RenderArgs::SHADOW_RENDER_MODE)) {
GLBATCH(glUniform1f)(locations->alphaThreshold, alphaThreshold);
batch._glUniform1f(locations->alphaThreshold, alphaThreshold);
}
if ((locations->glowIntensity > -1) && (mode != RenderArgs::SHADOW_RENDER_MODE)) {
GLBATCH(glUniform1f)(locations->glowIntensity, DependencyManager::get<GlowEffect>()->getIntensity());
const float DEFAULT_GLOW_INTENSITY = 1.0f; // FIXME - glow is removed
batch._glUniform1f(locations->glowIntensity, DEFAULT_GLOW_INTENSITY);
}
}

View file

@ -407,7 +407,6 @@ private:
// helper functions used by render() or renderInScene()
void setupBatchTransform(gpu::Batch& batch, RenderArgs* args);
static void pickPrograms(gpu::Batch& batch, RenderArgs::RenderMode mode, bool translucent, float alphaThreshold,
bool hasLightmap, bool hasTangents, bool hasSpecular, bool isSkinned, bool isWireframe, RenderArgs* args,
Locations*& locations);

View file

@ -504,9 +504,6 @@ glm::vec2 Font::drawString(float x, float y, const QString & str,
_vao->release();
_texture->release(); // TODO: Brad & Sam, let's discuss this. Without this non-textured quads get their colors borked.
_program->release();
// FIXME, needed?
// glDisable(GL_TEXTURE_2D);
return advance;
}

View file

@ -110,9 +110,6 @@ const gpu::TexturePointer& TextureCache::getPermutationNormalTexture() {
_permutationNormalTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element(gpu::VEC3, gpu::UINT8, gpu::RGB), 256, 2));
_permutationNormalTexture->assignStoredMip(0, _blueTexture->getTexelFormat(), sizeof(data), data);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
}
return _permutationNormalTexture;
}
@ -122,13 +119,6 @@ const unsigned char OPAQUE_GRAY[] = { 0x80, 0x80, 0x80, 0xFF };
const unsigned char OPAQUE_BLUE[] = { 0x80, 0x80, 0xFF, 0xFF };
const unsigned char OPAQUE_BLACK[] = { 0x00, 0x00, 0x00, 0xFF };
/*
static void loadSingleColorTexture(const unsigned char* color) {
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 1, 1, 0, GL_RGBA, GL_UNSIGNED_BYTE, color);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
}
*/
const gpu::TexturePointer& TextureCache::getWhiteTexture() {
if (!_whiteTexture) {
_whiteTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element(gpu::VEC4, gpu::UINT8, gpu::RGBA), 1, 1));

View file

@ -87,20 +87,20 @@ public:
const Vec3& getTranslation() const;
void setTranslation(const Vec3& translation); // [new this] = [translation] * [this.rotation] * [this.scale]
void preTranslate(const Vec3& translation); // [new this] = [translation] * [this]
void postTranslate(const Vec3& translation); // [new this] = [this] * [translation] equivalent to glTranslate
void postTranslate(const Vec3& translation); // [new this] = [this] * [translation] equivalent to:glTranslate
const Quat& getRotation() const;
void setRotation(const Quat& rotation); // [new this] = [this.translation] * [rotation] * [this.scale]
void preRotate(const Quat& rotation); // [new this] = [rotation] * [this]
void postRotate(const Quat& rotation); // [new this] = [this] * [rotation] equivalent to glRotate
void postRotate(const Quat& rotation); // [new this] = [this] * [rotation] equivalent to:glRotate
const Vec3& getScale() const;
void setScale(float scale);
void setScale(const Vec3& scale); // [new this] = [this.translation] * [this.rotation] * [scale]
void preScale(float scale);
void preScale(const Vec3& scale);
void postScale(float scale); // [new this] = [this] * [scale] equivalent to glScale
void postScale(const Vec3& scale); // [new this] = [this] * [scale] equivalent to glScale
void postScale(float scale); // [new this] = [this] * [scale] equivalent to:glScale
void postScale(const Vec3& scale); // [new this] = [this] * [scale] equivalent to:glScale
bool isIdentity() const { return (_flags & ~Flags(FLAG_CACHE_INVALID_BITSET)).none(); }
bool isTranslating() const { return _flags[FLAG_TRANSLATION]; }
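To make the pre/post distinction above concrete, a small sketch; the rotation and offsets are placeholder values.
// Build a yawed transform, then offset it in two different frames.
Transform yawThenOffset() {
    Transform t;
    t.setRotation(Quat(glm::vec3(0.0f, glm::radians(90.0f), 0.0f))); // placeholder 90-degree yaw from Euler angles
    t.postTranslate(Vec3(1.0f, 0.0f, 0.0f)); // [new t] = [t] * [translation]: offset in t's local (rotated) frame, glTranslate-style
    t.preTranslate(Vec3(1.0f, 0.0f, 0.0f));  // [new t] = [translation] * [t]: offset in the parent frame
    return t;
}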

View file

@ -131,7 +131,6 @@ public:
EditEntitiesHelp,
Enable3DTVMode,
EnableCharacterController,
EnableGlowEffect,
EnableVRMode,
ExpandMyAvatarSimulateTiming,
ExpandMyAvatarTiming,