Mirror of https://github.com/lubosz/overte.git (synced 2025-04-26 19:55:27 +02:00)

Commit 256c786e28: Merge branch 'master' of https://github.com/highfidelity/hifi into smart_pointers

155 changed files with 3244 additions and 3655 deletions
assignment-client/src/avatars
cmake
examples
interface
    resources
    src
        Application.cpp, Application.h, LODManager.h, Menu.cpp, Menu.h, Util.cpp, Util.h
        audio
        avatar
            Avatar.cpp, Avatar.h, AvatarActionHold.cpp, AvatarManager.cpp, AvatarManager.h, FaceModel.cpp, MyAvatar.cpp, MyAvatar.h, SkeletonModel.cpp, SkeletonModel.h
        devices
        octree
        ui
libraries
    animation/src
    audio/src
    avatars/src
    entities-renderer/src
        EntityTreeRenderer.cpp, RenderableModelEntityItem.cpp, RenderableParticleEffectEntityItem.cpp, RenderableWebEntityItem.cpp
    entities/src
        BoxEntityItem.cpp, EntityActionInterface.cpp, EntityItem.cpp, EntityItem.h, EntityScriptingInterface.cpp, EntityTreeElement.cpp, LightEntityItem.cpp, LineEntityItem.cpp, ModelEntityItem.cpp, ModelEntityItem.h, ParticleEffectEntityItem.cpp, PolyVoxEntityItem.cpp, SphereEntityItem.cpp, TextEntityItem.cpp, WebEntityItem.cpp, ZoneEntityItem.cpp
    fbx/src
    gpu/src/gpu
@ -53,7 +53,7 @@ AnimationDetails ScriptableAvatar::getAnimationDetails() {
void ScriptableAvatar::update(float deltatime) {
    // Run animation
    if (_animation != NULL && _animation->isValid() && _animation->getFrames().size() > 0) {
    if (_animation && _animation->isLoaded() && _animation->getFrames().size() > 0) {
        QStringList modelJoints = getJointNames();
        QStringList animationJoints = _animation->getJointNames();
cmake/externals/polyvox/CMakeLists.txt (vendored, 24 lines changed)
@ -3,8 +3,8 @@ set(EXTERNAL_NAME polyvox)
include(ExternalProject)
ExternalProject_Add(
    ${EXTERNAL_NAME}
    URL http://hifi-public.s3.amazonaws.com/dependencies/polyvox.zip
    URL_MD5 904b840328278c9b36fa7a14be730c34
    URL http://hifi-public.s3.amazonaws.com/dependencies/polyvox-master-2015-7-15.zip
    URL_MD5 9ec6323b87e849ae36e562ae1c7494a9
    CMAKE_ARGS -DENABLE_EXAMPLES=OFF -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
    BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
    LOG_DOWNLOAD 1
@ -24,7 +24,16 @@ if (APPLE)
    ${EXTERNAL_NAME}
    change-install-name
    COMMENT "Calling install_name_tool on libraries to fix install name for dylib linking"
    COMMAND ${CMAKE_COMMAND} -DINSTALL_NAME_LIBRARY_DIR=${INSTALL_NAME_LIBRARY_DIR} -P ${EXTERNAL_PROJECT_DIR}/OSXInstallNameChange.cmake
    COMMAND ${CMAKE_COMMAND} -DINSTALL_NAME_LIBRARY_DIR=${INSTALL_NAME_LIBRARY_DIR}/Debug -P ${EXTERNAL_PROJECT_DIR}/OSXInstallNameChange.cmake
    DEPENDEES install
    WORKING_DIRECTORY <SOURCE_DIR>
    LOG 1
)
ExternalProject_Add_Step(
    ${EXTERNAL_NAME}
    change-install-name
    COMMENT "Calling install_name_tool on libraries to fix install name for dylib linking"
    COMMAND ${CMAKE_COMMAND} -DINSTALL_NAME_LIBRARY_DIR=${INSTALL_NAME_LIBRARY_DIR}/Release -P ${EXTERNAL_PROJECT_DIR}/OSXInstallNameChange.cmake
    DEPENDEES install
    WORKING_DIRECTORY <SOURCE_DIR>
    LOG 1
@ -48,12 +57,15 @@ endif ()

if (WIN32)
    set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY ${INSTALL_DIR}/PolyVoxCore/lib/PolyVoxCore.lib CACHE FILEPATH "polyvox core library")
    set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_DEBUG ${INSTALL_DIR}/PolyVoxCore/lib/Debug/PolyVoxCore.lib CACHE FILEPATH "polyvox core library")
    set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_RELEASE ${INSTALL_DIR}/PolyVoxCore/lib/Release/PolyVoxCore.lib CACHE FILEPATH "polyvox core library")
    # set(${EXTERNAL_NAME_UPPER}_UTIL_LIBRARY ${INSTALL_DIR}/PolyVoxUtil/lib/PolyVoxUtil.lib CACHE FILEPATH "polyvox util library")
elseif (APPLE)
    set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY ${INSTALL_DIR}/lib/libPolyVoxCore.dylib CACHE FILEPATH "polyvox core library")
    set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_DEBUG ${INSTALL_DIR}/lib/Debug/libPolyVoxCore.dylib CACHE FILEPATH "polyvox core library")
    set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_RELEASE ${INSTALL_DIR}/lib/Release/libPolyVoxCore.dylib CACHE FILEPATH "polyvox core library")
    # set(${EXTERNAL_NAME_UPPER}_UTIL_LIBRARY ${INSTALL_DIR}/lib/libPolyVoxUtil.dylib CACHE FILEPATH "polyvox util library")
else ()
    set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY ${INSTALL_DIR}/lib/libPolyVoxCore.so CACHE FILEPATH "polyvox core library")
    set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_DEBUG ${INSTALL_DIR}/lib/Debug/libPolyVoxCore.so CACHE FILEPATH "polyvox core library")
    set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_RELEASE ${INSTALL_DIR}/lib/Release/libPolyVoxCore.so CACHE FILEPATH "polyvox core library")
    # set(${EXTERNAL_NAME_UPPER}_UTIL_LIBRARY ${INSTALL_DIR}/lib/libPolyVoxUtil.so CACHE FILEPATH "polyvox util library")
endif ()
@ -24,9 +24,12 @@ hifi_library_search_hints("polyvox")
find_path(POLYVOX_CORE_INCLUDE_DIRS PolyVoxCore/SimpleVolume.h PATH_SUFFIXES include include/PolyVoxCore HINTS ${POLYVOX_SEARCH_DIRS})
# find_path(POLYVOX_UTIL_INCLUDE_DIRS PolyVoxUtil/Serialization.h PATH_SUFFIXES include include/PolyVoxUtil HINTS ${POLYVOX_SEARCH_DIRS})

find_library(POLYVOX_CORE_LIBRARY NAMES PolyVoxCore PATH_SUFFIXES lib HINTS ${POLYVOX_SEARCH_DIRS})
find_library(POLYVOX_CORE_LIBRARY_DEBUG NAMES PolyVoxCore PATH_SUFFIXES lib/Debug HINTS ${POLYVOX_SEARCH_DIRS})
find_library(POLYVOX_CORE_LIBRARY_RELEASE NAMES PolyVoxCore PATH_SUFFIXES lib/Release lib HINTS ${POLYVOX_SEARCH_DIRS})
# find_library(POLYVOX_UTIL_LIBRARY NAMES PolyVoxUtil PATH_SUFFIXES lib HINTS ${POLYVOX_SEARCH_DIRS})

include(SelectLibraryConfigurations)
select_library_configurations(POLYVOX_CORE)

# if (WIN32)
#   find_path(POLYVOX_DLL_PATH polyvox.dll PATH_SUFFIXES bin HINTS ${POLYVOX_SEARCH_DIRS})
examples/FlockOfbirds.js (new file, 265 lines)
@ -0,0 +1,265 @@
|
|||
//
|
||||
// flockOfbirds.js
|
||||
// examples
|
||||
//
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
// Creates a flock of birds that fly around and chirp, staying inside the corners of the box defined
|
||||
// at the start of the script.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
|
||||
|
||||
|
||||
// The area over which the birds will fly
|
||||
var lowerCorner = { x: 1, y: 1, z: 1 };
|
||||
var upperCorner = { x: 10, y: 10, z: 10 };
|
||||
var STARTING_FRACTION = 0.25;
|
||||
|
||||
var NUM_BIRDS = 50;
|
||||
var playSounds = true;
|
||||
var SOUND_PROBABILITY = 0.001;
|
||||
var numPlaying = 0;
|
||||
var BIRD_SIZE = 0.08;
|
||||
var BIRD_MASTER_VOLUME = 0.1;
|
||||
var FLAP_PROBABILITY = 0.005;
|
||||
var RANDOM_FLAP_VELOCITY = 1.0;
|
||||
var FLAP_UP = 1.0;
|
||||
var BIRD_GRAVITY = -0.5;
|
||||
var LINEAR_DAMPING = 0.2;
|
||||
var FLAP_FALLING_PROBABILITY = 0.025;
|
||||
var MIN_ALIGNMENT_VELOCITY = 0.0;
|
||||
var MAX_ALIGNMENT_VELOCITY = 1.0;
|
||||
var VERTICAL_ALIGNMENT_COUPLING = 0.0;
|
||||
var ALIGNMENT_FORCE = 1.5;
|
||||
var COHESION_FORCE = 1.0;
|
||||
var MAX_COHESION_VELOCITY = 0.5;
|
||||
|
||||
var floor = false;
|
||||
var MAKE_FLOOR = false;
|
||||
|
||||
var averageVelocity = { x: 0, y: 0, z: 0 };
|
||||
var averagePosition = { x: 0, y: 0, z: 0 };
|
||||
|
||||
var birdsLoaded = false;
|
||||
|
||||
var birds = [];
|
||||
var playing = [];
|
||||
|
||||
function randomVector(scale) {
|
||||
return { x: Math.random() * scale - scale / 2.0, y: Math.random() * scale - scale / 2.0, z: Math.random() * scale - scale / 2.0 };
|
||||
}
|
||||
|
||||
function updateBirds(deltaTime) {
|
||||
if (!Entities.serversExist() || !Entities.canRez()) {
|
||||
return;
|
||||
}
|
||||
if (!birdsLoaded) {
|
||||
loadBirds(NUM_BIRDS);
|
||||
birdsLoaded = true;
|
||||
return;
|
||||
}
|
||||
var sumVelocity = { x: 0, y: 0, z: 0 };
|
||||
var sumPosition = { x: 0, y: 0, z: 0 };
|
||||
var birdPositionsCounted = 0;
|
||||
var birdVelocitiesCounted = 0;
|
||||
for (var i = 0; i < birds.length; i++) {
|
||||
if (birds[i].entityId) {
|
||||
var properties = Entities.getEntityProperties(birds[i].entityId);
|
||||
// If Bird has been deleted, bail
|
||||
if (properties.id != birds[i].entityId) {
|
||||
birds[i].entityId = false;
|
||||
return;
|
||||
}
|
||||
// Sum up average position and velocity
|
||||
if (Vec3.length(properties.velocity) > MIN_ALIGNMENT_VELOCITY) {
|
||||
sumVelocity = Vec3.sum(sumVelocity, properties.velocity);
|
||||
birdVelocitiesCounted += 1;
|
||||
}
|
||||
sumPosition = Vec3.sum(sumPosition, properties.position);
|
||||
birdPositionsCounted += 1;
|
||||
|
||||
var downwardSpeed = (properties.velocity.y < 0) ? -properties.velocity.y : 0.0;
|
||||
if ((properties.position.y < upperCorner.y) && (Math.random() < (FLAP_PROBABILITY + (downwardSpeed * FLAP_FALLING_PROBABILITY)))) {
|
||||
// More likely to flap if falling
|
||||
var randomVelocity = randomVector(RANDOM_FLAP_VELOCITY);
|
||||
randomVelocity.y = FLAP_UP + Math.random() * FLAP_UP;
|
||||
|
||||
// Alignment Velocity
|
||||
var alignmentVelocityMagnitude = Math.min(MAX_ALIGNMENT_VELOCITY, Vec3.length(Vec3.multiply(ALIGNMENT_FORCE, averageVelocity)));
|
||||
var alignmentVelocity = Vec3.multiply(alignmentVelocityMagnitude, Vec3.normalize(averageVelocity));
|
||||
alignmentVelocity.y *= VERTICAL_ALIGNMENT_COUPLING;
|
||||
|
||||
// Cohesion
|
||||
var distanceFromCenter = Vec3.length(Vec3.subtract(averagePosition, properties.position));
|
||||
var cohesionVelocitySize = Math.min(distanceFromCenter * COHESION_FORCE, MAX_COHESION_VELOCITY);
|
||||
var cohesionVelocity = Vec3.multiply(cohesionVelocitySize, Vec3.normalize(Vec3.subtract(averagePosition, properties.position)));
|
||||
|
||||
var newVelocity = Vec3.sum(randomVelocity, Vec3.sum(alignmentVelocity, cohesionVelocity));
|
||||
|
||||
Entities.editEntity(birds[i].entityId, { velocity: Vec3.sum(properties.velocity, newVelocity) });
|
||||
|
||||
}
|
||||
|
||||
// Check whether to play a chirp
|
||||
if (playSounds && (!birds[i].audioId || !birds[i].audioId.isPlaying) && (Math.random() < ((numPlaying > 0) ? SOUND_PROBABILITY / numPlaying : SOUND_PROBABILITY))) {
|
||||
var options = {
|
||||
position: properties.position,
|
||||
volume: BIRD_MASTER_VOLUME
|
||||
};
|
||||
// Play chirp
|
||||
if (birds[i].audioId) {
|
||||
birds[i].audioId.setOptions(options);
|
||||
birds[i].audioId.restart();
|
||||
} else {
|
||||
birds[i].audioId = Audio.playSound(birds[i].sound, options);
|
||||
}
|
||||
numPlaying++;
|
||||
// Change size
|
||||
Entities.editEntity(birds[i].entityId, { dimensions: Vec3.multiply(1.5, properties.dimensions)});
|
||||
|
||||
} else if (birds[i].audioId) {
|
||||
// If bird is playing a chirp
|
||||
if (!birds[i].audioId.isPlaying) {
|
||||
Entities.editEntity(birds[i].entityId, { dimensions: { x: BIRD_SIZE, y: BIRD_SIZE, z: BIRD_SIZE }});
|
||||
numPlaying--;
|
||||
}
|
||||
}
|
||||
|
||||
// Keep birds in their 'cage'
|
||||
var bounce = false;
|
||||
var newVelocity = properties.velocity;
|
||||
var newPosition = properties.position;
|
||||
if (properties.position.x < lowerCorner.x) {
|
||||
newPosition.x = lowerCorner.x;
|
||||
newVelocity.x *= -1.0;
|
||||
bounce = true;
|
||||
} else if (properties.position.x > upperCorner.x) {
|
||||
newPosition.x = upperCorner.x;
|
||||
newVelocity.x *= -1.0;
|
||||
bounce = true;
|
||||
}
|
||||
if (properties.position.y < lowerCorner.y) {
|
||||
newPosition.y = lowerCorner.y;
|
||||
newVelocity.y *= -1.0;
|
||||
bounce = true;
|
||||
} else if (properties.position.y > upperCorner.y) {
|
||||
newPosition.y = upperCorner.y;
|
||||
newVelocity.y *= -1.0;
|
||||
bounce = true;
|
||||
}
|
||||
if (properties.position.z < lowerCorner.z) {
|
||||
newPosition.z = lowerCorner.z;
|
||||
newVelocity.z *= -1.0;
|
||||
bounce = true;
|
||||
} else if (properties.position.z > upperCorner.z) {
|
||||
newPosition.z = upperCorner.z;
|
||||
newVelocity.z *= -1.0;
|
||||
bounce = true;
|
||||
}
|
||||
if (bounce) {
|
||||
Entities.editEntity(birds[i].entityId, { position: newPosition, velocity: newVelocity });
|
||||
}
|
||||
}
|
||||
}
|
||||
// Update average velocity and position of flock
|
||||
if (birdVelocitiesCounted > 0) {
|
||||
averageVelocity = Vec3.multiply(1.0 / birdVelocitiesCounted, sumVelocity);
|
||||
//print(Vec3.length(averageVelocity));
|
||||
}
|
||||
if (birdPositionsCounted > 0) {
|
||||
averagePosition = Vec3.multiply(1.0 / birdPositionsCounted, sumPosition);
|
||||
}
|
||||
}
|
||||
|
||||
// Connect a call back that happens every frame
|
||||
Script.update.connect(updateBirds);
|
||||
|
||||
// Delete our little friends if script is stopped
|
||||
Script.scriptEnding.connect(function() {
|
||||
for (var i = 0; i < birds.length; i++) {
|
||||
Entities.deleteEntity(birds[i].entityId);
|
||||
}
|
||||
if (floor) {
|
||||
Entities.deleteEntity(floor);
|
||||
}
|
||||
});
|
||||
|
||||
function loadBirds(howMany) {
|
||||
while (!Entities.serversExist() || !Entities.canRez()) {
|
||||
}
|
||||
var sound_filenames = ["bushtit_1.raw", "bushtit_2.raw", "bushtit_3.raw"];
|
||||
/* Here are more sounds/species you can use
|
||||
, "mexicanWhipoorwill.raw",
|
||||
"rosyfacedlovebird.raw", "saysphoebe.raw", "westernscreechowl.raw", "bandtailedpigeon.wav", "bridledtitmouse.wav",
|
||||
"browncrestedflycatcher.wav", "commonnighthawk.wav", "commonpoorwill.wav", "doublecrestedcormorant.wav",
|
||||
"gambelsquail.wav", "goldcrownedkinglet.wav", "greaterroadrunner.wav","groovebilledani.wav","hairywoodpecker.wav",
|
||||
"housewren.wav","hummingbird.wav", "mountainchickadee.wav", "nightjar.wav", "piebilledgrieb.wav", "pygmynuthatch.wav",
|
||||
"whistlingduck.wav", "woodpecker.wav"];
|
||||
*/
|
||||
|
||||
var colors = [
|
||||
{ red: 242, green: 207, blue: 13 },
|
||||
{ red: 238, green: 94, blue: 11 },
|
||||
{ red: 81, green: 30, blue: 7 },
|
||||
{ red: 195, green: 176, blue: 81 },
|
||||
{ red: 235, green: 190, blue: 152 },
|
||||
{ red: 167, green: 99, blue: 52 },
|
||||
{ red: 199, green: 122, blue: 108 },
|
||||
{ red: 246, green: 220, blue: 189 },
|
||||
{ red: 208, green: 145, blue: 65 },
|
||||
{ red: 173, green: 120 , blue: 71 },
|
||||
{ red: 132, green: 147, blue: 174 },
|
||||
{ red: 164, green: 74, blue: 40 },
|
||||
{ red: 131, green: 127, blue: 134 },
|
||||
{ red: 209, green: 157, blue: 117 },
|
||||
{ red: 205, green: 191, blue: 193 },
|
||||
{ red: 193, green: 154, blue: 118 },
|
||||
{ red: 205, green: 190, blue: 169 },
|
||||
{ red: 199, green: 111, blue: 69 },
|
||||
{ red: 221, green: 223, blue: 228 },
|
||||
{ red: 115, green: 92, blue: 87 },
|
||||
{ red: 214, green: 165, blue: 137 },
|
||||
{ red: 160, green: 124, blue: 33 },
|
||||
{ red: 117, green: 91, blue: 86 },
|
||||
{ red: 113, green: 104, blue: 107 },
|
||||
{ red: 216, green: 153, blue: 99 },
|
||||
{ red: 242, green: 226, blue: 64 }
|
||||
];
|
||||
|
||||
var SOUND_BASE_URL = "http://public.highfidelity.io/sounds/Animals/";
|
||||
|
||||
for (var i = 0; i < howMany; i++) {
|
||||
var whichBird = Math.floor(Math.random() * sound_filenames.length);
|
||||
var position = {
|
||||
x: lowerCorner.x + (upperCorner.x - lowerCorner.x) / 2.0 + (Math.random() - 0.5) * (upperCorner.x - lowerCorner.x) * STARTING_FRACTION,
|
||||
y: lowerCorner.y + (upperCorner.y - lowerCorner.y) / 2.0 + (Math.random() - 0.5) * (upperCorner.y - lowerCorner.y) * STARTING_FRACTION,
|
||||
z: lowerCorner.z + (upperCorner.z - lowerCorner.z) / 2.0 + (Math.random() - 0.5) * (upperCorner.z - lowerCorner.z) * STARTING_FRACTION
|
||||
};
|
||||
|
||||
birds.push({
|
||||
sound: SoundCache.getSound(SOUND_BASE_URL + sound_filenames[whichBird]),
|
||||
entityId: Entities.addEntity({
|
||||
type: "Sphere",
|
||||
position: position,
|
||||
dimensions: { x: BIRD_SIZE, y: BIRD_SIZE, z: BIRD_SIZE },
|
||||
gravity: { x: 0, y: BIRD_GRAVITY, z: 0 },
|
||||
velocity: { x: 0, y: -0.1, z: 0 },
|
||||
linearDamping: LINEAR_DAMPING,
|
||||
collisionsWillMove: true,
|
||||
color: colors[whichBird]
|
||||
}),
|
||||
audioId: false,
|
||||
isPlaying: false
|
||||
});
|
||||
}
|
||||
if (MAKE_FLOOR) {
|
||||
var FLOOR_THICKNESS = 0.05;
|
||||
floor = Entities.addEntity({ type: "Box", position: { x: lowerCorner.x + (upperCorner.x - lowerCorner.x) / 2.0,
|
||||
y: lowerCorner.y,
|
||||
z: lowerCorner.z + (upperCorner.z - lowerCorner.z) / 2.0 },
|
||||
dimensions: { x: (upperCorner.x - lowerCorner.x), y: FLOOR_THICKNESS, z: (upperCorner.z - lowerCorner.z)},
|
||||
color: {red: 100, green: 100, blue: 100}
|
||||
});
|
||||
}
|
||||
}
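For reference, the steering applied to each bird in the update loop above is the sum of three velocity terms: a random, upward-biased flap, alignment with the flock's average velocity, and cohesion toward the flock's average position. A minimal sketch of that combination, assuming the Vec3 scripting API and the constants defined at the top of the script (the helper name is hypothetical; the script computes this inline and applies it via Entities.editEntity):

// Hypothetical helper (not part of the script) summarizing the steering above.
function computeFlapVelocity(position, averageVelocity, averagePosition) {
    // Random flap, biased upward.
    var flap = randomVector(RANDOM_FLAP_VELOCITY);
    flap.y = FLAP_UP + Math.random() * FLAP_UP;

    // Alignment: match the flock's average velocity, clamped to MAX_ALIGNMENT_VELOCITY.
    var alignSpeed = Math.min(MAX_ALIGNMENT_VELOCITY,
        Vec3.length(Vec3.multiply(ALIGNMENT_FORCE, averageVelocity)));
    var alignment = Vec3.multiply(alignSpeed, Vec3.normalize(averageVelocity));
    alignment.y *= VERTICAL_ALIGNMENT_COUPLING;

    // Cohesion: steer toward the flock's average position, proportional to distance, capped.
    var toCenter = Vec3.subtract(averagePosition, position);
    var cohesionSpeed = Math.min(Vec3.length(toCenter) * COHESION_FORCE, MAX_COHESION_VELOCITY);
    var cohesion = Vec3.multiply(cohesionSpeed, Vec3.normalize(toCenter));

    // The script adds this delta to the bird's current velocity.
    return Vec3.sum(flap, Vec3.sum(alignment, cohesion));
}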
@ -656,7 +656,9 @@ function mouseMove(event) {
function handleIdleMouse() {
    idleMouseTimerId = null;
    highlightEntityUnderCursor(lastMousePosition, true);
    if (isActive) {
        highlightEntityUnderCursor(lastMousePosition, true);
    }
}

function highlightEntityUnderCursor(position, accurateRay) {
@ -130,10 +130,10 @@
var others = Entities.findEntities(this.properties.position, this.properties.dimensions.y);

for (var i = 0; i < others.length; i++) {
    var piece = others[i];
    var pieceID = others[i];

    if (piece.id != this.entityID) {
        var properties = Entities.getEntityProperties(piece);
    if (pieceID != this.entityID) {
        var properties = Entities.getEntityProperties(pieceID);

        var isWhite = properties.modelURL.search("White") !== -1;
        var type = (properties.modelURL.search("King") !== -1) ? 4 :
@ -147,7 +147,7 @@
if (myPos.i === piecePos.i && myPos.j === piecePos.j && type !== -2) {
    var position = this.getAbsolutePosition((isWhite) ? { i: type, j: -1 } : { i: 7 - type, j: 8 },
        properties.dimensions.y / 2.0);
    Entities.editEntity(piece, {
    Entities.editEntity(pieceID, {
        position: position
    });
    break;
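The rename from piece to pieceID above reflects that Entities.findEntities returns entity IDs, which are compared against this.entityID directly and passed to Entities.getEntityProperties to fetch each piece's properties. A minimal sketch of that lookup pattern, assuming the same scripting API (the function and variable names here are illustrative, not from the script):

// Hypothetical standalone version of the lookup loop above.
function findOtherPieces(entityID, position, radius) {
    var others = [];
    var nearbyIDs = Entities.findEntities(position, radius);          // returns entity IDs
    for (var i = 0; i < nearbyIDs.length; i++) {
        var pieceID = nearbyIDs[i];
        if (pieceID != entityID) {                                     // skip the calling entity by comparing IDs
            others.push(Entities.getEntityProperties(pieceID));        // look up properties by ID
        }
    }
    return others;
}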
@ -175,10 +175,12 @@ function positionStick(stickOrientation) {
    inHand = false;
    Entities.updateAction(stickID, actionID, {
        relativePosition: offset,
        relativeRotation: stickOrientation
        relativeRotation: stickOrientation,
        hand: "right"
    });
}
function resetToHand() { // Maybe coordinate with positionStick?
function resetToHand() { // For use with controllers, puts the sword in contact with the hand.
    // Maybe coordinate with positionStick?
    if (inHand) { // Optimization: bail if we're already inHand.
        return;
    }
@ -191,14 +193,14 @@ function resetToHand() { // Maybe coordinate with positionStick?
    });
    inHand = true;
}
function isControllerActive() {
    // I don't think the hydra API provides any reliable way to know whether a particular controller is active. Ask for both.
    controllerActive = (Vec3.length(Controller.getSpatialControlPosition(3)) > 0) || Vec3.length(Controller.getSpatialControlPosition(4)) > 0;
    return controllerActive;
}
function mouseMoveEvent(event) {
    if (event.deviceID) { // Not a MOUSE mouse event, but a (e.g., hydra) mouse event, with x/y that is not meaningful for us.
        resetToHand(); // Can only happen when controller is uncradled, so let's drive with that, resetting our attachment.
        return;
    }
    controllerActive = (Vec3.length(Controller.getSpatialControlPosition(controllerID)) > 0);
    //print("Mouse move with hand controller " + (controllerActive ? "active" : "inactive") + JSON.stringify(event));
    if (controllerActive || !isFighting()) {
    // When a controller like the hydra gives a mouse event, the x/y is not meaningful to us, but we can detect it with a truthy deviceID
    if (event.deviceID || !isFighting() || isControllerActive()) {
        print('Attempting attachment reset');
        resetToHand();
        return;
@ -244,12 +246,20 @@ function cleanUp(leaveButtons) {
}
function makeSword() {
    initControls();
    var swordPosition;
    if (!isControllerActive()) { // Don't knock yourself with the sword
        swordPosition = Vec3.sum(MyAvatar.position, Vec3.multiply(2, Quat.getFront(MyAvatar.orientation)));
    } else if (hand === 'right') {
        swordPosition = MyAvatar.getRightPalmPosition();
    } else {
        swordPosition = MyAvatar.getLeftPalmPosition();
    }
    stickID = Entities.addEntity({
        type: "Model",
        modelURL: swordModel,
        compoundShapeURL: swordCollisionShape,
        dimensions: dimensions,
        position: (hand === 'right') ? MyAvatar.getRightPalmPosition() : MyAvatar.getLeftPalmPosition(), // initial position doesn't matter, as long as it's close
        position: swordPosition,
        rotation: MyAvatar.orientation,
        damping: 0.1,
        collisionSoundURL: swordCollisionSoundURL,
examples/grab.js (424 lines changed)
|
@ -10,96 +10,13 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
var MOVE_TIMESCALE = 0.1;
|
||||
var INV_MOVE_TIMESCALE = 1.0 / MOVE_TIMESCALE;
|
||||
var MAX_SOLID_ANGLE = 0.01; // objects that appear smaller than this can't be grabbed
|
||||
var CLOSE_ENOUGH = 0.001;
|
||||
var ZERO_VEC3 = { x: 0, y: 0, z: 0 };
|
||||
var ANGULAR_DAMPING_RATE = 0.40;
|
||||
|
||||
// NOTE: to improve readability global variable names start with 'g'
|
||||
var gIsGrabbing = false;
|
||||
var gGrabbedEntity = null;
|
||||
var gActionID = null;
|
||||
var gEntityProperties;
|
||||
var gStartPosition;
|
||||
var gStartRotation;
|
||||
var gCurrentPosition;
|
||||
var gOriginalGravity = ZERO_VEC3;
|
||||
var gPlaneNormal = ZERO_VEC3;
|
||||
|
||||
// gMaxGrabDistance is a function of the size of the object.
|
||||
var gMaxGrabDistance;
|
||||
|
||||
// gGrabMode defines the degrees of freedom of the grab target positions
|
||||
// relative to gGrabStartPosition options include:
|
||||
// xzPlane (default)
|
||||
// verticalCylinder (SHIFT)
|
||||
// rotate (CONTROL)
|
||||
// Modes to eventually support?:
|
||||
// xyPlane
|
||||
// yzPlane
|
||||
// polar
|
||||
// elevationAzimuth
|
||||
var gGrabMode = "xzplane";
|
||||
|
||||
// gGrabOffset allows the user to grab an object off-center. It points from ray's intersection
|
||||
// with the move-plane to object center (at the moment the grab is initiated). Future target positions
|
||||
// are relative to the ray's intersection by the same offset.
|
||||
var gGrabOffset = { x: 0, y: 0, z: 0 };
|
||||
|
||||
var gTargetPosition;
|
||||
var gTargetRotation;
|
||||
var gLiftKey = false; // SHIFT
|
||||
var gRotateKey = false; // CONTROL
|
||||
|
||||
var gInitialMouse = { x: 0, y: 0 };
|
||||
var gPreviousMouse = { x: 0, y: 0 };
|
||||
var gMouseCursorLocation = { x: 0, y: 0 };
|
||||
var gMouseAtRotateStart = { x: 0, y: 0 };
|
||||
|
||||
var gBeaconHeight = 0.10;
|
||||
|
||||
// var gAngularVelocity = ZERO_VEC3;
|
||||
|
||||
// TODO: play sounds again when we aren't leaking AudioInjector threads
|
||||
// var grabSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/CloseClamp.wav");
|
||||
// var releaseSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/ReleaseClamp.wav");
|
||||
// var VOLUME = 0.0;
|
||||
|
||||
var gBeaconHeight = 0.10;
|
||||
var BEACON_COLOR = {
|
||||
red: 200,
|
||||
green: 200,
|
||||
blue: 200
|
||||
};
|
||||
var BEACON_WIDTH = 2;
|
||||
var ZERO_VEC3 = {x: 0, y: 0, z: 0};
|
||||
var IDENTITY_QUAT = {x: 0, y: 0, z: 0, w: 1};
|
||||
|
||||
|
||||
var gBeacon = Overlays.addOverlay("line3d", {
|
||||
color: BEACON_COLOR,
|
||||
alpha: 1,
|
||||
visible: false,
|
||||
lineWidth: BEACON_WIDTH
|
||||
});
|
||||
|
||||
function updateDropLine(position) {
|
||||
Overlays.editOverlay(gBeacon, {
|
||||
visible: true,
|
||||
start: {
|
||||
x: position.x,
|
||||
y: position.y + gBeaconHeight,
|
||||
z: position.z
|
||||
},
|
||||
end: {
|
||||
x: position.x,
|
||||
y: position.y - gBeaconHeight,
|
||||
z: position.z
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function mouseIntersectionWithPlane(pointOnPlane, planeNormal, event) {
|
||||
// helper function
|
||||
function mouseIntersectionWithPlane(pointOnPlane, planeNormal, event, maxDistance) {
|
||||
var cameraPosition = Camera.getPosition();
|
||||
var localPointOnPlane = Vec3.subtract(pointOnPlane, cameraPosition);
|
||||
var distanceFromPlane = Vec3.dot(localPointOnPlane, planeNormal);
|
||||
|
@ -116,7 +33,7 @@ function mouseIntersectionWithPlane(pointOnPlane, planeNormal, event) {
|
|||
var useMaxForwardGrab = false;
|
||||
if (Math.abs(dirDotNorm) > MIN_RAY_PLANE_DOT) {
|
||||
var distanceToIntersection = distanceFromPlane / dirDotNorm;
|
||||
if (distanceToIntersection > 0 && distanceToIntersection < gMaxGrabDistance) {
|
||||
if (distanceToIntersection > 0 && distanceToIntersection < maxDistance) {
|
||||
// ray points into the plane
|
||||
localIntersection = Vec3.multiply(pickRay.direction, distanceFromPlane / dirDotNorm);
|
||||
} else {
|
||||
|
@ -133,53 +50,160 @@ function mouseIntersectionWithPlane(pointOnPlane, planeNormal, event) {
|
|||
// we re-route the intersection to be in front at max distance.
|
||||
var rayDirection = Vec3.subtract(pickRay.direction, Vec3.multiply(planeNormal, dirDotNorm));
|
||||
rayDirection = Vec3.normalize(rayDirection);
|
||||
localIntersection = Vec3.multiply(rayDirection, gMaxGrabDistance);
|
||||
localIntersection = Vec3.multiply(rayDirection, maxDistance);
|
||||
localIntersection = Vec3.sum(localIntersection, Vec3.multiply(planeNormal, distanceFromPlane));
|
||||
}
|
||||
var worldIntersection = Vec3.sum(cameraPosition, localIntersection);
|
||||
return worldIntersection;
|
||||
}
|
||||
|
||||
function computeNewGrabPlane() {
|
||||
if (!gIsGrabbing) {
|
||||
// Mouse class stores mouse click and drag info
|
||||
Mouse = function() {
|
||||
this.current = {x: 0, y: 0 };
|
||||
this.previous = {x: 0, y: 0 };
|
||||
this.rotateStart = {x: 0, y: 0 };
|
||||
this.cursorRestore = {x: 0, y: 0};
|
||||
}
|
||||
|
||||
Mouse.prototype.startDrag = function(position) {
|
||||
this.current = {x: position.x, y: position.y};
|
||||
this.startRotateDrag();
|
||||
}
|
||||
|
||||
Mouse.prototype.updateDrag = function(position) {
|
||||
this.current = {x: position.x, y: position.y };
|
||||
}
|
||||
|
||||
Mouse.prototype.startRotateDrag = function() {
|
||||
this.previous = {x: this.current.x, y: this.current.y};
|
||||
this.rotateStart = {x: this.current.x, y: this.current.y};
|
||||
this.cursorRestore = { x: Window.getCursorPositionX(), y: Window.getCursorPositionY() };
|
||||
}
|
||||
|
||||
Mouse.prototype.getDrag = function() {
|
||||
var delta = {x: this.current.x - this.previous.x, y: this.current.y - this.previous.y};
|
||||
this.previous = {x: this.current.x, y: this.current.y};
|
||||
return delta;
|
||||
}
|
||||
|
||||
Mouse.prototype.restoreRotateCursor = function() {
|
||||
Window.setCursorPosition(this.cursorRestore.x, this.cursorRestore.y);
|
||||
this.current = {x: this.rotateStart.x, y: this.rotateStart.y};
|
||||
}
|
||||
|
||||
var mouse = new Mouse();
|
||||
|
||||
|
||||
// Beacon class stores info for drawing a line at object's target position
|
||||
Beacon = function() {
|
||||
this.height = 0.10;
|
||||
this.overlayID = Overlays.addOverlay("line3d", {
|
||||
color: {red: 200, green: 200, blue: 200},
|
||||
alpha: 1,
|
||||
visible: false,
|
||||
lineWidth: 2
|
||||
});
|
||||
}
|
||||
|
||||
Beacon.prototype.enable = function() {
|
||||
Overlays.editOverlay(this.overlayID, { visible: true });
|
||||
}
|
||||
|
||||
Beacon.prototype.disable = function() {
|
||||
Overlays.editOverlay(this.overlayID, { visible: false });
|
||||
}
|
||||
|
||||
Beacon.prototype.updatePosition = function(position) {
|
||||
Overlays.editOverlay(this.overlayID, {
|
||||
visible: true,
|
||||
start: {
|
||||
x: position.x,
|
||||
y: position.y + this.height,
|
||||
z: position.z
|
||||
},
|
||||
end: {
|
||||
x: position.x,
|
||||
y: position.y - this.height,
|
||||
z: position.z
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
var beacon = new Beacon();
|
||||
|
||||
|
||||
// TODO: play sounds again when we aren't leaking AudioInjector threads
|
||||
// var grabSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/CloseClamp.wav");
|
||||
// var releaseSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/ReleaseClamp.wav");
|
||||
// var VOLUME = 0.0;
|
||||
|
||||
|
||||
// Grabber class stores and computes info for grab behavior
|
||||
Grabber = function() {
|
||||
this.isGrabbing = false;
|
||||
this.entityID = null;
|
||||
this.actionID = null;
|
||||
this.startPosition = ZERO_VEC3;
|
||||
this.lastRotation = IDENTITY_QUAT;
|
||||
this.currentPosition = ZERO_VEC3;
|
||||
this.planeNormal = ZERO_VEC3;
|
||||
|
||||
this.originalGravity = ZERO_VEC3;
|
||||
// maxDistance is a function of the size of the object.
|
||||
this.maxDistance;
|
||||
|
||||
// mode defines the degrees of freedom of the grab target positions
|
||||
// relative to startPosition options include:
|
||||
// xzPlane (default)
|
||||
// verticalCylinder (SHIFT)
|
||||
// rotate (CONTROL)
|
||||
this.mode = "xzplane";
|
||||
|
||||
// offset allows the user to grab an object off-center. It points from the object's center
|
||||
// to the point where the ray intersects the grab plane (at the moment the grab is initiated).
|
||||
// Future target positions of the ray intersection are on the same plane, and the offset is subtracted
|
||||
// to compute the target position of the object's center.
|
||||
this.offset = {x: 0, y: 0, z: 0 };
|
||||
|
||||
this.targetPosition;
|
||||
this.targetRotation;
|
||||
|
||||
this.liftKey = false; // SHIFT
|
||||
this.rotateKey = false; // CONTROL
|
||||
}
|
||||
|
||||
Grabber.prototype.computeNewGrabPlane = function() {
|
||||
if (!this.isGrabbing) {
|
||||
return;
|
||||
}
|
||||
|
||||
var maybeResetMousePosition = false;
|
||||
if (gGrabMode !== "rotate") {
|
||||
gMouseAtRotateStart = gMouseCursorLocation;
|
||||
var modeWasRotate = (this.mode == "rotate");
|
||||
this.mode = "xzPlane";
|
||||
this.planeNormal = {x: 0, y: 1, z: 0 };
|
||||
if (this.rotateKey) {
|
||||
this.mode = "rotate";
|
||||
mouse.startRotateDrag();
|
||||
} else {
|
||||
maybeResetMousePosition = true;
|
||||
}
|
||||
gGrabMode = "xzPlane";
|
||||
gPointOnPlane = gCurrentPosition;
|
||||
gPlaneNormal = { x: 0, y: 1, z: 0 };
|
||||
if (gLiftKey) {
|
||||
if (!gRotateKey) {
|
||||
gGrabMode = "verticalCylinder";
|
||||
// a new planeNormal will be computed each move
|
||||
if (modeWasRotate) {
|
||||
// we reset the mouse screen position whenever we stop rotating
|
||||
mouse.restoreRotateCursor();
|
||||
}
|
||||
} else if (gRotateKey) {
|
||||
gGrabMode = "rotate";
|
||||
}
|
||||
if (this.liftKey) {
|
||||
this.mode = "verticalCylinder";
|
||||
// NOTE: during verticalCylinder mode a new planeNormal will be computed each move
|
||||
}
|
||||
}
|
||||
|
||||
gPointOnPlane = Vec3.subtract(gCurrentPosition, gGrabOffset);
|
||||
var xzOffset = Vec3.subtract(gPointOnPlane, Camera.getPosition());
|
||||
this.pointOnPlane = Vec3.sum(this.currentPosition, this.offset);
|
||||
var xzOffset = Vec3.subtract(this.pointOnPlane, Camera.getPosition());
|
||||
xzOffset.y = 0;
|
||||
gXzDistanceToGrab = Vec3.length(xzOffset);
|
||||
|
||||
if (gGrabMode !== "rotate" && maybeResetMousePosition) {
|
||||
// we reset the mouse position whenever we stop rotating
|
||||
Window.setCursorPosition(gMouseAtRotateStart.x, gMouseAtRotateStart.y);
|
||||
}
|
||||
this.xzDistanceToGrab = Vec3.length(xzOffset);
|
||||
}
|
||||
|
||||
function mousePressEvent(event) {
|
||||
Grabber.prototype.pressEvent = function(event) {
|
||||
if (!event.isLeftButton) {
|
||||
return;
|
||||
}
|
||||
gInitialMouse = {x: event.x, y: event.y };
|
||||
gPreviousMouse = {x: event.x, y: event.y };
|
||||
|
||||
var pickRay = Camera.computePickRay(event.x, event.y);
|
||||
var pickResults = Entities.findRayIntersection(pickRay, true); // accurate picking
|
||||
|
@ -193,148 +217,172 @@ function mousePressEvent(event) {
|
|||
return;
|
||||
}
|
||||
|
||||
mouse.startDrag(event);
|
||||
|
||||
var clickedEntity = pickResults.entityID;
|
||||
var entityProperties = Entities.getEntityProperties(clickedEntity)
|
||||
gStartPosition = entityProperties.position;
|
||||
gStartRotation = entityProperties.rotation;
|
||||
this.startPosition = entityProperties.position;
|
||||
this.lastRotation = entityProperties.rotation;
|
||||
var cameraPosition = Camera.getPosition();
|
||||
|
||||
gBeaconHeight = Vec3.length(entityProperties.dimensions);
|
||||
gMaxGrabDistance = gBeaconHeight / MAX_SOLID_ANGLE;
|
||||
if (Vec3.distance(gStartPosition, cameraPosition) > gMaxGrabDistance) {
|
||||
var objectBoundingDiameter = Vec3.length(entityProperties.dimensions);
|
||||
beacon.height = objectBoundingDiameter;
|
||||
this.maxDistance = objectBoundingDiameter / MAX_SOLID_ANGLE;
|
||||
if (Vec3.distance(this.startPosition, cameraPosition) > this.maxDistance) {
|
||||
// don't allow grabs of things far away
|
||||
return;
|
||||
}
|
||||
|
||||
Entities.editEntity(clickedEntity, { gravity: ZERO_VEC3 });
|
||||
gIsGrabbing = true;
|
||||
this.isGrabbing = true;
|
||||
|
||||
gGrabbedEntity = clickedEntity;
|
||||
gCurrentPosition = entityProperties.position;
|
||||
gOriginalGravity = entityProperties.gravity;
|
||||
gTargetPosition = gStartPosition;
|
||||
this.entityID = clickedEntity;
|
||||
this.currentPosition = entityProperties.position;
|
||||
this.originalGravity = entityProperties.gravity;
|
||||
this.targetPosition = {x: this.startPosition.x, y: this.startPosition.y, z: this.startPosition.z};
|
||||
|
||||
// compute the grab point
|
||||
var nearestPoint = Vec3.subtract(gStartPosition, cameraPosition);
|
||||
var nearestPoint = Vec3.subtract(this.startPosition, cameraPosition);
|
||||
var distanceToGrab = Vec3.dot(nearestPoint, pickRay.direction);
|
||||
nearestPoint = Vec3.multiply(distanceToGrab, pickRay.direction);
|
||||
gPointOnPlane = Vec3.sum(cameraPosition, nearestPoint);
|
||||
this.pointOnPlane = Vec3.sum(cameraPosition, nearestPoint);
|
||||
|
||||
// compute the grab offset
|
||||
gGrabOffset = Vec3.subtract(gStartPosition, gPointOnPlane);
|
||||
// compute the grab offset (points from object center to point of grab)
|
||||
this.offset = Vec3.subtract(this.pointOnPlane, this.startPosition);
|
||||
|
||||
computeNewGrabPlane();
|
||||
this.computeNewGrabPlane();
|
||||
|
||||
updateDropLine(gStartPosition);
|
||||
beacon.updatePosition(this.startPosition);
|
||||
|
||||
// TODO: play sounds again when we aren't leaking AudioInjector threads
|
||||
//Audio.playSound(grabSound, { position: entityProperties.position, volume: VOLUME });
|
||||
}
|
||||
|
||||
function mouseReleaseEvent() {
|
||||
if (gIsGrabbing) {
|
||||
if (Vec3.length(gOriginalGravity) != 0) {
|
||||
Entities.editEntity(gGrabbedEntity, { gravity: gOriginalGravity });
|
||||
Grabber.prototype.releaseEvent = function() {
|
||||
if (this.isGrabbing) {
|
||||
if (Vec3.length(this.originalGravity) != 0) {
|
||||
Entities.editEntity(this.entityID, { gravity: this.originalGravity});
|
||||
}
|
||||
|
||||
gIsGrabbing = false
|
||||
Entities.deleteAction(gGrabbedEntity, gActionID);
|
||||
gActionID = null;
|
||||
this.isGrabbing = false
|
||||
Entities.deleteAction(this.entityID, this.actionID);
|
||||
this.actionID = null;
|
||||
|
||||
Overlays.editOverlay(gBeacon, { visible: false });
|
||||
beacon.disable();
|
||||
|
||||
// TODO: play sounds again when we aren't leaking AudioInjector threads
|
||||
//Audio.playSound(releaseSound, { position: entityProperties.position, volume: VOLUME });
|
||||
}
|
||||
}
|
||||
|
||||
function mouseMoveEvent(event) {
|
||||
if (!gIsGrabbing) {
|
||||
Grabber.prototype.moveEvent = function(event) {
|
||||
if (!this.isGrabbing) {
|
||||
return;
|
||||
}
|
||||
mouse.updateDrag(event);
|
||||
|
||||
// see if something added/restored gravity
|
||||
var entityProperties = Entities.getEntityProperties(gGrabbedEntity);
|
||||
var entityProperties = Entities.getEntityProperties(this.entityID);
|
||||
if (Vec3.length(entityProperties.gravity) != 0) {
|
||||
gOriginalGravity = entityProperties.gravity;
|
||||
this.originalGravity = entityProperties.gravity;
|
||||
}
|
||||
this.currentPosition = entityProperties.position;
|
||||
|
||||
var actionArgs = {};
|
||||
|
||||
if (gGrabMode === "rotate") {
|
||||
var deltaMouse = { x: 0, y: 0 };
|
||||
var dx = event.x - gInitialMouse.x;
|
||||
var dy = event.y - gInitialMouse.y;
|
||||
if (this.mode === "rotate") {
|
||||
var drag = mouse.getDrag();
|
||||
var orientation = Camera.getOrientation();
|
||||
var dragOffset = Vec3.multiply(dx, Quat.getRight(orientation));
|
||||
dragOffset = Vec3.sum(dragOffset, Vec3.multiply(-dy, Quat.getUp(orientation)));
|
||||
var dragOffset = Vec3.multiply(drag.x, Quat.getRight(orientation));
|
||||
dragOffset = Vec3.sum(dragOffset, Vec3.multiply(-drag.y, Quat.getUp(orientation)));
|
||||
var axis = Vec3.cross(dragOffset, Quat.getFront(orientation));
|
||||
axis = Vec3.normalize(axis);
|
||||
var ROTATE_STRENGTH = 0.4; // magic number tuned by hand
|
||||
var angle = ROTATE_STRENGTH * Math.sqrt((dx * dx) + (dy * dy));
|
||||
var angle = ROTATE_STRENGTH * Math.sqrt((drag.x * drag.x) + (drag.y * drag.y));
|
||||
var deltaQ = Quat.angleAxis(angle, axis);
|
||||
// var qZero = entityProperties.rotation;
|
||||
var qZero = gStartRotation;
|
||||
var qOne = Quat.multiply(deltaQ, qZero);
|
||||
actionArgs = {targetRotation: qOne, angularTimeScale: 0.1};
|
||||
//var qZero = this.lastRotation;
|
||||
this.lastRotation = Quat.multiply(deltaQ, this.lastRotation);
|
||||
actionArgs = {targetRotation: this.lastRotation, angularTimeScale: 0.1};
|
||||
} else {
|
||||
var newTargetPosition;
|
||||
if (gGrabMode === "verticalCylinder") {
|
||||
var newPointOnPlane;
|
||||
if (this.mode === "verticalCylinder") {
|
||||
// for this mode we recompute the plane based on current Camera
|
||||
var planeNormal = Quat.getFront(Camera.getOrientation());
|
||||
planeNormal.y = 0;
|
||||
planeNormal = Vec3.normalize(planeNormal);
|
||||
var pointOnCylinder = Vec3.multiply(planeNormal, gXzDistanceToGrab);
|
||||
var pointOnCylinder = Vec3.multiply(planeNormal, this.xzDistanceToGrab);
|
||||
pointOnCylinder = Vec3.sum(Camera.getPosition(), pointOnCylinder);
|
||||
newTargetPosition = mouseIntersectionWithPlane(pointOnCylinder, planeNormal, event);
|
||||
this.pointOnPlane = mouseIntersectionWithPlane(pointOnCylinder, planeNormal, mouse.current, this.maxDistance);
|
||||
newPointOnPlane = {x: this.pointOnPlane.x, y: this.pointOnPlane.y, z: this.pointOnPlane.z};
|
||||
} else {
|
||||
var cameraPosition = Camera.getPosition();
|
||||
newTargetPosition = mouseIntersectionWithPlane(gPointOnPlane, gPlaneNormal, event);
|
||||
var relativePosition = Vec3.subtract(newTargetPosition, cameraPosition);
|
||||
newPointOnPlane = mouseIntersectionWithPlane(this.pointOnPlane, this.planeNormal, mouse.current, this.maxDistance);
|
||||
var relativePosition = Vec3.subtract(newPointOnPlane, cameraPosition);
|
||||
var distance = Vec3.length(relativePosition);
|
||||
if (distance > gMaxGrabDistance) {
|
||||
if (distance > this.maxDistance) {
|
||||
// clamp distance
|
||||
relativePosition = Vec3.multiply(relativePosition, gMaxGrabDistance / distance);
|
||||
newTargetPosition = Vec3.sum(relativePosition, cameraPosition);
|
||||
relativePosition = Vec3.multiply(relativePosition, this.maxDistance / distance);
|
||||
newPointOnPlane = Vec3.sum(relativePosition, cameraPosition);
|
||||
}
|
||||
}
|
||||
gTargetPosition = Vec3.sum(newTargetPosition, gGrabOffset);
|
||||
actionArgs = {targetPosition: gTargetPosition, linearTimeScale: 0.1};
|
||||
}
|
||||
gPreviousMouse = { x: event.x, y: event.y };
|
||||
gMouseCursorLocation = { x: Window.getCursorPositionX(), y: Window.getCursorPositionY() };
|
||||
this.targetPosition = Vec3.subtract(newPointOnPlane, this.offset);
|
||||
actionArgs = {targetPosition: this.targetPosition, linearTimeScale: 0.1};
|
||||
|
||||
if (!gActionID) {
|
||||
gActionID = Entities.addAction("spring", gGrabbedEntity, actionArgs);
|
||||
beacon.updatePosition(this.targetPosition);
|
||||
}
|
||||
|
||||
if (!this.actionID) {
|
||||
this.actionID = Entities.addAction("spring", this.entityID, actionArgs);
|
||||
} else {
|
||||
Entities.updateAction(gGrabbedEntity, gActionID, actionArgs);
|
||||
Entities.updateAction(this.entityID, this.actionID, actionArgs);
|
||||
}
|
||||
|
||||
updateDropLine(gTargetPosition);
|
||||
}
|
||||
|
||||
function keyReleaseEvent(event) {
|
||||
Grabber.prototype.keyReleaseEvent = function(event) {
|
||||
if (event.text === "SHIFT") {
|
||||
gLiftKey = false;
|
||||
this.liftKey = false;
|
||||
}
|
||||
if (event.text === "CONTROL") {
|
||||
gRotateKey = false;
|
||||
this.rotateKey = false;
|
||||
}
|
||||
computeNewGrabPlane();
|
||||
this.computeNewGrabPlane();
|
||||
}
|
||||
|
||||
Grabber.prototype.keyPressEvent = function(event) {
|
||||
if (event.text === "SHIFT") {
|
||||
this.liftKey = true;
|
||||
}
|
||||
if (event.text === "CONTROL") {
|
||||
this.rotateKey = true;
|
||||
}
|
||||
this.computeNewGrabPlane();
|
||||
}
|
||||
|
||||
var grabber = new Grabber();
|
||||
|
||||
function pressEvent(event) {
|
||||
grabber.pressEvent(event);
|
||||
}
|
||||
|
||||
function moveEvent(event) {
|
||||
grabber.moveEvent(event);
|
||||
}
|
||||
|
||||
function releaseEvent(event) {
|
||||
grabber.releaseEvent(event);
|
||||
}
|
||||
|
||||
function keyPressEvent(event) {
|
||||
if (event.text === "SHIFT") {
|
||||
gLiftKey = true;
|
||||
}
|
||||
if (event.text === "CONTROL") {
|
||||
gRotateKey = true;
|
||||
}
|
||||
computeNewGrabPlane();
|
||||
grabber.keyPressEvent(event);
|
||||
}
|
||||
|
||||
Controller.mouseMoveEvent.connect(mouseMoveEvent);
|
||||
Controller.mousePressEvent.connect(mousePressEvent);
|
||||
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
|
||||
function keyReleaseEvent(event) {
|
||||
grabber.keyReleaseEvent(event);
|
||||
}
|
||||
|
||||
Controller.mousePressEvent.connect(pressEvent);
|
||||
Controller.mouseMoveEvent.connect(moveEvent);
|
||||
Controller.mouseReleaseEvent.connect(releaseEvent);
|
||||
Controller.keyPressEvent.connect(keyPressEvent);
|
||||
Controller.keyReleaseEvent.connect(keyReleaseEvent);
|
||||
|
|
|
@ -65,70 +65,55 @@ function removeLine() {
|
|||
|
||||
|
||||
function createOrUpdateLine(event) {
|
||||
var pickRay = Camera.computePickRay(event.x, event.y);
|
||||
var intersection = Entities.findRayIntersection(pickRay, true); // accurate picking
|
||||
var props = Entities.getEntityProperties(intersection.entityID);
|
||||
var pickRay = Camera.computePickRay(event.x, event.y);
|
||||
var intersection = Entities.findRayIntersection(pickRay, true); // accurate picking
|
||||
var props = Entities.getEntityProperties(intersection.entityID);
|
||||
|
||||
if (intersection.intersects && userCanPoint) {
|
||||
var points = [nearLinePoint(intersection.intersection), intersection.intersection]
|
||||
if (lineIsRezzed) {
|
||||
Entities.editEntity(lineEntityID, {
|
||||
position: nearLinePoint(intersection.intersection),
|
||||
linePoints: points,
|
||||
dimensions: {
|
||||
x: 1,
|
||||
y: 1,
|
||||
z: 1
|
||||
},
|
||||
lifetime: 15 + props.lifespan // renew lifetime
|
||||
});
|
||||
if (intersection.intersects && userCanPoint) {
|
||||
var points = [Vec3.subtract(nearLinePoint(intersection.intersection), MyAvatar.position),
|
||||
Vec3.subtract(intersection.intersection, MyAvatar.position)];
|
||||
if (lineIsRezzed) {
|
||||
Entities.editEntity(lineEntityID, {
|
||||
linePoints: points,
|
||||
position: MyAvatar.position,
|
||||
lifetime: 15 + props.lifespan // renew lifetime
|
||||
});
|
||||
// Entities.setAllPoints(lineEntityID, points);
|
||||
} else {
|
||||
lineIsRezzed = true;
|
||||
lineEntityID = Entities.addEntity({
|
||||
type: "Line",
|
||||
position: MyAvatar.position,
|
||||
linePoints: points,
|
||||
dimensions: { x: 100, y: 100, z: 100 },
|
||||
color: { red: 255, green: 255, blue: 255 },
|
||||
lifetime: 15 // if someone crashes while pointing, don't leave the line there forever.
|
||||
});
|
||||
}
|
||||
} else {
|
||||
lineIsRezzed = true;
|
||||
lineEntityID = Entities.addEntity({
|
||||
type: "Line",
|
||||
position: nearLinePoint(intersection.intersection),
|
||||
linePoints: points,
|
||||
dimensions: {
|
||||
x: 1,
|
||||
y: 1,
|
||||
z: 1
|
||||
},
|
||||
color: {
|
||||
red: 255,
|
||||
green: 255,
|
||||
blue: 255
|
||||
},
|
||||
lifetime: 15 // if someone crashes while pointing, don't leave the line there forever.
|
||||
});
|
||||
removeLine();
|
||||
}
|
||||
} else {
|
||||
removeLine();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function mousePressEvent(event) {
|
||||
if (!event.isLeftButton) {
|
||||
return;
|
||||
}
|
||||
|
||||
createOrUpdateLine(event);
|
||||
var clickedOverlay = Overlays.getOverlayAtPoint({
|
||||
x: event.x,
|
||||
y: event.y
|
||||
});
|
||||
if (clickedOverlay == pointerButton) {
|
||||
userCanPoint = !userCanPoint;
|
||||
if (userCanPoint === true) {
|
||||
Overlays.editOverlay(pointerButton, {
|
||||
color: buttonOnColor
|
||||
});
|
||||
} else {
|
||||
Overlays.editOverlay(pointerButton, {
|
||||
color: buttonOffColor
|
||||
});
|
||||
if (!event.isLeftButton) {
|
||||
return;
|
||||
}
|
||||
|
||||
var clickedOverlay = Overlays.getOverlayAtPoint({
|
||||
x: event.x,
|
||||
y: event.y
|
||||
});
|
||||
|
||||
if (clickedOverlay == pointerButton) {
|
||||
userCanPoint = !userCanPoint;
|
||||
if (userCanPoint === true) {
|
||||
Overlays.editOverlay(pointerButton, { color: buttonOnColor });
|
||||
} else {
|
||||
Overlays.editOverlay(pointerButton, { color: buttonOffColor });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -2,6 +2,32 @@ var controlHeld = false;
var shiftHeld = false;

function attemptVoxelChange(intersection) {
    var ids = Entities.findEntities(intersection.intersection, 10);
    var success = false;
    for (var i = 0; i < ids.length; i++) {
        var id = ids[i];
        if (controlHeld) {
            // hold control to erase a sphere
            if (Entities.setVoxelSphere(id, intersection.intersection, 1.0, 0)) {
                success = true;
            }
        } else if (shiftHeld) {
            // hold shift to set all voxels to 255
            if (Entities.setAllVoxels(id, 255)) {
                success = true;
            }
        } else {
            // no modifier key means to add a sphere
            if (Entities.setVoxelSphere(id, intersection.intersection, 1.0, 255)) {
                success = true;
            }
        }
    }
    return success;
}

function mousePressEvent(event) {
    if (!event.isLeftButton) {
        return;
@ -9,20 +35,21 @@ function mousePressEvent(event) {

    var pickRay = Camera.computePickRay(event.x, event.y);
    var intersection = Entities.findRayIntersection(pickRay, true); // accurate picking
    // var props = Entities.getEntityProperties(intersection.entityID);

    // we've used a picking ray to decide where to add the new sphere of voxels. If we pick nothing
    // or if we pick a non-PolyVox entity, we fall through to the next picking attempt.
    if (intersection.intersects) {
        var ids = Entities.findEntities(intersection.intersection, 10);
        for (var i = 0; i < ids.length; i++) {
            var id = ids[i];
            if (controlHeld) {
                Entities.setVoxelSphere(id, intersection.intersection, 1.0, 0);
            } else if (shiftHeld) {
                Entities.setAllVoxels(id, 255);
            } else {
                Entities.setVoxelSphere(id, intersection.intersection, 1.0, 255);
            }
        if (attemptVoxelChange(intersection)) {
            return;
        }
    }

    // if the PolyVox entity is empty, we can't pick against its voxel. try picking against its
    // bounding box, instead.
    intersection = Entities.findRayIntersection(pickRay, false); // bounding box picking
    if (intersection.intersects) {
        attemptVoxelChange(intersection);
    }
}
interface/resources/images/interface-logo.svg (new file, 34 lines)
|
@ -0,0 +1,34 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 18.1.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 261 261" enable-background="new 0 0 261 261" xml:space="preserve">
|
||||
<g>
|
||||
<g>
|
||||
<circle fill="#149ABB" cx="130.5" cy="130.5" r="130.5"/>
|
||||
</g>
|
||||
<g>
|
||||
<path fill="#0C9AB2" d="M251,80.6c27.5,66.6-4.3,142.9-70.9,170.4S37.1,246.7,9.6,180"/>
|
||||
</g>
|
||||
<g opacity="0.21">
|
||||
<g>
|
||||
<path fill="#FFFFFF" d="M130.5,1.8c17.4,0,34.2,3.4,50.1,10.1c15.3,6.5,29.1,15.8,40.9,27.6s21.1,25.6,27.6,40.9
|
||||
c6.7,15.9,10.1,32.7,10.1,50.1s-3.4,34.2-10.1,50.1c-6.5,15.3-15.8,29.1-27.6,40.9c-11.8,11.8-25.6,21.1-40.9,27.6
|
||||
c-15.9,6.7-32.7,10.1-50.1,10.1s-34.2-3.4-50.1-10.1c-15.3-6.5-29.1-15.8-40.9-27.6c-11.8-11.8-21.1-25.6-27.6-40.9
|
||||
c-6.7-15.9-10.1-32.7-10.1-50.1s3.4-34.2,10.1-50.1c6.5-15.3,15.8-29.1,27.6-40.9s25.6-21.1,40.9-27.6
|
||||
C96.3,5.2,113.1,1.8,130.5,1.8 M130.5,0C58.4,0,0,58.4,0,130.5S58.4,261,130.5,261S261,202.6,261,130.5S202.6,0,130.5,0L130.5,0z
|
||||
"/>
|
||||
</g>
|
||||
</g>
|
||||
<rect x="100.8" y="65.2" fill="#FFFFFF" width="59.4" height="98"/>
|
||||
<rect x="3.4" y="169.4" fill="#FFFFFF" width="254.2" height="1.9"/>
|
||||
<rect x="3.4" y="176" fill="#FFFFFF" width="254.2" height="1.9"/>
|
||||
<rect x="3.4" y="184.7" fill="#FFFFFF" width="254.2" height="1.9"/>
|
||||
<rect x="3.4" y="197.9" fill="#FFFFFF" width="254.2" height="1.9"/>
|
||||
<path fill="#FFFFFF" d="M93.7,169.5l-14.7,75.8c0,0,47.4,23.4,101.2,0.5c33.3-14.2,51.5-41.5,51.5-41.5L154,170.2L93.7,169.5z"/>
|
||||
<path fill="#FFFFFF" d="M166,169.5l14.7,75.8c0,0-47.4,23.4-101.2,0.5c-33.3-14.2-51.5-41.5-51.5-41.5l77.5-34.1L166,169.5z"/>
|
||||
<rect x="85.8" y="184.7" fill="#149ABB" width="89.4" height="1.9"/>
|
||||
<rect x="98" y="176" fill="#149ABB" width="65.1" height="1.9"/>
|
||||
<rect x="56.4" y="197.9" fill="#149ABB" width="148.2" height="1.9"/>
|
||||
<rect x="51.4" y="219.4" fill="#149ABB" width="158.3" height="1.9"/>
|
||||
</g>
|
||||
</svg>
|
@ -41,8 +41,6 @@ DialogContainer {
    readonly property int closeMargin: 16
    readonly property real tan30: 0.577 // tan(30°)
    readonly property int inputSpacing: 16
    property int maximumX: parent ? parent.width - width : 0
    property int maximumY: parent ? parent.height - height : 0

    Rectangle {
        id: backgroundRectangle
interface/resources/qml/TextOverlayElement.qml (new file, 23 lines)
@ -0,0 +1,23 @@
import Hifi 1.0
import QtQuick 2.3
import QtQuick.Controls 1.2

TextOverlayElement {
    id: root
    Rectangle {
        color: root.backgroundColor
        anchors.fill: parent
        Text {
            x: root.leftMargin
            y: root.topMargin
            id: text
            objectName: "textElement"
            text: root.text
            color: root.textColor
            font.family: root.fontFamily
            font.pixelSize: root.fontSize
            lineHeightMode: Text.FixedHeight
            lineHeight: root.lineHeight
        }
    }
}
@ -1,164 +1,176 @@
|
|||
import Hifi 1.0
|
||||
import QtQuick 2.3
|
||||
import QtQuick.Controls 1.3
|
||||
import QtQuick.Controls.Styles 1.3
|
||||
import QtGraphicalEffects 1.0
|
||||
import "controls"
|
||||
import "styles"
|
||||
|
||||
DialogContainer {
|
||||
HifiConstants { id: hifi }
|
||||
id: root
|
||||
HifiConstants { id: hifi }
|
||||
|
||||
objectName: "UpdateDialog"
|
||||
implicitWidth: updateDialog.width
|
||||
implicitHeight: updateDialog.height
|
||||
|
||||
implicitWidth: updateDialog.implicitWidth
|
||||
implicitHeight: updateDialog.implicitHeight
|
||||
|
||||
x: parent ? parent.width / 2 - width / 2 : 0
|
||||
y: parent ? parent.height / 2 - height / 2 : 0
|
||||
|
||||
property int maximumX: parent ? parent.width - width : 0
|
||||
property int maximumY: parent ? parent.height - height : 0
|
||||
|
||||
UpdateDialog {
|
||||
id: updateDialog
|
||||
|
||||
implicitWidth: backgroundRectangle.width
|
||||
implicitHeight: backgroundRectangle.height
|
||||
|
||||
readonly property int inputWidth: 500
|
||||
readonly property int inputHeight: 60
|
||||
readonly property int contentWidth: 500
|
||||
readonly property int logoSize: 60
|
||||
readonly property int borderWidth: 30
|
||||
readonly property int closeMargin: 16
|
||||
readonly property int inputSpacing: 16
|
||||
readonly property int buttonWidth: 150
|
||||
readonly property int buttonHeight: 50
|
||||
readonly property int buttonRadius: 15
|
||||
|
||||
readonly property int buttonWidth: 100
|
||||
readonly property int buttonHeight: 30
|
||||
readonly property int noticeHeight: 15 * inputSpacing
|
||||
readonly property string fontFamily: Qt.platform.os === "windows" ? "Trebuchet MS" : "Trebuchet"
|
||||
|
||||
signal triggerBuildDownload
|
||||
signal closeUpdateDialog
|
||||
|
||||
Rectangle {
|
||||
id: backgroundRectangle
|
||||
color: "#ffffff"
|
||||
|
||||
width: updateDialog.contentWidth + updateDialog.borderWidth * 2
|
||||
height: mainContent.height + updateDialog.borderWidth * 2 - updateDialog.closeMargin / 2
|
||||
|
||||
MouseArea {
|
||||
width: parent.width
|
||||
height: parent.height
|
||||
anchors {
|
||||
horizontalCenter: parent.horizontalCenter
|
||||
verticalCenter: parent.verticalCenter
|
||||
}
|
||||
drag {
|
||||
target: root
|
||||
minimumX: 0
|
||||
minimumY: 0
|
||||
maximumX: root.parent ? root.maximumX : 0
|
||||
maximumY: root.parent ? root.maximumY : 0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Image {
|
||||
id: logo
|
||||
source: "../images/interface-logo.svg"
|
||||
width: updateDialog.logoSize
|
||||
height: updateDialog.logoSize
|
||||
anchors {
|
||||
top: mainContent.top
|
||||
right: mainContent.right
|
||||
}
|
||||
}
|
||||
|
||||
Column {
|
||||
id: mainContent
|
||||
width: updateDialog.inputWidth
|
||||
width: updateDialog.contentWidth
|
||||
spacing: updateDialog.inputSpacing
|
||||
anchors {
|
||||
horizontalCenter: parent.horizontalCenter
|
||||
verticalCenter: parent.verticalCenter
|
||||
topMargin: updateDialog.borderWidth
|
||||
top: parent.top
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
id: backgroundRectangle
|
||||
color: "#2c86b1"
|
||||
opacity: 0.85
|
||||
radius: updateDialog.closeMargin * 2
|
||||
|
||||
width: updateDialog.inputWidth + updateDialog.borderWidth * 2
|
||||
height: updateDialog.inputHeight * 6 + updateDialog.closeMargin * 2
|
||||
|
||||
Rectangle {
|
||||
id: dialogTitle
|
||||
width: updateDialog.inputWidth
|
||||
height: updateDialog.inputHeight
|
||||
radius: height / 2
|
||||
color: "#ebebeb"
|
||||
|
||||
id: header
|
||||
width: parent.width - updateDialog.logoSize - updateDialog.inputSpacing
|
||||
height: updateAvailable.height + versionDetails.height
|
||||
|
||||
Text {
|
||||
id: updateAvailable
|
||||
text: "Update Available"
|
||||
font {
|
||||
family: updateDialog.fontFamily
|
||||
pixelSize: hifi.fonts.pixelSize * 1.5
|
||||
weight: Font.DemiBold
|
||||
}
|
||||
color: "#303030"
|
||||
}
|
||||
|
||||
Text {
|
||||
id: versionDetails
|
||||
text: updateDialog.updateAvailableDetails
|
||||
font {
|
||||
family: updateDialog.fontFamily
|
||||
pixelSize: hifi.fonts.pixelSize * 0.6
|
||||
letterSpacing: -0.5
|
||||
}
|
||||
color: hifi.colors.text
|
||||
anchors {
|
||||
top: parent.top
|
||||
topMargin: updateDialog.inputSpacing
|
||||
horizontalCenter: parent.horizontalCenter
|
||||
}
|
||||
|
||||
Text {
|
||||
id: updateAvailableText
|
||||
text: "Update Available"
|
||||
anchors {
|
||||
verticalCenter: parent.verticalCenter
|
||||
left: parent.left
|
||||
leftMargin: updateDialog.inputSpacing
|
||||
}
|
||||
}
|
||||
|
||||
Text {
|
||||
text: updateDialog.updateAvailableDetails
|
||||
font.pixelSize: 14
|
||||
color: hifi.colors.text
|
||||
anchors {
|
||||
verticalCenter: parent.verticalCenter
|
||||
left: updateAvailableText.right
|
||||
leftMargin: 13
|
||||
}
|
||||
top: updateAvailable.bottom
|
||||
}
|
||||
}
|
||||
|
||||
Flickable {
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
width: parent.width
|
||||
height: updateDialog.noticeHeight
|
||||
|
||||
border {
|
||||
width: 1
|
||||
color: "#a0a0a0"
|
||||
}
|
||||
|
||||
ScrollView {
|
||||
id: scrollArea
|
||||
anchors {
|
||||
top: dialogTitle.bottom
|
||||
}
|
||||
contentWidth: updateDialog.inputWidth
|
||||
contentHeight: backgroundRectangle.height - (dialogTitle.height * 2.5)
|
||||
width: updateDialog.inputWidth
|
||||
height: backgroundRectangle.height - (dialogTitle.height * 2.5)
|
||||
flickableDirection: Flickable.VerticalFlick
|
||||
clip: true
|
||||
|
||||
TextEdit {
|
||||
id: releaseNotes
|
||||
wrapMode: TextEdit.Wrap
|
||||
width: parent.width
|
||||
readOnly: true
|
||||
text: updateDialog.releaseNotes
|
||||
font.pixelSize: 14
|
||||
color: hifi.colors.text
|
||||
anchors {
|
||||
left: parent.left
|
||||
leftMargin: updateDialog.borderWidth
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
id: downloadButton
|
||||
width: updateDialog.buttonWidth
|
||||
height: updateDialog.buttonHeight
|
||||
radius: updateDialog.buttonRadius
|
||||
color: "green"
|
||||
anchors {
|
||||
top: scrollArea.bottom
|
||||
topMargin: 10
|
||||
right: backgroundRectangle.right
|
||||
rightMargin: 15
|
||||
}
|
||||
width: parent.width - updateDialog.closeMargin
|
||||
height: parent.height
|
||||
horizontalScrollBarPolicy: Qt.ScrollBarAlwaysOff
|
||||
verticalScrollBarPolicy: Qt.ScrollBarAsNeeded
|
||||
anchors.right: parent.right
|
||||
|
||||
Text {
|
||||
text: "Upgrade"
|
||||
anchors {
|
||||
verticalCenter: parent.verticalCenter
|
||||
horizontalCenter: parent.horizontalCenter
|
||||
id: releaseNotes
|
||||
wrapMode: Text.Wrap
|
||||
width: parent.width - updateDialog.closeMargin
|
||||
text: updateDialog.releaseNotes
|
||||
color: hifi.colors.text
|
||||
font {
|
||||
family: updateDialog.fontFamily
|
||||
pixelSize: hifi.fonts.pixelSize * 0.65
|
||||
}
|
||||
}
|
||||
MouseArea {
|
||||
id: downloadButtonAction
|
||||
anchors.fill: parent
|
||||
onClicked: updateDialog.triggerUpgrade()
|
||||
cursorShape: "PointingHandCursor"
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
Row {
|
||||
anchors.right: parent.right
|
||||
spacing: updateDialog.inputSpacing
|
||||
height: updateDialog.buttonHeight + updateDialog.closeMargin / 2
|
||||
|
||||
Rectangle {
|
||||
id: cancelButton
|
||||
width: updateDialog.buttonWidth
|
||||
height: updateDialog.buttonHeight
|
||||
radius: updateDialog.buttonRadius
|
||||
color: "red"
|
||||
anchors {
|
||||
top: scrollArea.bottom
|
||||
topMargin: 10
|
||||
right: downloadButton.left
|
||||
rightMargin: 15
|
||||
}
|
||||
|
||||
anchors.bottom: parent.bottom
|
||||
|
||||
Text {
|
||||
text: "Cancel"
|
||||
color: "#0c9ab4" // Same as logo
|
||||
font {
|
||||
family: updateDialog.fontFamily
|
||||
pixelSize: hifi.fonts.pixelSize * 1.2
|
||||
weight: Font.DemiBold
|
||||
}
|
||||
anchors {
|
||||
verticalCenter: parent.verticalCenter
|
||||
horizontalCenter: parent.horizontalCenter
|
||||
}
|
||||
}
|
||||
|
||||
MouseArea {
|
||||
id: cancelButtonAction
|
||||
anchors.fill: parent
|
||||
|
@ -166,7 +178,35 @@ DialogContainer {
|
|||
cursorShape: "PointingHandCursor"
|
||||
}
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
id: updateButton
|
||||
width: updateDialog.buttonWidth
|
||||
height: updateDialog.buttonHeight
|
||||
anchors.bottom: parent.bottom
|
||||
|
||||
Text {
|
||||
text: "Update"
|
||||
color: "#0c9ab4" // Same as logo
|
||||
font {
|
||||
family: updateDialog.fontFamily
|
||||
pixelSize: hifi.fonts.pixelSize * 1.2
|
||||
weight: Font.DemiBold
|
||||
}
|
||||
anchors {
|
||||
verticalCenter: parent.verticalCenter
|
||||
horizontalCenter: parent.horizontalCenter
|
||||
}
|
||||
}
|
||||
|
||||
MouseArea {
|
||||
id: updateButtonAction
|
||||
anchors.fill: parent
|
||||
onClicked: updateDialog.triggerUpgrade()
|
||||
cursorShape: "PointingHandCursor"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -65,7 +65,6 @@
|
|||
#include <DependencyManager.h>
|
||||
#include <EntityScriptingInterface.h>
|
||||
#include <ErrorDialog.h>
|
||||
#include <GlowEffect.h>
|
||||
#include <gpu/Batch.h>
|
||||
#include <gpu/Context.h>
|
||||
#include <gpu/GLBackend.h>
|
||||
|
@ -93,7 +92,6 @@
|
|||
#include <SettingHandle.h>
|
||||
#include <SimpleAverage.h>
|
||||
#include <SoundCache.h>
|
||||
#include <TextRenderer.h>
|
||||
#include <Tooltip.h>
|
||||
#include <UserActivityLogger.h>
|
||||
#include <UUID.h>
|
||||
|
@ -112,7 +110,6 @@
|
|||
|
||||
#include "avatar/AvatarManager.h"
|
||||
|
||||
#include "audio/AudioIOStatsRenderer.h"
|
||||
#include "audio/AudioScope.h"
|
||||
|
||||
#include "devices/DdeFaceTracker.h"
|
||||
|
@ -270,11 +267,9 @@ bool setupEssentials(int& argc, char** argv) {
|
|||
auto geometryCache = DependencyManager::set<GeometryCache>();
|
||||
auto scriptCache = DependencyManager::set<ScriptCache>();
|
||||
auto soundCache = DependencyManager::set<SoundCache>();
|
||||
auto glowEffect = DependencyManager::set<GlowEffect>();
|
||||
auto faceshift = DependencyManager::set<Faceshift>();
|
||||
auto audio = DependencyManager::set<AudioClient>();
|
||||
auto audioScope = DependencyManager::set<AudioScope>();
|
||||
auto audioIOStatsRenderer = DependencyManager::set<AudioIOStatsRenderer>();
|
||||
auto deferredLightingEffect = DependencyManager::set<DeferredLightingEffect>();
|
||||
auto ambientOcclusionEffect = DependencyManager::set<AmbientOcclusionEffect>();
|
||||
auto textureCache = DependencyManager::set<TextureCache>();
|
||||
|
@ -337,7 +332,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
|
|||
_mousePressed(false),
|
||||
_enableProcessOctreeThread(true),
|
||||
_octreeProcessor(),
|
||||
_nodeBoundsDisplay(this),
|
||||
_runningScriptsWidget(NULL),
|
||||
_runningScriptsWidgetWasVisible(false),
|
||||
_trayIcon(new QSystemTrayIcon(_window)),
|
||||
|
@ -702,6 +696,14 @@ void Application::cleanupBeforeQuit() {
|
|||
#endif
|
||||
}
|
||||
|
||||
void Application::emptyLocalCache() {
|
||||
QNetworkDiskCache* cache = qobject_cast<QNetworkDiskCache*>(NetworkAccessManager::getInstance().cache());
|
||||
if (cache) {
|
||||
qDebug() << "DiskCacheEditor::clear(): Clearing disk cache.";
|
||||
cache->clear();
|
||||
}
|
||||
}
|
||||
|
||||
Application::~Application() {
|
||||
EntityTree* tree = _entities.getTree();
|
||||
tree->lockForWrite();
|
||||
|
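For context on the emptyLocalCache() hunk above: it clears Qt's on-disk network cache via QNetworkDiskCache before the in-memory resource caches are refreshed. A minimal, free-standing sketch of the same idea, assuming only a QNetworkAccessManager reference; the function name is illustrative and not part of this change:

    #include <QNetworkAccessManager>
    #include <QNetworkDiskCache>
    #include <QDebug>

    // Clear the on-disk cache attached to a QNetworkAccessManager, if one is installed.
    void clearDiskCache(QNetworkAccessManager& manager) {
        // cache() returns a QAbstractNetworkCache*; only a QNetworkDiskCache is cleared here.
        if (auto* diskCache = qobject_cast<QNetworkDiskCache*>(manager.cache())) {
            qDebug() << "Clearing disk cache at" << diskCache->cacheDirectory();
            diskCache->clear(); // removes every cached entry from disk
        }
    }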
@ -892,6 +894,11 @@ void Application::paintGL() {
|
|||
|
||||
{
|
||||
PerformanceTimer perfTimer("renderOverlay");
|
||||
|
||||
// NOTE: There is no batch associated with this renderArgs
|
||||
// the ApplicationOverlay class assumes its viewport is set up to be the device size
|
||||
QSize size = qApp->getDeviceSize();
|
||||
renderArgs._viewport = glm::ivec4(0, 0, size.width(), size.height());
|
||||
_applicationOverlay.renderOverlay(&renderArgs);
|
||||
}
|
||||
|
||||
|
@ -969,12 +976,16 @@ void Application::paintGL() {
|
|||
} else {
|
||||
PROFILE_RANGE(__FUNCTION__ "/mainRender");
|
||||
|
||||
DependencyManager::get<GlowEffect>()->prepare(&renderArgs);
|
||||
auto primaryFBO = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
|
||||
GLuint fbo = gpu::GLBackend::getFramebufferID(primaryFBO);
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fbo);
|
||||
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
||||
|
||||
// Viewport is assigned to the size of the framebuffer
|
||||
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
|
||||
glViewport(0, 0, size.width(), size.height());
|
||||
|
||||
renderArgs._viewport = glm::ivec4(0, 0, size.width(), size.height());
|
||||
|
||||
glMatrixMode(GL_MODELVIEW);
|
||||
glPushMatrix();
|
||||
glLoadIdentity();
|
||||
|
@ -988,8 +999,7 @@ void Application::paintGL() {
|
|||
|
||||
renderArgs._renderMode = RenderArgs::NORMAL_RENDER_MODE;
|
||||
|
||||
auto finalFbo = DependencyManager::get<GlowEffect>()->render(&renderArgs);
|
||||
|
||||
auto finalFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
|
||||
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
|
||||
glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
|
||||
|
@ -997,6 +1007,8 @@ void Application::paintGL() {
|
|||
0, 0, _glWidget->getDeviceSize().width(), _glWidget->getDeviceSize().height(),
|
||||
GL_COLOR_BUFFER_BIT, GL_LINEAR);
|
||||
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
|
||||
|
||||
glBindTexture(GL_TEXTURE_2D, 0); // ???
|
||||
|
||||
_compositor.displayOverlayTexture(&renderArgs);
|
||||
}
|
||||
|
@ -1560,7 +1572,9 @@ void Application::mouseMoveEvent(QMouseEvent* event, unsigned int deviceID) {
|
|||
return;
|
||||
}
|
||||
|
||||
_keyboardMouseDevice.mouseMoveEvent(event, deviceID);
|
||||
if (deviceID == 0) {
|
||||
_keyboardMouseDevice.mouseMoveEvent(event, deviceID);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
@ -1581,7 +1595,9 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
|
|||
|
||||
|
||||
if (activeWindow() == _window) {
|
||||
_keyboardMouseDevice.mousePressEvent(event);
|
||||
if (deviceID == 0) {
|
||||
_keyboardMouseDevice.mousePressEvent(event);
|
||||
}
|
||||
|
||||
if (event->button() == Qt::LeftButton) {
|
||||
_mouseDragStarted = getTrueMouse();
|
||||
|
@ -1621,7 +1637,9 @@ void Application::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {
|
|||
}
|
||||
|
||||
if (activeWindow() == _window) {
|
||||
_keyboardMouseDevice.mouseReleaseEvent(event);
|
||||
if (deviceID == 0) {
|
||||
_keyboardMouseDevice.mouseReleaseEvent(event);
|
||||
}
|
||||
|
||||
if (event->button() == Qt::LeftButton) {
|
||||
_mousePressed = false;
|
||||
|
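The mouse-event hunks above each add the same guard, forwarding only events whose deviceID is 0 (the system mouse, by the convention visible here) to the keyboard/mouse device, so events synthesized for other input devices are not double-handled. A small sketch of the pattern; the forwarding helper is a hypothetical stand-in for _keyboardMouseDevice:

    #include <QMouseEvent>

    // Hypothetical stand-in for KeyboardMouseDevice::mousePressEvent(); not part of this commit.
    void forwardToKeyboardMouseDevice(QMouseEvent* /*event*/) {}

    void handleMousePress(QMouseEvent* event, unsigned int deviceID) {
        // Only the real system mouse (deviceID == 0) feeds the keyboard/mouse input device.
        if (deviceID == 0) {
            forwardToKeyboardMouseDevice(event);
        }
        // ...the rest of the handler still runs for every deviceID...
    }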
@ -1853,7 +1871,7 @@ void Application::idle() {
|
|||
}
|
||||
// After finishing all of the above work, ensure the idle timer is set to the proper interval,
|
||||
// depending on whether we're throttling or not
|
||||
idleTimer->start(_glWidget->isThrottleRendering() ? THROTTLED_IDLE_TIMER_DELAY : 0);
|
||||
idleTimer->start(_glWidget->isThrottleRendering() ? THROTTLED_IDLE_TIMER_DELAY : 1);
|
||||
}
|
||||
|
||||
// check for any requested background downloads.
|
||||
|
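The idle-timer hunk above changes the un-throttled restart interval from 0 ms to 1 ms; a zero-interval QTimer fires whenever the event loop has nothing else queued, while 1 ms leaves the loop a little more room for other work. A minimal sketch of the restart logic, with an illustrative throttled delay value:

    #include <QTimer>

    static const int THROTTLED_IDLE_TIMER_DELAY_MS = 1000; // illustrative; the real value is defined elsewhere

    // Restart the idle timer with a long interval when rendering is throttled,
    // otherwise with a near-immediate 1 ms interval.
    void restartIdleTimer(QTimer* idleTimer, bool throttleRendering) {
        idleTimer->start(throttleRendering ? THROTTLED_IDLE_TIMER_DELAY_MS : 1);
    }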
@ -2225,10 +2243,6 @@ void Application::init() {
|
|||
_entityClipboardRenderer.setViewFrustum(getViewFrustum());
|
||||
_entityClipboardRenderer.setTree(&_entityClipboard);
|
||||
|
||||
// initialize the GlowEffect with our widget
|
||||
bool glow = Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect);
|
||||
DependencyManager::get<GlowEffect>()->init(glow);
|
||||
|
||||
// Make sure any new sounds are loaded as soon as we know about them.
|
||||
connect(tree, &EntityTree::newCollisionSoundURL, DependencyManager::get<SoundCache>().data(), &SoundCache::getSound);
|
||||
connect(_myAvatar, &MyAvatar::newCollisionSoundURL, DependencyManager::get<SoundCache>().data(), &SoundCache::getSound);
|
||||
|
@ -2407,6 +2421,15 @@ void Application::cameraMenuChanged() {
|
|||
}
|
||||
}
|
||||
|
||||
void Application::reloadResourceCaches() {
|
||||
emptyLocalCache();
|
||||
|
||||
DependencyManager::get<AnimationCache>()->refreshAll();
|
||||
DependencyManager::get<GeometryCache>()->refreshAll();
|
||||
DependencyManager::get<SoundCache>()->refreshAll();
|
||||
DependencyManager::get<TextureCache>()->refreshAll();
|
||||
}
|
||||
|
||||
void Application::rotationModeChanged() {
|
||||
if (!Menu::getInstance()->isOptionChecked(MenuOption::CenterPlayerInView)) {
|
||||
_myAvatar->setHeadPitch(0);
|
||||
|
@ -2425,6 +2448,12 @@ void Application::updateDialogs(float deltaTime) {
|
|||
PerformanceWarning warn(showWarnings, "Application::updateDialogs()");
|
||||
auto dialogsManager = DependencyManager::get<DialogsManager>();
|
||||
|
||||
// Update audio stats dialog, if any
|
||||
AudioStatsDialog* audioStatsDialog = dialogsManager->getAudioStatsDialog();
|
||||
if(audioStatsDialog) {
|
||||
audioStatsDialog->update();
|
||||
}
|
||||
|
||||
// Update bandwidth dialog, if any
|
||||
BandwidthDialog* bandwidthDialog = dialogsManager->getBandwidthDialog();
|
||||
if (bandwidthDialog) {
|
||||
|
@ -3179,9 +3208,6 @@ QImage Application::renderAvatarBillboard(RenderArgs* renderArgs) {
|
|||
glClear(GL_COLOR_BUFFER_BIT);
|
||||
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_FALSE);
|
||||
|
||||
// the "glow" here causes an alpha of one
|
||||
Glower glower(renderArgs);
|
||||
|
||||
const int BILLBOARD_SIZE = 64;
|
||||
// TODO: Pass a RenderArgs to renderAvatarBillboard
|
||||
renderRearViewMirror(renderArgs, QRect(0, _glWidget->getDeviceHeight() - BILLBOARD_SIZE,
|
||||
|
@ -3570,25 +3596,6 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
|
|||
}
|
||||
|
||||
if (!selfAvatarOnly) {
|
||||
_nodeBoundsDisplay.draw();
|
||||
|
||||
// render octree fades if they exist
|
||||
if (_octreeFades.size() > 0) {
|
||||
PerformanceTimer perfTimer("octreeFades");
|
||||
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
|
||||
"Application::displaySide() ... octree fades...");
|
||||
_octreeFadesLock.lockForWrite();
|
||||
for(std::vector<OctreeFade>::iterator fade = _octreeFades.begin(); fade != _octreeFades.end();) {
|
||||
fade->render(renderArgs);
|
||||
if(fade->isDone()) {
|
||||
fade = _octreeFades.erase(fade);
|
||||
} else {
|
||||
++fade;
|
||||
}
|
||||
}
|
||||
_octreeFadesLock.unlock();
|
||||
}
|
||||
|
||||
// give external parties a chance to hook in
|
||||
{
|
||||
PerformanceTimer perfTimer("inWorldInterface");
|
||||
|
@ -3721,6 +3728,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
|
|||
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
|
||||
glViewport(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
|
||||
glScissor(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
|
||||
renderArgs->_viewport = glm::ivec4(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
|
||||
} else {
|
||||
// if not rendering the billboard, the region is in device independent coordinates; must convert to device
|
||||
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
|
||||
|
@ -3728,6 +3736,8 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
|
|||
int x = region.x() * ratio, y = region.y() * ratio, width = region.width() * ratio, height = region.height() * ratio;
|
||||
glViewport(x, size.height() - y - height, width, height);
|
||||
glScissor(x, size.height() - y - height, width, height);
|
||||
|
||||
renderArgs->_viewport = glm::ivec4(x, size.height() - y - height, width, height);
|
||||
}
|
||||
bool updateViewFrustum = false;
|
||||
updateProjectionMatrix(_mirrorCamera, updateViewFrustum);
|
||||
|
@ -3740,6 +3750,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
|
|||
glPopMatrix();
|
||||
|
||||
// reset Viewport and projection matrix
|
||||
renderArgs->_viewport = glm::ivec4(viewport[0], viewport[1], viewport[2], viewport[3]);
|
||||
glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);
|
||||
glDisable(GL_SCISSOR_TEST);
|
||||
updateProjectionMatrix(_myCamera, updateViewFrustum);
|
||||
|
@ -3880,17 +3891,6 @@ void Application::nodeKilled(SharedNodePointer node) {
|
|||
qCDebug(interfaceapp, "model server going away...... v[%f, %f, %f, %f]",
|
||||
(double)rootDetails.x, (double)rootDetails.y, (double)rootDetails.z, (double)rootDetails.s);
|
||||
|
||||
// Add the jurisdictionDetails object to the list of "fade outs"
|
||||
if (!Menu::getInstance()->isOptionChecked(MenuOption::DontFadeOnOctreeServerChanges)) {
|
||||
OctreeFade fade(OctreeFade::FADE_OUT, NODE_KILLED_RED, NODE_KILLED_GREEN, NODE_KILLED_BLUE);
|
||||
fade.voxelDetails = rootDetails;
|
||||
const float slightly_smaller = 0.99f;
|
||||
fade.voxelDetails.s = fade.voxelDetails.s * slightly_smaller;
|
||||
_octreeFadesLock.lockForWrite();
|
||||
_octreeFades.push_back(fade);
|
||||
_octreeFadesLock.unlock();
|
||||
}
|
||||
|
||||
// If the model server is going away, remove it from our jurisdiction map so we don't send voxels to a dead server
|
||||
_entityServerJurisdictions.lockForWrite();
|
||||
_entityServerJurisdictions.erase(_entityServerJurisdictions.find(nodeUUID));
|
||||
|
@ -3967,16 +3967,6 @@ int Application::parseOctreeStats(const QByteArray& packet, const SharedNodePoin
|
|||
qPrintable(serverType),
|
||||
(double)rootDetails.x, (double)rootDetails.y, (double)rootDetails.z, (double)rootDetails.s);
|
||||
|
||||
// Add the jurisdictionDetails object to the list of "fade outs"
|
||||
if (!Menu::getInstance()->isOptionChecked(MenuOption::DontFadeOnOctreeServerChanges)) {
|
||||
OctreeFade fade(OctreeFade::FADE_OUT, NODE_ADDED_RED, NODE_ADDED_GREEN, NODE_ADDED_BLUE);
|
||||
fade.voxelDetails = rootDetails;
|
||||
const float slightly_smaller = 0.99f;
|
||||
fade.voxelDetails.s = fade.voxelDetails.s * slightly_smaller;
|
||||
_octreeFadesLock.lockForWrite();
|
||||
_octreeFades.push_back(fade);
|
||||
_octreeFadesLock.unlock();
|
||||
}
|
||||
} else {
|
||||
jurisdiction->unlock();
|
||||
}
|
||||
|
|
|
@ -57,10 +57,10 @@
|
|||
#include "devices/SixenseManager.h"
|
||||
#include "scripting/ControllerScriptingInterface.h"
|
||||
#include "scripting/WebWindowClass.h"
|
||||
#include "ui/AudioStatsDialog.h"
|
||||
#include "ui/BandwidthDialog.h"
|
||||
#include "ui/HMDToolsDialog.h"
|
||||
#include "ui/ModelsBrowser.h"
|
||||
#include "ui/NodeBounds.h"
|
||||
#include "ui/OctreeStatsDialog.h"
|
||||
#include "ui/SnapshotShareDialog.h"
|
||||
#include "ui/LodToolsDialog.h"
|
||||
|
@ -72,7 +72,6 @@
|
|||
#include "ui/ToolWindow.h"
|
||||
#include "ui/UserInputMapper.h"
|
||||
#include "devices/KeyboardMouseDevice.h"
|
||||
#include "octree/OctreeFade.h"
|
||||
#include "octree/OctreePacketProcessor.h"
|
||||
#include "UndoStackScriptingInterface.h"
|
||||
|
||||
|
@ -91,13 +90,6 @@ class Node;
|
|||
class ProgramObject;
|
||||
class ScriptEngine;
|
||||
|
||||
static const float NODE_ADDED_RED = 0.0f;
|
||||
static const float NODE_ADDED_GREEN = 1.0f;
|
||||
static const float NODE_ADDED_BLUE = 0.0f;
|
||||
static const float NODE_KILLED_RED = 1.0f;
|
||||
static const float NODE_KILLED_GREEN = 0.0f;
|
||||
static const float NODE_KILLED_BLUE = 0.0f;
|
||||
|
||||
static const QString SNAPSHOT_EXTENSION = ".jpg";
|
||||
static const QString SVO_EXTENSION = ".svo";
|
||||
static const QString SVO_JSON_EXTENSION = ".svo.json";
|
||||
|
@ -311,8 +303,6 @@ public:
|
|||
virtual void endOverrideEnvironmentData() { _environment.endOverride(); }
|
||||
virtual qreal getDevicePixelRatio();
|
||||
|
||||
NodeBounds& getNodeBoundsDisplay() { return _nodeBoundsDisplay; }
|
||||
|
||||
FileLogger* getLogger() { return _logger; }
|
||||
|
||||
glm::vec2 getViewportDimensions() const;
|
||||
|
@ -370,9 +360,6 @@ signals:
|
|||
/// Fired when we're rendering in-world interface elements; allows external parties to hook in.
|
||||
void renderingInWorldInterface();
|
||||
|
||||
/// Fired when we're rendering the overlay.
|
||||
void renderingOverlay();
|
||||
|
||||
/// Fired when the import window is closed
|
||||
void importDone();
|
||||
|
||||
|
@ -450,6 +437,8 @@ public slots:
|
|||
void domainConnectionDenied(const QString& reason);
|
||||
|
||||
void cameraMenuChanged();
|
||||
|
||||
void reloadResourceCaches();
|
||||
|
||||
private slots:
|
||||
void clearDomainOctreeDetails();
|
||||
|
@ -495,6 +484,8 @@ private:
|
|||
void init();
|
||||
|
||||
void cleanupBeforeQuit();
|
||||
|
||||
void emptyLocalCache();
|
||||
|
||||
void update(float deltaTime);
|
||||
|
||||
|
@ -625,10 +616,6 @@ private:
|
|||
NodeToOctreeSceneStats _octreeServerSceneStats;
|
||||
QReadWriteLock _octreeSceneStatsLock;
|
||||
|
||||
NodeBounds _nodeBoundsDisplay;
|
||||
|
||||
std::vector<OctreeFade> _octreeFades;
|
||||
QReadWriteLock _octreeFadesLock;
|
||||
ControllerScriptingInterface _controllerScriptingInterface;
|
||||
QPointer<LogDialog> _logDialog;
|
||||
QPointer<SnapshotShareDialog> _snapshotShareDialog;
|
||||
|
|
|
@ -17,8 +17,8 @@
|
|||
#include <OctreeConstants.h>
|
||||
#include <SimpleMovingAverage.h>
|
||||
|
||||
const float DEFAULT_DESKTOP_LOD_DOWN_FPS = 30.0;
|
||||
const float DEFAULT_HMD_LOD_DOWN_FPS = 60.0;
|
||||
const float DEFAULT_DESKTOP_LOD_DOWN_FPS = 15.0;
|
||||
const float DEFAULT_HMD_LOD_DOWN_FPS = 30.0;
|
||||
const float MAX_LIKELY_DESKTOP_FPS = 59.0; // this is essentially V-sync minus 1 fps
|
||||
const float MAX_LIKELY_HMD_FPS = 74.0; // this is essentially V-sync minus 1 fps
|
||||
const float INCREASE_LOD_GAP = 15.0f;
|
||||
|
|
|
@ -9,6 +9,9 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
// include this before QGLWidget, which includes an earlier version of OpenGL
|
||||
#include "InterfaceConfig.h"
|
||||
|
||||
#include <QFileDialog>
|
||||
#include <QMenuBar>
|
||||
#include <QShortcut>
|
||||
|
@ -16,7 +19,6 @@
|
|||
#include <AddressManager.h>
|
||||
#include <AudioClient.h>
|
||||
#include <DependencyManager.h>
|
||||
#include <GlowEffect.h>
|
||||
#include <PathUtils.h>
|
||||
#include <SettingHandle.h>
|
||||
#include <UserActivityLogger.h>
|
||||
|
@ -24,7 +26,6 @@
|
|||
|
||||
#include "Application.h"
|
||||
#include "AccountManager.h"
|
||||
#include "audio/AudioIOStatsRenderer.h"
|
||||
#include "audio/AudioScope.h"
|
||||
#include "avatar/AvatarManager.h"
|
||||
#include "devices/DdeFaceTracker.h"
|
||||
|
@ -37,7 +38,6 @@
|
|||
#include "SpeechRecognizer.h"
|
||||
#endif
|
||||
#include "ui/DialogsManager.h"
|
||||
#include "ui/NodeBounds.h"
|
||||
#include "ui/StandAloneJSConsole.h"
|
||||
#include "InterfaceLogging.h"
|
||||
|
||||
|
@ -248,7 +248,6 @@ Menu::Menu() {
|
|||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ScriptedMotorControl, 0, true,
|
||||
avatar, SLOT(updateMotionBehavior()));
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::NamesAboveHeads, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::GlowWhenSpeaking, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::BlueSpeechSphere, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::EnableCharacterController, 0, true,
|
||||
avatar, SLOT(updateMotionBehavior()));
|
||||
|
@ -256,6 +255,8 @@ Menu::Menu() {
|
|||
avatar, SLOT(updateMotionBehavior()));
|
||||
|
||||
MenuWrapper* viewMenu = addMenu("View");
|
||||
|
||||
addActionToQMenuAndActionHash(viewMenu, MenuOption::ReloadContent, 0, qApp, SLOT(reloadResourceCaches()));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu,
|
||||
MenuOption::Fullscreen,
|
||||
|
@ -314,19 +315,14 @@ Menu::Menu() {
|
|||
qApp,
|
||||
SLOT(setEnable3DTVMode(bool)));
|
||||
|
||||
|
||||
MenuWrapper* nodeBordersMenu = viewMenu->addMenu("Server Borders");
|
||||
NodeBounds& nodeBounds = qApp->getNodeBoundsDisplay();
|
||||
addCheckableActionToQMenuAndActionHash(nodeBordersMenu, MenuOption::ShowBordersEntityNodes,
|
||||
Qt::CTRL | Qt::SHIFT | Qt::Key_1, false,
|
||||
&nodeBounds, SLOT(setShowEntityNodes(bool)));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::TurnWithHead, 0, false);
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Stats);
|
||||
addActionToQMenuAndActionHash(viewMenu, MenuOption::Log,
|
||||
Qt::CTRL | Qt::SHIFT | Qt::Key_L,
|
||||
qApp, SLOT(toggleLogDialog()));
|
||||
addActionToQMenuAndActionHash(viewMenu, MenuOption::AudioNetworkStats, 0,
|
||||
dialogsManager.data(), SLOT(audioStatsDetails()));
|
||||
addActionToQMenuAndActionHash(viewMenu, MenuOption::BandwidthDetails, 0,
|
||||
dialogsManager.data(), SLOT(bandwidthDetails()));
|
||||
addActionToQMenuAndActionHash(viewMenu, MenuOption::OctreeStats, 0,
|
||||
|
@ -340,7 +336,6 @@ Menu::Menu() {
|
|||
0, // QML Qt::SHIFT | Qt::Key_A,
|
||||
true);
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::AmbientOcclusion);
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::DontFadeOnOctreeServerChanges);
|
||||
|
||||
MenuWrapper* ambientLightMenu = renderOptionsMenu->addMenu(MenuOption::RenderAmbientLight);
|
||||
QActionGroup* ambientLightGroup = new QActionGroup(ambientLightMenu);
|
||||
|
@ -393,8 +388,6 @@ Menu::Menu() {
|
|||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Stars,
|
||||
0, // QML Qt::Key_Asterisk,
|
||||
true);
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::EnableGlowEffect, 0, true,
|
||||
DependencyManager::get<GlowEffect>().data(), SLOT(toggleGlowEffect(bool)));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Wireframe, Qt::ALT | Qt::Key_W, false);
|
||||
addActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::LodTools,
|
||||
|
@ -458,6 +451,7 @@ Menu::Menu() {
|
|||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderBoundingCollisionShapes);
|
||||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderLookAtVectors, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderFocusIndicator, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowWhosLookingAtMe, 0, false);
|
||||
|
||||
MenuWrapper* handOptionsMenu = developerMenu->addMenu("Hands");
|
||||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlignForearmsWithWrists, 0, false);
|
||||
|
@ -594,20 +588,6 @@ Menu::Menu() {
|
|||
audioScopeFramesGroup->addAction(fiftyFrames);
|
||||
}
|
||||
|
||||
auto statsRenderer = DependencyManager::get<AudioIOStatsRenderer>();
|
||||
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioStats,
|
||||
Qt::CTRL | Qt::SHIFT | Qt::Key_A,
|
||||
false,
|
||||
statsRenderer.data(),
|
||||
SLOT(toggle()));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioStatsShowInjectedStreams,
|
||||
0,
|
||||
false,
|
||||
statsRenderer.data(),
|
||||
SLOT(toggleShowInjectedStreams()));
|
||||
|
||||
|
||||
MenuWrapper* physicsOptionsMenu = developerMenu->addMenu("Physics");
|
||||
addCheckableActionToQMenuAndActionHash(physicsOptionsMenu, MenuOption::PhysicsShowOwned);
|
||||
addCheckableActionToQMenuAndActionHash(physicsOptionsMenu, MenuOption::PhysicsShowHulls);
|
||||
|
|
|
@ -145,7 +145,7 @@ namespace MenuOption {
|
|||
const QString AudioScopeFrames = "Display Frames";
|
||||
const QString AudioScopePause = "Pause Scope";
|
||||
const QString AudioScopeTwentyFrames = "Twenty";
|
||||
const QString AudioStats = "Audio Stats";
|
||||
const QString AudioNetworkStats = "Audio Network Stats";
|
||||
const QString AudioStatsShowInjectedStreams = "Audio Stats Show Injected Streams";
|
||||
const QString AutoMuteAudio = "Auto Mute Microphone";
|
||||
const QString AvatarReceiveStats = "Show Receive Stats";
|
||||
|
@ -179,14 +179,12 @@ namespace MenuOption {
|
|||
const QString DisplayModelElementProxy = "Display Model Element Bounds";
|
||||
const QString DisplayDebugTimingDetails = "Display Timing Details";
|
||||
const QString DontDoPrecisionPicking = "Don't Do Precision Picking";
|
||||
const QString DontFadeOnOctreeServerChanges = "Don't Fade In/Out on Octree Server Changes";
|
||||
const QString DontRenderEntitiesAsScene = "Don't Render Entities as Scene";
|
||||
const QString EchoLocalAudio = "Echo Local Audio";
|
||||
const QString EchoServerAudio = "Echo Server Audio";
|
||||
const QString EditEntitiesHelp = "Edit Entities Help...";
|
||||
const QString Enable3DTVMode = "Enable 3DTV Mode";
|
||||
const QString EnableCharacterController = "Enable avatar collisions";
|
||||
const QString EnableGlowEffect = "Enable Glow Effect";
|
||||
const QString EnableVRMode = "Enable VR Mode";
|
||||
const QString ExpandMyAvatarSimulateTiming = "Expand /myAvatar/simulation";
|
||||
const QString ExpandMyAvatarTiming = "Expand /myAvatar";
|
||||
|
@ -200,7 +198,6 @@ namespace MenuOption {
|
|||
const QString FrameTimer = "Show Timer";
|
||||
const QString Fullscreen = "Fullscreen";
|
||||
const QString FullscreenMirror = "Fullscreen Mirror";
|
||||
const QString GlowWhenSpeaking = "Glow When Speaking";
|
||||
const QString HMDTools = "HMD Tools";
|
||||
const QString IncreaseAvatarSize = "Increase Avatar Size";
|
||||
const QString IndependentMode = "Independent Mode";
|
||||
|
@ -230,6 +227,7 @@ namespace MenuOption {
|
|||
const QString Preferences = "Preferences...";
|
||||
const QString Quit = "Quit";
|
||||
const QString ReloadAllScripts = "Reload All Scripts";
|
||||
const QString ReloadContent = "Reload Content (Clears all caches)";
|
||||
const QString RenderBoundingCollisionShapes = "Show Bounding Collision Shapes";
|
||||
const QString RenderFocusIndicator = "Show Eye Focus";
|
||||
const QString RenderHeadCollisionShapes = "Show Head Collision Shapes";
|
||||
|
@ -270,6 +268,7 @@ namespace MenuOption {
|
|||
const QString ShowBordersEntityNodes = "Show Entity Nodes";
|
||||
const QString ShowIKConstraints = "Show IK Constraints";
|
||||
const QString ShowRealtimeEntityStats = "Show Realtime Entity Stats";
|
||||
const QString ShowWhosLookingAtMe = "Show Who's Looking at Me";
|
||||
const QString SimpleShadows = "Simple";
|
||||
const QString SixenseEnabled = "Enable Hydra Support";
|
||||
const QString SixenseMouseInput = "Enable Sixense Mouse Input";
|
||||
|
|
|
@ -23,7 +23,6 @@
|
|||
|
||||
#include <ByteCountCoding.h>
|
||||
#include <SharedUtil.h>
|
||||
#include <TextRenderer.h>
|
||||
|
||||
#include "InterfaceConfig.h"
|
||||
#include "world.h"
|
||||
|
@ -79,44 +78,6 @@ const glm::vec3 randVector() {
|
|||
return glm::vec3(randFloat() - 0.5f, randFloat() - 0.5f, randFloat() - 0.5f) * 2.0f;
|
||||
}
|
||||
|
||||
static TextRenderer* textRenderer(int mono) {
|
||||
static TextRenderer* monoRenderer = TextRenderer::getInstance(MONO_FONT_FAMILY);
|
||||
static TextRenderer* proportionalRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY,
|
||||
-1, -1, false, TextRenderer::SHADOW_EFFECT);
|
||||
static TextRenderer* inconsolataRenderer = TextRenderer::getInstance(INCONSOLATA_FONT_FAMILY, -1, INCONSOLATA_FONT_WEIGHT,
|
||||
false);
|
||||
switch (mono) {
|
||||
case 1:
|
||||
return monoRenderer;
|
||||
case 2:
|
||||
return inconsolataRenderer;
|
||||
case 0:
|
||||
default:
|
||||
return proportionalRenderer;
|
||||
}
|
||||
}
|
||||
|
||||
int widthText(float scale, int mono, char const* string) {
|
||||
return textRenderer(mono)->computeExtent(string).x; // computeWidth(string) * (scale / 0.10);
|
||||
}
|
||||
|
||||
void drawText(int x, int y, float scale, float radians, int mono,
|
||||
char const* string, const float* color) {
|
||||
//
|
||||
// Draws text on screen as stroked so it can be resized
|
||||
//
|
||||
glPushMatrix();
|
||||
glTranslatef(static_cast<float>(x), static_cast<float>(y), 0.0f);
|
||||
|
||||
|
||||
glRotated(double(radians * DEGREES_PER_RADIAN), 0.0, 0.0, 1.0);
|
||||
glScalef(scale / 0.1f, scale / 0.1f, 1.0f);
|
||||
|
||||
glm::vec4 colorV4 = {color[0], color[1], color[2], 1.0f };
|
||||
textRenderer(mono)->draw(0, 0, string, colorV4);
|
||||
glPopMatrix();
|
||||
}
|
||||
|
||||
void renderCollisionOverlay(int width, int height, float magnitude, float red, float blue, float green) {
|
||||
const float MIN_VISIBLE_COLLISION = 0.01f;
|
||||
if (magnitude > MIN_VISIBLE_COLLISION) {
|
||||
|
|
|
@ -22,10 +22,6 @@ float randFloat();
|
|||
const glm::vec3 randVector();
|
||||
|
||||
void renderWorldBox(gpu::Batch& batch);
|
||||
int widthText(float scale, int mono, char const* string);
|
||||
|
||||
void drawText(int x, int y, float scale, float radians, int mono,
|
||||
char const* string, const float* color);
|
||||
|
||||
void renderCollisionOverlay(int width, int height, float magnitude, float red = 0, float blue = 0, float green = 0);
|
||||
|
||||
|
|
|
@ -1,242 +0,0 @@
|
|||
//
|
||||
// AudioIOStatsRenderer.cpp
|
||||
// interface/src/audio
|
||||
//
|
||||
// Created by Stephen Birarda on 2014-12-16.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "InterfaceConfig.h"
|
||||
|
||||
#include <AudioClient.h>
|
||||
#include <AudioConstants.h>
|
||||
#include <AudioIOStats.h>
|
||||
#include <DependencyManager.h>
|
||||
#include <GeometryCache.h>
|
||||
#include <NodeList.h>
|
||||
#include <Util.h>
|
||||
|
||||
#include "AudioIOStatsRenderer.h"
|
||||
|
||||
AudioIOStatsRenderer::AudioIOStatsRenderer() :
|
||||
_stats(NULL),
|
||||
_isEnabled(false),
|
||||
_shouldShowInjectedStreams(false)
|
||||
{
|
||||
// grab the stats object from the audio I/O singleton
|
||||
_stats = &DependencyManager::get<AudioClient>()->getStats();
|
||||
}
|
||||
|
||||
#ifdef _WIN32
|
||||
const unsigned int STATS_WIDTH = 1500;
|
||||
#else
|
||||
const unsigned int STATS_WIDTH = 650;
|
||||
#endif
|
||||
const unsigned int STATS_HEIGHT_PER_LINE = 20;
|
||||
|
||||
void AudioIOStatsRenderer::render(const float* color, int width, int height) {
|
||||
if (!_isEnabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
const int linesWhenCentered = _shouldShowInjectedStreams ? 34 : 27;
|
||||
const int CENTERED_BACKGROUND_HEIGHT = STATS_HEIGHT_PER_LINE * linesWhenCentered;
|
||||
|
||||
int lines = _shouldShowInjectedStreams ? _stats->getMixerInjectedStreamStatsMap().size() * 7 + 27 : 27;
|
||||
int statsHeight = STATS_HEIGHT_PER_LINE * lines;
|
||||
|
||||
|
||||
static const glm::vec4 backgroundColor = { 0.2f, 0.2f, 0.2f, 0.6f };
|
||||
int x = std::max((width - (int)STATS_WIDTH) / 2, 0);
|
||||
int y = std::max((height - CENTERED_BACKGROUND_HEIGHT) / 2, 0);
|
||||
int w = STATS_WIDTH;
|
||||
int h = statsHeight;
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(x, y, w, h, backgroundColor);
|
||||
|
||||
int horizontalOffset = x + 5;
|
||||
int verticalOffset = y;
|
||||
|
||||
float scale = 0.10f;
|
||||
float rotation = 0.0f;
|
||||
int font = 2;
|
||||
|
||||
char latencyStatString[512];
|
||||
|
||||
float audioInputBufferLatency = 0.0f, inputRingBufferLatency = 0.0f, networkRoundtripLatency = 0.0f, mixerRingBufferLatency = 0.0f, outputRingBufferLatency = 0.0f, audioOutputBufferLatency = 0.0f;
|
||||
|
||||
AudioStreamStats downstreamAudioStreamStats = _stats->getMixerDownstreamStats();
|
||||
SharedNodePointer audioMixerNodePointer = DependencyManager::get<NodeList>()->soloNodeOfType(NodeType::AudioMixer);
|
||||
if (!audioMixerNodePointer.isNull()) {
|
||||
audioInputBufferLatency = _stats->getAudioInputMsecsReadStats().getWindowAverage();
|
||||
inputRingBufferLatency = (float) _stats->getInputRungBufferMsecsAvailableStats().getWindowAverage();
|
||||
networkRoundtripLatency = audioMixerNodePointer->getPingMs();
|
||||
mixerRingBufferLatency = _stats->getMixerAvatarStreamStats()._framesAvailableAverage * AudioConstants::NETWORK_FRAME_MSECS;
|
||||
outputRingBufferLatency = downstreamAudioStreamStats._framesAvailableAverage * AudioConstants::NETWORK_FRAME_MSECS;
|
||||
audioOutputBufferLatency = _stats->getAudioOutputMsecsUnplayedStats().getWindowAverage();
|
||||
}
|
||||
float totalLatency = audioInputBufferLatency + inputRingBufferLatency + networkRoundtripLatency + mixerRingBufferLatency + outputRingBufferLatency + audioOutputBufferLatency;
|
||||
|
||||
sprintf(latencyStatString,
|
||||
" Audio input buffer: %7.2fms - avg msecs of samples read to the input ring buffer in last 10s",
|
||||
(double)audioInputBufferLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString,
|
||||
" Input ring buffer: %7.2fms - avg msecs of samples in input ring buffer in last 10s",
|
||||
(double)inputRingBufferLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString,
|
||||
" Network to mixer: %7.2fms - half of last ping value calculated by the node list",
|
||||
(double)(networkRoundtripLatency / 2.0f));
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString,
|
||||
" AudioMixer ring buffer: %7.2fms - avg msecs of samples in audio mixer's ring buffer in last 10s",
|
||||
(double)mixerRingBufferLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString,
|
||||
" Network to client: %7.2fms - half of last ping value calculated by the node list",
|
||||
(double)(networkRoundtripLatency / 2.0f));
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString,
|
||||
" Output ring buffer: %7.2fms - avg msecs of samples in output ring buffer in last 10s",
|
||||
(double)outputRingBufferLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString,
|
||||
" Audio output buffer: %7.2fms - avg msecs of samples in audio output buffer in last 10s",
|
||||
(double)audioOutputBufferLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString, " TOTAL: %7.2fms\n", (double)totalLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
|
||||
|
||||
char clientUpstreamMicLabelString[] = "Upstream Mic Audio Packets Sent Gaps (by client):";
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, clientUpstreamMicLabelString, color);
|
||||
|
||||
const MovingMinMaxAvg<quint64>& packetSentTimeGaps = _stats->getPacketSentTimeGaps();
|
||||
|
||||
char stringBuffer[512];
|
||||
sprintf(stringBuffer, " Inter-packet timegaps (overall) | min: %9s, max: %9s, avg: %9s",
|
||||
formatUsecTime(packetSentTimeGaps.getMin()).toLatin1().data(),
|
||||
formatUsecTime(packetSentTimeGaps.getMax()).toLatin1().data(),
|
||||
formatUsecTime(packetSentTimeGaps.getAverage()).toLatin1().data());
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
sprintf(stringBuffer, " Inter-packet timegaps (last 30s) | min: %9s, max: %9s, avg: %9s",
|
||||
formatUsecTime(packetSentTimeGaps.getWindowMin()).toLatin1().data(),
|
||||
formatUsecTime(packetSentTimeGaps.getWindowMax()).toLatin1().data(),
|
||||
formatUsecTime(packetSentTimeGaps.getWindowAverage()).toLatin1().data());
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
|
||||
|
||||
char upstreamMicLabelString[] = "Upstream mic audio stats (received and reported by audio-mixer):";
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, upstreamMicLabelString, color);
|
||||
|
||||
renderAudioStreamStats(&_stats->getMixerAvatarStreamStats(), horizontalOffset, verticalOffset,
|
||||
scale, rotation, font, color);
|
||||
|
||||
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
|
||||
|
||||
char downstreamLabelString[] = "Downstream mixed audio stats:";
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, downstreamLabelString, color);
|
||||
|
||||
AudioStreamStats downstreamStats = _stats->getMixerDownstreamStats();
|
||||
renderAudioStreamStats(&downstreamStats, horizontalOffset, verticalOffset,
|
||||
scale, rotation, font, color, true);
|
||||
|
||||
|
||||
if (_shouldShowInjectedStreams) {
|
||||
|
||||
foreach(const AudioStreamStats& injectedStreamAudioStats, _stats->getMixerInjectedStreamStatsMap()) {
|
||||
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
|
||||
|
||||
char upstreamInjectedLabelString[512];
|
||||
sprintf(upstreamInjectedLabelString, "Upstream injected audio stats: stream ID: %s",
|
||||
injectedStreamAudioStats._streamIdentifier.toString().toLatin1().data());
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, upstreamInjectedLabelString, color);
|
||||
|
||||
renderAudioStreamStats(&injectedStreamAudioStats, horizontalOffset, verticalOffset, scale, rotation, font, color);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AudioIOStatsRenderer::renderAudioStreamStats(const AudioStreamStats* streamStats, int horizontalOffset, int& verticalOffset,
|
||||
float scale, float rotation, int font, const float* color, bool isDownstreamStats) {
|
||||
|
||||
char stringBuffer[512];
|
||||
|
||||
sprintf(stringBuffer, " Packet loss | overall: %5.2f%% (%d lost), last_30s: %5.2f%% (%d lost)",
|
||||
(double)(streamStats->_packetStreamStats.getLostRate() * 100.0f),
|
||||
streamStats->_packetStreamStats._lost,
|
||||
(double)(streamStats->_packetStreamWindowStats.getLostRate() * 100.0f),
|
||||
streamStats->_packetStreamWindowStats._lost);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
if (isDownstreamStats) {
|
||||
sprintf(stringBuffer, " Ringbuffer frames | desired: %u, avg_available(10s): %u+%d, available: %u+%d",
|
||||
streamStats->_desiredJitterBufferFrames,
|
||||
streamStats->_framesAvailableAverage,
|
||||
(int)((float)_stats->getAudioInputMsecsReadStats().getWindowAverage() / AudioConstants::NETWORK_FRAME_MSECS),
|
||||
streamStats->_framesAvailable,
|
||||
(int)(_stats->getAudioOutputMsecsUnplayedStats().getCurrentIntervalLastSample()
|
||||
/ AudioConstants::NETWORK_FRAME_MSECS));
|
||||
} else {
|
||||
sprintf(stringBuffer, " Ringbuffer frames | desired: %u, avg_available(10s): %u, available: %u",
|
||||
streamStats->_desiredJitterBufferFrames,
|
||||
streamStats->_framesAvailableAverage,
|
||||
streamStats->_framesAvailable);
|
||||
}
|
||||
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
sprintf(stringBuffer, " Ringbuffer stats | starves: %u, prev_starve_lasted: %u, frames_dropped: %u, overflows: %u",
|
||||
streamStats->_starveCount,
|
||||
streamStats->_consecutiveNotMixedCount,
|
||||
streamStats->_framesDropped,
|
||||
streamStats->_overflowCount);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
sprintf(stringBuffer, " Inter-packet timegaps (overall) | min: %9s, max: %9s, avg: %9s",
|
||||
formatUsecTime(streamStats->_timeGapMin).toLatin1().data(),
|
||||
formatUsecTime(streamStats->_timeGapMax).toLatin1().data(),
|
||||
formatUsecTime(streamStats->_timeGapAverage).toLatin1().data());
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
sprintf(stringBuffer, " Inter-packet timegaps (last 30s) | min: %9s, max: %9s, avg: %9s",
|
||||
formatUsecTime(streamStats->_timeGapWindowMin).toLatin1().data(),
|
||||
formatUsecTime(streamStats->_timeGapWindowMax).toLatin1().data(),
|
||||
formatUsecTime(streamStats->_timeGapWindowAverage).toLatin1().data());
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
}
|
|
@ -1,45 +0,0 @@
|
|||
//
|
||||
// AudioIOStatsRenderer.h
|
||||
// interface/src/audio
|
||||
//
|
||||
// Created by Stephen Birarda on 2014-12-16.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_AudioIOStatsRenderer_h
|
||||
#define hifi_AudioIOStatsRenderer_h
|
||||
|
||||
#include <QObject>
|
||||
|
||||
#include <DependencyManager.h>
|
||||
|
||||
class AudioIOStats;
|
||||
class AudioStreamStats;
|
||||
|
||||
class AudioIOStatsRenderer : public QObject, public Dependency {
|
||||
Q_OBJECT
|
||||
SINGLETON_DEPENDENCY
|
||||
public:
|
||||
void render(const float* color, int width, int height);
|
||||
|
||||
public slots:
|
||||
void toggle() { _isEnabled = !_isEnabled; }
|
||||
void toggleShowInjectedStreams() { _shouldShowInjectedStreams = !_shouldShowInjectedStreams; }
|
||||
protected:
|
||||
AudioIOStatsRenderer();
|
||||
private:
|
||||
// audio stats methods for rendering
|
||||
void renderAudioStreamStats(const AudioStreamStats* streamStats, int horizontalOffset, int& verticalOffset,
|
||||
float scale, float rotation, int font, const float* color, bool isDownstreamStats = false);
|
||||
|
||||
const AudioIOStats* _stats;
|
||||
|
||||
bool _isEnabled;
|
||||
bool _shouldShowInjectedStreams;
|
||||
};
|
||||
|
||||
|
||||
#endif // hifi_AudioIOStatsRenderer_h
|
|
@ -38,6 +38,7 @@ AudioScope::AudioScope() :
|
|||
_scopeOutputLeft(NULL),
|
||||
_scopeOutputRight(NULL),
|
||||
_scopeLastFrame(),
|
||||
_audioScopeBackground(DependencyManager::get<GeometryCache>()->allocateID()),
|
||||
_audioScopeGrid(DependencyManager::get<GeometryCache>()->allocateID()),
|
||||
_inputID(DependencyManager::get<GeometryCache>()->allocateID()),
|
||||
_outputLeftID(DependencyManager::get<GeometryCache>()->allocateID()),
|
||||
|
@ -126,22 +127,24 @@ void AudioScope::render(RenderArgs* renderArgs, int width, int height) {
|
|||
int w = (int)SCOPE_WIDTH;
|
||||
int h = (int)SCOPE_HEIGHT;
|
||||
|
||||
gpu::Batch batch;
|
||||
gpu::Batch& batch = *renderArgs->_batch;
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
geometryCache->useSimpleDrawPipeline(batch);
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
batch.setUniformTexture(0, textureCache->getWhiteTexture());
|
||||
batch.setResourceTexture(0, textureCache->getWhiteTexture());
|
||||
|
||||
// FIXME - do we really need to reset this here? we know that we're called inside of ApplicationOverlay::renderOverlays
|
||||
// which already set up our batch for us to have these settings
|
||||
mat4 legacyProjection = glm::ortho<float>(0, width, height, 0, -1000, 1000);
|
||||
batch.setProjectionTransform(legacyProjection);
|
||||
batch.setModelTransform(Transform());
|
||||
batch.setViewTransform(Transform());
|
||||
geometryCache->renderQuad(batch, x, y, w, h, backgroundColor);
|
||||
batch._glLineWidth(1.0f); // default
|
||||
geometryCache->renderQuad(batch, x, y, w, h, backgroundColor, _audioScopeBackground);
|
||||
geometryCache->renderGrid(batch, x, y, w, h, gridRows, gridCols, gridColor, _audioScopeGrid);
|
||||
renderLineStrip(batch, _inputID, inputColor, x, y, _samplesPerScope, _scopeInputOffset, _scopeInput);
|
||||
renderLineStrip(batch, _outputLeftID, outputLeftColor, x, y, _samplesPerScope, _scopeOutputOffset, _scopeOutputLeft);
|
||||
renderLineStrip(batch, _outputRightD, outputRightColor, x, y, _samplesPerScope, _scopeOutputOffset, _scopeOutputRight);
|
||||
renderArgs->_context->syncCache();
|
||||
renderArgs->_context->render(batch);
|
||||
}
|
||||
|
||||
void AudioScope::renderLineStrip(gpu::Batch& batch, int id, const glm::vec4& color, int x, int y, int n, int offset, const QByteArray* byteArray) {
|
||||
|
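The AudioScope::render() hunk above appears to stop building and submitting a private gpu::Batch, recording instead into the batch handed down through RenderArgs (and renaming setUniformTexture to setResourceTexture). A language-level sketch of that ownership shift, using placeholder types rather than the engine's real gpu classes:

    // Placeholder types; the real gpu::Batch and RenderArgs live in the engine, not here.
    struct Batch { /* recorded draw commands */ };
    struct RenderArgs { Batch* _batch = nullptr; };

    // Record into the caller-owned batch; submission is the caller's job,
    // so the old local-batch / syncCache / render sequence goes away.
    void renderIntoSharedBatch(RenderArgs* renderArgs) {
        Batch& batch = *renderArgs->_batch;
        (void)batch; // ...draw calls would be recorded into 'batch' here...
    }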
|
|
@ -69,6 +69,7 @@ private:
|
|||
QByteArray* _scopeOutputRight;
|
||||
QByteArray _scopeLastFrame;
|
||||
|
||||
int _audioScopeBackground;
|
||||
int _audioScopeGrid;
|
||||
int _inputID;
|
||||
int _outputLeftID;
|
||||
|
|
|
@ -24,7 +24,6 @@
|
|||
|
||||
#include <DeferredLightingEffect.h>
|
||||
#include <GeometryUtil.h>
|
||||
#include <GlowEffect.h>
|
||||
#include <LODManager.h>
|
||||
#include <NodeList.h>
|
||||
#include <NumericalConstants.h>
|
||||
|
@ -282,7 +281,7 @@ enum TextRendererType {
|
|||
|
||||
static TextRenderer3D* textRenderer(TextRendererType type) {
|
||||
static TextRenderer3D* chatRenderer = TextRenderer3D::getInstance(SANS_FONT_FAMILY, -1,
|
||||
false, TextRenderer3D::SHADOW_EFFECT);
|
||||
false, SHADOW_EFFECT);
|
||||
static TextRenderer3D* displayNameRenderer = TextRenderer3D::getInstance(SANS_FONT_FAMILY);
|
||||
|
||||
switch(type) {
|
||||
|
@ -410,9 +409,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, boo
|
|||
float GLOW_FROM_AVERAGE_LOUDNESS = ((this == DependencyManager::get<AvatarManager>()->getMyAvatar())
|
||||
? 0.0f
|
||||
: MAX_GLOW * getHeadData()->getAudioLoudness() / GLOW_MAX_LOUDNESS);
|
||||
if (!Menu::getInstance()->isOptionChecked(MenuOption::GlowWhenSpeaking)) {
|
||||
GLOW_FROM_AVERAGE_LOUDNESS = 0.0f;
|
||||
}
|
||||
GLOW_FROM_AVERAGE_LOUDNESS = 0.0f;
|
||||
|
||||
float glowLevel = _moving && distanceToTarget > GLOW_DISTANCE && renderArgs->_renderMode == RenderArgs::NORMAL_RENDER_MODE
|
||||
? 1.0f
|
||||
|
@ -453,22 +450,36 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, boo
|
|||
}
|
||||
}
|
||||
|
||||
// Stack indicator spheres
|
||||
float indicatorOffset = 0.0f;
|
||||
if (!_displayName.isEmpty() && _displayNameAlpha != 0.0f) {
|
||||
const float DISPLAY_NAME_INDICATOR_OFFSET = 0.22f;
|
||||
indicatorOffset = DISPLAY_NAME_INDICATOR_OFFSET;
|
||||
}
|
||||
const float INDICATOR_RADIUS = 0.03f;
|
||||
const float INDICATOR_INDICATOR_OFFSET = 3.0f * INDICATOR_RADIUS;
|
||||
|
||||
// If this is the avatar being looked at, render a little ball above their head
|
||||
if (_isLookAtTarget && Menu::getInstance()->isOptionChecked(MenuOption::RenderFocusIndicator)) {
|
||||
const float LOOK_AT_INDICATOR_RADIUS = 0.03f;
|
||||
const float LOOK_AT_INDICATOR_OFFSET = 0.22f;
|
||||
const glm::vec4 LOOK_AT_INDICATOR_COLOR = { 0.8f, 0.0f, 0.0f, 0.75f };
|
||||
glm::vec3 position;
|
||||
if (_displayName.isEmpty() || _displayNameAlpha == 0.0f) {
|
||||
position = glm::vec3(_position.x, getDisplayNamePosition().y, _position.z);
|
||||
} else {
|
||||
position = glm::vec3(_position.x, getDisplayNamePosition().y + LOOK_AT_INDICATOR_OFFSET, _position.z);
|
||||
}
|
||||
glm::vec3 position = glm::vec3(_position.x, getDisplayNamePosition().y + indicatorOffset, _position.z);
|
||||
Transform transform;
|
||||
transform.setTranslation(position);
|
||||
batch.setModelTransform(transform);
|
||||
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphere(batch, LOOK_AT_INDICATOR_RADIUS
|
||||
, 15, 15, LOOK_AT_INDICATOR_COLOR);
|
||||
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphere(batch, INDICATOR_RADIUS,
|
||||
15, 15, LOOK_AT_INDICATOR_COLOR);
|
||||
indicatorOffset += INDICATOR_INDICATOR_OFFSET;
|
||||
}
|
||||
|
||||
// If the avatar is looking at me, render an indication that they are
|
||||
if (getHead()->getIsLookingAtMe() && Menu::getInstance()->isOptionChecked(MenuOption::ShowWhosLookingAtMe)) {
|
||||
const glm::vec4 LOOKING_AT_ME_COLOR = { 0.8f, 0.65f, 0.0f, 0.1f };
|
||||
glm::vec3 position = glm::vec3(_position.x, getDisplayNamePosition().y + indicatorOffset, _position.z);
|
||||
Transform transform;
|
||||
transform.setTranslation(position);
|
||||
batch.setModelTransform(transform);
|
||||
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphere(batch, INDICATOR_RADIUS,
|
||||
15, 15, LOOKING_AT_ME_COLOR);
|
||||
}
|
||||
|
||||
// quick check before falling into the code below:
|
||||
|
@ -512,7 +523,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, boo
|
|||
|
||||
auto cameraMode = Application::getInstance()->getCamera()->getMode();
|
||||
if (!isMyAvatar() || cameraMode != CAMERA_MODE_FIRST_PERSON) {
|
||||
renderDisplayName(batch, *renderArgs->_viewFrustum);
|
||||
renderDisplayName(batch, *renderArgs->_viewFrustum, renderArgs->_viewport);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -565,8 +576,6 @@ void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool
|
|||
fixupModelsInScene();
|
||||
|
||||
{
|
||||
Glower glower(renderArgs, glowLevel);
|
||||
|
||||
if (_shouldRenderBillboard || !(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
|
||||
if (postLighting || renderArgs->_renderMode == RenderArgs::SHADOW_RENDER_MODE) {
|
||||
// render the billboard until both models are loaded
|
||||
|
@ -622,7 +631,7 @@ void Avatar::renderBillboard(RenderArgs* renderArgs) {
|
|||
_billboardTexture = DependencyManager::get<TextureCache>()->getTexture(
|
||||
uniqueUrl, DEFAULT_TEXTURE, false, _billboard);
|
||||
}
|
||||
if (!_billboardTexture->isLoaded()) {
|
||||
if (!_billboardTexture || !_billboardTexture->isLoaded()) {
|
||||
return;
|
||||
}
|
||||
// rotate about vertical to face the camera
|
||||
|
@ -644,7 +653,7 @@ void Avatar::renderBillboard(RenderArgs* renderArgs) {
|
|||
glm::vec2 texCoordBottomRight(1.0f, 1.0f);
|
||||
|
||||
gpu::Batch& batch = *renderArgs->_batch;
|
||||
batch.setUniformTexture(0, _billboardTexture->getGPUTexture());
|
||||
batch.setResourceTexture(0, _billboardTexture->getGPUTexture());
|
||||
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, true);
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight,
|
||||
glm::vec4(1.0f, 1.0f, 1.0f, 1.0f));
|
||||
|
@ -665,7 +674,7 @@ glm::vec3 Avatar::getDisplayNamePosition() const {
|
|||
return namePosition;
|
||||
}
|
||||
|
||||
Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, float fontSize) const {
|
||||
Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, float fontSize, const glm::ivec4& viewport) const {
|
||||
Transform result;
|
||||
// We assume textPosition is within the frustum
|
||||
glm::vec3 textPosition = getDisplayNamePosition();
|
||||
|
@ -684,12 +693,7 @@ Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, floa
|
|||
glm::vec4 p0 = viewProj * glm::vec4(testPoint0, 1.0);
|
||||
glm::vec4 p1 = viewProj * glm::vec4(testPoint1, 1.0);
|
||||
|
||||
// TODO REMOVE vvv
|
||||
GLint viewportMatrix[4];
|
||||
glGetIntegerv(GL_VIEWPORT, viewportMatrix);
|
||||
glm::dmat4 modelViewMatrix;
|
||||
float windowSizeY = viewportMatrix[3] - viewportMatrix[1];
|
||||
// TODO REMOVE ^^^
|
||||
float windowSizeY = viewport.w;
|
||||
|
||||
const float DESIRED_HIGHT_ON_SCREEN = 20; // In pixels (this is double on retinas)
|
||||
|
||||
|
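The hunk above swaps a glGetIntegerv(GL_VIEWPORT) read-back for the viewport already carried in RenderArgs, so the display-name sizing no longer queries GL state. A tiny sketch of reading the window height from such a viewport, glm only:

    #include <glm/glm.hpp>

    // The viewport is packed as (x, y, width, height); the height drives the text scale,
    // replacing the old glGetIntegerv(GL_VIEWPORT) round-trip to the driver.
    float windowHeightFromViewport(const glm::ivec4& viewport) {
        return static_cast<float>(viewport.w); // fourth component = height in pixels
    }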
@ -722,7 +726,7 @@ Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, floa
|
|||
|
||||
}
|
||||
|
||||
void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum) const {
|
||||
void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum, const glm::ivec4& viewport) const {
|
||||
bool shouldShowReceiveStats = DependencyManager::get<AvatarManager>()->shouldShowReceiveStats() && !isMyAvatar();
|
||||
|
||||
// If we have nothing to draw, or it's totally transparent, return
|
||||
|
@ -764,7 +768,7 @@ void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum) co
|
|||
(_displayNameAlpha / DISPLAYNAME_ALPHA) * DISPLAYNAME_BACKGROUND_ALPHA);
|
||||
|
||||
// Compute display name transform
|
||||
auto textTransform = calculateDisplayNameTransform(frustum, renderer->getFontSize());
|
||||
auto textTransform = calculateDisplayNameTransform(frustum, renderer->getFontSize(), viewport);
|
||||
batch.setModelTransform(textTransform);
|
||||
|
||||
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, false, true, true, true);
|
||||
|
|
|
@@ -234,8 +234,8 @@ protected:
    float getPelvisFloatingHeight() const;
    glm::vec3 getDisplayNamePosition() const;

    Transform calculateDisplayNameTransform(const ViewFrustum& frustum, float fontSize) const;
    void renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum) const;
    Transform calculateDisplayNameTransform(const ViewFrustum& frustum, float fontSize, const glm::ivec4& viewport) const;
    void renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum, const glm::ivec4& viewport) const;
    virtual void renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel = 0.0f);
    virtual bool shouldRenderHead(const RenderArgs* renderArgs) const;
    virtual void fixupModelsInScene();
@@ -70,30 +70,14 @@ void AvatarActionHold::updateActionWorker(float deltaTimeStep) {
        return;
    }

    // check for NaNs
    if (position.x != position.x ||
        position.y != position.y ||
        position.z != position.z) {
        qDebug() << "AvatarActionHold::updateActionWorker -- target position includes NaN";
        return;
    }
    if (rotation.x != rotation.x ||
        rotation.y != rotation.y ||
        rotation.z != rotation.z ||
        rotation.w != rotation.w) {
        qDebug() << "AvatarActionHold::updateActionWorker -- target rotation includes NaN";
        return;
    }

    if (_positionalTarget != position || _rotationalTarget != rotation) {
        auto ownerEntity = _ownerEntity.lock();
        if (ownerEntity) {
            ownerEntity->setActionDataDirty(true);
        }
        _positionalTarget = position;
        _rotationalTarget = rotation;
    }

    _positionalTarget = position;
    _rotationalTarget = rotation;
    unlock();

    ObjectActionSpring::updateActionWorker(deltaTimeStep);
@@ -101,59 +85,51 @@ void AvatarActionHold::updateActionWorker(float deltaTimeStep) {


bool AvatarActionHold::updateArguments(QVariantMap arguments) {
    bool rPOk = true;
    bool ok = true;
    glm::vec3 relativePosition =
        EntityActionInterface::extractVec3Argument("hold", arguments, "relativePosition", rPOk, false);
    bool rROk = true;
        EntityActionInterface::extractVec3Argument("hold", arguments, "relativePosition", ok, false);
    if (!ok) {
        relativePosition = _relativePosition;
    }

    ok = true;
    glm::quat relativeRotation =
        EntityActionInterface::extractQuatArgument("hold", arguments, "relativeRotation", rROk, false);
    bool tSOk = true;
        EntityActionInterface::extractQuatArgument("hold", arguments, "relativeRotation", ok, false);
    if (!ok) {
        relativeRotation = _relativeRotation;
    }

    ok = true;
    float timeScale =
        EntityActionInterface::extractFloatArgument("hold", arguments, "timeScale", tSOk, false);
    bool hOk = true;
        EntityActionInterface::extractFloatArgument("hold", arguments, "timeScale", ok, false);
    if (!ok) {
        timeScale = _linearTimeScale;
    }

    ok = true;
    QString hand =
        EntityActionInterface::extractStringArgument("hold", arguments, "hand", hOk, false);
        EntityActionInterface::extractStringArgument("hold", arguments, "hand", ok, false);
    if (!ok || !(hand == "left" || hand == "right")) {
        hand = _hand;
    }

    lockForWrite();
    if (rPOk) {
    if (relativePosition != _relativePosition
            || relativeRotation != _relativeRotation
            || timeScale != _linearTimeScale
            || hand != _hand) {
        lockForWrite();
        _relativePosition = relativePosition;
    } else {
        _relativePosition = glm::vec3(0.0f, 0.0f, 1.0f);
    }

    if (rROk) {
        _relativeRotation = relativeRotation;
    } else {
        _relativeRotation = glm::quat(0.0f, 0.0f, 0.0f, 1.0f);
    }
        const float MIN_TIMESCALE = 0.1f;
        _linearTimeScale = glm::min(MIN_TIMESCALE, timeScale);
        _angularTimeScale = _linearTimeScale;
        _hand = hand;

    if (tSOk) {
        _linearTimeScale = timeScale;
        _angularTimeScale = timeScale;
    } else {
        _linearTimeScale = 0.2f;
        _angularTimeScale = 0.2f;
        _mine = true;
        _active = true;
        activateBody();
        unlock();
    }

    if (hOk) {
        hand = hand.toLower();
        if (hand == "left") {
            _hand = "left";
        } else if (hand == "right") {
            _hand = "right";
        } else {
            qDebug() << "hold action -- invalid hand argument:" << hand;
            _hand = "right";
        }
    } else {
        _hand = "right";
    }

    _mine = true;
    _positionalTargetSet = true;
    _rotationalTargetSet = true;
    _active = true;
    unlock();
    return true;
}

@@ -25,7 +25,6 @@
#endif


#include <GlowEffect.h>
#include <PerfStat.h>
#include <RegisteredMetaTypes.h>
#include <UUID.h>
@@ -72,9 +71,13 @@ void AvatarManager::init() {
    _myAvatar->init();
    _avatarHash.insert(MY_AVATAR_KEY, _myAvatar);

    connect(DependencyManager::get<SceneScriptingInterface>().data(), &SceneScriptingInterface::shouldRenderAvatarsChanged, this, &AvatarManager::updateAvatarRenderStatus, Qt::QueuedConnection);

    render::ScenePointer scene = Application::getInstance()->getMain3DScene();
    render::PendingChanges pendingChanges;
    _myAvatar->addToScene(_myAvatar, scene, pendingChanges);
    if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
        _myAvatar->addToScene(_myAvatar, scene, pendingChanges);
    }
    scene->enqueuePendingChanges(pendingChanges);
}

@@ -158,7 +161,9 @@ AvatarSharedPointer AvatarManager::addAvatar(const QUuid& sessionUUID, const QWe
    auto avatar = std::dynamic_pointer_cast<Avatar>(AvatarHashMap::addAvatar(sessionUUID, mixerWeakPointer));
    render::ScenePointer scene = Application::getInstance()->getMain3DScene();
    render::PendingChanges pendingChanges;
    avatar->addToScene(avatar, scene, pendingChanges);
    if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
        avatar->addToScene(avatar, scene, pendingChanges);
    }
    scene->enqueuePendingChanges(pendingChanges);
    return avatar;
}
@@ -310,3 +315,23 @@ void AvatarManager::updateAvatarPhysicsShape(const QUuid& id) {
        }
    }
}

void AvatarManager::updateAvatarRenderStatus(bool shouldRenderAvatars) {
    if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
        for (auto avatarData : _avatarHash) {
            auto avatar = std::dynamic_pointer_cast<Avatar>(avatarData);
            render::ScenePointer scene = Application::getInstance()->getMain3DScene();
            render::PendingChanges pendingChanges;
            avatar->addToScene(avatar, scene, pendingChanges);
            scene->enqueuePendingChanges(pendingChanges);
        }
    } else {
        for (auto avatarData : _avatarHash) {
            auto avatar = std::dynamic_pointer_cast<Avatar>(avatarData);
            render::ScenePointer scene = Application::getInstance()->getMain3DScene();
            render::PendingChanges pendingChanges;
            avatar->removeFromScene(avatar, scene, pendingChanges);
            scene->enqueuePendingChanges(pendingChanges);
        }
    }
}
@@ -63,6 +63,7 @@ public:

public slots:
    void setShouldShowReceiveStats(bool shouldShowReceiveStats) { _shouldShowReceiveStats = shouldShowReceiveStats; }
    void updateAvatarRenderStatus(bool shouldRenderAvatars);

private:
    AvatarManager(QObject* parent = 0);
@@ -56,18 +56,14 @@ void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const FBX
        glm::translate(state.getDefaultTranslationInConstrainedFrame()) *
        joint.preTransform * glm::mat4_cast(joint.preRotation)));
    glm::vec3 pitchYawRoll = safeEulerAngles(_owningHead->getFinalOrientationInLocalFrame());
    if (owningAvatar->isMyAvatar()) {
        glm::vec3 lean = glm::radians(glm::vec3(_owningHead->getFinalLeanForward(),
                                                _owningHead->getTorsoTwist(),
                                                _owningHead->getFinalLeanSideways()));
        pitchYawRoll -= lean;
    }

    glm::vec3 lean = glm::radians(glm::vec3(_owningHead->getFinalLeanForward(),
                                            _owningHead->getTorsoTwist(),
                                            _owningHead->getFinalLeanSideways()));
    pitchYawRoll -= lean;
    state.setRotationInConstrainedFrame(glm::angleAxis(-pitchYawRoll.z, glm::normalize(inverse * axes[2]))
                                        * glm::angleAxis(pitchYawRoll.y, glm::normalize(inverse * axes[1]))
                                        * glm::angleAxis(-pitchYawRoll.x, glm::normalize(inverse * axes[0]))
                                        * joint.rotation, DEFAULT_PRIORITY);

}

void FaceModel::maybeUpdateEyeRotation(Model* model, const JointState& parentState, const FBXJoint& joint, JointState& state) {
@@ -19,6 +19,7 @@

#include <QtCore/QTimer>

#include <gpu/GPUConfig.h>
#include <AccountManager.h>
#include <AddressManager.h>
#include <AnimationHandle.h>
@@ -31,7 +32,7 @@
#include <PerfStat.h>
#include <ShapeCollider.h>
#include <SharedUtil.h>
#include <TextRenderer.h>
#include <TextRenderer3D.h>
#include <UserActivityLogger.h>

#include "devices/Faceshift.h"
@@ -323,28 +324,6 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
}


void MyAvatar::renderDebugBodyPoints() {
    glm::vec3 torsoPosition(getPosition());
    glm::vec3 headPosition(getHead()->getEyePosition());
    float torsoToHead = glm::length(headPosition - torsoPosition);
    glm::vec3 position;
    qCDebug(interfaceapp, "head-above-torso %.2f, scale = %0.2f", (double)torsoToHead, (double)getScale());

    // Torso Sphere
    position = torsoPosition;
    glPushMatrix();
    glTranslatef(position.x, position.y, position.z);
    DependencyManager::get<GeometryCache>()->renderSphere(0.2f, 10.0f, 10.0f, glm::vec4(0, 1, 0, .5f));
    glPopMatrix();

    // Head Sphere
    position = headPosition;
    glPushMatrix();
    glTranslatef(position.x, position.y, position.z);
    DependencyManager::get<GeometryCache>()->renderSphere(0.15f, 10.0f, 10.0f, glm::vec4(0, 1, 0, .5f));
    glPopMatrix();
}

// virtual
void MyAvatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, bool postLighting) {
    // don't render if we've been asked to disable local rendering
@@ -355,8 +334,9 @@ void MyAvatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, b
    Avatar::render(renderArgs, cameraPosition, postLighting);

    // don't display IK constraints in shadow mode
    if (Menu::getInstance()->isOptionChecked(MenuOption::ShowIKConstraints) && postLighting) {
        _skeletonModel.renderIKConstraints();
    if (Menu::getInstance()->isOptionChecked(MenuOption::ShowIKConstraints) &&
            renderArgs && renderArgs->_batch) {
        _skeletonModel.renderIKConstraints(*renderArgs->_batch);
    }
}

@@ -48,7 +48,6 @@ public:
    virtual void render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, bool postLighting = false) override;
    virtual void renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel = 0.0f) override;
    virtual bool shouldRenderHead(const RenderArgs* renderArgs) const override;
    void renderDebugBodyPoints();

    // setters
    void setLeanScale(float scale) { _leanScale = scale; }
@ -120,8 +120,8 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
|
|||
Hand* hand = _owningAvatar->getHand();
|
||||
hand->getLeftRightPalmIndices(leftPalmIndex, rightPalmIndex);
|
||||
|
||||
const float HAND_RESTORATION_RATE = 0.25f;
|
||||
if (leftPalmIndex == -1 || rightPalmIndex == -1) {
|
||||
const float HAND_RESTORATION_RATE = 0.25f;
|
||||
if (leftPalmIndex == -1 && rightPalmIndex == -1) {
|
||||
// palms are not yet set, use mouse
|
||||
if (_owningAvatar->getHandState() == HAND_STATE_NULL) {
|
||||
restoreRightHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
|
||||
|
@ -138,8 +138,16 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
|
|||
restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
|
||||
|
||||
} else {
|
||||
applyPalmData(geometry.leftHandJointIndex, hand->getPalms()[leftPalmIndex]);
|
||||
applyPalmData(geometry.rightHandJointIndex, hand->getPalms()[rightPalmIndex]);
|
||||
if (leftPalmIndex != -1) {
|
||||
applyPalmData(geometry.leftHandJointIndex, hand->getPalms()[leftPalmIndex]);
|
||||
} else {
|
||||
restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
|
||||
}
|
||||
if (rightPalmIndex != -1) {
|
||||
applyPalmData(geometry.rightHandJointIndex, hand->getPalms()[rightPalmIndex]);
|
||||
} else {
|
||||
restoreRightHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
|
||||
}
|
||||
}
|
||||
|
||||
if (_isFirstPerson) {
|
||||
|
@ -186,9 +194,9 @@ void SkeletonModel::getHandShapes(int jointIndex, QVector<const Shape*>& shapes)
|
|||
}
|
||||
}
|
||||
|
||||
void SkeletonModel::renderIKConstraints() {
|
||||
renderJointConstraints(getRightHandJointIndex());
|
||||
renderJointConstraints(getLeftHandJointIndex());
|
||||
void SkeletonModel::renderIKConstraints(gpu::Batch& batch) {
|
||||
renderJointConstraints(batch, getRightHandJointIndex());
|
||||
renderJointConstraints(batch, getLeftHandJointIndex());
|
||||
}
|
||||
|
||||
class IndexValue {
|
||||
|
@ -312,26 +320,27 @@ void SkeletonModel::maybeUpdateEyeRotation(const JointState& parentState, const
|
|||
_owningAvatar->getHead()->getFaceModel().maybeUpdateEyeRotation(this, parentState, joint, state);
|
||||
}
|
||||
|
||||
void SkeletonModel::renderJointConstraints(int jointIndex) {
|
||||
void SkeletonModel::renderJointConstraints(gpu::Batch& batch, int jointIndex) {
|
||||
if (jointIndex == -1 || jointIndex >= _jointStates.size()) {
|
||||
return;
|
||||
}
|
||||
const FBXGeometry& geometry = _geometry->getFBXGeometry();
|
||||
const float BASE_DIRECTION_SIZE = 0.3f;
|
||||
float directionSize = BASE_DIRECTION_SIZE * extractUniformScale(_scale);
|
||||
glLineWidth(3.0f);
|
||||
batch._glLineWidth(3.0f);
|
||||
do {
|
||||
const FBXJoint& joint = geometry.joints.at(jointIndex);
|
||||
const JointState& jointState = _jointStates.at(jointIndex);
|
||||
glm::vec3 position = _rotation * jointState.getPosition() + _translation;
|
||||
|
||||
glPushMatrix();
|
||||
glTranslatef(position.x, position.y, position.z);
|
||||
glm::quat parentRotation = (joint.parentIndex == -1) ? _rotation : _rotation * _jointStates.at(joint.parentIndex).getRotation();
|
||||
glm::vec3 rotationAxis = glm::axis(parentRotation);
|
||||
glRotatef(glm::degrees(glm::angle(parentRotation)), rotationAxis.x, rotationAxis.y, rotationAxis.z);
|
||||
float fanScale = directionSize * 0.75f;
|
||||
glScalef(fanScale, fanScale, fanScale);
|
||||
|
||||
Transform transform = Transform();
|
||||
transform.setTranslation(position);
|
||||
transform.setRotation(parentRotation);
|
||||
transform.setScale(fanScale);
|
||||
batch.setModelTransform(transform);
|
||||
|
||||
const int AXIS_COUNT = 3;
|
||||
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
@ -362,17 +371,14 @@ void SkeletonModel::renderJointConstraints(int jointIndex) {
|
|||
// TODO: this is really inefficient constantly recreating these vertices buffers. It would be
|
||||
// better if the skeleton model cached these buffers for each of the joints they are rendering
|
||||
geometryCache->updateVertices(_triangleFanID, points, color);
|
||||
geometryCache->renderVertices(gpu::TRIANGLE_FAN, _triangleFanID);
|
||||
geometryCache->renderVertices(batch, gpu::TRIANGLE_FAN, _triangleFanID);
|
||||
|
||||
}
|
||||
glPopMatrix();
|
||||
|
||||
renderOrientationDirections(jointIndex, position, _rotation * jointState.getRotation(), directionSize);
|
||||
jointIndex = joint.parentIndex;
|
||||
|
||||
} while (jointIndex != -1 && geometry.joints.at(jointIndex).isFree);
|
||||
|
||||
glLineWidth(1.0f);
|
||||
}
|
||||
|
||||
void SkeletonModel::renderOrientationDirections(int jointIndex, glm::vec3 position, const glm::quat& orientation, float size) {
|
||||
|
@ -793,19 +799,24 @@ void SkeletonModel::renderBoundingCollisionShapes(gpu::Batch& batch, float alpha
|
|||
transform.setTranslation(endPoint);
|
||||
batch.setModelTransform(transform);
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
geometryCache->renderSphere(batch, _boundingShape.getRadius(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS, glm::vec4(0.6f, 0.6f, 0.8f, alpha));
|
||||
geometryCache->renderSphere(batch, _boundingShape.getRadius(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS,
|
||||
glm::vec4(0.6f, 0.6f, 0.8f, alpha));
|
||||
|
||||
// draw a yellow sphere at the capsule startpoint
|
||||
glm::vec3 startPoint;
|
||||
_boundingShape.getStartPoint(startPoint);
|
||||
startPoint = startPoint - _translation;
|
||||
glm::vec3 axis = endPoint - startPoint;
|
||||
glTranslatef(-axis.x, -axis.y, -axis.z);
|
||||
geometryCache->renderSphere(_boundingShape.getRadius(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS, glm::vec4(0.8f, 0.8f, 0.6f, alpha));
|
||||
Transform axisTransform = Transform();
|
||||
axisTransform.setTranslation(-axis);
|
||||
batch.setModelTransform(axisTransform);
|
||||
geometryCache->renderSphere(batch, _boundingShape.getRadius(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS,
|
||||
glm::vec4(0.8f, 0.8f, 0.6f, alpha));
|
||||
|
||||
// draw a green cylinder between the two points
|
||||
glm::vec3 origin(0.0f);
|
||||
Avatar::renderJointConnectingCone(batch, origin, axis, _boundingShape.getRadius(), _boundingShape.getRadius(), glm::vec4(0.6f, 0.8f, 0.6f, alpha));
|
||||
Avatar::renderJointConnectingCone(batch, origin, axis, _boundingShape.getRadius(), _boundingShape.getRadius(),
|
||||
glm::vec4(0.6f, 0.8f, 0.6f, alpha));
|
||||
}
|
||||
|
||||
bool SkeletonModel::hasSkeleton() {
|
||||
|
|
|
@@ -36,7 +36,7 @@ public:
    /// \param shapes[out] list in which is stored pointers to hand shapes
    void getHandShapes(int jointIndex, QVector<const Shape*>& shapes) const;

    void renderIKConstraints();
    void renderIKConstraints(gpu::Batch& batch);

    /// Returns the index of the left hand joint, or -1 if not found.
    int getLeftHandJointIndex() const { return isActive() ? _geometry->getFBXGeometry().leftHandJointIndex : -1; }
@@ -144,7 +144,7 @@ protected:

private:

    void renderJointConstraints(int jointIndex);
    void renderJointConstraints(gpu::Batch& batch, int jointIndex);
    void renderOrientationDirections(int jointIndex, glm::vec3 position, const glm::quat& orientation, float size);

    struct OrientationLineIDs {
@@ -24,7 +24,6 @@

#include <avatar/AvatarManager.h>
#include <avatar/MyAvatar.h>
#include <GlowEffect.h>
#include <GlWindow.h>
#include <gpu/GLBackend.h>
#include <OglplusHelpers.h>
@@ -644,15 +643,9 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
        return;
    }

    //Bind our framebuffer object. If we are rendering the glow effect, we let the glow effect shader take care of it
    if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
        DependencyManager::get<GlowEffect>()->prepare(renderArgs);
    } else {
        auto primaryFBO = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
        glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(primaryFBO));
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    }

    auto primaryFBO = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
    glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(primaryFBO));
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    glMatrixMode(GL_PROJECTION);
    glPushMatrix();
@@ -714,6 +707,7 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
        vp.Size.w = _recommendedTexSize.w * _offscreenRenderScale;
        glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);

        renderArgs->_viewport = glm::ivec4(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
        renderArgs->_renderSide = RenderArgs::MONO;
        qApp->displaySide(renderArgs, *_camera);
        qApp->getApplicationCompositor().displayOverlayTextureHmd(renderArgs, eye);
@@ -723,15 +717,8 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
    glPopMatrix();

    gpu::FramebufferPointer finalFbo;
    //Bind the output texture from the glow shader. If glow effect is disabled, we just grab the texture
    if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
        //Full texture viewport for glow effect
        glViewport(0, 0, _renderTargetSize.w, _renderTargetSize.h);
        finalFbo = DependencyManager::get<GlowEffect>()->render(renderArgs);
    } else {
        finalFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
        glBindFramebuffer(GL_FRAMEBUFFER, 0);
    }
    finalFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
    glBindFramebuffer(GL_FRAMEBUFFER, 0);

    glMatrixMode(GL_PROJECTION);
    glPopMatrix();
@@ -824,7 +811,6 @@ glm::quat OculusManager::getOrientation() {
    return toGlm(trackingState.HeadPose.ThePose.Orientation);
}

//Used to set the size of the glow framebuffers
QSize OculusManager::getRenderTargetSize() {
    QSize rv;
    rv.setWidth(_renderTargetSize.w);
@@ -244,14 +244,6 @@ void SixenseManager::update(float deltaTime) {
            palm->setTrigger(data->trigger);
            palm->setJoystick(data->joystick_x, data->joystick_y);

            handleButtonEvent(data->buttons, numActiveControllers - 1);
            handleAxisEvent(data->joystick_x, data->joystick_y, data->trigger, numActiveControllers - 1);

            // Emulate the mouse so we can use scripts
            if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput) && !_controllersAtBase) {
                emulateMouse(palm, numActiveControllers - 1);
            }

            // NOTE: Sixense API returns pos data in millimeters but we IMMEDIATELY convert to meters.
            glm::vec3 position(data->pos[0], data->pos[1], data->pos[2]);
            position *= METERS_PER_MILLIMETER;
@@ -260,6 +252,15 @@ void SixenseManager::update(float deltaTime) {
            const float CONTROLLER_AT_BASE_DISTANCE = 0.075f;
            if (glm::length(position) < CONTROLLER_AT_BASE_DISTANCE) {
                numControllersAtBase++;
                palm->setActive(false);
            } else {
                handleButtonEvent(data->buttons, numActiveControllers - 1);
                handleAxisEvent(data->joystick_x, data->joystick_y, data->trigger, numActiveControllers - 1);

                // Emulate the mouse so we can use scripts
                if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput) && !_controllersAtBase) {
                    emulateMouse(palm, numActiveControllers - 1);
                }
            }

            // Transform the measured position into body frame.
@@ -14,7 +14,6 @@
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>

#include <GlowEffect.h>
#include "gpu/GLBackend.h"
#include "Application.h"

@@ -82,6 +81,9 @@ void TV3DManager::configureCamera(Camera& whichCamera, int screenWidth, int scre
}

void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {

#ifdef THIS_CURRENTLY_BROKEN_WAITING_FOR_DISPLAY_PLUGINS

    double nearZ = DEFAULT_NEAR_CLIP; // near clipping plane
    double farZ = DEFAULT_FAR_CLIP; // far clipping plane

@@ -94,6 +96,7 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
    int portalH = deviceSize.height();


    // FIXME - glow effect is removed, 3D TV mode broken until we get display plugins working
    DependencyManager::get<GlowEffect>()->prepare(renderArgs);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

@@ -107,6 +110,7 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
        _activeEye = &eye;
        glViewport(portalX, portalY, portalW, portalH);
        glScissor(portalX, portalY, portalW, portalH);
        renderArgs->_viewport = glm::ivec4(portalX, portalY, portalW, portalH);

        glm::mat4 projection = glm::frustum<float>(eye.left, eye.right, eye.bottom, eye.top, nearZ, farZ);
        projection = glm::translate(projection, vec3(eye.modelTranslation, 0, 0));
@@ -118,6 +122,7 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();
        renderArgs->_renderSide = RenderArgs::MONO;

        qApp->displaySide(renderArgs, eyeCamera, false);
        qApp->getApplicationCompositor().displayOverlayTexture(renderArgs);
        _activeEye = NULL;
@@ -128,6 +133,7 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
    glPopMatrix();
    glDisable(GL_SCISSOR_TEST);

    // FIXME - glow effect is removed, 3D TV mode broken until we get display plugins working
    auto finalFbo = DependencyManager::get<GlowEffect>()->render(renderArgs);
    auto fboSize = finalFbo->getSize();
    // Get the ACTUAL device size for the BLIT
@@ -142,6 +148,8 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {

    // reset the viewport to how we started
    glViewport(0, 0, deviceSize.width(), deviceSize.height());

#endif
}

void TV3DManager::overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
@@ -1,68 +0,0 @@
//
//  OctreeFade.cpp
//  interface/src/octree
//
//  Created by Brad Hefta-Gaub on 8/6/13.
//  Copyright 2013 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "InterfaceConfig.h"

#include <GlowEffect.h>
#include <GeometryCache.h>
#include <OctreeConstants.h>

#include "Application.h"
#include "OctreeFade.h"

const float OctreeFade::FADE_OUT_START = 0.5f;
const float OctreeFade::FADE_OUT_END = 0.05f;
const float OctreeFade::FADE_OUT_STEP = 0.9f;
const float OctreeFade::FADE_IN_START = 0.05f;
const float OctreeFade::FADE_IN_END = 0.5f;
const float OctreeFade::FADE_IN_STEP = 1.1f;
const float OctreeFade::DEFAULT_RED = 0.5f;
const float OctreeFade::DEFAULT_GREEN = 0.5f;
const float OctreeFade::DEFAULT_BLUE = 0.5f;

OctreeFade::OctreeFade(FadeDirection direction, float red, float green, float blue) :
    direction(direction),
    red(red),
    green(green),
    blue(blue)
{
    opacity = (direction == FADE_OUT) ? FADE_OUT_START : FADE_IN_START;
}

void OctreeFade::render(RenderArgs* renderArgs) {
    DependencyManager::get<GlowEffect>()->begin(renderArgs);

    glDisable(GL_LIGHTING);
    glPushMatrix();
    glScalef(1.0f, 1.0f, 1.0f);
    glTranslatef(voxelDetails.x + voxelDetails.s * 0.5f,
                 voxelDetails.y + voxelDetails.s * 0.5f,
                 voxelDetails.z + voxelDetails.s * 0.5f);
    glLineWidth(1.0f);
    DependencyManager::get<GeometryCache>()->renderSolidCube(voxelDetails.s, glm::vec4(red, green, blue, opacity));
    glLineWidth(1.0f);
    glPopMatrix();
    glEnable(GL_LIGHTING);


    DependencyManager::get<GlowEffect>()->end(renderArgs);

    opacity *= (direction == FADE_OUT) ? FADE_OUT_STEP : FADE_IN_STEP;
}

bool OctreeFade::isDone() const {
    if (direction == FADE_OUT) {
        return opacity <= FADE_OUT_END;
    } else {
        return opacity >= FADE_IN_END;
    }
    return true; // unexpected case, assume we're done
}
@@ -1,46 +0,0 @@
//
//  OctreeFade.h
//  interface/src/octree
//
//  Created by Brad Hefta-Gaub on 8/6/13.
//  Copyright 2013 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_OctreeFade_h
#define hifi_OctreeFade_h

#include <OctalCode.h> // for VoxelPositionSize

class OctreeFade {
public:

    enum FadeDirection { FADE_OUT, FADE_IN};
    static const float FADE_OUT_START;
    static const float FADE_OUT_END;
    static const float FADE_OUT_STEP;
    static const float FADE_IN_START;
    static const float FADE_IN_END;
    static const float FADE_IN_STEP;
    static const float DEFAULT_RED;
    static const float DEFAULT_GREEN;
    static const float DEFAULT_BLUE;

    VoxelPositionSize voxelDetails;
    FadeDirection direction;
    float opacity;

    float red;
    float green;
    float blue;

    OctreeFade(FadeDirection direction = FADE_OUT, float red = DEFAULT_RED,
               float green = DEFAULT_GREEN, float blue = DEFAULT_BLUE);

    void render(RenderArgs* renderArgs);
    bool isDone() const;
};

#endif // hifi_OctreeFade_h
@ -179,7 +179,7 @@ void ApplicationCompositor::bindCursorTexture(gpu::Batch& batch, uint8_t cursorI
|
|||
_cursors[iconId] = DependencyManager::get<TextureCache>()->
|
||||
getImageTexture(iconPath);
|
||||
}
|
||||
batch.setUniformTexture(0, _cursors[iconId]);
|
||||
batch.setResourceTexture(0, _cursors[iconId]);
|
||||
}
|
||||
|
||||
// Draws the FBO texture for the screen
|
||||
|
@ -189,15 +189,14 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
|
|||
return;
|
||||
}
|
||||
|
||||
GLuint texture = qApp->getApplicationOverlay().getOverlayTexture();
|
||||
if (!texture) {
|
||||
gpu::FramebufferPointer overlayFramebuffer = qApp->getApplicationOverlay().getOverlayFramebuffer();
|
||||
if (!overlayFramebuffer) {
|
||||
return;
|
||||
}
|
||||
|
||||
updateTooltips();
|
||||
|
||||
auto deviceSize = qApp->getDeviceSize();
|
||||
glViewport(0, 0, deviceSize.width(), deviceSize.height());
|
||||
|
||||
//Handle fading and deactivation/activation of UI
|
||||
gpu::Batch batch;
|
||||
|
@ -206,12 +205,11 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
|
|||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
||||
geometryCache->useSimpleDrawPipeline(batch);
|
||||
batch.setViewportTransform(glm::ivec4(0, 0, deviceSize.width(), deviceSize.height()));
|
||||
batch.setModelTransform(Transform());
|
||||
batch.setViewTransform(Transform());
|
||||
batch.setProjectionTransform(mat4());
|
||||
batch._glBindTexture(GL_TEXTURE_2D, texture);
|
||||
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
batch.setResourceTexture(0, overlayFramebuffer->getRenderBuffer(0));
|
||||
geometryCache->renderUnitQuad(batch, vec4(vec3(1), _alpha));
|
||||
|
||||
// Doesn't actually render
|
||||
|
@ -258,8 +256,8 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
|
|||
return;
|
||||
}
|
||||
|
||||
GLuint texture = qApp->getApplicationOverlay().getOverlayTexture();
|
||||
if (!texture) {
|
||||
gpu::FramebufferPointer overlayFramebuffer = qApp->getApplicationOverlay().getOverlayFramebuffer();
|
||||
if (!overlayFramebuffer) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -275,9 +273,12 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
|
|||
geometryCache->useSimpleDrawPipeline(batch);
|
||||
batch._glDisable(GL_DEPTH_TEST);
|
||||
batch._glDisable(GL_CULL_FACE);
|
||||
batch._glBindTexture(GL_TEXTURE_2D, texture);
|
||||
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
//batch._glBindTexture(GL_TEXTURE_2D, texture);
|
||||
//batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
//batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
|
||||
batch.setResourceTexture(0, overlayFramebuffer->getRenderBuffer(0));
|
||||
|
||||
batch.setViewTransform(Transform());
|
||||
batch.setProjectionTransform(qApp->getEyeProjection(eye));
|
||||
|
||||
|
@ -535,75 +536,6 @@ void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {
|
|||
}
|
||||
}
|
||||
|
||||
//Renders a small magnification of the currently bound texture at the coordinates
|
||||
void ApplicationCompositor::renderMagnifier(gpu::Batch& batch, const glm::vec2& magPos, float sizeMult, bool showBorder) {
|
||||
if (!_magnifier) {
|
||||
return;
|
||||
}
|
||||
auto canvasSize = qApp->getCanvasSize();
|
||||
|
||||
const int widgetWidth = canvasSize.x;
|
||||
const int widgetHeight = canvasSize.y;
|
||||
|
||||
const float halfWidth = (MAGNIFY_WIDTH / _textureAspectRatio) * sizeMult / 2.0f;
|
||||
const float halfHeight = MAGNIFY_HEIGHT * sizeMult / 2.0f;
|
||||
// Magnification Texture Coordinates
|
||||
const float magnifyULeft = (magPos.x - halfWidth) / (float)widgetWidth;
|
||||
const float magnifyURight = (magPos.x + halfWidth) / (float)widgetWidth;
|
||||
const float magnifyVTop = 1.0f - (magPos.y - halfHeight) / (float)widgetHeight;
|
||||
const float magnifyVBottom = 1.0f - (magPos.y + halfHeight) / (float)widgetHeight;
|
||||
|
||||
const float newHalfWidth = halfWidth * MAGNIFY_MULT;
|
||||
const float newHalfHeight = halfHeight * MAGNIFY_MULT;
|
||||
//Get yaw / pitch value for the corners
|
||||
const glm::vec2 topLeftYawPitch = overlayToSpherical(glm::vec2(magPos.x - newHalfWidth,
|
||||
magPos.y - newHalfHeight));
|
||||
const glm::vec2 bottomRightYawPitch = overlayToSpherical(glm::vec2(magPos.x + newHalfWidth,
|
||||
magPos.y + newHalfHeight));
|
||||
|
||||
const glm::vec3 bottomLeft = getPoint(topLeftYawPitch.x, bottomRightYawPitch.y);
|
||||
const glm::vec3 bottomRight = getPoint(bottomRightYawPitch.x, bottomRightYawPitch.y);
|
||||
const glm::vec3 topLeft = getPoint(topLeftYawPitch.x, topLeftYawPitch.y);
|
||||
const glm::vec3 topRight = getPoint(bottomRightYawPitch.x, topLeftYawPitch.y);
|
||||
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
||||
if (bottomLeft != _previousMagnifierBottomLeft || bottomRight != _previousMagnifierBottomRight
|
||||
|| topLeft != _previousMagnifierTopLeft || topRight != _previousMagnifierTopRight) {
|
||||
QVector<glm::vec3> border;
|
||||
border << topLeft;
|
||||
border << bottomLeft;
|
||||
border << bottomRight;
|
||||
border << topRight;
|
||||
border << topLeft;
|
||||
geometryCache->updateVertices(_magnifierBorder, border, glm::vec4(1.0f, 0.0f, 0.0f, _alpha));
|
||||
|
||||
_previousMagnifierBottomLeft = bottomLeft;
|
||||
_previousMagnifierBottomRight = bottomRight;
|
||||
_previousMagnifierTopLeft = topLeft;
|
||||
_previousMagnifierTopRight = topRight;
|
||||
}
|
||||
|
||||
glPushMatrix(); {
|
||||
if (showBorder) {
|
||||
glDisable(GL_TEXTURE_2D);
|
||||
glLineWidth(1.0f);
|
||||
//Outer Line
|
||||
geometryCache->renderVertices(gpu::LINE_STRIP, _magnifierBorder);
|
||||
glEnable(GL_TEXTURE_2D);
|
||||
}
|
||||
glm::vec4 magnifierColor = { 1.0f, 1.0f, 1.0f, _alpha };
|
||||
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(bottomLeft, bottomRight, topRight, topLeft,
|
||||
glm::vec2(magnifyULeft, magnifyVBottom),
|
||||
glm::vec2(magnifyURight, magnifyVBottom),
|
||||
glm::vec2(magnifyURight, magnifyVTop),
|
||||
glm::vec2(magnifyULeft, magnifyVTop),
|
||||
magnifierColor, _magnifierQuad);
|
||||
|
||||
} glPopMatrix();
|
||||
}
|
||||
|
||||
void ApplicationCompositor::buildHemiVertices(
|
||||
const float fov, const float aspectRatio, const int slices, const int stacks) {
|
||||
static float textureFOV = 0.0f, textureAspectRatio = 1.0f;
|
||||
|
|
|
@@ -77,7 +77,6 @@ private:
    void updateTooltips();

    void renderPointers(gpu::Batch& batch);
    void renderMagnifier(gpu::Batch& batch, const glm::vec2& magPos, float sizeMult, bool showBorder);
    void renderControllerPointers(gpu::Batch& batch);
    void renderPointersOculus(gpu::Batch& batch);

@ -27,7 +27,6 @@
|
|||
#include <PerfStat.h>
|
||||
|
||||
#include "AudioClient.h"
|
||||
#include "audio/AudioIOStatsRenderer.h"
|
||||
#include "audio/AudioScope.h"
|
||||
#include "Application.h"
|
||||
#include "ApplicationOverlay.h"
|
||||
|
@ -41,12 +40,6 @@ const float CONNECTION_STATUS_BORDER_LINE_WIDTH = 4.0f;
|
|||
static const float ORTHO_NEAR_CLIP = -10000;
|
||||
static const float ORTHO_FAR_CLIP = 10000;
|
||||
|
||||
// TODO move somewhere useful
|
||||
static void fboViewport(QOpenGLFramebufferObject* fbo) {
|
||||
auto size = fbo->size();
|
||||
glViewport(0, 0, size.width(), size.height());
|
||||
}
|
||||
|
||||
ApplicationOverlay::ApplicationOverlay()
|
||||
{
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
@ -83,62 +76,84 @@ void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
|
|||
AvatarInputs::getInstance()->update();
|
||||
|
||||
buildFramebufferObject();
|
||||
|
||||
if (!_overlayFramebuffer) {
|
||||
return; // we can't do anything without our frame buffer.
|
||||
}
|
||||
|
||||
// Execute the batch into our framebuffer
|
||||
_overlayFramebuffer->bind();
|
||||
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
||||
fboViewport(_overlayFramebuffer);
|
||||
gpu::Batch batch;
|
||||
renderArgs->_batch = &batch;
|
||||
|
||||
int width = _overlayFramebuffer->getWidth();
|
||||
int height = _overlayFramebuffer->getHeight();
|
||||
|
||||
batch.setViewportTransform(glm::ivec4(0, 0, width, height));
|
||||
batch.setFramebuffer(_overlayFramebuffer);
|
||||
|
||||
glm::vec4 color { 0.0f, 0.0f, 0.0f, 0.0f };
|
||||
float depth = 1.0f;
|
||||
int stencil = 0;
|
||||
batch.clearFramebuffer(gpu::Framebuffer::BUFFER_COLOR0 | gpu::Framebuffer::BUFFER_DEPTH, color, depth, stencil);
|
||||
|
||||
// Now render the overlay components together into a single texture
|
||||
renderOverlays(renderArgs);
|
||||
renderStatsAndLogs(renderArgs);
|
||||
renderDomainConnectionStatusBorder(renderArgs);
|
||||
renderQmlUi(renderArgs);
|
||||
_overlayFramebuffer->release();
|
||||
renderOverlays(renderArgs); // renders Scripts Overlay and AudioScope
|
||||
renderStatsAndLogs(renderArgs); // currently renders nothing
|
||||
renderDomainConnectionStatusBorder(renderArgs); // renders the connected domain line
|
||||
renderQmlUi(renderArgs); // renders a unit quad with the QML UI texture
|
||||
|
||||
renderArgs->_context->syncCache();
|
||||
renderArgs->_context->render(batch);
|
||||
|
||||
renderArgs->_batch = nullptr; // so future users of renderArgs don't try to use our batch
|
||||
|
||||
CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
void ApplicationOverlay::renderQmlUi(RenderArgs* renderArgs) {
|
||||
PROFILE_RANGE(__FUNCTION__);
|
||||
if (_uiTexture) {
|
||||
gpu::Batch batch;
|
||||
gpu::Batch& batch = *renderArgs->_batch;
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
||||
geometryCache->useSimpleDrawPipeline(batch);
|
||||
batch.setProjectionTransform(mat4());
|
||||
batch.setModelTransform(mat4());
|
||||
batch.setModelTransform(Transform());
|
||||
batch.setViewTransform(Transform());
|
||||
batch._glBindTexture(GL_TEXTURE_2D, _uiTexture);
|
||||
|
||||
geometryCache->renderUnitQuad(batch, glm::vec4(1));
|
||||
renderArgs->_context->syncCache();
|
||||
renderArgs->_context->render(batch);
|
||||
}
|
||||
}
|
||||
|
||||
void ApplicationOverlay::renderOverlays(RenderArgs* renderArgs) {
|
||||
PROFILE_RANGE(__FUNCTION__);
|
||||
glm::vec2 size = qApp->getCanvasSize();
|
||||
|
||||
mat4 legacyProjection = glm::ortho<float>(0, size.x, size.y, 0, ORTHO_NEAR_CLIP, ORTHO_FAR_CLIP);
|
||||
glMatrixMode(GL_PROJECTION);
|
||||
glPushMatrix();
|
||||
glLoadMatrixf(glm::value_ptr(legacyProjection));
|
||||
glMatrixMode(GL_MODELVIEW);
|
||||
gpu::Batch& batch = *renderArgs->_batch;
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
geometryCache->useSimpleDrawPipeline(batch);
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
batch.setResourceTexture(0, textureCache->getWhiteTexture());
|
||||
int width = renderArgs->_viewport.z;
|
||||
int height = renderArgs->_viewport.w;
|
||||
mat4 legacyProjection = glm::ortho<float>(0, width, height, 0, -1000, 1000);
|
||||
batch.setProjectionTransform(legacyProjection);
|
||||
batch.setModelTransform(Transform());
|
||||
batch.setViewTransform(Transform());
|
||||
batch._glLineWidth(1.0f); // default
|
||||
|
||||
{
|
||||
// Render the audio scope
|
||||
//int width = _overlayFramebuffer ? _overlayFramebuffer->getWidth() : 0;
|
||||
//int height = _overlayFramebuffer ? _overlayFramebuffer->getHeight() : 0;
|
||||
DependencyManager::get<AudioScope>()->render(renderArgs, width, height);
|
||||
|
||||
glDisable(GL_DEPTH_TEST);
|
||||
glDisable(GL_LIGHTING);
|
||||
glEnable(GL_BLEND);
|
||||
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
|
||||
|
||||
// give external parties a change to hook in
|
||||
emit qApp->renderingOverlay();
|
||||
qApp->getOverlays().renderHUD(renderArgs);
|
||||
|
||||
DependencyManager::get<AudioScope>()->render(renderArgs, _overlayFramebuffer->size().width(), _overlayFramebuffer->size().height());
|
||||
|
||||
glMatrixMode(GL_PROJECTION);
|
||||
glPopMatrix();
|
||||
glMatrixMode(GL_MODELVIEW);
|
||||
|
||||
fboViewport(_overlayFramebuffer);
|
||||
// Render all of the Script based "HUD" aka 2D overlays.
|
||||
// note: we call them HUD, as opposed to 2D, only because there are some cases of 3D HUD overlays, like the
|
||||
// cameral controls for the edit.js
|
||||
qApp->getOverlays().renderHUD(renderArgs);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
void ApplicationOverlay::renderRearViewToFbo(RenderArgs* renderArgs) {
|
||||
|
@ -148,6 +163,7 @@ void ApplicationOverlay::renderRearView(RenderArgs* renderArgs) {
|
|||
}
|
||||
|
||||
void ApplicationOverlay::renderStatsAndLogs(RenderArgs* renderArgs) {
|
||||
|
||||
// Display stats and log text onscreen
|
||||
|
||||
// Determine whether to compute timing details
|
||||
|
@ -164,19 +180,6 @@ void ApplicationOverlay::renderStatsAndLogs(RenderArgs* renderArgs) {
|
|||
drawText(canvasSize.x - 100, canvasSize.y - timerBottom,
|
||||
0.30f, 0.0f, 0, frameTimer.toUtf8().constData(), WHITE_TEXT);
|
||||
}
|
||||
|
||||
glPointSize(1.0f);
|
||||
glDisable(GL_DEPTH_TEST);
|
||||
glDisable(GL_LIGHTING);
|
||||
glEnable(GL_BLEND);
|
||||
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
|
||||
NodeBounds& nodeBoundsDisplay = qApp->getNodeBoundsDisplay();
|
||||
nodeBoundsDisplay.drawOverlay();
|
||||
glEnable(GL_DEPTH_TEST);
|
||||
glEnable(GL_LIGHTING);
|
||||
glEnable(GL_BLEND);
|
||||
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
|
||||
fboViewport(_overlayFramebuffer);
|
||||
*/
|
||||
}
|
||||
|
||||
|
@ -195,12 +198,12 @@ void ApplicationOverlay::renderDomainConnectionStatusBorder(RenderArgs* renderAr
|
|||
});
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
if (nodeList && !nodeList->getDomainHandler().isConnected()) {
|
||||
gpu::Batch batch;
|
||||
gpu::Batch& batch = *renderArgs->_batch;
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
geometryCache->useSimpleDrawPipeline(batch);
|
||||
batch.setProjectionTransform(mat4());
|
||||
batch.setModelTransform(mat4());
|
||||
batch.setUniformTexture(0, DependencyManager::get<TextureCache>()->getWhiteTexture());
|
||||
batch.setResourceTexture(0, DependencyManager::get<TextureCache>()->getWhiteTexture());
|
||||
batch._glLineWidth(CONNECTION_STATUS_BORDER_LINE_WIDTH);
|
||||
|
||||
// TODO animate the disconnect border for some excitement while not connected?
|
||||
|
@ -210,37 +213,40 @@ void ApplicationOverlay::renderDomainConnectionStatusBorder(RenderArgs* renderAr
|
|||
//batch.setModelTransform(glm::scale(mat4(), vec3(scaleAmount)));
|
||||
|
||||
geometryCache->renderVertices(batch, gpu::LINE_STRIP, _domainStatusBorder);
|
||||
renderArgs->_context->syncCache();
|
||||
renderArgs->_context->render(batch);
|
||||
}
|
||||
}
|
||||
|
||||
GLuint ApplicationOverlay::getOverlayTexture() {
|
||||
if (!_overlayFramebuffer) {
|
||||
return 0;
|
||||
}
|
||||
return _overlayFramebuffer->texture();
|
||||
}
|
||||
|
||||
void ApplicationOverlay::buildFramebufferObject() {
|
||||
PROFILE_RANGE(__FUNCTION__);
|
||||
QSize fboSize = qApp->getDeviceSize();
|
||||
if (_overlayFramebuffer && fboSize == _overlayFramebuffer->size()) {
|
||||
|
||||
QSize desiredSize = qApp->getDeviceSize();
|
||||
int currentWidth = _overlayFramebuffer ? _overlayFramebuffer->getWidth() : 0;
|
||||
int currentHeight = _overlayFramebuffer ? _overlayFramebuffer->getHeight() : 0;
|
||||
QSize frameBufferCurrentSize(currentWidth, currentHeight);
|
||||
|
||||
if (_overlayFramebuffer && desiredSize == frameBufferCurrentSize) {
|
||||
// Already built
|
||||
return;
|
||||
}
|
||||
|
||||
if (_overlayFramebuffer) {
|
||||
delete _overlayFramebuffer;
|
||||
_overlayFramebuffer.reset();
|
||||
_overlayDepthTexture.reset();
|
||||
_overlayColorTexture.reset();
|
||||
}
|
||||
|
||||
_overlayFramebuffer = new QOpenGLFramebufferObject(fboSize, QOpenGLFramebufferObject::Depth);
|
||||
glBindTexture(GL_TEXTURE_2D, getOverlayTexture());
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER);
|
||||
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER);
|
||||
GLfloat borderColor[4] = { 0.0f, 0.0f, 0.0f, 0.0f };
|
||||
glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, borderColor);
|
||||
glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
_overlayFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
|
||||
|
||||
auto colorFormat = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA);
|
||||
auto width = desiredSize.width();
|
||||
auto height = desiredSize.height();
|
||||
|
||||
auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR);
|
||||
_overlayColorTexture = gpu::TexturePointer(gpu::Texture::create2D(colorFormat, width, height, defaultSampler));
|
||||
_overlayFramebuffer->setRenderBuffer(0, _overlayColorTexture);
|
||||
|
||||
auto depthFormat = gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::DEPTH);
|
||||
_overlayDepthTexture = gpu::TexturePointer(gpu::Texture::create2D(depthFormat, width, height, defaultSampler));
|
||||
|
||||
_overlayFramebuffer->setDepthStencilBuffer(_overlayDepthTexture, depthFormat);
|
||||
}
|
||||
|
|
|
@@ -25,7 +25,8 @@ public:
    ~ApplicationOverlay();

    void renderOverlay(RenderArgs* renderArgs);
    GLuint getOverlayTexture();

    gpu::FramebufferPointer getOverlayFramebuffer() const { return _overlayFramebuffer; }

private:
    void renderStatsAndLogs(RenderArgs* renderArgs);
@@ -44,7 +45,11 @@ private:
    int _magnifierBorder;

    ivec2 _previousBorderSize{ -1 };
    QOpenGLFramebufferObject* _overlayFramebuffer{ nullptr };

    gpu::TexturePointer _overlayDepthTexture;
    gpu::TexturePointer _overlayColorTexture;
    gpu::FramebufferPointer _overlayFramebuffer;

};

#endif // hifi_ApplicationOverlay_h
269 interface/src/ui/AudioStatsDialog.cpp Normal file
|
@ -0,0 +1,269 @@
|
|||
//
|
||||
// AudioStatsDialog.cpp
|
||||
// interface/src/ui
|
||||
//
|
||||
// Created by Bridget Went on 7/9/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <cstdio>
|
||||
|
||||
#include "InterfaceConfig.h"
|
||||
|
||||
#include <AudioClient.h>
|
||||
#include <AudioConstants.h>
|
||||
#include <AudioIOStats.h>
|
||||
#include <DependencyManager.h>
|
||||
#include <GeometryCache.h>
|
||||
#include <NodeList.h>
|
||||
#include <Util.h>
|
||||
|
||||
|
||||
#include "AudioStatsDialog.h"
|
||||
|
||||
const unsigned COLOR0 = 0x33cc99ff;
|
||||
const unsigned COLOR1 = 0xffef40c0;
|
||||
const unsigned COLOR2 = 0xd0d0d0a0;
|
||||
const unsigned COLOR3 = 0x01DD7880;
|
||||
|
||||
|
||||
AudioStatsDisplay::AudioStatsDisplay(QFormLayout* form,
|
||||
QString text, unsigned colorRGBA) :
|
||||
_text(text),
|
||||
_colorRGBA(colorRGBA)
|
||||
{
|
||||
_label = new QLabel();
|
||||
_label->setAlignment(Qt::AlignCenter);
|
||||
|
||||
QPalette palette = _label->palette();
|
||||
unsigned rgb = colorRGBA >> 8;
|
||||
rgb = ((rgb & 0xfefefeu) >> 1) + ((rgb & 0xf8f8f8) >> 3);
|
||||
palette.setColor(QPalette::WindowText, QColor::fromRgb(rgb));
|
||||
_label->setPalette(palette);
|
||||
|
||||
form->addRow(_label);
|
||||
}
|
||||
|
||||
void AudioStatsDisplay::paint() {
|
||||
_label->setText(_strBuf);
|
||||
}
|
||||
|
||||
void AudioStatsDisplay::updatedDisplay(QString str) {
|
||||
_strBuf = str;
|
||||
}
|
||||
|
||||
|
||||
AudioStatsDialog::AudioStatsDialog(QWidget* parent) :
|
||||
QDialog(parent, Qt::Window | Qt::WindowCloseButtonHint | Qt::WindowStaysOnTopHint) {
|
||||
|
||||
_shouldShowInjectedStreams = false;
|
||||
|
||||
setWindowTitle("Audio Network Statistics");
|
||||
|
||||
// Get statistics from the Audio Client
|
||||
_stats = &DependencyManager::get<AudioClient>()->getStats();
|
||||
|
||||
// Create layouter
|
||||
_form = new QFormLayout();
|
||||
QDialog::setLayout(_form);
|
||||
|
||||
// Load and initilize all channels
|
||||
renderStats();
|
||||
|
||||
_audioDisplayChannels = QVector<QVector<AudioStatsDisplay*>>(1);
|
||||
|
||||
_audioMixerID = addChannel(_form, _audioMixerStats, COLOR0);
|
||||
_upstreamClientID = addChannel(_form, _upstreamClientStats, COLOR1);
|
||||
_upstreamMixerID = addChannel(_form, _upstreamMixerStats, COLOR2);
|
||||
_downstreamID = addChannel(_form, _downstreamStats, COLOR3);
|
||||
_upstreamInjectedID = addChannel(_form, _upstreamInjectedStats, COLOR0);
|
||||
|
||||
|
||||
connect(averageUpdateTimer, SIGNAL(timeout()), this, SLOT(updateTimerTimeout()));
|
||||
averageUpdateTimer->start(1000);
|
||||
|
||||
}
|
||||
|
||||
int AudioStatsDialog::addChannel(QFormLayout* form, QVector<QString>& stats, const unsigned color) {
|
||||
|
||||
int channelID = _audioDisplayChannels.size() - 1;
|
||||
|
||||
for (int i = 0; i < stats.size(); i++)
|
||||
// Create new display label
|
||||
_audioDisplayChannels[channelID].push_back(new AudioStatsDisplay(form, stats.at(i), color));
|
||||
|
||||
// Expand vector to fit next channel
|
||||
_audioDisplayChannels.resize(_audioDisplayChannels.size() + 1);
|
||||
|
||||
return channelID;
|
||||
}
|
||||
|
||||
void AudioStatsDialog::updateStats(QVector<QString>& stats, int channelID) {
|
||||
// Update all stat displays at specified channel
|
||||
for (int i = 0; i < stats.size(); i++)
|
||||
_audioDisplayChannels[channelID].at(i)->updatedDisplay(stats.at(i));
|
||||
}
|
||||
|
||||
void AudioStatsDialog::renderStats() {
|
||||
|
||||
// Clear current stats from all vectors
|
||||
clearAllChannels();
|
||||
|
||||
double audioInputBufferLatency = 0.0,
|
||||
inputRingBufferLatency = 0.0,
|
||||
networkRoundtripLatency = 0.0,
|
||||
mixerRingBufferLatency = 0.0,
|
||||
outputRingBufferLatency = 0.0,
|
||||
audioOutputBufferLatency = 0.0;
|
||||
|
||||
AudioStreamStats downstreamAudioStreamStats = _stats->getMixerDownstreamStats();
|
||||
SharedNodePointer audioMixerNodePointer = DependencyManager::get<NodeList>()->soloNodeOfType(NodeType::AudioMixer);
|
||||
|
||||
if (!audioMixerNodePointer.isNull()) {
|
||||
audioInputBufferLatency = (double)_stats->getAudioInputMsecsReadStats().getWindowAverage();
|
||||
inputRingBufferLatency = (double)_stats->getInputRungBufferMsecsAvailableStats().getWindowAverage();
|
||||
networkRoundtripLatency = (double) audioMixerNodePointer->getPingMs();
|
||||
mixerRingBufferLatency = (double)_stats->getMixerAvatarStreamStats()._framesAvailableAverage * AudioConstants::NETWORK_FRAME_MSECS;
|
||||
outputRingBufferLatency = (double)downstreamAudioStreamStats._framesAvailableAverage * AudioConstants::NETWORK_FRAME_MSECS;
|
||||
audioOutputBufferLatency = (double)_stats->getAudioOutputMsecsUnplayedStats().getWindowAverage();
|
||||
}
|
||||
|
||||
double totalLatency = audioInputBufferLatency + inputRingBufferLatency + networkRoundtripLatency + mixerRingBufferLatency
|
||||
+ outputRingBufferLatency + audioOutputBufferLatency;
|
||||
|
||||
_audioMixerStats.push_back(QString("Audio input buffer: %1ms").arg(
|
||||
QString::number(audioInputBufferLatency, 'f', 2)) + QString(" - avg msecs of samples read to the audio input buffer in last 10s"));
|
||||
|
||||
_audioMixerStats.push_back(QString("Input ring buffer: %1ms").arg(
|
||||
QString::number(inputRingBufferLatency, 'f', 2)) + QString(" - avg msecs of samples read to the input ring buffer in last 10s"));
|
||||
_audioMixerStats.push_back(QString("Network to mixer: %1ms").arg(
|
||||
QString::number((networkRoundtripLatency / 2.0), 'f', 2)) + QString(" - half of last ping value calculated by the node list"));
|
||||
_audioMixerStats.push_back(QString("Network to client: %1ms").arg(
|
||||
QString::number((mixerRingBufferLatency / 2.0),'f', 2)) + QString(" - half of last ping value calculated by the node list"));
|
||||
_audioMixerStats.push_back(QString("Output ring buffer: %1ms").arg(
|
||||
QString::number(outputRingBufferLatency,'f', 2)) + QString(" - avg msecs of samples in output ring buffer in last 10s"));
|
||||
_audioMixerStats.push_back(QString("Audio output buffer: %1ms").arg(
|
||||
QString::number(mixerRingBufferLatency,'f', 2)) + QString(" - avg msecs of samples in audio output buffer in last 10s"));
|
||||
_audioMixerStats.push_back(QString("TOTAL: %1ms").arg(
|
||||
QString::number(totalLatency, 'f', 2)) +QString(" - avg msecs of samples in audio output buffer in last 10s"));
|
||||
|
||||
|
||||
const MovingMinMaxAvg<quint64>& packetSentTimeGaps = _stats->getPacketSentTimeGaps();
|
||||
|
||||
_upstreamClientStats.push_back(
|
||||
QString("\nUpstream Mic Audio Packets Sent Gaps (by client):"));
|
||||
|
||||
_upstreamClientStats.push_back(
|
||||
QString("Inter-packet timegaps (overall) | min: %1, max: %2, avg: %3").arg(formatUsecTime(packetSentTimeGaps.getMin()).toLatin1().data()).arg(formatUsecTime( packetSentTimeGaps.getMax()).toLatin1().data()).arg(formatUsecTime( packetSentTimeGaps.getAverage()).toLatin1().data()));
|
||||
_upstreamClientStats.push_back(
|
||||
QString("Inter-packet timegaps (last 30s) | min: %1, max: %2, avg: %3").arg(formatUsecTime(packetSentTimeGaps.getWindowMin()).toLatin1().data()).arg(formatUsecTime(packetSentTimeGaps.getWindowMax()).toLatin1().data()).arg(formatUsecTime(packetSentTimeGaps.getWindowAverage()).toLatin1().data()));
|
||||
|
||||
_upstreamMixerStats.push_back(QString("\nUpstream mic audio stats (received and reported by audio-mixer):"));
|
||||
|
||||
renderAudioStreamStats(&_stats->getMixerAvatarStreamStats(), &_upstreamMixerStats, true);
|
||||
|
||||
_downstreamStats.push_back(QString("\nDownstream mixed audio stats:"));
|
||||
|
||||
AudioStreamStats downstreamStats = _stats->getMixerDownstreamStats();
|
||||
|
||||
renderAudioStreamStats(&downstreamStats, &_downstreamStats, true);
|
||||
|
||||
|
||||
if (_shouldShowInjectedStreams) {
|
||||
|
||||
foreach(const AudioStreamStats& injectedStreamAudioStats, _stats->getMixerInjectedStreamStatsMap()) {
|
||||
|
||||
_upstreamInjectedStats.push_back(QString("\nUpstream injected audio stats: stream ID: %1").arg( injectedStreamAudioStats._streamIdentifier.toString().toLatin1().data()));
|
||||
|
||||
renderAudioStreamStats(&injectedStreamAudioStats, &_upstreamInjectedStats, true);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void AudioStatsDialog::renderAudioStreamStats(const AudioStreamStats* streamStats, QVector<QString>* audioStreamStats, bool isDownstreamStats) {

    audioStreamStats->push_back(
        QString("Packet loss | overall: %1% (%2 lost), last_30s: %3% (%4 lost)").arg(QString::number((int)(streamStats->_packetStreamStats.getLostRate() * 100.0f))).arg(QString::number((int)(streamStats->_packetStreamStats._lost))).arg(QString::number((int)(streamStats->_packetStreamWindowStats.getLostRate() * 100.0f))).arg(QString::number((int)(streamStats->_packetStreamWindowStats._lost)))
    );

    if (isDownstreamStats) {
        audioStreamStats->push_back(
            QString("Ringbuffer frames | desired: %1, avg_available(10s): %2 + %3, available: %4+%5").arg(QString::number(streamStats->_desiredJitterBufferFrames)).arg(QString::number(streamStats->_framesAvailableAverage)).arg(QString::number((int)((float)_stats->getAudioInputMsecsReadStats().getWindowAverage() / AudioConstants::NETWORK_FRAME_MSECS))).arg(QString::number(streamStats->_framesAvailable)).arg(QString::number((int)(_stats->getAudioOutputMsecsUnplayedStats().getCurrentIntervalLastSample() / AudioConstants::NETWORK_FRAME_MSECS))));
    } else {
        audioStreamStats->push_back(
            QString("Ringbuffer frames | desired: %1, avg_available(10s): %2, available: %3").arg(QString::number(streamStats->_desiredJitterBufferFrames)).arg(QString::number(streamStats->_framesAvailableAverage)).arg(QString::number(streamStats->_framesAvailable)));
    }

    audioStreamStats->push_back(
        QString("Ringbuffer stats | starves: %1, prev_starve_lasted: %2, frames_dropped: %3, overflows: %4").arg(QString::number(streamStats->_starveCount)).arg(QString::number(streamStats->_consecutiveNotMixedCount)).arg(QString::number(streamStats->_framesDropped)).arg(QString::number(streamStats->_overflowCount)));
    audioStreamStats->push_back(
        QString("Inter-packet timegaps (overall) | min: %1, max: %2, avg: %3").arg(formatUsecTime(streamStats->_timeGapMin).toLatin1().data()).arg(formatUsecTime(streamStats->_timeGapMax).toLatin1().data()).arg(formatUsecTime(streamStats->_timeGapAverage).toLatin1().data()));
    audioStreamStats->push_back(
        QString("Inter-packet timegaps (last 30s) | min: %1, max: %2, avg: %3").arg(formatUsecTime(streamStats->_timeGapWindowMin).toLatin1().data()).arg(formatUsecTime(streamStats->_timeGapWindowMax).toLatin1().data()).arg(QString::number(streamStats->_timeGapWindowAverage).toLatin1().data()));

}
void AudioStatsDialog::clearAllChannels() {
    _audioMixerStats.clear();
    _upstreamClientStats.clear();
    _upstreamMixerStats.clear();
    _downstreamStats.clear();
    _upstreamInjectedStats.clear();
}

void AudioStatsDialog::updateTimerTimeout() {

    renderStats();

    // Update all audio stats
    updateStats(_audioMixerStats, _audioMixerID);
    updateStats(_upstreamClientStats, _upstreamClientID);
    updateStats(_upstreamMixerStats, _upstreamMixerID);
    updateStats(_downstreamStats, _downstreamID);
    updateStats(_upstreamInjectedStats, _upstreamInjectedID);

}

void AudioStatsDialog::paintEvent(QPaintEvent* event) {

    // Repaint each stat in each channel
    for (int i = 0; i < _audioDisplayChannels.size(); i++) {
        for (int j = 0; j < _audioDisplayChannels[i].size(); j++) {
            _audioDisplayChannels[i].at(j)->paint();
        }
    }

    QDialog::paintEvent(event);
    setFixedSize(width(), height());
}

void AudioStatsDialog::reject() {
    // Just regularly close upon ESC
    QDialog::close();
}

void AudioStatsDialog::closeEvent(QCloseEvent* event) {
    QDialog::closeEvent(event);
    emit closed();
}

AudioStatsDialog::~AudioStatsDialog() {
    clearAllChannels();
    for (int i = 0; i < _audioDisplayChannels.size(); i++) {
        _audioDisplayChannels[i].clear();
        for (int j = 0; j < _audioDisplayChannels[i].size(); j++) {
            delete _audioDisplayChannels[i].at(j);
        }
    }

}

113
interface/src/ui/AudioStatsDialog.h
Normal file
113
interface/src/ui/AudioStatsDialog.h
Normal file
|
@ -0,0 +1,113 @@
//
//  AudioStatsDialog.h
//  hifi
//
//  Created by Bridget Went on 7/9/15.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef __hifi__AudioStatsDialog__
#define __hifi__AudioStatsDialog__

#include <stdio.h>

#include <QDialog>
#include <QLabel>
#include <QFormLayout>
#include <QVector>
#include <QTimer>
#include <QString>

#include <QObject>

#include <DependencyManager.h>

class AudioIOStats;
class AudioStreamStats;

//display
class AudioStatsDisplay : public QObject, public Dependency {
    Q_OBJECT
    SINGLETON_DEPENDENCY
public:
    AudioStatsDisplay(QFormLayout* form, QString text, unsigned colorRGBA);
    void updatedDisplay(QString str);
    void paint();

private:
    QString _strBuf;
    QLabel* _label;
    QString _text;
    unsigned _colorRGBA;

};

//dialog
class AudioStatsDialog : public QDialog {
    Q_OBJECT
public:
    AudioStatsDialog(QWidget* parent);
    ~AudioStatsDialog();

    void paintEvent(QPaintEvent*);

private:
    // audio stats methods for rendering
    QVector<QString> _audioMixerStats;
    QVector<QString> _upstreamClientStats;
    QVector<QString> _upstreamMixerStats;
    QVector<QString> _downstreamStats;
    QVector<QString> _upstreamInjectedStats;

    int _audioMixerID;
    int _upstreamClientID;
    int _upstreamMixerID;
    int _downstreamID;
    int _upstreamInjectedID;

    QVector<QVector<AudioStatsDisplay*>> _audioDisplayChannels;

    int addChannel(QFormLayout* form, QVector<QString>& stats, const unsigned color);
    void updateStats(QVector<QString>& stats, const int channelID);
    void renderStats();
    void clearAllChannels();
    void renderAudioStreamStats(const AudioStreamStats* streamStats, QVector<QString>* audioStreamstats, bool isDownstreamStats);

    const AudioIOStats* _stats;
    QFormLayout* _form;

    bool _isEnabled;
    bool _shouldShowInjectedStreams;

signals:

    void closed();

public slots:

    void reject();
    void updateTimerTimeout();

protected:

    // Emits a 'closed' signal when this dialog is closed.
    void closeEvent(QCloseEvent*);

private:
    QTimer* averageUpdateTimer = new QTimer(this);

};


#endif /* defined(__hifi__AudioStatsDialog__) */
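A rough sketch of how the declarations above fit together (the real wiring lives in the AudioStatsDialog.cpp constructor, which is not part of this hunk; the 200ms interval and the COLOR_* constants below are assumptions, not taken from the commit):

    // Sketch only: each QVector<QString> of stat strings becomes one channel of
    // AudioStatsDisplay rows in the form, and the QTimer periodically refreshes them.
    _audioMixerID = addChannel(_form, _audioMixerStats, COLOR_MIXER);      // hypothetical color constant
    _downstreamID = addChannel(_form, _downstreamStats, COLOR_DOWNSTREAM); // one call per stat group
    connect(averageUpdateTimer, SIGNAL(timeout()), this, SLOT(updateTimerTimeout()));
    averageUpdateTimer->start(200); // assumed interval; updateTimerTimeout() calls renderStats() then updateStats()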
@ -114,6 +114,20 @@ void DialogsManager::editAnimations() {
    }
}

void DialogsManager::audioStatsDetails() {
    if (! _audioStatsDialog) {
        _audioStatsDialog = new AudioStatsDialog(qApp->getWindow());
        connect(_audioStatsDialog, SIGNAL(closed()), _audioStatsDialog, SLOT(deleteLater()));

        if (_hmdToolsDialog) {
            _hmdToolsDialog->watchWindow(_audioStatsDialog->windowHandle());
        }

        _audioStatsDialog->show();
    }
    _audioStatsDialog->raise();
}

void DialogsManager::bandwidthDetails() {
    if (! _bandwidthDialog) {
        _bandwidthDialog = new BandwidthDialog(qApp->getWindow());
@ -24,6 +24,7 @@ class QAction;
class AddressBarDialog;
class AnimationsDialog;
class AttachmentsDialog;
class AudioStatsDialog;
class BandwidthDialog;
class CachesSizeDialog;
class DiskCacheEditor;
@ -42,6 +43,7 @@ class DialogsManager : public QObject, public Dependency {
    SINGLETON_DEPENDENCY

public:
    QPointer<AudioStatsDialog> getAudioStatsDialog() const { return _audioStatsDialog; }
    QPointer<BandwidthDialog> getBandwidthDialog() const { return _bandwidthDialog; }
    QPointer<HMDToolsDialog> getHMDToolsDialog() const { return _hmdToolsDialog; }
    QPointer<LodToolsDialog> getLodToolsDialog() const { return _lodToolsDialog; }
@ -58,6 +60,7 @@ public slots:
    void editPreferences();
    void editAttachments();
    void editAnimations();
    void audioStatsDetails();
    void bandwidthDetails();
    void lodTools();
    void hmdTools(bool showTools);
@ -93,6 +96,7 @@ private:
    QPointer<AddressBarDialog> _addressBarDialog;
    QPointer<AnimationsDialog> _animationsDialog;
    QPointer<AttachmentsDialog> _attachmentsDialog;
    QPointer<AudioStatsDialog> _audioStatsDialog;
    QPointer<BandwidthDialog> _bandwidthDialog;
    QPointer<CachesSizeDialog> _cachesSizeDialog;
    QPointer<DiskCacheEditor> _diskCacheEditor;
@ -1,183 +0,0 @@
|
|||
//
|
||||
// NodeBounds.cpp
|
||||
// interface/src/ui
|
||||
//
|
||||
// Created by Ryan Huffman on 05/14/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// This class draws a border around the different Entity nodes on the current domain,
|
||||
// and a semi-transparent cube around the currently mouse-overed node.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <DependencyManager.h>
|
||||
#include <GeometryCache.h>
|
||||
|
||||
#include "Application.h"
|
||||
#include "Util.h"
|
||||
|
||||
#include "NodeBounds.h"
|
||||
|
||||
NodeBounds::NodeBounds(QObject* parent) :
|
||||
QObject(parent),
|
||||
_showEntityNodes(false),
|
||||
_overlayText() {
|
||||
|
||||
}
|
||||
|
||||
void NodeBounds::draw() {
|
||||
if (!_showEntityNodes) {
|
||||
_overlayText[0] = '\0';
|
||||
return;
|
||||
}
|
||||
|
||||
NodeToJurisdictionMap& entityServerJurisdictions = Application::getInstance()->getEntityServerJurisdictions();
|
||||
NodeToJurisdictionMap* serverJurisdictions;
|
||||
|
||||
// Compute ray to find selected nodes later on. We can't use the pre-computed ray in Application because it centers
|
||||
// itself after the cursor disappears.
|
||||
PickRay pickRay = qApp->computePickRay();
|
||||
|
||||
// Variables to keep track of the selected node and properties to draw the cube later if needed
|
||||
Node* selectedNode = NULL;
|
||||
float selectedDistance = FLT_MAX;
|
||||
bool selectedIsInside = true;
|
||||
glm::vec3 selectedCenter;
|
||||
float selectedScale = 0;
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->eachNode([&](const SharedNodePointer& node){
|
||||
NodeType_t nodeType = node->getType();
|
||||
|
||||
if (nodeType == NodeType::EntityServer && _showEntityNodes) {
|
||||
serverJurisdictions = &entityServerJurisdictions;
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
|
||||
QUuid nodeUUID = node->getUUID();
|
||||
serverJurisdictions->lockForRead();
|
||||
if (serverJurisdictions->find(nodeUUID) != serverJurisdictions->end()) {
|
||||
const JurisdictionMap& map = (*serverJurisdictions)[nodeUUID];
|
||||
|
||||
unsigned char* rootCode = map.getRootOctalCode();
|
||||
|
||||
if (rootCode) {
|
||||
VoxelPositionSize rootDetails;
|
||||
voxelDetailsForCode(rootCode, rootDetails);
|
||||
serverJurisdictions->unlock();
|
||||
glm::vec3 location(rootDetails.x, rootDetails.y, rootDetails.z);
|
||||
|
||||
AACube serverBounds(location, rootDetails.s);
|
||||
|
||||
glm::vec3 center = serverBounds.getVertex(BOTTOM_RIGHT_NEAR)
|
||||
+ ((serverBounds.getVertex(TOP_LEFT_FAR) - serverBounds.getVertex(BOTTOM_RIGHT_NEAR)) / 2.0f);
|
||||
|
||||
const float ENTITY_NODE_SCALE = 0.99f;
|
||||
|
||||
float scaleFactor = rootDetails.s;
|
||||
|
||||
// Scale by 0.92 - 1.00 depending on the scale of the node. This allows smaller nodes to scale in
|
||||
// a bit and not overlap larger nodes.
|
||||
scaleFactor *= 0.92f + (rootDetails.s * 0.08f);
|
||||
|
||||
// Scale different node types slightly differently because it's common for them to overlap.
|
||||
if (nodeType == NodeType::EntityServer) {
|
||||
scaleFactor *= ENTITY_NODE_SCALE;
|
||||
}
|
||||
|
||||
float red, green, blue;
|
||||
getColorForNodeType(nodeType, red, green, blue);
|
||||
drawNodeBorder(center, scaleFactor, red, green, blue);
|
||||
|
||||
float distance;
|
||||
BoxFace face;
|
||||
|
||||
bool inside = serverBounds.contains(pickRay.origin);
|
||||
bool colliding = serverBounds.findRayIntersection(pickRay.origin, pickRay.direction, distance, face);
|
||||
|
||||
// If the camera is inside a node it will be "selected" if you don't have your cursor over another node
|
||||
// that you aren't inside.
|
||||
if (colliding && (!selectedNode || (!inside && (distance < selectedDistance || selectedIsInside)))) {
|
||||
selectedNode = node.data();
|
||||
selectedDistance = distance;
|
||||
selectedIsInside = inside;
|
||||
selectedCenter = center;
|
||||
selectedScale = scaleFactor;
|
||||
}
|
||||
} else {
|
||||
serverJurisdictions->unlock();
|
||||
}
|
||||
} else {
|
||||
serverJurisdictions->unlock();
|
||||
}
|
||||
});
|
||||
|
||||
if (selectedNode) {
|
||||
glPushMatrix();
|
||||
|
||||
glTranslatef(selectedCenter.x, selectedCenter.y, selectedCenter.z);
|
||||
glScalef(selectedScale, selectedScale, selectedScale);
|
||||
|
||||
float red, green, blue;
|
||||
getColorForNodeType(selectedNode->getType(), red, green, blue);
|
||||
|
||||
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, glm::vec4(red, green, blue, 0.2f));
|
||||
|
||||
glPopMatrix();
|
||||
|
||||
HifiSockAddr addr = selectedNode->getPublicSocket();
|
||||
QString overlay = QString("%1:%2 %3ms")
|
||||
.arg(addr.getAddress().toString())
|
||||
.arg(addr.getPort())
|
||||
.arg(selectedNode->getPingMs())
|
||||
.left(MAX_OVERLAY_TEXT_LENGTH);
|
||||
|
||||
// Ideally we'd just use a QString, but I ran into weird blinking issues using
|
||||
// constData() directly, as if the data was being overwritten.
|
||||
strcpy(_overlayText, overlay.toLocal8Bit().constData());
|
||||
} else {
|
||||
_overlayText[0] = '\0';
|
||||
}
|
||||
}
|
||||
|
||||
void NodeBounds::drawNodeBorder(const glm::vec3& center, float scale, float red, float green, float blue) {
|
||||
glPushMatrix();
|
||||
glTranslatef(center.x, center.y, center.z);
|
||||
glScalef(scale, scale, scale);
|
||||
glLineWidth(2.5);
|
||||
DependencyManager::get<GeometryCache>()->renderWireCube(1.0f, glm::vec4(red, green, blue, 1.0f));
|
||||
glPopMatrix();
|
||||
}
|
||||
|
||||
void NodeBounds::getColorForNodeType(NodeType_t nodeType, float& red, float& green, float& blue) {
|
||||
red = nodeType == 0.0;
|
||||
green = 0.0;
|
||||
blue = nodeType == NodeType::EntityServer ? 1.0 : 0.0;
|
||||
}
|
||||
|
||||
void NodeBounds::drawOverlay() {
|
||||
if (strlen(_overlayText) > 0) {
|
||||
Application* application = Application::getInstance();
|
||||
|
||||
const float TEXT_COLOR[] = { 0.90f, 0.90f, 0.90f };
|
||||
const float TEXT_SCALE = 0.1f;
|
||||
const int TEXT_HEIGHT = 10;
|
||||
const float ROTATION = 0.0f;
|
||||
const int FONT = 2;
|
||||
const int PADDING = 10;
|
||||
const int MOUSE_OFFSET = 10;
|
||||
const int BACKGROUND_BEVEL = 3;
|
||||
|
||||
int mouseX = application->getTrueMouseX(),
|
||||
mouseY = application->getTrueMouseY(),
|
||||
textWidth = widthText(TEXT_SCALE, 0, _overlayText);
|
||||
DependencyManager::get<GeometryCache>()->renderBevelCornersRect(
|
||||
mouseX + MOUSE_OFFSET, mouseY - TEXT_HEIGHT - PADDING,
|
||||
textWidth + (2 * PADDING), TEXT_HEIGHT + (2 * PADDING), BACKGROUND_BEVEL,
|
||||
glm::vec4(0.4f, 0.4f, 0.4f, 0.6f));
|
||||
drawText(mouseX + MOUSE_OFFSET + PADDING, mouseY, TEXT_SCALE, ROTATION, FONT, _overlayText, TEXT_COLOR);
|
||||
}
|
||||
}
|
|
@ -1,47 +0,0 @@
|
|||
//
|
||||
// NodeBounds.h
|
||||
// interface/src/ui
|
||||
//
|
||||
// Created by Ryan Huffman on 05/14/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_NodeBounds_h
|
||||
#define hifi_NodeBounds_h
|
||||
|
||||
#include <QObject>
|
||||
|
||||
#include <NodeList.h>
|
||||
|
||||
const int MAX_OVERLAY_TEXT_LENGTH = 64;
|
||||
|
||||
class NodeBounds : public QObject {
|
||||
Q_OBJECT
|
||||
public:
|
||||
NodeBounds(QObject* parent = NULL);
|
||||
|
||||
bool getShowEntityNodes() { return _showEntityNodes; }
|
||||
bool getShowParticleNodes() { return _showParticleNodes; }
|
||||
|
||||
void draw();
|
||||
void drawOverlay();
|
||||
|
||||
public slots:
|
||||
void setShowEntityNodes(bool value) { _showEntityNodes = value; }
|
||||
void setShowParticleNodes(bool value) { _showParticleNodes = value; }
|
||||
|
||||
protected:
|
||||
void drawNodeBorder(const glm::vec3& center, float scale, float red, float green, float blue);
|
||||
void getColorForNodeType(NodeType_t nodeType, float& red, float& green, float& blue);
|
||||
|
||||
private:
|
||||
bool _showEntityNodes;
|
||||
bool _showParticleNodes;
|
||||
char _overlayText[MAX_OVERLAY_TEXT_LENGTH + 1];
|
||||
|
||||
};
|
||||
|
||||
#endif // hifi_NodeBounds_h
|
|
@ -24,9 +24,18 @@ UpdateDialog::UpdateDialog(QQuickItem* parent) :
|
|||
int currentVersion = QCoreApplication::applicationVersion().toInt();
|
||||
int latestVersion = applicationUpdater.data()->getBuildData().lastKey();
|
||||
int versionsBehind = latestVersion - currentVersion;
|
||||
_updateAvailableDetails = "v" + QString::number(latestVersion) + " released on " + applicationUpdater.data()->getBuildData()[latestVersion]["releaseTime"];
|
||||
_updateAvailableDetails += "\nYou are " + QString::number(versionsBehind) + " versions behind";
|
||||
_releaseNotes = applicationUpdater.data()->getBuildData()[latestVersion]["releaseNotes"];
|
||||
_updateAvailableDetails = "v" + QString::number(latestVersion) + " released on "
|
||||
+ QString(applicationUpdater.data()->getBuildData()[latestVersion]["releaseTime"]).replace(" ", " ");
|
||||
_updateAvailableDetails += "\nYou are " + QString::number(versionsBehind) + " version"
|
||||
+ (versionsBehind > 1 ? "s" : "") + " behind";
|
||||
|
||||
_releaseNotes = "";
|
||||
for (int i = latestVersion; i > currentVersion; i--) {
|
||||
QString releaseNotes = applicationUpdater.data()->getBuildData()[i]["releaseNotes"];
|
||||
releaseNotes.remove("<br />");
|
||||
releaseNotes.remove(QRegExp("^\n+"));
|
||||
_releaseNotes += "\n" + QString().sprintf("%d", i) + "\n" + releaseNotes + "\n";
|
||||
}
|
||||
}
|
||||
|
||||
const QString& UpdateDialog::updateAvailableDetails() const {
|
||||
|
|
|
@ -28,12 +28,12 @@ BillboardOverlay::BillboardOverlay(const BillboardOverlay* billboardOverlay) :
|
|||
}
|
||||
|
||||
void BillboardOverlay::render(RenderArgs* args) {
|
||||
if (!_isLoaded) {
|
||||
if (!_texture) {
|
||||
_isLoaded = true;
|
||||
_texture = DependencyManager::get<TextureCache>()->getTexture(_url);
|
||||
}
|
||||
|
||||
if (!_visible || !_texture->isLoaded()) {
|
||||
if (!_visible || !_texture || !_texture->isLoaded()) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -87,12 +87,12 @@ void BillboardOverlay::render(RenderArgs* args) {
|
|||
transform.postScale(glm::vec3(getDimensions(), 1.0f));
|
||||
|
||||
batch->setModelTransform(transform);
|
||||
batch->setUniformTexture(0, _texture->getGPUTexture());
|
||||
batch->setResourceTexture(0, _texture->getGPUTexture());
|
||||
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(*batch, topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight,
|
||||
glm::vec4(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha));
|
||||
|
||||
batch->setUniformTexture(0, args->_whiteTexture); // restore default white color after me
|
||||
batch->setResourceTexture(0, args->_whiteTexture); // restore default white color after me
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -170,7 +170,7 @@ void BillboardOverlay::setBillboardURL(const QString& url) {
|
|||
bool BillboardOverlay::findRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
||||
float& distance, BoxFace& face) {
|
||||
|
||||
if (_texture) {
|
||||
if (_texture && _texture->isLoaded()) {
|
||||
glm::quat rotation = getRotation();
|
||||
if (_isFacingAvatar) {
|
||||
// rotate about vertical to face the camera
|
||||
|
|
|
@ -12,7 +12,6 @@
|
|||
#include "InterfaceConfig.h"
|
||||
|
||||
#include <DeferredLightingEffect.h>
|
||||
#include <GlowEffect.h>
|
||||
#include <SharedUtil.h>
|
||||
#include <StreamUtils.h>
|
||||
|
||||
|
@ -34,8 +33,6 @@ void Cube3DOverlay::render(RenderArgs* args) {
|
|||
const float MAX_COLOR = 255.0f;
|
||||
glm::vec4 cubeColor(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
|
||||
|
||||
//glDisable(GL_LIGHTING);
|
||||
|
||||
// TODO: handle registration point??
|
||||
glm::vec3 position = getPosition();
|
||||
glm::vec3 center = getCenter();
|
||||
|
@ -104,93 +101,6 @@ void Cube3DOverlay::render(RenderArgs* args) {
|
|||
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(*batch, 1.0f, cubeColor);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
float glowLevel = getGlowLevel();
|
||||
Glower* glower = NULL;
|
||||
if (glowLevel > 0.0f) {
|
||||
glower = new Glower(glowLevel);
|
||||
}
|
||||
|
||||
glPushMatrix();
|
||||
glTranslatef(position.x, position.y, position.z);
|
||||
glm::vec3 axis = glm::axis(rotation);
|
||||
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
|
||||
glPushMatrix();
|
||||
glm::vec3 positionToCenter = center - position;
|
||||
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
|
||||
if (_isSolid) {
|
||||
if (_borderSize > 0) {
|
||||
// Draw a cube at a larger size behind the main cube, creating
|
||||
// a border effect.
|
||||
// Disable writing to the depth mask so that the "border" cube will not
|
||||
// occlude the main cube. This means the border could be covered by
|
||||
// overlays that are further back and drawn later, but this is good
|
||||
// enough for the use-case.
|
||||
glDepthMask(GL_FALSE);
|
||||
glPushMatrix();
|
||||
glScalef(dimensions.x * _borderSize, dimensions.y * _borderSize, dimensions.z * _borderSize);
|
||||
|
||||
if (_drawOnHUD) {
|
||||
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, glm::vec4(1.0f, 1.0f, 1.0f, alpha));
|
||||
} else {
|
||||
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, glm::vec4(1.0f, 1.0f, 1.0f, alpha));
|
||||
}
|
||||
|
||||
glPopMatrix();
|
||||
glDepthMask(GL_TRUE);
|
||||
}
|
||||
|
||||
glPushMatrix();
|
||||
glScalef(dimensions.x, dimensions.y, dimensions.z);
|
||||
if (_drawOnHUD) {
|
||||
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, cubeColor);
|
||||
} else {
|
||||
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, cubeColor);
|
||||
}
|
||||
glPopMatrix();
|
||||
} else {
|
||||
glLineWidth(_lineWidth);
|
||||
|
||||
if (getIsDashedLine()) {
|
||||
glm::vec3 halfDimensions = dimensions / 2.0f;
|
||||
glm::vec3 bottomLeftNear(-halfDimensions.x, -halfDimensions.y, -halfDimensions.z);
|
||||
glm::vec3 bottomRightNear(halfDimensions.x, -halfDimensions.y, -halfDimensions.z);
|
||||
glm::vec3 topLeftNear(-halfDimensions.x, halfDimensions.y, -halfDimensions.z);
|
||||
glm::vec3 topRightNear(halfDimensions.x, halfDimensions.y, -halfDimensions.z);
|
||||
|
||||
glm::vec3 bottomLeftFar(-halfDimensions.x, -halfDimensions.y, halfDimensions.z);
|
||||
glm::vec3 bottomRightFar(halfDimensions.x, -halfDimensions.y, halfDimensions.z);
|
||||
glm::vec3 topLeftFar(-halfDimensions.x, halfDimensions.y, halfDimensions.z);
|
||||
glm::vec3 topRightFar(halfDimensions.x, halfDimensions.y, halfDimensions.z);
|
||||
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
||||
geometryCache->renderDashedLine(bottomLeftNear, bottomRightNear, cubeColor);
|
||||
geometryCache->renderDashedLine(bottomRightNear, bottomRightFar, cubeColor);
|
||||
geometryCache->renderDashedLine(bottomRightFar, bottomLeftFar, cubeColor);
|
||||
geometryCache->renderDashedLine(bottomLeftFar, bottomLeftNear, cubeColor);
|
||||
|
||||
geometryCache->renderDashedLine(topLeftNear, topRightNear, cubeColor);
|
||||
geometryCache->renderDashedLine(topRightNear, topRightFar, cubeColor);
|
||||
geometryCache->renderDashedLine(topRightFar, topLeftFar, cubeColor);
|
||||
geometryCache->renderDashedLine(topLeftFar, topLeftNear, cubeColor);
|
||||
|
||||
geometryCache->renderDashedLine(bottomLeftNear, topLeftNear, cubeColor);
|
||||
geometryCache->renderDashedLine(bottomRightNear, topRightNear, cubeColor);
|
||||
geometryCache->renderDashedLine(bottomLeftFar, topLeftFar, cubeColor);
|
||||
geometryCache->renderDashedLine(bottomRightFar, topRightFar, cubeColor);
|
||||
|
||||
} else {
|
||||
glScalef(dimensions.x, dimensions.y, dimensions.z);
|
||||
DependencyManager::get<GeometryCache>()->renderWireCube(1.0f, cubeColor);
|
||||
}
|
||||
}
|
||||
glPopMatrix();
|
||||
glPopMatrix();
|
||||
|
||||
if (glower) {
|
||||
delete glower;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -14,6 +14,11 @@
|
|||
#include <GeometryCache.h>
|
||||
#include <RegisteredMetaTypes.h>
|
||||
|
||||
#include "qapplication.h"
|
||||
|
||||
#include "gpu/Context.h"
|
||||
#include "gpu/StandardShaderLib.h"
|
||||
|
||||
ImageOverlay::ImageOverlay() :
|
||||
_imageURL(),
|
||||
_renderImage(false),
|
||||
|
@ -50,16 +55,19 @@ void ImageOverlay::render(RenderArgs* args) {
|
|||
_isLoaded = true;
|
||||
_texture = DependencyManager::get<TextureCache>()->getTexture(_imageURL);
|
||||
}
|
||||
|
||||
// If we are not visible or loaded, return. If we are trying to render an
|
||||
// image but the texture hasn't loaded, return.
|
||||
if (!_visible || !_isLoaded || (_renderImage && !_texture->isLoaded())) {
|
||||
return;
|
||||
}
|
||||
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
gpu::Batch& batch = *args->_batch;
|
||||
geometryCache->useSimpleDrawPipeline(batch);
|
||||
if (_renderImage) {
|
||||
glEnable(GL_TEXTURE_2D);
|
||||
glBindTexture(GL_TEXTURE_2D, _texture->getID());
|
||||
batch.setResourceTexture(0, _texture->getGPUTexture());
|
||||
} else {
|
||||
batch.setResourceTexture(0, args->_whiteTexture);
|
||||
}
|
||||
|
||||
const float MAX_COLOR = 255.0f;
|
||||
|
@ -75,6 +83,8 @@ void ImageOverlay::render(RenderArgs* args) {
|
|||
glm::vec2 topLeft(left, top);
|
||||
glm::vec2 bottomRight(right, bottom);
|
||||
|
||||
batch.setModelTransform(Transform());
|
||||
|
||||
// if for some reason our image is not over 0 width or height, don't attempt to render the image
|
||||
if (_renderImage) {
|
||||
float imageWidth = _texture->getWidth();
|
||||
|
@ -104,14 +114,14 @@ void ImageOverlay::render(RenderArgs* args) {
|
|||
|
||||
glm::vec2 texCoordTopLeft(x, y);
|
||||
glm::vec2 texCoordBottomRight(x + w, y + h);
|
||||
glm::vec4 texcoordRect(texCoordTopLeft, w, h);
|
||||
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor);
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor);
|
||||
} else {
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, quadColor);
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, quadColor);
|
||||
}
|
||||
glDisable(GL_TEXTURE_2D);
|
||||
} else {
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, quadColor);
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, quadColor);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -11,7 +11,6 @@
|
|||
// include this before QGLWidget, which includes an earlier version of OpenGL
|
||||
#include "InterfaceConfig.h"
|
||||
|
||||
#include <GlowEffect.h>
|
||||
#include <GeometryCache.h>
|
||||
#include <RegisteredMetaTypes.h>
|
||||
|
||||
|
@ -53,7 +52,6 @@ void Line3DOverlay::render(RenderArgs* args) {
|
|||
glm::vec4 colorv4(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
|
||||
|
||||
auto batch = args->_batch;
|
||||
|
||||
if (batch) {
|
||||
batch->setModelTransform(_transform);
|
||||
|
||||
|
@ -63,38 +61,6 @@ void Line3DOverlay::render(RenderArgs* args) {
|
|||
} else {
|
||||
DependencyManager::get<GeometryCache>()->renderLine(*batch, _start, _end, colorv4, _geometryCacheID);
|
||||
}
|
||||
} else {
|
||||
float glowLevel = getGlowLevel();
|
||||
Glower* glower = NULL;
|
||||
if (glowLevel > 0.0f) {
|
||||
glower = new Glower(glowLevel);
|
||||
}
|
||||
|
||||
glPushMatrix();
|
||||
|
||||
glDisable(GL_LIGHTING);
|
||||
glLineWidth(_lineWidth);
|
||||
|
||||
glm::vec3 position = getPosition();
|
||||
glm::quat rotation = getRotation();
|
||||
|
||||
glTranslatef(position.x, position.y, position.z);
|
||||
glm::vec3 axis = glm::axis(rotation);
|
||||
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
|
||||
|
||||
if (getIsDashedLine()) {
|
||||
// TODO: add support for color to renderDashedLine()
|
||||
DependencyManager::get<GeometryCache>()->renderDashedLine(_start, _end, colorv4, _geometryCacheID);
|
||||
} else {
|
||||
DependencyManager::get<GeometryCache>()->renderLine(_start, _end, colorv4, _geometryCacheID);
|
||||
}
|
||||
glEnable(GL_LIGHTING);
|
||||
|
||||
glPopMatrix();
|
||||
|
||||
if (glower) {
|
||||
delete glower;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -9,8 +9,6 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <GlowEffect.h>
|
||||
|
||||
#include "Application.h"
|
||||
|
||||
#include "LocalModelsOverlay.h"
|
||||
|
@ -32,11 +30,7 @@ void LocalModelsOverlay::update(float deltatime) {
|
|||
|
||||
void LocalModelsOverlay::render(RenderArgs* args) {
|
||||
if (_visible) {
|
||||
float glowLevel = getGlowLevel();
|
||||
Glower* glower = NULL;
|
||||
if (glowLevel > 0.0f) {
|
||||
glower = new Glower(glowLevel);
|
||||
}
|
||||
float glowLevel = getGlowLevel(); // FIXME, glowing removed for now
|
||||
|
||||
auto batch = args ->_batch;
|
||||
Application* app = Application::getInstance();
|
||||
|
@ -47,10 +41,6 @@ void LocalModelsOverlay::render(RenderArgs* args) {
|
|||
_entityTreeRenderer->render(args);
|
||||
transform.setTranslation(oldTranslation);
|
||||
batch->setViewTransform(transform);
|
||||
|
||||
if (glower) {
|
||||
delete glower;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -12,8 +12,6 @@
|
|||
#include "ModelOverlay.h"
|
||||
|
||||
#include <Application.h>
|
||||
#include <GlowEffect.h>
|
||||
|
||||
|
||||
ModelOverlay::ModelOverlay()
|
||||
: _model(),
|
||||
|
|
|
@ -98,24 +98,29 @@ void Overlays::cleanupOverlaysToDelete() {
|
|||
void Overlays::renderHUD(RenderArgs* renderArgs) {
|
||||
PROFILE_RANGE(__FUNCTION__);
|
||||
QReadLocker lock(&_lock);
|
||||
gpu::Batch batch;
|
||||
renderArgs->_batch = &batch;
|
||||
|
||||
gpu::Batch& batch = *renderArgs->_batch;
|
||||
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
|
||||
auto size = qApp->getCanvasSize();
|
||||
int width = size.x;
|
||||
int height = size.y;
|
||||
mat4 legacyProjection = glm::ortho<float>(0, width, height, 0, -1000, 1000);
|
||||
|
||||
|
||||
foreach(Overlay::Pointer thisOverlay, _overlaysHUD) {
|
||||
if (thisOverlay->is3D()) {
|
||||
glEnable(GL_DEPTH_TEST);
|
||||
glEnable(GL_LIGHTING);
|
||||
|
||||
// Reset all batch pipeline settings between overlay
|
||||
geometryCache->useSimpleDrawPipeline(batch);
|
||||
batch.setResourceTexture(0, textureCache->getWhiteTexture()); // FIXME - do we really need to do this??
|
||||
batch.setProjectionTransform(legacyProjection);
|
||||
batch.setModelTransform(Transform());
|
||||
batch.setViewTransform(Transform());
|
||||
batch._glLineWidth(1.0f); // default
|
||||
|
||||
thisOverlay->render(renderArgs);
|
||||
|
||||
glDisable(GL_LIGHTING);
|
||||
glDisable(GL_DEPTH_TEST);
|
||||
} else {
|
||||
thisOverlay->render(renderArgs);
|
||||
}
|
||||
thisOverlay->render(renderArgs);
|
||||
}
|
||||
gpu::GLBackend::renderBatch(batch, true);
|
||||
}
|
||||
|
||||
unsigned int Overlays::addOverlay(const QString& type, const QScriptValue& properties) {
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
#include "Rectangle3DOverlay.h"
|
||||
|
||||
#include <GeometryCache.h>
|
||||
#include <GlowEffect.h>
|
||||
#include <SharedUtil.h>
|
||||
|
||||
Rectangle3DOverlay::Rectangle3DOverlay() :
|
||||
|
@ -35,14 +34,13 @@ void Rectangle3DOverlay::render(RenderArgs* args) {
|
|||
if (!_visible) {
|
||||
return; // do nothing if we're not visible
|
||||
}
|
||||
|
||||
|
||||
float alpha = getAlpha();
|
||||
xColor color = getColor();
|
||||
const float MAX_COLOR = 255.0f;
|
||||
glm::vec4 rectangleColor(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
|
||||
|
||||
glm::vec3 position = getPosition();
|
||||
glm::vec3 center = getCenter();
|
||||
glm::vec2 dimensions = getDimensions();
|
||||
glm::vec2 halfDimensions = dimensions * 0.5f;
|
||||
glm::quat rotation = getRotation();
|
||||
|
@ -67,7 +65,7 @@ void Rectangle3DOverlay::render(RenderArgs* args) {
|
|||
glm::vec3 point2(halfDimensions.x, -halfDimensions.y, 0.0f);
|
||||
glm::vec3 point3(halfDimensions.x, halfDimensions.y, 0.0f);
|
||||
glm::vec3 point4(-halfDimensions.x, halfDimensions.y, 0.0f);
|
||||
|
||||
|
||||
geometryCache->renderDashedLine(*batch, point1, point2, rectangleColor);
|
||||
geometryCache->renderDashedLine(*batch, point2, point3, rectangleColor);
|
||||
geometryCache->renderDashedLine(*batch, point3, point4, rectangleColor);
|
||||
|
|
|
@ -11,7 +11,6 @@
|
|||
// include this before QGLWidget, which includes an earlier version of OpenGL
|
||||
#include "InterfaceConfig.h"
|
||||
|
||||
#include <GlowEffect.h>
|
||||
#include <SharedUtil.h>
|
||||
|
||||
#include "Sphere3DOverlay.h"
|
||||
|
|
|
@ -10,15 +10,72 @@
|
|||
|
||||
// include this before QGLWidget, which includes an earlier version of OpenGL
|
||||
#include "InterfaceConfig.h"
|
||||
|
||||
#include "Application.h"
|
||||
#include "TextOverlay.h"
|
||||
|
||||
#include "OffscreenUi.h"
|
||||
#include "text/FontFamilies.h"
|
||||
#include <gpu/GLBackend.h>
|
||||
#include <DependencyManager.h>
|
||||
#include <GeometryCache.h>
|
||||
#include <TextureCache.h>
|
||||
#include <GLMHelpers.h>
|
||||
#include <RegisteredMetaTypes.h>
|
||||
#include <SharedUtil.h>
|
||||
#include <TextRenderer.h>
|
||||
#include <ViewFrustum.h>
|
||||
#include <QQuickItem>
|
||||
|
||||
#define TEXT_OVERLAY_PROPERTY(type, name, initialValue) \
|
||||
Q_PROPERTY(type name READ name WRITE set##name NOTIFY name##Changed) \
|
||||
public: \
|
||||
type name() { return _##name; }; \
|
||||
void set##name(const type& name) { \
|
||||
if (name != _##name) { \
|
||||
_##name = name; \
|
||||
emit name##Changed(); \
|
||||
} \
|
||||
} \
|
||||
private: \
|
||||
type _##name{ initialValue };
|
||||
|
||||
|
||||
class TextOverlayElement : public QQuickItem {
|
||||
Q_OBJECT
|
||||
HIFI_QML_DECL
|
||||
private:
|
||||
TEXT_OVERLAY_PROPERTY(QString, text, "")
|
||||
TEXT_OVERLAY_PROPERTY(QString, fontFamily, SANS_FONT_FAMILY)
|
||||
TEXT_OVERLAY_PROPERTY(QString, textColor, "#ffffffff")
|
||||
TEXT_OVERLAY_PROPERTY(QString, backgroundColor, "#B2000000")
|
||||
TEXT_OVERLAY_PROPERTY(qreal, fontSize, 18)
|
||||
TEXT_OVERLAY_PROPERTY(qreal, lineHeight, 18)
|
||||
TEXT_OVERLAY_PROPERTY(qreal, leftMargin, 0)
|
||||
TEXT_OVERLAY_PROPERTY(qreal, topMargin, 0)
|
||||
|
||||
public:
|
||||
TextOverlayElement(QQuickItem* parent = nullptr) : QQuickItem(parent) {
|
||||
}
|
||||
|
||||
signals:
|
||||
void textChanged();
|
||||
void fontFamilyChanged();
|
||||
void fontSizeChanged();
|
||||
void lineHeightChanged();
|
||||
void leftMarginChanged();
|
||||
void topMarginChanged();
|
||||
void textColorChanged();
|
||||
void backgroundColorChanged();
|
||||
};
|
||||
|
||||
HIFI_QML_DEF(TextOverlayElement)
|
||||
|
||||
QString toQmlColor(const glm::vec4& v) {
|
||||
QString templat("#%1%2%3%4");
|
||||
return templat.
|
||||
arg((int)(v.a * 255), 2, 16, QChar('0')).
|
||||
arg((int)(v.r * 255), 2, 16, QChar('0')).
|
||||
arg((int)(v.g * 255), 2, 16, QChar('0')).
|
||||
arg((int)(v.b * 255), 2, 16, QChar('0'));
|
||||
}
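As a worked example of the helper above, toQmlColor(glm::vec4(1.0f, 0.0f, 0.0f, 0.5f)) returns "#7fff0000": the alpha byte comes first ((int)(0.5f * 255) == 127 == 0x7f), followed by the red, green, and blue bytes, each zero-padded to two hex digits.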
|
||||
|
||||
TextOverlay::TextOverlay() :
|
||||
_backgroundColor(DEFAULT_BACKGROUND_COLOR),
|
||||
|
@ -27,7 +84,20 @@ TextOverlay::TextOverlay() :
|
|||
_topMargin(DEFAULT_MARGIN),
|
||||
_fontSize(DEFAULT_FONTSIZE)
|
||||
{
|
||||
_textRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY, _fontSize, DEFAULT_FONT_WEIGHT);
|
||||
|
||||
qApp->postLambdaEvent([=] {
|
||||
static std::once_flag once;
|
||||
std::call_once(once, [] {
|
||||
TextOverlayElement::registerType();
|
||||
});
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
TextOverlayElement::show([=](QQmlContext* context, QObject* object) {
|
||||
_qmlElement = static_cast<TextOverlayElement*>(object);
|
||||
});
|
||||
});
|
||||
while (!_qmlElement) {
|
||||
QThread::msleep(1);
|
||||
}
|
||||
}
|
||||
|
||||
TextOverlay::TextOverlay(const TextOverlay* textOverlay) :
|
||||
|
@ -39,11 +109,21 @@ TextOverlay::TextOverlay(const TextOverlay* textOverlay) :
|
|||
_topMargin(textOverlay->_topMargin),
|
||||
_fontSize(textOverlay->_fontSize)
|
||||
{
|
||||
_textRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY, _fontSize, DEFAULT_FONT_WEIGHT);
|
||||
qApp->postLambdaEvent([=] {
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
TextOverlayElement::show([this](QQmlContext* context, QObject* object) {
|
||||
_qmlElement = static_cast<TextOverlayElement*>(object);
|
||||
});
|
||||
});
|
||||
while (!_qmlElement) {
|
||||
QThread::sleep(1);
|
||||
}
|
||||
}
|
||||
|
||||
TextOverlay::~TextOverlay() {
|
||||
delete _textRenderer;
|
||||
if (_qmlElement) {
|
||||
_qmlElement->deleteLater();
|
||||
}
|
||||
}
|
||||
|
||||
xColor TextOverlay::getBackgroundColor() {
|
||||
|
@ -65,45 +145,34 @@ xColor TextOverlay::getBackgroundColor() {
|
|||
return result;
|
||||
}
|
||||
|
||||
|
||||
void TextOverlay::render(RenderArgs* args) {
|
||||
if (!_visible) {
|
||||
return; // do nothing if we're not visible
|
||||
if (_visible != _qmlElement->isVisible()) {
|
||||
_qmlElement->setVisible(_visible);
|
||||
}
|
||||
float pulseLevel = updatePulse();
|
||||
static float _oldPulseLevel = 0.0f;
|
||||
if (pulseLevel != _oldPulseLevel) {
|
||||
|
||||
const float MAX_COLOR = 255.0f;
|
||||
xColor backgroundColor = getBackgroundColor();
|
||||
glm::vec4 quadColor(backgroundColor.red / MAX_COLOR, backgroundColor.green / MAX_COLOR, backgroundColor.blue / MAX_COLOR,
|
||||
getBackgroundAlpha());
|
||||
|
||||
int left = _bounds.left();
|
||||
int right = _bounds.right() + 1;
|
||||
int top = _bounds.top();
|
||||
int bottom = _bounds.bottom() + 1;
|
||||
|
||||
glm::vec2 topLeft(left, top);
|
||||
glm::vec2 bottomRight(right, bottom);
|
||||
glBindTexture(GL_TEXTURE_2D, 0);
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, quadColor);
|
||||
|
||||
const int leftAdjust = -1; // required to make text render relative to left edge of bounds
|
||||
const int topAdjust = -2; // required to make text render relative to top edge of bounds
|
||||
int x = _bounds.left() + _leftMargin + leftAdjust;
|
||||
int y = _bounds.top() + _topMargin + topAdjust;
|
||||
|
||||
float alpha = getAlpha();
|
||||
glm::vec4 textColor = {_color.red / MAX_COLOR, _color.green / MAX_COLOR, _color.blue / MAX_COLOR, alpha };
|
||||
_textRenderer->draw(x, y, _text, textColor);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void TextOverlay::setProperties(const QScriptValue& properties) {
|
||||
Overlay2D::setProperties(properties);
|
||||
|
||||
_qmlElement->setX(_bounds.left());
|
||||
_qmlElement->setY(_bounds.top());
|
||||
_qmlElement->setWidth(_bounds.width());
|
||||
_qmlElement->setHeight(_bounds.height());
|
||||
_qmlElement->settextColor(toQmlColor(vec4(toGlm(_color), _alpha)));
|
||||
QScriptValue font = properties.property("font");
|
||||
if (font.isObject()) {
|
||||
if (font.property("size").isValid()) {
|
||||
setFontSize(font.property("size").toInt32());
|
||||
}
|
||||
QFont font(_qmlElement->fontFamily());
|
||||
font.setPixelSize(_qmlElement->fontSize());
|
||||
QFontMetrics fm(font);
|
||||
_qmlElement->setlineHeight(fm.lineSpacing() * 1.2);
|
||||
}
|
||||
|
||||
QScriptValue text = properties.property("text");
|
||||
|
@ -126,6 +195,7 @@ void TextOverlay::setProperties(const QScriptValue& properties) {
|
|||
if (properties.property("backgroundAlpha").isValid()) {
|
||||
_backgroundAlpha = properties.property("backgroundAlpha").toVariant().toFloat();
|
||||
}
|
||||
_qmlElement->setbackgroundColor(toQmlColor(vec4(toGlm(_backgroundColor), _backgroundAlpha)));
|
||||
|
||||
if (properties.property("leftMargin").isValid()) {
|
||||
setLeftMargin(properties.property("leftMargin").toVariant().toInt());
|
||||
|
@ -166,15 +236,37 @@ QScriptValue TextOverlay::getProperty(const QString& property) {
|
|||
}
|
||||
|
||||
QSizeF TextOverlay::textSize(const QString& text) const {
|
||||
auto extents = _textRenderer->computeExtent(text);
|
||||
|
||||
return QSizeF(extents.x, extents.y);
|
||||
int lines = 1;
|
||||
foreach(QChar c, text) {
|
||||
if (c == QChar('\n')) {
|
||||
++lines;
|
||||
}
|
||||
}
|
||||
QFont font(_qmlElement->fontFamily());
|
||||
font.setPixelSize(_qmlElement->fontSize());
|
||||
QFontMetrics fm(font);
|
||||
QSizeF result = QSizeF(fm.width(text), _qmlElement->lineHeight() * lines);
|
||||
return result;
|
||||
}
|
||||
|
||||
void TextOverlay::setFontSize(int fontSize) {
|
||||
_fontSize = fontSize;
|
||||
|
||||
auto oldTextRenderer = _textRenderer;
|
||||
_textRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY, _fontSize, DEFAULT_FONT_WEIGHT);
|
||||
delete oldTextRenderer;
|
||||
_qmlElement->setfontSize(fontSize);
|
||||
}
|
||||
|
||||
void TextOverlay::setText(const QString& text) {
|
||||
_text = text;
|
||||
_qmlElement->settext(text);
|
||||
}
|
||||
|
||||
void TextOverlay::setLeftMargin(int margin) {
|
||||
_leftMargin = margin;
|
||||
_qmlElement->setleftMargin(margin);
|
||||
}
|
||||
|
||||
void TextOverlay::setTopMargin(int margin) {
|
||||
_topMargin = margin;
|
||||
_qmlElement->settopMargin(margin);
|
||||
}
|
||||
|
||||
#include "TextOverlay.moc"
|
||||
|
|
|
@ -26,7 +26,7 @@ const int DEFAULT_MARGIN = 10;
|
|||
const int DEFAULT_FONTSIZE = 12;
|
||||
const int DEFAULT_FONT_WEIGHT = 50;
|
||||
|
||||
class TextRenderer;
|
||||
class TextOverlayElement;
|
||||
|
||||
class TextOverlay : public Overlay2D {
|
||||
Q_OBJECT
|
||||
|
@ -45,9 +45,9 @@ public:
|
|||
float getBackgroundAlpha() const { return _backgroundAlpha; }
|
||||
|
||||
// setters
|
||||
void setText(const QString& text) { _text = text; }
|
||||
void setLeftMargin(int margin) { _leftMargin = margin; }
|
||||
void setTopMargin(int margin) { _topMargin = margin; }
|
||||
void setText(const QString& text);
|
||||
void setLeftMargin(int margin);
|
||||
void setTopMargin(int margin);
|
||||
void setFontSize(int fontSize);
|
||||
|
||||
virtual void setProperties(const QScriptValue& properties);
|
||||
|
@ -57,9 +57,7 @@ public:
|
|||
QSizeF textSize(const QString& text) const; // Pixels
|
||||
|
||||
private:
|
||||
|
||||
TextRenderer* _textRenderer = nullptr;
|
||||
|
||||
TextOverlayElement* _qmlElement{ nullptr };
|
||||
QString _text;
|
||||
xColor _backgroundColor;
|
||||
float _backgroundAlpha;
|
||||
|
|
|
@ -38,10 +38,7 @@ QSharedPointer<Resource> AnimationCache::createResource(const QUrl& url, const Q
|
|||
return QSharedPointer<Resource>(new Animation(url), &Resource::allReferencesCleared);
|
||||
}
|
||||
|
||||
Animation::Animation(const QUrl& url) :
|
||||
Resource(url),
|
||||
_isValid(false) {
|
||||
}
|
||||
Animation::Animation(const QUrl& url) : Resource(url) {}
|
||||
|
||||
class AnimationReader : public QRunnable {
|
||||
public:
|
||||
|
@ -97,7 +94,6 @@ QVector<FBXAnimationFrame> Animation::getFrames() const {
|
|||
void Animation::setGeometry(const FBXGeometry& geometry) {
|
||||
_geometry = geometry;
|
||||
finishedLoading(true);
|
||||
_isValid = true;
|
||||
}
|
||||
|
||||
void Animation::downloadFinished(QNetworkReply* reply) {
|
||||
|
|
|
@ -57,8 +57,6 @@ public:
|
|||
Q_INVOKABLE QStringList getJointNames() const;
|
||||
|
||||
Q_INVOKABLE QVector<FBXAnimationFrame> getFrames() const;
|
||||
|
||||
bool isValid() const { return _isValid; }
|
||||
|
||||
protected:
|
||||
|
||||
|
@ -69,7 +67,6 @@ protected:
|
|||
private:
|
||||
|
||||
FBXGeometry _geometry;
|
||||
bool _isValid;
|
||||
};
|
||||
|
||||
|
||||
|
|
|
@ -294,7 +294,7 @@ AudioInjector* AudioInjector::playSound(const QString& soundUrl, const float vol
|
|||
if (soundCache.isNull()) {
|
||||
return NULL;
|
||||
}
|
||||
SharedSoundPointer sound = soundCache.data()->getSound(QUrl(soundUrl));
|
||||
SharedSoundPointer sound = soundCache->getSound(QUrl(soundUrl));
|
||||
if (sound.isNull() || !sound->isReady()) {
|
||||
return NULL;
|
||||
}
|
||||
|
|
|
@ -161,19 +161,15 @@ QByteArray AvatarData::toByteArray() {
|
|||
// Body scale
|
||||
destinationBuffer += packFloatRatioToTwoByte(destinationBuffer, _targetScale);
|
||||
|
||||
// Head rotation (NOTE: This needs to become a quaternion to save two bytes)
|
||||
glm::vec3 pitchYawRoll = glm::vec3(_headData->getFinalPitch(),
|
||||
_headData->getFinalYaw(),
|
||||
_headData->getFinalRoll());
|
||||
if (this->isMyAvatar()) {
|
||||
glm::vec3 lean = glm::vec3(_headData->getFinalLeanForward(),
|
||||
_headData->getTorsoTwist(),
|
||||
_headData->getFinalLeanSideways());
|
||||
pitchYawRoll -= lean;
|
||||
}
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, pitchYawRoll.x);
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, pitchYawRoll.y);
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, pitchYawRoll.z);
|
||||
// Head rotation
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->getFinalPitch());
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->getFinalYaw());
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->getFinalRoll());
|
||||
|
||||
// Body lean
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->_leanForward);
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->_leanSideways);
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->_torsoTwist);
|
||||
|
||||
// Lookat Position
|
||||
memcpy(destinationBuffer, &_headData->_lookAtPosition, sizeof(_headData->_lookAtPosition));
|
||||
|
@ -291,13 +287,16 @@ int AvatarData::parseDataAtOffset(const QByteArray& packet, int offset) {
|
|||
// headPitch = 2 (compressed float)
|
||||
// headYaw = 2 (compressed float)
|
||||
// headRoll = 2 (compressed float)
|
||||
// leanForward = 2 (compressed float)
|
||||
// leanSideways = 2 (compressed float)
|
||||
// torsoTwist = 2 (compressed float)
|
||||
// lookAt = 12
|
||||
// audioLoudness = 4
|
||||
// }
|
||||
// + 1 byte for pupilSize
|
||||
// + 1 byte for numJoints (0)
|
||||
// = 45 bytes
|
||||
int minPossibleSize = 45;
|
||||
// = 51 bytes
|
||||
int minPossibleSize = 51;
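    // The three lean angles added above are each packed with packFloatAngleToTwoByte (2 bytes
    // apiece, per the "compressed float" sizes listed in the comment), so the minimum payload
    // grows by 3 * 2 = 6 bytes: 45 + 6 = 51.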
|
||||
|
||||
int maxAvailableSize = packet.size() - offset;
|
||||
if (minPossibleSize > maxAvailableSize) {
|
||||
|
@ -371,6 +370,22 @@ int AvatarData::parseDataAtOffset(const QByteArray& packet, int offset) {
|
|||
_headData->setBaseYaw(headYaw);
|
||||
_headData->setBaseRoll(headRoll);
|
||||
} // 6 bytes
|
||||
|
||||
{ // Head lean (relative to pelvis)
|
||||
float leanForward, leanSideways, torsoTwist;
|
||||
sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*)sourceBuffer, &leanForward);
|
||||
sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*)sourceBuffer, &leanSideways);
|
||||
sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*)sourceBuffer, &torsoTwist);
|
||||
if (glm::isnan(leanForward) || glm::isnan(leanSideways)) {
|
||||
if (shouldLogError(now)) {
|
||||
qCDebug(avatars) << "Discard nan AvatarData::leanForward,leanSideways,torsoTwise; displayName = '" << _displayName << "'";
|
||||
}
|
||||
return maxAvailableSize;
|
||||
}
|
||||
_headData->_leanForward = leanForward;
|
||||
_headData->_leanSideways = leanSideways;
|
||||
_headData->_torsoTwist = torsoTwist;
|
||||
} // 6 bytes
|
||||
|
||||
{ // Lookat Position
|
||||
glm::vec3 lookAt;
|
||||
|
|
|
@ -9,6 +9,8 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <QtCore/QDataStream>
|
||||
|
||||
#include <NodeList.h>
|
||||
#include <PacketHeaders.h>
|
||||
#include <SharedUtil.h>
|
||||
|
|
|
@ -20,7 +20,6 @@
|
|||
#include <AbstractScriptingServicesInterface.h>
|
||||
#include <AbstractViewStateInterface.h>
|
||||
#include <DeferredLightingEffect.h>
|
||||
#include <GlowEffect.h>
|
||||
#include <Model.h>
|
||||
#include <NetworkAccessManager.h>
|
||||
#include <PerfStat.h>
|
||||
|
@ -93,16 +92,18 @@ void EntityTreeRenderer::clear() {
|
|||
foreach (const EntityItemID& entityID, _entityScripts.keys()) {
|
||||
checkAndCallUnload(entityID);
|
||||
}
|
||||
OctreeRenderer::clear();
|
||||
_entityScripts.clear();
|
||||
|
||||
auto scene = _viewState->getMain3DScene();
|
||||
render::PendingChanges pendingChanges;
|
||||
|
||||
foreach(auto entity, _entitiesInScene) {
|
||||
entity->removeFromScene(entity, scene, pendingChanges);
|
||||
}
|
||||
scene->enqueuePendingChanges(pendingChanges);
|
||||
_entitiesInScene.clear();
|
||||
|
||||
OctreeRenderer::clear();
|
||||
}
|
||||
|
||||
void EntityTreeRenderer::init() {
|
||||
|
@ -544,7 +545,7 @@ const FBXGeometry* EntityTreeRenderer::getCollisionGeometryForEntity(EntityItemP
|
|||
Model* model = modelEntityItem->getModel(this);
|
||||
if (model) {
|
||||
const QSharedPointer<NetworkGeometry> collisionNetworkGeometry = model->getCollisionGeometry();
|
||||
if (!collisionNetworkGeometry.isNull()) {
|
||||
if (collisionNetworkGeometry && collisionNetworkGeometry->isLoaded()) {
|
||||
result = &collisionNetworkGeometry->getFBXGeometry();
|
||||
}
|
||||
}
|
||||
|
@ -803,7 +804,7 @@ void EntityTreeRenderer::connectSignalsToSlots(EntityScriptingInterface* entityS
|
|||
connect(this, &EntityTreeRenderer::leaveEntity, entityScriptingInterface, &EntityScriptingInterface::leaveEntity);
|
||||
connect(this, &EntityTreeRenderer::collisionWithEntity, entityScriptingInterface, &EntityScriptingInterface::collisionWithEntity);
|
||||
|
||||
connect(&(*DependencyManager::get<SceneScriptingInterface>()), &SceneScriptingInterface::shouldRenderEntitiesChanged, this, &EntityTreeRenderer::updateEntityRenderStatus, Qt::QueuedConnection);
|
||||
connect(DependencyManager::get<SceneScriptingInterface>().data(), &SceneScriptingInterface::shouldRenderEntitiesChanged, this, &EntityTreeRenderer::updateEntityRenderStatus, Qt::QueuedConnection);
|
||||
}
|
||||
|
||||
QScriptValueList EntityTreeRenderer::createMouseEventArgs(const EntityItemID& entityID, QMouseEvent* event, unsigned int deviceID) {
|
||||
|
@ -1003,7 +1004,7 @@ void EntityTreeRenderer::deletingEntity(const EntityItemID& entityID) {
|
|||
checkAndCallUnload(entityID);
|
||||
}
|
||||
_entityScripts.remove(entityID);
|
||||
|
||||
|
||||
// here's where we remove the entity payload from the scene
|
||||
if (_entitiesInScene.contains(entityID)) {
|
||||
auto entity = _entitiesInScene.take(entityID);
|
||||
|
@ -1164,6 +1165,7 @@ void EntityTreeRenderer::updateEntityRenderStatus(bool shouldRenderEntities) {
|
|||
} else {
|
||||
_entityIDsLastInScene = _entitiesInScene.keys();
|
||||
for (auto entityID : _entityIDsLastInScene) {
|
||||
// FIXME - is this really right? do we want to do the deletingEntity() code or just remove from the scene.
|
||||
deletingEntity(entityID);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -63,11 +63,11 @@ void RenderableModelEntityItem::remapTextures() {
|
|||
return; // nothing to do if we don't have a model
|
||||
}
|
||||
|
||||
if (!_model->isLoadedWithTextures()) {
|
||||
return; // nothing to do if the model has not yet loaded its default textures
|
||||
if (!_model->isLoaded()) {
|
||||
return; // nothing to do if the model has not yet loaded
|
||||
}
|
||||
|
||||
if (!_originalTexturesRead && _model->isLoadedWithTextures()) {
|
||||
if (!_originalTexturesRead) {
|
||||
const QSharedPointer<NetworkGeometry>& networkGeometry = _model->getGeometry();
|
||||
if (networkGeometry) {
|
||||
_originalTextures = networkGeometry->getTextureNames();
|
||||
|
@ -119,7 +119,7 @@ bool RenderableModelEntityItem::readyToAddToScene(RenderArgs* renderArgs) {
|
|||
EntityTreeRenderer* renderer = static_cast<EntityTreeRenderer*>(renderArgs->_renderer);
|
||||
getModel(renderer);
|
||||
}
|
||||
if (renderArgs && _model && _needsInitialSimulation && _model->isActive() && _model->isLoadedWithTextures()) {
|
||||
if (renderArgs && _model && _needsInitialSimulation && _model->isActive() && _model->isLoaded()) {
|
||||
_model->setScaleToFit(true, getDimensions());
|
||||
_model->setSnapModelToRegistrationPoint(true, getRegistrationPoint());
|
||||
_model->setRotation(getRotation());
|
||||
|
@ -189,6 +189,7 @@ void makeEntityItemStatusGetters(RenderableModelEntityItem* entity, render::Item
|
|||
|
||||
bool RenderableModelEntityItem::addToScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene,
|
||||
render::PendingChanges& pendingChanges) {
|
||||
|
||||
_myMetaItem = scene->allocateID();
|
||||
|
||||
auto renderData = std::make_shared<RenderableModelEntityItemMeta>(self);
|
||||
|
@ -199,7 +200,10 @@ bool RenderableModelEntityItem::addToScene(EntityItemPointer self, std::shared_p
|
|||
if (_model) {
|
||||
render::Item::Status::Getters statusGetters;
|
||||
makeEntityItemStatusGetters(this, statusGetters);
|
||||
return _model->addToScene(scene, pendingChanges, statusGetters);
|
||||
|
||||
// note: we don't care if the model fails to add items, we always added our meta item and therefore we return
|
||||
// true so that the system knows our meta item is in the scene!
|
||||
_model->addToScene(scene, pendingChanges, statusGetters);
|
||||
}
|
||||
|
||||
return true;
|
||||
|
@ -397,8 +401,8 @@ bool RenderableModelEntityItem::isReadyToComputeShape() {
|
|||
const QSharedPointer<NetworkGeometry> collisionNetworkGeometry = _model->getCollisionGeometry();
|
||||
const QSharedPointer<NetworkGeometry> renderNetworkGeometry = _model->getGeometry();
|
||||
|
||||
if ((! collisionNetworkGeometry.isNull() && collisionNetworkGeometry->isLoadedWithTextures()) &&
|
||||
(! renderNetworkGeometry.isNull() && renderNetworkGeometry->isLoadedWithTextures())) {
|
||||
if ((collisionNetworkGeometry && collisionNetworkGeometry->isLoaded()) &&
|
||||
(renderNetworkGeometry && renderNetworkGeometry->isLoaded())) {
|
||||
// we have both URLs AND both geometries AND they are both fully loaded.
|
||||
return true;
|
||||
}
|
||||
|
@ -419,7 +423,7 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& info) {
|
|||
|
||||
// should never fall in here when collision model not fully loaded
|
||||
// hence we assert collisionNetworkGeometry is not NULL
|
||||
assert(!collisionNetworkGeometry.isNull());
|
||||
assert(collisionNetworkGeometry);
|
||||
|
||||
const FBXGeometry& collisionGeometry = collisionNetworkGeometry->getFBXGeometry();
|
||||
const QSharedPointer<NetworkGeometry> renderNetworkGeometry = _model->getGeometry();
|
||||
|
|
|
@ -50,7 +50,7 @@ void RenderableParticleEffectEntityItem::render(RenderArgs* args) {
|
|||
Q_ASSERT(args->_batch);
|
||||
gpu::Batch& batch = *args->_batch;
|
||||
if (textured) {
|
||||
batch.setUniformTexture(0, _texture->getGPUTexture());
|
||||
batch.setResourceTexture(0, _texture->getGPUTexture());
|
||||
}
|
||||
batch.setModelTransform(getTransformToCenter());
|
||||
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, textured);
|
||||
|
|
|
@ -14,11 +14,9 @@
|
|||
|
||||
#include <gpu/GPUConfig.h>
|
||||
|
||||
#include <GlowEffect.h>
|
||||
#include <DeferredLightingEffect.h>
|
||||
#include <GeometryCache.h>
|
||||
#include <PerfStat.h>
|
||||
#include <TextRenderer.h>
|
||||
#include <OffscreenQmlSurface.h>
|
||||
#include <AbstractViewStateInterface.h>
|
||||
#include <GLMHelpers.h>
|
||||
|
@ -169,7 +167,6 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
|
|||
_webSurface->resize(QSize(dims.x, dims.y));
|
||||
currentContext->makeCurrent(currentSurface);
|
||||
|
||||
Glower glow(0.0f);
|
||||
PerformanceTimer perfTimer("RenderableWebEntityItem::render");
|
||||
Q_ASSERT(getType() == EntityTypes::Web);
|
||||
static const glm::vec2 texMin(0.0f), texMax(1.0f), topLeft(-0.5f), bottomRight(0.5f);
|
||||
|
|
|
@ -29,7 +29,6 @@ BoxEntityItem::BoxEntityItem(const EntityItemID& entityItemID, const EntityItemP
|
|||
EntityItem(entityItemID)
|
||||
{
|
||||
_type = EntityTypes::Box;
|
||||
_created = properties.getCreated();
|
||||
setProperties(properties);
|
||||
}
|
||||
|
||||
|
|
|
@ -127,21 +127,21 @@ glm::vec3 EntityActionInterface::extractVec3Argument(QString objectName, QVarian
|
|||
qDebug() << objectName << "requires argument:" << argumentName;
|
||||
}
|
||||
ok = false;
|
||||
return glm::vec3();
|
||||
return glm::vec3(0.0f);
|
||||
}
|
||||
|
||||
QVariant resultV = arguments[argumentName];
|
||||
if (resultV.type() != (QVariant::Type) QMetaType::QVariantMap) {
|
||||
qDebug() << objectName << "argument" << argumentName << "must be a map";
|
||||
ok = false;
|
||||
return glm::vec3();
|
||||
return glm::vec3(0.0f);
|
||||
}
|
||||
|
||||
QVariantMap resultVM = resultV.toMap();
|
||||
if (!resultVM.contains("x") || !resultVM.contains("y") || !resultVM.contains("z")) {
|
||||
qDebug() << objectName << "argument" << argumentName << "must be a map with keys of x, y, z";
|
||||
qDebug() << objectName << "argument" << argumentName << "must be a map with keys: x, y, z";
|
||||
ok = false;
|
||||
return glm::vec3();
|
||||
return glm::vec3(0.0f);
|
||||
}
|
||||
|
||||
QVariant xV = resultVM["x"];
|
||||
|
@ -155,9 +155,15 @@ glm::vec3 EntityActionInterface::extractVec3Argument(QString objectName, QVarian
float y = yV.toFloat(&yOk);
float z = zV.toFloat(&zOk);
if (!xOk || !yOk || !zOk) {
qDebug() << objectName << "argument" << argumentName << "must be a map with keys of x, y, z and values of type float.";
qDebug() << objectName << "argument" << argumentName << "must be a map with keys: x, y, and z of type float.";
ok = false;
return glm::vec3();
return glm::vec3(0.0f);
}

if (x != x || y != y || z != z) {
// at least one of the values is NaN
ok = false;
return glm::vec3(0.0f);
}

return glm::vec3(x, y, z);
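The added `x != x` tests are the usual self-comparison NaN check, since NaN is the only floating-point value that compares unequal to itself. An equivalent standalone sketch using std::isnan, with a hypothetical sanitizeVec3() helper:

    #include <cmath>
    #include <glm/glm.hpp>

    // Returns a zero vector and clears `ok` when any component is NaN.
    glm::vec3 sanitizeVec3(float x, float y, float z, bool& ok) {
        if (std::isnan(x) || std::isnan(y) || std::isnan(z)) {   // same effect as x != x || y != y || z != z
            ok = false;
            return glm::vec3(0.0f);
        }
        return glm::vec3(x, y, z);
    }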
@ -181,8 +187,8 @@ glm::quat EntityActionInterface::extractQuatArgument(QString objectName, QVarian
|
|||
}
|
||||
|
||||
QVariantMap resultVM = resultV.toMap();
|
||||
if (!resultVM.contains("x") || !resultVM.contains("y") || !resultVM.contains("z")) {
|
||||
qDebug() << objectName << "argument" << argumentName << "must be a map with keys of x, y, z";
|
||||
if (!resultVM.contains("x") || !resultVM.contains("y") || !resultVM.contains("z") || !resultVM.contains("w")) {
|
||||
qDebug() << objectName << "argument" << argumentName << "must be a map with keys: x, y, z, and w";
|
||||
ok = false;
|
||||
return glm::quat();
|
||||
}
|
||||
|
@ -202,12 +208,18 @@ glm::quat EntityActionInterface::extractQuatArgument(QString objectName, QVarian
float w = wV.toFloat(&wOk);
if (!xOk || !yOk || !zOk || !wOk) {
qDebug() << objectName << "argument" << argumentName
<< "must be a map with keys of x, y, z, w and values of type float.";
<< "must be a map with keys: x, y, z, and w of type float.";
ok = false;
return glm::quat();
}

return glm::quat(w, x, y, z);
if (x != x || y != y || z != z || w != w) {
// at least one of the components is NaN!
ok = false;
return glm::quat();
}

return glm::normalize(glm::quat(w, x, y, z));
}

float EntityActionInterface::extractFloatArgument(QString objectName, QVariantMap arguments,
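Besides rejecting NaN components, the extracted quaternion is now normalized, since script-supplied values are not guaranteed to be unit length and a non-unit quaternion would distort the rotation it represents. A small illustration with arbitrary values:

    #include <glm/gtc/quaternion.hpp>

    // Arbitrary input: a quaternion twice the length of a unit quaternion.
    glm::quat normalizeExample() {
        glm::quat raw(2.0f, 0.0f, 0.0f, 0.0f);    // w, x, y, z
        return glm::normalize(raw);               // (1, 0, 0, 0): a proper identity rotation
    }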
@ -224,7 +236,7 @@ float EntityActionInterface::extractFloatArgument(QString objectName, QVariantMa
|
|||
bool vOk = true;
|
||||
float v = vV.toFloat(&vOk);
|
||||
|
||||
if (!vOk) {
|
||||
if (!vOk || v != v) {
|
||||
ok = false;
|
||||
return 0.0f;
|
||||
}
|
||||
|
|
|
@ -45,9 +45,7 @@ EntityItem::EntityItem(const EntityItemID& entityItemID) :
|
|||
_lastEditedFromRemoteInRemoteTime(0),
|
||||
_created(UNKNOWN_CREATED_TIME),
|
||||
_changedOnServer(0),
|
||||
_transform(ENTITY_ITEM_DEFAULT_ROTATION,
|
||||
ENTITY_ITEM_DEFAULT_DIMENSIONS,
|
||||
ENTITY_ITEM_DEFAULT_POSITION),
|
||||
_transform(),
|
||||
_glowLevel(ENTITY_ITEM_DEFAULT_GLOW_LEVEL),
|
||||
_localRenderAlpha(ENTITY_ITEM_DEFAULT_LOCAL_RENDER_ALPHA),
|
||||
_density(ENTITY_ITEM_DEFAULT_DENSITY),
|
||||
|
@ -80,16 +78,15 @@ EntityItem::EntityItem(const EntityItemID& entityItemID) :
|
|||
_physicsInfo(nullptr),
|
||||
_simulated(false)
|
||||
{
|
||||
// explicitly set transform parts to set dirty flags used by batch rendering
|
||||
_transform.setTranslation(ENTITY_ITEM_DEFAULT_POSITION);
|
||||
_transform.setRotation(ENTITY_ITEM_DEFAULT_ROTATION);
|
||||
_transform.setScale(ENTITY_ITEM_DEFAULT_DIMENSIONS);
|
||||
quint64 now = usecTimestampNow();
|
||||
_lastSimulated = now;
|
||||
_lastUpdated = now;
|
||||
}
|
||||
|
||||
EntityItem::EntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties) : EntityItem(entityItemID)
|
||||
{
|
||||
setProperties(properties);
|
||||
}
|
||||
|
||||
EntityItem::~EntityItem() {
|
||||
// clear out any left-over actions
|
||||
EntityTree* entityTree = _element ? _element->getTree() : nullptr;
|
||||
|
@ -385,7 +382,7 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
dataAt += encodedID.size();
|
||||
bytesRead += encodedID.size();
|
||||
Q_ASSERT(id == _id);
|
||||
Q_ASSERT(parser.offset() == bytesRead);
|
||||
Q_ASSERT(parser.offset() == (unsigned int) bytesRead);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
@ -400,8 +397,8 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
quint32 type = typeCoder;
|
||||
EntityTypes::EntityType oldType = (EntityTypes::EntityType)type;
|
||||
Q_ASSERT(oldType == _type);
|
||||
Q_ASSERT(parser.offset() == bytesRead);
|
||||
#endif
|
||||
Q_ASSERT(parser.offset() == (unsigned int) bytesRead);
|
||||
#endif
|
||||
|
||||
bool overwriteLocalData = true; // assume the new content overwrites our local data
|
||||
quint64 now = usecTimestampNow();
|
||||
|
@ -417,9 +414,9 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
dataAt += sizeof(createdFromBuffer2);
|
||||
bytesRead += sizeof(createdFromBuffer2);
|
||||
Q_ASSERT(createdFromBuffer2 == createdFromBuffer);
|
||||
Q_ASSERT(parser.offset() == bytesRead);
|
||||
Q_ASSERT(parser.offset() == (unsigned int) bytesRead);
|
||||
}
|
||||
#endif
|
||||
#endif
|
||||
if (_created == UNKNOWN_CREATED_TIME) {
|
||||
// we don't yet have a _created timestamp, so we accept this one
|
||||
createdFromBuffer -= clockSkew;
|
||||
|
@ -458,9 +455,9 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
dataAt += sizeof(lastEditedFromBuffer2);
|
||||
bytesRead += sizeof(lastEditedFromBuffer2);
|
||||
Q_ASSERT(lastEditedFromBuffer2 == lastEditedFromBuffer);
|
||||
Q_ASSERT(parser.offset() == bytesRead);
|
||||
Q_ASSERT(parser.offset() == (unsigned int) bytesRead);
|
||||
}
|
||||
#endif
|
||||
#endif
|
||||
quint64 lastEditedFromBufferAdjusted = lastEditedFromBuffer - clockSkew;
|
||||
if (lastEditedFromBufferAdjusted > now) {
|
||||
lastEditedFromBufferAdjusted = now;
|
||||
|
@ -534,10 +531,10 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
encodedUpdateDelta = updateDeltaCoder; // determine true length
|
||||
dataAt += encodedUpdateDelta.size();
|
||||
bytesRead += encodedUpdateDelta.size();
|
||||
Q_ASSERT(parser.offset() == bytesRead);
|
||||
Q_ASSERT(parser.offset() == (unsigned int) bytesRead);
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
||||
|
||||
if (overwriteLocalData) {
|
||||
_lastUpdated = lastEditedFromBufferAdjusted + updateDelta; // don't adjust for clock skew since we already did that
|
||||
#ifdef WANT_DEBUG
|
||||
|
@ -562,7 +559,7 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
encodedSimulatedDelta = simulatedDeltaCoder; // determine true length
|
||||
dataAt += encodedSimulatedDelta.size();
|
||||
bytesRead += encodedSimulatedDelta.size();
|
||||
Q_ASSERT(parser.offset() == bytesRead);
|
||||
Q_ASSERT(parser.offset() == (unsigned int) bytesRead);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
@ -599,7 +596,7 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
dataAt += propertyFlags.getEncodedLength();
|
||||
bytesRead += propertyFlags.getEncodedLength();
|
||||
Q_ASSERT(propertyFlags2 == propertyFlags);
|
||||
Q_ASSERT(parser.offset() == bytesRead);
|
||||
Q_ASSERT(parser.offset() == (unsigned int)bytesRead);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
@ -610,6 +607,9 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
int bytesRead = parser.offset();
|
||||
#endif
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
const QUuid& myNodeID = nodeList->getSessionUUID();
|
||||
bool weOwnSimulation = _simulationOwner.matchesValidID(myNodeID);
|
||||
|
||||
if (args.bitstreamVersion >= VERSION_ENTITIES_HAVE_SIMULATION_OWNER_AND_ACTIONS_OVER_WIRE) {
|
||||
// pack SimulationOwner and terse update properties near each other
|
||||
|
@ -632,10 +632,8 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
}
|
||||
{ // When we own the simulation we don't accept updates to the entity's transform/velocities
|
||||
// but since we're using macros below we have to temporarily modify overwriteLocalData.
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
bool weOwnIt = _simulationOwner.matchesValidID(nodeList->getSessionUUID());
|
||||
bool oldOverwrite = overwriteLocalData;
|
||||
overwriteLocalData = overwriteLocalData && !weOwnIt;
|
||||
overwriteLocalData = overwriteLocalData && !weOwnSimulation;
|
||||
READ_ENTITY_PROPERTY(PROP_POSITION, glm::vec3, updatePosition);
|
||||
READ_ENTITY_PROPERTY(PROP_ROTATION, glm::quat, updateRotation);
|
||||
READ_ENTITY_PROPERTY(PROP_VELOCITY, glm::vec3, updateVelocity);
|
||||
|
@ -657,6 +655,7 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
READ_ENTITY_PROPERTY(PROP_REGISTRATION_POINT, glm::vec3, setRegistrationPoint);
|
||||
} else {
|
||||
// legacy order of packing here
|
||||
// TODO: purge this logic in a few months from now (2015.07)
|
||||
READ_ENTITY_PROPERTY(PROP_POSITION, glm::vec3, updatePosition);
|
||||
READ_ENTITY_PROPERTY(PROP_DIMENSIONS, glm::vec3, updateDimensions);
|
||||
READ_ENTITY_PROPERTY(PROP_ROTATION, glm::quat, updateRotation);
|
||||
|
@ -702,7 +701,16 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
READ_ENTITY_PROPERTY(PROP_HREF, QString, setHref);
READ_ENTITY_PROPERTY(PROP_DESCRIPTION, QString, setDescription);

READ_ENTITY_PROPERTY(PROP_ACTION_DATA, QByteArray, setActionData);
{ // When we own the simulation we don't accept updates to the entity's actions
// but since we're using macros below we have to temporarily modify overwriteLocalData.
// NOTE: this prevents userB from adding an action to an object1 when UserA
// has simulation ownership of it.
// TODO: figure out how to allow multiple users to update actions simultaneously
bool oldOverwrite = overwriteLocalData;
overwriteLocalData = overwriteLocalData && !weOwnSimulation;
READ_ENTITY_PROPERTY(PROP_ACTION_DATA, QByteArray, setActionData);
overwriteLocalData = oldOverwrite;
}

bytesRead += readEntitySubclassDataFromBuffer(dataAt, (bytesLeftToRead - bytesRead), args,
propertyFlags, overwriteLocalData);
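Because READ_ENTITY_PROPERTY is a macro keyed on overwriteLocalData, the guard above works by saving the flag, masking it while the action data is read, and restoring it afterwards. The same save/mask/restore shape reduced to a sketch, where readActionData() stands in for the macro expansion:

    #include <functional>

    void guardedActionRead(bool& overwriteLocalData, bool weOwnSimulation,
                           const std::function<void(bool)>& readActionData) {
        const bool oldOverwrite = overwriteLocalData;
        overwriteLocalData = overwriteLocalData && !weOwnSimulation;  // ignore remote action data we own
        readActionData(overwriteLocalData);
        overwriteLocalData = oldOverwrite;                            // later reads keep the original policy
    }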
@ -713,7 +721,7 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
// NOTE: we had a bad version of the stream that we added stream data after the subclass. We can attempt to recover
|
||||
// by doing this parsing here... but it's not likely going to fully recover the content.
|
||||
//
|
||||
// TODO: Remove this conde once we've sufficiently migrated content past this damaged version
|
||||
// TODO: Remove this code once we've sufficiently migrated content past this damaged version
|
||||
if (args.bitstreamVersion == VERSION_ENTITIES_HAS_MARKETPLACE_ID_DAMAGED) {
|
||||
READ_ENTITY_PROPERTY(PROP_MARKETPLACE_ID, QString, setMarketplaceID);
|
||||
}
|
||||
|
@ -738,8 +746,6 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
}
|
||||
}
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
const QUuid& myNodeID = nodeList->getSessionUUID();
|
||||
if (overwriteLocalData) {
|
||||
if (!_simulationOwner.matchesValidID(myNodeID)) {
|
||||
|
||||
|
@ -1496,7 +1502,7 @@ bool EntityItem::addAction(EntitySimulation* simulation, EntityActionPointer act
|
|||
|
||||
bool result = addActionInternal(simulation, action);
|
||||
if (!result) {
|
||||
removeAction(simulation, action->getID());
|
||||
removeActionInternal(action->getID());
|
||||
}
|
||||
|
||||
unlock();
|
||||
|
@ -1520,6 +1526,7 @@ bool EntityItem::addActionInternal(EntitySimulation* simulation, EntityActionPoi
|
|||
QByteArray newDataCache = serializeActions(success);
|
||||
if (success) {
|
||||
_allActionsDataCache = newDataCache;
|
||||
_dirtyFlags |= EntityItem::DIRTY_PHYSICS_ACTIVATION;
|
||||
}
|
||||
return success;
|
||||
}
|
||||
|
@ -1537,6 +1544,7 @@ bool EntityItem::updateAction(EntitySimulation* simulation, const QUuid& actionI
|
|||
bool success = action->updateArguments(arguments);
|
||||
if (success) {
|
||||
_allActionsDataCache = serializeActions(success);
|
||||
_dirtyFlags |= EntityItem::DIRTY_PHYSICS_ACTIVATION;
|
||||
} else {
|
||||
qDebug() << "EntityItem::updateAction failed";
|
||||
}
|
||||
|
@ -1572,6 +1580,7 @@ bool EntityItem::removeActionInternal(const QUuid& actionID, EntitySimulation* s
|
|||
|
||||
bool success = true;
|
||||
_allActionsDataCache = serializeActions(success);
|
||||
_dirtyFlags |= EntityItem::DIRTY_PHYSICS_ACTIVATION;
|
||||
return success;
|
||||
}
|
||||
return false;
|
||||
|
@ -1590,6 +1599,7 @@ bool EntityItem::clearActions(EntitySimulation* simulation) {
|
|||
// empty _serializedActions means no actions for the EntityItem
|
||||
_actionsToRemove.clear();
|
||||
_allActionsDataCache.clear();
|
||||
_dirtyFlags |= EntityItem::DIRTY_PHYSICS_ACTIVATION;
|
||||
unlock();
|
||||
return true;
|
||||
}
|
||||
|
|
|
@ -119,7 +119,6 @@ public:
|
|||
DONT_ALLOW_INSTANTIATION // This class can not be instantiated directly
|
||||
|
||||
EntityItem(const EntityItemID& entityItemID);
|
||||
EntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties);
|
||||
virtual ~EntityItem();
|
||||
|
||||
// ID and EntityItemID related methods
|
||||
|
|
|
@ -543,6 +543,9 @@ bool EntityScriptingInterface::actionWorker(const QUuid& entityID,
|
|||
}
|
||||
|
||||
bool success = actor(simulation, entity);
|
||||
if (success) {
|
||||
_entityTree->entityChanged(entity);
|
||||
}
|
||||
_entityTree->unlock();
|
||||
|
||||
// transmit the change
|
||||
|
|
|
@ -772,6 +772,9 @@ int EntityTreeElement::readElementDataFromBuffer(const unsigned char* data, int
|
|||
entityItemID = entityItem->getEntityItemID();
|
||||
_myTree->setContainingElement(entityItemID, this);
|
||||
_myTree->postAddEntity(entityItem);
|
||||
if (entityItem->getCreated() == UNKNOWN_CREATED_TIME) {
|
||||
entityItem->recordCreationTime();
|
||||
}
|
||||
}
|
||||
}
|
||||
// Move the buffer forward to read more entities
|
||||
|
|
|
@ -29,7 +29,7 @@ EntityItemPointer LightEntityItem::factory(const EntityItemID& entityID, const E
|
|||
|
||||
// our non-pure virtual subclass for now...
|
||||
LightEntityItem::LightEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties) :
|
||||
EntityItem(entityItemID, properties)
|
||||
EntityItem(entityItemID)
|
||||
{
|
||||
_type = EntityTypes::Light;
|
||||
|
||||
|
|
|
@ -38,7 +38,6 @@ LineEntityItem::LineEntityItem(const EntityItemID& entityItemID, const EntityIte
|
|||
_points(QVector<glm::vec3>(0))
|
||||
{
|
||||
_type = EntityTypes::Line;
|
||||
_created = properties.getCreated();
|
||||
setProperties(properties);
|
||||
|
||||
|
||||
|
|
|
@ -33,7 +33,7 @@ EntityItemPointer ModelEntityItem::factory(const EntityItemID& entityID, const E
|
|||
}
|
||||
|
||||
ModelEntityItem::ModelEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties) :
|
||||
EntityItem(entityItemID, properties)
|
||||
EntityItem(entityItemID)
|
||||
{
|
||||
_type = EntityTypes::Model;
|
||||
setProperties(properties);
|
||||
|
@ -184,7 +184,7 @@ void ModelEntityItem::cleanupLoadedAnimations() {
|
|||
_loadedAnimations.clear();
|
||||
}
|
||||
|
||||
Animation* ModelEntityItem::getAnimation(const QString& url) {
|
||||
AnimationPointer ModelEntityItem::getAnimation(const QString& url) {
|
||||
AnimationPointer animation;
|
||||
|
||||
// if we don't already have this model then create it and initialize it
|
||||
|
@ -194,7 +194,7 @@ Animation* ModelEntityItem::getAnimation(const QString& url) {
|
|||
} else {
|
||||
animation = _loadedAnimations[url];
|
||||
}
|
||||
return animation.data();
|
||||
return animation;
|
||||
}
|
||||
|
||||
void ModelEntityItem::mapJoints(const QStringList& modelJointNames) {
|
||||
|
@ -203,9 +203,8 @@ void ModelEntityItem::mapJoints(const QStringList& modelJointNames) {
|
|||
return;
|
||||
}
|
||||
|
||||
Animation* myAnimation = getAnimation(_animationURL);
|
||||
|
||||
if (!_jointMappingCompleted) {
|
||||
AnimationPointer myAnimation = getAnimation(_animationURL);
|
||||
if (myAnimation && myAnimation->isLoaded()) {
|
||||
QStringList animationJointNames = myAnimation->getJointNames();
|
||||
|
||||
if (modelJointNames.size() > 0 && animationJointNames.size() > 0) {
|
||||
|
@ -220,8 +219,12 @@ void ModelEntityItem::mapJoints(const QStringList& modelJointNames) {

QVector<glm::quat> ModelEntityItem::getAnimationFrame() {
QVector<glm::quat> frameData;
if (hasAnimation() && _jointMappingCompleted) {
Animation* myAnimation = getAnimation(_animationURL);
if (!hasAnimation() || !_jointMappingCompleted) {
return frameData;
}

AnimationPointer myAnimation = getAnimation(_animationURL);
if (myAnimation && myAnimation->isLoaded()) {
QVector<FBXAnimationFrame> frames = myAnimation->getFrames();
int frameCount = frames.size();
if (frameCount > 0) {
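getAnimation() now hands back an AnimationPointer (the shared pointer held by the animation cache) instead of a raw Animation*, so the caller shares ownership for as long as it needs the frames. A minimal caller-side sketch with a stand-in Animation type:

    #include <QSharedPointer>

    struct Animation { bool isLoaded() const { return true; } };   // stand-in for the cached resource
    using AnimationPointer = QSharedPointer<Animation>;

    // Holding the shared pointer keeps the animation alive while its frames are read,
    // even if the cache drops its own reference in the meantime.
    void useAnimation(const AnimationPointer& animation) {
        if (animation && animation->isLoaded()) {
            // ... read frames ...
        }
    }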
@ -141,7 +141,7 @@ protected:
|
|||
bool _jointMappingCompleted;
|
||||
QVector<int> _jointMapping;
|
||||
|
||||
static Animation* getAnimation(const QString& url);
|
||||
static AnimationPointer getAnimation(const QString& url);
|
||||
static QMap<QString, AnimationPointer> _loadedAnimations;
|
||||
static AnimationCache _animationCache;
|
||||
|
||||
|
|
|
@ -61,7 +61,7 @@ EntityItemPointer ParticleEffectEntityItem::factory(const EntityItemID& entityID
|
|||
|
||||
// our non-pure virtual subclass for now...
|
||||
ParticleEffectEntityItem::ParticleEffectEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties) :
|
||||
EntityItem(entityItemID, properties),
|
||||
EntityItem(entityItemID),
|
||||
_maxParticles(DEFAULT_MAX_PARTICLES),
|
||||
_lifespan(DEFAULT_LIFESPAN),
|
||||
_emitRate(DEFAULT_EMIT_RATE),
|
||||
|
|
|
@ -52,7 +52,6 @@ PolyVoxEntityItem::PolyVoxEntityItem(const EntityItemID& entityItemID, const Ent
|
|||
_voxelSurfaceStyle(PolyVoxEntityItem::DEFAULT_VOXEL_SURFACE_STYLE)
|
||||
{
|
||||
_type = EntityTypes::PolyVox;
|
||||
_created = properties.getCreated();
|
||||
setProperties(properties);
|
||||
}
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ EntityItemPointer SphereEntityItem::factory(const EntityItemID& entityID, const
|
|||
|
||||
// our non-pure virtual subclass for now...
|
||||
SphereEntityItem::SphereEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties) :
|
||||
EntityItem(entityItemID, properties)
|
||||
EntityItem(entityItemID)
|
||||
{
|
||||
_type = EntityTypes::Sphere;
|
||||
setProperties(properties);
|
||||
|
|
|
@ -37,7 +37,6 @@ TextEntityItem::TextEntityItem(const EntityItemID& entityItemID, const EntityIte
|
|||
EntityItem(entityItemID)
|
||||
{
|
||||
_type = EntityTypes::Text;
|
||||
_created = properties.getCreated();
|
||||
setProperties(properties);
|
||||
}
|
||||
|
||||
|
|
|
@ -30,7 +30,6 @@ WebEntityItem::WebEntityItem(const EntityItemID& entityItemID, const EntityItemP
|
|||
EntityItem(entityItemID)
|
||||
{
|
||||
_type = EntityTypes::Web;
|
||||
_created = properties.getCreated();
|
||||
setProperties(properties);
|
||||
}
|
||||
|
||||
|
@ -149,4 +148,4 @@ void WebEntityItem::setSourceUrl(const QString& value) {
|
|||
}
|
||||
}
|
||||
|
||||
const QString& WebEntityItem::getSourceUrl() const { return _sourceUrl; }
|
||||
const QString& WebEntityItem::getSourceUrl() const { return _sourceUrl; }
|
||||
|
|
|
@ -37,7 +37,6 @@ ZoneEntityItem::ZoneEntityItem(const EntityItemID& entityItemID, const EntityIte
|
|||
EntityItem(entityItemID)
|
||||
{
|
||||
_type = EntityTypes::Zone;
|
||||
_created = properties.getCreated();
|
||||
|
||||
_keyLightColor[RED_INDEX] = DEFAULT_KEYLIGHT_COLOR.red;
|
||||
_keyLightColor[GREEN_INDEX] = DEFAULT_KEYLIGHT_COLOR.green;
|
||||
|
|
|
@ -93,7 +93,16 @@ QByteArray FSTReader::writeMapping(const QVariantHash& mapping) {
for (auto key : PREFERED_ORDER) {
auto it = mapping.find(key);
if (it != mapping.constEnd()) {
writeVariant(buffer, it);
if (key == FREE_JOINT_FIELD) { // writeVariant does not handle strings added using insertMulti.
for (auto multi : mapping.values(key)) {
buffer.write(key.toUtf8());
buffer.write(" = ");
buffer.write(multi.toByteArray());
buffer.write("\n");
}
} else {
writeVariant(buffer, it);
}
}
}
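The new branch exists because freeJoint entries are stored with insertMulti(), and writeVariant() only emits the single value that find() returns; iterating values(key) writes every occurrence. A hedged sketch of that behaviour, with an example field name and values:

    #include <QVariant>
    #include <QDebug>

    void writeFreeJoints() {
        QVariantHash mapping;
        mapping.insertMulti("freeJoint", "LeftArm");   // several entries stored under one key
        mapping.insertMulti("freeJoint", "RightArm");

        // values() returns every entry for the key, which is why the writer loops
        // instead of emitting a single "freeJoint = ..." line.
        for (const QVariant& joint : mapping.values("freeJoint")) {
            qDebug() << "freeJoint =" << joint.toString();
        }
    }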
|
||||
|
|
|
@ -102,9 +102,10 @@ void Batch::drawIndexedInstanced(uint32 nbInstances, Primitive primitiveType, ui
_params.push_back(nbInstances);
}

void Batch::clearFramebuffer(Framebuffer::Masks targets, const Vec4& color, float depth, int stencil) {
void Batch::clearFramebuffer(Framebuffer::Masks targets, const Vec4& color, float depth, int stencil, bool enableScissor) {
ADD_COMMAND(clearFramebuffer);

_params.push_back(enableScissor);
_params.push_back(stencil);
_params.push_back(depth);
_params.push_back(color.w);
@ -114,20 +115,20 @@ void Batch::clearFramebuffer(Framebuffer::Masks targets, const Vec4& color, floa
_params.push_back(targets);
}

void Batch::clearColorFramebuffer(Framebuffer::Masks targets, const Vec4& color) {
clearFramebuffer(targets & Framebuffer::BUFFER_COLORS, color, 1.0f, 0);
void Batch::clearColorFramebuffer(Framebuffer::Masks targets, const Vec4& color, bool enableScissor) {
clearFramebuffer(targets & Framebuffer::BUFFER_COLORS, color, 1.0f, 0, enableScissor);
}

void Batch::clearDepthFramebuffer(float depth) {
clearFramebuffer(Framebuffer::BUFFER_DEPTH, Vec4(0.0f), depth, 0);
void Batch::clearDepthFramebuffer(float depth, bool enableScissor) {
clearFramebuffer(Framebuffer::BUFFER_DEPTH, Vec4(0.0f), depth, 0, enableScissor);
}

void Batch::clearStencilFramebuffer(int stencil) {
clearFramebuffer(Framebuffer::BUFFER_STENCIL, Vec4(0.0f), 1.0f, stencil);
void Batch::clearStencilFramebuffer(int stencil, bool enableScissor) {
clearFramebuffer(Framebuffer::BUFFER_STENCIL, Vec4(0.0f), 1.0f, stencil, enableScissor);
}

void Batch::clearDepthStencilFramebuffer(float depth, int stencil) {
clearFramebuffer(Framebuffer::BUFFER_DEPTHSTENCIL, Vec4(0.0f), depth, stencil);
void Batch::clearDepthStencilFramebuffer(float depth, int stencil, bool enableScissor) {
clearFramebuffer(Framebuffer::BUFFER_DEPTHSTENCIL, Vec4(0.0f), depth, stencil, enableScissor);
}
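Every clear shortcut now forwards an optional enableScissor flag, so a clear can be confined to the current scissor rectangle instead of touching the whole target. A hedged usage fragment, assuming a batch, a bound framebuffer, and glm-typed vectors standing in for Vec4/Vec4i:

    // Clear only the lower-left 256x256 pixels of the bound framebuffer to opaque black.
    batch.setFramebuffer(framebuffer);
    batch.setStateScissorRect(glm::ivec4(0, 0, 256, 256));        // x, y, width, height in pixels
    batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLORS,
                                glm::vec4(0.0f, 0.0f, 0.0f, 1.0f),
                                true /* enableScissor */);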
||||
|
@ -212,6 +213,14 @@ void Batch::setStateBlendFactor(const Vec4& factor) {
|
|||
_params.push_back(factor.w);
|
||||
}
|
||||
|
||||
void Batch::setStateScissorRect(const Vec4i& rect) {
|
||||
ADD_COMMAND(setStateScissorRect);
|
||||
|
||||
_params.push_back(rect.x);
|
||||
_params.push_back(rect.y);
|
||||
_params.push_back(rect.z);
|
||||
_params.push_back(rect.w);
|
||||
}
|
||||
|
||||
void Batch::setUniformBuffer(uint32 slot, const BufferPointer& buffer, Offset offset, Offset size) {
|
||||
ADD_COMMAND(setUniformBuffer);
|
||||
|
@ -227,15 +236,15 @@ void Batch::setUniformBuffer(uint32 slot, const BufferView& view) {
|
|||
}
|
||||
|
||||
|
||||
void Batch::setUniformTexture(uint32 slot, const TexturePointer& texture) {
|
||||
ADD_COMMAND(setUniformTexture);
|
||||
void Batch::setResourceTexture(uint32 slot, const TexturePointer& texture) {
|
||||
ADD_COMMAND(setResourceTexture);
|
||||
|
||||
_params.push_back(_textures.cache(texture));
|
||||
_params.push_back(slot);
|
||||
}
|
||||
|
||||
void Batch::setUniformTexture(uint32 slot, const TextureView& view) {
|
||||
setUniformTexture(slot, view._texture);
|
||||
void Batch::setResourceTexture(uint32 slot, const TextureView& view) {
|
||||
setResourceTexture(slot, view._texture);
|
||||
}
|
||||
|
||||
void Batch::setFramebuffer(const FramebufferPointer& framebuffer) {
|
||||
|
|
|
@ -40,11 +40,8 @@
|
|||
namespace gpu {
|
||||
|
||||
enum ReservedSlot {
|
||||
/* TRANSFORM_OBJECT_SLOT = 6,
|
||||
TRANSFORM_OBJECT_SLOT = 6,
|
||||
TRANSFORM_CAMERA_SLOT = 7,
|
||||
*/
|
||||
TRANSFORM_OBJECT_SLOT = 1,
|
||||
TRANSFORM_CAMERA_SLOT = 2,
|
||||
};
|
||||
|
||||
class Batch {
|
||||
|
@ -65,11 +62,12 @@ public:
|
|||
|
||||
// Clear framebuffer layers
|
||||
// Targets can be any of the render buffers contained in the Framebuffer
|
||||
void clearFramebuffer(Framebuffer::Masks targets, const Vec4& color, float depth, int stencil);
|
||||
void clearColorFramebuffer(Framebuffer::Masks targets, const Vec4& color); // not a command, just a shortcut for clearFramebuffer, mask out targets to make sure it touches only color targets
|
||||
void clearDepthFramebuffer(float depth); // not a command, just a shortcut for clearFramebuffer, it touches only depth target
|
||||
void clearStencilFramebuffer(int stencil); // not a command, just a shortcut for clearFramebuffer, it touches only stencil target
|
||||
void clearDepthStencilFramebuffer(float depth, int stencil); // not a command, just a shortcut for clearFramebuffer, it touches depth and stencil target
|
||||
// Optionally the scissor test can be enabled locally for this command and to restrict the clearing command to the pixels contained in the scissor rectangle
|
||||
void clearFramebuffer(Framebuffer::Masks targets, const Vec4& color, float depth, int stencil, bool enableScissor = false);
|
||||
void clearColorFramebuffer(Framebuffer::Masks targets, const Vec4& color, bool enableScissor = false); // not a command, just a shortcut for clearFramebuffer, mask out targets to make sure it touches only color targets
|
||||
void clearDepthFramebuffer(float depth, bool enableScissor = false); // not a command, just a shortcut for clearFramebuffer, it touches only depth target
|
||||
void clearStencilFramebuffer(int stencil, bool enableScissor = false); // not a command, just a shortcut for clearFramebuffer, it touches only stencil target
|
||||
void clearDepthStencilFramebuffer(float depth, int stencil, bool enableScissor = false); // not a command, just a shortcut for clearFramebuffer, it touches depth and stencil target
|
||||
|
||||
// Input Stage
|
||||
// InputFormat
|
||||
|
@ -89,22 +87,27 @@ public:
// Then by the inverse of the ViewTransform from world space to eye space
// finaly projected into the clip space by the projection transform
// WARNING: ViewTransform transform from eye space to world space, its inverse is composed
// with the ModelTransformu to create the equivalent of the glModelViewMatrix
// with the ModelTransform to create the equivalent of the gl ModelViewMatrix
void setModelTransform(const Transform& model);
void setViewTransform(const Transform& view);
void setProjectionTransform(const Mat4& proj);
void setViewportTransform(const Vec4i& viewport); // Viewport is xy = low left corner in the framebuffer, zw = width height of the viewport
// Viewport is xy = low left corner in framebuffer, zw = width height of the viewport, expressed in pixels
void setViewportTransform(const Vec4i& viewport);

// Pipeline Stage
void setPipeline(const PipelinePointer& pipeline);

void setStateBlendFactor(const Vec4& factor);

// Set the Scissor rect
// the rect coordinates are xy for the low left corner of the rect and zw for the width and height of the rect, expressed in pixels
void setStateScissorRect(const Vec4i& rect);

void setUniformBuffer(uint32 slot, const BufferPointer& buffer, Offset offset, Offset size);
void setUniformBuffer(uint32 slot, const BufferView& view); // not a command, just a shortcut from a BufferView

void setUniformTexture(uint32 slot, const TexturePointer& view);
void setUniformTexture(uint32 slot, const TextureView& view); // not a command, just a shortcut from a TextureView
void setResourceTexture(uint32 slot, const TexturePointer& view);
void setResourceTexture(uint32 slot, const TextureView& view); // not a command, just a shortcut from a TextureView

// Framebuffer Stage
void setFramebuffer(const FramebufferPointer& framebuffer);
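The reworded comments describe the transform stage: vertices go through the model transform, then the inverse of the view transform, then the projection, with the viewport given in pixels as (x, y, width, height). A hedged fragment showing the stage being set up for one draw, where objectTransform, cameraTransform, and projectionMatrix are placeholders:

    batch.setModelTransform(objectTransform);                  // object -> world
    batch.setViewTransform(cameraTransform);                   // eye -> world; the backend applies its inverse
    batch.setProjectionTransform(projectionMatrix);            // eye -> clip
    batch.setViewportTransform(glm::ivec4(0, 0, 1920, 1080));  // low-left corner plus width and height, in pixels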
@ -117,7 +120,7 @@ public:
|
|||
// TODO: As long as we have gl calls explicitely issued from interface
|
||||
// code, we need to be able to record and batch these calls. THe long
|
||||
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API
|
||||
// For now, instead of calling the raw glCall, use the equivalent call on the batch so the call is beeing recorded
|
||||
// For now, instead of calling the raw gl Call, use the equivalent call on the batch so the call is beeing recorded
|
||||
// THe implementation of these functions is in GLBackend.cpp
|
||||
|
||||
void _glEnable(GLenum cap);
|
||||
|
@ -176,9 +179,10 @@ public:
|
|||
|
||||
COMMAND_setPipeline,
|
||||
COMMAND_setStateBlendFactor,
|
||||
COMMAND_setStateScissorRect,
|
||||
|
||||
COMMAND_setUniformBuffer,
|
||||
COMMAND_setUniformTexture,
|
||||
COMMAND_setResourceTexture,
|
||||
|
||||
COMMAND_setFramebuffer,
|
||||
|
||||
|
|
22 libraries/gpu/src/gpu/DrawColoredTexture.slf Executable file
|
@ -0,0 +1,22 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// Draw texture 0 fetched at texcoord.xy, Blend with color uniform
|
||||
//
|
||||
// Created by Sam Gateau on 7/12/2015
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
|
||||
uniform sampler2D colorMap;
|
||||
uniform vec4 color;
|
||||
|
||||
varying vec2 varTexcoord;
|
||||
|
||||
void main(void) {
|
||||
gl_FragColor = texture2D(colorMap, varTexcoord) * color;
|
||||
}
|
39
libraries/gpu/src/gpu/DrawTexcoordRectTransformUnitQuad.slv
Executable file
39
libraries/gpu/src/gpu/DrawTexcoordRectTransformUnitQuad.slv
Executable file
|
@ -0,0 +1,39 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// Draw and transform the unit quad [-1,-1 -> 1,1]
|
||||
// Transform the normalized texcoords [0, 1] to be in the range [texcoordRect.xy, texcoordRect.xy + texcoordRect.zw]
|
||||
// Simply draw a Triangle_strip of 2 triangles, no input buffers or index buffer needed
|
||||
//
|
||||
// Created by Sam Gateau on 6/22/2015
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include gpu/Transform.slh@>
|
||||
|
||||
<$declareStandardTransform()$>
|
||||
|
||||
uniform vec4 texcoordRect;
|
||||
|
||||
varying vec2 varTexcoord;
|
||||
|
||||
void main(void) {
|
||||
const vec4 UNIT_QUAD[4] = vec4[4](
|
||||
vec4(-1.0, -1.0, 0.0, 1.0),
|
||||
vec4(1.0, -1.0, 0.0, 1.0),
|
||||
vec4(-1.0, 1.0, 0.0, 1.0),
|
||||
vec4(1.0, 1.0, 0.0, 1.0)
|
||||
);
|
||||
vec4 pos = UNIT_QUAD[gl_VertexID];
|
||||
|
||||
// standard transform
|
||||
TransformCamera cam = getTransformCamera();
|
||||
TransformObject obj = getTransformObject();
|
||||
<$transformModelToClipPos(cam, obj, pos, gl_Position)$>
|
||||
|
||||
varTexcoord = ((pos.xy + 1) * 0.5) * texcoordRect.zw + texcoordRect.xy;
|
||||
}
|
|
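This vertex shader builds the unit quad as a two-triangle strip from gl_VertexID and remaps the quad's normalized coordinates into the sub-rectangle given by the texcoordRect uniform (xy = origin, zw = size). The same mapping evaluated on the CPU, as a sanity check of the formula:

    #include <glm/glm.hpp>

    // For a corner position in [-1, 1], reproduce varTexcoord = ((pos + 1) * 0.5) * rect.zw + rect.xy.
    glm::vec2 texcoordFor(const glm::vec2& pos, const glm::vec4& texcoordRect) {
        return ((pos + 1.0f) * 0.5f) * glm::vec2(texcoordRect.z, texcoordRect.w)
               + glm::vec2(texcoordRect.x, texcoordRect.y);
    }
    // e.g. texcoordFor({1.0f, 1.0f}, {0.25f, 0.25f, 0.5f, 0.5f}) == {0.75f, 0.75f}: the rect's upper-right corner.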
@ -33,9 +33,10 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
|
|||
|
||||
(&::gpu::GLBackend::do_setPipeline),
|
||||
(&::gpu::GLBackend::do_setStateBlendFactor),
|
||||
(&::gpu::GLBackend::do_setStateScissorRect),
|
||||
|
||||
(&::gpu::GLBackend::do_setUniformBuffer),
|
||||
(&::gpu::GLBackend::do_setUniformTexture),
|
||||
(&::gpu::GLBackend::do_setResourceTexture),
|
||||
|
||||
(&::gpu::GLBackend::do_setFramebuffer),
|
||||
|
||||
|
@ -87,10 +88,12 @@ GLBackend::GLBackend() :
|
|||
_pipeline(),
|
||||
_output()
|
||||
{
|
||||
initInput();
|
||||
initTransform();
|
||||
}
|
||||
|
||||
GLBackend::~GLBackend() {
|
||||
killInput();
|
||||
killTransform();
|
||||
}
|
||||
|
||||
|
@ -204,7 +207,6 @@ void GLBackend::do_drawInstanced(Batch& batch, uint32 paramOffset) {
|
|||
GLenum mode = _primitiveToGLmode[primitiveType];
|
||||
uint32 numVertices = batch._params[paramOffset + 2]._uint;
|
||||
uint32 startVertex = batch._params[paramOffset + 1]._uint;
|
||||
uint32 startInstance = batch._params[paramOffset + 0]._uint;
|
||||
|
||||
glDrawArraysInstancedARB(mode, startVertex, numVertices, numInstances);
|
||||
(void) CHECK_GL_ERROR();
|
||||
|
@ -216,17 +218,18 @@ void GLBackend::do_drawIndexedInstanced(Batch& batch, uint32 paramOffset) {
|
|||
|
||||
void GLBackend::do_clearFramebuffer(Batch& batch, uint32 paramOffset) {
|
||||
|
||||
uint32 masks = batch._params[paramOffset + 6]._uint;
|
||||
uint32 masks = batch._params[paramOffset + 7]._uint;
|
||||
Vec4 color;
|
||||
color.x = batch._params[paramOffset + 5]._float;
|
||||
color.y = batch._params[paramOffset + 4]._float;
|
||||
color.z = batch._params[paramOffset + 3]._float;
|
||||
color.w = batch._params[paramOffset + 2]._float;
|
||||
float depth = batch._params[paramOffset + 1]._float;
|
||||
int stencil = batch._params[paramOffset + 0]._float;
|
||||
color.x = batch._params[paramOffset + 6]._float;
|
||||
color.y = batch._params[paramOffset + 5]._float;
|
||||
color.z = batch._params[paramOffset + 4]._float;
|
||||
color.w = batch._params[paramOffset + 3]._float;
|
||||
float depth = batch._params[paramOffset + 2]._float;
|
||||
int stencil = batch._params[paramOffset + 1]._int;
|
||||
int useScissor = batch._params[paramOffset + 0]._int;
|
||||
|
||||
GLuint glmask = 0;
|
||||
if (masks & Framebuffer::BUFFER_DEPTH) {
|
||||
if (masks & Framebuffer::BUFFER_STENCIL) {
|
||||
glClearStencil(stencil);
|
||||
glmask |= GL_STENCIL_BUFFER_BIT;
|
||||
}
|
||||
|
@ -238,7 +241,7 @@ void GLBackend::do_clearFramebuffer(Batch& batch, uint32 paramOffset) {
|
|||
|
||||
std::vector<GLenum> drawBuffers;
|
||||
if (masks & Framebuffer::BUFFER_COLORS) {
|
||||
for (int i = 0; i < Framebuffer::MAX_NUM_RENDER_BUFFERS; i++) {
|
||||
for (unsigned int i = 0; i < Framebuffer::MAX_NUM_RENDER_BUFFERS; i++) {
|
||||
if (masks & (1 << i)) {
|
||||
drawBuffers.push_back(GL_COLOR_ATTACHMENT0 + i);
|
||||
}
|
||||
|
@ -251,8 +254,19 @@ void GLBackend::do_clearFramebuffer(Batch& batch, uint32 paramOffset) {
|
|||
}
|
||||
}
|
||||
|
||||
// Apply scissor if needed and if not already on
|
||||
bool doEnableScissor = (useScissor && (!_pipeline._stateCache.scissorEnable));
|
||||
if (doEnableScissor) {
|
||||
glEnable(GL_SCISSOR_TEST);
|
||||
}
|
||||
|
||||
glClear(glmask);
|
||||
|
||||
// Restore scissor if needed
|
||||
if (doEnableScissor) {
|
||||
glDisable(GL_SCISSOR_TEST);
|
||||
}
|
||||
|
||||
// Restore the color draw buffers only if a frmaebuffer is bound
|
||||
if (_output._framebuffer && !drawBuffers.empty()) {
|
||||
auto glFramebuffer = syncGPUObject(*_output._framebuffer);
|
||||
|
|
|
@ -228,7 +228,21 @@ public:

void do_setStateColorWriteMask(uint32 mask);

// Repporting stats of the context
class Stats {
public:
int _ISNumFormatChanges = 0;
int _ISNumInputBufferChanges = 0;
int _ISNumIndexBufferChanges = 0;

Stats() {}
Stats(const Stats& stats) = default;
};

void getStats(Stats& stats) const { stats = _stats; }

protected:
Stats _stats;

// Draw Stage
void do_draw(Batch& batch, uint32 paramOffset);
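The Stats block counts input-stage changes (vertex format, vertex buffers, index buffer) so a frame's rebinding cost can be inspected. A hedged fragment reading the counters, assuming access to a GLBackend instance named backend and Qt logging:

    #include <QDebug>

    gpu::GLBackend::Stats frameStats;
    backend.getStats(frameStats);   // copies the counters out by reference
    qDebug() << "format changes:" << frameStats._ISNumFormatChanges
             << "buffer changes:" << frameStats._ISNumInputBufferChanges
             << "index changes:" << frameStats._ISNumIndexBufferChanges;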
@ -242,12 +256,13 @@ protected:
|
|||
void do_setInputFormat(Batch& batch, uint32 paramOffset);
|
||||
void do_setInputBuffer(Batch& batch, uint32 paramOffset);
|
||||
void do_setIndexBuffer(Batch& batch, uint32 paramOffset);
|
||||
|
||||
// Synchronize the state cache of this Backend with the actual real state of the GL Context
|
||||
|
||||
void initInput();
|
||||
void killInput();
|
||||
void syncInputStateCache();
|
||||
void updateInput();
|
||||
struct InputStageState {
|
||||
bool _invalidFormat;
|
||||
bool _invalidFormat = true;
|
||||
Stream::FormatPointer _format;
|
||||
|
||||
typedef std::bitset<MAX_NUM_INPUT_BUFFERS> BuffersState;
|
||||
|
@ -256,6 +271,7 @@ protected:
|
|||
Buffers _buffers;
|
||||
Offsets _bufferOffsets;
|
||||
Offsets _bufferStrides;
|
||||
std::vector<GLuint> _bufferVBOs;
|
||||
|
||||
BufferPointer _indexBuffer;
|
||||
Offset _indexBufferOffset;
|
||||
|
@ -264,6 +280,8 @@ protected:
|
|||
typedef std::bitset<MAX_NUM_ATTRIBUTES> ActivationCache;
|
||||
ActivationCache _attributeActivation;
|
||||
|
||||
GLuint _defaultVAO;
|
||||
|
||||
InputStageState() :
|
||||
_invalidFormat(true),
|
||||
_format(0),
|
||||
|
@ -271,10 +289,12 @@ protected:
|
|||
_buffers(_buffersState.size(), BufferPointer(0)),
|
||||
_bufferOffsets(_buffersState.size(), 0),
|
||||
_bufferStrides(_buffersState.size(), 0),
|
||||
_bufferVBOs(_buffersState.size(), 0),
|
||||
_indexBuffer(0),
|
||||
_indexBufferOffset(0),
|
||||
_indexBufferType(UINT32),
|
||||
_attributeActivation(0)
|
||||
_attributeActivation(0),
|
||||
_defaultVAO(0)
|
||||
{}
|
||||
} _input;
|
||||
|
||||
|
@ -321,7 +341,7 @@ protected:
|
|||
|
||||
// Uniform Stage
|
||||
void do_setUniformBuffer(Batch& batch, uint32 paramOffset);
|
||||
void do_setUniformTexture(Batch& batch, uint32 paramOffset);
|
||||
void do_setResourceTexture(Batch& batch, uint32 paramOffset);
|
||||
|
||||
struct UniformStageState {
|
||||
|
||||
|
@ -330,6 +350,7 @@ protected:
|
|||
// Pipeline Stage
|
||||
void do_setPipeline(Batch& batch, uint32 paramOffset);
|
||||
void do_setStateBlendFactor(Batch& batch, uint32 paramOffset);
|
||||
void do_setStateScissorRect(Batch& batch, uint32 paramOffset);
|
||||
|
||||
// Standard update pipeline check that the current Program and current State or good to go for a
|
||||
void updatePipeline();
|
||||
|
|
|
@ -28,13 +28,39 @@ void GLBackend::do_setInputBuffer(Batch& batch, uint32 paramOffset) {
uint32 channel = batch._params[paramOffset + 3]._uint;

if (channel < getNumInputBuffers()) {
_input._buffers[channel] = buffer;
_input._bufferOffsets[channel] = offset;
_input._bufferStrides[channel] = stride;
_input._buffersState.set(channel);
bool isModified = false;
if (_input._buffers[channel] != buffer) {
_input._buffers[channel] = buffer;

GLuint vbo = 0;
if (buffer) {
vbo = getBufferID((*buffer));
}
_input._bufferVBOs[channel] = vbo;

isModified = true;
}

if (_input._bufferOffsets[channel] != offset) {
_input._bufferOffsets[channel] = offset;
isModified = true;
}

if (_input._bufferStrides[channel] != stride) {
_input._bufferStrides[channel] = stride;
isModified = true;
}

if (isModified) {
_input._buffersState.set(channel);
}
}
}
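do_setInputBuffer() now caches the GL buffer id per channel and only marks the channel dirty when the buffer, offset, or stride actually changed, so unchanged channels are not rebound later in updateInput(). The pattern in isolation, with stand-in state:

    #include <cstddef>
    #include <cstdint>

    struct Channel { uint32_t vbo = 0; size_t offset = 0; size_t stride = 0; bool dirty = false; };

    // Update a channel and flag it only when something really changed.
    void setChannel(Channel& ch, uint32_t vbo, size_t offset, size_t stride) {
        bool modified = false;
        if (ch.vbo != vbo)       { ch.vbo = vbo;       modified = true; }
        if (ch.offset != offset) { ch.offset = offset; modified = true; }
        if (ch.stride != stride) { ch.stride = stride; modified = true; }
        if (modified) {
            ch.dirty = true;   // only dirty channels get rebound on the next updateInput()
        }
    }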
|
||||
#define NOT_SUPPORT_VAO
|
||||
#if defined(SUPPORT_VAO)
|
||||
#else
|
||||
|
||||
#define SUPPORT_LEGACY_OPENGL
|
||||
#if defined(SUPPORT_LEGACY_OPENGL)
|
||||
static const int NUM_CLASSIC_ATTRIBS = Stream::TANGENT;
|
||||
|
@ -45,24 +71,120 @@ static const GLenum attributeSlotToClassicAttribName[NUM_CLASSIC_ATTRIBS] = {
|
|||
GL_TEXTURE_COORD_ARRAY
|
||||
};
|
||||
#endif
|
||||
#endif
|
||||
|
||||
void GLBackend::initInput() {
|
||||
#if defined(SUPPORT_VAO)
|
||||
if(!_input._defaultVAO) {
|
||||
glGenVertexArrays(1, &_input._defaultVAO);
|
||||
}
|
||||
glBindVertexArray(_input._defaultVAO);
|
||||
(void) CHECK_GL_ERROR();
|
||||
#endif
|
||||
}
|
||||
|
||||
void GLBackend::killInput() {
|
||||
#if defined(SUPPORT_VAO)
|
||||
glBindVertexArray(0);
|
||||
if(_input._defaultVAO) {
|
||||
glDeleteVertexArrays(1, &_input._defaultVAO);
|
||||
}
|
||||
(void) CHECK_GL_ERROR();
|
||||
#endif
|
||||
}
|
||||
|
||||
void GLBackend::syncInputStateCache() {
|
||||
#if defined(SUPPORT_VAO)
|
||||
for (int i = 0; i < NUM_CLASSIC_ATTRIBS; i++) {
|
||||
_input._attributeActivation[i] = glIsEnabled(attributeSlotToClassicAttribName[i]);
|
||||
}
|
||||
//_input._defaultVAO
|
||||
glBindVertexArray(_input._defaultVAO);
|
||||
#else
|
||||
int i = 0;
|
||||
#if defined(SUPPORT_LEGACY_OPENGL)
|
||||
for (; i < NUM_CLASSIC_ATTRIBS; i++) {
|
||||
_input._attributeActivation[i] = glIsEnabled(attributeSlotToClassicAttribName[i]);
|
||||
}
|
||||
#endif
|
||||
for (; i < _input._attributeActivation.size(); i++) {
|
||||
GLint active = 0;
|
||||
glGetVertexAttribiv(i, GL_VERTEX_ATTRIB_ARRAY_ENABLED, &active);
|
||||
_input._attributeActivation[i] = active;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void GLBackend::updateInput() {
|
||||
#if defined(SUPPORT_VAO)
|
||||
if (_input._invalidFormat) {
|
||||
|
||||
InputStageState::ActivationCache newActivation;
|
||||
|
||||
// Assign the vertex format required
|
||||
if (_input._format) {
|
||||
for (auto& it : _input._format->getAttributes()) {
|
||||
const Stream::Attribute& attrib = (it).second;
|
||||
newActivation.set(attrib._slot);
|
||||
glVertexAttribFormat(
|
||||
attrib._slot,
|
||||
attrib._element.getDimensionCount(),
|
||||
_elementTypeToGLType[attrib._element.getType()],
|
||||
attrib._element.isNormalized(),
|
||||
attrib._offset);
|
||||
}
|
||||
(void) CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
// Manage Activation what was and what is expected now
|
||||
for (int i = 0; i < newActivation.size(); i++) {
|
||||
bool newState = newActivation[i];
|
||||
if (newState != _input._attributeActivation[i]) {
|
||||
if (newState) {
|
||||
glEnableVertexAttribArray(i);
|
||||
} else {
|
||||
glDisableVertexAttribArray(i);
|
||||
}
|
||||
_input._attributeActivation.flip(i);
|
||||
}
|
||||
}
|
||||
(void) CHECK_GL_ERROR();
|
||||
|
||||
_input._invalidFormat = false;
|
||||
_stats._ISNumFormatChanges++;
|
||||
}
|
||||
|
||||
if (_input._buffersState.any()) {
|
||||
int numBuffers = _input._buffers.size();
|
||||
auto buffer = _input._buffers.data();
|
||||
auto vbo = _input._bufferVBOs.data();
|
||||
auto offset = _input._bufferOffsets.data();
|
||||
auto stride = _input._bufferStrides.data();
|
||||
|
||||
for (int bufferNum = 0; bufferNum < numBuffers; bufferNum++) {
|
||||
if (_input._buffersState.test(bufferNum)) {
|
||||
glBindVertexBuffer(bufferNum, (*vbo), (*offset), (*stride));
|
||||
}
|
||||
buffer++;
|
||||
vbo++;
|
||||
offset++;
|
||||
stride++;
|
||||
}
|
||||
_input._buffersState.reset();
|
||||
(void) CHECK_GL_ERROR();
|
||||
}
|
||||
#else
|
||||
if (_input._invalidFormat || _input._buffersState.any()) {
|
||||
|
||||
if (_input._invalidFormat) {
|
||||
InputStageState::ActivationCache newActivation;
|
||||
|
||||
_stats._ISNumFormatChanges++;
|
||||
|
||||
// Check expected activation
|
||||
if (_input._format) {
|
||||
const Stream::Format::AttributeMap& attributes = _input._format->getAttributes();
|
||||
for (Stream::Format::AttributeMap::const_iterator it = attributes.begin(); it != attributes.end(); it++) {
|
||||
const Stream::Attribute& attrib = (*it).second;
|
||||
for (auto& it : _input._format->getAttributes()) {
|
||||
const Stream::Attribute& attrib = (it).second;
|
||||
newActivation.set(attrib._slot);
|
||||
}
|
||||
}
|
||||
|
@ -72,17 +194,15 @@ void GLBackend::updateInput() {
|
|||
bool newState = newActivation[i];
|
||||
if (newState != _input._attributeActivation[i]) {
|
||||
#if defined(SUPPORT_LEGACY_OPENGL)
|
||||
const bool useClientState = i < NUM_CLASSIC_ATTRIBS;
|
||||
#else
|
||||
const bool useClientState = false;
|
||||
#endif
|
||||
if (useClientState) {
|
||||
if (i < NUM_CLASSIC_ATTRIBS) {
|
||||
if (newState) {
|
||||
glEnableClientState(attributeSlotToClassicAttribName[i]);
|
||||
} else {
|
||||
glDisableClientState(attributeSlotToClassicAttribName[i]);
|
||||
}
|
||||
} else {
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
if (newState) {
|
||||
glEnableVertexAttribArray(i);
|
||||
} else {
|
||||
|
@ -103,18 +223,23 @@ void GLBackend::updateInput() {
|
|||
const Offsets& strides = _input._bufferStrides;
|
||||
|
||||
const Stream::Format::AttributeMap& attributes = _input._format->getAttributes();
|
||||
auto& inputChannels = _input._format->getChannels();
|
||||
_stats._ISNumInputBufferChanges++;
|
||||
|
||||
for (Stream::Format::ChannelMap::const_iterator channelIt = _input._format->getChannels().begin();
|
||||
channelIt != _input._format->getChannels().end();
|
||||
channelIt++) {
|
||||
const Stream::Format::ChannelMap::value_type::second_type& channel = (*channelIt).second;
|
||||
if ((*channelIt).first < buffers.size()) {
|
||||
int bufferNum = (*channelIt).first;
|
||||
GLuint boundVBO = 0;
|
||||
for (auto& channelIt : inputChannels) {
|
||||
const Stream::Format::ChannelMap::value_type::second_type& channel = (channelIt).second;
|
||||
if ((channelIt).first < buffers.size()) {
|
||||
int bufferNum = (channelIt).first;
|
||||
|
||||
if (_input._buffersState.test(bufferNum) || _input._invalidFormat) {
|
||||
GLuint vbo = gpu::GLBackend::getBufferID((*buffers[bufferNum]));
|
||||
glBindBuffer(GL_ARRAY_BUFFER, vbo);
|
||||
(void) CHECK_GL_ERROR();
|
||||
// GLuint vbo = gpu::GLBackend::getBufferID((*buffers[bufferNum]));
|
||||
GLuint vbo = _input._bufferVBOs[bufferNum];
|
||||
if (boundVBO != vbo) {
|
||||
glBindBuffer(GL_ARRAY_BUFFER, vbo);
|
||||
(void) CHECK_GL_ERROR();
|
||||
boundVBO = vbo;
|
||||
}
|
||||
_input._buffersState[bufferNum] = false;
|
||||
|
||||
for (unsigned int i = 0; i < channel._slots.size(); i++) {
|
||||
|
@ -126,9 +251,6 @@ void GLBackend::updateInput() {
|
|||
GLuint pointer = attrib._offset + offsets[bufferNum];
|
||||
#if defined(SUPPORT_LEGACY_OPENGL)
|
||||
const bool useClientState = slot < NUM_CLASSIC_ATTRIBS;
|
||||
#else
|
||||
const bool useClientState = false;
|
||||
#endif
|
||||
if (useClientState) {
|
||||
switch (slot) {
|
||||
case Stream::POSITION:
|
||||
|
@ -144,7 +266,9 @@ void GLBackend::updateInput() {
|
|||
glTexCoordPointer(count, type, stride, reinterpret_cast<GLvoid*>(pointer));
|
||||
break;
|
||||
};
|
||||
} else {
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
GLboolean isNormalized = attrib._element.isNormalized();
|
||||
glVertexAttribPointer(slot, count, type, isNormalized, stride,
|
||||
reinterpret_cast<GLvoid*>(pointer));
|
||||
|
@ -158,61 +282,7 @@ void GLBackend::updateInput() {
|
|||
// everything format related should be in sync now
|
||||
_input._invalidFormat = false;
|
||||
}
|
||||
|
||||
/* TODO: Fancy version GL4.4
|
||||
if (_needInputFormatUpdate) {
|
||||
|
||||
InputActivationCache newActivation;
|
||||
|
||||
// Assign the vertex format required
|
||||
if (_inputFormat) {
|
||||
const StreamFormat::AttributeMap& attributes = _inputFormat->getAttributes();
|
||||
for (StreamFormat::AttributeMap::const_iterator it = attributes.begin(); it != attributes.end(); it++) {
|
||||
const StreamFormat::Attribute& attrib = (*it).second;
|
||||
newActivation.set(attrib._slot);
|
||||
glVertexAttribFormat(
|
||||
attrib._slot,
|
||||
attrib._element.getDimensionCount(),
|
||||
_elementTypeToGLType[attrib._element.getType()],
|
||||
attrib._element.isNormalized(),
|
||||
attrib._stride);
|
||||
}
|
||||
CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
// Manage Activation what was and what is expected now
|
||||
for (int i = 0; i < newActivation.size(); i++) {
|
||||
bool newState = newActivation[i];
|
||||
if (newState != _inputAttributeActivation[i]) {
|
||||
if (newState) {
|
||||
glEnableVertexAttribArray(i);
|
||||
} else {
|
||||
glDisableVertexAttribArray(i);
|
||||
}
|
||||
_inputAttributeActivation.flip(i);
|
||||
}
|
||||
}
|
||||
CHECK_GL_ERROR();
|
||||
|
||||
_needInputFormatUpdate = false;
|
||||
}
|
||||
|
||||
if (_needInputStreamUpdate) {
|
||||
if (_inputStream) {
|
||||
const Stream::Buffers& buffers = _inputStream->getBuffers();
|
||||
const Stream::Offsets& offsets = _inputStream->getOffsets();
|
||||
const Stream::Strides& strides = _inputStream->getStrides();
|
||||
|
||||
for (int i = 0; i < buffers.size(); i++) {
|
||||
GLuint vbo = gpu::GLBackend::getBufferID((*buffers[i]));
|
||||
glBindVertexBuffer(i, vbo, offsets[i], strides[i]);
|
||||
}
|
||||
|
||||
CHECK_GL_ERROR();
|
||||
}
|
||||
_needInputStreamUpdate = false;
|
||||
}
|
||||
*/
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -188,7 +188,7 @@ void GLBackend::do_setUniformBuffer(Batch& batch, uint32 paramOffset) {
|
|||
(void) CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
void GLBackend::do_setUniformTexture(Batch& batch, uint32 paramOffset) {
|
||||
void GLBackend::do_setResourceTexture(Batch& batch, uint32 paramOffset) {
|
||||
GLuint slot = batch._params[paramOffset + 1]._uint;
|
||||
TexturePointer uniformTexture = batch._textures.get(batch._params[paramOffset + 0]._uint);
|
||||
|
||||
|
|
|
@ -41,21 +41,34 @@ void makeBindings(GLBackend::GLShader* shader) {
|
|||
glBindAttribLocation(glprogram, gpu::Stream::POSITION, "position");
|
||||
}
|
||||
|
||||
loc = glGetAttribLocation(glprogram, "attribPosition");
|
||||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::POSITION, "attribPosition");
|
||||
}
|
||||
|
||||
//Check for gpu specific attribute slotBindings
|
||||
loc = glGetAttribLocation(glprogram, "gl_Vertex");
|
||||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::POSITION, "position");
|
||||
glBindAttribLocation(glprogram, gpu::Stream::POSITION, "gl_Vertex");
|
||||
}
|
||||
|
||||
loc = glGetAttribLocation(glprogram, "normal");
|
||||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::NORMAL, "normal");
|
||||
}
|
||||
loc = glGetAttribLocation(glprogram, "attribNormal");
|
||||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::NORMAL, "attribNormal");
|
||||
}
|
||||
|
||||
loc = glGetAttribLocation(glprogram, "color");
|
||||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::COLOR, "color");
|
||||
}
|
||||
loc = glGetAttribLocation(glprogram, "attribColor");
|
||||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::COLOR, "attribColor");
|
||||
}
|
||||
|
||||
loc = glGetAttribLocation(glprogram, "texcoord");
|
||||
if (loc >= 0) {
|
||||
|
@ -75,6 +88,10 @@ void makeBindings(GLBackend::GLShader* shader) {
|
|||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::TEXCOORD1, "texcoord1");
|
||||
}
|
||||
loc = glGetAttribLocation(glprogram, "attribTexcoord1");
|
||||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::TEXCOORD1, "texcoord1");
|
||||
}
|
||||
|
||||
loc = glGetAttribLocation(glprogram, "clusterIndices");
|
||||
if (loc >= 0) {
|
||||
|
|
|
@ -755,3 +755,14 @@ void GLBackend::do_setStateBlendFactor(Batch& batch, uint32 paramOffset) {
|
|||
glBlendColor(factor.x, factor.y, factor.z, factor.w);
|
||||
(void) CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
void GLBackend::do_setStateScissorRect(Batch& batch, uint32 paramOffset) {
|
||||
|
||||
Vec4 rect(batch._params[paramOffset + 0]._float,
|
||||
batch._params[paramOffset + 1]._float,
|
||||
batch._params[paramOffset + 2]._float,
|
||||
batch._params[paramOffset + 3]._float);
|
||||
|
||||
glScissor(rect.x, rect.y, rect.z, rect.w);
|
||||
(void) CHECK_GL_ERROR();
|
||||
}
|
||||
|
|
|
@ -82,8 +82,6 @@ void GLBackend::syncTransformStateCache() {
|
|||
}
|
||||
|
||||
void GLBackend::updateTransform() {
|
||||
GLint originalMatrixMode;
|
||||
glGetIntegerv(GL_MATRIX_MODE, &originalMatrixMode);
|
||||
// Check all the dirty flags and update the state accordingly
|
||||
if (_transform._invalidViewport) {
|
||||
_transform._transformCamera._viewport = glm::vec4(_transform._viewport);
|
||||
|
@ -138,6 +136,9 @@ void GLBackend::updateTransform() {
|
|||
|
||||
#if (GPU_TRANSFORM_PROFILE == GPU_LEGACY)
|
||||
// Do it again for fixed pipeline until we can get rid of it
|
||||
GLint originalMatrixMode;
|
||||
glGetIntegerv(GL_MATRIX_MODE, &originalMatrixMode);
|
||||
|
||||
if (_transform._invalidProj) {
|
||||
if (_transform._lastMode != GL_PROJECTION) {
|
||||
glMatrixMode(GL_PROJECTION);
|
||||
|
@ -173,12 +174,12 @@ void GLBackend::updateTransform() {
|
|||
}
|
||||
(void) CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
glMatrixMode(originalMatrixMode);
|
||||
#endif
|
||||
|
||||
// Flags are clean
|
||||
_transform._invalidView = _transform._invalidProj = _transform._invalidModel = _transform._invalidViewport = false;
|
||||
|
||||
glMatrixMode(originalMatrixMode);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -148,7 +148,7 @@ public:
|
|||
//
|
||||
// As of now (03/2015), the call to makeProgram is in fact calling gpu::Context::makeProgram and does rely
|
||||
// on the underneath gpu::Context::Backend available. Since we only support glsl, this means that it relies
|
||||
// on a glContext and the driver to compile the glsl shader.
|
||||
// on a gl Context and the driver to compile the glsl shader.
|
||||
// Hoppefully in a few years the shader compilation will be completely abstracted in a separate shader compiler library
|
||||
// independant of the graphics api in use underneath (looking at you opengl & vulkan).
|
||||
static bool makeProgram(Shader& shader, const Shader::BindingSet& bindings = Shader::BindingSet());
|
||||
|
|
Some files were not shown because too many files have changed in this diff.