merge with plugins, needs testing

Anthony J. Thibault 2015-07-16 18:47:39 -07:00
commit e8da4b9586
97 changed files with 1076 additions and 2174 deletions
assignment-client/src/avatars
cmake
externals/polyvox
modules
examples
edit.js
entityScripts
example/games
grab.js
interface/src
libraries
tests/ui/src
tools/scribe/src


@ -53,7 +53,7 @@ AnimationDetails ScriptableAvatar::getAnimationDetails() {
void ScriptableAvatar::update(float deltatime) {
// Run animation
if (_animation != NULL && _animation->isValid() && _animation->getFrames().size() > 0) {
if (_animation && _animation->isLoaded() && _animation->getFrames().size() > 0) {
QStringList modelJoints = getJointNames();
QStringList animationJoints = _animation->getJointNames();


@ -3,8 +3,8 @@ set(EXTERNAL_NAME polyvox)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://hifi-public.s3.amazonaws.com/dependencies/polyvox.zip
URL_MD5 904b840328278c9b36fa7a14be730c34
URL http://hifi-public.s3.amazonaws.com/dependencies/polyvox-master-2015-7-15.zip
URL_MD5 9ec6323b87e849ae36e562ae1c7494a9
CMAKE_ARGS -DENABLE_EXAMPLES=OFF -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
LOG_DOWNLOAD 1
@ -24,7 +24,16 @@ if (APPLE)
${EXTERNAL_NAME}
change-install-name
COMMENT "Calling install_name_tool on libraries to fix install name for dylib linking"
COMMAND ${CMAKE_COMMAND} -DINSTALL_NAME_LIBRARY_DIR=${INSTALL_NAME_LIBRARY_DIR} -P ${EXTERNAL_PROJECT_DIR}/OSXInstallNameChange.cmake
COMMAND ${CMAKE_COMMAND} -DINSTALL_NAME_LIBRARY_DIR=${INSTALL_NAME_LIBRARY_DIR}/Debug -P ${EXTERNAL_PROJECT_DIR}/OSXInstallNameChange.cmake
DEPENDEES install
WORKING_DIRECTORY <SOURCE_DIR>
LOG 1
)
ExternalProject_Add_Step(
${EXTERNAL_NAME}
change-install-name
COMMENT "Calling install_name_tool on libraries to fix install name for dylib linking"
COMMAND ${CMAKE_COMMAND} -DINSTALL_NAME_LIBRARY_DIR=${INSTALL_NAME_LIBRARY_DIR}/Release -P ${EXTERNAL_PROJECT_DIR}/OSXInstallNameChange.cmake
DEPENDEES install
WORKING_DIRECTORY <SOURCE_DIR>
LOG 1
@ -48,12 +57,15 @@ endif ()
if (WIN32)
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY ${INSTALL_DIR}/PolyVoxCore/lib/PolyVoxCore.lib CACHE FILEPATH "polyvox core library")
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_DEBUG ${INSTALL_DIR}/PolyVoxCore/lib/Debug/PolyVoxCore.lib CACHE FILEPATH "polyvox core library")
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_RELEASE ${INSTALL_DIR}/PolyVoxCore/lib/Release/PolyVoxCore.lib CACHE FILEPATH "polyvox core library")
# set(${EXTERNAL_NAME_UPPER}_UTIL_LIBRARY ${INSTALL_DIR}/PolyVoxUtil/lib/PolyVoxUtil.lib CACHE FILEPATH "polyvox util library")
elseif (APPLE)
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY ${INSTALL_DIR}/lib/libPolyVoxCore.dylib CACHE FILEPATH "polyvox core library")
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_DEBUG ${INSTALL_DIR}/lib/Debug/libPolyVoxCore.dylib CACHE FILEPATH "polyvox core library")
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_RELEASE ${INSTALL_DIR}/lib/Release/libPolyVoxCore.dylib CACHE FILEPATH "polyvox core library")
# set(${EXTERNAL_NAME_UPPER}_UTIL_LIBRARY ${INSTALL_DIR}/lib/libPolyVoxUtil.dylib CACHE FILEPATH "polyvox util library")
else ()
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY ${INSTALL_DIR}/lib/libPolyVoxCore.so CACHE FILEPATH "polyvox core library")
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_DEBUG ${INSTALL_DIR}/lib/Debug/libPolyVoxCore.so CACHE FILEPATH "polyvox core library")
set(${EXTERNAL_NAME_UPPER}_CORE_LIBRARY_RELEASE ${INSTALL_DIR}/lib/Release/libPolyVoxCore.so CACHE FILEPATH "polyvox core library")
# set(${EXTERNAL_NAME_UPPER}_UTIL_LIBRARY ${INSTALL_DIR}/lib/libPolyVoxUtil.so CACHE FILEPATH "polyvox util library")
endif ()


@ -24,9 +24,12 @@ hifi_library_search_hints("polyvox")
find_path(POLYVOX_CORE_INCLUDE_DIRS PolyVoxCore/SimpleVolume.h PATH_SUFFIXES include include/PolyVoxCore HINTS ${POLYVOX_SEARCH_DIRS})
# find_path(POLYVOX_UTIL_INCLUDE_DIRS PolyVoxUtil/Serialization.h PATH_SUFFIXES include include/PolyVoxUtil HINTS ${POLYVOX_SEARCH_DIRS})
find_library(POLYVOX_CORE_LIBRARY NAMES PolyVoxCore PATH_SUFFIXES lib HINTS ${POLYVOX_SEARCH_DIRS})
find_library(POLYVOX_CORE_LIBRARY_DEBUG NAMES PolyVoxCore PATH_SUFFIXES lib/Debug HINTS ${POLYVOX_SEARCH_DIRS})
find_library(POLYVOX_CORE_LIBRARY_RELEASE NAMES PolyVoxCore PATH_SUFFIXES lib/Release lib HINTS ${POLYVOX_SEARCH_DIRS})
# find_library(POLYVOX_UTIL_LIBRARY NAMES PolyVoxUtil PATH_SUFFIXES lib HINTS ${POLYVOX_SEARCH_DIRS})
include(SelectLibraryConfigurations)
select_library_configurations(POLYVOX_CORE)
# if (WIN32)
# find_path(POLYVOX_DLL_PATH polyvox.dll PATH_SUFFIXES bin HINTS ${POLYVOX_SEARCH_DIRS})


@ -656,7 +656,9 @@ function mouseMove(event) {
function handleIdleMouse() {
idleMouseTimerId = null;
highlightEntityUnderCursor(lastMousePosition, true);
if (isActive) {
highlightEntityUnderCursor(lastMousePosition, true);
}
}
function highlightEntityUnderCursor(position, accurateRay) {


@ -130,10 +130,10 @@
var others = Entities.findEntities(this.properties.position, this.properties.dimensions.y);
for (var i = 0; i < others.length; i++) {
var piece = others[i];
var pieceID = others[i];
if (piece.id != this.entityID) {
var properties = Entities.getEntityProperties(piece);
if (pieceID != this.entityID) {
var properties = Entities.getEntityProperties(pieceID);
var isWhite = properties.modelURL.search("White") !== -1;
var type = (properties.modelURL.search("King") !== -1) ? 4 :
@ -147,7 +147,7 @@
if (myPos.i === piecePos.i && myPos.j === piecePos.j && type !== -2) {
var position = this.getAbsolutePosition((isWhite) ? { i: type, j: -1 } : { i: 7 - type, j: 8 },
properties.dimensions.y / 2.0);
Entities.editEntity(piece, {
Entities.editEntity(pieceID, {
position: position
});
break;

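The rename above follows the pattern that Entities.findEntities() returns entity IDs, and that Entities.getEntityProperties() and Entities.editEntity() take an ID rather than a properties object. A minimal standalone sketch of that usage, separate from the diff (the search radius and the property checks are illustrative, not from the commit):

    // Sketch of the entity-ID pattern (illustrative values, not part of the commit).
    var nearbyIDs = Entities.findEntities(MyAvatar.position, 2.0); // returns an array of entity IDs
    for (var i = 0; i < nearbyIDs.length; i++) {
        var id = nearbyIDs[i];
        var props = Entities.getEntityProperties(id);              // look up properties by ID
        if (props.modelURL && props.modelURL.search("King") !== -1) {
            // editEntity also takes the ID, plus an object of properties to change
            Entities.editEntity(id, { position: Vec3.sum(props.position, { x: 0, y: 0.1, z: 0 }) });
        }
    }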

@ -175,10 +175,12 @@ function positionStick(stickOrientation) {
inHand = false;
Entities.updateAction(stickID, actionID, {
relativePosition: offset,
relativeRotation: stickOrientation
relativeRotation: stickOrientation,
hand: "right"
});
}
function resetToHand() { // Maybe coordinate with positionStick?
function resetToHand() { // For use with controllers, puts the sword in contact with the hand.
// Maybe coordinate with positionStick?
if (inHand) { // Optimization: bail if we're already inHand.
return;
}
@ -191,14 +193,14 @@ function resetToHand() { // Maybe coordinate with positionStick?
});
inHand = true;
}
function isControllerActive() {
// I don't think the hydra API provides any reliable way to know whether a particular controller is active. Ask for both.
controllerActive = (Vec3.length(Controller.getSpatialControlPosition(3)) > 0) || Vec3.length(Controller.getSpatialControlPosition(4)) > 0;
return controllerActive;
}
function mouseMoveEvent(event) {
if (event.deviceID) { // Not a MOUSE mouse event, but a (e.g., hydra) mouse event, with x/y that is not meaningful for us.
resetToHand(); // Can only happen when controller is uncradled, so let's drive with that, resetting our attachement.
return;
}
controllerActive = (Vec3.length(Controller.getSpatialControlPosition(controllerID)) > 0);
//print("Mouse move with hand controller " + (controllerActive ? "active" : "inactive") + JSON.stringify(event));
if (controllerActive || !isFighting()) {
// When a controller like the hydra gives a mouse event, the x/y is not meaningful to us, but we can detect it with a truthy deviceID
if (event.deviceID || !isFighting() || isControllerActive()) {
print('Attempting attachment reset');
resetToHand();
return;
@ -244,12 +246,20 @@ function cleanUp(leaveButtons) {
}
function makeSword() {
initControls();
var swordPosition;
if (!isControllerActive()) { // Don't knock yourself with the sword
swordPosition = Vec3.sum(MyAvatar.position, Vec3.multiply(2, Quat.getFront(MyAvatar.orientation)));
} else if (hand === 'right') {
swordPosition = MyAvatar.getRightPalmPosition();
} else {
swordPosition = MyAvatar.getLeftPalmPosition();
}
stickID = Entities.addEntity({
type: "Model",
modelURL: swordModel,
compoundShapeURL: swordCollisionShape,
dimensions: dimensions,
position: (hand === 'right') ? MyAvatar.getRightPalmPosition() : MyAvatar.getLeftPalmPosition(), // initial position doesn't matter, as long as it's close
position: swordPosition,
rotation: MyAvatar.orientation,
damping: 0.1,
collisionSoundURL: swordCollisionSoundURL,


@ -10,96 +10,13 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var MOVE_TIMESCALE = 0.1;
var INV_MOVE_TIMESCALE = 1.0 / MOVE_TIMESCALE;
var MAX_SOLID_ANGLE = 0.01; // objects that appear smaller than this can't be grabbed
var CLOSE_ENOUGH = 0.001;
var ZERO_VEC3 = { x: 0, y: 0, z: 0 };
var ANGULAR_DAMPING_RATE = 0.40;
// NOTE: to improve readability global variable names start with 'g'
var gIsGrabbing = false;
var gGrabbedEntity = null;
var gActionID = null;
var gEntityProperties;
var gStartPosition;
var gStartRotation;
var gCurrentPosition;
var gOriginalGravity = ZERO_VEC3;
var gPlaneNormal = ZERO_VEC3;
// gMaxGrabDistance is a function of the size of the object.
var gMaxGrabDistance;
// gGrabMode defines the degrees of freedom of the grab target positions
// relative to gGrabStartPosition options include:
// xzPlane (default)
// verticalCylinder (SHIFT)
// rotate (CONTROL)
// Modes to eventually support?:
// xyPlane
// yzPlane
// polar
// elevationAzimuth
var gGrabMode = "xzplane";
// gGrabOffset allows the user to grab an object off-center. It points from ray's intersection
// with the move-plane to object center (at the moment the grab is initiated). Future target positions
// are relative to the ray's intersection by the same offset.
var gGrabOffset = { x: 0, y: 0, z: 0 };
var gTargetPosition;
var gTargetRotation;
var gLiftKey = false; // SHIFT
var gRotateKey = false; // CONTROL
var gInitialMouse = { x: 0, y: 0 };
var gPreviousMouse = { x: 0, y: 0 };
var gMouseCursorLocation = { x: 0, y: 0 };
var gMouseAtRotateStart = { x: 0, y: 0 };
var gBeaconHeight = 0.10;
// var gAngularVelocity = ZERO_VEC3;
// TODO: play sounds again when we aren't leaking AudioInjector threads
// var grabSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/CloseClamp.wav");
// var releaseSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/ReleaseClamp.wav");
// var VOLUME = 0.0;
var gBeaconHeight = 0.10;
var BEACON_COLOR = {
red: 200,
green: 200,
blue: 200
};
var BEACON_WIDTH = 2;
var ZERO_VEC3 = {x: 0, y: 0, z: 0};
var IDENTITY_QUAT = {x: 0, y: 0, z: 0, w: 0};
var gBeacon = Overlays.addOverlay("line3d", {
color: BEACON_COLOR,
alpha: 1,
visible: false,
lineWidth: BEACON_WIDTH
});
function updateDropLine(position) {
Overlays.editOverlay(gBeacon, {
visible: true,
start: {
x: position.x,
y: position.y + gBeaconHeight,
z: position.z
},
end: {
x: position.x,
y: position.y - gBeaconHeight,
z: position.z
}
});
}
function mouseIntersectionWithPlane(pointOnPlane, planeNormal, event) {
// helper function
function mouseIntersectionWithPlane(pointOnPlane, planeNormal, event, maxDistance) {
var cameraPosition = Camera.getPosition();
var localPointOnPlane = Vec3.subtract(pointOnPlane, cameraPosition);
var distanceFromPlane = Vec3.dot(localPointOnPlane, planeNormal);
@ -116,7 +33,7 @@ function mouseIntersectionWithPlane(pointOnPlane, planeNormal, event) {
var useMaxForwardGrab = false;
if (Math.abs(dirDotNorm) > MIN_RAY_PLANE_DOT) {
var distanceToIntersection = distanceFromPlane / dirDotNorm;
if (distanceToIntersection > 0 && distanceToIntersection < gMaxGrabDistance) {
if (distanceToIntersection > 0 && distanceToIntersection < maxDistance) {
// ray points into the plane
localIntersection = Vec3.multiply(pickRay.direction, distanceFromPlane / dirDotNorm);
} else {
@ -133,53 +50,160 @@ function mouseIntersectionWithPlane(pointOnPlane, planeNormal, event) {
// we re-route the intersection to be in front at max distance.
var rayDirection = Vec3.subtract(pickRay.direction, Vec3.multiply(planeNormal, dirDotNorm));
rayDirection = Vec3.normalize(rayDirection);
localIntersection = Vec3.multiply(rayDirection, gMaxGrabDistance);
localIntersection = Vec3.multiply(rayDirection, maxDistance);
localIntersection = Vec3.sum(localIntersection, Vec3.multiply(planeNormal, distanceFromPlane));
}
var worldIntersection = Vec3.sum(cameraPosition, localIntersection);
return worldIntersection;
}
function computeNewGrabPlane() {
if (!gIsGrabbing) {
// Mouse class stores mouse click and drag info
Mouse = function() {
this.current = {x: 0, y: 0 };
this.previous = {x: 0, y: 0 };
this.rotateStart = {x: 0, y: 0 };
this.cursorRestore = {x: 0, y: 0};
}
Mouse.prototype.startDrag = function(position) {
this.current = {x: position.x, y: position.y};
this.startRotateDrag();
}
Mouse.prototype.updateDrag = function(position) {
this.current = {x: position.x, y: position.y };
}
Mouse.prototype.startRotateDrag = function() {
this.previous = {x: this.current.x, y: this.current.y};
this.rotateStart = {x: this.current.x, y: this.current.y};
this.cursorRestore = { x: Window.getCursorPositionX(), y: Window.getCursorPositionY() };
}
Mouse.prototype.getDrag = function() {
var delta = {x: this.current.x - this.previous.x, y: this.current.y - this.previous.y};
this.previous = {x: this.current.x, y: this.current.y};
return delta;
}
Mouse.prototype.restoreRotateCursor = function() {
Window.setCursorPosition(this.cursorRestore.x, this.cursorRestore.y);
this.current = {x: this.rotateStart.x, y: this.rotateStart.y};
}
var mouse = new Mouse();
// Beacon class stores info for drawing a line at object's target position
Beacon = function() {
this.height = 0.10;
this.overlayID = Overlays.addOverlay("line3d", {
color: {red: 200, green: 200, blue: 200},
alpha: 1,
visible: false,
lineWidth: 2
});
}
Beacon.prototype.enable = function() {
Overlays.editOverlay(this.overlayID, { visible: true });
}
Beacon.prototype.disable = function() {
Overlays.editOverlay(this.overlayID, { visible: false });
}
Beacon.prototype.updatePosition = function(position) {
Overlays.editOverlay(this.overlayID, {
visible: true,
start: {
x: position.x,
y: position.y + this.height,
z: position.z
},
end: {
x: position.x,
y: position.y - this.height,
z: position.z
}
});
}
var beacon = new Beacon();
// TODO: play sounds again when we aren't leaking AudioInjector threads
// var grabSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/CloseClamp.wav");
// var releaseSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/ReleaseClamp.wav");
// var VOLUME = 0.0;
// Grabber class stores and computes info for grab behavior
Grabber = function() {
this.isGrabbing = false;
this.entityID = null;
this.actionID = null;
this.startPosition = ZERO_VEC3;
this.lastRotation = IDENTITY_QUAT;
this.currentPosition = ZERO_VEC3;
this.planeNormal = ZERO_VEC3;
this.originalGravity = ZERO_VEC3;
// maxDistance is a function of the size of the object.
this.maxDistance;
// mode defines the degrees of freedom of the grab target positions
// relative to startPosition options include:
// xzPlane (default)
// verticalCylinder (SHIFT)
// rotate (CONTROL)
this.mode = "xzplane";
// offset allows the user to grab an object off-center. It points from the object's center
// to the point where the ray intersects the grab plane (at the moment the grab is initiated).
// Future target positions of the ray intersection are on the same plane, and the offset is subtracted
// to compute the target position of the object's center.
this.offset = {x: 0, y: 0, z: 0 };
this.targetPosition;
this.targetRotation;
this.liftKey = false; // SHIFT
this.rotateKey = false; // CONTROL
}
Grabber.prototype.computeNewGrabPlane = function() {
if (!this.isGrabbing) {
return;
}
var maybeResetMousePosition = false;
if (gGrabMode !== "rotate") {
gMouseAtRotateStart = gMouseCursorLocation;
var modeWasRotate = (this.mode == "rotate");
this.mode = "xzPlane";
this.planeNormal = {x: 0, y: 1, z: 0 };
if (this.rotateKey) {
this.mode = "rotate";
mouse.startRotateDrag();
} else {
maybeResetMousePosition = true;
}
gGrabMode = "xzPlane";
gPointOnPlane = gCurrentPosition;
gPlaneNormal = { x: 0, y: 1, z: 0 };
if (gLiftKey) {
if (!gRotateKey) {
gGrabMode = "verticalCylinder";
// a new planeNormal will be computed each move
if (modeWasRotate) {
// we reset the mouse screen position whenever we stop rotating
mouse.restoreRotateCursor();
}
} else if (gRotateKey) {
gGrabMode = "rotate";
}
if (this.liftKey) {
this.mode = "verticalCylinder";
// NOTE: during verticalCylinder mode a new planeNormal will be computed each move
}
}
gPointOnPlane = Vec3.subtract(gCurrentPosition, gGrabOffset);
var xzOffset = Vec3.subtract(gPointOnPlane, Camera.getPosition());
this.pointOnPlane = Vec3.sum(this.currentPosition, this.offset);
var xzOffset = Vec3.subtract(this.pointOnPlane, Camera.getPosition());
xzOffset.y = 0;
gXzDistanceToGrab = Vec3.length(xzOffset);
if (gGrabMode !== "rotate" && maybeResetMousePosition) {
// we reset the mouse position whenever we stop rotating
Window.setCursorPosition(gMouseAtRotateStart.x, gMouseAtRotateStart.y);
}
this.xzDistanceToGrab = Vec3.length(xzOffset);
}
function mousePressEvent(event) {
Grabber.prototype.pressEvent = function(event) {
if (!event.isLeftButton) {
return;
}
gInitialMouse = {x: event.x, y: event.y };
gPreviousMouse = {x: event.x, y: event.y };
var pickRay = Camera.computePickRay(event.x, event.y);
var pickResults = Entities.findRayIntersection(pickRay, true); // accurate picking
@ -193,148 +217,172 @@ function mousePressEvent(event) {
return;
}
mouse.startDrag(event);
var clickedEntity = pickResults.entityID;
var entityProperties = Entities.getEntityProperties(clickedEntity)
gStartPosition = entityProperties.position;
gStartRotation = entityProperties.rotation;
this.startPosition = entityProperties.position;
this.lastRotation = entityProperties.rotation;
var cameraPosition = Camera.getPosition();
gBeaconHeight = Vec3.length(entityProperties.dimensions);
gMaxGrabDistance = gBeaconHeight / MAX_SOLID_ANGLE;
if (Vec3.distance(gStartPosition, cameraPosition) > gMaxGrabDistance) {
var objectBoundingDiameter = Vec3.length(entityProperties.dimensions);
beacon.height = objectBoundingDiameter;
this.maxDistance = objectBoundingDiameter / MAX_SOLID_ANGLE;
if (Vec3.distance(this.startPosition, cameraPosition) > this.maxDistance) {
// don't allow grabs of things far away
return;
}
Entities.editEntity(clickedEntity, { gravity: ZERO_VEC3 });
gIsGrabbing = true;
this.isGrabbing = true;
gGrabbedEntity = clickedEntity;
gCurrentPosition = entityProperties.position;
gOriginalGravity = entityProperties.gravity;
gTargetPosition = gStartPosition;
this.entityID = clickedEntity;
this.currentPosition = entityProperties.position;
this.originalGravity = entityProperties.gravity;
this.targetPosition = {x: this.startPosition.x, y: this.startPosition.y, z: this.startPosition.z};
// compute the grab point
var nearestPoint = Vec3.subtract(gStartPosition, cameraPosition);
var nearestPoint = Vec3.subtract(this.startPosition, cameraPosition);
var distanceToGrab = Vec3.dot(nearestPoint, pickRay.direction);
nearestPoint = Vec3.multiply(distanceToGrab, pickRay.direction);
gPointOnPlane = Vec3.sum(cameraPosition, nearestPoint);
this.pointOnPlane = Vec3.sum(cameraPosition, nearestPoint);
// compute the grab offset
gGrabOffset = Vec3.subtract(gStartPosition, gPointOnPlane);
// compute the grab offset (points from object center to point of grab)
this.offset = Vec3.subtract(this.pointOnPlane, this.startPosition);
computeNewGrabPlane();
this.computeNewGrabPlane();
updateDropLine(gStartPosition);
beacon.updatePosition(this.startPosition);
// TODO: play sounds again when we aren't leaking AudioInjector threads
//Audio.playSound(grabSound, { position: entityProperties.position, volume: VOLUME });
}
function mouseReleaseEvent() {
if (gIsGrabbing) {
if (Vec3.length(gOriginalGravity) != 0) {
Entities.editEntity(gGrabbedEntity, { gravity: gOriginalGravity });
Grabber.prototype.releaseEvent = function() {
if (this.isGrabbing) {
if (Vec3.length(this.originalGravity) != 0) {
Entities.editEntity(this.entityID, { gravity: this.originalGravity});
}
gIsGrabbing = false
Entities.deleteAction(gGrabbedEntity, gActionID);
gActionID = null;
this.isGrabbing = false
Entities.deleteAction(this.entityID, this.actionID);
this.actionID = null;
Overlays.editOverlay(gBeacon, { visible: false });
beacon.disable();
// TODO: play sounds again when we aren't leaking AudioInjector threads
//Audio.playSound(releaseSound, { position: entityProperties.position, volume: VOLUME });
}
}
function mouseMoveEvent(event) {
if (!gIsGrabbing) {
Grabber.prototype.moveEvent = function(event) {
if (!this.isGrabbing) {
return;
}
mouse.updateDrag(event);
// see if something added/restored gravity
var entityProperties = Entities.getEntityProperties(gGrabbedEntity);
var entityProperties = Entities.getEntityProperties(this.entityID);
if (Vec3.length(entityProperties.gravity) != 0) {
gOriginalGravity = entityProperties.gravity;
this.originalGravity = entityProperties.gravity;
}
this.currentPosition = entityProperties.position;
var actionArgs = {};
if (gGrabMode === "rotate") {
var deltaMouse = { x: 0, y: 0 };
var dx = event.x - gInitialMouse.x;
var dy = event.y - gInitialMouse.y;
if (this.mode === "rotate") {
var drag = mouse.getDrag();
var orientation = Camera.getOrientation();
var dragOffset = Vec3.multiply(dx, Quat.getRight(orientation));
dragOffset = Vec3.sum(dragOffset, Vec3.multiply(-dy, Quat.getUp(orientation)));
var dragOffset = Vec3.multiply(drag.x, Quat.getRight(orientation));
dragOffset = Vec3.sum(dragOffset, Vec3.multiply(-drag.y, Quat.getUp(orientation)));
var axis = Vec3.cross(dragOffset, Quat.getFront(orientation));
axis = Vec3.normalize(axis);
var ROTATE_STRENGTH = 0.4; // magic number tuned by hand
var angle = ROTATE_STRENGTH * Math.sqrt((dx * dx) + (dy * dy));
var angle = ROTATE_STRENGTH * Math.sqrt((drag.x * drag.x) + (drag.y * drag.y));
var deltaQ = Quat.angleAxis(angle, axis);
// var qZero = entityProperties.rotation;
var qZero = gStartRotation;
var qOne = Quat.multiply(deltaQ, qZero);
actionArgs = {targetRotation: qOne, angularTimeScale: 0.1};
//var qZero = this.lastRotation;
this.lastRotation = Quat.multiply(deltaQ, this.lastRotation);
actionArgs = {targetRotation: this.lastRotation, angularTimeScale: 0.1};
} else {
var newTargetPosition;
if (gGrabMode === "verticalCylinder") {
var newPointOnPlane;
if (this.mode === "verticalCylinder") {
// for this mode we recompute the plane based on current Camera
var planeNormal = Quat.getFront(Camera.getOrientation());
planeNormal.y = 0;
planeNormal = Vec3.normalize(planeNormal);
var pointOnCylinder = Vec3.multiply(planeNormal, gXzDistanceToGrab);
var pointOnCylinder = Vec3.multiply(planeNormal, this.xzDistanceToGrab);
pointOnCylinder = Vec3.sum(Camera.getPosition(), pointOnCylinder);
newTargetPosition = mouseIntersectionWithPlane(pointOnCylinder, planeNormal, event);
this.pointOnPlane = mouseIntersectionWithPlane(pointOnCylinder, planeNormal, mouse.current, this.maxDistance);
newPointOnPlane = {x: this.pointOnPlane.x, y: this.pointOnPlane.y, z: this.pointOnPlane.z};
} else {
var cameraPosition = Camera.getPosition();
newTargetPosition = mouseIntersectionWithPlane(gPointOnPlane, gPlaneNormal, event);
var relativePosition = Vec3.subtract(newTargetPosition, cameraPosition);
newPointOnPlane = mouseIntersectionWithPlane(this.pointOnPlane, this.planeNormal, mouse.current, this.maxDistance);
var relativePosition = Vec3.subtract(newPointOnPlane, cameraPosition);
var distance = Vec3.length(relativePosition);
if (distance > gMaxGrabDistance) {
if (distance > this.maxDistance) {
// clamp distance
relativePosition = Vec3.multiply(relativePosition, gMaxGrabDistance / distance);
newTargetPosition = Vec3.sum(relativePosition, cameraPosition);
relativePosition = Vec3.multiply(relativePosition, this.maxDistance / distance);
newPointOnPlane = Vec3.sum(relativePosition, cameraPosition);
}
}
gTargetPosition = Vec3.sum(newTargetPosition, gGrabOffset);
actionArgs = {targetPosition: gTargetPosition, linearTimeScale: 0.1};
}
gPreviousMouse = { x: event.x, y: event.y };
gMouseCursorLocation = { x: Window.getCursorPositionX(), y: Window.getCursorPositionY() };
this.targetPosition = Vec3.subtract(newPointOnPlane, this.offset);
actionArgs = {targetPosition: this.targetPosition, linearTimeScale: 0.1};
if (!gActionID) {
gActionID = Entities.addAction("spring", gGrabbedEntity, actionArgs);
beacon.updatePosition(this.targetPosition);
}
if (!this.actionID) {
this.actionID = Entities.addAction("spring", this.entityID, actionArgs);
} else {
Entities.updateAction(gGrabbedEntity, gActionID, actionArgs);
Entities.updateAction(this.entityID, this.actionID, actionArgs);
}
updateDropLine(gTargetPosition);
}
function keyReleaseEvent(event) {
Grabber.prototype.keyReleaseEvent = function(event) {
if (event.text === "SHIFT") {
gLiftKey = false;
this.liftKey = false;
}
if (event.text === "CONTROL") {
gRotateKey = false;
this.rotateKey = false;
}
computeNewGrabPlane();
this.computeNewGrabPlane();
}
Grabber.prototype.keyPressEvent = function(event) {
if (event.text === "SHIFT") {
this.liftKey = true;
}
if (event.text === "CONTROL") {
this.rotateKey = true;
}
this.computeNewGrabPlane();
}
var grabber = new Grabber();
function pressEvent(event) {
grabber.pressEvent(event);
}
function moveEvent(event) {
grabber.moveEvent(event);
}
function releaseEvent(event) {
grabber.releaseEvent(event);
}
function keyPressEvent(event) {
if (event.text === "SHIFT") {
gLiftKey = true;
}
if (event.text === "CONTROL") {
gRotateKey = true;
}
computeNewGrabPlane();
grabber.keyPressEvent(event);
}
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
function keyReleaseEvent(event) {
grabber.keyReleaseEvent(event);
}
Controller.mousePressEvent.connect(pressEvent);
Controller.mouseMoveEvent.connect(moveEvent);
Controller.mouseReleaseEvent.connect(releaseEvent);
Controller.keyPressEvent.connect(keyPressEvent);
Controller.keyReleaseEvent.connect(keyReleaseEvent);

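To make the off-center grab bookkeeping in the Grabber comments above concrete, here is a small standalone sketch (plain JavaScript, not part of the commit; Vec3 below is a stand-in for the scripting API's vector helper). The offset captured at grab time is later subtracted from each new plane intersection to recover the object's target center:

    // Stand-in for the scripting API's Vec3 (sketch only).
    var Vec3 = {
        subtract: function(a, b) { return { x: a.x - b.x, y: a.y - b.y, z: a.z - b.z }; },
        sum:      function(a, b) { return { x: a.x + b.x, y: a.y + b.y, z: a.z + b.z }; }
    };

    // At grab time the pick ray hits the grab plane a little off the object's center.
    var objectCenter = { x: 1.0, y: 0.5, z: 0.0 };
    var pointOnPlane = { x: 1.2, y: 0.5, z: 0.1 };
    var offset = Vec3.subtract(pointOnPlane, objectCenter);      // points from center to grab point

    // Each later mouse move intersects the same plane somewhere new; subtracting the
    // stored offset keeps the original grab point under the cursor.
    var newPointOnPlane = { x: 2.2, y: 0.5, z: 0.6 };
    var targetPosition = Vec3.subtract(newPointOnPlane, offset); // -> { x: 2.0, y: 0.5, z: 0.5 }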

@ -67,7 +67,6 @@
#include <EntityScriptingInterface.h>
#include <ErrorDialog.h>
#include <GlowEffect.h>
#include <GlWindow.h>
#include <gpu/Batch.h>
#include <gpu/Context.h>
@ -119,7 +118,6 @@
#include "avatar/AvatarManager.h"
#include "audio/AudioIOStatsRenderer.h"
#include "audio/AudioScope.h"
#include "devices/DdeFaceTracker.h"
@ -290,11 +288,9 @@ bool setupEssentials(int& argc, char** argv) {
auto geometryCache = DependencyManager::set<GeometryCache>();
auto scriptCache = DependencyManager::set<ScriptCache>();
auto soundCache = DependencyManager::set<SoundCache>();
auto glowEffect = DependencyManager::set<GlowEffect>();
auto faceshift = DependencyManager::set<Faceshift>();
auto audio = DependencyManager::set<AudioClient>();
auto audioScope = DependencyManager::set<AudioScope>();
auto audioIOStatsRenderer = DependencyManager::set<AudioIOStatsRenderer>();
auto deferredLightingEffect = DependencyManager::set<DeferredLightingEffect>();
auto ambientOcclusionEffect = DependencyManager::set<AmbientOcclusionEffect>();
auto textureCache = DependencyManager::set<TextureCache>();
@ -357,7 +353,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_mousePressed(false),
_enableProcessOctreeThread(true),
_octreeProcessor(),
_nodeBoundsDisplay(this),
_runningScriptsWidget(NULL),
_runningScriptsWidgetWasVisible(false),
_trayIcon(new QSystemTrayIcon(_window)),
@ -759,6 +754,14 @@ void Application::cleanupBeforeQuit() {
#endif
}
void Application::emptyLocalCache() {
QNetworkDiskCache* cache = qobject_cast<QNetworkDiskCache*>(NetworkAccessManager::getInstance().cache());
if (cache) {
qDebug() << "DiskCacheEditor::clear(): Clearing disk cache.";
cache->clear();
}
}
Application::~Application() {
EntityTree* tree = _entities.getTree();
tree->lockForWrite();
@ -1029,18 +1032,17 @@ void Application::paintGL() {
}
renderArgs._renderMode = RenderArgs::DEFAULT_RENDER_MODE;
DependencyManager::get<GlowEffect>()->prepare(&renderArgs);
// Primary rendering pass
auto primaryFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
auto finalFbo = primaryFbo;
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(primaryFbo));
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Viewport is assigned to the size of the framebuffer
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
// Primary scene rendering
// Primary rendering pass
{
PROFILE_RANGE(__FUNCTION__ "/mainRender");
auto primaryFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
GLuint fbo = gpu::GLBackend::getFramebufferID(primaryFbo);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fbo);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Viewport is assigned to the size of the framebuffer
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
if (displayPlugin->isStereo()) {
QRect r(QPoint(0, 0), QSize(size.width() / 2, size.height()));
glEnable(GL_SCISSOR_TEST);
@ -1074,10 +1076,12 @@ void Application::paintGL() {
}
}
// Overlay Composition
finalFbo = DependencyManager::get<GlowEffect>()->render(&renderArgs);
// Overlay Composition, needs to occur after screen space effects have completed
{
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
PROFILE_RANGE(__FUNCTION__ "/compositor");
auto primaryFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(primaryFbo));
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
if (displayPlugin->isStereo()) {
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
QRect r(QPoint(0, 0), QSize(size.width() / 2, size.height()));
@ -1099,28 +1103,30 @@ void Application::paintGL() {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
}
#if 0
renderArgs._renderMode = RenderArgs::MIRROR_RENDER_MODE;
if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
renderRearViewMirror(&renderArgs, _mirrorViewRect);
}
renderArgs._renderMode = RenderArgs::NORMAL_RENDER_MODE;
#endif
// deliver final composited scene to the display plugin
{
GLuint finalTexture = gpu::GLBackend::getTextureID(finalFbo->getRenderBuffer(0));
PROFILE_RANGE(__FUNCTION__ "/pluginOutput");
auto primaryFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
GLuint finalTexture = gpu::GLBackend::getTextureID(primaryFbo->getRenderBuffer(0));
uvec2 finalSize = toGlm(size);
#ifdef Q_OS_MAC
glFinish();
#else
// Ensure the rendering context commands are completed when rendering
GLsync sync = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
glFinish();
#endif
_offscreenContext->doneCurrent();
Q_ASSERT(!QOpenGLContext::currentContext());
displayPlugin->preDisplay();
Q_ASSERT(QOpenGLContext::currentContext());
#ifdef Q_OS_MAC
#else
// FIXME? make the sync a parameter to preDisplay and let the plugin manage this
glWaitSync(sync, 0, GL_TIMEOUT_IGNORED);
glDeleteSync(sync);
#endif
{
PROFILE_RANGE(__FUNCTION__ "/pluginDisplay");
@ -1710,7 +1716,7 @@ void Application::mouseMoveEvent(QMouseEvent* event, unsigned int deviceID) {
return;
}
if (Menu::getInstance()->isOptionChecked(KeyboardMouseDevice::NAME)) {
if (deviceID == 0 && Menu::getInstance()->isOptionChecked(KeyboardMouseDevice::NAME)) {
_keyboardMouseDevice->mouseMoveEvent(event, deviceID);
}
@ -1733,7 +1739,7 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
if (hasFocus()) {
if (Menu::getInstance()->isOptionChecked(KeyboardMouseDevice::NAME)) {
if (deviceID == 0 && Menu::getInstance()->isOptionChecked(KeyboardMouseDevice::NAME)) {
_keyboardMouseDevice->mousePressEvent(event);
}
@ -1775,7 +1781,7 @@ void Application::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {
}
if (hasFocus()) {
if (Menu::getInstance()->isOptionChecked(KeyboardMouseDevice::NAME)) {
if (deviceID == 0 && Menu::getInstance()->isOptionChecked(KeyboardMouseDevice::NAME)) {
_keyboardMouseDevice->mouseReleaseEvent(event);
}
@ -2398,10 +2404,6 @@ void Application::init() {
_entityClipboardRenderer.setViewFrustum(getViewFrustum());
_entityClipboardRenderer.setTree(&_entityClipboard);
// initialize the GlowEffect with our widget
bool glow = Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect);
DependencyManager::get<GlowEffect>()->init(glow);
// Make sure any new sounds are loaded as soon as know about them.
connect(tree, &EntityTree::newCollisionSoundURL, DependencyManager::get<SoundCache>().data(), &SoundCache::getSound);
connect(_myAvatar, &MyAvatar::newCollisionSoundURL, DependencyManager::get<SoundCache>().data(), &SoundCache::getSound);
@ -2583,6 +2585,15 @@ void Application::cameraMenuChanged() {
}
}
void Application::reloadResourceCaches() {
emptyLocalCache();
DependencyManager::get<AnimationCache>()->refreshAll();
DependencyManager::get<GeometryCache>()->refreshAll();
DependencyManager::get<SoundCache>()->refreshAll();
DependencyManager::get<TextureCache>()->refreshAll();
}
void Application::rotationModeChanged() {
if (!Menu::getInstance()->isOptionChecked(MenuOption::CenterPlayerInView)) {
_myAvatar->setHeadPitch(0);
@ -3497,9 +3508,6 @@ QImage Application::renderAvatarBillboard(RenderArgs* renderArgs) {
glClear(GL_COLOR_BUFFER_BIT);
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_FALSE);
// the "glow" here causes an alpha of one
Glower glower(renderArgs);
const int BILLBOARD_SIZE = 64;
#if 0
renderRearViewMirror(renderArgs, QRect(0, _glWidget->getDeviceHeight() - BILLBOARD_SIZE,
@ -3814,8 +3822,7 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
});
}
if (true) {
//if (!billboard) {
if (!billboard) {
DependencyManager::get<DeferredLightingEffect>()->setAmbientLightMode(getRenderAmbientLight());
auto skyStage = DependencyManager::get<SceneScriptingInterface>()->getSkyStage();
DependencyManager::get<DeferredLightingEffect>()->setGlobalLight(skyStage->getSunLight()->getDirection(), skyStage->getSunLight()->getColor(), skyStage->getSunLight()->getIntensity(), skyStage->getSunLight()->getAmbientIntensity());
@ -3880,26 +3887,6 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
}
if (!selfAvatarOnly) {
_nodeBoundsDisplay.draw();
// render octree fades if they exist
if (_octreeFades.size() > 0) {
PerformanceTimer perfTimer("octreeFades");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::displaySide() ... octree fades...");
_octreeFadesLock.lockForWrite();
for(std::vector<OctreeFade>::iterator fade = _octreeFades.begin(); fade != _octreeFades.end();) {
fade->render(renderArgs);
if(fade->isDone()) {
fade = _octreeFades.erase(fade);
} else {
++fade;
}
}
_octreeFadesLock.unlock();
}
// give external parties a change to hook in
{
PerformanceTimer perfTimer("inWorldInterface");
@ -4003,6 +3990,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
glViewport(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
glScissor(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
renderArgs->_viewport = glm::ivec4(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
} else {
// if not rendering the billboard, the region is in device independent coordinates; must convert to device
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
@ -4010,6 +3998,8 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
int x = region.x() * ratio, y = region.y() * ratio, width = region.width() * ratio, height = region.height() * ratio;
glViewport(x, size.height() - y - height, width, height);
glScissor(x, size.height() - y - height, width, height);
renderArgs->_viewport = glm::ivec4(x, size.height() - y - height, width, height);
}
glEnable(GL_SCISSOR_TEST);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
@ -4020,6 +4010,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
glPopMatrix();
// reset Viewport and projection matrix
renderArgs->_viewport = glm::ivec4(viewport[0], viewport[1], viewport[2], viewport[3]);
glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);
glDisable(GL_SCISSOR_TEST);
}
@ -4160,17 +4151,6 @@ void Application::nodeKilled(SharedNodePointer node) {
qCDebug(interfaceapp, "model server going away...... v[%f, %f, %f, %f]",
(double)rootDetails.x, (double)rootDetails.y, (double)rootDetails.z, (double)rootDetails.s);
// Add the jurisditionDetails object to the list of "fade outs"
if (!Menu::getInstance()->isOptionChecked(MenuOption::DontFadeOnOctreeServerChanges)) {
OctreeFade fade(OctreeFade::FADE_OUT, NODE_KILLED_RED, NODE_KILLED_GREEN, NODE_KILLED_BLUE);
fade.voxelDetails = rootDetails;
const float slightly_smaller = 0.99f;
fade.voxelDetails.s = fade.voxelDetails.s * slightly_smaller;
_octreeFadesLock.lockForWrite();
_octreeFades.push_back(fade);
_octreeFadesLock.unlock();
}
// If the model server is going away, remove it from our jurisdiction map so we don't send voxels to a dead server
_entityServerJurisdictions.lockForWrite();
_entityServerJurisdictions.erase(_entityServerJurisdictions.find(nodeUUID));
@ -4247,16 +4227,6 @@ int Application::parseOctreeStats(const QByteArray& packet, const SharedNodePoin
qPrintable(serverType),
(double)rootDetails.x, (double)rootDetails.y, (double)rootDetails.z, (double)rootDetails.s);
// Add the jurisditionDetails object to the list of "fade outs"
if (!Menu::getInstance()->isOptionChecked(MenuOption::DontFadeOnOctreeServerChanges)) {
OctreeFade fade(OctreeFade::FADE_OUT, NODE_ADDED_RED, NODE_ADDED_GREEN, NODE_ADDED_BLUE);
fade.voxelDetails = rootDetails;
const float slightly_smaller = 0.99f;
fade.voxelDetails.s = fade.voxelDetails.s * slightly_smaller;
_octreeFadesLock.lockForWrite();
_octreeFades.push_back(fade);
_octreeFadesLock.unlock();
}
} else {
jurisdiction->unlock();
}


@ -59,7 +59,6 @@
#include "scripting/WebWindowClass.h"
#include "ui/BandwidthDialog.h"
#include "ui/ModelsBrowser.h"
#include "ui/NodeBounds.h"
#include "ui/OctreeStatsDialog.h"
#include "ui/SnapshotShareDialog.h"
#include "ui/LodToolsDialog.h"
@ -69,7 +68,6 @@
#include "ui/ApplicationCompositor.h"
#include "ui/RunningScriptsWidget.h"
#include "ui/ToolWindow.h"
#include "octree/OctreeFade.h"
#include "octree/OctreePacketProcessor.h"
#include "UndoStackScriptingInterface.h"
#include "DisplayPlugins.h"
@ -91,13 +89,6 @@ class ProgramObject;
class ScriptEngine;
class GlWindow;
static const float NODE_ADDED_RED = 0.0f;
static const float NODE_ADDED_GREEN = 1.0f;
static const float NODE_ADDED_BLUE = 0.0f;
static const float NODE_KILLED_RED = 1.0f;
static const float NODE_KILLED_GREEN = 0.0f;
static const float NODE_KILLED_BLUE = 0.0f;
static const QString SNAPSHOT_EXTENSION = ".jpg";
static const QString SVO_EXTENSION = ".svo";
static const QString SVO_JSON_EXTENSION = ".svo.json";
@ -297,12 +288,10 @@ public:
virtual void addMenuItem(const QString& path, const QString& name, std::function<void()> onClicked, bool checkable, bool checked, const QString& groupName);
virtual GlWindow* getVisibleWindow();
private:
private:
DisplayPlugin * getActiveDisplayPlugin();
const DisplayPlugin * getActiveDisplayPlugin() const;
public:
NodeBounds& getNodeBoundsDisplay() { return _nodeBoundsDisplay; }
public:
FileLogger* getLogger() { return _logger; }
@ -441,6 +430,8 @@ public slots:
void domainConnectionDenied(const QString& reason);
void cameraMenuChanged();
void reloadResourceCaches();
private slots:
void clearDomainOctreeDetails();
@ -479,6 +470,8 @@ private:
void init();
void cleanupBeforeQuit();
void emptyLocalCache();
void update(float deltaTime);
@ -613,10 +606,6 @@ private:
NodeToOctreeSceneStats _octreeServerSceneStats;
QReadWriteLock _octreeSceneStatsLock;
NodeBounds _nodeBoundsDisplay;
std::vector<OctreeFade> _octreeFades;
QReadWriteLock _octreeFadesLock;
ControllerScriptingInterface _controllerScriptingInterface;
QPointer<LogDialog> _logDialog;
QPointer<SnapshotShareDialog> _snapshotShareDialog;


@ -17,8 +17,8 @@
#include <OctreeConstants.h>
#include <SimpleMovingAverage.h>
const float DEFAULT_DESKTOP_LOD_DOWN_FPS = 30.0;
const float DEFAULT_HMD_LOD_DOWN_FPS = 60.0;
const float DEFAULT_DESKTOP_LOD_DOWN_FPS = 15.0;
const float DEFAULT_HMD_LOD_DOWN_FPS = 30.0;
const float MAX_LIKELY_DESKTOP_FPS = 59.0; // this is essentially, V-synch - 1 fps
const float MAX_LIKELY_HMD_FPS = 74.0; // this is essentially, V-synch - 1 fps
const float INCREASE_LOD_GAP = 15.0f;


@ -9,6 +9,9 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include <QFileDialog>
#include <QMenuBar>
#include <QShortcut>
@ -16,7 +19,6 @@
#include <AddressManager.h>
#include <AudioClient.h>
#include <DependencyManager.h>
#include <GlowEffect.h>
#include <PathUtils.h>
#include <SettingHandle.h>
#include <UserActivityLogger.h>
@ -24,7 +26,6 @@
#include "Application.h"
#include "AccountManager.h"
#include "audio/AudioIOStatsRenderer.h"
#include "audio/AudioScope.h"
#include "avatar/AvatarManager.h"
#include "devices/DdeFaceTracker.h"
@ -37,7 +38,6 @@
#include "SpeechRecognizer.h"
#endif
#include "ui/DialogsManager.h"
#include "ui/NodeBounds.h"
#include "ui/StandAloneJSConsole.h"
#include "InterfaceLogging.h"
@ -252,7 +252,6 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ScriptedMotorControl, 0, true,
avatar, SLOT(updateMotionBehaviorFromMenu()));
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::NamesAboveHeads, 0, true);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::GlowWhenSpeaking, 0, true);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::BlueSpeechSphere, 0, true);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::EnableCharacterController, 0, true,
avatar, SLOT(updateMotionBehaviorFromMenu()));
@ -266,6 +265,8 @@ Menu::Menu() {
displayModeGroup->setExclusive(true);
}
addActionToQMenuAndActionHash(viewMenu, MenuOption::ReloadContent, 0, qApp, SLOT(reloadResourceCaches()));
#if 0
addCheckableActionToQMenuAndActionHash(viewMenu,
MenuOption::Fullscreen,
@ -317,16 +318,6 @@ Menu::Menu() {
SLOT(hmdTools(bool)));
#endif
addActionToQMenuAndActionHash(editMenu, MenuOption::Attachments, 0,
dialogsManager.data(), SLOT(editAttachments()));
MenuWrapper* nodeBordersMenu = viewMenu->addMenu("Server Borders");
NodeBounds& nodeBounds = qApp->getNodeBoundsDisplay();
addCheckableActionToQMenuAndActionHash(nodeBordersMenu, MenuOption::ShowBordersEntityNodes,
Qt::CTRL | Qt::SHIFT | Qt::Key_1, false,
&nodeBounds, SLOT(setShowEntityNodes(bool)));
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::TurnWithHead, 0, false);
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::StandingHMDSensorMode, 0, false,
@ -349,7 +340,6 @@ Menu::Menu() {
0, // QML Qt::SHIFT | Qt::Key_A,
true);
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::AmbientOcclusion);
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::DontFadeOnOctreeServerChanges);
MenuWrapper* ambientLightMenu = renderOptionsMenu->addMenu(MenuOption::RenderAmbientLight);
QActionGroup* ambientLightGroup = new QActionGroup(ambientLightMenu);
@ -402,8 +392,6 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Stars,
0, // QML Qt::Key_Asterisk,
true);
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::EnableGlowEffect, 0, true,
DependencyManager::get<GlowEffect>().data(), SLOT(toggleGlowEffect(bool)));
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Wireframe, Qt::ALT | Qt::Key_W, false);
addActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::LodTools,
@ -602,18 +590,13 @@ Menu::Menu() {
audioScopeFramesGroup->addAction(fiftyFrames);
}
auto statsRenderer = DependencyManager::get<AudioIOStatsRenderer>();
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioStats,
Qt::CTRL | Qt::SHIFT | Qt::Key_A,
false,
statsRenderer.data(),
SLOT(toggle()));
false); //, statsRenderer.data(), SLOT(toggle())); // TODO: convert to dialogbox
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioStatsShowInjectedStreams,
0,
false,
statsRenderer.data(),
SLOT(toggleShowInjectedStreams()));
false); //, statsRenderer.data(), SLOT(toggleShowInjectedStreams)); // TODO: convert to dialogbox
MenuWrapper* physicsOptionsMenu = developerMenu->addMenu("Physics");


@ -178,14 +178,12 @@ namespace MenuOption {
const QString DisplayModelElementProxy = "Display Model Element Bounds";
const QString DisplayDebugTimingDetails = "Display Timing Details";
const QString DontDoPrecisionPicking = "Don't Do Precision Picking";
const QString DontFadeOnOctreeServerChanges = "Don't Fade In/Out on Octree Server Changes";
const QString DontRenderEntitiesAsScene = "Don't Render Entities as Scene";
const QString EchoLocalAudio = "Echo Local Audio";
const QString EchoServerAudio = "Echo Server Audio";
const QString EditEntitiesHelp = "Edit Entities Help...";
const QString Enable3DTVMode = "Enable 3DTV Mode";
const QString EnableCharacterController = "Enable avatar collisions";
const QString EnableGlowEffect = "Enable Glow Effect";
const QString ExpandMyAvatarSimulateTiming = "Expand /myAvatar/simulation";
const QString ExpandMyAvatarTiming = "Expand /myAvatar";
const QString ExpandOtherAvatarTiming = "Expand /otherAvatar";
@ -230,6 +228,7 @@ namespace MenuOption {
const QString Preferences = "Preferences...";
const QString Quit = "Quit";
const QString ReloadAllScripts = "Reload All Scripts";
const QString ReloadContent = "Reload Content (Clears all caches)";
const QString RenderBoundingCollisionShapes = "Show Bounding Collision Shapes";
const QString RenderFocusIndicator = "Show Eye Focus";
const QString RenderHeadCollisionShapes = "Show Head Collision Shapes";


@ -100,23 +100,6 @@ int widthText(float scale, int mono, char const* string) {
return textRenderer(mono)->computeExtent(string).x; // computeWidth(string) * (scale / 0.10);
}
void drawText(int x, int y, float scale, float radians, int mono,
char const* string, const float* color) {
//
// Draws text on screen as stroked so it can be resized
//
glPushMatrix();
glTranslatef(static_cast<float>(x), static_cast<float>(y), 0.0f);
glRotated(double(radians * DEGREES_PER_RADIAN), 0.0, 0.0, 1.0);
glScalef(scale / 0.1f, scale / 0.1f, 1.0f);
glm::vec4 colorV4 = {color[0], color[1], color[2], 1.0f };
textRenderer(mono)->draw(0, 0, string, colorV4);
glPopMatrix();
}
void renderCollisionOverlay(int width, int height, float magnitude, float red, float blue, float green) {
const float MIN_VISIBLE_COLLISION = 0.01f;
if (magnitude > MIN_VISIBLE_COLLISION) {


@ -24,9 +24,6 @@ const glm::vec3 randVector();
void renderWorldBox(gpu::Batch& batch);
int widthText(float scale, int mono, char const* string);
void drawText(int x, int y, float scale, float radians, int mono,
char const* string, const float* color);
void renderCollisionOverlay(int width, int height, float magnitude, float red = 0, float blue = 0, float green = 0);
void runTimingTests();


@ -1,242 +0,0 @@
//
// AudioIOStatsRenderer.cpp
// interface/src/audio
//
// Created by Stephen Birarda on 2014-12-16.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InterfaceConfig.h"
#include <AudioClient.h>
#include <AudioConstants.h>
#include <AudioIOStats.h>
#include <DependencyManager.h>
#include <GeometryCache.h>
#include <NodeList.h>
#include <Util.h>
#include "AudioIOStatsRenderer.h"
AudioIOStatsRenderer::AudioIOStatsRenderer() :
_stats(NULL),
_isEnabled(false),
_shouldShowInjectedStreams(false)
{
// grab the stats object from the audio I/O singleton
_stats = &DependencyManager::get<AudioClient>()->getStats();
}
#ifdef _WIN32
const unsigned int STATS_WIDTH = 1500;
#else
const unsigned int STATS_WIDTH = 650;
#endif
const unsigned int STATS_HEIGHT_PER_LINE = 20;
void AudioIOStatsRenderer::render(const float* color, int width, int height) {
if (!_isEnabled) {
return;
}
const int linesWhenCentered = _shouldShowInjectedStreams ? 34 : 27;
const int CENTERED_BACKGROUND_HEIGHT = STATS_HEIGHT_PER_LINE * linesWhenCentered;
int lines = _shouldShowInjectedStreams ? _stats->getMixerInjectedStreamStatsMap().size() * 7 + 27 : 27;
int statsHeight = STATS_HEIGHT_PER_LINE * lines;
static const glm::vec4 backgroundColor = { 0.2f, 0.2f, 0.2f, 0.6f };
int x = std::max((width - (int)STATS_WIDTH) / 2, 0);
int y = std::max((height - CENTERED_BACKGROUND_HEIGHT) / 2, 0);
int w = STATS_WIDTH;
int h = statsHeight;
DependencyManager::get<GeometryCache>()->renderQuad(x, y, w, h, backgroundColor);
int horizontalOffset = x + 5;
int verticalOffset = y;
float scale = 0.10f;
float rotation = 0.0f;
int font = 2;
char latencyStatString[512];
float audioInputBufferLatency = 0.0f, inputRingBufferLatency = 0.0f, networkRoundtripLatency = 0.0f, mixerRingBufferLatency = 0.0f, outputRingBufferLatency = 0.0f, audioOutputBufferLatency = 0.0f;
AudioStreamStats downstreamAudioStreamStats = _stats->getMixerDownstreamStats();
SharedNodePointer audioMixerNodePointer = DependencyManager::get<NodeList>()->soloNodeOfType(NodeType::AudioMixer);
if (!audioMixerNodePointer.isNull()) {
audioInputBufferLatency = _stats->getAudioInputMsecsReadStats().getWindowAverage();
inputRingBufferLatency = (float) _stats->getInputRungBufferMsecsAvailableStats().getWindowAverage();
networkRoundtripLatency = audioMixerNodePointer->getPingMs();
mixerRingBufferLatency = _stats->getMixerAvatarStreamStats()._framesAvailableAverage * AudioConstants::NETWORK_FRAME_MSECS;
outputRingBufferLatency = downstreamAudioStreamStats._framesAvailableAverage * AudioConstants::NETWORK_FRAME_MSECS;
audioOutputBufferLatency = _stats->getAudioOutputMsecsUnplayedStats().getWindowAverage();
}
float totalLatency = audioInputBufferLatency + inputRingBufferLatency + networkRoundtripLatency + mixerRingBufferLatency + outputRingBufferLatency + audioOutputBufferLatency;
sprintf(latencyStatString,
" Audio input buffer: %7.2fms - avg msecs of samples read to the input ring buffer in last 10s",
(double)audioInputBufferLatency);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
sprintf(latencyStatString,
" Input ring buffer: %7.2fms - avg msecs of samples in input ring buffer in last 10s",
(double)inputRingBufferLatency);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
sprintf(latencyStatString,
" Network to mixer: %7.2fms - half of last ping value calculated by the node list",
(double)(networkRoundtripLatency / 2.0f));
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
sprintf(latencyStatString,
" AudioMixer ring buffer: %7.2fms - avg msecs of samples in audio mixer's ring buffer in last 10s",
(double)mixerRingBufferLatency);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
sprintf(latencyStatString,
" Network to client: %7.2fms - half of last ping value calculated by the node list",
(double)(networkRoundtripLatency / 2.0f));
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
sprintf(latencyStatString,
" Output ring buffer: %7.2fms - avg msecs of samples in output ring buffer in last 10s",
(double)outputRingBufferLatency);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
sprintf(latencyStatString,
" Audio output buffer: %7.2fms - avg msecs of samples in audio output buffer in last 10s",
(double)audioOutputBufferLatency);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
sprintf(latencyStatString, " TOTAL: %7.2fms\n", (double)totalLatency);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
char clientUpstreamMicLabelString[] = "Upstream Mic Audio Packets Sent Gaps (by client):";
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, clientUpstreamMicLabelString, color);
const MovingMinMaxAvg<quint64>& packetSentTimeGaps = _stats->getPacketSentTimeGaps();
char stringBuffer[512];
sprintf(stringBuffer, " Inter-packet timegaps (overall) | min: %9s, max: %9s, avg: %9s",
formatUsecTime(packetSentTimeGaps.getMin()).toLatin1().data(),
formatUsecTime(packetSentTimeGaps.getMax()).toLatin1().data(),
formatUsecTime(packetSentTimeGaps.getAverage()).toLatin1().data());
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
sprintf(stringBuffer, " Inter-packet timegaps (last 30s) | min: %9s, max: %9s, avg: %9s",
formatUsecTime(packetSentTimeGaps.getWindowMin()).toLatin1().data(),
formatUsecTime(packetSentTimeGaps.getWindowMax()).toLatin1().data(),
formatUsecTime(packetSentTimeGaps.getWindowAverage()).toLatin1().data());
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
char upstreamMicLabelString[] = "Upstream mic audio stats (received and reported by audio-mixer):";
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, upstreamMicLabelString, color);
renderAudioStreamStats(&_stats->getMixerAvatarStreamStats(), horizontalOffset, verticalOffset,
scale, rotation, font, color);
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
char downstreamLabelString[] = "Downstream mixed audio stats:";
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, downstreamLabelString, color);
AudioStreamStats downstreamStats = _stats->getMixerDownstreamStats();
renderAudioStreamStats(&downstreamStats, horizontalOffset, verticalOffset,
scale, rotation, font, color, true);
if (_shouldShowInjectedStreams) {
foreach(const AudioStreamStats& injectedStreamAudioStats, _stats->getMixerInjectedStreamStatsMap()) {
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
char upstreamInjectedLabelString[512];
sprintf(upstreamInjectedLabelString, "Upstream injected audio stats: stream ID: %s",
injectedStreamAudioStats._streamIdentifier.toString().toLatin1().data());
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, upstreamInjectedLabelString, color);
renderAudioStreamStats(&injectedStreamAudioStats, horizontalOffset, verticalOffset, scale, rotation, font, color);
}
}
}
void AudioIOStatsRenderer::renderAudioStreamStats(const AudioStreamStats* streamStats, int horizontalOffset, int& verticalOffset,
float scale, float rotation, int font, const float* color, bool isDownstreamStats) {
char stringBuffer[512];
sprintf(stringBuffer, " Packet loss | overall: %5.2f%% (%d lost), last_30s: %5.2f%% (%d lost)",
(double)(streamStats->_packetStreamStats.getLostRate() * 100.0f),
streamStats->_packetStreamStats._lost,
(double)(streamStats->_packetStreamWindowStats.getLostRate() * 100.0f),
streamStats->_packetStreamWindowStats._lost);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
if (isDownstreamStats) {
sprintf(stringBuffer, " Ringbuffer frames | desired: %u, avg_available(10s): %u+%d, available: %u+%d",
streamStats->_desiredJitterBufferFrames,
streamStats->_framesAvailableAverage,
(int)((float)_stats->getAudioInputMsecsReadStats().getWindowAverage() / AudioConstants::NETWORK_FRAME_MSECS),
streamStats->_framesAvailable,
(int)(_stats->getAudioOutputMsecsUnplayedStats().getCurrentIntervalLastSample()
/ AudioConstants::NETWORK_FRAME_MSECS));
} else {
sprintf(stringBuffer, " Ringbuffer frames | desired: %u, avg_available(10s): %u, available: %u",
streamStats->_desiredJitterBufferFrames,
streamStats->_framesAvailableAverage,
streamStats->_framesAvailable);
}
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
sprintf(stringBuffer, " Ringbuffer stats | starves: %u, prev_starve_lasted: %u, frames_dropped: %u, overflows: %u",
streamStats->_starveCount,
streamStats->_consecutiveNotMixedCount,
streamStats->_framesDropped,
streamStats->_overflowCount);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
sprintf(stringBuffer, " Inter-packet timegaps (overall) | min: %9s, max: %9s, avg: %9s",
formatUsecTime(streamStats->_timeGapMin).toLatin1().data(),
formatUsecTime(streamStats->_timeGapMax).toLatin1().data(),
formatUsecTime(streamStats->_timeGapAverage).toLatin1().data());
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
sprintf(stringBuffer, " Inter-packet timegaps (last 30s) | min: %9s, max: %9s, avg: %9s",
formatUsecTime(streamStats->_timeGapWindowMin).toLatin1().data(),
formatUsecTime(streamStats->_timeGapWindowMax).toLatin1().data(),
formatUsecTime(streamStats->_timeGapWindowAverage).toLatin1().data());
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
}

View file

@ -1,45 +0,0 @@
//
// AudioIOStatsRenderer.h
// interface/src/audio
//
// Created by Stephen Birarda on 2014-12-16.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AudioIOStatsRenderer_h
#define hifi_AudioIOStatsRenderer_h
#include <QObject>
#include <DependencyManager.h>
class AudioIOStats;
class AudioStreamStats;
class AudioIOStatsRenderer : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
public:
void render(const float* color, int width, int height);
public slots:
void toggle() { _isEnabled = !_isEnabled; }
void toggleShowInjectedStreams() { _shouldShowInjectedStreams = !_shouldShowInjectedStreams; }
protected:
AudioIOStatsRenderer();
private:
// audio stats methods for rendering
void renderAudioStreamStats(const AudioStreamStats* streamStats, int horizontalOffset, int& verticalOffset,
float scale, float rotation, int font, const float* color, bool isDownstreamStats = false);
const AudioIOStats* _stats;
bool _isEnabled;
bool _shouldShowInjectedStreams;
};
#endif // hifi_AudioIOStatsRenderer_h

View file

@ -24,7 +24,6 @@
#include <DeferredLightingEffect.h>
#include <GeometryUtil.h>
#include <GlowEffect.h>
#include <LODManager.h>
#include <NodeList.h>
#include <NumericalConstants.h>
@ -410,9 +409,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, boo
float GLOW_FROM_AVERAGE_LOUDNESS = ((this == DependencyManager::get<AvatarManager>()->getMyAvatar())
? 0.0f
: MAX_GLOW * getHeadData()->getAudioLoudness() / GLOW_MAX_LOUDNESS);
if (!Menu::getInstance()->isOptionChecked(MenuOption::GlowWhenSpeaking)) {
GLOW_FROM_AVERAGE_LOUDNESS = 0.0f;
}
GLOW_FROM_AVERAGE_LOUDNESS = 0.0f;
float glowLevel = _moving && distanceToTarget > GLOW_DISTANCE && renderArgs->_renderMode == RenderArgs::NORMAL_RENDER_MODE
? 1.0f
@ -526,7 +523,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, boo
auto cameraMode = Application::getInstance()->getCamera()->getMode();
if (!isMyAvatar() || cameraMode != CAMERA_MODE_FIRST_PERSON) {
renderDisplayName(batch, *renderArgs->_viewFrustum);
renderDisplayName(batch, *renderArgs->_viewFrustum, renderArgs->_viewport);
}
}
@ -579,8 +576,6 @@ void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool
fixupModelsInScene();
{
Glower glower(renderArgs, glowLevel);
if (_shouldRenderBillboard || !(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
if (postLighting || renderArgs->_renderMode == RenderArgs::SHADOW_RENDER_MODE) {
// render the billboard until both models are loaded
@ -636,7 +631,7 @@ void Avatar::renderBillboard(RenderArgs* renderArgs) {
_billboardTexture = DependencyManager::get<TextureCache>()->getTexture(
uniqueUrl, DEFAULT_TEXTURE, false, _billboard);
}
if (!_billboardTexture->isLoaded()) {
if (!_billboardTexture || !_billboardTexture->isLoaded()) {
return;
}
// rotate about vertical to face the camera
@ -679,7 +674,7 @@ glm::vec3 Avatar::getDisplayNamePosition() const {
return namePosition;
}
Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, float fontSize) const {
Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, float fontSize, const glm::ivec4& viewport) const {
Transform result;
// We assume textPosition is within the frustum
glm::vec3 textPosition = getDisplayNamePosition();
@ -698,12 +693,7 @@ Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, floa
glm::vec4 p0 = viewProj * glm::vec4(testPoint0, 1.0);
glm::vec4 p1 = viewProj * glm::vec4(testPoint1, 1.0);
// TODO REMOVE vvv
GLint viewportMatrix[4];
glGetIntegerv(GL_VIEWPORT, viewportMatrix);
glm::dmat4 modelViewMatrix;
float windowSizeY = viewportMatrix[3] - viewportMatrix[1];
// TODO REMOVE ^^^
float windowSizeY = viewport.w;
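// The ivec4 viewport is assumed to follow the usual (x, y, width, height) layout, so
// viewport.w is the window height in pixels, the same value the removed
// glGetIntegerv(GL_VIEWPORT) path computed as viewportMatrix[3] - viewportMatrix[1]
// (assuming a zero y offset).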
const float DESIRED_HIGHT_ON_SCREEN = 20; // In pixels (this is double on retinas)
@ -736,7 +726,7 @@ Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, floa
}
void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum) const {
void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum, const glm::ivec4& viewport) const {
bool shouldShowReceiveStats = DependencyManager::get<AvatarManager>()->shouldShowReceiveStats() && !isMyAvatar();
// If we have nothing to draw, or it's totally transparent, return
@ -778,7 +768,7 @@ void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum) co
(_displayNameAlpha / DISPLAYNAME_ALPHA) * DISPLAYNAME_BACKGROUND_ALPHA);
// Compute display name transform
auto textTransform = calculateDisplayNameTransform(frustum, renderer->getFontSize());
auto textTransform = calculateDisplayNameTransform(frustum, renderer->getFontSize(), viewport);
batch.setModelTransform(textTransform);
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, false, true, true, true);

View file

@ -234,8 +234,8 @@ protected:
float getPelvisFloatingHeight() const;
glm::vec3 getDisplayNamePosition() const;
Transform calculateDisplayNameTransform(const ViewFrustum& frustum, float fontSize) const;
void renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum) const;
Transform calculateDisplayNameTransform(const ViewFrustum& frustum, float fontSize, const glm::ivec4& viewport) const;
void renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum, const glm::ivec4& viewport) const;
virtual void renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel = 0.0f);
virtual bool shouldRenderHead(const RenderArgs* renderArgs) const;
virtual void fixupModelsInScene();

View file

@ -25,7 +25,6 @@
#endif
#include <GlowEffect.h>
#include <PerfStat.h>
#include <RegisteredMetaTypes.h>
#include <UUID.h>
@ -72,9 +71,13 @@ void AvatarManager::init() {
_myAvatar->init();
_avatarHash.insert(MY_AVATAR_KEY, _myAvatar);
connect(DependencyManager::get<SceneScriptingInterface>().data(), &SceneScriptingInterface::shouldRenderAvatarsChanged, this, &AvatarManager::updateAvatarRenderStatus, Qt::QueuedConnection);
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
_myAvatar->addToScene(_myAvatar, scene, pendingChanges);
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
_myAvatar->addToScene(_myAvatar, scene, pendingChanges);
}
scene->enqueuePendingChanges(pendingChanges);
}
@ -158,7 +161,9 @@ AvatarSharedPointer AvatarManager::addAvatar(const QUuid& sessionUUID, const QWe
auto avatar = std::dynamic_pointer_cast<Avatar>(AvatarHashMap::addAvatar(sessionUUID, mixerWeakPointer));
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
avatar->addToScene(avatar, scene, pendingChanges);
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
avatar->addToScene(avatar, scene, pendingChanges);
}
scene->enqueuePendingChanges(pendingChanges);
return avatar;
}
@ -310,3 +315,23 @@ void AvatarManager::updateAvatarPhysicsShape(const QUuid& id) {
}
}
}
void AvatarManager::updateAvatarRenderStatus(bool shouldRenderAvatars) {
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
for (auto avatarData : _avatarHash) {
auto avatar = std::dynamic_pointer_cast<Avatar>(avatarData);
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
avatar->addToScene(avatar, scene, pendingChanges);
scene->enqueuePendingChanges(pendingChanges);
}
} else {
for (auto avatarData : _avatarHash) {
auto avatar = std::dynamic_pointer_cast<Avatar>(avatarData);
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
avatar->removeFromScene(avatar, scene, pendingChanges);
scene->enqueuePendingChanges(pendingChanges);
}
}
}
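// A minimal consolidation sketch (assumes the shouldRenderAvatars parameter mirrors the
// SceneScriptingInterface flag checked above): both branches differ only in whether each
// avatar is added to or removed from the scene, so the update could be a single loop:
//
//     render::ScenePointer scene = Application::getInstance()->getMain3DScene();
//     render::PendingChanges pendingChanges;
//     for (auto avatarData : _avatarHash) {
//         auto avatar = std::dynamic_pointer_cast<Avatar>(avatarData);
//         if (shouldRenderAvatars) {
//             avatar->addToScene(avatar, scene, pendingChanges);
//         } else {
//             avatar->removeFromScene(avatar, scene, pendingChanges);
//         }
//     }
//     scene->enqueuePendingChanges(pendingChanges);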

View file

@ -63,6 +63,7 @@ public:
public slots:
void setShouldShowReceiveStats(bool shouldShowReceiveStats) { _shouldShowReceiveStats = shouldShowReceiveStats; }
void updateAvatarRenderStatus(bool shouldRenderAvatars);
private:
AvatarManager(QObject* parent = 0);

View file

@ -56,18 +56,14 @@ void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const FBX
glm::translate(state.getDefaultTranslationInConstrainedFrame()) *
joint.preTransform * glm::mat4_cast(joint.preRotation)));
glm::vec3 pitchYawRoll = safeEulerAngles(_owningHead->getFinalOrientationInLocalFrame());
if (owningAvatar->isMyAvatar()) {
glm::vec3 lean = glm::radians(glm::vec3(_owningHead->getFinalLeanForward(),
_owningHead->getTorsoTwist(),
_owningHead->getFinalLeanSideways()));
pitchYawRoll -= lean;
}
glm::vec3 lean = glm::radians(glm::vec3(_owningHead->getFinalLeanForward(),
_owningHead->getTorsoTwist(),
_owningHead->getFinalLeanSideways()));
pitchYawRoll -= lean;
state.setRotationInConstrainedFrame(glm::angleAxis(-pitchYawRoll.z, glm::normalize(inverse * axes[2]))
* glm::angleAxis(pitchYawRoll.y, glm::normalize(inverse * axes[1]))
* glm::angleAxis(-pitchYawRoll.x, glm::normalize(inverse * axes[0]))
* joint.rotation, DEFAULT_PRIORITY);
}
void FaceModel::maybeUpdateEyeRotation(Model* model, const JointState& parentState, const FBXJoint& joint, JointState& state) {

View file

@ -362,28 +362,6 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
}
void MyAvatar::renderDebugBodyPoints() {
glm::vec3 torsoPosition(getPosition());
glm::vec3 headPosition(getHead()->getEyePosition());
float torsoToHead = glm::length(headPosition - torsoPosition);
glm::vec3 position;
qCDebug(interfaceapp, "head-above-torso %.2f, scale = %0.2f", (double)torsoToHead, (double)getScale());
// Torso Sphere
position = torsoPosition;
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
DependencyManager::get<GeometryCache>()->renderSphere(0.2f, 10.0f, 10.0f, glm::vec4(0, 1, 0, .5f));
glPopMatrix();
// Head Sphere
position = headPosition;
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
DependencyManager::get<GeometryCache>()->renderSphere(0.15f, 10.0f, 10.0f, glm::vec4(0, 1, 0, .5f));
glPopMatrix();
}
// virtual
void MyAvatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, bool postLighting) {
// don't render if we've been asked to disable local rendering
@ -394,8 +372,9 @@ void MyAvatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, b
Avatar::render(renderArgs, cameraPosition, postLighting);
// don't display IK constraints in shadow mode
if (Menu::getInstance()->isOptionChecked(MenuOption::ShowIKConstraints) && postLighting) {
_skeletonModel.renderIKConstraints();
if (Menu::getInstance()->isOptionChecked(MenuOption::ShowIKConstraints) &&
renderArgs && renderArgs->_batch) {
_skeletonModel.renderIKConstraints(*renderArgs->_batch);
}
}

View file

@ -62,7 +62,6 @@ public:
virtual void render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, bool postLighting = false) override;
virtual void renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel = 0.0f) override;
virtual bool shouldRenderHead(const RenderArgs* renderArgs) const override;
void renderDebugBodyPoints();
// setters
void setLeanScale(float scale) { _leanScale = scale; }

View file

@ -120,8 +120,8 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
Hand* hand = _owningAvatar->getHand();
hand->getLeftRightPalmIndices(leftPalmIndex, rightPalmIndex);
const float HAND_RESTORATION_RATE = 0.25f;
if (leftPalmIndex == -1 || rightPalmIndex == -1) {
const float HAND_RESTORATION_RATE = 0.25f;
if (leftPalmIndex == -1 && rightPalmIndex == -1) {
// palms are not yet set, use mouse
if (_owningAvatar->getHandState() == HAND_STATE_NULL) {
restoreRightHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
@ -138,8 +138,16 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
} else {
applyPalmData(geometry.leftHandJointIndex, hand->getPalms()[leftPalmIndex]);
applyPalmData(geometry.rightHandJointIndex, hand->getPalms()[rightPalmIndex]);
if (leftPalmIndex != -1) {
applyPalmData(geometry.leftHandJointIndex, hand->getPalms()[leftPalmIndex]);
} else {
restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
}
if (rightPalmIndex != -1) {
applyPalmData(geometry.rightHandJointIndex, hand->getPalms()[rightPalmIndex]);
} else {
restoreRightHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
}
}
if (_isFirstPerson) {
@ -186,9 +194,9 @@ void SkeletonModel::getHandShapes(int jointIndex, QVector<const Shape*>& shapes)
}
}
void SkeletonModel::renderIKConstraints() {
renderJointConstraints(getRightHandJointIndex());
renderJointConstraints(getLeftHandJointIndex());
void SkeletonModel::renderIKConstraints(gpu::Batch& batch) {
renderJointConstraints(batch, getRightHandJointIndex());
renderJointConstraints(batch, getLeftHandJointIndex());
}
class IndexValue {
@ -312,26 +320,27 @@ void SkeletonModel::maybeUpdateEyeRotation(const JointState& parentState, const
_owningAvatar->getHead()->getFaceModel().maybeUpdateEyeRotation(this, parentState, joint, state);
}
void SkeletonModel::renderJointConstraints(int jointIndex) {
void SkeletonModel::renderJointConstraints(gpu::Batch& batch, int jointIndex) {
if (jointIndex == -1 || jointIndex >= _jointStates.size()) {
return;
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
const float BASE_DIRECTION_SIZE = 0.3f;
float directionSize = BASE_DIRECTION_SIZE * extractUniformScale(_scale);
glLineWidth(3.0f);
batch._glLineWidth(3.0f);
do {
const FBXJoint& joint = geometry.joints.at(jointIndex);
const JointState& jointState = _jointStates.at(jointIndex);
glm::vec3 position = _rotation * jointState.getPosition() + _translation;
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glm::quat parentRotation = (joint.parentIndex == -1) ? _rotation : _rotation * _jointStates.at(joint.parentIndex).getRotation();
glm::vec3 rotationAxis = glm::axis(parentRotation);
glRotatef(glm::degrees(glm::angle(parentRotation)), rotationAxis.x, rotationAxis.y, rotationAxis.z);
float fanScale = directionSize * 0.75f;
glScalef(fanScale, fanScale, fanScale);
Transform transform = Transform();
transform.setTranslation(position);
transform.setRotation(parentRotation);
transform.setScale(fanScale);
batch.setModelTransform(transform);
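// The Transform above stands in for the removed glTranslatef/glRotatef/glScalef calls:
// it composes the joint position, the parent rotation, and the uniform fanScale into one
// model transform that is handed to the batch instead of the fixed-function matrix stack.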
const int AXIS_COUNT = 3;
auto geometryCache = DependencyManager::get<GeometryCache>();
@ -362,17 +371,14 @@ void SkeletonModel::renderJointConstraints(int jointIndex) {
// TODO: this is really inefficient, constantly recreating these vertex buffers. It would be
// better if the skeleton model cached these buffers for each of the joints it renders
geometryCache->updateVertices(_triangleFanID, points, color);
geometryCache->renderVertices(gpu::TRIANGLE_FAN, _triangleFanID);
geometryCache->renderVertices(batch, gpu::TRIANGLE_FAN, _triangleFanID);
}
glPopMatrix();
renderOrientationDirections(jointIndex, position, _rotation * jointState.getRotation(), directionSize);
jointIndex = joint.parentIndex;
} while (jointIndex != -1 && geometry.joints.at(jointIndex).isFree);
glLineWidth(1.0f);
}
void SkeletonModel::renderOrientationDirections(int jointIndex, glm::vec3 position, const glm::quat& orientation, float size) {
@ -798,19 +804,24 @@ void SkeletonModel::renderBoundingCollisionShapes(gpu::Batch& batch, float alpha
transform.setTranslation(endPoint);
batch.setModelTransform(transform);
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->renderSphere(batch, _boundingShape.getRadius(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS, glm::vec4(0.6f, 0.6f, 0.8f, alpha));
geometryCache->renderSphere(batch, _boundingShape.getRadius(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS,
glm::vec4(0.6f, 0.6f, 0.8f, alpha));
// draw a yellow sphere at the capsule startpoint
glm::vec3 startPoint;
_boundingShape.getStartPoint(startPoint);
startPoint = startPoint - _translation;
glm::vec3 axis = endPoint - startPoint;
glTranslatef(-axis.x, -axis.y, -axis.z);
geometryCache->renderSphere(_boundingShape.getRadius(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS, glm::vec4(0.8f, 0.8f, 0.6f, alpha));
Transform axisTransform = Transform();
axisTransform.setTranslation(-axis);
batch.setModelTransform(axisTransform);
geometryCache->renderSphere(batch, _boundingShape.getRadius(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS,
glm::vec4(0.8f, 0.8f, 0.6f, alpha));
// draw a green cylinder between the two points
glm::vec3 origin(0.0f);
Avatar::renderJointConnectingCone(batch, origin, axis, _boundingShape.getRadius(), _boundingShape.getRadius(), glm::vec4(0.6f, 0.8f, 0.6f, alpha));
Avatar::renderJointConnectingCone(batch, origin, axis, _boundingShape.getRadius(), _boundingShape.getRadius(),
glm::vec4(0.6f, 0.8f, 0.6f, alpha));
#endif
}

View file

@ -36,7 +36,7 @@ public:
/// \param shapes[out] list in which is stored pointers to hand shapes
void getHandShapes(int jointIndex, QVector<const Shape*>& shapes) const;
void renderIKConstraints();
void renderIKConstraints(gpu::Batch& batch);
/// Returns the index of the left hand joint, or -1 if not found.
int getLeftHandJointIndex() const { return isActive() ? _geometry->getFBXGeometry().leftHandJointIndex : -1; }
@ -145,7 +145,7 @@ protected:
private:
void renderJointConstraints(int jointIndex);
void renderJointConstraints(gpu::Batch& batch, int jointIndex);
void renderOrientationDirections(int jointIndex, glm::vec3 position, const glm::quat& orientation, float size);
struct OrientationLineIDs {

View file

@ -1,68 +0,0 @@
//
// OctreeFade.cpp
// interface/src/octree
//
// Created by Brad Hefta-Gaub on 8/6/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InterfaceConfig.h"
#include <GlowEffect.h>
#include <GeometryCache.h>
#include <OctreeConstants.h>
#include "Application.h"
#include "OctreeFade.h"
const float OctreeFade::FADE_OUT_START = 0.5f;
const float OctreeFade::FADE_OUT_END = 0.05f;
const float OctreeFade::FADE_OUT_STEP = 0.9f;
const float OctreeFade::FADE_IN_START = 0.05f;
const float OctreeFade::FADE_IN_END = 0.5f;
const float OctreeFade::FADE_IN_STEP = 1.1f;
const float OctreeFade::DEFAULT_RED = 0.5f;
const float OctreeFade::DEFAULT_GREEN = 0.5f;
const float OctreeFade::DEFAULT_BLUE = 0.5f;
OctreeFade::OctreeFade(FadeDirection direction, float red, float green, float blue) :
direction(direction),
red(red),
green(green),
blue(blue)
{
opacity = (direction == FADE_OUT) ? FADE_OUT_START : FADE_IN_START;
}
void OctreeFade::render(RenderArgs* renderArgs) {
DependencyManager::get<GlowEffect>()->begin(renderArgs);
glDisable(GL_LIGHTING);
glPushMatrix();
glScalef(1.0f, 1.0f, 1.0f);
glTranslatef(voxelDetails.x + voxelDetails.s * 0.5f,
voxelDetails.y + voxelDetails.s * 0.5f,
voxelDetails.z + voxelDetails.s * 0.5f);
glLineWidth(1.0f);
DependencyManager::get<GeometryCache>()->renderSolidCube(voxelDetails.s, glm::vec4(red, green, blue, opacity));
glLineWidth(1.0f);
glPopMatrix();
glEnable(GL_LIGHTING);
DependencyManager::get<GlowEffect>()->end(renderArgs);
opacity *= (direction == FADE_OUT) ? FADE_OUT_STEP : FADE_IN_STEP;
}
bool OctreeFade::isDone() const {
if (direction == FADE_OUT) {
return opacity <= FADE_OUT_END;
} else {
return opacity >= FADE_IN_END;
}
return true; // unexpected case, assume we're done
}

View file

@ -1,46 +0,0 @@
//
// OctreeFade.h
// interface/src/octree
//
// Created by Brad Hefta-Gaub on 8/6/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_OctreeFade_h
#define hifi_OctreeFade_h
#include <OctalCode.h> // for VoxelPositionSize
class OctreeFade {
public:
enum FadeDirection { FADE_OUT, FADE_IN};
static const float FADE_OUT_START;
static const float FADE_OUT_END;
static const float FADE_OUT_STEP;
static const float FADE_IN_START;
static const float FADE_IN_END;
static const float FADE_IN_STEP;
static const float DEFAULT_RED;
static const float DEFAULT_GREEN;
static const float DEFAULT_BLUE;
VoxelPositionSize voxelDetails;
FadeDirection direction;
float opacity;
float red;
float green;
float blue;
OctreeFade(FadeDirection direction = FADE_OUT, float red = DEFAULT_RED,
float green = DEFAULT_GREEN, float blue = DEFAULT_BLUE);
void render(RenderArgs* renderArgs);
bool isDone() const;
};
#endif // hifi_OctreeFade_h

View file

@ -198,10 +198,7 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
updateTooltips();
// auto deviceSize = qApp->getDeviceSize();
// glViewport(0, 0, deviceSize.width(), deviceSize.height());
renderArgs->_context->syncCache();
auto deviceSize = qApp->getDeviceSize();
//Handle fading and deactivation/activation of UI
gpu::Batch batch;
@ -209,6 +206,7 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->useSimpleDrawPipeline(batch);
batch.setViewportTransform(glm::ivec4(0, 0, deviceSize.width(), deviceSize.height()));
batch.setModelTransform(Transform());
batch.setViewTransform(Transform());
batch.setProjectionTransform(mat4());
@ -538,75 +536,6 @@ void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {
}
}
//Renders a small magnification of the currently bound texture at the coordinates
void ApplicationCompositor::renderMagnifier(gpu::Batch& batch, const glm::vec2& magPos, float sizeMult, bool showBorder) {
if (!_magnifier) {
return;
}
auto canvasSize = qApp->getCanvasSize();
const int widgetWidth = canvasSize.x;
const int widgetHeight = canvasSize.y;
const float halfWidth = (MAGNIFY_WIDTH / _textureAspectRatio) * sizeMult / 2.0f;
const float halfHeight = MAGNIFY_HEIGHT * sizeMult / 2.0f;
// Magnification Texture Coordinates
const float magnifyULeft = (magPos.x - halfWidth) / (float)widgetWidth;
const float magnifyURight = (magPos.x + halfWidth) / (float)widgetWidth;
const float magnifyVTop = 1.0f - (magPos.y - halfHeight) / (float)widgetHeight;
const float magnifyVBottom = 1.0f - (magPos.y + halfHeight) / (float)widgetHeight;
const float newHalfWidth = halfWidth * MAGNIFY_MULT;
const float newHalfHeight = halfHeight * MAGNIFY_MULT;
//Get yaw / pitch value for the corners
const glm::vec2 topLeftYawPitch = overlayToSpherical(glm::vec2(magPos.x - newHalfWidth,
magPos.y - newHalfHeight));
const glm::vec2 bottomRightYawPitch = overlayToSpherical(glm::vec2(magPos.x + newHalfWidth,
magPos.y + newHalfHeight));
const glm::vec3 bottomLeft = getPoint(topLeftYawPitch.x, bottomRightYawPitch.y);
const glm::vec3 bottomRight = getPoint(bottomRightYawPitch.x, bottomRightYawPitch.y);
const glm::vec3 topLeft = getPoint(topLeftYawPitch.x, topLeftYawPitch.y);
const glm::vec3 topRight = getPoint(bottomRightYawPitch.x, topLeftYawPitch.y);
auto geometryCache = DependencyManager::get<GeometryCache>();
if (bottomLeft != _previousMagnifierBottomLeft || bottomRight != _previousMagnifierBottomRight
|| topLeft != _previousMagnifierTopLeft || topRight != _previousMagnifierTopRight) {
QVector<glm::vec3> border;
border << topLeft;
border << bottomLeft;
border << bottomRight;
border << topRight;
border << topLeft;
geometryCache->updateVertices(_magnifierBorder, border, glm::vec4(1.0f, 0.0f, 0.0f, _alpha));
_previousMagnifierBottomLeft = bottomLeft;
_previousMagnifierBottomRight = bottomRight;
_previousMagnifierTopLeft = topLeft;
_previousMagnifierTopRight = topRight;
}
glPushMatrix(); {
if (showBorder) {
glDisable(GL_TEXTURE_2D);
glLineWidth(1.0f);
//Outer Line
geometryCache->renderVertices(gpu::LINE_STRIP, _magnifierBorder);
glEnable(GL_TEXTURE_2D);
}
glm::vec4 magnifierColor = { 1.0f, 1.0f, 1.0f, _alpha };
DependencyManager::get<GeometryCache>()->renderQuad(bottomLeft, bottomRight, topRight, topLeft,
glm::vec2(magnifyULeft, magnifyVBottom),
glm::vec2(magnifyURight, magnifyVBottom),
glm::vec2(magnifyURight, magnifyVTop),
glm::vec2(magnifyULeft, magnifyVTop),
magnifierColor, _magnifierQuad);
} glPopMatrix();
}
void ApplicationCompositor::buildHemiVertices(
const float fov, const float aspectRatio, const int slices, const int stacks) {
static float textureFOV = 0.0f, textureAspectRatio = 1.0f;

View file

@ -77,7 +77,6 @@ private:
void updateTooltips();
void renderPointers(gpu::Batch& batch);
void renderMagnifier(gpu::Batch& batch, const glm::vec2& magPos, float sizeMult, bool showBorder);
void renderControllerPointers(gpu::Batch& batch);
void renderPointersOculus(gpu::Batch& batch);

View file

@ -27,7 +27,6 @@
#include <PerfStat.h>
#include "AudioClient.h"
#include "audio/AudioIOStatsRenderer.h"
#include "audio/AudioScope.h"
#include "Application.h"
#include "ApplicationOverlay.h"
@ -127,6 +126,7 @@ void ApplicationOverlay::renderOverlays(RenderArgs* renderArgs) {
glDisable(GL_LIGHTING);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glUseProgram(0);
// give external parties a chance to hook in
emit qApp->renderingOverlay();
@ -164,19 +164,6 @@ void ApplicationOverlay::renderStatsAndLogs(RenderArgs* renderArgs) {
drawText(canvasSize.x - 100, canvasSize.y - timerBottom,
0.30f, 0.0f, 0, frameTimer.toUtf8().constData(), WHITE_TEXT);
}
glPointSize(1.0f);
glDisable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
NodeBounds& nodeBoundsDisplay = qApp->getNodeBoundsDisplay();
nodeBoundsDisplay.drawOverlay();
glEnable(GL_DEPTH_TEST);
glEnable(GL_LIGHTING);
glEnable(GL_BLEND);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
fboViewport(_overlayFramebuffer);
*/
}

View file

@ -1,183 +0,0 @@
//
// NodeBounds.cpp
// interface/src/ui
//
// Created by Ryan Huffman on 05/14/14.
// Copyright 2014 High Fidelity, Inc.
//
// This class draws a border around the different Entity nodes on the current domain,
// and a semi-transparent cube around the currently mouse-overed node.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <DependencyManager.h>
#include <GeometryCache.h>
#include "Application.h"
#include "Util.h"
#include "NodeBounds.h"
NodeBounds::NodeBounds(QObject* parent) :
QObject(parent),
_showEntityNodes(false),
_overlayText() {
}
void NodeBounds::draw() {
if (!_showEntityNodes) {
_overlayText[0] = '\0';
return;
}
NodeToJurisdictionMap& entityServerJurisdictions = Application::getInstance()->getEntityServerJurisdictions();
NodeToJurisdictionMap* serverJurisdictions;
// Compute ray to find selected nodes later on. We can't use the pre-computed ray in Application because it centers
// itself after the cursor disappears.
PickRay pickRay = qApp->computePickRay();
// Variables to keep track of the selected node and properties to draw the cube later if needed
Node* selectedNode = NULL;
float selectedDistance = FLT_MAX;
bool selectedIsInside = true;
glm::vec3 selectedCenter;
float selectedScale = 0;
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachNode([&](const SharedNodePointer& node){
NodeType_t nodeType = node->getType();
if (nodeType == NodeType::EntityServer && _showEntityNodes) {
serverJurisdictions = &entityServerJurisdictions;
} else {
return;
}
QUuid nodeUUID = node->getUUID();
serverJurisdictions->lockForRead();
if (serverJurisdictions->find(nodeUUID) != serverJurisdictions->end()) {
const JurisdictionMap& map = (*serverJurisdictions)[nodeUUID];
unsigned char* rootCode = map.getRootOctalCode();
if (rootCode) {
VoxelPositionSize rootDetails;
voxelDetailsForCode(rootCode, rootDetails);
serverJurisdictions->unlock();
glm::vec3 location(rootDetails.x, rootDetails.y, rootDetails.z);
AACube serverBounds(location, rootDetails.s);
glm::vec3 center = serverBounds.getVertex(BOTTOM_RIGHT_NEAR)
+ ((serverBounds.getVertex(TOP_LEFT_FAR) - serverBounds.getVertex(BOTTOM_RIGHT_NEAR)) / 2.0f);
const float ENTITY_NODE_SCALE = 0.99f;
float scaleFactor = rootDetails.s;
// Scale by 0.92 - 1.00 depending on the scale of the node. This allows smaller nodes to scale in
// a bit and not overlap larger nodes.
scaleFactor *= 0.92f + (rootDetails.s * 0.08f);
// Scale different node types slightly differently because it's common for them to overlap.
if (nodeType == NodeType::EntityServer) {
scaleFactor *= ENTITY_NODE_SCALE;
}
float red, green, blue;
getColorForNodeType(nodeType, red, green, blue);
drawNodeBorder(center, scaleFactor, red, green, blue);
float distance;
BoxFace face;
bool inside = serverBounds.contains(pickRay.origin);
bool colliding = serverBounds.findRayIntersection(pickRay.origin, pickRay.direction, distance, face);
// If the camera is inside a node it will be "selected" if you don't have your cursor over another node
// that you aren't inside.
if (colliding && (!selectedNode || (!inside && (distance < selectedDistance || selectedIsInside)))) {
selectedNode = node.data();
selectedDistance = distance;
selectedIsInside = inside;
selectedCenter = center;
selectedScale = scaleFactor;
}
} else {
serverJurisdictions->unlock();
}
} else {
serverJurisdictions->unlock();
}
});
if (selectedNode) {
glPushMatrix();
glTranslatef(selectedCenter.x, selectedCenter.y, selectedCenter.z);
glScalef(selectedScale, selectedScale, selectedScale);
float red, green, blue;
getColorForNodeType(selectedNode->getType(), red, green, blue);
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, glm::vec4(red, green, blue, 0.2f));
glPopMatrix();
HifiSockAddr addr = selectedNode->getPublicSocket();
QString overlay = QString("%1:%2 %3ms")
.arg(addr.getAddress().toString())
.arg(addr.getPort())
.arg(selectedNode->getPingMs())
.left(MAX_OVERLAY_TEXT_LENGTH);
// Ideally we'd just use a QString, but I ran into weird blinking issues using
// constData() directly, as if the data was being overwritten.
strcpy(_overlayText, overlay.toLocal8Bit().constData());
} else {
_overlayText[0] = '\0';
}
}
void NodeBounds::drawNodeBorder(const glm::vec3& center, float scale, float red, float green, float blue) {
glPushMatrix();
glTranslatef(center.x, center.y, center.z);
glScalef(scale, scale, scale);
glLineWidth(2.5);
DependencyManager::get<GeometryCache>()->renderWireCube(1.0f, glm::vec4(red, green, blue, 1.0f));
glPopMatrix();
}
void NodeBounds::getColorForNodeType(NodeType_t nodeType, float& red, float& green, float& blue) {
red = nodeType == 0.0;
green = 0.0;
blue = nodeType == NodeType::EntityServer ? 1.0 : 0.0;
}
void NodeBounds::drawOverlay() {
if (strlen(_overlayText) > 0) {
Application* application = Application::getInstance();
const float TEXT_COLOR[] = { 0.90f, 0.90f, 0.90f };
const float TEXT_SCALE = 0.1f;
const int TEXT_HEIGHT = 10;
const float ROTATION = 0.0f;
const int FONT = 2;
const int PADDING = 10;
const int MOUSE_OFFSET = 10;
const int BACKGROUND_BEVEL = 3;
int mouseX = application->getTrueMouseX(),
mouseY = application->getTrueMouseY(),
textWidth = widthText(TEXT_SCALE, 0, _overlayText);
DependencyManager::get<GeometryCache>()->renderBevelCornersRect(
mouseX + MOUSE_OFFSET, mouseY - TEXT_HEIGHT - PADDING,
textWidth + (2 * PADDING), TEXT_HEIGHT + (2 * PADDING), BACKGROUND_BEVEL,
glm::vec4(0.4f, 0.4f, 0.4f, 0.6f));
drawText(mouseX + MOUSE_OFFSET + PADDING, mouseY, TEXT_SCALE, ROTATION, FONT, _overlayText, TEXT_COLOR);
}
}

View file

@ -1,47 +0,0 @@
//
// NodeBounds.h
// interface/src/ui
//
// Created by Ryan Huffman on 05/14/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_NodeBounds_h
#define hifi_NodeBounds_h
#include <QObject>
#include <NodeList.h>
const int MAX_OVERLAY_TEXT_LENGTH = 64;
class NodeBounds : public QObject {
Q_OBJECT
public:
NodeBounds(QObject* parent = NULL);
bool getShowEntityNodes() { return _showEntityNodes; }
bool getShowParticleNodes() { return _showParticleNodes; }
void draw();
void drawOverlay();
public slots:
void setShowEntityNodes(bool value) { _showEntityNodes = value; }
void setShowParticleNodes(bool value) { _showParticleNodes = value; }
protected:
void drawNodeBorder(const glm::vec3& center, float scale, float red, float green, float blue);
void getColorForNodeType(NodeType_t nodeType, float& red, float& green, float& blue);
private:
bool _showEntityNodes;
bool _showParticleNodes;
char _overlayText[MAX_OVERLAY_TEXT_LENGTH + 1];
};
#endif // hifi_NodeBounds_h

View file

@ -28,12 +28,12 @@ BillboardOverlay::BillboardOverlay(const BillboardOverlay* billboardOverlay) :
}
void BillboardOverlay::render(RenderArgs* args) {
if (!_isLoaded) {
if (!_texture) {
_isLoaded = true;
_texture = DependencyManager::get<TextureCache>()->getTexture(_url);
}
if (!_visible || !_texture->isLoaded()) {
if (!_visible || !_texture || !_texture->isLoaded()) {
return;
}
@ -170,7 +170,7 @@ void BillboardOverlay::setBillboardURL(const QString& url) {
bool BillboardOverlay::findRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
float& distance, BoxFace& face) {
if (_texture) {
if (_texture && _texture->isLoaded()) {
glm::quat rotation = getRotation();
if (_isFacingAvatar) {
// rotate about vertical to face the camera

View file

@ -12,7 +12,6 @@
#include "InterfaceConfig.h"
#include <DeferredLightingEffect.h>
#include <GlowEffect.h>
#include <SharedUtil.h>
#include <StreamUtils.h>
@ -34,8 +33,6 @@ void Cube3DOverlay::render(RenderArgs* args) {
const float MAX_COLOR = 255.0f;
glm::vec4 cubeColor(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
//glDisable(GL_LIGHTING);
// TODO: handle registration point??
glm::vec3 position = getPosition();
glm::vec3 center = getCenter();
@ -104,93 +101,6 @@ void Cube3DOverlay::render(RenderArgs* args) {
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(*batch, 1.0f, cubeColor);
}
}
} else {
float glowLevel = getGlowLevel();
Glower* glower = NULL;
if (glowLevel > 0.0f) {
glower = new Glower(glowLevel);
}
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glPushMatrix();
glm::vec3 positionToCenter = center - position;
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
if (_isSolid) {
if (_borderSize > 0) {
// Draw a cube at a larger size behind the main cube, creating
// a border effect.
// Disable writing to the depth mask so that the "border" cube will not
// occlude the main cube. This means the border could be covered by
// overlays that are further back and drawn later, but this is good
// enough for the use-case.
glDepthMask(GL_FALSE);
glPushMatrix();
glScalef(dimensions.x * _borderSize, dimensions.y * _borderSize, dimensions.z * _borderSize);
if (_drawOnHUD) {
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, glm::vec4(1.0f, 1.0f, 1.0f, alpha));
} else {
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, glm::vec4(1.0f, 1.0f, 1.0f, alpha));
}
glPopMatrix();
glDepthMask(GL_TRUE);
}
glPushMatrix();
glScalef(dimensions.x, dimensions.y, dimensions.z);
if (_drawOnHUD) {
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, cubeColor);
} else {
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, cubeColor);
}
glPopMatrix();
} else {
glLineWidth(_lineWidth);
if (getIsDashedLine()) {
glm::vec3 halfDimensions = dimensions / 2.0f;
glm::vec3 bottomLeftNear(-halfDimensions.x, -halfDimensions.y, -halfDimensions.z);
glm::vec3 bottomRightNear(halfDimensions.x, -halfDimensions.y, -halfDimensions.z);
glm::vec3 topLeftNear(-halfDimensions.x, halfDimensions.y, -halfDimensions.z);
glm::vec3 topRightNear(halfDimensions.x, halfDimensions.y, -halfDimensions.z);
glm::vec3 bottomLeftFar(-halfDimensions.x, -halfDimensions.y, halfDimensions.z);
glm::vec3 bottomRightFar(halfDimensions.x, -halfDimensions.y, halfDimensions.z);
glm::vec3 topLeftFar(-halfDimensions.x, halfDimensions.y, halfDimensions.z);
glm::vec3 topRightFar(halfDimensions.x, halfDimensions.y, halfDimensions.z);
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->renderDashedLine(bottomLeftNear, bottomRightNear, cubeColor);
geometryCache->renderDashedLine(bottomRightNear, bottomRightFar, cubeColor);
geometryCache->renderDashedLine(bottomRightFar, bottomLeftFar, cubeColor);
geometryCache->renderDashedLine(bottomLeftFar, bottomLeftNear, cubeColor);
geometryCache->renderDashedLine(topLeftNear, topRightNear, cubeColor);
geometryCache->renderDashedLine(topRightNear, topRightFar, cubeColor);
geometryCache->renderDashedLine(topRightFar, topLeftFar, cubeColor);
geometryCache->renderDashedLine(topLeftFar, topLeftNear, cubeColor);
geometryCache->renderDashedLine(bottomLeftNear, topLeftNear, cubeColor);
geometryCache->renderDashedLine(bottomRightNear, topRightNear, cubeColor);
geometryCache->renderDashedLine(bottomLeftFar, topLeftFar, cubeColor);
geometryCache->renderDashedLine(bottomRightFar, topRightFar, cubeColor);
} else {
glScalef(dimensions.x, dimensions.y, dimensions.z);
DependencyManager::get<GeometryCache>()->renderWireCube(1.0f, cubeColor);
}
}
glPopMatrix();
glPopMatrix();
if (glower) {
delete glower;
}
}
}

View file

@ -14,6 +14,11 @@
#include <GeometryCache.h>
#include <RegisteredMetaTypes.h>
#include "qapplication.h"
#include "gpu/Context.h"
#include "gpu/StandardShaderLib.h"
ImageOverlay::ImageOverlay() :
_imageURL(),
_renderImage(false),
@ -57,9 +62,12 @@ void ImageOverlay::render(RenderArgs* args) {
return;
}
gpu::Batch& batch = *args->_batch;
if (_renderImage) {
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, _texture->getID());
batch.setResourceTexture(0, _texture->getGPUTexture());
} else {
batch.setResourceTexture(0, args->_whiteTexture);
}
const float MAX_COLOR = 255.0f;
@ -75,6 +83,8 @@ void ImageOverlay::render(RenderArgs* args) {
glm::vec2 topLeft(left, top);
glm::vec2 bottomRight(right, bottom);
batch.setModelTransform(Transform());
// if for some reason our image has zero width or height, don't attempt to render it
if (_renderImage) {
float imageWidth = _texture->getWidth();
@ -104,14 +114,14 @@ void ImageOverlay::render(RenderArgs* args) {
glm::vec2 texCoordTopLeft(x, y);
glm::vec2 texCoordBottomRight(x + w, y + h);
glm::vec4 texcoordRect(texCoordTopLeft, w, h);
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor);
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor);
} else {
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, quadColor);
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, quadColor);
}
glDisable(GL_TEXTURE_2D);
} else {
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, quadColor);
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, quadColor);
}
}

View file

@ -11,7 +11,6 @@
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include <GlowEffect.h>
#include <GeometryCache.h>
#include <RegisteredMetaTypes.h>
@ -53,7 +52,6 @@ void Line3DOverlay::render(RenderArgs* args) {
glm::vec4 colorv4(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
auto batch = args->_batch;
if (batch) {
batch->setModelTransform(_transform);
@ -63,38 +61,6 @@ void Line3DOverlay::render(RenderArgs* args) {
} else {
DependencyManager::get<GeometryCache>()->renderLine(*batch, _start, _end, colorv4, _geometryCacheID);
}
} else {
float glowLevel = getGlowLevel();
Glower* glower = NULL;
if (glowLevel > 0.0f) {
glower = new Glower(glowLevel);
}
glPushMatrix();
glDisable(GL_LIGHTING);
glLineWidth(_lineWidth);
glm::vec3 position = getPosition();
glm::quat rotation = getRotation();
glTranslatef(position.x, position.y, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
if (getIsDashedLine()) {
// TODO: add support for color to renderDashedLine()
DependencyManager::get<GeometryCache>()->renderDashedLine(_start, _end, colorv4, _geometryCacheID);
} else {
DependencyManager::get<GeometryCache>()->renderLine(_start, _end, colorv4, _geometryCacheID);
}
glEnable(GL_LIGHTING);
glPopMatrix();
if (glower) {
delete glower;
}
}
}

View file

@ -9,8 +9,6 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <GlowEffect.h>
#include "Application.h"
#include "LocalModelsOverlay.h"
@ -32,11 +30,7 @@ void LocalModelsOverlay::update(float deltatime) {
void LocalModelsOverlay::render(RenderArgs* args) {
if (_visible) {
float glowLevel = getGlowLevel();
Glower* glower = NULL;
if (glowLevel > 0.0f) {
glower = new Glower(glowLevel);
}
float glowLevel = getGlowLevel(); // FIXME, glowing removed for now
auto batch = args ->_batch;
Application* app = Application::getInstance();
@ -45,10 +39,8 @@ void LocalModelsOverlay::render(RenderArgs* args) {
transform.setTranslation(oldTranslation + getPosition());
batch->setViewTransform(transform);
_entityTreeRenderer->render(args);
if (glower) {
delete glower;
}
transform.setTranslation(oldTranslation);
batch->setViewTransform(transform);
}
}

View file

@ -12,8 +12,6 @@
#include "ModelOverlay.h"
#include <Application.h>
#include <GlowEffect.h>
ModelOverlay::ModelOverlay()
: _model(),

View file

@ -103,19 +103,11 @@ void Overlays::renderHUD(RenderArgs* renderArgs) {
foreach(Overlay::Pointer thisOverlay, _overlaysHUD) {
if (thisOverlay->is3D()) {
glEnable(GL_DEPTH_TEST);
glEnable(GL_LIGHTING);
thisOverlay->render(renderArgs);
glDisable(GL_LIGHTING);
glDisable(GL_DEPTH_TEST);
} else {
thisOverlay->render(renderArgs);
}
thisOverlay->render(renderArgs);
}
gpu::GLBackend::renderBatch(batch, true);
renderArgs->_context->syncCache();
renderArgs->_context->render(batch);
}
unsigned int Overlays::addOverlay(const QString& type, const QScriptValue& properties) {

View file

@ -14,7 +14,6 @@
#include "Rectangle3DOverlay.h"
#include <GeometryCache.h>
#include <GlowEffect.h>
#include <SharedUtil.h>
Rectangle3DOverlay::Rectangle3DOverlay() :

View file

@ -11,7 +11,6 @@
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include <GlowEffect.h>
#include <SharedUtil.h>
#include "Sphere3DOverlay.h"

View file

@ -38,10 +38,7 @@ QSharedPointer<Resource> AnimationCache::createResource(const QUrl& url, const Q
return QSharedPointer<Resource>(new Animation(url), &Resource::allReferencesCleared);
}
Animation::Animation(const QUrl& url) :
Resource(url),
_isValid(false) {
}
Animation::Animation(const QUrl& url) : Resource(url) {}
class AnimationReader : public QRunnable {
public:
@ -97,7 +94,6 @@ QVector<FBXAnimationFrame> Animation::getFrames() const {
void Animation::setGeometry(const FBXGeometry& geometry) {
_geometry = geometry;
finishedLoading(true);
_isValid = true;
}
void Animation::downloadFinished(QNetworkReply* reply) {

View file

@ -57,8 +57,6 @@ public:
Q_INVOKABLE QStringList getJointNames() const;
Q_INVOKABLE QVector<FBXAnimationFrame> getFrames() const;
bool isValid() const { return _isValid; }
protected:
@ -69,7 +67,6 @@ protected:
private:
FBXGeometry _geometry;
bool _isValid;
};

View file

@ -294,7 +294,7 @@ AudioInjector* AudioInjector::playSound(const QString& soundUrl, const float vol
if (soundCache.isNull()) {
return NULL;
}
SharedSoundPointer sound = soundCache.data()->getSound(QUrl(soundUrl));
SharedSoundPointer sound = soundCache->getSound(QUrl(soundUrl));
if (sound.isNull() || !sound->isReady()) {
return NULL;
}

View file

@ -161,19 +161,15 @@ QByteArray AvatarData::toByteArray() {
// Body scale
destinationBuffer += packFloatRatioToTwoByte(destinationBuffer, _targetScale);
// Head rotation (NOTE: This needs to become a quaternion to save two bytes)
glm::vec3 pitchYawRoll = glm::vec3(_headData->getFinalPitch(),
_headData->getFinalYaw(),
_headData->getFinalRoll());
if (this->isMyAvatar()) {
glm::vec3 lean = glm::vec3(_headData->getFinalLeanForward(),
_headData->getTorsoTwist(),
_headData->getFinalLeanSideways());
pitchYawRoll -= lean;
}
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, pitchYawRoll.x);
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, pitchYawRoll.y);
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, pitchYawRoll.z);
// Head rotation
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->getFinalPitch());
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->getFinalYaw());
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->getFinalRoll());
// Body lean
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->_leanForward);
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->_leanSideways);
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->_torsoTwist);
// Lookat Position
memcpy(destinationBuffer, &_headData->_lookAtPosition, sizeof(_headData->_lookAtPosition));
@ -291,13 +287,16 @@ int AvatarData::parseDataAtOffset(const QByteArray& packet, int offset) {
// headPitch = 2 (compressed float)
// headYaw = 2 (compressed float)
// headRoll = 2 (compressed float)
// leanForward = 2 (compressed float)
// leanSideways = 2 (compressed float)
// torsoTwist = 2 (compressed float)
// lookAt = 12
// audioLoudness = 4
// }
// + 1 byte for pupilSize
// + 1 byte for numJoints (0)
// = 45 bytes
int minPossibleSize = 45;
// = 51 bytes
int minPossibleSize = 51;
int maxAvailableSize = packet.size() - offset;
if (minPossibleSize > maxAvailableSize) {
@ -371,6 +370,22 @@ int AvatarData::parseDataAtOffset(const QByteArray& packet, int offset) {
_headData->setBaseYaw(headYaw);
_headData->setBaseRoll(headRoll);
} // 6 bytes
{ // Head lean (relative to pelvis)
float leanForward, leanSideways, torsoTwist;
sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*)sourceBuffer, &leanForward);
sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*)sourceBuffer, &leanSideways);
sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*)sourceBuffer, &torsoTwist);
if (glm::isnan(leanForward) || glm::isnan(leanSideways)) {
if (shouldLogError(now)) {
qCDebug(avatars) << "Discard nan AvatarData::leanForward,leanSideways,torsoTwist; displayName = '" << _displayName << "'";
}
return maxAvailableSize;
}
_headData->_leanForward = leanForward;
_headData->_leanSideways = leanSideways;
_headData->_torsoTwist = torsoTwist;
} // 6 bytes
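// Each lean angle above is carried as a 2-byte compressed float (packFloatAngleToTwoByte /
// unpackFloatAngleFromTwoByte), so the three values add 3 * 2 = 6 bytes to the wire format,
// which is what grows minPossibleSize from 45 to 51 bytes earlier in this function.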
{ // Lookat Position
glm::vec3 lookAt;

View file

@ -9,6 +9,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QtCore/QDataStream>
#include <NodeList.h>
#include <PacketHeaders.h>
#include <SharedUtil.h>

View file

@ -20,7 +20,6 @@
#include <AbstractScriptingServicesInterface.h>
#include <AbstractViewStateInterface.h>
#include <DeferredLightingEffect.h>
#include <GlowEffect.h>
#include <Model.h>
#include <NetworkAccessManager.h>
#include <PerfStat.h>
@ -546,7 +545,7 @@ const FBXGeometry* EntityTreeRenderer::getCollisionGeometryForEntity(EntityItemP
Model* model = modelEntityItem->getModel(this);
if (model) {
const QSharedPointer<NetworkGeometry> collisionNetworkGeometry = model->getCollisionGeometry();
if (!collisionNetworkGeometry.isNull()) {
if (collisionNetworkGeometry && collisionNetworkGeometry->isLoaded()) {
result = &collisionNetworkGeometry->getFBXGeometry();
}
}
@ -805,7 +804,7 @@ void EntityTreeRenderer::connectSignalsToSlots(EntityScriptingInterface* entityS
connect(this, &EntityTreeRenderer::leaveEntity, entityScriptingInterface, &EntityScriptingInterface::leaveEntity);
connect(this, &EntityTreeRenderer::collisionWithEntity, entityScriptingInterface, &EntityScriptingInterface::collisionWithEntity);
connect(&(*DependencyManager::get<SceneScriptingInterface>()), &SceneScriptingInterface::shouldRenderEntitiesChanged, this, &EntityTreeRenderer::updateEntityRenderStatus, Qt::QueuedConnection);
connect(DependencyManager::get<SceneScriptingInterface>().data(), &SceneScriptingInterface::shouldRenderEntitiesChanged, this, &EntityTreeRenderer::updateEntityRenderStatus, Qt::QueuedConnection);
}
QScriptValueList EntityTreeRenderer::createMouseEventArgs(const EntityItemID& entityID, QMouseEvent* event, unsigned int deviceID) {

View file

@ -63,11 +63,11 @@ void RenderableModelEntityItem::remapTextures() {
return; // nothing to do if we don't have a model
}
if (!_model->isLoadedWithTextures()) {
return; // nothing to do if the model has not yet loaded its default textures
if (!_model->isLoaded()) {
return; // nothing to do if the model has not yet loaded
}
if (!_originalTexturesRead && _model->isLoadedWithTextures()) {
if (!_originalTexturesRead) {
const QSharedPointer<NetworkGeometry>& networkGeometry = _model->getGeometry();
if (networkGeometry) {
_originalTextures = networkGeometry->getTextureNames();
@ -119,7 +119,7 @@ bool RenderableModelEntityItem::readyToAddToScene(RenderArgs* renderArgs) {
EntityTreeRenderer* renderer = static_cast<EntityTreeRenderer*>(renderArgs->_renderer);
getModel(renderer);
}
if (renderArgs && _model && _needsInitialSimulation && _model->isActive() && _model->isLoadedWithTextures()) {
if (renderArgs && _model && _needsInitialSimulation && _model->isActive() && _model->isLoaded()) {
_model->setScaleToFit(true, getDimensions());
_model->setSnapModelToRegistrationPoint(true, getRegistrationPoint());
_model->setRotation(getRotation());
@ -401,8 +401,8 @@ bool RenderableModelEntityItem::isReadyToComputeShape() {
const QSharedPointer<NetworkGeometry> collisionNetworkGeometry = _model->getCollisionGeometry();
const QSharedPointer<NetworkGeometry> renderNetworkGeometry = _model->getGeometry();
if ((! collisionNetworkGeometry.isNull() && collisionNetworkGeometry->isLoadedWithTextures()) &&
(! renderNetworkGeometry.isNull() && renderNetworkGeometry->isLoadedWithTextures())) {
if ((collisionNetworkGeometry && collisionNetworkGeometry->isLoaded()) &&
(renderNetworkGeometry && renderNetworkGeometry->isLoaded())) {
// we have both URLs AND both geometries AND they are both fully loaded.
return true;
}
@ -423,7 +423,7 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& info) {
// should never fall in here when collision model not fully loaded
// hence we assert collisionNetworkGeometry is not NULL
assert(!collisionNetworkGeometry.isNull());
assert(collisionNetworkGeometry);
const FBXGeometry& collisionGeometry = collisionNetworkGeometry->getFBXGeometry();
const QSharedPointer<NetworkGeometry> renderNetworkGeometry = _model->getGeometry();

View file

@ -14,7 +14,6 @@
#include <gpu/GPUConfig.h>
#include <GlowEffect.h>
#include <DeferredLightingEffect.h>
#include <GeometryCache.h>
#include <PerfStat.h>
@ -169,7 +168,6 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
_webSurface->resize(QSize(dims.x, dims.y));
currentContext->makeCurrent(currentSurface);
Glower glow(0.0f);
PerformanceTimer perfTimer("RenderableWebEntityItem::render");
Q_ASSERT(getType() == EntityTypes::Web);
static const glm::vec2 texMin(0.0f), texMax(1.0f), topLeft(-0.5f), bottomRight(0.5f);

View file

@ -610,6 +610,9 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
int bytesRead = parser.offset();
#endif
auto nodeList = DependencyManager::get<NodeList>();
const QUuid& myNodeID = nodeList->getSessionUUID();
bool weOwnSimulation = _simulationOwner.matchesValidID(myNodeID);
if (args.bitstreamVersion >= VERSION_ENTITIES_HAVE_SIMULATION_OWNER_AND_ACTIONS_OVER_WIRE) {
// pack SimulationOwner and terse update properties near each other
@ -632,10 +635,8 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
}
{ // When we own the simulation we don't accept updates to the entity's transform/velocities
// but since we're using macros below we have to temporarily modify overwriteLocalData.
auto nodeList = DependencyManager::get<NodeList>();
bool weOwnIt = _simulationOwner.matchesValidID(nodeList->getSessionUUID());
bool oldOverwrite = overwriteLocalData;
overwriteLocalData = overwriteLocalData && !weOwnIt;
overwriteLocalData = overwriteLocalData && !weOwnSimulation;
READ_ENTITY_PROPERTY(PROP_POSITION, glm::vec3, updatePosition);
READ_ENTITY_PROPERTY(PROP_ROTATION, glm::quat, updateRotation);
READ_ENTITY_PROPERTY(PROP_VELOCITY, glm::vec3, updateVelocity);
@ -657,6 +658,7 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
READ_ENTITY_PROPERTY(PROP_REGISTRATION_POINT, glm::vec3, setRegistrationPoint);
} else {
// legacy order of packing here
// TODO: purge this logic in a few months from now (2015.07)
READ_ENTITY_PROPERTY(PROP_POSITION, glm::vec3, updatePosition);
READ_ENTITY_PROPERTY(PROP_DIMENSIONS, glm::vec3, updateDimensions);
READ_ENTITY_PROPERTY(PROP_ROTATION, glm::quat, updateRotation);
@ -702,7 +704,16 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
READ_ENTITY_PROPERTY(PROP_HREF, QString, setHref);
READ_ENTITY_PROPERTY(PROP_DESCRIPTION, QString, setDescription);
READ_ENTITY_PROPERTY(PROP_ACTION_DATA, QByteArray, setActionData);
{ // When we own the simulation we don't accept updates to the entity's actions
// but since we're using macros below we have to temporarily modify overwriteLocalData.
// NOTE: this prevents userB from adding an action to an object when userA
// has simulation ownership of it.
// TODO: figure out how to allow multiple users to update actions simultaneously
bool oldOverwrite = overwriteLocalData;
overwriteLocalData = overwriteLocalData && !weOwnSimulation;
READ_ENTITY_PROPERTY(PROP_ACTION_DATA, QByteArray, setActionData);
overwriteLocalData = oldOverwrite;
}
bytesRead += readEntitySubclassDataFromBuffer(dataAt, (bytesLeftToRead - bytesRead), args,
propertyFlags, overwriteLocalData);
@ -713,7 +724,7 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
// NOTE: we had a bad version of the stream in which we added stream data after the subclass. We can attempt to recover
// by doing this parsing here... but it's not likely to fully recover the content.
//
// TODO: Remove this conde once we've sufficiently migrated content past this damaged version
// TODO: Remove this code once we've sufficiently migrated content past this damaged version
if (args.bitstreamVersion == VERSION_ENTITIES_HAS_MARKETPLACE_ID_DAMAGED) {
READ_ENTITY_PROPERTY(PROP_MARKETPLACE_ID, QString, setMarketplaceID);
}
@ -738,8 +749,6 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
}
}
auto nodeList = DependencyManager::get<NodeList>();
const QUuid& myNodeID = nodeList->getSessionUUID();
if (overwriteLocalData) {
if (!_simulationOwner.matchesValidID(myNodeID)) {

View file

@ -184,7 +184,7 @@ void ModelEntityItem::cleanupLoadedAnimations() {
_loadedAnimations.clear();
}
Animation* ModelEntityItem::getAnimation(const QString& url) {
AnimationPointer ModelEntityItem::getAnimation(const QString& url) {
AnimationPointer animation;
// if we don't already have this model then create it and initialize it
@ -194,7 +194,7 @@ Animation* ModelEntityItem::getAnimation(const QString& url) {
} else {
animation = _loadedAnimations[url];
}
return animation.data();
return animation;
}
void ModelEntityItem::mapJoints(const QStringList& modelJointNames) {
@ -203,9 +203,8 @@ void ModelEntityItem::mapJoints(const QStringList& modelJointNames) {
return;
}
Animation* myAnimation = getAnimation(_animationURL);
if (!_jointMappingCompleted) {
AnimationPointer myAnimation = getAnimation(_animationURL);
if (myAnimation && myAnimation->isLoaded()) {
QStringList animationJointNames = myAnimation->getJointNames();
if (modelJointNames.size() > 0 && animationJointNames.size() > 0) {
@ -220,8 +219,12 @@ void ModelEntityItem::mapJoints(const QStringList& modelJointNames) {
QVector<glm::quat> ModelEntityItem::getAnimationFrame() {
QVector<glm::quat> frameData;
if (hasAnimation() && _jointMappingCompleted) {
Animation* myAnimation = getAnimation(_animationURL);
if (!hasAnimation() || !_jointMappingCompleted) {
return frameData;
}
AnimationPointer myAnimation = getAnimation(_animationURL);
if (myAnimation && myAnimation->isLoaded()) {
QVector<FBXAnimationFrame> frames = myAnimation->getFrames();
int frameCount = frames.size();
if (frameCount > 0) {

View file

@ -141,7 +141,7 @@ protected:
bool _jointMappingCompleted;
QVector<int> _jointMapping;
static Animation* getAnimation(const QString& url);
static AnimationPointer getAnimation(const QString& url);
static QMap<QString, AnimationPointer> _loadedAnimations;
static AnimationCache _animationCache;

View file

@ -93,7 +93,16 @@ QByteArray FSTReader::writeMapping(const QVariantHash& mapping) {
for (auto key : PREFERED_ORDER) {
auto it = mapping.find(key);
if (it != mapping.constEnd()) {
writeVariant(buffer, it);
if (key == FREE_JOINT_FIELD) { // writeVariant does not handle strings added using insertMulti.
for (auto multi : mapping.values(key)) {
buffer.write(key.toUtf8());
buffer.write(" = ");
buffer.write(multi.toByteArray());
buffer.write("\n");
}
} else {
writeVariant(buffer, it);
}
}
}
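The special case above exists because free joints are stored in the mapping with QHash::insertMulti, so one key carries several values and a single writeVariant call would emit only one of them. A minimal sketch of the behaviour, assuming FREE_JOINT_FIELD holds the literal "freeJoint" and that buffer is a QBuffer already open for writing (names here are illustrative, not lifted from FSTReader):

    QVariantHash mapping;
    mapping.insertMulti("freeJoint", "LeftArm");
    mapping.insertMulti("freeJoint", "RightArm");
    // values(key) returns every entry stored with insertMulti, so each
    // free joint comes out as its own "freeJoint = <name>" line.
    for (const QVariant& joint : mapping.values("freeJoint")) {
        buffer.write("freeJoint = ");
        buffer.write(joint.toByteArray());
        buffer.write("\n");
    }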

View file

@ -40,11 +40,8 @@
namespace gpu {
enum ReservedSlot {
/* TRANSFORM_OBJECT_SLOT = 6,
TRANSFORM_OBJECT_SLOT = 6,
TRANSFORM_CAMERA_SLOT = 7,
*/
TRANSFORM_OBJECT_SLOT = 1,
TRANSFORM_CAMERA_SLOT = 2,
};
class Batch {
@ -89,7 +86,7 @@ public:
// Then by the inverse of the ViewTransform from world space to eye space
// finally projected into the clip space by the projection transform
// WARNING: ViewTransform transforms from eye space to world space, its inverse is composed
// with the ModelTransformu to create the equivalent of the glModelViewMatrix
// with the ModelTransform to create the equivalent of the gl ModelViewMatrix
void setModelTransform(const Transform& model);
void setViewTransform(const Transform& view);
void setProjectionTransform(const Mat4& proj);
@ -117,7 +114,7 @@ public:
// TODO: As long as we have gl calls explicitly issued from interface
// code, we need to be able to record and batch these calls. The long
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API
// For now, instead of calling the raw glCall, use the equivalent call on the batch so the call is beeing recorded
// For now, instead of calling the raw gl Call, use the equivalent call on the batch so the call is being recorded
// The implementation of these functions is in GLBackend.cpp
void _glEnable(GLenum cap);
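As a usage sketch of the transform stack documented above (Transform is the type from this codebase, Mat4 is assumed to alias glm::mat4; the translation and projection values are made up):

    gpu::Batch batch;

    Transform model;
    model.setTranslation(glm::vec3(0.0f, 1.0f, -2.0f)); // object -> world
    batch.setModelTransform(model);

    // The view transform goes from eye space to world space; the backend
    // composes its inverse with the model transform (the gl ModelView equivalent).
    batch.setViewTransform(Transform());

    // Clip-space projection.
    batch.setProjectionTransform(glm::perspective(glm::radians(45.0f), 16.0f / 9.0f, 0.1f, 100.0f));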

View file

@ -0,0 +1,22 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// Draw texture 0 fetched at texcoord.xy, Blend with color uniform
//
// Created by Sam Gateau on 7/12/2015
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
uniform sampler2D colorMap;
uniform vec4 color;
varying vec2 varTexcoord;
void main(void) {
gl_FragColor = texture2D(colorMap, varTexcoord) * color;
}

View file

@ -0,0 +1,39 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// Draw and transform the unit quad [-1,-1 -> 1,1]
// Transform the normalized texcoords [0, 1] to be in the range [texcoordRect.xy, texcoordRect.xy + texcoordRect.zw]
// Simply draw a Triangle_strip of 2 triangles, no input buffers or index buffer needed
//
// Created by Sam Gateau on 6/22/2015
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
uniform vec4 texcoordRect;
varying vec2 varTexcoord;
void main(void) {
const vec4 UNIT_QUAD[4] = vec4[4](
vec4(-1.0, -1.0, 0.0, 1.0),
vec4(1.0, -1.0, 0.0, 1.0),
vec4(-1.0, 1.0, 0.0, 1.0),
vec4(1.0, 1.0, 0.0, 1.0)
);
vec4 pos = UNIT_QUAD[gl_VertexID];
// standard transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
<$transformModelToClipPos(cam, obj, pos, gl_Position)$>
varTexcoord = ((pos.xy + 1) * 0.5) * texcoordRect.zw + texcoordRect.xy;
}
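Worked example of the texcoord remapping above: with texcoordRect = (0.25, 0.25, 0.5, 0.5), the corner pos = (1, 1) gives ((1, 1) + 1) * 0.5 = (1, 1), so varTexcoord = (1, 1) * (0.5, 0.5) + (0.25, 0.25) = (0.75, 0.75); the opposite corner pos = (-1, -1) lands on (0.25, 0.25), so the quad samples exactly the requested sub-rectangle of the texture.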

View file

@ -87,10 +87,12 @@ GLBackend::GLBackend() :
_pipeline(),
_output()
{
initInput();
initTransform();
}
GLBackend::~GLBackend() {
killInput();
killTransform();
}

View file

@ -228,7 +228,21 @@ public:
void do_setStateColorWriteMask(uint32 mask);
// Reporting stats of the context
class Stats {
public:
int _ISNumFormatChanges = 0;
int _ISNumInputBufferChanges = 0;
int _ISNumIndexBufferChanges = 0;
Stats() {}
Stats(const Stats& stats) = default;
};
void getStats(Stats& stats) const { stats = _stats; }
protected:
Stats _stats;
// Draw Stage
void do_draw(Batch& batch, uint32 paramOffset);
@ -242,12 +256,13 @@ protected:
void do_setInputFormat(Batch& batch, uint32 paramOffset);
void do_setInputBuffer(Batch& batch, uint32 paramOffset);
void do_setIndexBuffer(Batch& batch, uint32 paramOffset);
// Synchronize the state cache of this Backend with the actual real state of the GL Context
void initInput();
void killInput();
void syncInputStateCache();
void updateInput();
struct InputStageState {
bool _invalidFormat;
bool _invalidFormat = true;
Stream::FormatPointer _format;
typedef std::bitset<MAX_NUM_INPUT_BUFFERS> BuffersState;
@ -256,6 +271,7 @@ protected:
Buffers _buffers;
Offsets _bufferOffsets;
Offsets _bufferStrides;
std::vector<GLuint> _bufferVBOs;
BufferPointer _indexBuffer;
Offset _indexBufferOffset;
@ -264,6 +280,8 @@ protected:
typedef std::bitset<MAX_NUM_ATTRIBUTES> ActivationCache;
ActivationCache _attributeActivation;
GLuint _defaultVAO;
InputStageState() :
_invalidFormat(true),
_format(0),
@ -271,10 +289,12 @@ protected:
_buffers(_buffersState.size(), BufferPointer(0)),
_bufferOffsets(_buffersState.size(), 0),
_bufferStrides(_buffersState.size(), 0),
_bufferVBOs(_buffersState.size(), 0),
_indexBuffer(0),
_indexBufferOffset(0),
_indexBufferType(UINT32),
_attributeActivation(0)
_attributeActivation(0),
_defaultVAO(0)
{}
} _input;

View file

@ -28,13 +28,39 @@ void GLBackend::do_setInputBuffer(Batch& batch, uint32 paramOffset) {
uint32 channel = batch._params[paramOffset + 3]._uint;
if (channel < getNumInputBuffers()) {
_input._buffers[channel] = buffer;
_input._bufferOffsets[channel] = offset;
_input._bufferStrides[channel] = stride;
_input._buffersState.set(channel);
bool isModified = false;
if (_input._buffers[channel] != buffer) {
_input._buffers[channel] = buffer;
GLuint vbo = 0;
if (buffer) {
vbo = getBufferID((*buffer));
}
_input._bufferVBOs[channel] = vbo;
isModified = true;
}
if (_input._bufferOffsets[channel] != offset) {
_input._bufferOffsets[channel] = offset;
isModified = true;
}
if (_input._bufferStrides[channel] != stride) {
_input._bufferStrides[channel] = stride;
isModified = true;
}
if (isModified) {
_input._buffersState.set(channel);
}
}
}
#define NOT_SUPPORT_VAO
#if defined(SUPPORT_VAO)
#else
#define SUPPORT_LEGACY_OPENGL
#if defined(SUPPORT_LEGACY_OPENGL)
static const int NUM_CLASSIC_ATTRIBS = Stream::TANGENT;
@ -45,24 +71,120 @@ static const GLenum attributeSlotToClassicAttribName[NUM_CLASSIC_ATTRIBS] = {
GL_TEXTURE_COORD_ARRAY
};
#endif
#endif
void GLBackend::initInput() {
#if defined(SUPPORT_VAO)
if(!_input._defaultVAO) {
glGenVertexArrays(1, &_input._defaultVAO);
}
glBindVertexArray(_input._defaultVAO);
(void) CHECK_GL_ERROR();
#endif
}
void GLBackend::killInput() {
#if defined(SUPPORT_VAO)
glBindVertexArray(0);
if(_input._defaultVAO) {
glDeleteVertexArrays(1, &_input._defaultVAO);
}
(void) CHECK_GL_ERROR();
#endif
}
void GLBackend::syncInputStateCache() {
#if defined(SUPPORT_VAO)
for (int i = 0; i < NUM_CLASSIC_ATTRIBS; i++) {
_input._attributeActivation[i] = glIsEnabled(attributeSlotToClassicAttribName[i]);
}
//_input._defaultVAO
glBindVertexArray(_input._defaultVAO);
#else
int i = 0;
#if defined(SUPPORT_LEGACY_OPENGL)
for (; i < NUM_CLASSIC_ATTRIBS; i++) {
_input._attributeActivation[i] = glIsEnabled(attributeSlotToClassicAttribName[i]);
}
#endif
for (; i < _input._attributeActivation.size(); i++) {
GLint active = 0;
glGetVertexAttribiv(i, GL_VERTEX_ATTRIB_ARRAY_ENABLED, &active);
_input._attributeActivation[i] = active;
}
#endif
}
void GLBackend::updateInput() {
#if defined(SUPPORT_VAO)
if (_input._invalidFormat) {
InputStageState::ActivationCache newActivation;
// Assign the vertex format required
if (_input._format) {
for (auto& it : _input._format->getAttributes()) {
const Stream::Attribute& attrib = (it).second;
newActivation.set(attrib._slot);
glVertexAttribFormat(
attrib._slot,
attrib._element.getDimensionCount(),
_elementTypeToGLType[attrib._element.getType()],
attrib._element.isNormalized(),
attrib._offset);
}
(void) CHECK_GL_ERROR();
}
// Reconcile attribute activation: what was enabled versus what is expected now
for (int i = 0; i < newActivation.size(); i++) {
bool newState = newActivation[i];
if (newState != _input._attributeActivation[i]) {
if (newState) {
glEnableVertexAttribArray(i);
} else {
glDisableVertexAttribArray(i);
}
_input._attributeActivation.flip(i);
}
}
(void) CHECK_GL_ERROR();
_input._invalidFormat = false;
_stats._ISNumFormatChanges++;
}
if (_input._buffersState.any()) {
int numBuffers = _input._buffers.size();
auto buffer = _input._buffers.data();
auto vbo = _input._bufferVBOs.data();
auto offset = _input._bufferOffsets.data();
auto stride = _input._bufferStrides.data();
for (int bufferNum = 0; bufferNum < numBuffers; bufferNum++) {
if (_input._buffersState.test(bufferNum)) {
glBindVertexBuffer(bufferNum, (*vbo), (*offset), (*stride));
}
buffer++;
vbo++;
offset++;
stride++;
}
_input._buffersState.reset();
(void) CHECK_GL_ERROR();
}
#else
if (_input._invalidFormat || _input._buffersState.any()) {
if (_input._invalidFormat) {
InputStageState::ActivationCache newActivation;
_stats._ISNumFormatChanges++;
// Check expected activation
if (_input._format) {
const Stream::Format::AttributeMap& attributes = _input._format->getAttributes();
for (Stream::Format::AttributeMap::const_iterator it = attributes.begin(); it != attributes.end(); it++) {
const Stream::Attribute& attrib = (*it).second;
for (auto& it : _input._format->getAttributes()) {
const Stream::Attribute& attrib = (it).second;
newActivation.set(attrib._slot);
}
}
@ -72,17 +194,15 @@ void GLBackend::updateInput() {
bool newState = newActivation[i];
if (newState != _input._attributeActivation[i]) {
#if defined(SUPPORT_LEGACY_OPENGL)
const bool useClientState = i < NUM_CLASSIC_ATTRIBS;
#else
const bool useClientState = false;
#endif
if (useClientState) {
if (i < NUM_CLASSIC_ATTRIBS) {
if (newState) {
glEnableClientState(attributeSlotToClassicAttribName[i]);
} else {
glDisableClientState(attributeSlotToClassicAttribName[i]);
}
} else {
} else
#endif
{
if (newState) {
glEnableVertexAttribArray(i);
} else {
@ -103,18 +223,23 @@ void GLBackend::updateInput() {
const Offsets& strides = _input._bufferStrides;
const Stream::Format::AttributeMap& attributes = _input._format->getAttributes();
auto& inputChannels = _input._format->getChannels();
_stats._ISNumInputBufferChanges++;
for (Stream::Format::ChannelMap::const_iterator channelIt = _input._format->getChannels().begin();
channelIt != _input._format->getChannels().end();
channelIt++) {
const Stream::Format::ChannelMap::value_type::second_type& channel = (*channelIt).second;
if ((*channelIt).first < buffers.size()) {
int bufferNum = (*channelIt).first;
GLuint boundVBO = 0;
for (auto& channelIt : inputChannels) {
const Stream::Format::ChannelMap::value_type::second_type& channel = (channelIt).second;
if ((channelIt).first < buffers.size()) {
int bufferNum = (channelIt).first;
if (_input._buffersState.test(bufferNum) || _input._invalidFormat) {
GLuint vbo = gpu::GLBackend::getBufferID((*buffers[bufferNum]));
glBindBuffer(GL_ARRAY_BUFFER, vbo);
(void) CHECK_GL_ERROR();
// GLuint vbo = gpu::GLBackend::getBufferID((*buffers[bufferNum]));
GLuint vbo = _input._bufferVBOs[bufferNum];
if (boundVBO != vbo) {
glBindBuffer(GL_ARRAY_BUFFER, vbo);
(void) CHECK_GL_ERROR();
boundVBO = vbo;
}
_input._buffersState[bufferNum] = false;
for (unsigned int i = 0; i < channel._slots.size(); i++) {
@ -126,9 +251,6 @@ void GLBackend::updateInput() {
GLuint pointer = attrib._offset + offsets[bufferNum];
#if defined(SUPPORT_LEGACY_OPENGL)
const bool useClientState = slot < NUM_CLASSIC_ATTRIBS;
#else
const bool useClientState = false;
#endif
if (useClientState) {
switch (slot) {
case Stream::POSITION:
@ -144,7 +266,9 @@ void GLBackend::updateInput() {
glTexCoordPointer(count, type, stride, reinterpret_cast<GLvoid*>(pointer));
break;
};
} else {
} else
#endif
{
GLboolean isNormalized = attrib._element.isNormalized();
glVertexAttribPointer(slot, count, type, isNormalized, stride,
reinterpret_cast<GLvoid*>(pointer));
@ -158,61 +282,7 @@ void GLBackend::updateInput() {
// everything format related should be in sync now
_input._invalidFormat = false;
}
/* TODO: Fancy version GL4.4
if (_needInputFormatUpdate) {
InputActivationCache newActivation;
// Assign the vertex format required
if (_inputFormat) {
const StreamFormat::AttributeMap& attributes = _inputFormat->getAttributes();
for (StreamFormat::AttributeMap::const_iterator it = attributes.begin(); it != attributes.end(); it++) {
const StreamFormat::Attribute& attrib = (*it).second;
newActivation.set(attrib._slot);
glVertexAttribFormat(
attrib._slot,
attrib._element.getDimensionCount(),
_elementTypeToGLType[attrib._element.getType()],
attrib._element.isNormalized(),
attrib._stride);
}
CHECK_GL_ERROR();
}
// Manage Activation what was and what is expected now
for (int i = 0; i < newActivation.size(); i++) {
bool newState = newActivation[i];
if (newState != _inputAttributeActivation[i]) {
if (newState) {
glEnableVertexAttribArray(i);
} else {
glDisableVertexAttribArray(i);
}
_inputAttributeActivation.flip(i);
}
}
CHECK_GL_ERROR();
_needInputFormatUpdate = false;
}
if (_needInputStreamUpdate) {
if (_inputStream) {
const Stream::Buffers& buffers = _inputStream->getBuffers();
const Stream::Offsets& offsets = _inputStream->getOffsets();
const Stream::Strides& strides = _inputStream->getStrides();
for (int i = 0; i < buffers.size(); i++) {
GLuint vbo = gpu::GLBackend::getBufferID((*buffers[i]));
glBindVertexBuffer(i, vbo, offsets[i], strides[i]);
}
CHECK_GL_ERROR();
}
_needInputStreamUpdate = false;
}
*/
#endif
}
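For reference, the SUPPORT_VAO path above targets the separated attribute-format / buffer-binding model of ARB_vertex_attrib_binding (core in OpenGL 4.3). A minimal sketch of that pattern in isolation (slot, bindingIndex, vbo, relativeOffset, baseOffset and stride are placeholders; the backend above re-specifies only the format and the vertex buffer binding, the explicit glVertexAttribBinding call is shown for completeness):

    // Describe the attribute layout once, independent of any buffer.
    glVertexAttribFormat(slot, 3, GL_FLOAT, GL_FALSE, relativeOffset);
    // Associate the attribute with a binding point.
    glVertexAttribBinding(slot, bindingIndex);
    // Swapping vertex buffers is then a single cheap call per binding point.
    glBindVertexBuffer(bindingIndex, vbo, baseOffset, stride);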

View file

@ -41,21 +41,34 @@ void makeBindings(GLBackend::GLShader* shader) {
glBindAttribLocation(glprogram, gpu::Stream::POSITION, "position");
}
loc = glGetAttribLocation(glprogram, "attribPosition");
if (loc >= 0) {
glBindAttribLocation(glprogram, gpu::Stream::POSITION, "attribPosition");
}
//Check for gpu specific attribute slotBindings
loc = glGetAttribLocation(glprogram, "gl_Vertex");
if (loc >= 0) {
glBindAttribLocation(glprogram, gpu::Stream::POSITION, "position");
glBindAttribLocation(glprogram, gpu::Stream::POSITION, "gl_Vertex");
}
loc = glGetAttribLocation(glprogram, "normal");
if (loc >= 0) {
glBindAttribLocation(glprogram, gpu::Stream::NORMAL, "normal");
}
loc = glGetAttribLocation(glprogram, "attribNormal");
if (loc >= 0) {
glBindAttribLocation(glprogram, gpu::Stream::NORMAL, "attribNormal");
}
loc = glGetAttribLocation(glprogram, "color");
if (loc >= 0) {
glBindAttribLocation(glprogram, gpu::Stream::COLOR, "color");
}
loc = glGetAttribLocation(glprogram, "attribColor");
if (loc >= 0) {
glBindAttribLocation(glprogram, gpu::Stream::COLOR, "attribColor");
}
loc = glGetAttribLocation(glprogram, "texcoord");
if (loc >= 0) {
@ -75,6 +88,10 @@ void makeBindings(GLBackend::GLShader* shader) {
if (loc >= 0) {
glBindAttribLocation(glprogram, gpu::Stream::TEXCOORD1, "texcoord1");
}
loc = glGetAttribLocation(glprogram, "attribTexcoord1");
if (loc >= 0) {
glBindAttribLocation(glprogram, gpu::Stream::TEXCOORD1, "attribTexcoord1");
}
loc = glGetAttribLocation(glprogram, "clusterIndices");
if (loc >= 0) {

View file

@ -82,8 +82,6 @@ void GLBackend::syncTransformStateCache() {
}
void GLBackend::updateTransform() {
GLint originalMatrixMode;
glGetIntegerv(GL_MATRIX_MODE, &originalMatrixMode);
// Check all the dirty flags and update the state accordingly
if (_transform._invalidViewport) {
_transform._transformCamera._viewport = glm::vec4(_transform._viewport);
@ -138,6 +136,9 @@ void GLBackend::updateTransform() {
#if (GPU_TRANSFORM_PROFILE == GPU_LEGACY)
// Do it again for fixed pipeline until we can get rid of it
GLint originalMatrixMode;
glGetIntegerv(GL_MATRIX_MODE, &originalMatrixMode);
if (_transform._invalidProj) {
if (_transform._lastMode != GL_PROJECTION) {
glMatrixMode(GL_PROJECTION);
@ -173,12 +174,12 @@ void GLBackend::updateTransform() {
}
(void) CHECK_GL_ERROR();
}
glMatrixMode(originalMatrixMode);
#endif
// Flags are clean
_transform._invalidView = _transform._invalidProj = _transform._invalidModel = _transform._invalidViewport = false;
glMatrixMode(originalMatrixMode);
}

View file

@ -148,7 +148,7 @@ public:
//
// As of now (03/2015), the call to makeProgram is in fact calling gpu::Context::makeProgram and does rely
// on the underneath gpu::Context::Backend available. Since we only support glsl, this means that it relies
// on a glContext and the driver to compile the glsl shader.
// on a gl Context and the driver to compile the glsl shader.
// Hopefully in a few years the shader compilation will be completely abstracted in a separate shader compiler library
// independent of the graphics api in use underneath (looking at you opengl & vulkan).
static bool makeProgram(Shader& shader, const Shader::BindingSet& bindings = Shader::BindingSet());

View file

@ -13,14 +13,46 @@
#include "StandardShaderLib.h"
#include "DrawTransformUnitQuad_vert.h"
#include "DrawTexcoordRectTransformUnitQuad_vert.h"
#include "DrawViewportQuadTransformTexcoord_vert.h"
#include "DrawTexture_frag.h"
#include "DrawColoredTexture_frag.h"
using namespace gpu;
ShaderPointer StandardShaderLib::_drawTransformUnitQuadVS;
ShaderPointer StandardShaderLib::_drawTexcoordRectTransformUnitQuadVS;
ShaderPointer StandardShaderLib::_drawViewportQuadTransformTexcoordVS;
ShaderPointer StandardShaderLib::_drawTexturePS;
ShaderPointer StandardShaderLib::_drawColoredTexturePS;
StandardShaderLib::ProgramMap StandardShaderLib::_programs;
ShaderPointer StandardShaderLib::getProgram(GetShader getVS, GetShader getPS) {
auto programIt = _programs.find(std::pair<GetShader, GetShader>(getVS, getPS));
if (programIt != _programs.end()) {
return (*programIt).second;
} else {
auto vs = (getVS)();
auto ps = (getPS)();
auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));
if (program) {
// Program created, let's try to make it
if (gpu::Shader::makeProgram((*program))) {
// All good, backup and return that program
_programs.insert(ProgramMap::value_type(std::pair<GetShader, GetShader>(getVS, getPS), program));
return program;
} else {
// Failed to make the program probably because vs and ps cannot work together?
}
} else {
// Failed to create the program maybe because ps and vs are not true vertex and pixel shaders?
}
}
return ShaderPointer();
}
ShaderPointer StandardShaderLib::getDrawTransformUnitQuadVS() {
if (!_drawTransformUnitQuadVS) {
@ -29,6 +61,13 @@ ShaderPointer StandardShaderLib::getDrawTransformUnitQuadVS() {
return _drawTransformUnitQuadVS;
}
ShaderPointer StandardShaderLib::getDrawTexcoordRectTransformUnitQuadVS() {
if (!_drawTexcoordRectTransformUnitQuadVS) {
_drawTexcoordRectTransformUnitQuadVS = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(DrawTexcoordRectTransformUnitQuad_vert)));
}
return _drawTexcoordRectTransformUnitQuadVS;
}
ShaderPointer StandardShaderLib::getDrawViewportQuadTransformTexcoordVS() {
if (!_drawViewportQuadTransformTexcoordVS) {
_drawViewportQuadTransformTexcoordVS = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(DrawViewportQuadTransformTexcoord_vert)));
@ -42,3 +81,10 @@ ShaderPointer StandardShaderLib::getDrawTexturePS() {
}
return _drawTexturePS;
}
ShaderPointer StandardShaderLib::getDrawColoredTexturePS() {
if (!_drawColoredTexturePS) {
_drawColoredTexturePS = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(DrawColoredTexture_frag)));
}
return _drawColoredTexturePS;
}
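Typical call site for the new getProgram cache, mirroring the DeferredLightingEffect change later in this commit; the getter functions themselves act as the map key, so repeated calls return the same compiled program:

    auto blitProgram = gpu::StandardShaderLib::getProgram(
        gpu::StandardShaderLib::getDrawViewportQuadTransformTexcoordVS,
        gpu::StandardShaderLib::getDrawTexturePS);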

View file

@ -14,6 +14,7 @@
#define hifi_gpu_StandardShaderLib_h
#include <assert.h>
#include <map>
#include "Shader.h"
@ -26,16 +27,30 @@ public:
// A texcoord attribute is also generated texcoord = [(0,0),(1,1)]
static ShaderPointer getDrawTransformUnitQuadVS();
// Shader draw the unit quad objectPos = ([(-1,-1),(1,1)]) and transform it by the full model transform stack (Model, View, Proj).
// A texcoord attribute is also generated covering a rect defined from the uniform vec4 texcoordRect: texcoord = [texcoordRect.xy,texcoordRect.xy + texcoordRect.zw]
static ShaderPointer getDrawTexcoordRectTransformUnitQuadVS();
// Shader draws the unit quad in the full viewport clipPos = ([(-1,-1),(1,1)]) and transform the texcoord = [(0,0),(1,1)] by the model transform.
static ShaderPointer getDrawViewportQuadTransformTexcoordVS();
static ShaderPointer getDrawTexturePS();
static ShaderPointer getDrawColoredTexturePS();
// Shader programs combining the shaders above, cached so each combination is unique
typedef ShaderPointer (*GetShader) ();
static ShaderPointer getProgram(GetShader vs, GetShader ps);
protected:
static ShaderPointer _drawTransformUnitQuadVS;
static ShaderPointer _drawTexcoordRectTransformUnitQuadVS;
static ShaderPointer _drawViewportQuadTransformTexcoordVS;
static ShaderPointer _drawTexturePS;
static ShaderPointer _drawColoredTexturePS;
typedef std::map<std::pair<GetShader, GetShader>, ShaderPointer> ProgramMap;
static ProgramMap _programs;
};

View file

@ -81,7 +81,6 @@ void SixenseManager::init() {
if (!_isInitialized) {
_lowVelocityFilter = false;
_controllersAtBase = true;
_calibrationState = CALIBRATION_STATE_IDLE;
// By default we assume the _neckBase (in orb frame) is as high above the orb
// as the "torso" is below it.
@ -234,26 +233,21 @@ void SixenseManager::update(float deltaTime) {
// _prevPalms[numActiveControllers - 1] = palm;
// qCDebug(interfaceapp, "Found new Sixense controller, ID %i", data->controller_index);
// }
// NOTE: Sixense API returns pos data in millimeters but we IMMEDIATELY convert to meters.
glm::vec3 position(data->pos[0], data->pos[1], data->pos[2]);
position *= METERS_PER_MILLIMETER;
// Check to see if this hand/controller is on the base
const float CONTROLLER_AT_BASE_DISTANCE = 0.075f;
if (glm::length(position) < CONTROLLER_AT_BASE_DISTANCE) {
numControllersAtBase++;
}
_controllersAtBase = (numControllersAtBase == 2);
if (!_controllersAtBase) {
if (glm::length(position) >= CONTROLLER_AT_BASE_DISTANCE) {
handleButtonEvent(data->buttons, numActiveControllers - 1);
handleAxisEvent(data->joystick_x, data->joystick_y, data->trigger, numActiveControllers - 1);
// Rotation of Palm
glm::quat rotation(data->rot_quat[3], -data->rot_quat[0], data->rot_quat[1], -data->rot_quat[2]);
rotation = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f)) * _orbRotation * rotation;
handlePoseEvent(position, rotation, numActiveControllers - 1);
} else {
_poseStateMap.clear();
}
// // Disable the hands (and return to default pose) if both controllers are at base station
@ -268,8 +262,8 @@ void SixenseManager::update(float deltaTime) {
// palm->setTrigger(data->trigger);
// palm->setJoystick(data->joystick_x, data->joystick_y);
handleButtonEvent(data->buttons, numActiveControllers - 1);
handleAxisEvent(data->joystick_x, data->joystick_y, data->trigger, numActiveControllers - 1);
//handleButtonEvent(data->buttons, numActiveControllers - 1);
//handleAxisEvent(data->joystick_x, data->joystick_y, data->trigger, numActiveControllers - 1);
// // Emulate the mouse so we can use scripts
// if (Menu::getInstance()->isOptionChecked(MenuOption::HandMouseInput) && !_controllersAtBase) {
@ -643,4 +637,4 @@ UserInputMapper::Input SixenseManager::makeInput(SixenseManager::JoystickAxisCha
UserInputMapper::Input SixenseManager::makeInput(JointChannel joint) {
return UserInputMapper::Input(_deviceID, joint, UserInputMapper::ChannelType::POSE);
}
}

View file

@ -127,7 +127,6 @@ private:
bool _hydrasConnected;
bool _lowVelocityFilter;
bool _controllersAtBase;
bool _invertButtons = DEFAULT_INVERT_SIXENSE_MOUSE_BUTTONS;

View file

@ -13,6 +13,7 @@
#include <qjsondocument.h>
#include <QtCore/QDebug>
#include <QtCore/QDataStream>
#include "NetworkLogging.h"
#include "DataServerAccountInfo.h"

View file

@ -12,6 +12,7 @@
#include <math.h>
#include <QtCore/QJsonDocument>
#include <QtCore/QDataStream>
#include "Assignment.h"
#include "HifiSockAddr.h"

View file

@ -17,6 +17,7 @@
#include <QtCore/QJsonValue>
#include <QtCore/QString>
#include <QtCore/QStringList>
#include <QtCore/QVariantMap>
class JSONBreakableMarshal {
public:

View file

@ -40,16 +40,3 @@ NetworkPacket& NetworkPacket::operator=(NetworkPacket const& other) {
copyContents(other.getNode(), other.getByteArray());
return *this;
}
#ifdef HAS_MOVE_SEMANTICS
// move, same as copy, but other packet won't be used further
NetworkPacket::NetworkPacket(NetworkPacket && packet) {
copyContents(packet.getNode(), packet.getByteArray());
}
// move assignment
NetworkPacket& NetworkPacket::operator=(NetworkPacket&& other) {
copyContents(other.getNode(), other.getByteArray());
return *this;
}
#endif

View file

@ -22,12 +22,6 @@ public:
NetworkPacket() { }
NetworkPacket(const NetworkPacket& packet); // copy constructor
NetworkPacket& operator= (const NetworkPacket& other); // copy assignment
#ifdef HAS_MOVE_SEMANTICS
NetworkPacket(NetworkPacket&& packet); // move?? // same as copy, but other packet won't be used further
NetworkPacket& operator= (NetworkPacket&& other); // move assignment
#endif
NetworkPacket(const SharedNodePointer& node, const QByteArray& byteArray);
const SharedNodePointer& getNode() const { return _node; }

View file

@ -13,6 +13,7 @@
#include <QtCore/QDateTime>
#include <QtCore/QDebug>
#include <QtCore/QDataStream>
#include <SharedUtil.h>
#include <UUID.h>

View file

@ -55,7 +55,7 @@ PacketVersion versionForPacketType(PacketType packetType) {
case PacketTypeInjectAudio:
return 1;
case PacketTypeAvatarData:
return 6;
return 7;
case PacketTypeAvatarIdentity:
return 1;
case PacketTypeEnvironmentData:

View file

@ -34,13 +34,18 @@ ResourceCache::ResourceCache(QObject* parent) :
}
ResourceCache::~ResourceCache() {
// the unused resources may themselves reference resources that will be added to the unused
// list on destruction, so keep clearing until there are no references left
while (!_unusedResources.isEmpty()) {
foreach (const QSharedPointer<Resource>& resource, _unusedResources) {
resource->setCache(nullptr);
clearUnusedResource();
}
void ResourceCache::refreshAll() {
// Clear all unused resources so we don't have to reload them
clearUnusedResource();
// Refresh all remaining resources in use
foreach (auto resource, _resources) {
if (!resource.isNull()) {
resource.data()->refresh();
}
_unusedResources.clear();
}
}
@ -48,6 +53,8 @@ void ResourceCache::refresh(const QUrl& url) {
QSharedPointer<Resource> resource = _resources.value(url);
if (!resource.isNull()) {
resource->refresh();
} else {
_resources.remove(url);
}
}
@ -134,6 +141,17 @@ void ResourceCache::reserveUnusedResource(qint64 resourceSize) {
}
}
void ResourceCache::clearUnusedResource() {
// the unused resources may themselves reference resources that will be added to the unused
// list on destruction, so keep clearing until there are no references left
while (!_unusedResources.isEmpty()) {
foreach (const QSharedPointer<Resource>& resource, _unusedResources) {
resource->setCache(nullptr);
}
_unusedResources.clear();
}
}
void ResourceCache::attemptRequest(Resource* resource) {
auto sharedItems = DependencyManager::get<ResourceCacheSharedItems>();
if (_requestLimit <= 0) {
@ -253,19 +271,20 @@ void Resource::refresh() {
_replyTimer->deleteLater();
_replyTimer = nullptr;
}
init();
_request.setAttribute(QNetworkRequest::CacheLoadControlAttribute, QNetworkRequest::AlwaysNetwork);
if (!_startedLoading) {
attemptRequest();
}
ensureLoading();
emit onRefresh();
}
void Resource::allReferencesCleared() {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "allReferencesCleared");
return;
}
if (_cache) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "allReferencesCleared");
return;
}
// create and reinsert new shared pointer
QSharedPointer<Resource> self(this, &Resource::allReferencesCleared);
setSelf(self);
@ -312,8 +331,7 @@ void Resource::reinsert() {
_cache->_resources.insert(_url, _self);
}
const int REPLY_TIMEOUT_MS = 5000;
static const int REPLY_TIMEOUT_MS = 5000;
void Resource::handleDownloadProgress(qint64 bytesReceived, qint64 bytesTotal) {
if (!_reply->isFinished()) {
_bytesReceived = bytesReceived;

View file

@ -51,7 +51,7 @@ static const qint64 MAX_UNUSED_MAX_SIZE = 10 * BYTES_PER_GIGABYTES;
class ResourceCacheSharedItems : public Dependency {
SINGLETON_DEPENDENCY
public:
QList<QPointer<Resource> > _pendingRequests;
QList<QPointer<Resource>> _pendingRequests;
QList<Resource*> _loadingRequests;
private:
ResourceCacheSharedItems() { }
@ -78,17 +78,14 @@ public:
ResourceCache(QObject* parent = NULL);
virtual ~ResourceCache();
void refreshAll();
void refresh(const QUrl& url);
public slots:
void checkAsynchronousGets();
protected:
qint64 _unusedResourcesMaxSize = DEFAULT_UNUSED_MAX_SIZE;
qint64 _unusedResourcesSize = 0;
QMap<int, QSharedPointer<Resource> > _unusedResources;
/// Loads a resource from the specified URL.
/// \param fallback a fallback URL to load if the desired one is unavailable
/// \param delayLoad if true, don't load the resource immediately; wait until load is first requested
@ -103,6 +100,7 @@ protected:
void addUnusedResource(const QSharedPointer<Resource>& resource);
void removeUnusedResource(const QSharedPointer<Resource>& resource);
void reserveUnusedResource(qint64 resourceSize);
void clearUnusedResource();
static void attemptRequest(Resource* resource);
static void requestCompleted(Resource* resource);
@ -110,7 +108,7 @@ protected:
private:
friend class Resource;
QHash<QUrl, QWeakPointer<Resource> > _resources;
QHash<QUrl, QWeakPointer<Resource>> _resources;
int _lastLRUKey = 0;
static int _requestLimit;
@ -118,7 +116,10 @@ private:
void getResourceAsynchronously(const QUrl& url);
QReadWriteLock _resourcesToBeGottenLock;
QQueue<QUrl> _resourcesToBeGotten;
qint64 _unusedResourcesMaxSize = DEFAULT_UNUSED_MAX_SIZE;
qint64 _unusedResourcesSize = 0;
QMap<int, QSharedPointer<Resource>> _unusedResources;
};
/// Base class for resources.
@ -172,12 +173,11 @@ public:
const QUrl& getURL() const { return _url; }
signals:
/// Fired when the resource has been loaded.
void loaded();
void onRefresh();
protected slots:
void attemptRequest();
/// Refreshes the resource if the last modified date on the network
@ -185,7 +185,6 @@ protected slots:
void maybeRefresh();
protected:
virtual void init();
/// Called when the download has finished. The recipient should delete the reply when done with it.
@ -207,14 +206,12 @@ protected:
QPointer<ResourceCache> _cache;
private slots:
void handleDownloadProgress(qint64 bytesReceived, qint64 bytesTotal);
void handleReplyError();
void handleReplyFinished();
void handleReplyTimeout();
private:
void setLRUKey(int lruKey) { _lruKey = lruKey; }
void makeRequest();
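A sketch of how the new onRefresh signal is consumed elsewhere in this commit (see the AnimationHandle and Model changes below); myResource, consumer and MyConsumer are placeholders:

    QObject::connect(myResource.data(), &Resource::onRefresh,
                     consumer, &MyConsumer::handleResourceRefreshed);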

View file

@ -30,23 +30,6 @@ JurisdictionMap& JurisdictionMap::operator=(const JurisdictionMap& other) {
return *this;
}
#ifdef HAS_MOVE_SEMANTICS
// Move constructor
JurisdictionMap::JurisdictionMap(JurisdictionMap&& other) : _rootOctalCode(NULL) {
init(other._rootOctalCode, other._endNodes);
other._rootOctalCode = NULL;
other._endNodes.clear();
}
// move assignment
JurisdictionMap& JurisdictionMap::operator=(JurisdictionMap&& other) {
init(other._rootOctalCode, other._endNodes);
other._rootOctalCode = NULL;
other._endNodes.clear();
return *this;
}
#endif
// Copy constructor
JurisdictionMap::JurisdictionMap(const JurisdictionMap& other) : _rootOctalCode(NULL) {
copyContents(other);

View file

@ -37,12 +37,6 @@ public:
// standard assignment
JurisdictionMap& operator=(const JurisdictionMap& other); // copy assignment
#ifdef HAS_MOVE_SEMANTICS
// move constructor and assignment
JurisdictionMap(JurisdictionMap&& other); // move constructor
JurisdictionMap& operator= (JurisdictionMap&& other); // move assignment
#endif
// application constructors
JurisdictionMap(const char* filename);
JurisdictionMap(unsigned char* rootOctalCode, const std::vector<unsigned char*>& endNodes);

View file

@ -22,7 +22,6 @@
#include "AbstractViewStateInterface.h"
#include "AmbientOcclusionEffect.h"
#include "GlowEffect.h"
#include "ProgramObject.h"
#include "RenderUtil.h"
#include "TextureCache.h"
@ -108,7 +107,7 @@ void AmbientOcclusionEffect::render() {
glBindTexture(GL_TEXTURE_2D, _rotationTextureID);
// render with the occlusion shader to the secondary/tertiary buffer
auto freeFramebuffer = DependencyManager::get<GlowEffect>()->getFreeFramebuffer();
auto freeFramebuffer = nullptr; // DependencyManager::get<GlowEffect>()->getFreeFramebuffer(); // FIXME
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(freeFramebuffer));
float left, right, bottom, top, nearVal, farVal;
@ -152,7 +151,7 @@ void AmbientOcclusionEffect::render() {
glEnable(GL_BLEND);
glBlendFuncSeparate(GL_ZERO, GL_SRC_COLOR, GL_ZERO, GL_ONE);
auto freeFramebufferTexture = freeFramebuffer->getRenderBuffer(0);
auto freeFramebufferTexture = nullptr; // freeFramebuffer->getRenderBuffer(0); // FIXME
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(freeFramebufferTexture));
_blurProgram->bind();

View file

@ -15,6 +15,7 @@
void AnimationHandle::setURL(const QUrl& url) {
if (_url != url) {
_animation = DependencyManager::get<AnimationCache>()->getAnimation(_url = url);
QObject::connect(_animation.data(), &Resource::onRefresh, this, &AnimationHandle::clearJoints);
_jointMappings.clear();
}
}
@ -110,11 +111,15 @@ void AnimationHandle::setAnimationDetails(const AnimationDetails& details) {
void AnimationHandle::simulate(float deltaTime) {
if (!_animation || !_animation->isLoaded()) {
return;
}
_animationLoop.simulate(deltaTime);
// update the joint mappings if necessary/possible
if (_jointMappings.isEmpty()) {
if (_model->isActive()) {
if (_model && _model->isActive()) {
_jointMappings = _model->getGeometry()->getJointMappings(_animation);
}
if (_jointMappings.isEmpty()) {
@ -146,6 +151,10 @@ void AnimationHandle::simulate(float deltaTime) {
}
void AnimationHandle::applyFrame(float frameIndex) {
if (!_animation || !_animation->isLoaded()) {
return;
}
const FBXGeometry& animationGeometry = _animation->getGeometry();
int frameCount = animationGeometry.animationFrames.size();
const FBXAnimationFrame& floorFrame = animationGeometry.animationFrames.at((int)glm::floor(frameIndex) % frameCount);

View file

@ -94,6 +94,8 @@ private:
void replaceMatchingPriorities(float newPriority);
void restoreJoints();
void clearJoints() { _jointMappings.clear(); }
Model* _model;
WeakAnimationHandlePointer _self;
AnimationPointer _animation;

View file

@ -18,7 +18,6 @@
#include "AbstractViewStateInterface.h"
#include "DeferredLightingEffect.h"
#include "GeometryCache.h"
#include "GlowEffect.h"
#include "RenderUtil.h"
#include "TextureCache.h"
@ -114,9 +113,9 @@ void DeferredLightingEffect::init(AbstractViewStateInterface* viewState) {
loadLightProgram(deferred_light_spot_vert, spot_light_frag, true, _spotLight, _spotLightLocations);
{
auto VSFS = gpu::StandardShaderLib::getDrawViewportQuadTransformTexcoordVS();
auto PSBlit = gpu::StandardShaderLib::getDrawTexturePS();
auto blitProgram = gpu::ShaderPointer(gpu::Shader::createProgram(VSFS, PSBlit));
//auto VSFS = gpu::StandardShaderLib::getDrawViewportQuadTransformTexcoordVS();
//auto PSBlit = gpu::StandardShaderLib::getDrawTexturePS();
auto blitProgram = gpu::StandardShaderLib::getProgram(gpu::StandardShaderLib::getDrawViewportQuadTransformTexcoordVS, gpu::StandardShaderLib::getDrawTexturePS);
gpu::Shader::makeProgram(*blitProgram);
gpu::StatePointer blitState = gpu::StatePointer(new gpu::State());
blitState->setBlendFunction(true,
@ -238,8 +237,10 @@ void DeferredLightingEffect::render(RenderArgs* args) {
QSize framebufferSize = textureCache->getFrameBufferSize();
// binding the first framebuffer
auto freeFBO = DependencyManager::get<GlowEffect>()->getFreeFramebuffer();
auto freeFBO = DependencyManager::get<TextureCache>()->getSecondaryFramebuffer();
batch.setFramebuffer(freeFBO);
batch.setViewportTransform(args->_viewport);
batch.clearColorFramebuffer(freeFBO->getBufferMask(), glm::vec4(0.0f, 0.0f, 0.0f, 0.0f));
@ -251,18 +252,10 @@ void DeferredLightingEffect::render(RenderArgs* args) {
batch.setResourceTexture(3, textureCache->getPrimaryDepthTexture());
// get the viewport side (left, right, both)
int viewport[4];
glGetIntegerv(GL_VIEWPORT, viewport);
const int VIEWPORT_X_INDEX = 0;
const int VIEWPORT_Y_INDEX = 1;
const int VIEWPORT_WIDTH_INDEX = 2;
const int VIEWPORT_HEIGHT_INDEX = 3;
float sMin = viewport[VIEWPORT_X_INDEX] / (float)framebufferSize.width();
float sWidth = viewport[VIEWPORT_WIDTH_INDEX] / (float)framebufferSize.width();
float tMin = viewport[VIEWPORT_Y_INDEX] / (float)framebufferSize.height();
float tHeight = viewport[VIEWPORT_HEIGHT_INDEX] / (float)framebufferSize.height();
float sMin = args->_viewport.x / (float)framebufferSize.width();
float sWidth = args->_viewport.z / (float)framebufferSize.width();
float tMin = args->_viewport.y / (float)framebufferSize.height();
float tHeight = args->_viewport.w / (float)framebufferSize.height();
bool useSkyboxCubemap = (_skybox) && (_skybox->getCubemap());
@ -546,7 +539,7 @@ void DeferredLightingEffect::copyBack(RenderArgs* args) {
auto textureCache = DependencyManager::get<TextureCache>();
QSize framebufferSize = textureCache->getFrameBufferSize();
auto freeFBO = DependencyManager::get<GlowEffect>()->getFreeFramebuffer();
auto freeFBO = DependencyManager::get<TextureCache>()->getSecondaryFramebuffer();
batch.setFramebuffer(textureCache->getPrimaryFramebuffer());
batch.setPipeline(_blitLightBuffer);
@ -555,27 +548,19 @@ void DeferredLightingEffect::copyBack(RenderArgs* args) {
batch.setProjectionTransform(glm::mat4());
batch.setViewTransform(Transform());
float sMin = args->_viewport.x / (float)framebufferSize.width();
float sWidth = args->_viewport.z / (float)framebufferSize.width();
float tMin = args->_viewport.y / (float)framebufferSize.height();
float tHeight = args->_viewport.w / (float)framebufferSize.height();
int viewport[4];
glGetIntegerv(GL_VIEWPORT, viewport);
const int VIEWPORT_X_INDEX = 0;
const int VIEWPORT_Y_INDEX = 1;
const int VIEWPORT_WIDTH_INDEX = 2;
const int VIEWPORT_HEIGHT_INDEX = 3;
float sMin = viewport[VIEWPORT_X_INDEX] / (float)framebufferSize.width();
float sWidth = viewport[VIEWPORT_WIDTH_INDEX] / (float)framebufferSize.width();
float tMin = viewport[VIEWPORT_Y_INDEX] / (float)framebufferSize.height();
float tHeight = viewport[VIEWPORT_HEIGHT_INDEX] / (float)framebufferSize.height();
batch.setViewportTransform(args->_viewport);
Transform model;
model.setTranslation(glm::vec3(sMin, tMin, 0.0));
model.setScale(glm::vec3(sWidth, tHeight, 1.0));
batch.setModelTransform(model);
batch.setViewportTransform(glm::ivec4(viewport[0], viewport[1], viewport[2], viewport[3]));
batch.draw(gpu::TRIANGLE_STRIP, 4);
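Worked example of the viewport-to-texcoord mapping above: with args->_viewport = (0, 0, 960, 1080) on a 1920x1080 framebuffer, sMin = 0, sWidth = 960 / 1920 = 0.5, tMin = 0 and tHeight = 1, so the quad is translated to (0, 0), scaled to (0.5, 1, 1), and samples the left half of the light buffer, the same result the removed glGetIntegerv(GL_VIEWPORT) query produced, but without a synchronous GL state read.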

View file

@ -55,8 +55,6 @@ const int NUM_VERTICES_PER_TRIANGLE = 3;
const int NUM_TRIANGLES_PER_QUAD = 2;
const int NUM_VERTICES_PER_TRIANGULATED_QUAD = NUM_VERTICES_PER_TRIANGLE * NUM_TRIANGLES_PER_QUAD;
const int NUM_COORDS_PER_VERTEX = 3;
const int NUM_BYTES_PER_VERTEX = NUM_COORDS_PER_VERTEX * sizeof(GLfloat);
const int NUM_BYTES_PER_INDEX = sizeof(GLushort);
void GeometryCache::renderSphere(float radius, int slices, int stacks, const glm::vec4& color, bool solid, int id) {
gpu::Batch batch;
@ -308,106 +306,6 @@ void GeometryCache::renderSphere(gpu::Batch& batch, float radius, int slices, in
}
}
void GeometryCache::renderCone(float base, float height, int slices, int stacks) {
VerticesIndices& vbo = _coneVBOs[IntPair(slices, stacks)];
int vertices = (stacks + 2) * slices;
int baseTriangles = slices - 2;
int indices = NUM_VERTICES_PER_TRIANGULATED_QUAD * slices * stacks + NUM_VERTICES_PER_TRIANGLE * baseTriangles;
if (vbo.first == 0) {
GLfloat* vertexData = new GLfloat[vertices * NUM_COORDS_PER_VERTEX * 2];
GLfloat* vertex = vertexData;
// cap
for (int i = 0; i < slices; i++) {
float theta = TWO_PI * i / slices;
//normals
*(vertex++) = 0.0f;
*(vertex++) = 0.0f;
*(vertex++) = -1.0f;
// vertices
*(vertex++) = cosf(theta);
*(vertex++) = sinf(theta);
*(vertex++) = 0.0f;
}
// body
for (int i = 0; i <= stacks; i++) {
float z = (float)i / stacks;
float radius = 1.0f - z;
for (int j = 0; j < slices; j++) {
float theta = TWO_PI * j / slices;
//normals
*(vertex++) = cosf(theta) / SQUARE_ROOT_OF_2;
*(vertex++) = sinf(theta) / SQUARE_ROOT_OF_2;
*(vertex++) = 1.0f / SQUARE_ROOT_OF_2;
// vertices
*(vertex++) = radius * cosf(theta);
*(vertex++) = radius * sinf(theta);
*(vertex++) = z;
}
}
glGenBuffers(1, &vbo.first);
glBindBuffer(GL_ARRAY_BUFFER, vbo.first);
glBufferData(GL_ARRAY_BUFFER, 2 * vertices * NUM_BYTES_PER_VERTEX, vertexData, GL_STATIC_DRAW);
delete[] vertexData;
GLushort* indexData = new GLushort[indices];
GLushort* index = indexData;
for (int i = 0; i < baseTriangles; i++) {
*(index++) = 0;
*(index++) = i + 2;
*(index++) = i + 1;
}
for (int i = 1; i <= stacks; i++) {
GLushort bottom = i * slices;
GLushort top = bottom + slices;
for (int j = 0; j < slices; j++) {
int next = (j + 1) % slices;
*(index++) = bottom + j;
*(index++) = top + next;
*(index++) = top + j;
*(index++) = bottom + j;
*(index++) = bottom + next;
*(index++) = top + next;
}
}
glGenBuffers(1, &vbo.second);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo.second);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices * NUM_BYTES_PER_INDEX, indexData, GL_STATIC_DRAW);
delete[] indexData;
} else {
glBindBuffer(GL_ARRAY_BUFFER, vbo.first);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo.second);
}
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
int stride = NUM_VERTICES_PER_TRIANGULATED_QUAD * sizeof(float);
glNormalPointer(GL_FLOAT, stride, 0);
glVertexPointer(NUM_COORDS_PER_VERTEX, GL_FLOAT, stride, (const void *)(NUM_COORDS_PER_VERTEX * sizeof(float)));
glPushMatrix();
glScalef(base, base, height);
glDrawRangeElementsEXT(GL_TRIANGLES, 0, vertices - 1, indices, GL_UNSIGNED_SHORT, 0);
glPopMatrix();
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}
void GeometryCache::renderGrid(int xDivisions, int yDivisions, const glm::vec4& color) {
gpu::Batch batch;
renderGrid(batch, xDivisions, yDivisions, color);
@ -535,8 +433,6 @@ void GeometryCache::renderGrid(gpu::Batch& batch, int x, int y, int width, int h
}
// Draw vertical grid lines
for (int i = cols + 1; --i >= 0; ) {
//glVertex2i(tx, y);
//glVertex2i(tx, y + height);
*(vertex++) = tx;
*(vertex++) = y;
@ -1942,7 +1838,7 @@ QSharedPointer<NetworkGeometry> NetworkGeometry::getLODOrFallback(float distance
}
}
}
if (lod->isLoaded()) {
if (lod && lod->isLoaded()) {
hysteresis = lodDistance;
return lod;
}
@ -2061,21 +1957,16 @@ void NetworkGeometry::setTextureWithNameToURL(const QString& name, const QUrl& u
QSharedPointer<NetworkTexture> matchingTexture = QSharedPointer<NetworkTexture>();
if (part.diffuseTextureName == name) {
part.diffuseTexture =
textureCache->getTexture(url, DEFAULT_TEXTURE,
_geometry.meshes[i].isEye, QByteArray());
part.diffuseTexture = textureCache->getTexture(url, DEFAULT_TEXTURE, _geometry.meshes[i].isEye);
part.diffuseTexture->setLoadPriorities(_loadPriorities);
} else if (part.normalTextureName == name) {
part.normalTexture = textureCache->getTexture(url, DEFAULT_TEXTURE,
false, QByteArray());
part.normalTexture = textureCache->getTexture(url);
part.normalTexture->setLoadPriorities(_loadPriorities);
} else if (part.specularTextureName == name) {
part.specularTexture = textureCache->getTexture(url, DEFAULT_TEXTURE,
false, QByteArray());
part.specularTexture = textureCache->getTexture(url);
part.specularTexture->setLoadPriorities(_loadPriorities);
} else if (part.emissiveTextureName == name) {
part.emissiveTexture = textureCache->getTexture(url, DEFAULT_TEXTURE,
false, QByteArray());
part.emissiveTexture = textureCache->getTexture(url);
part.emissiveTexture->setLoadPriorities(_loadPriorities);
}
}
@ -2095,22 +1986,22 @@ QStringList NetworkGeometry::getTextureNames() const {
for (int j = 0; j < mesh.parts.size(); j++) {
const NetworkMeshPart& part = mesh.parts[j];
if (!part.diffuseTextureName.isEmpty()) {
if (!part.diffuseTextureName.isEmpty() && part.diffuseTexture) {
QString textureURL = part.diffuseTexture->getURL().toString();
result << part.diffuseTextureName + ":" + textureURL;
}
if (!part.normalTextureName.isEmpty()) {
if (!part.normalTextureName.isEmpty() && part.normalTexture) {
QString textureURL = part.normalTexture->getURL().toString();
result << part.normalTextureName + ":" + textureURL;
}
if (!part.specularTextureName.isEmpty()) {
if (!part.specularTextureName.isEmpty() && part.specularTexture) {
QString textureURL = part.specularTexture->getURL().toString();
result << part.specularTextureName + ":" + textureURL;
}
if (!part.emissiveTextureName.isEmpty()) {
if (!part.emissiveTextureName.isEmpty() && part.emissiveTexture) {
QString textureURL = part.emissiveTexture->getURL().toString();
result << part.emissiveTextureName + ":" + textureURL;
}

View file

@ -133,8 +133,6 @@ public:
int allocateID() { return _nextID++; }
static const int UNKNOWN_ID;
void renderCone(float base, float height, int slices, int stacks);
void renderSphere(float radius, int slices, int stacks, const glm::vec3& color, bool solid = true, int id = UNKNOWN_ID)
{ renderSphere(radius, slices, stacks, glm::vec4(color, 1.0f), solid, id); }
void renderSphere(gpu::Batch& batch, float radius, int slices, int stacks, const glm::vec3& color, bool solid = true, int id = UNKNOWN_ID)

View file

@ -1,225 +0,0 @@
//
// GlowEffect.cpp
// interface/src/renderer
//
// Created by Andrzej Kapolka on 8/7/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// include this before QOpenGLFramebufferObject, which includes an earlier version of OpenGL
#include <gpu/GPUConfig.h>
#include <QOpenGLFramebufferObject>
#include <QWindow>
#include <PathUtils.h>
#include <PerfStat.h>
#include "GlowEffect.h"
#include "ProgramObject.h"
#include "RenderUtil.h"
#include "TextureCache.h"
#include "RenderUtilsLogging.h"
#include "gpu/GLBackend.h"
GlowEffect::GlowEffect()
: _initialized(false),
_isOddFrame(false),
_isFirstFrame(true),
_intensity(0.0f),
_enabled(false) {
}
GlowEffect::~GlowEffect() {
if (_initialized) {
delete _addProgram;
delete _horizontalBlurProgram;
delete _verticalBlurAddProgram;
delete _verticalBlurProgram;
delete _addSeparateProgram;
delete _diffuseProgram;
}
}
gpu::FramebufferPointer GlowEffect::getFreeFramebuffer() const {
return (_isOddFrame ?
DependencyManager::get<TextureCache>()->getSecondaryFramebuffer():
DependencyManager::get<TextureCache>()->getTertiaryFramebuffer());
}
static ProgramObject* createProgram(const QString& name) {
ProgramObject* program = new ProgramObject();
program->addShaderFromSourceFile(QGLShader::Fragment, PathUtils::resourcesPath() + "shaders/" + name + ".frag");
program->link();
program->bind();
program->setUniformValue("originalTexture", 0);
program->release();
return program;
}
void GlowEffect::init(bool enabled) {
if (_initialized) {
qCDebug(renderutils, "[ERROR] GlowEffeect is already initialized.");
return;
}
_addProgram = createProgram("glow_add");
_horizontalBlurProgram = createProgram("horizontal_blur");
_verticalBlurAddProgram = createProgram("vertical_blur_add");
_verticalBlurProgram = createProgram("vertical_blur");
_addSeparateProgram = createProgram("glow_add_separate");
_diffuseProgram = createProgram("diffuse");
_verticalBlurAddProgram->bind();
_verticalBlurAddProgram->setUniformValue("horizontallyBlurredTexture", 1);
_verticalBlurAddProgram->release();
_addSeparateProgram->bind();
_addSeparateProgram->setUniformValue("blurredTexture", 1);
_addSeparateProgram->release();
_diffuseProgram->bind();
_diffuseProgram->setUniformValue("diffusedTexture", 1);
_diffuseProgram->release();
_diffusionScaleLocation = _diffuseProgram->uniformLocation("diffusionScale");
_initialized = true;
_enabled = enabled;
}
void GlowEffect::prepare(RenderArgs* renderArgs) {
auto primaryFBO = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
GLuint fbo = gpu::GLBackend::getFramebufferID(primaryFBO);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fbo);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
_isEmpty = true;
_isOddFrame = !_isOddFrame;
}
void GlowEffect::begin(RenderArgs* renderArgs, float intensity) {
// store the current intensity and add the new amount
_intensityStack.push(_intensity);
glBlendColor(0.0f, 0.0f, 0.0f, _intensity += intensity);
_isEmpty &= (_intensity == 0.0f);
}
void GlowEffect::end(RenderArgs* renderArgs) {
// restore the saved intensity
glBlendColor(0.0f, 0.0f, 0.0f, _intensity = _intensityStack.pop());
}
gpu::FramebufferPointer GlowEffect::render(RenderArgs* renderArgs) {
PerformanceTimer perfTimer("glowEffect");
auto textureCache = DependencyManager::get<TextureCache>();
auto primaryFBO = gpu::GLBackend::getFramebufferID(textureCache->getPrimaryFramebuffer());
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindTexture(GL_TEXTURE_2D, textureCache->getPrimaryColorTextureID());
auto framebufferSize = textureCache->getFrameBufferSize();
glPushMatrix();
glLoadIdentity();
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadIdentity();
glDisable(GL_BLEND);
glDisable(GL_DEPTH_TEST);
glDepthMask(GL_FALSE);
gpu::FramebufferPointer destFBO = textureCache->getSecondaryFramebuffer();
if (!_enabled || _isEmpty) {
// copy the primary to the screen
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(destFBO));
glBindFramebuffer(GL_READ_FRAMEBUFFER, primaryFBO);
glBlitFramebuffer(
0, 0, framebufferSize.width(), framebufferSize.height(),
0, 0, framebufferSize.width(), framebufferSize.height(),
GL_COLOR_BUFFER_BIT, GL_NEAREST);
} else {
// diffuse into the secondary/tertiary (alternating between frames)
auto oldDiffusedFBO =
textureCache->getSecondaryFramebuffer();
auto newDiffusedFBO =
textureCache->getTertiaryFramebuffer();
if (_isOddFrame) {
qSwap(oldDiffusedFBO, newDiffusedFBO);
}
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(newDiffusedFBO));
if (_isFirstFrame) {
glClear(GL_COLOR_BUFFER_BIT);
} else {
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(oldDiffusedFBO->getRenderBuffer(0)));
_diffuseProgram->bind();
_diffuseProgram->setUniformValue(_diffusionScaleLocation, 1.0f / framebufferSize.width(), 1.0f / framebufferSize.height());
renderFullscreenQuad();
_diffuseProgram->release();
}
destFBO = oldDiffusedFBO;
glBindFramebuffer(GL_FRAMEBUFFER, 0);
// add diffused texture to the primary
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(newDiffusedFBO->getRenderBuffer(0)));
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(destFBO));
glViewport(0, 0, framebufferSize.width(), framebufferSize.height());
_addSeparateProgram->bind();
renderFullscreenQuad();
_addSeparateProgram->release();
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindTexture(GL_TEXTURE_2D, 0);
glActiveTexture(GL_TEXTURE0);
}
glPopMatrix();
glMatrixMode(GL_MODELVIEW);
glPopMatrix();
glEnable(GL_BLEND);
glEnable(GL_DEPTH_TEST);
glDepthMask(GL_TRUE);
glBindTexture(GL_TEXTURE_2D, 0);
_isFirstFrame = false;
return destFBO;
}
void GlowEffect::toggleGlowEffect(bool enabled) {
_enabled = enabled;
}
Glower::Glower(float amount) {
RenderArgs renderArgs;
DependencyManager::get<GlowEffect>()->begin(&renderArgs, amount);
}
Glower::Glower(RenderArgs* renderArgs, float amount) : _renderArgs(renderArgs) {
DependencyManager::get<GlowEffect>()->begin(_renderArgs, amount);
}
Glower::~Glower() {
DependencyManager::get<GlowEffect>()->end(_renderArgs);
}

View file

@ -1,97 +0,0 @@
//
// GlowEffect.h
// interface/src/renderer
//
// Created by Andrzej Kapolka on 8/7/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_GlowEffect_h
#define hifi_GlowEffect_h
#include <gpu/GPUConfig.h>
#include <gpu/Framebuffer.h>
#include "RenderArgs.h"
#include <QObject>
#include <QGLWidget>
#include <QStack>
#include <DependencyManager.h>
class ProgramObject;
/// A generic full screen glow effect.
class GlowEffect : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
public:
/// Returns a pointer to the framebuffer object that the glow effect is *not* using for persistent state
/// (either the secondary or the tertiary).
gpu::FramebufferPointer getFreeFramebuffer() const;
void init(bool enabled);
/// Prepares the glow effect for rendering the current frame. To be called before rendering the scene.
void prepare(RenderArgs* renderArgs);
/// Starts using the glow effect.
/// \param intensity the desired glow intensity, from zero to one
void begin(RenderArgs* renderArgs, float intensity = 1.0f);
/// Stops using the glow effect.
void end(RenderArgs* renderArgs);
/// Returns the current glow intensity.
float getIntensity() const { return _intensity; }
/// Renders the glow effect. To be called after rendering the scene.
/// \param toTexture whether to render to a texture, rather than to the frame buffer
/// \return the framebuffer object to which we rendered, or NULL if to the frame buffer
gpu::FramebufferPointer render(RenderArgs* renderArgs);
public slots:
void toggleGlowEffect(bool enabled);
private:
GlowEffect();
virtual ~GlowEffect();
bool _initialized;
ProgramObject* _addProgram;
ProgramObject* _horizontalBlurProgram;
ProgramObject* _verticalBlurAddProgram;
ProgramObject* _verticalBlurProgram;
ProgramObject* _addSeparateProgram;
ProgramObject* _diffuseProgram;
int _diffusionScaleLocation;
bool _isEmpty; ///< set when nothing in the scene is currently glowing
bool _isOddFrame; ///< controls the alternation between texture targets in diffuse add mode
bool _isFirstFrame; ///< for persistent modes, notes whether this is the first frame rendered
float _intensity;
QStack<float> _intensityStack;
bool _enabled;
};
/// RAII-style glow handler. Applies glow when in scope.
class Glower {
public:
Glower(float amount = 1.0f);
Glower(RenderArgs* renderArgs, float amount = 1.0f);
~Glower();
private:
RenderArgs* _renderArgs;
};
#endif // hifi_GlowEffect_h

View file

@ -32,7 +32,6 @@
#include "AbstractViewStateInterface.h"
#include "AnimationHandle.h"
#include "DeferredLightingEffect.h"
#include "GlowEffect.h"
#include "Model.h"
#include "RenderUtilsLogging.h"
@ -56,10 +55,6 @@
#include "model_lightmap_specular_map_frag.h"
#include "model_translucent_frag.h"
#define GLBATCH( call ) batch._##call
//#define GLBATCH( call ) call
using namespace std;
static int modelPointerTypeId = qRegisterMetaType<QPointer<Model> >();
@ -457,7 +452,8 @@ bool Model::updateGeometry() {
}
deleteGeometry();
_dilatedTextures.clear();
_geometry = geometry;
setGeometry(geometry);
_meshGroupsKnown = false;
_readyWhenAdded = false; // in case any of our users are using scenes
invalidCalculatedMeshBoxes(); // if we have to reload, we need to assume our mesh boxes are all invalid
@ -824,7 +820,7 @@ void Model::renderSetup(RenderArgs* args) {
}
}
if (!_meshGroupsKnown && isLoadedWithTextures()) {
if (!_meshGroupsKnown && isLoaded()) {
segregateMeshGroups();
}
}
@ -883,7 +879,7 @@ void Model::setVisibleInScene(bool newValue, std::shared_ptr<render::Scene> scen
bool Model::addToScene(std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
if (!_meshGroupsKnown && isLoadedWithTextures()) {
if (!_meshGroupsKnown && isLoaded()) {
segregateMeshGroups();
}
@ -913,7 +909,7 @@ bool Model::addToScene(std::shared_ptr<render::Scene> scene, render::PendingChan
}
bool Model::addToScene(std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges, render::Item::Status::Getters& statusGetters) {
if (!_meshGroupsKnown && isLoadedWithTextures()) {
if (!_meshGroupsKnown && isLoaded()) {
segregateMeshGroups();
}
@@ -1142,13 +1138,32 @@ void Model::setURL(const QUrl& url, const QUrl& fallback, bool retainCurrent, bo
onInvalidate();
// if so instructed, keep the current geometry until the new one is loaded
_nextBaseGeometry = _nextGeometry = DependencyManager::get<GeometryCache>()->getGeometry(url, fallback, delayLoad);
_nextGeometry = DependencyManager::get<GeometryCache>()->getGeometry(url, fallback, delayLoad);
_nextLODHysteresis = NetworkGeometry::NO_HYSTERESIS;
if (!retainCurrent || !isActive() || (_nextGeometry && _nextGeometry->isLoaded())) {
applyNextGeometry();
}
}
void Model::geometryRefreshed() {
QObject* sender = QObject::sender();
if (sender == _geometry) {
_readyWhenAdded = false; // reset our render items.
_needsReload = true;
invalidCalculatedMeshBoxes();
onInvalidate();
// if so instructed, keep the current geometry until the new one is loaded
_nextGeometry = DependencyManager::get<GeometryCache>()->getGeometry(_url);
_nextLODHysteresis = NetworkGeometry::NO_HYSTERESIS;
applyNextGeometry();
} else {
sender->disconnect(this, SLOT(geometryRefreshed()));
}
}
const QSharedPointer<NetworkGeometry> Model::getCollisionGeometry(bool delayLoad)
{
@@ -1156,7 +1171,11 @@ const QSharedPointer<NetworkGeometry> Model::getCollisionGeometry(bool delayLoad
_collisionGeometry = DependencyManager::get<GeometryCache>()->getGeometry(_collisionUrl, QUrl(), delayLoad);
}
return _collisionGeometry;
if (_collisionGeometry && _collisionGeometry->isLoaded()) {
return _collisionGeometry;
}
return QSharedPointer<NetworkGeometry>();
}
void Model::setCollisionModelURL(const QUrl& url) {
@@ -1776,6 +1795,18 @@ void Model::setBlendedVertices(int blendNumber, const QWeakPointer<NetworkGeomet
}
}
void Model::setGeometry(const QSharedPointer<NetworkGeometry>& newGeometry) {
if (_geometry == newGeometry) {
return;
}
if (_geometry) {
_geometry->disconnect(_geometry.data(), &Resource::onRefresh, this, &Model::geometryRefreshed);
}
_geometry = newGeometry;
QObject::connect(_geometry.data(), &Resource::onRefresh, this, &Model::geometryRefreshed);
}
void Model::applyNextGeometry() {
// delete our local geometry and custom textures
deleteGeometry();
@@ -1783,13 +1814,12 @@ void Model::applyNextGeometry() {
_lodHysteresis = _nextLODHysteresis;
// we retain a reference to the base geometry so that its reference count doesn't fall to zero
_baseGeometry = _nextBaseGeometry;
_geometry = _nextGeometry;
setGeometry(_nextGeometry);
_meshGroupsKnown = false;
_readyWhenAdded = false; // in case any of our users are using scenes
_needsReload = false; // we are loaded now!
invalidCalculatedMeshBoxes();
_nextBaseGeometry.reset();
_nextGeometry.reset();
}
@@ -1816,22 +1846,6 @@ void Model::deleteGeometry() {
_blendedBlendshapeCoefficients.clear();
}
void Model::setupBatchTransform(gpu::Batch& batch, RenderArgs* args) {
// Capture the view matrix once for the rendering of this model
if (_transforms.empty()) {
_transforms.push_back(Transform());
}
// We should be able to use the Frustum viewpoint instead of the "viewTransform"
// but it's still buggy in some cases, so let's wait and fix it...
_transforms[0] = _viewState->getViewTransform();
_transforms[0].preTranslate(-_translation);
batch.setViewTransform(_transforms[0]);
}
AABox Model::getPartBounds(int meshIndex, int partIndex) {
if (meshIndex < _meshStates.size()) {
@@ -1966,7 +1980,7 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
}
if (isSkinned) {
GLBATCH(glUniformMatrix4fv)(locations->clusterMatrices, state.clusterMatrices.size(), false,
batch._glUniformMatrix4fv(locations->clusterMatrices, state.clusterMatrices.size(), false,
(const float*)state.clusterMatrices.constData());
_transforms[0] = Transform();
_transforms[0].preTranslate(_translation);
@@ -1987,7 +2001,7 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
}
if (mesh.colors.isEmpty()) {
GLBATCH(glColor4f)(1.0f, 1.0f, 1.0f, 1.0f);
batch._glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
}
// guard against partially loaded meshes
@@ -2029,12 +2043,10 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
}
}
}
static bool showDiffuse = true;
if (showDiffuse && diffuseMap) {
if (diffuseMap && static_cast<NetworkTexture*>(diffuseMap)->isLoaded()) {
batch.setResourceTexture(0, diffuseMap->getGPUTexture());
} else {
batch.setResourceTexture(0, textureCache->getWhiteTexture());
batch.setResourceTexture(0, textureCache->getGrayTexture());
}
if (locations->texcoordMatrices >= 0) {
@@ -2045,20 +2057,19 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
if (!part.emissiveTexture.transform.isIdentity()) {
part.emissiveTexture.transform.getMatrix(texcoordTransform[1]);
}
GLBATCH(glUniformMatrix4fv)(locations->texcoordMatrices, 2, false, (const float*) &texcoordTransform);
batch._glUniformMatrix4fv(locations->texcoordMatrices, 2, false, (const float*) &texcoordTransform);
}
if (!mesh.tangents.isEmpty()) {
Texture* normalMap = networkPart.normalTexture.data();
batch.setResourceTexture(1, !normalMap ?
textureCache->getBlueTexture() : normalMap->getGPUTexture());
NetworkTexture* normalMap = networkPart.normalTexture.data();
batch.setResourceTexture(1, (!normalMap || !normalMap->isLoaded()) ?
textureCache->getBlueTexture() : normalMap->getGPUTexture());
}
if (locations->specularTextureUnit >= 0) {
Texture* specularMap = networkPart.specularTexture.data();
batch.setResourceTexture(locations->specularTextureUnit, !specularMap ?
textureCache->getWhiteTexture() : specularMap->getGPUTexture());
NetworkTexture* specularMap = networkPart.specularTexture.data();
batch.setResourceTexture(locations->specularTextureUnit, (!specularMap || !specularMap->isLoaded()) ?
textureCache->getBlackTexture() : specularMap->getGPUTexture());
}
if (args) {
@@ -2071,11 +2082,11 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
// assert(locations->emissiveParams >= 0); // we should have the emissiveParams defined in the shader
float emissiveOffset = part.emissiveParams.x;
float emissiveScale = part.emissiveParams.y;
GLBATCH(glUniform2f)(locations->emissiveParams, emissiveOffset, emissiveScale);
batch._glUniform2f(locations->emissiveParams, emissiveOffset, emissiveScale);
Texture* emissiveMap = networkPart.emissiveTexture.data();
batch.setResourceTexture(locations->emissiveTextureUnit, !emissiveMap ?
textureCache->getWhiteTexture() : emissiveMap->getGPUTexture());
NetworkTexture* emissiveMap = networkPart.emissiveTexture.data();
batch.setResourceTexture(locations->emissiveTextureUnit, (!emissiveMap || !emissiveMap->isLoaded()) ?
textureCache->getGrayTexture() : emissiveMap->getGPUTexture());
}
if (translucent && locations->lightBufferUnit >= 0) {
@@ -2179,16 +2190,17 @@ void Model::pickPrograms(gpu::Batch& batch, RenderMode mode, bool translucent, f
batch.setPipeline((*pipeline).second._pipeline);
if ((locations->alphaThreshold > -1) && (mode != RenderArgs::SHADOW_RENDER_MODE)) {
GLBATCH(glUniform1f)(locations->alphaThreshold, alphaThreshold);
batch._glUniform1f(locations->alphaThreshold, alphaThreshold);
}
if ((locations->glowIntensity > -1) && (mode != RenderArgs::SHADOW_RENDER_MODE)) {
GLBATCH(glUniform1f)(locations->glowIntensity, DependencyManager::get<GlowEffect>()->getIntensity());
const float DEFAULT_GLOW_INTENSITY = 1.0f; // FIXME - glow is removed
batch._glUniform1f(locations->glowIntensity, DEFAULT_GLOW_INTENSITY);
}
}
bool Model::initWhenReady(render::ScenePointer scene) {
if (isActive() && isRenderable() && !_meshGroupsKnown && isLoadedWithTextures()) {
if (isActive() && isRenderable() && !_meshGroupsKnown && isLoaded()) {
segregateMeshGroups();
render::PendingChanges pendingChanges;

View file

@@ -106,6 +106,7 @@ public:
void setVisibleInScene(bool newValue, std::shared_ptr<render::Scene> scene);
bool isVisible() const { return _isVisible; }
bool isLoaded() const { return _geometry && _geometry->isLoaded(); }
bool isLoadedWithTextures() const { return _geometry && _geometry->isLoadedWithTextures(); }
void init();
@@ -116,7 +117,7 @@ public:
// new Scene/Engine rendering support
bool needsFixupInScene() { return !_readyWhenAdded && readyToAddToScene(); }
bool readyToAddToScene(RenderArgs* renderArgs = nullptr) { return !_needsReload && isRenderable() && isActive() && isLoadedWithTextures(); }
bool readyToAddToScene(RenderArgs* renderArgs = nullptr) { return !_needsReload && isRenderable() && isActive() && isLoaded(); }
bool addToScene(std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);
bool addToScene(std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges, render::Item::Status::Getters& statusGetters);
void removeFromScene(std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);
@@ -245,6 +246,7 @@ public:
protected:
QSharedPointer<NetworkGeometry> _geometry;
void setGeometry(const QSharedPointer<NetworkGeometry>& newGeometry);
glm::vec3 _scale;
glm::vec3 _offset;
@@ -321,6 +323,9 @@ protected:
// hook for derived classes to be notified when setUrl invalidates the current model.
virtual void onInvalidate() {};
protected slots:
void geometryRefreshed();
private:
friend class AnimationHandle;
@@ -330,15 +335,12 @@ private:
QVector<JointState> createJointStates(const FBXGeometry& geometry);
void initJointTransforms();
QSharedPointer<NetworkGeometry> _baseGeometry; ///< reference required to prevent collection of base
QSharedPointer<NetworkGeometry> _nextBaseGeometry;
QSharedPointer<NetworkGeometry> _nextGeometry;
float _lodDistance;
float _lodHysteresis;
float _nextLODHysteresis;
QSharedPointer<NetworkGeometry> _collisionGeometry;
QSharedPointer<NetworkGeometry> _saveNonCollisionGeometry;
float _pupilDilation;
QVector<float> _blendshapeCoefficients;
@@ -405,7 +407,6 @@ private:
// helper functions used by render() or renderInScene()
void setupBatchTransform(gpu::Batch& batch, RenderArgs* args);
static void pickPrograms(gpu::Batch& batch, RenderArgs::RenderMode mode, bool translucent, float alphaThreshold,
bool hasLightmap, bool hasTangents, bool hasSpecular, bool isSkinned, bool isWireframe, RenderArgs* args,
Locations*& locations);
@@ -524,7 +525,6 @@ private:
QMap<render::ItemID, render::PayloadPointer> _renderItems;
bool _readyWhenAdded = false;
bool _needsReload = true;
};
Q_DECLARE_METATYPE(QPointer<Model>)

View file

@@ -187,9 +187,6 @@ void DrawTransparentDeferred::run(const SceneContextPointer& sceneContext, const
args->_context->syncCache();
args->_context->render((*args->_batch));
args->_batch = nullptr;
// reset blend function to standard...
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
}
gpu::PipelinePointer DrawOverlay3D::_opaquePipeline;

View file

@@ -15,54 +15,4 @@
/// Renders a quad from (-1, -1, 0) to (1, 1, 0) with texture coordinates from (sMin, tMin) to (sMax, tMax).
void renderFullscreenQuad(float sMin = 0.0f, float sMax = 1.0f, float tMin = 0.0f, float tMax = 1.0f);
template <typename F, GLenum matrix>
void withMatrixPush(F f) {
glMatrixMode(matrix);
glPushMatrix();
f();
glPopMatrix();
}
template <typename F>
void withProjectionPush(F f) {
withMatrixPush<GL_PROJECTION>(f);
}
template <typename F>
void withProjectionIdentity(F f) {
withProjectionPush([&] {
glLoadIdentity();
f();
});
}
template <typename F>
void withProjectionMatrix(GLfloat* matrix, F f) {
withProjectionPush([&] {
glLoadMatrixf(matrix);
f();
});
}
template <typename F>
void withModelviewPush(F f) {
withMatrixPush<GL_MODELVIEW>(f);
}
template <typename F>
void withModelviewIdentity(F f) {
withModelviewPush([&] {
glLoadIdentity();
f();
});
}
template <typename F>
void withModelviewMatrix(GLfloat* matrix, F f) {
withModelviewPush([&] {
glLoadMatrixf(matrix);
f();
});
}
#endif // hifi_RenderUtil_h

View file

@@ -504,9 +504,6 @@ glm::vec2 Font::drawString(float x, float y, const QString & str,
_vao->release();
_texture->release(); // TODO: Brad & Sam, let's discuss this. Without this non-textured quads get their colors borked.
_program->release();
// FIXME, needed?
// glDisable(GL_TEXTURE_2D);
return advance;
}

View file

@@ -110,24 +110,14 @@ const gpu::TexturePointer& TextureCache::getPermutationNormalTexture() {
_permutationNormalTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element(gpu::VEC3, gpu::UINT8, gpu::RGB), 256, 2));
_permutationNormalTexture->assignStoredMip(0, _blueTexture->getTexelFormat(), sizeof(data), data);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
}
return _permutationNormalTexture;
}
const unsigned char OPAQUE_WHITE[] = { 0xFF, 0xFF, 0xFF, 0xFF };
//const unsigned char TRANSPARENT_WHITE[] = { 0xFF, 0xFF, 0xFF, 0x0 };
//const unsigned char OPAQUE_BLACK[] = { 0x0, 0x0, 0x0, 0xFF };
const unsigned char OPAQUE_GRAY[] = { 0x80, 0x80, 0x80, 0xFF };
const unsigned char OPAQUE_BLUE[] = { 0x80, 0x80, 0xFF, 0xFF };
/*
static void loadSingleColorTexture(const unsigned char* color) {
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 1, 1, 0, GL_RGBA, GL_UNSIGNED_BYTE, color);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
}
*/
const unsigned char OPAQUE_BLACK[] = { 0x00, 0x00, 0x00, 0xFF };
const gpu::TexturePointer& TextureCache::getWhiteTexture() {
if (!_whiteTexture) {
@@ -137,6 +127,14 @@ const gpu::TexturePointer& TextureCache::getWhiteTexture() {
return _whiteTexture;
}
const gpu::TexturePointer& TextureCache::getGrayTexture() {
if (!_grayTexture) {
_grayTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element(gpu::VEC4, gpu::UINT8, gpu::RGBA), 1, 1));
_grayTexture->assignStoredMip(0, _grayTexture->getTexelFormat(), sizeof(OPAQUE_GRAY), OPAQUE_GRAY);
}
return _grayTexture;
}
const gpu::TexturePointer& TextureCache::getBlueTexture() {
if (!_blueTexture) {
_blueTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element(gpu::VEC4, gpu::UINT8, gpu::RGBA), 1, 1));
@@ -145,6 +143,14 @@ const gpu::TexturePointer& TextureCache::getBlueTexture() {
return _blueTexture;
}
const gpu::TexturePointer& TextureCache::getBlackTexture() {
if (!_blackTexture) {
_blackTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element(gpu::VEC4, gpu::UINT8, gpu::RGBA), 1, 1));
_blackTexture->assignStoredMip(0, _blackTexture->getTexelFormat(), sizeof(OPAQUE_BLACK), OPAQUE_BLACK);
}
return _blackTexture;
}
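A brief sketch of the fallback pattern these one-pixel defaults enable, mirroring the Model::renderPart changes above; batch, texture unit 0, and diffuseMap (assumed to be a NetworkTexture*) follow the names used there:

// Illustrative: choose a neutral placeholder until the network texture finishes loading.
auto textureCache = DependencyManager::get<TextureCache>();
batch.setResourceTexture(0, (diffuseMap && diffuseMap->isLoaded())
    ? diffuseMap->getGPUTexture()        // the real diffuse map once loaded
    : textureCache->getGrayTexture());   // mid-gray placeholder in the meantime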
/// Extra data for creating textures.
class TextureExtra {
public:

View file

@@ -52,9 +52,15 @@ public:
/// Returns an opaque white texture (useful for a default).
const gpu::TexturePointer& getWhiteTexture();
/// Returns an opaque gray texture (useful for a default).
const gpu::TexturePointer& getGrayTexture();
/// Returns a pale blue texture (useful for a normal map).
const gpu::TexturePointer& getBlueTexture();
/// Returns an opaque black texture (useful for a default).
const gpu::TexturePointer& getBlackTexture();
/// Returns a texture version of an image file
static gpu::TexturePointer getImageTexture(const QString& path);
@@ -112,7 +118,9 @@ private:
gpu::TexturePointer _permutationNormalTexture;
gpu::TexturePointer _whiteTexture;
gpu::TexturePointer _grayTexture;
gpu::TexturePointer _blueTexture;
gpu::TexturePointer _blackTexture;
QHash<QUrl, QWeakPointer<NetworkTexture> > _dilatableNetworkTextures;

View file

@@ -104,6 +104,7 @@ public:
gpu::Context* _context = nullptr;
OctreeRenderer* _renderer = nullptr;
ViewFrustum* _viewFrustum = nullptr;
glm::ivec4 _viewport{ 0, 0, 1, 1 };
float _sizeScale = 1.0f;
int _boundaryLevelAdjust = 0;
RenderMode _renderMode = DEFAULT_RENDER_MODE;

View file

@@ -87,20 +87,20 @@ public:
const Vec3& getTranslation() const;
void setTranslation(const Vec3& translation); // [new this] = [translation] * [this.rotation] * [this.scale]
void preTranslate(const Vec3& translation); // [new this] = [translation] * [this]
void postTranslate(const Vec3& translation); // [new this] = [this] * [translation] equivalent to glTranslate
void postTranslate(const Vec3& translation); // [new this] = [this] * [translation] equivalent to: glTranslate
const Quat& getRotation() const;
void setRotation(const Quat& rotation); // [new this] = [this.translation] * [rotation] * [this.scale]
void preRotate(const Quat& rotation); // [new this] = [rotation] * [this]
void postRotate(const Quat& rotation); // [new this] = [this] * [rotation] equivalent to glRotate
void postRotate(const Quat& rotation); // [new this] = [this] * [rotation] equivalent to: glRotate
const Vec3& getScale() const;
void setScale(float scale);
void setScale(const Vec3& scale); // [new this] = [this.translation] * [this.rotation] * [scale]
void preScale(float scale);
void preScale(const Vec3& scale);
void postScale(float scale); // [new this] = [this] * [scale] equivalent to glScale
void postScale(const Vec3& scale); // [new this] = [this] * [scale] equivalent to glScale
void postScale(float scale); // [new this] = [this] * [scale] equivalent to: glScale
void postScale(const Vec3& scale); // [new this] = [this] * [scale] equivalent to: glScale
bool isIdentity() const { return (_flags & ~Flags(FLAG_CACHE_INVALID_BITSET)).none(); }
bool isTranslating() const { return _flags[FLAG_TRANSLATION]; }
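To make the pre/post composition comments above concrete, a small illustrative sketch (the vectors are arbitrary values, not from this commit):

Transform t;                                  // starts as identity
t.postTranslate(glm::vec3(1.0f, 0.0f, 0.0f)); // [t] = [t] * [translation], glTranslate-style, in t's local frame
t.preTranslate(glm::vec3(0.0f, 2.0f, 0.0f));  // [t] = [translation] * [t], applied in the parent frame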

View file

@@ -102,13 +102,15 @@ class QQuickMenuItem;
QObject* addItem(QObject* parent, const QString& text) {
// FIXME add more checking here to ensure no name conflicts
QQuickMenuItem* returnedValue{ nullptr };
#ifndef QT_NO_DEBUG
bool invokeResult =
#endif
QMetaObject::invokeMethod(parent, "addItem", Qt::DirectConnection, Q_RETURN_ARG(QQuickMenuItem*, returnedValue),
Q_ARG(QString, text));
#ifndef QT_NO_DEBUG
Q_ASSERT(invokeResult);
#else
Q_UNUSED(invokeResult);
#endif
QObject* result = reinterpret_cast<QObject*>(returnedValue);
return result;
}
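The debug/release handling in these invokeMethod call sites follows a common Qt pattern; a reduced, illustrative sketch (the target object and method name are made up):

bool ok = QMetaObject::invokeMethod(someQmlObject, "someMethod", Qt::DirectConnection);
#ifndef QT_NO_DEBUG
Q_ASSERT(ok);      // debug builds: fail fast if the QML method is missing
#else
Q_UNUSED(ok);      // release builds: avoid an unused-variable warning
#endif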
@@ -206,12 +208,14 @@ void VrMenu::insertAction(QAction* before, QAction* action) {
result = ::addItem(menu, action->text());
} else {
QQuickMenuItem* returnedValue{ nullptr };
#ifndef QT_NO_DEBUG
bool invokeResult =
#endif
QMetaObject::invokeMethod(menu, "insertItem", Qt::DirectConnection, Q_RETURN_ARG(QQuickMenuItem*, returnedValue),
Q_ARG(int, index), Q_ARG(QString, action->text()));
#ifndef QT_NO_DEBUG
Q_ASSERT(invokeResult);
#else
Q_UNUSED(invokeResult);
#endif
result = reinterpret_cast<QObject*>(returnedValue);
}
Q_ASSERT(result);

View file

@@ -125,14 +125,12 @@ public:
DisplayModelElementProxy,
DisplayDebugTimingDetails,
DontDoPrecisionPicking,
DontFadeOnOctreeServerChanges,
DontRenderEntitiesAsScene,
EchoLocalAudio,
EchoServerAudio,
EditEntitiesHelp,
Enable3DTVMode,
EnableCharacterController,
EnableGlowEffect,
EnableVRMode,
ExpandMyAvatarSimulateTiming,
ExpandMyAvatarTiming,

View file

@@ -192,17 +192,10 @@ int main (int argc, char** argv) {
targetStringStream << "#ifndef scribe_" << targetName << "_h" << std::endl;
targetStringStream << "#define scribe_" << targetName << "_h" << std::endl << std::endl;
// targetStringStream << "const char " << targetName << "[] = R\"XXXX(" << destStringStream.str() << ")XXXX\";";
std::istringstream destStringStreamAgain(destStringStream.str());
targetStringStream << "const char " << targetName << "[] = \n";
while (!destStringStreamAgain.eof()) {
std::string lineToken;
std::getline(destStringStreamAgain, lineToken);
// targetStringStream << "\"" << lineToken << "\"\n";
targetStringStream << "R\"X(" << lineToken << ")X\"\"\\n\"\n";
}
targetStringStream << ";\n" << std::endl << std::endl;
// targetStringStream << "const char " << targetName << "[] = R\"XXXX(" << destStringStream.str() << ")XXXX\";";
targetStringStream << "const char " << targetName << "[] = R\"SCRIBE(";
targetStringStream << destStringStream.str();
targetStringStream << "\n)SCRIBE\";\n\n";
targetStringStream << "#endif" << std::endl;
} else {
targetStringStream << destStringStream.str();