Merge branch 'master' of https://github.com/highfidelity/hifi into centerOrigin

This commit is contained in:
ZappoMan 2015-08-17 21:53:45 -07:00
commit 5715042102
63 changed files with 2871 additions and 1369 deletions

View file

@ -11,6 +11,8 @@
//
Script.include("http://s3.amazonaws.com/hifi-public/scripts/libraries/toolBars.js");
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var nullActionID = "00000000-0000-0000-0000-000000000000";
var controllerID;
var controllerActive;
@ -20,7 +22,7 @@ var leftHandActionID = nullActionID;
var rightHandActionID = nullActionID;
var TRIGGER_THRESHOLD = 0.2;
var GRAB_RADIUS = 0.25;
var GRAB_RADIUS = 0.15;
var LEFT_HAND_CLICK = Controller.findAction("LEFT_HAND_CLICK");
var RIGHT_HAND_CLICK = Controller.findAction("RIGHT_HAND_CLICK");
@ -32,7 +34,7 @@ var leftHandGrabAction = LEFT_HAND_CLICK;
var rightHandGrabValue = 0;
var leftHandGrabValue = 0;
var prevRightHandGrabValue = 0;
var prevRightHandGrabValue = 0
var prevLeftHandGrabValue = 0;
var grabColor = { red: 0, green: 255, blue: 0};
@ -46,8 +48,8 @@ var toolBar = new ToolBar(0, 0, ToolBar.vertical, "highfidelity.toybox.toolbar",
});
var BUTTON_SIZE = 32;
var SWORD_IMAGE = "https://hifi-public.s3.amazonaws.com/images/sword/sword.svg"; // replace this with a table icon
var CLEANUP_IMAGE = "http://s3.amazonaws.com/hifi-public/images/delete.png"; // cleanup table
var SWORD_IMAGE = "https://hifi-public.s3.amazonaws.com/images/sword/sword.svg"; // TODO: replace this with a table icon
var CLEANUP_IMAGE = "http://s3.amazonaws.com/hifi-public/images/delete.png";
var tableButton = toolBar.addOverlay("image", {
width: BUTTON_SIZE,
height: BUTTON_SIZE,
@ -61,20 +63,25 @@ var cleanupButton = toolBar.addOverlay("image", {
alpha: 1
});
var leftHandOverlay = Overlays.addOverlay("sphere", {
var overlays = false;
var leftHandOverlay;
var rightHandOverlay;
if (overlays) {
leftHandOverlay = Overlays.addOverlay("sphere", {
position: MyAvatar.getLeftPalmPosition(),
size: GRAB_RADIUS,
color: releaseColor,
alpha: 0.5,
solid: false
});
var rightHandOverlay = Overlays.addOverlay("sphere", {
rightHandOverlay = Overlays.addOverlay("sphere", {
position: MyAvatar.getRightPalmPosition(),
size: GRAB_RADIUS,
color: releaseColor,
alpha: 0.5,
solid: false
});
}
var OBJECT_HEIGHT_OFFSET = 0.5;
var MIN_OBJECT_SIZE = 0.05;
@ -86,9 +93,9 @@ var TABLE_DIMENSIONS = {
};
var GRAVITY = {
x: 0,
y: -2,
z: 0
x: 0.0,
y: -2.0,
z: 0.0
}
var LEFT = 0;
@ -101,6 +108,8 @@ var tableEntities = Array(NUM_OBJECTS + 1); // Also includes table
var VELOCITY_MAG = 0.3;
var entitiesToResize = [];
var MODELS = Array(
{ modelURL: "https://hifi-public.s3.amazonaws.com/ozan/props/sword/sword.fbx" },
{ modelURL: "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Vehicles/clara/spaceshuttle.fbx" },
@ -113,11 +122,41 @@ var MODELS = Array(
{ modelURL: "https://hifi-public.s3.amazonaws.com/marketplace/contents/029db3d4-da2c-4cb2-9c08-b9612ba576f5/02949063e7c4aed42ad9d1a58461f56d.fst?1427169842" },
{ modelURL: "https://hifi-public.s3.amazonaws.com/models/props/MidCenturyModernLivingRoom/Interior/Bar.fbx" },
{ modelURL: "https://hifi-public.s3.amazonaws.com/marketplace/contents/96124d04-d603-4707-a5b3-e03bf47a53b2/1431770eba362c1c25c524126f2970fb.fst?1436924721" }
// Complex models:
// { modelURL: "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Architecture/sketchfab/cudillero.fbx" },
// { modelURL: "https://hifi-public.s3.amazonaws.com/ozan/sets/musicality/musicality.fbx" },
// { modelURL: "https://hifi-public.s3.amazonaws.com/ozan/sets/statelyHome/statelyHome.fbx" }
);
var COLLISION_SOUNDS = Array(
"http://public.highfidelity.io/sounds/Collisions-ballhitsandcatches/pingpong_TableBounceMono.wav",
"http://public.highfidelity.io/sounds/Collisions-ballhitsandcatches/billiards/collision1.wav"
);
var RESIZE_TIMER = 0.0;
var RESIZE_WAIT = 0.05; // 50 milliseconds
var leftFist = Entities.addEntity( {
type: "Sphere",
shapeType: 'sphere',
position: MyAvatar.getLeftPalmPosition(),
dimensions: { x: GRAB_RADIUS, y: GRAB_RADIUS, z: GRAB_RADIUS },
rotation: MyAvatar.getLeftPalmRotation(),
visible: false,
collisionsWillMove: false,
ignoreForCollisions: true
});
var rightFist = Entities.addEntity( {
type: "Sphere",
shapeType: 'sphere',
position: MyAvatar.getRightPalmPosition(),
dimensions: { x: GRAB_RADIUS, y: GRAB_RADIUS, z: GRAB_RADIUS },
rotation: MyAvatar.getRightPalmRotation(),
visible: false,
collisionsWillMove: false,
ignoreForCollisions: true
});
function letGo(hand) {
var actionIDToRemove = (hand == LEFT) ? leftHandActionID : rightHandActionID;
var entityIDToEdit = (hand == LEFT) ? leftHandObjectID : rightHandObjectID;
@ -126,6 +165,7 @@ function letGo(hand) {
MyAvatar.getRightPalmAngularVelocity();
if (actionIDToRemove != nullActionID && entityIDToEdit != null) {
Entities.deleteAction(entityIDToEdit, actionIDToRemove);
// TODO: upon successful letGo, restore collision groups
if (hand == LEFT) {
leftHandObjectID = null;
leftHandActionID = nullActionID;
@ -142,12 +182,15 @@ function setGrabbedObject(hand) {
var objectID = null;
var minDistance = GRAB_RADIUS;
for (var i = 0; i < entities.length; i++) {
// Don't grab the object in your other hands, your fists, or the table
if ((hand == LEFT && entities[i] == rightHandObjectID) ||
(hand == RIGHT) && entities[i] == leftHandObjectID) {
(hand == RIGHT && entities[i] == leftHandObjectID) ||
entities[i] == leftFist || entities[i] == rightFist ||
(tableCreated && entities[i] == tableEntities[0])) {
continue;
} else {
var distance = Vec3.distance(Entities.getEntityProperties(entities[i]).position, handPosition);
if (distance < minDistance) {
if (distance <= minDistance) {
objectID = entities[i];
minDistance = distance;
}
@ -166,19 +209,23 @@ function setGrabbedObject(hand) {
function grab(hand) {
if (!setGrabbedObject(hand)) {
// If you don't grab an object, make a fist
Entities.editEntity((hand == LEFT) ? leftFist : rightFist, { ignoreForCollisions: false } );
return;
}
var objectID = (hand == LEFT) ? leftHandObjectID : rightHandObjectID;
var handRotation = (hand == LEFT) ? MyAvatar.getLeftPalmRotation() : MyAvatar.getRightPalmRotation();
var handPosition = (hand == LEFT) ? MyAvatar.getLeftPalmPosition() : MyAvatar.getRightPalmPosition();
var objectRotation = Entities.getEntityProperties(objectID).rotation;
var offsetRotation = Quat.multiply(Quat.inverse(handRotation), objectRotation);
var objectPosition = Entities.getEntityProperties(objectID).position;
var offset = Vec3.subtract(objectPosition, handPosition);
var offsetPosition = Vec3.multiplyQbyV(Quat.inverse(Quat.multiply(handRotation, offsetRotation)), offset);
// print(JSON.stringify(offsetPosition));
var actionID = Entities.addAction("hold", objectID, {
relativePosition: {
x: 0.0,
y: 0.0,
z: 0.0
},
relativePosition: { x: 0, y: 0, z: 0 },
relativeRotation: offsetRotation,
hand: (hand == LEFT) ? "left" : "right",
timeScale: 0.05
@ -190,7 +237,7 @@ function grab(hand) {
rightHandObjectID = null;
}
} else {
// Entities.editEntity(objectID, { ignore});
// TODO: upon successful grab, add to collision group so object doesn't collide with immovable entities
if (hand == LEFT) {
leftHandActionID = actionID;
} else {
@ -199,28 +246,70 @@ function grab(hand) {
}
}
function update() {
Overlays.editOverlay(leftHandOverlay, { position: MyAvatar.getLeftPalmPosition() });
Overlays.editOverlay(rightHandOverlay, { position: MyAvatar.getRightPalmPosition() });
// Re-scale tracked model entities once their real dimensions are known.
// The engine reports naturalDimensions of 1x1x1 until the model resource
// loads, so entities still showing the placeholder are deferred to a
// later pass via the global entitiesToResize list.
function resizeModels() {
    var stillPending = [];
    entitiesToResize.forEach(function(entityID) {
        var naturalDimensions = Entities.getEntityProperties(entityID).naturalDimensions;
        var isLoaded = naturalDimensions.x != 1.0 || naturalDimensions.y != 1.0 || naturalDimensions.z != 1.0;
        if (isLoaded) {
            // bigger range of sizes for models
            var scale = randFloat(MIN_OBJECT_SIZE, 3.0 * MAX_OBJECT_SIZE);
            Entities.editEntity(entityID, {
                dimensions: Vec3.multiply(scale, Vec3.normalize(naturalDimensions)),
                shapeType: "box"
            });
        } else {
            stillPending.push(entityID);
        }
    });
    entitiesToResize = stillPending;
}
function update(deltaTime) {
if (overlays) {
Overlays.editOverlay(leftHandOverlay, { position: MyAvatar.getLeftPalmPosition() });
Overlays.editOverlay(rightHandOverlay, { position: MyAvatar.getRightPalmPosition() });
}
// if (tableCreated && RESIZE_TIMER < RESIZE_WAIT) {
// RESIZE_TIMER += deltaTime;
// } else if (tableCreated) {
// resizeModels();
// }
rightHandGrabValue = Controller.getActionValue(rightHandGrabAction);
leftHandGrabValue = Controller.getActionValue(leftHandGrabAction);
if (rightHandGrabValue > TRIGGER_THRESHOLD && rightHandObjectID == null) {
Overlays.editOverlay(rightHandOverlay, { color: grabColor });
Entities.editEntity(leftFist, { position: MyAvatar.getLeftPalmPosition() });
Entities.editEntity(rightFist, { position: MyAvatar.getRightPalmPosition() });
if (rightHandGrabValue > TRIGGER_THRESHOLD &&
prevRightHandGrabValue < TRIGGER_THRESHOLD) {
if (overlays) {
Overlays.editOverlay(rightHandOverlay, { color: grabColor });
}
grab(RIGHT);
} else if (rightHandGrabValue < TRIGGER_THRESHOLD &&
prevRightHandGrabValue > TRIGGER_THRESHOLD) {
Overlays.editOverlay(rightHandOverlay, { color: releaseColor });
Entities.editEntity(rightFist, { ignoreForCollisions: true } );
if (overlays) {
Overlays.editOverlay(rightHandOverlay, { color: releaseColor });
}
letGo(RIGHT);
}
if (leftHandGrabValue > TRIGGER_THRESHOLD && leftHandObjectID == null) {
Overlays.editOverlay(leftHandOverlay, { color: grabColor });
if (leftHandGrabValue > TRIGGER_THRESHOLD &&
prevLeftHandGrabValue < TRIGGER_THRESHOLD) {
if (overlays) {
Overlays.editOverlay(leftHandOverlay, { color: grabColor });
}
grab(LEFT);
} else if (leftHandGrabValue < TRIGGER_THRESHOLD &&
prevLeftHandGrabValue > TRIGGER_THRESHOLD) {
Overlays.editOverlay(leftHandOverlay, { color: releaseColor });
Entities.editEntity(leftFist, { ignoreForCollisions: true } );
if (overlays) {
Overlays.editOverlay(leftHandOverlay, { color: releaseColor });
}
letGo(LEFT);
}
@ -231,8 +320,12 @@ function update() {
// Release any held objects and remove every overlay/entity this script
// created. Intended to run on script shutdown.
// Fix: the block contained stale pre-refactor lines that deleted the hand
// overlays unconditionally AND again inside the `if (overlays)` guard; the
// deletes now happen only when the overlays were actually created.
function cleanUp() {
    letGo(RIGHT);
    letGo(LEFT);
    // Hand-sphere overlays exist only when the `overlays` debug flag is on.
    if (overlays) {
        Overlays.deleteOverlay(leftHandOverlay);
        Overlays.deleteOverlay(rightHandOverlay);
    }
    Entities.deleteEntity(leftFist);
    Entities.deleteEntity(rightFist);
    removeTable();
    toolBar.cleanup();
}
@ -268,11 +361,14 @@ randInt = function(low, high) {
function createTable() {
var tablePosition = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(MyAvatar.orientation)));
tableEntities[0] = Entities.addEntity( {
type: "Box",
type: "Model",
shapeType: 'box',
position: tablePosition,
dimensions: TABLE_DIMENSIONS,
rotation: MyAvatar.orientation,
color: { red: 255, green: 0, blue: 0 }
// color: { red: 102, green: 51, blue: 0 },
modelURL: HIFI_PUBLIC_BUCKET + 'eric/models/woodFloor.fbx',
collisionSoundURL: "http://public.highfidelity.io/sounds/dice/diceCollide.wav"
});
for (var i = 1; i < NUM_OBJECTS + 1; i++) {
@ -308,7 +404,8 @@ function createTable() {
restitution: 0.01,
density: 0.5,
collisionsWillMove: true,
color: { red: randInt(0, 255), green: randInt(0, 255), blue: randInt(0, 255) }
color: { red: randInt(0, 255), green: randInt(0, 255), blue: randInt(0, 255) },
// collisionSoundURL: COLLISION_SOUNDS[randInt(0, COLLISION_SOUNDS.length)]
});
if (type == "Model") {
var randModel = randInt(0, MODELS.length);
@ -316,11 +413,13 @@ function createTable() {
shapeType: "box",
modelURL: MODELS[randModel].modelURL
});
entitiesToResize.push(tableEntities[i]);
}
}
}
function removeTable() {
RESIZE_TIMER = 0.0;
for (var i = 0; i < tableEntities.length; i++) {
Entities.deleteEntity(tableEntities[i]);
}

View file

@ -574,8 +574,14 @@ function findClickedEntity(event) {
}
var mouseHasMovedSincePress = false;
var mousePressStartTime = 0;
var mousePressStartPosition = { x: 0, y: 0 };
var mouseDown = false;
function mousePressEvent(event) {
mouseDown = true;
mousePressStartPosition = { x: event.x, y: event.y };
mousePressStartTime = Date.now();
mouseHasMovedSincePress = false;
mouseCapturedByTool = false;
@ -595,6 +601,8 @@ var highlightedEntityID = null;
var mouseCapturedByTool = false;
var lastMousePosition = null;
var idleMouseTimerId = null;
var CLICK_TIME_THRESHOLD = 500 * 1000; // 500 ms
var CLICK_MOVE_DISTANCE_THRESHOLD = 8;
var IDLE_MOUSE_TIMEOUT = 200;
var DEFAULT_ENTITY_DRAG_DROP_DISTANCE = 2.0;
@ -603,7 +611,21 @@ function mouseMoveEventBuffered(event) {
lastMouseMoveEvent = event;
}
function mouseMove(event) {
mouseHasMovedSincePress = true;
if (mouseDown && !mouseHasMovedSincePress) {
var timeSincePressMicro = Date.now() - mousePressStartTime;
var dX = mousePressStartPosition.x - event.x;
var dY = mousePressStartPosition.y - event.y;
var sqDist = (dX * dX) + (dY * dY);
// If less than CLICK_TIME_THRESHOLD has passed since the mouse click AND the mouse has moved
// less than CLICK_MOVE_DISTANCE_THRESHOLD distance, then don't register this as a mouse move
// yet. The goal is to provide mouse clicks that are more lenient to small movements.
if (timeSincePressMicro < CLICK_TIME_THRESHOLD && sqDist < CLICK_MOVE_DISTANCE_THRESHOLD) {
return;
}
mouseHasMovedSincePress = true;
}
if (placingEntityID) {
var pickRay = Camera.computePickRay(event.x, event.y);
@ -670,6 +692,8 @@ function highlightEntityUnderCursor(position, accurateRay) {
function mouseReleaseEvent(event) {
mouseDown = false;
if (lastMouseMoveEvent) {
mouseMove(lastMouseMoveEvent);
lastMouseMoveEvent = null;

View file

@ -19,7 +19,9 @@ Script.include('../utilities/tools/vector.js');
var URL = "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Scripts/planets/";
SatelliteGame = function() {
SatelliteCreator = function() {
print("initializing satellite game");
var MAX_RANGE = 50.0;
var Y_AXIS = {
x: 0,
@ -36,6 +38,10 @@ SatelliteGame = function() {
var ZONE_DIM = 100.0;
var LIGHT_INTENSITY = 1.5;
var center, distance;
var earth;
Earth = function(position, size) {
this.earth = Entities.addEntity({
type: "Model",
@ -68,7 +74,7 @@ SatelliteGame = function() {
this.clouds = Entities.addEntity({
type: "Model",
shapeType: 'sphere',
modelURL: URL + "clouds.fbx?i=2",
modelURL: URL + "clouds.fbx",
position: position,
dimensions: {
x: size + CLOUDS_OFFSET,
@ -101,16 +107,42 @@ SatelliteGame = function() {
});
// Delete the three entities (cloud layer, earth model, light zone) that
// make up this Earth instance.
this.cleanup = function() {
print('cleaning up earth models');
Entities.deleteEntity(this.clouds);
Entities.deleteEntity(this.earth);
Entities.deleteEntity(this.zone);
}
}
// Create earth model
var center = Vec3.sum(Camera.getPosition(), Vec3.multiply(MAX_RANGE, Quat.getFront(Camera.getOrientation())));
var distance = Vec3.length(Vec3.subtract(center, Camera.getPosition()));
var earth = new Earth(center, EARTH_SIZE);
// Start (or restart) the satellite game after user confirmation.
// Returns false if the user declines the dialog, true once setup is done.
this.init = function() {
// Tear down any previous session before starting a new one.
if (this.isActive) {
this.quitGame();
}
var confirmed = Window.confirm("Start satellite game?");
if (!confirmed) {
return false;
}
this.isActive = true;
// Teleport the avatar and camera to a fixed staging area far from origin.
MyAvatar.position = {
x: 1000,
y: 1000,
z: 1000
};
Camera.setPosition({
x: 1000,
y: 1000,
z: 1000
});
// Create earth model: place it MAX_RANGE meters ahead of the camera and
// record its distance for later gameplay calculations.
center = Vec3.sum(Camera.getPosition(), Vec3.multiply(MAX_RANGE, Quat.getFront(Camera.getOrientation())));
distance = Vec3.length(Vec3.subtract(center, Camera.getPosition()));
earth = new Earth(center, EARTH_SIZE);
return true;
};
var satellites = [];
var SATELLITE_SIZE = 2.0;
@ -257,12 +289,16 @@ SatelliteGame = function() {
}
}
this.endGame = function() {
// Stop the game: mark it inactive and delete every satellite entity, each
// satellite's aiming arrow, and the earth created by init().
this.quitGame = function() {
print("ending satellite game");
this.isActive = false;
for (var i = 0; i < satellites.length; i++) {
Entities.deleteEntity(satellites[i].satellite);
satellites[i].arrow.cleanup();
}
earth.cleanup();
}
@ -283,6 +319,7 @@ SatelliteGame = function() {
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Script.update.connect(update);
Script.scriptEnding.connect(this.endGame);
Script.scriptEnding.connect(this.quitGame);
}
}

View file

@ -0,0 +1,515 @@
//
// widgets-example.js
// games
//
// Copyright 2015 High Fidelity, Inc.
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var ICONS_URL = 'https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Scripts/planets/images/';
var panelX = 1250;
var panelY = 500;
var panelWidth = 50;
var panelHeight = 210;
Script.include('../libraries/uiwidgets.js');
UI.setDefaultVisibility(true);
var ICON_WIDTH = 40.0;
var ICON_HEIGHT = 40.0;
var ICON_COLOR = UI.rgba(45, 45, 45, 0.7);
var FOCUSED_COLOR = UI.rgba(250, 250, 250, 1.0);
var PANEL_BACKGROUND_COLOR = UI.rgba(120, 120, 120, 0.8);
var PANEL_PADDING = 7.0;
var PANEL_BORDER = 12.0;
var SUBPANEL_GAP = 1.0;
var icons = [];
// Create an icon image widget inside `panel`, register it in the global
// `icons` list (used later for hover color toggling), and return it.
// iconId is the bare file name under ICONS_URL (".svg" is appended).
function addImage(panel, iconId) {
    var imageProperties = {
        imageURL: ICONS_URL + iconId + '.svg',
        width: ICON_WIDTH,
        height: ICON_HEIGHT,
        color: ICON_COLOR,
        alpha: ICON_COLOR.a
    };
    var icon = panel.add(new UI.Image(imageProperties));
    icons.push(icon);
    return icon;
}
var panels = [];
// Create a UI.WidgetStack with the shared panel defaults (background
// color/alpha, padding, border) filled in wherever the caller did not
// specify them, register it in the global `panels` list, and return it.
// Note: `properties` is mutated in place, matching the original behavior.
function addPanel(properties) {
    var background = properties.background || {};
    background.backgroundColor = background.backgroundColor || PANEL_BACKGROUND_COLOR;
    background.backgroundAlpha = background.backgroundAlpha || PANEL_BACKGROUND_COLOR.a;
    properties.background = background;
    if (!properties.padding) {
        properties.padding = { x: PANEL_PADDING, y: PANEL_PADDING };
    }
    if (!properties.border) {
        properties.border = { x: PANEL_BORDER, y: PANEL_BORDER };
    }
    var panel = new UI.WidgetStack(properties);
    panels.push(panel);
    return panel;
}
// Make `panel` draggable. Dragging the panel moves `target` (defaults to
// the panel itself), so several panels can be moved as one by sharing a
// target. The target's position is captured at drag-begin so the widget
// tracks the cursor without accumulating per-frame error.
function makeDraggable(panel, target) {
    if (!target) {
        target = panel;
    }
    var dragStart = null;
    var initialPos = null;
    panel.addAction('onDragBegin', function(event) {
        dragStart = {
            x: event.x,
            y: event.y
        };
        initialPos = {
            x: target.position.x,
            y: target.position.y
        };
    });
    panel.addAction('onDragUpdate', function(event) {
        target.setPosition(
            initialPos.x + event.x - dragStart.x,
            initialPos.y + event.y - dragStart.y
        );
        UI.updateLayout();
    });
    panel.addAction('onDragEnd', function() {
        // Bug fix: the original wrote `dragStart = dragEnd = null;`, but
        // `dragEnd` was never declared — an implicit global in sloppy mode
        // and a ReferenceError in strict mode. Reset the two real trackers.
        dragStart = null;
        initialPos = null;
    });
}
// Returns a click handler that writes `text` into `demoLabel` and reflows
// the UI.
// NOTE(review): `demoLabel` is not defined anywhere in this file as shown,
// and `setText` is overwritten with `undefined` shortly after — this looks
// like dead demo code; confirm before relying on it.
function setText(text) {
return function() {
demoLabel.setText(text);
UI.updateLayout();
};
}
// Render a plain object as a multi-line "{key: value, ...}" string.
// Each enumerable key becomes its own line; values are coerced with
// string concatenation. An empty object yields "{}".
function join(obj) {
    var parts = [];
    for (var key in obj) {
        // NOTE(review): the original applied ("" + value).replace("\n", "\n"),
        // an identity transform — plain string coercion is behaviorally equal.
        parts.push(key + ": " + ("" + obj[key]));
    }
    if (parts.length === 0) {
        return "{}";
    }
    return "{\n" + parts.join(",\n") + " }";
}
setText = undefined;
var tooltipWidget = new UI.Label({
text: "<tooltip>",
width: 500,
height: 20,
visible: false
});
// Show `text` in the shared tooltip label while the mouse hovers over
// `widget`, positioned just past the widget's right edge; hide it again
// when the mouse leaves.
function addTooltip(widget, text) {
    widget.addAction('onMouseOver', function(event, hovered) {
        tooltipWidget.setText(text);
        tooltipWidget.setPosition(hovered.position.x + hovered.getWidth() + 20,
                                  hovered.position.y + 10);
        tooltipWidget.setVisible(true);
        UI.updateLayout();
    });
    widget.addAction('onMouseExit', function() {
        tooltipWidget.setVisible(false);
        UI.updateLayout();
    });
}
var mainPanel = addPanel({
dir: '+y'
});
makeDraggable(mainPanel);
mainPanel.setPosition(1200, 250);
mainPanel.setVisible(true);
var systemViewButton = addImage(mainPanel, 'solarsystems');
var zoomButton = addImage(mainPanel, 'magnifier');
var satelliteButton = addImage(mainPanel, 'satellite');
var settingsButton = addImage(mainPanel, 'settings');
var stopButton = addImage(mainPanel, 'close');
addTooltip(systemViewButton, "system view");
addTooltip(zoomButton, "zoom");
addTooltip(satelliteButton, "satelite view");
addTooltip(settingsButton, "settings");
addTooltip(stopButton, "exit");
var systemViewPanel = addPanel({
dir: '+x',
visible: false
});
var restartButton = addImage(systemViewPanel, 'refresh');
var pauseButton = addImage(systemViewPanel, 'playpause');
var rideButton = addImage(systemViewPanel, 'rocket');
var tweening, tweeningPaused;
Script.include('https://hifi-staff.s3.amazonaws.com/bridget/tween.js');
pauseButton.addAction('onClick', function() {
if (tweening) {
if (!tweeningPaused) {
tweeningPaused = true;
} else {
tweeningPaused = false;
}
return;
}
if (!paused) {
pause();
} else {
resume();
}
});
// Allow to toggle pause with spacebar
// Allow toggling pause with the spacebar; all other keys are ignored.
function keyPressEvent(event) {
    if (event.text != "SPACE") {
        return;
    }
    if (paused) {
        resume();
    } else {
        pause();
    }
}
rideButton.addAction('onClick', function() {
if (!paused) {
pause();
}
if (tweening) {
tweening = false;
tweeningPaused = true;
restart();
return;
}
var confirmed = Window.confirm('Ride through the solar system?');
if (confirmed) {
init();
tweening = true;
tweeningPaused = false;
}
});
restartButton.addAction('onClick', function() {
restart();
tweening = false;
});
var zoomPanel = addPanel({
dir: '+x',
visible: false
});
var zoomButtons = [];
for (var i = 0; i < planets.length; ++i) {
var label = zoomPanel.add(new UI.Label({
text: planets[i].name,
width: 80,
height: 20
}));
zoomButtons.push(label);
UI.updateLayout();
}
UI.updateLayout();
var zoomView = false;
zoomButtons.forEach(function(button, i) {
var planet = planets[i];
button.addAction('onClick', function() {
if (!planets[i].isZoomed) {
planet.zoom();
planet.isZoomed = true;
zoomView = true;
} else {
MyAvatar.position = startingPosition;
Camera.setPosition(cameraStart);
planet.isZoomed = false;
zoomView = false;
}
});
});
var settingsPanel = addPanel({
dir: '+y',
visible: false
});
// Build a labeled checkbox row inside `parent`.
//   label          - text shown next to the box
//   labelWidth     - pixel width reserved for the label
//   enabled        - initial checked state
//   onValueChanged - callback invoked with the new boolean value
// Returns the UI.Checkbox with .label (the label widget), .layout (the
// row container), and a .setValue(value) convenience method attached.
function addCheckbox(parent, label, labelWidth, enabled, onValueChanged) {
    var layout = parent.add(new UI.WidgetStack({
        dir: '+x',
        visible: true,
        backgroundAlpha: 0.0
    }));
    // Fix: the original declared `var label` here, shadowing the `label`
    // parameter; it only worked because the initializer was evaluated
    // before the assignment. A distinct name removes the hazard.
    var labelWidget = layout.add(new UI.Label({
        text: label,
        width: labelWidth,
        height: 20,
        backgroundAlpha: 0.0
    }));
    var defaultColor = UI.rgb(10, 10, 10);
    var checkbox = layout.add(new UI.Checkbox({
        width: 20,
        height: 20,
        padding: {
            x: 3,
            y: 3
        },
        backgroundColor: defaultColor,
        backgroundAlpha: 0.9,
        checked: enabled,
        onValueChanged: onValueChanged
    }));
    checkbox.label = labelWidget;
    checkbox.layout = layout;
    checkbox.setValue = function(value) {
        checkbox.setChecked(value);
    }
    return checkbox;
}
// Build a labeled slider row inside `parent`.
//   label        - text shown left of the slider
//   labelWidth   - pixel width reserved for the label
//   defaultValue - initial value; double-clicking the slider restores it
//   min, max     - value range
//   valueChanged - callback invoked with the new value on every change
// Returns the UI.Slider with .label (the label widget) and .layout (the
// row container) attached. A small display label mirrors the value to
// two decimal places.
function addSlider(parent, label, labelWidth, defaultValue, min, max, valueChanged) {
    var layout = parent.add(new UI.WidgetStack({
        dir: '+x',
        visible: true
    }));
    // Fix: the original declared `var label` here, shadowing the `label`
    // parameter; it only worked because the initializer was evaluated
    // before the assignment. A distinct name removes the hazard.
    var labelWidget = layout.add(new UI.Label({
        text: label,
        width: labelWidth,
        height: 27
    }));
    var display = layout.add(new UI.Label({
        text: " ",
        width: 50,
        height: 27
    }));
    var slider = layout.add(new UI.Slider({
        value: defaultValue,
        maxValue: max,
        minValue: min,
        width: 300,
        height: 20,
        backgroundColor: UI.rgb(10, 10, 10),
        backgroundAlpha: 1.0,
        slider: { // slider knob
            width: 30,
            height: 18,
            backgroundColor: UI.rgb(120, 120, 120),
            backgroundAlpha: 1.0
        }
    }));
    slider.addAction('onDoubleClick', function() {
        slider.setValue(defaultValue);
        UI.updateLayout();
    });
    display.setText("" + (+slider.getValue().toFixed(2)));
    slider.onValueChanged = function(value) {
        valueChanged(value);
        display.setText("" + (+value.toFixed(2)));
        UI.updateLayout();
    }
    slider.label = labelWidget;
    slider.layout = layout;
    return slider;
}
settingsPanel.showTrailsButton = addCheckbox(settingsPanel, "show trails", 120, trailsEnabled, function(value) {
trailsEnabled = value;
if (trailsEnabled) {
for (var i = 0; i < planets.length; ++i) {
planets[i].resetTrails();
}
//if trails are off and we've already created trails, remove existing trails
} else {
for (var i = 0; i < planets.length; ++i) {
planets[i].clearTrails();
}
}
});
var g_multiplier = 1.0;
settingsPanel.gravitySlider = addSlider(settingsPanel, "gravity scale ", 200, g_multiplier, 0.0, 5.0, function(value) {
g_multiplier = value;
GRAVITY = REFERENCE_GRAVITY * g_multiplier;
});
var period_multiplier = 1.0;
var last_alpha = period_multiplier;
settingsPanel.periodSlider = addSlider(settingsPanel, "orbital period scale ", 200, period_multiplier, 0.0, 3.0, function(value) {
period_multiplier = value;
changePeriod(period_multiplier);
});
// Rescale the simulation when the orbital-period slider moves. `ratio` is
// previous-slider-value / new-value; gravity scales with ratio squared and
// every planet's period and velocity scale linearly, after which trails
// are reset so they match the new motion. The new value is remembered in
// the module-level `last_alpha` for the next adjustment.
function changePeriod(alpha) {
    var ratio = last_alpha / alpha;
    GRAVITY = Math.pow(ratio, 2.0) * GRAVITY;
    planets.forEach(function(planet) {
        planet.period = ratio * planet.period;
        planet.velocity = Vec3.multiply(ratio, planet.velocity);
        planet.resetTrails();
    });
    last_alpha = alpha;
}
var satelliteGame;
satelliteButton.addAction('onClick', function() {
if (satelliteGame && satelliteGame.isActive) {
MyAvatar.position = startingPosition;
satelliteGame.quitGame();
if (paused) {
resume();
}
} else {
pause();
satelliteGame = new SatelliteCreator();
satelliteGame.init();
}
});
var subpanels = [systemViewPanel, zoomPanel, settingsPanel];
// Hide every registered subpanel except `panel` (pass a value matching no
// subpanel to hide them all). Visibility of `panel` itself is untouched.
function hideSubpanelsExcept(panel) {
    for (var i = 0; i < subpanels.length; i++) {
        var subpanel = subpanels[i];
        if (subpanel != panel) {
            subpanel.setVisible(false);
        }
    }
}
// Wire `button` to toggle `panel`: clicking hides the other subpanels and
// flips this panel's visibility. Also registers a layout attachment that
// keeps the panel docked immediately to the left of its button.
function attachPanel(panel, button) {
button.addAction('onClick', function() {
hideSubpanelsExcept(panel);
panel.setVisible(!panel.isVisible());
UI.updateLayout();
})
UI.addAttachment(panel, button, function(target, rel) {
// Place the panel so its right edge (plus border and gap) abuts the button.
target.setPosition(
rel.position.x - (target.getWidth() + target.border.x + SUBPANEL_GAP),
rel.position.y - target.border.y
);
});
}
attachPanel(systemViewPanel, systemViewButton);
attachPanel(zoomPanel, zoomButton);
attachPanel(settingsPanel, settingsButton);
// Highlight a widget on hover: focused color while the mouse is over it,
// normal icon color once it leaves.
var addColorToggle = function(widget) {
widget.addAction('onMouseOver', function() {
widget.setColor(FOCUSED_COLOR);
});
widget.addAction('onMouseExit', function() {
widget.setColor(ICON_COLOR);
});
}
systemViewPanel.addAction('onMouseOver', function() {
hideSubpanelsExcept(systemViewPanel);
UI.updateLayout();
});
zoomButton.addAction('onClick', function() {
if (zoomView) {
restart();
}
hideSubpanelsExcept(zoomPanel);
UI.updateLayout();
});
UI.updateLayout();
stopButton.addAction('onClick', function() {
teardown();
Script.stop();
});
// Panel drag behavior
// (click + drag on border to drag)
// NOTE(review): this IIFE is invoked without a receiver, so in a
// non-strict engine `this` is the global object and startDrag/updateDrag/
// clearDrag become globals (updateDrag and clearDrag are called by the
// inputHandler below). In strict mode `this` would be undefined and these
// assignments would throw — confirm the target engine runs scripts in
// sloppy mode.
(function() {
// The drag action currently in progress, or null.
var dragged = null;
this.startDrag = function(dragAction) {
dragged = dragAction;
}
this.updateDrag = function(event) {
if (dragged) {
print("Update drag");
dragged.updateDrag(event);
}
}
this.clearDrag = function(event) {
// NOTE(review): braceless `if` — only the print is conditional; the
// `dragged = null` below always runs (likely intended, but worth a look).
if (dragged)
print("End drag");
dragged = null;
}
})();
var buttons = icons;
buttons.map(addColorToggle);
panels.map(function(panel) {
makeDraggable(panel, mainPanel);
});
// Cleanup script resources
// Cleanup script resources: remove all UI widgets and, if a satellite
// mini-game was ever started, shut it down too.
function teardown() {
    UI.teardown();
    // Only quit the game if one was actually created.
    satelliteGame && satelliteGame.quitGame();
};
UI.debug.setVisible(false);
var inputHandler = {
onMouseMove: function(event) {
updateDrag(event);
UI.handleMouseMove(event);
},
onMousePress: function(event) {
UI.handleMousePress(event);
},
onMouseRelease: function(event) {
clearDrag(event);
UI.handleMouseRelease(event);
},
onMouseDoublePress: function(event) {
UI.handleMouseDoublePress(event);
}
};
Controller.mousePressEvent.connect(inputHandler.onMousePress);
Controller.mouseMoveEvent.connect(inputHandler.onMouseMove);
Controller.mouseReleaseEvent.connect(inputHandler.onMouseRelease);
Controller.mouseDoublePressEvent.connect(inputHandler.onMouseDoublePress);
Controller.keyPressEvent.connect(keyPressEvent);
Script.scriptEnding.connect(teardown);

File diff suppressed because it is too large Load diff

251
examples/fireworks.js Normal file
View file

@ -0,0 +1,251 @@
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var fireSound = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Guns/GUN-SHOT2.raw");
var audioOptions = {
volume: 0.9,
position: Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()))
};
var DISTANCE_FROM_CAMERA = 7.0;
var bluePalette = [{
red: 0,
green: 206,
blue: 209
}, {
red: 173,
green: 216,
blue: 230
}, {
red: 0,
green: 191,
blue: 255
}];
var greenPalette = [{
red: 152,
green: 251,
blue: 152
}, {
red: 127,
green: 255,
blue: 0
}, {
red: 50,
green: 205,
blue: 50
}];
var redPalette = [{
red: 255,
green: 20,
blue: 147
}, {
red: 255,
green: 69,
blue: 0
}, {
red: 255,
green: 90,
blue: 120
}];
var COLOR_RED = {red: 255, green: 0, blue: 0 };
var COLOR_GREEN = {red: 0, green: 255, blue: 0};
var COLOR_BLUE = {red: 0, green: 0, blue: 255};
var iconsX = 700;
var iconsY = 660;
var ICON_SIZE = 30;
var redIcon = Overlays.addOverlay("text", {
backgroundColor: COLOR_RED,
x: iconsX,
y: iconsY,
width: ICON_SIZE,
height: ICON_SIZE,
alpha: 0.0,
backgroundAlpha: 1.0,
visible: true
});
var greenIcon = Overlays.addOverlay("text", {
backgroundColor: COLOR_GREEN,
x: iconsX + 50,
y: iconsY,
width: ICON_SIZE,
height: ICON_SIZE,
alpha: 0.0,
backgroundAlpha: 1.0,
visible: true
});
var blueIcon = Overlays.addOverlay("text", {
backgroundColor: COLOR_BLUE,
x: iconsX + 100,
y: iconsY,
width: ICON_SIZE,
height: ICON_SIZE,
alpha: 0.0,
backgroundAlpha: 1.0,
visible: true
});
var NUM_BURSTS = 11;
var SPEED = 6.0;
var rockets = [];
// Firework rocket: a small sphere entity launched mostly upward with a
// random horizontal drift, which explodes into particle bursts after a
// randomized frame timeout.
//   point        - world-space launch position
//   colorPalette - array of {red, green, blue} colors used for the bursts
Rocket = function(point, colorPalette) {
// NOTE(review): despite the original comment ("default to blue palette if
// no palette passed in"), no default is applied — callers must pass one.
this.colors = colorPalette;
this.point = point;
// Burst particle entities created on explosion; empty until then.
this.bursts = [];
this.burst = false;
// Particle emission parameters, randomized per rocket.
this.emitRate = randInt(80, 120);
this.emitStrength = randInt(5.0, 7.0);
this.rocket = Entities.addEntity({
type: "Sphere",
position: this.point,
dimensions: {
x: 0.07,
y: 0.07,
z: 0.07
},
color: {
red: 240,
green: 240,
blue: 240
}
});
// Settings for the burst particle animation: a single non-looping run.
this.animationSettings = JSON.stringify({
fps: 40,
frameIndex: 0,
running: true,
firstFrame: 0,
lastFrame: 20,
loop: false
});
// Mostly-vertical launch direction with random sideways drift.
this.direction = {
x: randFloat(-0.4, 0.4),
y: 1.0,
z: 0.0
}
// Frame counter and randomized frame count until detonation.
this.time = 0.0;
this.timeout = randInt(15, 40);
};
// Advance the rocket one tick: keep it moving along its launch direction
// and detonate once the frame counter exceeds the randomized timeout.
// `deltaTime` is accepted for signature compatibility but unused — the
// timer counts frames, not seconds.
Rocket.prototype.update = function(deltaTime) {
    this.time += 1;
    var thrust = Vec3.multiply(SPEED, this.direction);
    Entities.editEntity(this.rocket, { velocity: thrust });
    var currentPosition = Entities.getEntityProperties(this.rocket).position;
    if (this.time > this.timeout) {
        this.explode(currentPosition);
    }
};
// Detonate the rocket at `position`: play the firing sound, halt the
// rocket, spawn NUM_BURSTS particle-effect entities cycling through the
// palette, then mark this.burst and delete the rocket entity.
Rocket.prototype.explode = function(position) {
Audio.playSound(fireSound, audioOptions);
// Stop the rocket so the bursts appear where the explosion happened.
Entities.editEntity(this.rocket, {
velocity: {
x: 0,
y: 0,
z: 0
}
});
var colorIndex = 0;
for (var i = 0; i < NUM_BURSTS; ++i) {
var color = this.colors[colorIndex];
print(JSON.stringify(color));
this.bursts.push(Entities.addEntity({
type: "ParticleEffect",
animationSettings: this.animationSettings,
position: position,
textures: 'https://raw.githubusercontent.com/ericrius1/SantasLair/santa/assets/smokeparticle.png',
emitRate: this.emitRate,
emitStrength: this.emitStrength,
// Alternate bursts left/right of vertical via the (-1)^i factor.
emitDirection: {
x: Math.pow(-1, i) * randFloat(0.0, 1.4),
y: 1.0,
z: 0.0
},
color: color,
lifespan: 1.0,
visible: true,
locked: false
}));
// Walk the palette once, then stay on its last color.
if (colorIndex < this.colors.length - 1) {
colorIndex++;
}
}
this.burst = true;
Entities.deleteEntity(this.rocket);
};
//var lastLoudness;
var LOUDNESS_RADIUS_RATIO = 10;
// Per-frame driver: advance every rocket that has not yet burst.
// Fix: forward `deltaTime` to Rocket.prototype.update — the original
// silently dropped it even though the method declares the parameter
// (currently unused there, so behavior is unchanged but now consistent).
function update(deltaTime) {
    for (var i = 0; i < rockets.length; i++) {
        if (!rockets[i].burst) {
            rockets[i].update(deltaTime);
        }
    }
}
// Uniform random float in the half-open range [min, max).
function randFloat(min, max) {
    var span = max - min;
    return min + span * Math.random();
}
// Uniform random integer in the half-open range [min, max) — max excluded.
function randInt(min, max) {
    var span = max - min;
    return min + Math.floor(span * Math.random());
}
// Convert a 2D mouse event into a world-space point located
// DISTANCE_FROM_CAMERA meters along the pick ray from the camera.
function computeWorldPoint(event) {
var pickRay = Camera.computePickRay(event.x, event.y);
var addVector = Vec3.multiply(Vec3.normalize(pickRay.direction), DISTANCE_FROM_CAMERA);
return Vec3.sum(Camera.getPosition(), addVector);
}
// Launch a rocket when one of the three color icons is clicked, using the
// palette matching that icon. Clicks anywhere else are ignored.
function mousePressEvent(event) {
    var clickedOverlay = Overlays.getOverlayAtPoint({
        x: event.x,
        y: event.y
    });
    var palette = null;
    if (clickedOverlay === redIcon) {
        palette = redPalette;
    } else if (clickedOverlay === greenIcon) {
        palette = greenPalette;
    } else if (clickedOverlay === blueIcon) {
        palette = bluePalette;
    }
    if (palette !== null) {
        rockets.push(new Rocket(computeWorldPoint(event), palette));
    }
}
// Remove every overlay and entity this script created; intended to run on
// script shutdown.
function cleanup() {
    Overlays.deleteOverlay(redIcon);
    Overlays.deleteOverlay(greenIcon);
    Overlays.deleteOverlay(blueIcon);
    for (var i = 0; i < rockets.length; ++i) {
        Entities.deleteEntity(rockets[i].rocket);
        // Fix: iterate the rocket's actual bursts array. The original looped
        // a fixed NUM_BURSTS times, passing `undefined` to deleteEntity for
        // every rocket that never exploded (its bursts array is empty).
        for (var j = 0; j < rockets[i].bursts.length; ++j) {
            Entities.deleteEntity(rockets[i].bursts[j]);
        }
    }
}
Script.update.connect(update);
Script.scriptEnding.connect(cleanup);
Controller.mousePressEvent.connect(mousePressEvent);

View file

@ -5,6 +5,9 @@
// Created by Zander Otavka on 7/24/15
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Manage overlays with object oriented goodness, instead of ugly `Overlays.h` methods.
// Instead of:
//
@ -22,39 +25,59 @@
// ...
// billboard.destroy();
//
// See more on usage below.
// More on usage below. Examples in `examples/example/overlayPanelExample.js`.
//
// Note that including this file will delete Overlays from the global scope. All the
// functionality of Overlays is represented here, just better. If you try to use Overlays in
// tandem, there may be performance problems or nasty surprises.
// Note that including this file will delete `Overlays` from the global scope. All the
// functionality of `Overlays` is represented here, just better. If you try to use `Overlays`
// in tandem, there may be performance problems or nasty surprises.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
(function() {
// Delete `Overlays` from the global scope.
var Overlays = this.Overlays;
delete this.Overlays;
var ABSTRACT = null;
var overlays = {};
var panels = {};
var overlayTypes;
var overlayTypes = {};
// Abstract overlay types
var Overlay,
Overlay2D,
Base3DOverlay,
Planar3DOverlay,
Billboard3DOverlay,
Volume3DOverlay;
// Multiple inheritance mixins
var PanelAttachable,
Billboardable;
// Build a prototype-chained overlay class.
//
// superclass: constructor to inherit from.
// type:       Overlays type string, or ABSTRACT (null) for an abstract base
//             whose own subclasses supply the concrete type.
// properties: names exposed as live accessors that read and write through
//             the Overlays API rather than caching values locally.
//
// Returns the new constructor. Concrete (non-abstract) classes are also
// registered in overlayTypes under their type string.
function generateOverlayClass(superclass, type, properties) {
var that;
if (type == ABSTRACT) {
// Abstract base: still forwards an explicit type from its subclasses.
that = function(type, params) {
superclass.call(this, type, params);
};
} else {
// Concrete class: the type is baked in; callers pass only params.
that = function(params) {
superclass.call(this, type, params);
};
overlayTypes[type] = that;
}
// Classic prototype inheritance; restore constructor after reassigning
// the prototype object.
that.prototype = new superclass();
that.prototype.constructor = that;
properties.forEach(function(prop) {
Object.defineProperty(that.prototype, prop, {
// Reads go straight to the engine, so values are never stale.
get: function() {
return Overlays.getProperty(this._id, prop);
},
// Writes funnel through setProperties so a single edit path is used.
set: function(newValue) {
var keyValuePair = {};
keyValuePair[prop] = newValue;
this.setProperties(keyValuePair);
},
configurable: false
});
});
return that;
}
//
// Create a new JavaScript object for an overlay of given ID.
@ -137,249 +160,131 @@
}
//
// Perform global scoped operations on overlays, such as finding by ray intersection.
//
OverlayManager = {
findOnRay: function(pickRay, knownOverlaysOnly, searchList) {
var rayPickResult = Overlays.findRayIntersection(pickRay);
if (rayPickResult.intersects) {
return findOverlay(rayPickResult.overlayID, knownOverlaysOnly, searchList);
}
return null;
},
findAtPoint: function(point, knownOverlaysOnly, searchList) {
var foundID = Overlays.getOverlayAtPoint(point);
if (foundID) {
return findOverlay(foundID, knownOverlaysOnly, searchList);
var Overlay = (function() {
var that = function(type, params) {
if (type && params) {
this._id = Overlays.addOverlay(type, params);
overlays[this._id] = this;
} else {
var pickRay = Camera.computePickRay(point.x, point.y);
return OverlayManager.findOnRay(pickRay, knownOverlaysOnly, searchList);
this._id = 0;
}
},
makeSearchList: function(array) {
var searchList = {};
array.forEach(function(object) {
searchList[object._id] = object;
});
return searchList;
}
};
};
that.prototype.constructor = that;
//
// Object oriented abstraction layer for overlays.
//
// Usage:
// // Create an overlay
// var billboard = new Image3DOverlay({
// visible: true,
// isFacingAvatar: true,
// ignoreRayIntersections: false
// });
//
// // Get a property
// var isVisible = billboard.visible;
//
// // Set a single property
// billboard.position = { x: 1, y: 3, z: 2 };
//
// // Set multiple properties at the same time
// billboard.setProperties({
// url: "http://images.com/overlayImage.jpg",
// dimensions: { x: 2, y: 2 }
// });
//
// // Clone an overlay
// var clonedBillboard = billboard.clone();
//
// // Remove an overlay from the world
// billboard.destroy();
//
// // Remember, there is a poor orphaned JavaScript object left behind. You should
// // remove any references to it so you don't accidentally try to modify an overlay that
// // isn't there.
// billboard = undefined;
//
(function() {
var ABSTRACT = null;
overlayTypes = {};
function generateOverlayClass(superclass, type, properties) {
var that;
if (type == ABSTRACT) {
that = function(type, params) {
superclass.call(this, type, params);
};
} else {
that = function(params) {
superclass.call(this, type, params);
};
overlayTypes[type] = that;
Object.defineProperty(that.prototype, "isLoaded", {
get: function() {
return Overlays.isLoaded(this._id);
}
});
that.prototype = new superclass();
that.prototype.constructor = that;
Object.defineProperty(that.prototype, "parentPanel", {
get: function() {
return findPanel(Overlays.getParentPanel(this._id));
}
});
properties.forEach(function(prop) {
Object.defineProperty(that.prototype, prop, {
get: function() {
return Overlays.getProperty(this._id, prop);
},
set: function(newValue) {
var keyValuePair = {};
keyValuePair[prop] = newValue;
this.setProperties(keyValuePair);
},
configurable: false
});
});
that.prototype.getTextSize = function(text) {
return Overlays.textSize(this._id, text);
};
return that;
}
that.prototype.setProperties = function(properties) {
Overlays.editOverlay(this._id, properties);
};
Overlay = (function() {
var that = function(type, params) {
if (type && params) {
this._id = Overlays.addOverlay(type, params);
overlays[this._id] = this;
} else {
this._id = 0;
}
};
that.prototype.clone = function() {
return makeOverlayFromId(Overlays.cloneOverlay(this._id));
};
that.prototype.constructor = that;
that.prototype.destroy = function() {
Overlays.deleteOverlay(this._id);
};
Object.defineProperty(that.prototype, "isLoaded", {
get: function() {
return Overlays.isLoaded(this._id);
}
});
that.prototype.isPanelAttachable = function() {
return false;
};
Object.defineProperty(that.prototype, "parentPanel", {
get: function() {
return findPanel(Overlays.getParentPanel(this._id));
}
});
that.prototype.getTextSize = function(text) {
return Overlays.textSize(this._id, text);
};
that.prototype.setProperties = function(properties) {
Overlays.editOverlay(this._id, properties);
};
that.prototype.clone = function() {
return makeOverlayFromId(Overlays.cloneOverlay(this._id));
};
that.prototype.destroy = function() {
Overlays.deleteOverlay(this._id);
};
that.prototype.isPanelAttachable = function() {
return false;
};
return generateOverlayClass(that, ABSTRACT, [
"alpha", "glowLevel", "pulseMax", "pulseMin", "pulsePeriod", "glowLevelPulse",
"alphaPulse", "colorPulse", "visible", "anchor"
]);
})();
// Supports multiple inheritance of properties. Just `concat` them onto the end of the
// properties list.
PanelAttachable = ["offsetPosition", "offsetRotation", "offsetScale"];
Billboardable = ["isFacingAvatar"];
Overlay2D = generateOverlayClass(Overlay, ABSTRACT, [
"bounds", "x", "y", "width", "height"
]);
Base3DOverlay = generateOverlayClass(Overlay, ABSTRACT, [
"position", "lineWidth", "rotation", "isSolid", "isFilled", "isWire", "isDashedLine",
"ignoreRayIntersection", "drawInFront", "drawOnHUD"
]);
Planar3DOverlay = generateOverlayClass(Base3DOverlay, ABSTRACT, [
"dimensions"
]);
Billboard3DOverlay = generateOverlayClass(Planar3DOverlay, ABSTRACT, [
].concat(PanelAttachable).concat(Billboardable));
Billboard3DOverlay.prototype.isPanelAttachable = function() { return true; };
Volume3DOverlay = generateOverlayClass(Base3DOverlay, ABSTRACT, [
"dimensions"
]);
generateOverlayClass(Overlay2D, "image", [
"subImage", "imageURL"
]);
generateOverlayClass(Billboard3DOverlay, "image3d", [
"url", "subImage"
]);
generateOverlayClass(Overlay2D, "text", [
"font", "text", "backgroundColor", "backgroundAlpha", "leftMargin", "topMargin"
]);
generateOverlayClass(Billboard3DOverlay, "text3d", [
"text", "backgroundColor", "backgroundAlpha", "lineHeight", "leftMargin", "topMargin",
"rightMargin", "bottomMargin"
]);
generateOverlayClass(Volume3DOverlay, "cube", [
"borderSize"
]);
generateOverlayClass(Volume3DOverlay, "sphere", [
]);
generateOverlayClass(Planar3DOverlay, "circle3d", [
"startAt", "endAt", "outerRadius", "innerRadius", "hasTickMarks",
"majorTickMarksAngle", "minorTickMarksAngle", "majorTickMarksLength",
"minorTickMarksLength", "majorTickMarksColor", "minorTickMarksColor"
]);
generateOverlayClass(Planar3DOverlay, "rectangle3d", [
]);
generateOverlayClass(Base3DOverlay, "line3d", [
"start", "end"
]);
generateOverlayClass(Planar3DOverlay, "grid", [
"minorGridWidth", "majorGridEvery"
]);
generateOverlayClass(Volume3DOverlay, "localmodels", [
]);
generateOverlayClass(Volume3DOverlay, "model", [
"url", "dimensions", "textures"
return generateOverlayClass(that, ABSTRACT, [
"alpha", "glowLevel", "pulseMax", "pulseMin", "pulsePeriod", "glowLevelPulse",
"alphaPulse", "colorPulse", "visible", "anchor"
]);
})();
ImageOverlay = overlayTypes["image"];
Image3DOverlay = overlayTypes["image3d"];
TextOverlay = overlayTypes["text"];
Text3DOverlay = overlayTypes["text3d"];
Cube3DOverlay = overlayTypes["cube"];
Sphere3DOverlay = overlayTypes["sphere"];
Circle3DOverlay = overlayTypes["circle3d"];
Rectangle3DOverlay = overlayTypes["rectangle3d"];
Line3DOverlay = overlayTypes["line3d"];
Grid3DOverlay = overlayTypes["grid"];
LocalModelsOverlay = overlayTypes["localmodels"];
ModelOverlay = overlayTypes["model"];
// Supports multiple inheritance of properties. Just `concat` them onto the end of the
// properties list.
var PanelAttachable = ["offsetPosition", "offsetRotation", "offsetScale"];
var Billboardable = ["isFacingAvatar"];
var Overlay2D = generateOverlayClass(Overlay, ABSTRACT, [
"bounds", "x", "y", "width", "height"
]);
var Base3DOverlay = generateOverlayClass(Overlay, ABSTRACT, [
"position", "lineWidth", "rotation", "isSolid", "isFilled", "isWire", "isDashedLine",
"ignoreRayIntersection", "drawInFront", "drawOnHUD"
]);
var Planar3DOverlay = generateOverlayClass(Base3DOverlay, ABSTRACT, [
"dimensions"
]);
var Billboard3DOverlay = generateOverlayClass(Planar3DOverlay, ABSTRACT, [
].concat(PanelAttachable).concat(Billboardable));
Billboard3DOverlay.prototype.isPanelAttachable = function() { return true; };
var Volume3DOverlay = generateOverlayClass(Base3DOverlay, ABSTRACT, [
"dimensions"
]);
ImageOverlay = generateOverlayClass(Overlay2D, "image", [
"subImage", "imageURL"
]);
Image3DOverlay = generateOverlayClass(Billboard3DOverlay, "image3d", [
"url", "subImage"
]);
TextOverlay = generateOverlayClass(Overlay2D, "text", [
"font", "text", "backgroundColor", "backgroundAlpha", "leftMargin", "topMargin"
]);
Text3DOverlay = generateOverlayClass(Billboard3DOverlay, "text3d", [
"text", "backgroundColor", "backgroundAlpha", "lineHeight", "leftMargin", "topMargin",
"rightMargin", "bottomMargin"
]);
Cube3DOverlay = generateOverlayClass(Volume3DOverlay, "cube", [
"borderSize"
]);
Sphere3DOverlay = generateOverlayClass(Volume3DOverlay, "sphere", [
]);
Circle3DOverlay = generateOverlayClass(Planar3DOverlay, "circle3d", [
"startAt", "endAt", "outerRadius", "innerRadius", "hasTickMarks",
"majorTickMarksAngle", "minorTickMarksAngle", "majorTickMarksLength",
"minorTickMarksLength", "majorTickMarksColor", "minorTickMarksColor"
]);
Rectangle3DOverlay = generateOverlayClass(Planar3DOverlay, "rectangle3d", [
]);
Line3DOverlay = generateOverlayClass(Base3DOverlay, "line3d", [
"start", "end"
]);
Grid3DOverlay = generateOverlayClass(Planar3DOverlay, "grid", [
"minorGridWidth", "majorGridEvery"
]);
LocalModelsOverlay = generateOverlayClass(Volume3DOverlay, "localmodels", [
]);
ModelOverlay = generateOverlayClass(Volume3DOverlay, "model", [
"url", "dimensions", "textures"
]);
//
// Object oriented abstraction layer for panels.
//
OverlayPanel = (function() {
var that = function(params) {
this._id = Overlays.addPanel(params);
@ -455,6 +360,35 @@
})();
OverlayManager = {
findOnRay: function(pickRay, knownOverlaysOnly, searchList) {
var rayPickResult = Overlays.findRayIntersection(pickRay);
if (rayPickResult.intersects) {
return findOverlay(rayPickResult.overlayID, knownOverlaysOnly, searchList);
}
return null;
},
findAtPoint: function(point, knownOverlaysOnly, searchList) {
var foundID = Overlays.getOverlayAtPoint(point);
if (foundID) {
return findOverlay(foundID, knownOverlaysOnly, searchList);
} else {
var pickRay = Camera.computePickRay(point.x, point.y);
return OverlayManager.findOnRay(pickRay, knownOverlaysOnly, searchList);
}
},
makeSearchList: function(array) {
var searchList = {};
array.forEach(function(object) {
searchList[object._id] = object;
});
return searchList;
}
};
// Threadsafe cleanup of JavaScript objects.
function onOverlayDeleted(id) {
if (id in overlays) {
if (overlays[id].parentPanel) {

View file

@ -501,7 +501,7 @@ Box.prototype.destroy = function () {
}
}
// True when this Box owns the overlay with the given id.
// The null guard stays in: a Box without an overlay must report "no match"
// instead of throwing on this.overlay.getId(). (The diff residue here kept
// both the guarded and the guard-commented-out variants of this line.)
Box.prototype.hasOverlay = function (overlayId) {
    return this.overlay && this.overlay.getId() === overlayId;
}
Box.prototype.getOverlay = function () {
return this.overlay;
@ -615,7 +615,7 @@ Slider.prototype.toString = function () {
}
Slider.prototype.applyLayout = function () {
if (!this.slider) {
ui.complain("Slider.applyLayout on " + this + " failed");
// ui.complain("Slider.applyLayout on " + this + " failed");
return;
}
var val = (this.value - this.minValue) / (this.maxValue - this.minValue);
@ -654,6 +654,7 @@ var Checkbox = UI.Checkbox = function (properties) {
this.position.x + (this.width - this.checkMark.width) * 0.5,
this.position.y + (this.height - this.checkMark.height) * 0.5
);
this.checkMark.parent = this;
this.onValueChanged = properties.onValueChanged || function () {};
@ -919,9 +920,16 @@ var getFocusedWidget = function (event) {
var dispatchEvent = function (action, event, widget) {
function dispatchActions (actions) {
var dispatchedActions = false;
ui.logEvent("dispatching to [" + actions.join(", ") + "]");
actions.forEach(function(action) {
action(event, widget);
ui.logEvent("dispatched to " + action);
dispatchedActions = true;
});
if (!dispatchedActions) {
// ui.logEvent("No actions to dispatch");
}
}
if (widget.actions[action]) {
@ -963,7 +971,7 @@ UI.handleMouseMove = function (event, canStartDrag) {
}
UI.handleMousePress = function (event) {
print("Mouse clicked");
// print("Mouse clicked");
UI.handleMouseMove(event);
ui.clickedWidget = ui.focusedWidget;
if (ui.clickedWidget) {
@ -971,8 +979,18 @@ UI.handleMousePress = function (event) {
}
}
UI.handleMouseDoublePress = function (event) {
// print("DOUBLE CLICK!");
var focused = getFocusedWidget(event);
UI.handleMouseMove(event);
if (focused) {
// print("dispatched onDoubleClick");
dispatchEvent('onDoubleClick', event, focused);
}
}
UI.handleMouseRelease = function (event) {
print("Mouse released");
// print("Mouse released");
if (ui.draggedWidget) {
dispatchEvent('onDragEnd', event, ui.draggedWidget);

155
examples/particleDance.js Normal file
View file

@ -0,0 +1,155 @@
(function() {
var NUM_BURSTS = 3;
var NUM_EMITTERS_PER_BURST = 11;
var RANGE = 5.0;
var AUDIO_RANGE = 0.5 * RANGE;
var DIST_BETWEEN_BURSTS = 1.0;
var LOUDNESS_RADIUS_RATIO = 10;
var TEXTURE_PATH = 'https://raw.githubusercontent.com/ericrius1/SantasLair/santa/assets/smokeparticle.png';
var cameraAxis = Quat.getFront(Camera.getOrientation());
var center = Vec3.sum(Camera.getPosition(), Vec3.multiply(RANGE, cameraAxis));
var audioPosition = Vec3.sum(Camera.getPosition(), Vec3.multiply(AUDIO_RANGE, cameraAxis));
var song = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/songs/Made%20In%20Heights%20-%20Forgiveness.wav");
var audioOptions = {
volume: 0.9, position: audioPosition
};
var DISTANCE_FROM_CAMERA = 7.0;
var colorPalette = [{
red: 0,
green: 206,
blue: 209
}, {
red: 173,
green: 216,
blue: 230
}, {
red: 0,
green: 191,
blue: 255
}];
var bursts = [];
var audioStats;
// A Burst is one cluster of particle emitters anchored at `point`, marked by
// a small white base sphere. Creating the first Burst also starts the song.
Burst = function(point) {
    // Lazily start the soundtrack exactly once, on first construction.
    if (!audioStats) {
        audioStats = Audio.playSound(song, audioOptions);
    }
    this.point = point;
    this.emitters = [];
    this.emitRate = randInt(80, 120);
    // NOTE(review): randInt() floors its result, so these float bounds only
    // ever yield 4 or 5 — randFloat() may have been intended. Confirm.
    this.emitStrength = randInt(4.0, 6.0);
    this.animationSettings = JSON.stringify({
        fps: 10,
        frameIndex: 0,
        running: true,
        firstFrame: 0,
        lastFrame: 50,
        loop: true
    });
    // NOTE(review): this.direction is never read in the visible code here.
    this.direction = {
        x: randFloat(-0.3, 0.3),
        y: 1.0,
        z: 0.0
    };
    // Small white sphere marking the burst's origin.
    this.base = Entities.addEntity({
        type: "Sphere",
        position: this.point,
        dimensions: {
            x: 0.05,
            y: 0.05,
            z: 0.05
        },
        color: {
            red: 240,
            green: 240,
            blue: 240
        }
    });
    for (var i = 0; i < NUM_EMITTERS_PER_BURST; ++i) {
        // randInt's upper bound is exclusive, so pass the full length; the
        // previous `length - 1` bound could never select the last color.
        var colorIndex = randInt(0, colorPalette.length);
        var color = colorPalette[colorIndex];
        this.emitters.push(Entities.addEntity({
            type: "ParticleEffect",
            animationSettings: this.animationSettings,
            position: this.point,
            textures: TEXTURE_PATH,
            emitRate: this.emitRate,
            emitStrength: this.emitStrength,
            // Alternate emitters left/right of straight up.
            emitDirection: {
                x: Math.pow(-1, i) * randFloat(0.0, 0.4),
                y: 1.0,
                z: 0.0
            },
            color: color,
            lifespan: 1.0,
            visible: true,
            locked: false
        }));
    }
};
// Lay the bursts out alternating left/right of center, each step closer in.
var nextPosition = center;
var posOrNeg = -1;
for (var i = 0; i < NUM_BURSTS; ++i) {
    posOrNeg *= -1;
    bursts.push(new Burst(nextPosition));
    var offset = {
        x: RANGE / (i + 2) * posOrNeg,
        y: 0,
        z: 0
    };
    // Plain assignment here: the original re-declared `var nextPosition`
    // inside the loop, which hoisting made harmless but misleading.
    nextPosition = Vec3.sum(nextPosition, offset);
}
// Per-frame hook: scale every emitter's particle size with the song's
// current loudness (read live each frame from the playback stats).
function update(deltaTime) {
    for (var i = 0; i < NUM_BURSTS; i++) {
        if (!(audioStats && audioStats.loudness > 0.0)) {
            continue;
        }
        for (var j = 0; j < NUM_EMITTERS_PER_BURST; ++j) {
            Entities.editEntity(bursts[i].emitters[j], {
                particleRadius: audioStats.loudness / LOUDNESS_RADIUS_RATIO
            });
        }
    }
}
// Uniform random float in the half-open range [min, max).
function randFloat(min, max) {
    var width = max - min;
    return min + width * Math.random();
}
// Uniform random integer in [min, max) — the upper bound is excluded.
function randInt(min, max) {
    var width = max - min;
    return min + Math.floor(width * Math.random());
}
// Shutdown: delete each burst's base sphere and all of its emitters,
// then stop the soundtrack.
this.cleanup = function() {
    bursts.forEach(function(burst) {
        Entities.deleteEntity(burst.base);
        burst.emitters.forEach(function(emitter) {
            Entities.deleteEntity(emitter);
        });
    });
    Audio.stop();
}
Script.update.connect(update);
})();
Script.scriptEnding.connect(cleanup);

View file

@ -1360,7 +1360,7 @@ var CHECK_MARK_COLOR = {
this.nextY = this.y + this.getHeight();
var item = new CollapsablePanelItem(name, this.x, this.nextY, textWidth, rawHeight, panel);
var item = new CollapsablePanelItem(name, this.x, this.nextY, textWidth, rawHeight);
item.isSubPanel = true;
this.nextY += 1.5 * item.height;

View file

@ -77,6 +77,7 @@
#include <NetworkAccessManager.h>
#include <NetworkingConstants.h>
#include <ObjectMotionState.h>
#include <OffscreenGlCanvas.h>
#include <OctalCode.h>
#include <OctreeSceneStats.h>
#include <udt/PacketHeaders.h>
@ -97,6 +98,8 @@
#include <input-plugins/UserInputMapper.h>
#include <VrMenu.h>
#include <RenderableWebEntityItem.h>
#include "AudioClient.h"
#include "DiscoverabilityManager.h"
#include "GLCanvas.h"
@ -109,9 +112,7 @@
#include "InterfaceActionFactory.h"
#include "avatar/AvatarManager.h"
#include "audio/AudioScope.h"
#include "devices/DdeFaceTracker.h"
#include "devices/EyeTracker.h"
#include "devices/Faceshift.h"
@ -145,9 +146,10 @@
#include "ui/AddressBarDialog.h"
#include "ui/UpdateDialog.h"
//#include <qopenglcontext.h>
#include "ui/overlays/Cube3DOverlay.h"
// ON WIndows PC, NVidia Optimus laptop, we want to enable NVIDIA GPU
// FIXME seems to be broken.
#if defined(Q_OS_WIN)
extern "C" {
_declspec(dllexport) DWORD NvOptimusEnablement = 0x00000001;
@ -170,7 +172,6 @@ public:
void call() { _fun(); }
};
using namespace std;
// Starfield information
@ -297,6 +298,13 @@ bool setupEssentials(int& argc, char** argv) {
return true;
}
// FIXME move to header, or better yet, design some kind of UI manager
// to take care of highlighting keyboard focused items, rather than
// continuing to overburden Application.cpp
Cube3DOverlay* _keyboardFocusHighlight{ nullptr };
int _keyboardFocusHighlightID{ -1 };
Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
QApplication(argc, argv),
_dependencyManagerIsSetup(setupEssentials(argc, argv)),
@ -670,6 +678,49 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
auto& packetReceiver = nodeList->getPacketReceiver();
packetReceiver.registerListener(PacketType::DomainConnectionDenied, this, "handleDomainConnectionDeniedPacket");
// If the user clicks an an entity, we will check that it's an unlocked web entity, and if so, set the focus to it
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
connect(entityScriptingInterface.data(), &EntityScriptingInterface::clickDownOnEntity,
[this, entityScriptingInterface](const EntityItemID& entityItemID, const MouseEvent& event) {
if (_keyboardFocusedItem != entityItemID) {
_keyboardFocusedItem = UNKNOWN_ENTITY_ID;
auto properties = entityScriptingInterface->getEntityProperties(entityItemID);
if (EntityTypes::Web == properties.getType() && !properties.getLocked()) {
auto entity = entityScriptingInterface->getEntityTree()->findEntityByID(entityItemID);
RenderableWebEntityItem* webEntity = dynamic_cast<RenderableWebEntityItem*>(entity.get());
if (webEntity) {
webEntity->setProxyWindow(_window->windowHandle());
_keyboardFocusedItem = entityItemID;
_lastAcceptedKeyPress = usecTimestampNow();
if (_keyboardFocusHighlightID < 0 || !getOverlays().isAddedOverlay(_keyboardFocusHighlightID)) {
_keyboardFocusHighlight = new Cube3DOverlay();
_keyboardFocusHighlight->setAlpha(1.0f);
_keyboardFocusHighlight->setBorderSize(1.0f);
_keyboardFocusHighlight->setColor({ 0xFF, 0xEF, 0x00 });
_keyboardFocusHighlight->setIsSolid(false);
_keyboardFocusHighlight->setPulseMin(0.5);
_keyboardFocusHighlight->setPulseMax(1.0);
_keyboardFocusHighlight->setColorPulse(1.0);
_keyboardFocusHighlight->setIgnoreRayIntersection(true);
_keyboardFocusHighlight->setDrawInFront(true);
}
_keyboardFocusHighlight->setRotation(webEntity->getRotation());
_keyboardFocusHighlight->setPosition(webEntity->getPosition());
_keyboardFocusHighlight->setDimensions(webEntity->getDimensions() * 1.05f);
_keyboardFocusHighlight->setVisible(true);
_keyboardFocusHighlightID = getOverlays().addOverlay(_keyboardFocusHighlight);
}
}
}
});
// If the user clicks somewhere where there is NO entity at all, we will release focus
connect(getEntities(), &EntityTreeRenderer::mousePressOffEntity,
[=](const RayToEntityIntersectionResult& entityItemID, const QMouseEvent* event, unsigned int deviceId) {
_keyboardFocusedItem = UNKNOWN_ENTITY_ID;
_keyboardFocusHighlight->setVisible(false);
});
}
void Application::aboutToQuit() {
@ -680,6 +731,11 @@ void Application::aboutToQuit() {
}
void Application::cleanupBeforeQuit() {
if (_keyboardFocusHighlightID > 0) {
getOverlays().deleteOverlay(_keyboardFocusHighlightID);
_keyboardFocusHighlightID = -1;
}
_keyboardFocusHighlight = nullptr;
_entities.clear(); // this will allow entity scripts to properly shutdown
@ -853,6 +909,10 @@ void Application::initializeGL() {
InfoView::show(INFO_HELP_PATH, true);
}
QWindow* getProxyWindow() {
return qApp->getWindow()->windowHandle();
}
void Application::initializeUi() {
AddressBarDialog::registerType();
ErrorDialog::registerType();
@ -1231,6 +1291,29 @@ bool Application::event(QEvent* event) {
return true;
}
if (!_keyboardFocusedItem.isInvalidID()) {
switch (event->type()) {
case QEvent::KeyPress:
case QEvent::KeyRelease: {
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
auto entity = entityScriptingInterface->getEntityTree()->findEntityByID(_keyboardFocusedItem);
RenderableWebEntityItem* webEntity = dynamic_cast<RenderableWebEntityItem*>(entity.get());
if (webEntity && webEntity->getEventHandler()) {
event->setAccepted(false);
QCoreApplication::sendEvent(webEntity->getEventHandler(), event);
if (event->isAccepted()) {
_lastAcceptedKeyPress = usecTimestampNow();
return true;
}
}
break;
}
default:
break;
}
}
switch (event->type()) {
case QEvent::MouseMove:
mouseMoveEvent((QMouseEvent*)event);
@ -1936,6 +2019,15 @@ void Application::idle() {
return; // bail early, nothing to do here.
}
// Drop focus from _keyboardFocusedItem if no keyboard messages for 30 seconds
if (!_keyboardFocusedItem.isInvalidID()) {
const quint64 LOSE_FOCUS_AFTER_ELAPSED_TIME = 30 * USECS_PER_SECOND; // if idle for 30 seconds, drop focus
quint64 elapsedSinceAcceptedKeyPress = usecTimestampNow() - _lastAcceptedKeyPress;
if (elapsedSinceAcceptedKeyPress > LOSE_FOCUS_AFTER_ELAPSED_TIME) {
_keyboardFocusedItem = UNKNOWN_ENTITY_ID;
}
}
// Normally we check PipelineWarnings, but since idle will often take more than 10ms we only show these idle timing
// details if we're in ExtraDebugging mode. However, the ::update() and its subcomponents will show their timing
// details normally.
@ -2352,6 +2444,10 @@ void Application::updateMouseRay() {
}
}
// Called during Application::update immediately before AvatarManager::updateMyAvatar, updating my data that is then sent to everyone.
// (Maybe this code should be moved there?)
// The principal result is to call updateLookAtTargetAvatar() and then setLookAtPosition().
// Note that it is called BEFORE we update position or joints based on sensors, etc.
void Application::updateMyAvatarLookAtPosition() {
PerformanceTimer perfTimer("lookAt");
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
@ -4679,6 +4775,11 @@ void Application::updateDisplayMode() {
if (oldDisplayPlugin) {
oldDisplayPlugin->deactivate();
_offscreenContext->makeCurrent();
// if the old plugin was HMD and the new plugin is not HMD, then hide our hmdtools
if (oldDisplayPlugin->isHmd() && !newDisplayPlugin->isHmd()) {
DependencyManager::get<DialogsManager>()->hmdTools(false);
}
}
emit activeDisplayPluginChanged();
resetSensors();

View file

@ -678,6 +678,9 @@ private:
bool _overlayEnabled = true;
QRect _savedGeometry;
DialogsManagerScriptingInterface* _dialogsManagerScriptingInterface = new DialogsManagerScriptingInterface();
EntityItemID _keyboardFocusedItem;
quint64 _lastAcceptedKeyPress = 0;
};
#endif // hifi_Application_h

View file

@ -289,16 +289,6 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::CenterPlayerInView,
0, false, qApp, SLOT(rotationModeChanged()));
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::HMDTools,
#ifdef Q_OS_MAC
Qt::META | Qt::Key_H,
#else
Qt::CTRL | Qt::Key_H,
#endif
false,
dialogsManager.data(),
SLOT(hmdTools(bool)));
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::TurnWithHead, 0, false);
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::StandingHMDSensorMode, 0, false,

View file

@ -199,7 +199,6 @@ namespace MenuOption {
const QString FullscreenMirror = "Fullscreen Mirror";
const QString GlowWhenSpeaking = "Glow When Speaking";
const QString HandMouseInput = "Enable Hand Mouse Input";
const QString HMDTools = "HMD Tools";
const QString IncreaseAvatarSize = "Increase Avatar Size";
const QString IndependentMode = "Independent Mode";
const QString InputMenu = "Avatar>Input Devices";

View file

@ -344,6 +344,20 @@ glm::quat Head::getFinalOrientationInLocalFrame() const {
return glm::quat(glm::radians(glm::vec3(getFinalPitch(), getFinalYaw(), getFinalRoll() )));
}
// Everyone else's head keeps track of a lookAtPosition that everybody sees the same, and refers to where that head
// is looking in model space -- e.g., at someone's eyeball, or between their eyes, or mouth, etc. Everyon's Interface
// will have the same value for the lookAtPosition of any given head.
//
// Everyone else's head also keeps track of a correctedLookAtPosition that may be different for the same head within
// different Interfaces. If that head is not looking at me, the correctedLookAtPosition is the same as the lookAtPosition.
// However, if that head is looking at me, then I will attempt to adjust the lookAtPosition by the difference between
// my (singular) eye position and my actual camera position. This adjustment is used on their eyeballs during rendering
// (and also on any lookAt vector display for that head, during rendering). Note that:
// 1. this adjustment can be made directly to the other head's eyeball joints, because we won't be send their joint information to others.
// 2. the corrected position is a separate ivar, so the common/uncorrected value is still available
//
// There is a pun here: The two lookAtPositions will always be the same for my own avatar in my own Interface, because I
// will not be looking at myself. (Even in a mirror, I will be looking at the camera.)
glm::vec3 Head::getCorrectedLookAtPosition() {
if (isLookingAtMe()) {
return _correctedLookAtPosition;
@ -364,7 +378,7 @@ void Head::setCorrectedLookAtPosition(glm::vec3 correctedLookAtPosition) {
// Reports whether this head is looking at the local user. To smooth over
// brief signal outages (e.g. during avatar movement), keep returning true
// for a short grace period after the last positive signal.
bool Head::isLookingAtMe() {
    quint64 now = usecTimestampNow();
    // Grace window: five frames at 60 fps, in microseconds. The merge residue
    // left two conflicting definitions of this constant in one scope (which
    // would not compile); this keeps the newer frame-based value.
    const quint64 LOOKING_AT_ME_GAP_ALLOWED = (5 * 1000 * 1000) / 60;
    return _isLookingAtMe || (now - _wasLastLookingAtMe) < LOOKING_AT_ME_GAP_ALLOWED;
}

View file

@ -97,7 +97,7 @@ void SkeletonModel::initJointStates(QVector<JointState> states) {
}
const float PALM_PRIORITY = DEFAULT_PRIORITY;
// Called within Model::simulate call, below.
void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
if (_owningAvatar->isMyAvatar()) {
_rig->computeMotionAnimationState(deltaTime, _owningAvatar->getPosition(), _owningAvatar->getVelocity(), _owningAvatar->getOrientation());
@ -105,26 +105,43 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
Model::updateRig(deltaTime, parentTransform);
if (_owningAvatar->isMyAvatar()) {
const FBXGeometry& geometry = _geometry->getFBXGeometry();
Head* head = _owningAvatar->getHead();
Rig::HeadParameters params;
params.modelRotation = getRotation();
params.modelTranslation = getTranslation();
params.leanSideways = _owningAvatar->getHead()->getFinalLeanSideways();
params.leanForward = _owningAvatar->getHead()->getFinalLeanForward();
params.torsoTwist = _owningAvatar->getHead()->getTorsoTwist();
params.localHeadOrientation = _owningAvatar->getHead()->getFinalOrientationInLocalFrame();
params.worldHeadOrientation = _owningAvatar->getHead()->getFinalOrientationInWorldFrame();
params.eyeLookAt = _owningAvatar->getHead()->getLookAtPosition();
params.eyeSaccade = _owningAvatar->getHead()->getSaccade();
params.leanSideways = head->getFinalLeanSideways();
params.leanForward = head->getFinalLeanForward();
params.torsoTwist = head->getTorsoTwist();
params.localHeadOrientation = head->getFinalOrientationInLocalFrame();
params.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
params.eyeLookAt = head->getLookAtPosition();
params.eyeSaccade = head->getSaccade();
params.leanJointIndex = geometry.leanJointIndex;
params.neckJointIndex = geometry.neckJointIndex;
params.leftEyeJointIndex = geometry.leftEyeJointIndex;
params.rightEyeJointIndex = geometry.rightEyeJointIndex;
_rig->updateFromHeadParameters(params);
} else {
// This is a little more work than we really want.
//
// Other avatars joint, including their eyes, should already be set just like any other joints
// from the wire data. But when looking at me, we want the eyes to use the corrected lookAt.
//
// Thus this should really only be ... else if (_owningAvatar->getHead()->isLookingAtMe()) {...
// However, in the !isLookingAtMe case, the eyes aren't rotating the way they should right now.
// (They latch their looking at me position.) We will revisit that as priorities allow.
const FBXGeometry& geometry = _geometry->getFBXGeometry();
Head* head = _owningAvatar->getHead();
_rig->updateEyeJoints(geometry.leftEyeJointIndex, geometry.rightEyeJointIndex,
getTranslation(), getRotation(),
head->getFinalOrientationInWorldFrame(), head->getCorrectedLookAtPosition());
}
}
// Called by Avatar::simulate after it has set the joint states (fullUpdate true if changed),
// but just before head has been simulated.
void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
setTranslation(_owningAvatar->getSkeletonPosition());
static const glm::quat refOrientation = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));

View file

@ -153,21 +153,20 @@ void EyeTracker::onStreamStarted() {
qCDebug(interfaceapp) << "Eye Tracker: Started streaming";
}
// TODO: Re-enable once saving / loading calibrations is working
//if (_isStreaming) {
// // Automatically load calibration if one has been saved.
// QString availableCalibrations = QString(smi_getAvailableCalibrations());
// if (availableCalibrations.contains(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION)) {
// result = smi_loadCalibration(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION);
// if (result != SMI_RET_SUCCESS) {
// qCWarning(interfaceapp) << "Eye Tracker: Error loading calibration:" << smiReturnValueToString(result);
// QMessageBox::warning(nullptr, "Eye Tracker Error", "Error loading calibration"
// + smiReturnValueToString(result));
// } else {
// qCDebug(interfaceapp) << "Eye Tracker: Loaded calibration";
// }
// }
//}
if (_isStreaming) {
// Automatically load calibration if one has been saved.
QString availableCalibrations = QString(smi_getAvailableCalibrations());
if (availableCalibrations.contains(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION)) {
result = smi_loadCalibration(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION);
if (result != SMI_RET_SUCCESS) {
qCWarning(interfaceapp) << "Eye Tracker: Error loading calibration:" << smiReturnValueToString(result);
QMessageBox::warning(nullptr, "Eye Tracker Error", "Error loading calibration"
+ smiReturnValueToString(result));
} else {
qCDebug(interfaceapp) << "Eye Tracker: Loaded calibration";
}
}
}
}
#endif
@ -260,11 +259,10 @@ void EyeTracker::calibrate(int points) {
if (result != SMI_RET_SUCCESS) {
qCWarning(interfaceapp) << "Eye Tracker: Error performing calibration:" << smiReturnValueToString(result);
} else {
// TODO: Re-enable once saving / loading calibrations is working
//result = smi_saveCalibration(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION);
//if (result != SMI_RET_SUCCESS) {
// qCWarning(interfaceapp) << "Eye Tracker: Error saving calibration:" << smiReturnValueToString(result);
//}
result = smi_saveCalibration(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION);
if (result != SMI_RET_SUCCESS) {
qCWarning(interfaceapp) << "Eye Tracker: Error saving calibration:" << smiReturnValueToString(result);
}
}
}
@ -292,11 +290,10 @@ QString EyeTracker::smiReturnValueToString(int value) {
return "Eye cameras not available";
case smi_ErrorReturnValue::SMI_ERROR_OCULUS_RUNTIME_NOT_SUPPORTED:
return "Oculus runtime not supported";
// TODO: Re-enable once saving / loading calibrations is working
//case smi_ErrorReturnValue::SMI_ERROR_FILE_NOT_FOUND:
// return "File not found";
//case smi_ErrorReturnValue::SMI_ERROR_FILE_EMPTY:
// return "File empty";
case smi_ErrorReturnValue::SMI_ERROR_FILE_NOT_FOUND:
return "File not found";
case smi_ErrorReturnValue::SMI_ERROR_FILE_EMPTY:
return "File empty";
case smi_ErrorReturnValue::SMI_ERROR_UNKNOWN:
return "Unknown error";
default:

View file

@ -58,6 +58,7 @@ WebWindowClass::WebWindowClass(const QString& title, const QString& url, int wid
auto dialogWidget = new QDialog(Application::getInstance()->getWindow(), Qt::Window);
dialogWidget->setWindowTitle(title);
dialogWidget->resize(width, height);
dialogWidget->installEventFilter(this);
connect(dialogWidget, &QDialog::finished, this, &WebWindowClass::hasClosed);
auto layout = new QVBoxLayout(dialogWidget);
@ -93,6 +94,19 @@ WebWindowClass::WebWindowClass(const QString& title, const QString& url, int wid
WebWindowClass::~WebWindowClass() {
}
// Event filter installed on the wrapped dialog widget (see the constructor's
// installEventFilter(this) call). Translates native move/resize events into
// the script-visible moved()/resized() signals. Always returns false so the
// event continues on to its normal handlers.
bool WebWindowClass::eventFilter(QObject* sender, QEvent* event) {
    // Only the dialog widget we wrap is of interest.
    if (sender != _windowWidget) {
        return false;
    }
    switch (event->type()) {
        case QEvent::Move:
            emit moved(getPosition());
            break;
        case QEvent::Resize:
            emit resized(getSize());
            break;
        default:
            break;
    }
    return false;
}
// Slot connected to the wrapped QDialog's finished() signal (see the
// constructor); relays the close notification to script-side listeners
// via the closed() signal.
void WebWindowClass::hasClosed() {
    emit closed();
}
@ -122,6 +136,32 @@ void WebWindowClass::setURL(const QString& url) {
_webView->setUrl(url);
}
// Current size of the wrapped dialog widget, as a floating-point size
// for script consumption.
QSizeF WebWindowClass::getSize() const {
    // QWidget::size() yields a QSize; rely on the QSize -> QSizeF conversion.
    return QSizeF(_windowWidget->size());
}
// Script-facing overload: accepts a QSizeF and delegates to the thread-safe
// integer overload (the qreal width/height are truncated to int).
void WebWindowClass::setSize(QSizeF size) {
    setSize(size.width(), size.height());
}
// Resize the wrapped dialog widget. Safe to call from any thread: if invoked
// off the object's owning thread, the call is re-dispatched via
// QMetaObject::invokeMethod and performed there.
void WebWindowClass::setSize(int width, int height) {
    if (QThread::currentThread() == thread()) {
        _windowWidget->resize(width, height);
    } else {
        // Bounce over to the owning (main) thread before touching the widget.
        QMetaObject::invokeMethod(this, "setSize", Qt::AutoConnection,
                                  Q_ARG(int, width), Q_ARG(int, height));
    }
}
// Top-left corner of the wrapped dialog widget, returned as a glm::vec2
// for script consumption.
glm::vec2 WebWindowClass::getPosition() const {
    const QPoint topLeft = _windowWidget->pos();
    return glm::vec2(topLeft.x(), topLeft.y());
}
// Script-facing overload: unpacks a glm::vec2 and delegates to the
// thread-safe integer overload (the float coordinates are truncated to int).
void WebWindowClass::setPosition(glm::vec2 position) {
    setPosition(position.x, position.y);
}
void WebWindowClass::setPosition(int x, int y) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "setPosition", Qt::AutoConnection, Q_ARG(int, x), Q_ARG(int, y));

View file

@ -35,6 +35,9 @@ class WebWindowClass : public QObject {
Q_OBJECT
Q_PROPERTY(QObject* eventBridge READ getEventBridge)
Q_PROPERTY(QString url READ getURL)
Q_PROPERTY(glm::vec2 position READ getPosition WRITE setPosition);
Q_PROPERTY(QSizeF size READ getSize WRITE setSize);
public:
WebWindowClass(const QString& title, const QString& url, int width, int height, bool isToolWindow = false);
~WebWindowClass();
@ -43,7 +46,12 @@ public:
public slots:
void setVisible(bool visible);
glm::vec2 getPosition() const;
void setPosition(int x, int y);
void setPosition(glm::vec2 position);
QSizeF getSize() const;
void setSize(QSizeF size);
void setSize(int width, int height);
QString getURL() const { return _webView->url().url(); }
void setURL(const QString& url);
void raise();
@ -52,8 +60,13 @@ public slots:
void setTitle(const QString& title);
signals:
void moved(glm::vec2 position);
void resized(QSizeF size);
void closed();
protected:
virtual bool eventFilter(QObject* sender, QEvent* event);
private slots:
void hasClosed();

View file

@ -46,12 +46,7 @@ ApplicationOverlay::ApplicationOverlay()
// then release it back to the UI for re-use
auto offscreenUi = DependencyManager::get<OffscreenUi>();
connect(offscreenUi.data(), &OffscreenUi::textureUpdated, this, [&](GLuint textureId) {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
offscreenUi->lockTexture(textureId);
std::swap(_uiTexture, textureId);
if (textureId) {
offscreenUi->releaseTexture(textureId);
}
_uiTexture = textureId;
});
}

View file

@ -173,7 +173,6 @@ void DialogsManager::hmdTools(bool showTools) {
}
// Invoked when the HMD tools dialog closes: un-checks the corresponding menu
// item and hides the dialog so the two stay in sync.
void DialogsManager::hmdToolsClosed() {
    // NOTE(review): getActionForOption() is dereferenced unchecked — confirm
    // the HMDTools menu option is guaranteed to exist at this point.
    Menu::getInstance()->getActionForOption(MenuOption::HMDTools)->setChecked(false);
    _hmdToolsDialog->hide();
}

View file

@ -30,8 +30,9 @@
static const int WIDTH = 350;
static const int HEIGHT = 100;
HMDToolsDialog::HMDToolsDialog(QWidget* parent) :
QDialog(parent, Qt::Window | Qt::WindowCloseButtonHint | Qt::WindowStaysOnTopHint)
QDialog(parent, Qt::Window | Qt::CustomizeWindowHint | Qt::WindowTitleHint | Qt::WindowStaysOnTopHint)
{
// FIXME do we want to support more than one connected HMD? It seems like a pretty corner case
foreach(auto displayPlugin, PluginManager::getInstance()->getDisplayPlugins()) {
@ -171,14 +172,12 @@ void HMDToolsDialog::leaveHMDMode() {
}
// Overrides QDialog::reject() (triggered by ESC). Routes the rejection
// through close() so that closeEvent() runs and decides whether the window
// may actually go away.
void HMDToolsDialog::reject() {
    // Just regularly close upon ESC
    close();
    // We don't want this window to be closable from a close icon, just from our "Leave HMD Mode" button
    // (that policy is enforced in closeEvent() below).
}
// Close handler for the HMD tools window.
void HMDToolsDialog::closeEvent(QCloseEvent* event) {
    // TODO: consider if we want to prevent closing of this window with event->ignore();
    QDialog::closeEvent(event);
    emit closed();
    // We don't want this window to be closable from a close icon, just from our "Leave HMD Mode" button
    // NOTE(review): ignoring the event *after* QDialog::closeEvent() has run and
    // closed() has been emitted looks contradictory — confirm which behavior
    // (closable vs. not closable) is actually intended here.
    event->ignore();
}
void HMDToolsDialog::centerCursorOnWidget(QWidget* widget) {

View file

@ -42,7 +42,7 @@ void Image3DOverlay::update(float deltatime) {
}
void Image3DOverlay::render(RenderArgs* args) {
if (!_texture) {
if (!_isLoaded) {
_isLoaded = true;
_texture = DependencyManager::get<TextureCache>()->getTexture(_url);
}
@ -90,7 +90,6 @@ void Image3DOverlay::render(RenderArgs* args) {
applyTransformTo(_transform, true);
Transform transform = _transform;
transform.postScale(glm::vec3(getDimensions(), 1.0f));
transform.postRotate(glm::angleAxis(glm::pi<float>(), IDENTITY_UP));
batch->setModelTransform(transform);
batch->setResourceTexture(0, _texture->getGPUTexture());

View file

@ -5,14 +5,15 @@
// Modified by Zander Otavka on 7/15/15
// Copyright 2014 High Fidelity, Inc.
//
// Exposes methods for managing `Overlay`s and `OverlayPanel`s to scripts.
//
// YOU SHOULD NOT USE `Overlays` DIRECTLY, unless you like pain and deprecation. Instead, use the
// object oriented abstraction layer found in `examples/libraries/overlayUtils.js`.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Exposes methods to scripts for managing `Overlay`s and `OverlayPanel`s.
//
// YOU SHOULD NOT USE `Overlays` DIRECTLY, unless you like pain and deprecation. Instead, use
// the object oriented API replacement found in `examples/libraries/overlayManager.js`. See
// that file for docs and usage.
//
#ifndef hifi_Overlays_h
#define hifi_Overlays_h
@ -57,11 +58,11 @@ void RayToOverlayIntersectionResultFromScriptValue(const QScriptValue& object, R
class Overlays : public QObject {
Q_OBJECT
public:
Overlays();
~Overlays();
void init();
void update(float deltatime);
void renderHUD(RenderArgs* renderArgs);
@ -103,7 +104,7 @@ public slots:
/// returns details about the closest 3D Overlay hit by the pick ray
RayToOverlayIntersectionResult findRayIntersection(const PickRay& ray);
/// returns whether the overlay's assets are loaded or not
bool isLoaded(unsigned int id);
@ -153,5 +154,5 @@ private:
};
#endif // hifi_Overlays_h

View file

@ -1,6 +1,6 @@
//
// PanelAttachable.cpp
// hifi
// interface/src/ui/overlays
//
// Created by Zander Otavka on 7/15/15.
// Copyright 2015 High Fidelity, Inc.

View file

@ -8,6 +8,24 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Base class for anything that can attach itself to an `OverlayPanel` as a child.
// `PanelAttachable` keeps an `std::shared_ptr` to its parent panel, and sets its
// transformations and visibility based on the parent.
//
// When subclassing `PanelAttachable`, make sure `applyTransformTo`, `getProperty`, and
// `setProperties` are all called in the appropriate places. Look through `Image3DOverlay` and
// `Billboard3DOverlay` for examples. Pay special attention to `applyTransformTo`; it should
// be called in three places for `Overlay`s: `render`, `update`, and `findRayIntersection`.
//
// When overriding `applyTransformTo`, make sure to wrap all of your code, including the call
// to the superclass method, with the following `if` block. Then call the superclass method
// with force = true.
//
// if (force || usecTimestampNow() > _transformExpiry) {
// PanelAttachable::applyTransformTo(transform, true);
// ...
// }
//
#ifndef hifi_PanelAttachable_h
#define hifi_PanelAttachable_h
@ -42,6 +60,8 @@ protected:
QScriptValue getProperty(QScriptEngine* scriptEngine, const QString& property);
void setProperties(const QScriptValue& properties);
/// set position, rotation and scale on transform based on offsets, and parent panel offsets
/// if force is false, only apply transform if it hasn't been applied in the last .1 seconds
virtual void applyTransformTo(Transform& transform, bool force = false);
quint64 _transformExpiry = 0;

View file

@ -67,7 +67,7 @@ bool ElbowConstraint::apply(glm::quat& rotation) const {
// update rotation
const float MIN_SWING_REAL_PART = 0.99999f;
if (twistWasClamped || fabsf(swingRotation.w < MIN_SWING_REAL_PART)) {
if (twistWasClamped || fabsf(swingRotation.w) < MIN_SWING_REAL_PART) {
if (twistWasClamped) {
twistRotation = glm::angleAxis(clampedTwistAngle, _axis);
}

View file

@ -786,8 +786,8 @@ glm::quat Rig::getJointDefaultRotationInParentFrame(int jointIndex) {
void Rig::updateFromHeadParameters(const HeadParameters& params) {
updateLeanJoint(params.leanJointIndex, params.leanSideways, params.leanForward, params.torsoTwist);
updateNeckJoint(params.neckJointIndex, params.localHeadOrientation, params.leanSideways, params.leanForward, params.torsoTwist);
updateEyeJoint(params.leftEyeJointIndex, params.modelTranslation, params.modelRotation, params.worldHeadOrientation, params.eyeLookAt, params.eyeSaccade);
updateEyeJoint(params.rightEyeJointIndex, params.modelTranslation, params.modelRotation, params.worldHeadOrientation, params.eyeLookAt, params.eyeSaccade);
updateEyeJoints(params.leftEyeJointIndex, params.rightEyeJointIndex, params.modelTranslation, params.modelRotation,
params.worldHeadOrientation, params.eyeLookAt, params.eyeSaccade);
}
void Rig::updateLeanJoint(int index, float leanSideways, float leanForward, float torsoTwist) {
@ -828,6 +828,11 @@ void Rig::updateNeckJoint(int index, const glm::quat& localHeadOrientation, floa
}
}
// Update both eye joints with the same head/gaze parameters by delegating to
// the single-eye updateEyeJoint() for each joint index in turn.
void Rig::updateEyeJoints(int leftEyeIndex, int rightEyeIndex, const glm::vec3& modelTranslation, const glm::quat& modelRotation,
                          const glm::quat& worldHeadOrientation, const glm::vec3& lookAtSpot, const glm::vec3& saccade) {
    for (int eyeJointIndex : { leftEyeIndex, rightEyeIndex }) {
        updateEyeJoint(eyeJointIndex, modelTranslation, modelRotation, worldHeadOrientation, lookAtSpot, saccade);
    }
}
void Rig::updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::quat& worldHeadOrientation, const glm::vec3& lookAtSpot, const glm::vec3& saccade) {
if (index >= 0 && _jointStates[index].getParentIndex() >= 0) {
auto& state = _jointStates[index];

View file

@ -157,6 +157,8 @@ public:
void setEnableRig(bool isEnabled) { _enableRig = isEnabled; }
void updateFromHeadParameters(const HeadParameters& params);
void updateEyeJoints(int leftEyeIndex, int rightEyeIndex, const glm::vec3& modelTranslation, const glm::quat& modelRotation,
const glm::quat& worldHeadOrientation, const glm::vec3& lookAtSpot, const glm::vec3& saccade = glm::vec3(0.0f));
virtual void setHandPosition(int jointIndex, const glm::vec3& position, const glm::quat& rotation,
float scale, float priority) = 0;

View file

@ -861,6 +861,8 @@ void EntityTreeRenderer::mousePressEvent(QMouseEvent* event, unsigned int device
if (entityScript.property("clickDownOnEntity").isValid()) {
entityScript.property("clickDownOnEntity").call(entityScript, entityScriptArgs);
}
} else {
emit mousePressOffEntity(rayPickResult, event, deviceID);
}
_lastMouseEvent = MouseEvent(*event, deviceID);
_lastMouseEventValid = true;

View file

@ -95,6 +95,7 @@ public:
signals:
void mousePressOnEntity(const RayToEntityIntersectionResult& entityItemID, const QMouseEvent* event, unsigned int deviceId);
void mousePressOffEntity(const RayToEntityIntersectionResult& entityItemID, const QMouseEvent* event, unsigned int deviceId);
void mouseMoveOnEntity(const RayToEntityIntersectionResult& entityItemID, const QMouseEvent* event, unsigned int deviceId);
void mouseReleaseOnEntity(const RayToEntityIntersectionResult& entityItemID, const QMouseEvent* event, unsigned int deviceId);

View file

@ -43,20 +43,12 @@ RenderableWebEntityItem::~RenderableWebEntityItem() {
if (_webSurface) {
_webSurface->pause();
_webSurface->disconnect(_connection);
// After the disconnect, ensure that we have the latest texture by acquiring the
// lock used when updating the _texture value
_textureLock.lock();
_textureLock.unlock();
// The lifetime of the QML surface MUST be managed by the main thread
// Additionally, we MUST use local variables copied by value, rather than
// member variables, since they would implicitly refer to a this that
// is no longer valid
auto webSurface = _webSurface;
auto texture = _texture;
AbstractViewStateInterface::instance()->postLambdaEvent([webSurface, texture] {
if (texture) {
webSurface->releaseTexture(texture);
}
AbstractViewStateInterface::instance()->postLambdaEvent([webSurface] {
webSurface->deleteLater();
});
}
@ -64,6 +56,16 @@ RenderableWebEntityItem::~RenderableWebEntityItem() {
}
void RenderableWebEntityItem::render(RenderArgs* args) {
#ifdef WANT_EXTRA_DEBUGGING
{
gpu::Batch& batch = *args->_batch;
batch.setModelTransform(getTransformToCenter()); // we want to include the scale as well
glm::vec4 cubeColor{ 1.0f, 0.0f, 0.0f, 1.0f};
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(batch, 1.0f, cubeColor);
}
#endif
QOpenGLContext * currentContext = QOpenGLContext::currentContext();
QSurface * currentSurface = currentContext->surface();
if (!_webSurface) {
@ -74,23 +76,7 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
_webSurface->resume();
_webSurface->getRootItem()->setProperty("url", _sourceUrl);
_connection = QObject::connect(_webSurface, &OffscreenQmlSurface::textureUpdated, [&](GLuint textureId) {
_webSurface->lockTexture(textureId);
assert(!glGetError());
// TODO change to atomic<GLuint>?
withLock(_textureLock, [&] {
std::swap(_texture, textureId);
});
if (textureId) {
_webSurface->releaseTexture(textureId);
}
if (_texture) {
_webSurface->makeCurrent();
glBindTexture(GL_TEXTURE_2D, _texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, 0);
_webSurface->doneCurrent();
}
_texture = textureId;
});
auto forwardMouseEvent = [=](const RayToEntityIntersectionResult& intersection, const QMouseEvent* event, unsigned int deviceId) {
@ -145,6 +131,19 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
point += 0.5f;
point.y = 1.0f - point.y;
point *= getDimensions() * METERS_TO_INCHES * DPI;
if (event->button() == Qt::MouseButton::LeftButton) {
if (event->type() == QEvent::MouseButtonPress) {
this->_pressed = true;
this->_lastMove = ivec2((int)point.x, (int)point.y);
} else if (event->type() == QEvent::MouseButtonRelease) {
this->_pressed = false;
}
}
if (event->type() == QEvent::MouseMove) {
this->_lastMove = ivec2((int)point.x, (int)point.y);
}
// Forward the mouse event.
QMouseEvent mappedEvent(event->type(),
QPoint((int)point.x, (int)point.y),
@ -158,6 +157,16 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
QObject::connect(renderer, &EntityTreeRenderer::mousePressOnEntity, forwardMouseEvent);
QObject::connect(renderer, &EntityTreeRenderer::mouseReleaseOnEntity, forwardMouseEvent);
QObject::connect(renderer, &EntityTreeRenderer::mouseMoveOnEntity, forwardMouseEvent);
QObject::connect(renderer, &EntityTreeRenderer::hoverLeaveEntity, [=](const EntityItemID& entityItemID, const MouseEvent& event) {
if (this->_pressed && this->getID() == entityItemID) {
// If the user mouses off the entity while the button is down, simulate a mouse release
QMouseEvent mappedEvent(QEvent::MouseButtonRelease,
QPoint(_lastMove.x, _lastMove.y),
Qt::MouseButton::LeftButton,
Qt::MouseButtons(), Qt::KeyboardModifiers());
QCoreApplication::sendEvent(_webSurface->getWindow(), &mappedEvent);
}
});
}
glm::vec2 dims = glm::vec2(getDimensions());
@ -196,3 +205,11 @@ void RenderableWebEntityItem::setSourceUrl(const QString& value) {
}
}
}
// Forward the proxy window to the offscreen QML surface.
// _webSurface is created lazily inside render() (see the `if (!_webSurface)`
// branch there), so guard against this being called before the entity has
// ever been rendered — previously that would dereference a null pointer.
void RenderableWebEntityItem::setProxyWindow(QWindow* proxyWindow) {
    if (_webSurface) {
        _webSurface->setProxyWindow(proxyWindow);
    }
}
// Returns the event-handler object of the offscreen QML surface, or nullptr
// if the surface has not been created yet. _webSurface is created lazily
// inside render() (see the `if (!_webSurface)` branch there), so callers may
// legitimately hit the not-yet-rendered case; previously this dereferenced
// a null pointer.
QObject* RenderableWebEntityItem::getEventHandler() {
    if (!_webSurface) {
        return nullptr;
    }
    return _webSurface->getEventHandler();
}

View file

@ -16,6 +16,8 @@
#include "RenderableEntityItem.h"
class OffscreenQmlSurface;
class QWindow;
class QObject;
class RenderableWebEntityItem : public WebEntityItem {
public:
@ -26,6 +28,9 @@ public:
virtual void render(RenderArgs* args);
virtual void setSourceUrl(const QString& value);
void setProxyWindow(QWindow* proxyWindow);
QObject* getEventHandler();
SIMPLE_RENDERABLE();
@ -34,7 +39,8 @@ private:
QMetaObject::Connection _connection;
uint32_t _texture{ 0 };
ivec2 _lastPress{ INT_MIN };
QMutex _textureLock;
bool _pressed{ false };
ivec2 _lastMove{ INT_MIN };
};

View file

@ -36,9 +36,9 @@ void main(void) {
float inPositionY = (_inPosition.y - 0.5) / voxelVolumeSize.y;
float inPositionZ = (_inPosition.z - 0.5) / voxelVolumeSize.z;
vec4 xyDiffuse = texture2D(xMap, vec2(-inPositionX, -inPositionY));
vec4 xzDiffuse = texture2D(yMap, vec2(-inPositionX, inPositionZ));
vec4 yzDiffuse = texture2D(zMap, vec2(inPositionZ, -inPositionY));
vec4 xyDiffuse = texture(xMap, vec2(-inPositionX, -inPositionY));
vec4 xzDiffuse = texture(yMap, vec2(-inPositionX, inPositionZ));
vec4 yzDiffuse = texture(zMap, vec2(inPositionZ, -inPositionY));
vec3 xyDiffuseScaled = xyDiffuse.rgb * abs(worldNormal.z);
vec3 xzDiffuseScaled = xzDiffuse.rgb * abs(worldNormal.y);

View file

@ -133,7 +133,7 @@ void GLBackend::updateTransform() {
if (offset >= 0) {
glBindBufferRange(GL_UNIFORM_BUFFER, TRANSFORM_CAMERA_SLOT,
_transform._transformCameraBuffer,
offset, sizeof(Backend::TransformObject));
offset, sizeof(Backend::TransformCamera));
}
(void)CHECK_GL_ERROR();

View file

@ -535,8 +535,8 @@ void SixenseManager::assignDefaultInputMapping(UserInputMapper& mapper) {
mapper.addInputChannel(UserInputMapper::LEFT_HAND, makeInput(LEFT_HAND));
mapper.addInputChannel(UserInputMapper::RIGHT_HAND, makeInput(RIGHT_HAND));
mapper.addInputChannel(UserInputMapper::LEFT_HAND_CLICK, makeInput(BUTTON_FWD, 0));
mapper.addInputChannel(UserInputMapper::RIGHT_HAND_CLICK, makeInput(BUTTON_FWD, 1));
mapper.addInputChannel(UserInputMapper::LEFT_HAND_CLICK, makeInput(BACK_TRIGGER, 0));
mapper.addInputChannel(UserInputMapper::RIGHT_HAND_CLICK, makeInput(BACK_TRIGGER, 1));
}

View file

@ -401,8 +401,8 @@ void ViveControllerManager::assignDefaultInputMapping(UserInputMapper& mapper) {
mapper.addInputChannel(UserInputMapper::ACTION1, makeInput(GRIP_BUTTON, 0));
mapper.addInputChannel(UserInputMapper::ACTION2, makeInput(GRIP_BUTTON, 1));
mapper.addInputChannel(UserInputMapper::LEFT_HAND_CLICK, makeInput(TRIGGER_BUTTON, 0));
mapper.addInputChannel(UserInputMapper::RIGHT_HAND_CLICK, makeInput(TRIGGER_BUTTON, 1));
mapper.addInputChannel(UserInputMapper::LEFT_HAND_CLICK, makeInput(BACK_TRIGGER, 0));
mapper.addInputChannel(UserInputMapper::RIGHT_HAND_CLICK, makeInput(BACK_TRIGGER, 1));
// Hands
mapper.addInputChannel(UserInputMapper::LEFT_HAND, makeInput(LEFT_HAND));

View file

@ -32,4 +32,12 @@ if (WIN32)
endif()
endif (WIN32)
add_dependency_external_projects(boostconfig)
find_package(BoostConfig REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${BOOSTCONFIG_INCLUDE_DIRS})
add_dependency_external_projects(oglplus)
find_package(OGLPLUS REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${OGLPLUS_INCLUDE_DIRS})
link_hifi_libraries(animation fbx shared gpu model render environment)

View file

@ -0,0 +1,9 @@
//
// Created by Bradley Austin Davis on 2015/08/06.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "GLEscrow.h"

View file

@ -0,0 +1,222 @@
//
// Created by Bradley Austin Davis on 2015/08/06.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_GLEscrow_h
#define hifi_GLEscrow_h
#include <utility>
#include <algorithm>
#include <deque>
#include <forward_list>
#include <functional>
#include <GL/glew.h>
#include <mutex>
#include <SharedUtil.h>
#include <NumericalConstants.h>
// The GLEscrow class provides a simple mechanism for producer GL contexts to provide
// content to a consumer where the consumer is assumed to be connected to a display and
// therefore must never be blocked.
//
// So we need to accomplish a few things.
//
// First the producer context needs to be able to supply content to the primary thread
// in such a way that the consumer only gets it when it's actually valid for reading
// (meaning that the async writing operations have been completed)
//
// Second, the client thread should be able to release the resource when it's finished
// using it (but again the reading of the resource is likely asynchronous)
//
// Finally, blocking operations need to be minimal, and any potentially blocking operations
// that can't be avoided need to be pushed to the submission context to avoid impacting
// the framerate of the consumer
//
// This class acts as a kind of border guard and holding pen between the two contexts
// to hold resources which the CPU is no longer using, but which might still be
// in use by the GPU. Fence sync objects are used to moderate the actual release of
// resources in either direction.
// Holding pen for GL resources passing between a producer context and a
// consumer (display) context. Each resource travels with a GLsync fence;
// nothing is handed over or recycled until its fence has signaled, so the
// consumer never blocks on the GPU.
//
// Thread-safety: _submits, _releases and _trash are guarded by _mutex.
// fetch() uses try_lock() so the consumer thread can never stall here.
template <
    typename T,
    // Only accept numeric types
    typename = typename std::enable_if<std::is_arithmetic<T>::value, T>::type
>
class GLEscrow {
public:
    // A resource value paired with the fence guarding it and a creation
    // timestamp used to detect fences that never signal.
    struct Item {
        T _value;
        GLsync _sync;
        uint64_t _created;

        Item(T value, GLsync sync) :
            _value(value), _sync(sync), _created(usecTimestampNow())
        {
        }

        // Microseconds elapsed since this item was created.
        uint64_t age() {
            return usecTimestampNow() - _created;
        }

        // Non-blocking fence check (zero timeout). Returns true once the sync
        // has signaled; warns if the fence has been pending over half a second.
        // NOTE(review): GL_WAIT_FAILED (an error) is treated the same as
        // "not yet signaled" — confirm that is intentional.
        bool signaled() {
            auto result = glClientWaitSync(_sync, 0, 0);
            if (GL_TIMEOUT_EXPIRED != result && GL_WAIT_FAILED != result) {
                return true;
            }
            if (age() > (USECS_PER_SECOND / 2)) {
                qWarning() << "Long unsignaled sync";
            }
            return false;
        }
    };

    using Mutex = std::mutex;
    using Lock = std::unique_lock<Mutex>;
    // Callback used to dispose of / recycle a resource once the GPU is
    // provably done with it.
    using Recycler = std::function<void(T t)>;
    // deque gives us random access, double ended push & pop and size, all in constant time
    using Deque = std::deque<Item>;
    using List = std::forward_list<Item>;

    void setRecycler(Recycler recycler) {
        _recycler = recycler;
    }

    // Submit a new resource from the producer context
    // returns the number of prior submissions that were
    // never consumed before becoming available.
    // producers should self-limit if they start producing more
    // work than is being consumed;
    size_t submit(T t, GLsync writeSync = 0) {
        if (!writeSync) {
            // FIXME should the release and submit actually force the creation of a fence?
            writeSync = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
            glFlush();
        }
        {
            Lock lock(_mutex);
            _submits.push_back(Item(t, writeSync));
        }
        return cleanTrash();
    }

    // Returns the next available resource provided by the submitter,
    // or if none is available (which could mean either the submission
    // list is empty or that the first item on the list isn't yet signaled
    T fetch() {
        T result{0};
        // On the one hand using try_lock() reduces the chance of blocking the consumer thread,
        // but if the produce thread is going fast enough, it could effectively
        // starve the consumer out of ever actually getting resources.
        if (_mutex.try_lock()) {
            if (signaled(_submits, 0)) {
                result = _submits.at(0)._value;
                _submits.pop_front();
            }
            _mutex.unlock();
        }
        return result;
    }

    // If fetch returns a non-zero value, it's the responsibility of the
    // client to release it at some point
    void release(T t, GLsync readSync = 0) {
        if (!readSync) {
            // FIXME should the release and submit actually force the creation of a fence?
            readSync = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
            glFlush();
        }
        Lock lock(_mutex);
        _releases.push_back(Item(t, readSync));
    }

private:
    // Drop fully-consumed submissions, clear signaled releases, and recycle
    // everything queued in _trash. Returns the number of submissions that
    // were superseded before ever being fetched ("wasted work").
    size_t cleanTrash() {
        size_t wastedWork{ 0 };
        List trash;
        {
            // BUGFIX: pop() and signaled() mutate _submits/_releases/_trash and
            // (per their own contracts) "must be inside a locked section", but
            // submit() releases _mutex before calling cleanTrash(). Take the
            // lock here so these helpers are never invoked unguarded.
            Lock lock(_mutex);
            // We only ever need one ready item available in the list, so if the
            // second item is signaled (implying the first is as well, remove the first
            // item. Iterate until the SECOND item in the list is not in the ready state
            // The signaled function takes care of checking against the deque size
            while (signaled(_submits, 1)) {
                pop(_submits);
                ++wastedWork;
            }
            // Stuff in the release queue can be cleared out as soon as it's signaled
            while (signaled(_releases, 0)) {
                pop(_releases);
            }
            trash.swap(_trash);
        }
        // FIXME maybe doing a timing on the deleters and warn if it's taking excessive time?
        // although we are out of the lock, so it shouldn't be blocking anything
        std::for_each(trash.begin(), trash.end(), [&](typename List::const_reference item) {
            if (item._value) {
                _recycler(item._value);
            }
            if (item._sync) {
                glDeleteSync(item._sync);
            }
        });
        return wastedWork;
    }

    // May be called on any thread, but must be inside a locked section
    void pop(Deque& deque) {
        auto& item = deque.front();
        _trash.push_front(item);
        deque.pop_front();
    }

    // May be called on any thread, but must be inside a locked section
    bool signaled(Deque& deque, size_t i) {
        if (i >= deque.size()) {
            return false;
        }
        auto& item = deque.at(i);
        // If there's no sync object, either it's not required or it's already been found to be signaled
        if (!item._sync) {
            return true;
        }
        // Check the sync value using a zero timeout to ensure we don't block
        // This is critically important as this is the only GL function we'll call
        // inside the locked sections, so it cannot have any latency
        if (item.signaled()) {
            // if the sync is signaled, queue it for deletion
            _trash.push_front(Item(0, item._sync));
            // And change the stored value to 0 so we don't check it again
            item._sync = 0;
            return true;
        }
        return false;
    }

    Mutex _mutex;
    Recycler _recycler;
    // Items coming from the submission / writer context
    Deque _submits;
    // Items coming from the client context.
    Deque _releases;
    // Items which are no longer in use.
    List _trash;
};
using GLTextureEscrow = GLEscrow<GLuint>;
#endif

View file

@ -34,11 +34,9 @@ OffscreenGlCanvas::~OffscreenGlCanvas() {
void OffscreenGlCanvas::create(QOpenGLContext* sharedContext) {
if (nullptr != sharedContext) {
sharedContext->doneCurrent();
_context->setFormat(sharedContext->format());
_context->setShareContext(sharedContext);
} else {
_context->setFormat(getDefaultOpenGlSurfaceFormat());
}
_context->setFormat(getDefaultOpenGlSurfaceFormat());
_context->create();
_offscreenSurface->setFormat(_context->format());

View file

@ -6,21 +6,34 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "OffscreenQmlSurface.h"
#include "OglplusHelpers.h"
#include <QOpenGLFramebufferObject>
#include <QOpenGLDebugLogger>
#include <QGLWidget>
#include <QWidget>
#include <QtQml>
#include <QQmlEngine>
#include <QQmlComponent>
#include <QQuickItem>
#include <QQuickWindow>
#include <QQuickRenderControl>
#include <QWaitCondition>
#include <QMutex>
#include "FboCache.h"
#include <PerfStat.h>
#include <NumericalConstants.h>
#include "GLEscrow.h"
#include "OffscreenGlCanvas.h"
#include "AbstractViewStateInterface.h"
// FIXME move to threaded rendering with Qt 5.5
// #define QML_THREADED
// Time between receiving a request to render the offscreen UI actually triggering
// the render. Could possibly be increased depending on the framerate we expect to
// achieve.
// This has the effect of capping the framerate at 200
static const int MIN_TIMER_MS = 5;
class QMyQuickRenderControl : public QQuickRenderControl {
protected:
QWindow* renderWindow(QPoint* offset) Q_DECL_OVERRIDE{
@ -35,119 +48,324 @@ protected:
private:
QWindow* _renderWindow{ nullptr };
friend class OffscreenQmlRenderer;
friend class OffscreenQmlSurface;
};
#include "AbstractViewStateInterface.h"
Q_DECLARE_LOGGING_CATEGORY(offscreenFocus)
Q_LOGGING_CATEGORY(offscreenFocus, "hifi.offscreen.focus")
// Time between receiving a request to render the offscreen UI actually triggering
// the render. Could possibly be increased depending on the framerate we expect to
// achieve.
static const int MAX_QML_FRAMERATE = 10;
static const int MIN_RENDER_INTERVAL_US = USECS_PER_SECOND / MAX_QML_FRAMERATE;
static const int MIN_TIMER_MS = 5;
#ifdef QML_THREADED
static const QEvent::Type INIT = QEvent::Type(QEvent::User + 1);
static const QEvent::Type RENDER = QEvent::Type(QEvent::User + 2);
static const QEvent::Type RESIZE = QEvent::Type(QEvent::User + 3);
static const QEvent::Type STOP = QEvent::Type(QEvent::User + 4);
static const QEvent::Type UPDATE = QEvent::Type(QEvent::User + 5);
#endif
class OffscreenQmlRenderer : public OffscreenGlCanvas {
friend class OffscreenQmlSurface;
public:
OffscreenQmlRenderer(OffscreenQmlSurface* surface, QOpenGLContext* shareContext) : _surface(surface) {
OffscreenGlCanvas::create(shareContext);
#ifdef QML_THREADED
// Qt 5.5
// _renderControl->prepareThread(_renderThread);
_context->moveToThread(&_thread);
moveToThread(&_thread);
_thread.setObjectName("QML Thread");
_thread.start();
post(INIT);
#else
init();
#endif
}
#ifdef QML_THREADED
bool event(QEvent *e)
{
switch (int(e->type())) {
case INIT:
{
QMutexLocker lock(&_mutex);
init();
}
return true;
case RENDER:
{
QMutexLocker lock(&_mutex);
render(&lock);
}
return true;
case RESIZE:
{
QMutexLocker lock(&_mutex);
resize();
}
return true;
case STOP:
{
QMutexLocker lock(&_mutex);
cleanup();
}
return true;
default:
return QObject::event(e);
}
}
void post(const QEvent::Type& type) {
QCoreApplication::postEvent(this, new QEvent(type));
}
#endif
private:
// (Re)creates the offscreen framebuffer attachments for the current _size:
// resizes the texture pool, allocates a depth renderbuffer, and attaches it
// to a fresh FBO.  Color attachments are bound per-frame in render().
// Must be called with the GL context current.
void setupFbo() {
    using namespace oglplus;
    _textures.setSize(_size);
    // NOTE(review): named _depthStencil but allocated as DepthComponent only
    // (no stencil bits) -- confirm stencil is not required by the QML scene.
    _depthStencil.reset(new Renderbuffer());
    Context::Bound(Renderbuffer::Target::Renderbuffer, *_depthStencil)
        .Storage(
        PixelDataInternalFormat::DepthComponent,
        _size.x, _size.y);
    _fbo.reset(new Framebuffer());
    _fbo->Bind(Framebuffer::Target::Draw);
    _fbo->AttachRenderbuffer(Framebuffer::Target::Draw,
        FramebufferAttachment::Depth, *_depthStencil);
    // Restore the default draw framebuffer so we leave GL state clean.
    DefaultFramebuffer().Bind(Framebuffer::Target::Draw);
}
OffscreenQmlSurface::OffscreenQmlSurface() :
_renderControl(new QMyQuickRenderControl), _fboCache(new FboCache) {
// One-time setup on the render thread: creates the render control and the
// (never-shown) QQuickWindow, initializes the render control against our GL
// context, builds the FBO, and wires the texture escrow's recycler.
void init() {
    _renderControl = new QMyQuickRenderControl();
    connect(_renderControl, &QQuickRenderControl::renderRequested, _surface, &OffscreenQmlSurface::requestRender);
    connect(_renderControl, &QQuickRenderControl::sceneChanged, _surface, &OffscreenQmlSurface::requestUpdate);
    // Create a QQuickWindow that is associated with our render control. Note that this
    // window never gets created or shown, meaning that it will never get an underlying
    // native (platform) window.
    QQuickWindow::setDefaultAlphaBuffer(true);
    // Weirdness... QQuickWindow NEEDS to be created on the rendering thread, or it will refuse to render
    // because it retains an internal 'context' object that retains the thread it was created on,
    // regardless of whether you later move it to another thread.
    _quickWindow = new QQuickWindow(_renderControl);
    // Fully transparent clear color so the UI can be composited over the scene.
    _quickWindow->setColor(QColor(255, 255, 255, 0));
    _quickWindow->setFlags(_quickWindow->flags() | static_cast<Qt::WindowFlags>(Qt::WA_TranslucentBackground));
#ifdef QML_THREADED
    // However, because we want to use synchronous events with the quickwindow, we need to move it back to the main
    // thread after it's created.
    _quickWindow->moveToThread(qApp->thread());
#endif
    if (!makeCurrent()) {
        qWarning("Failed to make context current on render thread");
        return;
    }
    _renderControl->initialize(_context);
    setupFbo();
    // When the consumer releases a texture, hand it back to the pool.
    _escrow.setRecycler([this](GLuint texture){
        _textures.recycleTexture(texture);
    });
    doneCurrent();
}
// Tears down GL-side resources on the render thread.  In threaded mode it
// also hands the GL context back to the main thread and wakes the caller
// blocked in stop().
void cleanup() {
    if (!makeCurrent()) {
        qFatal("Failed to make context current on render thread");
        return;
    }
    _renderControl->invalidate();
    _fbo.reset();
    _depthStencil.reset();
    _textures.clear();
    doneCurrent();
#ifdef QML_THREADED
    _context->moveToThread(QCoreApplication::instance()->thread());
    _cond.wakeOne();
#endif
}
// Resizes the QML window and the offscreen render targets to newSize
// (scaled by the device pixel ratio).  Early-outs when the effective
// offscreen size is unchanged.
// NOTE(review): the QML_THREADED event handler above invokes resize() with
// no arguments, but no zero-argument overload is visible here -- verify the
// threaded path compiles as intended.
void resize(const QSize& newSize) {
    // Update our members
    if (_quickWindow) {
        _quickWindow->setGeometry(QRect(QPoint(), newSize));
        _quickWindow->contentItem()->setSize(newSize);
    }
    // Qt bug in 5.4 forces this check of pixel ratio,
    // even though we're rendering offscreen.
    qreal pixelRatio = 1.0;
    if (_renderControl && _renderControl->_renderWindow) {
        pixelRatio = _renderControl->_renderWindow->devicePixelRatio();
    } else {
        pixelRatio = AbstractViewStateInterface::instance()->getDevicePixelRatio();
    }
    uvec2 newOffscreenSize = toGlm(newSize * pixelRatio);
    _textures.setSize(newOffscreenSize);
    if (newOffscreenSize == _size) {
        return;
    }
    _size = newOffscreenSize;
    // Clear out any fbos with the old size
    if (!makeCurrent()) {
        qWarning("Failed to make context current on render thread");
        return;
    }
    qDebug() << "Offscreen UI resizing to " << newSize.width() << "x" << newSize.height() << " with pixel ratio " << pixelRatio;
    setupFbo();
    doneCurrent();
}
// Synchronizes the QML scene graph and renders one frame into a pooled
// texture, which is then submitted to the escrow for the consumer thread.
// In threaded mode the caller holds _mutex; the lock is released (and the
// main thread woken) as soon as the sync phase completes.
void render(QMutexLocker *lock) {
    if (_surface->_paused) {
        return;
    }
    if (!makeCurrent()) {
        qWarning("Failed to make context current on render thread");
        return;
    }
    Q_ASSERT(toGlm(_quickWindow->geometry().size()) == _size);
    //Q_ASSERT(toGlm(_quickWindow->geometry().size()) == _textures._size);
    _renderControl->sync();
#ifdef QML_THREADED
    // Sync is done; let the main thread continue while we render.
    _cond.wakeOne();
    lock->unlock();
#endif
    using namespace oglplus;
    _quickWindow->setRenderTarget(GetName(*_fbo), QSize(_size.x, _size.y));
    // Attach the next pooled color texture and render the QML scene into it.
    TexturePtr texture = _textures.getNextTexture();
    _fbo->Bind(Framebuffer::Target::Draw);
    _fbo->AttachTexture(Framebuffer::Target::Draw, FramebufferAttachment::Color, *texture, 0);
    _fbo->Complete(Framebuffer::Target::Draw);
    //Context::Clear().ColorBuffer();
    {
        _renderControl->render();
        // FIXME The web browsers seem to be leaving GL in an error state.
        // Need a debug context with sync logging to figure out why.
        // for now just clear the errors
        glGetError();
    }
    // FIXME probably unnecessary
    DefaultFramebuffer().Bind(Framebuffer::Target::Draw);
    _quickWindow->resetOpenGLState();
    // Hand the finished frame to the consumer via the escrow.
    _escrow.submit(GetName(*texture));
    _lastRenderTime = usecTimestampNow();
}
void aboutToQuit() {
#ifdef QML_THREADED
QMutexLocker lock(&_quitMutex);
_quit = true;
#endif
}
void stop() {
#ifdef QML_THREADED
QMutexLocker lock(&_quitMutex);
post(STOP);
_cond.wait(&_mutex);
#else
cleanup();
#endif
}
// Frame-rate limiter: returns true when at least 1/fps seconds have
// elapsed since the last completed render.
bool allowNewFrame(uint8_t fps) {
    auto minRenderInterval = USECS_PER_SECOND / fps;
    auto lastInterval = usecTimestampNow() - _lastRenderTime;
    return (lastInterval > minRenderInterval);
}
OffscreenQmlSurface* _surface{ nullptr };
QQuickWindow* _quickWindow{ nullptr };
QMyQuickRenderControl* _renderControl{ nullptr };
#ifdef QML_THREADED
QThread _thread;
QMutex _mutex;
QWaitCondition _cond;
QMutex _quitMutex;
#endif
bool _quit;
FramebufferPtr _fbo;
RenderbufferPtr _depthStencil;
uvec2 _size{ 1920, 1080 };
uint64_t _lastRenderTime{ 0 };
TextureRecycler _textures;
GLTextureEscrow _escrow;
};
OffscreenQmlSurface::OffscreenQmlSurface() {
}
OffscreenQmlSurface::~OffscreenQmlSurface() {
    // Make sure the context is current while doing cleanup. Note that we use the
    // offscreen surface here because passing 'this' at this point is not safe: the
    // underlying platform window may already be destroyed. To avoid all the trouble, use
    // another surface that is valid for sure.
    makeCurrent();
    // Delete the render control first since it will free the scenegraph resources.
    // Destroy the QQuickWindow only afterwards.
    delete _renderControl;
    // NOTE(review): merge leftovers -- this destructor deletes both the
    // pre-refactor members (_renderControl, _quickWindow, _fboCache) and the
    // new _renderer; after the renderer refactor the old deletes (and the
    // makeCurrent/doneCurrent calls on the surface) should likely be removed.
    _renderer->stop();
    delete _renderer;
    delete _qmlComponent;
    delete _quickWindow;
    delete _qmlEngine;
    doneCurrent();
    delete _fboCache;
}
void OffscreenQmlSurface::create(QOpenGLContext* shareContext) {
OffscreenGlCanvas::create(shareContext);
_renderer = new OffscreenQmlRenderer(this, shareContext);
makeCurrent();
// Create a QQuickWindow that is associated with out render control. Note that this
// window never gets created or shown, meaning that it will never get an underlying
// native (platform) window.
QQuickWindow::setDefaultAlphaBuffer(true);
_quickWindow = new QQuickWindow(_renderControl);
_quickWindow->setColor(QColor(255, 255, 255, 0));
_quickWindow->setFlags(_quickWindow->flags() | static_cast<Qt::WindowFlags>(Qt::WA_TranslucentBackground));
// Create a QML engine.
_qmlEngine = new QQmlEngine;
if (!_qmlEngine->incubationController()) {
_qmlEngine->setIncubationController(_quickWindow->incubationController());
_qmlEngine->setIncubationController(_renderer->_quickWindow->incubationController());
}
// When Quick says there is a need to render, we will not render immediately. Instead,
// a timer with a small interval is used to get better performance.
_updateTimer.setSingleShot(true);
_updateTimer.setInterval(MIN_TIMER_MS);
connect(&_updateTimer, &QTimer::timeout, this, &OffscreenQmlSurface::updateQuick);
// Now hook up the signals. For simplicy we don't differentiate between
// renderRequested (only render is needed, no sync) and sceneChanged (polish and sync
// is needed too).
connect(_renderControl, &QQuickRenderControl::renderRequested, this, &OffscreenQmlSurface::requestRender);
connect(_renderControl, &QQuickRenderControl::sceneChanged, this, &OffscreenQmlSurface::requestUpdate);
#ifdef DEBUG
connect(_quickWindow, &QQuickWindow::focusObjectChanged, [this]{
qCDebug(offscreenFocus) << "New focus item " << _quickWindow->focusObject();
});
connect(_quickWindow, &QQuickWindow::activeFocusItemChanged, [this] {
qCDebug(offscreenFocus) << "New active focus item " << _quickWindow->activeFocusItem();
});
#endif
_updateTimer.start();
_qmlComponent = new QQmlComponent(_qmlEngine);
// Initialize the render control and our OpenGL resources.
makeCurrent();
_renderControl->initialize(_context);
}
void OffscreenQmlSurface::resize(const QSize& newSize) {
// Qt bug in 5.4 forces this check of pixel ratio,
// even though we're rendering offscreen.
qreal pixelRatio = 1.0;
#ifdef QML_THREADED
QMutexLocker _locker(&(_renderer->_mutex));
#endif
if (!_renderer || !_renderer->_quickWindow) {
QSize currentSize = _renderer->_quickWindow->geometry().size();
if (newSize == currentSize) {
return;
}
}
_qmlEngine->rootContext()->setContextProperty("surfaceSize", newSize);
if (_renderControl && _renderControl->_renderWindow) {
pixelRatio = _renderControl->_renderWindow->devicePixelRatio();
} else {
pixelRatio = AbstractViewStateInterface::instance()->getDevicePixelRatio();
}
QSize newOffscreenSize = newSize * pixelRatio;
if (newOffscreenSize == _fboCache->getSize()) {
return;
}
// Clear out any fbos with the old size
makeCurrent();
qDebug() << "Offscreen UI resizing to " << newSize.width() << "x" << newSize.height() << " with pixel ratio " << pixelRatio;
_fboCache->setSize(newSize * pixelRatio);
if (_quickWindow) {
_quickWindow->setGeometry(QRect(QPoint(), newSize));
_quickWindow->contentItem()->setSize(newSize);
}
// Update our members
if (_rootItem) {
_rootItem->setSize(newSize);
}
doneCurrent();
#ifdef QML_THREADED
_renderer->post(RESIZE);
#else
_renderer->resize(newSize);
#endif
}
QQuickItem* OffscreenQmlSurface::getRootItem() {
@ -173,20 +391,11 @@ QObject* OffscreenQmlSurface::load(const QUrl& qmlSource, std::function<void(QQm
void OffscreenQmlSurface::requestUpdate() {
_polish = true;
requestRender();
_render = true;
}
void OffscreenQmlSurface::requestRender() {
if (!_updateTimer.isActive()) {
auto now = usecTimestampNow();
auto lastInterval = now - _lastRenderTime;
if (lastInterval > MIN_RENDER_INTERVAL_US) {
_updateTimer.setInterval(MIN_TIMER_MS);
} else {
_updateTimer.setInterval((MIN_RENDER_INTERVAL_US - lastInterval) / USECS_PER_MSEC);
}
_updateTimer.start();
}
_render = true;
}
QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QObject*)> f) {
@ -240,54 +449,38 @@ QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QOb
}
// The root item is ready. Associate it with the window.
_rootItem = newItem;
_rootItem->setParentItem(_quickWindow->contentItem());
_rootItem->setSize(_quickWindow->renderTargetSize());
_rootItem->setParentItem(_renderer->_quickWindow->contentItem());
_rootItem->setSize(_renderer->_quickWindow->renderTargetSize());
return _rootItem;
}
void OffscreenQmlSurface::updateQuick() {
PerformanceTimer perfTimer("qmlUpdate");
if (_paused) {
return;
}
if (!makeCurrent()) {
if (!_renderer || !_renderer->allowNewFrame(_maxFps)) {
return;
}
// Polish, synchronize and render the next frame (into our fbo). In this example
// everything happens on the same thread and therefore all three steps are performed
// in succession from here. In a threaded setup the render() call would happen on a
// separate thread.
if (_polish) {
_renderControl->polishItems();
_renderControl->sync();
_renderer->_renderControl->polishItems();
_polish = false;
}
QOpenGLFramebufferObject* fbo = _fboCache->getReadyFbo();
if (_render) {
#ifdef QML_THREADED
_renderer->post(RENDER);
#else
_renderer->render(nullptr);
#endif
_render = false;
}
_quickWindow->setRenderTarget(fbo);
fbo->bind();
glClearColor(0, 0, 0, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
_renderControl->render();
// FIXME The web browsers seem to be leaving GL in an error state.
// Need a debug context with sync logging to figure out why.
// for now just clear the errors
glGetError();
_quickWindow->resetOpenGLState();
QOpenGLFramebufferObject::bindDefault();
_lastRenderTime = usecTimestampNow();
// Force completion of all the operations before we emit the texture as being ready for use
glFinish();
emit textureUpdated(fbo->texture());
GLuint newTexture = _renderer->_escrow.fetch();
if (newTexture) {
if (_currentTexture) {
_renderer->_escrow.release(_currentTexture);
}
_currentTexture = newTexture;
emit textureUpdated(_currentTexture);
}
}
QPointF OffscreenQmlSurface::mapWindowToUi(const QPointF& sourcePosition, QObject* sourceObject) {
@ -299,7 +492,7 @@ QPointF OffscreenQmlSurface::mapWindowToUi(const QPointF& sourcePosition, QObjec
}
vec2 offscreenPosition = toGlm(sourcePosition);
offscreenPosition /= sourceSize;
offscreenPosition *= vec2(toGlm(_quickWindow->size()));
offscreenPosition *= vec2(toGlm(_renderer->_quickWindow->size()));
return QPointF(offscreenPosition.x, offscreenPosition.y);
}
@ -309,7 +502,7 @@ QPointF OffscreenQmlSurface::mapWindowToUi(const QPointF& sourcePosition, QObjec
//
bool OffscreenQmlSurface::eventFilter(QObject* originalDestination, QEvent* event) {
if (_quickWindow == originalDestination) {
if (_renderer->_quickWindow == originalDestination) {
return false;
}
// Only intercept events while we're in an active state
@ -321,7 +514,7 @@ bool OffscreenQmlSurface::eventFilter(QObject* originalDestination, QEvent* even
// Don't intercept our own events, or we enter an infinite recursion
QObject* recurseTest = originalDestination;
while (recurseTest) {
Q_ASSERT(recurseTest != _rootItem && recurseTest != _quickWindow);
Q_ASSERT(recurseTest != _rootItem && recurseTest != _renderer->_quickWindow);
recurseTest = recurseTest->parent();
}
#endif
@ -330,7 +523,7 @@ bool OffscreenQmlSurface::eventFilter(QObject* originalDestination, QEvent* even
switch (event->type()) {
case QEvent::Resize: {
QResizeEvent* resizeEvent = static_cast<QResizeEvent*>(event);
QGLWidget* widget = dynamic_cast<QGLWidget*>(originalDestination);
QWidget* widget = dynamic_cast<QWidget*>(originalDestination);
if (widget) {
this->resize(resizeEvent->size());
}
@ -340,7 +533,7 @@ bool OffscreenQmlSurface::eventFilter(QObject* originalDestination, QEvent* even
case QEvent::KeyPress:
case QEvent::KeyRelease: {
event->ignore();
if (QCoreApplication::sendEvent(_quickWindow, event)) {
if (QCoreApplication::sendEvent(_renderer->_quickWindow, event)) {
return event->isAccepted();
}
break;
@ -353,7 +546,7 @@ bool OffscreenQmlSurface::eventFilter(QObject* originalDestination, QEvent* even
wheelEvent->delta(), wheelEvent->buttons(),
wheelEvent->modifiers(), wheelEvent->orientation());
mappedEvent.ignore();
if (QCoreApplication::sendEvent(_quickWindow, &mappedEvent)) {
if (QCoreApplication::sendEvent(_renderer->_quickWindow, &mappedEvent)) {
return mappedEvent.isAccepted();
}
break;
@ -376,7 +569,7 @@ bool OffscreenQmlSurface::eventFilter(QObject* originalDestination, QEvent* even
_qmlEngine->rootContext()->setContextProperty("lastMousePosition", transformedPos);
}
mappedEvent.ignore();
if (QCoreApplication::sendEvent(_quickWindow, &mappedEvent)) {
if (QCoreApplication::sendEvent(_renderer->_quickWindow, &mappedEvent)) {
return mappedEvent.isAccepted();
}
break;
@ -389,14 +582,6 @@ bool OffscreenQmlSurface::eventFilter(QObject* originalDestination, QEvent* even
return false;
}
void OffscreenQmlSurface::lockTexture(int texture) {
_fboCache->lockTexture(texture);
}
void OffscreenQmlSurface::releaseTexture(int texture) {
_fboCache->releaseTexture(texture);
}
void OffscreenQmlSurface::pause() {
_paused = true;
}
@ -411,13 +596,17 @@ bool OffscreenQmlSurface::isPaused() const {
}
void OffscreenQmlSurface::setProxyWindow(QWindow* window) {
_renderControl->_renderWindow = window;
_renderer->_renderControl->_renderWindow = window;
}
QObject* OffscreenQmlSurface::getEventHandler() {
return getWindow();
}
QQuickWindow* OffscreenQmlSurface::getWindow() {
return _quickWindow;
return _renderer->_quickWindow;
}
QSize OffscreenQmlSurface::size() const {
return _quickWindow->geometry().size();
return _renderer->_quickWindow->geometry().size();
}

View file

@ -17,18 +17,18 @@
#include <GLMHelpers.h>
#include <ThreadHelpers.h>
#include "OffscreenGlCanvas.h"
class QWindow;
class QMyQuickRenderControl;
class QOpenGLContext;
class QQmlEngine;
class QQmlContext;
class QQmlComponent;
class QQuickWindow;
class QQuickItem;
class FboCache;
class OffscreenQmlSurface : public OffscreenGlCanvas {
class OffscreenQmlRenderer;
class OffscreenQmlSurface : public QObject {
Q_OBJECT
public:
@ -45,6 +45,7 @@ public:
return load(QUrl(qmlSourceFile), f);
}
void setMaxFps(uint8_t maxFps) { _maxFps = maxFps; }
// Optional values for event handling
void setProxyWindow(QWindow* window);
void setMouseTranslator(MouseTranslator mouseTranslator) {
@ -58,6 +59,7 @@ public:
void setBaseUrl(const QUrl& baseUrl);
QQuickItem* getRootItem();
QQuickWindow* getWindow();
QObject* getEventHandler();
virtual bool eventFilter(QObject* originalDestination, QEvent* event);
@ -67,8 +69,6 @@ signals:
public slots:
void requestUpdate();
void requestRender();
void lockTexture(int texture);
void releaseTexture(int texture);
private:
QObject* finishQmlLoad(std::function<void(QQmlContext*, QObject*)> f);
@ -77,20 +77,20 @@ private:
private slots:
void updateQuick();
protected:
QQuickWindow* _quickWindow{ nullptr };
private:
QMyQuickRenderControl* _renderControl{ nullptr };
friend class OffscreenQmlRenderer;
OffscreenQmlRenderer* _renderer{ nullptr };
QQmlEngine* _qmlEngine{ nullptr };
QQmlComponent* _qmlComponent{ nullptr };
QQuickItem* _rootItem{ nullptr };
QTimer _updateTimer;
FboCache* _fboCache;
quint64 _lastRenderTime{ 0 };
uint32_t _currentTexture{ 0 };
bool _render{ false };
bool _polish{ true };
bool _paused{ true };
uint8_t _maxFps{ 60 };
MouseTranslator _mouseTranslator{ [](const QPointF& p) { return p; } };
};
#endif

View file

@ -7,6 +7,7 @@
//
#include "OglplusHelpers.h"
#include <QSharedPointer>
#include <set>
using namespace oglplus;
using namespace oglplus::shapes;
@ -317,3 +318,73 @@ ShapeWrapperPtr loadSphereSection(ProgramPtr program, float fov, float aspect, i
new shapes::ShapeWrapper({ "Position", "TexCoord" }, SphereSection(fov, aspect, slices, stacks), *program)
);
}
void TextureRecycler::setSize(const uvec2& size) {
if (size == _size) {
return;
}
_size = size;
while (!_readyTextures.empty()) {
_readyTextures.pop();
}
std::set<Map::key_type> toDelete;
std::for_each(_allTextures.begin(), _allTextures.end(), [&](Map::const_reference item) {
if (!item.second._active && item.second._size != _size) {
toDelete.insert(item.first);
}
});
std::for_each(toDelete.begin(), toDelete.end(), [&](Map::key_type key) {
_allTextures.erase(key);
});
}
void TextureRecycler::clear() {
while (!_readyTextures.empty()) {
_readyTextures.pop();
}
_allTextures.clear();
}
// Returns a texture of the current pool size, allocating a fresh one when
// the ready queue is empty.  The returned texture is marked active until it
// is handed back via recycleTexture().  Requires a current GL context.
TexturePtr TextureRecycler::getNextTexture() {
    using namespace oglplus;
    if (_readyTextures.empty()) {
        TexturePtr newTexture(new Texture());
        Context::Bound(oglplus::Texture::Target::_2D, *newTexture)
            .MinFilter(TextureMinFilter::Linear)
            .MagFilter(TextureMagFilter::Linear)
            .WrapS(TextureWrap::ClampToEdge)
            .WrapT(TextureWrap::ClampToEdge)
            // NOTE(review): internal format is RGBA8 but the upload format is
            // RGB; harmless since data is nullptr, though RGBA would match.
            .Image2D(
            0, PixelDataInternalFormat::RGBA8,
            _size.x, _size.y,
            0, PixelDataFormat::RGB, PixelDataType::UnsignedByte, nullptr
            );
        GLuint texId = GetName(*newTexture);
        _allTextures[texId] = TexInfo{ newTexture, _size };
        _readyTextures.push(newTexture);
    }
    TexturePtr result = _readyTextures.front();
    _readyTextures.pop();
    GLuint texId = GetName(*result);
    // Mark as handed out so setSize() won't evict it while in flight.
    auto& item = _allTextures[texId];
    item._active = true;
    return result;
}
// Return a previously handed-out texture to the pool.  Textures whose size
// no longer matches the pool (after a setSize) are destroyed instead of
// being re-queued.
void TextureRecycler::recycleTexture(GLuint texture) {
    Q_ASSERT(_allTextures.count(texture));
    auto& entry = _allTextures[texture];
    Q_ASSERT(entry._active);
    entry._active = false;
    if (entry._size == _size) {
        // Still the right dimensions: make it available again.
        _readyTextures.push(entry._tex);
        return;
    }
    // Stale size -- drop it entirely.
    _allTextures.erase(texture);
}

View file

@ -10,6 +10,10 @@
// FIXME support oglplus on all platforms
// For now it's a convenient helper for Windows
#include <queue>
#include <map>
#include <QtGlobal>
#include "GLMHelpers.h"
@ -33,6 +37,8 @@
#include "NumericalConstants.h"
using FramebufferPtr = std::shared_ptr<oglplus::Framebuffer>;
using RenderbufferPtr = std::shared_ptr<oglplus::Renderbuffer>;
using TexturePtr = std::shared_ptr<oglplus::Texture>;
using ShapeWrapperPtr = std::shared_ptr<oglplus::shapes::ShapeWrapper>;
using BufferPtr = std::shared_ptr<oglplus::Buffer>;
using VertexArrayPtr = std::shared_ptr<oglplus::VertexArray>;
@ -151,3 +157,29 @@ protected:
};
using BasicFramebufferWrapperPtr = std::shared_ptr<BasicFramebufferWrapper>;
class TextureRecycler {
public:
void setSize(const uvec2& size);
void clear();
TexturePtr getNextTexture();
void recycleTexture(GLuint texture);
private:
struct TexInfo {
TexturePtr _tex;
uvec2 _size;
bool _active{ false };
TexInfo() {}
TexInfo(TexturePtr tex, const uvec2& size) : _tex(tex), _size(size) {}
};
using Map = std::map<GLuint, TexInfo>;
using Queue = std::queue<TexturePtr>;
Map _allTextures;
Queue _readyTextures;
uvec2 _size{ 1920, 1080 };
};

View file

@ -38,8 +38,8 @@ public:
// so I think it's OK for the time being.
bool OffscreenUi::shouldSwallowShortcut(QEvent* event) {
Q_ASSERT(event->type() == QEvent::ShortcutOverride);
QObject* focusObject = _quickWindow->focusObject();
if (focusObject != _quickWindow && focusObject != getRootItem()) {
QObject* focusObject = getWindow()->focusObject();
if (focusObject != getWindow() && focusObject != getRootItem()) {
//qDebug() << "Swallowed shortcut " << static_cast<QKeyEvent*>(event)->key();
event->accept();
return true;

View file

@ -12,7 +12,7 @@ foreach(DIR ${TEST_SUBDIRS})
endif()
endforeach()
file(GLOB SHARED_TEST_HEADER_FILES "${CMAKE_CURRENT_SOURCE_DIR}/*.h" "${CMAKE_CURRENT_SOURCE_DIR}/*.hpp")
file(GLOB SHARED_TEST_HEADER_FILES "${CMAKE_CURRENT_SOURCE_DIR}/*.h " "${CMAKE_CURRENT_SOURCE_DIR}/*.hpp")
add_custom_target("test-extensions"
SOURCES "${SHARED_TEST_HEADER_FILES}")
@ -34,4 +34,4 @@ add_custom_target("all-tests"
set_target_properties("all-tests" PROPERTIES FOLDER "hidden/test-targets")
set_target_properties("all-tests" PROPERTIES
EXCLUDE_FROM_DEFAULT_BUILD TRUE
EXCLUDE_FROM_ALL TRUE)
EXCLUDE_FROM_ALL TRUE)

66
tests/GLMTestUtils.h Normal file
View file

@ -0,0 +1,66 @@
//
// GLMTestUtils.h
// tests/physics/src
//
// Created by Seiji Emery on 6/22/15
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_GLMTestUtils_h
#define hifi_GLMTestUtils_h
#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>
#include <QTextStream>
// Implements functionality in QTestExtensions.h for glm types
// Computes the error between two quaternions: how far |dot(a, b)| falls
// short of 1 (0 when the rotations are equivalent, growing with divergence).
// The previous form, fabsf(glm::dot(a, b)) - 1.0f, was never positive for
// unit quaternions, so any "error < epsilon" check trivially passed.
// inline: defined in a header shared by multiple test targets (ODR).
inline float getErrorDifference(const glm::quat& a, const glm::quat& b) {
    return 1.0f - fabsf(glm::dot(a, b));
}
inline float getErrorDifference(const glm::vec3& a, const glm::vec3& b) {
return glm::distance(a, b);
}
// Return the largest absolute per-element difference between two matrices;
// used to implement QCOMPARE_WITH_ABS_ERROR for glm::mat4.
inline float getErrorDifference(const glm::mat4& a, const glm::mat4& b) {
    float maxDiff = 0.0f;
    for (int i = 0; i < 4; ++i) {
        for (int j = 0; j < 4; ++j) {
            // fabsf keeps the computation in float; fabs promotes to double.
            float diff = fabsf(a[i][j] - b[i][j]);
            maxDiff = std::max(diff, maxDiff);
        }
    }
    return maxDiff;
}
inline QTextStream& operator<<(QTextStream& stream, const glm::vec3& v) {
return stream << "glm::vec3 { " << v.x << ", " << v.y << ", " << v.z << " }";
}
// Pretty-printer so QTest failure messages can display quaternion values.
// inline: this header is included by multiple test TUs, and the non-inline
// definition (unlike the vec3 overload above) would violate the ODR.
inline QTextStream& operator<<(QTextStream& stream, const glm::quat& q) {
    return stream << "glm::quat { " << q.x << ", " << q.y << ", " << q.z << ", " << q.w << " }";
}
inline QTextStream& operator<< (QTextStream& stream, const glm::mat4& matrix) {
stream << "[\n\t\t";
stream.setFieldWidth(15);
for (int r = 0; r < 4; ++r) {
for (int c = 0; c < 4; ++c) {
stream << matrix[c][r];
}
stream << "\n\t\t";
}
stream.setFieldWidth(0);
stream << "]\n\t"; // hacky as hell, but this should work...
return stream;
}
#define QCOMPARE_QUATS(rotationA, rotationB, angle) \
QVERIFY(fabsf(1.0f - fabsf(glm::dot(rotationA, rotationB))) < 2.0f * sinf(angle))
#endif

View file

@ -15,6 +15,8 @@
#include <QtTest/QtTest>
#include <functional>
#include "GLMTestUtils.h"
// Implements several extensions to QtTest.
//
// Problems with QtTest:
@ -35,6 +37,10 @@
//
// Return the absolute error between a and b; used to implement
// QCOMPARE_WITH_ABS_ERROR for plain floats.
// inline: this function lives in a header included by many test binaries;
// without it, linking two TUs that include this header is an ODR violation.
inline float getErrorDifference(const float& a, const float& b) {
    return fabsf(a - b);
}
// Generates a QCOMPARE-style failure message that can be passed to QTest::qFail.
//
// Formatting looks like this:
@ -279,3 +285,20 @@ bool compareData (const char* data, const char* expectedData, size_t length) {
#define COMPARE_DATA(actual, expected, length) \
QCOMPARE_WITH_EXPR((ByteData ( actual, length )), (ByteData ( expected, length )), compareData(actual, expected, length))
// Produces a relative error test for float usable QCOMPARE_WITH_LAMBDA.
// Falls back to an absolute comparison when |expected| is itself within
// the tolerance (avoids dividing by a near-zero expected value).
inline auto errorTest (float actual, float expected, float acceptableRelativeError)
    -> std::function<bool ()> {
    return [=] () {
        const float tolerance = fabsf(acceptableRelativeError);
        if (fabsf(expected) <= acceptableRelativeError) {
            return fabsf(actual - expected) < tolerance;
        }
        const float relativeError = fabsf((actual - expected) / expected);
        return relativeError < tolerance;
    };
}
#define QCOMPARE_WITH_RELATIVE_ERROR(actual, expected, relativeError) \
QCOMPARE_WITH_LAMBDA(actual, expected, errorTest(actual, expected, relativeError))

View file

@ -16,34 +16,8 @@
#include <NumericalConstants.h>
#include <SwingTwistConstraint.h>
// HACK -- these helper functions need to be defined BEFORE including magic inside QTestExtensions.h
// TODO: fix QTestExtensions so we don't need to do this in every test.
// Computes the error value between two quaternions (using glm::dot)
float getErrorDifference(const glm::quat& a, const glm::quat& b) {
return fabsf(glm::dot(a, b)) - 1.0f;
}
QTextStream& operator<<(QTextStream& stream, const glm::quat& q) {
return stream << "glm::quat { " << q.x << ", " << q.y << ", " << q.z << ", " << q.w << " }";
}
// Produces a relative error test for float usable QCOMPARE_WITH_LAMBDA.
inline auto errorTest (float actual, float expected, float acceptableRelativeError)
-> std::function<bool ()> {
return [actual, expected, acceptableRelativeError] () {
if (fabsf(expected) <= acceptableRelativeError) {
return fabsf(actual - expected) < fabsf(acceptableRelativeError);
}
return fabsf((actual - expected) / expected) < fabsf(acceptableRelativeError);
};
}
#include "../QTestExtensions.h"
#define QCOMPARE_WITH_RELATIVE_ERROR(actual, expected, relativeError) \
QCOMPARE_WITH_LAMBDA(actual, expected, errorTest(actual, expected, relativeError))
QTEST_MAIN(RotationConstraintTests)

View file

@ -23,10 +23,6 @@
// (used by QCOMPARE_WITH_RELATIVE_ERROR via QCOMPARE_WITH_LAMBDA)
// (this is only used by btMatrix3x3 in MeshMassPropertiesTests.cpp, so it's only defined for the Mat3 type)
// Return the error between values a and b; used to implement QCOMPARE_WITH_ABS_ERROR
inline btScalar getErrorDifference(const btScalar& a, const btScalar& b) {
return fabs(a - b);
}
// Return the error between values a and b; used to implement QCOMPARE_WITH_ABS_ERROR
inline btScalar getErrorDifference(const btVector3& a, const btVector3& b) {
return (a - b).length();

View file

@ -17,7 +17,6 @@
#include <NumericalConstants.h>
// Add additional qtest functionality (the include order is important!)
#include "GlmTestUtils.h"
#include "../QTestExtensions.h"
// Constants

View file

@ -1,27 +0,0 @@
//
// GlmTestUtils.h
// tests/physics/src
//
// Created by Seiji Emery on 6/22/15
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_GlmTestUtils_h
#define hifi_GlmTestUtils_h
#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>
// Implements functionality in QTestExtensions.h for glm types
inline float getErrorDifference(const glm::vec3& a, const glm::vec3& b) {
return glm::distance(a, b);
}
inline QTextStream& operator<<(QTextStream& stream, const glm::vec3& v) {
return stream << "glm::vec3 { " << v.x << ", " << v.y << ", " << v.z << " }";
}
#endif

View file

@ -16,7 +16,6 @@
// Add additional qtest functionality (the include order is important!)
#include "BulletTestUtils.h"
#include "GlmTestUtils.h"
#include "../QTestExtensions.h"
const btScalar acceptableRelativeError(1.0e-5f);

View file

@ -20,6 +20,8 @@
#include <QDir>
#include <QGuiApplication>
#include <GLHelpers.h>
#include "../model/Skybox_vert.h"
#include "../model/Skybox_frag.h"
@ -112,85 +114,22 @@
#include "paintStroke_vert.h"
#include "paintStroke_frag.h"
// Accumulates event timestamps to report an average rate (events/second).
class RateCounter {
    std::vector<float> times;   // timestamps in seconds since construction
    QElapsedTimer timer;
public:
    RateCounter() {
        timer.start();
    }

    // Forget all recorded samples (the underlying timer keeps running).
    void reset() {
        times.clear();
    }

    // Number of intervals recorded (samples - 1).
    // Fixed: the previous `times.size() - 1` underflowed to a huge unsigned
    // value when no samples had been recorded yet.
    unsigned int count() const {
        return times.empty() ? 0 : (unsigned int)(times.size() - 1);
    }

    // Seconds spanned between the first and last recorded sample
    // (0 when fewer than two samples exist).
    float elapsed() const {
        if (times.size() < 1) {
            return 0.0f;
        }
        float elapsed = *times.rbegin() - *times.begin();
        return elapsed;
    }

    // Record "now" as a sample.
    void increment() {
        times.push_back(timer.elapsed() / 1000.0f);
    }

    // Average samples per second across the recorded span; 0 if degenerate.
    float rate() const {
        if (elapsed() == 0.0f) {
            return 0.0f;
        }
        return (float) count() / elapsed();
    }
};
// Lazily computes (and caches for the process lifetime) the absolute path
// to the interface QML resources, derived from this source file's location.
const QString& getQmlDir() {
    static QString dir;
    if (dir.isEmpty()) {
        QDir sourceDir(__FILE__);
        sourceDir.cdUp();
        dir = QDir::cleanPath(sourceDir.absoluteFilePath("../../../interface/resources/qml/")) + "/";
        qDebug() << "Qml Path: " << dir;
    }
    return dir;
}
#include "polyvox_vert.h"
#include "polyvox_frag.h"
// Create a simple OpenGL window that renders text in various ways
class QTestWindow : public QWindow {
Q_OBJECT
QOpenGLContext* _context{ nullptr };
QSize _size;
//TextRenderer* _textRenderer[4];
RateCounter fps;
protected:
void renderText();
private:
void resizeWindow(const QSize& size) {
_size = size;
}
public:
QTestWindow() {
setSurfaceType(QSurface::OpenGLSurface);
QSurfaceFormat format;
// Qt Quick may need a depth and stencil buffer. Always make sure these are available.
format.setDepthBufferSize(16);
format.setStencilBufferSize(8);
format.setVersion(4, 1);
format.setProfile(QSurfaceFormat::OpenGLContextProfile::CoreProfile);
format.setOption(QSurfaceFormat::DebugContext);
QSurfaceFormat format = getDefaultOpenGlSurfaceFormat();
setFormat(format);
_context = new QOpenGLContext;
_context->setFormat(format);
_context->create();
@ -199,8 +138,6 @@ public:
makeCurrent();
gpu::Context::init<gpu::GLBackend>();
{
QOpenGLDebugLogger* logger = new QOpenGLDebugLogger(this);
logger->initialize(); // initializes in the current context, i.e. ctx
@ -208,25 +145,8 @@ public:
connect(logger, &QOpenGLDebugLogger::messageLogged, this, [&](const QOpenGLDebugMessage & debugMessage) {
qDebug() << debugMessage;
});
// logger->startLogging(QOpenGLDebugLogger::SynchronousLogging);
}
qDebug() << (const char*)glGetString(GL_VERSION);
//_textRenderer[0] = TextRenderer::getInstance(SANS_FONT_FAMILY, 12, false);
//_textRenderer[1] = TextRenderer::getInstance(SERIF_FONT_FAMILY, 12, false,
// TextRenderer::SHADOW_EFFECT);
//_textRenderer[2] = TextRenderer::getInstance(MONO_FONT_FAMILY, 48, -1,
// false, TextRenderer::OUTLINE_EFFECT);
//_textRenderer[3] = TextRenderer::getInstance(INCONSOLATA_FONT_FAMILY, 24);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glClearColor(0.2f, 0.2f, 0.2f, 1);
glDisable(GL_DEPTH_TEST);
makeCurrent();
// setFramePosition(QPoint(-1000, 0));
resize(QSize(800, 600));
}
@ -237,18 +157,15 @@ public:
void makeCurrent() {
_context->makeCurrent(this);
}
protected:
void resizeEvent(QResizeEvent* ev) override {
resizeWindow(ev->size());
}
};
// Compiles and links the given vertex/fragment shader sources as a GPU
// program, throwing std::runtime_error on failure.
void testShaderBuild(const char* vs_src, const char * fs_src) {
    auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(vs_src)));
    auto fs = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(fs_src)));
    auto pr = gpu::ShaderPointer(gpu::Shader::createProgram(vs, fs));
    // NOTE(review): makeProgram is invoked twice here -- the unchecked call
    // below looks like a merge leftover of the line replaced by the checked
    // call; verify and remove one of them.
    gpu::Shader::makeProgram(*pr);
    if (!gpu::Shader::makeProgram(*pr)) {
        throw std::runtime_error("Failed to compile shader");
    }
}
void QTestWindow::draw() {
@ -257,8 +174,8 @@ void QTestWindow::draw() {
}
makeCurrent();
glClearColor(0.2f, 0.2f, 0.2f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glViewport(0, 0, _size.width() * devicePixelRatio(), _size.height() * devicePixelRatio());
static std::once_flag once;
std::call_once(once, [&]{
@ -328,17 +245,10 @@ void QTestWindow::draw() {
testShaderBuild(Skybox_vert, Skybox_frag);
testShaderBuild(paintStroke_vert,paintStroke_frag);
testShaderBuild(polyvox_vert, polyvox_frag);
});
_context->swapBuffers(this);
glFinish();
fps.increment();
if (fps.elapsed() >= 2.0f) {
qDebug() << "FPS: " << fps.rate();
fps.reset();
}
}
void messageHandler(QtMsgType type, const QMessageLogContext& context, const QString& message) {
@ -352,7 +262,6 @@ void messageHandler(QtMsgType type, const QMessageLogContext& context, const QSt
}
}
// Qt logging filter rules (see QLoggingCategory::setFilterRules): enables
// the "hifi.gpu" logging category for this test harness.
const char * LOG_FILTER_RULES = R"V0G0N(
hifi.gpu=true
)V0G0N";

View file

@ -9,22 +9,16 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AngularConstraintTests.h"
#include <iostream>
#include <AngularConstraint.h>
#include <NumericalConstants.h>
#include <StreamUtils.h>
#include "AngularConstraintTests.h"
#include "../QTestExtensions.h"
// Computes the error value between two quaternions (using glm::dot)
// Error metric between two quaternions: distance of their dot product from 1,
// so identical (aligned) rotations yield 0.
float getErrorDifference(const glm::quat& a, const glm::quat& b) {
const float alignment = glm::dot(a, b);
return fabsf(alignment - 1.0f);
}
// Streams a quaternion in a human-readable "glm::quat { x, y, z, w }" form
// for test diagnostics.
QTextStream& operator<<(QTextStream& stream, const glm::quat& q) {
stream << "glm::quat { " << q.x;
stream << ", " << q.y;
stream << ", " << q.z;
stream << ", " << q.w << " }";
return stream;
}
QTEST_MAIN(AngularConstraintTests)

View file

@ -22,13 +22,6 @@
QTEST_MAIN(GeometryUtilTests)
// Error metric for scalars: the absolute difference between the two values.
float getErrorDifference(const float& a, const float& b) {
const float delta = a - b;
return fabsf(delta);
}
// Error metric for vectors: Euclidean distance, computed as the length of
// the difference vector (identical to glm::distance(a, b)).
float getErrorDifference(const glm::vec3& a, const glm::vec3& b) {
return glm::length(a - b);
}
void GeometryUtilTests::testLocalRayRectangleIntersection() {
glm::vec3 xAxis(1.0f, 0.0f, 0.0f);

View file

@ -13,7 +13,12 @@
#include <qqueue.h>
#include <MovingMinMaxAvg.h>
#include <NumericalConstants.h>
#include <SharedUtil.h>
#include "../QTestExtensions.h"
QTEST_MAIN(MovingMinMaxAvgTests)

View file

@ -14,15 +14,6 @@
#include <QtTest/QtTest>
// Absolute difference between two floats, used as the comparison error metric.
inline float getErrorDifference(float a, float b) {
return std::fabs(a - b);
}
#include "../QTestExtensions.h"
#include "MovingMinMaxAvg.h"
#include "SharedUtil.h"
class MovingMinMaxAvgTests : public QObject {
private slots:

View file

@ -10,10 +10,13 @@
#include "TransformTests.h"
#include <algorithm>
#include <glm/glm.hpp>
#include <SharedLogging.h>
#include <Transform.h>
#include "SharedLogging.h"
#include <../QTestExtensions.h>
#include "../QTestExtensions.h"
using namespace glm;

View file

@ -12,33 +12,6 @@
#define hifi_TransformTests_h
#include <QtTest/QtTest>
#include <glm/glm.hpp>
#include <algorithm>
// Error metric for matrices: the maximum absolute element-wise difference.
// Uses fabsf (float overload) instead of fabs to avoid a needless
// float->double->float round-trip and to match the other getErrorDifference
// overloads in these tests.
inline float getErrorDifference(const glm::mat4& a, const glm::mat4& b) {
float maxDiff = 0.0f;
for (int i = 0; i < 4; ++i) {
for (int j = 0; j < 4; ++j) {
float diff = fabsf(a[i][j] - b[i][j]);
maxDiff = std::max(diff, maxDiff);
}
}
return maxDiff;
}
// Pretty-prints a 4x4 matrix row-by-row for test failure diagnostics.
// glm matrices are indexed column-first, so matrix[col][row] walks a row.
inline QTextStream& operator<< (QTextStream& stream, const glm::mat4& matrix) {
stream << "[\n\t\t";
stream.setFieldWidth(15);
for (int row = 0; row < 4; ++row) {
for (int col = 0; col < 4; ++col) {
stream << matrix[col][row];
}
stream << "\n\t\t";
}
stream.setFieldWidth(0);
stream << "]\n\t";
return stream;
}
class TransformTests : public QObject {
Q_OBJECT

View file

@ -187,13 +187,7 @@ public:
auto offscreenUi = DependencyManager::get<OffscreenUi>();
offscreenUi->create(_context);
connect(offscreenUi.data(), &OffscreenUi::textureUpdated, this, [this, offscreenUi](int textureId) {
offscreenUi->lockTexture(textureId);
assert(!glGetError());
GLuint oldTexture = testQmlTexture;
testQmlTexture = textureId;
if (oldTexture) {
offscreenUi->releaseTexture(oldTexture);
}
});
makeCurrent();