Merge branch 'master' of https://github.com/highfidelity/hifi into audio-output-buffer

howard-stearns 2016-02-18 11:35:41 -08:00
commit 30ae1950d2
49 changed files with 1278 additions and 1407 deletions

View file

@ -478,6 +478,25 @@ function MyController(hand) {
}
};
this.searchIndicatorOn = function(handPosition, distantPickRay) {
var SEARCH_SPHERE_SIZE = 0.011;
var SEARCH_SPHERE_FOLLOW_RATE = 0.50;
if (this.intersectionDistance > 0) {
// If we hit something with our pick ray, move the search sphere toward that distance
this.searchSphereDistance = this.searchSphereDistance * SEARCH_SPHERE_FOLLOW_RATE +
this.intersectionDistance * (1.0 - SEARCH_SPHERE_FOLLOW_RATE);
}
var searchSphereLocation = Vec3.sum(distantPickRay.origin,
Vec3.multiply(distantPickRay.direction, this.searchSphereDistance));
this.searchSphereOn(searchSphereLocation, SEARCH_SPHERE_SIZE * this.searchSphereDistance,
(this.triggerSmoothedGrab() || this.bumperSqueezed()) ? INTERSECT_COLOR : NO_INTERSECT_COLOR);
if ((USE_OVERLAY_LINES_FOR_SEARCHING === true) && PICK_WITH_HAND_RAY) {
this.overlayLineOn(handPosition, searchSphereLocation,
(this.triggerSmoothedGrab() || this.bumperSqueezed()) ? INTERSECT_COLOR : NO_INTERSECT_COLOR);
}
}
this.handleDistantParticleBeam = function(handPosition, objectPosition, color) {
@ -921,7 +940,8 @@ function MyController(hand) {
continue;
}
if (this.state == STATE_SEARCHING && !isPhysical && distance > NEAR_PICK_MAX_DISTANCE && !near) {
if (this.state == STATE_SEARCHING &&
!isPhysical && distance > NEAR_PICK_MAX_DISTANCE && !near && !grabbableDataForCandidate.wantsTrigger) {
// we can't distance-grab non-physical
if (WANT_DEBUG_SEARCH_NAME && propsForCandidate.name == WANT_DEBUG_SEARCH_NAME) {
print("grab is skipping '" + WANT_DEBUG_SEARCH_NAME + "': not physical and too far for near-grab");
@ -1005,24 +1025,7 @@ function MyController(hand) {
this.lineOn(distantPickRay.origin, Vec3.multiply(distantPickRay.direction, LINE_LENGTH), NO_INTERSECT_COLOR);
}
var SEARCH_SPHERE_SIZE = 0.011;
var SEARCH_SPHERE_FOLLOW_RATE = 0.50;
if (this.intersectionDistance > 0) {
// If we hit something with our pick ray, move the search sphere toward that distance
this.searchSphereDistance = this.searchSphereDistance * SEARCH_SPHERE_FOLLOW_RATE +
this.intersectionDistance * (1.0 - SEARCH_SPHERE_FOLLOW_RATE);
}
var searchSphereLocation = Vec3.sum(distantPickRay.origin,
Vec3.multiply(distantPickRay.direction, this.searchSphereDistance));
this.searchSphereOn(searchSphereLocation, SEARCH_SPHERE_SIZE * this.searchSphereDistance,
(this.triggerSmoothedGrab() || this.bumperSqueezed()) ? INTERSECT_COLOR : NO_INTERSECT_COLOR);
if ((USE_OVERLAY_LINES_FOR_SEARCHING === true) && PICK_WITH_HAND_RAY) {
this.overlayLineOn(handPosition, searchSphereLocation,
(this.triggerSmoothedGrab() || this.bumperSqueezed()) ? INTERSECT_COLOR : NO_INTERSECT_COLOR);
}
this.searchIndicatorOn(handPosition, distantPickRay);
Controller.setReticleVisible(false);
};
@ -1543,17 +1546,20 @@ function MyController(hand) {
var now = Date.now();
if (now - this.lastPickTime > MSECS_PER_SEC / PICKS_PER_SECOND_PER_HAND) {
var intersection = Entities.findRayIntersection(pickRay, true);
this.lastPickTime = now;
if (intersection.entityID != this.grabbedEntity) {
this.setState(STATE_RELEASE);
this.callEntityMethodOnGrabbed("stopFarTrigger");
return;
if (intersection.accurate) {
this.lastPickTime = now;
if (intersection.entityID != this.grabbedEntity) {
this.setState(STATE_RELEASE);
this.callEntityMethodOnGrabbed("stopFarTrigger");
return;
}
if (intersection.intersects) {
this.intersectionDistance = Vec3.distance(pickRay.origin, intersection.intersection);
}
this.searchIndicatorOn(handPosition, pickRay);
}
}
if (USE_ENTITY_LINES_FOR_MOVING === true) {
this.lineOn(pickRay.origin, Vec3.multiply(pickRay.direction, LINE_LENGTH), NO_INTERSECT_COLOR);
}
this.callEntityMethodOnGrabbed("continueFarTrigger");
};
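The new searchIndicatorOn helper above consolidates the search-sphere drawing that previously lived inline in the search state: it low-pass filters the sphere's distance toward the latest pick-ray hit so the indicator eases toward the target rather than snapping. A minimal sketch of just that smoothing step, with the constant taken from the diff and the function name purely illustrative:

var SEARCH_SPHERE_FOLLOW_RATE = 0.50;  // fraction of the previous distance kept each frame

function smoothedSearchDistance(previousDistance, intersectionDistance) {
    // When the pick ray hit something, blend the current sphere distance toward the
    // hit distance; otherwise keep the previous value so the sphere does not jump.
    if (intersectionDistance > 0) {
        return previousDistance * SEARCH_SPHERE_FOLLOW_RATE +
               intersectionDistance * (1.0 - SEARCH_SPHERE_FOLLOW_RATE);
    }
    return previousDistance;
}

// Example: a sphere currently 2.0 m out easing toward a hit at 4.0 m moves to 3.0 m this frame.
var nextDistance = smoothedSearchDistance(2.0, 4.0);  // 3.0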

View file

@ -26,6 +26,7 @@ Script.include([
"libraries/entityCameraTool.js",
"libraries/gridTool.js",
"libraries/entityList.js",
"particle_explorer/particleExplorerTool.js",
"libraries/lightOverlayManager.js",
]);
@ -37,17 +38,13 @@ var lightOverlayManager = new LightOverlayManager();
var cameraManager = new CameraManager();
var grid = Grid();
// gridTool = GridTool({
// horizontalGrid: grid
// });
// gridTool.setVisible(false);
var entityListTool = EntityListTool();
selectionManager.addEventListener(function() {
selectionDisplay.updateHandles();
lightOverlayManager.updatePositions();
});
});
var toolIconUrl = HIFI_PUBLIC_BUCKET + "images/tools/";
var toolHeight = 50;
@ -134,9 +131,9 @@ var importingSVOTextOverlay = Overlays.addOverlay("text", {
var MARKETPLACE_URL = "https://metaverse.highfidelity.com/marketplace";
var marketplaceWindow = new OverlayWebWindow({
title: 'Marketplace',
source: "about:blank",
width: 900,
title: 'Marketplace',
source: "about:blank",
width: 900,
height: 700,
visible: false
});
@ -176,7 +173,8 @@ var toolBar = (function() {
newTextButton,
newWebButton,
newZoneButton,
newPolyVoxButton;
newPolyVoxButton,
newParticleButton;
function initialize() {
toolBar = new ToolBar(0, 0, ToolBar.VERTICAL, "highfidelity.edit.toolbar", function(windowDimensions, toolbar) {
@ -186,7 +184,7 @@ var toolBar = (function() {
};
});
activeButton = toolBar.addTool({
imageURL: toolIconUrl + "edit-status.svg",
@ -314,6 +312,20 @@ var toolBar = (function() {
visible: false
});
newParticleButton = toolBar.addTool({
imageURL: toolIconUrl + "particle.svg",
subImage: {
x: 0,
y: 0,
width: 256,
height: 256
},
width: toolWidth,
height: toolHeight,
alpha: 0.9,
visible: false
});
that.setActive(false);
}
@ -360,6 +372,7 @@ var toolBar = (function() {
toolBar.showTool(newWebButton, doShow);
toolBar.showTool(newZoneButton, doShow);
toolBar.showTool(newPolyVoxButton, doShow);
toolBar.showTool(newParticleButton, doShow);
};
var RESIZE_INTERVAL = 50;
@ -427,8 +440,8 @@ var toolBar = (function() {
newModelButtonDown = true;
return true;
}
if (newCubeButton === toolBar.clicked(clickedOverlay)) {
createNewEntity({
type: "Box",
@ -616,6 +629,22 @@ var toolBar = (function() {
return true;
}
if (newParticleButton === toolBar.clicked(clickedOverlay)) {
createNewEntity({
type: "ParticleEffect",
isEmitting: true,
emitAcceleration: {x: 0, y: -1, z: 0},
accelerationSpread: {x: 5, y: 0, z: 5},
emitSpeed: 1,
lifespan: 1,
particleRadius: 0.025,
alphaFinish: 0,
emitRate: 100,
textures: "https://hifi-public.s3.amazonaws.com/alan/Particles/Particle-Sprite-Smoke-1.png",
});
}
return false;
};
@ -636,7 +665,7 @@ var toolBar = (function() {
}
newModelButtonDown = false;
return handled;
}
@ -1226,7 +1255,8 @@ function selectAllEtitiesInCurrentSelectionBox(keepIfTouching) {
function deleteSelectedEntities() {
if (SelectionManager.hasSelection()) {
print(" Delete Entities");
selectedParticleEntity = 0;
particleExplorerTool.destroyWebView();
SelectionManager.saveProperties();
var savedProperties = [];
for (var i = 0; i < selectionManager.selections.length; i++) {
@ -1499,8 +1529,8 @@ PropertiesTool = function(opts) {
var url = Script.resolvePath('html/entityProperties.html');
var webView = new OverlayWebWindow({
title: 'Entity Properties',
source: url,
title: 'Entity Properties',
source: url,
toolWindow: true
});
@ -1554,8 +1584,16 @@ PropertiesTool = function(opts) {
} else {
if (data.properties.dynamic === false) {
// this object is leaving dynamic, so we zero its velocities
data.properties["velocity"] = {x: 0, y: 0, z: 0};
data.properties["angularVelocity"] = {x: 0, y: 0, z: 0};
data.properties["velocity"] = {
x: 0,
y: 0,
z: 0
};
data.properties["angularVelocity"] = {
x: 0,
y: 0,
z: 0
};
}
if (data.properties.rotation !== undefined) {
var rotation = data.properties.rotation;
@ -1838,3 +1876,39 @@ propertyMenu.onSelectMenuItem = function(name) {
var showMenuItem = propertyMenu.addMenuItem("Show in Marketplace");
propertiesTool = PropertiesTool();
var particleExplorerTool = ParticleExplorerTool();
var selectedParticleEntity = 0;
entityListTool.webView.eventBridge.webEventReceived.connect(function(data) {
var data = JSON.parse(data);
if (data.type == "selectionUpdate") {
var ids = data.entityIds;
if (ids.length === 1) {
if (Entities.getEntityProperties(ids[0], "type").type === "ParticleEffect") {
if (JSON.stringify(selectedParticleEntity) === JSON.stringify(ids[0])) {
// This particle entity is already selected, so return
return;
}
// Destroy the old particles web view first
particleExplorerTool.destroyWebView();
particleExplorerTool.createWebView();
var properties = Entities.getEntityProperties(ids[0]);
var particleData = {
messageType: "particle_settings",
currentProperties: properties
};
selectedParticleEntity = ids[0];
particleExplorerTool.setActiveParticleEntity(ids[0]);
particleExplorerTool.webView.eventBridge.webEventReceived.connect(function(data) {
var data = JSON.parse(data);
if (data.messageType === "page_loaded") {
particleExplorerTool.webView.eventBridge.emitScriptEvent(JSON.stringify(particleData));
}
});
} else {
selectedParticleEntity = 0;
particleExplorerTool.destroyWebView();
}
}
}
});
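The block above wires edit.js to the particle explorer through two event bridges: the entity list reports a selectionUpdate, edit.js opens the explorer web view when exactly one ParticleEffect is selected, and once the page reports page_loaded it is seeded with the entity's current properties. A condensed sketch of that handshake, using the same eventBridge API as the code above (onParticleSelected is an illustrative name, not part of the commit):

// Hypothetical condensation of the selection handshake shown above.
function onParticleSelected(entityID) {
    particleExplorerTool.destroyWebView();            // drop any stale explorer first
    particleExplorerTool.createWebView();
    particleExplorerTool.setActiveParticleEntity(entityID);

    var particleData = {
        messageType: "particle_settings",
        currentProperties: Entities.getEntityProperties(entityID)
    };

    // The web page asks for its settings once it has finished loading.
    particleExplorerTool.webView.eventBridge.webEventReceived.connect(function(message) {
        if (JSON.parse(message).messageType === "page_loaded") {
            particleExplorerTool.webView.eventBridge.emitScriptEvent(JSON.stringify(particleData));
        }
    });
}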

View file

@ -47,6 +47,17 @@
);
};
}
function createEmitCheckedToStringPropertyUpdateFunction(checkboxElement, name, propertyName) {
var newString = "";
if (checkboxElement.checked) {
newString += name + "";
} else {
}
}
function createEmitGroupCheckedPropertyUpdateFunction(group, propertyName) {
return function () {
var properties = {};
@ -64,7 +75,7 @@
function createEmitNumberPropertyUpdateFunction(propertyName) {
return function() {
EventBridge.emitWebEvent(
'{ "type":"update", "properties":{"' + propertyName + '":' + parseFloat(this.value).toFixed(2) + '}}'
'{ "type":"update", "properties":{"' + propertyName + '":' + Number(this.value.toFixed(4)) + '}}'
);
};
}
@ -207,6 +218,28 @@
}
};
function updateCheckedSubProperty(propertyName, propertyValue, subPropertyElement, subPropertyString) {
if (subPropertyElement.checked) {
if (propertyValue.indexOf(subPropertyString) < 0) {
propertyValue += subPropertyString + ',';
}
} else {
// We've unchecked, so remove
propertyValue = propertyValue.replace(subPropertyString + ",", "");
}
var _properties = {};
_properties[propertyName] = propertyValue;
EventBridge.emitWebEvent(
JSON.stringify({
type: "update",
properties: _properties
})
);
}
function loaded() {
openEventBridge(function() {
var allSections = [];
@ -263,6 +296,11 @@
var elDensity = document.getElementById("property-density");
var elCollisionless = document.getElementById("property-collisionless");
var elDynamic = document.getElementById("property-dynamic" );
var elCollideStatic = document.getElementById("property-collide-static");
var elCollideDynamic = document.getElementById("property-collide-dynamic");
var elCollideKinematic = document.getElementById("property-collide-kinematic");
var elCollideMyAvatar = document.getElementById("property-collide-myAvatar");
var elCollideOtherAvatar = document.getElementById("property-collide-otherAvatar");
var elCollisionSoundURL = document.getElementById("property-collision-sound-url");
var elLifetime = document.getElementById("property-lifetime");
var elScriptURL = document.getElementById("property-script-url");
@ -312,15 +350,7 @@
var elHyperlinkHref = document.getElementById("property-hyperlink-href");
var elHyperlinkDescription = document.getElementById("property-hyperlink-description");
var elHyperlinkSections = document.querySelectorAll(".hyperlink-section");
var elParticleSections = document.querySelectorAll(".particle-section");
allSections.push(elParticleSections);
var elParticleIsEmitting = document.getElementById("property-particle-is-emitting");
var elParticleMaxParticles = document.getElementById("property-particle-maxparticles");
var elParticleLifeSpan = document.getElementById("property-particle-lifespan");
var elParticleEmitRate = document.getElementById("property-particle-emit-rate");
var elParticleRadius = document.getElementById("property-particle-radius");
var elParticleTextures = document.getElementById("property-particle-textures");
var elTextSections = document.querySelectorAll(".text-section");
allSections.push(elTextSections);
@ -378,6 +408,7 @@
var elPreviewCameraButton = document.getElementById("preview-camera-button");
if (window.EventBridge !== undefined) {
var properties;
EventBridge.scriptEventReceived.connect(function(data) {
data = JSON.parse(data);
if (data.type == "update") {
@ -419,7 +450,7 @@
var selected = false;
}
var properties = data.selections[0].properties;
properties = data.selections[0].properties;
elID.innerHTML = properties.id;
@ -433,54 +464,64 @@
} else {
enableChildren(document.getElementById("properties-list"), 'input');
}
elName.value = properties.name;
elVisible.checked = properties.visible;
elPositionX.value = properties.position.x.toFixed(2);
elPositionY.value = properties.position.y.toFixed(2);
elPositionZ.value = properties.position.z.toFixed(2);
elPositionX.value = properties.position.x.toFixed(4);
elPositionY.value = properties.position.y.toFixed(4);
elPositionZ.value = properties.position.z.toFixed(4);
elDimensionsX.value = properties.dimensions.x.toFixed(2);
elDimensionsY.value = properties.dimensions.y.toFixed(2);
elDimensionsZ.value = properties.dimensions.z.toFixed(2);
elDimensionsX.value = properties.dimensions.x.toFixed(4);
elDimensionsY.value = properties.dimensions.y.toFixed(4);
elDimensionsZ.value = properties.dimensions.z.toFixed(4);
elParentID.value = properties.parentID;
elParentJointIndex.value = properties.parentJointIndex;
elRegistrationX.value = properties.registrationPoint.x.toFixed(2);
elRegistrationY.value = properties.registrationPoint.y.toFixed(2);
elRegistrationZ.value = properties.registrationPoint.z.toFixed(2);
elRegistrationX.value = properties.registrationPoint.x.toFixed(4);
elRegistrationY.value = properties.registrationPoint.y.toFixed(4);
elRegistrationZ.value = properties.registrationPoint.z.toFixed(4);
elRotationX.value = properties.rotation.x.toFixed(2);
elRotationY.value = properties.rotation.y.toFixed(2);
elRotationZ.value = properties.rotation.z.toFixed(2);
elRotationX.value = properties.rotation.x.toFixed(4);
elRotationY.value = properties.rotation.y.toFixed(4);
elRotationZ.value = properties.rotation.z.toFixed(4);
elLinearVelocityX.value = properties.velocity.x.toFixed(2);
elLinearVelocityY.value = properties.velocity.y.toFixed(2);
elLinearVelocityZ.value = properties.velocity.z.toFixed(2);
elLinearVelocityX.value = properties.velocity.x.toFixed(4);
elLinearVelocityY.value = properties.velocity.y.toFixed(4);
elLinearVelocityZ.value = properties.velocity.z.toFixed(4);
elLinearDamping.value = properties.damping.toFixed(2);
elAngularVelocityX.value = (properties.angularVelocity.x * RADIANS_TO_DEGREES).toFixed(2);
elAngularVelocityY.value = (properties.angularVelocity.y * RADIANS_TO_DEGREES).toFixed(2);
elAngularVelocityZ.value = (properties.angularVelocity.z * RADIANS_TO_DEGREES).toFixed(2);
elAngularDamping.value = properties.angularDamping.toFixed(2);
elAngularVelocityX.value = (properties.angularVelocity.x * RADIANS_TO_DEGREES).toFixed(4);
elAngularVelocityY.value = (properties.angularVelocity.y * RADIANS_TO_DEGREES).toFixed(4);
elAngularVelocityZ.value = (properties.angularVelocity.z * RADIANS_TO_DEGREES).toFixed(4);
elAngularDamping.value = properties.angularDamping.toFixed(4);
elRestitution.value = properties.restitution.toFixed(2);
elFriction.value = properties.friction.toFixed(2);
elRestitution.value = properties.restitution.toFixed(4);
elFriction.value = properties.friction.toFixed(4);
elGravityX.value = properties.gravity.x.toFixed(2);
elGravityY.value = properties.gravity.y.toFixed(2);
elGravityZ.value = properties.gravity.z.toFixed(2);
elGravityX.value = properties.gravity.x.toFixed(4);
elGravityY.value = properties.gravity.y.toFixed(4);
elGravityZ.value = properties.gravity.z.toFixed(4);
elAccelerationX.value = properties.acceleration.x.toFixed(2);
elAccelerationY.value = properties.acceleration.y.toFixed(2);
elAccelerationZ.value = properties.acceleration.z.toFixed(2);
elAccelerationX.value = properties.acceleration.x.toFixed(4);
elAccelerationY.value = properties.acceleration.y.toFixed(4);
elAccelerationZ.value = properties.acceleration.z.toFixed(4);
elDensity.value = properties.density.toFixed(2);
elDensity.value = properties.density.toFixed(4);
elCollisionless.checked = properties.collisionless;
elDynamic.checked = properties.dynamic;
elCollideStatic.checked = properties.collidesWith.indexOf("static") > -1;
elCollideKinematic.checked = properties.collidesWith.indexOf("kinematic") > -1;
elCollideDynamic.checked = properties.collidesWith.indexOf("dynamic") > -1;
elCollideMyAvatar.checked = properties.collidesWith.indexOf("myAvatar") > -1;
elCollideOtherAvatar.checked = properties.collidesWith.indexOf("otherAvatar") > -1;
elCollisionSoundURL.value = properties.collisionSoundURL;
elLifetime.value = properties.lifetime;
elScriptURL.value = properties.script;
@ -602,18 +643,6 @@
elZoneSkyboxURL.value = properties.skybox.url;
showElements(document.getElementsByClassName('skybox-section'), elZoneBackgroundMode.value == 'skybox');
} else if (properties.type == "ParticleEffect") {
for (var i = 0; i < elParticleSections.length; i++) {
elParticleSections[i].style.display = 'block';
}
elParticleIsEmitting.checked = properties.isEmitting;
elParticleMaxParticles.value = properties.maxParticles;
elParticleLifeSpan.value = properties.lifespan.toFixed(2);
elParticleEmitRate.value = properties.emitRate.toFixed(1);
elParticleRadius.value = properties.particleRadius.toFixed(3);
elParticleTextures.value = properties.textures;
} else if (properties.type == "PolyVox") {
for (var i = 0; i < elPolyVoxSections.length; i++) {
elPolyVoxSections[i].style.display = 'block';
@ -702,6 +731,29 @@
elDensity.addEventListener('change', createEmitNumberPropertyUpdateFunction('density'));
elCollisionless.addEventListener('change', createEmitCheckedPropertyUpdateFunction('collisionless'));
elDynamic.addEventListener('change', createEmitCheckedPropertyUpdateFunction('dynamic'));
elCollideDynamic.addEventListener('change', function() {
updateCheckedSubProperty("collidesWith", properties.collidesWith, elCollideDynamic, 'dynamic');
});
elCollideKinematic.addEventListener('change', function() {
updateCheckedSubProperty("collidesWith", properties.collidesWith, elCollideKinematic, 'kinematic');
});
elCollideStatic.addEventListener('change', function() {
updateCheckedSubProperty("collidesWith", properties.collidesWith, elCollideStatic, 'static');
});
elCollideMyAvatar.addEventListener('change', function() {
updateCheckedSubProperty("collidesWith", properties.collidesWith, elCollideMyAvatar, 'myAvatar');
});
elCollideOtherAvatar.addEventListener('change', function() {
updateCheckedSubProperty("collidesWith", properties.collidesWith, elCollideOtherAvatar, 'otherAvatar');
});
elCollisionSoundURL.addEventListener('change', createEmitTextPropertyUpdateFunction('collisionSoundURL'));
elLifetime.addEventListener('change', createEmitNumberPropertyUpdateFunction('lifetime'));
@ -749,13 +801,6 @@
elWebSourceURL.addEventListener('change', createEmitTextPropertyUpdateFunction('sourceUrl'));
elParticleIsEmitting.addEventListener('change', createEmitCheckedPropertyUpdateFunction('isEmitting'));
elParticleMaxParticles.addEventListener('change', createEmitNumberPropertyUpdateFunction('maxParticles'));
elParticleLifeSpan.addEventListener('change', createEmitNumberPropertyUpdateFunction('lifespan'));
elParticleEmitRate.addEventListener('change', createEmitNumberPropertyUpdateFunction('emitRate'));
elParticleRadius.addEventListener('change', createEmitNumberPropertyUpdateFunction('particleRadius'));
elParticleTextures.addEventListener('change', createEmitTextPropertyUpdateFunction('textures'));
elModelURL.addEventListener('change', createEmitTextPropertyUpdateFunction('modelURL'));
elShapeType.addEventListener('change', createEmitTextPropertyUpdateFunction('shapeType'));
elCompoundShapeURL.addEventListener('change', createEmitTextPropertyUpdateFunction('compoundShapeURL'));
@ -1192,9 +1237,9 @@
<div class="property">
<div class="label">Position</div>
<div class="value">
<div class="input-area ">X<input class="coord" type='number' id="property-pos-x"><div class="prop-x"></div></div>
<div class="input-area ">Y<input class="coord" type='number' id="property-pos-y"><div class="prop-y"></div></div>
<div class="input-area ">Z<input class="coord" type='number' id="property-pos-z"><div class="prop-z"></div></div>
<div class="input-area ">X<input class="coord" type='number' id="property-pos-x" step="0.1"><div class="prop-x"></div></div>
<div class="input-area ">Y<input class="coord" type='number' id="property-pos-y" step="0.1"><div class="prop-y"></div></div>
<div class="input-area ">Z<input class="coord" type='number' id="property-pos-z" step="0.1"><div class="prop-z"></div></div>
<div>
<input type="button" id="move-selection-to-grid" value="Selection to Grid">
<input type="button" id="move-all-to-grid" value="All to Grid">
@ -1228,9 +1273,9 @@
<div class="property">
<div class="label">Dimensions</div>
<div class="value">
<div class="input-area">X <input class="coord" type='number' id="property-dim-x"><div class="prop-x"></div></div>
<div class="input-area">Y <input class="coord" type='number' id="property-dim-y"><div class="prop-y"></div></div>
<div class="input-area">Z <input class="coord" type='number' id="property-dim-z"><div class="prop-z"></div></div>
<div class="input-area">X <input class="coord" type='number' id="property-dim-x" step="0.1"><div class="prop-x"></div></div>
<div class="input-area">Y <input class="coord" type='number' id="property-dim-y" step="0.1"><div class="prop-y"></div></div>
<div class="input-area">Z <input class="coord" type='number' id="property-dim-z" step="0.1"><div class="prop-z"></div></div>
<div>
<input type="button" id="reset-to-natural-dimensions" value="Reset to Natural Dimensions">
</div>
@ -1280,9 +1325,9 @@
<div class="property">
<div class="label">Rotation</div>
<div class="value">
<div class="input-area">Pitch <input class="coord" type='number' id="property-rot-x"></div>
<div class="input-area">Yaw <input class="coord" type='number' id="property-rot-y"></div>
<div class="input-area">Roll <input class="coord" type='number' id="property-rot-z"></div>
<div class="input-area">Pitch <input class="coord" type='number' id="property-rot-x" step="0.1"></div>
<div class="input-area">Yaw <input class="coord" type='number' id="property-rot-y" step="0.1"></div>
<div class="input-area">Roll <input class="coord" type='number' id="property-rot-z"step="0.1"></div>
</div>
</div>
@ -1384,6 +1429,48 @@
<span class="value">
<input type='checkbox' id="property-dynamic">
</span>
</div>
<div class = "sub-section-header"> Collides With: </div>
<div class = "sub-props-checkbox-group">
<div class="property">
<span class="label"> static</span>
<span class="value">
<input type='checkbox' id="property-collide-static">
</span>
</div>
<div class="property">
<span class="label"> dynamic</span>
<span class="value">
<input type='checkbox' id="property-collide-dynamic">
</span>
</div>
<div class="property">
<span class="label"> kinematic</span>
<span class="value">
<input type='checkbox' id="property-collide-kinematic">
</span>
</div>
<div class="property">
<span class="label"> myAvatar</span>
<span class="value">
<input type='checkbox' id="property-collide-myAvatar">
</span>
</div>
<div class="property">
<span class="label"> otherAvatar</span>
<span class="value">
<input type='checkbox' id="property-collide-otherAvatar">
</span>
</div>
</div>
</div>
<div class="property">
@ -1500,51 +1587,6 @@
</div>
</div>
<div class="section-header particle-section">
<label>Particle</label>
</div>
<div class="particle-section property">
<span class="label">Is Emitting</span>
<span class="value">
<input type='checkbox' id="property-particle-is-emitting">
</span>
</div>
<div class="particle-section property">
<div class="label">Max Particles</div>
<div class="value">
<input type='number' id="property-particle-maxparticles" min="0" max="2048" step="1">
</div>
</div>
<div class="particle-section property">
<div class="label">Particle Life Span</div>
<div class="value">
<input type='number' id="property-particle-lifespan" min="0" step="0.1">
</div>
</div>
<div class="particle-section property">
<div class="label">Particle Emission Rate</div>
<div class="value">
<input type='number' id="property-particle-emit-rate" min="0" step="0.5">
</div>
</div>
<div class="particle-section property">
<div class="label">Particle Radius</div>
<div class="value">
<input class="coord" type='number' id="property-particle-radius" min="0" step="0.005">
</div>
</div>
<div class="particle-section property">
<div class="label">Textures</div>
<div class="value">
<textarea id="property-particle-textures" value=''></textarea>
</div>
</div>
<div class="section-header light-section">
<label>Light</label>
</div>
@ -1584,4 +1626,4 @@
</div>
</div>
</body>
</html>
</html>
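The new collidesWith checkboxes map onto a single comma-separated group mask, and updateCheckedSubProperty above appends or strips one group name and emits the whole string back over the EventBridge. A small standalone sketch of the string handling alone (pure helper functions, no EventBridge; the names are illustrative), to make the mask behaviour concrete:

// Illustrative helpers mirroring the add/remove logic in updateCheckedSubProperty.
function addCollisionGroup(mask, group) {
    // Only append the group if it is not already present in the mask.
    return mask.indexOf(group) < 0 ? mask + group + "," : mask;
}

function removeCollisionGroup(mask, group) {
    return mask.replace(group + ",", "");
}

// Starting mask as stored in the entity's collidesWith property:
var mask = "static,dynamic,";
mask = addCollisionGroup(mask, "myAvatar");     // "static,dynamic,myAvatar,"
mask = removeCollisionGroup(mask, "dynamic");   // "static,myAvatar,"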

View file

@ -263,6 +263,10 @@ table#properties-list {
border-bottom: 0.75pt solid #e5e5e5;
}
.sub-props-checkbox-group {
margin-left: 20px;
}
#properties-list .label {
font-weight: bold;
overflow: hidden;

View file

@ -8,12 +8,16 @@ EntityListTool = function(opts) {
title: 'Entities', source: url, toolWindow: true
});
var searchRadius = 100;
var visible = false;
webView.setVisible(visible);
that.webView = webView;
that.setVisible = function(newVisible) {
visible = newVisible;
webView.setVisible(visible);
@ -71,6 +75,7 @@ EntityListTool = function(opts) {
webView.eventBridge.emitScriptEvent(JSON.stringify(data));
}
webView.eventBridge.webEventReceived.connect(function(data) {
data = JSON.parse(data);
if (data.type == "selectionUpdate") {

View file

@ -1,504 +0,0 @@
//
// main.js
//
// Created by James B. Pollack @imgntn on 9/26/2015
// Copyright 2015 High Fidelity, Inc.
// Web app side of the App - contains GUI.
// This is an example of a new, easy way to do two way bindings between dynamically created GUI and in-world entities.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
/*global window, alert, EventBridge, dat, listenForSettingsUpdates,createVec3Folder,createQuatFolder,writeVec3ToInterface,writeDataToInterface*/
var Settings = function() {
this.exportSettings = function() {
//copyExportSettingsToClipboard();
showPreselectedPrompt();
};
this.importSettings = function() {
importSettings();
};
};
//2-way bindings-aren't quite ready yet. see bottom of file.
var AUTO_UPDATE = false;
var UPDATE_ALL_FREQUENCY = 100;
var controllers = [];
var colorControllers = [];
var folders = [];
var gui = null;
var settings = new Settings();
var updateInterval;
var currentInputField;
var storedController;
var keysToIgnore = [
'importSettings',
'exportSettings',
'script',
'visible',
'locked',
'userData',
'position',
'dimensions',
'rotation',
'id',
'description',
'type',
'created',
'age',
'ageAsText',
'boundingBox',
'naturalDimensions',
'naturalPosition',
'velocity',
'gravity',
'acceleration',
'damping',
'restitution',
'friction',
'density',
'lifetime',
'scriptTimestamp',
'registrationPoint',
'angularVelocity',
'angularDamping',
'collisionless',
'dynamic',
'href',
'actionData',
'marketplaceID',
'collisionSoundURL',
'shapeType',
'isEmitting',
'sittingPoints',
'originalTextures',
'parentJointIndex',
'parentID'
];
var individualKeys = [];
var vec3Keys = [];
var quatKeys = [];
var colorKeys = [];
window.onload = function() {
if (typeof EventBridge !== 'undefined') {
var stringifiedData = JSON.stringify({
messageType: 'page_loaded'
});
EventBridge.emitWebEvent(
stringifiedData
);
listenForSettingsUpdates();
window.onresize = setGUIWidthToWindowWidth;
} else {
console.log('No event bridge, probably not in interface.');
}
};
function loadGUI() {
//whether or not to autoplace
gui = new dat.GUI({
autoPlace: false
});
//if not autoplacing, put gui in a custom container
if (gui.autoPlace === false) {
var customContainer = document.getElementById('my-gui-container');
customContainer.appendChild(gui.domElement);
}
// presets for the GUI itself. a little confusing and import/export is mostly what we want to do at the moment.
// gui.remember(settings);
var keys = _.keys(settings);
_.each(keys, function(key) {
var shouldIgnore = _.contains(keysToIgnore, key);
if (shouldIgnore) {
return;
}
var subKeys = _.keys(settings[key]);
var hasX = _.contains(subKeys, 'x');
var hasY = _.contains(subKeys, 'y');
var hasZ = _.contains(subKeys, 'z');
var hasW = _.contains(subKeys, 'w');
var hasRed = _.contains(subKeys, 'red');
var hasGreen = _.contains(subKeys, 'green');
var hasBlue = _.contains(subKeys, 'blue');
if ((hasX && hasY && hasZ) && hasW === false) {
vec3Keys.push(key);
} else if (hasX && hasY && hasZ && hasW) {
quatKeys.push(key);
} else if (hasRed || hasGreen || hasBlue) {
colorKeys.push(key);
} else {
individualKeys.push(key);
}
});
//alphabetize our keys
individualKeys.sort();
vec3Keys.sort();
quatKeys.sort();
colorKeys.sort();
//add to gui in the order they should appear
gui.add(settings, 'importSettings');
gui.add(settings, 'exportSettings');
addIndividualKeys();
addFolders();
//set the gui width to match the web window width
gui.width = window.innerWidth;
//2-way binding stuff
// if (AUTO_UPDATE) {
// setInterval(manuallyUpdateDisplay, UPDATE_ALL_FREQUENCY);
// registerDOMElementsForListenerBlocking();
// }
}
function addIndividualKeys() {
_.each(individualKeys, function(key) {
//temporary patch for not crashing when this goes below 0
var controller;
if (key.indexOf('emitRate') > -1) {
controller = gui.add(settings, key).min(0);
} else {
controller = gui.add(settings, key);
}
//2-way - need to fix not being able to input exact values if constantly listening
//controller.listen();
//keep track of our controller
controllers.push(controller);
//hook into change events for this gui controller
controller.onChange(function(value) {
// Fires on every change, drag, keypress, etc.
writeDataToInterface(this.property, value);
});
});
}
function addFolders() {
_.each(colorKeys, function(key) {
createColorPicker(key);
});
_.each(vec3Keys, function(key) {
createVec3Folder(key);
});
_.each(quatKeys, function(key) {
createQuatFolder(key);
});
}
function createColorPicker(key) {
var colorObject = settings[key];
var colorArray = convertColorObjectToArray(colorObject);
settings[key] = colorArray;
var controller = gui.addColor(settings, key);
controller.onChange(function(value) {
var obj = {};
obj[key] = convertColorArrayToObject(value);
writeVec3ToInterface(obj);
});
return;
}
function createVec3Folder(category) {
var folder = gui.addFolder(category);
folder.add(settings[category], 'x').step(0.1).onChange(function(value) {
// Fires when a controller loses focus.
var obj = {};
obj[category] = {};
obj[category][this.property] = value;
obj[category].y = settings[category].y;
obj[category].z = settings[category].z;
writeVec3ToInterface(obj);
});
folder.add(settings[category], 'y').step(0.1).onChange(function(value) {
// Fires when a controller loses focus.
var obj = {};
obj[category] = {};
obj[category].x = settings[category].x;
obj[category][this.property] = value;
obj[category].z = settings[category].z;
writeVec3ToInterface(obj);
});
folder.add(settings[category], 'z').step(0.1).onChange(function(value) {
// Fires when a controller loses focus.
var obj = {};
obj[category] = {};
obj[category].y = settings[category].y;
obj[category].x = settings[category].x;
obj[category][this.property] = value;
writeVec3ToInterface(obj);
});
folders.push(folder);
folder.open();
}
function createQuatFolder(category) {
var folder = gui.addFolder(category);
folder.add(settings[category], 'x').step(0.1).onChange(function(value) {
// Fires when a controller loses focus.
var obj = {};
obj[category] = {};
obj[category][this.property] = value;
obj[category].y = settings[category].y;
obj[category].z = settings[category].z;
obj[category].w = settings[category].w;
writeVec3ToInterface(obj);
});
folder.add(settings[category], 'y').step(0.1).onChange(function(value) {
// Fires when a controller loses focus.
var obj = {};
obj[category] = {};
obj[category].x = settings[category].x;
obj[category][this.property] = value;
obj[category].z = settings[category].z;
obj[category].w = settings[category].w;
writeVec3ToInterface(obj);
});
folder.add(settings[category], 'z').step(0.1).onChange(function(value) {
// Fires when a controller loses focus.
var obj = {};
obj[category] = {};
obj[category].x = settings[category].x;
obj[category].y = settings[category].y;
obj[category][this.property] = value;
obj[category].w = settings[category].w;
writeVec3ToInterface(obj);
});
folder.add(settings[category], 'w').step(0.1).onChange(function(value) {
// Fires when a controller loses focus.
var obj = {};
obj[category] = {};
obj[category].x = settings[category].x;
obj[category].y = settings[category].y;
obj[category].z = settings[category].z;
obj[category][this.property] = value;
writeVec3ToInterface(obj);
});
folders.push(folder);
folder.open();
}
function convertColorObjectToArray(colorObject) {
var colorArray = [];
_.each(colorObject, function(singleColor) {
colorArray.push(singleColor);
});
return colorArray;
}
function convertColorArrayToObject(colorArray) {
var colorObject = {
red: colorArray[0],
green: colorArray[1],
blue: colorArray[2]
};
return colorObject;
}
function writeDataToInterface(property, value) {
var data = {};
data[property] = value;
var sendData = {
messageType: "settings_update",
updatedSettings: data
};
var stringifiedData = JSON.stringify(sendData);
EventBridge.emitWebEvent(stringifiedData);
}
function writeVec3ToInterface(obj) {
var sendData = {
messageType: "settings_update",
updatedSettings: obj
};
var stringifiedData = JSON.stringify(sendData);
EventBridge.emitWebEvent(stringifiedData);
}
function listenForSettingsUpdates() {
EventBridge.scriptEventReceived.connect(function(data) {
data = JSON.parse(data);
//2-way
// if (data.messageType === 'object_update') {
// _.each(data.objectSettings, function(value, key) {
// settings[key] = value;
// });
// }
if (data.messageType === 'initial_settings') {
_.each(data.initialSettings, function(value, key) {
settings[key] = {};
settings[key] = value;
});
loadGUI();
}
});
}
function manuallyUpdateDisplay() {
// Iterate over all controllers
// this is expensive; write a method for individual controllers and use it when the value differs from a cached value, perhaps.
var i;
for (i in gui.__controllers) {
gui.__controllers[i].updateDisplay();
}
}
function setGUIWidthToWindowWidth() {
if (gui !== null) {
gui.width = window.innerWidth;
}
}
function handleInputKeyPress(e) {
if (e.keyCode === 13) {
importSettings();
}
return false;
}
function importSettings() {
var importInput = document.getElementById('importer-input');
try {
var importedSettings = JSON.parse(importInput.value);
var keys = _.keys(importedSettings);
_.each(keys, function(key) {
var shouldIgnore = _.contains(keysToIgnore, key);
if (shouldIgnore) {
return;
}
settings[key] = importedSettings[key];
});
writeVec3ToInterface(settings);
manuallyUpdateDisplay();
} catch (e) {
alert('Not properly formatted JSON');
}
}
function prepareSettingsForExport() {
var keys = _.keys(settings);
var exportSettings = {};
_.each(keys, function(key) {
var shouldIgnore = _.contains(keysToIgnore, key);
if (shouldIgnore) {
return;
}
if (key.indexOf('color') > -1) {
var colorObject = convertColorArrayToObject(settings[key]);
settings[key] = colorObject;
}
exportSettings[key] = settings[key];
});
return JSON.stringify(exportSettings);
}
function showPreselectedPrompt() {
window.prompt("Ctrl-C to copy, then Enter.", prepareSettingsForExport());
}
function removeContainerDomElement() {
var elem = document.getElementById("my-gui-container");
elem.parentNode.removeChild(elem);
}
function removeListenerFromGUI(key) {
_.each(gui.__listening, function(controller, index) {
if (controller.property === key) {
storedController = controller;
gui.__listening.splice(index, 1);
}
});
}
//the section below is to try to work at achieving two way bindings;
function addListenersBackToGUI() {
gui.__listening.push(storedController);
storedController = null;
}
function registerDOMElementsForListenerBlocking() {
_.each(gui.__controllers, function(controller) {
var input = controller.domElement.childNodes[0];
input.addEventListener('focus', function() {
console.log('INPUT ELEMENT GOT FOCUS!' + controller.property);
removeListenerFromGUI(controller.property);
});
});
_.each(gui.__controllers, function(controller) {
var input = controller.domElement.childNodes[0];
input.addEventListener('blur', function() {
console.log('INPUT ELEMENT GOT BLUR!' + controller.property);
addListenersBackToGUI();
});
});
// also listen to inputs inside of folders
_.each(gui.__folders, function(folder) {
_.each(folder.__controllers, function(controller) {
var input = controller.__input;
input.addEventListener('focus', function() {
console.log('FOLDER ELEMENT GOT FOCUS!' + controller.property);
});
});
});
}

View file

@ -1,5 +1,5 @@
<!--
// main.js
// particleExplorer.html
//
//
// Created by James B. Pollack @imgntn on 9/26/2015
@ -16,14 +16,16 @@
<head>
<script type="text/javascript" src="dat.gui.min.js"></script>
<script type="text/javascript" src="underscore-min.js"></script>
<script type="text/javascript" src="main.js?123"></script>
<script type="text/javascript" src="qrc:///qtwebchannel/qwebchannel.js"></script>
<script type="text/javascript" src="../html/eventBridgeLoader.js"></script>
<script type="text/javascript" src="particleExplorer.js?v42"></script>
<script>
</script>
<style>
body{
background-color:black;
overflow-x: hidden;
background-color:black;
overflow-x: hidden;
}
#my-gui-container{
@ -31,12 +33,23 @@ body{
}
.importer{
margin-bottom:4px;
margin-bottom:4px;
}
.exported-props-section {
width: 50%;
margin: 0 auto;
}
#exported-props {
/* margin-left and margin-right are set automatically, centering the block */
color: white;
white-space: pre-wrap; /* css-3 */
}
::-webkit-input-placeholder {
text-align: center;
font-family: Helvetica
text-align: center;
font-family: Helvetica
}
#importer-input{
@ -50,6 +63,8 @@ body{
<body>
<div class="importer">
<input type='text' id="importer-input" placeholder="Import: Paste JSON here." onkeypress="handleInputKeyPress(event)">
<div class = "exported-props-section">
<div id = "exported-props"></div>
</div>
<div id="my-gui-container">
</div>

View file

@ -2,215 +2,497 @@
// particleExplorer.js
//
// Created by James B. Pollack @imgntn on 9/26/2015
// includes setup from @ctrlaltdavid's particlesTest.js
// Copyright 2015 High Fidelity, Inc.
//
// Interface side of the App.
// Quickly edit the aesthetics of a particle system.
// Web app side of the App - contains GUI.
// This is an example of a new, easy way to do two way bindings between dynamically created GUI and in-world entities.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// next version: 2 way bindings, integrate with edit.js
//
/*global print, WebWindow, MyAvatar, Entities, AnimationCache, SoundCache, Scene, Camera, Overlays, HMD, AvatarList, AvatarManager, Controller, UndoStack, Window, Account, GlobalServices, Script, ScriptDiscoveryService, LODManager, Menu, Vec3, Quat, AudioDevice, Paths, Clipboard, Settings, XMLHttpRequest, randFloat, randInt */
/*global window, alert, EventBridge, dat, listenForSettingsUpdates,createVec3Folder,createQuatFolder,writeVec3ToInterface,writeDataToInterface*/
var box,
sphere,
sphereDimensions = {
x: 0.4,
y: 0.8,
z: 0.2
},
pointDimensions = {
x: 0.0,
y: 0.0,
z: 0.0
},
sphereOrientation = Quat.fromPitchYawRollDegrees(-60.0, 30.0, 0.0),
verticalOrientation = Quat.fromPitchYawRollDegrees(-90.0, 0.0, 0.0),
particles,
particleExample = -1,
PARTICLE_RADIUS = 0.04,
SLOW_EMIT_RATE = 2.0,
HALF_EMIT_RATE = 50.0,
FAST_EMIT_RATE = 100.0,
SLOW_EMIT_SPEED = 0.025,
FAST_EMIT_SPEED = 1.0,
GRAVITY_EMIT_ACCELERATON = {
x: 0.0,
y: -0.3,
z: 0.0
},
ZERO_EMIT_ACCELERATON = {
x: 0.0,
y: 0.0,
z: 0.0
},
PI = 3.141593,
DEG_TO_RAD = PI / 180.0,
NUM_PARTICLE_EXAMPLES = 18;
var particleProperties;
function setUp() {
var boxPoint,
spawnPoint;
boxPoint = Vec3.sum(MyAvatar.position, Vec3.multiply(4.0, Quat.getFront(Camera.getOrientation())));
boxPoint = Vec3.sum(boxPoint, {
x: 0.0,
y: -0.5,
z: 0.0
});
spawnPoint = Vec3.sum(boxPoint, {
x: 0.0,
y: 1.0,
z: 0.0
});
box = Entities.addEntity({
type: "Box",
name: "ParticlesTest Box",
position: boxPoint,
rotation: verticalOrientation,
dimensions: {
x: 0.3,
y: 0.3,
z: 0.3
},
color: {
red: 128,
green: 128,
blue: 128
},
lifetime: 3600, // 1 hour; just in case
visible: true
});
// Same size and orientation as emitter when ellipsoid.
sphere = Entities.addEntity({
type: "Sphere",
name: "ParticlesTest Sphere",
position: boxPoint,
rotation: sphereOrientation,
dimensions: sphereDimensions,
color: {
red: 128,
green: 128,
blue: 128
},
lifetime: 3600, // 1 hour; just in case
visible: false
});
// 1.0m above the box or ellipsoid.
particles = Entities.addEntity({
type: "ParticleEffect",
name: "ParticlesTest Emitter",
position: spawnPoint,
emitOrientation: verticalOrientation,
particleRadius: PARTICLE_RADIUS,
radiusSpread: 0.0,
emitRate: SLOW_EMIT_RATE,
emitSpeed: FAST_EMIT_SPEED,
speedSpread: 0.0,
emitAcceleration: GRAVITY_EMIT_ACCELERATON,
accelerationSpread: {
x: 0.0,
y: 0.0,
z: 0.0
},
textures: "https://hifi-public.s3.amazonaws.com/alan/Particles/Particle-Sprite-Smoke-1.png",
color: {
red: 255,
green: 255,
blue: 255
},
lifespan: 5.0,
locked: false,
isEmitting: true,
lifetime: 3600 // 1 hour; just in case
});
}
SettingsWindow = function() {
var _this = this;
this.webWindow = null;
this.init = function() {
Script.update.connect(waitForObjectAuthorization);
_this.webWindow = new WebWindow('Particle Explorer', Script.resolvePath('index.html'), 400, 600, false);
_this.webWindow.eventBridge.webEventReceived.connect(_this.onWebEventReceived);
var Settings = function() {
this.exportSettings = function() {
//copyExportSettingsToClipboard();
showPreselectedPrompt();
};
this.sendData = function(data) {
_this.webWindow.eventBridge.emitScriptEvent(JSON.stringify(data));
};
this.onWebEventReceived = function(data) {
var _data = JSON.parse(data);
if (_data.messageType === 'page_loaded') {
_this.webWindow.setVisible(true);
_this.pageLoaded = true;
sendInitialSettings(particleProperties);
}
if (_data.messageType === 'settings_update') {
editEntity(_data.updatedSettings);
return;
}
this.importSettings = function() {
importSettings();
};
};
function waitForObjectAuthorization() {
var properties = Entities.getEntityProperties(particles, "isKnownID");
var isKnownID = properties.isKnownID;
if (isKnownID === false || SettingsWindow.pageLoaded === false) {
return;
//2-way bindings-aren't quite ready yet. see bottom of file.
var AUTO_UPDATE = false;
var UPDATE_ALL_FREQUENCY = 100;
var controllers = [];
var colorControllers = [];
var folders = [];
var gui = null;
var settings = new Settings();
var updateInterval;
var currentInputField;
var storedController;
//CHANGE TO WHITELIST
var keysToAllow = [
'isEmitting',
'maxParticles',
'lifespan',
'emitRate',
'emitSpeed',
'speedSpread',
'emitOrientation',
'emitDimensions',
'emitRadiusStart',
'polarStart',
'polarFinish',
'azimuthFinish',
'emitAcceleration',
'accelerationSpread',
'particleRadius',
'radiusSpread',
'radiusStart',
'radiusFinish',
'color',
'colorSpread',
'colorStart',
'colorFinish',
'alpha',
'alphaSpread',
'alphaStart',
'alphaFinish',
'emitterShouldTrail',
'textures'
];
var individualKeys = [];
var vec3Keys = [];
var quatKeys = [];
var colorKeys = [];
window.onload = function() {
openEventBridge(function() {
var stringifiedData = JSON.stringify({
messageType: 'page_loaded'
});
EventBridge.emitWebEvent(
stringifiedData
);
listenForSettingsUpdates();
window.onresize = setGUIWidthToWindowWidth;
})
};
function loadGUI() {
//whether or not to autoplace
gui = new dat.GUI({
autoPlace: false
});
//if not autoplacing, put gui in a custom container
if (gui.autoPlace === false) {
var customContainer = document.getElementById('my-gui-container');
customContainer.appendChild(gui.domElement);
}
var currentProperties = Entities.getEntityProperties(particles);
particleProperties = currentProperties;
Script.update.connect(sendObjectUpdates);
Script.update.disconnect(waitForObjectAuthorization);
// presets for the GUI itself. a little confusing and import/export is mostly what we want to do at the moment.
// gui.remember(settings);
var keys = _.keys(settings);
_.each(keys, function(key) {
var shouldAllow = _.contains(keysToAllow, key);
if (shouldAllow) {
var subKeys = _.keys(settings[key]);
var hasX = _.contains(subKeys, 'x');
var hasY = _.contains(subKeys, 'y');
var hasZ = _.contains(subKeys, 'z');
var hasW = _.contains(subKeys, 'w');
var hasRed = _.contains(subKeys, 'red');
var hasGreen = _.contains(subKeys, 'green');
var hasBlue = _.contains(subKeys, 'blue');
if ((hasX && hasY && hasZ) && hasW === false) {
vec3Keys.push(key);
} else if (hasX && hasY && hasZ && hasW) {
quatKeys.push(key);
} else if (hasRed || hasGreen || hasBlue) {
colorKeys.push(key);
} else {
individualKeys.push(key);
}
}
});
//alphabetize our keys
individualKeys.sort();
vec3Keys.sort();
quatKeys.sort();
colorKeys.sort();
//add to gui in the order they should appear
gui.add(settings, 'importSettings');
gui.add(settings, 'exportSettings');
addIndividualKeys();
addFolders();
//set the gui width to match the web window width
gui.width = window.innerWidth;
//2-way binding stuff
// if (AUTO_UPDATE) {
// setInterval(manuallyUpdateDisplay, UPDATE_ALL_FREQUENCY);
// registerDOMElementsForListenerBlocking();
// }
}
function sendObjectUpdates() {
var currentProperties = Entities.getEntityProperties(particles);
sendUpdatedObject(currentProperties);
function addIndividualKeys() {
_.each(individualKeys, function(key) {
//temporary patch for not crashing when this goes below 0
var controller;
if (key.indexOf('emitRate') > -1) {
controller = gui.add(settings, key).min(0);
} else {
controller = gui.add(settings, key);
}
//2-way - need to fix not being able to input exact values if constantly listening
//controller.listen();
//keep track of our controller
controllers.push(controller);
//hook into change events for this gui controller
controller.onChange(function(value) {
// Fires on every change, drag, keypress, etc.
writeDataToInterface(this.property, value);
});
});
}
function sendInitialSettings(properties) {
var settings = {
messageType: 'initial_settings',
initialSettings: properties
function addFolders() {
_.each(colorKeys, function(key) {
createColorPicker(key);
});
_.each(vec3Keys, function(key) {
createVec3Folder(key);
});
_.each(quatKeys, function(key) {
createQuatFolder(key);
});
}
function createColorPicker(key) {
var colorObject = settings[key];
var colorArray = convertColorObjectToArray(colorObject);
settings[key] = colorArray;
var controller = gui.addColor(settings, key);
controller.onChange(function(value) {
var obj = {};
obj[key] = convertColorArrayToObject(value);
writeVec3ToInterface(obj);
});
return;
}
function createVec3Folder(category) {
var folder = gui.addFolder(category);
folder.add(settings[category], 'x').step(0.1).onChange(function(value) {
// Fires when a controller loses focus.
var obj = {};
obj[category] = {};
obj[category][this.property] = value;
obj[category].y = settings[category].y;
obj[category].z = settings[category].z;
writeVec3ToInterface(obj);
});
folder.add(settings[category], 'y').step(0.1).onChange(function(value) {
// Fires when a controller loses focus.
var obj = {};
obj[category] = {};
obj[category].x = settings[category].x;
obj[category][this.property] = value;
obj[category].z = settings[category].z;
writeVec3ToInterface(obj);
});
folder.add(settings[category], 'z').step(0.1).onChange(function(value) {
// Fires when a controller loses focus.
var obj = {};
obj[category] = {};
obj[category].y = settings[category].y;
obj[category].x = settings[category].x;
obj[category][this.property] = value;
writeVec3ToInterface(obj);
});
folders.push(folder);
folder.open();
}
function createQuatFolder(category) {
var folder = gui.addFolder(category);
folder.add(settings[category], 'x').step(0.1).onChange(function(value) {
// Fires when a controller loses focus.
var obj = {};
obj[category] = {};
obj[category][this.property] = value;
obj[category].y = settings[category].y;
obj[category].z = settings[category].z;
obj[category].w = settings[category].w;
writeVec3ToInterface(obj);
});
folder.add(settings[category], 'y').step(0.1).onChange(function(value) {
// Fires when a controller loses focus.
var obj = {};
obj[category] = {};
obj[category].x = settings[category].x;
obj[category][this.property] = value;
obj[category].z = settings[category].z;
obj[category].w = settings[category].w;
writeVec3ToInterface(obj);
});
folder.add(settings[category], 'z').step(0.1).onChange(function(value) {
// Fires when a controller loses focus.
var obj = {};
obj[category] = {};
obj[category].x = settings[category].x;
obj[category].y = settings[category].y;
obj[category][this.property] = value;
obj[category].w = settings[category].w;
writeVec3ToInterface(obj);
});
folder.add(settings[category], 'w').step(0.1).onChange(function(value) {
// Fires when a controller loses focus.
var obj = {};
obj[category] = {};
obj[category].x = settings[category].x;
obj[category].y = settings[category].y;
obj[category].z = settings[category].z;
obj[category][this.property] = value;
writeVec3ToInterface(obj);
});
folders.push(folder);
folder.open();
}
function convertColorObjectToArray(colorObject) {
var colorArray = [];
_.each(colorObject, function(singleColor) {
colorArray.push(singleColor);
});
return colorArray;
}
function convertColorArrayToObject(colorArray) {
var colorObject = {
red: colorArray[0],
green: colorArray[1],
blue: colorArray[2]
};
settingsWindow.sendData(settings);
return colorObject;
}
function sendUpdatedObject(properties) {
var settings = {
messageType: 'object_update',
objectSettings: properties
function writeDataToInterface(property, value) {
var data = {};
data[property] = value;
var sendData = {
messageType: "settings_update",
updatedSettings: data
};
settingsWindow.sendData(settings);
var stringifiedData = JSON.stringify(sendData);
EventBridge.emitWebEvent(stringifiedData);
}
function editEntity(properties) {
Entities.editEntity(particles, properties);
function writeVec3ToInterface(obj) {
var sendData = {
messageType: "settings_update",
updatedSettings: obj
};
var stringifiedData = JSON.stringify(sendData);
EventBridge.emitWebEvent(stringifiedData);
}
function tearDown() {
Entities.deleteEntity(particles);
Entities.deleteEntity(box);
Entities.deleteEntity(sphere);
Script.update.disconnect(sendObjectUpdates);
function listenForSettingsUpdates() {
EventBridge.scriptEventReceived.connect(function(data) {
data = JSON.parse(data);
if (data.messageType === 'particle_settings') {
_.each(data.currentProperties, function(value, key) {
settings[key] = {};
settings[key] = value;
});
loadGUI();
}
});
}
var settingsWindow = new SettingsWindow();
settingsWindow.init();
setUp();
Script.scriptEnding.connect(tearDown);
function manuallyUpdateDisplay() {
// Iterate over all controllers
// this is expensive; write a method for individual controllers and use it when the value differs from a cached value, perhaps.
var i;
for (i in gui.__controllers) {
gui.__controllers[i].updateDisplay();
}
}
function setGUIWidthToWindowWidth() {
if (gui !== null) {
gui.width = window.innerWidth;
}
}
function handleInputKeyPress(e) {
if (e.keyCode === 13) {
importSettings();
}
return false;
}
function importSettings() {
var importInput = document.getElementById('importer-input');
try {
var importedSettings = JSON.parse(importInput.value);
var keys = _.keys(importedSettings);
_.each(keys, function(key) {
var shouldAllow = _.contains(keysToAllow, key);
if (!shouldAllow) {
return;
}
settings[key] = importedSettings[key];
});
writeVec3ToInterface(settings);
manuallyUpdateDisplay();
} catch (e) {
alert('Not properly formatted JSON');
}
}
function prepareSettingsForExport() {
var keys = _.keys(settings);
var exportSettings = {};
_.each(keys, function(key) {
var shouldAllow = _.contains(keysToAllow, key);
if (!shouldAllow) {
return;
}
if (key.indexOf('color') > -1) {
var colorObject = convertColorArrayToObject(settings[key]);
settings[key] = colorObject;
}
exportSettings[key] = settings[key];
});
return JSON.stringify(exportSettings, null, 4);
}
function showPreselectedPrompt() {
var elem = document.getElementById("exported-props");
var exportSettings = prepareSettingsForExport();
elem.innerHTML = "";
var buttonnode = document.createElement('input');
buttonnode.setAttribute('type','button');
buttonnode.setAttribute('value','close');
elem.appendChild(document.createTextNode("COPY THE BELOW FIELD TO CLIPBOARD:"));
elem.appendChild(document.createElement("br"));
var textAreaNode = document.createElement("textarea");
textAreaNode.value = exportSettings;
elem.appendChild(textAreaNode);
elem.appendChild(document.createElement("br"));
elem.appendChild(buttonnode);
buttonnode.onclick = function() {
console.log("click")
elem.innerHTML = "";
}
//window.alert("Ctrl-C to copy, then Enter.", prepareSettingsForExport());
}
function removeContainerDomElement() {
var elem = document.getElementById("my-gui-container");
elem.parentNode.removeChild(elem);
}
function removeListenerFromGUI(key) {
_.each(gui.__listening, function(controller, index) {
if (controller.property === key) {
storedController = controller;
gui.__listening.splice(index, 1);
}
});
}
//the section below is to try to work at achieving two way bindings;
function addListenersBackToGUI() {
gui.__listening.push(storedController);
storedController = null;
}
function registerDOMElementsForListenerBlocking() {
_.each(gui.__controllers, function(controller) {
var input = controller.domElement.childNodes[0];
input.addEventListener('focus', function() {
console.log('INPUT ELEMENT GOT FOCUS!' + controller.property);
removeListenerFromGUI(controller.property);
});
});
_.each(gui.__controllers, function(controller) {
var input = controller.domElement.childNodes[0];
input.addEventListener('blur', function() {
console.log('INPUT ELEMENT GOT BLUR!' + controller.property);
addListenersBackToGUI();
});
});
// also listen to inputs inside of folders
_.each(gui.__folders, function(folder) {
_.each(folder.__controllers, function(controller) {
var input = controller.__input;
input.addEventListener('focus', function() {
console.log('FOLDER ELEMENT GOT FOCUS!' + controller.property);
});
});
});
}
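loadGUI above classifies each whitelisted property by its sub-keys (x/y/z/w for vectors and quaternions, red/green/blue for colors) before handing it to dat.GUI. A standalone sketch of that classification, using plain objects instead of the GUI (classifyKey is an illustrative name, not part of the commit):

function classifyKey(value) {
    // Decide which dat.GUI widget a property should get, based on its sub-keys.
    if (value && typeof value === "object") {
        var hasXYZ = "x" in value && "y" in value && "z" in value;
        if (hasXYZ && "w" in value) { return "quat"; }
        if (hasXYZ) { return "vec3"; }
        if ("red" in value || "green" in value || "blue" in value) { return "color"; }
    }
    return "individual";
}

classifyKey({ x: 0, y: -1, z: 0 });               // "vec3"  (e.g. emitAcceleration)
classifyKey({ red: 255, green: 255, blue: 255 }); // "color"
classifyKey(100);                                 // "individual" (e.g. emitRate)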

View file

@ -0,0 +1,60 @@
//
// particleExplorerTool.js
//
// Created by Eric Levin on 2/15/16
// Copyright 2016 High Fidelity, Inc.
// Adds particleExplorer tool to the edit panel when a user selects a particle entity from the edit tool window
// This is an example of a new, easy way to do two way bindings between dynamically created GUI and in-world entities.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
/*global window, alert, EventBridge, dat, listenForSettingsUpdates,createVec3Folder,createQuatFolder,writeVec3ToInterface,writeDataToInterface*/
var PARTICLE_EXPLORER_HTML_URL = Script.resolvePath('particleExplorer.html');
ParticleExplorerTool = function() {
var that = {};
that.createWebView = function() {
var url = PARTICLE_EXPLORER_HTML_URL;
that.webView = new OverlayWebWindow({
title: 'Particle Explorer',
source: url,
toolWindow: true
});
that.webView.setVisible(true);
that.webView.eventBridge.webEventReceived.connect(that.webEventReceived);
}
that.destroyWebView = function() {
if (!that.webView) {
print("EBL CAN'ZT CLOSE WEB VIEW- IT DOESNT EXISTS!")
return;
}
print("EBL CLOSING WEB VIEW")
that.webView.close();
that.webView = null;
that.activeParticleEntity = 0;
}
that.webEventReceived = function(data) {
var data = JSON.parse(data);
if (data.messageType === "settings_update") {
Entities.editEntity(that.activeParticleEntity, data.updatedSettings);
}
}
that.setActiveParticleEntity = function(id) {
that.activeParticleEntity = id;
}
return that;
};
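ParticleExplorerTool closes the loop started in particleExplorer.js: the web page emits page_loaded, receives particle_settings, and every dat.GUI change comes back as a settings_update that the tool applies with Entities.editEntity. A compact sketch of the message protocol, with the payload shapes taken from the two files above (the handler name is illustrative):

// Messages travelling web page -> script:
//   { messageType: "page_loaded" }                                    sent once on load
//   { messageType: "settings_update", updatedSettings: { ... } }      sent on every GUI change
// Messages travelling script -> web page:
//   { messageType: "particle_settings", currentProperties: { ... } }  seeds the GUI

function handleWebMessage(rawMessage, activeParticleEntity) {
    var message = JSON.parse(rawMessage);
    if (message.messageType === "settings_update") {
        // Mirror of ParticleExplorerTool.webEventReceived above.
        Entities.editEntity(activeParticleEntity, message.updatedSettings);
    }
}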

View file

@ -18,7 +18,7 @@
[ "Keyboard.MouseMoveRight" ]
]
},
"when": [ "Application.InHMD", "Application.ComfortMode", "Keyboard.RightMouseButton" ],
"when": [ "Application.InHMD", "Application.SnapTurn", "Keyboard.RightMouseButton" ],
"to": "Actions.StepYaw",
"filters":
[
@ -34,7 +34,7 @@
[ "Keyboard.TouchpadRight" ]
]
},
"when": [ "Application.InHMD", "Application.ComfortMode" ],
"when": [ "Application.InHMD", "Application.SnapTurn" ],
"to": "Actions.StepYaw",
"filters":
[
@ -49,7 +49,7 @@
["Keyboard.D", "Keyboard.Right"]
]
},
"when": [ "Application.InHMD", "Application.ComfortMode" ],
"when": [ "Application.InHMD", "Application.SnapTurn" ],
"to": "Actions.StepYaw",
"filters":
[

View file

@ -5,7 +5,7 @@
{ "from": "Standard.LX", "to": "Actions.TranslateX" },
{ "from": "Standard.RX",
"when": [ "Application.InHMD", "Application.ComfortMode" ],
"when": [ "Application.InHMD", "Application.SnapTurn" ],
"to": "Actions.StepYaw",
"filters":
[

View file

@ -35,7 +35,6 @@ QtObject {
readonly property string centerPlayerInView: "Center Player In View";
readonly property string chat: "Chat...";
readonly property string collisions: "Collisions";
readonly property string comfortMode: "Comfort Mode";
readonly property string connexion: "Activate 3D Connexion Devices";
readonly property string console_: "Console...";
readonly property string controlWithSpeech: "Control With Speech";

View file

@ -849,8 +849,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
_applicationStateDevice->addInputVariant(QString("InHMD"), controller::StateController::ReadLambda([]() -> float {
return (float)qApp->getAvatarUpdater()->isHMDMode();
}));
_applicationStateDevice->addInputVariant(QString("ComfortMode"), controller::StateController::ReadLambda([]() -> float {
return (float)Menu::getInstance()->isOptionChecked(MenuOption::ComfortMode);
_applicationStateDevice->addInputVariant(QString("SnapTurn"), controller::StateController::ReadLambda([]() -> float {
return (float)qApp->getMyAvatar()->getSnapTurn();
}));
_applicationStateDevice->addInputVariant(QString("Grounded"), controller::StateController::ReadLambda([]() -> float {
return (float)qApp->getMyAvatar()->getCharacterController()->onGround();
@ -1532,7 +1532,7 @@ void Application::paintGL() {
// just relying on the left FOV in each case and hoping that the
// overall culling margin of error doesn't cause popping in the
// right eye. There are FIXMEs in the relevant plugins
_myCamera.setProjection(displayPlugin->getProjection(Mono, _myCamera.getProjection()));
_myCamera.setProjection(displayPlugin->getCullingProjection(_myCamera.getProjection()));
renderArgs._context->enableStereo(true);
mat4 eyeOffsets[2];
mat4 eyeProjections[2];
@ -1563,7 +1563,7 @@ void Application::paintGL() {
displayPlugin->setEyeRenderPose(_frameCount, eye, headPose);
eyeProjections[eye] = displayPlugin->getProjection(eye, baseProjection);
eyeProjections[eye] = displayPlugin->getEyeProjection(eye, baseProjection);
});
renderArgs._context->setStereoProjections(eyeProjections);
renderArgs._context->setStereoViews(eyeOffsets);
@ -5005,7 +5005,7 @@ void Application::updateInputModes() {
mat4 Application::getEyeProjection(int eye) const {
if (isHMDMode()) {
return getActiveDisplayPlugin()->getProjection((Eye)eye, _viewFrustum.getProjection());
return getActiveDisplayPlugin()->getEyeProjection((Eye)eye, _viewFrustum.getProjection());
}
return _viewFrustum.getProjection();

View file

@ -490,7 +490,6 @@ Menu::Menu() {
avatar, SLOT(setEnableMeshVisible(bool)));
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::DisableEyelidAdjustment, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::TurnWithHead, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ComfortMode, 0, true);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::UseAnimPreAndPostRotations, 0, false,
avatar, SLOT(setUseAnimPreAndPostRotations(bool)));
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::EnableInverseKinematics, 0, true,

View file

@ -177,7 +177,6 @@ namespace MenuOption {
const QString CenterPlayerInView = "Center Player In View";
const QString Chat = "Chat...";
const QString Collisions = "Collisions";
const QString ComfortMode = "Comfort Mode";
const QString Connexion = "Activate 3D Connexion Devices";
const QString Console = "Console...";
const QString ControlWithSpeech = "Control With Speech";

View file

@ -322,6 +322,12 @@ void Avatar::removeFromScene(AvatarSharedPointer self, std::shared_ptr<render::S
}
}
void Avatar::updateRenderItem(render::PendingChanges& pendingChanges) {
if (_renderItemID != render::Item::INVALID_ITEM_ID) {
pendingChanges.updateItem<render::Payload<AvatarData>>(_renderItemID, [](render::Payload<AvatarData>& p) {});
}
}
void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
auto& batch = *renderArgs->_batch;
PROFILE_RANGE_BATCH(batch, __FUNCTION__);

View file

@ -76,6 +76,8 @@ public:
void removeFromScene(AvatarSharedPointer self, std::shared_ptr<render::Scene> scene,
render::PendingChanges& pendingChanges);
void updateRenderItem(render::PendingChanges& pendingChanges);
//setters
void setDisplayingLookatVectors(bool displayingLookatVectors) { getHead()->setRenderLookatVectors(displayingLookatVectors); }
void setDisplayingLookatTarget(bool displayingLookatTarget) { getHead()->setRenderLookatTarget(displayingLookatTarget); }

View file

@ -128,6 +128,7 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
PerformanceWarning warn(showWarnings, "Application::updateAvatars()");
PerformanceTimer perfTimer("otherAvatars");
render::PendingChanges pendingChanges;
// simulate avatars
auto hashCopy = getHashCopy();
@ -148,8 +149,11 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
avatar->simulate(deltaTime);
avatar->endUpdate();
++avatarIterator;
avatar->updateRenderItem(pendingChanges);
}
}
qApp->getMain3DScene()->enqueuePendingChanges(pendingChanges);
// simulate avatar fades
simulateAvatarFades(deltaTime);

View file

@ -653,6 +653,7 @@ void MyAvatar::saveData() {
settings.setValue("displayName", _displayName);
settings.setValue("collisionSoundURL", _collisionSoundURL);
settings.setValue("snapTurn", _useSnapTurn);
settings.endGroup();
}
@ -746,6 +747,7 @@ void MyAvatar::loadData() {
setDisplayName(settings.value("displayName").toString());
setCollisionSoundURL(settings.value("collisionSoundURL", DEFAULT_AVATAR_COLLISION_SOUND_URL).toString());
setSnapTurn(settings.value("snapTurn").toBool());
settings.endGroup();

View file

@ -151,6 +151,9 @@ public:
// Removes a handler previously added by addAnimationStateHandler.
Q_INVOKABLE void removeAnimationStateHandler(QScriptValue handler) { _rig->removeAnimationStateHandler(handler); }
Q_INVOKABLE bool getSnapTurn() const { return _useSnapTurn; }
Q_INVOKABLE void setSnapTurn(bool on) { _useSnapTurn = on; }
// get/set avatar data
void saveData();
void loadData();
@ -370,6 +373,7 @@ private:
QUrl _fullAvatarURLFromPreferences;
QString _fullAvatarModelName;
QUrl _animGraphUrl {""};
bool _useSnapTurn { true };
// cache of the current HMD sensor position and orientation
// in sensor space.

View file

@ -57,7 +57,11 @@ void setupPreferences() {
auto preference = new AvatarPreference(AVATAR_BASICS, "Appearance: ", getter, setter);
preferences->addPreference(preference);
}
{
auto getter = [=]()->bool {return myAvatar->getSnapTurn(); };
auto setter = [=](bool value) { myAvatar->setSnapTurn(value); };
preferences->addPreference(new CheckPreference(AVATAR_BASICS, "Snap Turn when in HMD", getter, setter));
}
{
auto getter = []()->QString { return Snapshot::snapshotsLocation.get(); };
auto setter = [](const QString& value) { Snapshot::snapshotsLocation.set(value); };

View file

@ -278,8 +278,8 @@ int AnimInverseKinematics::solveTargetWithCCD(const IKTarget& target, AnimPoseVe
if (axisLength > MIN_AXIS_LENGTH) {
// compute angle of rotation that brings tip closer to target
axis /= axisLength;
float angle = acosf(glm::dot(leverArm, targetLine) / (glm::length(leverArm) * glm::length(targetLine)));
float cosAngle = glm::clamp(glm::dot(leverArm, targetLine) / (glm::length(leverArm) * glm::length(targetLine)), -1.0f, 1.0f);
float angle = acosf(cosAngle);
const float MIN_ADJUSTMENT_ANGLE = 1.0e-4f;
if (angle > MIN_ADJUSTMENT_ANGLE) {
// reduce angle by a fraction (for stability)
@ -348,7 +348,7 @@ int AnimInverseKinematics::solveTargetWithCCD(const IKTarget& target, AnimPoseVe
}
// keep track of tip's new transform as we descend towards root
tipPosition = jointPosition + deltaRotation * leverArm;
tipPosition = jointPosition + deltaRotation * (tipPosition - jointPosition);
tipOrientation = glm::normalize(deltaRotation * tipOrientation);
tipParentOrientation = glm::normalize(deltaRotation * tipParentOrientation);
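
The two changes in this hunk are numerical-robustness fixes: the dot-product ratio is clamped to [-1, 1] before acosf so rounding error can no longer produce a NaN angle, and the tip position is advanced by rotating its current offset from the joint rather than the original lever arm. A minimal C++ sketch of the clamp-before-acos pattern, using a hypothetical helper name (not the engine's API):

    #include <glm/glm.hpp>
    #include <cmath>

    // Angle between two vectors, safe against floating-point drift. Without
    // the clamp, a ratio like 1.0000001f would make acosf return NaN and
    // poison every joint rotation later in the CCD pass.
    static float safeAngleBetween(const glm::vec3& a, const glm::vec3& b) {
        float denom = glm::length(a) * glm::length(b);
        if (denom < 1.0e-6f) {
            return 0.0f;  // degenerate input: no meaningful rotation
        }
        float cosAngle = glm::clamp(glm::dot(a, b) / denom, -1.0f, 1.0f);
        return acosf(cosAngle);  // guaranteed finite
    }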

View file

@ -0,0 +1,87 @@
//
// Created by Bradley Austin Davis on 2016/02/15
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "HmdDisplayPlugin.h"
#include <memory>
#include <glm/gtc/matrix_transform.hpp>
#include <QtCore/QLoggingCategory>
#include <QtWidgets/QWidget>
#include <GLMHelpers.h>
#include <plugins/PluginContainer.h>
#include "../Logging.h"
static const QString MONO_PREVIEW = "Mono Preview";
static const QString FRAMERATE = DisplayPlugin::MENU_PATH() + ">Framerate";
static const bool DEFAULT_MONO_VIEW = true;
void HmdDisplayPlugin::activate() {
_monoPreview = _container->getBoolSetting("monoPreview", DEFAULT_MONO_VIEW);
_container->addMenuItem(PluginType::DISPLAY_PLUGIN, MENU_PATH(), MONO_PREVIEW,
[this](bool clicked) {
_monoPreview = clicked;
_container->setBoolSetting("monoPreview", _monoPreview);
}, true, _monoPreview);
_container->removeMenu(FRAMERATE);
WindowOpenGLDisplayPlugin::activate();
}
void HmdDisplayPlugin::deactivate() {
WindowOpenGLDisplayPlugin::deactivate();
}
void HmdDisplayPlugin::customizeContext() {
WindowOpenGLDisplayPlugin::customizeContext();
// Only enable mirroring if we know vsync is disabled
enableVsync(false);
_enablePreview = !isVsyncEnabled();
}
void HmdDisplayPlugin::internalPresent() {
// screen preview mirroring
if (_enablePreview) {
auto windowSize = toGlm(_window->size());
float windowAspect = aspect(windowSize);
float sceneAspect = aspect(_renderTargetSize);
if (_monoPreview) {
sceneAspect /= 2.0f;
}
float aspectRatio = sceneAspect / windowAspect;
uvec2 targetViewportSize = windowSize;
if (aspectRatio < 1.0f) {
targetViewportSize.x *= aspectRatio;
} else {
targetViewportSize.y /= aspectRatio;
}
uvec2 targetViewportPosition;
if (targetViewportSize.x < windowSize.x) {
targetViewportPosition.x = (windowSize.x - targetViewportSize.x) / 2;
} else if (targetViewportSize.y < windowSize.y) {
targetViewportPosition.y = (windowSize.y - targetViewportSize.y) / 2;
}
glClear(GL_COLOR_BUFFER_BIT);
glViewport(
targetViewportPosition.x, targetViewportPosition.y,
targetViewportSize.x * (_monoPreview ? 2 : 1), targetViewportSize.y);
glEnable(GL_SCISSOR_TEST);
glScissor(
targetViewportPosition.x, targetViewportPosition.y,
targetViewportSize.x, targetViewportSize.y);
glBindTexture(GL_TEXTURE_2D, _currentSceneTexture);
GLenum err = glGetError();
Q_ASSERT(0 == err);
drawUnitQuad();
glDisable(GL_SCISSOR_TEST);
swapBuffers();
}
}
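
The preview branch above letterboxes (or pillarboxes) the scene texture into the mirror window: it compares the scene and window aspect ratios, shrinks one viewport dimension to match, centers the result, and scissors to it (doubling the viewport width when only the left half of a stereo texture is shown in mono preview). A self-contained sketch of the same aspect-fit computation, using hypothetical plain structs instead of glm/GL types:

    #include <cstdint>

    struct Extent { uint32_t x; uint32_t y; };
    struct Fit { int x; int y; uint32_t width; uint32_t height; };

    // Largest centered viewport inside 'window' that keeps 'source's aspect ratio.
    Fit aspectFit(Extent window, Extent source) {
        float ratio = (float(source.x) / float(source.y)) /
                      (float(window.x) / float(window.y));
        Extent size = window;
        if (ratio < 1.0f) {
            size.x = uint32_t(window.x * ratio);   // source relatively taller: pillarbox
        } else {
            size.y = uint32_t(window.y / ratio);   // source relatively wider: letterbox
        }
        return { int(window.x - size.x) / 2, int(window.y - size.y) / 2,
                 size.x, size.y };
    }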

View file

@ -0,0 +1,39 @@
//
// Created by Bradley Austin Davis on 2016/02/15
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include <QtGlobal>
#include "../WindowOpenGLDisplayPlugin.h"
class HmdDisplayPlugin : public WindowOpenGLDisplayPlugin {
public:
bool isHmd() const override final { return true; }
float getIPD() const override final { return _ipd; }
glm::mat4 getEyeToHeadTransform(Eye eye) const override final { return _eyeOffsets[eye]; }
glm::mat4 getEyeProjection(Eye eye, const glm::mat4& baseProjection) const override final { return _eyeProjections[eye]; }
glm::mat4 getCullingProjection(const glm::mat4& baseProjection) const override final { return _cullingProjection; }
glm::uvec2 getRecommendedUiSize() const override final { return uvec2(1920, 1080); }
glm::uvec2 getRecommendedRenderSize() const override final { return _renderTargetSize; }
void activate() override;
void deactivate() override;
protected:
void internalPresent() override;
void customizeContext() override;
std::array<glm::mat4, 2> _eyeOffsets;
std::array<glm::mat4, 2> _eyeProjections;
glm::mat4 _cullingProjection;
glm::uvec2 _renderTargetSize;
float _ipd { 0.064f };
private:
bool _enablePreview { false };
bool _monoPreview { true };
};

View file

@ -39,16 +39,11 @@ const float DEFAULT_SEPARATION = DEFAULT_IPD / DEFAULT_SCREEN_WIDTH;
// Default convergence depth: where is the screen plane in the virtual space (which depth)
const float DEFAULT_CONVERGENCE = 0.5f;
glm::mat4 StereoDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
glm::mat4 StereoDisplayPlugin::getEyeProjection(Eye eye, const glm::mat4& baseProjection) const {
// Refer to http://www.nvidia.com/content/gtc-2010/pdfs/2010_gtc2010.pdf on creating
// stereo projection matrices. Do NOT use "toe-in", use translation.
// Updated version: http://developer.download.nvidia.com/assets/gamedev/docs/Siggraph2011-Stereoscopy_From_XY_to_Z-SG.pdf
if (eye == Mono) {
// FIXME provide a combined matrix, needed for proper culling
return baseProjection;
}
float frustumshift = DEFAULT_SEPARATION;
if (eye == Right) {
frustumshift = -frustumshift;
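
The references in the comment above describe "parallel axis asymmetric frustum" stereo: each eye keeps the same view direction and gets a horizontally skewed projection plus a half-separation translation, instead of rotating the cameras toward a convergence point ("toe-in"), which introduces vertical parallax. A hedged sketch of the skew step only (not this plugin's exact math; the enum and function names are illustrative):

    #include <glm/glm.hpp>

    enum SketchEye { SketchLeft, SketchRight };

    // glm matrices are column-major: proj[2][0] is the (r + l) / (r - l) term
    // that controls horizontal frustum asymmetry. Skewing it by +/- the same
    // amount for the two eyes yields mirrored off-axis frusta; the matching
    // half-separation eye offset belongs in the view transform.
    glm::mat4 shiftedEyeProjection(const glm::mat4& monoProjection,
                                   float frustumShift, SketchEye eye) {
        glm::mat4 proj = monoProjection;
        proj[2][0] += (eye == SketchRight) ? -frustumShift : frustumShift;
        return proj;
    }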

View file

@ -20,8 +20,8 @@ public:
virtual void deactivate() override;
virtual float getRecommendedAspectRatio() const override;
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual glm::mat4 getEyeProjection(Eye eye, const glm::mat4& baseProjection) const override;
// NOTE, because Stereo displays don't include head tracking, and therefore
// can't include roll or pitch, the eye separation is embedded into the projection
// matrix. However, this eliminates the possibility of easily manipulating

View file

@ -133,11 +133,8 @@ QUuid EntityScriptingInterface::addEntity(const EntityItemProperties& properties
float cost = calculateCost(density * volume, 0, newVelocity);
cost *= costMultiplier;
if(cost > _currentAvatarEnergy) {
if (cost > _currentAvatarEnergy) {
return QUuid();
} else {
//debit the avatar energy and continue
emit debitEnergySource(cost);
}
EntityItemID id = EntityItemID(QUuid::createUuid());
@ -173,6 +170,7 @@ QUuid EntityScriptingInterface::addEntity(const EntityItemProperties& properties
// queue the packet
if (success) {
emit debitEnergySource(cost);
queueEntityMessage(PacketType::EntityAdd, id, propertiesWithSimID);
}
@ -232,7 +230,7 @@ EntityItemProperties EntityScriptingInterface::getEntityProperties(QUuid identit
QUuid EntityScriptingInterface::editEntity(QUuid id, const EntityItemProperties& scriptSideProperties) {
EntityItemProperties properties = scriptSideProperties;
auto dimensions = properties.getDimensions();
float volume = dimensions.x * dimensions.y * dimensions.z;
auto density = properties.getDensity();
@ -242,18 +240,18 @@ QUuid EntityScriptingInterface::editEntity(QUuid id, const EntityItemProperties&
EntityItemID entityID(id);
if (!_entityTree) {
queueEntityMessage(PacketType::EntityEdit, entityID, properties);
// if there is no local entity tree, there is no existing velocity, so use 0.
float cost = calculateCost(density * volume, oldVelocity, newVelocity);
cost *= costMultiplier;
if(cost > _currentAvatarEnergy) {
if (cost > _currentAvatarEnergy) {
return QUuid();
} else {
//debit the avatar energy and continue
emit debitEnergySource(cost);
}
return id;
}
// If we have a local entity tree set, then also update it.
@ -268,8 +266,8 @@ QUuid EntityScriptingInterface::editEntity(QUuid id, const EntityItemProperties&
return;
}
//existing entity, retrieve old velocity for check down below
oldVelocity = entity->getVelocity().length();
oldVelocity = entity->getVelocity().length();
if (!scriptSideProperties.parentIDChanged()) {
properties.setParentID(entity->getParentID());
}
@ -284,16 +282,18 @@ QUuid EntityScriptingInterface::editEntity(QUuid id, const EntityItemProperties&
}
}
properties = convertLocationFromScriptSemantics(properties);
updatedEntity = _entityTree->updateEntity(entityID, properties);
float cost = calculateCost(density * volume, oldVelocity, newVelocity);
cost *= costMultiplier;
if(cost > _currentAvatarEnergy) {
if (cost > _currentAvatarEnergy) {
updatedEntity = false;
} else {
//debit the avatar energy and continue
emit debitEnergySource(cost);
updatedEntity = _entityTree->updateEntity(entityID, properties);
if (updatedEntity) {
emit debitEnergySource(cost);
}
}
});
@ -370,15 +370,16 @@ void EntityScriptingInterface::deleteEntity(QUuid id) {
_entityTree->withWriteLock([&] {
EntityItemPointer entity = _entityTree->findEntityByEntityItemID(entityID);
if (entity) {
auto dimensions = entity->getDimensions();
float volume = dimensions.x * dimensions.y * dimensions.z;
auto density = entity->getDensity();
auto velocity = entity->getVelocity().length();
float cost = calculateCost(density * volume, velocity, 0);
cost *= costMultiplier;
if(cost > _currentAvatarEnergy) {
if (cost > _currentAvatarEnergy) {
shouldDelete = false;
return;
} else {
//debit the avatar energy and continue
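
The reordering in this file means avatar energy is only debited once an add, edit, or delete has actually gone through, instead of being charged up front and then lost if the operation bails out. A minimal sketch of that pattern, with hypothetical names (applyWithEnergyBudget, tryApply):

    #include <functional>

    // Compute the cost first, refuse if it cannot be afforded, attempt the
    // operation, and debit only on success so a failed edit costs nothing.
    bool applyWithEnergyBudget(float cost, float& avatarEnergy,
                               const std::function<bool()>& tryApply) {
        if (cost > avatarEnergy) {
            return false;          // too expensive: reject before attempting
        }
        if (!tryApply()) {
            return false;          // operation failed: nothing is debited
        }
        avatarEnergy -= cost;      // debit only after success
        return true;
    }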

View file

@ -116,5 +116,10 @@ void Light::setShowContour(float show) {
editSchema()._control.w = show;
}
void Light::setAmbientSphere(const gpu::SphericalHarmonics& sphere) {
editSchema()._ambientSphere = sphere;
}
void Light::setAmbientSpherePreset(gpu::SphericalHarmonics::Preset preset) {
editSchema()._ambientSphere.assignPreset(preset);
}

View file

@ -94,10 +94,10 @@ public:
void setAmbientIntensity(float intensity);
float getAmbientIntensity() const { return getSchema()._ambientIntensity; }
// Spherical Harmonics storing the Ambien lighting approximation used for the Sun typed light
void setAmbientSphere(const gpu::SphericalHarmonics& sphere) { _ambientSphere = sphere; }
const gpu::SphericalHarmonics& getAmbientSphere() const { return _ambientSphere; }
void setAmbientSpherePreset(gpu::SphericalHarmonics::Preset preset) { _ambientSphere.assignPreset(preset); }
// Spherical Harmonics storing the Ambient lighting approximation used for the Sun typed light
void setAmbientSphere(const gpu::SphericalHarmonics& sphere);
const gpu::SphericalHarmonics& getAmbientSphere() const { return getSchema()._ambientSphere; }
void setAmbientSpherePreset(gpu::SphericalHarmonics::Preset preset);
// Schema to access the attribute values of the light
class Schema {
@ -112,6 +112,8 @@ public:
Vec4 _shadow{0.0f};
Vec4 _control{0.0f, 0.0f, 0.0f, 0.0f};
gpu::SphericalHarmonics _ambientSphere;
};
const UniformBufferView& getSchemaBuffer() const { return _schemaBuffer; }
@ -121,7 +123,6 @@ protected:
Flags _flags;
UniformBufferView _schemaBuffer;
Transform _transform;
gpu::SphericalHarmonics _ambientSphere;
const Schema& getSchema() const { return _schemaBuffer.get<Schema>(); }
Schema& editSchema() { return _schemaBuffer.edit<Schema>(); }

View file

@ -11,6 +11,41 @@
<@if not MODEL_LIGHT_SLH@>
<@def MODEL_LIGHT_SLH@>
struct SphericalHarmonics {
vec4 L00;
vec4 L1m1;
vec4 L10;
vec4 L11;
vec4 L2m2;
vec4 L2m1;
vec4 L20;
vec4 L21;
vec4 L22;
};
vec4 evalSphericalLight(SphericalHarmonics sh, vec3 direction ) {
vec3 dir = direction.xyz;
const float C1 = 0.429043;
const float C2 = 0.511664;
const float C3 = 0.743125;
const float C4 = 0.886227;
const float C5 = 0.247708;
vec4 value = C1 * sh.L22 * (dir.x * dir.x - dir.y * dir.y) +
C3 * sh.L20 * dir.z * dir.z +
C4 * sh.L00 - C5 * sh.L20 +
2.0 * C1 * ( sh.L2m2 * dir.x * dir.y +
sh.L21 * dir.x * dir.z +
sh.L2m1 * dir.y * dir.z ) +
2.0 * C2 * ( sh.L11 * dir.x +
sh.L1m1 * dir.y +
sh.L10 * dir.z ) ;
return value;
}
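
evalSphericalLight above is the standard nine-coefficient irradiance evaluation from Ramamoorthi and Hanrahan's irradiance environment map work; for a unit direction n = (x, y, z) it computes

\[
E(\mathbf{n}) = c_1 L_{22}\,(x^2 - y^2) + c_3 L_{20}\, z^2 + c_4 L_{00} - c_5 L_{20}
+ 2 c_1 \left( L_{2,-2}\, xy + L_{21}\, xz + L_{2,-1}\, yz \right)
+ 2 c_2 \left( L_{11}\, x + L_{1,-1}\, y + L_{10}\, z \right),
\]

with \( c_1 = 0.429043 \), \( c_2 = 0.511664 \), \( c_3 = 0.743125 \), \( c_4 = 0.886227 \), \( c_5 = 0.247708 \); the shader's L2m2, L2m1 and L1m1 fields correspond to \( L_{2,-2} \), \( L_{2,-1} \) and \( L_{1,-1} \).
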
struct Light {
vec4 _position;
vec4 _direction;
@ -20,6 +55,8 @@ struct Light {
vec4 _shadow;
vec4 _control;
SphericalHarmonics _ambientSphere;
};
vec3 getLightPosition(Light l) { return l._position.xyz; }
@ -65,6 +102,10 @@ float getLightShowContour(Light l) {
return l._control.w;
}
SphericalHarmonics getLightAmbientSphere(Light l) {
return l._ambientSphere;
}
<@if GPU_FEATURE_PROFILE == GPU_CORE @>
uniform lightBuffer {
Light light;

View file

@ -24,8 +24,7 @@ class QImage;
enum Eye {
Left,
Right,
Mono
Right
};
/*
@ -95,10 +94,15 @@ public:
}
// Stereo specific methods
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const {
virtual glm::mat4 getEyeProjection(Eye eye, const glm::mat4& baseProjection) const {
return baseProjection;
}
virtual glm::mat4 getCullingProjection(const glm::mat4& baseProjection) const {
return baseProjection;
}
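
getCullingProjection supplies a single frustum that must enclose both eye frusta, so view-frustum culling done once per frame stays conservative while rendering still uses the per-eye projections. HMD plugins typically build it (as the Oculus plugins later in this change do) by widening a symmetric frustum to the larger horizontal tangent of the asymmetric eye frusta:

\[
\tan\theta_{\text{cull,left}} = \tan\theta_{\text{cull,right}} = \max\!\big(\tan\theta_{\text{left}},\ \tan\theta_{\text{right}}\big)
\]
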
// Fetch the most recently displayed image as a QImage
virtual QImage getScreenshot() const = 0;

View file

@ -26,46 +26,6 @@ vec4 evalSkyboxLight(vec3 direction, float lod) {
<@endfunc@>
<@func declareSphericalHarmonics()@>
struct SphericalHarmonics {
vec4 L00;
vec4 L1m1;
vec4 L10;
vec4 L11;
vec4 L2m2;
vec4 L2m1;
vec4 L20;
vec4 L21;
vec4 L22;
};
vec4 evalSphericalLight(SphericalHarmonics sh, vec3 direction ) {
vec3 dir = direction.xzy; // we don't understand why yet, but we need to use z as the vertical axis?
const float C1 = 0.429043;
const float C2 = 0.511664;
const float C3 = 0.743125;
const float C4 = 0.886227;
const float C5 = 0.247708;
vec4 value = C1 * sh.L22 * (dir.x * dir.x - dir.y * dir.y) +
C3 * sh.L20 * dir.z * dir.z +
C4 * sh.L00 - C5 * sh.L20 +
2.0 * C1 * ( sh.L2m2 * dir.x * dir.y +
sh.L21 * dir.x * dir.z +
sh.L2m1 * dir.y * dir.z ) +
2.0 * C2 * ( sh.L11 * dir.x +
sh.L1m1 * dir.y +
sh.L10 * dir.z ) ;
return value;
}
// Need one SH
uniform SphericalHarmonics ambientSphere;
<@endfunc@>
// Everything about light
<@include model/Light.slh@>
@ -91,8 +51,6 @@ vec3 evalAmbientGlobalColor(mat4 invViewMat, float shadowAttenuation, float obsc
<@func declareEvalAmbientSphereGlobalColor()@>
<$declareSphericalHarmonics()$>
vec3 evalAmbientSphereGlobalColor(mat4 invViewMat, float shadowAttenuation, float obscurance, vec3 position, vec3 normal, vec3 diffuse, vec3 specular, float gloss) {
// Need the light now
Light light = getLight();
@ -102,7 +60,7 @@ vec3 evalAmbientSphereGlobalColor(mat4 invViewMat, float shadowAttenuation, floa
vec3 fragEyeDir = normalize(fragEyeVector.xyz);
vec3 ambientNormal = fragNormal.xyz;
vec3 color = diffuse.rgb * evalSphericalLight(ambientSphere, ambientNormal).xyz * obscurance * getLightAmbientIntensity(light);
vec3 color = diffuse.rgb * evalSphericalLight(getLightAmbientSphere(light), ambientNormal).xyz * obscurance * getLightAmbientIntensity(light);
vec4 shading = evalFragShading(fragNormal, -getLightDirection(light), fragEyeDir, specular, gloss);
@ -115,7 +73,6 @@ vec3 evalAmbientSphereGlobalColor(mat4 invViewMat, float shadowAttenuation, floa
<@func declareEvalSkyboxGlobalColor()@>
<$declareSkyboxMap()$>
<$declareSphericalHarmonics()$>
vec3 evalSkyboxGlobalColor(mat4 invViewMat, float shadowAttenuation, float obscurance, vec3 position, vec3 normal, vec3 diffuse, vec3 specular, float gloss) {
// Need the light now
@ -125,7 +82,7 @@ vec3 evalSkyboxGlobalColor(mat4 invViewMat, float shadowAttenuation, float obscu
vec4 fragEyeVector = invViewMat * vec4(-position, 0.0);
vec3 fragEyeDir = normalize(fragEyeVector.xyz);
vec3 color = diffuse.rgb * evalSphericalLight(ambientSphere, fragNormal).xyz * obscurance * getLightAmbientIntensity(light);
vec3 color = diffuse.rgb * evalSphericalLight(getLightAmbientSphere(light), fragNormal).xyz * obscurance * getLightAmbientIntensity(light);
vec4 shading = evalFragShading(fragNormal, -getLightDirection(light), fragEyeDir, specular, gloss);

View file

@ -344,26 +344,7 @@ void DeferredLightingEffect::render(const render::RenderContextPointer& renderCo
}
{ // Setup the global lighting
auto globalLight = _allocatedLights[_globalLights.front()];
if (locations->ambientSphere >= 0) {
gpu::SphericalHarmonics sh = globalLight->getAmbientSphere();
if (useSkyboxCubemap && _skybox->getCubemap()->getIrradiance()) {
sh = (*_skybox->getCubemap()->getIrradiance());
}
for (int i =0; i <gpu::SphericalHarmonics::NUM_COEFFICIENTS; i++) {
batch._glUniform4fv(locations->ambientSphere + i, 1, (const float*) (&sh) + i * 4);
}
}
if (useSkyboxCubemap) {
batch.setResourceTexture(SKYBOX_MAP_UNIT, _skybox->getCubemap());
}
if (locations->lightBufferUnit >= 0) {
batch.setUniformBuffer(locations->lightBufferUnit, globalLight->getSchemaBuffer());
}
setupKeyLightBatch(batch, locations->lightBufferUnit, SKYBOX_MAP_UNIT);
}
{
@ -512,10 +493,18 @@ void DeferredLightingEffect::render(const render::RenderContextPointer& renderCo
}
}
void DeferredLightingEffect::setupBatch(gpu::Batch& batch, int lightBufferUnit) {
void DeferredLightingEffect::setupKeyLightBatch(gpu::Batch& batch, int lightBufferUnit, int skyboxCubemapUnit) {
PerformanceTimer perfTimer("DLE->setupBatch()");
auto globalLight = _allocatedLights[_globalLights.front()];
batch.setUniformBuffer(lightBufferUnit, globalLight->getSchemaBuffer());
if (lightBufferUnit >= 0) {
batch.setUniformBuffer(lightBufferUnit, globalLight->getSchemaBuffer());
}
bool useSkyboxCubemap = (_skybox) && (_skybox->getCubemap());
if (useSkyboxCubemap && (skyboxCubemapUnit >= 0)) {
batch.setResourceTexture(skyboxCubemapUnit, _skybox->getCubemap());
}
}
static void loadLightProgram(const char* vertSource, const char* fragSource, bool lightVolume, gpu::PipelinePointer& pipeline, LightLocationsPtr& locations) {
@ -594,6 +583,11 @@ void DeferredLightingEffect::setGlobalLight(const glm::vec3& direction, const gl
void DeferredLightingEffect::setGlobalSkybox(const model::SkyboxPointer& skybox) {
_skybox = skybox;
auto light = _allocatedLights.front();
if (_skybox && _skybox->getCubemap() && _skybox->getCubemap()->isDefined() && _skybox->getCubemap()->getIrradiance()) {
light->setAmbientSphere( (*_skybox->getCubemap()->getIrradiance()) );
}
}
model::MeshPointer DeferredLightingEffect::getSpotLightMesh() {

View file

@ -46,7 +46,7 @@ public:
void prepare(RenderArgs* args);
void render(const render::RenderContextPointer& renderContext);
void setupBatch(gpu::Batch& batch, int lightBufferUnit);
void setupKeyLightBatch(gpu::Batch& batch, int lightBufferUnit, int skyboxCubemapUnit);
// update global lighting
void setAmbientLightMode(int preset);

View file

@ -96,7 +96,9 @@ void lightBatchSetter(const ShapePipeline& pipeline, gpu::Batch& batch) {
batchSetter(pipeline, batch);
// Set the light
if (pipeline.locations->lightBufferUnit >= 0) {
DependencyManager::get<DeferredLightingEffect>()->setupBatch(batch, pipeline.locations->lightBufferUnit);
DependencyManager::get<DeferredLightingEffect>()->setupKeyLightBatch(batch,
pipeline.locations->lightBufferUnit,
-1);
}
}

View file

@ -5,7 +5,7 @@
// model_translucent.frag
// fragment shader
//
// Created by Andrzej Kapolka on 9/19/14.
// Created by Sam Gateau on 2/15/2016.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
@ -14,31 +14,13 @@
<@include model/Material.slh@>
<@include DeferredLighting.slh@>
<@include model/Light.slh@>
<@include DeferredGlobalLight.slh@>
<$declareEvalAmbientSphereGlobalColor()$>
<@include gpu/Transform.slh@>
<$declareStandardCameraTransform()$>
vec4 evalGlobalColor(float shadowAttenuation, vec3 position, vec3 normal, vec3 diffuse, vec3 specular, float gloss, float opacity) {
// Need the light now
Light light = getLight();
TransformCamera cam = getTransformCamera();
vec3 fragNormal;
<$transformEyeToWorldDir(cam, normal, fragNormal)$>
vec3 fragEyeVectorView = normalize(-position);
vec3 fragEyeDir;
<$transformEyeToWorldDir(cam, fragEyeVectorView, fragEyeDir)$>
vec3 color = opacity * diffuse.rgb * getLightColor(light) * getLightAmbientIntensity(light);
vec4 shading = evalFragShading(fragNormal, -getLightDirection(light), fragEyeDir, specular, gloss);
color += vec3(diffuse * shading.w * opacity + shading.rgb) * shadowAttenuation * getLightColor(light) * getLightIntensity(light);
return vec4(color, opacity);
}
uniform sampler2D diffuseMap;
@ -61,11 +43,16 @@ void main(void) {
float fragGloss = getMaterialShininess(mat) / 128;
float fragOpacity = getMaterialOpacity(mat) * diffuse.a * _alpha;
_fragColor = evalGlobalColor(1.0,
TransformCamera cam = getTransformCamera();
_fragColor = vec4(evalAmbientSphereGlobalColor(
cam._viewInverse,
1.0,
1.0,
fragPosition,
fragNormal,
fragDiffuse,
fragSpecular,
fragGloss,
fragGloss),
fragOpacity);
}

View file

@ -25,7 +25,7 @@ namespace render {
void dirtyHelper();
bool showDisplay{ true }; // FIXME FOR debug
bool showDisplay{ false };
bool showNetwork{ false };
public slots:

View file

@ -42,13 +42,20 @@ ScriptEngines::ScriptEngines()
}
QString normalizeScriptUrl(const QString& rawScriptUrl) {
auto lower = rawScriptUrl.toLower();
if (!rawScriptUrl.startsWith("http:") && !rawScriptUrl.startsWith("https:") && !rawScriptUrl.startsWith("atp:")) {
#ifdef Q_OS_LINUX
if (rawScriptUrl.startsWith("file:")) {
return rawScriptUrl;
}
return QUrl::fromLocalFile(rawScriptUrl).toString();
#else
if (rawScriptUrl.startsWith("file:")) {
return rawScriptUrl.toLower();
}
// Force lowercase on file scripts because of drive letter weirdness.
return QUrl::fromLocalFile(rawScriptUrl).toString().toLower();
#endif
}
return QUrl(rawScriptUrl).toString();
}
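
The new branches keep local-file script URLs case-sensitive on Linux (where the filesystem usually is) while still case-folding them elsewhere, because Windows drive letters and paths can arrive with inconsistent case and should dedupe to a single script entry. A hedged illustration of the intended mapping, sketched as comments with hypothetical paths:

    // normalizeScriptUrl("http://example.com/Foo.js") -> "http://example.com/Foo.js"  (network URLs pass through)
    // normalizeScriptUrl("C:/Scripts/Foo.js")         -> "file:///c:/scripts/foo.js"  (non-Linux: case-folded)
    // normalizeScriptUrl("/home/user/Foo.js")         -> "file:///home/user/Foo.js"   (Linux: case preserved)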

View file

@ -11,22 +11,10 @@
#include "OculusHelpers.h"
uvec2 OculusBaseDisplayPlugin::getRecommendedRenderSize() const {
return _desiredFramebufferSize;
}
glm::mat4 OculusBaseDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
return _eyeProjections[eye];
}
void OculusBaseDisplayPlugin::resetSensors() {
ovr_RecenterPose(_session);
}
glm::mat4 OculusBaseDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
return glm::translate(mat4(), toGlm(_eyeOffsets[eye]));
}
glm::mat4 OculusBaseDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
static uint32_t lastFrameSeen = 0;
auto displayTime = ovr_GetPredictedDisplayTime(_session, frameIndex);
@ -70,7 +58,7 @@ void OculusBaseDisplayPlugin::customizeContext() {
glewExperimental = true;
GLenum err = glewInit();
glGetError();
WindowOpenGLDisplayPlugin::customizeContext();
HmdDisplayPlugin::customizeContext();
}
void OculusBaseDisplayPlugin::init() {
@ -88,37 +76,31 @@ void OculusBaseDisplayPlugin::activate() {
qFatal("Failed to acquire HMD");
}
WindowOpenGLDisplayPlugin::activate();
HmdDisplayPlugin::activate();
_hmdDesc = ovr_GetHmdDesc(_session);
_ipd = ovr_GetFloat(_session, OVR_KEY_IPD, _ipd);
glm::uvec2 eyeSizes[2];
_viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;
ovr_for_each_eye([&](ovrEyeType eye) {
_eyeFovs[eye] = _hmdDesc.DefaultEyeFov[eye];
ovrEyeRenderDesc& erd = _eyeRenderDescs[eye] = ovr_GetRenderDesc(_session, eye, _eyeFovs[eye]);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, 0.001f, 10.0f, ovrProjection_RightHanded);
_compositeEyeProjections[eye] = toGlm(ovrPerspectiveProjection);
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
_eyeOffsets[eye] = glm::translate(mat4(), toGlm(erd.HmdToEyeViewOffset));
eyeSizes[eye] = toGlm(ovr_GetFovTextureSize(_session, eye, erd.Fov, 1.0f));
_viewScaleDesc.HmdToEyeViewOffset[eye] = erd.HmdToEyeViewOffset;
});
ovrFovPort combined = _eyeFovs[Left];
combined.LeftTan = std::max(_eyeFovs[Left].LeftTan, _eyeFovs[Right].LeftTan);
combined.RightTan = std::max(_eyeFovs[Left].RightTan, _eyeFovs[Right].RightTan);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(combined, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[Mono] = toGlm(ovrPerspectiveProjection);
auto combinedFov = _eyeFovs[0];
combinedFov.LeftTan = combinedFov.RightTan = std::max(combinedFov.LeftTan, combinedFov.RightTan);
_cullingProjection = toGlm(ovrMatrix4f_Projection(combinedFov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
_desiredFramebufferSize = uvec2(
_renderTargetSize = uvec2(
eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));
@ -144,20 +126,8 @@ void OculusBaseDisplayPlugin::activate() {
}
void OculusBaseDisplayPlugin::deactivate() {
WindowOpenGLDisplayPlugin::deactivate();
#if (OVR_MAJOR_VERSION >= 6)
HmdDisplayPlugin::deactivate();
ovr_Destroy(_session);
_session = nullptr;
ovr_Shutdown();
#endif
}
float OculusBaseDisplayPlugin::getIPD() const {
float result = OVR_DEFAULT_IPD;
#if (OVR_MAJOR_VERSION >= 6)
result = ovr_GetFloat(_session, OVR_KEY_IPD, result);
#endif
return result;
}

View file

@ -7,13 +7,13 @@
//
#pragma once
#include <display-plugins/WindowOpenGLDisplayPlugin.h>
#include <display-plugins/hmd/HmdDisplayPlugin.h>
#include <QTimer>
#include <OVR_CAPI_GL.h>
class OculusBaseDisplayPlugin : public WindowOpenGLDisplayPlugin {
class OculusBaseDisplayPlugin : public HmdDisplayPlugin {
public:
virtual bool isSupported() const override;
@ -24,25 +24,13 @@ public:
virtual void deactivate() override;
// Stereo specific methods
virtual bool isHmd() const override final { return true; }
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual glm::uvec2 getRecommendedRenderSize() const override final;
virtual glm::uvec2 getRecommendedUiSize() const override final { return uvec2(1920, 1080); }
virtual void resetSensors() override final;
virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override final;
virtual float getIPD() const override final;
virtual glm::mat4 getHeadPose(uint32_t frameIndex) const override;
protected:
virtual void customizeContext() override;
protected:
ovrVector3f _eyeOffsets[2];
mat4 _eyeProjections[3];
mat4 _compositeEyeProjections[2];
uvec2 _desiredFramebufferSize;
ovrSession _session;
ovrGraphicsLuid _luid;
float _ipd{ OVR_DEFAULT_IPD };
@ -50,23 +38,5 @@ protected:
ovrFovPort _eyeFovs[2];
ovrHmdDesc _hmdDesc;
ovrLayerEyeFov _sceneLayer;
ovrViewScaleDesc _viewScaleDesc;
};
#if (OVR_MAJOR_VERSION == 6)
#define ovr_Create ovrHmd_Create
#define ovr_CreateSwapTextureSetGL ovrHmd_CreateSwapTextureSetGL
#define ovr_CreateMirrorTextureGL ovrHmd_CreateMirrorTextureGL
#define ovr_Destroy ovrHmd_Destroy
#define ovr_DestroySwapTextureSet ovrHmd_DestroySwapTextureSet
#define ovr_DestroyMirrorTexture ovrHmd_DestroyMirrorTexture
#define ovr_GetFloat ovrHmd_GetFloat
#define ovr_GetFovTextureSize ovrHmd_GetFovTextureSize
#define ovr_GetFrameTiming ovrHmd_GetFrameTiming
#define ovr_GetTrackingState ovrHmd_GetTrackingState
#define ovr_GetRenderDesc ovrHmd_GetRenderDesc
#define ovr_RecenterPose ovrHmd_RecenterPose
#define ovr_SubmitFrame ovrHmd_SubmitFrame
#define ovr_ConfigureTracking ovrHmd_ConfigureTracking
#define ovr_GetHmdDesc(X) *X
#endif

View file

@ -6,150 +6,11 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "OculusDisplayPlugin.h"
#include <QtOpenGL/QGLWidget>
// FIXME get rid of this
#include <gl/Config.h>
#include <plugins/PluginContainer.h>
#include "OculusHelpers.h"
#if (OVR_MAJOR_VERSION >= 6)
// A base class for FBO wrappers that need to use the Oculus C
// API to manage textures via ovr_CreateSwapTextureSetGL,
// ovr_CreateMirrorTextureGL, etc
template <typename C>
struct RiftFramebufferWrapper : public FramebufferWrapper<C, char> {
ovrSession session;
RiftFramebufferWrapper(const ovrSession& session) : session(session) {
color = 0;
depth = 0;
};
~RiftFramebufferWrapper() {
destroyColor();
}
void Resize(const uvec2 & size) {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
this->size = size;
initColor();
initDone();
}
protected:
virtual void destroyColor() {
}
virtual void initDepth() override final {
}
};
// A wrapper for constructing and using a swap texture set,
// where each frame you draw to a texture via the FBO,
// then submit it and increment to the next texture.
// The Oculus SDK manages the creation and destruction of
// the textures
struct SwapFramebufferWrapper : public RiftFramebufferWrapper<ovrSwapTextureSet*> {
SwapFramebufferWrapper(const ovrHmd & hmd)
: RiftFramebufferWrapper(hmd) {
}
void Increment() {
++color->CurrentIndex;
color->CurrentIndex %= color->TextureCount;
}
protected:
virtual void destroyColor() override {
if (color) {
ovr_DestroySwapTextureSet(session, color);
color = nullptr;
}
}
virtual void initColor() override {
destroyColor();
if (!OVR_SUCCESS(ovr_CreateSwapTextureSetGL(session, GL_SRGB8_ALPHA8, size.x, size.y, &color))) {
qFatal("Unable to create swap textures");
}
for (int i = 0; i < color->TextureCount; ++i) {
ovrGLTexture& ovrTex = (ovrGLTexture&)color->Textures[i];
glBindTexture(GL_TEXTURE_2D, ovrTex.OGL.TexId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
glBindTexture(GL_TEXTURE_2D, 0);
}
virtual void initDone() override {
}
virtual void onBind(oglplus::Framebuffer::Target target) override {
ovrGLTexture& tex = (ovrGLTexture&)(color->Textures[color->CurrentIndex]);
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex.OGL.TexId, 0);
}
virtual void onUnbind(oglplus::Framebuffer::Target target) override {
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
}
};
// We use a FBO to wrap the mirror texture because it makes it easier to
// render to the screen via glBlitFramebuffer
struct MirrorFramebufferWrapper : public RiftFramebufferWrapper<ovrGLTexture*> {
MirrorFramebufferWrapper(const ovrHmd & hmd)
: RiftFramebufferWrapper(hmd) { }
private:
virtual void destroyColor() override {
if (color) {
ovr_DestroyMirrorTexture(session, (ovrTexture*)color);
color = nullptr;
}
}
void initColor() override {
destroyColor();
ovrResult result = ovr_CreateMirrorTextureGL(session, GL_SRGB8_ALPHA8, size.x, size.y, (ovrTexture**)&color);
Q_ASSERT(OVR_SUCCESS(result));
}
void initDone() override {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, color->OGL.TexId, 0);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
}
};
#endif
const QString OculusDisplayPlugin::NAME("Oculus Rift");
static const QString MONO_PREVIEW = "Mono Preview";
static const QString FRAMERATE = DisplayPlugin::MENU_PATH() + ">Framerate";
static const bool DEFAULT_MONO_VIEW = true;
void OculusDisplayPlugin::activate() {
_monoPreview = _container->getBoolSetting("monoPreview", DEFAULT_MONO_VIEW);
_container->addMenuItem(PluginType::DISPLAY_PLUGIN, MENU_PATH(), MONO_PREVIEW,
[this](bool clicked) {
_monoPreview = clicked;
_container->setBoolSetting("monoPreview", _monoPreview);
}, true, _monoPreview);
_container->removeMenu(FRAMERATE);
OculusBaseDisplayPlugin::activate();
}
@ -182,27 +43,8 @@ void OculusDisplayPlugin::internalPresent() {
}
using namespace oglplus;
// Need to make sure only the display plugin is responsible for
// controlling vsync
wglSwapIntervalEXT(0);
// screen preview mirroring
if (_enablePreview) {
auto windowSize = toGlm(_window->size());
if (_monoPreview) {
Context::Viewport(windowSize.x * 2, windowSize.y);
Context::Scissor(0, windowSize.y, windowSize.x, windowSize.y);
} else {
Context::Viewport(windowSize.x, windowSize.y);
}
glBindTexture(GL_TEXTURE_2D, _currentSceneTexture);
GLenum err = glGetError();
Q_ASSERT(0 == err);
drawUnitQuad();
}
const auto& size = _sceneFbo->size;
_sceneFbo->Bound([&] {
auto size = _sceneFbo->size;
Context::Viewport(size.x, size.y);
glBindTexture(GL_TEXTURE_2D, _currentSceneTexture);
//glEnable(GL_FRAMEBUFFER_SRGB);
@ -225,27 +67,17 @@ void OculusDisplayPlugin::internalPresent() {
_sceneLayer.RenderPose[ovrEyeType::ovrEye_Right] = eyePoses.second;
{
ovrViewScaleDesc viewScaleDesc;
viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;
viewScaleDesc.HmdToEyeViewOffset[0] = _eyeOffsets[0];
viewScaleDesc.HmdToEyeViewOffset[1] = _eyeOffsets[1];
ovrLayerHeader* layers = &_sceneLayer.Header;
ovrResult result = ovr_SubmitFrame(_session, frameIndex, &viewScaleDesc, &layers, 1);
ovrResult result = ovr_SubmitFrame(_session, frameIndex, &_viewScaleDesc, &layers, 1);
if (!OVR_SUCCESS(result)) {
qDebug() << result;
}
}
_sceneFbo->Increment();
/*
The swapbuffer call here is only required if we want to mirror the content to the screen.
However, it should only be done if we can reliably disable v-sync on the mirror surface,
otherwise the swapbuffer delay will interfere with the framerate of the headset
*/
if (_enablePreview) {
swapBuffers();
}
// Handle mirroring to screen in base class
HmdDisplayPlugin::internalPresent();
}
void OculusDisplayPlugin::setEyeRenderPose(uint32_t frameIndex, Eye eye, const glm::mat4& pose) {

View file

@ -8,16 +8,69 @@
#include "OculusHelpers.h"
// A wrapper for constructing and using a swap texture set,
// where each frame you draw to a texture via the FBO,
// then submit it and increment to the next texture.
// The Oculus SDK manages the creation and destruction of
// the textures
namespace Oculus {
ovrHmd _hmd;
ovrEyeRenderDesc _eyeRenderDescs[2];
ovrPosef _eyePoses[2];
ovrVector3f _eyeOffsets[2];
ovrFovPort _eyeFovs[2];
mat4 _eyeProjections[2];
mat4 _compositeEyeProjections[2];
uvec2 _desiredFramebufferSize;
SwapFramebufferWrapper::SwapFramebufferWrapper(const ovrSession& session)
: _session(session) {
color = nullptr;
depth = nullptr;
}
SwapFramebufferWrapper::~SwapFramebufferWrapper() {
destroyColor();
}
void SwapFramebufferWrapper::Increment() {
++color->CurrentIndex;
color->CurrentIndex %= color->TextureCount;
}
void SwapFramebufferWrapper::Resize(const uvec2 & size) {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
this->size = size;
initColor();
initDone();
}
void SwapFramebufferWrapper::destroyColor() {
if (color) {
ovr_DestroySwapTextureSet(_session, color);
color = nullptr;
}
}
void SwapFramebufferWrapper::initColor() {
destroyColor();
if (!OVR_SUCCESS(ovr_CreateSwapTextureSetGL(_session, GL_SRGB8_ALPHA8, size.x, size.y, &color))) {
qFatal("Unable to create swap textures");
}
for (int i = 0; i < color->TextureCount; ++i) {
ovrGLTexture& ovrTex = (ovrGLTexture&)color->Textures[i];
glBindTexture(GL_TEXTURE_2D, ovrTex.OGL.TexId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
glBindTexture(GL_TEXTURE_2D, 0);
}
void SwapFramebufferWrapper::initDone() {
}
void SwapFramebufferWrapper::onBind(oglplus::Framebuffer::Target target) {
ovrGLTexture& tex = (ovrGLTexture&)(color->Textures[color->CurrentIndex]);
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex.OGL.TexId, 0);
}
void SwapFramebufferWrapper::onUnbind(oglplus::Framebuffer::Target target) {
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
}

View file

@ -12,9 +12,7 @@
#include <glm/gtc/type_ptr.hpp>
#include <glm/gtc/matrix_transform.hpp>
#if (OVR_MAJOR_VERSION < 6)
#define OVR_SUCCESS(x) x
#endif
#include <gl/OglplusHelpers.h>
// Convenience method for looping over each eye with a lambda
template <typename Function>
@ -87,3 +85,26 @@ inline ovrPosef ovrPoseFromGlm(const glm::mat4 & m) {
result.Position = ovrFromGlm(translation);
return result;
}
// A wrapper for constructing and using a swap texture set,
// where each frame you draw to a texture via the FBO,
// then submit it and increment to the next texture.
// The Oculus SDK manages the creation and destruction of
// the textures
struct SwapFramebufferWrapper : public FramebufferWrapper<ovrSwapTextureSet*, void*> {
SwapFramebufferWrapper(const ovrSession& session);
~SwapFramebufferWrapper();
void Increment();
void Resize(const uvec2 & size);
protected:
void initColor() override final;
void initDepth() override final {}
void initDone() override final;
void onBind(oglplus::Framebuffer::Target target) override final;
void onUnbind(oglplus::Framebuffer::Target target) override final;
void destroyColor();
private:
ovrSession _session;
};

View file

@ -31,29 +31,15 @@ const QString OculusLegacyDisplayPlugin::NAME("Oculus Rift (0.5) (Legacy)");
OculusLegacyDisplayPlugin::OculusLegacyDisplayPlugin() {
}
uvec2 OculusLegacyDisplayPlugin::getRecommendedRenderSize() const {
return _desiredFramebufferSize;
}
glm::mat4 OculusLegacyDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
return _eyeProjections[eye];
}
void OculusLegacyDisplayPlugin::resetSensors() {
ovrHmd_RecenterPose(_hmd);
}
glm::mat4 OculusLegacyDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
return toGlm(_eyePoses[eye]);
}
glm::mat4 OculusLegacyDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
static uint32_t lastFrameSeen = 0;
if (frameIndex > lastFrameSeen) {
Lock lock(_mutex);
_trackingState = ovrHmd_GetTrackingState(_hmd, ovr_GetTimeInSeconds());
ovrHmd_GetEyePoses(_hmd, frameIndex, _eyeOffsets, _eyePoses, &_trackingState);
lastFrameSeen = frameIndex;
}
return toGlm(_trackingState.HeadPose.ThePose);
@ -87,7 +73,7 @@ bool OculusLegacyDisplayPlugin::isSupported() const {
}
void OculusLegacyDisplayPlugin::activate() {
WindowOpenGLDisplayPlugin::activate();
HmdDisplayPlugin::activate();
if (!(ovr_Initialize(nullptr))) {
Q_ASSERT(false);
@ -100,30 +86,26 @@ void OculusLegacyDisplayPlugin::activate() {
qFatal("Failed to acquire HMD");
}
_ipd = ovrHmd_GetFloat(_hmd, OVR_KEY_IPD, _ipd);
glm::uvec2 eyeSizes[2];
ovr_for_each_eye([&](ovrEyeType eye) {
_eyeFovs[eye] = _hmd->MaxEyeFov[eye];
ovrEyeRenderDesc erd = _eyeRenderDescs[eye] = ovrHmd_GetRenderDesc(_hmd, eye, _eyeFovs[eye]);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, 0.001f, 10.0f, ovrProjection_RightHanded);
_compositeEyeProjections[eye] = toGlm(ovrPerspectiveProjection);
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
_eyeOffsets[eye] = glm::translate(mat4(), toGlm(erd.HmdToEyeViewOffset));
eyeSizes[eye] = toGlm(ovrHmd_GetFovTextureSize(_hmd, eye, erd.Fov, 1.0f));
});
ovrFovPort combined = _eyeFovs[Left];
combined.LeftTan = std::max(_eyeFovs[Left].LeftTan, _eyeFovs[Right].LeftTan);
combined.RightTan = std::max(_eyeFovs[Left].RightTan, _eyeFovs[Right].RightTan);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(combined, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[Mono] = toGlm(ovrPerspectiveProjection);
_desiredFramebufferSize = uvec2(eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));
auto combinedFov = _eyeFovs[0];
combinedFov.LeftTan = combinedFov.RightTan = std::max(combinedFov.LeftTan, combinedFov.RightTan);
_cullingProjection = toGlm(ovrMatrix4f_Projection(combinedFov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
_renderTargetSize = uvec2(
eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));
if (!ovrHmd_ConfigureTracking(_hmd,
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0)) {
@ -132,13 +114,12 @@ void OculusLegacyDisplayPlugin::activate() {
}
void OculusLegacyDisplayPlugin::deactivate() {
WindowOpenGLDisplayPlugin::deactivate();
HmdDisplayPlugin::deactivate();
ovrHmd_Destroy(_hmd);
_hmd = nullptr;
ovr_Shutdown();
}
// DLL based display plugins MUST initialize GLEW inside the DLL code.
void OculusLegacyDisplayPlugin::customizeContext() {
static std::once_flag once;
@ -147,7 +128,7 @@ void OculusLegacyDisplayPlugin::customizeContext() {
glewInit();
glGetError();
});
WindowOpenGLDisplayPlugin::customizeContext();
HmdDisplayPlugin::customizeContext();
#if 0
ovrGLConfig config; memset(&config, 0, sizeof(ovrRenderAPIConfig));
auto& header = config.Config.Header;
@ -179,7 +160,7 @@ void OculusLegacyDisplayPlugin::customizeContext() {
#if 0
void OculusLegacyDisplayPlugin::uncustomizeContext() {
WindowOpenGLDisplayPlugin::uncustomizeContext();
HmdDisplayPlugin::uncustomizeContext();
}
void OculusLegacyDisplayPlugin::internalPresent() {
@ -200,3 +181,4 @@ float OculusLegacyDisplayPlugin::getTargetFrameRate() {
return TARGET_RATE_OculusLegacy;
}

View file

@ -7,7 +7,7 @@
//
#pragma once
#include <display-plugins/WindowOpenGLDisplayPlugin.h>
#include <display-plugins/hmd/HmdDisplayPlugin.h>
#include <QTimer>
@ -15,7 +15,7 @@
const float TARGET_RATE_OculusLegacy = 75.0f;
class OculusLegacyDisplayPlugin : public WindowOpenGLDisplayPlugin {
class OculusLegacyDisplayPlugin : public HmdDisplayPlugin {
public:
OculusLegacyDisplayPlugin();
virtual bool isSupported() const override;
@ -27,12 +27,7 @@ public:
virtual int getHmdScreen() const override;
// Stereo specific methods
virtual bool isHmd() const override { return true; }
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual glm::uvec2 getRecommendedRenderSize() const override;
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
virtual void resetSensors() override;
virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override;
virtual glm::mat4 getHeadPose(uint32_t frameIndex) const override;
virtual float getTargetFrameRate() override;
@ -50,12 +45,7 @@ private:
ovrHmd _hmd;
mutable ovrTrackingState _trackingState;
ovrEyeRenderDesc _eyeRenderDescs[2];
mutable ovrPosef _eyePoses[2];
ovrVector3f _eyeOffsets[2];
ovrFovPort _eyeFovs[2];
mat4 _eyeProjections[3];
mat4 _compositeEyeProjections[2];
uvec2 _desiredFramebufferSize;
//ovrTexture _eyeTextures[2]; // FIXME - not currently in use
mutable int _hmdScreen { -1 };
bool _hswDismissed { false };

View file

@ -25,10 +25,8 @@
#include "OpenVrHelpers.h"
Q_DECLARE_LOGGING_CATEGORY(displayplugins)
Q_LOGGING_CATEGORY(displayplugins, "hifi.displayplugins")
const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probably shouldn't be hardcoded here
static vr::IVRCompositor* _compositor{ nullptr };
@ -36,40 +34,7 @@ static vr::TrackedDevicePose_t _presentThreadTrackedDevicePose[vr::k_unMaxTracke
vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
static mat4 _sensorResetMat;
static uvec2 _windowSize;
static uvec2 _renderTargetSize;
struct PerEyeData {
//uvec2 _viewportOrigin;
//uvec2 _viewportSize;
mat4 _projectionMatrix;
mat4 _eyeOffset;
mat4 _pose;
};
static PerEyeData _eyesData[2];
template<typename F>
void openvr_for_each_eye(F f) {
f(vr::Hmd_Eye::Eye_Left);
f(vr::Hmd_Eye::Eye_Right);
}
mat4 toGlm(const vr::HmdMatrix44_t& m) {
return glm::transpose(glm::make_mat4(&m.m[0][0]));
}
mat4 toGlm(const vr::HmdMatrix34_t& m) {
mat4 result = mat4(
m.m[0][0], m.m[1][0], m.m[2][0], 0.0,
m.m[0][1], m.m[1][1], m.m[2][1], 0.0,
m.m[0][2], m.m[1][2], m.m[2][2], 0.0,
m.m[0][3], m.m[1][3], m.m[2][3], 1.0f);
return result;
}
static std::array<vr::Hmd_Eye, 2> VR_EYES { { vr::Eye_Left, vr::Eye_Right } };
bool OpenVrDisplayPlugin::isSupported() const {
return vr::VR_IsHmdPresent();
@ -91,15 +56,17 @@ void OpenVrDisplayPlugin::activate() {
{
Lock lock(_poseMutex);
openvr_for_each_eye([&](vr::Hmd_Eye eye) {
PerEyeData& eyeData = _eyesData[eye];
eyeData._projectionMatrix = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
eyeData._eyeOffset = toGlm(_hmd->GetEyeToHeadTransform(eye));
_eyeOffsets[eye] = toGlm(_hmd->GetEyeToHeadTransform(eye));
_eyeProjections[eye] = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
});
// FIXME Calculate the proper combined projection by using GetProjectionRaw values from both eyes
_cullingProjection = _eyeProjections[0];
}
_compositor = vr::VRCompositor();
Q_ASSERT(_compositor);
WindowOpenGLDisplayPlugin::activate();
HmdDisplayPlugin::activate();
}
void OpenVrDisplayPlugin::deactivate() {
@ -109,34 +76,18 @@ void OpenVrDisplayPlugin::deactivate() {
_hmd = nullptr;
}
_compositor = nullptr;
WindowOpenGLDisplayPlugin::deactivate();
HmdDisplayPlugin::deactivate();
}
void OpenVrDisplayPlugin::customizeContext() {
// Display plugins in DLLs must initialize glew locally
static std::once_flag once;
std::call_once(once, []{
glewExperimental = true;
GLenum err = glewInit();
glGetError();
});
WindowOpenGLDisplayPlugin::customizeContext();
enableVsync(false);
// Only enable mirroring if we know vsync is disabled
_enablePreview = !isVsyncEnabled();
}
uvec2 OpenVrDisplayPlugin::getRecommendedRenderSize() const {
return _renderTargetSize;
}
mat4 OpenVrDisplayPlugin::getProjection(Eye eye, const mat4& baseProjection) const {
// FIXME hack to ensure that we don't crash trying to get the combined matrix
if (eye == Mono) {
eye = Left;
}
Lock lock(_poseMutex);
return _eyesData[eye]._projectionMatrix;
HmdDisplayPlugin::customizeContext();
}
void OpenVrDisplayPlugin::resetSensors() {
@ -145,41 +96,17 @@ void OpenVrDisplayPlugin::resetSensors() {
_sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
}
glm::mat4 OpenVrDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
Lock lock(_poseMutex);
return _eyesData[eye]._eyeOffset;
}
glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
Lock lock(_poseMutex);
return _trackedDevicePoseMat4[0];
}
void OpenVrDisplayPlugin::submitSceneTexture(uint32_t frameIndex, uint32_t sceneTexture, const glm::uvec2& sceneSize) {
WindowOpenGLDisplayPlugin::submitSceneTexture(frameIndex, sceneTexture, sceneSize);
}
void OpenVrDisplayPlugin::internalPresent() {
// Flip y-axis since GL UV coords are backwards.
static vr::VRTextureBounds_t leftBounds{ 0, 0, 0.5f, 1 };
static vr::VRTextureBounds_t rightBounds{ 0.5f, 0, 1, 1 };
// screen preview mirroring
if (_enablePreview) {
auto windowSize = toGlm(_window->size());
if (_monoPreview) {
glViewport(0, 0, windowSize.x * 2, windowSize.y);
glScissor(0, windowSize.y, windowSize.x, windowSize.y);
} else {
glViewport(0, 0, windowSize.x, windowSize.y);
}
glBindTexture(GL_TEXTURE_2D, _currentSceneTexture);
GLenum err = glGetError();
Q_ASSERT(0 == err);
drawUnitQuad();
}
vr::Texture_t texture{ (void*)_currentSceneTexture, vr::API_OpenGL, vr::ColorSpace_Auto };
_compositor->Submit(vr::Eye_Left, &texture, &leftBounds);
@ -187,10 +114,6 @@ void OpenVrDisplayPlugin::internalPresent() {
glFinish();
if (_enablePreview) {
swapBuffers();
}
_compositor->WaitGetPoses(_presentThreadTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, nullptr, 0);
{
@ -200,10 +123,8 @@ void OpenVrDisplayPlugin::internalPresent() {
_trackedDevicePose[i] = _presentThreadTrackedDevicePose[i];
_trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
}
openvr_for_each_eye([&](vr::Hmd_Eye eye) {
_eyesData[eye]._pose = _trackedDevicePoseMat4[0];
});
}
//WindowOpenGLDisplayPlugin::internalPresent();
// Handle the mirroring in the base class
HmdDisplayPlugin::internalPresent();
}

View file

@ -11,15 +11,14 @@
#include <openvr.h>
#include <display-plugins/WindowOpenGLDisplayPlugin.h>
#include <display-plugins/hmd/HmdDisplayPlugin.h>
const float TARGET_RATE_OpenVr = 90.0f; // FIXME: get from sdk tracked device property? This number is vive-only.
class OpenVrDisplayPlugin : public WindowOpenGLDisplayPlugin {
class OpenVrDisplayPlugin : public HmdDisplayPlugin {
public:
virtual bool isSupported() const override;
virtual const QString& getName() const override { return NAME; }
virtual bool isHmd() const override { return true; }
virtual float getTargetFrameRate() override { return TARGET_RATE_OpenVr; }
@ -28,16 +27,9 @@ public:
virtual void customizeContext() override;
virtual glm::uvec2 getRecommendedRenderSize() const override;
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
// Stereo specific methods
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual void resetSensors() override;
virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override;
virtual glm::mat4 getHeadPose(uint32_t frameIndex) const override;
virtual void submitSceneTexture(uint32_t frameIndex, uint32_t sceneTexture, const glm::uvec2& sceneSize) override;
protected:
virtual void internalPresent() override;
@ -45,8 +37,6 @@ protected:
private:
vr::IVRSystem* _hmd { nullptr };
static const QString NAME;
bool _enablePreview { false };
bool _monoPreview { true };
mutable Mutex _poseMutex;
};

View file

@ -16,6 +16,7 @@
Q_DECLARE_LOGGING_CATEGORY(displayplugins)
Q_LOGGING_CATEGORY(displayplugins, "hifi.plugins.display")
using Mutex = std::mutex;
using Lock = std::unique_lock<Mutex>;

View file

@ -15,3 +15,21 @@
vr::IVRSystem* acquireOpenVrSystem();
void releaseOpenVrSystem();
template<typename F>
void openvr_for_each_eye(F f) {
f(vr::Hmd_Eye::Eye_Left);
f(vr::Hmd_Eye::Eye_Right);
}
inline mat4 toGlm(const vr::HmdMatrix44_t& m) {
return glm::transpose(glm::make_mat4(&m.m[0][0]));
}
inline mat4 toGlm(const vr::HmdMatrix34_t& m) {
mat4 result = mat4(
m.m[0][0], m.m[1][0], m.m[2][0], 0.0,
m.m[0][1], m.m[1][1], m.m[2][1], 0.0,
m.m[0][2], m.m[1][2], m.m[2][2], 0.0,
m.m[0][3], m.m[1][3], m.m[2][3], 1.0f);
return result;
}