Mirror of https://github.com/overte-org/overte.git (synced 2025-08-06 22:39:18 +02:00)

Commit ec7165983d: Merge branch 'master' of https://github.com/highfidelity/hifi into orange

30 changed files with 290 additions and 153 deletions
@@ -151,7 +151,7 @@
 data.properties[group][property] = {
 x: elX.value,
 y: elY.value,
-z: elZ.value,
+z: elZ ? elZ.value : 0,
 };
 EventBridge.emitWebEvent(JSON.stringify(data));
 }
@@ -683,7 +683,6 @@
 elZoneKeyLightAmbientIntensity.value = properties.keyLight.ambientIntensity.toFixed(2);
 elZoneKeyLightDirectionX.value = properties.keyLight.direction.x.toFixed(2);
 elZoneKeyLightDirectionY.value = properties.keyLight.direction.y.toFixed(2);
-elZoneKeyLightDirectionZ.value = properties.keyLight.direction.z.toFixed(2);
 elZoneKeyLightAmbientURL.value = properties.keyLight.ambientURL;


@@ -937,12 +936,11 @@
 elZoneKeyLightColorBlue.addEventListener('change', zoneKeyLightColorChangeFunction);
 elZoneKeyLightIntensity.addEventListener('change', createEmitGroupNumberPropertyUpdateFunction('keyLight','intensity'));
 elZoneKeyLightAmbientIntensity.addEventListener('change', createEmitGroupNumberPropertyUpdateFunction('keyLight','ambientIntensity'));
-var zoneKeyLightDirectionChangeFunction = createEmitGroupVec3PropertyUpdateFunction('keyLight','direction', elZoneKeyLightDirectionX, elZoneKeyLightDirectionY, elZoneKeyLightDirectionZ);
+elZoneKeyLightAmbientURL.addEventListener('change', createEmitGroupTextPropertyUpdateFunction('keyLight','ambientURL'));
+var zoneKeyLightDirectionChangeFunction = createEmitGroupVec3PropertyUpdateFunction('keyLight','direction', elZoneKeyLightDirectionX, elZoneKeyLightDirectionY);
 elZoneKeyLightDirectionX.addEventListener('change', zoneKeyLightDirectionChangeFunction);
 elZoneKeyLightDirectionY.addEventListener('change', zoneKeyLightDirectionChangeFunction);
-elZoneKeyLightDirectionZ.addEventListener('change', zoneKeyLightDirectionChangeFunction);
-elZoneKeyLightAmbientURL.addEventListener('change', createEmitGroupTextPropertyUpdateFunction('keyLight','ambientURL'));

 elZoneStageLatitude.addEventListener('change', createEmitGroupNumberPropertyUpdateFunction('stage','latitude'));
 elZoneStageLongitude.addEventListener('change', createEmitGroupNumberPropertyUpdateFunction('stage','longitude'));
 elZoneStageAltitude.addEventListener('change', createEmitGroupNumberPropertyUpdateFunction('stage','altitude'));
@@ -1170,9 +1168,8 @@
 <div class="zone-section keyLight-section property">
 <div class="label">Light Direction</div>
 <div class="value">
-<div class="input-area">Pitch <input class="coord" type="number" id="property-zone-key-light-direction-x"></div>
+<div class="input-area">Altitude <input class="coord" type="number" id="property-zone-key-light-direction-x"></div>
-<div class="input-area">Yaw <input class="coord" type="number" id="property-zone-key-light-direction-y"></div>
+<div class="input-area">Azimuth <input class="coord" type="number" id="property-zone-key-light-direction-y"></div>
-<div class="input-area">Roll <input class="coord" type="number" id="property-zone-key-light-direction-z"></div>
 </div>
 </div>

@@ -16,8 +16,8 @@ var deletingVoxels = false;
 var addingSpheres = false;
 var deletingSpheres = false;

-var offAlpha = 0.5;
+var offAlpha = 0.8;
-var onAlpha = 0.9;
+var onAlpha = 1.0;
 var editSphereRadius = 4;

 function floorVector(v) {
@@ -48,52 +48,47 @@ var toolBar = (function () {
 height: toolHeight,
 alpha: onAlpha,
 visible: true,
-});
+}, false);

 addVoxelButton = toolBar.addTool({
 imageURL: toolIconUrl + "voxel-add.svg",
-subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
 width: toolWidth,
 height: toolHeight,
 alpha: offAlpha,
 visible: false
-});
+}, false);

 deleteVoxelButton = toolBar.addTool({
 imageURL: toolIconUrl + "voxel-delete.svg",
-subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
 width: toolWidth,
 height: toolHeight,
 alpha: offAlpha,
 visible: false
-});
+}, false);

 addSphereButton = toolBar.addTool({
 imageURL: toolIconUrl + "sphere-add.svg",
-subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
 width: toolWidth,
 height: toolHeight,
 alpha: offAlpha,
 visible: false
-});
+}, false);

 deleteSphereButton = toolBar.addTool({
 imageURL: toolIconUrl + "sphere-delete.svg",
-subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
 width: toolWidth,
 height: toolHeight,
 alpha: offAlpha,
 visible: false
-});
+}, false);

 addTerrainButton = toolBar.addTool({
 imageURL: toolIconUrl + "voxel-terrain.svg",
-subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
 width: toolWidth,
 height: toolHeight,
 alpha: onAlpha,
 visible: false
-});
+}, false);

 that.setActive(false);
 }
@@ -193,7 +188,6 @@ var toolBar = (function () {

 that.cleanup = function () {
 toolBar.cleanup();
-// Overlays.deleteOverlay(activeButton);
 };


@@ -237,7 +231,6 @@ function grabLowestJointY() {
 }


-
 function addTerrainBlock() {
 var baseLocation = getTerrainAlignedLocation(Vec3.sum(MyAvatar.position, {x:8, y:8, z:8}));
 if (baseLocation.y > MyAvatar.position.y) {
@@ -253,10 +246,26 @@ function addTerrainBlock() {
 baseLocation = getTerrainAlignedLocation(facingPosition);
 alreadyThere = lookupTerrainForLocation(baseLocation);
 if (alreadyThere) {
-return;
+return null;
 }
 }

+var polyVoxID = addTerrainBlockNearLocation(baseLocation);
+
+if (polyVoxID) {
+var AvatarPositionInVoxelCoords = Entities.worldCoordsToVoxelCoords(polyVoxID, MyAvatar.position);
+// TODO -- how to find the avatar's feet?
+var topY = Math.round(AvatarPositionInVoxelCoords.y) - 4;
+Entities.setVoxelsInCuboid(polyVoxID, {x:0, y:0, z:0}, {x:16, y:topY, z:16}, 255);
+}
+}
+
+function addTerrainBlockNearLocation(baseLocation) {
+var alreadyThere = lookupTerrainForLocation(baseLocation);
+if (alreadyThere) {
+return null;
+}
+
 var polyVoxID = Entities.addEntity({
 type: "PolyVox",
 name: "terrain",
@@ -269,12 +278,6 @@
 zTextureURL: "http://headache.hungry.com/~seth/hifi/dirt.jpeg"
 });

-var AvatarPositionInVoxelCoords = Entities.worldCoordsToVoxelCoords(polyVoxID, MyAvatar.position);
-// TODO -- how to find the avatar's feet?
-var topY = Math.round(AvatarPositionInVoxelCoords.y) - 4;
-Entities.setVoxelsInCuboid(polyVoxID, {x:0, y:0, z:0}, {x:16, y:topY, z:16}, 255);
-
-
 //////////
 // stitch together the terrain with x/y/z NeighorID properties
 //////////
@@ -330,7 +333,7 @@ function addTerrainBlock() {
 properties.zPNeighborID = lookupTerrainForLocation(Vec3.sum(baseLocation, {x:0, y:0, z:16}));
 Entities.editEntity(polyVoxID, properties);

-return true;
+return polyVoxID;
 }


@@ -456,9 +459,6 @@ function keyReleaseEvent(event) {


 function cleanup() {
-for (var i = 0; i < overlays.length; i++) {
-Overlays.deleteOverlay(overlays[i]);
-}
 toolBar.cleanup();
 }

@@ -169,6 +169,28 @@ Item {
 text: "Downloads: " + root.downloads + "/" + root.downloadLimit +
 ", Pending: " + root.downloadsPending;
 }
+Text {
+color: root.fontColor;
+font.pixelSize: root.fontSize
+visible: root.expanded && root.downloadUrls.length > 0;
+text: "Download URLs:"
+}
+ListView {
+width: geoCol.width
+height: root.downloadUrls.length * 15
+
+visible: root.expanded && root.downloadUrls.length > 0;
+
+model: root.downloadUrls
+delegate: Text {
+color: root.fontColor;
+font.pixelSize: root.fontSize
+visible: root.expanded;
+text: modelData.length > 30
+? modelData.substring(0, 5) + "..." + modelData.substring(modelData.length - 22)
+: modelData
+}
+}
 }
 }
 Rectangle {
@@ -272,11 +272,13 @@ public:
 void run() override {
 while (!_quit) {
 QThread::sleep(HEARTBEAT_UPDATE_INTERVAL_SECS);
+#ifdef NDEBUG
 auto now = usecTimestampNow();
 auto lastHeartbeatAge = now - _heartbeat;
 if (lastHeartbeatAge > MAX_HEARTBEAT_AGE_USECS) {
 deadlockDetectionCrash();
 }
+#endif
 }
 }

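The hunk above compiles the stale-heartbeat check out of debug builds, since a process paused in a debugger would otherwise look exactly like a deadlock. Below is a minimal, self-contained sketch of that watchdog pattern; the names, the one-second poll interval, and the 30-second limit are illustrative assumptions, not values taken from this commit.

#include <atomic>
#include <chrono>
#include <cstdint>
#include <cstdlib>
#include <thread>

// Illustrative stand-ins; the real code uses usecTimestampNow() and QThread.
static std::atomic<uint64_t> heartbeatUsecs{0};
static std::atomic<bool> quitWatchdog{false};

static uint64_t nowUsecs() {
    using namespace std::chrono;
    return duration_cast<microseconds>(steady_clock::now().time_since_epoch()).count();
}

void watchdogLoop() {
    constexpr uint64_t MAX_HEARTBEAT_AGE_USECS = 30ull * 1000 * 1000; // assumed limit
    while (!quitWatchdog) {
        std::this_thread::sleep_for(std::chrono::seconds(1));
#ifdef NDEBUG
        // Only release builds abort on a stale heartbeat; in debug builds a
        // breakpoint or single-stepping would trip this check constantly.
        if (nowUsecs() - heartbeatUsecs > MAX_HEARTBEAT_AGE_USECS) {
            std::abort(); // the real code calls deadlockDetectionCrash()
        }
#endif
    }
}

The monitored thread simply refreshes the counter periodically, e.g. heartbeatUsecs = nowUsecs(); at the top of its main loop.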
@@ -1418,6 +1420,8 @@ void Application::paintGL() {
 // FIXME not needed anymore?
 _offscreenContext->makeCurrent();

+displayPlugin->updateHeadPose(_frameCount);
+
 // update the avatar with a fresh HMD pose
 getMyAvatar()->updateFromHMDSensorMatrix(getHMDSensorPose());

@@ -1598,12 +1602,7 @@ void Application::paintGL() {
 auto baseProjection = renderArgs._viewFrustum->getProjection();
 auto hmdInterface = DependencyManager::get<HMDScriptingInterface>();
 float IPDScale = hmdInterface->getIPDScale();
-// Tell the plugin what pose we're using to render. In this case we're just using the
-// unmodified head pose because the only plugin that cares (the Oculus plugin) uses it
-// for rotational timewarp. If we move to support positonal timewarp, we need to
-// ensure this contains the full pose composed with the eye offsets.
-mat4 headPose = displayPlugin->getHeadPose(_frameCount);
+mat4 headPose = displayPlugin->getHeadPose();

 // FIXME we probably don't need to set the projection matrix every frame,
 // only when the display plugin changes (or in non-HMD modes when the user
@@ -1620,6 +1619,10 @@ void Application::paintGL() {
 mat4 eyeOffsetTransform = glm::translate(mat4(), eyeOffset * -1.0f * IPDScale);
 eyeOffsets[eye] = eyeOffsetTransform;

+// Tell the plugin what pose we're using to render. In this case we're just using the
+// unmodified head pose because the only plugin that cares (the Oculus plugin) uses it
+// for rotational timewarp. If we move to support positonal timewarp, we need to
+// ensure this contains the full pose composed with the eye offsets.
 displayPlugin->setEyeRenderPose(_frameCount, eye, headPose * glm::inverse(eyeOffsetTransform));

 eyeProjections[eye] = displayPlugin->getEyeProjection(eye, baseProjection);
@@ -2975,7 +2978,7 @@ void Application::updateMyAvatarLookAtPosition() {
 lookAtPosition.x = -lookAtPosition.x;
 }
 if (isHMD) {
-glm::mat4 headPose = getActiveDisplayPlugin()->getHeadPose(_frameCount);
+glm::mat4 headPose = getActiveDisplayPlugin()->getHeadPose();
 glm::quat hmdRotation = glm::quat_cast(headPose);
 lookAtSpot = _myCamera.getPosition() + myAvatar->getOrientation() * (hmdRotation * lookAtPosition);
 } else {
@@ -4927,7 +4930,7 @@ mat4 Application::getEyeOffset(int eye) const {

 mat4 Application::getHMDSensorPose() const {
 if (isHMDMode()) {
-return getActiveDisplayPlugin()->getHeadPose(_frameCount);
+return getActiveDisplayPlugin()->getHeadPose();
 }
 return mat4();
 }
@@ -30,7 +30,6 @@ void AvatarUpdate::synchronousProcess() {

 // Keep our own updated value, so that our asynchronous code can consult it.
 _isHMDMode = qApp->isHMDMode();
-auto frameCount = qApp->getFrameCount();

 QSharedPointer<AvatarManager> manager = DependencyManager::get<AvatarManager>();
 MyAvatar* myAvatar = manager->getMyAvatar();
@@ -38,7 +37,7 @@ void AvatarUpdate::synchronousProcess() {

 // transform the head pose from the displayPlugin into avatar coordinates.
 glm::mat4 invAvatarMat = glm::inverse(createMatFromQuatAndPos(myAvatar->getOrientation(), myAvatar->getPosition()));
-_headPose = invAvatarMat * (myAvatar->getSensorToWorldMatrix() * qApp->getActiveDisplayPlugin()->getHeadPose(frameCount));
+_headPose = invAvatarMat * (myAvatar->getSensorToWorldMatrix() * qApp->getActiveDisplayPlugin()->getHeadPose());

 if (!isThreaded()) {
 process();
@@ -55,6 +54,7 @@ bool AvatarUpdate::process() {
 deltaMicroseconds = 10000; // 10 ms
 }
 float deltaSeconds = (float) deltaMicroseconds / (float) USECS_PER_SECOND;
+assert(deltaSeconds > 0.0f);
 _lastAvatarUpdate = start;
 qApp->setAvatarSimrateSample(1.0f / deltaSeconds);

@@ -1258,7 +1258,7 @@ void MyAvatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, fl
 if (qApp->isHMDMode()) {
 glm::vec3 cameraPosition = qApp->getCamera()->getPosition();

-glm::mat4 headPose = qApp->getActiveDisplayPlugin()->getHeadPose(qApp->getFrameCount());
+glm::mat4 headPose = qApp->getActiveDisplayPlugin()->getHeadPose();
 glm::mat4 leftEyePose = qApp->getActiveDisplayPlugin()->getEyeToHeadTransform(Eye::Left);
 leftEyePose = leftEyePose * headPose;
 glm::vec3 leftEyePosition = extractTranslation(leftEyePose);
@@ -156,7 +156,7 @@ void Stats::updateStats(bool force) {
 }
 }
 });

 // update the entities ping with the average for all connected entity servers
 STAT_UPDATE(entitiesPing, octreeServerCount ? totalPingOctree / octreeServerCount : -1);

@@ -192,9 +192,29 @@ void Stats::updateStats(bool force) {
 STAT_UPDATE(audioMixerPps, -1);
 }

-STAT_UPDATE(downloads, ResourceCache::getLoadingRequests().size());
+QList<Resource*> loadingRequests = ResourceCache::getLoadingRequests();
+STAT_UPDATE(downloads, loadingRequests.size());
 STAT_UPDATE(downloadLimit, ResourceCache::getRequestLimit())
 STAT_UPDATE(downloadsPending, ResourceCache::getPendingRequestCount());
+
+// See if the active download urls have changed
+bool shouldUpdateUrls = _downloads != _downloadUrls.size();
+if (!shouldUpdateUrls) {
+for (int i = 0; i < _downloads; i++) {
+if (loadingRequests[i]->getURL().toString() != _downloadUrls[i]) {
+shouldUpdateUrls = true;
+break;
+}
+}
+}
+// If the urls have changed, update the list
+if (shouldUpdateUrls) {
+_downloadUrls.clear();
+foreach (Resource* resource, loadingRequests) {
+_downloadUrls << resource->getURL().toString();
+}
+emit downloadUrlsChanged();
+}
 // TODO fix to match original behavior
 //stringstream downloads;
 //downloads << "Downloads: ";
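The block above caches the set of in-flight download URLs and only emits downloadUrlsChanged when the list actually differs, so the QML list view is not rebuilt on every stats refresh. Below is a hedged sketch of that compare-then-rebuild-then-notify pattern in plain standard-library types; the function and parameter names are invented for illustration, while the real code works on QStringList and a Qt signal.

#include <functional>
#include <string>
#include <vector>

// Generic sketch of the "only notify when the list really changed" pattern.
// The real code compares QStrings from Resource::getURL() and emits a Qt signal.
void updateDownloadUrls(const std::vector<std::string>& loading,
                        std::vector<std::string>& cachedUrls,
                        const std::function<void()>& notifyChanged) {
    bool changed = loading.size() != cachedUrls.size();
    if (!changed) {
        for (size_t i = 0; i < loading.size(); ++i) {
            if (loading[i] != cachedUrls[i]) { changed = true; break; }
        }
    }
    if (changed) {
        cachedUrls = loading;   // rebuild the cached list
        notifyChanged();        // e.g. emit downloadUrlsChanged()
    }
}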
@@ -306,7 +326,7 @@ void Stats::updateStats(bool force) {
 // we will also include room for 1 line per timing record and a header of 4 lines
 // Timing details...

 // First iterate all the records, and for the ones that should be included, insert them into
 // a new Map sorted by average time...
 bool onlyDisplayTopTen = Menu::getInstance()->isOptionChecked(MenuOption::OnlyDisplayTopTen);
 QMap<float, QString> sortedRecords;
@@ -366,7 +386,7 @@ void Stats::setRenderDetails(const RenderDetails& details) {
 /*
 // display expanded or contracted stats
 void Stats::display(
 int voxelPacketsToProcess)
 {
 // iterate all the current voxel stats, and list their sending modes, and total voxel counts

@@ -19,7 +19,7 @@
 public: \
 type name() { return _##name; }; \
 private: \
 type _##name{ initialValue };


 class Stats : public QQuickItem {
@@ -58,6 +58,7 @@ class Stats : public QQuickItem {
 STATS_PROPERTY(int, downloads, 0)
 STATS_PROPERTY(int, downloadLimit, 0)
 STATS_PROPERTY(int, downloadsPending, 0)
+Q_PROPERTY(QStringList downloadUrls READ downloadUrls NOTIFY downloadUrlsChanged)
 STATS_PROPERTY(int, triangles, 0)
 STATS_PROPERTY(int, quads, 0)
 STATS_PROPERTY(int, materialSwitches, 0)
@@ -105,6 +106,8 @@ public:
 }
 }

+QStringList downloadUrls () { return _downloadUrls; }
+
 public slots:
 void forceUpdateStats() { updateStats(true); }

@@ -138,6 +141,7 @@ signals:
 void downloadsChanged();
 void downloadLimitChanged();
 void downloadsPendingChanged();
+void downloadUrlsChanged();
 void trianglesChanged();
 void quadsChanged();
 void materialSwitchesChanged();
@@ -167,6 +171,7 @@ private:
 bool _timingExpanded{ false };
 QString _monospaceFont;
 const AudioIOStats* _audioStats;
+QStringList _downloadUrls = QStringList();
 };

 #endif // hifi_Stats_h
@@ -481,7 +481,7 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars

 // smooth transitions by relaxing _hipsOffset toward the new value
 const float HIPS_OFFSET_SLAVE_TIMESCALE = 0.15f;
-float tau = dt > HIPS_OFFSET_SLAVE_TIMESCALE ? 1.0f : dt / HIPS_OFFSET_SLAVE_TIMESCALE;
+float tau = dt < HIPS_OFFSET_SLAVE_TIMESCALE ? dt / HIPS_OFFSET_SLAVE_TIMESCALE : 1.0f;
 _hipsOffset += (newHipsOffset - _hipsOffset) * tau;
 }
 }
@@ -384,11 +384,20 @@ void SwingTwistConstraint::dynamicallyAdjustLimits(const glm::quat& rotation) {

 swingTwistDecomposition(postRotation, Vectors::UNIT_Y, swingRotation, twistRotation);

-// adjust swing limits
+{ // adjust swing limits
 glm::vec3 swungY = swingRotation * Vectors::UNIT_Y;
 glm::vec3 swingAxis = glm::cross(Vectors::UNIT_Y, swungY);
 float theta = atan2f(-swingAxis.z, swingAxis.x);
-_swingLimitFunction.dynamicallyAdjustMinDots(theta, swungY.y);
+if (isnan(theta)) {
+// atan2f() will only return NaN if either of its arguments is NaN, which can only
+// happen if we've been given a bad rotation. Since a NaN value here could potentially
+// cause a crash (we use the value of theta to compute indices into a std::vector)
+// we specifically check for this case.
+theta = 0.0f;
+swungY.y = 1.0f;
+}
+_swingLimitFunction.dynamicallyAdjustMinDots(theta, swungY.y);
+}

 // restore twist limits
 if (_twistAdjusted) {
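The comment added above spells out why the guard exists: theta is later used to compute indices into a std::vector, so a NaN coming out of atan2f (possible only when a corrupt rotation feeds NaN into it) has to be replaced with a safe neutral value. A small stand-alone illustration of the same guard; the function name and the fallback values mirror the hunk, everything else is an assumption for the example.

#include <cmath>
#include <cstdio>

// Sketch: sanitize the swing angle before it is used as a table index.
float safeSwingTheta(float axisX, float axisZ, float& swungYy) {
    float theta = atan2f(-axisZ, axisX);
    if (std::isnan(theta)) {
        // Only NaN inputs (a corrupt rotation) can produce NaN here; fall back
        // to a neutral "no swing" value instead of indexing with garbage.
        theta = 0.0f;
        swungYy = 1.0f;
    }
    return theta;
}

int main() {
    float y = 0.5f;
    printf("%f\n", safeSwingTheta(NAN, 1.0f, y)); // prints 0.000000, y reset to 1
    return 0;
}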
@@ -342,7 +342,7 @@ void CompositorHelper::computeHmdPickRay(const glm::vec2& cursorPos, glm::vec3&
 }

 glm::mat4 CompositorHelper::getUiTransform() const {
-return _currentCamera * glm::inverse(_currentDisplayPlugin->getHeadPose(_currentFrame));
+return _currentCamera * glm::inverse(_currentDisplayPlugin->getHeadPose());
 }

 //Finds the collision point of a world space ray
@@ -160,4 +160,8 @@ void HmdDisplayPlugin::updateFrameData() {
 Parent::updateFrameData();
 Lock lock(_mutex);
 _currentRenderEyePoses = _renderEyePoses[_currentRenderFrameIndex];
 }
+
+glm::mat4 HmdDisplayPlugin::getHeadPose() const {
+return _headPoseCache.get();
+}
@@ -7,6 +7,8 @@
 //
 #pragma once

+#include <ThreadSafeValueCache.h>
+
 #include <QtGlobal>

 #include "../OpenGLDisplayPlugin.h"
@@ -24,7 +26,7 @@ public:
 void setEyeRenderPose(uint32_t frameIndex, Eye eye, const glm::mat4& pose) override final;
 bool isDisplayVisible() const override { return isHmdMounted(); }

+virtual glm::mat4 getHeadPose() const override;

 protected:
 virtual void hmdPresent() = 0;
@@ -46,6 +48,7 @@ protected:
 using EyePoses = std::array<glm::mat4, 2>;
 QMap<uint32_t, EyePoses> _renderEyePoses;
 EyePoses _currentRenderEyePoses;
+ThreadSafeValueCache<glm::mat4> _headPoseCache { glm::mat4() };

 private:
 bool _enablePreview { false };
@@ -182,9 +182,10 @@ bool EntityTreeRenderer::findBestZoneAndMaybeContainingEntities(const glm::vec3&
 // if this entity is a zone, use this time to determine the bestZone
 if (entity->getType() == EntityTypes::Zone) {
 if (!entity->getVisible()) {
+#ifdef WANT_DEBUG
 qCDebug(entitiesrenderer) << "not visible";
-}
+#endif
-else {
+} else {
 float entityVolumeEstimate = entity->getVolumeEstimate();
 if (entityVolumeEstimate < _bestZoneVolume) {
 _bestZoneVolume = entityVolumeEstimate;
@@ -179,7 +179,7 @@ void OBJFace::addFrom(const OBJFace* face, int index) { // add using data from f
 }

 static bool replyOK(QNetworkReply* netReply, QUrl url) { // This will be reworked when we make things asynchronous
-return (netReply->isFinished() &&
+return (netReply && netReply->isFinished() &&
 (url.toString().startsWith("file", Qt::CaseInsensitive) ? // file urls don't have http status codes
 netReply->attribute(QNetworkRequest::HttpReasonPhraseAttribute).toString().isEmpty() :
 (netReply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt() == 200)));
@@ -191,11 +191,10 @@ bool OBJReader::isValidTexture(const QByteArray &filename) {
 }
 QUrl candidateUrl = _url.resolved(QUrl(filename));
 QNetworkReply *netReply = request(candidateUrl, true);
-if (!netReply) {
-return false;
-}
 bool isValid = replyOK(netReply, candidateUrl);
-netReply->deleteLater();
+if (netReply) {
+netReply->deleteLater();
+}
 return isValid;
 }

@@ -264,20 +263,19 @@ QNetworkReply* OBJReader::request(QUrl& url, bool isTest) {
 if (!qApp) {
 return nullptr;
 }
+bool aboutToQuit{ false };
+auto connection = QObject::connect(qApp, &QCoreApplication::aboutToQuit, [&] {
+aboutToQuit = true;
+});
 QNetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance();
 QNetworkRequest netRequest(url);
 QNetworkReply* netReply = isTest ? networkAccessManager.head(netRequest) : networkAccessManager.get(netRequest);
-if (!qApp) {
+if (!qApp || aboutToQuit) {
-return netReply;
+return nullptr;
 }
 QEventLoop loop; // Create an event loop that will quit when we get the finished signal
 QObject::connect(netReply, SIGNAL(finished()), &loop, SLOT(quit()));
 loop.exec(); // Nothing is going to happen on this whole run thread until we get this

-bool aboutToQuit { false };
-auto connection = QObject::connect(qApp, &QCoreApplication::aboutToQuit, [&] {
-aboutToQuit = true;
-});
 static const int WAIT_TIMEOUT_MS = 500;
 while (qApp && !aboutToQuit && !netReply->isReadable()) {
 netReply->waitForReadyRead(WAIT_TIMEOUT_MS); // so we might as well block this thread waiting for the response, rather than
@@ -570,9 +568,11 @@ FBXGeometry* OBJReader::readOBJ(QByteArray& model, const QVariantHash& mapping,
 parseMaterialLibrary(netReply);
 } else {
 qCDebug(modelformat) << "OBJ Reader WARNING:" << libraryName << "did not answer. Got"
-<< netReply->attribute(QNetworkRequest::HttpReasonPhraseAttribute).toString();
+<< (!netReply ? "aborted" : netReply->attribute(QNetworkRequest::HttpReasonPhraseAttribute).toString());
+}
+if (netReply) {
+netReply->deleteLater();
 }
-netReply->deleteLater();
 }
 }

@@ -32,17 +32,25 @@ OffscreenGLCanvas::~OffscreenGLCanvas() {
 _context->doneCurrent();
 }

-void OffscreenGLCanvas::create(QOpenGLContext* sharedContext) {
+bool OffscreenGLCanvas::create(QOpenGLContext* sharedContext) {
 if (nullptr != sharedContext) {
 sharedContext->doneCurrent();
 _context->setShareContext(sharedContext);
 }
 _context->setFormat(getDefaultOpenGLSurfaceFormat());
-_context->create();
+if (_context->create()) {
+_offscreenSurface->setFormat(_context->format());
+_offscreenSurface->create();
+return true;
+}

-_offscreenSurface->setFormat(_context->format());
-_offscreenSurface->create();
+qWarning() << "GL Version: " << QString((const char*) glGetString(GL_VERSION));
+qWarning() << "GL Shader Language Version: " << QString((const char*) glGetString(GL_SHADING_LANGUAGE_VERSION));
+qWarning() << "GL Vendor: " << QString((const char*) glGetString(GL_VENDOR));
+qWarning() << "GL Renderer: " << QString((const char*) glGetString(GL_RENDERER));
+qWarning() << "Failed to create OffscreenGLCanvas";
+
+return false;
 }

 bool OffscreenGLCanvas::makeCurrent() {
@@ -23,7 +23,7 @@ class OffscreenGLCanvas : public QObject {
 public:
 OffscreenGLCanvas();
 ~OffscreenGLCanvas();
-void create(QOpenGLContext* sharedContext = nullptr);
+bool create(QOpenGLContext* sharedContext = nullptr);
 bool makeCurrent();
 void doneCurrent();
 QOpenGLContext* getContext() {
@@ -65,7 +65,11 @@ class OffscreenQmlRenderer : public OffscreenGLCanvas {
 public:

 OffscreenQmlRenderer(OffscreenQmlSurface* surface, QOpenGLContext* shareContext) : _surface(surface) {
-OffscreenGLCanvas::create(shareContext);
+if (!OffscreenGLCanvas::create(shareContext)) {
+static const char* error = "Failed to create OffscreenGLCanvas";
+qWarning() << error;
+throw error;
+};

 _renderControl = new QMyQuickRenderControl();

@@ -153,7 +157,7 @@ private:
 qWarning("Failed to make context current on render thread");
 return;
 }
-_renderControl->initialize(_context);
+_renderControl->initialize(getContext());
 setupFbo();
 _escrow.setRecycler([this](GLuint texture){
 _textures.recycleTexture(texture);
@@ -72,11 +72,15 @@ void GeometryReader::run() {
 const bool grabLightmaps = true;
 const float lightmapLevel = 1.0f;
 fbxgeo = readFBX(_data, _mapping, _url.path(), grabLightmaps, lightmapLevel);
+if (fbxgeo->meshes.size() == 0 && fbxgeo->joints.size() == 0) {
+// empty fbx geometry, indicates error
+throw QString("empty geometry, possibly due to an unsupported FBX version");
+}
 } else if (_url.path().toLower().endsWith(".obj")) {
 fbxgeo = OBJReader().readOBJ(_data, _mapping, _url);
 } else {
 QString errorStr("unsupported format");
-emit onError(NetworkGeometry::ModelParseError, errorStr);
+throw errorStr;
 }
 emit onSuccess(fbxgeo);
 } else {
@@ -56,7 +56,7 @@ gpu::Texture* TextureUsage::create2DTextureFromImage(const QImage& srcImage, con
 bool validAlpha = false;
 bool alphaAsMask = true;
 const uint8 OPAQUE_ALPHA = 255;
-const uint8 TRANSLUCENT_ALPHA = 0;
+const uint8 TRANSPARENT_ALPHA = 0;
 if (image.hasAlphaChannel()) {
 std::map<uint8, uint32> alphaHistogram;

@@ -70,10 +70,7 @@ gpu::Texture* TextureUsage::create2DTextureFromImage(const QImage& srcImage, con
 for (int x = 0; x < image.width(); ++x) {
 auto alpha = qAlpha(data[x]);
 alphaHistogram[alpha] ++;
-if (alpha != OPAQUE_ALPHA) {
-validAlpha = true;
-break;
-}
+validAlpha = validAlpha || (alpha != OPAQUE_ALPHA);
 }
 }

@@ -81,10 +78,10 @@ gpu::Texture* TextureUsage::create2DTextureFromImage(const QImage& srcImage, con
 if (validAlpha && (alphaHistogram.size() > 1)) {
 auto totalNumPixels = image.height() * image.width();
 auto numOpaques = alphaHistogram[OPAQUE_ALPHA];
-auto numTranslucents = alphaHistogram[TRANSLUCENT_ALPHA];
+auto numTransparents = alphaHistogram[TRANSPARENT_ALPHA];
-auto numTransparents = totalNumPixels - numOpaques - numTranslucents;
+auto numTranslucents = totalNumPixels - numOpaques - numTransparents;

-alphaAsMask = ((numTransparents / (double)totalNumPixels) < 0.05);
+alphaAsMask = ((numTranslucents / (double)totalNumPixels) < 0.05);
 }
 }

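The renaming above makes the three alpha buckets explicit: fully opaque (alpha 255), fully transparent (alpha 0), and everything in between, which is what actually forces blending; alpha is downgraded to a 1-bit mask only when fewer than 5% of pixels are truly translucent. A sketch of that heuristic follows, taking an assumed pre-built histogram rather than scanning a QImage as the real code does.

#include <cstdint>
#include <map>

// Sketch of the alpha classification heuristic.
// OPAQUE = 255, TRANSPARENT = 0, everything in between counts as "translucent".
bool shouldTreatAlphaAsMask(const std::map<uint8_t, uint32_t>& alphaHistogram,
                            uint64_t totalNumPixels) {
    const uint8_t OPAQUE_ALPHA = 255;
    const uint8_t TRANSPARENT_ALPHA = 0;
    uint64_t numOpaques = 0, numTransparents = 0;
    auto it = alphaHistogram.find(OPAQUE_ALPHA);
    if (it != alphaHistogram.end()) { numOpaques = it->second; }
    it = alphaHistogram.find(TRANSPARENT_ALPHA);
    if (it != alphaHistogram.end()) { numTransparents = it->second; }
    uint64_t numTranslucents = totalNumPixels - numOpaques - numTransparents;
    // Fewer than 5% partially-transparent pixels: a binary mask is good enough.
    return (numTranslucents / (double)totalNumPixels) < 0.05;
}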
@@ -159,44 +159,45 @@ void ResourceCache::clearUnusedResource() {
 }
 }

-bool ResourceCache::attemptRequest(Resource* resource) {
-auto sharedItems = DependencyManager::get<ResourceCacheSharedItems>();
-
-// Disable request limiting for ATP
-if (resource->getURL().scheme() != URL_SCHEME_ATP) {
-if (_requestsActive >= _requestLimit) {
-// wait until a slot becomes available
-sharedItems->_pendingRequests.append(resource);
-return false;
-}
-
-++_requestsActive;
-}
-
-sharedItems->_loadingRequests.append(resource);
-resource->makeRequest();
-return true;
+void ResourceCacheSharedItems::appendActiveRequest(Resource* resource) {
+Lock lock(_mutex);
+_loadingRequests.append(resource);
 }

-void ResourceCache::requestCompleted(Resource* resource) {
-auto sharedItems = DependencyManager::get<ResourceCacheSharedItems>();
-sharedItems->_loadingRequests.removeOne(resource);
-if (resource->getURL().scheme() != URL_SCHEME_ATP) {
---_requestsActive;
-}
-
-attemptHighestPriorityRequest();
+void ResourceCacheSharedItems::appendPendingRequest(Resource* resource) {
+Lock lock(_mutex);
+_pendingRequests.append(resource);
 }

-bool ResourceCache::attemptHighestPriorityRequest() {
-auto sharedItems = DependencyManager::get<ResourceCacheSharedItems>();
+QList<QPointer<Resource>> ResourceCacheSharedItems::getPendingRequests() const {
+Lock lock(_mutex);
+return _pendingRequests;
+}
+
+uint32_t ResourceCacheSharedItems::getPendingRequestsCount() const {
+Lock lock(_mutex);
+return _pendingRequests.size();
+}
+
+QList<Resource*> ResourceCacheSharedItems::getLoadingRequests() const {
+Lock lock(_mutex);
+return _loadingRequests;
+}
+
+void ResourceCacheSharedItems::removeRequest(Resource* resource) {
+Lock lock(_mutex);
+_loadingRequests.removeOne(resource);
+}
+
+Resource* ResourceCacheSharedItems::getHighestPendingRequest() {
+Lock lock(_mutex);
 // look for the highest priority pending request
 int highestIndex = -1;
 float highestPriority = -FLT_MAX;
-for (int i = 0; i < sharedItems->_pendingRequests.size(); ) {
+for (int i = 0; i < _pendingRequests.size();) {
-Resource* resource = sharedItems->_pendingRequests.at(i).data();
+Resource* resource = _pendingRequests.at(i).data();
 if (!resource) {
-sharedItems->_pendingRequests.removeAt(i);
+_pendingRequests.removeAt(i);
 continue;
 }
 float priority = resource->getLoadPriority();
@@ -206,7 +207,45 @@ bool ResourceCache::attemptHighestPriorityRequest() {
 }
 i++;
 }
-return (highestIndex >= 0) && attemptRequest(sharedItems->_pendingRequests.takeAt(highestIndex));
+if (highestIndex >= 0) {
+return _pendingRequests.takeAt(highestIndex);
+}
+return nullptr;
+}
+
+bool ResourceCache::attemptRequest(Resource* resource) {
+auto sharedItems = DependencyManager::get<ResourceCacheSharedItems>();
+
+// Disable request limiting for ATP
+if (resource->getURL().scheme() != URL_SCHEME_ATP) {
+if (_requestsActive >= _requestLimit) {
+// wait until a slot becomes available
+sharedItems->appendPendingRequest(resource);
+return false;
+}
+
+++_requestsActive;
+}
+
+sharedItems->appendActiveRequest(resource);
+resource->makeRequest();
+return true;
+}
+
+void ResourceCache::requestCompleted(Resource* resource) {
+auto sharedItems = DependencyManager::get<ResourceCacheSharedItems>();
+sharedItems->removeRequest(resource);
+if (resource->getURL().scheme() != URL_SCHEME_ATP) {
+--_requestsActive;
+}
+
+attemptHighestPriorityRequest();
+}
+
+bool ResourceCache::attemptHighestPriorityRequest() {
+auto sharedItems = DependencyManager::get<ResourceCacheSharedItems>();
+auto resource = sharedItems->getHighestPendingRequest();
+return (resource && attemptRequest(resource));
 }

 const int DEFAULT_REQUEST_LIMIT = 10;
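After this refactor, ResourceCache no longer reaches into the shared request lists directly; every access goes through a small locked accessor on ResourceCacheSharedItems, which keeps attemptRequest/requestCompleted from racing with readers such as the stats overlay. A reduced sketch of that shape using std::mutex and plain containers follows; int stands in for Resource* and the method names are simplified, so treat it as an illustration of the pattern rather than the project's API.

#include <algorithm>
#include <mutex>
#include <vector>

// Reduced sketch of the locked shared-items pattern.
struct SharedRequestItems {
    void appendActive(int item)  { std::lock_guard<std::mutex> g(_mutex); _loading.push_back(item); }
    void appendPending(int item) { std::lock_guard<std::mutex> g(_mutex); _pending.push_back(item); }
    void remove(int item) {
        std::lock_guard<std::mutex> g(_mutex);
        _loading.erase(std::remove(_loading.begin(), _loading.end(), item), _loading.end());
    }
    // Readers get a snapshot copy, never a reference into the protected state.
    std::vector<int> loadingSnapshot() const { std::lock_guard<std::mutex> g(_mutex); return _loading; }
    size_t pendingCount() const { std::lock_guard<std::mutex> g(_mutex); return _pending.size(); }
private:
    mutable std::mutex _mutex;
    std::vector<int> _pending;
    std::vector<int> _loading;
};

Returning copies is also why the header change below drops the by-reference getLoadingRequests(): handing out a reference to the internal list would defeat the lock.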
@@ -228,9 +267,10 @@ Resource::Resource(const QUrl& url, bool delayLoad) :

 Resource::~Resource() {
 if (_request) {
-ResourceCache::requestCompleted(this);
+_request->disconnect(this);
 _request->deleteLater();
 _request = nullptr;
+ResourceCache::requestCompleted(this);
 }
 }

@@ -375,7 +415,14 @@ void Resource::handleDownloadProgress(uint64_t bytesReceived, uint64_t bytesTota
 }

 void Resource::handleReplyFinished() {
-Q_ASSERT(_request);
+Q_ASSERT_X(_request, "Resource::handleReplyFinished", "Request should not be null while in handleReplyFinished");

+if (!_request || _request != sender()) {
+// This can happen in the edge case that a request is timed out, but a `finished` signal is emitted before it is deleted.
+qWarning(networking) << "Received signal Resource::handleReplyFinished from ResourceRequest that is not the current"
+<< " request: " << sender() << ", " << _request;
+return;
+}
+
 ResourceCache::requestCompleted(this);

@@ -12,6 +12,7 @@
 #ifndef hifi_ResourceCache_h
 #define hifi_ResourceCache_h

+#include <mutex>
 #include <QtCore/QHash>
 #include <QtCore/QList>
 #include <QtCore/QObject>
@@ -53,12 +54,25 @@ static const qint64 MAX_UNUSED_MAX_SIZE = 10 * BYTES_PER_GIGABYTES;
 // object instead
 class ResourceCacheSharedItems : public Dependency {
 SINGLETON_DEPENDENCY

+using Mutex = std::mutex;
+using Lock = std::unique_lock<Mutex>;
 public:
-QList<QPointer<Resource>> _pendingRequests;
+void appendPendingRequest(Resource* newRequest);
-QList<Resource*> _loadingRequests;
+void appendActiveRequest(Resource* newRequest);
+void removeRequest(Resource* doneRequest);
+QList<QPointer<Resource>> getPendingRequests() const;
+uint32_t getPendingRequestsCount() const;
+QList<Resource*> getLoadingRequests() const;
+Resource* getHighestPendingRequest();

 private:
 ResourceCacheSharedItems() { }
 virtual ~ResourceCacheSharedItems() { }

+mutable Mutex _mutex;
+QList<QPointer<Resource>> _pendingRequests;
+QList<Resource*> _loadingRequests;
 };


@@ -75,11 +89,11 @@ public:
 void setUnusedResourceCacheSize(qint64 unusedResourcesMaxSize);
 qint64 getUnusedResourceCacheSize() const { return _unusedResourcesMaxSize; }

-static const QList<Resource*>& getLoadingRequests()
+static const QList<Resource*> getLoadingRequests()
-{ return DependencyManager::get<ResourceCacheSharedItems>()->_loadingRequests; }
+{ return DependencyManager::get<ResourceCacheSharedItems>()->getLoadingRequests(); }

 static int getPendingRequestCount()
-{ return DependencyManager::get<ResourceCacheSharedItems>()->_pendingRequests.size(); }
+{ return DependencyManager::get<ResourceCacheSharedItems>()->getPendingRequestsCount(); }

 ResourceCache(QObject* parent = NULL);
 virtual ~ResourceCache();
@@ -121,8 +121,12 @@
 static const glm::mat4 transform; return transform;
 }

-virtual glm::mat4 getHeadPose(uint32_t frameIndex) const {
-static const glm::mat4 pose; return pose;
+// will query the underlying hmd api to compute the most recent head pose
+virtual void updateHeadPose(uint32_t frameIndex) {}
+
+// returns a copy of the most recent head pose, computed via updateHeadPose
+virtual glm::mat4 getHeadPose() const {
+return glm::mat4();
 }

 // Needed for timewarp style features
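The API change above splits the old getHeadPose(frameIndex) into two calls: updateHeadPose(frameIndex) queries the HMD once per frame and caches the result, and getHeadPose() hands back the cached matrix from any thread. Here is a compilable sketch of that split, with a small mutex-guarded cache standing in for ThreadSafeValueCache and a bare 4x4 struct standing in for glm::mat4; both are assumptions made so the example builds on its own.

#include <cstdint>
#include <mutex>

// Minimal stand-in for ThreadSafeValueCache<glm::mat4>.
struct Mat4 { float m[16] = { 1,0,0,0, 0,1,0,0, 0,0,1,0, 0,0,0,1 }; };

class HeadPoseCache {
public:
    void set(const Mat4& pose) { std::lock_guard<std::mutex> g(_mutex); _pose = pose; }
    Mat4 get() const           { std::lock_guard<std::mutex> g(_mutex); return _pose; }
private:
    mutable std::mutex _mutex;
    Mat4 _pose;
};

class ExampleHmdPlugin {
public:
    // Called once per frame on the render thread: query the device and cache.
    void updateHeadPose(uint32_t /*frameIndex*/) { _headPoseCache.set(queryDevice()); }
    // Safe to call from any thread: returns the last cached sample.
    Mat4 getHeadPose() const { return _headPoseCache.get(); }
private:
    Mat4 queryDevice() const { return Mat4(); } // placeholder for the vendor SDK call
    HeadPoseCache _headPoseCache;
};

This is why the Oculus, Oculus Legacy, and OpenVR hunks below all replace their getHeadPose(frameIndex) overrides with updateHeadPose(frameIndex) plus a write into _headPoseCache.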
@@ -571,6 +571,7 @@ void Model::removeFromScene(std::shared_ptr<render::Scene> scene, render::Pendin
 }
 _renderItems.clear();
 _renderItemsSet.clear();
+_meshGroupsKnown = false;
 _readyWhenAdded = false;
 }

@@ -15,14 +15,11 @@ void OculusBaseDisplayPlugin::resetSensors() {
 ovr_RecenterPose(_session);
 }

-glm::mat4 OculusBaseDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
+void OculusBaseDisplayPlugin::updateHeadPose(uint32_t frameIndex) {
-static uint32_t lastFrameSeen = 0;
 auto displayTime = ovr_GetPredictedDisplayTime(_session, frameIndex);
-auto trackingState = ovr_GetTrackingState(_session, displayTime, frameIndex > lastFrameSeen);
+auto trackingState = ovr_GetTrackingState(_session, displayTime, true);
-if (frameIndex > lastFrameSeen) {
+mat4 headPose = toGlm(trackingState.HeadPose.ThePose);
-lastFrameSeen = frameIndex;
+_headPoseCache.set(headPose);
-}
-return toGlm(trackingState.HeadPose.ThePose);
 }

 bool OculusBaseDisplayPlugin::isSupported() const {
@@ -20,7 +20,7 @@ public:

 // Stereo specific methods
 virtual void resetSensors() override final;
-virtual glm::mat4 getHeadPose(uint32_t frameIndex) const override;
+virtual void updateHeadPose(uint32_t frameIndex) override;

 protected:
 void customizeContext() override;
@@ -35,14 +35,10 @@ void OculusLegacyDisplayPlugin::resetSensors() {
 ovrHmd_RecenterPose(_hmd);
 }

-glm::mat4 OculusLegacyDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
+void OculusLegacyDisplayPlugin::updateHeadPose(uint32_t frameIndex) {
-static uint32_t lastFrameSeen = 0;
+Lock lock(_mutex);
-if (frameIndex > lastFrameSeen) {
+_trackingState = ovrHmd_GetTrackingState(_hmd, ovr_GetTimeInSeconds());
-Lock lock(_mutex);
+_headPoseCache.set(toGlm(_trackingState.HeadPose.ThePose));
-_trackingState = ovrHmd_GetTrackingState(_hmd, ovr_GetTimeInSeconds());
-lastFrameSeen = frameIndex;
-}
-return toGlm(_trackingState.HeadPose.ThePose);
 }

 bool OculusLegacyDisplayPlugin::isSupported() const {
@@ -26,7 +26,7 @@ public:

 // Stereo specific methods
 virtual void resetSensors() override;
-virtual glm::mat4 getHeadPose(uint32_t frameIndex) const override;
+virtual void updateHeadPose(uint32_t frameIndex) override;

 virtual float getTargetFrameRate() override;

@@ -112,7 +112,7 @@ void OpenVrDisplayPlugin::resetSensors() {
 _sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
 }

-glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
+void OpenVrDisplayPlugin::updateHeadPose(uint32_t frameIndex) {

 float displayFrequency = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_DisplayFrequency_Float);
 float frameDuration = 1.f / displayFrequency;
@@ -139,14 +139,15 @@ glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
 _trackedDeviceLinearVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vVelocity));
 _trackedDeviceAngularVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vAngularVelocity));
 }
-return _trackedDevicePoseMat4[0];
+_headPoseCache.set(_trackedDevicePoseMat4[0]);
 }

 void OpenVrDisplayPlugin::hmdPresent() {
 // Flip y-axis since GL UV coords are backwards.
 static vr::VRTextureBounds_t leftBounds{ 0, 0, 0.5f, 1 };
 static vr::VRTextureBounds_t rightBounds{ 0.5f, 0, 1, 1 };

 vr::Texture_t texture { (void*)oglplus::GetName(_compositeFramebuffer->color), vr::API_OpenGL, vr::ColorSpace_Auto };

 _compositor->Submit(vr::Eye_Left, &texture, &leftBounds);
@@ -27,7 +27,7 @@ public:

 // Stereo specific methods
 virtual void resetSensors() override;
-virtual glm::mat4 getHeadPose(uint32_t frameIndex) const override;
+virtual void updateHeadPose(uint32_t frameIndex) override;

 protected:
 void internalActivate() override;