Merge pull request #10636 from zfox23/spectatorCamera_unprototype

"Unprototype" the Spectator Camera
This commit is contained in:
Zach Fox 2017-06-09 12:00:26 -07:00 committed by GitHub
commit c32f5c1f2b
7 changed files with 222 additions and 180 deletions

View file

@ -114,7 +114,7 @@
#include <render/RenderFetchCullSortTask.h> #include <render/RenderFetchCullSortTask.h>
#include <RenderDeferredTask.h> #include <RenderDeferredTask.h>
#include <RenderForwardTask.h> #include <RenderForwardTask.h>
#include <PrototypeSelfie.h> #include <SecondaryCamera.h>
#include <ResourceCache.h> #include <ResourceCache.h>
#include <ResourceRequest.h> #include <ResourceRequest.h>
#include <SandboxUtils.h> #include <SandboxUtils.h>
@ -1874,7 +1874,7 @@ void Application::initializeGL() {
} }
_renderEngine->addJob<MainRenderTask>("MainFrame", cullFunctor, isDeferred); _renderEngine->addJob<MainRenderTask>("MainFrame", cullFunctor, isDeferred);
_renderEngine->addJob<SelfieRenderTask>("SelfieFrame", cullFunctor); _renderEngine->addJob<SecondaryCameraRenderTask>("SecondaryCameraFrame", cullFunctor);
/* _renderEngine->addJob<RenderShadowTask>("RenderShadowTask", cullFunctor); /* _renderEngine->addJob<RenderShadowTask>("RenderShadowTask", cullFunctor);

View file

@ -1,102 +0,0 @@
#include "PrototypeSelfie.h"
#include <gpu/Context.h>
// Assembles the main-view render pipeline: shadows first (so later passes can
// sample the shadow maps), then a fetch/cull/sort of the scene, then either
// the deferred (default) or forward renderer.
void MainRenderTask::build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor, bool isDeferred) {
    task.addJob<RenderShadowTask>("RenderShadowTask", cullFunctor);
    const auto sortedItems = task.addJob<RenderFetchCullSortTask>("FetchCullSort", cullFunctor);
    assert(sortedItems.canCast<RenderFetchCullSortTask::Output>());
    if (isDeferred) {
        task.addJob<RenderDeferredTask>("RenderDeferredTask", sortedItems);
    } else {
        task.addJob<RenderForwardTask>("Forward", sortedItems);
    }
}
#include <TextureCache.h>
using RenderArgsPointer = std::shared_ptr<RenderArgs>;
void SelfieRenderTaskConfig::resetSize(int width, int height) { // Carefully adjust the framebuffer / texture.
bool wasEnabled = isEnabled();
setEnabled(false);
auto textureCache = DependencyManager::get<TextureCache>();
textureCache->resetSelfieFramebuffer(width, height);
setEnabled(wasEnabled);
}
// Redirects rendering into the selfie framebuffer and swaps in the selfie
// camera's view frustum; outputs the previous RenderArgs so EndSelfieFrame
// can restore them after the pass.
// Fix: the original assigned args->_viewport twice in a row (duplicated
// statement) — the redundant second assignment is removed here.
class BeginSelfieFrame {
    glm::vec3 _position{};
    glm::quat _orientation{};
public:
    using Config = BeginSelfieFrameConfig;
    using JobModel = render::Job::ModelO<BeginSelfieFrame, RenderArgsPointer, Config>;
    BeginSelfieFrame() {
        _cachedArgsPointer = std::make_shared<RenderArgs>(_cachedArgs);
    }
    void configure(const Config& config) {
        // NOTE(review): this runs every frame even while the task is disabled;
        // consider bailing early if that ever becomes measurable.
        _position = config.position;
        _orientation = config.orientation;
    }
    void run(const render::RenderContextPointer& renderContext, RenderArgsPointer& cachedArgs) {
        auto args = renderContext->args;
        auto textureCache = DependencyManager::get<TextureCache>();
        auto destFramebuffer = textureCache->getSelfieFramebuffer();
        // Cache the current target/viewport/display mode so the end-of-frame
        // job can restore them. (Caching may be redundant while this task is
        // last in the pipeline, but relying on that would be fragile.)
        _cachedArgsPointer->_blitFramebuffer = args->_blitFramebuffer;
        _cachedArgsPointer->_viewport = args->_viewport;
        _cachedArgsPointer->_displayMode = args->_displayMode;
        args->_blitFramebuffer = destFramebuffer;
        args->_viewport = glm::ivec4(0, 0, destFramebuffer->getWidth(), destFramebuffer->getHeight());
        args->_displayMode = RenderArgs::MONO;
        gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
            batch.disableContextStereo();
        });
        // Render from the selfie camera's point of view.
        auto srcViewFrustum = args->getViewFrustum();
        srcViewFrustum.setPosition(_position);
        srcViewFrustum.setOrientation(_orientation);
        args->pushViewFrustum(srcViewFrustum);
        cachedArgs = _cachedArgsPointer;
    }
protected:
    RenderArgs _cachedArgs;
    RenderArgsPointer _cachedArgsPointer;
};
// Restores the RenderArgs cached by BeginSelfieFrame, undoing the
// framebuffer/viewport/frustum/stereo changes made for the selfie pass.
class EndSelfieFrame {
public:
    using JobModel = render::Job::ModelI<EndSelfieFrame, RenderArgsPointer>;
    void run(const render::RenderContextPointer& renderContext, const RenderArgsPointer& cachedArgs) {
        auto renderArgs = renderContext->args;
        renderArgs->_blitFramebuffer = cachedArgs->_blitFramebuffer;
        renderArgs->_viewport = cachedArgs->_viewport;
        renderArgs->popViewFrustum();
        renderArgs->_displayMode = cachedArgs->_displayMode;
        gpu::doInBatch(renderArgs->_context, [&](gpu::Batch& batch) {
            batch.restoreContextStereo();
        });
    }
};
// Assembles the selfie render pipeline: capture the current render state,
// run a full fetch/cull/sort + deferred render into the selfie framebuffer,
// then restore the main view state.
void SelfieRenderTask::build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor) {
    const auto previousArgs = task.addJob<BeginSelfieFrame>("BeginSelfie");
    const auto sortedItems = task.addJob<RenderFetchCullSortTask>("FetchCullSort", cullFunctor);
    assert(sortedItems.canCast<RenderFetchCullSortTask::Output>());
    task.addJob<RenderDeferredTask>("RenderDeferredTask", sortedItems);
    task.addJob<EndSelfieFrame>("EndSelfie", previousArgs);
}

View file

@ -0,0 +1,113 @@
//
// SecondaryCamera.cpp
// interface/src
//
// Created by Samuel Gateau, Howard Stearns, and Zach Fox on 2017-06-08.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "SecondaryCamera.h"
#include <TextureCache.h>
#include <gpu/Context.h>
using RenderArgsPointer = std::shared_ptr<RenderArgs>;
// Assembles the main-view render pipeline: shadows first (so later passes can
// sample the shadow maps), then a fetch/cull/sort of the scene, then either
// the deferred (default) or forward renderer.
void MainRenderTask::build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor, bool isDeferred) {
    task.addJob<RenderShadowTask>("RenderShadowTask", cullFunctor);
    const auto sortedItems = task.addJob<RenderFetchCullSortTask>("FetchCullSort", cullFunctor);
    assert(sortedItems.canCast<RenderFetchCullSortTask::Output>());
    if (isDeferred) {
        task.addJob<RenderDeferredTask>("RenderDeferredTask", sortedItems);
    } else {
        task.addJob<RenderForwardTask>("Forward", sortedItems);
    }
}
void SecondaryCameraRenderTaskConfig::resetSize(int width, int height) { // FIXME: Add an arg here for "destinationFramebuffer"
bool wasEnabled = isEnabled();
setEnabled(false);
auto textureCache = DependencyManager::get<TextureCache>();
textureCache->resetSpectatorCameraFramebuffer(width, height); // FIXME: Call the correct reset function based on the "destinationFramebuffer" arg
setEnabled(wasEnabled);
}
// Script-facing slot: carefully adjust the spectator camera's framebuffer /
// texture. Currently just forwards to the shared resetSize() implementation.
void SecondaryCameraRenderTaskConfig::resetSizeSpectatorCamera(int width, int height) {
    resetSize(width, height);
}
// Redirects rendering into the secondary camera's framebuffer and swaps in
// that camera's view frustum; outputs the previous RenderArgs so
// EndSecondaryCameraFrame can restore them after the pass.
class BeginSecondaryCameraFrame {
    glm::vec3 _position{};
    glm::quat _orientation{};
public:
    using Config = BeginSecondaryCameraFrameConfig;
    using JobModel = render::Job::ModelO<BeginSecondaryCameraFrame, RenderArgsPointer, Config>;
    BeginSecondaryCameraFrame() {
        _cachedArgsPointer = std::make_shared<RenderArgs>(_cachedArgs);
    }
    void configure(const Config& config) {
        // Only track the camera transform while the task is (or is forced) enabled.
        if (config.enabled || config.alwaysEnabled) {
            _position = config.position;
            _orientation = config.orientation;
        }
    }
    void run(const render::RenderContextPointer& renderContext, RenderArgsPointer& cachedArgs) {
        auto args = renderContext->args;
        auto textureCache = DependencyManager::get<TextureCache>();
        // FIXME: Change the destination based on some unimplemented config var
        gpu::FramebufferPointer destFramebuffer = textureCache->getSpectatorCameraFramebuffer();
        // No destination means nothing to render into; note that in this case
        // the cachedArgs output is left untouched.
        if (destFramebuffer) {
            // Cache the current target/viewport/display mode so the end-of-frame
            // job can restore them. (Caching may be redundant while this task is
            // last in the pipeline, but relying on that would be fragile.)
            _cachedArgsPointer->_blitFramebuffer = args->_blitFramebuffer;
            _cachedArgsPointer->_viewport = args->_viewport;
            _cachedArgsPointer->_displayMode = args->_displayMode;
            args->_blitFramebuffer = destFramebuffer;
            args->_viewport = glm::ivec4(0, 0, destFramebuffer->getWidth(), destFramebuffer->getHeight());
            args->_displayMode = RenderArgs::MONO;
            gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
                batch.disableContextStereo();
            });
            // Render from the secondary camera's point of view.
            auto srcViewFrustum = args->getViewFrustum();
            srcViewFrustum.setPosition(_position);
            srcViewFrustum.setOrientation(_orientation);
            args->pushViewFrustum(srcViewFrustum);
            cachedArgs = _cachedArgsPointer;
        }
    }
protected:
    RenderArgs _cachedArgs;
    RenderArgsPointer _cachedArgsPointer;
};
// Restores the RenderArgs cached by BeginSecondaryCameraFrame, undoing the
// framebuffer/viewport/frustum/stereo changes made for the secondary pass.
class EndSecondaryCameraFrame {
public:
    using JobModel = render::Job::ModelI<EndSecondaryCameraFrame, RenderArgsPointer>;
    void run(const render::RenderContextPointer& renderContext, const RenderArgsPointer& cachedArgs) {
        auto renderArgs = renderContext->args;
        renderArgs->_blitFramebuffer = cachedArgs->_blitFramebuffer;
        renderArgs->_viewport = cachedArgs->_viewport;
        renderArgs->popViewFrustum();
        renderArgs->_displayMode = cachedArgs->_displayMode;
        gpu::doInBatch(renderArgs->_context, [&](gpu::Batch& batch) {
            batch.restoreContextStereo();
        });
    }
};
// Assembles the secondary camera pipeline: capture the current render state,
// run a full fetch/cull/sort + deferred render into the secondary camera's
// framebuffer, then restore the main view state.
void SecondaryCameraRenderTask::build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor) {
    const auto previousArgs = task.addJob<BeginSecondaryCameraFrame>("BeginSecondaryCamera");
    const auto sortedItems = task.addJob<RenderFetchCullSortTask>("FetchCullSort", cullFunctor);
    assert(sortedItems.canCast<RenderFetchCullSortTask::Output>());
    task.addJob<RenderDeferredTask>("RenderDeferredTask", sortedItems);
    task.addJob<EndSecondaryCameraFrame>("EndSecondaryCamera", previousArgs);
}

View file

@ -1,6 +1,17 @@
//
// SecondaryCamera.h
// interface/src
//
// Created by Samuel Gateau, Howard Stearns, and Zach Fox on 2017-06-08.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once #pragma once
#ifndef hifi_PrototypeSelfie_h #ifndef hifi_SecondaryCamera_h
#define hifi_PrototypeSelfie_h #define hifi_SecondaryCamera_h
#include <RenderShadowTask.h> #include <RenderShadowTask.h>
#include <render/RenderFetchCullSortTask.h> #include <render/RenderFetchCullSortTask.h>
@ -18,33 +29,35 @@ public:
void build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor, bool isDeferred = true); void build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor, bool isDeferred = true);
}; };
class BeginSelfieFrameConfig : public render::Task::Config { // Exposes view frustum position/orientation to javascript. class BeginSecondaryCameraFrameConfig : public render::Task::Config { // Exposes view frustum position/orientation to javascript.
Q_OBJECT Q_OBJECT
Q_PROPERTY(glm::vec3 position MEMBER position NOTIFY dirty) // of viewpoint to render from Q_PROPERTY(glm::vec3 position MEMBER position NOTIFY dirty) // of viewpoint to render from
Q_PROPERTY(glm::quat orientation MEMBER orientation NOTIFY dirty) // of viewpoint to render from Q_PROPERTY(glm::quat orientation MEMBER orientation NOTIFY dirty) // of viewpoint to render from
public: public:
glm::vec3 position{}; glm::vec3 position{};
glm::quat orientation{}; glm::quat orientation{};
BeginSelfieFrameConfig() : render::Task::Config(false) {} BeginSecondaryCameraFrameConfig() : render::Task::Config(false) {}
signals: signals:
void dirty(); void dirty();
}; };
class SelfieRenderTaskConfig : public render::Task::Config { class SecondaryCameraRenderTaskConfig : public render::Task::Config {
Q_OBJECT Q_OBJECT
public: public:
SelfieRenderTaskConfig() : render::Task::Config(false) {} SecondaryCameraRenderTaskConfig() : render::Task::Config(false) {}
private:
void resetSize(int width, int height);
signals: signals:
void dirty(); void dirty();
public slots: public slots:
void resetSize(int width, int height); void resetSizeSpectatorCamera(int width, int height);
}; };
class SelfieRenderTask { class SecondaryCameraRenderTask {
public: public:
using Config = SelfieRenderTaskConfig; using Config = SecondaryCameraRenderTaskConfig;
using JobModel = render::Task::Model<SelfieRenderTask, Config>; using JobModel = render::Task::Model<SecondaryCameraRenderTask, Config>;
SelfieRenderTask() {} SecondaryCameraRenderTask() {}
void configure(const Config& config) {} void configure(const Config& config) {}
void build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor); void build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor);
}; };

View file

@ -50,7 +50,8 @@ Q_LOGGING_CATEGORY(trace_resource_parse_image_ktx, "trace.resource.parse.image.k
const std::string TextureCache::KTX_DIRNAME { "ktx_cache" }; const std::string TextureCache::KTX_DIRNAME { "ktx_cache" };
const std::string TextureCache::KTX_EXT { "ktx" }; const std::string TextureCache::KTX_EXT { "ktx" };
const std::string TextureCache::SELFIE_FRAME_URL { "http://selfieFrame" }; static const QString RESOURCE_SCHEME = "resource";
static const QUrl SPECTATOR_CAMERA_FRAME_URL("resource://spectatorCameraFrame");
static const float SKYBOX_LOAD_PRIORITY { 10.0f }; // Make sure skybox loads first static const float SKYBOX_LOAD_PRIORITY { 10.0f }; // Make sure skybox loads first
static const float HIGH_MIPS_LOAD_PRIORITY { 9.0f }; // Make sure high mips loads after skybox but before models static const float HIGH_MIPS_LOAD_PRIORITY { 9.0f }; // Make sure high mips loads after skybox but before models
@ -182,9 +183,8 @@ ScriptableResource* TextureCache::prefetch(const QUrl& url, int type, int maxNum
} }
NetworkTexturePointer TextureCache::getTexture(const QUrl& url, image::TextureUsage::Type type, const QByteArray& content, int maxNumPixels) { NetworkTexturePointer TextureCache::getTexture(const QUrl& url, image::TextureUsage::Type type, const QByteArray& content, int maxNumPixels) {
if (url == QUrl(SELFIE_FRAME_URL.c_str())) { if (url.scheme() == RESOURCE_SCHEME) {
return getResourceTexture(url);
return getSelfieNetworkTexture();
} }
TextureExtra extra = { type, content, maxNumPixels }; TextureExtra extra = { type, content, maxNumPixels };
return ResourceCache::getResource(url, QUrl(), &extra).staticCast<NetworkTexture>(); return ResourceCache::getResource(url, QUrl(), &extra).staticCast<NetworkTexture>();
@ -885,31 +885,30 @@ void ImageReader::read() {
} }
NetworkTexturePointer TextureCache::getSelfieNetworkTexture() { NetworkTexturePointer TextureCache::getResourceTexture(QUrl resourceTextureUrl) {
if (!_selfieNetworkTexture) { gpu::TexturePointer texture;
_selfieNetworkTexture.reset(new NetworkTexture(QUrl(SELFIE_FRAME_URL.c_str()))); if (resourceTextureUrl == SPECTATOR_CAMERA_FRAME_URL) {
auto texture = getSelfieTexture(); if (!_spectatorCameraNetworkTexture) {
_selfieNetworkTexture->setImage(texture, texture->getWidth(), texture->getHeight()); _spectatorCameraNetworkTexture.reset(new NetworkTexture(resourceTextureUrl));
} }
return _selfieNetworkTexture; texture = _spectatorCameraFramebuffer->getRenderBuffer(0);
} if (texture) {
_spectatorCameraNetworkTexture->setImage(texture, texture->getWidth(), texture->getHeight());
const gpu::TexturePointer& TextureCache::getSelfieTexture() { return _spectatorCameraNetworkTexture;
if (!_selfieTexture) { }
getSelfieFramebuffer();
}
return _selfieTexture;
}
const gpu::FramebufferPointer& TextureCache::getSelfieFramebuffer() {
if (!_selfieFramebuffer) {
resetSelfieFramebuffer(2048, 1024);
} }
return _selfieFramebuffer; return NetworkTexturePointer();
} }
void TextureCache::resetSelfieFramebuffer(int width, int height) { const gpu::FramebufferPointer& TextureCache::getSpectatorCameraFramebuffer() {
_selfieFramebuffer.reset(gpu::Framebuffer::create("selfie", gpu::Element::COLOR_SRGBA_32, 2048, 1024)); if (!_spectatorCameraFramebuffer) {
_selfieTexture = _selfieFramebuffer->getRenderBuffer(0); resetSpectatorCameraFramebuffer(2048, 1024);
_selfieNetworkTexture.reset(); }
} return _spectatorCameraFramebuffer;
}
void TextureCache::resetSpectatorCameraFramebuffer(int width, int height) {
_spectatorCameraFramebuffer.reset(gpu::Framebuffer::create("spectatorCamera", gpu::Element::COLOR_SRGBA_32, width, height));
_spectatorCameraNetworkTexture.reset();
}

View file

@ -170,11 +170,10 @@ public:
gpu::TexturePointer cacheTextureByHash(const std::string& hash, const gpu::TexturePointer& texture); gpu::TexturePointer cacheTextureByHash(const std::string& hash, const gpu::TexturePointer& texture);
/// Selfie rendering targets. /// SpectatorCamera rendering targets.
NetworkTexturePointer getSelfieNetworkTexture(); NetworkTexturePointer getResourceTexture(QUrl resourceTextureUrl);
const gpu::TexturePointer& getSelfieTexture(); const gpu::FramebufferPointer& getSpectatorCameraFramebuffer();
const gpu::FramebufferPointer& getSelfieFramebuffer(); void resetSpectatorCameraFramebuffer(int width, int height);
void resetSelfieFramebuffer(int width, int height);
protected: protected:
// Overload ResourceCache::prefetch to allow specifying texture type for loads // Overload ResourceCache::prefetch to allow specifying texture type for loads
@ -193,7 +192,6 @@ private:
static const std::string KTX_DIRNAME; static const std::string KTX_DIRNAME;
static const std::string KTX_EXT; static const std::string KTX_EXT;
static const std::string SELFIE_FRAME_URL;
KTXCache _ktxCache; KTXCache _ktxCache;
// Map from image hashes to texture weak pointers // Map from image hashes to texture weak pointers
@ -206,10 +204,8 @@ private:
gpu::TexturePointer _blueTexture; gpu::TexturePointer _blueTexture;
gpu::TexturePointer _blackTexture; gpu::TexturePointer _blackTexture;
NetworkTexturePointer _spectatorCameraNetworkTexture;
gpu::FramebufferPointer _selfieFramebuffer; gpu::FramebufferPointer _spectatorCameraFramebuffer;
gpu::TexturePointer _selfieTexture;
NetworkTexturePointer _selfieNetworkTexture;
}; };
#endif // hifi_TextureCache_h #endif // hifi_TextureCache_h

View file

@ -46,6 +46,8 @@
// camera: The in-world entity that corresponds to the spectator camera. // camera: The in-world entity that corresponds to the spectator camera.
// cameraIsDynamic: "false" for now while we figure out why dynamic, parented overlays // cameraIsDynamic: "false" for now while we figure out why dynamic, parented overlays
// drift with respect to their parent // drift with respect to their parent
// lastCameraPosition: Holds the last known camera position
// lastCameraRotation: Holds the last known camera rotation
// //
// Arguments: // Arguments:
// None // None
@ -54,16 +56,23 @@
// The update function for the spectator camera. Modifies the camera's position // The update function for the spectator camera. Modifies the camera's position
// and orientation. // and orientation.
// //
var spectatorFrameRenderConfig = Render.getConfig("SelfieFrame"); var spectatorFrameRenderConfig = Render.getConfig("SecondaryCameraFrame");
var beginSpectatorFrameRenderConfig = Render.getConfig("BeginSelfie"); var beginSpectatorFrameRenderConfig = Render.getConfig("BeginSecondaryCamera");
var viewFinderOverlay = false; var viewFinderOverlay = false;
var camera = false; var camera = false;
var cameraIsDynamic = false; var cameraIsDynamic = false;
var lastCameraPosition = false;
var lastCameraRotation = false;
function updateRenderFromCamera() { function updateRenderFromCamera() {
var cameraData = Entities.getEntityProperties(camera, ['position', 'rotation']); var cameraData = Entities.getEntityProperties(camera, ['position', 'rotation']);
// FIXME: don't muck with config if properties haven't changed. if (JSON.stringify(lastCameraRotation) !== JSON.stringify(cameraData.rotation)) {
beginSpectatorFrameRenderConfig.position = cameraData.position; lastCameraRotation = cameraData.rotation;
beginSpectatorFrameRenderConfig.orientation = cameraData.rotation; beginSpectatorFrameRenderConfig.orientation = lastCameraRotation;
}
if (JSON.stringify(lastCameraPosition) !== JSON.stringify(cameraData.position)) {
lastCameraPosition = cameraData.position;
beginSpectatorFrameRenderConfig.position = Vec3.sum(inFrontOf(0.17, lastCameraPosition, lastCameraRotation), {x: 0, y: 0.02, z: 0});
}
if (cameraIsDynamic) { if (cameraIsDynamic) {
// BUG: image3d overlays don't retain their locations properly when parented to a dynamic object // BUG: image3d overlays don't retain their locations properly when parented to a dynamic object
Overlays.editOverlay(viewFinderOverlay, { orientation: flip(cameraData.rotation) }); Overlays.editOverlay(viewFinderOverlay, { orientation: flip(cameraData.rotation) });
@ -88,35 +97,49 @@
function spectatorCameraOn() { function spectatorCameraOn() {
// Set the special texture size based on the window in which it will eventually be displayed. // Set the special texture size based on the window in which it will eventually be displayed.
var size = Controller.getViewportDimensions(); // FIXME: Need a signal to hook into when the dimensions change. var size = Controller.getViewportDimensions(); // FIXME: Need a signal to hook into when the dimensions change.
spectatorFrameRenderConfig.resetSize(size.x, size.y); spectatorFrameRenderConfig.resetSizeSpectatorCamera(size.x, size.y);
spectatorFrameRenderConfig.enabled = beginSpectatorFrameRenderConfig.enabled = true; spectatorFrameRenderConfig.enabled = beginSpectatorFrameRenderConfig.enabled = true;
var cameraRotation = MyAvatar.orientation, cameraPosition = inFrontOf(2); var cameraRotation = MyAvatar.orientation, cameraPosition = inFrontOf(1, Vec3.sum(MyAvatar.position, { x: 0, y: 0.3, z: 0 }));
Script.update.connect(updateRenderFromCamera); Script.update.connect(updateRenderFromCamera);
isUpdateRenderWired = true; isUpdateRenderWired = true;
camera = Entities.addEntity({ camera = Entities.addEntity({
type: 'Box', "angularDamping": 0.98000001907348633,
dimensions: { x: 0.4, y: 0.2, z: 0.4 }, "collisionsWillMove": 0,
userData: '{"grabbableKey":{"grabbable":true}}', "damping": 0.98000001907348633,
dynamic: cameraIsDynamic, "dimensions": {
color: { red: 255, green: 0, blue: 0 }, "x": 0.2338641881942749,
name: 'SpectatorCamera', "y": 0.407032310962677,
position: cameraPosition, // Put the camera in front of me so that I can find it. "z": 0.38702544569969177
rotation: cameraRotation },
"dynamic": cameraIsDynamic,
"modelURL": "http://hifi-content.s3.amazonaws.com/alan/dev/spectator-camera.fbx",
"queryAACube": {
"scale": 0.60840487480163574,
"x": -0.30420243740081787,
"y": -0.30420243740081787,
"z": -0.30420243740081787
},
"rotation": { x: 0, y: 0, z: 0 },
"position": { x: 0, y: 0, z: 0 },
"shapeType": "simple-compound",
"type": "Model",
"userData": "{\"grabbableKey\":{\"grabbable\":true}}"
}, true); }, true);
// Put an image3d overlay on the near face, as a viewFinder. // This image3d overlay acts as the camera's preview screen.
viewFinderOverlay = Overlays.addOverlay("image3d", { viewFinderOverlay = Overlays.addOverlay("image3d", {
url: "http://selfieFrame", url: "resource://spectatorCameraFrame",
//url: "http://1.bp.blogspot.com/-1GABEq__054/T03B00j_OII/AAAAAAAAAa8/jo55LcvEPHI/s1600/Winning.jpg", emissive: true,
parentID: camera, parentID: camera,
alpha: 1, alpha: 1,
position: inFrontOf(-0.25, cameraPosition, cameraRotation), position: { x: 0.007, y: 0.15, z: -0.005 },
// FIXME: We shouldn't need the flip and the negative scale. scale: -0.16,
// e.g., This isn't necessary using an ordinary .jpg with lettering, above.
// Must be something about the view frustum projection matrix?
// But don't go changing that in (c++ code) without getting all the way to a desktop display!
orientation: flip(cameraRotation),
scale: -0.35,
}); });
Entities.editEntity(camera, { position: cameraPosition, rotation: cameraRotation });
// FIXME: We shouldn't need the flip and the negative scale.
// e.g., This isn't necessary using an ordinary .jpg with lettering, above.
// Must be something about the view frustum projection matrix?
// But don't go changing that in (c++ code) without getting all the way to a desktop display!
Overlays.editOverlay(viewFinderOverlay, { orientation: flip(cameraRotation) });
setDisplay(monitorShowsCameraView); setDisplay(monitorShowsCameraView);
} }
@ -141,7 +164,6 @@
} }
if (camera) { if (camera) {
Entities.deleteEntity(camera); Entities.deleteEntity(camera);
print("ZACH FOX GOODBYE");
} }
if (viewFinderOverlay) { if (viewFinderOverlay) {
Overlays.deleteOverlay(viewFinderOverlay); Overlays.deleteOverlay(viewFinderOverlay);
@ -216,7 +238,7 @@
function setDisplay(showCameraView) { function setDisplay(showCameraView) {
// It would be fancy if (showCameraView && !isUpdateRenderWired) would show instructions, but that's out of scope for now. // It would be fancy if (showCameraView && !isUpdateRenderWired) would show instructions, but that's out of scope for now.
var url = (showCameraView && isUpdateRenderWired) ? "http://selfieFrame" : ""; var url = (showCameraView && isUpdateRenderWired) ? "resource://spectatorCameraFrame" : "";
Window.setDisplayTexture(url); Window.setDisplayTexture(url);
} }
const MONITOR_SHOWS_CAMERA_VIEW_DEFAULT = false; const MONITOR_SHOWS_CAMERA_VIEW_DEFAULT = false;
@ -264,6 +286,7 @@
tablet.loadQMLSource("../SpectatorCamera.qml"); tablet.loadQMLSource("../SpectatorCamera.qml");
onSpectatorCameraScreen = true; onSpectatorCameraScreen = true;
sendToQml({ method: 'updateSpectatorCameraCheckbox', params: !!camera }); sendToQml({ method: 'updateSpectatorCameraCheckbox', params: !!camera });
sendToQml({ method: 'updateMonitorShowsSwitch', params: !!Settings.getValue('spectatorCamera/monitorShowsCameraView', false) });
setMonitorShowsCameraViewAndSendToQml(monitorShowsCameraView); setMonitorShowsCameraViewAndSendToQml(monitorShowsCameraView);
} }
} }