Merge pull request #10721 from davidkelly/dk/spectatorCameraPreview

Spectator camera preview
Authored by David Kelly on 2017-07-03 16:42:27 -07:00; committed by GitHub
commit c205d3eb2a
8 changed files with 277 additions and 37 deletions

SpectatorCamera.qml

@ -11,6 +11,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import Hifi 1.0 as Hifi
import QtQuick 2.5
import QtQuick.Controls 1.4
import "../styles-uit"
@ -38,7 +39,7 @@ Rectangle {
letterboxMessage.visible = true;
letterboxMessage.popupRadius = 0;
}
//
// TITLE BAR START
//
@ -77,7 +78,7 @@ Rectangle {
//
// TITLE BAR END
//
//
// SPECTATOR APP DESCRIPTION START
//
@ -152,7 +153,7 @@ Rectangle {
// Alignment
horizontalAlignment: Text.AlignHLeft;
verticalAlignment: Text.AlignVCenter;
MouseArea {
anchors.fill: parent;
hoverEnabled: enabled;
@ -186,7 +187,7 @@ Rectangle {
// SPECTATOR APP DESCRIPTION END
//
//
// SPECTATOR CONTROLS START
//
@ -212,23 +213,28 @@ Rectangle {
boxSize: 24;
onClicked: {
sendToScript({method: (checked ? 'spectatorCameraOn' : 'spectatorCameraOff')});
spectatorCameraPreview.ready = checked;
}
}
// Spectator Camera Preview
Image {
Hifi.ResourceImageItem {
id: spectatorCameraPreview;
url: "resource://spectatorCameraFrame";
ready: cameraToggleCheckBox.checked;
mirrorVertically: true;
height: 250;
anchors.left: parent.left;
anchors.top: cameraToggleCheckBox.bottom;
anchors.topMargin: 20;
anchors.right: parent.right;
fillMode: Image.PreserveAspectFit;
horizontalAlignment: Image.AlignHCenter;
verticalAlignment: Image.AlignVCenter;
source: "http://1.bp.blogspot.com/-1GABEq__054/T03B00j_OII/AAAAAAAAAa8/jo55LcvEPHI/s1600/Winning.jpg";
onVisibleChanged: {
ready = cameraToggleCheckBox.checked;
update();
}
}
// "Monitor Shows" Switch Label Glyph
HiFiGlyphs {
id: monitorShowsSwitchLabelGlyph;
@ -236,7 +242,7 @@ Rectangle {
size: 32;
color: hifi.colors.blueHighlight;
anchors.top: spectatorCameraPreview.bottom;
anchors.topMargin: 12;
anchors.topMargin: 20;
anchors.left: parent.left;
}
// "Monitor Shows" Switch Label
@ -282,7 +288,7 @@ Rectangle {
sendToScript({method: 'changeSwitchViewFromControllerPreference', params: checked});
}
}
}
}
//
// SPECTATOR CONTROLS END
//
@ -295,11 +301,11 @@ Rectangle {
//
// Relevant Variables:
// None
//
//
// Arguments:
// message: The message sent from the SpectatorCamera JavaScript.
// Messages are in format "{method, params}", like json-rpc.
//
//
// Description:
// Called when a message is received from spectatorCamera.js.
//
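//
// The fromScript body itself is not part of this hunk. As a rough sketch only, a handler for that
// "{method, params}" shape could look like the following (the 'updateSpectatorCameraCheckbox' method
// name is hypothetical and used purely for illustration; cameraToggleCheckBox is the checkbox id used
// earlier in this file):
//
//     function fromScript(message) {
//         switch (message.method) {
//         case 'updateSpectatorCameraCheckbox': // hypothetical method name, for illustration only
//             cameraToggleCheckBox.checked = message.params;
//             break;
//         default:
//             console.log('Unrecognized message from spectatorCamera.js: ' + JSON.stringify(message));
//         }
//     }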

Application.cpp

@ -169,6 +169,7 @@
#if defined(Q_OS_MAC) || defined(Q_OS_WIN)
#include "SpeechRecognizer.h"
#endif
#include "ui/ResourceImageItem.h"
#include "ui/AddressBarDialog.h"
#include "ui/AvatarInputs.h"
#include "ui/DialogsManager.h"
@ -2004,6 +2005,7 @@ void Application::initializeUi() {
LoginDialog::registerType();
Tooltip::registerType();
UpdateDialog::registerType();
qmlRegisterType<ResourceImageItem>("Hifi", 1, 0, "ResourceImageItem");
qmlRegisterType<Preference>("Hifi", 1, 0, "Preference");
auto offscreenUi = DependencyManager::get<OffscreenUi>();

ResourceImageItem.cpp (new file)

@ -0,0 +1,111 @@
//
// ResourceImageItem.cpp
//
// Created by David Kelly and Howard Stearns on 2017/06/08
// Copyright 2017 High Fidelity, Inc.
// Distributed under the Apache License, Version 2.0
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
//#include "Application.h"
#include "ResourceImageItem.h"
#include <QOpenGLFramebufferObjectFormat>
#include <QOpenGLFunctions>
#include <QOpenGLExtraFunctions>
#include <QOpenGLContext>
void ResourceImageItem::setUrl(const QString& url) {
if (url != m_url) {
m_url = url;
update();
}
}
void ResourceImageItem::setReady(bool ready) {
if (ready != m_ready) {
m_ready = ready;
update();
}
}
void ResourceImageItemRenderer::onUpdateTimer() {
if (_ready && _networkTexture && _networkTexture->isLoaded()) {
if(_fboMutex.tryLock()) {
invalidateFramebufferObject();
qApp->getActiveDisplayPlugin()->copyTextureToQuickFramebuffer(_networkTexture, _copyFbo, &_fenceSync);
_fboMutex.unlock();
} else {
qDebug() << "couldn't get a lock, using last frame";
}
}
update();
}
ResourceImageItemRenderer::ResourceImageItemRenderer() : QQuickFramebufferObject::Renderer() {
connect(&_updateTimer, SIGNAL(timeout()), this, SLOT(onUpdateTimer()));
}
void ResourceImageItemRenderer::synchronize(QQuickFramebufferObject* item) {
ResourceImageItem* resourceImageItem = static_cast<ResourceImageItem*>(item);
resourceImageItem->setFlag(QQuickItem::ItemHasContents);
bool urlChanged = false;
if (_url != resourceImageItem->getUrl()) {
_url = resourceImageItem->getUrl();
urlChanged = true;
}
bool readyChanged = false;
if (_ready != resourceImageItem->getReady()) {
_ready = resourceImageItem->getReady();
readyChanged = true;
}
_window = resourceImageItem->window();
if (_ready && !_url.isNull() && !_url.isEmpty() && (urlChanged || readyChanged || !_networkTexture)) {
_networkTexture = DependencyManager::get<TextureCache>()->getTexture(_url);
}
static const int UPDATE_TIMER_DELAY_IN_MS = 100; // 100 ms = 10 hz for now
if (_ready && !_updateTimer.isActive()) {
_updateTimer.start(UPDATE_TIMER_DELAY_IN_MS);
} else if (!_ready && _updateTimer.isActive()) {
_updateTimer.stop();
}
}
QOpenGLFramebufferObject* ResourceImageItemRenderer::createFramebufferObject(const QSize& size) {
QOpenGLFramebufferObjectFormat format;
format.setAttachment(QOpenGLFramebufferObject::CombinedDepthStencil);
_copyFbo = new QOpenGLFramebufferObject(size, format);
_copyFbo->bind();
return new QOpenGLFramebufferObject(size, format);
}
void ResourceImageItemRenderer::render() {
auto f = QOpenGLContext::currentContext()->extraFunctions();
bool doUpdate = false;
// black background
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
if (_fenceSync) {
f->glWaitSync(_fenceSync, 0, GL_TIMEOUT_IGNORED);
f->glDeleteSync(_fenceSync);
_fenceSync = 0;
doUpdate = true;
}
if (_ready) {
_fboMutex.lock();
_copyFbo->bind();
QOpenGLFramebufferObject::blitFramebuffer(framebufferObject(), _copyFbo, GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT, GL_NEAREST);
_copyFbo->release();
_fboMutex.unlock();
if (doUpdate) {
update();
}
}
glFlush();
_window->resetOpenGLState();
}

ResourceImageItem.h (new file)

@ -0,0 +1,61 @@
//
// ResourceImageItem.h
//
// Created by David Kelly and Howard Stearns on 2017/06/08
// Copyright 2017 High Fidelity, Inc.
// Distributed under the Apache License, Version 2.0
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_ResourceImageItem_h
#define hifi_ResourceImageItem_h
#include "Application.h"
#include <QQuickFramebufferObject>
#include <QQuickWindow>
#include <QTimer>
#include <TextureCache.h>
class ResourceImageItemRenderer : public QObject, public QQuickFramebufferObject::Renderer {
Q_OBJECT
public:
ResourceImageItemRenderer();
QOpenGLFramebufferObject* createFramebufferObject(const QSize& size) override;
void synchronize(QQuickFramebufferObject* item) override;
void render() override;
private:
bool _ready;
QString _url;
NetworkTexturePointer _networkTexture;
QQuickWindow* _window;
QMutex _fboMutex;
QOpenGLFramebufferObject* _copyFbo;
GLsync _fenceSync { 0 };
QTimer _updateTimer;
public slots:
void onUpdateTimer();
};
class ResourceImageItem : public QQuickFramebufferObject {
Q_OBJECT
Q_PROPERTY(QString url READ getUrl WRITE setUrl)
Q_PROPERTY(bool ready READ getReady WRITE setReady)
public:
QString getUrl() const { return m_url; }
void setUrl(const QString& url);
bool getReady() const { return m_ready; }
void setReady(bool ready);
QQuickFramebufferObject::Renderer* createRenderer() const override { return new ResourceImageItemRenderer; }
private:
QString m_url;
bool m_ready { false };
};
#endif // hifi_ResourceImageItem_h

OpenGLDisplayPlugin.cpp

@ -16,6 +16,7 @@
#include <QtOpenGL/QGLWidget>
#include <QtGui/QImage>
#include <QOpenGLFramebufferObject>
#if defined(Q_OS_MAC)
#include <OpenGL/CGLCurrent.h>
#endif
@ -41,7 +42,7 @@
#include <ui-plugins/PluginContainer.h>
#include <ui/Menu.h>
#include <CursorManager.h>
#include <TextureCache.h>
#include "CompositorHelper.h"
#include "Logging.h"
@ -55,7 +56,7 @@ out vec4 outFragColor;
float sRGBFloatToLinear(float value) {
const float SRGB_ELBOW = 0.04045;
return (value <= SRGB_ELBOW) ? value / 12.92 : pow((value + 0.055) / 1.055, 2.4);
}
@ -121,10 +122,10 @@ public:
PROFILE_SET_THREAD_NAME("Present Thread");
// FIXME determine the best priority balance between this and the main thread...
// It may be dependent on the display plugin being used, since VR plugins should
// It may be dependent on the display plugin being used, since VR plugins should
// have higher priority on rendering (although we could say that the Oculus plugin
// doesn't need that since it has async timewarp).
// A higher priority here
// A higher priority here
setPriority(QThread::HighPriority);
OpenGLDisplayPlugin* currentPlugin{ nullptr };
Q_ASSERT(_context);
@ -233,7 +234,7 @@ public:
// Move the context back to the presentation thread
_context->moveToThread(this);
// restore control of the context to the presentation thread and signal
// restore control of the context to the presentation thread and signal
// the end of the operation
_finishedMainThreadOperation = true;
lock.unlock();
@ -291,7 +292,7 @@ bool OpenGLDisplayPlugin::activate() {
if (!RENDER_THREAD) {
RENDER_THREAD = _presentThread;
}
// Child classes may override this in order to do things like initialize
// libraries, etc
if (!internalActivate()) {
@ -411,7 +412,7 @@ void OpenGLDisplayPlugin::customizeContext() {
gpu::Shader::makeProgram(*program);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(gpu::State::DepthTest(false));
state->setBlendFunction(true,
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
_overlayPipeline = gpu::Pipeline::create(program, state);
@ -686,7 +687,7 @@ void OpenGLDisplayPlugin::resetPresentRate() {
// _presentRate = RateCounter<100>();
}
float OpenGLDisplayPlugin::renderRate() const {
float OpenGLDisplayPlugin::renderRate() const {
return _renderRate.rate();
}
@ -821,3 +822,51 @@ void OpenGLDisplayPlugin::updateCompositeFramebuffer() {
_compositeFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("OpenGLDisplayPlugin::composite", gpu::Element::COLOR_RGBA_32, renderSize.x, renderSize.y));
}
}
void OpenGLDisplayPlugin::copyTextureToQuickFramebuffer(NetworkTexturePointer networkTexture, QOpenGLFramebufferObject* target, GLsync* fenceSync) {
auto glBackend = const_cast<OpenGLDisplayPlugin&>(*this).getGLBackend();
withMainThreadContext([&] {
GLuint sourceTexture = glBackend->getTextureID(networkTexture->getGPUTexture());
GLuint targetTexture = target->texture();
GLuint fbo[2] {0, 0};
// need mipmaps for blitting texture
glGenerateTextureMipmap(sourceTexture);
// create 2 fbos (one for initial texture, second for scaled one)
glCreateFramebuffers(2, fbo);
// setup source fbo
glBindFramebuffer(GL_FRAMEBUFFER, fbo[0]);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, sourceTexture, 0);
GLint texWidth, texHeight;
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &texWidth);
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &texHeight);
// setup destination fbo
glBindFramebuffer(GL_FRAMEBUFFER, fbo[1]);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, targetTexture, 0);
// maintain aspect ratio, filling the width first if possible. If that makes the height too
// much, fill height instead. TODO: only do this when texture changes
GLint newX = 0;
GLint newY = 0;
float aspectRatio = (float)texHeight / (float)texWidth;
GLint newWidth = target->width();
GLint newHeight = std::round(aspectRatio * (float) target->width());
if (newHeight > target->height()) {
newHeight = target->height();
newWidth = std::round((float)target->height() / aspectRatio);
newX = (target->width() - newWidth) / 2;
} else {
newY = (target->height() - newHeight) / 2;
}
glBlitNamedFramebuffer(fbo[0], fbo[1], 0, 0, texWidth, texHeight, newX, newY, newWidth, newHeight, GL_DEPTH_BUFFER_BIT|GL_COLOR_BUFFER_BIT, GL_NEAREST);
// don't delete the textures!
glDeleteFramebuffers(2, fbo);
*fenceSync = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
});
}
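// To illustrate the letterboxing math above with hypothetical numbers: a 1024x512 source copied into a
// 300x250 target gives aspectRatio = 512 / 1024 = 0.5 and newHeight = round(0.5 * 300) = 150, which fits
// inside the 250-pixel height, so the image fills the full 300-pixel width and is centered vertically at
// newY = (250 - 150) / 2 = 50. A taller-than-wide source (e.g. 512x1024) would instead overflow the height,
// so the height is filled and the image is centered horizontally via newX.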

OpenGLDisplayPlugin.h

@ -38,7 +38,7 @@ protected:
using Condition = std::condition_variable;
public:
~OpenGLDisplayPlugin();
// These must be final to ensure proper ordering of operations
// These must be final to ensure proper ordering of operations
// between the main thread and the presentation thread
bool activate() override final;
void deactivate() override final;
@ -79,6 +79,8 @@ public:
// Three threads, one for rendering, one for texture transfers, one reserved for the GL driver
int getRequiredThreadCount() const override { return 3; }
void copyTextureToQuickFramebuffer(NetworkTexturePointer source, QOpenGLFramebufferObject* target, GLsync* fenceSync) override;
protected:
friend class PresentThread;
@ -103,7 +105,7 @@ protected:
// Returns true on successful activation
virtual bool internalActivate() { return true; }
virtual void internalDeactivate() {}
// Returns true on successful activation of standby session
virtual bool activateStandBySession() { return true; }
virtual void deactivateSession() {}

TextureCache.cpp

@ -223,7 +223,7 @@ gpu::TexturePointer TextureCache::cacheTextureByHash(const std::string& hash, co
gpu::TexturePointer getFallbackTextureForType(image::TextureUsage::Type type) {
gpu::TexturePointer result;
auto textureCache = DependencyManager::get<TextureCache>();
// Since this can be called on a background thread, there's a chance that the cache
// Since this can be called on a background thread, there's a chance that the cache
// will be destroyed by the time we request it
if (!textureCache) {
return result;
@ -373,7 +373,7 @@ void NetworkTexture::makeRequest() {
if (!_sourceIsKTX) {
Resource::makeRequest();
return;
}
}
// We special-handle ktx requests to run 2 concurrent requests right off the bat
PROFILE_ASYNC_BEGIN(resource, "Resource:" + getType(), QString::number(_requestID), { { "url", _url.toString() }, { "activeURL", _activeUrl.toString() } });
@ -912,7 +912,7 @@ void ImageReader::read() {
}
}
// If we found the texture either because it's in use or via KTX deserialization,
// If we found the texture either because it's in use or via KTX deserialization,
// set the image and return immediately.
if (texture) {
QMetaObject::invokeMethod(resource.data(), "setImage",
@ -961,7 +961,7 @@ void ImageReader::read() {
qCWarning(modelnetworking) << "Unable to serialize texture to KTX " << _url;
}
// We replace the texture with the one stored in the cache. This deals with the possible race condition of two different
// We replace the texture with the one stored in the cache. This deals with the possible race condition of two different
// images with the same hash being loaded concurrently. Only one of them will make it into the cache by hash first and will
// be the winner
texture = textureCache->cacheTextureByHash(hash, texture);
@ -980,10 +980,12 @@ NetworkTexturePointer TextureCache::getResourceTexture(QUrl resourceTextureUrl)
if (!_spectatorCameraNetworkTexture) {
_spectatorCameraNetworkTexture.reset(new NetworkTexture(resourceTextureUrl));
}
texture = _spectatorCameraFramebuffer->getRenderBuffer(0);
if (texture) {
_spectatorCameraNetworkTexture->setImage(texture, texture->getWidth(), texture->getHeight());
return _spectatorCameraNetworkTexture;
if (_spectatorCameraFramebuffer) {
texture = _spectatorCameraFramebuffer->getRenderBuffer(0);
if (texture) {
_spectatorCameraNetworkTexture->setImage(texture, texture->getWidth(), texture->getHeight());
return _spectatorCameraNetworkTexture;
}
}
}

DisplayPlugin.h

@ -22,9 +22,10 @@
#include <RegisteredMetaTypes.h>
#include <shared/Bilateral.h>
#include <gpu/Forward.h>
#include "Plugin.h"
class QOpenGLFramebufferObject;
class QImage;
enum Eye {
@ -60,8 +61,12 @@ namespace gpu {
using TexturePointer = std::shared_ptr<Texture>;
}
class NetworkTexture;
using NetworkTexturePointer = QSharedPointer<NetworkTexture>;
typedef struct __GLsync *GLsync;
// Stereo display functionality
// TODO move out of this file don't derive DisplayPlugin from this. Instead use dynamic casting when
// TODO move out of this file don't derive DisplayPlugin from this. Instead use dynamic casting when
// displayPlugin->isStereo returns true
class StereoDisplay {
public:
@ -78,7 +83,7 @@ public:
};
// HMD display functionality
// TODO move out of this file don't derive DisplayPlugin from this. Instead use dynamic casting when
// TODO move out of this file don't derive DisplayPlugin from this. Instead use dynamic casting when
// displayPlugin->isHmd returns true
class HmdDisplay : public StereoDisplay {
public:
@ -142,7 +147,7 @@ public:
virtual float getTargetFrameRate() const { return 1.0f; }
virtual bool hasAsyncReprojection() const { return false; }
/// Returns a boolean value indicating whether the display is currently visible
/// Returns a boolean value indicating whether the display is currently visible
/// to the user. For monitor displays, false might indicate that a screensaver,
/// or power-save mode is active. For HMDs it may reflect a sensor indicating
/// whether the HMD is being worn
@ -204,10 +209,12 @@ public:
// Rate at which rendered frames are being skipped
virtual float droppedFrameRate() const { return -1.0f; }
virtual bool getSupportsAutoSwitch() { return false; }
// Hardware specific stats
virtual QJsonObject getHardwareStats() const { return QJsonObject(); }
virtual void copyTextureToQuickFramebuffer(NetworkTexturePointer source, QOpenGLFramebufferObject* target, GLsync* fenceSync) = 0;
uint32_t presentCount() const { return _presentedFrameIndex; }
// Time since last call to incrementPresentCount (only valid if DEBUG_PAINT_DELAY is defined)
int64_t getPaintDelayUsecs() const;