Merge pull request #14811 from jherico/feature/quest_frame_player

Case 20884: Quest frame player and Oculus libraries
Sam Gateau 2019-02-06 13:15:39 -08:00 committed by GitHub
commit 9ff9117a34
84 changed files with 3220 additions and 1306 deletions

View file

@ -80,6 +80,7 @@ endif()
if (ANDROID)
set(GLES_OPTION ON)
set(PLATFORM_QT_COMPONENTS AndroidExtras WebView)
add_definitions(-DHIFI_ANDROID_APP=\"${HIFI_ANDROID_APP}\")
else ()
set(PLATFORM_QT_COMPONENTS WebEngine)
endif ()

View file

@ -3,10 +3,10 @@ apply plugin: 'com.android.application'
android {
signingConfigs {
release {
keyAlias 'key0'
keyPassword 'password'
storeFile file('C:/android/keystore.jks')
storePassword 'password'
storeFile project.hasProperty("HIFI_ANDROID_KEYSTORE") ? file(HIFI_ANDROID_KEYSTORE) : null
storePassword project.hasProperty("HIFI_ANDROID_KEYSTORE_PASSWORD") ? HIFI_ANDROID_KEYSTORE_PASSWORD : ''
keyAlias project.hasProperty("HIFI_ANDROID_KEY_ALIAS") ? HIFI_ANDROID_KEY_ALIAS : ''
keyPassword project.hasProperty("HIFI_ANDROID_KEY_PASSWORD") ? HIFI_ANDROID_KEY_PASSWORD : ''
}
}
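
Note: the hard-coded keystore is replaced by Gradle project properties. A minimal sketch of supplying them via a gradle.properties file (the path and password values here are placeholders, not part of this change; 'key0' reuses the old alias):

HIFI_ANDROID_KEYSTORE=/path/to/keystore.jks
HIFI_ANDROID_KEYSTORE_PASSWORD=...
HIFI_ANDROID_KEY_ALIAS=key0
HIFI_ANDROID_KEY_PASSWORD=...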

View file

@ -1,42 +0,0 @@
//
// InterfaceActivity.java
// gvr-interface/java
//
// Created by Stephen Birarda on 1/26/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
package io.highfidelity.gvrinterface;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.view.View;
import android.view.WindowManager;
import android.util.Log;
import org.qtproject.qt5.android.bindings.QtActivity;
public class InterfaceActivity extends QtActivity {
public static native void handleHifiURL(String hifiURLString);
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
// Get the intent that started this activity in case we have a hifi:// URL to parse
Intent intent = getIntent();
if (intent.getAction() == Intent.ACTION_VIEW) {
Uri data = intent.getData();
if (data.getScheme().equals("hifi")) {
handleHifiURL(data.toString());
}
}
}
}

View file

@ -31,6 +31,7 @@ import android.view.WindowManager;
import android.widget.FrameLayout;
import android.widget.SlidingDrawer;
import org.qtproject.qt5.android.QtNative;
import org.qtproject.qt5.android.QtLayout;
import org.qtproject.qt5.android.QtSurface;
import org.qtproject.qt5.android.bindings.QtActivity;
@ -166,8 +167,27 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
@Override
protected void onDestroy() {
super.onDestroy();
nativeOnDestroy();
/*
cduarte https://highfidelity.manuscript.com/f/cases/16712/App-freezes-on-opening-randomly
After the Qt upgrade to 5.11 we had a black-screen crash when closing the application with
the hardware "Back" button and then trying to start the app again. It could only be fixed by
completely closing the app, swiping it away in the list of running apps.
This problem did not happen with the previous Qt version.
After analysing the changes we tracked it down to this case and change:
https://codereview.qt-project.org/#/c/218882/
In summary, they moved library loading to the same thread as main(), and for correctness the
onDestroy method in QtActivityDelegate now exits that thread with `QtNative.m_qtThread.exit();`.
That exit call is the main cause of this problem.
In this fix we replace the `QtApplication.invokeDelegate();` call, which may end up running the
entire onDestroy method including that thread-exit line, with three other lines that purposely
terminate Qt (also borrowed from QtActivityDelegate::onDestroy).
*/
QtNative.terminateQt();
QtNative.setActivity(null, null);
System.exit(0);
super.onDestroy();
}
@Override

View file

@ -0,0 +1,9 @@
set(TARGET_NAME questFramePlayer)
setup_hifi_library(AndroidExtras)
link_hifi_libraries(shared ktx shaders gpu gl oculusMobile ${PLATFORM_GL_BACKEND})
target_include_directories(${TARGET_NAME} PRIVATE ${HIFI_ANDROID_PRECOMPILED}/ovr/VrApi/Include)
target_link_libraries(${TARGET_NAME} android log m)
target_opengl()
target_oculus_mobile()

View file

@ -0,0 +1,51 @@
apply plugin: 'com.android.application'
android {
signingConfigs {
release {
storeFile project.hasProperty("HIFI_ANDROID_KEYSTORE") ? file(HIFI_ANDROID_KEYSTORE) : null
storePassword project.hasProperty("HIFI_ANDROID_KEYSTORE_PASSWORD") ? HIFI_ANDROID_KEYSTORE_PASSWORD : ''
keyAlias project.hasProperty("HIFI_ANDROID_KEY_ALIAS") ? HIFI_ANDROID_KEY_ALIAS : ''
keyPassword project.hasProperty("HIFI_ANDROID_KEY_PASSWORD") ? HIFI_ANDROID_KEY_PASSWORD : ''
}
}
compileSdkVersion 28
defaultConfig {
applicationId "io.highfidelity.frameplayer"
minSdkVersion 25
targetSdkVersion 28
ndk { abiFilters 'arm64-v8a' }
externalNativeBuild {
cmake {
arguments '-DHIFI_ANDROID=1',
'-DHIFI_ANDROID_APP=questFramePlayer',
'-DANDROID_TOOLCHAIN=clang',
'-DANDROID_STL=c++_shared',
'-DCMAKE_VERBOSE_MAKEFILE=ON'
targets = ['questFramePlayer']
}
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
signingConfig signingConfigs.release
}
}
externalNativeBuild.cmake.path '../../../CMakeLists.txt'
}
dependencies {
implementation fileTree(include: ['*.jar'], dir: '../../libraries/qt/libs')
implementation project(':oculus')
implementation project(':qt')
}

View file

@ -0,0 +1,25 @@
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in C:\Android\SDK/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View file

@ -0,0 +1,55 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
package="io.highfidelity.frameplayer"
android:versionCode="1"
android:versionName="1.0"
android:installLocation="auto">
<uses-feature android:glEsVersion="0x00030002" android:required="true" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-feature android:name="android.hardware.sensor.accelerometer" android:required="true"/>
<uses-feature android:name="android.hardware.sensor.gyroscope" android:required="true"/>
<uses-feature android:name="android.software.vr.mode" android:required="true"/>
<uses-feature android:name="android.hardware.vr.high_performance" android:required="true"/>
<application android:label="Frame Viewer"
android:allowBackup="false"
android:name="org.qtproject.qt5.android.bindings.QtApplication"
tools:ignore="GoogleAppIndexingWarning,MissingApplicationIcon">
<meta-data android:name="com.samsung.android.vr.application.mode" android:value="vr_only"/>
<activity
android:name=".QuestQtActivity"
android:theme="@android:style/Theme.Black.NoTitleBar.Fullscreen"
android:launchMode="singleTask"
android:label="@string/app_name"
android:screenOrientation="landscape"
android:excludeFromRecents="false"
android:alwaysRetainTaskState="true"
android:configChanges="screenSize|screenLayout|orientation|keyboardHidden|keyboard|navigation|uiMode"
>
<!-- JNI nonsense -->
<meta-data android:name="android.app.lib_name" android:value="questFramePlayer"/>
<!-- Qt nonsense -->
<meta-data android:name="android.app.qt_libs_resource_id" android:resource="@array/qt_libs"/>
<meta-data android:name="android.app.bundled_in_lib_resource_id" android:resource="@array/bundled_in_lib"/>
<meta-data android:name="android.app.bundled_in_assets_resource_id" android:resource="@array/bundled_in_assets"/>
<meta-data android:name="android.app.load_local_libs" android:value="plugins/platforms/android/libqtforandroid.so:plugins/bearer/libqandroidbearer.so:lib/libQt5QuickParticles.so"/>
</activity>
<activity
android:theme="@android:style/Theme.Black.NoTitleBar.Fullscreen"
android:configChanges="screenSize|screenLayout|orientation|keyboardHidden|keyboard|navigation|uiMode"
android:name=".QuestRenderActivity"
android:label="Frame Player"
android:launchMode="singleInstance"
android:screenOrientation="landscape"
android:excludeFromRecents="false">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View file

@ -0,0 +1,25 @@
//
// Created by Bradley Austin Davis on 2018/10/21
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "PlayerWindow.h"
#include <QtWidgets/QFileDialog>
PlayerWindow::PlayerWindow() {
installEventFilter(this);
setFlags(Qt::MSWindowsOwnDC | Qt::Window | Qt::Dialog | Qt::WindowMinMaxButtonsHint | Qt::WindowTitleHint);
setSurfaceType(QSurface::OpenGLSurface);
create();
showFullScreen();
// Ensure the window is visible and the GL context is valid
QCoreApplication::processEvents();
_renderThread.initialize(this);
}
PlayerWindow::~PlayerWindow() {
}

View file

@ -0,0 +1,29 @@
//
// Created by Bradley Austin Davis on 2018/10/21
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include <QtGui/QWindow>
#include <QtCore/QSettings>
#include <gpu/Forward.h>
#include "RenderThread.h"
// Create a simple window with an OpenGL context, used to replay serialized GPU frames
class PlayerWindow : public QWindow {
public:
PlayerWindow();
virtual ~PlayerWindow();
protected:
//bool eventFilter(QObject* obj, QEvent* event) override;
//void keyPressEvent(QKeyEvent* event) override;
private:
QSettings _settings;
RenderThread _renderThread;
};

View file

@ -0,0 +1,240 @@
//
// Created by Bradley Austin Davis on 2018/10/21
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "RenderThread.h"
#include <mutex>
#include <jni.h>
#include <android/log.h>
#include <QtCore/QFileInfo>
#include <QtGui/QWindow>
#include <QtGui/QImageReader>
#include <gl/QOpenGLContextWrapper.h>
#include <gpu/FrameIO.h>
#include <gpu/Texture.h>
#include <VrApi_Types.h>
#include <VrApi_Helpers.h>
#include <ovr/VrHandler.h>
#include <ovr/Helpers.h>
#include <VrApi.h>
#include <VrApi_Input.h>
static JNIEnv* _env { nullptr };
static JavaVM* _vm { nullptr };
static jobject _activity { nullptr };
struct HandController{
ovrInputTrackedRemoteCapabilities caps {};
ovrInputStateTrackedRemote state {};
ovrResult stateResult{ ovrSuccess };
ovrTracking tracking {};
ovrResult trackingResult{ ovrSuccess };
void update(ovrMobile* session, double time = 0.0) {
const auto& deviceId = caps.Header.DeviceID;
stateResult = vrapi_GetCurrentInputState(session, deviceId, &state.Header);
trackingResult = vrapi_GetInputTrackingState(session, deviceId, 0.0, &tracking);
}
};
std::vector<HandController> devices;
extern "C" {
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *, void *) {
__android_log_write(ANDROID_LOG_WARN, "QQQ", __FUNCTION__);
return JNI_VERSION_1_6;
}
JNIEXPORT void JNICALL Java_io_highfidelity_frameplayer_QuestQtActivity_nativeOnCreate(JNIEnv* env, jobject obj) {
env->GetJavaVM(&_vm);
_activity = env->NewGlobalRef(obj);
}
}
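// Note: on Android, Qt's "assets:" file engine resolves this path to a file bundled in the APK's assets.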
static const char* FRAME_FILE = "assets:/frames/20190121_1220.json";
static void textureLoader(const std::string& filename, const gpu::TexturePointer& texture, uint16_t layer) {
QImage image;
QImageReader(filename.c_str()).read(&image);
if (layer > 0) {
return;
}
texture->assignStoredMip(0, image.byteCount(), image.constBits());
}
void RenderThread::submitFrame(const gpu::FramePointer& frame) {
std::unique_lock<std::mutex> lock(_frameLock);
_pendingFrames.push(frame);
}
void RenderThread::move(const glm::vec3& v) {
std::unique_lock<std::mutex> lock(_frameLock);
_correction = glm::inverse(glm::translate(mat4(), v)) * _correction;
}
void RenderThread::initialize(QWindow* window) {
std::unique_lock<std::mutex> lock(_frameLock);
setObjectName("RenderThread");
Parent::initialize();
_window = window;
_thread->setObjectName("RenderThread");
}
void RenderThread::setup() {
// Wait until the context has been moved to this thread
{ std::unique_lock<std::mutex> lock(_frameLock); }
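// (Taking and immediately releasing _frameLock blocks until initialize(), which holds the same
// lock while the thread is brought up, has completed on the calling thread.)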
ovr::VrHandler::initVr();
__android_log_write(ANDROID_LOG_WARN, "QQQ", "Launching oculus activity");
_vm->AttachCurrentThread(&_env, nullptr);
jclass cls = _env->GetObjectClass(_activity);
jmethodID mid = _env->GetMethodID(cls, "launchOculusActivity", "()V");
_env->CallVoidMethod(_activity, mid);
__android_log_write(ANDROID_LOG_WARN, "QQQ", "Launching oculus activity done");
ovr::VrHandler::setHandler(this);
makeCurrent();
// GPU library init
gpu::Context::init<gpu::gl::GLBackend>();
_gpuContext = std::make_shared<gpu::Context>();
_backend = _gpuContext->getBackend();
_gpuContext->beginFrame();
_gpuContext->endFrame();
makeCurrent();
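// Create a 1x1 placeholder texture; it is passed to gpu::readFrame below, presumably to back
// external texture references in the captured frame.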
glGenTextures(1, &_externalTexture);
glBindTexture(GL_TEXTURE_2D, _externalTexture);
static const glm::u8vec4 color{ 0,1,0,0 };
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, 1, 1, 0, GL_RGBA, GL_UNSIGNED_BYTE, &color);
if (QFileInfo(FRAME_FILE).exists()) {
auto frame = gpu::readFrame(FRAME_FILE, _externalTexture, &textureLoader);
submitFrame(frame);
}
}
void RenderThread::shutdown() {
_activeFrame.reset();
while (!_pendingFrames.empty()) {
_gpuContext->consumeFrameUpdates(_pendingFrames.front());
_pendingFrames.pop();
}
_gpuContext->shutdown();
_gpuContext.reset();
}
void RenderThread::handleInput() {
static std::once_flag once;
std::call_once(once, [&]{
withOvrMobile([&](ovrMobile* session){
int deviceIndex = 0;
ovrInputCapabilityHeader capsHeader;
while (vrapi_EnumerateInputDevices(session, deviceIndex, &capsHeader) >= 0) {
if (capsHeader.Type == ovrControllerType_TrackedRemote) {
HandController controller = {};
controller.caps.Header = capsHeader;
controller.state.Header.ControllerType = ovrControllerType_TrackedRemote;
vrapi_GetInputDeviceCapabilities( session, &controller.caps.Header);
devices.push_back(controller);
}
++deviceIndex;
}
});
});
auto readResult = ovr::VrHandler::withOvrMobile([&](ovrMobile *session) {
for (auto &controller : devices) {
controller.update(session);
}
});
if (readResult) {
for (auto &controller : devices) {
const auto &caps = controller.caps;
if (controller.stateResult >= 0) {
const auto &remote = controller.state;
if (remote.Joystick.x != 0.0f || remote.Joystick.y != 0.0f) {
glm::vec3 translation;
float rotation = 0.0f;
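// note: rotation is declared here but never applied below; only translation is used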
if (caps.ControllerCapabilities & ovrControllerCaps_LeftHand) {
translation = glm::vec3{0.0f, -remote.Joystick.y, 0.0f};
} else {
translation = glm::vec3{remote.Joystick.x, 0.0f, -remote.Joystick.y};
}
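// GripTrigger ranges over [0, 1], so scale runs from 0.1 (released) to 2.0 (fully squeezed)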
float scale = 0.1f + (1.9f * remote.GripTrigger);
_correction = glm::translate(glm::mat4(), translation * scale) * _correction;
}
}
}
}
}
void RenderThread::renderFrame() {
GLuint finalTexture = 0;
glm::uvec2 finalTextureSize;
const auto& tracking = beginFrame();
if (_activeFrame) {
const auto& frame = _activeFrame;
auto& eyeProjections = frame->stereoState._eyeProjections;
auto& eyeOffsets = frame->stereoState._eyeViews;
// Quest
auto frameCorrection = _correction * ovr::toGlm(tracking.HeadPose.Pose);
_backend->setCameraCorrection(glm::inverse(frameCorrection), frame->view);
ovr::for_each_eye([&](ovrEye eye){
const auto& eyeInfo = tracking.Eye[eye];
eyeProjections[eye] = ovr::toGlm(eyeInfo.ProjectionMatrix);
eyeOffsets[eye] = ovr::toGlm(eyeInfo.ViewMatrix);
});
_backend->recycle();
_backend->syncCache();
_gpuContext->enableStereo(true);
if (frame && !frame->batches.empty()) {
_gpuContext->executeFrame(frame);
}
auto& glbackend = (gpu::gl::GLBackend&)(*_backend);
finalTextureSize = { frame->framebuffer->getWidth(), frame->framebuffer->getHeight() };
finalTexture = glbackend.getTextureID(frame->framebuffer->getRenderBuffer(0));
}
presentFrame(finalTexture, finalTextureSize, tracking);
}
bool RenderThread::process() {
pollTask();
if (!vrActive()) {
QThread::msleep(1);
return true;
}
std::queue<gpu::FramePointer> pendingFrames;
{
std::unique_lock<std::mutex> lock(_frameLock);
pendingFrames.swap(_pendingFrames);
}
makeCurrent();
while (!pendingFrames.empty()) {
_activeFrame = pendingFrames.front();
pendingFrames.pop();
_gpuContext->consumeFrameUpdates(_activeFrame);
_activeFrame->stereoState._enable = true;
}
handleInput();
renderFrame();
return true;
}

View file

@ -0,0 +1,44 @@
//
// Created by Bradley Austin Davis on 2018/10/21
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include <QtCore/QElapsedTimer>
#include <GenericThread.h>
#include <shared/RateCounter.h>
#include <gl/Config.h>
#include <gl/Context.h>
#include <gpu/gl/GLBackend.h>
#include <ovr/VrHandler.h>
class RenderThread : public GenericThread, ovr::VrHandler {
using Parent = GenericThread;
public:
QWindow* _window{ nullptr };
std::mutex _mutex;
gpu::ContextPointer _gpuContext; // initialized during window creation
std::shared_ptr<gpu::Backend> _backend;
std::atomic<size_t> _presentCount{ 0 };
std::mutex _frameLock;
std::queue<gpu::FramePointer> _pendingFrames;
gpu::FramePointer _activeFrame;
uint32_t _externalTexture{ 0 };
glm::mat4 _correction;
void move(const glm::vec3& v);
void setup() override;
bool process() override;
void shutdown() override;
void handleInput();
void submitFrame(const gpu::FramePointer& frame);
void initialize(QWindow* window);
void renderFrame();
};

View file

@ -0,0 +1,56 @@
//
// Created by Bradley Austin Davis on 2018/11/22
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <android/log.h>
#include <QtGui/QGuiApplication>
#include <QtCore/QTimer>
#include <QtCore/QFileInfo>
#include <Trace.h>
#include "PlayerWindow.h"
void messageHandler(QtMsgType type, const QMessageLogContext& context, const QString& message) {
if (!message.isEmpty()) {
// Keep the std::string alive for the whole switch: calling c_str() on the temporary
// returned by toStdString() would leave `local` dangling.
const std::string local = message.toStdString();
switch (type) {
case QtDebugMsg:
__android_log_write(ANDROID_LOG_DEBUG, "Interface", local.c_str());
break;
case QtInfoMsg:
__android_log_write(ANDROID_LOG_INFO, "Interface", local.c_str());
break;
case QtWarningMsg:
__android_log_write(ANDROID_LOG_WARN, "Interface", local.c_str());
break;
case QtCriticalMsg:
__android_log_write(ANDROID_LOG_ERROR, "Interface", local.c_str());
break;
case QtFatalMsg:
default:
__android_log_write(ANDROID_LOG_FATAL, "Interface", local.c_str());
abort();
}
}
}
int main(int argc, char** argv) {
setupHifiApplication("gpuFramePlayer");
QGuiApplication app(argc, argv);
auto oldMessageHandler = qInstallMessageHandler(messageHandler);
DependencyManager::set<tracing::Tracer>();
PlayerWindow window;
__android_log_write(ANDROID_LOG_FATAL,"QQQ","Exec");
app.exec();
__android_log_write(ANDROID_LOG_FATAL,"QQQ","Exec done");
qInstallMessageHandler(oldMessageHandler);
return 0;
}

View file

@ -0,0 +1,53 @@
//
// Created by Bradley Austin Davis on 2018/11/20
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
package io.highfidelity.frameplayer;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import org.qtproject.qt5.android.bindings.QtActivity;
import io.highfidelity.oculus.OculusMobileActivity;
public class QuestQtActivity extends QtActivity {
private native void nativeOnCreate();
private boolean launchedQuestMode = false;
@Override
public void onCreate(Bundle savedInstanceState) {
Log.w("QQQ_Qt", "QuestQtActivity::onCreate");
super.onCreate(savedInstanceState);
nativeOnCreate();
}
@Override
public void onDestroy() {
Log.w("QQQ_Qt", "QuestQtActivity::onDestroy");
super.onDestroy();
}
public void launchOculusActivity() {
Log.w("QQQ_Qt", "QuestQtActivity::launchOculusActivity");
runOnUiThread(()->{
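// Keep the Qt app alive in the background while the native render activity takes the foreground.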
keepInterfaceRunning = true;
launchedQuestMode = true;
moveTaskToBack(true);
startActivity(new Intent(this, QuestRenderActivity.class));
});
}
@Override
public void onResume() {
super.onResume();
if (launchedQuestMode) {
moveTaskToBack(true);
}
}
}

View file

@ -0,0 +1,14 @@
package io.highfidelity.frameplayer;
import android.content.Intent;
import android.os.Bundle;
import io.highfidelity.oculus.OculusMobileActivity;
public class QuestRenderActivity extends OculusMobileActivity {
@Override
public void onCreate(Bundle savedState) {
super.onCreate(savedState);
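// Kick the (singleTask) Qt activity so it keeps running behind this one; its onResume
// immediately sends it back behind us again (see QuestQtActivity.onResume).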
startActivity(new Intent(this, QuestQtActivity.class));
}
}

View file

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="utf-8"?>
<!--suppress AndroidUnknownAttribute -->
<vector xmlns:api24="http://schemas.android.com/apk/res/android" xmlns:android="http://schemas.android.com/apk/res/android"
android:viewportWidth="192"
android:viewportHeight="192"
android:width="192dp"
android:height="192dp">
<path
android:pathData="M189.5 96.5A93.5 93.5 0 0 1 96 190 93.5 93.5 0 0 1 2.5 96.5 93.5 93.5 0 0 1 96 3 93.5 93.5 0 0 1 189.5 96.5Z"
android:fillColor="#333333" />
<path
android:pathData="M96.2 173.1c-10.3 0 -20.4 -2.1 -29.8 -6 -9.2 -3.8 -17.3 -9.4 -24.3 -16.4 -7 -7 -12.6 -15.2 -16.4 -24.3 -4.1 -9.6 -6.2 -19.6 -6.2 -30 0 -10.3 2.1 -20.4 6 -29.8 3.8 -9.2 9.4 -17.3 16.4 -24.3 7 -7 15.2 -12.6 24.3 -16.4 9.5 -4 19.5 -6 29.8 -6 10.3 0 20.4 2.1 29.8 6 9.2 3.8 17.3 9.4 24.3 16.4 7 7 12.6 15.2 16.4 24.3 4 9.5 6 19.5 6 29.8 0 10.3 -2.1 20.4 -6 29.8 -3.8 9.2 -9.4 17.3 -16.4 24.3 -7 7 -15.2 12.6 -24.3 16.4 -9.2 4.1 -19.3 6.2 -29.6 6.2zm0 -145.3c-37.8 0 -68.6 30.8 -68.6 68.6 0 37.8 30.8 68.6 68.6 68.6 37.8 0 68.6 -30.8 68.6 -68.6 0 -37.8 -30.8 -68.6 -68.6 -68.6z"
android:fillColor="#00b4f0" />
<path
android:pathData="M119.6 129l0 -53.8c3.4 -1.1 5.8 -4.3 5.8 -8 0 -4.6 -3.8 -8.4 -8.4 -8.4 -4.6 0 -8.4 3.8 -8.4 8.4 0 3.6 2.2 6.6 5.4 7.9l0 25L79 83.8 79 64c3.4 -1.1 5.8 -4.3 5.8 -8 0 -4.6 -3.8 -8.4 -8.4 -8.4 -4.6 0 -8.4 3.8 -8.4 8.4 0 3.6 2.2 6.6 5.4 7.9l0 54.1c-3.1 1.2 -5.4 4.3 -5.4 7.9 0 4.6 3.8 8.4 8.4 8.4 4.6 0 8.4 -3.8 8.4 -8.4 0 -3.7 -2.4 -6.9 -5.8 -8l0 -27.3 35 16.3 0 22.2c-3.1 1.2 -5.4 4.3 -5.4 7.9 0 4.6 3.8 8.4 8.4 8.4 4.6 0 8.4 -3.8 8.4 -8.4 0 -3.8 -2.4 -6.9 -5.8 -8z"
android:fillColor="#00b4f0" />
</vector>

View file

@ -0,0 +1,3 @@
<resources>
<string name="app_name" translatable="false">GPU Frame Player</string>
</resources>

View file

@ -73,13 +73,10 @@ RUN mkdir "$HIFI_BASE" && \
RUN git clone https://github.com/jherico/hifi.git && \
cd ~/hifi && \
git checkout feature/quest_move_interface
git checkout feature/quest_frame_player
WORKDIR /home/jenkins/hifi
RUN touch .test6 && \
git fetch && git reset origin/feature/quest_move_interface --hard
RUN mkdir build
# Pre-cache the vcpkg managed dependencies

View file

@ -0,0 +1,17 @@
apply plugin: 'com.android.library'
android {
compileSdkVersion 28
defaultConfig {
minSdkVersion 24
targetSdkVersion 28
versionCode 1
versionName "1.0"
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}

View file

@ -0,0 +1,2 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="io.highfidelity.shared.oculus"/>

View file

@ -0,0 +1,103 @@
//
// Created by Bradley Austin Davis on 2018/11/20
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
package io.highfidelity.oculus;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.WindowManager;
/**
* Contains a native surface and forwards the activity lifecycle and surface lifecycle
* events to the OculusMobileDisplayPlugin
*/
public class OculusMobileActivity extends Activity implements SurfaceHolder.Callback {
private static final String TAG = OculusMobileActivity.class.getSimpleName();
static { System.loadLibrary("oculusMobile"); }
private native void nativeOnCreate();
private native static void nativeOnResume();
private native static void nativeOnPause();
private native static void nativeOnDestroy();
private native static void nativeOnSurfaceChanged(Surface s);
private SurfaceView mView;
private SurfaceHolder mSurfaceHolder;
public static void launch(Activity activity) {
if (activity != null) {
activity.runOnUiThread(()->{
activity.startActivity(new Intent(activity, OculusMobileActivity.class));
});
}
}
@Override
public void onCreate(Bundle savedInstanceState) {
Log.w(TAG, "QQQ onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
// Create a native surface for VR rendering (Qt GL surfaces are not suitable
// because of the lack of fine control over the surface callbacks)
mView = new SurfaceView(this);
setContentView(mView);
mView.getHolder().addCallback(this);
// Forward the create message to the JNI code
nativeOnCreate();
}
@Override
protected void onDestroy() {
Log.w(TAG, "QQQ onDestroy");
if (mSurfaceHolder != null) {
nativeOnSurfaceChanged(null);
}
nativeOnDestroy();
super.onDestroy();
}
@Override
protected void onResume() {
Log.w(TAG, "QQQ onResume");
super.onResume();
nativeOnResume();
}
@Override
protected void onPause() {
Log.w(TAG, "QQQ onPause");
nativeOnPause();
super.onPause();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.w(TAG, "QQQ surfaceCreated");
nativeOnSurfaceChanged(holder.getSurface());
mSurfaceHolder = holder;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.w(TAG, "QQQ surfaceChanged");
nativeOnSurfaceChanged(holder.getSurface());
mSurfaceHolder = holder;
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.w(TAG, "QQQ surfaceDestroyed");
nativeOnSurfaceChanged(null);
mSurfaceHolder = null;
}
}

View file

@ -364,25 +364,7 @@ public class QtActivity extends Activity {
@Override
protected void onDestroy() {
super.onDestroy();
/*
cduarte https://highfidelity.manuscript.com/f/cases/16712/App-freezes-on-opening-randomly
After the Qt upgrade to 5.11 we had a black-screen crash when closing the application with
the hardware "Back" button and then trying to start the app again. It could only be fixed by
completely closing the app, swiping it away in the list of running apps.
This problem did not happen with the previous Qt version.
After analysing the changes we tracked it down to this case and change:
https://codereview.qt-project.org/#/c/218882/
In summary, they moved library loading to the same thread as main(), and for correctness the
onDestroy method in QtActivityDelegate now exits that thread with `QtNative.m_qtThread.exit();`.
That exit call is the main cause of this problem.
In this fix we replace the `QtApplication.invokeDelegate();` call, which may end up running the
entire onDestroy method including that thread-exit line, with three other lines that purposely
terminate Qt (also borrowed from QtActivityDelegate::onDestroy).
*/
QtNative.terminateQt();
QtNative.setActivity(null, null);
System.exit(0);
QtApplication.invokeDelegate();
}
//---------------------------------------------------------------------------

View file

@ -1,8 +1,26 @@
//
// Libraries
//
include ':oculus'
project(':oculus').projectDir = new File(settingsDir, 'libraries/oculus')
include ':qt'
project(':qt').projectDir = new File(settingsDir, 'libraries/qt')
//
// Applications
//
include ':interface'
project(':interface').projectDir = new File(settingsDir, 'apps/interface')
//include ':framePlayer'
//project(':framePlayer').projectDir = new File(settingsDir, 'apps/framePlayer')
//
// Test projects
//
include ':framePlayer'
project(':framePlayer').projectDir = new File(settingsDir, 'apps/framePlayer')
include ':questFramePlayer'
project(':questFramePlayer').projectDir = new File(settingsDir, 'apps/questFramePlayer')

View file

@ -10,5 +10,5 @@
#
macro(include_hifi_library_headers LIBRARY)
include_directories("${HIFI_LIBRARY_DIR}/${LIBRARY}/src")
target_include_directories(${TARGET_NAME} PRIVATE "${HIFI_LIBRARY_DIR}/${LIBRARY}/src")
endmacro(include_hifi_library_headers _library _root_dir)

View file

@ -19,8 +19,8 @@ function(LINK_HIFI_LIBRARIES)
endforeach()
foreach(HIFI_LIBRARY ${LIBRARIES_TO_LINK})
include_directories("${HIFI_LIBRARY_DIR}/${HIFI_LIBRARY}/src")
include_directories("${CMAKE_BINARY_DIR}/libraries/${HIFI_LIBRARY}")
target_include_directories(${TARGET_NAME} PRIVATE "${HIFI_LIBRARY_DIR}/${HIFI_LIBRARY}/src")
target_include_directories(${TARGET_NAME} PRIVATE "${CMAKE_BINARY_DIR}/libraries/${HIFI_LIBRARY}")
# link the actual library - it is static so don't bubble it up
target_link_libraries(${TARGET_NAME} ${HIFI_LIBRARY})
endforeach()

View file

@ -0,0 +1,4 @@
macro(target_egl)
find_library(EGL EGL)
target_link_libraries(${TARGET_NAME} ${EGL})
endmacro()

View file

@ -0,0 +1,20 @@
macro(target_oculus_mobile)
set(INSTALL_DIR ${HIFI_ANDROID_PRECOMPILED}/oculus/VrApi)
# Mobile SDK
set(OVR_MOBILE_INCLUDE_DIRS ${INSTALL_DIR}/Include)
target_include_directories(${TARGET_NAME} PRIVATE ${OVR_MOBILE_INCLUDE_DIRS})
set(OVR_MOBILE_LIBRARY_DIR ${INSTALL_DIR}/Libs/Android/arm64-v8a)
set(OVR_MOBILE_LIBRARY_RELEASE ${OVR_MOBILE_LIBRARY_DIR}/Release/libvrapi.so)
set(OVR_MOBILE_LIBRARY_DEBUG ${OVR_MOBILE_LIBRARY_DIR}/Debug/libvrapi.so)
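# select_library_configurations comes from CMake's SelectLibraryConfigurations module; it is
# assumed that the including scope has already done include(SelectLibraryConfigurations).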
select_library_configurations(OVR_MOBILE)
target_link_libraries(${TARGET_NAME} ${OVR_MOBILE_LIBRARIES})
# Platform SDK
set(INSTALL_DIR ${HIFI_ANDROID_PRECOMPILED}/oculusPlatform)
set(OVR_PLATFORM_INCLUDE_DIRS ${INSTALL_DIR}/Include)
target_include_directories(${TARGET_NAME} PRIVATE ${OVR_PLATFORM_INCLUDE_DIRS})
set(OVR_PLATFORM_LIBRARIES ${INSTALL_DIR}/Android/libs/arm64-v8a/libovrplatformloader.so)
target_link_libraries(${TARGET_NAME} ${OVR_PLATFORM_LIBRARIES})
endmacro()

View file

@ -1,85 +0,0 @@
set(TARGET_NAME gvr-interface)
if (ANDROID)
set(ANDROID_APK_BUILD_DIR "${CMAKE_CURRENT_BINARY_DIR}/apk-build")
set(ANDROID_APK_OUTPUT_DIR "${CMAKE_CURRENT_BINARY_DIR}/apk")
set(ANDROID_SDK_ROOT $ENV{ANDROID_HOME})
set(ANDROID_APP_DISPLAY_NAME Interface)
set(ANDROID_API_LEVEL 19)
set(ANDROID_APK_PACKAGE io.highfidelity.gvrinterface)
set(ANDROID_ACTIVITY_NAME io.highfidelity.gvrinterface.InterfaceActivity)
set(ANDROID_APK_VERSION_NAME "0.1")
set(ANDROID_APK_VERSION_CODE 1)
set(ANDROID_APK_FULLSCREEN TRUE)
set(ANDROID_DEPLOY_QT_INSTALL "--install")
set(BUILD_SHARED_LIBS ON)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${ANDROID_APK_OUTPUT_DIR}/libs/${ANDROID_ABI}")
setup_hifi_library(Gui AndroidExtras)
else ()
setup_hifi_project(Gui)
endif ()
include_directories(${Qt5Gui_PRIVATE_INCLUDE_DIRS})
link_hifi_libraries(shared networking audio-client avatars)
if (ANDROID)
find_package(LibOVR)
if (LIBOVR_FOUND)
add_definitions(-DHAVE_LIBOVR)
target_link_libraries(${TARGET_NAME} ${LIBOVR_LIBRARIES} ${LIBOVR_ANDROID_LIBRARIES} ${TURBOJPEG_LIBRARY})
include_directories(SYSTEM ${LIBOVR_INCLUDE_DIRS})
# we need VRLib, so add a project.properties to our apk build folder that says that
file(RELATIVE_PATH RELATIVE_VRLIB_PATH ${ANDROID_APK_OUTPUT_DIR} "${LIBOVR_VRLIB_DIR}")
file(WRITE "${ANDROID_APK_BUILD_DIR}/project.properties" "android.library.reference.1=${RELATIVE_VRLIB_PATH}")
list(APPEND IGNORE_COPY_LIBS ${LIBOVR_ANDROID_LIBRARIES})
endif ()
endif ()
# the presence of a HOCKEY_APP_ID means we are making a beta build
if (ANDROID AND HOCKEY_APP_ID)
set(HOCKEY_APP_ENABLED true)
set(HOCKEY_APP_ACTIVITY "<activity android:name='net.hockeyapp.android.UpdateActivity' />\n")
set(ANDROID_ACTIVITY_NAME io.highfidelity.gvrinterface.InterfaceBetaActivity)
set(ANDROID_DEPLOY_QT_INSTALL "")
set(ANDROID_APK_CUSTOM_NAME "Interface-beta.apk")
# set the ANDROID_APK_VERSION_CODE to the number of git commits
execute_process(
COMMAND git rev-list --first-parent --count HEAD
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE GIT_COMMIT_COUNT
OUTPUT_STRIP_TRAILING_WHITESPACE
)
set(ANDROID_APK_VERSION_CODE ${GIT_COMMIT_COUNT})
configure_file("${CMAKE_CURRENT_SOURCE_DIR}/templates/InterfaceBetaActivity.java.in" "${ANDROID_APK_BUILD_DIR}/src/io/highfidelity/gvrinterface/InterfaceBetaActivity.java")
elseif (ANDROID)
set(HOCKEY_APP_ENABLED false)
endif ()
if (ANDROID)
set(HIFI_URL_INTENT "<intent-filter>\
\n <action android:name='android.intent.action.VIEW' />\
\n <category android:name='android.intent.category.DEFAULT' />\
\n <category android:name='android.intent.category.BROWSABLE' />\
\n <data android:scheme='hifi' />\
\n </intent-filter>"
)
set(ANDROID_EXTRA_APPLICATION_XML "${HOCKEY_APP_ACTIVITY}")
set(ANDROID_EXTRA_ACTIVITY_XML "${HIFI_URL_INTENT}")
configure_file("${CMAKE_CURRENT_SOURCE_DIR}/templates/hockeyapp.xml.in" "${ANDROID_APK_BUILD_DIR}/res/values/hockeyapp.xml")
qt_create_apk()
endif (ANDROID)

Binary file not shown.


View file

@ -1,73 +0,0 @@
//
// Client.cpp
// gvr-interface/src
//
// Created by Stephen Birarda on 1/20/15.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Client.h"
#include <AccountManager.h>
#include <AddressManager.h>
#include <HifiSockAddr.h>
#include <NodeList.h>
#include <PacketHeaders.h>
Client::Client(QObject* parent) :
QObject(parent)
{
// we need to make sure that required dependencies are created
DependencyManager::set<AddressManager>();
setupNetworking();
}
void Client::setupNetworking() {
// once Application order of instantiation is fixed this should be done from AccountManager
AccountManager::getInstance().setAuthURL(DEFAULT_NODE_AUTH_URL);
// setup the NodeList for this client
DependencyManager::registerInheritance<LimitedNodeList, NodeList>();
auto nodeList = DependencyManager::set<NodeList>(NodeType::Agent, 0);
// while datagram processing remains simple for targets using Client, we'll handle datagrams
connect(&nodeList->getNodeSocket(), &QUdpSocket::readyRead, this, &Client::processDatagrams);
// every second, ask the NodeList to check in with the domain server
QTimer* domainCheckInTimer = new QTimer(this);
domainCheckInTimer->setInterval(DOMAIN_SERVER_CHECK_IN_MSECS);
connect(domainCheckInTimer, &QTimer::timeout, nodeList.data(), &NodeList::sendDomainServerCheckIn);
// TODO: once the Client knows its Address on start-up we should be able to immediately send a check in here
domainCheckInTimer->start();
// handle the case where the domain stops talking to us
// TODO: can we just have the nodelist do this when it sets up? Is there a user of the NodeList that wouldn't want this?
connect(nodeList.data(), &NodeList::limitOfSilentDomainCheckInsReached, nodeList.data(), &NodeList::reset);
}
void Client::processVerifiedPacket(const HifiSockAddr& senderSockAddr, const QByteArray& incomingPacket) {
DependencyManager::get<NodeList>()->processNodeData(senderSockAddr, incomingPacket);
}
void Client::processDatagrams() {
HifiSockAddr senderSockAddr;
static QByteArray incomingPacket;
auto nodeList = DependencyManager::get<NodeList>();
while (DependencyManager::get<NodeList>()->getNodeSocket().hasPendingDatagrams()) {
incomingPacket.resize(nodeList->getNodeSocket().pendingDatagramSize());
nodeList->getNodeSocket().readDatagram(incomingPacket.data(), incomingPacket.size(),
senderSockAddr.getAddressPointer(), senderSockAddr.getPortPointer());
if (nodeList->packetVersionAndHashMatch(incomingPacket)) {
processVerifiedPacket(senderSockAddr, incomingPacket);
}
}
}

View file

@ -1,33 +0,0 @@
//
// Client.h
// gvr-interface/src
//
// Created by Stephen Birarda on 1/20/15.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_Client_h
#define hifi_Client_h
#include <QtCore/QObject>
#include <HifiSockAddr.h>
class Client : public QObject {
Q_OBJECT
public:
Client(QObject* parent = 0);
virtual void cleanupBeforeQuit() = 0;
protected:
void setupNetworking();
virtual void processVerifiedPacket(const HifiSockAddr& senderSockAddr, const QByteArray& incomingPacket);
private slots:
void processDatagrams();
};
#endif // hifi_Client_h

View file

@ -1,191 +0,0 @@
//
// GVRInterface.cpp
// gvr-interface/src
//
// Created by Stephen Birarda on 11/18/14.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "GVRInterface.h"
#ifdef ANDROID
#include <jni.h>
#include <qpa/qplatformnativeinterface.h>
#include <QtAndroidExtras/QAndroidJniEnvironment>
#include <QtAndroidExtras/QAndroidJniObject>
#ifdef HAVE_LIBOVR
#include <KeyState.h>
#include <VrApi/VrApi.h>
#endif
#endif
#include <QtCore/QTimer>
#include <QtGui/QKeyEvent>
#include <QtWidgets/QMenuBar>
#include "GVRMainWindow.h"
#include "RenderingClient.h"
static QString launchURLString = QString();
#ifdef ANDROID
extern "C" {
JNIEXPORT void Java_io_highfidelity_gvrinterface_InterfaceActivity_handleHifiURL(JNIEnv *jni, jclass clazz, jstring hifiURLString) {
launchURLString = QAndroidJniObject(hifiURLString).toString();
}
}
#endif
GVRInterface::GVRInterface(int argc, char* argv[]) :
QApplication(argc, argv),
_mainWindow(NULL),
_inVRMode(false)
{
setApplicationName("gvr-interface");
setOrganizationName("highfidelity");
setOrganizationDomain("io");
if (!launchURLString.isEmpty()) {
// did we get launched with a lookup URL? If so it is time to give that to the AddressManager
qDebug() << "We were opened via a hifi URL -" << launchURLString;
}
_client = new RenderingClient(this, launchURLString);
launchURLString = QString();
connect(this, &QGuiApplication::applicationStateChanged, this, &GVRInterface::handleApplicationStateChange);
#if defined(ANDROID) && defined(HAVE_LIBOVR)
QAndroidJniEnvironment jniEnv;
QPlatformNativeInterface* interface = QApplication::platformNativeInterface();
jobject activity = (jobject) interface->nativeResourceForIntegration("QtActivity");
ovr_RegisterHmtReceivers(&*jniEnv, activity);
// PLATFORMACTIVITY_REMOVAL: Temp workaround for PlatformActivity being
// stripped from UnityPlugin. Alternate is to use LOCAL_WHOLE_STATIC_LIBRARIES
// but that increases the size of the plugin by ~1MiB
OVR::linkerPlatformActivity++;
#endif
// call our idle function whenever we can
QTimer* idleTimer = new QTimer(this);
connect(idleTimer, &QTimer::timeout, this, &GVRInterface::idle);
idleTimer->start(0);
// call our quit handler before we go down
connect(this, &QCoreApplication::aboutToQuit, this, &GVRInterface::handleApplicationQuit);
}
void GVRInterface::handleApplicationQuit() {
_client->cleanupBeforeQuit();
}
void GVRInterface::idle() {
#if defined(ANDROID) && defined(HAVE_LIBOVR)
if (!_inVRMode && ovr_IsHeadsetDocked()) {
qDebug() << "The headset just got docked - enter VR mode.";
enterVRMode();
} else if (_inVRMode) {
if (ovr_IsHeadsetDocked()) {
static int counter = 0;
// Get the latest head tracking state, predicted ahead to the midpoint of the time
// it will be displayed. It will always be corrected to the real values by
// time warp, but the closer we get, the less black will be pulled in at the edges.
const double now = ovr_GetTimeInSeconds();
static double prev;
const double rawDelta = now - prev;
prev = now;
const double clampedPrediction = std::min( 0.1, rawDelta * 2);
ovrSensorState sensor = ovrHmd_GetSensorState(OvrHmd, now + clampedPrediction, true );
auto ovrOrientation = sensor.Predicted.Pose.Orientation;
glm::quat newOrientation(ovrOrientation.w, ovrOrientation.x, ovrOrientation.y, ovrOrientation.z);
_client->setOrientation(newOrientation);
if (counter++ % 100000 == 0) {
qDebug() << "GetSensorState in frame" << counter << "-"
<< ovrOrientation.x << ovrOrientation.y << ovrOrientation.z << ovrOrientation.w;
}
} else {
qDebug() << "The headset was undocked - leaving VR mode.";
leaveVRMode();
}
}
OVR::KeyState& backKeyState = _mainWindow->getBackKeyState();
auto backEvent = backKeyState.Update(ovr_GetTimeInSeconds());
if (backEvent == OVR::KeyState::KEY_EVENT_LONG_PRESS) {
qDebug() << "Attemping to start the Platform UI Activity.";
ovr_StartPackageActivity(_ovr, PUI_CLASS_NAME, PUI_GLOBAL_MENU);
} else if (backEvent == OVR::KeyState::KEY_EVENT_DOUBLE_TAP || backEvent == OVR::KeyState::KEY_EVENT_SHORT_PRESS) {
qDebug() << "Got an event we should cancel for!";
} else if (backEvent == OVR::KeyState::KEY_EVENT_DOUBLE_TAP) {
qDebug() << "The button is down!";
}
#endif
}
void GVRInterface::handleApplicationStateChange(Qt::ApplicationState state) {
switch(state) {
case Qt::ApplicationActive:
qDebug() << "The application is active.";
break;
case Qt::ApplicationSuspended:
qDebug() << "The application is being suspended.";
break;
default:
break;
}
}
void GVRInterface::enterVRMode() {
#if defined(ANDROID) && defined(HAVE_LIBOVR)
// Default vrModeParms
ovrModeParms vrModeParms;
vrModeParms.AsynchronousTimeWarp = true;
vrModeParms.AllowPowerSave = true;
vrModeParms.DistortionFileName = NULL;
vrModeParms.EnableImageServer = false;
vrModeParms.CpuLevel = 2;
vrModeParms.GpuLevel = 2;
vrModeParms.GameThreadTid = 0;
QAndroidJniEnvironment jniEnv;
QPlatformNativeInterface* interface = QApplication::platformNativeInterface();
jobject activity = (jobject) interface->nativeResourceForIntegration("QtActivity");
vrModeParms.ActivityObject = activity;
ovrHmdInfo hmdInfo;
_ovr = ovr_EnterVrMode(vrModeParms, &hmdInfo);
_inVRMode = true;
#endif
}
void GVRInterface::leaveVRMode() {
#if defined(ANDROID) && defined(HAVE_LIBOVR)
ovr_LeaveVrMode(_ovr);
_inVRMode = false;
#endif
}

View file

@ -1,72 +0,0 @@
//
// GVRInterface.h
// gvr-interface/src
//
// Created by Stephen Birarda on 11/18/14.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_GVRInterface_h
#define hifi_GVRInterface_h
#include <QtWidgets/QApplication>
#if defined(ANDROID) && defined(HAVE_LIBOVR)
class ovrMobile;
class ovrHmdInfo;
// This is set by JNI_OnLoad() when the .so is initially loaded.
// Must use to attach each thread that will use JNI:
namespace OVR {
// PLATFORMACTIVITY_REMOVAL: Temp workaround for PlatformActivity being
// stripped from UnityPlugin. Alternate is to use LOCAL_WHOLE_STATIC_LIBRARIES
// but that increases the size of the plugin by ~1MiB
extern int linkerPlatformActivity;
}
#endif
class GVRMainWindow;
class RenderingClient;
class QKeyEvent;
#if defined(qApp)
#undef qApp
#endif
#define qApp (static_cast<GVRInterface*>(QApplication::instance()))
class GVRInterface : public QApplication {
Q_OBJECT
public:
GVRInterface(int argc, char* argv[]);
RenderingClient* getClient() { return _client; }
void setMainWindow(GVRMainWindow* mainWindow) { _mainWindow = mainWindow; }
protected:
void keyPressEvent(QKeyEvent* event);
private slots:
void handleApplicationStateChange(Qt::ApplicationState state);
void idle();
private:
void handleApplicationQuit();
void enterVRMode();
void leaveVRMode();
#if defined(ANDROID) && defined(HAVE_LIBOVR)
ovrMobile* _ovr;
ovrHmdInfo* _hmdInfo;
#endif
GVRMainWindow* _mainWindow;
RenderingClient* _client;
bool _inVRMode;
};
#endif // hifi_GVRInterface_h

View file

@ -1,176 +0,0 @@
//
// GVRMainWindow.cpp
// gvr-interface/src
//
// Created by Stephen Birarda on 1/20/14.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "GVRMainWindow.h"
#include <QtGui/QKeyEvent>
#include <QtWidgets/QApplication>
#include <QtWidgets/QInputDialog>
#include <QtWidgets/QLabel>
#include <QtWidgets/QLineEdit>
#include <QtWidgets/QMenuBar>
#include <QtWidgets/QMessageBox>
#include <QtWidgets/QVBoxLayout>
#ifndef ANDROID
#include <QtWidgets/QDesktopWidget>
#elif defined(HAVE_LIBOVR)
#include <OVR_CAPI.h>
const float LIBOVR_DOUBLE_TAP_DURATION = 0.25f;
const float LIBOVR_LONG_PRESS_DURATION = 0.75f;
#endif
#include <AddressManager.h>
#include "InterfaceView.h"
#include "LoginDialog.h"
#include "RenderingClient.h"
GVRMainWindow::GVRMainWindow(QWidget* parent) :
QMainWindow(parent),
#if defined(ANDROID) && defined(HAVE_LIBOVR)
_backKeyState(LIBOVR_DOUBLE_TAP_DURATION, LIBOVR_LONG_PRESS_DURATION),
_wasBackKeyDown(false),
#endif
_mainLayout(NULL),
_menuBar(NULL),
_loginAction(NULL)
{
#ifndef ANDROID
const int NOTE_4_WIDTH = 2560;
const int NOTE_4_HEIGHT = 1440;
setFixedSize(NOTE_4_WIDTH / 2, NOTE_4_HEIGHT / 2);
#endif
setupMenuBar();
QWidget* baseWidget = new QWidget(this);
// setup a layout so we can vertically align to top
_mainLayout = new QVBoxLayout(baseWidget);
_mainLayout->setAlignment(Qt::AlignTop);
// set the layout on the base widget
baseWidget->setLayout(_mainLayout);
setCentralWidget(baseWidget);
// add the interface view
new InterfaceView(baseWidget);
}
GVRMainWindow::~GVRMainWindow() {
delete _menuBar;
}
void GVRMainWindow::keyPressEvent(QKeyEvent* event) {
#ifdef ANDROID
if (event->key() == Qt::Key_Back) {
// got the Android back key, hand off to OVR KeyState
_backKeyState.HandleEvent(ovr_GetTimeInSeconds(), true, (_wasBackKeyDown ? 1 : 0));
_wasBackKeyDown = true;
return;
}
#endif
QWidget::keyPressEvent(event);
}
void GVRMainWindow::keyReleaseEvent(QKeyEvent* event) {
#ifdef ANDROID
if (event->key() == Qt::Key_Back) {
// release on the Android back key, hand off to OVR KeyState
_backKeyState.HandleEvent(ovr_GetTimeInSeconds(), false, 0);
_wasBackKeyDown = false;
}
#endif
QWidget::keyReleaseEvent(event);
}
void GVRMainWindow::setupMenuBar() {
QMenu* fileMenu = new QMenu("File");
QMenu* helpMenu = new QMenu("Help");
_menuBar = new QMenuBar(0);
_menuBar->addMenu(fileMenu);
_menuBar->addMenu(helpMenu);
QAction* goToAddress = new QAction("Go to Address", fileMenu);
connect(goToAddress, &QAction::triggered, this, &GVRMainWindow::showAddressBar);
fileMenu->addAction(goToAddress);
_loginAction = new QAction("Login", fileMenu);
fileMenu->addAction(_loginAction);
// change the login action depending on our logged in/out state
AccountManager& accountManager = AccountManager::getInstance();
connect(&accountManager, &AccountManager::loginComplete, this, &GVRMainWindow::refreshLoginAction);
connect(&accountManager, &AccountManager::logoutComplete, this, &GVRMainWindow::refreshLoginAction);
// refresh the state now
refreshLoginAction();
QAction* aboutQt = new QAction("About Qt", helpMenu);
connect(aboutQt, &QAction::triggered, qApp, &QApplication::aboutQt);
helpMenu->addAction(aboutQt);
setMenuBar(_menuBar);
}
void GVRMainWindow::showAddressBar() {
// setup the address QInputDialog
QInputDialog* addressDialog = new QInputDialog(this);
addressDialog->setLabelText("Address");
// add the address dialog to the main layout
_mainLayout->addWidget(addressDialog);
connect(addressDialog, &QInputDialog::textValueSelected,
DependencyManager::get<AddressManager>().data(), &AddressManager::handleLookupString);
}
void GVRMainWindow::showLoginDialog() {
LoginDialog* loginDialog = new LoginDialog(this);
// have the account manager handle credentials from LoginDialog
AccountManager& accountManager = AccountManager::getInstance();
connect(loginDialog, &LoginDialog::credentialsEntered, &accountManager, &AccountManager::requestAccessToken);
connect(&accountManager, &AccountManager::loginFailed, this, &GVRMainWindow::showLoginFailure);
_mainLayout->addWidget(loginDialog);
}
void GVRMainWindow::showLoginFailure() {
QMessageBox::warning(this, "Login Failed",
"Could not log in with that username and password. Please try again!");
}
void GVRMainWindow::refreshLoginAction() {
AccountManager& accountManager = AccountManager::getInstance();
disconnect(_loginAction, &QAction::triggered, &accountManager, 0);
if (accountManager.isLoggedIn()) {
_loginAction->setText("Logout");
connect(_loginAction, &QAction::triggered, &accountManager, &AccountManager::logout);
} else {
_loginAction->setText("Login");
connect(_loginAction, &QAction::triggered, this, &GVRMainWindow::showLoginDialog);
}
}

View file

@ -1,58 +0,0 @@
//
// GVRMainWindow.h
// gvr-interface/src
//
// Created by Stephen Birarda on 1/20/14.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_GVRMainWindow_h
#define hifi_GVRMainWindow_h
#include <QtWidgets/QMainWindow>
#if defined(ANDROID) && defined(HAVE_LIBOVR)
#include <KeyState.h>
#endif
class QKeyEvent;
class QMenuBar;
class QVBoxLayout;
class GVRMainWindow : public QMainWindow {
Q_OBJECT
public:
GVRMainWindow(QWidget* parent = 0);
~GVRMainWindow();
public slots:
void showAddressBar();
void showLoginDialog();
void showLoginFailure();
#if defined(ANDROID) && defined(HAVE_LIBOVR)
OVR::KeyState& getBackKeyState() { return _backKeyState; }
#endif
protected:
void keyPressEvent(QKeyEvent* event);
void keyReleaseEvent(QKeyEvent* event);
private slots:
void refreshLoginAction();
private:
void setupMenuBar();
#if defined(ANDROID) && defined(HAVE_LIBOVR)
OVR::KeyState _backKeyState;
bool _wasBackKeyDown;
#endif
QVBoxLayout* _mainLayout;
QMenuBar* _menuBar;
QAction* _loginAction;
};
#endif // hifi_GVRMainWindow_h

View file

@ -1,18 +0,0 @@
//
// InterfaceView.cpp
// gvr-interface/src
//
// Created by Stephen Birarda on 1/28/14.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InterfaceView.h"
InterfaceView::InterfaceView(QWidget* parent, Qt::WindowFlags flags) :
QOpenGLWidget(parent, flags)
{
}

View file

@ -1,23 +0,0 @@
//
// InterfaceView.h
// gvr-interface/src
//
// Created by Stephen Birarda on 1/28/14.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_InterfaceView_h
#define hifi_InterfaceView_h
#include <QtWidgets/QOpenGLWidget>
class InterfaceView : public QOpenGLWidget {
Q_OBJECT
public:
InterfaceView(QWidget* parent = 0, Qt::WindowFlags flags = 0);
};
#endif // hifi_InterfaceView_h

View file

@ -1,69 +0,0 @@
//
// LoginDialog.cpp
// gvr-interface/src
//
// Created by Stephen Birarda on 2015-02-03.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "LoginDialog.h"
#include <QtWidgets/QDialogButtonBox>
#include <QtWidgets/QGridLayout>
#include <QtWidgets/QLabel>
#include <QtWidgets/QLineEdit>
#include <QtWidgets/QPushButton>
LoginDialog::LoginDialog(QWidget* parent) :
QDialog(parent)
{
setupGUI();
setWindowTitle("Login");
setModal(true);
}
void LoginDialog::setupGUI() {
// setup a grid layout
QGridLayout* formGridLayout = new QGridLayout(this);
_usernameLineEdit = new QLineEdit(this);
QLabel* usernameLabel = new QLabel(this);
usernameLabel->setText("Username");
usernameLabel->setBuddy(_usernameLineEdit);
formGridLayout->addWidget(usernameLabel, 0, 0);
formGridLayout->addWidget(_usernameLineEdit, 1, 0);
_passwordLineEdit = new QLineEdit(this);
_passwordLineEdit->setEchoMode(QLineEdit::Password);
QLabel* passwordLabel = new QLabel(this);
passwordLabel->setText("Password");
passwordLabel->setBuddy(_passwordLineEdit);
formGridLayout->addWidget(passwordLabel, 2, 0);
formGridLayout->addWidget(_passwordLineEdit, 3, 0);
QDialogButtonBox* buttons = new QDialogButtonBox(this);
QPushButton* okButton = buttons->addButton(QDialogButtonBox::Ok);
QPushButton* cancelButton = buttons->addButton(QDialogButtonBox::Cancel);
okButton->setText("Login");
connect(cancelButton, &QPushButton::clicked, this, &QDialog::close);
connect(okButton, &QPushButton::clicked, this, &LoginDialog::loginButtonClicked);
formGridLayout->addWidget(buttons, 4, 0, 1, 2);
setLayout(formGridLayout);
}
void LoginDialog::loginButtonClicked() {
emit credentialsEntered(_usernameLineEdit->text(), _passwordLineEdit->text());
close();
}

View file

@ -1,34 +0,0 @@
//
// LoginDialog.h
// gvr-interface/src
//
// Created by Stephen Birarda on 2015-02-03.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_LoginDialog_h
#define hifi_LoginDialog_h
#include <QtWidgets/QDialog>
class QLineEdit;
class LoginDialog : public QDialog {
Q_OBJECT
public:
LoginDialog(QWidget* parent = 0);
signals:
void credentialsEntered(const QString& username, const QString& password);
private slots:
void loginButtonClicked();
private:
void setupGUI();
QLineEdit* _usernameLineEdit;
QLineEdit* _passwordLineEdit;
};
#endif // hifi_LoginDialog_h

View file

@ -1,156 +0,0 @@
//
// RenderingClient.cpp
// gvr-interface/src
//
// Created by Stephen Birarda on 1/20/15.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "RenderingClient.h"
#include <QtCore/QThread>
#include <QtWidgets/QInputDialog>
#include <AddressManager.h>
#include <AudioClient.h>
#include <AvatarHashMap.h>
#include <NodeList.h>
RenderingClient* RenderingClient::_instance = NULL;
RenderingClient::RenderingClient(QObject *parent, const QString& launchURLString) :
Client(parent)
{
_instance = this;
// connect to AddressManager and pass it the launch URL, if we have one
auto addressManager = DependencyManager::get<AddressManager>();
connect(addressManager.data(), &AddressManager::locationChangeRequired, this, &RenderingClient::goToLocation);
addressManager->loadSettings(launchURLString);
// tell the NodeList which node types all rendering clients will want to know about
DependencyManager::get<NodeList>()->addSetOfNodeTypesToNodeInterestSet(NodeSet() << NodeType::AudioMixer << NodeType::AvatarMixer);
DependencyManager::set<AvatarHashMap>();
// get our audio client setup on its own thread
auto audioClient = DependencyManager::set<AudioClient>();
audioClient->setPositionGetter(getPositionForAudio);
audioClient->setOrientationGetter(getOrientationForAudio);
audioClient->startThread();
connect(&_avatarTimer, &QTimer::timeout, this, &RenderingClient::sendAvatarPacket);
_avatarTimer.setInterval(16); // 60 FPS
_avatarTimer.start();
_fakeAvatar.setDisplayName("GearVR");
_fakeAvatar.setFaceModelURL(QUrl(DEFAULT_HEAD_MODEL_URL));
_fakeAvatar.setSkeletonModelURL(QUrl(DEFAULT_BODY_MODEL_URL));
_fakeAvatar.toByteArray(); // Creates HeadData
}
void RenderingClient::sendAvatarPacket() {
_fakeAvatar.setPosition(_position);
_fakeAvatar.setHeadOrientation(_orientation);
QByteArray packet = byteArrayWithPopulatedHeader(PacketTypeAvatarData);
packet.append(_fakeAvatar.toByteArray());
DependencyManager::get<NodeList>()->broadcastToNodes(packet, NodeSet() << NodeType::AvatarMixer);
_fakeAvatar.sendIdentityPacket();
}
void RenderingClient::cleanupBeforeQuit() {
DependencyManager::get<AudioClient>()->cleanupBeforeQuit();
// destroy the AudioClient so it and its thread will safely go down
DependencyManager::destroy<AudioClient>();
}
void RenderingClient::processVerifiedPacket(const HifiSockAddr& senderSockAddr, const QByteArray& incomingPacket) {
auto nodeList = DependencyManager::get<NodeList>();
PacketType incomingType = packetTypeForPacket(incomingPacket);
switch (incomingType) {
case PacketTypeAudioEnvironment:
case PacketTypeAudioStreamStats:
case PacketTypeMixedAudio:
case PacketTypeSilentAudioFrame: {
if (incomingType == PacketTypeAudioStreamStats) {
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "parseAudioStreamStatsPacket",
Qt::QueuedConnection,
Q_ARG(QByteArray, incomingPacket));
} else if (incomingType == PacketTypeAudioEnvironment) {
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "parseAudioEnvironmentData",
Qt::QueuedConnection,
Q_ARG(QByteArray, incomingPacket));
} else {
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "addReceivedAudioToStream",
Qt::QueuedConnection,
Q_ARG(QByteArray, incomingPacket));
}
// update having heard from the audio-mixer and record the bytes received
SharedNodePointer audioMixer = nodeList->sendingNodeForPacket(incomingPacket);
if (audioMixer) {
audioMixer->setLastHeardMicrostamp(usecTimestampNow());
}
break;
}
case PacketTypeBulkAvatarData:
case PacketTypeKillAvatar:
case PacketTypeAvatarIdentity:
case PacketTypeAvatarBillboard: {
// update having heard from the avatar-mixer and record the bytes received
SharedNodePointer avatarMixer = nodeList->sendingNodeForPacket(incomingPacket);
if (avatarMixer) {
avatarMixer->setLastHeardMicrostamp(usecTimestampNow());
QMetaObject::invokeMethod(DependencyManager::get<AvatarHashMap>().data(),
"processAvatarMixerDatagram",
Q_ARG(const QByteArray&, incomingPacket),
Q_ARG(const QWeakPointer<Node>&, avatarMixer));
}
break;
}
default:
Client::processVerifiedPacket(senderSockAddr, incomingPacket);
break;
}
}
void RenderingClient::goToLocation(const glm::vec3& newPosition,
bool hasOrientationChange, const glm::quat& newOrientation,
bool shouldFaceLocation) {
qDebug().nospace() << "RenderingClient goToLocation - moving to " << newPosition.x << ", "
<< newPosition.y << ", " << newPosition.z;
glm::vec3 shiftedPosition = newPosition;
if (hasOrientationChange) {
qDebug().nospace() << "RenderingClient goToLocation - new orientation is "
<< newOrientation.x << ", " << newOrientation.y << ", " << newOrientation.z << ", " << newOrientation.w;
// orient the user to face the target
glm::quat quatOrientation = newOrientation;
if (shouldFaceLocation) {
quatOrientation = newOrientation * glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
// move the user a couple units away
const float DISTANCE_TO_USER = 2.0f;
shiftedPosition = newPosition - quatOrientation * glm::vec3( 0.0f, 0.0f,-1.0f) * DISTANCE_TO_USER;
}
_orientation = quatOrientation;
}
_position = shiftedPosition;
}

View file

@ -1,57 +0,0 @@
//
// RenderingClient.h
// gvr-interface/src
//
// Created by Stephen Birarda on 1/20/15.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_RenderingClient_h
#define hifi_RenderingClient_h
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <QTimer>
#include <AvatarData.h>
#include "Client.h"
class RenderingClient : public Client {
Q_OBJECT
public:
RenderingClient(QObject* parent = 0, const QString& launchURLString = QString());
const glm::vec3& getPosition() const { return _position; }
const glm::quat& getOrientation() const { return _orientation; }
void setOrientation(const glm::quat& orientation) { _orientation = orientation; }
static glm::vec3 getPositionForAudio() { return _instance->getPosition(); }
static glm::quat getOrientationForAudio() { return _instance->getOrientation(); }
virtual void cleanupBeforeQuit();
private slots:
void goToLocation(const glm::vec3& newPosition,
bool hasOrientationChange, const glm::quat& newOrientation,
bool shouldFaceLocation);
void sendAvatarPacket();
private:
virtual void processVerifiedPacket(const HifiSockAddr& senderSockAddr, const QByteArray& incomingPacket);
static RenderingClient* _instance;
glm::vec3 _position;
glm::quat _orientation;
QTimer _avatarTimer;
AvatarData _fakeAvatar;
};
#endif // hifi_RenderingClient_h

View file

@ -1,41 +0,0 @@
//
// InterfaceActivity.java
// gvr-interface/java
//
// Created by Stephen Birarda on 1/26/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
package io.highfidelity.gvrinterface;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.view.WindowManager;
import android.util.Log;
import org.qtproject.qt5.android.bindings.QtActivity;
public class InterfaceActivity extends QtActivity {
public static native void handleHifiURL(String hifiURLString);
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
// Get the intent that started this activity in case we have a hifi:// URL to parse
Intent intent = getIntent();
if (intent.getAction() == Intent.ACTION_VIEW) {
Uri data = intent.getData();
if (data.getScheme().equals("hifi")) {
handleHifiURL(data.toString());
}
}
}
}

View file

@ -1,28 +0,0 @@
//
// main.cpp
// gvr-interface/src
//
// Created by Stephen Birarda on 11/17/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "GVRMainWindow.h"
#include "GVRInterface.h"
int main(int argc, char* argv[]) {
GVRInterface app(argc, argv);
GVRMainWindow mainWindow;
#ifdef ANDROID
mainWindow.showFullScreen();
#else
mainWindow.showMaximized();
#endif
app.setMainWindow(&mainWindow);
return app.exec();
}

View file

@ -1,51 +0,0 @@
//
// InterfaceBetaActivity.java
// gvr-interface/java
//
// Created by Stephen Birarda on 1/27/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
package io.highfidelity.gvrinterface;
import android.os.Bundle;
import net.hockeyapp.android.CrashManager;
import net.hockeyapp.android.UpdateManager;
public class InterfaceBetaActivity extends InterfaceActivity {
public String _hockeyAppID;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
_hockeyAppID = getString(R.string.HockeyAppID);
checkForUpdates();
}
@Override
protected void onPause() {
super.onPause();
UpdateManager.unregister();
}
@Override
protected void onResume() {
super.onResume();
checkForCrashes();
}
private void checkForCrashes() {
CrashManager.register(this, _hockeyAppID);
}
private void checkForUpdates() {
// Remove this for store / production builds!
UpdateManager.register(this, _hockeyAppID);
}
}

View file

@ -1,5 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<resources>
<string name="HockeyAppID">${HOCKEY_APP_ID}</string>
<bool name="HockeyAppEnabled">${HOCKEY_APP_ENABLED}</bool>
</resources>

View file

@ -52,6 +52,13 @@ ANDROID_PACKAGES = {
'sharedLibFolder': 'VrApi/Libs/Android/arm64-v8a/Release',
'includeLibs': ['libvrapi.so']
},
'oculusPlatform': {
'file': 'OVRPlatformSDK_v1.32.0.zip',
'versionId': 'jG9DB16zOGxSrmtZy4jcQnwO0TJUuaeL',
'checksum': 'ab5b203b3a39a56ab148d68fff769e05',
'sharedLibFolder': 'Android/libs/arm64-v8a',
'includeLibs': ['libovrplatformloader.so']
},
'openssl': {
'file': 'openssl-1.1.0g_armv8.tgz',
'versionId': 'AiiPjmgUZTgNj7YV1EEx2lL47aDvvvAW',

View file

@ -265,7 +265,7 @@ foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
endforeach()
# include headers for interface and InterfaceConfig.
include_directories("${PROJECT_SOURCE_DIR}/src")
target_include_directories(${TARGET_NAME} PRIVATE "${PROJECT_SOURCE_DIR}/src")
if (ANDROID)
find_library(ANDROID_LOG_LIB log)

View file

@ -1,6 +1,6 @@
set(TARGET_NAME entities)
setup_hifi_library(Network Script)
include_directories(SYSTEM "${OPENSSL_INCLUDE_DIR}")
target_include_directories(${TARGET_NAME} PRIVATE "${OPENSSL_INCLUDE_DIR}")
include_hifi_library_headers(hfm)
include_hifi_library_headers(fbx)
include_hifi_library_headers(gpu)

View file

@ -1,5 +1,5 @@
set(TARGET_NAME gl)
setup_hifi_library(Gui Widgets Qml Quick)
setup_hifi_library(Gui Widgets)
link_hifi_libraries(shared)
target_opengl()

View file

@ -195,6 +195,21 @@ GLAPI PFNWGLCREATECONTEXTATTRIBSARBPROC wglCreateContextAttribsARB;
Q_GUI_EXPORT QOpenGLContext *qt_gl_global_share_context();
#if defined(GL_CUSTOM_CONTEXT)
bool Context::makeCurrent() {
BOOL result = wglMakeCurrent(_hdc, _hglrc);
assert(result);
updateSwapchainMemoryCounter();
return result;
}
void Context::swapBuffers() {
SwapBuffers(_hdc);
}
void Context::doneCurrent() {
wglMakeCurrent(0, 0);
}
#endif
void Context::create(QOpenGLContext* shareContext) {
if (!shareContext) {
shareContext = qt_gl_global_share_context();
@ -297,7 +312,11 @@ void Context::create(QOpenGLContext* shareContext) {
contextAttribs.push_back(0);
}
contextAttribs.push_back(0);
HGLRC shareHglrc = (HGLRC)QOpenGLContextWrapper::nativeContext(shareContext);
HGLRC shareHglrc = nullptr;
if (shareContext) {
auto nativeContextPointer = QOpenGLContextWrapper(shareContext).getNativeContext();
shareHglrc = (HGLRC)nativeContextPointer->context();
}
_hglrc = wglCreateContextAttribsARB(_hdc, shareHglrc, &contextAttribs[0]);
}

View file

@ -61,12 +61,12 @@ void Context::debugMessageHandler(const QOpenGLDebugMessage& debugMessage) {
switch (severity) {
case QOpenGLDebugMessage::NotificationSeverity:
case QOpenGLDebugMessage::LowSeverity:
qCDebug(glLogging) << debugMessage;
return;
default:
qCWarning(glLogging) << debugMessage;
break;
}
qWarning(glLogging) << debugMessage;
return;
}
void Context::setupDebugLogging(QOpenGLContext *context) {
@ -82,6 +82,8 @@ void Context::setupDebugLogging(QOpenGLContext *context) {
}
}
#if !defined(GL_CUSTOM_CONTEXT)
bool Context::makeCurrent() {
updateSwapchainMemoryCounter();
bool result = _qglContext->makeCurrent(_window);
@ -98,6 +100,7 @@ void Context::doneCurrent() {
_qglContext->doneCurrent();
}
}
#endif
Q_GUI_EXPORT QOpenGLContext *qt_gl_global_share_context();
const QSurfaceFormat& getDefaultOpenGLSurfaceFormat();

View file

@ -65,21 +65,9 @@ bool OffscreenGLCanvas::create(QOpenGLContext* sharedContext) {
_offscreenSurface->setFormat(_context->format());
_offscreenSurface->create();
// Due to https://bugreports.qt.io/browse/QTBUG-65125 we can't rely on `isValid`
// to determine if the offscreen surface was successfully created, so we use
// makeCurrent as a proxy test. The bug is fixed in Qt 5.9.4.
#if defined(Q_OS_ANDROID)
if (!_context->makeCurrent(_offscreenSurface)) {
qFatal("Unable to make offscreen surface current");
}
_context->doneCurrent();
#else
if (!_offscreenSurface->isValid()) {
qFatal("Offscreen surface is invalid");
}
#endif
return true;
}

View file

@ -17,6 +17,22 @@
#include <QtPlatformHeaders/QWGLNativeContext>
#endif
QOpenGLContextWrapper::Pointer QOpenGLContextWrapper::currentContextWrapper() {
return std::make_shared<QOpenGLContextWrapper>(QOpenGLContext::currentContext());
}
QOpenGLContextWrapper::NativeContextPointer QOpenGLContextWrapper::getNativeContext() const {
QOpenGLContextWrapper::NativeContextPointer result;
auto nativeHandle = _context->nativeHandle();
if (nativeHandle.canConvert<QGLNativeContext>()) {
result = std::make_shared<QGLNativeContext>();
*result = nativeHandle.value<QGLNativeContext>();
}
return result;
}
uint32_t QOpenGLContextWrapper::currentContextVersion() {
QOpenGLContext* context = QOpenGLContext::currentContext();
if (!context) {
@ -49,19 +65,6 @@ void QOpenGLContextWrapper::setFormat(const QSurfaceFormat& format) {
_context->setFormat(format);
}
#ifdef Q_OS_WIN
void* QOpenGLContextWrapper::nativeContext(QOpenGLContext* context) {
HGLRC result = 0;
if (context != nullptr) {
auto nativeHandle = context->nativeHandle();
if (nativeHandle.canConvert<QWGLNativeContext>()) {
result = nativeHandle.value<QWGLNativeContext>().context();
}
}
return result;
}
#endif
bool QOpenGLContextWrapper::create() {
return _context->create();
}

View file

@ -12,19 +12,31 @@
#ifndef hifi_QOpenGLContextWrapper_h
#define hifi_QOpenGLContextWrapper_h
#include <stdint.h>
#include <QtGlobal>
#include <memory>
class QOpenGLContext;
class QSurface;
class QSurfaceFormat;
class QThread;
#if defined(Q_OS_ANDROID)
#include <EGL/egl.h>
#include <QtPlatformHeaders/QEGLNativeContext>
using QGLNativeContext = QEGLNativeContext;
#elif defined(Q_OS_WIN)
class QWGLNativeContext;
using QGLNativeContext = QWGLNativeContext;
#else
using QGLNativeContext = void*;
#endif
class QOpenGLContextWrapper {
public:
#ifdef Q_OS_WIN
static void* nativeContext(QOpenGLContext* context);
#endif
using Pointer = std::shared_ptr<QOpenGLContextWrapper>;
using NativeContextPointer = std::shared_ptr<QGLNativeContext>;
static Pointer currentContextWrapper();
QOpenGLContextWrapper();
QOpenGLContextWrapper(QOpenGLContext* context);
@ -37,6 +49,8 @@ public:
void setShareContext(QOpenGLContext* otherContext);
void moveToThread(QThread* thread);
NativeContextPointer getNativeContext() const;
static QOpenGLContext* currentContext();
static uint32_t currentContextVersion();

View file

@ -426,6 +426,9 @@ void GLBackend::render(const Batch& batch) {
GL_PROFILE_RANGE(render_gpu_gl, batch.getName().c_str());
_transform._skybox = _stereo._skybox = batch.isSkyboxEnabled();
// FIXME move this to between the transfer and draw passes, so that
// framebuffer setup can see the proper stereo state and enable things
// like foveation
// Allow the batch to override the rendering stereo settings
// for things like full framebuffer copy operations (deferred lighting passes)
bool savedStereo = _stereo._enable;

View file

@ -95,7 +95,7 @@ public:
// Shutdown rendering and persist any required resources
void shutdown() override;
void setCameraCorrection(const Mat4& correction, const Mat4& prevRenderView, bool reset = false);
void setCameraCorrection(const Mat4& correction, const Mat4& prevRenderView, bool reset = false) override;
void render(const Batch& batch) final override;
// This call synchronize the Full Backend cache with the current GLState

View file

@ -24,6 +24,10 @@
#include <gpu/TextureTable.h>
#include <gpu/gl/GLTexelFormat.h>
// Setting HIFI_FORCE_MOBILE_TEXTURES in the environment makes the desktop 4.5
// backend report the same texture-format support as the mobile backend, which
// is useful, for instance, when capturing frames intended for mobile playback.
static const QString FORCE_MOBILE_TEXTURES_STRING{ "HIFI_FORCE_MOBILE_TEXTURES" };
static bool FORCE_MOBILE_TEXTURES = QProcessEnvironment::systemEnvironment().contains(FORCE_MOBILE_TEXTURES_STRING);
using namespace gpu;
using namespace gpu::gl;
using namespace gpu::gl45;
@ -45,9 +49,10 @@ bool GL45Backend::supportedTextureFormat(const gpu::Element& format) {
case gpu::Semantic::COMPRESSED_EAC_RED_SIGNED:
case gpu::Semantic::COMPRESSED_EAC_XY:
case gpu::Semantic::COMPRESSED_EAC_XY_SIGNED:
return false;
return FORCE_MOBILE_TEXTURES;
default:
return true;
return FORCE_MOBILE_TEXTURES ? !format.isCompressed() : true;
}
}

View file

@ -48,6 +48,7 @@ public:
class GLESTexture : public GLTexture {
using Parent = GLTexture;
friend class GLESBackend;
friend class GLESFramebuffer;
GLuint allocate(const Texture& texture);
protected:
GLESTexture(const std::weak_ptr<GLBackend>& backend, const Texture& buffer);

View file

@ -17,6 +17,34 @@
namespace gpu { namespace gles {
// returns the FOV from the projection matrix
// Each clip plane (x' = +/-w', y' = +/-w') is pulled back through the transposed
// projection matrix; the ratio of the resulting plane's z component to its x (or y)
// component is the tangent of the corresponding half-angle.
static inline vec4 extractFov( const glm::mat4& m) {
static const std::array<vec4, 4> CLIPS{ {
{ 1, 0, 0, 1 },
{ -1, 0, 0, 1 },
{ 0, 1, 0, 1 },
{ 0, -1, 0, 1 }
} };
glm::mat4 mt = glm::transpose(m);
vec4 v, result;
// Left
v = mt * CLIPS[0];
result.x = -atanf(v.z / v.x);
// Right
v = mt * CLIPS[1];
result.y = atanf(v.z / v.x);
// Down
v = mt * CLIPS[2];
result.z = -atanf(v.z / v.y);
// Up
v = mt * CLIPS[3];
result.w = atanf(v.z / v.y);
return result;
}
class GLESFramebuffer : public gl::GLFramebuffer {
using Parent = gl::GLFramebuffer;
static GLuint allocate() {
@ -29,6 +57,24 @@ public:
GLint currentFBO = -1;
glGetIntegerv(GL_DRAW_FRAMEBUFFER_BINDING, &currentFBO);
glBindFramebuffer(GL_FRAMEBUFFER, _fbo);
vec2 focalPoint{ -1.0f };
#if 0
{
auto backend = _backend.lock();
if (backend && backend->isStereo()) {
glm::mat4 projections[2];
backend->getStereoProjections(projections);
vec4 fov = extractFov(projections[0]);
float fovwidth = fov.x + fov.y;
float fovheight = fov.z + fov.w;
focalPoint.x = fov.y / fovwidth;
focalPoint.y = (fov.z / fovheight) - 0.5f;
}
}
#endif
gl::GLTexture* gltexture = nullptr;
TexturePointer surface;
if (_gpuObject.getColorStamps() != _colorStamps) {
@ -58,7 +104,7 @@ public:
surface = b._texture;
if (surface) {
Q_ASSERT(TextureUsageType::RENDERBUFFER == surface->getUsageType());
gltexture = backend->syncGPUObject(surface);
gltexture = backend->syncGPUObject(surface);
} else {
gltexture = nullptr;
}
@ -66,6 +112,24 @@ public:
if (gltexture) {
if (gltexture->_target == GL_TEXTURE_2D) {
glFramebufferTexture2D(GL_FRAMEBUFFER, colorAttachments[unit], GL_TEXTURE_2D, gltexture->_texture, 0);
#if 0
if (glTextureFoveationParametersQCOM && focalPoint.x != -1.0f) {
static GLint FOVEATION_QUERY = 0;
static std::once_flag once;
std::call_once(once, [&]{
glGetTexParameteriv(GL_TEXTURE_2D, GL_TEXTURE_FOVEATED_FEATURE_QUERY_QCOM, &FOVEATION_QUERY);
});
static const float foveaArea = 4.0f;
static const float gain = 16.0f;
GLESBackend::GLESTexture* glestexture = static_cast<GLESBackend::GLESTexture*>(gltexture);
glestexture->withPreservedTexture([=]{
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_FOVEATED_FEATURE_BITS_QCOM, GL_FOVEATION_ENABLE_BIT_QCOM | GL_FOVEATION_SCALED_BIN_METHOD_BIT_QCOM);
glTextureFoveationParametersQCOM(_id, 0, 0, -focalPoint.x, focalPoint.y, gain * 2.0f, gain, foveaArea);
glTextureFoveationParametersQCOM(_id, 0, 1, focalPoint.x, focalPoint.y, gain * 2.0f, gain, foveaArea);
});
}
#endif
} else {
glFramebufferTextureLayer(GL_FRAMEBUFFER, colorAttachments[unit], gltexture->_texture, 0,
b._subresource);

View file

@ -66,6 +66,7 @@ public:
virtual void syncProgram(const gpu::ShaderPointer& program) = 0;
virtual void recycle() const = 0;
virtual void downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage) = 0;
virtual void setCameraCorrection(const Mat4& correction, const Mat4& prevRenderView, bool reset = false) {}
virtual bool supportedTextureFormat(const gpu::Element& format) = 0;
@ -117,7 +118,6 @@ public:
static ContextMetricSize textureResourcePopulatedGPUMemSize;
static ContextMetricSize textureResourceIdealGPUMemSize;
protected:
virtual bool isStereo() const {
return _stereo.isStereo();
}
@ -127,6 +127,7 @@ protected:
eyeProjections[i] = _stereo._eyeProjections[i];
}
}
protected:
void getStereoViews(mat4* eyeViews) const {
for (int i = 0; i < 2; ++i) {

View file

@ -34,11 +34,26 @@ public:
return filename;
}
static std::string getBaseDir(const std::string& filename) {
std::string result;
if (0 == filename.find("assets:")) {
auto lastSlash = filename.rfind('/');
result = filename.substr(0, lastSlash + 1);
} else {
// assign to the outer result; declaring a second local here would shadow it
// and the function would return an empty string for filesystem paths
result = QFileInfo(filename.c_str()).absoluteDir().canonicalPath().toStdString();
// canonicalPath() returns an empty string for nonexistent paths, so guard the access
if (result.empty() || *result.rbegin() != '/') {
result += '/';
}
}
return result;
}
Deserializer(const std::string& filename, uint32_t externalTexture, const TextureLoader& loader) :
basename(getBaseName(filename)), externalTexture(externalTexture), textureLoader(loader) {}
basename(getBaseName(filename)), basedir(getBaseDir(filename)), externalTexture(externalTexture), textureLoader(loader) {
}
const std::string basename;
std::string basedir;
const std::string basedir;
std::string binaryFile;
const uint32_t externalTexture;
TextureLoader textureLoader;
@ -302,6 +317,21 @@ TexturePointer Deserializer::readTexture(const json& node, uint32_t external) {
return nullptr;
}
std::string source;
readOptional(source, node, keys::source);
std::string ktxFile;
readOptional(ktxFile, node, keys::ktxFile);
Element ktxTexelFormat, ktxMipFormat;
if (!ktxFile.empty()) {
if (QFileInfo(ktxFile.c_str()).isRelative()) {
ktxFile = basedir + ktxFile;
}
ktx::StoragePointer ktxStorage{ new storage::FileStorage(ktxFile.c_str()) };
auto ktxObject = ktx::KTX::create(ktxStorage);
Texture::evalTextureFormat(ktxObject->getHeader(), ktxTexelFormat, ktxMipFormat);
}
TextureUsageType usageType = node[keys::usageType];
Texture::Type type = node[keys::type];
glm::u16vec4 dims;
@ -312,6 +342,9 @@ TexturePointer Deserializer::readTexture(const json& node, uint32_t external) {
uint16 mips = node[keys::mips];
uint16 samples = node[keys::samples];
Element texelFormat = readElement(node[keys::texelFormat]);
if (!ktxFile.empty() && (ktxMipFormat.isCompressed() != texelFormat.isCompressed())) {
texelFormat = ktxMipFormat;
}
Sampler sampler;
readOptionalTransformed<Sampler>(sampler, node, keys::sampler, [](const json& node) { return readSampler(node); });
TexturePointer result;
@ -325,8 +358,6 @@ TexturePointer Deserializer::readTexture(const json& node, uint32_t external) {
auto& texture = *result;
readOptional(texture._source, node, keys::source);
std::string ktxFile;
readOptional(ktxFile, node, keys::ktxFile);
if (!ktxFile.empty()) {
if (QFileInfo(ktxFile.c_str()).isRelative()) {
ktxFile = basedir + "/" + ktxFile;
@ -359,6 +390,7 @@ ShaderPointer Deserializer::readShader(const json& node) {
// FIXME support procedural shaders
Shader::Type type = node[keys::type];
std::string name = node[keys::name];
uint32_t id = node[keys::id];
ShaderPointer result;
switch (type) {
@ -374,6 +406,9 @@ ShaderPointer Deserializer::readShader(const json& node) {
default:
throw std::runtime_error("not implemented");
}
if (result->getSource().name != name) {
throw std::runtime_error("Bad name match");
}
return result;
}
@ -747,12 +782,6 @@ StereoState readStereoState(const json& node) {
FramePointer Deserializer::deserializeFrame() {
{
std::string filename{ basename + ".json" };
if (0 == basename.find("assets:")) {
auto lastSlash = basename.rfind('/');
basedir = basename.substr(0, lastSlash);
} else {
basedir = QFileInfo(basename.c_str()).absolutePath().toStdString();
}
storage::FileStorage mappedFile(filename.c_str());
frameNode = json::parse(std::string((const char*)mappedFile.data(), mappedFile.size()));
}
@ -808,7 +837,7 @@ FramePointer Deserializer::deserializeFrame() {
swapchains =
readArray<SwapChainPointer>(frameNode, keys::swapchains, [this](const json& node) { return readSwapchain(node); });
queries = readArray<QueryPointer>(frameNode, keys::queries, [this](const json& node) { return readQuery(node); });
queries = readArray<QueryPointer>(frameNode, keys::queries, [](const json& node) { return readQuery(node); });
frame.framebuffer = framebuffers[frameNode[keys::framebuffer].get<uint32_t>()];
frame.view = readMat4(frameNode[keys::view]);
frame.pose = readMat4(frameNode[keys::pose]);
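
The intent of the new basedir handling is that relative ktxFile entries in the frame JSON resolve against the directory of the frame file itself. A standalone, Qt-free sketch of that rule with hypothetical inputs:

#include <iostream>
#include <string>

// Hypothetical helper mirroring the resolution rule above: base directory of
// the frame file plus the relative KTX path from the JSON.
static std::string baseDirOf(const std::string& filename) {
    auto lastSlash = filename.rfind('/');
    return filename.substr(0, lastSlash + 1);
}

int main() {
    std::string frameFile = "assets:/frames/capture.json"; // hypothetical frame file
    std::string ktxFile = "textures/0.ktx";                // relative entry from the JSON
    std::cout << baseDirOf(frameFile) + ktxFile << "\n";   // assets:/frames/textures/0.ktx
}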

View file

@ -0,0 +1,11 @@
if (ANDROID)
set(TARGET_NAME oculusMobile)
# don't use the setup_hifi_library macro; we don't want ANY Qt dependencies
file(GLOB_RECURSE LIB_SRCS "src/*.h" "src/*.cpp" "src/*.c" "src/*.qrc")
add_library(${TARGET_NAME} SHARED ${LIB_SRCS})
target_glm()
target_egl()
target_glad()
target_oculus_mobile()
target_link_libraries(${TARGET_NAME} android log)
endif()

View file

@ -0,0 +1,17 @@
//
// Created by Bradley Austin Davis on 2018/11/23
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include <mutex>
#include <functional>
namespace ovr {
using Mutex = std::mutex;
using Condition = std::condition_variable;
using Lock = std::unique_lock<Mutex>;
using Task = std::function<void()>;
}

View file

@ -0,0 +1,93 @@
//
// Created by Bradley Austin Davis on 2018/11/20
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Framebuffer.h"
#include <EGL/egl.h>
#include <glad/glad.h>
#include <android/log.h>
#include <VrApi.h>
#include <VrApi_Helpers.h>
using namespace ovr;
void Framebuffer::updateLayer(int eye, ovrLayerProjection2& layer, const ovrMatrix4f* projectionMatrix ) const {
auto& layerTexture = layer.Textures[eye];
layerTexture.ColorSwapChain = _swapChain;
layerTexture.SwapChainIndex = _index;
if (projectionMatrix) {
layerTexture.TexCoordsFromTanAngles = ovrMatrix4f_TanAngleMatrixFromProjection( projectionMatrix );
}
layerTexture.TextureRect = { 0, 0, 1, 1 };
}
void Framebuffer::create(const glm::uvec2& size) {
_size = size;
_index = 0;
_validTexture = false;
// Depth renderbuffer
glGenRenderbuffers(1, &_depth);
glBindRenderbuffer(GL_RENDERBUFFER, _depth);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, _size.x, _size.y);
glBindRenderbuffer(GL_RENDERBUFFER, 0);
// Framebuffer
glGenFramebuffers(1, &_fbo);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, _fbo);
glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _depth);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
_swapChain = vrapi_CreateTextureSwapChain3(VRAPI_TEXTURE_TYPE_2D, GL_RGBA8, _size.x, _size.y, 1, 3);
_length = vrapi_GetTextureSwapChainLength(_swapChain);
if (!_length) {
__android_log_write(ANDROID_LOG_WARN, "QQQ_OVR", "Unable to count swap chain textures");
return;
}
for (int i = 0; i < _length; ++i) {
GLuint chainTexId = vrapi_GetTextureSwapChainHandle(_swapChain, i);
glBindTexture(GL_TEXTURE_2D, chainTexId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
glBindTexture(GL_TEXTURE_2D, 0);
}
void Framebuffer::destroy() {
if (0 != _fbo) {
glDeleteFramebuffers(1, &_fbo);
_fbo = 0;
}
if (0 != _depth) {
glDeleteRenderbuffers(1, &_depth);
_depth = 0;
}
if (_swapChain != nullptr) {
vrapi_DestroyTextureSwapChain(_swapChain);
_swapChain = nullptr;
}
_index = -1;
_length = -1;
}
void Framebuffer::advance() {
_index = (_index + 1) % _length;
_validTexture = false;
}
void Framebuffer::bind() {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, _fbo);
if (!_validTexture) {
GLuint chainTexId = vrapi_GetTextureSwapChainHandle(_swapChain, _index);
glFramebufferTexture(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, chainTexId, 0);
_validTexture = true;
}
}

View file

@ -0,0 +1,34 @@
//
// Created by Bradley Austin Davis on 2018/11/20
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include <cstdint>
#include <glm/glm.hpp>
#include <VrApi_Types.h>
namespace ovr {
struct Framebuffer {
public:
void updateLayer(int eye, ovrLayerProjection2& layer, const ovrMatrix4f* projectionMatrix = nullptr) const;
void create(const glm::uvec2& size);
void advance();
void destroy();
void bind();
uint32_t _depth { 0 };
uint32_t _fbo{ 0 };
int _length{ -1 };
int _index{ -1 };
bool _validTexture{ false };
glm::uvec2 _size;
ovrTextureSwapChain* _swapChain{ nullptr };
};
} // namespace ovr
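
The struct above is a thin wrapper over a VrApi texture swap chain plus a GL framebuffer; a hypothetical per-eye render step sketches the intended bind/updateLayer/advance cycle (the layer and projection come from the caller):

#include <VrApi_Types.h>
#include "Framebuffer.h"

// Sketch only: assumes a current EGL context and a framebuffer on which
// create() has already been called.
void renderEye(ovr::Framebuffer& fbo, int eye, ovrLayerProjection2& layer,
               const ovrMatrix4f& projection) {
    fbo.bind();                               // attach the current swap-chain texture
    // ... draw or blit the eye image into the bound framebuffer here ...
    fbo.updateLayer(eye, layer, &projection); // point the layer at this image
    fbo.advance();                            // step to the next swap-chain image
}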

View file

@ -0,0 +1,182 @@
//
// Created by Bradley Austin Davis on 2018/11/15
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "GLContext.h"
#include <array>
#include <vector>
#include <mutex>
#include <android/log.h>
#if !defined(EGL_OPENGL_ES3_BIT_KHR)
#define EGL_OPENGL_ES3_BIT_KHR 0x0040
#endif
using namespace ovr;
static void* getGlProcessAddress(const char *namez) {
auto result = eglGetProcAddress(namez);
return (void*)result;
}
void GLContext::initModule() {
static std::once_flag once;
std::call_once(once, [&]{
gladLoadGLES2Loader(getGlProcessAddress);
});
}
void APIENTRY debugMessageCallback(GLenum source,
GLenum type,
GLuint id,
GLenum severity,
GLsizei length,
const GLchar* message,
const void* userParam) {
if (type == GL_DEBUG_TYPE_PERFORMANCE_KHR) {
return;
}
switch (severity) {
case GL_DEBUG_SEVERITY_HIGH:
case GL_DEBUG_SEVERITY_MEDIUM:
break;
default:
return;
}
__android_log_write(ANDROID_LOG_WARN, "QQQ_GL", message);
}
GLContext::~GLContext() {
destroy();
}
EGLConfig GLContext::findConfig(EGLDisplay display) {
// Do NOT use eglChooseConfig, because the Android EGL code pushes in multisample
// flags in eglChooseConfig if the user has selected the "force 4x MSAA" option in
// settings, and that is completely wasted for our warp target.
std::vector<EGLConfig> configs;
{
const int MAX_CONFIGS = 1024;
EGLConfig configsBuffer[MAX_CONFIGS];
EGLint numConfigs = 0;
if (eglGetConfigs(display, configsBuffer, MAX_CONFIGS, &numConfigs) == EGL_FALSE) {
__android_log_print(ANDROID_LOG_WARN, "QQQ_GL", "Failed to fetch configs");
return 0;
}
configs.resize(numConfigs);
memcpy(configs.data(), configsBuffer, sizeof(EGLConfig) * numConfigs);
}
std::vector<std::pair<EGLint, EGLint>> configAttribs{
{ EGL_RED_SIZE, 8 }, { EGL_GREEN_SIZE, 8 }, { EGL_BLUE_SIZE, 8 }, { EGL_ALPHA_SIZE, 8 },
{ EGL_DEPTH_SIZE, 0 }, { EGL_STENCIL_SIZE, 0 }, { EGL_SAMPLES, 0 },
};
auto matchAttrib = [&](EGLConfig config, const std::pair<EGLint, EGLint>& attribAndValue) {
EGLint value = 0;
eglGetConfigAttrib(display, config, attribAndValue.first, &value);
return (attribAndValue.second == value);
};
auto matchAttribFlags = [&](EGLConfig config, const std::pair<EGLint, EGLint>& attribAndValue) {
EGLint value = 0;
eglGetConfigAttrib(display, config, attribAndValue.first, &value);
return (value & attribAndValue.second) == attribAndValue.second;
};
auto matchConfig = [&](EGLConfig config) {
if (!matchAttribFlags(config, { EGL_RENDERABLE_TYPE, EGL_OPENGL_ES3_BIT_KHR})) {
return false;
}
// The pbuffer config also needs to be compatible with normal window rendering
// so it can share textures with the window context.
if (!matchAttribFlags(config, { EGL_SURFACE_TYPE, EGL_WINDOW_BIT | EGL_PBUFFER_BIT})) {
return false;
}
for (const auto& attrib : configAttribs) {
if (!matchAttrib(config, attrib)) {
return false;
}
}
return true;
};
for (const auto& config : configs) {
if (matchConfig(config)) {
return config;
}
}
return 0;
}
bool GLContext::makeCurrent() {
return eglMakeCurrent(display, surface, surface, context) != EGL_FALSE;
}
void GLContext::doneCurrent() {
eglMakeCurrent(display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
}
bool GLContext::create(EGLDisplay display, EGLContext shareContext) {
this->display = display;
auto config = findConfig(display);
if (config == 0) {
__android_log_print(ANDROID_LOG_WARN, "QQQ_GL", "Failed eglChooseConfig");
return false;
}
EGLint contextAttribs[] = { EGL_CONTEXT_CLIENT_VERSION, 3, EGL_NONE };
context = eglCreateContext(display, config, shareContext, contextAttribs);
if (context == EGL_NO_CONTEXT) {
__android_log_print(ANDROID_LOG_WARN, "QQQ_GL", "Failed eglCreateContext");
return false;
}
const EGLint surfaceAttribs[] = { EGL_WIDTH, 16, EGL_HEIGHT, 16, EGL_NONE };
surface = eglCreatePbufferSurface(display, config, surfaceAttribs);
if (surface == EGL_NO_SURFACE) {
__android_log_print(ANDROID_LOG_WARN, "QQQ_GL", "Failed eglCreatePbufferSurface");
return false;
}
if (!makeCurrent()) {
__android_log_print(ANDROID_LOG_WARN, "QQQ_GL", "Failed eglMakeCurrent");
return false;
}
ovr::GLContext::initModule();
#ifndef NDEBUG
glDebugMessageCallback(debugMessageCallback, this);
glEnable(GL_DEBUG_OUTPUT);
glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS);
#endif
return true;
}
void GLContext::destroy() {
if (context != EGL_NO_CONTEXT) {
eglDestroyContext(display, context);
context = EGL_NO_CONTEXT;
}
if (surface != EGL_NO_SURFACE) {
eglDestroySurface(display, surface);
surface = EGL_NO_SURFACE;
}
}
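
A minimal sketch of standing up a share context with this class (the display and share-context arguments mirror what VrSurface::init does later in this change; the wrapper function is hypothetical):

#include <functional>
#include "GLContext.h"

// Hypothetical: create an offscreen context sharing with whatever context is
// current on this thread, run some GL work on it, then tear it down.
bool withSharedContext(const std::function<void()>& glWork) {
    ovr::GLContext context;
    if (!context.create(eglGetDisplay(EGL_DEFAULT_DISPLAY), eglGetCurrentContext())) {
        return false;
    }
    context.makeCurrent();
    glWork();
    context.doneCurrent();
    context.destroy();
    return true;
}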

View file

@ -0,0 +1,37 @@
//
//
// Created by Bradley Austin Davis on 2018/11/15
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include <memory>
#include <EGL/egl.h>
#include <glad/glad.h>
namespace ovr {
struct GLContext {
using Pointer = std::shared_ptr<GLContext>;
EGLSurface surface{ EGL_NO_SURFACE };
EGLContext context{ EGL_NO_CONTEXT };
EGLDisplay display{ EGL_NO_DISPLAY };
~GLContext();
static EGLConfig findConfig(EGLDisplay display);
bool makeCurrent();
void doneCurrent();
bool create(EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY), EGLContext shareContext = EGL_NO_CONTEXT);
void destroy();
operator bool() const { return context != EGL_NO_CONTEXT; }
static void initModule();
};
}
#define CHECK_GL_ERROR() if(false) {} // intentionally a no-op; GL errors surface through the debug callback installed in GLContext::create

View file

@ -0,0 +1,38 @@
//
// Created by Bradley Austin Davis on 2018/11/15
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Helpers.h"
#include <atomic>
#include <algorithm>
#include <android/log.h>
#include <VrApi_Helpers.h>
using namespace ovr;
void Fov::extend(const Fov& other) {
for (size_t i = 0; i < 4; ++i) {
leftRightUpDown[i] = std::max(leftRightUpDown[i], other.leftRightUpDown[i]);
}
}
void Fov::extract(const ovrMatrix4f& mat) {
auto& fs = leftRightUpDown;
ovrMatrix4f_ExtractFov( &mat, fs, fs + 1, fs + 2, fs + 3);
}
glm::mat4 Fov::withZ(float nearZ, float farZ) const {
const auto& fs = leftRightUpDown;
return ovr::toGlm(ovrMatrix4f_CreateProjectionAsymmetricFov(fs[0], fs[1], fs[2], fs[3], nearZ, farZ));
}
glm::mat4 Fov::withZ(const glm::mat4& other) const {
// FIXME
return withZ(0.01f, 1000.0f);
}

View file

@ -0,0 +1,94 @@
//
// Created by Bradley Austin Davis on 2018/11/15
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include <functional>
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <VrApi_Types.h>
namespace ovr {
struct Fov {
float leftRightUpDown[4];
Fov() {}
Fov(const ovrMatrix4f& mat) { extract(mat); }
void extract(const ovrMatrix4f& mat);
void extend(const Fov& other);
glm::mat4 withZ(const glm::mat4& other) const;
glm::mat4 withZ(float nearZ, float farZ) const;
};
// Convenience method for looping over each eye with a lambda
static inline void for_each_eye(const std::function<void(ovrEye)>& f) {
f(VRAPI_EYE_LEFT);
f(VRAPI_EYE_RIGHT);
}
static inline void for_each_hand(const std::function<void(ovrHandedness)>& f) {
f(VRAPI_HAND_LEFT);
f(VRAPI_HAND_RIGHT);
}
static inline glm::mat4 toGlm(const ovrMatrix4f& om) {
return glm::transpose(glm::make_mat4(&om.M[0][0]));
}
static inline glm::vec3 toGlm(const ovrVector3f& ov) {
return glm::make_vec3(&ov.x);
}
static inline glm::vec2 toGlm(const ovrVector2f& ov) {
return glm::make_vec2(&ov.x);
}
static inline glm::quat toGlm(const ovrQuatf& oq) {
return glm::make_quat(&oq.x);
}
static inline glm::mat4 toGlm(const ovrPosef& op) {
glm::mat4 orientation = glm::mat4_cast(toGlm(op.Orientation));
glm::mat4 translation = glm::translate(glm::mat4(), toGlm(op.Position));
return translation * orientation;
}
static inline ovrMatrix4f fromGlm(const glm::mat4& m) {
ovrMatrix4f result;
glm::mat4 transposed(glm::transpose(m));
memcpy(result.M, &(transposed[0][0]), sizeof(float) * 16);
return result;
}
static inline ovrVector3f fromGlm(const glm::vec3& v) {
return { v.x, v.y, v.z };
}
static inline ovrVector2f fromGlm(const glm::vec2& v) {
return { v.x, v.y };
}
static inline ovrQuatf fromGlm(const glm::quat& q) {
return { q.x, q.y, q.z, q.w };
}
static inline ovrPosef poseFromGlm(const glm::mat4& m) {
glm::vec3 translation = glm::vec3(m[3]) / m[3].w;
glm::quat orientation = glm::quat_cast(m);
ovrPosef result;
result.Orientation = fromGlm(orientation);
result.Position = fromGlm(translation);
return result;
}
}
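
As a usage sketch, extend() makes Fov suitable for building a single conservative projection that bounds both asymmetric eye frusta, e.g. for culling (the helper and its near/far values below are arbitrary illustrations, not part of the commit):

#include <glm/glm.hpp>
#include "Helpers.h"

// Hypothetical helper: merge both eye FOVs from a predicted tracking state
// into one projection matrix that covers them.
glm::mat4 combinedCullingProjection(const ovrTracking2& tracking) {
    ovr::Fov fovs[2];
    ovr::for_each_eye([&](ovrEye eye) {
        fovs[eye].extract(tracking.Eye[eye].ProjectionMatrix);
    });
    fovs[0].extend(fovs[1]);              // max half-angle on each of the four sides
    return fovs[0].withZ(0.01f, 1000.0f); // arbitrary near/far for illustration
}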

View file

@ -0,0 +1,40 @@
//
// Created by Bradley Austin Davis on 2018/11/23
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "TaskQueue.h"
using namespace ovr;
void TaskQueue::submitTaskBlocking(Lock& lock, const Task& newTask) {
_task = newTask;
_taskPending = true;
_taskCondition.wait(lock, [=]() -> bool { return !_taskPending; });
}
void TaskQueue::submitTaskBlocking(const Task& task) {
Lock lock(_mutex);
submitTaskBlocking(lock, task);
}
void TaskQueue::pollTask() {
Lock lock(_mutex);
if (_taskPending) {
_task();
_taskPending = false;
_taskCondition.notify_one();
}
}
void TaskQueue::withLock(const Task& task) {
Lock lock(_mutex);
task();
}
void TaskQueue::withLockConditional(const LockTask& task) {
Lock lock(_mutex);
task(lock);
}

View file

@ -0,0 +1,42 @@
//
// Created by Bradley Austin Davis on 2018/11/15
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include <mutex>
#include <functional>
namespace ovr {
using Mutex = std::mutex;
using Condition = std::condition_variable;
using Lock = std::unique_lock<Mutex>;
using Task = std::function<void()>;
using LockTask = std::function<void(Lock& lock)>;
class TaskQueue {
public:
// Execute a task on another thread
void submitTaskBlocking(const Task& task);
void submitTaskBlocking(Lock& lock, const Task& task);
void pollTask();
void withLock(const Task& task);
void withLockConditional(const LockTask& task);
private:
Mutex _mutex;
Task _task;
bool _taskPending{ false };
Condition _taskCondition;
};
}
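
Note that TaskQueue is poll-driven rather than signal-driven: submitTaskBlocking() parks the calling thread until the owning thread's next pollTask() call executes the task and notifies the condition. A minimal sketch of that handshake, with both threads hypothetical:

#include <atomic>
#include <thread>
#include "TaskQueue.h"

int main() {
    ovr::TaskQueue queue;
    std::atomic<bool> running{ true };
    // hypothetical owner (render) thread: services the queue in its loop
    std::thread owner([&] {
        while (running) {
            queue.pollTask();
        }
    });
    // blocks here until the owner thread runs the task
    queue.submitTaskBlocking([] { /* touch owner-thread-only state */ });
    running = false;
    owner.join();
}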

View file

@ -0,0 +1,337 @@
//
// Created by Bradley Austin Davis on 2018/11/15
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "VrHandler.h"
#include <android/native_window_jni.h>
#include <android/log.h>
#include <unistd.h>
#include <VrApi.h>
#include <VrApi_Helpers.h>
#include <VrApi_Types.h>
//#include <OVR_Platform.h>
#include "GLContext.h"
#include "Helpers.h"
#include "Framebuffer.h"
using namespace ovr;
static thread_local bool isRenderThread { false };
struct VrSurface : public TaskQueue {
using HandlerTask = VrHandler::HandlerTask;
JavaVM* vm{nullptr};
jobject oculusActivity{ nullptr };
ANativeWindow* nativeWindow{ nullptr };
VrHandler* handler{nullptr};
ovrMobile* session{nullptr};
bool resumed { false };
GLContext vrglContext;
Framebuffer eyeFbos[2];
uint32_t readFbo{0};
std::atomic<uint32_t> presentIndex{1};
double displayTime{0};
static constexpr float EYE_BUFFER_SCALE = 1.0f;
void onCreate(JNIEnv* env, jobject activity) {
env->GetJavaVM(&vm);
oculusActivity = env->NewGlobalRef(activity);
}
void setResumed(bool newResumed) {
this->resumed = newResumed;
submitRenderThreadTask([this](VrHandler* handler){ updateVrMode(); });
}
void setNativeWindow(ANativeWindow* newNativeWindow) {
auto oldNativeWindow = nativeWindow;
nativeWindow = newNativeWindow;
if (oldNativeWindow) {
ANativeWindow_release(oldNativeWindow);
}
submitRenderThreadTask([this](VrHandler* handler){ updateVrMode(); });
}
void init() {
if (!handler) {
return;
}
EGLContext currentContext = eglGetCurrentContext();
EGLDisplay currentDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
vrglContext.create(currentDisplay, currentContext);
vrglContext.makeCurrent();
glm::uvec2 eyeTargetSize;
withEnv([&](JNIEnv* env){
ovrJava java{ vm, env, oculusActivity };
eyeTargetSize = glm::uvec2 {
vrapi_GetSystemPropertyInt(&java, VRAPI_SYS_PROP_SUGGESTED_EYE_TEXTURE_WIDTH) * EYE_BUFFER_SCALE,
vrapi_GetSystemPropertyInt(&java, VRAPI_SYS_PROP_SUGGESTED_EYE_TEXTURE_HEIGHT) * EYE_BUFFER_SCALE,
};
});
__android_log_print(ANDROID_LOG_WARN, "QQQ_OVR", "QQQ Eye Size %d, %d", eyeTargetSize.x, eyeTargetSize.y);
ovr::for_each_eye([&](ovrEye eye) {
eyeFbos[eye].create(eyeTargetSize);
});
glGenFramebuffers(1, &readFbo);
vrglContext.doneCurrent();
}
void shutdown() {
}
void setHandler(VrHandler *newHandler) {
withLock([&] {
isRenderThread = newHandler != nullptr;
if (handler != newHandler) {
shutdown();
handler = newHandler;
init();
if (handler) {
updateVrMode();
}
}
});
}
void submitRenderThreadTask(const HandlerTask &task) {
withLockConditional([&](Lock &lock) {
if (handler != nullptr) {
submitTaskBlocking(lock, [&] {
task(handler);
});
}
});
}
void withEnv(const std::function<void(JNIEnv*)>& f) {
JNIEnv* env = nullptr;
bool attached = false;
vm->GetEnv((void**)&env, JNI_VERSION_1_6);
if (!env) {
attached = true;
vm->AttachCurrentThread(&env, nullptr);
}
f(env);
if (attached) {
vm->DetachCurrentThread();
}
}
void updateVrMode() {
// For VR mode to be valid, the activity must be between an onResume and
// an onPause call and must additionally have a valid native window handle
bool vrReady = resumed && nullptr != nativeWindow;
// If we're IN VR mode, we'll have a non-null ovrMobile pointer in session
bool vrRunning = session != nullptr;
if (vrReady != vrRunning) {
if (vrRunning) {
__android_log_write(ANDROID_LOG_WARN, "QQQ_OVR", "vrapi_LeaveVrMode");
vrapi_LeaveVrMode(session);
session = nullptr;
oculusActivity = nullptr;
} else {
__android_log_write(ANDROID_LOG_WARN, "QQQ_OVR", "vrapi_EnterVrMode");
withEnv([&](JNIEnv* env){
ovrJava java{ vm, env, oculusActivity };
ovrModeParms modeParms = vrapi_DefaultModeParms(&java);
modeParms.Flags |= VRAPI_MODE_FLAG_NATIVE_WINDOW;
modeParms.Display = (unsigned long long) vrglContext.display;
modeParms.ShareContext = (unsigned long long) vrglContext.context;
modeParms.WindowSurface = (unsigned long long) nativeWindow;
session = vrapi_EnterVrMode(&modeParms);
ovrPosef trackingTransform = vrapi_GetTrackingTransform( session, VRAPI_TRACKING_TRANSFORM_SYSTEM_CENTER_EYE_LEVEL);
vrapi_SetTrackingTransform( session, trackingTransform );
vrapi_SetPerfThread(session, VRAPI_PERF_THREAD_TYPE_RENDERER, pthread_self());
vrapi_SetClockLevels(session, 2, 4);
vrapi_SetExtraLatencyMode(session, VRAPI_EXTRA_LATENCY_MODE_DYNAMIC);
vrapi_SetDisplayRefreshRate(session, 72);
});
}
}
}
void presentFrame(uint32_t sourceTexture, const glm::uvec2 &sourceSize, const ovrTracking2& tracking) {
ovrLayerProjection2 layer = vrapi_DefaultLayerProjection2();
layer.HeadPose = tracking.HeadPose;
if (sourceTexture) {
glBindFramebuffer(GL_READ_FRAMEBUFFER, readFbo);
glFramebufferTexture(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, sourceTexture, 0);
GLenum framebufferStatus = glCheckFramebufferStatus(GL_READ_FRAMEBUFFER);
if (GL_FRAMEBUFFER_COMPLETE != framebufferStatus) {
__android_log_print(ANDROID_LOG_WARN, "QQQ_OVR", "incomplete framebuffer");
}
}
GLenum invalidateAttachment = GL_COLOR_ATTACHMENT0;
ovr::for_each_eye([&](ovrEye eye) {
const auto &eyeTracking = tracking.Eye[eye];
auto &eyeFbo = eyeFbos[eye];
const auto &destSize = eyeFbo._size;
eyeFbo.bind();
glInvalidateFramebuffer(GL_DRAW_FRAMEBUFFER, 1, &invalidateAttachment);
if (sourceTexture) {
// the source is assumed to be a side-by-side stereo image:
// left half for the left eye, right half for the right eye
auto sourceWidth = sourceSize.x / 2;
auto sourceX = (eye == VRAPI_EYE_LEFT) ? 0 : sourceWidth;
glBlitFramebuffer(
sourceX, 0, sourceX + sourceWidth, sourceSize.y,
0, 0, destSize.x, destSize.y,
GL_COLOR_BUFFER_BIT, GL_NEAREST);
}
eyeFbo.updateLayer(eye, layer, &eyeTracking.ProjectionMatrix);
eyeFbo.advance();
});
if (sourceTexture) {
glInvalidateFramebuffer(GL_READ_FRAMEBUFFER, 1, &invalidateAttachment);
glFramebufferTexture(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, 0, 0);
}
glFlush();
ovrLayerHeader2 *layerHeader = &layer.Header;
ovrSubmitFrameDescription2 frameDesc = {};
frameDesc.SwapInterval = 2;
frameDesc.FrameIndex = presentIndex;
frameDesc.DisplayTime = displayTime;
frameDesc.LayerCount = 1;
frameDesc.Layers = &layerHeader;
vrapi_SubmitFrame2(session, &frameDesc);
++presentIndex;
}
ovrTracking2 beginFrame() {
displayTime = vrapi_GetPredictedDisplayTime(session, presentIndex);
return vrapi_GetPredictedTracking2(session, displayTime);
}
};
static VrSurface SURFACE;
bool VrHandler::vrActive() const {
return SURFACE.session != nullptr;
}
void VrHandler::setHandler(VrHandler* handler) {
SURFACE.setHandler(handler);
}
void VrHandler::pollTask() {
SURFACE.pollTask();
}
void VrHandler::makeCurrent() {
if (!SURFACE.vrglContext.makeCurrent()) {
__android_log_write(ANDROID_LOG_WARN, "QQQ", "Failed to make GL current");
}
}
void VrHandler::doneCurrent() {
SURFACE.vrglContext.doneCurrent();
}
uint32_t VrHandler::currentPresentIndex() const {
return SURFACE.presentIndex;
}
ovrTracking2 VrHandler::beginFrame() {
return SURFACE.beginFrame();
}
void VrHandler::presentFrame(uint32_t sourceTexture, const glm::uvec2 &sourceSize, const ovrTracking2& tracking) const {
SURFACE.presentFrame(sourceTexture, sourceSize, tracking);
}
bool VrHandler::withOvrJava(const OvrJavaTask& task) {
SURFACE.withEnv([&](JNIEnv* env){
ovrJava java{ SURFACE.vm, env, SURFACE.oculusActivity };
task(&java);
});
return true;
}
bool VrHandler::withOvrMobile(const OvrMobileTask &task) {
auto sessionTask = [&]()->bool{
if (!SURFACE.session) {
return false;
}
task(SURFACE.session);
return true;
};
if (isRenderThread) {
return sessionTask();
}
bool result = false;
SURFACE.withLock([&]{
result = sessionTask();
});
return result;
}
void VrHandler::initVr(const char* appId) {
withOvrJava([&](const ovrJava* java){
ovrInitParms initParms = vrapi_DefaultInitParms(java);
initParms.GraphicsAPI = VRAPI_GRAPHICS_API_OPENGL_ES_3;
if (vrapi_Initialize(&initParms) != VRAPI_INITIALIZE_SUCCESS) {
__android_log_write(ANDROID_LOG_WARN, "QQQ_OVR", "Failed vrapi init");
}
});
// if (appId) {
// auto platformInitResult = ovr_PlatformInitializeAndroid(appId, activity.object(), env);
// if (ovrPlatformInitialize_Success != platformInitResult) {
// __android_log_write(ANDROID_LOG_WARN, "QQQ_OVR", "Failed ovr platform init");
// }
// }
}
void VrHandler::shutdownVr() {
vrapi_Shutdown();
}
extern "C" {
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *, void *) {
__android_log_write(ANDROID_LOG_WARN, "QQQ", "oculusMobile::JNI_OnLoad");
return JNI_VERSION_1_6;
}
JNIEXPORT void JNICALL Java_io_highfidelity_oculus_OculusMobileActivity_nativeOnCreate(JNIEnv* env, jobject obj) {
__android_log_write(ANDROID_LOG_WARN, "QQQ_JNI", __FUNCTION__);
SURFACE.onCreate(env, obj);
}
JNIEXPORT void JNICALL Java_io_highfidelity_oculus_OculusMobileActivity_nativeOnDestroy(JNIEnv*, jclass) {
__android_log_write(ANDROID_LOG_WARN, "QQQ_JNI", __FUNCTION__);
}
JNIEXPORT void JNICALL Java_io_highfidelity_oculus_OculusMobileActivity_nativeOnResume(JNIEnv*, jclass) {
__android_log_write(ANDROID_LOG_WARN, "QQQ_JNI", __FUNCTION__);
SURFACE.setResumed(true);
}
JNIEXPORT void JNICALL Java_io_highfidelity_oculus_OculusMobileActivity_nativeOnPause(JNIEnv*, jclass) {
__android_log_write(ANDROID_LOG_WARN, "QQQ_JNI", __FUNCTION__);
SURFACE.setResumed(false);
}
JNIEXPORT void JNICALL Java_io_highfidelity_oculus_OculusMobileActivity_nativeOnSurfaceChanged(JNIEnv* env, jclass, jobject surface) {
__android_log_write(ANDROID_LOG_WARN, "QQQ_JNI", __FUNCTION__);
SURFACE.setNativeWindow(surface ? ANativeWindow_fromSurface( env, surface ) : nullptr);
}
} // extern "C"

View file

@ -0,0 +1,47 @@
//
// Created by Bradley Austin Davis on 2018/11/15
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include <glm/glm.hpp>
#include <jni.h>
#include <VrApi_Types.h>
#include "TaskQueue.h"
typedef struct ovrMobile ovrMobile;
namespace ovr {
class VrHandler {
public:
using HandlerTask = std::function<void(VrHandler*)>;
using OvrMobileTask = std::function<void(ovrMobile*)>;
using OvrJavaTask = std::function<void(const ovrJava*)>;
static void setHandler(VrHandler* handler);
static bool withOvrMobile(const OvrMobileTask& task);
protected:
static void initVr(const char* appId = nullptr);
static void shutdownVr();
static bool withOvrJava(const OvrJavaTask& task);
uint32_t currentPresentIndex() const;
ovrTracking2 beginFrame();
void presentFrame(uint32_t textureId, const glm::uvec2& size, const ovrTracking2& tracking) const;
bool vrActive() const;
void pollTask();
void makeCurrent();
void doneCurrent();
};
}
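
Putting the protected API together, a subclass's per-frame step would look roughly like the following sketch (assumptions: setHandler() was called from the render thread, and the source texture is side-by-side stereo with the left eye in the left half, as presentFrame()'s blit expects; the class and method names are hypothetical):

#include "VrHandler.h"

// Hypothetical VrHandler subclass showing the beginFrame/presentFrame cycle.
class FramePlayerHandler : public ovr::VrHandler {
public:
    void renderFrame(uint32_t stereoTexture, const glm::uvec2& size) {
        pollTask();                    // service tasks submitted from other threads
        if (!vrActive()) {
            return;                    // not between onResume/onPause, or no window yet
        }
        ovrTracking2 tracking = beginFrame();        // predicted pose for this frame
        // ... render both eyes into stereoTexture using tracking ...
        presentFrame(stereoTexture, size, tracking); // blit per eye and submit
    }
};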

View file

@ -0,0 +1,29 @@
#
# Created by Bradley Austin Davis on 2018/11/15
# Copyright 2013-2018 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http:#www.apache.org/licenses/LICENSE-2.0.html
#
if (ANDROID)
set(TARGET_NAME oculusMobilePlugin)
setup_hifi_library(AndroidExtras Multimedia)
# if we were passed an Oculus App ID for entitlement checks, send that along
if (DEFINED ENV{OCULUS_APP_ID})
target_compile_definitions(${TARGET_NAME} PRIVATE OCULUS_APP_ID="$ENV{OCULUS_APP_ID}")
endif ()
link_hifi_libraries(
shared task gl shaders gpu controllers ui qml
plugins ui-plugins display-plugins input-plugins
audio-client networking render-utils
render graphics
oculusMobile
${PLATFORM_GL_BACKEND}
)
include_hifi_library_headers(octree)
target_oculus_mobile()
endif()

View file

@ -0,0 +1,4 @@
#include "Logging.h"
Q_LOGGING_CATEGORY(displayplugins, "hifi.plugins.display")
Q_LOGGING_CATEGORY(oculusLog, "hifi.plugins.display.oculus")

View file

@ -0,0 +1,13 @@
//
// Created by Bradley Austin Davis on 2018/11/20
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include <QtCore/QLoggingCategory>
Q_DECLARE_LOGGING_CATEGORY(displayplugins)
Q_DECLARE_LOGGING_CATEGORY(oculusLog)

View file

@ -0,0 +1,694 @@
//
// Created by Bradley Austin Davis on 2018/11/15
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "OculusMobileControllerManager.h"
#include <array>
#include <ui-plugins/PluginContainer.h>
#include <controllers/UserInputMapper.h>
#include <controllers/StandardControls.h>
#include <input-plugins/KeyboardMouseDevice.h>
#include <PerfStat.h>
#include <PathUtils.h>
#include <NumericalConstants.h>
#include <StreamUtils.h>
#include <VrApi.h>
#include <VrApi_Input.h>
#include <ovr/Helpers.h>
#include "Logging.h"
#include <ovr/VrHandler.h>
const char* OculusMobileControllerManager::NAME = "Oculus";
const quint64 LOST_TRACKING_DELAY = 3000000;
namespace ovr {
controller::Pose toControllerPose(ovrHandedness hand, const ovrRigidBodyPosef& handPose) {
// When the sensor-to-world rotation is identity the coordinate axes look like this:
//
// user
// forward
// -z
// |
// y| user
// y o----x right
// o-----x user
// | up
// |
// z
//
// Rift
// From ABOVE the hand canonical axes looks like this:
//
// | | | | y | | | |
// | | | | | | | | |
// | | | | |
// |left | / x---- + \ |right|
// | _/ z \_ |
// | | | |
// | | | |
//
// So when the user is in Rift space facing the -zAxis with hands outstretched and palms down
// the rotation to align the Touch axes with those of the hands is:
//
// touchToHand = halfTurnAboutY * quarterTurnAboutX
// Due to how the Touch controllers fit into the palm there is an offset that is different for each hand.
// You can think of this offset as the inverse of the measured rotation when the hands are posed, such that
// the combination (measurement * offset) is identity at this orientation.
//
// Qoffset = glm::inverse(deltaRotation when hand is posed fingers forward, palm down)
//
// An approximate offset for the Touch can be obtained by inspection:
//
// Qoffset = glm::inverse(glm::angleAxis(sign * PI/2.0f, zAxis) * glm::angleAxis(PI/4.0f, xAxis))
//
// So the full equation is:
//
// Q = combinedMeasurement * touchToHand
//
// Q = (deltaQ * QOffset) * (yFlip * quarterTurnAboutX)
//
// Q = (deltaQ * inverse(deltaQForAlignedHand)) * (yFlip * quarterTurnAboutX)
static const glm::quat yFlip = glm::angleAxis(PI, Vectors::UNIT_Y);
static const glm::quat quarterX = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_X);
static const glm::quat touchToHand = yFlip * quarterX;
static const glm::quat leftQuarterZ = glm::angleAxis(-PI_OVER_TWO, Vectors::UNIT_Z);
static const glm::quat rightQuarterZ = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_Z);
static const glm::quat leftRotationOffset = glm::inverse(leftQuarterZ) * touchToHand;
static const glm::quat rightRotationOffset = glm::inverse(rightQuarterZ) * touchToHand;
static const float CONTROLLER_LENGTH_OFFSET = 0.0762f; // three inches
static const glm::vec3 CONTROLLER_OFFSET =
glm::vec3(CONTROLLER_LENGTH_OFFSET / 2.0f, -CONTROLLER_LENGTH_OFFSET / 2.0f, CONTROLLER_LENGTH_OFFSET * 1.5f);
static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET;
static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET;
auto translationOffset = (hand == VRAPI_HAND_LEFT ? leftTranslationOffset : rightTranslationOffset);
auto rotationOffset = (hand == VRAPI_HAND_LEFT ? leftRotationOffset : rightRotationOffset);
glm::quat rotation = toGlm(handPose.Pose.Orientation);
controller::Pose pose;
pose.translation = toGlm(handPose.Pose.Position);
pose.translation += rotation * translationOffset;
pose.rotation = rotation * rotationOffset;
pose.angularVelocity = rotation * toGlm(handPose.AngularVelocity);
pose.velocity = toGlm(handPose.LinearVelocity);
pose.valid = true;
return pose;
}
controller::Pose toControllerPose(ovrHandedness hand,
const ovrRigidBodyPosef& handPose,
const ovrRigidBodyPosef& lastHandPose) {
static const glm::quat yFlip = glm::angleAxis(PI, Vectors::UNIT_Y);
static const glm::quat quarterX = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_X);
static const glm::quat touchToHand = yFlip * quarterX;
static const glm::quat leftQuarterZ = glm::angleAxis(-PI_OVER_TWO, Vectors::UNIT_Z);
static const glm::quat rightQuarterZ = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_Z);
static const glm::quat leftRotationOffset = glm::inverse(leftQuarterZ) * touchToHand;
static const glm::quat rightRotationOffset = glm::inverse(rightQuarterZ) * touchToHand;
static const float CONTROLLER_LENGTH_OFFSET = 0.0762f; // three inches
static const glm::vec3 CONTROLLER_OFFSET =
glm::vec3(CONTROLLER_LENGTH_OFFSET / 2.0f, -CONTROLLER_LENGTH_OFFSET / 2.0f, CONTROLLER_LENGTH_OFFSET * 1.5f);
static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET;
static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET;
auto translationOffset = (hand == VRAPI_HAND_LEFT ? leftTranslationOffset : rightTranslationOffset);
auto rotationOffset = (hand == VRAPI_HAND_LEFT ? leftRotationOffset : rightRotationOffset);
glm::quat rotation = toGlm(handPose.Pose.Orientation);
controller::Pose pose;
pose.translation = toGlm(lastHandPose.Pose.Position);
pose.translation += rotation * translationOffset;
pose.rotation = rotation * rotationOffset;
pose.angularVelocity = toGlm(lastHandPose.AngularVelocity);
pose.velocity = toGlm(lastHandPose.LinearVelocity);
pose.valid = true;
return pose;
}
}
class OculusMobileInputDevice : public controller::InputDevice {
friend class OculusMobileControllerManager;
public:
using Pointer = std::shared_ptr<OculusMobileInputDevice>;
static Pointer check(ovrMobile* session);
OculusMobileInputDevice(ovrMobile* session, const std::vector<ovrInputTrackedRemoteCapabilities>& devicesCaps);
void updateHands(ovrMobile* session);
controller::Input::NamedVector getAvailableInputs() const override;
QString getDefaultMappingConfig() const override;
void update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) override;
void focusOutEvent() override;
bool triggerHapticPulse(float strength, float duration, controller::Hand hand) override;
private:
void handlePose(float deltaTime, const controller::InputCalibrationData& inputCalibrationData,
ovrHandedness hand, const ovrRigidBodyPosef& handPose);
void handleRotationForUntrackedHand(const controller::InputCalibrationData& inputCalibrationData,
ovrHandedness hand, const ovrRigidBodyPosef& handPose);
void handleHeadPose(float deltaTime, const controller::InputCalibrationData& inputCalibrationData,
const ovrRigidBodyPosef& headPose);
// perform an action when the TouchDevice mutex is acquired.
using Locker = std::unique_lock<std::recursive_mutex>;
template <typename F>
void withLock(F&& f) { Locker locker(_lock); f(); }
mutable std::recursive_mutex _lock;
ovrTracking2 _headTracking;
struct HandData {
HandData() {
state.Header.ControllerType = ovrControllerType_TrackedRemote;
}
float hapticDuration { 0.0f };
float hapticStrength { 0.0f };
bool valid{ false };
bool lostTracking{ false };
quint64 regainTrackingDeadline;
ovrRigidBodyPosef lastPose;
ovrInputTrackedRemoteCapabilities caps;
ovrInputStateTrackedRemote state;
ovrResult stateResult{ ovrError_NotInitialized };
ovrTracking tracking;
ovrResult trackingResult{ ovrError_NotInitialized };
bool setHapticFeedback(float strength, float duration) {
#if 0
bool success = true;
bool sessionSuccess = ovr::VrHandler::withOvrMobile([&](ovrMobile* session){
if (strength == 0.0f) {
hapticStrength = 0.0f;
hapticDuration = 0.0f;
} else {
hapticStrength = (duration > hapticDuration) ? strength : hapticStrength;
if (vrapi_SetHapticVibrationSimple(session, caps.Header.DeviceID, hapticStrength) != ovrSuccess) {
success = false;
}
hapticDuration = std::max(duration, hapticDuration);
}
});
return success && sessionSuccess;
#else
return true;
#endif
}
void stopHapticPulse() {
ovr::VrHandler::withOvrMobile([&](ovrMobile* session){
vrapi_SetHapticVibrationSimple(session, caps.Header.DeviceID, 0.0f);
});
}
bool isValid() const {
return (stateResult == ovrSuccess) && (trackingResult == ovrSuccess);
}
void update(ovrMobile* session, double time = 0.0) {
const auto& deviceId = caps.Header.DeviceID;
stateResult = vrapi_GetCurrentInputState(session, deviceId, &state.Header);
trackingResult = vrapi_GetInputTrackingState(session, deviceId, time, &tracking);
}
};
std::array<HandData, 2> _hands;
};
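The withLock helper above is a scoped-lock wrapper around the device's recursive mutex, so every access to shared hand state funnels through one code path. A self-contained sketch of the same pattern (LockedCounter is a hypothetical type, not part of this change):

#include <mutex>

// Same locking pattern as OculusMobileInputDevice::withLock, in isolation.
struct LockedCounter {
    using Locker = std::unique_lock<std::recursive_mutex>;
    template <typename F>
    void withLock(F&& f) { Locker locker(_lock); f(); }

    void increment() { withLock([&] { ++_value; }); }

    std::recursive_mutex _lock;
    int _value{ 0 };
};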
OculusMobileInputDevice::Pointer OculusMobileInputDevice::check(ovrMobile *session) {
Pointer result;
std::vector<ovrInputTrackedRemoteCapabilities> devicesCaps;
{
uint32_t deviceIndex { 0 };
ovrInputCapabilityHeader capsHeader;
while (vrapi_EnumerateInputDevices(session, deviceIndex, &capsHeader) >= 0) {
if (capsHeader.Type == ovrControllerType_TrackedRemote) {
ovrInputTrackedRemoteCapabilities caps;
caps.Header = capsHeader;
vrapi_GetInputDeviceCapabilities(session, &caps.Header);
devicesCaps.push_back(caps);
}
++deviceIndex;
}
}
if (!devicesCaps.empty()) {
result.reset(new OculusMobileInputDevice(session, devicesCaps));
}
return result;
}
static OculusMobileInputDevice::Pointer oculusMobileControllers;
bool OculusMobileControllerManager::isHandController() const {
return static_cast<bool>(oculusMobileControllers);
}
bool OculusMobileControllerManager::isSupported() const {
return true;
}
bool OculusMobileControllerManager::activate() {
InputPlugin::activate();
checkForConnectedDevices();
return true;
}
void OculusMobileControllerManager::checkForConnectedDevices() {
if (oculusMobileControllers) {
return;
}
ovr::VrHandler::withOvrMobile([&](ovrMobile* session){
oculusMobileControllers = OculusMobileInputDevice::check(session);
if (oculusMobileControllers) {
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
userInputMapper->registerDevice(oculusMobileControllers);
}
});
}
void OculusMobileControllerManager::deactivate() {
InputPlugin::deactivate();
// unregister with UserInputMapper
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
if (oculusMobileControllers) {
userInputMapper->removeDevice(oculusMobileControllers->getDeviceID());
oculusMobileControllers.reset();
}
}
void OculusMobileControllerManager::pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {
PerformanceTimer perfTimer("OculusMobileInputDevice::update");
checkForConnectedDevices();
if (!oculusMobileControllers) {
return;
}
bool updated = ovr::VrHandler::withOvrMobile([&](ovrMobile* session){
oculusMobileControllers->updateHands(session);
});
if (updated) {
oculusMobileControllers->update(deltaTime, inputCalibrationData);
}
}
void OculusMobileControllerManager::pluginFocusOutEvent() {
if (oculusMobileControllers) {
oculusMobileControllers->focusOutEvent();
}
}
QStringList OculusMobileControllerManager::getSubdeviceNames() {
QStringList devices;
if (oculusMobileControllers) {
devices << oculusMobileControllers->getName();
}
return devices;
}
using namespace controller;
static const std::vector<std::pair<ovrButton, StandardButtonChannel>> BUTTON_MAP { {
{ ovrButton_Up, DU },
{ ovrButton_Down, DD },
{ ovrButton_Left, DL },
{ ovrButton_Right, DR },
{ ovrButton_Enter, START },
{ ovrButton_Back, BACK },
{ ovrButton_X, X },
{ ovrButton_Y, Y },
{ ovrButton_A, A },
{ ovrButton_B, B },
{ ovrButton_LThumb, LS },
{ ovrButton_RThumb, RS },
//{ ovrButton_LShoulder, LB },
//{ ovrButton_RShoulder, RB },
} };
static const std::vector<std::pair<ovrTouch, StandardButtonChannel>> LEFT_TOUCH_MAP { {
{ ovrTouch_X, LEFT_PRIMARY_THUMB_TOUCH },
{ ovrTouch_Y, LEFT_SECONDARY_THUMB_TOUCH },
{ ovrTouch_LThumb, LS_TOUCH },
{ ovrTouch_ThumbUp, LEFT_THUMB_UP },
{ ovrTouch_IndexTrigger, LEFT_PRIMARY_INDEX_TOUCH },
{ ovrTouch_IndexPointing, LEFT_INDEX_POINT },
} };
static const std::vector<std::pair<ovrTouch, StandardButtonChannel>> RIGHT_TOUCH_MAP { {
{ ovrTouch_A, RIGHT_PRIMARY_THUMB_TOUCH },
{ ovrTouch_B, RIGHT_SECONDARY_THUMB_TOUCH },
{ ovrTouch_RThumb, RS_TOUCH },
{ ovrTouch_ThumbUp, RIGHT_THUMB_UP },
{ ovrTouch_IndexTrigger, RIGHT_PRIMARY_INDEX_TOUCH },
{ ovrTouch_IndexPointing, RIGHT_INDEX_POINT },
} };
void OculusMobileInputDevice::update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {
_buttonPressedMap.clear();
int numTrackedControllers = 0;
quint64 currentTime = usecTimestampNow();
handleHeadPose(deltaTime, inputCalibrationData, _headTracking.HeadPose);
static const auto REQUIRED_HAND_STATUS = VRAPI_TRACKING_STATUS_ORIENTATION_TRACKED | VRAPI_TRACKING_STATUS_POSITION_TRACKED;
ovr::for_each_hand([&](ovrHandedness hand) {
size_t handIndex = (hand == VRAPI_HAND_LEFT) ? 0 : 1;
int controller = (hand == VRAPI_HAND_LEFT) ? controller::LEFT_HAND : controller::RIGHT_HAND;
auto& handData = _hands[handIndex];
const auto& tracking = handData.tracking;
++numTrackedControllers;
// Disable hand tracking while in Oculus Dash (Dash renders its own hands)
// if (!hasInputFocus) {
// _poseStateMap.erase(controller);
// _poseStateMap[controller].valid = false;
// return;
// }
if (REQUIRED_HAND_STATUS == (tracking.Status & REQUIRED_HAND_STATUS)) {
_poseStateMap.erase(controller);
handlePose(deltaTime, inputCalibrationData, hand, tracking.HeadPose);
handData.lostTracking = false;
handData.lastPose = tracking.HeadPose;
return;
}
if (handData.lostTracking) {
if (currentTime > handData.regainTrackingDeadline) {
_poseStateMap.erase(controller);
_poseStateMap[controller].valid = false;
return;
}
} else {
quint64 deadlineToRegainTracking = currentTime + LOST_TRACKING_DELAY;
handData.regainTrackingDeadline = deadlineToRegainTracking;
handData.lostTracking = true;
}
handleRotationForUntrackedHand(inputCalibrationData, hand, tracking.HeadPose);
});
using namespace controller;
// Axes
{
const auto& inputState = _hands[0].state;
_axisStateMap[LX].value = inputState.JoystickNoDeadZone.x;
_axisStateMap[LY].value = inputState.JoystickNoDeadZone.y;
_axisStateMap[LT].value = inputState.IndexTrigger;
_axisStateMap[LEFT_GRIP].value = inputState.GripTrigger;
for (const auto& pair : BUTTON_MAP) {
if (inputState.Buttons & pair.first) {
_buttonPressedMap.insert(pair.second);
qDebug() << "Button pressed:" << pair.second;
}
}
for (const auto& pair : LEFT_TOUCH_MAP) {
if (inputState.Touches & pair.first) {
_buttonPressedMap.insert(pair.second);
}
}
}
{
const auto& inputState = _hands[1].state;
_axisStateMap[RX].value = inputState.JoystickNoDeadZone.x;
_axisStateMap[RY].value = inputState.JoystickNoDeadZone.y;
_axisStateMap[RT].value = inputState.IndexTrigger;
_axisStateMap[RIGHT_GRIP].value = inputState.GripTrigger;
for (const auto& pair : BUTTON_MAP) {
if (inputState.Buttons & pair.first) {
_buttonPressedMap.insert(pair.second);
}
}
for (const auto& pair : RIGHT_TOUCH_MAP) {
if (inputState.Touches & pair.first) {
_buttonPressedMap.insert(pair.second);
}
}
}
// Haptics
{
Locker locker(_lock);
for (auto& hand : _hands) {
if (hand.hapticDuration > 0.0f) {
hand.hapticDuration -= deltaTime * 1000.0f; // milliseconds
} else {
hand.stopHapticPulse();
}
}
}
}
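The lost-tracking branch above gives a hand a grace period: after tracking drops, the last known pose keeps being reoriented until LOST_TRACKING_DELAY microseconds elapse, and only then is the pose reported invalid. A standalone sketch of that pattern; the 3-second delay is an assumption, since LOST_TRACKING_DELAY is defined outside this excerpt:

#include <cstdint>

// Tracking-loss grace period, as used in update() above (delay assumed).
struct TrackingGrace {
    bool lostTracking{ false };
    uint64_t regainTrackingDeadline{ 0 };

    // Returns true while the last known pose should still be trusted.
    bool poseStillValid(bool tracked, uint64_t nowUsec) {
        static const uint64_t ASSUMED_DELAY_USEC = 3000000; // 3 s, assumed
        if (tracked) {
            lostTracking = false; // tracking regained; clear the grace state
            return true;
        }
        if (!lostTracking) { // first untracked frame: start the grace period
            lostTracking = true;
            regainTrackingDeadline = nowUsec + ASSUMED_DELAY_USEC;
        }
        return nowUsec <= regainTrackingDeadline;
    }
};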
void OculusMobileInputDevice::focusOutEvent() {
_axisStateMap.clear();
_buttonPressedMap.clear();
}
void OculusMobileInputDevice::handlePose(float deltaTime,
const controller::InputCalibrationData& inputCalibrationData,
ovrHandedness hand, const ovrRigidBodyPosef& handPose) {
auto poseId = (hand == VRAPI_HAND_LEFT) ? controller::LEFT_HAND : controller::RIGHT_HAND;
auto& pose = _poseStateMap[poseId];
pose = ovr::toControllerPose(hand, handPose);
// transform into avatar frame
glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
pose = pose.transform(controllerToAvatar);
}
void OculusMobileInputDevice::handleHeadPose(float deltaTime,
const controller::InputCalibrationData& inputCalibrationData,
const ovrRigidBodyPosef& headPose) {
glm::mat4 mat = createMatFromQuatAndPos(ovr::toGlm(headPose.Pose.Orientation),
ovr::toGlm(headPose.Pose.Position));
// Perform a 180-degree flip to make the HMD face +z instead of -z, because the head faces +z.
glm::mat4 matYFlip = mat * Matrices::Y_180;
controller::Pose pose(extractTranslation(matYFlip),
glmExtractRotation(matYFlip),
ovr::toGlm(headPose.LinearVelocity), // XXX * matYFlip ?
ovr::toGlm(headPose.AngularVelocity));
glm::mat4 sensorToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
glm::mat4 defaultHeadOffset = glm::inverse(inputCalibrationData.defaultCenterEyeMat) *
inputCalibrationData.defaultHeadMat;
pose.valid = true;
_poseStateMap[controller::HEAD] = pose.postTransform(defaultHeadOffset).transform(sensorToAvatar);
}
void OculusMobileInputDevice::handleRotationForUntrackedHand(const controller::InputCalibrationData& inputCalibrationData,
ovrHandedness hand, const ovrRigidBodyPosef& handPose) {
auto poseId = (hand == VRAPI_HAND_LEFT ? controller::LEFT_HAND : controller::RIGHT_HAND);
auto& pose = _poseStateMap[poseId];
const auto& lastHandPose = (hand == VRAPI_HAND_LEFT) ? _hands[0].lastPose : _hands[1].lastPose;
pose = ovr::toControllerPose(hand, handPose, lastHandPose);
glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
pose = pose.transform(controllerToAvatar);
}
bool OculusMobileInputDevice::triggerHapticPulse(float strength, float duration, controller::Hand hand) {
Locker locker(_lock);
bool success = true;
qDebug() << "Haptic duration:" << duration;
if (hand == controller::BOTH || hand == controller::LEFT) {
success &= _hands[0].setHapticFeedback(strength, duration);
}
if (hand == controller::BOTH || hand == controller::RIGHT) {
success &= _hands[1].setHapticFeedback(strength, duration);
}
return success;
}
/**jsdoc
* <p>The <code>Controller.Hardware.OculusTouch</code> object has properties representing the Oculus Touch controllers. The property values are
* integer IDs, uniquely identifying each output. <em>Read-only.</em> These can be mapped to actions or functions or
* <code>Controller.Standard</code> items in a {@link RouteObject} mapping.</p>
* <table>
* <thead>
* <tr><th>Property</th><th>Type</th><th>Data</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td colspan="4"><strong>Buttons</strong></td></tr>
* <tr><td><code>A</code></td><td>number</td><td>number</td><td>"A" button pressed.</td></tr>
* <tr><td><code>B</code></td><td>number</td><td>number</td><td>"B" button pressed.</td></tr>
* <tr><td><code>X</code></td><td>number</td><td>number</td><td>"X" button pressed.</td></tr>
* <tr><td><code>Y</code></td><td>number</td><td>number</td><td>"Y" button pressed.</td></tr>
* <tr><td><code>LeftApplicationMenu</code></td><td>number</td><td>number</td><td>Left application menu button pressed.
* </td></tr>
* <tr><td><code>RightApplicationMenu</code></td><td>number</td><td>number</td><td>Right application menu button pressed.
* </td></tr>
* <tr><td colspan="4"><strong>Sticks</strong></td></tr>
* <tr><td><code>LX</code></td><td>number</td><td>number</td><td>Left stick x-axis scale.</td></tr>
* <tr><td><code>LY</code></td><td>number</td><td>number</td><td>Left stick y-axis scale.</td></tr>
* <tr><td><code>RX</code></td><td>number</td><td>number</td><td>Right stick x-axis scale.</td></tr>
* <tr><td><code>RY</code></td><td>number</td><td>number</td><td>Right stick y-axis scale.</td></tr>
* <tr><td><code>LS</code></td><td>number</td><td>number</td><td>Left stick button pressed.</td></tr>
* <tr><td><code>RS</code></td><td>number</td><td>number</td><td>Right stick button pressed.</td></tr>
* <tr><td><code>LSTouch</code></td><td>number</td><td>number</td><td>Left stick is touched.</td></tr>
* <tr><td><code>RSTouch</code></td><td>number</td><td>number</td><td>Right stick is touched.</td></tr>
* <tr><td colspan="4"><strong>Triggers</strong></td></tr>
* <tr><td><code>LT</code></td><td>number</td><td>number</td><td>Left trigger scale.</td></tr>
* <tr><td><code>RT</code></td><td>number</td><td>number</td><td>Right trigger scale.</td></tr>
* <tr><td><code>LeftGrip</code></td><td>number</td><td>number</td><td>Left grip scale.</td></tr>
* <tr><td><code>RightGrip</code></td><td>number</td><td>number</td><td>Right grip scale.</td></tr>
* <tr><td colspan="4"><strong>Finger Abstractions</strong></td></tr>
* <tr><td><code>LeftPrimaryThumbTouch</code></td><td>number</td><td>number</td><td>Left thumb touching primary thumb
* button.</td></tr>
* <tr><td><code>LeftSecondaryThumbTouch</code></td><td>number</td><td>number</td><td>Left thumb touching secondary thumb
* button.</td></tr>
* <tr><td><code>LeftThumbUp</code></td><td>number</td><td>number</td><td>Left thumb not touching primary or secondary
* thumb buttons.</td></tr>
* <tr><td><code>RightPrimaryThumbTouch</code></td><td>number</td><td>number</td><td>Right thumb touching primary thumb
* button.</td></tr>
* <tr><td><code>RightSecondaryThumbTouch</code></td><td>number</td><td>number</td><td>Right thumb touching secondary thumb
* button.</td></tr>
* <tr><td><code>RightThumbUp</code></td><td>number</td><td>number</td><td>Right thumb not touching primary or secondary
* thumb buttons.</td></tr>
* <tr><td><code>LeftPrimaryIndexTouch</code></td><td>number</td><td>number</td><td>Left index finger is touching primary
* index finger control.</td></tr>
* <tr><td><code>LeftIndexPoint</code></td><td>number</td><td>number</td><td>Left index finger is pointing, not touching
* primary or secondary index finger controls.</td></tr>
* <tr><td><code>RightPrimaryIndexTouch</code></td><td>number</td><td>number</td><td>Right index finger is touching primary
* index finger control.</td></tr>
* <tr><td><code>RightIndexPoint</code></td><td>number</td><td>number</td><td>Right index finger is pointing, not touching
* primary or secondary index finger controls.</td></tr>
* <tr><td colspan="4"><strong>Avatar Skeleton</strong></td></tr>
* <tr><td><code>Head</code></td><td>number</td><td>{@link Pose}</td><td>Head pose.</td></tr>
* <tr><td><code>LeftHand</code></td><td>number</td><td>{@link Pose}</td><td>Left hand pose.</td></tr>
 * <tr><td><code>RightHand</code></td><td>number</td><td>{@link Pose}</td><td>Right hand pose.</td></tr>
* </tbody>
* </table>
* @typedef {object} Controller.Hardware-OculusTouch
*/
controller::Input::NamedVector OculusMobileInputDevice::getAvailableInputs() const {
using namespace controller;
QVector<Input::NamedPair> availableInputs{
// buttons
makePair(A, "A"),
makePair(B, "B"),
makePair(X, "X"),
makePair(Y, "Y"),
// trackpad analogs
makePair(LX, "LX"),
makePair(LY, "LY"),
makePair(RX, "RX"),
makePair(RY, "RY"),
// triggers
makePair(LT, "LT"),
makePair(RT, "RT"),
// trigger buttons
//makePair(LB, "LB"),
//makePair(RB, "RB"),
// side grip triggers
makePair(LEFT_GRIP, "LeftGrip"),
makePair(RIGHT_GRIP, "RightGrip"),
// joystick buttons
makePair(LS, "LS"),
makePair(RS, "RS"),
makePair(LEFT_HAND, "LeftHand"),
makePair(RIGHT_HAND, "RightHand"),
makePair(HEAD, "Head"),
makePair(LEFT_PRIMARY_THUMB_TOUCH, "LeftPrimaryThumbTouch"),
makePair(LEFT_SECONDARY_THUMB_TOUCH, "LeftSecondaryThumbTouch"),
makePair(RIGHT_PRIMARY_THUMB_TOUCH, "RightPrimaryThumbTouch"),
makePair(RIGHT_SECONDARY_THUMB_TOUCH, "RightSecondaryThumbTouch"),
makePair(LEFT_PRIMARY_INDEX_TOUCH, "LeftPrimaryIndexTouch"),
makePair(RIGHT_PRIMARY_INDEX_TOUCH, "RightPrimaryIndexTouch"),
makePair(LS_TOUCH, "LSTouch"),
makePair(RS_TOUCH, "RSTouch"),
makePair(LEFT_THUMB_UP, "LeftThumbUp"),
makePair(RIGHT_THUMB_UP, "RightThumbUp"),
makePair(LEFT_INDEX_POINT, "LeftIndexPoint"),
makePair(RIGHT_INDEX_POINT, "RightIndexPoint"),
makePair(BACK, "LeftApplicationMenu"),
makePair(START, "RightApplicationMenu"),
};
return availableInputs;
}
OculusMobileInputDevice::OculusMobileInputDevice(ovrMobile* session, const std::vector<ovrInputTrackedRemoteCapabilities>& devicesCaps) : controller::InputDevice("OculusTouch") {
qWarning() << "QQQ" << __FUNCTION__ << "found" << devicesCaps.size() << "devices";
for (const auto& deviceCaps : devicesCaps) {
size_t handIndex = -1;
if (deviceCaps.ControllerCapabilities & ovrControllerCaps_LeftHand) {
handIndex = 0;
} else if (deviceCaps.ControllerCapabilities & ovrControllerCaps_RightHand) {
handIndex = 1;
} else {
continue;
}
HandData& handData = _hands[handIndex];
handData.state.Header.ControllerType = ovrControllerType_TrackedRemote;
handData.valid = true;
handData.caps = deviceCaps;
handData.update(session);
}
}
void OculusMobileInputDevice::updateHands(ovrMobile* session) {
_headTracking = vrapi_GetPredictedTracking2(session, 0.0);
for (auto& hand : _hands) {
hand.update(session);
}
}
QString OculusMobileInputDevice::getDefaultMappingConfig() const {
static const QString MAPPING_JSON = PathUtils::resourcesPath() + "/controllers/oculus_touch.json";
return MAPPING_JSON;
}
// TODO migrate to a DLL model where plugins are discovered and loaded at runtime by the PluginManager class
InputPluginList getInputPlugins() {
InputPlugin* PLUGIN_POOL[] = {
new KeyboardMouseDevice(),
new OculusMobileControllerManager(),
nullptr
};
InputPluginList result;
for (int i = 0; PLUGIN_POOL[i]; ++i) {
InputPlugin* plugin = PLUGIN_POOL[i];
if (plugin->isSupported()) {
result.push_back(InputPluginPointer(plugin));
}
}
return result;
}

View file

@ -0,0 +1,43 @@
//
// Created by Bradley Austin Davis on 2016/03/04
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi__OculusMobileControllerManager
#define hifi__OculusMobileControllerManager
#include <QObject>
#include <unordered_set>
#include <map>
#include <GLMHelpers.h>
#include <controllers/InputDevice.h>
#include <plugins/InputPlugin.h>
class OculusMobileControllerManager : public InputPlugin {
Q_OBJECT
public:
// Plugin functions
bool isSupported() const override;
const QString getName() const override { return NAME; }
bool isHandController() const override;
bool isHeadController() const override { return true; }
QStringList getSubdeviceNames() override;
bool activate() override;
void deactivate() override;
void pluginFocusOutEvent() override;
void pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) override;
private:
static const char* NAME;
void checkForConnectedDevices();
};
#endif // hifi__OculusMobileControllerManager

View file

@ -0,0 +1,269 @@
//
// Created by Bradley Austin Davis on 2018/11/15
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "OculusMobileDisplayPlugin.h"
#include <QtAndroidExtras/QAndroidJniEnvironment>
#include <glm/gtc/matrix_transform.hpp>
#include <VrApi_Types.h>
#include <VrApi_Helpers.h>
#include <AbstractViewStateInterface.h>
#include <gpu/Frame.h>
#include <gpu/Context.h>
#include <gpu/gl/GLBackend.h>
#include <ViewFrustum.h>
#include <plugins/PluginManager.h>
#include <ui-plugins/PluginContainer.h>
#include <controllers/Pose.h>
#include <display-plugins/CompositorHelper.h>
#include <gpu/Frame.h>
#include <gl/Config.h>
#include <gl/GLWidget.h>
#include <gl/Context.h>
#include <MainWindow.h>
#include <AddressManager.h>
#include <ovr/Helpers.h>
#include <VrApi.h>
using namespace ovr;
const char* OculusMobileDisplayPlugin::NAME { "Oculus Rift" };
//thread_local bool renderThread = false;
#define OCULUS_APP_ID 2331695256865113
OculusMobileDisplayPlugin::OculusMobileDisplayPlugin() {
}
OculusMobileDisplayPlugin::~OculusMobileDisplayPlugin() {
}
void OculusMobileDisplayPlugin::init() {
Parent::init();
initVr();
emit deviceConnected(getName());
}
void OculusMobileDisplayPlugin::deinit() {
shutdownVr();
Parent::deinit();
}
bool OculusMobileDisplayPlugin::internalActivate() {
_renderTargetSize = { 1024, 512 };
_cullingProjection = ovr::toGlm(ovrMatrix4f_CreateProjectionFov(90.0f, 90.0f, 0.0f, 0.0f, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP));
withOvrJava([&](const ovrJava* java){
_renderTargetSize = glm::uvec2{
vrapi_GetSystemPropertyInt(java, VRAPI_SYS_PROP_SUGGESTED_EYE_TEXTURE_WIDTH),
vrapi_GetSystemPropertyInt(java, VRAPI_SYS_PROP_SUGGESTED_EYE_TEXTURE_HEIGHT),
};
});
ovr::for_each_eye([&](ovrEye eye){
_eyeProjections[eye] = _cullingProjection;
});
// This must come after the initialization, so that the values calculated
// above are available during the customizeContext call (when not running
// in threaded present mode)
return Parent::internalActivate();
}
void OculusMobileDisplayPlugin::internalDeactivate() {
Parent::internalDeactivate();
// ovr::releaseRenderSession(_session);
}
void OculusMobileDisplayPlugin::customizeContext() {
qWarning() << "QQQ" << __FUNCTION__ << "begin";
gl::initModuleGl();
_mainContext = _container->getPrimaryWidget()->context();
_mainContext->makeCurrent();
ovr::VrHandler::setHandler(this);
_mainContext->doneCurrent();
_mainContext->makeCurrent();
Parent::customizeContext();
qWarning() << "QQQ" << __FUNCTION__ << "done";
}
void OculusMobileDisplayPlugin::uncustomizeContext() {
ovr::VrHandler::setHandler(nullptr);
_mainContext->doneCurrent();
_mainContext->makeCurrent();
Parent::uncustomizeContext();
}
QRectF OculusMobileDisplayPlugin::getPlayAreaRect() {
QRectF result;
VrHandler::withOvrMobile([&](ovrMobile* session){
ovrPosef pose;
ovrVector3f scale;
if (ovrSuccess != vrapi_GetBoundaryOrientedBoundingBox(session, &pose, &scale)) {
return;
}
// FIXME extract the center from the pose
glm::vec2 center { 0 };
glm::vec2 dimensions = glm::vec2(scale.x, scale.z);
dimensions *= 2.0f;
result = QRectF(center.x, center.y, dimensions.x, dimensions.y);
});
return result;
}
glm::mat4 OculusMobileDisplayPlugin::getEyeProjection(Eye eye, const glm::mat4& baseProjection) const {
glm::mat4 result = baseProjection;
VrHandler::withOvrMobile([&](ovrMobile* session){
auto trackingState = vrapi_GetPredictedTracking2(session, 0.0);
result = ovr::Fov{ trackingState.Eye[eye].ProjectionMatrix }.withZ(baseProjection);
});
return result;
}
glm::mat4 OculusMobileDisplayPlugin::getCullingProjection(const glm::mat4& baseProjection) const {
glm::mat4 result = baseProjection;
VrHandler::withOvrMobile([&](ovrMobile* session){
auto trackingState = vrapi_GetPredictedTracking2(session, 0.0);
ovr::Fov fovs[2];
for (size_t i = 0; i < 2; ++i) {
fovs[i].extract(trackingState.Eye[i].ProjectionMatrix);
}
fovs[0].extend(fovs[1]);
result = fovs[0].withZ(baseProjection);
});
return result;
}
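Extending one eye's field of view by the other produces a single frustum that conservatively covers both eyes, which is what a culling projection needs. A sketch of that union, under the assumption that ovr::Fov::extend takes the per-edge maxima of the tangent half-angles (the helper itself is defined elsewhere in this change):

#include <algorithm>

// Per-edge union of two FOVs expressed as tangents of the half-angles.
struct FovTangents { float up, down, left, right; };
static FovTangents unionFov(const FovTangents& a, const FovTangents& b) {
    return { std::max(a.up, b.up), std::max(a.down, b.down),
             std::max(a.left, b.left), std::max(a.right, b.right) };
}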
void OculusMobileDisplayPlugin::resetSensors() {
VrHandler::withOvrMobile([&](ovrMobile* session){
vrapi_RecenterPose(session);
});
_currentRenderFrameInfo.renderPose = glm::mat4(); // identity
}
float OculusMobileDisplayPlugin::getTargetFrameRate() const {
float result = 0.0f;
VrHandler::withOvrJava([&](const ovrJava* java){
result = vrapi_GetSystemPropertyFloat(java, VRAPI_SYS_PROP_DISPLAY_REFRESH_RATE);
});
return result;
}
bool OculusMobileDisplayPlugin::isHmdMounted() const {
bool result = false;
VrHandler::withOvrJava([&](const ovrJava* java){
result = VRAPI_FALSE != vrapi_GetSystemStatusInt(java, VRAPI_SYS_STATUS_MOUNTED);
});
return result;
}
static void goToDevMobile() {
auto addressManager = DependencyManager::get<AddressManager>();
auto currentAddress = addressManager->currentAddress().toString().toStdString();
if (std::string::npos == currentAddress.find("dev-mobile")) {
addressManager->handleLookupString("hifi://dev-mobile/495.236,501.017,482.434/0,0.97452,0,-0.224301");
//addressManager->handleLookupString("hifi://dev-mobile/504,498,491/0,0,0,0");
//addressManager->handleLookupString("hifi://dev-mobile/0,-1,1");
}
}
// Called on the render thread; establishes the rough tracking for the upcoming frame.
bool OculusMobileDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
static QAndroidJniEnvironment* jniEnv = nullptr;
if (nullptr == jniEnv) {
jniEnv = new QAndroidJniEnvironment();
}
bool result = false;
_currentRenderFrameInfo = FrameInfo();
ovrTracking2 trackingState = {};
static bool resetTrackingTransform = true;
static glm::mat4 transformOffset;
VrHandler::withOvrMobile([&](ovrMobile* session){
if (resetTrackingTransform) {
auto pose = vrapi_GetTrackingTransform( session, VRAPI_TRACKING_TRANSFORM_SYSTEM_CENTER_FLOOR_LEVEL);
transformOffset = glm::inverse(ovr::toGlm(pose));
vrapi_SetTrackingTransform( session, pose);
resetTrackingTransform = false;
}
// TODO: find a better way of choosing the predicted display time than currentPresentIndex() + 2.
_currentRenderFrameInfo.predictedDisplayTime = vrapi_GetPredictedDisplayTime(session, currentPresentIndex() + 2);
trackingState = vrapi_GetPredictedTracking2(session, _currentRenderFrameInfo.predictedDisplayTime);
result = true;
});
if (result) {
_currentRenderFrameInfo.renderPose = transformOffset;
withNonPresentThreadLock([&] {
_currentRenderFrameInfo.sensorSampleTime = trackingState.HeadPose.TimeInSeconds;
_currentRenderFrameInfo.renderPose = transformOffset * ovr::toGlm(trackingState.HeadPose.Pose);
_currentRenderFrameInfo.presentPose = _currentRenderFrameInfo.renderPose;
_frameInfos[frameIndex] = _currentRenderFrameInfo;
_ipd = vrapi_GetInterpupillaryDistance(&trackingState);
ovr::for_each_eye([&](ovrEye eye){
_eyeProjections[eye] = ovr::toGlm(trackingState.Eye[eye].ProjectionMatrix);
_eyeOffsets[eye] = glm::translate(mat4(), vec3{ _ipd * (eye == VRAPI_EYE_LEFT ? -0.5f : 0.5f), 0.0f, 0.0f });
});
});
}
// static uint32_t count = 0;
// if ((++count % 1000) == 0) {
// AbstractViewStateInterface::instance()->postLambdaEvent([] {
// goToDevMobile();
// });
// }
return result && Parent::beginFrameRender(frameIndex);
}
ovrTracking2 presentTracking;
void OculusMobileDisplayPlugin::updatePresentPose() {
static QAndroidJniEnvironment* jniEnv = nullptr;
if (nullptr == jniEnv) {
jniEnv = new QAndroidJniEnvironment();
}
VrHandler::withOvrMobile([&](ovrMobile* session){
presentTracking = beginFrame();
_currentPresentFrameInfo.sensorSampleTime = vrapi_GetTimeInSeconds();
_currentPresentFrameInfo.predictedDisplayTime = presentTracking.HeadPose.TimeInSeconds;
_currentPresentFrameInfo.presentPose = ovr::toGlm(presentTracking.HeadPose.Pose);
});
}
void OculusMobileDisplayPlugin::internalPresent() {
VrHandler::pollTask();
if (!vrActive()) {
QThread::msleep(1);
return;
}
auto sourceTexture = getGLBackend()->getTextureID(_compositeFramebuffer->getRenderBuffer(0));
glm::uvec2 sourceSize{ _compositeFramebuffer->getWidth(), _compositeFramebuffer->getHeight() };
VrHandler::presentFrame(sourceTexture, sourceSize, presentTracking);
_presentRate.increment();
}
DisplayPluginList getDisplayPlugins() {
static DisplayPluginList result;
static std::once_flag once;
std::call_once(once, [&]{
auto plugin = std::make_shared<OculusMobileDisplayPlugin>();
plugin->isSupported();
result.push_back(plugin);
});
return result;
}

View file

@ -0,0 +1,65 @@
//
// Created by Bradley Austin Davis on 2018/11/15
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include <display-plugins/hmd/HmdDisplayPlugin.h>
#include <EGL/egl.h>
#include <QTimer>
#include <QtPlatformHeaders/QEGLNativeContext>
#include <QtAndroidExtras/QAndroidJniObject>
#include <gl/Context.h>
#include <ovr/VrHandler.h>
typedef struct ovrTextureSwapChain ovrTextureSwapChain;
typedef struct ovrMobile ovrMobile;
typedef struct ANativeWindow ANativeWindow;
class OculusMobileDisplayPlugin : public HmdDisplayPlugin, public ovr::VrHandler {
using Parent = HmdDisplayPlugin;
public:
OculusMobileDisplayPlugin();
virtual ~OculusMobileDisplayPlugin();
bool isSupported() const override { return true; };
bool hasAsyncReprojection() const override { return true; }
bool getSupportsAutoSwitch() override final { return false; }
QThread::Priority getPresentPriority() override { return QThread::TimeCriticalPriority; }
glm::mat4 getEyeProjection(Eye eye, const glm::mat4& baseProjection) const override;
glm::mat4 getCullingProjection(const glm::mat4& baseProjection) const override;
// Stereo specific methods
void resetSensors() override final;
bool beginFrameRender(uint32_t frameIndex) override;
QRectF getPlayAreaRect() override;
float getTargetFrameRate() const override;
void init() override;
void deinit() override;
protected:
const QString getName() const override { return NAME; }
bool internalActivate() override;
void internalDeactivate() override;
void customizeContext() override;
void uncustomizeContext() override;
void updatePresentPose() override;
void internalPresent() override;
void hmdPresent() override { throw std::runtime_error("Unused"); }
bool isHmdMounted() const override;
static const char* NAME;
mutable gl::Context* _mainContext{ nullptr };
uint32_t _readFbo{ 0 };
};

View file

@ -64,11 +64,37 @@ void PlayerWindow::loadFrame() {
}
void PlayerWindow::keyPressEvent(QKeyEvent* event) {
bool isShifted = event->modifiers().testFlag(Qt::ShiftModifier);
float moveScale = isShifted ? 10.0f : 1.0f;
switch (event->key()) {
case Qt::Key_F1:
loadFrame();
return;
case Qt::Key_W:
_renderThread.move(vec3{ 0, 0, -0.1f } * moveScale);
return;
case Qt::Key_S:
_renderThread.move(vec3{ 0, 0, 0.1f } * moveScale);
return;
case Qt::Key_A:
_renderThread.move(vec3{ -0.1f, 0, 0 } * moveScale);
return;
case Qt::Key_D:
_renderThread.move(vec3{ 0.1f, 0, 0 } * moveScale);
return;
case Qt::Key_E:
_renderThread.move(vec3{ 0, 0.1f, 0 } * moveScale);
return;
case Qt::Key_F:
_renderThread.move(vec3{ 0, -0.1f, 0 } * moveScale);
return;
default:
break;
}
@ -106,5 +132,4 @@ void PlayerWindow::loadFrame(const QString& path) {
}
resize(size.x, size.y);
}
_renderThread.submitFrame(frame);
}

View file

@ -20,6 +20,11 @@ void RenderThread::resize(const QSize& newSize) {
_pendingSize.push(newSize);
}
void RenderThread::move(const glm::vec3& v) {
std::unique_lock<std::mutex> lock(_frameLock);
_correction = glm::inverse(glm::translate(mat4(), v)) * _correction;
}
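Each call composes another inverse translation onto _correction, so repeated key presses accumulate into a single camera-correction matrix. For example, two forward moves of 0.1 m combine into one +0.2 translation on Z:

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

// Two presses of W (each a move of { 0, 0, -0.1 }) accumulate into a
// correction that translates by { 0, 0, +0.2 }, undoing the camera motion.
glm::mat4 accumulateCorrection() {
    glm::mat4 correction{ 1.0f };
    const glm::vec3 step{ 0.0f, 0.0f, -0.1f };
    for (int i = 0; i < 2; ++i) {
        correction = glm::inverse(glm::translate(glm::mat4{ 1.0f }, step)) * correction;
    }
    return correction;
}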
void RenderThread::initialize(QWindow* window) {
std::unique_lock<std::mutex> lock(_frameLock);
setObjectName("RenderThread");
@ -27,9 +32,12 @@ void RenderThread::initialize(QWindow* window) {
_window = window;
#ifdef USE_GL
_window->setFormat(getDefaultOpenGLSurfaceFormat());
_context.setWindow(window);
_context.create();
if (!_context.makeCurrent()) {
qFatal("Unable to make context current");
}
QOpenGLContextWrapper(_context.qglContext()).makeCurrent(_window);
glGenTextures(1, &_externalTexture);
glBindTexture(GL_TEXTURE_2D, _externalTexture);
@ -105,6 +113,13 @@ void RenderThread::renderFrame(gpu::FramePointer& frame) {
#ifdef USE_GL
_context.makeCurrent();
#endif
if (_correction != glm::mat4()) {
std::unique_lock<std::mutex> lock(_frameLock);
if (_correction != glm::mat4()) {
_backend->setCameraCorrection(_correction, _activeFrame->view);
//_prevRenderView = _correction * _activeFrame->view;
}
}
_backend->recycle();
_backend->syncCache();
@ -139,18 +154,29 @@ void RenderThread::renderFrame(gpu::FramePointer& frame) {
using namespace vks::debug::marker;
beginRegion(commandBuffer, "executeFrame", glm::vec4{ 1, 1, 1, 1 });
#endif
auto& glbackend = (gpu::gl::GLBackend&)(*_backend);
glm::uvec2 fboSize{ frame->framebuffer->getWidth(), frame->framebuffer->getHeight() };
auto fbo = glbackend.getFramebufferID(frame->framebuffer);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fbo);
glClearColor(0, 0, 0, 1);
glClearDepth(0);
glClear(GL_DEPTH_BUFFER_BIT);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
//_gpuContext->enableStereo(true);
if (frame && !frame->batches.empty()) {
_gpuContext->executeFrame(frame);
}
#ifdef USE_GL
//glDisable(GL_FRAMEBUFFER_SRGB);
//glClear(GL_COLOR_BUFFER_BIT);
glBindFramebuffer(GL_READ_FRAMEBUFFER, fbo);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
glBlitFramebuffer(
0, 0, fboSize.x, fboSize.y,
0, 0, windowSize.width(), windowSize.height(),
GL_COLOR_BUFFER_BIT, GL_NEAREST);
(void)CHECK_GL_ERROR();
_context.swapBuffers();
@ -183,11 +209,11 @@ bool RenderThread::process() {
pendingFrames.swap(_pendingFrames);
pendingSize.swap(_pendingSize);
}
while (!pendingFrames.empty()) {
_activeFrame = pendingFrames.front();
pendingFrames.pop();
_gpuContext->consumeFrameUpdates(_activeFrame);
}
while (!pendingSize.empty()) {

View file

@ -55,6 +55,8 @@ public:
std::queue<QSize> _pendingSize;
gpu::FramePointer _activeFrame;
uint32_t _externalTexture{ 0 };
void move(const glm::vec3& v);
glm::mat4 _correction;
void resize(const QSize& newSize);

tools/noramlizeFrame.py Normal file
View file

@ -0,0 +1,62 @@
import os
import json
import shutil
import sys
def scriptRelative(*paths):
scriptdir = os.path.dirname(os.path.realpath(sys.argv[0]))
result = os.path.join(scriptdir, *paths)
result = os.path.realpath(result)
result = os.path.normcase(result)
return result
class FrameProcessor:
def __init__(self, filename):
self.filename = filename
dirname, name = os.path.split(self.filename)
self.dir = dirname
self.ktxDir = os.path.join(self.dir, 'ktx')
os.makedirs(self.ktxDir, exist_ok=True)
self.resDir = scriptRelative("../interface/resources")
if (name.endswith(".json")):
self.name = name[0:-5]
else:
self.name = name
self.filename = self.filename + '.json'
with open(self.filename, 'r') as f:
self.json = json.load(f)
def processKtx(self, texture):
if texture is None: return
if 'ktxFile' not in texture: return
sourceKtx = texture['ktxFile']
if sourceKtx.startswith(':'):
sourceKtx = sourceKtx[1:]
while sourceKtx.startswith('/'):
sourceKtx = sourceKtx[1:]
sourceKtx = os.path.join(self.resDir, sourceKtx)
sourceKtxDir, sourceKtxName = os.path.split(sourceKtx)
destKtx = os.path.join(self.ktxDir, sourceKtxName)
if not os.path.isfile(destKtx):
shutil.copy(sourceKtx, destKtx)
newValue = 'ktx/' + sourceKtxName
texture['ktxFile'] = newValue
def process(self):
for texture in self.json['textures']:
self.processKtx(texture)
with open(self.filename, 'w') as f:
json.dump(self.json, f, indent=2)
# Example invocation; point this at a local frame capture before running.
fp = FrameProcessor("D:/Frames/20190114_1629.json")
fp.process()
#C:\Users\bdavi\git\hifi\interface\resources\meshes