Switch Oculus mobile to single draw FBO with multiple color attachments

This commit is contained in:
Brad Davis 2019-03-12 15:01:11 -07:00
parent cb311408c6
commit 19f856b760
6 changed files with 372 additions and 92 deletions

View file

@ -0,0 +1,37 @@
#version 320 es
precision highp float;
precision highp sampler2D;

// Packed texture coordinates: .xy samples the left eye, .zw the right eye.
layout(location = 0) in vec4 vTexCoordLR;

// One color output per eye; each lands in its own FBO color attachment.
layout(location = 0) out vec4 FragColorL;
layout(location = 1) out vec4 FragColorR;

uniform sampler2D sampler;

// Conversion formulas per https://software.intel.com/en-us/node/503873

// sRGB ====> Linear
vec3 color_sRGBToLinear(vec3 srgb) {
    vec3 curved = pow((srgb + vec3(0.055)) / vec3(1.055), vec3(2.4));
    vec3 scaled = srgb / vec3(12.92);
    return mix(curved, scaled, vec3(lessThanEqual(srgb, vec3(0.04045))));
}

vec4 color_sRGBAToLinear(vec4 srgba) {
    return vec4(color_sRGBToLinear(srgba.xyz), srgba.w);
}

// Linear ====> sRGB
vec3 color_LinearTosRGB(vec3 lrgb) {
    vec3 curved = vec3(1.055) * pow(vec3(lrgb), vec3(0.41666)) - vec3(0.055);
    vec3 scaled = vec3(lrgb) * vec3(12.92);
    return mix(curved, scaled, vec3(lessThan(lrgb, vec3(0.0031308))));
}

vec4 color_LinearTosRGBA(vec4 lrgba) {
    return vec4(color_LinearTosRGB(lrgba.xyz), lrgba.w);
}

// FIXME switch to texelFetch for reading from the source texture
void main() {
    vec4 leftColor = texture(sampler, vTexCoordLR.xy);
    vec4 rightColor = texture(sampler, vTexCoordLR.zw);
    FragColorL = color_LinearTosRGBA(leftColor);
    FragColorR = color_LinearTosRGBA(rightColor);
}

View file

@ -0,0 +1,21 @@
#version 320 es

// Packed texture coordinates for the present pass: .xy = left eye, .zw = right eye.
layout(location = 0) out vec4 vTexCoordLR;

void main(void) {
    const float depth = 0.0;
    // Full-screen quad, drawn as a 4-vertex triangle strip indexed by gl_VertexID
    // (no vertex buffer required).
    const vec4 UNIT_QUAD[4] = vec4[4](
        vec4(-1.0, -1.0, depth, 1.0),
        vec4(1.0, -1.0, depth, 1.0),
        vec4(-1.0, 1.0, depth, 1.0),
        vec4(1.0, 1.0, depth, 1.0)
    );
    vec4 pos = UNIT_QUAD[gl_VertexID];
    gl_Position = pos;
    // Map clip space [-1,1] onto the left half of the side-by-side source:
    // x in [0, 0.5], y in [0, 1].
    vec2 leftUv = (pos.xy + 1.0) * vec2(0.25, 0.5);
    // The right eye reads the same row, shifted to the right half.
    vTexCoordLR = vec4(leftUv, leftUv.x + 0.5, leftUv.y);
}

View file

@ -7,6 +7,7 @@
//
package io.highfidelity.oculus;
import android.content.res.AssetManager;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
@ -24,7 +25,7 @@ public class OculusMobileActivity extends QtActivity implements SurfaceHolder.Ca
private static final String TAG = OculusMobileActivity.class.getSimpleName();
static { System.loadLibrary("oculusMobile"); }
private native void nativeOnCreate();
private native void nativeOnCreate(AssetManager assetManager);
private native static void nativeOnResume();
private native static void nativeOnPause();
private native static void nativeOnSurfaceChanged(Surface s);
@ -53,7 +54,7 @@ public class OculusMobileActivity extends QtActivity implements SurfaceHolder.Ca
mView = new SurfaceView(this);
mView.getHolder().addCallback(this);
nativeOnCreate();
nativeOnCreate(getAssets());
questNativeOnCreate();
}
@ -81,7 +82,7 @@ public class OculusMobileActivity extends QtActivity implements SurfaceHolder.Ca
Log.w(TAG, "QQQ onResume");
super.onResume();
//Reconnect the global reference back to handler
nativeOnCreate();
nativeOnCreate(getAssets());
questNativeOnResume();
nativeOnResume();

View file

@ -7,59 +7,44 @@
//
#include "Framebuffer.h"
#include <array>
#include <EGL/egl.h>
#include <glad/glad.h>
#include <android/log.h>
#include <VrApi.h>
#include <VrApi_Helpers.h>
#include "Helpers.h"
using namespace ovr;
void Framebuffer::updateLayer(int eye, ovrLayerProjection2& layer, const ovrMatrix4f* projectionMatrix ) const {
auto& layerTexture = layer.Textures[eye];
layerTexture.ColorSwapChain = _swapChain;
layerTexture.SwapChainIndex = _index;
layerTexture.ColorSwapChain = _swapChainInfos[eye].swapChain;
layerTexture.SwapChainIndex = _swapChainInfos[eye].index;
if (projectionMatrix) {
layerTexture.TexCoordsFromTanAngles = ovrMatrix4f_TanAngleMatrixFromProjection( projectionMatrix );
}
layerTexture.TextureRect = { 0, 0, 1, 1 };
}
void Framebuffer::SwapChainInfo::destroy() {
if (swapChain != nullptr) {
vrapi_DestroyTextureSwapChain(swapChain);
swapChain = nullptr;
}
index = -1;
length = -1;
}
void Framebuffer::create(const glm::uvec2& size) {
_size = size;
_index = 0;
_validTexture = false;
// Depth renderbuffer
/* glGenRenderbuffers(1, &_depth);
glBindRenderbuffer(GL_RENDERBUFFER, _depth);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, _size.x, _size.y);
glBindRenderbuffer(GL_RENDERBUFFER, 0);
*/
// Framebuffer
glGenFramebuffers(1, &_fbo);
// glBindFramebuffer(GL_DRAW_FRAMEBUFFER, _fbo);
// glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _depth);
// glBindFramebuffer(GL_FRAMEBUFFER, 0);
_swapChain = vrapi_CreateTextureSwapChain3(VRAPI_TEXTURE_TYPE_2D, GL_RGBA8, _size.x, _size.y, 1, 3);
_length = vrapi_GetTextureSwapChainLength(_swapChain);
if (!_length) {
__android_log_write(ANDROID_LOG_WARN, "QQQ_OVR", "Unable to count swap chain textures");
return;
}
for (int i = 0; i < _length; ++i) {
GLuint chainTexId = vrapi_GetTextureSwapChainHandle(_swapChain, i);
glBindTexture(GL_TEXTURE_2D, chainTexId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
ovr::for_each_eye([&](ovrEye eye) {
_swapChainInfos[eye].create(size);
});
glBindTexture(GL_TEXTURE_2D, 0);
glGenFramebuffers(1, &_fbo);
}
void Framebuffer::destroy() {
@ -67,28 +52,80 @@ void Framebuffer::destroy() {
glDeleteFramebuffers(1, &_fbo);
_fbo = 0;
}
if (0 != _depth) {
glDeleteRenderbuffers(1, &_depth);
_depth = 0;
}
if (_swapChain != nullptr) {
vrapi_DestroyTextureSwapChain(_swapChain);
_swapChain = nullptr;
}
_index = -1;
_length = -1;
ovr::for_each_eye([&](ovrEye eye) {
_swapChainInfos[eye].destroy();
});
}
void Framebuffer::advance() {
_index = (_index + 1) % _length;
_validTexture = false;
ovr::for_each_eye([&](ovrEye eye) {
_swapChainInfos[eye].advance();
});
}
void Framebuffer::bind() {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, _fbo);
if (!_validTexture) {
GLuint chainTexId = vrapi_GetTextureSwapChainHandle(_swapChain, _index);
glFramebufferTexture(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, chainTexId, 0);
_validTexture = true;
// Bind the shared FBO and ensure each eye's current swap-chain texture is
// attached to its color attachment slot (left -> 0, right -> 1).
void Framebuffer::bind(GLenum target) {
    glBindFramebuffer(target, _fbo);
    _swapChainInfos[VRAPI_EYE_LEFT].bind(target, GL_COLOR_ATTACHMENT0);
    _swapChainInfos[VRAPI_EYE_RIGHT].bind(target, GL_COLOR_ATTACHMENT1);
}
// Hint to the driver that both eye color attachments no longer need their
// previous contents — saves a tile load/store on mobile GPUs.
void Framebuffer::invalidate(GLenum target) {
    static const GLenum DISCARDED_ATTACHMENTS[2] = { GL_COLOR_ATTACHMENT0, GL_COLOR_ATTACHMENT1 };
    glInvalidateFramebuffer(target, 2, DISCARDED_ATTACHMENTS);
}
// Select which color attachments subsequent draws write to.
// VRAPI_EYE_LEFT -> attachment 0 only, VRAPI_EYE_RIGHT -> attachment 1 only,
// and VRAPI_EYE_COUNT is used as a sentinel meaning "both eyes at once".
void Framebuffer::drawBuffers(ovrEye eye) const {
    static const std::array<std::array<GLenum, 2>, 3> EYE_DRAW_BUFFERS { {
        { GL_COLOR_ATTACHMENT0, GL_NONE },
        { GL_NONE, GL_COLOR_ATTACHMENT1 },
        { GL_COLOR_ATTACHMENT0, GL_COLOR_ATTACHMENT1 }
    } };
    // VRAPI_EYE_LEFT..VRAPI_EYE_COUNT are 0..2, indexing the table above.
    if (eye < VRAPI_EYE_LEFT || eye > VRAPI_EYE_COUNT) {
        throw std::runtime_error("Invalid eye for drawBuffers");
    }
    const auto& eyeDrawBuffers = EYE_DRAW_BUFFERS[eye];
    glDrawBuffers(static_cast<GLsizei>(eyeDrawBuffers.size()), eyeDrawBuffers.data());
}
// Allocate a VrApi texture swap chain for one eye and configure the sampling
// parameters of every texture in the chain.
// Throws std::runtime_error if VrApi reports a zero-length chain.
// NOTE(review): leaves the last chain texture bound to GL_TEXTURE_2D; the
// caller is expected to unbind — confirm against Framebuffer::create().
void Framebuffer::SwapChainInfo::create(const glm::uvec2 &size) {
    index = 0;
    validTexture = false;
    // GL_RGBA8, 1 mip level, 3 buffers in the chain.
    swapChain = vrapi_CreateTextureSwapChain3(VRAPI_TEXTURE_TYPE_2D, GL_RGBA8, size.x, size.y, 1, 3);
    length = vrapi_GetTextureSwapChainLength(swapChain);
    if (!length) {
        // Log for logcat visibility, then fail hard: a zero-length chain is unusable.
        __android_log_write(ANDROID_LOG_WARN, "QQQ_OVR", "Unable to count swap chain textures");
        throw std::runtime_error("Unable to create Oculus texture swap chain");
    }
    for (int i = 0; i < length; ++i) {
        GLuint chainTexId = vrapi_GetTextureSwapChainHandle(swapChain, i);
        glBindTexture(GL_TEXTURE_2D, chainTexId);
        // Linear filtering with clamped edges for the compositor's sampling.
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    }
}
// Step to the next texture in the chain (wrapping around), and mark the
// cached FBO attachment stale so bind() re-attaches the new texture.
void Framebuffer::SwapChainInfo::advance() {
    validTexture = false;
    if (++index >= length) {
        index = 0;
    }
}
// Attach the chain's current texture to `attachment` of the framebuffer
// bound at `target`. A no-op while the cached attachment is still valid,
// so repeated binds between advance() calls are cheap.
void Framebuffer::SwapChainInfo::bind(uint32_t target, uint32_t attachment) {
    if (validTexture) {
        return;
    }
    const GLuint chainTexId = vrapi_GetTextureSwapChainHandle(swapChain, index);
    glFramebufferTexture(target, attachment, chainTexId, 0);
    validTexture = true;
}

View file

@ -9,6 +9,7 @@
#include <cstdint>
#include <glm/glm.hpp>
#include <glad/glad.h>
#include <VrApi_Types.h>
@ -20,15 +21,28 @@ public:
void create(const glm::uvec2& size);
void advance();
void destroy();
void bind();
void bind(GLenum target = GL_DRAW_FRAMEBUFFER);
void invalidate(GLenum target = GL_DRAW_FRAMEBUFFER);
void drawBuffers(ovrEye eye) const;
uint32_t _depth { 0 };
const glm::uvec2& size() const { return _size; }
private:
uint32_t _fbo{ 0 };
int _length{ -1 };
int _index{ -1 };
bool _validTexture{ false };
glm::uvec2 _size;
ovrTextureSwapChain* _swapChain{ nullptr };
// Per-eye VrApi texture swap chain plus the bookkeeping needed to drive it.
// length/index hold -1 while no chain has been created.
struct SwapChainInfo {
    int length{ -1 };                           // number of textures in the chain
    int index{ -1 };                            // current texture slot within the chain
    bool validTexture{ false };                 // true while the current texture is attached to the FBO
    ovrTextureSwapChain* swapChain{ nullptr };  // owned; released in destroy()
    // Allocate the chain and set sampling parameters on each texture.
    void create(const glm::uvec2& size);
    // Release the chain and reset to the not-created state.
    void destroy();
    // Move to the next texture in the chain and mark the attachment stale.
    void advance();
    // Attach the current texture to `attachment` of the FBO bound at `target`.
    void bind(GLenum target, GLenum attachment);
};
SwapChainInfo _swapChainInfos[VRAPI_FRAME_LAYER_EYE_MAX];
};
} // namespace ovr

View file

@ -9,37 +9,186 @@
#include <android/native_window_jni.h>
#include <android/log.h>
#include <android/asset_manager.h>
#include <android/asset_manager_jni.h>
#include <unistd.h>
#include <algorithm>
#include <array>
#include <VrApi.h>
#include <VrApi_Helpers.h>
#include <VrApi_Types.h>
//#include <OVR_Platform.h>
#include "GLContext.h"
#include "Helpers.h"
#include "Framebuffer.h"
static AAssetManager* ASSET_MANAGER = nullptr;
#define USE_BLIT_PRESENT 0
#if !USE_BLIT_PRESENT
// Read a text asset from the APK into a string.
// Returns an empty string when the asset manager hasn't been set, the path is
// null, or the asset is missing/empty.
// FIX: the original returned `nullptr`, which invokes undefined behavior in
// std::string's const char* constructor.
static std::string getTextAsset(const char* assetPath) {
    if (!ASSET_MANAGER || !assetPath) {
        return {};
    }
    AAsset* asset = AAssetManager_open(ASSET_MANAGER, assetPath, AASSET_MODE_BUFFER);
    if (!asset) {
        return {};
    }
    std::string result;
    const auto length = AAsset_getLength(asset);
    if (length > 0) {
        // AAsset_getBuffer can fail; only copy when a buffer was mapped.
        if (const void* buffer = AAsset_getBuffer(asset)) {
            result.assign(static_cast<const char*>(buffer), static_cast<size_t>(length));
        }
    }
    AAsset_close(asset);
    return result;
}
// Fetch the info log (compile diagnostics) for a shader object.
// Returns an empty string when the log is empty.
// Uses a std::string as the buffer instead of a raw new[]/delete[] pair, so
// no leak is possible if string construction throws.
static std::string getShaderInfoLog(GLuint glshader) {
    GLint infoLength = 0;
    glGetShaderiv(glshader, GL_INFO_LOG_LENGTH, &infoLength);
    std::string result;
    if (infoLength > 0) {
        result.resize(static_cast<size_t>(infoLength));
        GLsizei written = 0;
        glGetShaderInfoLog(glshader, infoLength, &written, &result[0]);
        // Trim to the actual length GL reported (excludes the null terminator).
        result.resize(static_cast<size_t>(written));
    }
    return result;
}
// Compile a single shader stage from source.
// Returns the shader object on success, or 0 after logging the compile error.
// Throws only if the shader object itself cannot be created.
static GLuint buildShader(GLenum shaderDomain, const char* shader) {
    const GLuint glshader = glCreateShader(shaderDomain);
    if (!glshader) {
        throw std::runtime_error("Bad shader");
    }
    glShaderSource(glshader, 1, &shader, NULL);
    glCompileShader(glshader);
    GLint compiled = 0;
    glGetShaderiv(glshader, GL_COMPILE_STATUS, &compiled);
    if (compiled) {
        return glshader;
    }
    // Compile failure: capture the log, release the object, and report.
    const std::string compileError = getShaderInfoLog(glshader);
    glDeleteShader(glshader);
    __android_log_print(ANDROID_LOG_WARN, "QQQ_OVR", "Shader compile error: %s", compileError.c_str());
    return 0;
}
// Fetch the info log (link diagnostics) for a program object.
// Returns an empty string when the log is empty.
// Uses a std::string as the buffer instead of a raw new[]/delete[] pair, so
// no leak is possible if string construction throws.
static std::string getProgramInfoLog(GLuint glprogram) {
    GLint infoLength = 0;
    glGetProgramiv(glprogram, GL_INFO_LOG_LENGTH, &infoLength);
    std::string result;
    if (infoLength > 0) {
        result.resize(static_cast<size_t>(infoLength));
        GLsizei written = 0;
        glGetProgramInfoLog(glprogram, infoLength, &written, &result[0]);
        // Trim to the actual length GL reported (excludes the null terminator).
        result.resize(static_cast<size_t>(written));
    }
    return result;
}
// Build and link a GL program from vertex + fragment shader source.
// Returns the linked program; on any failure all partially-created GL objects
// are destroyed and a std::runtime_error is thrown.
// FIX: the original caught the inner runtime_error and discarded it (unused
// variable), then rethrew a generic message; the specific failure reason is
// now preserved for the caller.
static GLuint buildProgram(const char* vertex, const char* fragment) {
    GLuint glprogram { 0 }, glvertex { 0 }, glfragment { 0 };
    std::string failure;
    try {
        glprogram = glCreateProgram();
        if (0 == glprogram) {
            throw std::runtime_error("Failed to create program, is GL context current?");
        }
        glvertex = buildShader(GL_VERTEX_SHADER, vertex);
        if (0 == glvertex) {
            throw std::runtime_error("Failed to create or compile vertex shader");
        }
        glAttachShader(glprogram, glvertex);
        glfragment = buildShader(GL_FRAGMENT_SHADER, fragment);
        if (0 == glfragment) {
            throw std::runtime_error("Failed to create or compile fragment shader");
        }
        glAttachShader(glprogram, glfragment);
        GLint linked { 0 };
        glLinkProgram(glprogram);
        glGetProgramiv(glprogram, GL_LINK_STATUS, &linked);
        if (!linked) {
            std::string linkErrorLog = getProgramInfoLog(glprogram);
            __android_log_print(ANDROID_LOG_WARN, "QQQ_OVR", "Program link error: %s", linkErrorLog.c_str());
            throw std::runtime_error("Failed to link program, is the interface between the fragment and vertex shaders correct?");
        }
    } catch (const std::runtime_error& error) {
        // Remember why we failed so the rethrow below carries the real reason.
        failure = error.what();
        if (0 != glprogram) {
            glDeleteProgram(glprogram);
            glprogram = 0;
        }
    }
    // Shaders are safe to delete either way: a linked program keeps its own
    // reference to the compiled stages.
    if (0 != glvertex) {
        glDeleteShader(glvertex);
    }
    if (0 != glfragment) {
        glDeleteShader(glfragment);
    }
    if (0 == glprogram) {
        throw std::runtime_error(failure.empty() ? "Failed to build program" : failure);
    }
    return glprogram;
}
#endif
using namespace ovr;
static thread_local bool isRenderThread { false };
struct VrSurface : public TaskQueue {
using HandlerTask = VrHandler::HandlerTask;
using HandlerTask = ovr::VrHandler::HandlerTask;
JavaVM* vm{nullptr};
jobject oculusActivity{ nullptr };
ANativeWindow* nativeWindow{ nullptr };
VrHandler* handler{nullptr};
ovr::VrHandler* handler{nullptr};
ovrMobile* session{nullptr};
bool resumed { false };
GLContext vrglContext;
Framebuffer eyeFbos[2];
uint32_t readFbo{0};
ovr::GLContext vrglContext;
ovr::Framebuffer eyesFbo;
#if USE_BLIT_PRESENT
GLuint readFbo { 0 };
#else
GLuint renderProgram { 0 };
GLuint renderVao { 0 };
#endif
std::atomic<uint32_t> presentIndex{1};
double displayTime{0};
// Not currently set by anything
@ -76,6 +225,16 @@ struct VrSurface : public TaskQueue {
vrglContext.create(currentDisplay, currentContext, noErrorContext);
vrglContext.makeCurrent();
#if USE_BLIT_PRESENT
glGenFramebuffers(1, &readFbo);
#else
glGenVertexArrays(1, &renderVao);
const char* vertex = nullptr;
auto vertexShader = getTextAsset("shaders/present.vert");
auto fragmentShader = getTextAsset("shaders/present.frag");
renderProgram = buildProgram(vertexShader.c_str(), fragmentShader.c_str());
#endif
glm::uvec2 eyeTargetSize;
withEnv([&](JNIEnv* env){
ovrJava java{ vm, env, oculusActivity };
@ -85,10 +244,7 @@ struct VrSurface : public TaskQueue {
};
});
__android_log_print(ANDROID_LOG_WARN, "QQQ_OVR", "QQQ Eye Size %d, %d", eyeTargetSize.x, eyeTargetSize.y);
ovr::for_each_eye([&](ovrEye eye) {
eyeFbos[eye].create(eyeTargetSize);
});
glGenFramebuffers(1, &readFbo);
eyesFbo.create(eyeTargetSize);
vrglContext.doneCurrent();
}
@ -178,38 +334,51 @@ struct VrSurface : public TaskQueue {
void presentFrame(uint32_t sourceTexture, const glm::uvec2 &sourceSize, const ovrTracking2& tracking) {
ovrLayerProjection2 layer = vrapi_DefaultLayerProjection2();
layer.HeadPose = tracking.HeadPose;
eyesFbo.bind();
if (sourceTexture) {
eyesFbo.invalidate();
#if USE_BLIT_PRESENT
glBindFramebuffer(GL_READ_FRAMEBUFFER, readFbo);
glFramebufferTexture(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, sourceTexture, 0);
GLenum framebufferStatus = glCheckFramebufferStatus(GL_READ_FRAMEBUFFER);
if (GL_FRAMEBUFFER_COMPLETE != framebufferStatus) {
__android_log_print(ANDROID_LOG_WARN, "QQQ_OVR", "incomplete framebuffer");
}
}
GLenum invalidateAttachment = GL_COLOR_ATTACHMENT0;
ovr::for_each_eye([&](ovrEye eye) {
const auto &eyeTracking = tracking.Eye[eye];
auto &eyeFbo = eyeFbos[eye];
const auto &destSize = eyeFbo._size;
eyeFbo.bind();
glInvalidateFramebuffer(GL_DRAW_FRAMEBUFFER, 1, &invalidateAttachment);
if (sourceTexture) {
const auto &destSize = eyesFbo.size();
ovr::for_each_eye([&](ovrEye eye) {
auto sourceWidth = sourceSize.x / 2;
auto sourceX = (eye == VRAPI_EYE_LEFT) ? 0 : sourceWidth;
// Each eye blit uses a different draw buffer
eyesFbo.drawBuffers(eye);
glBlitFramebuffer(
sourceX, 0, sourceX + sourceWidth, sourceSize.y,
0, 0, destSize.x, destSize.y,
GL_COLOR_BUFFER_BIT, GL_NEAREST);
}
eyeFbo.updateLayer(eye, layer, &eyeTracking.ProjectionMatrix);
eyeFbo.advance();
});
if (sourceTexture) {
glInvalidateFramebuffer(GL_READ_FRAMEBUFFER, 1, &invalidateAttachment);
});
static const std::array<GLenum, 1> READ_INVALIDATE_ATTACHMENTS {{ GL_COLOR_ATTACHMENT0 }};
glInvalidateFramebuffer(GL_READ_FRAMEBUFFER, (GLuint)READ_INVALIDATE_ATTACHMENTS.size(), READ_INVALIDATE_ATTACHMENTS.data());
glFramebufferTexture(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, 0, 0);
#else
eyesFbo.drawBuffers(VRAPI_EYE_COUNT);
const auto &destSize = eyesFbo.size();
glViewport(0, 0, destSize.x, destSize.y);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, sourceTexture);
glBindVertexArray(renderVao);
glUseProgram(renderProgram);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glUseProgram(0);
glBindVertexArray(0);
#endif
} else {
eyesFbo.drawBuffers(VRAPI_EYE_COUNT);
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
}
glFlush();
ovr::for_each_eye([&](ovrEye eye) {
const auto &eyeTracking = tracking.Eye[eye];
eyesFbo.updateLayer(eye, layer, &eyeTracking.ProjectionMatrix);
});
eyesFbo.advance();
ovrLayerHeader2 *layerHeader = &layer.Header;
ovrSubmitFrameDescription2 frameDesc = {};
@ -321,8 +490,9 @@ JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *, void *) {
return JNI_VERSION_1_6;
}
JNIEXPORT void JNICALL Java_io_highfidelity_oculus_OculusMobileActivity_nativeOnCreate(JNIEnv* env, jobject obj) {
JNIEXPORT void JNICALL Java_io_highfidelity_oculus_OculusMobileActivity_nativeOnCreate(JNIEnv* env, jobject obj, jobject assetManager) {
__android_log_write(ANDROID_LOG_WARN, "QQQ_JNI", __FUNCTION__);
ASSET_MANAGER = AAssetManager_fromJava(env, assetManager);
SURFACE.onCreate(env, obj);
}