Merge branch 'feature/quest_frame_player' into feature/quest

Brad Davis 2019-01-30 16:16:51 -08:00
commit c46d73128b
12 changed files with 101 additions and 30 deletions

View file

@@ -3,10 +3,10 @@ apply plugin: 'com.android.application'
android {
signingConfigs {
release {
- keyAlias 'key0'
- keyPassword 'password'
- storeFile file('C:/android/keystore.jks')
- storePassword 'password'
+ storeFile project.hasProperty("HIFI_ANDROID_KEYSTORE") ? file(HIFI_ANDROID_KEYSTORE) : null
+ storePassword project.hasProperty("HIFI_ANDROID_KEYSTORE_PASSWORD") ? HIFI_ANDROID_KEYSTORE_PASSWORD : ''
+ keyAlias project.hasProperty("HIFI_ANDROID_KEY_ALIAS") ? HIFI_ANDROID_KEY_ALIAS : ''
+ keyPassword project.hasProperty("HIFI_ANDROID_KEY_PASSWORD") ? HIFI_ANDROID_KEY_PASSWORD : ''
}
}

View file

@@ -3,10 +3,10 @@ apply plugin: 'com.android.application'
android {
signingConfigs {
release {
- keyAlias 'key0'
- keyPassword 'password'
- storeFile file('C:/android/keystore.jks')
- storePassword 'password'
+ storeFile project.hasProperty("HIFI_ANDROID_KEYSTORE") ? file(HIFI_ANDROID_KEYSTORE) : null
+ storePassword project.hasProperty("HIFI_ANDROID_KEYSTORE_PASSWORD") ? HIFI_ANDROID_KEYSTORE_PASSWORD : ''
+ keyAlias project.hasProperty("HIFI_ANDROID_KEY_ALIAS") ? HIFI_ANDROID_KEY_ALIAS : ''
+ keyPassword project.hasProperty("HIFI_ANDROID_KEY_PASSWORD") ? HIFI_ANDROID_KEY_PASSWORD : ''
}
}

View file

@@ -194,12 +194,9 @@ void RenderThread::renderFrame() {
// Quest
auto frameCorrection = _correction * ovr::toGlm(tracking.HeadPose.Pose);
_backend->setCameraCorrection(glm::inverse(frameCorrection), frame->view);
- vec4 fovs[2];
ovr::for_each_eye([&](ovrEye eye){
const auto& eyeInfo = tracking.Eye[eye];
eyeProjections[eye] = ovr::toGlm(eyeInfo.ProjectionMatrix);
- auto& fov = fovs[eye];
- ovrMatrix4f_ExtractFov(&eyeInfo.ProjectionMatrix, &fov.x, &fov.y, &fov.z, &fov.w);
eyeOffsets[eye] = ovr::toGlm(eyeInfo.ViewMatrix);
});
_backend->recycle();

View file

@@ -39,6 +39,7 @@ public class QuestQtActivity extends QtActivity {
keepInterfaceRunning = true;
launchedQuestMode = true;
moveTaskToBack(true);
+ startActivity(new Intent(this, QuestRenderActivity.class));
});
}

View file

@@ -73,13 +73,10 @@ RUN mkdir "$HIFI_BASE" && \
RUN git clone https://github.com/jherico/hifi.git && \
cd ~/hifi && \
- git checkout feature/quest_move_interface
+ git checkout feature/quest_frame_player
WORKDIR /home/jenkins/hifi
- RUN touch .test6 && \
- git fetch && git reset origin/feature/quest_move_interface --hard
RUN mkdir build
# Pre-cache the vcpkg managed dependencies

View file

@@ -1,5 +1,6 @@
set(TARGET_NAME entities)
setup_hifi_library(Network Script)
+ target_include_directories(${TARGET_NAME} PRIVATE "${OPENSSL_INCLUDE_DIR}")
include_hifi_library_headers(hfm)
include_hifi_library_headers(fbx)
include_hifi_library_headers(gpu)

View file

@@ -434,6 +434,9 @@ void GLBackend::render(const Batch& batch) {
GL_PROFILE_RANGE(render_gpu_gl, batch.getName().c_str());
_transform._skybox = _stereo._skybox = batch.isSkyboxEnabled();
+ // FIXME move this to between the transfer and draw passes, so that
+ // framebuffer setup can see the proper stereo state and enable things
+ // like foveation
// Allow the batch to override the rendering stereo settings
// for things like full framebuffer copy operations (deferred lighting passes)
bool savedStereo = _stereo._enable;

View file

@@ -48,6 +48,7 @@ public:
class GLESTexture : public GLTexture {
using Parent = GLTexture;
friend class GLESBackend;
+ friend class GLESFramebuffer;
GLuint allocate(const Texture& texture);
protected:
GLESTexture(const std::weak_ptr<GLBackend>& backend, const Texture& buffer);

View file

@@ -17,6 +17,34 @@
namespace gpu { namespace gles {
+ // returns the FOV from the projection matrix
+ static inline vec4 extractFov( const glm::mat4& m) {
+ static const std::array<vec4, 4> CLIPS{ {
+ { 1, 0, 0, 1 },
+ { -1, 0, 0, 1 },
+ { 0, 1, 0, 1 },
+ { 0, -1, 0, 1 }
+ } };
+ glm::mat4 mt = glm::transpose(m);
+ vec4 v, result;
+ // Left
+ v = mt * CLIPS[0];
+ result.x = -atanf(v.z / v.x);
+ // Right
+ v = mt * CLIPS[1];
+ result.y = atanf(v.z / v.x);
+ // Down
+ v = mt * CLIPS[2];
+ result.z = -atanf(v.z / v.y);
+ // Up
+ v = mt * CLIPS[3];
+ result.w = atanf(v.z / v.y);
+ return result;
+ }
class GLESFramebuffer : public gl::GLFramebuffer {
using Parent = gl::GLFramebuffer;
static GLuint allocate() {
@@ -29,6 +57,24 @@ public:
GLint currentFBO = -1;
glGetIntegerv(GL_DRAW_FRAMEBUFFER_BINDING, &currentFBO);
glBindFramebuffer(GL_FRAMEBUFFER, _fbo);
+ vec2 focalPoint{ -1.0f };
+ #if 0
+ {
+ auto backend = _backend.lock();
+ if (backend && backend->isStereo()) {
+ glm::mat4 projections[2];
+ backend->getStereoProjections(projections);
+ vec4 fov = extractFov(projections[0]);
+ float fovwidth = fov.x + fov.y;
+ float fovheight = fov.z + fov.w;
+ focalPoint.x = fov.y / fovwidth;
+ focalPoint.y = (fov.z / fovheight) - 0.5f;
+ }
+ }
+ #endif
gl::GLTexture* gltexture = nullptr;
TexturePointer surface;
if (_gpuObject.getColorStamps() != _colorStamps) {
@@ -58,7 +104,7 @@ public:
surface = b._texture;
if (surface) {
Q_ASSERT(TextureUsageType::RENDERBUFFER == surface->getUsageType());
- gltexture = backend->syncGPUObject(surface);
+ gltexture = backend->syncGPUObject(surface);
} else {
gltexture = nullptr;
}
@@ -66,6 +112,24 @@ public:
if (gltexture) {
if (gltexture->_target == GL_TEXTURE_2D) {
glFramebufferTexture2D(GL_FRAMEBUFFER, colorAttachments[unit], GL_TEXTURE_2D, gltexture->_texture, 0);
+ #if 0
+ if (glTextureFoveationParametersQCOM && focalPoint.x != -1.0f) {
+ static GLint FOVEATION_QUERY = 0;
+ static std::once_flag once;
+ std::call_once(once, [&]{
+ glGetTexParameteriv(GL_TEXTURE_2D, GL_TEXTURE_FOVEATED_FEATURE_QUERY_QCOM, &FOVEATION_QUERY);
+ });
+ static const float foveaArea = 4.0f;
+ static const float gain = 16.0f;
+ GLESBackend::GLESTexture* glestexture = static_cast<GLESBackend::GLESTexture*>(gltexture);
+ glestexture->withPreservedTexture([=]{
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_FOVEATED_FEATURE_BITS_QCOM, GL_FOVEATION_ENABLE_BIT_QCOM | GL_FOVEATION_SCALED_BIN_METHOD_BIT_QCOM);
+ glTextureFoveationParametersQCOM(_id, 0, 0, -focalPoint.x, focalPoint.y, gain * 2.0f, gain, foveaArea);
+ glTextureFoveationParametersQCOM(_id, 0, 1, focalPoint.x, focalPoint.y, gain * 2.0f, gain, foveaArea);
+ });
+ }
+ #endif
} else {
glFramebufferTextureLayer(GL_FRAMEBUFFER, colorAttachments[unit], gltexture->_texture, 0,
b._subresource);
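
The extractFov helper added above recovers the per-eye FOV half-angles from the clip planes encoded in the projection matrix, and the (currently #if 0'd) foveation path turns them into a focal point for glTextureFoveationParametersQCOM. Below is a standalone sketch of that same math using only glm; the frustum values and the main() harness are illustrative assumptions, not part of this commit.

#include <array>
#include <cmath>
#include <cstdio>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

// Same technique as extractFov above: the rows of the projection matrix encode
// the clip planes, so transposing and multiplying by (+/-1, 0, 0, 1) and
// (0, +/-1, 0, 1) yields the left/right/down/up plane equations, whose slopes
// give the half-angles.
static glm::vec4 extractFov(const glm::mat4& m) {
    static const std::array<glm::vec4, 4> CLIPS{ {
        { 1, 0, 0, 1 }, { -1, 0, 0, 1 }, { 0, 1, 0, 1 }, { 0, -1, 0, 1 }
    } };
    glm::mat4 mt = glm::transpose(m);
    glm::vec4 v, result;
    v = mt * CLIPS[0]; result.x = -std::atan(v.z / v.x); // left
    v = mt * CLIPS[1]; result.y =  std::atan(v.z / v.x); // right
    v = mt * CLIPS[2]; result.z = -std::atan(v.z / v.y); // down
    v = mt * CLIPS[3]; result.w =  std::atan(v.z / v.y); // up
    return result;
}

int main() {
    // Asymmetric eye frustum (illustrative values only).
    glm::mat4 proj = glm::frustum(-0.9f, 0.7f, -0.8f, 0.8f, 1.0f, 100.0f);
    glm::vec4 fov = extractFov(proj);

    // Same focal-point derivation as the disabled foveation block: express the
    // frustum's asymmetry as an offset from the framebuffer center.
    float fovwidth = fov.x + fov.y;
    float fovheight = fov.z + fov.w;
    glm::vec2 focalPoint{ fov.y / fovwidth, (fov.z / fovheight) - 0.5f };

    printf("fov l/r/d/u (rad): %.3f %.3f %.3f %.3f\n", fov.x, fov.y, fov.z, fov.w);
    printf("focal point: %.3f, %.3f\n", focalPoint.x, focalPoint.y);
    return 0;
}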

View file

@@ -118,7 +118,6 @@ public:
static ContextMetricSize textureResourcePopulatedGPUMemSize;
static ContextMetricSize textureResourceIdealGPUMemSize;
- protected:
virtual bool isStereo() const {
return _stereo.isStereo();
}
@@ -128,6 +127,7 @@ protected:
eyeProjections[i] = _stereo._eyeProjections[i];
}
}
+ protected:
void getStereoViews(mat4* eyeViews) const {
for (int i = 0; i < 2; ++i) {

View file

@@ -34,16 +34,26 @@ public:
return filename;
}
- Deserializer(const std::string& filename, uint32_t externalTexture, const TextureLoader& loader) :
- basename(getBaseName(filename)), externalTexture(externalTexture), textureLoader(loader) {
- basedir = QFileInfo(filename.c_str()).absoluteDir().canonicalPath().toStdString();
- if (*basedir.rbegin() != '/') {
- basedir += '/';
+ static std::string getBaseDir(const std::string& filename) {
+ std::string result;
+ if (0 == filename.find("assets:")) {
+ auto lastSlash = filename.rfind('/');
+ result = filename.substr(0, lastSlash + 1);
+ } else {
+ std::string result = QFileInfo(filename.c_str()).absoluteDir().canonicalPath().toStdString();
+ if (*result.rbegin() != '/') {
+ result += '/';
+ }
+ }
+ return result;
+ }
+ Deserializer(const std::string& filename, uint32_t externalTexture, const TextureLoader& loader) :
+ basename(getBaseName(filename)), basedir(getBaseDir(filename)), externalTexture(externalTexture), textureLoader(loader) {
+ }
const std::string basename;
- std::string basedir;
+ const std::string basedir;
std::string binaryFile;
const uint32_t externalTexture;
TextureLoader textureLoader;
@@ -772,12 +782,6 @@ StereoState readStereoState(const json& node) {
FramePointer Deserializer::deserializeFrame() {
{
std::string filename{ basename + ".json" };
- if (0 == basename.find("assets:")) {
- auto lastSlash = basename.rfind('/');
- basedir = basename.substr(0, lastSlash);
- } else {
- basedir = QFileInfo(basename.c_str()).absolutePath().toStdString();
- }
storage::FileStorage mappedFile(filename.c_str());
frameNode = json::parse(std::string((const char*)mappedFile.data(), mappedFile.size()));
}
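
One note on the new getBaseDir above: the else branch declares a second local result, which shadows the outer one, so a plain filesystem path would come back as an empty basedir. A minimal sketch of the apparent intent with that shadowing removed follows; std::filesystem stands in for QFileInfo here, which is an assumption rather than the commit's code.

#include <filesystem>
#include <string>

static std::string getBaseDir(const std::string& filename) {
    std::string result;
    if (0 == filename.find("assets:")) {
        // Asset URLs: keep everything up to and including the last '/'.
        auto lastSlash = filename.rfind('/');
        result = filename.substr(0, lastSlash + 1);
    } else {
        // Local files: canonical parent directory of the frame file,
        // assigned to the outer result rather than a shadowing local.
        result = std::filesystem::canonical(std::filesystem::path(filename))
                     .parent_path()
                     .string();
        if (result.empty() || *result.rbegin() != '/') {
            result += '/';
        }
    }
    return result;
}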

View file

@@ -32,9 +32,12 @@ void RenderThread::initialize(QWindow* window) {
_window = window;
#ifdef USE_GL
_window->setFormat(getDefaultOpenGLSurfaceFormat());
_context.setWindow(window);
_context.create();
- _context.makeCurrent();
+ if (!_context.makeCurrent()) {
+ qFatal("Unable to make context current");
+ }
QOpenGLContextWrapper(_context.qglContext()).makeCurrent(_window);
glGenTextures(1, &_externalTexture);
glBindTexture(GL_TEXTURE_2D, _externalTexture);