Merge pull request #4361 from samcake/temp0

Evolve the Transform stack away from the legacy GL path to the new shader-based approach
Brad Hefta-Gaub 2015-03-03 11:43:01 -08:00
commit b118912708
64 changed files with 532 additions and 137 deletions

View file

@ -15,12 +15,21 @@ function(AUTOSCRIBE_SHADER SHADER_FILE)
list(APPEND SHADER_INCLUDE_FILES ${includeFile})
endforeach()
#Extract the unique include shader paths
foreach(SHADER_INCLUDE ${SHADER_INCLUDE_FILES})
get_filename_component(INCLUDE_DIR ${SHADER_INCLUDE} PATH)
list(APPEND SHADER_INCLUDES_PATHS ${INCLUDE_DIR})
endforeach()
#Append the shader include paths provided by the dependent Hifi libraries
set(INCLUDES ${HIFI_LIBRARIES_SHADER_INCLUDE_FILES})
#message(Hifi for includes ${INCLUDES})
foreach(EXTRA_SHADER_INCLUDE ${INCLUDES})
list(APPEND SHADER_INCLUDES_PATHS ${EXTRA_SHADER_INCLUDE})
endforeach()
list(REMOVE_DUPLICATES SHADER_INCLUDES_PATHS)
#message(ready for includes ${SHADER_INCLUDES_PATHS})
# make the scribe include arguments
set(SCRIBE_INCLUDES)
@ -64,6 +73,17 @@ endfunction()
macro(AUTOSCRIBE_SHADER_LIB)
file(RELATIVE_PATH RELATIVE_LIBRARY_DIR_PATH ${CMAKE_CURRENT_SOURCE_DIR} "${HIFI_LIBRARY_DIR}")
foreach(HIFI_LIBRARY ${ARGN})
#if (NOT TARGET ${HIFI_LIBRARY})
# file(GLOB_RECURSE HIFI_LIBRARIES_SHADER_INCLUDE_FILES ${RELATIVE_LIBRARY_DIR_PATH}/${HIFI_LIBRARY}/src/)
#endif ()
#file(GLOB_RECURSE HIFI_LIBRARIES_SHADER_INCLUDE_FILES ${HIFI_LIBRARY_DIR}/${HIFI_LIBRARY}/src/*.slh)
list(APPEND HIFI_LIBRARIES_SHADER_INCLUDE_FILES ${HIFI_LIBRARY_DIR}/${HIFI_LIBRARY}/src)
endforeach()
#message(${HIFI_LIBRARIES_SHADER_INCLUDE_FILES})
file(GLOB_RECURSE SHADER_INCLUDE_FILES src/*.slh)
file(GLOB_RECURSE SHADER_SOURCE_FILES src/*.slv src/*.slf)

View file

@ -2547,7 +2547,9 @@ void Application::updateShadowMap() {
glm::ortho(minima.x, maxima.x, minima.y, maxima.y, -maxima.z, -minima.z) * glm::mat4_cast(inverseRotation));
// update the shadow view frustum
_shadowViewFrustum.setPosition(rotation * ((minima + maxima) * 0.5f));
// glm::vec3 shadowFrustumCenter = glm::vec3((minima.x + maxima.x) * 0.5f, (minima.y + maxima.y) * 0.5f, (minima.z + maxima.z) * 0.5f);
glm::vec3 shadowFrustumCenter = rotation * ((minima + maxima) * 0.5f);
_shadowViewFrustum.setPosition(shadowFrustumCenter);
_shadowViewFrustum.setOrientation(rotation);
_shadowViewFrustum.setOrthographic(true);
_shadowViewFrustum.setWidth(maxima.x - minima.x);
@ -2577,8 +2579,10 @@ void Application::updateShadowMap() {
// this is what is used for rendering the Entities and avatars
Transform viewTransform;
viewTransform.setRotation(rotation);
// viewTransform.postTranslate(shadowFrustumCenter);
setViewTransform(viewTransform);
glEnable(GL_POLYGON_OFFSET_FILL);
glPolygonOffset(1.1f, 4.0f); // magic numbers courtesy http://www.eecs.berkeley.edu/~ravir/6160/papers/shadowmaps.ppt

View file

@ -367,7 +367,7 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode, bool
// render body
if (Menu::getInstance()->isOptionChecked(MenuOption::Avatars)) {
renderBody(renderMode, postLighting, glowLevel);
renderBody(frustum, renderMode, postLighting, glowLevel);
}
if (!postLighting && renderMode != SHADOW_RENDER_MODE) {
@ -477,7 +477,7 @@ glm::quat Avatar::computeRotationFromBodyToWorldUp(float proportion) const {
return glm::angleAxis(angle * proportion, axis);
}
void Avatar::renderBody(RenderMode renderMode, bool postLighting, float glowLevel) {
void Avatar::renderBody(ViewFrustum* renderFrustum, RenderMode renderMode, bool postLighting, float glowLevel) {
Model::RenderMode modelRenderMode = (renderMode == SHADOW_RENDER_MODE) ?
Model::SHADOW_RENDER_MODE : Model::DEFAULT_RENDER_MODE;
{
@ -494,11 +494,13 @@ void Avatar::renderBody(RenderMode renderMode, bool postLighting, float glowLeve
if (postLighting) {
getHand()->render(false, modelRenderMode);
} else {
_skeletonModel.render(1.0f, modelRenderMode);
renderAttachments(renderMode);
RenderArgs args;
args._viewFrustum = renderFrustum;
_skeletonModel.render(1.0f, modelRenderMode, &args);
renderAttachments(renderMode, &args);
}
}
getHead()->render(1.0f, modelRenderMode, postLighting);
getHead()->render(1.0f, renderFrustum, modelRenderMode, postLighting);
}
bool Avatar::shouldRenderHead(const glm::vec3& cameraPosition, RenderMode renderMode) const {
@ -525,11 +527,11 @@ void Avatar::simulateAttachments(float deltaTime) {
}
}
void Avatar::renderAttachments(RenderMode renderMode) {
void Avatar::renderAttachments(RenderMode renderMode, RenderArgs* args) {
Model::RenderMode modelRenderMode = (renderMode == SHADOW_RENDER_MODE) ?
Model::SHADOW_RENDER_MODE : Model::DEFAULT_RENDER_MODE;
foreach (Model* model, _attachmentModels) {
model->render(1.0f, modelRenderMode);
model->render(1.0f, modelRenderMode, args);
}
}

View file

@ -223,11 +223,11 @@ protected:
float calculateDisplayNameScaleFactor(const glm::vec3& textPosition, bool inHMD);
void renderDisplayName();
virtual void renderBody(RenderMode renderMode, bool postLighting, float glowLevel = 0.0f);
virtual void renderBody(ViewFrustum* renderFrustum, RenderMode renderMode, bool postLighting, float glowLevel = 0.0f);
virtual bool shouldRenderHead(const glm::vec3& cameraPosition, RenderMode renderMode) const;
void simulateAttachments(float deltaTime);
virtual void renderAttachments(RenderMode renderMode);
virtual void renderAttachments(RenderMode renderMode, RenderArgs* args);
virtual void updateJointMappings();

View file

@ -39,6 +39,8 @@ void FaceModel::simulate(float deltaTime, bool fullUpdate) {
setPupilDilation(_owningHead->getPupilDilation());
setBlendshapeCoefficients(_owningHead->getBlendshapeCoefficients());
invalidCalculatedMeshBoxes();
if (isActive()) {
setOffset(-_geometry->getFBXGeometry().neckPivot);
Model::simulateInternal(deltaTime);

View file

@ -243,13 +243,15 @@ void Head::relaxLean(float deltaTime) {
_deltaLeanForward *= relaxationFactor;
}
void Head::render(float alpha, Model::RenderMode mode, bool postLighting) {
void Head::render(float alpha, ViewFrustum* renderFrustum, Model::RenderMode mode, bool postLighting) {
if (postLighting) {
if (_renderLookatVectors) {
renderLookatVectors(_leftEyePosition, _rightEyePosition, getCorrectedLookAtPosition());
}
} else {
_faceModel.render(alpha, mode);
RenderArgs args;
args._viewFrustum = renderFrustum;
_faceModel.render(alpha, mode, &args);
}
}

View file

@ -40,7 +40,7 @@ public:
void init();
void reset();
void simulate(float deltaTime, bool isMine, bool billboard = false);
void render(float alpha, Model::RenderMode mode, bool postLighting);
void render(float alpha, ViewFrustum* renderFrustum, Model::RenderMode mode, bool postLighting);
void setScale(float scale);
void setPosition(glm::vec3 position) { _position = position; }
void setAverageLoudness(float averageLoudness) { _averageLoudness = averageLoudness; }

View file

@ -1067,7 +1067,7 @@ void MyAvatar::attach(const QString& modelURL, const QString& jointName, const g
Avatar::attach(modelURL, jointName, translation, rotation, scale, allowDuplicates, useSaved);
}
void MyAvatar::renderBody(RenderMode renderMode, bool postLighting, float glowLevel) {
void MyAvatar::renderBody(ViewFrustum* renderFrustum, RenderMode renderMode, bool postLighting, float glowLevel) {
if (!(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
return; // wait until both models are loaded
}
@ -1076,15 +1076,17 @@ void MyAvatar::renderBody(RenderMode renderMode, bool postLighting, float glowLe
Model::RenderMode modelRenderMode = (renderMode == SHADOW_RENDER_MODE) ?
Model::SHADOW_RENDER_MODE : Model::DEFAULT_RENDER_MODE;
if (!postLighting) {
_skeletonModel.render(1.0f, modelRenderMode);
renderAttachments(renderMode);
RenderArgs args;
args._viewFrustum = renderFrustum;
_skeletonModel.render(1.0f, modelRenderMode, &args);
renderAttachments(renderMode, &args);
}
// Render head so long as the camera isn't inside it
const Camera *camera = Application::getInstance()->getCamera();
const glm::vec3 cameraPos = camera->getPosition();
if (shouldRenderHead(cameraPos, renderMode)) {
getHead()->render(1.0f, modelRenderMode, postLighting);
getHead()->render(1.0f, renderFrustum, modelRenderMode, postLighting);
}
if (postLighting) {
getHand()->render(true, modelRenderMode);
@ -1875,9 +1877,9 @@ void MyAvatar::onToggleRagdoll() {
}
}
void MyAvatar::renderAttachments(RenderMode renderMode) {
void MyAvatar::renderAttachments(RenderMode renderMode, RenderArgs* args) {
if (Application::getInstance()->getCamera()->getMode() != CAMERA_MODE_FIRST_PERSON || renderMode == MIRROR_RENDER_MODE) {
Avatar::renderAttachments(renderMode);
Avatar::renderAttachments(renderMode, args);
return;
}
const FBXGeometry& geometry = _skeletonModel.getGeometry()->getFBXGeometry();
@ -1887,7 +1889,7 @@ void MyAvatar::renderAttachments(RenderMode renderMode) {
for (int i = 0; i < _attachmentData.size(); i++) {
const QString& jointName = _attachmentData.at(i).jointName;
if (jointName != headJointName && jointName != "Head") {
_attachmentModels.at(i)->render(1.0f, modelRenderMode);
_attachmentModels.at(i)->render(1.0f, modelRenderMode, args);
}
}
}

View file

@ -38,7 +38,7 @@ public:
void updateFromTrackers(float deltaTime);
void render(const glm::vec3& cameraPosition, RenderMode renderMode = NORMAL_RENDER_MODE, bool postLighting = false);
void renderBody(RenderMode renderMode, bool postLighting, float glowLevel = 0.0f);
void renderBody(ViewFrustum* renderFrustum, RenderMode renderMode, bool postLighting, float glowLevel = 0.0f);
bool shouldRenderHead(const glm::vec3& cameraPosition, RenderMode renderMode) const;
void renderDebugBodyPoints();
@ -187,7 +187,7 @@ signals:
void transformChanged();
protected:
virtual void renderAttachments(RenderMode renderMode);
virtual void renderAttachments(RenderMode renderMode, RenderArgs* args);
private:
float _turningKeyPressTime;

View file

@ -348,8 +348,14 @@ void EntityTreeRenderer::leaveAllEntities() {
void EntityTreeRenderer::render(RenderArgs::RenderMode renderMode, RenderArgs::RenderSide renderSide) {
if (_tree && !_shuttingDown) {
Model::startScene(renderSide);
RenderArgs args = { this, _viewFrustum, getSizeScale(), getBoundaryLevelAdjust(), renderMode, renderSide,
ViewFrustum* frustum = (renderMode == RenderArgs::SHADOW_RENDER_MODE) ?
_viewState->getShadowViewFrustum() : _viewState->getCurrentViewFrustum();
RenderArgs args = { this, frustum, getSizeScale(), getBoundaryLevelAdjust(), renderMode, renderSide,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
_tree->lockForRead();
_tree->recurseTreeWithOperation(renderOperation, &args);

View file

@ -1,5 +1,7 @@
set(TARGET_NAME gpu)
AUTOSCRIBE_SHADER_LIB(gpu)
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
setup_hifi_library()

View file

@ -153,10 +153,10 @@ void Batch::setViewTransform(const Transform& view) {
_params.push_back(_transforms.cache(view));
}
void Batch::setProjectionTransform(const Transform& proj) {
void Batch::setProjectionTransform(const Mat4& proj) {
ADD_COMMAND(setProjectionTransform);
_params.push_back(_transforms.cache(proj));
_params.push_back(cacheData(sizeof(Mat4), &proj));
}
void Batch::setUniformBuffer(uint32 slot, const BufferPointer& buffer, Offset offset, Offset size) {

View file

@ -53,6 +53,11 @@ enum Primitive {
NUM_PRIMITIVES,
};
enum ReservedSlot {
TRANSFORM_OBJECT_SLOT = 6,
TRANSFORM_CAMERA_SLOT = 7,
};
class Batch {
public:
typedef Stream::Slot Slot;
@ -89,7 +94,7 @@ public:
// with the ModelTransform to create the equivalent of the glModelViewMatrix
void setModelTransform(const Transform& model);
void setViewTransform(const Transform& view);
void setProjectionTransform(const Transform& proj);
void setProjectionTransform(const Mat4& proj);
// Shader Stage
void setUniformBuffer(uint32 slot, const BufferPointer& buffer, Offset offset, Offset size);
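For reference, a hedged sketch of how a caller records the three transforms with the revised API; evalProjectionMatrix() is the new ViewFrustum helper added in this PR, while the other frustum accessors are assumed from the existing interface.
// Sketch only: model and view stay full Transform objects, the projection is now a raw Mat4.
void recordTransforms(gpu::Batch& batch, ViewFrustum& frustum, const Transform& objectTransform) {
    glm::mat4 projection;
    frustum.evalProjectionMatrix(projection);     // new ViewFrustum helper added in this PR
    batch.setProjectionTransform(projection);     // Mat4 overload: copied into the batch data

    Transform view;
    view.setRotation(frustum.getOrientation());   // assumed accessor; mirrors Application::updateShadowMap()
    batch.setViewTransform(view);

    batch.setModelTransform(objectTransform);
}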

View file

@ -8,8 +8,8 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
!>
<@if not CONFIG_SLH@>
<@def CONFIG_SLH@>
<@if not GPU_CONFIG_SLH@>
<@def GPU_CONFIG_SLH@>
<@if GLPROFILE == PC_GL @>
<@def VERSION_HEADER #version 330 compatibility@>

View file

@ -29,6 +29,21 @@ class Batch;
class Backend {
public:
class TransformObject {
public:
Mat4 _model;
Mat4 _modelInverse;
};
class TransformCamera {
public:
Mat4 _view;
Mat4 _viewInverse;
Mat4 _projectionViewUntranslated;
Mat4 _projection;
Vec4 _viewport;
};
template< typename T >
static void setGPUObject(const Buffer& buffer, T* bo) {
buffer.setGPUObject(reinterpret_cast<GPUObject*>(bo));
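These classes are the CPU-side mirrors of the TransformObject/TransformCamera structs added to gpu/Transform.slh further down; since every member is a mat4 or vec4, the std140 layout of the GLSL blocks matches a tightly packed C++ struct. A hedged sanity-check sketch, not part of this PR, assuming the gpu headers are in scope:
// Sketch only: guard against padding creeping into the blocks that glBufferData uploads byte-for-byte.
static_assert(sizeof(gpu::Backend::TransformObject) == 2 * sizeof(gpu::Mat4),
              "TransformObject must stay tightly packed for the glBufferData upload");
static_assert(sizeof(gpu::Backend::TransformCamera) == 4 * sizeof(gpu::Mat4) + sizeof(gpu::Vec4),
              "TransformCamera must stay tightly packed for the glBufferData upload");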

View file

@ -11,6 +11,7 @@
#ifndef hifi_gpu_Format_h
#define hifi_gpu_Format_h
#include <glm/glm.hpp>
#include <assert.h>
namespace gpu {
@ -24,6 +25,12 @@ typedef char int8;
typedef uint32 Offset;
typedef glm::mat4 Mat4;
typedef glm::mat3 Mat3;
typedef glm::vec4 Vec4;
typedef glm::vec3 Vec3;
typedef glm::vec2 Vec2;
// Description of a scalar type
enum Type {

View file

@ -73,28 +73,32 @@ GLBackend::GLBackend() :
_input(),
_transform()
{
initTransform();
}
GLBackend::~GLBackend() {
killTransform();
}
void GLBackend::renderBatch(Batch& batch) {
void GLBackend::render(Batch& batch) {
uint32 numCommands = batch.getCommands().size();
const Batch::Commands::value_type* command = batch.getCommands().data();
const Batch::CommandOffsets::value_type* offset = batch.getCommandOffsets().data();
GLBackend backend;
for (unsigned int i = 0; i < numCommands; i++) {
CommandCall call = _commandCalls[(*command)];
(backend.*(call))(batch, *offset);
(this->*(call))(batch, *offset);
command++;
offset++;
}
}
void GLBackend::renderBatch(Batch& batch) {
GLBackend backend;
backend.render(batch);
}
void GLBackend::checkGLError() {
GLenum error = glGetError();
if (!error) {
@ -386,18 +390,93 @@ void GLBackend::do_setViewTransform(Batch& batch, uint32 paramOffset) {
}
void GLBackend::do_setProjectionTransform(Batch& batch, uint32 paramOffset) {
_transform._projection = batch._transforms.get(batch._params[paramOffset]._uint);
memcpy(&_transform._projection, batch.editData(batch._params[paramOffset]._uint), sizeof(Mat4));
_transform._invalidProj = true;
}
void GLBackend::initTransform() {
#if defined(Q_OS_WIN)
glGenBuffers(1, &_transform._transformObjectBuffer);
glGenBuffers(1, &_transform._transformCameraBuffer);
glBindBuffer(GL_UNIFORM_BUFFER, _transform._transformObjectBuffer);
glBufferData(GL_UNIFORM_BUFFER, sizeof(_transform._transformObject), (const void*) &_transform._transformObject, GL_DYNAMIC_DRAW);
glBindBuffer(GL_UNIFORM_BUFFER, _transform._transformCameraBuffer);
glBufferData(GL_UNIFORM_BUFFER, sizeof(_transform._transformCamera), (const void*) &_transform._transformCamera, GL_DYNAMIC_DRAW);
glBindBuffer(GL_UNIFORM_BUFFER, 0);
#else
#endif
}
void GLBackend::killTransform() {
#if defined(Q_OS_WIN)
glDeleteBuffers(1, &_transform._transformObjectBuffer);
glDeleteBuffers(1, &_transform._transformCameraBuffer);
#else
#endif
}
void GLBackend::updateTransform() {
// Check all the dirty flags and update the state accordingly
if (_transform._invalidProj) {
// TODO: implement the projection matrix assignment to gl state
/* if (_transform._lastMode != GL_PROJECTION) {
_transform._transformCamera._projection = _transform._projection;
}
if (_transform._invalidView) {
_transform._view.getInverseMatrix(_transform._transformCamera._view);
_transform._view.getMatrix(_transform._transformCamera._viewInverse);
}
if (_transform._invalidModel) {
_transform._model.getMatrix(_transform._transformObject._model);
_transform._model.getInverseMatrix(_transform._transformObject._modelInverse);
}
if (_transform._invalidView || _transform._invalidProj) {
Mat4 viewUntranslated = _transform._transformCamera._view;
viewUntranslated[3] = Vec4(0.0f, 0.0f, 0.0f, 1.0f);
_transform._transformCamera._projectionViewUntranslated = _transform._transformCamera._projection * viewUntranslated;
}
if (_transform._invalidView || _transform._invalidProj) {
#if defined(Q_OS_WIN)
glBindBufferBase(GL_UNIFORM_BUFFER, TRANSFORM_CAMERA_SLOT, 0);
glBindBuffer(GL_ARRAY_BUFFER, _transform._transformCameraBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(_transform._transformCamera), (const void*) &_transform._transformCamera, GL_DYNAMIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
CHECK_GL_ERROR();
#endif
}
if (_transform._invalidModel) {
#if defined(Q_OS_WIN)
glBindBufferBase(GL_UNIFORM_BUFFER, TRANSFORM_OBJECT_SLOT, 0);
glBindBuffer(GL_ARRAY_BUFFER, _transform._transformObjectBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(_transform._transformObject), (const void*) &_transform._transformObject, GL_DYNAMIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
CHECK_GL_ERROR();
#endif
}
#if defined(Q_OS_WIN)
glBindBufferBase(GL_UNIFORM_BUFFER, TRANSFORM_OBJECT_SLOT, _transform._transformObjectBuffer);
glBindBufferBase(GL_UNIFORM_BUFFER, TRANSFORM_CAMERA_SLOT, _transform._transformCameraBuffer);
CHECK_GL_ERROR();
#endif
#if defined(Q_OS_MAC) || defined(Q_OS_LINUX)
// Do it again for fixed pipeline until we can get rid of it
if (_transform._invalidProj) {
if (_transform._lastMode != GL_PROJECTION) {
glMatrixMode(GL_PROJECTION);
_transform._lastMode = GL_PROJECTION;
}
CHECK_GL_ERROR();*/
glLoadMatrixf(reinterpret_cast< const GLfloat* >(&_transform._projection));
CHECK_GL_ERROR();
}
if (_transform._invalidModel || _transform._invalidView) {
@ -430,10 +509,11 @@ void GLBackend::updateTransform() {
}
}
CHECK_GL_ERROR();
_transform._invalidModel = false;
_transform._invalidView = false;
}
#endif
// Flags are clean
_transform._invalidView = _transform._invalidProj = _transform._invalidModel = false;
}
void GLBackend::do_setUniformBuffer(Batch& batch, uint32 paramOffset) {
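One practical consequence of moving from the static renderBatch() to an instance render() is that a backend can now be kept alive across batches, so the uniform buffers created in initTransform() persist and updateTransform() only re-uploads them when a transform command marked them dirty. A small migration sketch, illustrative only:
// Old entry point: still available, but it builds a throwaway GLBackend per call.
void drawLegacy(gpu::Batch& batch) {
    gpu::GLBackend::renderBatch(batch);
}

// With this PR: keep one backend around (it owns _transformObjectBuffer/_transformCameraBuffer)
// and let it replay batches; the dirty flags decide when the transform UBOs get re-uploaded.
void drawPersistent(gpu::GLBackend& backend, gpu::Batch& batch) {
    backend.render(batch);
}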

View file

@ -27,10 +27,13 @@ public:
GLBackend();
~GLBackend();
void render(Batch& batch);
static void renderBatch(Batch& batch);
static void checkGLError();
class GLBuffer : public GPUObject {
public:
@ -113,11 +116,17 @@ protected:
void do_setViewTransform(Batch& batch, uint32 paramOffset);
void do_setProjectionTransform(Batch& batch, uint32 paramOffset);
void initTransform();
void killTransform();
void updateTransform();
struct TransformStageState {
TransformObject _transformObject;
TransformCamera _transformCamera;
GLuint _transformObjectBuffer;
GLuint _transformCameraBuffer;
Transform _model;
Transform _view;
Transform _projection;
Mat4 _projection;
bool _invalidModel;
bool _invalidView;
bool _invalidProj;
@ -125,12 +134,14 @@ protected:
GLenum _lastMode;
TransformStageState() :
_transformObjectBuffer(0),
_transformCameraBuffer(0),
_model(),
_view(),
_projection(),
_invalidModel(true),
_invalidView(true),
_invalidProj(true),
_invalidProj(false),
_lastMode(GL_TEXTURE) {}
} _transform;

View file

@ -49,7 +49,7 @@ static const GLenum _elementTypeToGLType[NUM_TYPES]= {
GL_UNSIGNED_BYTE
};
//#define CHECK_GL_ERROR() ::gpu::GLBackend::checkGLError()
#define CHECK_GL_ERROR()
#define CHECK_GL_ERROR() ::gpu::GLBackend::checkGLError()
//#define CHECK_GL_ERROR()
#endif

View file

@ -0,0 +1,95 @@
<!
// gpu/TransformState.slh
//
// Created by Sam Gateau on 2/10/15.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
!>
<@if not GPU_TRANSFORM_STATE_SLH@>
<@def GPU_TRANSFORM_STATE_SLH@>
struct TransformObject {
mat4 _model;
mat4 _modelInverse;
};
struct TransformCamera {
mat4 _view;
mat4 _viewInverse;
mat4 _projectionViewUntranslated;
mat4 _projection;
vec4 _viewport;
};
vec4 transformModelToClipPos(TransformCamera camera, TransformObject object, vec4 pos) {
<@if GLPROFILE == MAC_GL@>
return gl_ModelViewProjectionMatrix * pos;
<@elif GLPROFILE == PC_GL@>
vec4 epos = (object._model * pos) + vec4(-pos.w * camera._viewInverse[3].xyz, 0.0);
return camera._projectionViewUntranslated * epos;
// Equivalent to the following but hopefully a bit more accurate
// return camera._projection * camera._view * object._model * pos;
<@endif@>
}
vec3 transformModelToEyeDir(TransformCamera camera, TransformObject object, vec3 dir) {
<@if GLPROFILE == MAC_GL@>
return gl_NormalMatrix * dir;
<@elif GLPROFILE == PC_GL@>
vec3 mr0 = vec3(object._modelInverse[0].x, object._modelInverse[1].x, object._modelInverse[2].x);
vec3 mr1 = vec3(object._modelInverse[0].y, object._modelInverse[1].y, object._modelInverse[2].y);
vec3 mr2 = vec3(object._modelInverse[0].z, object._modelInverse[1].z, object._modelInverse[2].z);
vec3 mvc0 = vec3(dot(camera._viewInverse[0].xyz, mr0), dot(camera._viewInverse[0].xyz, mr1), dot(camera._viewInverse[0].xyz, mr2));
vec3 mvc1 = vec3(dot(camera._viewInverse[1].xyz, mr0), dot(camera._viewInverse[1].xyz, mr1), dot(camera._viewInverse[1].xyz, mr2));
vec3 mvc2 = vec3(dot(camera._viewInverse[2].xyz, mr0), dot(camera._viewInverse[2].xyz, mr1), dot(camera._viewInverse[2].xyz, mr2));
vec3 result = vec3(dot(mvc0, dir), dot(mvc1, dir), dot(mvc2, dir));
return result;
<@endif@>
}
<@if GLPROFILE == PC_GL@>
uniform transformObjectBuffer {
TransformObject object;
};
TransformObject getTransformObject() {
return object;
}
uniform transformCameraBuffer {
TransformCamera camera;
};
TransformCamera getTransformCamera() {
return camera;
}
<@elif GLPROFILE == MAC_GL@>
TransformObject getTransformObject() {
TransformObject object;
return object;
}
TransformCamera getTransformCamera() {
TransformCamera camera;
return camera;
}
<@else@>
TransformObject getTransformObject() {
TransformObject object;
return object;
}
TransformCamera getTransformCamera() {
TransformCamera camera;
return camera;
}
<@endif@>
<@endif@>
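For reference, the untranslated-view trick used by transformModelToClipPos can be checked on the CPU with glm; this is a standalone sketch with local names, not code from this PR.
#include <glm/glm.hpp>

// Computes the clip-space position the way transformModelToClipPos does on the PC_GL path:
// translate into camera-relative world space first, then apply projection * untranslated view.
// The result matches projection * view * model * pos up to floating-point error, but keeps the
// large camera translation out of the matrix product, which is the accuracy gain hinted at above.
glm::vec4 clipViaUntranslatedView(const glm::mat4& model, const glm::mat4& view,
                                  const glm::mat4& projection, const glm::vec4& pos) {
    glm::mat4 viewUntranslated = view;
    viewUntranslated[3] = glm::vec4(0.0f, 0.0f, 0.0f, 1.0f);      // strip the translation column
    glm::vec3 cameraPosition = glm::vec3(glm::inverse(view)[3]);  // camera._viewInverse[3].xyz
    glm::vec4 eyeRelative = model * pos + glm::vec4(-pos.w * cameraPosition, 0.0f);
    return (projection * viewUntranslated) * eyeRelative;
}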

View file

@ -1,4 +1,6 @@
set(TARGET_NAME model)
AUTOSCRIBE_SHADER_LIB(gpu)
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
setup_hifi_library()

View file

@ -8,8 +8,8 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
!>
<@if not LIGHT_SLH@>
<@def LIGHT_SLH@>
<@if not MODEL_LIGHT_SLH@>
<@def MODEL_LIGHT_SLH@>
struct Light {
vec4 _position;

View file

@ -8,8 +8,8 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
!>
<@if not MATERIAL_SLH@>
<@def MATERIAL_SLH@>
<@if not MODEL_MATERIAL_SLH@>
<@def MODEL_MATERIAL_SLH@>
struct Material {
vec4 _diffuse;
@ -26,10 +26,10 @@ float getMaterialShininess(Material m) { return m._specular.a; }
<@if GLPROFILE == PC_GL@>
uniform materialBuffer {
Material mat;
Material _mat;
};
Material getMaterial() {
return mat;
return _mat;
}
<@elif GLPROFILE == MAC_GL@>
uniform vec4 materialBuffer[2];

View file

@ -577,6 +577,10 @@ void ViewFrustum::computeOffAxisFrustum(float& left, float& right, float& bottom
// compute our dimensions the usual way
float hheight = _nearClip * tanf(_fieldOfView * 0.5f * RADIANS_PER_DEGREE);
float hwidth = _aspectRatio * hheight;
if (isOrthographic()) {
hheight = getHeight();
hwidth = getWidth();
}
// get our frustum corners in view space
glm::mat4 eyeMatrix = glm::mat4_cast(glm::inverse(_eyeOffsetOrientation)) * glm::translate(-_eyeOffsetPosition);
@ -866,4 +870,20 @@ float ViewFrustum::distanceToCamera(const glm::vec3& point) const {
return distanceToPoint;
}
void ViewFrustum::evalProjectionMatrix(glm::mat4& proj) const {
if (isOrthographic()) {
glm::vec3 frustumCenter = glm::inverse(_orientation) * _position;
proj = glm::ortho(frustumCenter.x - 0.5f * getWidth(),
frustumCenter.x + 0.5f * getWidth(),
frustumCenter.y - 0.5f * getHeight(),
frustumCenter.y + 0.5f * getHeight(),
-getFarClip(), -getNearClip());
} else {
float left, right, bottom, top, near, far;
glm::vec4 clip0, clip1;
computeOffAxisFrustum(left, right, bottom, top, near, far, clip0, clip1);
proj = glm::perspective(glm::radians(getFieldOfView()), getAspectRatio(), getNearClip(), getFarClip());
}
}
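A short sketch of the intended call pattern, mirroring EntityTreeRenderer::render() and Model::renderCore() elsewhere in this diff; the interface type name is the assumed one behind getShadowViewFrustum().
// Sketch only: pick the frustum for the pass, evaluate its projection, hand it to the batch.
void setPassProjection(gpu::Batch& batch, AbstractViewStateInterface* viewState,
                       RenderArgs::RenderMode renderMode) {
    ViewFrustum* frustum = (renderMode == RenderArgs::SHADOW_RENDER_MODE)
        ? viewState->getShadowViewFrustum()       // orthographic, hits the glm::ortho branch above
        : viewState->getCurrentViewFrustum();     // perspective, hits the glm::perspective branch above
    glm::mat4 projection;
    frustum->evalProjectionMatrix(projection);
    batch.setProjectionTransform(projection);
}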

View file

@ -124,6 +124,7 @@ public:
float distanceToCamera(const glm::vec3& point) const;
void evalProjectionMatrix(glm::mat4& proj) const;
private:
// Used for keyhole calculations
ViewFrustum::location pointInKeyhole(const glm::vec3& point) const;

View file

@ -1,6 +1,6 @@
set(TARGET_NAME render-utils)
AUTOSCRIBE_SHADER_LIB()
AUTOSCRIBE_SHADER_LIB(gpu model)
# pull in the resources.qrc file
qt5_add_resources(QT_RESOURCES_FILE "${CMAKE_CURRENT_SOURCE_DIR}/res/fonts/fonts.qrc")

View file

@ -32,6 +32,9 @@ public:
/// gets the current view frustum for rendering the view state
virtual ViewFrustum* getCurrentViewFrustum() = 0;
/// gets the shadow view frustum for rendering the view state
virtual ViewFrustum* getShadowViewFrustum() = 0;
virtual bool getShadowsEnabled() = 0;
virtual bool getCascadeShadowsEnabled() = 0;

View file

@ -49,7 +49,7 @@ vec4 evalSphericalLight(SphericalHarmonics sh, vec3 direction ) {
uniform SphericalHarmonics ambientSphere;
// Everything about light
<@include Light.slh@>
<@include model/Light.slh@>
// The view Matrix
uniform mat4 invViewMat;
@ -95,13 +95,15 @@ vec3 evalAmbienSphereGlobalColor(float shadowAttenuation, vec3 position, vec3 no
vec3 evalLightmappedColor(float shadowAttenuation, vec3 normal, vec3 diffuse, vec3 lightmap) {
Light light = getLight();
vec3 fragNormal = vec3(invViewMat * vec4(normal, 0.0));
float diffuseDot = dot(fragNormal, -getLightDirection(light));
// need to catch normals perpendicular to the projection plane, hence the magic number for the threshold
// it should be just 0, but we have inaccuracy so we need to overshoot
const float PERPENDICULAR_THRESHOLD = -0.005;
float facingLight = step(PERPENDICULAR_THRESHOLD, diffuseDot);
//float facingLight = step(PERPENDICULAR_THRESHOLD, diffuseDot);
// evaluate the shadow test but only relevant for light facing fragments
float lightAttenuation = (1 - facingLight) + facingLight * shadowAttenuation;

View file

@ -223,6 +223,23 @@ void Model::initProgram(ProgramObject& program, Model::Locations& locations, boo
}
#endif
#if defined(Q_OS_WIN)
loc = glGetUniformBlockIndex(program.programId(), "transformObjectBuffer");
if (loc >= 0) {
glUniformBlockBinding(program.programId(), loc, gpu::TRANSFORM_OBJECT_SLOT);
// locations.materialBufferUnit = 1;
}
#endif
#if defined(Q_OS_WIN)
loc = glGetUniformBlockIndex(program.programId(), "transformCameraBuffer");
if (loc >= 0) {
glUniformBlockBinding(program.programId(), loc, gpu::TRANSFORM_CAMERA_SLOT);
// locations.materialBufferUnit = 1;
}
#endif
//program.link();
if (!program.isLinked()) {
program.release();
}
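In plain GL terms, the block binding above amounts to the following generic sketch (not hifi code); note that glGetUniformBlockIndex() reports a missing block as GL_INVALID_INDEX, which the loc >= 0 test only catches if loc is signed.
// Generic sketch: bind the named uniform blocks of any linked program to the reserved slots.
void bindTransformBlocks(GLuint programId) {
    GLuint cameraBlock = glGetUniformBlockIndex(programId, "transformCameraBuffer");
    if (cameraBlock != GL_INVALID_INDEX) {
        glUniformBlockBinding(programId, cameraBlock, gpu::TRANSFORM_CAMERA_SLOT);
    }
    GLuint objectBlock = glGetUniformBlockIndex(programId, "transformObjectBuffer");
    if (objectBlock != GL_INVALID_INDEX) {
        glUniformBlockBinding(programId, objectBlock, gpu::TRANSFORM_OBJECT_SLOT);
    }
}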
@ -323,6 +340,9 @@ void Model::init() {
_shadowProgram.addShaderFromSourceCode(QGLShader::Vertex, model_shadow_vert);
_shadowProgram.addShaderFromSourceCode(QGLShader::Fragment, model_shadow_frag);
// The shadow program uses the same locations as the standard rendering path, but we still need to set the bindings
Model::Locations tempLoc;
initProgram(_shadowProgram, tempLoc);
_skinProgram.addShaderFromSourceCode(QGLShader::Vertex, skin_model_vert);
_skinProgram.addShaderFromSourceCode(QGLShader::Fragment, model_frag);
@ -667,7 +687,7 @@ bool Model::render(float alpha, RenderMode mode, RenderArgs* args) {
// render the attachments
foreach (Model* attachment, _attachments) {
attachment->render(alpha, mode);
attachment->render(alpha, mode, args);
}
if (_meshStates.isEmpty()) {
return false;
@ -687,6 +707,13 @@ bool Model::renderCore(float alpha, RenderMode mode, RenderArgs* args) {
_renderBatch.clear();
gpu::Batch& batch = _renderBatch;
// Setup the projection matrix
if (args && args->_viewFrustum) {
glm::mat4 proj;
args->_viewFrustum->evalProjectionMatrix(proj);
batch.setProjectionTransform(proj);
}
// Capture the view matrix once for the rendering of this model
if (_transforms.empty()) {
_transforms.push_back(Transform());
@ -1659,11 +1686,26 @@ void Model::setupBatchTransform(gpu::Batch& batch) {
void Model::endScene(RenderMode mode, RenderArgs* args) {
PROFILE_RANGE(__FUNCTION__);
#if defined(ANDROID)
#else
glPushMatrix();
#endif
RenderArgs::RenderSide renderSide = RenderArgs::MONO;
if (args) {
renderSide = args->_renderSide;
}
gpu::GLBackend backend;
if (args) {
glm::mat4 proj;
args->_viewFrustum->evalProjectionMatrix(proj);
gpu::Batch batch;
batch.setProjectionTransform(proj);
backend.render(batch);
}
// Do the rendering batch creation for mono or left eye, not for right eye
if (renderSide != RenderArgs::STEREO_RIGHT) {
// Let's introduce a gpu::Batch to capture all the calls to the graphics api
@ -1818,19 +1860,15 @@ void Model::endScene(RenderMode mode, RenderArgs* args) {
// Render!
{
PROFILE_RANGE("render Batch");
#if defined(ANDROID)
#else
glPushMatrix();
#endif
::gpu::GLBackend::renderBatch(_sceneRenderBatch);
#if defined(ANDROID)
#else
glPopMatrix();
#endif
backend.render(_sceneRenderBatch);
}
#if defined(ANDROID)
#else
glPopMatrix();
#endif
// restore all the default material settings
_viewState->setupWorldLight();

View file

@ -269,6 +269,9 @@ protected:
/// first free ancestor.
float getLimbLength(int jointIndex) const;
/// Allow sub classes to force invalidating the bboxes
void invalidCalculatedMeshBoxes() { _calculatedMeshBoxesValid = false; }
private:
friend class AnimationHandle;

View file

@ -31,6 +31,8 @@ uniform vec3 shadowDistances;
// the inverse of the size of the shadow map
uniform float shadowScale;
uniform mat4 shadowMatrices[4];
vec2 samples[8] = vec2[8](
vec2(-2.0, -2.0),
vec2(2.0, -2.0),

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// model.frag
@ -13,7 +13,7 @@
<@include DeferredBufferWrite.slh@>
<@include Material.slh@>
<@include model/Material.slh@>
// the diffuse texture
uniform sampler2D diffuseMap;

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// model.vert
@ -11,6 +11,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
const int MAX_TEXCOORDS = 2;
uniform mat4 texcoordMatrices[MAX_TEXCOORDS];
@ -19,16 +20,19 @@ uniform mat4 texcoordMatrices[MAX_TEXCOORDS];
varying vec4 normal;
void main(void) {
// transform and store the normal for interpolation
normal = normalize(gl_ModelViewMatrix * vec4(gl_Normal, 0.0));
// pass along the diffuse color
gl_FrontColor = gl_Color;
// and the texture coordinates
gl_TexCoord[0] = texcoordMatrices[0] * vec4(gl_MultiTexCoord0.xy, 0.0, 1.0);
// use standard pipeline transform
gl_Position = ftransform();
}
// use standard pipeline transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
gl_Position = transformModelToClipPos(cam, obj, gl_Vertex);
// transform and store the normal for interpolation
normal = vec4(normalize(transformModelToEyeDir(cam, obj, gl_Normal)), 0.0);
}

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -14,7 +14,7 @@
<@include DeferredBufferWrite.slh@>
<@include Material.slh@>
<@include model/Material.slh@>
// the diffuse texture
uniform sampler2D diffuseMap;

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -12,6 +12,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
const int MAX_TEXCOORDS = 2;
uniform mat4 texcoordMatrices[MAX_TEXCOORDS];
@ -25,9 +27,6 @@ varying vec4 normal;
varying vec2 interpolatedTexcoord1;
void main(void) {
// transform and store the normal for interpolation
normal = normalize(gl_ModelViewMatrix * vec4(gl_Normal, 0.0));
// pass along the diffuse color
gl_FrontColor = gl_Color;
@ -35,8 +34,13 @@ void main(void) {
gl_TexCoord[0] = texcoordMatrices[0] * vec4(gl_MultiTexCoord0.xy, 0.0, 1.0);
// interpolatedTexcoord1 = vec2(texcoordMatrices[1] * vec4(gl_MultiTexCoord0.xy, 0.0, 1.0)).xy;
interpolatedTexcoord1 = vec2(texcoordMatrices[1] * vec4(texcoord1.xy, 0.0, 1.0)).xy;
// use standard pipeline transform
gl_Position = ftransform();
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
gl_Position = transformModelToClipPos(cam, obj, gl_Vertex);
// transform and store the normal for interpolation
normal = vec4(normalize(transformModelToEyeDir(cam, obj, gl_Normal)), 0.0);
}

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -14,7 +14,7 @@
<@include DeferredBufferWrite.slh@>
<@include Material.slh@>
<@include model/Material.slh@>
// the diffuse texture
uniform sampler2D diffuseMap;

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -12,6 +12,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
const int MAX_TEXCOORDS = 2;
uniform mat4 texcoordMatrices[MAX_TEXCOORDS];
@ -32,8 +34,8 @@ varying vec2 interpolatedTexcoord1;
void main(void) {
// transform and store the normal and tangent for interpolation
interpolatedNormal = gl_ModelViewMatrix * vec4(gl_Normal, 0.0);
interpolatedTangent = gl_ModelViewMatrix * vec4(tangent, 0.0);
//interpolatedNormal = gl_ModelViewMatrix * vec4(gl_Normal, 0.0);
//interpolatedTangent = gl_ModelViewMatrix * vec4(tangent, 0.0);
// pass along the diffuse color
gl_FrontColor = gl_Color;
@ -43,5 +45,11 @@ void main(void) {
interpolatedTexcoord1 = vec2(texcoordMatrices[1] * vec4(texcoord1.xy, 0.0, 1.0)).xy;
// use standard pipeline transform
gl_Position = ftransform();
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
gl_Position = transformModelToClipPos(cam, obj, gl_Vertex);
// transform and store the normal for interpolation
interpolatedNormal = vec4(normalize(transformModelToEyeDir(cam, obj, gl_Normal)), 0.0);
interpolatedTangent = vec4(normalize(transformModelToEyeDir(cam, obj, tangent)), 0.0);
}

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -14,7 +14,7 @@
<@include DeferredBufferWrite.slh@>
<@include Material.slh@>
<@include model/Material.slh@>
// the diffuse texture
uniform sampler2D diffuseMap;

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -14,7 +14,7 @@
<@include DeferredBufferWrite.slh@>
<@include Material.slh@>
<@include model/Material.slh@>
// the diffuse texture
uniform sampler2D diffuseMap;

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -14,7 +14,7 @@
<@include DeferredBufferWrite.slh@>
<@include Material.slh@>
<@include model/Material.slh@>
// the diffuse texture
uniform sampler2D diffuseMap;

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -12,6 +12,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
const int MAX_TEXCOORDS = 2;
uniform mat4 texcoordMatrices[MAX_TEXCOORDS];
@ -27,8 +29,8 @@ varying vec4 interpolatedTangent;
void main(void) {
// transform and store the normal and tangent for interpolation
interpolatedNormal = gl_ModelViewMatrix * vec4(gl_Normal, 0.0);
interpolatedTangent = gl_ModelViewMatrix * vec4(tangent, 0.0);
//interpolatedNormal = gl_ModelViewMatrix * vec4(gl_Normal, 0.0);
//interpolatedTangent = gl_ModelViewMatrix * vec4(tangent, 0.0);
// pass along the diffuse color
gl_FrontColor = gl_Color;
@ -36,6 +38,12 @@ void main(void) {
// and the texture coordinates
gl_TexCoord[0] = texcoordMatrices[0] * vec4(gl_MultiTexCoord0.xy, 0.0, 1.0);
// use standard pipeline transform
gl_Position = ftransform();
// use standard pipeline transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
gl_Position = transformModelToClipPos(cam, obj, gl_Vertex);
// transform and store the normal for interpolation
interpolatedNormal = vec4(normalize(transformModelToEyeDir(cam, obj, gl_Normal)), 0.0);
interpolatedTangent = vec4(normalize(transformModelToEyeDir(cam, obj, tangent)), 0.0);
}

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -14,7 +14,7 @@
<@include DeferredBufferWrite.slh@>
<@include Material.slh@>
<@include model/Material.slh@>
// the diffuse texture
uniform sampler2D diffuseMap;

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -11,8 +11,11 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
void main(void) {
// just use standard pipeline transform
gl_Position = ftransform();
// use standard pipeline transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
gl_Position = transformModelToClipPos(cam, obj, gl_Vertex);
}

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -14,7 +14,7 @@
<@include DeferredBufferWrite.slh@>
<@include Material.slh@>
<@include model/Material.slh@>
// the diffuse texture
uniform sampler2D diffuseMap;

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -14,7 +14,7 @@
<@include DeferredBufferWrite.slh@>
<@include Material.slh@>
<@include model/Material.slh@>
// the diffuse texture
uniform sampler2D diffuseMap;

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -19,7 +19,7 @@
<@include DeferredLighting.slh@>
// Everything about light
<@include Light.slh@>
<@include model/Light.slh@>
// The view Matrix
uniform mat4 invViewMat;

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// sdf_text.frag

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// sdf_text.vert

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -12,6 +12,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
const int MAX_TEXCOORDS = 2;
const int MAX_CLUSTERS = 128;
const int INDICES_PER_VERTEX = 4;
@ -34,14 +36,18 @@ void main(void) {
position += clusterMatrix * gl_Vertex * clusterWeight;
normal += clusterMatrix * vec4(gl_Normal, 0.0) * clusterWeight;
}
normal = normalize(gl_ModelViewMatrix * normal);
// pass along the diffuse color
gl_FrontColor = gl_Color;
// and the texture coordinates
gl_TexCoord[0] = texcoordMatrices[0] * vec4(gl_MultiTexCoord0.xy, 0.0, 1.0);
gl_Position = gl_ModelViewProjectionMatrix * position;
// use standard pipeline transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
gl_Position = transformModelToClipPos(cam, obj, position);
// transform and store the normal for interpolation
normal = vec4(normalize(transformModelToEyeDir(cam, obj, normal.xyz)), 0.0);
}

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -12,6 +12,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
const int MAX_TEXCOORDS = 2;
const int MAX_CLUSTERS = 128;
const int INDICES_PER_VERTEX = 4;
@ -42,8 +44,8 @@ void main(void) {
interpolatedNormal += clusterMatrix * vec4(gl_Normal, 0.0) * clusterWeight;
interpolatedTangent += clusterMatrix * vec4(tangent, 0.0) * clusterWeight;
}
interpolatedNormal = gl_ModelViewMatrix * interpolatedNormal;
interpolatedTangent = gl_ModelViewMatrix * interpolatedTangent;
// interpolatedNormal = gl_ModelViewMatrix * interpolatedNormal;
// interpolatedTangent = gl_ModelViewMatrix * interpolatedTangent;
// pass along the diffuse color
gl_FrontColor = gl_Color;
@ -52,4 +54,12 @@ void main(void) {
gl_TexCoord[0] = texcoordMatrices[0] * vec4(gl_MultiTexCoord0.xy, 0.0, 1.0);
gl_Position = gl_ModelViewProjectionMatrix * interpolatedPosition;
// use standard pipeline transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
gl_Position = transformModelToClipPos(cam, obj, interpolatedPosition);
interpolatedNormal = vec4(normalize(transformModelToEyeDir(cam, obj, interpolatedNormal.xyz)), 0.0);
interpolatedTangent = vec4(normalize(transformModelToEyeDir(cam, obj, interpolatedTangent.xyz)), 0.0);
}

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -12,6 +12,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
const int MAX_CLUSTERS = 128;
const int INDICES_PER_VERTEX = 4;
@ -27,5 +29,9 @@ void main(void) {
float clusterWeight = clusterWeights[i];
position += clusterMatrix * gl_Vertex * clusterWeight;
}
gl_Position = gl_ModelViewProjectionMatrix * position;
// use standard pipeline transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
gl_Position = transformModelToClipPos(cam, obj, position);
}

View file

@ -1,4 +1,4 @@
<@include Config.slh@>
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
@ -19,7 +19,7 @@
<@include DeferredLighting.slh@>
// Everything about light
<@include Light.slh@>
<@include model/Light.slh@>
// The view Matrix
uniform mat4 invViewMat;

View file

@ -90,6 +90,10 @@ public:
Mat4& getMatrix(Mat4& result) const;
Mat4& getInverseMatrix(Mat4& result) const;
Mat4& getInverseTransposeMatrix(Mat4& result) const;
Mat4& getRotationScaleMatrix(Mat4& result) const;
Mat4& getRotationScaleMatrixInverse(Mat4& result) const;
Transform& evalInverse(Transform& result) const;
@ -329,6 +333,24 @@ inline Transform::Mat4& Transform::getInverseMatrix(Transform::Mat4& result) con
return inverse.getMatrix(result);
}
inline Transform::Mat4& Transform::getInverseTransposeMatrix(Transform::Mat4& result) const {
getInverseMatrix(result);
result = glm::transpose(result);
return result;
}
inline Transform::Mat4& Transform::getRotationScaleMatrix(Mat4& result) const {
getMatrix(result);
result[3] = Vec4(0.0f, 0.0f, 0.0f, 1.0f);
return result;
}
inline Transform::Mat4& Transform::getRotationScaleMatrixInverse(Mat4& result) const {
getInverseMatrix(result);
result[3] = Vec4(0.0f, 0.0f, 0.0f, 1.0f);
return result;
}
inline void Transform::evalFromRawMatrix(const Mat4& matrix) {
// for now works only in the case of TRS transformation
if ((matrix[0][3] == 0) && (matrix[1][3] == 0) && (matrix[2][3] == 0) && (matrix[3][3] == 1.0f)) {