Merge branch 'master' of https://github.com/highfidelity/hifi into metavoxels

Conflicts:
	interface/src/ui/MetavoxelEditor.cpp
Andrzej Kapolka 2014-11-13 12:17:37 -08:00
commit 8a0ff4acf5
34 changed files with 1825 additions and 554 deletions


@ -1,72 +0,0 @@
//
// concertCamera.js
//
// Created by Philip Rosedale on June 24, 2014
// Copyright 2014 High Fidelity, Inc.
//
// Move a camera through a series of pre-set locations by pressing number keys
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var oldMode;
var avatarPosition;
var cameraNumber = 0;
var freeCamera = false;
var cameraLocations = [ {x: 7971.9, y: 241.3, z: 7304.1}, {x: 7973.0, y: 241.3, z: 7304.1}, {x: 7975.5, y: 241.3, z: 7304.1}, {x: 7972.3, y: 241.3, z: 7303.3}, {x: 7971.0, y: 241.3, z: 7304.3}, {x: 7973.5, y: 240.7, z: 7302.5} ];
var cameraLookAts = [ {x: 7971.1, y: 241.3, z: 7304.1}, {x: 7972.1, y: 241.3, z: 7304.1}, {x: 7972.1, y: 241.3, z: 7304.1}, {x: 7972.1, y: 241.3, z: 7304.1}, {x: 7972.1, y: 241.3, z: 7304.1}, {x: 7971.3, y: 241.3, z: 7304.2} ];
function saveCameraState() {
oldMode = Camera.mode;
avatarPosition = MyAvatar.position;
Camera.setModeShiftPeriod(0.0);
Camera.mode = "independent";
}
function restoreCameraState() {
Camera.stopLooking();
Camera.mode = oldMode;
}
function update(deltaTime) {
if (freeCamera) {
var delta = Vec3.subtract(MyAvatar.position, avatarPosition);
if (Vec3.length(delta) > 0.05) {
cameraNumber = 0;
freeCamera = false;
restoreCameraState();
}
}
}
function keyPressEvent(event) {
var choice = parseInt(event.text);
if ((choice > 0) && (choice <= cameraLocations.length)) {
print("camera " + choice);
if (!freeCamera) {
saveCameraState();
freeCamera = true;
}
Camera.mode = "independent";
Camera.setPosition(cameraLocations[choice - 1]);
Camera.keepLookingAt(cameraLookAts[choice - 1]);
}
if (event.text == "ESC") {
cameraNumber = 0;
freeCamera = false;
restoreCameraState();
}
if (event.text == "0") {
// Show camera location in log
var cameraLocation = Camera.getPosition();
print(cameraLocation.x + ", " + cameraLocation.y + ", " + cameraLocation.z);
}
}
Script.update.connect(update);
Controller.keyPressEvent.connect(keyPressEvent);


@ -1,72 +0,0 @@
//
// concertCamera.js
//
// Created by Philip Rosedale on June 24, 2014
// Copyright 2014 High Fidelity, Inc.
//
// Move a camera through a series of pre-set locations by pressing number keys
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var oldMode;
var avatarPosition;
var cameraNumber = 0;
var freeCamera = false;
var cameraLocations = [ {x: 8027.5, y: 237.5, z: 7305.7}, {x: 8027.5, y: 237.5, z: 7306.6}, {x: 8027.5, y: 237.5, z: 7308.0}, {x: 8027.5, y: 237.5, z: 7303.0}, {x: 8030.8, y: 238.6, z: 7311.4}, {x: 8030.9, y: 237.1, z: 7308.0} ];
var cameraLookAts = [ {x: 8027.5, y: 237.5, z: 7304.0}, {x: 8027.5, y: 237.5, z: 7305.7}, {x: 8027.5, y: 237.5, z: 7304.0}, {x: 8027.5, y: 237.5, z: 7304.0}, {x: 8027.5, y: 237.5, z: 7304.0}, {x: 8027.5, y: 237.5, z: 7304.0} ];
function saveCameraState() {
oldMode = Camera.mode;
avatarPosition = MyAvatar.position;
Camera.setModeShiftPeriod(0.0);
Camera.mode = "independent";
}
function restoreCameraState() {
Camera.stopLooking();
Camera.mode = oldMode;
}
function update(deltaTime) {
if (freeCamera) {
var delta = Vec3.subtract(MyAvatar.position, avatarPosition);
if (Vec3.length(delta) > 0.05) {
cameraNumber = 0;
freeCamera = false;
restoreCameraState();
}
}
}
function keyPressEvent(event) {
var choice = parseInt(event.text);
if ((choice > 0) && (choice <= cameraLocations.length)) {
print("camera " + choice);
if (!freeCamera) {
saveCameraState();
freeCamera = true;
}
Camera.mode = "independent";
Camera.setPosition(cameraLocations[choice - 1]);
Camera.keepLookingAt(cameraLookAts[choice - 1]);
}
if (event.text == "ESC") {
cameraNumber = 0;
freeCamera = false;
restoreCameraState();
}
if (event.text == "0") {
// Show camera location in log
var cameraLocation = Camera.getPosition();
print(cameraLocation.x + ", " + cameraLocation.y + ", " + cameraLocation.z);
}
}
Script.update.connect(update);
Controller.keyPressEvent.connect(keyPressEvent);


@ -1,72 +0,0 @@
//
// concertCamera.js
//
// Created by Philip Rosedale on June 24, 2014
// Copyright 2014 High Fidelity, Inc.
//
// Move a camera through a series of pre-set locations by pressing number keys
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var oldMode;
var avatarPosition;
var cameraNumber = 0;
var freeCamera = false;
var cameraLocations = [ {x: 2921.5, y: 251.3, z: 8254.8}, {x: 2921.5, y: 251.3, z: 8254.4}, {x: 2921.5, y: 251.3, z: 8252.2}, {x: 2921.5, y: 251.3, z: 8247.2}, {x: 2921.4, y: 251.3, z: 8255.7} ];
var cameraLookAts = [ {x: 2921.5, y: 251.3, z: 8255.7}, {x: 2921.5, y: 251.3, z: 8255.7}, {x: 2921.5, y: 251.3, z: 8255.7}, {x: 2921.5, y: 251.3, z: 8255.7}, {x: 2921.4 , y: 251.3, z: 8255.1} ];
function saveCameraState() {
oldMode = Camera.mode;
avatarPosition = MyAvatar.position;
Camera.setModeShiftPeriod(0.0);
Camera.mode = "independent";
}
function restoreCameraState() {
Camera.stopLooking();
Camera.mode = oldMode;
}
function update(deltaTime) {
if (freeCamera) {
var delta = Vec3.subtract(MyAvatar.position, avatarPosition);
if (Vec3.length(delta) > 0.05) {
cameraNumber = 0;
freeCamera = false;
restoreCameraState();
}
}
}
function keyPressEvent(event) {
var choice = parseInt(event.text);
if ((choice > 0) && (choice <= cameraLocations.length)) {
print("camera " + choice);
if (!freeCamera) {
saveCameraState();
freeCamera = true;
}
Camera.mode = "independent";
Camera.setPosition(cameraLocations[choice - 1]);
Camera.keepLookingAt(cameraLookAts[choice - 1]);
}
if (event.text == "ESC") {
cameraNumber = 0;
freeCamera = false;
restoreCameraState();
}
if (event.text == "0") {
// Show camera location in log
var cameraLocation = Camera.getPosition();
print(cameraLocation.x + ", " + cameraLocation.y + ", " + cameraLocation.z);
}
}
Script.update.connect(update);
Controller.keyPressEvent.connect(keyPressEvent);


@ -22,6 +22,9 @@ EntityPropertyDialogBox = (function () {
var dimensionZ;
var rescalePercentage;
var editModelID = -1;
var previousAnimationIsPlaying;
var previousAnimationFrameIndex;
var previousAnimationSettings;
that.cleanup = function () {
};
@ -47,10 +50,15 @@ EntityPropertyDialogBox = (function () {
array.push({ label: "Animation URL:", value: properties.animationURL });
index++;
array.push({ label: "Animation is playing:", value: properties.animationIsPlaying });
previousAnimationIsPlaying = properties.animationIsPlaying;
index++;
array.push({ label: "Animation FPS:", value: properties.animationFPS });
index++;
array.push({ label: "Animation Frame:", value: properties.animationFrameIndex });
previousAnimationFrameIndex = properties.animationFrameIndex;
index++;
array.push({ label: "Animation Settings:", value: properties.animationSettings });
previousAnimationSettings = properties.animationSettings;
index++;
array.push({ label: "Textures:", value: properties.textures });
index++;
@ -237,9 +245,29 @@ EntityPropertyDialogBox = (function () {
if (properties.type == "Model") {
properties.modelURL = array[index++].value;
properties.animationURL = array[index++].value;
properties.animationIsPlaying = array[index++].value;
var newAnimationIsPlaying = array[index++].value;
if (previousAnimationIsPlaying != newAnimationIsPlaying) {
properties.animationIsPlaying = newAnimationIsPlaying;
} else {
delete properties.animationIsPlaying;
}
properties.animationFPS = array[index++].value;
properties.animationFrameIndex = array[index++].value;
var newAnimationFrameIndex = array[index++].value;
if (previousAnimationFrameIndex != newAnimationFrameIndex) {
properties.animationFrameIndex = newAnimationFrameIndex;
} else {
delete properties.animationFrameIndex;
}
var newAnimationSettings = array[index++].value;
if (previousAnimationSettings != newAnimationSettings) {
properties.animationSettings = newAnimationSettings;
} else {
delete properties.animationSettings;
}
properties.textures = array[index++].value;
index++; // skip textureNames label
}


@ -109,6 +109,7 @@ static unsigned STARFIELD_SEED = 1;
static const int BANDWIDTH_METER_CLICK_MAX_DRAG_LENGTH = 6; // farther dragged clicks are ignored
const qint64 MAXIMUM_CACHE_SIZE = 10737418240; // 10GB
static QTimer* idleTimer = NULL;
@ -159,6 +160,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_lastQueriedViewFrustum(),
_lastQueriedTime(usecTimestampNow()),
_mirrorViewRect(QRect(MIRROR_VIEW_LEFT_PADDING, MIRROR_VIEW_TOP_PADDING, MIRROR_VIEW_WIDTH, MIRROR_VIEW_HEIGHT)),
_viewTransform(new gpu::Transform()),
_scaleMirror(1.0f),
_rotateMirror(0.0f),
_raiseMirror(0.0f),
@ -836,12 +838,14 @@ void Application::controlledBroadcastToNodes(const QByteArray& packet, const Nod
}
bool Application::event(QEvent* event) {
// handle custom URL
if (event->type() == QEvent::FileOpen) {
QFileOpenEvent* fileEvent = static_cast<QFileOpenEvent*>(event);
if (fileEvent->url().isValid()) {
openUrl(fileEvent->url());
if (!fileEvent->url().isEmpty()) {
AddressManager::getInstance().handleLookupString(fileEvent->url().toLocalFile());
}
return false;
@ -2795,6 +2799,8 @@ void Application::updateShadowMap() {
// store view matrix without translation, which we'll use for precision-sensitive objects
updateUntranslatedViewMatrix();
// TODO: assign an equivalent viewTransform object to the application to match the current path which uses glMatrixStack
// setViewTransform(viewTransform);
glEnable(GL_POLYGON_OFFSET_FILL);
glPolygonOffset(1.1f, 4.0f); // magic numbers courtesy http://www.eecs.berkeley.edu/~ravir/6160/papers/shadowmaps.ppt
@ -2902,6 +2908,19 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
// store view matrix without translation, which we'll use for precision-sensitive objects
updateUntranslatedViewMatrix(-whichCamera.getPosition());
// Equivalent to what is happening with _untranslatedViewMatrix and the _viewMatrixTranslation
// the viewTransform object is updated with the correct values and saved;
// this is what is used for rendering the Entities and avatars
gpu::Transform viewTransform;
viewTransform.setTranslation(whichCamera.getPosition());
viewTransform.setRotation(rotation);
viewTransform.postTranslate(eyeOffsetPos);
viewTransform.postRotate(eyeOffsetOrient);
if (whichCamera.getMode() == CAMERA_MODE_MIRROR) {
viewTransform.setScale(gpu::Transform::Vec3(-1.0f, 1.0f, 1.0f));
}
setViewTransform(viewTransform);
glTranslatef(_viewMatrixTranslation.x, _viewMatrixTranslation.y, _viewMatrixTranslation.z);
// Setup 3D lights (after the camera transform, so that they are positioned in world space)
@ -3019,13 +3038,16 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
}
}
bool mirrorMode = (whichCamera.getMode() == CAMERA_MODE_MIRROR);
{
PerformanceTimer perfTimer("avatars");
_avatarManager.renderAvatars(mirrorMode ? Avatar::MIRROR_RENDER_MODE : Avatar::NORMAL_RENDER_MODE,
false, selfAvatarOnly);
}
{
PROFILE_RANGE("DeferredLighting");
PerformanceTimer perfTimer("lighting");
@ -3084,7 +3106,7 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
emit renderingInWorldInterface();
}
}
if (Menu::getInstance()->isOptionChecked(MenuOption::Wireframe)) {
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
}
@ -3095,6 +3117,10 @@ void Application::updateUntranslatedViewMatrix(const glm::vec3& viewMatrixTransl
_viewMatrixTranslation = viewMatrixTranslation;
}
void Application::setViewTransform(const gpu::Transform& view) {
(*_viewTransform) = view;
}
void Application::loadTranslatedViewMatrix(const glm::vec3& translation) {
glLoadMatrixf((const GLfloat*)&_untranslatedViewMatrix);
glTranslatef(translation.x + _viewMatrixTranslation.x, translation.y + _viewMatrixTranslation.y,


@ -232,6 +232,9 @@ public:
const glm::vec3& getViewMatrixTranslation() const { return _viewMatrixTranslation; }
void setViewMatrixTranslation(const glm::vec3& translation) { _viewMatrixTranslation = translation; }
const gpu::TransformPointer& getViewTransform() const { return _viewTransform; }
void setViewTransform(const gpu::Transform& view);
/// if you need to access the application settings, use lockSettings()/unlockSettings()
QSettings* lockSettings() { _settingsMutex.lock(); return _settings; }
void unlockSettings() { _settingsMutex.unlock(); }
@ -523,6 +526,7 @@ private:
QRect _mirrorViewRect;
RearMirrorTools* _rearMirrorTools;
gpu::TransformPointer _viewTransform;
glm::mat4 _untranslatedViewMatrix;
glm::vec3 _viewMatrixTranslation;
glm::mat4 _projectionMatrix;


@ -437,10 +437,15 @@ Menu::Menu() :
addCheckableActionToQMenuAndActionHash(entitiesDebugMenu, MenuOption::DisplayModelElementChildProxies, 0, false);
addCheckableActionToQMenuAndActionHash(entitiesDebugMenu, MenuOption::DisableLightEntities, 0, false);
addCheckableActionToQMenuAndActionHash(entitiesDebugMenu, MenuOption::DontReduceMaterialSwitches, 0, false);
addCheckableActionToQMenuAndActionHash(entitiesDebugMenu, MenuOption::RenderEntitiesAsScene, 0, false);
QMenu* entityCullingMenu = entitiesDebugMenu->addMenu("Culling");
addCheckableActionToQMenuAndActionHash(entityCullingMenu, MenuOption::DontCullOutOfViewMeshParts, 0, false);
addCheckableActionToQMenuAndActionHash(entityCullingMenu, MenuOption::DontCullTooSmallMeshParts, 0, false);
addCheckableActionToQMenuAndActionHash(entityCullingMenu, MenuOption::DontReduceMaterialSwitches, 0, false);
QMenu* voxelOptionsMenu = developerMenu->addMenu("Voxels");
addCheckableActionToQMenuAndActionHash(voxelOptionsMenu, MenuOption::VoxelTextures);


@ -447,6 +447,7 @@ namespace MenuOption {
const QString ReloadAllScripts = "Reload All Scripts";
const QString RenderBoundingCollisionShapes = "Show Bounding Collision Shapes";
const QString RenderDualContourSurfaces = "Render Dual Contour Surfaces";
const QString RenderEntitiesAsScene = "Render Entities as Scene";
const QString RenderFocusIndicator = "Show Eye Focus";
const QString RenderHeadCollisionShapes = "Show Head Collision Shapes";
const QString RenderHeightfields = "Render Heightfields";


@ -39,6 +39,7 @@
#include "Recorder.h"
#include "devices/Faceshift.h"
#include "devices/OculusManager.h"
#include "renderer/AnimationHandle.h"
#include "ui/TextRenderer.h"
using namespace std;


@ -253,7 +253,45 @@ void EntityTreeRenderer::checkEnterLeaveEntities() {
}
void EntityTreeRenderer::render(RenderArgs::RenderMode renderMode) {
OctreeRenderer::render(renderMode);
bool dontRenderAsScene = !Menu::getInstance()->isOptionChecked(MenuOption::RenderEntitiesAsScene);
if (dontRenderAsScene) {
OctreeRenderer::render(renderMode);
} else {
if (_tree) {
Model::startScene();
RenderArgs args = { this, _viewFrustum, getSizeScale(), getBoundaryLevelAdjust(), renderMode,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
_tree->lockForRead();
_tree->recurseTreeWithOperation(renderOperation, &args);
Model::RenderMode modelRenderMode = renderMode == RenderArgs::SHADOW_RENDER_MODE
? Model::SHADOW_RENDER_MODE : Model::DEFAULT_RENDER_MODE;
// we must call endScene while we still have the tree locked so that no one deletes a model
// on us while rendering the scene
Model::endScene(modelRenderMode, &args);
_tree->unlock();
// stats...
_meshesConsidered = args._meshesConsidered;
_meshesRendered = args._meshesRendered;
_meshesOutOfView = args._meshesOutOfView;
_meshesTooSmall = args._meshesTooSmall;
_elementsTouched = args._elementsTouched;
_itemsRendered = args._itemsRendered;
_itemsOutOfView = args._itemsOutOfView;
_itemsTooSmall = args._itemsTooSmall;
_materialSwitches = args._materialSwitches;
_trianglesRendered = args._trianglesRendered;
_quadsRendered = args._quadsRendered;
_translucentMeshPartsRendered = args._translucentMeshPartsRendered;
_opaqueMeshPartsRendered = args._opaqueMeshPartsRendered;
}
}
deleteReleasedModels(); // seems like as good as any other place to do some memory cleanup
}


@ -172,7 +172,12 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
// TODO: this is the majority of model render time. And rendering of a cube model vs the basic Box render
// is significantly more expensive. Is there a way to call this that doesn't cost us as much?
PerformanceTimer perfTimer("model->render");
_model->render(alpha, modelRenderMode, args);
bool dontRenderAsScene = !Menu::getInstance()->isOptionChecked(MenuOption::RenderEntitiesAsScene);
if (dontRenderAsScene) {
_model->render(alpha, modelRenderMode, args);
} else {
_model->renderInScene(alpha, args);
}
} else {
// if we couldn't get a model, then just draw a cube
glColor3ub(getColor()[RED_INDEX],getColor()[GREEN_INDEX],getColor()[BLUE_INDEX]);


@ -21,7 +21,11 @@ Batch::Batch() :
_commandOffsets(),
_params(),
_resources(),
_data(){
_data(),
_buffers(),
_streamFormats(),
_transforms()
{
}
Batch::~Batch() {
@ -32,8 +36,10 @@ void Batch::clear() {
_commandOffsets.clear();
_params.clear();
_resources.clear();
_buffers.clear();
_data.clear();
_buffers.clear();
_streamFormats.clear();
_transforms.clear();
}
uint32 Batch::cacheResource(Resource* res) {
@ -128,3 +134,22 @@ void Batch::setIndexBuffer(Type type, const BufferPointer& buffer, Offset offset
_params.push_back(_buffers.cache(buffer));
_params.push_back(type);
}
void Batch::setModelTransform(const TransformPointer& model) {
ADD_COMMAND(setModelTransform);
_params.push_back(_transforms.cache(model));
}
void Batch::setViewTransform(const TransformPointer& view) {
ADD_COMMAND(setViewTransform);
_params.push_back(_transforms.cache(view));
}
void Batch::setProjectionTransform(const TransformPointer& proj) {
ADD_COMMAND(setProjectionTransform);
_params.push_back(_transforms.cache(proj));
}
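The three commands above follow the same record-then-replay pattern as the buffer commands: the transform's shared pointer is cached on the batch and only its cache index is recorded as a parameter. A minimal, self-contained sketch of that pattern (the MiniBatch names are illustrative and not part of the gpu:: API):
#include <cstdio>
#include <memory>
#include <vector>

struct MiniBatch {
    std::vector<std::shared_ptr<float>> transformCache; // stands in for Batch::_transforms
    std::vector<size_t> params;                         // stands in for Batch::_params

    size_t cacheTransform(const std::shared_ptr<float>& transform) {
        transformCache.push_back(transform);
        return transformCache.size() - 1;
    }
    void setModelTransform(const std::shared_ptr<float>& transform) {
        params.push_back(cacheTransform(transform)); // record the cache index, not the object
    }
};

int main() {
    MiniBatch batch;
    batch.setModelTransform(std::make_shared<float>(1.0f));
    // Replay: the backend resolves the recorded index back into the cached object.
    std::printf("replayed transform value: %.1f\n", *batch.transformCache[batch.params[0]]);
    return 0;
}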


@ -14,10 +14,10 @@
#include <assert.h>
#include "InterfaceConfig.h"
#include "Transform.h"
#include <vector>
#include "gpu/Format.h"
#include "gpu/Resource.h"
#include "gpu/Stream.h"
#if defined(NSIGHT_FOUND)
@ -50,6 +50,10 @@ enum Primitive {
NUM_PRIMITIVES,
};
typedef ::Transform Transform;
typedef QSharedPointer< ::gpu::Transform > TransformPointer;
typedef std::vector< TransformPointer > Transforms;
class Batch {
public:
typedef Stream::Slot Slot;
@ -60,11 +64,16 @@ public:
void clear();
// Drawcalls
void draw(Primitive primitiveType, uint32 numVertices, uint32 startVertex = 0);
void drawIndexed(Primitive primitiveType, uint32 nbIndices, uint32 startIndex = 0);
void drawInstanced(uint32 nbInstances, Primitive primitiveType, uint32 nbVertices, uint32 startVertex = 0, uint32 startInstance = 0);
void drawIndexedInstanced(uint32 nbInstances, Primitive primitiveType, uint32 nbIndices, uint32 startIndex = 0, uint32 startInstance = 0);
// Input Stage
// InputFormat
// InputBuffers
// IndexBuffer
void setInputFormat(const Stream::FormatPointer& format);
void setInputStream(Slot startChannel, const BufferStream& stream); // not a command, just unroll into a loop of setInputBuffer
@ -72,6 +81,16 @@ public:
void setIndexBuffer(Type type, const BufferPointer& buffer, Offset offset);
// Transform Stage
// Vertex position is transformed by ModelTransform from object space to world space
// Then by the inverse of the ViewTransform from world space to eye space
// and finally projected into clip space by the projection transform.
// WARNING: the ViewTransform transforms from eye space to world space; its inverse is composed
// with the ModelTransform to create the equivalent of the glModelView matrix.
void setModelTransform(const TransformPointer& model);
void setViewTransform(const TransformPointer& view);
void setProjectionTransform(const TransformPointer& proj);
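For reference, a minimal GLM-only sketch of the composition the comment above describes; the function name is illustrative and this is not part of the gpu:: API. Because the ViewTransform stored here goes from eye space to world space, its inverse composed with the ModelTransform reproduces the fixed-function modelview matrix:
#include <glm/glm.hpp>

glm::vec4 objectToClipSpace(const glm::mat4& model,      // object space -> world space
                            const glm::mat4& view,       // eye space -> world space (as set by setViewTransform)
                            const glm::mat4& projection, // eye space -> clip space
                            const glm::vec3& objectPosition) {
    glm::mat4 modelView = glm::inverse(view) * model;    // equivalent of the glModelView matrix
    return projection * modelView * glm::vec4(objectPosition, 1.0f);
}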
// TODO: As long as we have gl calls explicitly issued from interface
// code, we need to be able to record and batch these calls. The long
@ -138,11 +157,13 @@ public:
COMMAND_drawIndexedInstanced,
COMMAND_setInputFormat,
COMMAND_setInputBuffer,
COMMAND_setIndexBuffer,
COMMAND_setModelTransform,
COMMAND_setViewTransform,
COMMAND_setProjectionTransform,
// TODO: As long as we have gl calls explicitly issued from interface
// code, we need to be able to record and batch these calls. The long
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API
@ -266,6 +287,7 @@ public:
typedef Cache<Buffer>::Vector BufferCaches;
typedef Cache<Stream::Format>::Vector StreamFormatCaches;
typedef Cache<Transform>::Vector TransformCaches;
typedef unsigned char Byte;
typedef std::vector<Byte> Bytes;
@ -299,11 +321,12 @@ public:
CommandOffsets _commandOffsets;
Params _params;
Resources _resources;
Bytes _data;
BufferCaches _buffers;
StreamFormatCaches _streamFormats;
TransformCaches _transforms;
Bytes _data;
protected:
};


@ -24,11 +24,13 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
(&::gpu::GLBackend::do_drawIndexedInstanced),
(&::gpu::GLBackend::do_setInputFormat),
(&::gpu::GLBackend::do_setInputBuffer),
(&::gpu::GLBackend::do_setIndexBuffer),
(&::gpu::GLBackend::do_setModelTransform),
(&::gpu::GLBackend::do_setViewTransform),
(&::gpu::GLBackend::do_setProjectionTransform),
(&::gpu::GLBackend::do_glEnable),
(&::gpu::GLBackend::do_glDisable),
@ -111,18 +113,16 @@ static const GLenum _elementTypeToGLType[NUM_TYPES]= {
GLBackend::GLBackend() :
_needInputFormatUpdate(true),
_inputFormat(0),
_inputBuffersState(0),
_inputBuffers(_inputBuffersState.size(), BufferPointer(0)),
_inputBufferOffsets(_inputBuffersState.size(), 0),
_inputBufferStrides(_inputBuffersState.size(), 0),
_indexBuffer(0),
_indexBufferOffset(0),
_inputAttributeActivation(0)
_inputAttributeActivation(0),
_transform()
{
}
@ -183,6 +183,7 @@ void GLBackend::checkGLError() {
void GLBackend::do_draw(Batch& batch, uint32 paramOffset) {
updateInput();
updateTransform();
Primitive primitiveType = (Primitive)batch._params[paramOffset + 2]._uint;
GLenum mode = _primitiveToGLmode[primitiveType];
@ -195,6 +196,7 @@ void GLBackend::do_draw(Batch& batch, uint32 paramOffset) {
void GLBackend::do_drawIndexed(Batch& batch, uint32 paramOffset) {
updateInput();
updateTransform();
Primitive primitiveType = (Primitive)batch._params[paramOffset + 2]._uint;
GLenum mode = _primitiveToGLmode[primitiveType];
@ -425,6 +427,82 @@ void GLBackend::do_setIndexBuffer(Batch& batch, uint32 paramOffset) {
CHECK_GL_ERROR();
}
// Transform Stage
void GLBackend::do_setModelTransform(Batch& batch, uint32 paramOffset) {
TransformPointer modelTransform = batch._transforms.get(batch._params[paramOffset]._uint);
if (_transform._model.isNull() || (modelTransform != _transform._model)) {
_transform._model = modelTransform;
_transform._invalidModel = true;
}
}
void GLBackend::do_setViewTransform(Batch& batch, uint32 paramOffset) {
TransformPointer viewTransform = batch._transforms.get(batch._params[paramOffset]._uint);
if (_transform._view.isNull() || (viewTransform != _transform._view)) {
_transform._view = viewTransform;
_transform._invalidView = true;
}
}
void GLBackend::do_setProjectionTransform(Batch& batch, uint32 paramOffset) {
TransformPointer projectionTransform = batch._transforms.get(batch._params[paramOffset]._uint);
if (_transform._projection.isNull() || (projectionTransform != _transform._projection)) {
_transform._projection = projectionTransform;
_transform._invalidProj = true;
}
}
void GLBackend::updateTransform() {
if (_transform._invalidProj) {
// TODO: implement the projection matrix assignment to gl state
/* if (_transform._lastMode != GL_PROJECTION) {
glMatrixMode(GL_PROJECTION);
_transform._lastMode = GL_PROJECTION;
}
CHECK_GL_ERROR();*/
_transform._invalidProj = false;
}
if (_transform._invalidModel || _transform._invalidView) {
if (!_transform._model.isNull()) {
if (_transform._lastMode != GL_MODELVIEW) {
glMatrixMode(GL_MODELVIEW);
_transform._lastMode = GL_MODELVIEW;
}
Transform::Mat4 modelView;
if (!_transform._view.isNull()) {
Transform mvx;
Transform::inverseMult(mvx, (*_transform._view), (*_transform._model));
mvx.getMatrix(modelView);
} else {
_transform._model->getMatrix(modelView);
}
glLoadMatrixf(reinterpret_cast< const GLfloat* >(&modelView));
} else {
if (!_transform._view.isNull()) {
if (_transform._lastMode != GL_MODELVIEW) {
glMatrixMode(GL_MODELVIEW);
_transform._lastMode = GL_MODELVIEW;
}
Transform::Mat4 modelView;
_transform._view->getInverseMatrix(modelView);
glLoadMatrixf(reinterpret_cast< const GLfloat* >(&modelView));
} else {
// TODO: eventually do something about the matrix when neither view nor model is specified?
// glLoadIdentity();
}
}
CHECK_GL_ERROR();
_transform._invalidModel = false;
_transform._invalidView = false;
}
}
// TODO: As long as we have gl calls explicitly issued from interface
// code, we need to be able to record and batch these calls. The long
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API


@ -52,11 +52,22 @@ public:
protected:
// Draw Stage
void do_draw(Batch& batch, uint32 paramOffset);
void do_drawIndexed(Batch& batch, uint32 paramOffset);
void do_drawInstanced(Batch& batch, uint32 paramOffset);
void do_drawIndexedInstanced(Batch& batch, uint32 paramOffset);
// Input Stage
void do_setInputFormat(Batch& batch, uint32 paramOffset);
void do_setInputBuffer(Batch& batch, uint32 paramOffset);
void do_setIndexBuffer(Batch& batch, uint32 paramOffset);
void updateInput();
bool _needInputFormatUpdate;
Stream::FormatPointer _inputFormat;
typedef std::bitset<MAX_NUM_INPUT_BUFFERS> InputBuffersState;
InputBuffersState _inputBuffersState;
Buffers _inputBuffers;
Offsets _inputBufferOffsets;
Offsets _inputBufferStrides;
@ -68,18 +79,31 @@ protected:
typedef std::bitset<MAX_NUM_ATTRIBUTES> InputActivationCache;
InputActivationCache _inputAttributeActivation;
void do_draw(Batch& batch, uint32 paramOffset);
void do_drawIndexed(Batch& batch, uint32 paramOffset);
void do_drawInstanced(Batch& batch, uint32 paramOffset);
void do_drawIndexedInstanced(Batch& batch, uint32 paramOffset);
// Transform Stage
void do_setModelTransform(Batch& batch, uint32 paramOffset);
void do_setViewTransform(Batch& batch, uint32 paramOffset);
void do_setProjectionTransform(Batch& batch, uint32 paramOffset);
void updateInput();
void do_setInputFormat(Batch& batch, uint32 paramOffset);
void do_setInputBuffer(Batch& batch, uint32 paramOffset);
void updateTransform();
struct TransformStageState {
TransformPointer _model;
TransformPointer _view;
TransformPointer _projection;
bool _invalidModel;
bool _invalidView;
bool _invalidProj;
void do_setVertexBuffer(Batch& batch, uint32 paramOffset);
void do_setIndexBuffer(Batch& batch, uint32 paramOffset);
GLenum _lastMode;
TransformStageState() :
_model(0),
_view(0),
_projection(0),
_invalidModel(true),
_invalidView(true),
_invalidProj(true),
_lastMode(GL_TEXTURE) {}
} _transform;
// TODO: As long as we have gl calls explicitly issued from interface
// code, we need to be able to record and batch these calls. The long


@ -0,0 +1,177 @@
//
// AnimationHandle.cpp
// interface/src/renderer
//
// Created by Andrzej Kapolka on 10/18/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AnimationHandle.h"
#include "Application.h"
void AnimationHandle::setURL(const QUrl& url) {
if (_url != url) {
_animation = Application::getInstance()->getAnimationCache()->getAnimation(_url = url);
_jointMappings.clear();
}
}
static void insertSorted(QList<AnimationHandlePointer>& handles, const AnimationHandlePointer& handle) {
for (QList<AnimationHandlePointer>::iterator it = handles.begin(); it != handles.end(); it++) {
if (handle->getPriority() > (*it)->getPriority()) {
handles.insert(it, handle);
return;
}
}
handles.append(handle);
}
void AnimationHandle::setPriority(float priority) {
if (_priority == priority) {
return;
}
if (isRunning()) {
_model->_runningAnimations.removeOne(_self);
if (priority < _priority) {
replaceMatchingPriorities(priority);
}
_priority = priority;
insertSorted(_model->_runningAnimations, _self);
} else {
_priority = priority;
}
}
void AnimationHandle::setStartAutomatically(bool startAutomatically) {
_animationLoop.setStartAutomatically(startAutomatically);
if (getStartAutomatically() && !isRunning()) {
start();
}
}
void AnimationHandle::setMaskedJoints(const QStringList& maskedJoints) {
_maskedJoints = maskedJoints;
_jointMappings.clear();
}
void AnimationHandle::setRunning(bool running) {
if (isRunning() == running) {
// if we're already running, this is the same as a restart
if (running) {
// move back to the beginning
setFrameIndex(getFirstFrame());
}
return;
}
_animationLoop.setRunning(running);
if (isRunning()) {
if (!_model->_runningAnimations.contains(_self)) {
insertSorted(_model->_runningAnimations, _self);
}
} else {
_model->_runningAnimations.removeOne(_self);
replaceMatchingPriorities(0.0f);
}
emit runningChanged(isRunning());
}
AnimationHandle::AnimationHandle(Model* model) :
QObject(model),
_model(model),
_priority(1.0f)
{
}
AnimationDetails AnimationHandle::getAnimationDetails() const {
AnimationDetails details(_role, _url, getFPS(), _priority, getLoop(), getHold(),
getStartAutomatically(), getFirstFrame(), getLastFrame(), isRunning(), getFrameIndex());
return details;
}
void AnimationHandle::setAnimationDetails(const AnimationDetails& details) {
setRole(details.role);
setURL(details.url);
setFPS(details.fps);
setPriority(details.priority);
setLoop(details.loop);
setHold(details.hold);
setStartAutomatically(details.startAutomatically);
setFirstFrame(details.firstFrame);
setLastFrame(details.lastFrame);
setRunning(details.running);
setFrameIndex(details.frameIndex);
// NOTE: AnimationDetails doesn't support maskedJoints
//setMaskedJoints(const QStringList& maskedJoints);
}
void AnimationHandle::simulate(float deltaTime) {
_animationLoop.simulate(deltaTime);
// update the joint mappings if necessary/possible
if (_jointMappings.isEmpty()) {
if (_model->isActive()) {
_jointMappings = _model->getGeometry()->getJointMappings(_animation);
}
if (_jointMappings.isEmpty()) {
return;
}
if (!_maskedJoints.isEmpty()) {
const FBXGeometry& geometry = _model->getGeometry()->getFBXGeometry();
for (int i = 0; i < _jointMappings.size(); i++) {
int& mapping = _jointMappings[i];
if (mapping != -1 && _maskedJoints.contains(geometry.joints.at(mapping).name)) {
mapping = -1;
}
}
}
}
const FBXGeometry& animationGeometry = _animation->getGeometry();
if (animationGeometry.animationFrames.isEmpty()) {
stop();
return;
}
// TODO: When moving the loop/frame calculations to AnimationLoop class, we changed this behavior
// see AnimationLoop class for more details. Do we need to support clamping the endFrameIndex to
// the max number of frames in the geometry???
//
// float endFrameIndex = qMin(_lastFrame, animationGeometry.animationFrames.size() - (_loop ? 0.0f : 1.0f));
// blend between the closest two frames
applyFrame(getFrameIndex());
}
void AnimationHandle::applyFrame(float frameIndex) {
const FBXGeometry& animationGeometry = _animation->getGeometry();
int frameCount = animationGeometry.animationFrames.size();
const FBXAnimationFrame& floorFrame = animationGeometry.animationFrames.at((int)glm::floor(frameIndex) % frameCount);
const FBXAnimationFrame& ceilFrame = animationGeometry.animationFrames.at((int)glm::ceil(frameIndex) % frameCount);
float frameFraction = glm::fract(frameIndex);
for (int i = 0; i < _jointMappings.size(); i++) {
int mapping = _jointMappings.at(i);
if (mapping != -1) {
JointState& state = _model->_jointStates[mapping];
state.setRotationInConstrainedFrame(safeMix(floorFrame.rotations.at(i), ceilFrame.rotations.at(i), frameFraction), _priority);
}
}
}
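A standalone illustration (plain <cmath>, none of the FBXGeometry/JointState types) of the fractional-frame arithmetic applyFrame() uses before blending the rotations of the two surrounding frames with safeMix() for each mapped joint; the frame count and index are hypothetical:
#include <cmath>
#include <cstdio>

int main() {
    const int frameCount = 24;        // hypothetical animation length
    const float frameIndex = 10.25f;  // fractional index produced by AnimationLoop
    int floorFrame = static_cast<int>(std::floor(frameIndex)) % frameCount; // 10
    int ceilFrame = static_cast<int>(std::ceil(frameIndex)) % frameCount;   // 11
    float frameFraction = frameIndex - std::floor(frameIndex);              // 0.25
    std::printf("blend frames %d and %d at t = %.2f\n", floorFrame, ceilFrame, frameFraction);
    return 0;
}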
void AnimationHandle::replaceMatchingPriorities(float newPriority) {
for (int i = 0; i < _jointMappings.size(); i++) {
int mapping = _jointMappings.at(i);
if (mapping != -1) {
JointState& state = _model->_jointStates[mapping];
if (_priority == state._animationPriority) {
state._animationPriority = newPriority;
}
}
}
}


@ -0,0 +1,110 @@
//
// AnimationHandle.h
// interface/src/renderer
//
// Created by Andrzej Kapolka on 10/18/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AnimationHandle_h
#define hifi_AnimationHandle_h
#include <QObject>
#include <QString>
#include <QStringList>
#include <QUrl>
#include <QVector>
#include <AnimationCache.h>
#include <AnimationLoop.h>
class AnimationHandle;
class Model;
typedef QSharedPointer<AnimationHandle> AnimationHandlePointer;
typedef QWeakPointer<AnimationHandle> WeakAnimationHandlePointer;
/// Represents a handle to a model animation.
class AnimationHandle : public QObject {
Q_OBJECT
public:
void setRole(const QString& role) { _role = role; }
const QString& getRole() const { return _role; }
void setURL(const QUrl& url);
const QUrl& getURL() const { return _url; }
void setPriority(float priority);
float getPriority() const { return _priority; }
void setMaskedJoints(const QStringList& maskedJoints);
const QStringList& getMaskedJoints() const { return _maskedJoints; }
void setFPS(float fps) { _animationLoop.setFPS(fps); }
float getFPS() const { return _animationLoop.getFPS(); }
void setLoop(bool loop) { _animationLoop.setLoop(loop); }
bool getLoop() const { return _animationLoop.getLoop(); }
void setHold(bool hold) { _animationLoop.setHold(hold); }
bool getHold() const { return _animationLoop.getHold(); }
void setStartAutomatically(bool startAutomatically);
bool getStartAutomatically() const { return _animationLoop.getStartAutomatically(); }
void setFirstFrame(float firstFrame) { _animationLoop.setFirstFrame(firstFrame); }
float getFirstFrame() const { return _animationLoop.getFirstFrame(); }
void setLastFrame(float lastFrame) { _animationLoop.setLastFrame(lastFrame); }
float getLastFrame() const { return _animationLoop.getLastFrame(); }
void setRunning(bool running);
bool isRunning() const { return _animationLoop.isRunning(); }
void setFrameIndex(float frameIndex) { _animationLoop.setFrameIndex(frameIndex); }
float getFrameIndex() const { return _animationLoop.getFrameIndex(); }
AnimationDetails getAnimationDetails() const;
void setAnimationDetails(const AnimationDetails& details);
signals:
void runningChanged(bool running);
public slots:
void start() { setRunning(true); }
void stop() { setRunning(false); }
private:
friend class Model;
AnimationHandle(Model* model);
void simulate(float deltaTime);
void applyFrame(float frameIndex);
void replaceMatchingPriorities(float newPriority);
Model* _model;
WeakAnimationHandlePointer _self;
AnimationPointer _animation;
QString _role;
QUrl _url;
float _priority;
QStringList _maskedJoints;
QVector<int> _jointMappings;
AnimationLoop _animationLoop;
};
#endif // hifi_AnimationHandle_h


@ -23,6 +23,7 @@
#include <ShapeCollider.h>
#include <SphereShape.h>
#include "AnimationHandle.h"
#include "Application.h"
#include "Model.h"
@ -517,17 +518,7 @@ void Model::recalcuateMeshBoxes() {
}
}
bool Model::render(float alpha, RenderMode mode, RenderArgs* args) {
PROFILE_RANGE(__FUNCTION__);
// render the attachments
foreach (Model* attachment, _attachments) {
attachment->render(alpha, mode);
}
if (_meshStates.isEmpty()) {
return false;
}
void Model::renderSetup(RenderArgs* args) {
// if we don't have valid mesh boxes, calculate them now, this only matters in cases
// where our caller has passed RenderArgs which will include a view frustum we can cull
// against. We cache the results of these calculations so long as the model hasn't been
@ -549,10 +540,40 @@ bool Model::render(float alpha, RenderMode mode, RenderArgs* args) {
if (!_meshGroupsKnown) {
segregateMeshGroups();
}
}
bool Model::render(float alpha, RenderMode mode, RenderArgs* args) {
PROFILE_RANGE(__FUNCTION__);
// render the attachments
foreach (Model* attachment, _attachments) {
attachment->render(alpha, mode);
}
if (_meshStates.isEmpty()) {
return false;
}
renderSetup(args);
return renderCore(alpha, mode, args);
}
bool Model::renderCore(float alpha, RenderMode mode, RenderArgs* args) {
PROFILE_RANGE(__FUNCTION__);
// Let's introduce a gpu::Batch to capture all the calls to the graphics api
gpu::Batch batch;
_renderBatch.clear();
gpu::Batch& batch = _renderBatch;
GLBATCH(glPushMatrix)();
// Capture the view matrix once for the rendering of this model
if (_transforms.empty()) {
_transforms.push_back(gpu::TransformPointer(new gpu::Transform()));
}
(*_transforms[0]) = gpu::Transform((*Application::getInstance()->getViewTransform()));
// apply entity translation offset to the viewTransform in one go (it's a preTranslate because viewTransform goes from world to eye space)
_transforms[0]->preTranslate(-_translation);
batch.setViewTransform(_transforms[0]);
GLBATCH(glDisable)(GL_COLOR_MATERIAL);
@ -677,11 +698,12 @@ bool Model::render(float alpha, RenderMode mode, RenderArgs* args) {
GLBATCH(glBindBuffer)(GL_ELEMENT_ARRAY_BUFFER, 0);
GLBATCH(glBindTexture)(GL_TEXTURE_2D, 0);
GLBATCH(glPopMatrix)();
// Render!
{
PROFILE_RANGE("render Batch");
::gpu::GLBackend::renderBatch(batch);
batch.clear();
}
// restore all the default material settings
@ -1459,6 +1481,279 @@ void Model::deleteGeometry() {
_blendedBlendshapeCoefficients.clear();
}
// Scene rendering support
QVector<Model*> Model::_modelsInScene;
gpu::Batch Model::_sceneRenderBatch;
void Model::startScene() {
_modelsInScene.clear();
}
void Model::setupBatchTransform(gpu::Batch& batch) {
GLBATCH(glPushMatrix)();
// Capture the view matrix once for the rendering of this model
if (_transforms.empty()) {
_transforms.push_back(gpu::TransformPointer(new gpu::Transform()));
}
(*_transforms[0]) = gpu::Transform((*Application::getInstance()->getViewTransform()));
_transforms[0]->preTranslate(-_translation);
batch.setViewTransform(_transforms[0]);
}
void Model::endScene(RenderMode mode, RenderArgs* args) {
PROFILE_RANGE(__FUNCTION__);
// Let's introduce a gpu::Batch to capture all the calls to the graphics api
_sceneRenderBatch.clear();
gpu::Batch& batch = _sceneRenderBatch;
GLBATCH(glDisable)(GL_COLOR_MATERIAL);
if (mode == DIFFUSE_RENDER_MODE || mode == NORMAL_RENDER_MODE) {
GLBATCH(glDisable)(GL_CULL_FACE);
} else {
GLBATCH(glEnable)(GL_CULL_FACE);
if (mode == SHADOW_RENDER_MODE) {
GLBATCH(glCullFace)(GL_FRONT);
}
}
// render opaque meshes with alpha testing
GLBATCH(glDisable)(GL_BLEND);
GLBATCH(glEnable)(GL_ALPHA_TEST);
if (mode == SHADOW_RENDER_MODE) {
GLBATCH(glAlphaFunc)(GL_EQUAL, 0.0f);
}
/*Application::getInstance()->getTextureCache()->setPrimaryDrawBuffers(
mode == DEFAULT_RENDER_MODE || mode == DIFFUSE_RENDER_MODE,
mode == DEFAULT_RENDER_MODE || mode == NORMAL_RENDER_MODE,
mode == DEFAULT_RENDER_MODE);
*/
{
GLenum buffers[3];
int bufferCount = 0;
if (mode == DEFAULT_RENDER_MODE || mode == DIFFUSE_RENDER_MODE) {
buffers[bufferCount++] = GL_COLOR_ATTACHMENT0;
}
if (mode == DEFAULT_RENDER_MODE || mode == NORMAL_RENDER_MODE) {
buffers[bufferCount++] = GL_COLOR_ATTACHMENT1;
}
if (mode == DEFAULT_RENDER_MODE) {
buffers[bufferCount++] = GL_COLOR_ATTACHMENT2;
}
GLBATCH(glDrawBuffers)(bufferCount, buffers);
}
const float DEFAULT_ALPHA_THRESHOLD = 0.5f;
int opaqueMeshPartsRendered = 0;
// now, for each model in the scene, render the mesh portions
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
opaqueMeshPartsRendered += model->renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, false, false, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
opaqueMeshPartsRendered += model->renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, false, true, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
opaqueMeshPartsRendered += model->renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, true, false, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
opaqueMeshPartsRendered += model->renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, true, true, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
opaqueMeshPartsRendered += model->renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, true, false, false, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
opaqueMeshPartsRendered += model->renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, true, false, true, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
opaqueMeshPartsRendered += model->renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, true, true, false, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
opaqueMeshPartsRendered += model->renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, true, false, true, args);
GLBATCH(glPopMatrix)();
}
// render translucent meshes afterwards
//Application::getInstance()->getTextureCache()->setPrimaryDrawBuffers(false, true, true);
{
GLenum buffers[2];
int bufferCount = 0;
buffers[bufferCount++] = GL_COLOR_ATTACHMENT1;
buffers[bufferCount++] = GL_COLOR_ATTACHMENT2;
GLBATCH(glDrawBuffers)(bufferCount, buffers);
}
int translucentParts = 0;
const float MOSTLY_OPAQUE_THRESHOLD = 0.75f;
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, false, false, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, false, true, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, true, false, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, true, true, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, true, false, false, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, true, false, true, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, true, true, false, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, true, false, true, args);
GLBATCH(glPopMatrix)();
}
GLBATCH(glDisable)(GL_ALPHA_TEST);
GLBATCH(glEnable)(GL_BLEND);
GLBATCH(glDepthMask)(false);
GLBATCH(glDepthFunc)(GL_LEQUAL);
//Application::getInstance()->getTextureCache()->setPrimaryDrawBuffers(true);
{
GLenum buffers[1];
int bufferCount = 0;
buffers[bufferCount++] = GL_COLOR_ATTACHMENT0;
GLBATCH(glDrawBuffers)(bufferCount, buffers);
}
if (mode == DEFAULT_RENDER_MODE || mode == DIFFUSE_RENDER_MODE) {
const float MOSTLY_TRANSPARENT_THRESHOLD = 0.0f;
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, false, false, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, false, true, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, true, false, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, true, true, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, true, false, false, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, true, false, true, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, true, true, false, args);
GLBATCH(glPopMatrix)();
}
foreach(Model* model, _modelsInScene) {
model->setupBatchTransform(batch);
translucentParts += model->renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, true, false, true, args);
GLBATCH(glPopMatrix)();
}
}
GLBATCH(glDepthMask)(true);
GLBATCH(glDepthFunc)(GL_LESS);
GLBATCH(glDisable)(GL_CULL_FACE);
if (mode == SHADOW_RENDER_MODE) {
GLBATCH(glCullFace)(GL_BACK);
}
// deactivate vertex arrays after drawing
GLBATCH(glDisableClientState)(GL_NORMAL_ARRAY);
GLBATCH(glDisableClientState)(GL_VERTEX_ARRAY);
GLBATCH(glDisableClientState)(GL_TEXTURE_COORD_ARRAY);
GLBATCH(glDisableClientState)(GL_COLOR_ARRAY);
GLBATCH(glDisableVertexAttribArray)(gpu::Stream::TANGENT);
GLBATCH(glDisableVertexAttribArray)(gpu::Stream::SKIN_CLUSTER_INDEX);
GLBATCH(glDisableVertexAttribArray)(gpu::Stream::SKIN_CLUSTER_WEIGHT);
// bind with 0 to switch back to normal operation
GLBATCH(glBindBuffer)(GL_ARRAY_BUFFER, 0);
GLBATCH(glBindBuffer)(GL_ELEMENT_ARRAY_BUFFER, 0);
GLBATCH(glBindTexture)(GL_TEXTURE_2D, 0);
// Render!
{
PROFILE_RANGE("render Batch");
::gpu::GLBackend::renderBatch(batch);
}
// restore all the default material settings
Application::getInstance()->setupWorldLight();
if (args) {
args->_translucentMeshPartsRendered = translucentParts;
args->_opaqueMeshPartsRendered = opaqueMeshPartsRendered;
}
}
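The unrolled chains in endScene() above walk the (hasTangents, hasSpecular, isSkinned) flag combinations of renderMeshes() for each translucency pass. A small self-contained sketch that merely enumerates those combinations, for orientation only:
#include <cstdio>

int main() {
    for (int combo = 0; combo < 8; ++combo) {
        bool hasTangents = (combo & 4) != 0;
        bool hasSpecular = (combo & 2) != 0;
        bool isSkinned = (combo & 1) != 0;
        std::printf("hasTangents=%d hasSpecular=%d isSkinned=%d\n", hasTangents, hasSpecular, isSkinned);
    }
    return 0;
}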
bool Model::renderInScene(float alpha, RenderArgs* args) {
// render the attachments
foreach (Model* attachment, _attachments) {
attachment->renderInScene(alpha);
}
if (_meshStates.isEmpty()) {
return false;
}
renderSetup(args);
_modelsInScene.push_back(this);
return true;
}
void Model::segregateMeshGroups() {
_meshesTranslucentTangents.clear();
_meshesTranslucent.clear();
@ -1849,20 +2144,17 @@ int Model::renderMeshes(gpu::Batch& batch, RenderMode mode, bool translucent, fl
}
GLBATCH(glPushMatrix)();
//Application::getInstance()->loadTranslatedViewMatrix(_translation);
GLBATCH(glLoadMatrixf)((const GLfloat*)&Application::getInstance()->getUntranslatedViewMatrix());
glm::vec3 viewMatTranslation = Application::getInstance()->getViewMatrixTranslation();
GLBATCH(glTranslatef)(_translation.x + viewMatTranslation.x, _translation.y + viewMatTranslation.y,
_translation.z + viewMatTranslation.z);
const MeshState& state = _meshStates.at(i);
if (state.clusterMatrices.size() > 1) {
GLBATCH(glUniformMatrix4fv)(skinLocations->clusterMatrices, state.clusterMatrices.size(), false,
(const float*)state.clusterMatrices.constData());
batch.setModelTransform(gpu::TransformPointer());
} else {
GLBATCH(glMultMatrixf)((const GLfloat*)&state.clusterMatrices[0]);
gpu::TransformPointer modelTransform(new gpu::Transform(state.clusterMatrices[0]));
batch.setModelTransform(modelTransform);
}
if (mesh.blendshapes.isEmpty()) {
batch.setInputFormat(networkMesh._vertexFormat);
batch.setInputStream(0, *networkMesh._vertexStream);
@ -1980,166 +2272,3 @@ int Model::renderMeshes(gpu::Batch& batch, RenderMode mode, bool translucent, fl
return meshPartsRendered;
}
void AnimationHandle::setURL(const QUrl& url) {
if (_url != url) {
_animation = Application::getInstance()->getAnimationCache()->getAnimation(_url = url);
_jointMappings.clear();
}
}
static void insertSorted(QList<AnimationHandlePointer>& handles, const AnimationHandlePointer& handle) {
for (QList<AnimationHandlePointer>::iterator it = handles.begin(); it != handles.end(); it++) {
if (handle->getPriority() > (*it)->getPriority()) {
handles.insert(it, handle);
return;
}
}
handles.append(handle);
}
void AnimationHandle::setPriority(float priority) {
if (_priority == priority) {
return;
}
if (_running) {
_model->_runningAnimations.removeOne(_self);
if (priority < _priority) {
replaceMatchingPriorities(priority);
}
_priority = priority;
insertSorted(_model->_runningAnimations, _self);
} else {
_priority = priority;
}
}
void AnimationHandle::setStartAutomatically(bool startAutomatically) {
if ((_startAutomatically = startAutomatically) && !_running) {
start();
}
}
void AnimationHandle::setMaskedJoints(const QStringList& maskedJoints) {
_maskedJoints = maskedJoints;
_jointMappings.clear();
}
void AnimationHandle::setRunning(bool running) {
if (_running == running) {
if (running) {
// move back to the beginning
_frameIndex = _firstFrame;
}
return;
}
if ((_running = running)) {
if (!_model->_runningAnimations.contains(_self)) {
insertSorted(_model->_runningAnimations, _self);
}
_frameIndex = _firstFrame;
} else {
_model->_runningAnimations.removeOne(_self);
replaceMatchingPriorities(0.0f);
}
emit runningChanged(_running);
}
AnimationHandle::AnimationHandle(Model* model) :
QObject(model),
_model(model),
_fps(30.0f),
_priority(1.0f),
_loop(false),
_hold(false),
_startAutomatically(false),
_firstFrame(0.0f),
_lastFrame(FLT_MAX),
_running(false) {
}
AnimationDetails AnimationHandle::getAnimationDetails() const {
AnimationDetails details(_role, _url, _fps, _priority, _loop, _hold,
_startAutomatically, _firstFrame, _lastFrame, _running, _frameIndex);
return details;
}
void AnimationHandle::simulate(float deltaTime) {
_frameIndex += deltaTime * _fps;
// update the joint mappings if necessary/possible
if (_jointMappings.isEmpty()) {
if (_model->isActive()) {
_jointMappings = _model->getGeometry()->getJointMappings(_animation);
}
if (_jointMappings.isEmpty()) {
return;
}
if (!_maskedJoints.isEmpty()) {
const FBXGeometry& geometry = _model->getGeometry()->getFBXGeometry();
for (int i = 0; i < _jointMappings.size(); i++) {
int& mapping = _jointMappings[i];
if (mapping != -1 && _maskedJoints.contains(geometry.joints.at(mapping).name)) {
mapping = -1;
}
}
}
}
const FBXGeometry& animationGeometry = _animation->getGeometry();
if (animationGeometry.animationFrames.isEmpty()) {
stop();
return;
}
float endFrameIndex = qMin(_lastFrame, animationGeometry.animationFrames.size() - (_loop ? 0.0f : 1.0f));
float startFrameIndex = qMin(_firstFrame, endFrameIndex);
if ((!_loop && (_frameIndex < startFrameIndex || _frameIndex > endFrameIndex)) || startFrameIndex == endFrameIndex) {
// passed the end; apply the last frame
applyFrame(glm::clamp(_frameIndex, startFrameIndex, endFrameIndex));
if (!_hold) {
stop();
}
return;
}
// wrap within the desired range
if (_frameIndex < startFrameIndex) {
_frameIndex = endFrameIndex - glm::mod(endFrameIndex - _frameIndex, endFrameIndex - startFrameIndex);
} else if (_frameIndex > endFrameIndex) {
_frameIndex = startFrameIndex + glm::mod(_frameIndex - startFrameIndex, endFrameIndex - startFrameIndex);
}
// blend between the closest two frames
applyFrame(_frameIndex);
}
void AnimationHandle::applyFrame(float frameIndex) {
const FBXGeometry& animationGeometry = _animation->getGeometry();
int frameCount = animationGeometry.animationFrames.size();
const FBXAnimationFrame& floorFrame = animationGeometry.animationFrames.at((int)glm::floor(frameIndex) % frameCount);
const FBXAnimationFrame& ceilFrame = animationGeometry.animationFrames.at((int)glm::ceil(frameIndex) % frameCount);
float frameFraction = glm::fract(frameIndex);
for (int i = 0; i < _jointMappings.size(); i++) {
int mapping = _jointMappings.at(i);
if (mapping != -1) {
JointState& state = _model->_jointStates[mapping];
state.setRotationInConstrainedFrame(safeMix(floorFrame.rotations.at(i), ceilFrame.rotations.at(i), frameFraction), _priority);
}
}
}
void AnimationHandle::replaceMatchingPriorities(float newPriority) {
for (int i = 0; i < _jointMappings.size(); i++) {
int mapping = _jointMappings.at(i);
if (mapping != -1) {
JointState& state = _model->_jointStates[mapping];
if (_priority == state._animationPriority) {
state._animationPriority = newPriority;
}
}
}
}


@ -16,10 +16,12 @@
#include <QObject>
#include <QUrl>
#include "Transform.h"
#include <AABox.h>
#include <AnimationCache.h>
#include <PhysicsEntity.h>
#include "AnimationHandle.h"
#include "GeometryCache.h"
#include "InterfaceConfig.h"
#include "JointState.h"
@ -28,19 +30,13 @@
class QScriptEngine;
class AnimationHandle;
class Shape;
class RenderArgs;
class ViewFrustum;
typedef QSharedPointer<AnimationHandle> AnimationHandlePointer;
typedef QWeakPointer<AnimationHandle> WeakAnimationHandlePointer;
namespace gpu {
class Batch;
}
#include "gpu/Stream.h"
#include "gpu/Batch.h"
/// A generic 3D model displaying geometry loaded from a URL.
class Model : public QObject, public PhysicsEntity {
@ -93,6 +89,11 @@ public:
enum RenderMode { DEFAULT_RENDER_MODE, SHADOW_RENDER_MODE, DIFFUSE_RENDER_MODE, NORMAL_RENDER_MODE };
bool render(float alpha = 1.0f, RenderMode mode = DEFAULT_RENDER_MODE, RenderArgs* args = NULL);
// Scene rendering support
static void startScene();
bool renderInScene(float alpha = 1.0f, RenderArgs* args = NULL);
static void endScene(RenderMode mode = DEFAULT_RENDER_MODE, RenderArgs* args = NULL);
/// Sets the URL of the model to render.
/// \param fallback the URL of a fallback model to render if the requested model fails to load
@ -259,14 +260,13 @@ protected:
/// Computes and returns the extended length of the limb terminating at the specified joint and starting at the joint's
/// first free ancestor.
float getLimbLength(int jointIndex) const;
private:
friend class AnimationHandle;
void applyNextGeometry();
void deleteGeometry();
int renderMeshes(gpu::Batch& batch, RenderMode mode, bool translucent, float alphaThreshold, bool hasTangents, bool hasSpecular, bool isSkinned, RenderArgs* args = NULL);
QVector<JointState> createJointStates(const FBXGeometry& geometry);
void initJointTransforms();
@ -283,7 +283,9 @@ private:
QUrl _url;
gpu::Buffers _blendedVertexBuffers;
gpu::Transforms _transforms;
gpu::Batch _renderBatch;
QVector<QVector<QSharedPointer<Texture> > > _dilatedTextures;
QVector<Model*> _attachments;
@ -394,91 +396,25 @@ private:
QVector<int> _meshesOpaqueTangentsSpecularSkinned;
QVector<int> _meshesOpaqueSpecularSkinned;
// Scene rendering support
static QVector<Model*> _modelsInScene;
static gpu::Batch _sceneRenderBatch;
static void endSceneSimple(RenderMode mode = DEFAULT_RENDER_MODE, RenderArgs* args = NULL);
static void endSceneSplitPass(RenderMode mode = DEFAULT_RENDER_MODE, RenderArgs* args = NULL);
// helper functions used by render() or renderInScene()
void renderSetup(RenderArgs* args);
bool renderCore(float alpha, RenderMode mode, RenderArgs* args);
int renderMeshes(gpu::Batch& batch, RenderMode mode, bool translucent, float alphaThreshold,
bool hasTangents, bool hasSpecular, bool isSkinned, RenderArgs* args = NULL);
void setupBatchTransform(gpu::Batch& batch);
};
Q_DECLARE_METATYPE(QPointer<Model>)
Q_DECLARE_METATYPE(QWeakPointer<NetworkGeometry>)
Q_DECLARE_METATYPE(QVector<glm::vec3>)
/// Represents a handle to a model animation.
class AnimationHandle : public QObject {
Q_OBJECT
public:
void setRole(const QString& role) { _role = role; }
const QString& getRole() const { return _role; }
void setURL(const QUrl& url);
const QUrl& getURL() const { return _url; }
void setFPS(float fps) { _fps = fps; }
float getFPS() const { return _fps; }
void setPriority(float priority);
float getPriority() const { return _priority; }
void setLoop(bool loop) { _loop = loop; }
bool getLoop() const { return _loop; }
void setHold(bool hold) { _hold = hold; }
bool getHold() const { return _hold; }
void setStartAutomatically(bool startAutomatically);
bool getStartAutomatically() const { return _startAutomatically; }
void setFirstFrame(float firstFrame) { _firstFrame = firstFrame; }
float getFirstFrame() const { return _firstFrame; }
void setLastFrame(float lastFrame) { _lastFrame = lastFrame; }
float getLastFrame() const { return _lastFrame; }
void setMaskedJoints(const QStringList& maskedJoints);
const QStringList& getMaskedJoints() const { return _maskedJoints; }
void setRunning(bool running);
bool isRunning() const { return _running; }
void setFrameIndex(float frameIndex) { _frameIndex = glm::clamp(frameIndex, _firstFrame, _lastFrame); }
float getFrameIndex() const { return _frameIndex; }
AnimationDetails getAnimationDetails() const;
signals:
void runningChanged(bool running);
public slots:
void start() { setRunning(true); }
void stop() { setRunning(false); }
private:
friend class Model;
AnimationHandle(Model* model);
void simulate(float deltaTime);
void applyFrame(float frameIndex);
void replaceMatchingPriorities(float newPriority);
Model* _model;
WeakAnimationHandlePointer _self;
AnimationPointer _animation;
QString _role;
QUrl _url;
float _fps;
float _priority;
bool _loop;
bool _hold;
bool _startAutomatically;
float _firstFrame;
float _lastFrame;
QStringList _maskedJoints;
bool _running;
QVector<int> _jointMappings;
float _frameIndex;
};
#endif // hifi_Model_h

View file

@ -94,5 +94,4 @@ Q_DECLARE_METATYPE(AnimationDetails);
QScriptValue animationDetailsToScriptValue(QScriptEngine* engine, const AnimationDetails& event);
void animationDetailsFromScriptValue(const QScriptValue& object, AnimationDetails& event);
#endif // hifi_AnimationCache_h

View file

@ -0,0 +1,95 @@
//
// AnimationLoop.cpp
// libraries/animation
//
// Created by Brad Hefta-Gaub on 11/12/14.
// Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AnimationCache.h"
#include "AnimationLoop.h"
AnimationLoop::AnimationLoop() :
_fps(30.0f),
_loop(false),
_hold(false),
_startAutomatically(false),
_firstFrame(0.0f),
_lastFrame(FLT_MAX),
_running(false),
_frameIndex(0.0f)
{
}
AnimationLoop::AnimationLoop(const AnimationDetails& animationDetails) :
_fps(animationDetails.fps),
_loop(animationDetails.loop),
_hold(animationDetails.hold),
_startAutomatically(animationDetails.startAutomatically),
_firstFrame(animationDetails.firstFrame),
_lastFrame(animationDetails.lastFrame),
_running(animationDetails.running),
_frameIndex(animationDetails.frameIndex)
{
}
AnimationLoop::AnimationLoop(float fps, bool loop, bool hold, bool startAutomatically, float firstFrame,
float lastFrame, bool running, float frameIndex) :
_fps(fps),
_loop(loop),
_hold(hold),
_startAutomatically(startAutomatically),
_firstFrame(firstFrame),
_lastFrame(lastFrame),
_running(running),
_frameIndex(frameIndex)
{
}
void AnimationLoop::simulate(float deltaTime) {
_frameIndex += deltaTime * _fps;
// If we knew the number of frames from the animation, we'd consider using it here
// animationGeometry.animationFrames.size()
float maxFrame = _lastFrame;
float endFrameIndex = qMin(_lastFrame, maxFrame - (_loop ? 0.0f : 1.0f));
float startFrameIndex = qMin(_firstFrame, endFrameIndex);
if ((!_loop && (_frameIndex < startFrameIndex || _frameIndex > endFrameIndex)) || startFrameIndex == endFrameIndex) {
// passed the end; apply the last frame
_frameIndex = glm::clamp(_frameIndex, startFrameIndex, endFrameIndex);
if (!_hold) {
stop();
}
} else {
// wrap within the desired range
if (_frameIndex < startFrameIndex) {
_frameIndex = endFrameIndex - glm::mod(endFrameIndex - _frameIndex, endFrameIndex - startFrameIndex);
} else if (_frameIndex > endFrameIndex) {
_frameIndex = startFrameIndex + glm::mod(_frameIndex - startFrameIndex, endFrameIndex - startFrameIndex);
}
}
}
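To make the wrapping arithmetic above concrete, a small illustrative use of the loop (all values chosen for the example):
// Illustrative: a looping range of [10, 30) advanced at 30 fps.
AnimationLoop loop(30.0f, /*loop*/ true, /*hold*/ false, /*startAutomatically*/ false,
    /*firstFrame*/ 10.0f, /*lastFrame*/ 30.0f, /*running*/ true, /*frameIndex*/ 29.0f);
loop.simulate(0.1f);                // 29 + 0.1 * 30 = 32, which wraps to 10 + mod(32 - 10, 30 - 10) = 12
float frame = loop.getFrameIndex(); // ~12.0f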
void AnimationLoop::setStartAutomatically(bool startAutomatically) {
if ((_startAutomatically = startAutomatically) && !isRunning()) {
start();
}
}
void AnimationLoop::setRunning(bool running) {
if (_running == running) {
if (running) {
// move back to the beginning
_frameIndex = _firstFrame;
}
return;
}
if ((_running = running)) {
_frameIndex = _firstFrame;
}
}

View file

@ -0,0 +1,63 @@
//
// AnimationLoop.h
// libraries/animation
//
// Created by Brad Hefta-Gaub on 11/12/14.
// Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AnimationLoop_h
#define hifi_AnimationLoop_h
class AnimationDetails;
class AnimationLoop {
public:
AnimationLoop();
AnimationLoop(const AnimationDetails& animationDetails);
AnimationLoop(float fps, bool loop, bool hold, bool startAutomatically, float firstFrame,
float lastFrame, bool running, float frameIndex);
void setFPS(float fps) { _fps = fps; }
float getFPS() const { return _fps; }
void setLoop(bool loop) { _loop = loop; }
bool getLoop() const { return _loop; }
void setHold(bool hold) { _hold = hold; }
bool getHold() const { return _hold; }
void setStartAutomatically(bool startAutomatically);
bool getStartAutomatically() const { return _startAutomatically; }
void setFirstFrame(float firstFrame) { _firstFrame = firstFrame; }
float getFirstFrame() const { return _firstFrame; }
void setLastFrame(float lastFrame) { _lastFrame = lastFrame; }
float getLastFrame() const { return _lastFrame; }
void setRunning(bool running);
bool isRunning() const { return _running; }
void setFrameIndex(float frameIndex) { _frameIndex = glm::clamp(frameIndex, _firstFrame, _lastFrame); }
float getFrameIndex() const { return _frameIndex; }
void start() { setRunning(true); }
void stop() { setRunning(false); }
void simulate(float deltaTime);
private:
float _fps;
bool _loop;
bool _hold;
bool _startAutomatically;
float _firstFrame;
float _lastFrame;
bool _running;
float _frameIndex;
};
#endif // hifi_AnimationLoop_h
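A sketch of typical per-frame use of this class; the owning object and its applyFrame() helper are hypothetical, and the isRunning() check mirrors what ModelEntityItem::update() does further down in this commit:
// Hypothetical owner of an AnimationLoop, ticked once per frame.
class AnimatedThing {
public:
    AnimatedThing() { _animationLoop.setFPS(30.0f); _animationLoop.setLoop(true); }
    void play() { _animationLoop.start(); }
    void update(float deltaTime) {
        if (_animationLoop.isRunning()) {
            _animationLoop.simulate(deltaTime);
            applyFrame(_animationLoop.getFrameIndex()); // applyFrame() is illustrative
        }
    }
private:
    void applyFrame(float frameIndex) { /* map frameIndex onto joint data */ }
    AnimationLoop _animationLoop;
};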

View file

@ -11,6 +11,7 @@
#include <QDebug>
#include <QObject>
#include <QtCore/QJsonDocument>
#include <ByteCountCoding.h>
#include <GLMHelpers.h>
@ -66,6 +67,7 @@ EntityItemProperties::EntityItemProperties() :
_animationIsPlaying(ModelEntityItem::DEFAULT_ANIMATION_IS_PLAYING),
_animationFrameIndex(ModelEntityItem::DEFAULT_ANIMATION_FRAME_INDEX),
_animationFPS(ModelEntityItem::DEFAULT_ANIMATION_FPS),
_animationSettings(""),
_glowLevel(0.0f),
_localRenderAlpha(1.0f),
_isSpotlight(false),
@ -76,6 +78,8 @@ EntityItemProperties::EntityItemProperties() :
_animationIsPlayingChanged(false),
_animationFrameIndexChanged(false),
_animationFPSChanged(false),
_animationSettingsChanged(false),
_glowLevelChanged(false),
_localRenderAlphaChanged(false),
_isSpotlightChanged(false),
@ -117,6 +121,58 @@ void EntityItemProperties::setSittingPoints(const QVector<SittingPoint>& sitting
}
}
void EntityItemProperties::setAnimationSettings(const QString& value) {
// the animation settings value is a JSON string that may contain various animation properties;
// if it includes fps, frameIndex, or running, those values are parsed out and
// override the individually-set animation properties
QJsonDocument settingsAsJson = QJsonDocument::fromJson(value.toUtf8());
QJsonObject settingsAsJsonObject = settingsAsJson.object();
QVariantMap settingsMap = settingsAsJsonObject.toVariantMap();
if (settingsMap.contains("fps")) {
float fps = settingsMap["fps"].toFloat();
setAnimationFPS(fps);
}
if (settingsMap.contains("frameIndex")) {
float frameIndex = settingsMap["frameIndex"].toFloat();
setAnimationFrameIndex(frameIndex);
}
if (settingsMap.contains("running")) {
bool running = settingsMap["running"].toBool();
setAnimationIsPlaying(running);
}
_animationSettings = value;
_animationSettingsChanged = true;
}
QString EntityItemProperties::getAnimationSettings() const {
// the animation settings value is a JSON string; when reading it back we fold the current
// fps, frameIndex, and running values into the stored string so the returned JSON
// always reflects the live animation state
QString value = _animationSettings;
QJsonDocument settingsAsJson = QJsonDocument::fromJson(value.toUtf8());
QJsonObject settingsAsJsonObject = settingsAsJson.object();
QVariantMap settingsMap = settingsAsJsonObject.toVariantMap();
QVariant fpsValue(getAnimationFPS());
settingsMap["fps"] = fpsValue;
QVariant frameIndexValue(getAnimationFrameIndex());
settingsMap["frameIndex"] = frameIndexValue;
QVariant runningValue(getAnimationIsPlaying());
settingsMap["running"] = runningValue;
settingsAsJsonObject = QJsonObject::fromVariantMap(settingsMap);
QJsonDocument newDocument(settingsAsJsonObject);
QByteArray jsonByteArray = newDocument.toJson(QJsonDocument::Compact);
QString jsonByteString(jsonByteArray);
return jsonByteString;
}
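An illustrative round trip through these two methods (the JSON blob and property values are made up for the example):
// Illustrative only: fps/frameIndex/running in the blob win over the plain setters.
EntityItemProperties properties;
properties.setAnimationFPS(15.0f);
properties.setAnimationSettings("{ \"fps\": 30, \"frameIndex\": 0, \"running\": true }");
// properties.getAnimationFPS() now returns 30.0f, because setAnimationSettings() parsed the
// blob and called setAnimationFPS(); getAnimationSettings() re-serializes the stored string
// with the current fps/frameIndex/running folded back in.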
void EntityItemProperties::debugDump() const {
qDebug() << "EntityItemProperties...";
@ -149,6 +205,7 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
CHECK_PROPERTY_CHANGE(PROP_ANIMATION_PLAYING, animationIsPlaying);
CHECK_PROPERTY_CHANGE(PROP_ANIMATION_FRAME_INDEX, animationFrameIndex);
CHECK_PROPERTY_CHANGE(PROP_ANIMATION_FPS, animationFPS);
CHECK_PROPERTY_CHANGE(PROP_ANIMATION_SETTINGS, animationSettings);
CHECK_PROPERTY_CHANGE(PROP_VISIBLE, visible);
CHECK_PROPERTY_CHANGE(PROP_REGISTRATION_POINT, registrationPoint);
CHECK_PROPERTY_CHANGE(PROP_ANGULAR_VELOCITY, angularVelocity);
@ -201,8 +258,9 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine) cons
COPY_PROPERTY_TO_QSCRIPTVALUE(modelURL);
COPY_PROPERTY_TO_QSCRIPTVALUE(animationURL);
COPY_PROPERTY_TO_QSCRIPTVALUE(animationIsPlaying);
COPY_PROPERTY_TO_QSCRIPTVALUE(animationFrameIndex);
COPY_PROPERTY_TO_QSCRIPTVALUE(animationFPS);
COPY_PROPERTY_TO_QSCRIPTVALUE(animationFrameIndex);
COPY_PROPERTY_TO_QSCRIPTVALUE_GETTER(animationSettings,getAnimationSettings());
COPY_PROPERTY_TO_QSCRIPTVALUE(glowLevel);
COPY_PROPERTY_TO_QSCRIPTVALUE(localRenderAlpha);
COPY_PROPERTY_TO_QSCRIPTVALUE(ignoreForCollisions);
@ -276,6 +334,7 @@ void EntityItemProperties::copyFromScriptValue(const QScriptValue& object) {
COPY_PROPERTY_FROM_QSCRIPTVALUE_BOOL(animationIsPlaying, setAnimationIsPlaying);
COPY_PROPERTY_FROM_QSCRIPTVALUE_FLOAT(animationFPS, setAnimationFPS);
COPY_PROPERTY_FROM_QSCRIPTVALUE_FLOAT(animationFrameIndex, setAnimationFrameIndex);
COPY_PROPERTY_FROM_QSCRIPTVALUE_STRING(animationSettings, setAnimationSettings);
COPY_PROPERTY_FROM_QSCRIPTVALUE_FLOAT(glowLevel, setGlowLevel);
COPY_PROPERTY_FROM_QSCRIPTVALUE_FLOAT(localRenderAlpha, setLocalRenderAlpha);
COPY_PROPERTY_FROM_QSCRIPTVALUE_BOOL(ignoreForCollisions, setIgnoreForCollisions);
@ -452,6 +511,7 @@ bool EntityItemProperties::encodeEntityEditPacket(PacketType command, EntityItem
APPEND_ENTITY_PROPERTY(PROP_CUTOFF, appendValue, properties.getCutoff());
APPEND_ENTITY_PROPERTY(PROP_LOCKED, appendValue, properties.getLocked());
APPEND_ENTITY_PROPERTY(PROP_TEXTURES, appendValue, properties.getTextures());
APPEND_ENTITY_PROPERTY(PROP_ANIMATION_SETTINGS, appendValue, properties.getAnimationSettings());
}
if (propertyCount > 0) {
int endOfEntityItemData = packetData->getUncompressedByteOffset();
@ -661,7 +721,8 @@ bool EntityItemProperties::decodeEntityEditPacket(const unsigned char* data, int
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_CUTOFF, float, setCutoff);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_LOCKED, bool, setLocked);
READ_ENTITY_PROPERTY_STRING_TO_PROPERTIES(PROP_TEXTURES, setTextures);
READ_ENTITY_PROPERTY_STRING_TO_PROPERTIES(PROP_ANIMATION_SETTINGS, setAnimationSettings);
return valid;
}
@ -714,6 +775,7 @@ void EntityItemProperties::markAllChanged() {
_animationIsPlayingChanged = true;
_animationFrameIndexChanged = true;
_animationFPSChanged = true;
_animationSettingsChanged = true;
_glowLevelChanged = true;
_localRenderAlphaChanged = true;
_isSpotlightChanged = true;

View file

@ -80,8 +80,9 @@ enum EntityPropertyList {
// used by Model entities
PROP_TEXTURES,
PROP_ANIMATION_SETTINGS,
PROP_LAST_ITEM = PROP_CUTOFF
PROP_LAST_ITEM = PROP_ANIMATION_SETTINGS
};
typedef PropertyFlags<EntityPropertyList> EntityPropertyFlags;
@ -178,6 +179,8 @@ public:
float getAnimationFrameIndex() const { return _animationFrameIndex; }
bool getAnimationIsPlaying() const { return _animationIsPlaying; }
float getAnimationFPS() const { return _animationFPS; }
QString getAnimationSettings() const;
float getGlowLevel() const { return _glowLevel; }
float getLocalRenderAlpha() const { return _localRenderAlpha; }
const QString& getScript() const { return _script; }
@ -189,6 +192,8 @@ public:
void setAnimationFrameIndex(float value) { _animationFrameIndex = value; _animationFrameIndexChanged = true; }
void setAnimationIsPlaying(bool value) { _animationIsPlaying = value; _animationIsPlayingChanged = true; }
void setAnimationFPS(float value) { _animationFPS = value; _animationFPSChanged = true; }
void setAnimationSettings(const QString& value);
void setGlowLevel(float value) { _glowLevel = value; _glowLevelChanged = true; }
void setLocalRenderAlpha(float value) { _localRenderAlpha = value; _localRenderAlphaChanged = true; }
void setScript(const QString& value) { _script = value; _scriptChanged = true; }
@ -342,6 +347,7 @@ private:
bool _animationIsPlaying;
float _animationFrameIndex;
float _animationFPS;
QString _animationSettings;
float _glowLevel;
float _localRenderAlpha;
bool _isSpotlight;
@ -352,6 +358,7 @@ private:
bool _animationIsPlayingChanged;
bool _animationFrameIndexChanged;
bool _animationFPSChanged;
bool _animationSettingsChanged;
bool _glowLevelChanged;
bool _localRenderAlphaChanged;
bool _isSpotlightChanged;

View file

@ -40,6 +40,18 @@
} \
}
#define READ_ENTITY_PROPERTY_SETTER(P,T,M) \
if (propertyFlags.getHasProperty(P)) { \
T fromBuffer; \
memcpy(&fromBuffer, dataAt, sizeof(fromBuffer)); \
dataAt += sizeof(fromBuffer); \
bytesRead += sizeof(fromBuffer); \
if (overwriteLocalData) { \
M(fromBuffer); \
} \
}
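The new macro mirrors READ_ENTITY_PROPERTY but routes the decoded value through a setter so any side effects run. An illustrative invocation follows; note that this commit's ModelEntityItem actually reads into locals and calls its setters explicitly instead:
// Illustrative invocation inside a readEntitySubclassDataFromBuffer()-style body;
// it assumes propertyFlags, dataAt, bytesRead, and overwriteLocalData are in scope,
// exactly as the other READ_ENTITY_PROPERTY_* macros do.
READ_ENTITY_PROPERTY_SETTER(PROP_ANIMATION_FPS, float, setAnimationFPS);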
#define READ_ENTITY_PROPERTY_QUAT(P,M) \
if (propertyFlags.getHasProperty(P)) { \
glm::quat fromBuffer; \

View file

@ -9,6 +9,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QtCore/QJsonDocument>
#include <ByteCountCoding.h>
#include <GLMHelpers.h>
@ -33,7 +35,6 @@ ModelEntityItem::ModelEntityItem(const EntityItemID& entityItemID, const EntityI
{
_type = EntityTypes::Model;
setProperties(properties, true);
_animationFrameIndex = 0.0f;
_lastAnimated = usecTimestampNow();
_jointMappingCompleted = false;
_color[0] = _color[1] = _color[2] = 0;
@ -50,6 +51,7 @@ EntityItemProperties ModelEntityItem::getProperties() const {
COPY_ENTITY_PROPERTY_TO_PROPERTIES(animationFPS, getAnimationFPS);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(glowLevel, getGlowLevel);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(textures, getTextures);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(animationSettings, getAnimationSettings);
return properties;
}
@ -64,6 +66,7 @@ bool ModelEntityItem::setProperties(const EntityItemProperties& properties, bool
SET_ENTITY_PROPERTY_FROM_PROPERTIES(animationFrameIndex, setAnimationFrameIndex);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(animationFPS, setAnimationFPS);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(textures, setTextures);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(animationSettings, setAnimationSettings);
if (somethingChanged) {
bool wantDebug = false;
@ -100,10 +103,29 @@ int ModelEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* data,
READ_ENTITY_PROPERTY_COLOR(PROP_COLOR, _color);
READ_ENTITY_PROPERTY_STRING(PROP_MODEL_URL, setModelURL);
READ_ENTITY_PROPERTY_STRING(PROP_ANIMATION_URL, setAnimationURL);
READ_ENTITY_PROPERTY(PROP_ANIMATION_FPS, float, _animationFPS);
READ_ENTITY_PROPERTY(PROP_ANIMATION_FRAME_INDEX, float, _animationFrameIndex);
READ_ENTITY_PROPERTY(PROP_ANIMATION_PLAYING, bool, _animationIsPlaying);
// Because we're using AnimationLoop, which resets the frame index whenever its running state
// changes, we read these values in the order they appear in the buffer, but call our setters
// in an order that leaves AnimationLoop with the correct frame index.
float animationFPS = getAnimationFPS();
float animationFrameIndex = getAnimationFrameIndex();
bool animationIsPlaying = getAnimationIsPlaying();
READ_ENTITY_PROPERTY(PROP_ANIMATION_FPS, float, animationFPS);
READ_ENTITY_PROPERTY(PROP_ANIMATION_FRAME_INDEX, float, animationFrameIndex);
READ_ENTITY_PROPERTY(PROP_ANIMATION_PLAYING, bool, animationIsPlaying);
if (propertyFlags.getHasProperty(PROP_ANIMATION_PLAYING)) {
setAnimationIsPlaying(animationIsPlaying);
}
if (propertyFlags.getHasProperty(PROP_ANIMATION_FPS)) {
setAnimationFPS(animationFPS);
}
if (propertyFlags.getHasProperty(PROP_ANIMATION_FRAME_INDEX)) {
setAnimationFrameIndex(animationFrameIndex);
}
READ_ENTITY_PROPERTY_STRING(PROP_TEXTURES, setTextures);
READ_ENTITY_PROPERTY_STRING(PROP_ANIMATION_SETTINGS, setAnimationSettings);
return bytesRead;
}
@ -199,19 +221,25 @@ int ModelEntityItem::oldVersionReadEntityDataFromBuffer(const unsigned char* dat
bytesRead += animationURLLength;
// animationIsPlaying
memcpy(&_animationIsPlaying, dataAt, sizeof(_animationIsPlaying));
dataAt += sizeof(_animationIsPlaying);
bytesRead += sizeof(_animationIsPlaying);
bool animationIsPlaying;
memcpy(&animationIsPlaying, dataAt, sizeof(animationIsPlaying));
dataAt += sizeof(animationIsPlaying);
bytesRead += sizeof(animationIsPlaying);
setAnimationIsPlaying(animationIsPlaying);
// animationFrameIndex
memcpy(&_animationFrameIndex, dataAt, sizeof(_animationFrameIndex));
dataAt += sizeof(_animationFrameIndex);
bytesRead += sizeof(_animationFrameIndex);
float animationFrameIndex;
memcpy(&animationFrameIndex, dataAt, sizeof(animationFrameIndex));
dataAt += sizeof(animationFrameIndex);
bytesRead += sizeof(animationFrameIndex);
setAnimationFrameIndex(animationFrameIndex);
// animationFPS
memcpy(&_animationFPS, dataAt, sizeof(_animationFPS));
dataAt += sizeof(_animationFPS);
bytesRead += sizeof(_animationFPS);
float animationFPS;
memcpy(&animationFPS, dataAt, sizeof(animationFPS));
dataAt += sizeof(animationFPS);
bytesRead += sizeof(animationFPS);
setAnimationFPS(animationFPS);
}
}
return bytesRead;
@ -227,6 +255,7 @@ EntityPropertyFlags ModelEntityItem::getEntityProperties(EncodeBitstreamParams&
requestedProperties += PROP_ANIMATION_FPS;
requestedProperties += PROP_ANIMATION_FRAME_INDEX;
requestedProperties += PROP_ANIMATION_PLAYING;
requestedProperties += PROP_ANIMATION_SETTINGS;
requestedProperties += PROP_TEXTURES;
return requestedProperties;
@ -249,6 +278,7 @@ void ModelEntityItem::appendSubclassData(OctreePacketData* packetData, EncodeBit
APPEND_ENTITY_PROPERTY(PROP_ANIMATION_FRAME_INDEX, appendValue, getAnimationFrameIndex());
APPEND_ENTITY_PROPERTY(PROP_ANIMATION_PLAYING, appendValue, getAnimationIsPlaying());
APPEND_ENTITY_PROPERTY(PROP_TEXTURES, appendValue, getTextures());
APPEND_ENTITY_PROPERTY(PROP_ANIMATION_SETTINGS, appendValue, getAnimationSettings());
}
@ -314,7 +344,7 @@ QVector<glm::quat> ModelEntityItem::getAnimationFrame() {
int frameCount = frames.size();
if (frameCount > 0) {
int animationFrameIndex = (int)(glm::floor(_animationFrameIndex)) % frameCount;
int animationFrameIndex = (int)(glm::floor(getAnimationFrameIndex())) % frameCount;
if (animationFrameIndex < 0 || animationFrameIndex > frameCount) {
animationFrameIndex = 0;
@ -363,7 +393,7 @@ void ModelEntityItem::update(const quint64& updateTime) {
if (getAnimationIsPlaying()) {
float deltaTime = (float)(now - _lastAnimated) / (float)USECS_PER_SECOND;
_lastAnimated = now;
_animationFrameIndex += deltaTime * _animationFPS;
_animationLoop.simulate(deltaTime);
} else {
_lastAnimated = now;
}
@ -377,3 +407,94 @@ void ModelEntityItem::debugDump() const {
qDebug() << " model URL:" << getModelURL();
}
void ModelEntityItem::setAnimationSettings(const QString& value) {
// the animation settings value is a JSON string that may contain various animation properties;
// if it includes fps, frameIndex, running, firstFrame, lastFrame, loop, hold, or
// startAutomatically, those values are parsed out and override the individually-set animation properties
QJsonDocument settingsAsJson = QJsonDocument::fromJson(value.toUtf8());
QJsonObject settingsAsJsonObject = settingsAsJson.object();
QVariantMap settingsMap = settingsAsJsonObject.toVariantMap();
if (settingsMap.contains("fps")) {
float fps = settingsMap["fps"].toFloat();
setAnimationFPS(fps);
}
if (settingsMap.contains("frameIndex")) {
float frameIndex = settingsMap["frameIndex"].toFloat();
setAnimationFrameIndex(frameIndex);
}
if (settingsMap.contains("running")) {
bool running = settingsMap["running"].toBool();
setAnimationIsPlaying(running);
}
if (settingsMap.contains("firstFrame")) {
float firstFrame = settingsMap["firstFrame"].toFloat();
setAnimationFirstFrame(firstFrame);
}
if (settingsMap.contains("lastFrame")) {
float lastFrame = settingsMap["lastFrame"].toFloat();
setAnimationLastFrame(lastFrame);
}
if (settingsMap.contains("loop")) {
bool loop = settingsMap["loop"].toBool();
setAnimationLoop(loop);
}
if (settingsMap.contains("hold")) {
bool hold = settingsMap["hold"].toBool();
setAnimationHold(hold);
}
if (settingsMap.contains("startAutomatically")) {
bool startAutomatically = settingsMap["startAutomatically"].toBool();
setAnimationStartAutomatically(startAutomatically);
}
_animationSettings = value;
}
QString ModelEntityItem::getAnimationSettings() const {
// the animation settings value is a JSON string; when reading it back we fold the current
// fps, frameIndex, running, firstFrame, lastFrame, loop, hold, and startAutomatically values
// into the stored string so the returned JSON always reflects the live animation state
QString value = _animationSettings;
QJsonDocument settingsAsJson = QJsonDocument::fromJson(value.toUtf8());
QJsonObject settingsAsJsonObject = settingsAsJson.object();
QVariantMap settingsMap = settingsAsJsonObject.toVariantMap();
QVariant fpsValue(getAnimationFPS());
settingsMap["fps"] = fpsValue;
QVariant frameIndexValue(getAnimationFrameIndex());
settingsMap["frameIndex"] = frameIndexValue;
QVariant runningValue(getAnimationIsPlaying());
settingsMap["running"] = runningValue;
QVariant firstFrameValue(getAnimationFirstFrame());
settingsMap["firstFrame"] = firstFrameValue;
QVariant lastFrameValue(getAnimationLastFrame());
settingsMap["lastFrame"] = lastFrameValue;
QVariant loopValue(getAnimationLoop());
settingsMap["loop"] = loopValue;
QVariant holdValue(getAnimationHold());
settingsMap["hold"] = holdValue;
QVariant startAutomaticallyValue(getAnimationStartAutomatically());
settingsMap["startAutomatically"] = startAutomaticallyValue;
settingsAsJsonObject = QJsonObject::fromVariantMap(settingsMap);
QJsonDocument newDocument(settingsAsJsonObject);
QByteArray jsonByteArray = newDocument.toJson(QJsonDocument::Compact);
QString jsonByteString(jsonByteArray);
return jsonByteString;
}

View file

@ -12,6 +12,8 @@
#ifndef hifi_ModelEntityItem_h
#define hifi_ModelEntityItem_h
#include <AnimationLoop.h>
#include "EntityItem.h"
class ModelEntityItem : public EntityItem {
@ -73,21 +75,38 @@ public:
void setModelURL(const QString& url) { _modelURL = url; }
void setAnimationURL(const QString& url) { _animationURL = url; }
static const float DEFAULT_ANIMATION_FRAME_INDEX;
void setAnimationFrameIndex(float value) { _animationFrameIndex = value; }
void setAnimationFrameIndex(float value) { _animationLoop.setFrameIndex(value); }
void setAnimationSettings(const QString& value);
static const bool DEFAULT_ANIMATION_IS_PLAYING;
void setAnimationIsPlaying(bool value) { _animationIsPlaying = value; }
void setAnimationIsPlaying(bool value) { _animationLoop.setRunning(value); }
static const float DEFAULT_ANIMATION_FPS;
void setAnimationFPS(float value) { _animationFPS = value; }
void setAnimationFPS(float value) { _animationLoop.setFPS(value); }
void setAnimationLoop(bool loop) { _animationLoop.setLoop(loop); }
bool getAnimationLoop() const { return _animationLoop.getLoop(); }
void setAnimationHold(bool hold) { _animationLoop.setHold(hold); }
bool getAnimationHold() const { return _animationLoop.getHold(); }
void setAnimationStartAutomatically(bool startAutomatically) { _animationLoop.setStartAutomatically(startAutomatically); }
bool getAnimationStartAutomatically() const { return _animationLoop.getStartAutomatically(); }
void setAnimationFirstFrame(float firstFrame) { _animationLoop.setFirstFrame(firstFrame); }
float getAnimationFirstFrame() const { return _animationLoop.getFirstFrame(); }
void setAnimationLastFrame(float lastFrame) { _animationLoop.setLastFrame(lastFrame); }
float getAnimationLastFrame() const { return _animationLoop.getLastFrame(); }
void mapJoints(const QStringList& modelJointNames);
QVector<glm::quat> getAnimationFrame();
bool jointsMapped() const { return _jointMappingCompleted; }
bool getAnimationIsPlaying() const { return _animationIsPlaying; }
float getAnimationFrameIndex() const { return _animationFrameIndex; }
float getAnimationFPS() const { return _animationFPS; }
bool getAnimationIsPlaying() const { return _animationLoop.isRunning(); }
float getAnimationFrameIndex() const { return _animationLoop.getFrameIndex(); }
float getAnimationFPS() const { return _animationLoop.getFPS(); }
QString getAnimationSettings() const;
static const QString DEFAULT_TEXTURES;
const QString& getTextures() const { return _textures; }
@ -106,9 +125,8 @@ protected:
quint64 _lastAnimated;
QString _animationURL;
float _animationFrameIndex; // we keep this as a float and round to int only when we need the exact index
bool _animationIsPlaying;
float _animationFPS;
AnimationLoop _animationLoop;
QString _animationSettings;
QString _textures;
// used on client side

View file

@ -75,7 +75,7 @@ PacketVersion versionForPacketType(PacketType type) {
return 1;
case PacketTypeEntityAddOrEdit:
case PacketTypeEntityData:
return VERSION_ENTITIES_SUPPORT_DIMENSIONS;
return VERSION_ENTITIES_MODELS_HAVE_ANIMATION_SETTINGS;
case PacketTypeEntityErase:
return 2;
case PacketTypeAudioStreamStats:

View file

@ -124,6 +124,7 @@ const PacketVersion VERSION_ROOT_ELEMENT_HAS_DATA = 2;
const PacketVersion VERSION_ENTITIES_SUPPORT_SPLIT_MTU = 3;
const PacketVersion VERSION_ENTITIES_HAS_FILE_BREAKS = VERSION_ENTITIES_SUPPORT_SPLIT_MTU;
const PacketVersion VERSION_ENTITIES_SUPPORT_DIMENSIONS = 4;
const PacketVersion VERSION_ENTITIES_MODELS_HAVE_ANIMATION_SETTINGS = 5;
const PacketVersion VERSION_VOXELS_HAS_FILE_BREAKS = 1;
#endif // hifi_PacketHeaders_h

View file

@ -23,6 +23,7 @@
#include <GeometryUtil.h>
#include <OctalCode.h>
#include <LogHandler.h>
#include <PacketHeaders.h>
#include <SharedUtil.h>
#include <Shape.h>
@ -71,6 +72,10 @@ void Octree::recurseTreeWithPostOperation(RecurseOctreeOperation operation, void
void Octree::recurseElementWithOperation(OctreeElement* element, RecurseOctreeOperation operation, void* extraData,
int recursionCount) {
if (recursionCount > DANGEROUSLY_DEEP_RECURSION) {
static QString repeatedMessage
= LogHandler::getInstance().addRepeatedMessageRegex(
"Octree::recurseElementWithOperation() reached DANGEROUSLY_DEEP_RECURSION, bailing!");
qDebug() << "Octree::recurseElementWithOperation() reached DANGEROUSLY_DEEP_RECURSION, bailing!";
return;
}
@ -89,7 +94,11 @@ void Octree::recurseElementWithOperation(OctreeElement* element, RecurseOctreeOp
void Octree::recurseElementWithPostOperation(OctreeElement* element, RecurseOctreeOperation operation, void* extraData,
int recursionCount) {
if (recursionCount > DANGEROUSLY_DEEP_RECURSION) {
qDebug() << "Octree::recurseElementWithOperation() reached DANGEROUSLY_DEEP_RECURSION, bailing!\n";
static QString repeatedMessage
= LogHandler::getInstance().addRepeatedMessageRegex(
"Octree::recurseElementWithPostOperation() reached DANGEROUSLY_DEEP_RECURSION, bailing!");
qDebug() << "Octree::recurseElementWithPostOperation() reached DANGEROUSLY_DEEP_RECURSION, bailing!";
return;
}
@ -115,6 +124,10 @@ void Octree::recurseElementWithOperationDistanceSorted(OctreeElement* element, R
const glm::vec3& point, void* extraData, int recursionCount) {
if (recursionCount > DANGEROUSLY_DEEP_RECURSION) {
static QString repeatedMessage
= LogHandler::getInstance().addRepeatedMessageRegex(
"Octree::recurseElementWithOperationDistanceSorted() reached DANGEROUSLY_DEEP_RECURSION, bailing!");
qDebug() << "Octree::recurseElementWithOperationDistanceSorted() reached DANGEROUSLY_DEEP_RECURSION, bailing!";
return;
}
@ -152,7 +165,11 @@ void Octree::recurseTreeWithOperator(RecurseOctreeOperator* operatorObject) {
bool Octree::recurseElementWithOperator(OctreeElement* element, RecurseOctreeOperator* operatorObject, int recursionCount) {
if (recursionCount > DANGEROUSLY_DEEP_RECURSION) {
qDebug() << "Octree::recurseElementWithOperation() reached DANGEROUSLY_DEEP_RECURSION, bailing!";
static QString repeatedMessage
= LogHandler::getInstance().addRepeatedMessageRegex(
"Octree::recurseElementWithOperator() reached DANGEROUSLY_DEEP_RECURSION, bailing!");
qDebug() << "Octree::recurseElementWithOperator() reached DANGEROUSLY_DEEP_RECURSION, bailing!";
return false;
}
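The guard pattern added above registers the literal message with LogHandler once, via the function-local static, so repeated hits can be collapsed instead of flooding the log. A minimal sketch of the same pattern applied to a hypothetical traversal:
// someDeepTraversal() is illustrative; only the LogHandler/qDebug pattern is from this commit.
void someDeepTraversal(OctreeElement* element, int recursionCount) {
    if (recursionCount > DANGEROUSLY_DEEP_RECURSION) {
        // registered once; LogHandler can then throttle/summarize matching qDebug output
        static QString repeatedMessage
            = LogHandler::getInstance().addRepeatedMessageRegex(
                "someDeepTraversal\\(\\) reached DANGEROUSLY_DEEP_RECURSION, bailing!");
        qDebug() << "someDeepTraversal() reached DANGEROUSLY_DEEP_RECURSION, bailing!";
        return;
    }
    // ... recurse into element's children, passing recursionCount + 1 ...
}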

View file

@ -13,6 +13,7 @@
#include <QString>
#include <QStringList>
#include <LogHandler.h>
#include <PacketHeaders.h>
#include <SharedUtil.h>
@ -854,8 +855,12 @@ void OctreeSceneStats::trackIncomingOctreePacket(const QByteArray& packet,
const int MAX_RESONABLE_FLIGHT_TIME = 200 * USECS_PER_SECOND; // 200 seconds is more than enough time for a packet to arrive
const int MIN_RESONABLE_FLIGHT_TIME = 0;
if (flightTime > MAX_RESONABLE_FLIGHT_TIME || flightTime < MIN_RESONABLE_FLIGHT_TIME) {
static QString repeatedMessage
= LogHandler::getInstance().addRepeatedMessageRegex(
"ignoring unreasonable packet... flightTime: -?\\d+ nodeClockSkewUsec: -?\\d+ usecs");
qDebug() << "ignoring unreasonable packet... flightTime:" << flightTime
<< " nodeClockSkewUsec:" << nodeClockSkewUsec << " usecs";;
<< "nodeClockSkewUsec:" << nodeClockSkewUsec << "usecs";;
return; // ignore any packets that are unreasonable
}

View file

@ -34,6 +34,15 @@ struct xColor {
unsigned char blue;
};
inline QDebug& operator<<(QDebug& dbg, const xColor& c) {
dbg.nospace() << "{type='xColor'"
", red=" << c.red <<
", green=" << c.green <<
", blue=" << c.blue <<
"}";
return dbg;
}
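With this operator in place an xColor can be streamed to the debug log directly; a small illustrative use (the named QDebug lvalue is there so it binds to the operator's non-const reference parameter):
xColor highlight = { 255, 128, 0 };
QDebug debugStream = qDebug();
debugStream << highlight;   // prints roughly: {type='xColor', red=255, green=128, blue=0}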
static const float ZERO = 0.0f;
static const float ONE = 1.0f;
static const float ONE_HALF = 0.5f;

View file

@ -0,0 +1,71 @@
//
// Transform.cpp
// shared/src/gpu
//
// Created by Sam Gateau on 11/4/2014.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Transform.h"
void Transform::evalRotationScale(Quat& rotation, Vec3& scale, const Mat3& rotationScaleMatrix) {
const float ACCURACY_THREASHOLD = 0.00001f;
// Following technique taken from:
// http://callumhay.blogspot.com/2010/10/decomposing-affine-transforms.html
// Extract the rotation component - this is done using polar decomposition, where
// we successively average the matrix with its inverse transpose until there is
// no (or a very small) difference between successive averages
float norm;
int count = 0;
Mat3 rotationMat = rotationScaleMatrix;
do {
Mat3 currInvTranspose = glm::inverse(glm::transpose(rotationMat));
Mat3 nextRotation = 0.5f * (rotationMat + currInvTranspose);
norm = 0.0;
for (int i = 0; i < 3; i++) {
float n = static_cast<float>(
fabs(rotationMat[0][i] - nextRotation[0][i]) +
fabs(rotationMat[1][i] - nextRotation[1][i]) +
fabs(rotationMat[2][i] - nextRotation[2][i]));
norm = (norm > n ? norm : n);
}
rotationMat = nextRotation;
count++; // bound the iteration count so we bail out even if the averaging never converges
} while (count < 100 && norm > ACCURACY_THREASHOLD);
// extract scale of the matrix as the length of each axis
Mat3 scaleMat = glm::inverse(rotationMat) * rotationScaleMatrix;
scale = glm::max(Vec3(ACCURACY_THREASHOLD), Vec3(scaleMat[0][0], scaleMat[1][1], scaleMat[2][2]));
// Let's work on a local matrix containing rotation only
Mat3 matRot(
rotationScaleMatrix[0] / scale.x,
rotationScaleMatrix[1] / scale.y,
rotationScaleMatrix[2] / scale.z);
// Beware: we need to detect the case where there is a negative scale.
// Based on the determinant sign we can just flip the scale sign of one component; we choose the X axis.
float determinant = glm::determinant(matRot);
if (determinant < 0.f) {
scale.x = -scale.x;
matRot[0] *= -1.f;
}
// Beware: even though the matRot is supposed to be normalized at that point,
// glm::quat_cast doesn't always return a normalized quaternion...
// rotation = glm::normalize(glm::quat_cast(matRot));
rotation = (glm::quat_cast(matRot));
}
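A small usage sketch for the decomposition above, with illustrative inputs: build a rotation-times-scale 3x3 and recover the two factors (assumes Transform.h is included).
// Bake a 30-degree yaw and a non-uniform scale into a 3x3, then decompose it.
glm::quat sourceRotation = glm::angleAxis(glm::radians(30.0f), glm::vec3(0.0f, 1.0f, 0.0f));
glm::mat3 rotationScale = glm::mat3_cast(sourceRotation);
rotationScale[0] *= 2.0f;   // scale the columns: x = 2
rotationScale[2] *= 0.5f;   //                    z = 0.5 (y stays 1)

Transform::Quat rotation;
Transform::Vec3 scale;
Transform::evalRotationScale(rotation, scale, rotationScale);
// rotation is approximately sourceRotation; scale is approximately (2, 1, 0.5)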

View file

@ -0,0 +1,397 @@
//
// Transform.h
// shared/src/gpu
//
// Created by Sam Gateau on 11/4/2014.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_gpu_Transform_h
#define hifi_gpu_Transform_h
#include <assert.h>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtx/quaternion.hpp>
#include <bitset>
class Transform {
public:
typedef glm::mat4 Mat4;
typedef glm::mat3 Mat3;
typedef glm::vec4 Vec4;
typedef glm::vec3 Vec3;
typedef glm::vec2 Vec2;
typedef glm::quat Quat;
Transform() :
_translation(0),
_rotation(1.0f, 0, 0, 0),
_scale(1.0f),
_flags(FLAG_CACHE_INVALID_BITSET) // invalid cache
{
}
Transform(const Transform& transform) :
_translation(transform._translation),
_rotation(transform._rotation),
_scale(transform._scale),
_flags(transform._flags)
{
invalidCache();
}
Transform(const Mat4& raw) {
evalFromRawMatrix(raw);
}
~Transform() {}
void setIdentity();
const Vec3& getTranslation() const;
void setTranslation(const Vec3& translation);
void preTranslate(const Vec3& translation);
void postTranslate(const Vec3& translation);
const Quat& getRotation() const;
void setRotation(const Quat& rotation);
void preRotate(const Quat& rotation);
void postRotate(const Quat& rotation);
const Vec3& getScale() const;
void setScale(float scale);
void setScale(const Vec3& scale);
void postScale(float scale);
void postScale(const Vec3& scale);
bool isIdentity() const { return (_flags & ~Flags(FLAG_CACHE_INVALID_BITSET)).none(); }
bool isTranslating() const { return _flags[FLAG_TRANSLATION]; }
bool isRotating() const { return _flags[FLAG_ROTATION]; }
bool isScaling() const { return _flags[FLAG_SCALING]; }
bool isUniform() const { return !isNonUniform(); }
bool isNonUniform() const { return _flags[FLAG_NON_UNIFORM]; }
void evalFromRawMatrix(const Mat4& matrix);
void evalFromRawMatrix(const Mat3& rotationScalematrix);
Mat4& getMatrix(Mat4& result) const;
Mat4& getInverseMatrix(Mat4& result) const;
Transform& evalInverse(Transform& result) const;
static void evalRotationScale(Quat& rotation, Vec3& scale, const Mat3& rotationScaleMatrix);
static Transform& mult(Transform& result, const Transform& left, const Transform& right);
// Left will be inverted before the multiplication
static Transform& inverseMult(Transform& result, const Transform& left, const Transform& right);
protected:
enum Flag {
FLAG_CACHE_INVALID = 0,
FLAG_TRANSLATION,
FLAG_ROTATION,
FLAG_SCALING,
FLAG_NON_UNIFORM,
FLAG_ZERO_SCALE,
FLAG_PROJECTION,
NUM_FLAGS,
FLAG_CACHE_INVALID_BITSET = 1,
};
typedef std::bitset<NUM_FLAGS> Flags;
// TRS
Vec3 _translation;
Quat _rotation;
Vec3 _scale;
mutable Flags _flags;
// Cached transform
mutable Mat4 _matrix;
bool isCacheInvalid() const { return _flags[FLAG_CACHE_INVALID]; }
void validCache() const { _flags.set(FLAG_CACHE_INVALID, false); }
void invalidCache() const { _flags.set(FLAG_CACHE_INVALID, true); }
void flagTranslation() { _flags.set(FLAG_TRANSLATION, true); }
void flagRotation() { _flags.set(FLAG_ROTATION, true); }
void flagScaling() { _flags.set(FLAG_SCALING, true); }
void unflagScaling() { _flags.set(FLAG_SCALING, false); }
void flagUniform() { _flags.set(FLAG_NON_UNIFORM, false); }
void flagNonUniform() { _flags.set(FLAG_NON_UNIFORM, true); }
void updateCache() const;
};
inline void Transform::setIdentity() {
_translation = Vec3(0);
_rotation = Quat(1.0f, 0, 0, 0);
_scale = Vec3(1.0f);
_flags = Flags(FLAG_CACHE_INVALID_BITSET);
}
inline const Transform::Vec3& Transform::getTranslation() const {
return _translation;
}
inline void Transform::setTranslation(const Vec3& translation) {
invalidCache();
flagTranslation();
_translation = translation;
}
inline void Transform::preTranslate(const Vec3& translation) {
invalidCache();
flagTranslation();
_translation += translation;
}
inline void Transform::postTranslate(const Vec3& translation) {
invalidCache();
flagTranslation();
Vec3 scaledT = translation;
if (isScaling()) scaledT *= _scale;
if (isRotating()) {
_translation += glm::rotate(_rotation, scaledT);
} else {
_translation += scaledT;
}
}
inline const Transform::Quat& Transform::getRotation() const {
return _rotation;
}
inline void Transform::setRotation(const Quat& rotation) {
invalidCache();
flagRotation();
_rotation = rotation;
}
inline void Transform::preRotate(const Quat& rotation) {
invalidCache();
if (isRotating()) {
_rotation = rotation * _rotation;
} else {
_rotation = rotation;
}
flagRotation();
_translation = glm::rotate(rotation, _translation);
}
inline void Transform::postRotate(const Quat& rotation) {
invalidCache();
if (isNonUniform()) {
Quat newRot;
Vec3 newScale;
Mat3 scaleRot(glm::mat3_cast(rotation));
scaleRot[0] *= _scale;
scaleRot[1] *= _scale;
scaleRot[2] *= _scale;
evalRotationScale(newRot, newScale, scaleRot);
if (isRotating()) {
_rotation *= newRot;
} else {
_rotation = newRot;
}
setScale(newScale);
} else {
if (isRotating()) {
_rotation *= rotation;
} else {
_rotation = rotation;
}
}
flagRotation();
}
inline const Transform::Vec3& Transform::getScale() const {
return _scale;
}
inline void Transform::setScale(float scale) {
invalidCache();
flagUniform();
if (scale == 1.0f) {
unflagScaling();
} else {
flagScaling();
}
_scale = Vec3(scale);
}
inline void Transform::setScale(const Vec3& scale) {
if ((scale.x == scale.y) && (scale.x == scale.z)) {
setScale(scale.x);
} else {
invalidCache();
flagScaling();
flagNonUniform();
_scale = scale;
}
}
inline void Transform::postScale(float scale) {
if (scale == 1.0f) return;
if (isScaling()) {
// if already scaling, just invalidate the cache and apply the uniform scale
invalidCache();
_scale *= scale;
} else {
setScale(scale);
}
}
inline void Transform::postScale(const Vec3& scale) {
invalidCache();
if (isScaling()) {
_scale *= scale;
} else {
_scale = scale;
}
flagScaling();
}
inline Transform::Mat4& Transform::getMatrix(Transform::Mat4& result) const {
updateCache();
result = _matrix;
return result;
}
inline Transform::Mat4& Transform::getInverseMatrix(Transform::Mat4& result) const {
Transform inverse;
evalInverse(inverse);
return inverse.getMatrix(result);
}
inline void Transform::evalFromRawMatrix(const Mat4& matrix) {
// for now this works only in the case of a TRS transformation
if ((matrix[0][3] == 0) && (matrix[1][3] == 0) && (matrix[2][3] == 0) && (matrix[3][3] == 1.f)) {
setTranslation(Vec3(matrix[3]));
evalFromRawMatrix(Mat3(matrix));
}
}
inline void Transform::evalFromRawMatrix(const Mat3& rotationScaleMatrix) {
Quat rotation;
Vec3 scale;
evalRotationScale(rotation, scale, rotationScaleMatrix);
setRotation(rotation);
setScale(scale);
}
inline Transform& Transform::evalInverse(Transform& inverse) const {
inverse.setIdentity();
if (isScaling()) {
// TODO: At some point we will face the case when scale is 0 and so 1/0 will blow up...
// What should we do for this one?
assert(_scale.x != 0);
assert(_scale.y != 0);
assert(_scale.z != 0);
if (isNonUniform()) {
inverse.setScale(Vec3(1.0f/_scale.x, 1.0f/_scale.y, 1.0f/_scale.z));
} else {
inverse.setScale(1.0f/_scale.x);
}
}
if (isRotating()) {
inverse.postRotate(glm::conjugate(_rotation));
}
if (isTranslating()) {
inverse.postTranslate(-_translation);
}
return inverse;
}
inline Transform& Transform::mult( Transform& result, const Transform& left, const Transform& right) {
result = left;
if (right.isTranslating()) {
result.postTranslate(right.getTranslation());
}
if (right.isRotating()) {
result.postRotate(right.getRotation());
}
if (right.isScaling()) {
result.postScale(right.getScale());
}
// HACK: In case of an issue in the Transform multiplication results, to make sure this code is
// working properly uncomment the next 2 lines and compare the results, they should be the same...
// Transform::Mat4 mv = left.getMatrix() * right.getMatrix();
// Transform::Mat4 mv2 = result.getMatrix();
return result;
}
inline Transform& Transform::inverseMult( Transform& result, const Transform& left, const Transform& right) {
result.setIdentity();
if (left.isScaling()) {
const Vec3& s = left.getScale();
result.setScale(Vec3(1.0f / s.x, 1.0f / s.y, 1.0f / s.z));
}
if (left.isRotating()) {
result.postRotate(glm::conjugate(left.getRotation()));
}
if (left.isTranslating() || right.isTranslating()) {
result.postTranslate(right.getTranslation() - left.getTranslation());
}
if (right.isRotating()) {
result.postRotate(right.getRotation());
}
if (right.isScaling()) {
result.postScale(right.getScale());
}
// HACK: In case of an issue in the Transform multiplication results, to make sure this code is
// working properly uncomment the next 2 lines and compare the results, they should be the same...
// Transform::Mat4 mv = left.getMatrix() * right.getMatrix();
// Transform::Mat4 mv2 = result.getMatrix();
return result;
}
inline void Transform::updateCache() const {
if (isCacheInvalid()) {
if (isRotating()) {
Mat3 rot = glm::mat3_cast(_rotation);
if (isScaling()) {
rot[0] *= _scale.x;
rot[1] *= _scale.y;
rot[2] *= _scale.z;
}
_matrix[0] = Vec4(rot[0], 0.f);
_matrix[1] = Vec4(rot[1], 0.f);
_matrix[2] = Vec4(rot[2], 0.f);
} else {
_matrix[0] = Vec4(_scale.x, 0.f, 0.f, 0.f);
_matrix[1] = Vec4(0.f, _scale.y, 0.f, 0.f);
_matrix[2] = Vec4(0.f, 0.f, _scale.z, 0.f);
}
_matrix[3] = Vec4(_translation, 1.0f);
validCache();
}
}
#endif
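A brief, illustrative composition example for the new class; mult() composes right in the local frame of left, which matches multiplying their matrices (as the commented-out HACK check suggests). All values below are made up for the example.
#include "Transform.h"

Transform parent;
parent.setTranslation(Transform::Vec3(0.0f, 1.0f, 0.0f));
parent.setScale(2.0f);

Transform child;
child.setTranslation(Transform::Vec3(1.0f, 0.0f, 0.0f));

Transform combined;
Transform::mult(combined, parent, child);   // combined = parent * child

Transform::Mat4 asMatrix;
combined.getMatrix(asMatrix);               // column 3 is (2, 1, 0, 1): child's x offset scaled by parent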