Merge branch 'master' of github.com:highfidelity/hifi into one
Commit 36eb4eafc6
29 changed files with 164 additions and 102 deletions
@@ -17,6 +17,9 @@ EntityTreeHeadlessViewer::EntityTreeHeadlessViewer()
 }
 
 EntityTreeHeadlessViewer::~EntityTreeHeadlessViewer() {
+    if (_simulation) {
+        _simulation->setEntityTree(nullptr); // Break shared_ptr cycle.
+    }
 }
 
 void EntityTreeHeadlessViewer::init() {
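The destructor body added above exists to break a reference cycle: the tree and the simulation each hold a shared_ptr to the other, so neither refcount can reach zero until one side lets go. A stand-alone sketch of that pattern, with simplified stand-in types rather than the engine's classes:

#include <memory>

struct Simulation;

struct Tree {
    std::shared_ptr<Simulation> simulation;
};

struct Simulation {
    std::shared_ptr<Tree> tree;
    void setEntityTree(std::shared_ptr<Tree> newTree) { tree = std::move(newTree); }
};

int main() {
    auto tree = std::make_shared<Tree>();
    auto simulation = std::make_shared<Simulation>();
    tree->simulation = simulation;
    simulation->setEntityTree(tree);      // cycle: tree <-> simulation, neither can be freed

    simulation->setEntityTree(nullptr);   // break the cycle, as the destructor above does
    // both objects are now destroyed once the local shared_ptrs go out of scope
}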
@@ -571,6 +571,8 @@ void EntityScriptServer::aboutToFinish() {
         entityScriptingInterface->setPacketSender(nullptr);
     }
 
     DependencyManager::destroy<AssignmentParentFinder>();
 
     DependencyManager::get<ResourceManager>()->cleanup();
+
+    DependencyManager::destroy<PluginManager>();
@@ -90,7 +90,7 @@ macro(SET_PACKAGING_PARAMETERS)
   # for the second parent of HEAD (not HEAD) since that is the
   # SHA of the commit merged to master for the build
   if (PR_BUILD)
-    set(_GIT_LOG_FORMAT "%p")
+    set(_GIT_LOG_FORMAT "%p %h")
   else ()
     set(_GIT_LOG_FORMAT "%h")
   endif ()
@@ -1440,9 +1440,9 @@ void Rig::updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm
 
     glm::quat deltaQuat = desiredQuat * glm::inverse(headQuat);
 
-    // limit swing rotation of the deltaQuat by a 30 degree cone.
+    // limit swing rotation of the deltaQuat by a 25 degree cone.
     // TODO: use swing twist decomposition constraint instead, for off axis rotation clamping.
-    const float MAX_ANGLE = 30.0f * RADIANS_PER_DEGREE;
+    const float MAX_ANGLE = 25.0f * RADIANS_PER_DEGREE;
     if (fabsf(glm::angle(deltaQuat)) > MAX_ANGLE) {
         deltaQuat = glm::angleAxis(glm::clamp(glm::angle(deltaQuat), -MAX_ANGLE, MAX_ANGLE), glm::axis(deltaQuat));
     }
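For reference, the clamp in this hunk caps the rotation of deltaQuat at MAX_ANGLE while preserving its axis. A minimal stand-alone version, assuming only GLM (the function name is illustrative, not from the engine):

#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

glm::quat clampRotation(const glm::quat& q, float maxAngleRadians) {
    float angle = glm::angle(q);                       // rotation angle about glm::axis(q)
    if (fabsf(angle) > maxAngleRadians) {
        angle = glm::clamp(angle, -maxAngleRadians, maxAngleRadians);
        return glm::angleAxis(angle, glm::axis(q));    // same axis, capped angle
    }
    return q;
}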
@@ -220,30 +220,44 @@ void Head::calculateMouthShapes(float deltaTime) {
 
 void Head::applyEyelidOffset(glm::quat headOrientation) {
     // Adjusts the eyelid blendshape coefficients so that the eyelid follows the iris as the head pitches.
-
-    if (disableEyelidAdjustment) {
+    bool isBlinking = (_rightEyeBlinkVelocity != 0.0f && _rightEyeBlinkVelocity != 0.0f);
+    if (disableEyelidAdjustment || isBlinking) {
         return;
     }
 
-    glm::quat eyeRotation = rotationBetween(headOrientation * IDENTITY_FORWARD, getLookAtPosition() - _eyePosition);
-    eyeRotation = eyeRotation * glm::angleAxis(safeEulerAngles(headOrientation).y, IDENTITY_UP); // Rotation w.r.t. head
-    float eyePitch = safeEulerAngles(eyeRotation).x;
-    const float EYE_PITCH_TO_COEFFICIENT = 3.5f; // Empirically determined
-    const float MAX_EYELID_OFFSET = 1.5f;
-    float eyelidOffset = glm::clamp(-eyePitch * EYE_PITCH_TO_COEFFICIENT, -1.0f, MAX_EYELID_OFFSET);
+    const float BLINK_DOWN_MULTIPLIER = 0.25f;
+    const float OPEN_DOWN_MULTIPLIER = 0.3f;
+    const float BROW_UP_MULTIPLIER = 0.5f;
 
-    for (int i = 0; i < 2; i++) {
-        const int LEFT_EYE = 8;
-        float eyeCoefficient = _transientBlendshapeCoefficients[i] - _transientBlendshapeCoefficients[LEFT_EYE + i];
-        eyeCoefficient = glm::clamp(eyelidOffset + eyeCoefficient * (1.0f - eyelidOffset), -1.0f, 1.0f);
-        if (eyeCoefficient > 0.0f) {
-            _transientBlendshapeCoefficients[i] = eyeCoefficient;
-            _transientBlendshapeCoefficients[LEFT_EYE + i] = 0.0f;
-        } else {
-            _transientBlendshapeCoefficients[i] = 0.0f;
-            _transientBlendshapeCoefficients[LEFT_EYE + i] = -eyeCoefficient;
+    const float EYE_PITCH_TO_COEFFICIENT = 1.6f; // Empirically determined
+    const float MAX_EYELID_OFFSET = 0.8f; // So that don't fully close eyes when looking way down
+    glm::vec3 lookAt = glm::normalize(getLookAtPosition() - _eyePosition);
+    glm::vec3 headUp = headOrientation * Vectors::UNIT_Y;
+    float eyePitch = (PI / 2.0f) - acos(glm::dot(lookAt, headUp));
+    float eyelidOffset = glm::clamp(abs(eyePitch * EYE_PITCH_TO_COEFFICIENT), 0.0f, MAX_EYELID_OFFSET);
+
+    float blinkUpCoefficient = -eyelidOffset;
+    float blinkDownCoefficient = BLINK_DOWN_MULTIPLIER * eyelidOffset;
+
+    float openUpCoefficient = eyelidOffset;
+    float openDownCoefficient = OPEN_DOWN_MULTIPLIER * eyelidOffset;
+
+    float browsUpCoefficient = BROW_UP_MULTIPLIER * eyelidOffset;
+    float browsDownCoefficient = 0.0f;
+
+    bool isLookingUp = (eyePitch > 0);
+
+    if (isLookingUp) {
+        for (int i = 0; i < 2; i++) {
+            _transientBlendshapeCoefficients[EYE_BLINK_INDICES[i]] = blinkUpCoefficient;
+            _transientBlendshapeCoefficients[EYE_OPEN_INDICES[i]] = openUpCoefficient;
+            _transientBlendshapeCoefficients[BROWS_U_INDICES[i]] = browsUpCoefficient;
+        }
+    } else {
+        for (int i = 0; i < 2; i++) {
+            _transientBlendshapeCoefficients[EYE_BLINK_INDICES[i]] = blinkDownCoefficient;
+            _transientBlendshapeCoefficients[EYE_OPEN_INDICES[i]] = openDownCoefficient;
+            _transientBlendshapeCoefficients[BROWS_U_INDICES[i]] = browsDownCoefficient;
         }
     }
 }
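The new pitch computation avoids Euler decomposition: the angle between the gaze direction and the head's up axis is acos of their dot product, and subtracting that from pi/2 yields a pitch that is positive when looking above the head's horizon and negative below it. A self-contained sketch of just that step, assuming only GLM (the function name is mine, and glm::vec3(0, 1, 0) stands in for Vectors::UNIT_Y):

#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

float eyePitchRelativeToHead(const glm::vec3& eyePosition,
                             const glm::vec3& lookAtPosition,
                             const glm::quat& headOrientation) {
    const float PI = 3.14159265358979f;
    glm::vec3 lookAt = glm::normalize(lookAtPosition - eyePosition);
    glm::vec3 headUp = headOrientation * glm::vec3(0.0f, 1.0f, 0.0f);   // head-space up axis
    return (PI / 2.0f) - acosf(glm::dot(lookAt, headUp));               // > 0: looking up, < 0: looking down
}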
@@ -17,7 +17,6 @@
 #include <QtCore/QJsonArray>
 #include <QVector>
 
 #include <FaceshiftConstants.h>
 #include <GLMHelpers.h>
 #include <shared/JSONHelpers.h>
 

@@ -33,7 +32,7 @@ HeadData::HeadData(AvatarData* owningAvatar) :
     _summedBlendshapeCoefficients(QVector<float>(0, 0.0f)),
     _owningAvatar(owningAvatar)
 {
-
+    computeBlendshapesLookupMap();
 }
 
 glm::quat HeadData::getRawOrientation() const {

@@ -71,16 +70,10 @@ void HeadData::setOrientation(const glm::quat& orientation) {
     setHeadOrientation(orientation);
 }
 
-//Lazily construct a lookup map from the blendshapes
-static const QMap<QString, int>& getBlendshapesLookupMap() {
-    static std::once_flag once;
-    static QMap<QString, int> blendshapeLookupMap;
-    std::call_once(once, [&] {
-        for (int i = 0; i < NUM_FACESHIFT_BLENDSHAPES; i++) {
-            blendshapeLookupMap[FACESHIFT_BLENDSHAPES[i]] = i;
-        }
-    });
-    return blendshapeLookupMap;
+void HeadData::computeBlendshapesLookupMap(){
+    for (int i = 0; i < NUM_FACESHIFT_BLENDSHAPES; i++) {
+        _blendshapeLookupMap[FACESHIFT_BLENDSHAPES[i]] = i;
+    }
 }
 
 int HeadData::getNumSummedBlendshapeCoefficients() const {

@@ -108,11 +101,10 @@ const QVector<float>& HeadData::getSummedBlendshapeCoefficients() {
 }
 
 void HeadData::setBlendshape(QString name, float val) {
-    const auto& blendshapeLookupMap = getBlendshapesLookupMap();
 
     //Check to see if the named blendshape exists, and then set its value if it does
-    auto it = blendshapeLookupMap.find(name);
-    if (it != blendshapeLookupMap.end()) {
+    auto it = _blendshapeLookupMap.find(name);
+    if (it != _blendshapeLookupMap.end()) {
         if (_blendshapeCoefficients.size() <= it.value()) {
             _blendshapeCoefficients.resize(it.value() + 1);
         }

@@ -123,6 +115,18 @@ void HeadData::setBlendshape(QString name, float val) {
     }
 }
 
+int HeadData::getBlendshapeIndex(const QString& name) {
+    auto it = _blendshapeLookupMap.find(name);
+    int index = it != _blendshapeLookupMap.end() ? it.value() : -1;
+    return index;
+}
+
+void HeadData::getBlendshapeIndices(const std::vector<QString>& blendShapeNames, std::vector<int>& indexes) {
+    for (auto& name : blendShapeNames) {
+        indexes.push_back(getBlendshapeIndex(name));
+    }
+}
+
 static const QString JSON_AVATAR_HEAD_ROTATION = QStringLiteral("rotation");
 static const QString JSON_AVATAR_HEAD_BLENDSHAPE_COEFFICIENTS = QStringLiteral("blendShapes");
 static const QString JSON_AVATAR_HEAD_LEAN_FORWARD = QStringLiteral("leanForward");

@@ -131,10 +135,9 @@ static const QString JSON_AVATAR_HEAD_LOOKAT = QStringLiteral("lookAt");
 
 QJsonObject HeadData::toJson() const {
     QJsonObject headJson;
-    const auto& blendshapeLookupMap = getBlendshapesLookupMap();
     QJsonObject blendshapesJson;
-    for (auto name : blendshapeLookupMap.keys()) {
-        auto index = blendshapeLookupMap[name];
+    for (auto name : _blendshapeLookupMap.keys()) {
+        auto index = _blendshapeLookupMap[name];
         float value = 0.0f;
         if (index < _blendshapeCoefficients.size()) {
             value += _blendshapeCoefficients[index];
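The map that used to be built lazily in a function-local static is now a per-instance QMap filled once in the constructor and queried by setBlendshape, getBlendshapeIndex, and toJson. A minimal sketch of that lookup pattern, assuming Qt only (class and variable names here are illustrative, and the name list is a small sample of the Faceshift set):

#include <QMap>
#include <QString>

static const char* BLENDSHAPE_NAMES[] = { "EyeBlink_L", "EyeBlink_R", "BrowsU_L" };  // example subset
static const int NUM_NAMES = sizeof(BLENDSHAPE_NAMES) / sizeof(char*);

class BlendshapeIndex {
public:
    BlendshapeIndex() {
        // build the name -> index map once, up front
        for (int i = 0; i < NUM_NAMES; i++) {
            _lookup[BLENDSHAPE_NAMES[i]] = i;
        }
    }
    int indexOf(const QString& name) const {
        auto it = _lookup.find(name);
        return it != _lookup.end() ? it.value() : -1;   // -1 for unknown names, as above
    }
private:
    QMap<QString, int> _lookup;
};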
@@ -20,6 +20,7 @@
 #include <glm/gtc/quaternion.hpp>
 
 #include <SharedUtil.h>
+#include <FaceshiftConstants.h>
 
 // degrees
 const float MIN_HEAD_YAW = -180.0f;

@@ -55,6 +56,8 @@ public:
     void setOrientation(const glm::quat& orientation);
 
     void setBlendshape(QString name, float val);
+    int getBlendshapeIndex(const QString& name);
+    void getBlendshapeIndices(const std::vector<QString>& blendShapeNames, std::vector<int>& indexes);
     const QVector<float>& getBlendshapeCoefficients() const { return _blendshapeCoefficients; }
     const QVector<float>& getSummedBlendshapeCoefficients();
     int getNumSummedBlendshapeCoefficients() const;

@@ -114,6 +117,7 @@ protected:
     QVector<float> _blendshapeCoefficients;
     QVector<float> _transientBlendshapeCoefficients;
     QVector<float> _summedBlendshapeCoefficients;
+    QMap<QString, int> _blendshapeLookupMap;
    AvatarData* _owningAvatar;
 
 private:

@@ -122,6 +126,7 @@ private:
     HeadData& operator= (const HeadData&);
 
     void setHeadOrientation(const glm::quat& orientation);
+    void computeBlendshapesLookupMap();
 };
 
 #endif // hifi_HeadData_h
@@ -48,12 +48,12 @@ void GLBackend::do_setPipeline(const Batch& batch, size_t paramOffset) {
             return;
         }
 
         // check the program cache
         // pick the program version
 #ifdef GPU_STEREO_CAMERA_BUFFER
-        GLuint glprogram = pipelineObject->_program->getProgram((GLShader::Version) isStereo());
+        GLuint glprogram = pipelineObject->_program->getProgram((GLShader::Version)isStereo());
 #else
         GLuint glprogram = pipelineObject->_program->getProgram();
 #endif

@@ -85,10 +85,11 @@ void GLBackend::do_setPipeline(const Batch& batch, size_t paramOffset) {
             } else {
                 cameraCorrectionBuffer = syncGPUObject(*_pipeline._cameraCorrectionBufferIdentity._buffer);
             }
+            // Invalidate uniform buffer cache slot
+            _uniform._buffers[_pipeline._cameraCorrectionLocation].reset();
             glBindBufferRange(GL_UNIFORM_BUFFER, _pipeline._cameraCorrectionLocation, cameraCorrectionBuffer->_id, 0, sizeof(CameraCorrection));
-
         }
-        (void) CHECK_GL_ERROR();
+        (void)CHECK_GL_ERROR();
         _pipeline._invalidProgram = false;
     }
 }
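The two added lines work with the backend's uniform-buffer binding cache: do_setUniformBuffer skips redundant glBindBufferRange calls by remembering what each slot holds, so binding the camera-correction buffer directly must first clear that slot's cache entry or a later bind to the same slot could be wrongly skipped. A rough sketch of the caching idea, with illustrative names rather than the engine's:

#include <array>
#include <memory>

struct Buffer {};  // stand-in for the gpu buffer type

class UniformSlotCache {
public:
    static const int MAX_SLOTS = 16;

    // Returns true when a real glBindBufferRange call would be needed.
    bool bind(int slot, const std::shared_ptr<Buffer>& buffer) {
        if (_slots[slot].lock() == buffer) {
            return false;              // already bound, skip the GL call
        }
        _slots[slot] = buffer;         // remember what the slot now holds
        return true;
    }

    // Call this whenever the slot is rebound behind the cache's back
    // (the camera-correction bind in do_setPipeline is such a case).
    void invalidate(int slot) {
        _slots[slot].reset();
    }

private:
    std::array<std::weak_ptr<Buffer>, MAX_SLOTS> _slots;
};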
@@ -97,7 +98,7 @@ void GLBackend::updatePipeline() {
     if (_pipeline._invalidProgram) {
         // doing it here is aproblem for calls to glUniform.... so will do it on assing...
         glUseProgram(_pipeline._program);
-        (void) CHECK_GL_ERROR();
+        (void)CHECK_GL_ERROR();
         _pipeline._invalidProgram = false;
     }
 

@@ -106,12 +107,12 @@ void GLBackend::updatePipeline() {
             // first reset to default what should be
             // the fields which were not to default and are default now
             resetPipelineState(_pipeline._state->_signature);
 
             // Update the signature cache with what's going to be touched
             _pipeline._stateSignatureCache |= _pipeline._state->_signature;
 
             // And perform
-            for (auto command: _pipeline._state->_commands) {
+            for (auto command : _pipeline._state->_commands) {
                 command->run(this);
             }
         } else {

@@ -142,8 +143,8 @@ void GLBackend::releaseUniformBuffer(uint32_t slot) {
     if (buf) {
         auto* object = Backend::getGPUObject<GLBuffer>(*buf);
         if (object) {
             glBindBufferBase(GL_UNIFORM_BUFFER, slot, 0); // RELEASE
-            (void) CHECK_GL_ERROR();
+            (void)CHECK_GL_ERROR();
         }
         buf.reset();
     }

@@ -157,8 +158,9 @@ void GLBackend::resetUniformStage() {
 
 void GLBackend::do_setUniformBuffer(const Batch& batch, size_t paramOffset) {
     GLuint slot = batch._params[paramOffset + 3]._uint;
-    if (slot >(GLuint)MAX_NUM_UNIFORM_BUFFERS) {
-        qCDebug(gpugllogging) << "GLBackend::do_setUniformBuffer: Trying to set a uniform Buffer at slot #" << slot << " which doesn't exist. MaxNumUniformBuffers = " << getMaxNumUniformBuffers();
+    if (slot > (GLuint)MAX_NUM_UNIFORM_BUFFERS) {
+        qCDebug(gpugllogging) << "GLBackend::do_setUniformBuffer: Trying to set a uniform Buffer at slot #" << slot
+                              << " which doesn't exist. MaxNumUniformBuffers = " << getMaxNumUniformBuffers();
         return;
     }
     BufferPointer uniformBuffer = batch._buffers.get(batch._params[paramOffset + 2]._uint);

@@ -169,7 +171,7 @@ void GLBackend::do_setUniformBuffer(const Batch& batch, size_t paramOffset) {
         releaseUniformBuffer(slot);
         return;
     }
 
     // check cache before thinking
     if (_uniform._buffers[slot] == uniformBuffer) {
         return;

@@ -181,7 +183,7 @@ void GLBackend::do_setUniformBuffer(const Batch& batch, size_t paramOffset) {
         glBindBufferRange(GL_UNIFORM_BUFFER, slot, object->_buffer, rangeStart, rangeSize);
 
         _uniform._buffers[slot] = uniformBuffer;
-        (void) CHECK_GL_ERROR();
+        (void)CHECK_GL_ERROR();
     } else {
         releaseUniformBuffer(slot);
         return;

@@ -195,8 +197,8 @@ void GLBackend::releaseResourceTexture(uint32_t slot) {
         if (object) {
             GLuint target = object->_target;
             glActiveTexture(GL_TEXTURE0 + slot);
             glBindTexture(target, 0); // RELEASE
-            (void) CHECK_GL_ERROR();
+            (void)CHECK_GL_ERROR();
         }
         tex.reset();
     }

@@ -212,11 +214,11 @@ void GLBackend::resetResourceStage() {
     }
 }
 
 void GLBackend::do_setResourceBuffer(const Batch& batch, size_t paramOffset) {
     GLuint slot = batch._params[paramOffset + 1]._uint;
     if (slot >= (GLuint)MAX_NUM_RESOURCE_BUFFERS) {
-        qCDebug(gpugllogging) << "GLBackend::do_setResourceBuffer: Trying to set a resource Buffer at slot #" << slot << " which doesn't exist. MaxNumResourceBuffers = " << getMaxNumResourceBuffers();
+        qCDebug(gpugllogging) << "GLBackend::do_setResourceBuffer: Trying to set a resource Buffer at slot #" << slot
+                              << " which doesn't exist. MaxNumResourceBuffers = " << getMaxNumResourceBuffers();
         return;
     }
 

@@ -237,7 +239,7 @@ void GLBackend::do_setResourceBuffer(const Batch& batch, size_t paramOffset) {
     // If successful bind then cache it
     if (bindResourceBuffer(slot, resourceBuffer)) {
         _resource._buffers[slot] = resourceBuffer;
     } else { // else clear slot and cache
         releaseResourceBuffer(slot);
         return;
     }

@@ -245,8 +247,9 @@ void GLBackend::do_setResourceBuffer(const Batch& batch, size_t paramOffset) {
 
 void GLBackend::do_setResourceTexture(const Batch& batch, size_t paramOffset) {
     GLuint slot = batch._params[paramOffset + 1]._uint;
-    if (slot >= (GLuint) MAX_NUM_RESOURCE_TEXTURES) {
-        qCDebug(gpugllogging) << "GLBackend::do_setResourceTexture: Trying to set a resource Texture at slot #" << slot << " which doesn't exist. MaxNumResourceTextures = " << getMaxNumResourceTextures();
+    if (slot >= (GLuint)MAX_NUM_RESOURCE_TEXTURES) {
+        qCDebug(gpugllogging) << "GLBackend::do_setResourceTexture: Trying to set a resource Texture at slot #" << slot
+                              << " which doesn't exist. MaxNumResourceTextures = " << getMaxNumResourceTextures();
         return;
     }
 

@@ -265,11 +268,14 @@ void GLBackend::bindResourceTexture(uint32_t slot, const TexturePointer& resourc
 void GLBackend::do_setResourceFramebufferSwapChainTexture(const Batch& batch, size_t paramOffset) {
     GLuint slot = batch._params[paramOffset + 1]._uint;
     if (slot >= (GLuint)MAX_NUM_RESOURCE_TEXTURES) {
-        qCDebug(gpugllogging) << "GLBackend::do_setResourceFramebufferSwapChainTexture: Trying to set a resource Texture at slot #" << slot << " which doesn't exist. MaxNumResourceTextures = " << getMaxNumResourceTextures();
+        qCDebug(gpugllogging)
+            << "GLBackend::do_setResourceFramebufferSwapChainTexture: Trying to set a resource Texture at slot #" << slot
+            << " which doesn't exist. MaxNumResourceTextures = " << getMaxNumResourceTextures();
         return;
     }
 
-    auto swapChain = std::static_pointer_cast<FramebufferSwapChain>(batch._swapChains.get(batch._params[paramOffset + 0]._uint));
+    auto swapChain =
+        std::static_pointer_cast<FramebufferSwapChain>(batch._swapChains.get(batch._params[paramOffset + 0]._uint));
 
     if (!swapChain) {
         releaseResourceTexture(slot);
@@ -168,7 +168,9 @@ void GLBackend::TransformStageState::update(size_t commandIndex, const StereoSta
 
 void GLBackend::TransformStageState::bindCurrentCamera(int eye) const {
     if (_currentCameraOffset != INVALID_OFFSET) {
-        glBindBufferRange(GL_UNIFORM_BUFFER, TRANSFORM_CAMERA_SLOT, _cameraBuffer, _currentCameraOffset + eye * _cameraUboSize, sizeof(CameraBufferElement));
+        static_assert(TRANSFORM_CAMERA_SLOT >= MAX_NUM_UNIFORM_BUFFERS, "TransformCamera may overlap pipeline uniform buffer slots. Invalidate uniform buffer slot cache for safety (call _uniform._buffers[TRANSFORM_CAMERA_SLOT].reset()).");
+        glBindBufferRange(GL_UNIFORM_BUFFER, TRANSFORM_CAMERA_SLOT, _cameraBuffer, _currentCameraOffset + eye * _cameraUboSize,
+                          sizeof(CameraBufferElement));
     }
 }
 
@@ -199,7 +199,9 @@ void NLPacket::readVersion() {
 }
 
 void NLPacket::readSourceID() {
-    if (!PacketTypeEnum::getNonSourcedPackets().contains(_type)) {
+    if (PacketTypeEnum::getNonSourcedPackets().contains(_type)) {
+        _sourceID = NULL_LOCAL_ID;
+    } else {
         _sourceID = sourceIDInHeader(*this);
     }
 }
 
@@ -20,12 +20,6 @@
 
 #include "OctreeLogging.h"
 
-OctreeProcessor::OctreeProcessor() :
-    _tree(NULL),
-    _managedTree(false)
-{
-}
-
 void OctreeProcessor::init() {
     if (!_tree) {
         _tree = createTree();

@@ -34,6 +28,9 @@ void OctreeProcessor::init() {
 }
 
 OctreeProcessor::~OctreeProcessor() {
+    if (_tree) {
+        _tree->eraseAllOctreeElements(false);
+    }
 }
 
 void OctreeProcessor::setTree(OctreePointer newTree) {
@@ -28,7 +28,6 @@
 class OctreeProcessor : public QObject, public QEnableSharedFromThis<OctreeProcessor> {
     Q_OBJECT
 public:
-    OctreeProcessor();
     virtual ~OctreeProcessor();
 
     virtual char getMyNodeType() const = 0;

@@ -61,7 +60,7 @@ protected:
     virtual OctreePointer createTree() = 0;
 
     OctreePointer _tree;
-    bool _managedTree;
+    bool _managedTree { false };
 
     SimpleMovingAverage _elementsPerPacket;
     SimpleMovingAverage _entitiesPerPacket;
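The header change swaps a constructor-initializer for an in-class default, which is what lets the empty OctreeProcessor() definition disappear from the .cpp above: the compiler-generated constructor now does the initialization. A minimal before/after comparison (class names are illustrative):

class Before {
public:
    Before() : _managedTree(false) {}   // needs an explicit constructor just for this
private:
    bool _managedTree;
};

class After {
private:
    bool _managedTree { false };        // default applied by every generated constructor
};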
@@ -269,7 +269,8 @@ void CharacterController::playerStep(btCollisionWorld* collisionWorld, btScalar
         }
         btQuaternion deltaRot = desiredRot * startRot.inverse();
         float angularSpeed = deltaRot.getAngle() / _followTimeRemaining;
-        btQuaternion angularDisplacement = btQuaternion(deltaRot.getAxis(), angularSpeed * dt);
+        glm::vec3 rotationAxis = glm::normalize(glm::axis(bulletToGLM(deltaRot))); // deltaRot.getAxis() is inaccurate
+        btQuaternion angularDisplacement = btQuaternion(glmToBullet(rotationAxis), angularSpeed * dt);
         btQuaternion endRot = angularDisplacement * startRot;
 
         // in order to accumulate displacement of avatar position, we need to take _shapeLocalOffset into account.
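The replacement line takes the rotation axis from GLM because btQuaternion::getAxis() is numerically poor for very small delta rotations; only the per-step angle still comes from the Bullet quaternion. A stand-alone sketch of the same computation, assuming GLM and Bullet (the bulletToGLM/glmToBullet helpers mirror the engine's converters and are inlined here in simplified form):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <LinearMath/btQuaternion.h>

static glm::quat bulletToGLM(const btQuaternion& q) {
    return glm::quat(q.getW(), q.getX(), q.getY(), q.getZ());
}
static btVector3 glmToBullet(const glm::vec3& v) {
    return btVector3(v.x, v.y, v.z);
}

btQuaternion angularDisplacementForStep(const btQuaternion& deltaRot, float timeRemaining, float dt) {
    float angularSpeed = deltaRot.getAngle() / timeRemaining;
    // take the axis from the GLM conversion; more stable than deltaRot.getAxis() for tiny rotations
    glm::vec3 rotationAxis = glm::normalize(glm::axis(bulletToGLM(deltaRot)));
    return btQuaternion(glmToBullet(rotationAxis), angularSpeed * dt);
}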
@@ -198,7 +198,7 @@ vec3 evalGlobalLightingAlphaBlended(mat4 invViewMat, float shadowAttenuation, fl
     vec3 directionalSpecular;
     evalLightingDirectional(directionalDiffuse, directionalSpecular, lightDirection, lightIrradiance, surfaceWS, metallic, fresnel, albedo, shadowAttenuation);
     color += directionalDiffuse;
-    color += (ambientSpecular + directionalSpecular) / opacity;
+    color += evalSpecularWithOpacity(ambientSpecular + directionalSpecular, opacity);
 
     return color;
 }

@@ -231,7 +231,7 @@ vec3 evalGlobalLightingAlphaBlendedWithHaze(
     vec3 directionalSpecular;
     evalLightingDirectional(directionalDiffuse, directionalSpecular, lightDirection, lightIrradiance, surfaceWS, metallic, fresnel, albedo, shadowAttenuation);
     color += directionalDiffuse;
-    color += (ambientSpecular + directionalSpecular) / opacity;
+    color += evalSpecularWithOpacity(ambientSpecular + directionalSpecular, opacity);
 
     // Haze
     if ((isHazeEnabled() > 0.0) && (hazeParams.hazeMode & HAZE_MODE_IS_ACTIVE) == HAZE_MODE_IS_ACTIVE) {

@@ -269,7 +269,7 @@ vec3 evalGlobalLightingAlphaBlendedWithHaze(
     evalLightingDirectional(directionalDiffuse, directionalSpecular, lightDirection, lightIrradiance, surface, metallic, fresnel, albedo, shadowAttenuation);
 
     color += ambientDiffuse + directionalDiffuse;
-    color += (ambientSpecular + directionalSpecular) / opacity;
+    color += evalSpecularWithOpacity(ambientSpecular + directionalSpecular, opacity);
 
     // Haze
     if ((isHazeEnabled() > 0.0) && (hazeParams.hazeMode & HAZE_MODE_IS_ACTIVE) == HAZE_MODE_IS_ACTIVE) {
@@ -197,7 +197,7 @@ vec3 evalGlobalLightingAlphaBlended(mat4 invViewMat, float shadowAttenuation, fl
     vec3 directionalSpecular;
     evalLightingDirectional(directionalDiffuse, directionalSpecular, lightDirection, lightIrradiance, surfaceWS, metallic, fresnel, albedo, shadowAttenuation);
     color += directionalDiffuse;
-    color += (ambientSpecular + directionalSpecular) / opacity;
+    color += evalSpecularWithOpacity(ambientSpecular + directionalSpecular, opacity);
 
     return color;
 }

@@ -223,7 +223,7 @@ vec3 evalGlobalLightingAlphaBlendedWithHaze(
     vec3 directionalSpecular;
     evalLightingDirectional(directionalDiffuse, directionalSpecular, lightDirection, lightIrradiance, surfaceWS, metallic, fresnel, albedo, shadowAttenuation);
     color += directionalDiffuse;
-    color += (ambientSpecular + directionalSpecular) / opacity;
+    color += evalSpecularWithOpacity(ambientSpecular + directionalSpecular, opacity);
 
     // Haze
     // FIXME - temporarily removed until we support it for forward...
@@ -143,6 +143,6 @@ vec4 evalLocalLighting(ivec3 cluster, int numLights, vec3 fragWorldPos, SurfaceD
     fragSpecular *= isSpecularEnabled();
 
     fragColor.rgb += fragDiffuse;
-    fragColor.rgb += fragSpecular / opacity;
+    fragColor.rgb += evalSpecularWithOpacity(fragSpecular, opacity);
     return fragColor;
 }
@@ -314,6 +314,9 @@ void evalFragShadingGloss(out vec3 diffuse, out vec3 specular,
     specular = shading.xyz;
 }
 
+vec3 evalSpecularWithOpacity(vec3 specular, float opacity) {
+    return specular / opacity;
+}
 
 <@if not GETFRESNEL0@>
 <@def GETFRESNEL0@>
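The new evalSpecularWithOpacity helper pre-divides specular by opacity so that when the blend stage later multiplies the fragment by its alpha, highlights on translucent surfaces come back at full strength instead of being dimmed. The same arithmetic written with GLM, purely as a reference:

#include <glm/glm.hpp>

glm::vec3 evalSpecularWithOpacity(const glm::vec3& specular, float opacity) {
    return specular / opacity;   // pre-divide so the blend-time multiply by opacity cancels out
}

// usage mirroring the shader call sites above:
//   color += evalSpecularWithOpacity(ambientSpecular + directionalSpecular, opacity);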
@@ -267,7 +267,7 @@ vec3 fetchLightmapMap(vec2 uv) {
 
 <@func discardTransparent(opacity)@>
 {
-    if (<$opacity$> < 1.0) {
+    if (<$opacity$> < 1e-6) {
         discard;
     }
 }
@@ -41,6 +41,7 @@ void main(void) {
 
     float opacity = getMaterialOpacity(mat) * _alpha;
     <$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
+    <$discardTransparent(opacity)$>;
 
     vec3 albedo = getMaterialAlbedo(mat);
     <$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;

@@ -44,6 +44,7 @@ void main(void) {
 
     float opacity = getMaterialOpacity(mat) * _alpha;
     <$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
+    <$discardTransparent(opacity)$>;
 
     vec3 albedo = getMaterialAlbedo(mat);
     <$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;

@@ -46,6 +46,7 @@ void main(void) {
 
     float opacity = getMaterialOpacity(mat) * _alpha;
     <$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
+    <$discardTransparent(opacity)$>;
 
     vec3 albedo = getMaterialAlbedo(mat);
     <$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;

@@ -45,6 +45,7 @@ void main(void) {
 
     float opacity = getMaterialOpacity(mat) * _alpha;
     <$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
+    <$discardTransparent(opacity)$>;
 
     vec3 albedo = getMaterialAlbedo(mat);
     <$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;

@@ -54,6 +54,7 @@ void main(void) {
 
     float opacity = getMaterialOpacity(mat) * _alpha;
     <$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
+    <$discardTransparent(opacity)$>;
 
     vec3 albedo = getMaterialAlbedo(mat);
     <$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;

@@ -31,6 +31,7 @@ void main(void) {
 
     float opacity = getMaterialOpacity(mat) * _alpha;
     <$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
+    <$discardTransparent(opacity)$>;
 
     vec3 albedo = getMaterialAlbedo(mat);
     <$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;

@@ -41,6 +41,7 @@ void main(void) {
 
     float opacity = getMaterialOpacity(mat) * _alpha;
     <$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
+    <$discardTransparent(opacity)$>;
 
     vec3 albedo = getMaterialAlbedo(mat);
     <$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
@@ -64,3 +64,18 @@ const char* FACESHIFT_BLENDSHAPES[] = {
 };
 
 const int NUM_FACESHIFT_BLENDSHAPES = sizeof(FACESHIFT_BLENDSHAPES) / sizeof(char*);
+
+const int EYE_BLINK_L_INDEX = 0;
+const int EYE_BLINK_R_INDEX = 1;
+const int EYE_SQUINT_L_INDEX = 2;
+const int EYE_SQUINT_R_INDEX = 3;
+const int EYE_OPEN_L_INDEX = 8;
+const int EYE_OPEN_R_INDEX = 9;
+const int BROWS_U_L_INDEX = 17;
+const int BROWS_U_R_INDEX = 18;
+
+
+const int EYE_BLINK_INDICES[] = { EYE_BLINK_L_INDEX, EYE_BLINK_R_INDEX };
+const int EYE_SQUINT_INDICES[] = { EYE_SQUINT_L_INDEX, EYE_SQUINT_R_INDEX };
+const int EYE_OPEN_INDICES[] = { EYE_OPEN_L_INDEX, EYE_OPEN_R_INDEX };
+const int BROWS_U_INDICES[] = { BROWS_U_L_INDEX, BROWS_U_R_INDEX };
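The paired _L/_R index arrays let one loop body drive both eyes, which is how the rewritten Head::applyEyelidOffset consumes them. A small sketch of that usage (the coefficient vector here is a stand-in for the avatar's blendshape coefficients, and only two of the arrays are shown):

#include <vector>

const int EYE_BLINK_INDICES[] = { 0, 1 };    // EYE_BLINK_L_INDEX, EYE_BLINK_R_INDEX
const int EYE_OPEN_INDICES[]  = { 8, 9 };    // EYE_OPEN_L_INDEX, EYE_OPEN_R_INDEX

void applyBlink(std::vector<float>& coefficients, float blinkCoefficient, float openCoefficient) {
    for (int i = 0; i < 2; i++) {            // i == 0: left eye, i == 1: right eye
        coefficients[EYE_BLINK_INDICES[i]] = blinkCoefficient;
        coefficients[EYE_OPEN_INDICES[i]] = openCoefficient;
    }
}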
@@ -16,5 +16,10 @@
 extern const char* FACESHIFT_BLENDSHAPES[];
 /// The size of FACESHIFT_BLENDSHAPES
 extern const int NUM_FACESHIFT_BLENDSHAPES;
+// Eyes and Brows indices
+extern const int EYE_BLINK_INDICES[];
+extern const int EYE_OPEN_INDICES[];
+extern const int BROWS_U_INDICES[];
+extern const int EYE_SQUINT_INDICES[];
 
 #endif // hifi_FaceshiftConstants_h
@@ -108,14 +108,13 @@ createControllerDisplay = function(config) {
             for (var partName in controller.parts) {
                 overlayID = this.overlays[i++];
                 var part = controller.parts[partName];
-                localPosition = Vec3.sum(controller.position, Vec3.multiplyQbyV(controller.rotation, part.naturalPosition));
+                localPosition = Vec3.subtract(part.naturalPosition, controller.naturalPosition);
                 var localRotation;
                 var value = this.partValues[partName];
                 var offset, rotation;
                 if (value !== undefined) {
                     if (part.type === "linear") {
-                        var axis = Vec3.multiplyQbyV(controller.rotation, part.axis);
-                        offset = Vec3.multiply(part.maxTranslation * value, axis);
+                        offset = Vec3.multiply(part.maxTranslation * value, part.axis);
                         localPosition = Vec3.sum(localPosition, offset);
                         localRotation = undefined;
                     } else if (part.type === "joystick") {

@@ -126,8 +125,8 @@ createControllerDisplay = function(config) {
                     } else {
                         offset = { x: 0, y: 0, z: 0 };
                     }
-                    localPosition = Vec3.sum(controller.position, Vec3.multiplyQbyV(controller.rotation, Vec3.sum(offset, part.naturalPosition)));
-                    localRotation = Quat.multiply(controller.rotation, rotation);
+                    localPosition = Vec3.sum(offset, localPosition);
+                    localRotation = rotation;
                 } else if (part.type === "rotational") {
                     value = clamp(value, part.minValue, part.maxValue);
                     var pct = (value - part.minValue) / part.maxValue;

@@ -139,8 +138,8 @@ createControllerDisplay = function(config) {
                     } else {
                         offset = { x: 0, y: 0, z: 0 };
                     }
-                    localPosition = Vec3.sum(controller.position, Vec3.multiplyQbyV(controller.rotation, Vec3.sum(offset, part.naturalPosition)));
-                    localRotation = Quat.multiply(controller.rotation, rotation);
+                    localPosition = Vec3.sum(offset, localPosition);
+                    localRotation = rotation;
                 }
             }
             if (localRotation !== undefined) {

@@ -169,9 +168,11 @@ createControllerDisplay = function(config) {
 
         if (controller.naturalPosition) {
             position = Vec3.sum(Vec3.multiplyQbyV(controller.rotation, controller.naturalPosition), position);
+        } else {
+            controller.naturalPosition = { x: 0, y: 0, z: 0 };
         }
 
-        var overlayID = Overlays.addOverlay("model", {
+        var baseOverlayID = Overlays.addOverlay("model", {
             url: controller.modelURL,
             dimensions: Vec3.multiply(sensorScaleFactor, controller.dimensions),
             localRotation: controller.rotation,

@@ -181,23 +182,21 @@ createControllerDisplay = function(config) {
             ignoreRayIntersection: true
         });
 
-        controllerDisplay.overlays.push(overlayID);
-        overlayID = null;
+        controllerDisplay.overlays.push(baseOverlayID);
 
         if (controller.parts) {
             for (var partName in controller.parts) {
                 var part = controller.parts[partName];
-                var partPosition = Vec3.sum(controller.position, Vec3.multiplyQbyV(controller.rotation, part.naturalPosition));
-                var innerRotation = controller.rotation;
+                var localPosition = Vec3.subtract(part.naturalPosition, controller.naturalPosition);
+                var localRotation = { x: 0, y: 0, z: 0, w: 1 }
 
                 controllerDisplay.parts[partName] = controller.parts[partName];
 
                 var properties = {
                     url: part.modelURL,
-                    localPosition: partPosition,
-                    localRotation: innerRotation,
-                    parentID: MyAvatar.SELF_ID,
-                    parentJointIndex: controller.jointIndex,
+                    localPosition: localPosition,
+                    localRotation: localRotation,
+                    parentID: baseOverlayID,
                     ignoreRayIntersection: true
                 };
 

@@ -207,11 +206,10 @@ createControllerDisplay = function(config) {
                     properties['textures'] = textures;
                 }
 
-                overlayID = Overlays.addOverlay("model", properties);
+                var overlayID = Overlays.addOverlay("model", properties);
 
                 if (part.type === "rotational") {
                     var input = resolveHardware(part.input);
                     print("Mapping to: ", part.input, input);
                     mapping.from([input]).peek().to(function(partName) {
                         return function(value) {
                             // insert the most recent controller value into controllerDisplay.partValues.
@@ -310,7 +310,7 @@ function printToPolaroid(image_url) {
         "gravity": { "x": 0, "y": -2.5, "z": 0 },
 
         "velocity": { "x": 0, "y": 1.95, "z": 0 },
-        "angularVelocity": { "x": -1.0, "y": 0, "z": -1.3 },
+        "angularVelocity": Vec3.multiplyQbyV(MyAvatar.orientation, { "x": -1.0, "y": 0, "z": -1.3 }),
 
         "dynamic": true,
         "collisionsWillMove": true,