mirror of
https://github.com/overte-org/overte.git
synced 2025-08-07 12:30:40 +02:00
Merge remote-tracking branch 'upstream/master' into modelTextures
This commit is contained in:
commit
c4670322ab
47 changed files with 1271 additions and 486 deletions
|
@ -4,12 +4,15 @@ android {
|
||||||
compileSdkVersion 26
|
compileSdkVersion 26
|
||||||
//buildToolsVersion '27.0.3'
|
//buildToolsVersion '27.0.3'
|
||||||
|
|
||||||
|
def appVersionCode = Integer.valueOf(RELEASE_NUMBER ?: 1)
|
||||||
|
def appVersionName = RELEASE_NUMBER ?: "1.0"
|
||||||
|
|
||||||
defaultConfig {
|
defaultConfig {
|
||||||
applicationId "io.highfidelity.hifiinterface"
|
applicationId "io.highfidelity.hifiinterface"
|
||||||
minSdkVersion 24
|
minSdkVersion 24
|
||||||
targetSdkVersion 26
|
targetSdkVersion 26
|
||||||
versionCode 1
|
versionCode appVersionCode
|
||||||
versionName "1.0"
|
versionName appVersionName
|
||||||
ndk { abiFilters 'arm64-v8a' }
|
ndk { abiFilters 'arm64-v8a' }
|
||||||
externalNativeBuild {
|
externalNativeBuild {
|
||||||
cmake {
|
cmake {
|
||||||
|
|
|
@ -291,18 +291,6 @@ AssetServer::AssetServer(ReceivedMessage& message) :
|
||||||
_bakingTaskPool(this),
|
_bakingTaskPool(this),
|
||||||
_filesizeLimit(AssetUtils::MAX_UPLOAD_SIZE)
|
_filesizeLimit(AssetUtils::MAX_UPLOAD_SIZE)
|
||||||
{
|
{
|
||||||
// store the current state of image compression so we can reset it when this assignment is complete
|
|
||||||
_wasColorTextureCompressionEnabled = image::isColorTexturesCompressionEnabled();
|
|
||||||
_wasGrayscaleTextureCompressionEnabled = image::isGrayscaleTexturesCompressionEnabled();
|
|
||||||
_wasNormalTextureCompressionEnabled = image::isNormalTexturesCompressionEnabled();
|
|
||||||
_wasCubeTextureCompressionEnabled = image::isCubeTexturesCompressionEnabled();
|
|
||||||
|
|
||||||
// enable compression in image library
|
|
||||||
image::setColorTexturesCompressionEnabled(true);
|
|
||||||
image::setGrayscaleTexturesCompressionEnabled(true);
|
|
||||||
image::setNormalTexturesCompressionEnabled(true);
|
|
||||||
image::setCubeTexturesCompressionEnabled(true);
|
|
||||||
|
|
||||||
BAKEABLE_TEXTURE_EXTENSIONS = image::getSupportedFormats();
|
BAKEABLE_TEXTURE_EXTENSIONS = image::getSupportedFormats();
|
||||||
qDebug() << "Supported baking texture formats:" << BAKEABLE_MODEL_EXTENSIONS;
|
qDebug() << "Supported baking texture formats:" << BAKEABLE_MODEL_EXTENSIONS;
|
||||||
|
|
||||||
|
@ -354,12 +342,6 @@ void AssetServer::aboutToFinish() {
|
||||||
while (_pendingBakes.size() > 0) {
|
while (_pendingBakes.size() > 0) {
|
||||||
QCoreApplication::processEvents();
|
QCoreApplication::processEvents();
|
||||||
}
|
}
|
||||||
|
|
||||||
// re-set defaults in image library
|
|
||||||
image::setColorTexturesCompressionEnabled(_wasCubeTextureCompressionEnabled);
|
|
||||||
image::setGrayscaleTexturesCompressionEnabled(_wasGrayscaleTextureCompressionEnabled);
|
|
||||||
image::setNormalTexturesCompressionEnabled(_wasNormalTextureCompressionEnabled);
|
|
||||||
image::setCubeTexturesCompressionEnabled(_wasCubeTextureCompressionEnabled);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
void AssetServer::run() {
|
void AssetServer::run() {
|
||||||
|
|
|
@ -167,11 +167,6 @@ private:
|
||||||
using RequestQueue = QVector<QPair<QSharedPointer<ReceivedMessage>, SharedNodePointer>>;
|
using RequestQueue = QVector<QPair<QSharedPointer<ReceivedMessage>, SharedNodePointer>>;
|
||||||
RequestQueue _queuedRequests;
|
RequestQueue _queuedRequests;
|
||||||
|
|
||||||
bool _wasColorTextureCompressionEnabled { false };
|
|
||||||
bool _wasGrayscaleTextureCompressionEnabled { false };
|
|
||||||
bool _wasNormalTextureCompressionEnabled { false };
|
|
||||||
bool _wasCubeTextureCompressionEnabled { false };
|
|
||||||
|
|
||||||
uint64_t _filesizeLimit;
|
uint64_t _filesizeLimit;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@ -4,8 +4,8 @@ set(EXTERNAL_NAME serverless-content)
|
||||||
|
|
||||||
ExternalProject_Add(
|
ExternalProject_Add(
|
||||||
${EXTERNAL_NAME}
|
${EXTERNAL_NAME}
|
||||||
URL http://cdn.highfidelity.com/content-sets/serverless-tutorial-RC68.zip
|
URL http://cdn.highfidelity.com/content-sets/serverless-tutorial-RC68-v2.zip
|
||||||
URL_MD5 a068f74d4045e257cfa7926fe6e38ad5
|
URL_MD5 f7d290471baf7f5694c147217b8fc548
|
||||||
CONFIGURE_COMMAND ""
|
CONFIGURE_COMMAND ""
|
||||||
BUILD_COMMAND ""
|
BUILD_COMMAND ""
|
||||||
INSTALL_COMMAND ""
|
INSTALL_COMMAND ""
|
||||||
|
|
|
@ -95,7 +95,7 @@ macro(SET_PACKAGING_PARAMETERS)
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
execute_process(
|
execute_process(
|
||||||
COMMAND git log -1 --format=${_GIT_LOG_FORMAT}
|
COMMAND git log -1 --abbrev=7 --format=${_GIT_LOG_FORMAT}
|
||||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
|
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
|
||||||
OUTPUT_VARIABLE _GIT_LOG_OUTPUT
|
OUTPUT_VARIABLE _GIT_LOG_OUTPUT
|
||||||
ERROR_VARIABLE _GIT_LOG_ERROR
|
ERROR_VARIABLE _GIT_LOG_ERROR
|
||||||
|
|
|
@ -1,5 +1,7 @@
|
||||||
{
|
{
|
||||||
"releaseType": "@RELEASE_TYPE@",
|
"releaseType": "@RELEASE_TYPE@",
|
||||||
|
"buildNumber": "@BUILD_NUMBER@",
|
||||||
|
"stableBuild": "@STABLE_BUILD@",
|
||||||
"buildIdentifier": "@BUILD_VERSION@",
|
"buildIdentifier": "@BUILD_VERSION@",
|
||||||
"organization": "@BUILD_ORGANIZATION@"
|
"organization": "@BUILD_ORGANIZATION@"
|
||||||
}
|
}
|
||||||
|
|
|
@ -905,7 +905,6 @@ bool setupEssentials(int& argc, char** argv, bool runningMarkerExisted) {
|
||||||
DependencyManager::set<DiscoverabilityManager>();
|
DependencyManager::set<DiscoverabilityManager>();
|
||||||
DependencyManager::set<SceneScriptingInterface>();
|
DependencyManager::set<SceneScriptingInterface>();
|
||||||
DependencyManager::set<OffscreenUi>();
|
DependencyManager::set<OffscreenUi>();
|
||||||
DependencyManager::set<AutoUpdater>();
|
|
||||||
DependencyManager::set<Midi>();
|
DependencyManager::set<Midi>();
|
||||||
DependencyManager::set<PathUtils>();
|
DependencyManager::set<PathUtils>();
|
||||||
DependencyManager::set<InterfaceDynamicFactory>();
|
DependencyManager::set<InterfaceDynamicFactory>();
|
||||||
|
@ -1439,17 +1438,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
||||||
// add firstRun flag from settings to launch event
|
// add firstRun flag from settings to launch event
|
||||||
Setting::Handle<bool> firstRun { Settings::firstRun, true };
|
Setting::Handle<bool> firstRun { Settings::firstRun, true };
|
||||||
|
|
||||||
// once the settings have been loaded, check if we need to flip the default for UserActivityLogger
|
|
||||||
auto& userActivityLogger = UserActivityLogger::getInstance();
|
|
||||||
if (!userActivityLogger.isDisabledSettingSet()) {
|
|
||||||
// the user activity logger is opt-out for Interface
|
|
||||||
// but it's defaulted to disabled for other targets
|
|
||||||
// so we need to enable it here if it has never been disabled by the user
|
|
||||||
userActivityLogger.disable(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
QString machineFingerPrint = uuidStringWithoutCurlyBraces(FingerprintUtils::getMachineFingerprint());
|
QString machineFingerPrint = uuidStringWithoutCurlyBraces(FingerprintUtils::getMachineFingerprint());
|
||||||
|
|
||||||
|
auto& userActivityLogger = UserActivityLogger::getInstance();
|
||||||
if (userActivityLogger.isEnabled()) {
|
if (userActivityLogger.isEnabled()) {
|
||||||
// sessionRunTime will be reset soon by loadSettings. Grab it now to get previous session value.
|
// sessionRunTime will be reset soon by loadSettings. Grab it now to get previous session value.
|
||||||
// The value will be 0 if the user blew away settings this session, which is both a feature and a bug.
|
// The value will be 0 if the user blew away settings this session, which is both a feature and a bug.
|
||||||
|
@ -1784,10 +1775,12 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
||||||
// If launched from Steam, let it handle updates
|
// If launched from Steam, let it handle updates
|
||||||
const QString HIFI_NO_UPDATER_COMMAND_LINE_KEY = "--no-updater";
|
const QString HIFI_NO_UPDATER_COMMAND_LINE_KEY = "--no-updater";
|
||||||
bool noUpdater = arguments().indexOf(HIFI_NO_UPDATER_COMMAND_LINE_KEY) != -1;
|
bool noUpdater = arguments().indexOf(HIFI_NO_UPDATER_COMMAND_LINE_KEY) != -1;
|
||||||
if (!noUpdater) {
|
bool buildCanUpdate = BuildInfo::BUILD_TYPE == BuildInfo::BuildType::Stable
|
||||||
|
|| BuildInfo::BUILD_TYPE == BuildInfo::BuildType::Master;
|
||||||
|
if (!noUpdater && buildCanUpdate) {
|
||||||
constexpr auto INSTALLER_TYPE_CLIENT_ONLY = "client_only";
|
constexpr auto INSTALLER_TYPE_CLIENT_ONLY = "client_only";
|
||||||
|
|
||||||
auto applicationUpdater = DependencyManager::get<AutoUpdater>();
|
auto applicationUpdater = DependencyManager::set<AutoUpdater>();
|
||||||
|
|
||||||
AutoUpdater::InstallerType type = installerType == INSTALLER_TYPE_CLIENT_ONLY
|
AutoUpdater::InstallerType type = installerType == INSTALLER_TYPE_CLIENT_ONLY
|
||||||
? AutoUpdater::InstallerType::CLIENT_ONLY : AutoUpdater::InstallerType::FULL;
|
? AutoUpdater::InstallerType::CLIENT_ONLY : AutoUpdater::InstallerType::FULL;
|
||||||
|
|
|
@ -124,7 +124,7 @@ Menu::Menu() {
|
||||||
});
|
});
|
||||||
|
|
||||||
// Edit > Delete
|
// Edit > Delete
|
||||||
auto deleteAction =addActionToQMenuAndActionHash(editMenu, "Delete", QKeySequence::Delete);
|
auto deleteAction = addActionToQMenuAndActionHash(editMenu, "Delete", QKeySequence::Delete);
|
||||||
connect(deleteAction, &QAction::triggered, [] {
|
connect(deleteAction, &QAction::triggered, [] {
|
||||||
QKeyEvent* keyEvent = new QKeyEvent(QEvent::KeyPress, Qt::Key_Delete, Qt::ControlModifier);
|
QKeyEvent* keyEvent = new QKeyEvent(QEvent::KeyPress, Qt::Key_Delete, Qt::ControlModifier);
|
||||||
QCoreApplication::postEvent(QCoreApplication::instance(), keyEvent);
|
QCoreApplication::postEvent(QCoreApplication::instance(), keyEvent);
|
||||||
|
|
|
@ -21,17 +21,6 @@ AvatarMotionState::AvatarMotionState(AvatarSharedPointer avatar, const btCollisi
|
||||||
_type = MOTIONSTATE_TYPE_AVATAR;
|
_type = MOTIONSTATE_TYPE_AVATAR;
|
||||||
}
|
}
|
||||||
|
|
||||||
void AvatarMotionState::handleEasyChanges(uint32_t& flags) {
|
|
||||||
ObjectMotionState::handleEasyChanges(flags);
|
|
||||||
if (flags & Simulation::DIRTY_PHYSICS_ACTIVATION && !_body->isActive()) {
|
|
||||||
_body->activate();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
bool AvatarMotionState::handleHardAndEasyChanges(uint32_t& flags, PhysicsEngine* engine) {
|
|
||||||
return ObjectMotionState::handleHardAndEasyChanges(flags, engine);
|
|
||||||
}
|
|
||||||
|
|
||||||
AvatarMotionState::~AvatarMotionState() {
|
AvatarMotionState::~AvatarMotionState() {
|
||||||
assert(_avatar);
|
assert(_avatar);
|
||||||
_avatar = nullptr;
|
_avatar = nullptr;
|
||||||
|
@ -57,9 +46,6 @@ PhysicsMotionType AvatarMotionState::computePhysicsMotionType() const {
|
||||||
const btCollisionShape* AvatarMotionState::computeNewShape() {
|
const btCollisionShape* AvatarMotionState::computeNewShape() {
|
||||||
ShapeInfo shapeInfo;
|
ShapeInfo shapeInfo;
|
||||||
std::static_pointer_cast<Avatar>(_avatar)->computeShapeInfo(shapeInfo);
|
std::static_pointer_cast<Avatar>(_avatar)->computeShapeInfo(shapeInfo);
|
||||||
glm::vec3 halfExtents = shapeInfo.getHalfExtents();
|
|
||||||
halfExtents.y = 0.0f;
|
|
||||||
_diameter = 2.0f * glm::length(halfExtents);
|
|
||||||
return getShapeManager()->getShape(shapeInfo);
|
return getShapeManager()->getShape(shapeInfo);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -74,31 +60,25 @@ void AvatarMotionState::getWorldTransform(btTransform& worldTrans) const {
|
||||||
worldTrans.setRotation(glmToBullet(getObjectRotation()));
|
worldTrans.setRotation(glmToBullet(getObjectRotation()));
|
||||||
if (_body) {
|
if (_body) {
|
||||||
_body->setLinearVelocity(glmToBullet(getObjectLinearVelocity()));
|
_body->setLinearVelocity(glmToBullet(getObjectLinearVelocity()));
|
||||||
_body->setAngularVelocity(glmToBullet(getObjectAngularVelocity()));
|
_body->setAngularVelocity(glmToBullet(getObjectLinearVelocity()));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// virtual
|
// virtual
|
||||||
void AvatarMotionState::setWorldTransform(const btTransform& worldTrans) {
|
void AvatarMotionState::setWorldTransform(const btTransform& worldTrans) {
|
||||||
|
// HACK: The PhysicsEngine does not actually move OTHER avatars -- instead it slaves their local RigidBody to the transform
|
||||||
|
// as specified by a remote simulation. However, to give the remote simulation time to respond to our own objects we tie
|
||||||
|
// the other avatar's body to its true position with a simple spring. This is a HACK that will have to be improved later.
|
||||||
const float SPRING_TIMESCALE = 0.5f;
|
const float SPRING_TIMESCALE = 0.5f;
|
||||||
float tau = PHYSICS_ENGINE_FIXED_SUBSTEP / SPRING_TIMESCALE;
|
float tau = PHYSICS_ENGINE_FIXED_SUBSTEP / SPRING_TIMESCALE;
|
||||||
btVector3 currentPosition = worldTrans.getOrigin();
|
btVector3 currentPosition = worldTrans.getOrigin();
|
||||||
btVector3 offsetToTarget = glmToBullet(getObjectPosition()) - currentPosition;
|
btVector3 targetPosition = glmToBullet(getObjectPosition());
|
||||||
float distance = offsetToTarget.length();
|
btTransform newTransform;
|
||||||
if ((1.0f - tau) * distance > _diameter) {
|
newTransform.setOrigin((1.0f - tau) * currentPosition + tau * targetPosition);
|
||||||
// the avatar body is far from its target --> slam position
|
newTransform.setRotation(glmToBullet(getObjectRotation()));
|
||||||
btTransform newTransform;
|
_body->setWorldTransform(newTransform);
|
||||||
newTransform.setOrigin(currentPosition + offsetToTarget);
|
_body->setLinearVelocity(glmToBullet(getObjectLinearVelocity()));
|
||||||
newTransform.setRotation(glmToBullet(getObjectRotation()));
|
_body->setAngularVelocity(glmToBullet(getObjectLinearVelocity()));
|
||||||
_body->setWorldTransform(newTransform);
|
|
||||||
_body->setLinearVelocity(glmToBullet(getObjectLinearVelocity()));
|
|
||||||
_body->setAngularVelocity(glmToBullet(getObjectAngularVelocity()));
|
|
||||||
} else {
|
|
||||||
// the avatar body is near its target --> slam velocity
|
|
||||||
btVector3 velocity = glmToBullet(getObjectLinearVelocity()) + (1.0f / SPRING_TIMESCALE) * offsetToTarget;
|
|
||||||
_body->setLinearVelocity(velocity);
|
|
||||||
_body->setAngularVelocity(glmToBullet(getObjectAngularVelocity()));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// These pure virtual methods must be implemented for each MotionState type
|
// These pure virtual methods must be implemented for each MotionState type
|
||||||
|
@ -165,8 +145,3 @@ void AvatarMotionState::computeCollisionGroupAndMask(int32_t& group, int32_t& ma
|
||||||
mask = Physics::getDefaultCollisionMask(group);
|
mask = Physics::getDefaultCollisionMask(group);
|
||||||
}
|
}
|
||||||
|
|
||||||
// virtual
|
|
||||||
float AvatarMotionState::getMass() const {
|
|
||||||
return std::static_pointer_cast<Avatar>(_avatar)->computeMass();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
|
@ -23,9 +23,6 @@ class AvatarMotionState : public ObjectMotionState {
|
||||||
public:
|
public:
|
||||||
AvatarMotionState(AvatarSharedPointer avatar, const btCollisionShape* shape);
|
AvatarMotionState(AvatarSharedPointer avatar, const btCollisionShape* shape);
|
||||||
|
|
||||||
virtual void handleEasyChanges(uint32_t& flags) override;
|
|
||||||
virtual bool handleHardAndEasyChanges(uint32_t& flags, PhysicsEngine* engine) override;
|
|
||||||
|
|
||||||
virtual PhysicsMotionType getMotionType() const override { return _motionType; }
|
virtual PhysicsMotionType getMotionType() const override { return _motionType; }
|
||||||
|
|
||||||
virtual uint32_t getIncomingDirtyFlags() override;
|
virtual uint32_t getIncomingDirtyFlags() override;
|
||||||
|
@ -67,8 +64,6 @@ public:
|
||||||
|
|
||||||
virtual void computeCollisionGroupAndMask(int32_t& group, int32_t& mask) const override;
|
virtual void computeCollisionGroupAndMask(int32_t& group, int32_t& mask) const override;
|
||||||
|
|
||||||
virtual float getMass() const override;
|
|
||||||
|
|
||||||
friend class AvatarManager;
|
friend class AvatarManager;
|
||||||
friend class Avatar;
|
friend class Avatar;
|
||||||
|
|
||||||
|
@ -81,7 +76,6 @@ protected:
|
||||||
virtual const btCollisionShape* computeNewShape() override;
|
virtual const btCollisionShape* computeNewShape() override;
|
||||||
|
|
||||||
AvatarSharedPointer _avatar;
|
AvatarSharedPointer _avatar;
|
||||||
float _diameter { 0.0f };
|
|
||||||
|
|
||||||
uint32_t _dirtyFlags;
|
uint32_t _dirtyFlags;
|
||||||
};
|
};
|
||||||
|
|
|
@ -2117,6 +2117,31 @@ bool MyAvatar::shouldRenderHead(const RenderArgs* renderArgs) const {
|
||||||
return !defaultMode || !firstPerson || !insideHead;
|
return !defaultMode || !firstPerson || !insideHead;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void MyAvatar::setHasScriptedBlendshapes(bool hasScriptedBlendshapes) {
|
||||||
|
if (hasScriptedBlendshapes == _hasScriptedBlendShapes) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (!hasScriptedBlendshapes) {
|
||||||
|
// send a forced avatarData update to make sure the script can send neutal blendshapes on unload
|
||||||
|
// without having to wait for the update loop, make sure _hasScriptedBlendShapes is still true
|
||||||
|
// before sending the update, or else it won't send the neutal blendshapes to the receiving clients
|
||||||
|
sendAvatarDataPacket(true);
|
||||||
|
}
|
||||||
|
_hasScriptedBlendShapes = hasScriptedBlendshapes;
|
||||||
|
}
|
||||||
|
|
||||||
|
void MyAvatar::setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement) {
|
||||||
|
_headData->setHasProceduralBlinkFaceMovement(hasProceduralBlinkFaceMovement);
|
||||||
|
}
|
||||||
|
|
||||||
|
void MyAvatar::setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement) {
|
||||||
|
_headData->setHasProceduralEyeFaceMovement(hasProceduralEyeFaceMovement);
|
||||||
|
}
|
||||||
|
|
||||||
|
void MyAvatar::setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement) {
|
||||||
|
_headData->setHasAudioEnabledFaceMovement(hasAudioEnabledFaceMovement);
|
||||||
|
}
|
||||||
|
|
||||||
void MyAvatar::updateOrientation(float deltaTime) {
|
void MyAvatar::updateOrientation(float deltaTime) {
|
||||||
|
|
||||||
// Smoothly rotate body with arrow keys
|
// Smoothly rotate body with arrow keys
|
||||||
|
|
|
@ -86,6 +86,10 @@ class MyAvatar : public Avatar {
|
||||||
* @property {number} audioListenerModeCamera=1 - The audio listening position is at the camera. <em>Read-only.</em>
|
* @property {number} audioListenerModeCamera=1 - The audio listening position is at the camera. <em>Read-only.</em>
|
||||||
* @property {number} audioListenerModeCustom=2 - The audio listening position is at a the position specified by set by the
|
* @property {number} audioListenerModeCustom=2 - The audio listening position is at a the position specified by set by the
|
||||||
* <code>customListenPosition</code> and <code>customListenOrientation</code> property values. <em>Read-only.</em>
|
* <code>customListenPosition</code> and <code>customListenOrientation</code> property values. <em>Read-only.</em>
|
||||||
|
* @property {boolean} hasScriptedBlendshapes=false - Blendshapes will be transmitted over the network if set to true.
|
||||||
|
* @property {boolean} hasProceduralBlinkFaceMovement=true - procedural blinking will be turned on if set to true.
|
||||||
|
* @property {boolean} hasProceduralEyeFaceMovement=true - procedural eye movement will be turned on if set to true.
|
||||||
|
* @property {boolean} hasAudioEnabledFaceMovement=true - If set to true, voice audio will move the mouth Blendshapes while MyAvatar.hasScriptedBlendshapes is enabled.
|
||||||
* @property {Vec3} customListenPosition=Vec3.ZERO - The listening position used when the <code>audioListenerMode</code>
|
* @property {Vec3} customListenPosition=Vec3.ZERO - The listening position used when the <code>audioListenerMode</code>
|
||||||
* property value is <code>audioListenerModeCustom</code>.
|
* property value is <code>audioListenerModeCustom</code>.
|
||||||
* @property {Quat} customListenOrientation=Quat.IDENTITY - The listening orientation used when the
|
* @property {Quat} customListenOrientation=Quat.IDENTITY - The listening orientation used when the
|
||||||
|
@ -187,6 +191,10 @@ class MyAvatar : public Avatar {
|
||||||
Q_PROPERTY(AudioListenerMode audioListenerModeHead READ getAudioListenerModeHead)
|
Q_PROPERTY(AudioListenerMode audioListenerModeHead READ getAudioListenerModeHead)
|
||||||
Q_PROPERTY(AudioListenerMode audioListenerModeCamera READ getAudioListenerModeCamera)
|
Q_PROPERTY(AudioListenerMode audioListenerModeCamera READ getAudioListenerModeCamera)
|
||||||
Q_PROPERTY(AudioListenerMode audioListenerModeCustom READ getAudioListenerModeCustom)
|
Q_PROPERTY(AudioListenerMode audioListenerModeCustom READ getAudioListenerModeCustom)
|
||||||
|
Q_PROPERTY(bool hasScriptedBlendshapes READ getHasScriptedBlendshapes WRITE setHasScriptedBlendshapes)
|
||||||
|
Q_PROPERTY(bool hasProceduralBlinkFaceMovement READ getHasProceduralBlinkFaceMovement WRITE setHasProceduralBlinkFaceMovement)
|
||||||
|
Q_PROPERTY(bool hasProceduralEyeFaceMovement READ getHasProceduralEyeFaceMovement WRITE setHasProceduralEyeFaceMovement)
|
||||||
|
Q_PROPERTY(bool hasAudioEnabledFaceMovement READ getHasAudioEnabledFaceMovement WRITE setHasAudioEnabledFaceMovement)
|
||||||
//TODO: make gravity feature work Q_PROPERTY(glm::vec3 gravity READ getGravity WRITE setGravity)
|
//TODO: make gravity feature work Q_PROPERTY(glm::vec3 gravity READ getGravity WRITE setGravity)
|
||||||
|
|
||||||
Q_PROPERTY(glm::vec3 leftHandPosition READ getLeftHandPosition)
|
Q_PROPERTY(glm::vec3 leftHandPosition READ getLeftHandPosition)
|
||||||
|
@ -1380,6 +1388,14 @@ private:
|
||||||
virtual bool shouldRenderHead(const RenderArgs* renderArgs) const override;
|
virtual bool shouldRenderHead(const RenderArgs* renderArgs) const override;
|
||||||
void setShouldRenderLocally(bool shouldRender) { _shouldRender = shouldRender; setEnableMeshVisible(shouldRender); }
|
void setShouldRenderLocally(bool shouldRender) { _shouldRender = shouldRender; setEnableMeshVisible(shouldRender); }
|
||||||
bool getShouldRenderLocally() const { return _shouldRender; }
|
bool getShouldRenderLocally() const { return _shouldRender; }
|
||||||
|
void setHasScriptedBlendshapes(bool hasScriptedBlendshapes);
|
||||||
|
bool getHasScriptedBlendshapes() const override { return _hasScriptedBlendShapes; }
|
||||||
|
void setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement);
|
||||||
|
bool getHasProceduralBlinkFaceMovement() const override { return _headData->getHasProceduralBlinkFaceMovement(); }
|
||||||
|
void setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement);
|
||||||
|
bool getHasProceduralEyeFaceMovement() const override { return _headData->getHasProceduralEyeFaceMovement(); }
|
||||||
|
void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
|
||||||
|
bool getHasAudioEnabledFaceMovement() const override { return _headData->getHasAudioEnabledFaceMovement(); }
|
||||||
bool isMyAvatar() const override { return true; }
|
bool isMyAvatar() const override { return true; }
|
||||||
virtual int parseDataFromBuffer(const QByteArray& buffer) override;
|
virtual int parseDataFromBuffer(const QByteArray& buffer) override;
|
||||||
virtual glm::vec3 getSkeletonPosition() const override;
|
virtual glm::vec3 getSkeletonPosition() const override;
|
||||||
|
@ -1488,6 +1504,7 @@ private:
|
||||||
bool _hmdRollControlEnabled { true };
|
bool _hmdRollControlEnabled { true };
|
||||||
float _hmdRollControlDeadZone { ROLL_CONTROL_DEAD_ZONE_DEFAULT };
|
float _hmdRollControlDeadZone { ROLL_CONTROL_DEAD_ZONE_DEFAULT };
|
||||||
float _hmdRollControlRate { ROLL_CONTROL_RATE_DEFAULT };
|
float _hmdRollControlRate { ROLL_CONTROL_RATE_DEFAULT };
|
||||||
|
std::atomic<bool> _hasScriptedBlendShapes { false };
|
||||||
|
|
||||||
// working copy -- see AvatarData for thread-safe _sensorToWorldMatrixCache, used for outward facing access
|
// working copy -- see AvatarData for thread-safe _sensorToWorldMatrixCache, used for outward facing access
|
||||||
glm::mat4 _sensorToWorldMatrix { glm::mat4() };
|
glm::mat4 _sensorToWorldMatrix { glm::mat4() };
|
||||||
|
|
|
@ -46,32 +46,18 @@ void MyHead::simulate(float deltaTime) {
|
||||||
auto player = DependencyManager::get<recording::Deck>();
|
auto player = DependencyManager::get<recording::Deck>();
|
||||||
// Only use face trackers when not playing back a recording.
|
// Only use face trackers when not playing back a recording.
|
||||||
if (!player->isPlaying()) {
|
if (!player->isPlaying()) {
|
||||||
FaceTracker* faceTracker = qApp->getActiveFaceTracker();
|
auto faceTracker = qApp->getActiveFaceTracker();
|
||||||
_isFaceTrackerConnected = faceTracker != nullptr && !faceTracker->isMuted();
|
const bool hasActualFaceTrackerConnected = faceTracker && !faceTracker->isMuted();
|
||||||
|
_isFaceTrackerConnected = hasActualFaceTrackerConnected || _owningAvatar->getHasScriptedBlendshapes();
|
||||||
if (_isFaceTrackerConnected) {
|
if (_isFaceTrackerConnected) {
|
||||||
_transientBlendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
|
if (hasActualFaceTrackerConnected) {
|
||||||
|
_blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
|
||||||
if (typeid(*faceTracker) == typeid(DdeFaceTracker)) {
|
|
||||||
|
|
||||||
if (Menu::getInstance()->isOptionChecked(MenuOption::UseAudioForMouth)) {
|
|
||||||
calculateMouthShapes(deltaTime);
|
|
||||||
|
|
||||||
const int JAW_OPEN_BLENDSHAPE = 21;
|
|
||||||
const int MMMM_BLENDSHAPE = 34;
|
|
||||||
const int FUNNEL_BLENDSHAPE = 40;
|
|
||||||
const int SMILE_LEFT_BLENDSHAPE = 28;
|
|
||||||
const int SMILE_RIGHT_BLENDSHAPE = 29;
|
|
||||||
_transientBlendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen;
|
|
||||||
_transientBlendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4;
|
|
||||||
_transientBlendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4;
|
|
||||||
_transientBlendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2;
|
|
||||||
_transientBlendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3;
|
|
||||||
}
|
|
||||||
applyEyelidOffset(getFinalOrientationInWorldFrame());
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
auto eyeTracker = DependencyManager::get<EyeTracker>();
|
auto eyeTracker = DependencyManager::get<EyeTracker>();
|
||||||
_isEyeTrackerConnected = eyeTracker->isTracking();
|
_isEyeTrackerConnected = eyeTracker->isTracking();
|
||||||
|
// if eye tracker is connected we should get the data here.
|
||||||
}
|
}
|
||||||
Parent::simulate(deltaTime);
|
Parent::simulate(deltaTime);
|
||||||
}
|
}
|
||||||
|
|
|
@ -81,6 +81,13 @@ int main(int argc, const char* argv[]) {
|
||||||
|
|
||||||
// Instance UserActivityLogger now that the settings are loaded
|
// Instance UserActivityLogger now that the settings are loaded
|
||||||
auto& ual = UserActivityLogger::getInstance();
|
auto& ual = UserActivityLogger::getInstance();
|
||||||
|
// once the settings have been loaded, check if we need to flip the default for UserActivityLogger
|
||||||
|
if (!ual.isDisabledSettingSet()) {
|
||||||
|
// the user activity logger is opt-out for Interface
|
||||||
|
// but it's defaulted to disabled for other targets
|
||||||
|
// so we need to enable it here if it has never been disabled by the user
|
||||||
|
ual.disable(false);
|
||||||
|
}
|
||||||
qDebug() << "UserActivityLogger is enabled:" << ual.isEnabled();
|
qDebug() << "UserActivityLogger is enabled:" << ual.isEnabled();
|
||||||
|
|
||||||
if (ual.isEnabled()) {
|
if (ual.isEnabled()) {
|
||||||
|
|
|
@ -21,19 +21,31 @@ UpdateDialog::UpdateDialog(QQuickItem* parent) :
|
||||||
OffscreenQmlDialog(parent)
|
OffscreenQmlDialog(parent)
|
||||||
{
|
{
|
||||||
auto applicationUpdater = DependencyManager::get<AutoUpdater>();
|
auto applicationUpdater = DependencyManager::get<AutoUpdater>();
|
||||||
int currentVersion = QCoreApplication::applicationVersion().toInt();
|
if (applicationUpdater) {
|
||||||
int latestVersion = applicationUpdater.data()->getBuildData().lastKey();
|
|
||||||
_updateAvailableDetails = "v" + QString::number(latestVersion) + " released on "
|
|
||||||
+ QString(applicationUpdater.data()->getBuildData()[latestVersion]["releaseTime"]).replace(" ", " ");
|
|
||||||
|
|
||||||
_releaseNotes = "";
|
auto buildData = applicationUpdater.data()->getBuildData();
|
||||||
for (int i = latestVersion; i > currentVersion; i--) {
|
ApplicationVersion latestVersion = buildData.lastKey();
|
||||||
if (applicationUpdater.data()->getBuildData().contains(i)) {
|
_updateAvailableDetails = "v" + latestVersion.versionString + " released on "
|
||||||
QString releaseNotes = applicationUpdater.data()->getBuildData()[i]["releaseNotes"];
|
+ QString(buildData[latestVersion]["releaseTime"]).replace(" ", " ");
|
||||||
releaseNotes.remove("<br />");
|
|
||||||
releaseNotes.remove(QRegExp("^\n+"));
|
_releaseNotes = "";
|
||||||
_releaseNotes += "\n" + QString().sprintf("%d", i) + "\n" + releaseNotes + "\n";
|
|
||||||
|
auto it = buildData.end();
|
||||||
|
while (it != buildData.begin()) {
|
||||||
|
--it;
|
||||||
|
|
||||||
|
if (applicationUpdater->getCurrentVersion() < it.key()) {
|
||||||
|
// grab the release notes for this later version
|
||||||
|
QString releaseNotes = it.value()["releaseNotes"];
|
||||||
|
releaseNotes.remove("<br />");
|
||||||
|
releaseNotes.remove(QRegExp("^\n+"));
|
||||||
|
_releaseNotes += "\n" + it.key().versionString + "\n" + releaseNotes + "\n";
|
||||||
|
} else {
|
||||||
|
break;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -47,5 +59,5 @@ const QString& UpdateDialog::releaseNotes() const {
|
||||||
|
|
||||||
void UpdateDialog::triggerUpgrade() {
|
void UpdateDialog::triggerUpgrade() {
|
||||||
auto applicationUpdater = DependencyManager::get<AutoUpdater>();
|
auto applicationUpdater = DependencyManager::get<AutoUpdater>();
|
||||||
applicationUpdater.data()->performAutoUpdate(applicationUpdater.data()->getBuildData().lastKey());
|
applicationUpdater.data()->openLatestUpdateURL();
|
||||||
}
|
}
|
||||||
|
|
|
@ -11,13 +11,16 @@
|
||||||
|
|
||||||
#include "AutoUpdater.h"
|
#include "AutoUpdater.h"
|
||||||
|
|
||||||
#include <BuildInfo.h>
|
|
||||||
|
|
||||||
#include <NetworkAccessManager.h>
|
|
||||||
#include <SharedUtil.h>
|
|
||||||
#include <unordered_map>
|
#include <unordered_map>
|
||||||
|
|
||||||
AutoUpdater::AutoUpdater() {
|
#include <ApplicationVersion.h>
|
||||||
|
#include <BuildInfo.h>
|
||||||
|
#include <NetworkAccessManager.h>
|
||||||
|
#include <SharedUtil.h>
|
||||||
|
|
||||||
|
AutoUpdater::AutoUpdater() :
|
||||||
|
_currentVersion(BuildInfo::BUILD_TYPE == BuildInfo::BuildType::Stable ? BuildInfo::VERSION : BuildInfo::BUILD_NUMBER)
|
||||||
|
{
|
||||||
#if defined Q_OS_WIN32
|
#if defined Q_OS_WIN32
|
||||||
_operatingSystem = "windows";
|
_operatingSystem = "windows";
|
||||||
#elif defined Q_OS_MAC
|
#elif defined Q_OS_MAC
|
||||||
|
@ -33,9 +36,22 @@ void AutoUpdater::checkForUpdate() {
|
||||||
this->getLatestVersionData();
|
this->getLatestVersionData();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const QUrl BUILDS_XML_URL("https://highfidelity.com/builds.xml");
|
||||||
|
const QUrl MASTER_BUILDS_XML_URL("https://highfidelity.com/dev-builds.xml");
|
||||||
|
|
||||||
void AutoUpdater::getLatestVersionData() {
|
void AutoUpdater::getLatestVersionData() {
|
||||||
QNetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance();
|
QNetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance();
|
||||||
QNetworkRequest latestVersionRequest(BUILDS_XML_URL);
|
|
||||||
|
QUrl buildsURL;
|
||||||
|
|
||||||
|
if (BuildInfo::BUILD_TYPE == BuildInfo::BuildType::Stable) {
|
||||||
|
buildsURL = BUILDS_XML_URL;
|
||||||
|
} else if (BuildInfo::BUILD_TYPE == BuildInfo::BuildType::Master) {
|
||||||
|
buildsURL = MASTER_BUILDS_XML_URL;
|
||||||
|
}
|
||||||
|
|
||||||
|
QNetworkRequest latestVersionRequest(buildsURL);
|
||||||
|
|
||||||
latestVersionRequest.setAttribute(QNetworkRequest::FollowRedirectsAttribute, true);
|
latestVersionRequest.setAttribute(QNetworkRequest::FollowRedirectsAttribute, true);
|
||||||
latestVersionRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
|
latestVersionRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
|
||||||
QNetworkReply* reply = networkAccessManager.get(latestVersionRequest);
|
QNetworkReply* reply = networkAccessManager.get(latestVersionRequest);
|
||||||
|
@ -52,12 +68,22 @@ void AutoUpdater::parseLatestVersionData() {
|
||||||
QString clientOnly;
|
QString clientOnly;
|
||||||
};
|
};
|
||||||
|
|
||||||
int version { 0 };
|
QString version;
|
||||||
QString downloadUrl;
|
QString downloadUrl;
|
||||||
QString releaseTime;
|
QString releaseTime;
|
||||||
QString releaseNotes;
|
QString releaseNotes;
|
||||||
QString commitSha;
|
QString commitSha;
|
||||||
QString pullRequestNumber;
|
QString pullRequestNumber;
|
||||||
|
|
||||||
|
QString versionKey;
|
||||||
|
|
||||||
|
// stable builds look at the stable_version node (semantic version)
|
||||||
|
// master builds look at the version node (build number)
|
||||||
|
if (BuildInfo::BUILD_TYPE == BuildInfo::BuildType::Stable) {
|
||||||
|
versionKey = "stable_version";
|
||||||
|
} else if (BuildInfo::BUILD_TYPE == BuildInfo::BuildType::Master) {
|
||||||
|
versionKey = "version";
|
||||||
|
}
|
||||||
|
|
||||||
while (xml.readNextStartElement()) {
|
while (xml.readNextStartElement()) {
|
||||||
if (xml.name() == "projects") {
|
if (xml.name() == "projects") {
|
||||||
|
@ -77,8 +103,8 @@ void AutoUpdater::parseLatestVersionData() {
|
||||||
QHash<QString, InstallerURLs> campaignInstallers;
|
QHash<QString, InstallerURLs> campaignInstallers;
|
||||||
|
|
||||||
while (xml.readNextStartElement()) {
|
while (xml.readNextStartElement()) {
|
||||||
if (xml.name() == "version") {
|
if (xml.name() == versionKey) {
|
||||||
version = xml.readElementText().toInt();
|
version = xml.readElementText();
|
||||||
} else if (xml.name() == "url") {
|
} else if (xml.name() == "url") {
|
||||||
downloadUrl = xml.readElementText();
|
downloadUrl = xml.readElementText();
|
||||||
} else if (xml.name() == "installers") {
|
} else if (xml.name() == "installers") {
|
||||||
|
@ -159,31 +185,31 @@ void AutoUpdater::parseLatestVersionData() {
|
||||||
}
|
}
|
||||||
|
|
||||||
void AutoUpdater::checkVersionAndNotify() {
|
void AutoUpdater::checkVersionAndNotify() {
|
||||||
if (BuildInfo::BUILD_TYPE != BuildInfo::BuildType::Stable || _builds.empty()) {
|
if (_builds.empty()) {
|
||||||
// No version checking is required in nightly/PR/dev builds or when no build
|
// no build data was found for this platform
|
||||||
// data was found for the platform
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
int latestVersionAvailable = _builds.lastKey();
|
|
||||||
if (QCoreApplication::applicationVersion().toInt() < latestVersionAvailable) {
|
qDebug() << "Checking if update version" << _builds.lastKey().versionString
|
||||||
|
<< "is newer than current version" << _currentVersion.versionString;
|
||||||
|
|
||||||
|
if (_builds.lastKey() > _currentVersion) {
|
||||||
emit newVersionIsAvailable();
|
emit newVersionIsAvailable();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void AutoUpdater::performAutoUpdate(int version) {
|
void AutoUpdater::openLatestUpdateURL() {
|
||||||
// NOTE: This is not yet auto updating - however this is a checkpoint towards that end
|
const QMap<QString, QString>& chosenVersion = _builds.last();
|
||||||
// Next PR will handle the automatic download, upgrading and application restart
|
|
||||||
const QMap<QString, QString>& chosenVersion = _builds.value(version);
|
|
||||||
const QUrl& downloadUrl = chosenVersion.value("downloadUrl");
|
const QUrl& downloadUrl = chosenVersion.value("downloadUrl");
|
||||||
QDesktopServices::openUrl(downloadUrl);
|
QDesktopServices::openUrl(downloadUrl);
|
||||||
QCoreApplication::quit();
|
QCoreApplication::quit();
|
||||||
}
|
}
|
||||||
|
|
||||||
void AutoUpdater::downloadUpdateVersion(int version) {
|
void AutoUpdater::downloadUpdateVersion(const QString& version) {
|
||||||
emit newVersionIsDownloaded();
|
emit newVersionIsDownloaded();
|
||||||
}
|
}
|
||||||
|
|
||||||
void AutoUpdater::appendBuildData(int versionNumber,
|
void AutoUpdater::appendBuildData(const QString& versionNumber,
|
||||||
const QString& downloadURL,
|
const QString& downloadURL,
|
||||||
const QString& releaseTime,
|
const QString& releaseTime,
|
||||||
const QString& releaseNotes,
|
const QString& releaseNotes,
|
||||||
|
@ -194,6 +220,6 @@ void AutoUpdater::appendBuildData(int versionNumber,
|
||||||
thisBuildDetails.insert("releaseTime", releaseTime);
|
thisBuildDetails.insert("releaseTime", releaseTime);
|
||||||
thisBuildDetails.insert("releaseNotes", releaseNotes);
|
thisBuildDetails.insert("releaseNotes", releaseNotes);
|
||||||
thisBuildDetails.insert("pullRequestNumber", pullRequestNumber);
|
thisBuildDetails.insert("pullRequestNumber", pullRequestNumber);
|
||||||
_builds.insert(versionNumber, thisBuildDetails);
|
_builds.insert(ApplicationVersion(versionNumber), thisBuildDetails);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -26,10 +26,9 @@
|
||||||
#include <QtNetwork/QNetworkReply>
|
#include <QtNetwork/QNetworkReply>
|
||||||
#include <QtNetwork/QNetworkRequest>
|
#include <QtNetwork/QNetworkRequest>
|
||||||
|
|
||||||
|
#include <ApplicationVersion.h>
|
||||||
#include <DependencyManager.h>
|
#include <DependencyManager.h>
|
||||||
|
|
||||||
const QUrl BUILDS_XML_URL("https://highfidelity.com/builds.xml");
|
|
||||||
|
|
||||||
class AutoUpdater : public QObject, public Dependency {
|
class AutoUpdater : public QObject, public Dependency {
|
||||||
Q_OBJECT
|
Q_OBJECT
|
||||||
SINGLETON_DEPENDENCY
|
SINGLETON_DEPENDENCY
|
||||||
|
@ -43,25 +42,29 @@ public:
|
||||||
};
|
};
|
||||||
|
|
||||||
void checkForUpdate();
|
void checkForUpdate();
|
||||||
const QMap<int, QMap<QString, QString>>& getBuildData() { return _builds; }
|
const QMap<ApplicationVersion, QMap<QString, QString>>& getBuildData() { return _builds; }
|
||||||
void performAutoUpdate(int version);
|
void openLatestUpdateURL();
|
||||||
void setInstallerType(InstallerType type) { _installerType = type; }
|
void setInstallerType(InstallerType type) { _installerType = type; }
|
||||||
void setInstallerCampaign(QString campaign) { _installerCampaign = campaign; }
|
void setInstallerCampaign(QString campaign) { _installerCampaign = campaign; }
|
||||||
|
|
||||||
|
const ApplicationVersion& getCurrentVersion() const { return _currentVersion; }
|
||||||
|
|
||||||
signals:
|
signals:
|
||||||
void latestVersionDataParsed();
|
void latestVersionDataParsed();
|
||||||
void newVersionIsAvailable();
|
void newVersionIsAvailable();
|
||||||
void newVersionIsDownloaded();
|
void newVersionIsDownloaded();
|
||||||
|
|
||||||
private:
|
private:
|
||||||
QMap<int, QMap<QString, QString>> _builds;
|
QMap<ApplicationVersion, QMap<QString, QString>> _builds;
|
||||||
QString _operatingSystem;
|
QString _operatingSystem;
|
||||||
InstallerType _installerType { InstallerType::FULL };
|
InstallerType _installerType { InstallerType::FULL };
|
||||||
QString _installerCampaign { "" };
|
QString _installerCampaign { "" };
|
||||||
|
|
||||||
|
ApplicationVersion _currentVersion;
|
||||||
|
|
||||||
void getLatestVersionData();
|
void getLatestVersionData();
|
||||||
void downloadUpdateVersion(int version);
|
void downloadUpdateVersion(const QString& version);
|
||||||
void appendBuildData(int versionNumber,
|
void appendBuildData(const QString& versionNumber,
|
||||||
const QString& downloadURL,
|
const QString& downloadURL,
|
||||||
const QString& releaseTime,
|
const QString& releaseTime,
|
||||||
const QString& releaseNotes,
|
const QString& releaseNotes,
|
||||||
|
|
|
@ -861,6 +861,7 @@ bool Avatar::shouldRenderHead(const RenderArgs* renderArgs) const {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// virtual
|
||||||
void Avatar::simulateAttachments(float deltaTime) {
|
void Avatar::simulateAttachments(float deltaTime) {
|
||||||
assert(_attachmentModels.size() == _attachmentModelsTexturesLoaded.size());
|
assert(_attachmentModels.size() == _attachmentModelsTexturesLoaded.size());
|
||||||
PerformanceTimer perfTimer("attachments");
|
PerformanceTimer perfTimer("attachments");
|
||||||
|
@ -1543,14 +1544,12 @@ void Avatar::updateDisplayNameAlpha(bool showDisplayName) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// virtual
|
||||||
void Avatar::computeShapeInfo(ShapeInfo& shapeInfo) {
|
void Avatar::computeShapeInfo(ShapeInfo& shapeInfo) {
|
||||||
float uniformScale = getModelScale();
|
float uniformScale = getModelScale();
|
||||||
float radius = uniformScale * _skeletonModel->getBoundingCapsuleRadius();
|
shapeInfo.setCapsuleY(uniformScale * _skeletonModel->getBoundingCapsuleRadius(),
|
||||||
float height = uniformScale * _skeletonModel->getBoundingCapsuleHeight();
|
0.5f * uniformScale * _skeletonModel->getBoundingCapsuleHeight());
|
||||||
shapeInfo.setCapsuleY(radius, 0.5f * height);
|
shapeInfo.setOffset(uniformScale * _skeletonModel->getBoundingCapsuleOffset());
|
||||||
|
|
||||||
glm::vec3 offset = uniformScale * _skeletonModel->getBoundingCapsuleOffset();
|
|
||||||
shapeInfo.setOffset(offset);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
void Avatar::getCapsule(glm::vec3& start, glm::vec3& end, float& radius) {
|
void Avatar::getCapsule(glm::vec3& start, glm::vec3& end, float& radius) {
|
||||||
|
@ -1573,8 +1572,9 @@ float Avatar::computeMass() {
|
||||||
return _density * TWO_PI * radius * radius * (glm::length(end - start) + 2.0f * radius / 3.0f);
|
return _density * TWO_PI * radius * radius * (glm::length(end - start) + 2.0f * radius / 3.0f);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// virtual
|
||||||
void Avatar::rebuildCollisionShape() {
|
void Avatar::rebuildCollisionShape() {
|
||||||
addPhysicsFlags(Simulation::DIRTY_SHAPE | Simulation::DIRTY_MASS);
|
addPhysicsFlags(Simulation::DIRTY_SHAPE);
|
||||||
}
|
}
|
||||||
|
|
||||||
void Avatar::setPhysicsCallback(AvatarPhysicsCallback cb) {
|
void Avatar::setPhysicsCallback(AvatarPhysicsCallback cb) {
|
||||||
|
|
|
@ -20,6 +20,7 @@
|
||||||
#include <trackers/FaceTracker.h>
|
#include <trackers/FaceTracker.h>
|
||||||
#include <trackers/EyeTracker.h>
|
#include <trackers/EyeTracker.h>
|
||||||
#include <Rig.h>
|
#include <Rig.h>
|
||||||
|
#include "Logging.h"
|
||||||
|
|
||||||
#include "Avatar.h"
|
#include "Avatar.h"
|
||||||
|
|
||||||
|
@ -58,25 +59,30 @@ void Head::simulate(float deltaTime) {
|
||||||
_longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));
|
_longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!_isFaceTrackerConnected) {
|
if (!_isEyeTrackerConnected) {
|
||||||
if (!_isEyeTrackerConnected) {
|
// Update eye saccades
|
||||||
// Update eye saccades
|
const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
|
||||||
const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
|
const float AVERAGE_SACCADE_INTERVAL = 6.0f;
|
||||||
const float AVERAGE_SACCADE_INTERVAL = 6.0f;
|
const float MICROSACCADE_MAGNITUDE = 0.002f;
|
||||||
const float MICROSACCADE_MAGNITUDE = 0.002f;
|
const float SACCADE_MAGNITUDE = 0.04f;
|
||||||
const float SACCADE_MAGNITUDE = 0.04f;
|
const float NOMINAL_FRAME_RATE = 60.0f;
|
||||||
const float NOMINAL_FRAME_RATE = 60.0f;
|
|
||||||
|
|
||||||
if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
|
if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
|
||||||
_saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
|
_saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
|
||||||
} else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
|
} else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
|
||||||
_saccadeTarget = SACCADE_MAGNITUDE * randVector();
|
_saccadeTarget = SACCADE_MAGNITUDE * randVector();
|
||||||
}
|
|
||||||
_saccade += (_saccadeTarget - _saccade) * pow(0.5f, NOMINAL_FRAME_RATE * deltaTime);
|
|
||||||
} else {
|
|
||||||
_saccade = glm::vec3();
|
|
||||||
}
|
}
|
||||||
|
_saccade += (_saccadeTarget - _saccade) * pow(0.5f, NOMINAL_FRAME_RATE * deltaTime);
|
||||||
|
} else {
|
||||||
|
_saccade = glm::vec3();
|
||||||
|
}
|
||||||
|
|
||||||
|
const float BLINK_SPEED = 10.0f;
|
||||||
|
const float BLINK_SPEED_VARIABILITY = 1.0f;
|
||||||
|
const float BLINK_START_VARIABILITY = 0.25f;
|
||||||
|
const float FULLY_OPEN = 0.0f;
|
||||||
|
const float FULLY_CLOSED = 1.0f;
|
||||||
|
if (getHasProceduralBlinkFaceMovement()) {
|
||||||
// Detect transition from talking to not; force blink after that and a delay
|
// Detect transition from talking to not; force blink after that and a delay
|
||||||
bool forceBlink = false;
|
bool forceBlink = false;
|
||||||
const float TALKING_LOUDNESS = 100.0f;
|
const float TALKING_LOUDNESS = 100.0f;
|
||||||
|
@ -88,29 +94,12 @@ void Head::simulate(float deltaTime) {
|
||||||
forceBlink = true;
|
forceBlink = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update audio attack data for facial animation (eyebrows and mouth)
|
|
||||||
float audioAttackAveragingRate = (10.0f - deltaTime * NORMAL_HZ) / 10.0f; // --> 0.9 at 60 Hz
|
|
||||||
_audioAttack = audioAttackAveragingRate * _audioAttack +
|
|
||||||
(1.0f - audioAttackAveragingRate) * fabs((audioLoudness - _longTermAverageLoudness) - _lastLoudness);
|
|
||||||
_lastLoudness = (audioLoudness - _longTermAverageLoudness);
|
|
||||||
|
|
||||||
const float BROW_LIFT_THRESHOLD = 100.0f;
|
|
||||||
if (_audioAttack > BROW_LIFT_THRESHOLD) {
|
|
||||||
_browAudioLift += sqrtf(_audioAttack) * 0.01f;
|
|
||||||
}
|
|
||||||
_browAudioLift = glm::clamp(_browAudioLift *= 0.7f, 0.0f, 1.0f);
|
|
||||||
|
|
||||||
const float BLINK_SPEED = 10.0f;
|
|
||||||
const float BLINK_SPEED_VARIABILITY = 1.0f;
|
|
||||||
const float BLINK_START_VARIABILITY = 0.25f;
|
|
||||||
const float FULLY_OPEN = 0.0f;
|
|
||||||
const float FULLY_CLOSED = 1.0f;
|
|
||||||
if (_leftEyeBlinkVelocity == 0.0f && _rightEyeBlinkVelocity == 0.0f) {
|
if (_leftEyeBlinkVelocity == 0.0f && _rightEyeBlinkVelocity == 0.0f) {
|
||||||
// no blinking when brows are raised; blink less with increasing loudness
|
// no blinking when brows are raised; blink less with increasing loudness
|
||||||
const float BASE_BLINK_RATE = 15.0f / 60.0f;
|
const float BASE_BLINK_RATE = 15.0f / 60.0f;
|
||||||
const float ROOT_LOUDNESS_TO_BLINK_INTERVAL = 0.25f;
|
const float ROOT_LOUDNESS_TO_BLINK_INTERVAL = 0.25f;
|
||||||
if (forceBlink || (_browAudioLift < EPSILON && shouldDo(glm::max(1.0f, sqrt(fabs(_averageLoudness - _longTermAverageLoudness)) *
|
if (forceBlink || (_browAudioLift < EPSILON && shouldDo(glm::max(1.0f, sqrt(fabs(_averageLoudness - _longTermAverageLoudness)) *
|
||||||
ROOT_LOUDNESS_TO_BLINK_INTERVAL) / BASE_BLINK_RATE, deltaTime))) {
|
ROOT_LOUDNESS_TO_BLINK_INTERVAL) / BASE_BLINK_RATE, deltaTime))) {
|
||||||
_leftEyeBlinkVelocity = BLINK_SPEED + randFloat() * BLINK_SPEED_VARIABILITY;
|
_leftEyeBlinkVelocity = BLINK_SPEED + randFloat() * BLINK_SPEED_VARIABILITY;
|
||||||
_rightEyeBlinkVelocity = BLINK_SPEED + randFloat() * BLINK_SPEED_VARIABILITY;
|
_rightEyeBlinkVelocity = BLINK_SPEED + randFloat() * BLINK_SPEED_VARIABILITY;
|
||||||
if (randFloat() < 0.5f) {
|
if (randFloat() < 0.5f) {
|
||||||
|
@ -136,22 +125,45 @@ void Head::simulate(float deltaTime) {
|
||||||
_rightEyeBlinkVelocity = 0.0f;
|
_rightEyeBlinkVelocity = 0.0f;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
_rightEyeBlink = FULLY_OPEN;
|
||||||
|
_leftEyeBlink = FULLY_OPEN;
|
||||||
|
}
|
||||||
|
|
||||||
// use data to update fake Faceshift blendshape coefficients
|
// use data to update fake Faceshift blendshape coefficients
|
||||||
|
if (getHasAudioEnabledFaceMovement()) {
|
||||||
|
// Update audio attack data for facial animation (eyebrows and mouth)
|
||||||
|
float audioAttackAveragingRate = (10.0f - deltaTime * NORMAL_HZ) / 10.0f; // --> 0.9 at 60 Hz
|
||||||
|
_audioAttack = audioAttackAveragingRate * _audioAttack +
|
||||||
|
(1.0f - audioAttackAveragingRate) * fabs((audioLoudness - _longTermAverageLoudness) - _lastLoudness);
|
||||||
|
_lastLoudness = (audioLoudness - _longTermAverageLoudness);
|
||||||
|
const float BROW_LIFT_THRESHOLD = 100.0f;
|
||||||
|
if (_audioAttack > BROW_LIFT_THRESHOLD) {
|
||||||
|
_browAudioLift += sqrtf(_audioAttack) * 0.01f;
|
||||||
|
}
|
||||||
|
_browAudioLift = glm::clamp(_browAudioLift *= 0.7f, 0.0f, 1.0f);
|
||||||
calculateMouthShapes(deltaTime);
|
calculateMouthShapes(deltaTime);
|
||||||
FaceTracker::updateFakeCoefficients(_leftEyeBlink,
|
|
||||||
_rightEyeBlink,
|
|
||||||
_browAudioLift,
|
|
||||||
_audioJawOpen,
|
|
||||||
_mouth2,
|
|
||||||
_mouth3,
|
|
||||||
_mouth4,
|
|
||||||
_transientBlendshapeCoefficients);
|
|
||||||
|
|
||||||
applyEyelidOffset(getOrientation());
|
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
_saccade = glm::vec3();
|
_audioJawOpen = 0.0f;
|
||||||
|
_browAudioLift = 0.0f;
|
||||||
|
_mouth2 = 0.0f;
|
||||||
|
_mouth3 = 0.0f;
|
||||||
|
_mouth4 = 0.0f;
|
||||||
|
_mouthTime = 0.0f;
|
||||||
|
}
|
||||||
|
|
||||||
|
FaceTracker::updateFakeCoefficients(_leftEyeBlink,
|
||||||
|
_rightEyeBlink,
|
||||||
|
_browAudioLift,
|
||||||
|
_audioJawOpen,
|
||||||
|
_mouth2,
|
||||||
|
_mouth3,
|
||||||
|
_mouth4,
|
||||||
|
_transientBlendshapeCoefficients);
|
||||||
|
|
||||||
|
if (getHasProceduralEyeFaceMovement()) {
|
||||||
|
applyEyelidOffset(getOrientation());
|
||||||
}
|
}
|
||||||
|
|
||||||
_leftEyePosition = _rightEyePosition = getPosition();
|
_leftEyePosition = _rightEyePosition = getPosition();
|
||||||
|
|
|
@ -300,14 +300,15 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
||||||
tranlationChangedSince(lastSentTime) ||
|
tranlationChangedSince(lastSentTime) ||
|
||||||
parentInfoChangedSince(lastSentTime));
|
parentInfoChangedSince(lastSentTime));
|
||||||
|
|
||||||
hasFaceTrackerInfo = !dropFaceTracking && hasFaceTracker() && (sendAll || faceTrackerInfoChangedSince(lastSentTime));
|
hasFaceTrackerInfo = !dropFaceTracking && (hasFaceTracker() || getHasScriptedBlendshapes()) &&
|
||||||
|
(sendAll || faceTrackerInfoChangedSince(lastSentTime));
|
||||||
hasJointData = sendAll || !sendMinimum;
|
hasJointData = sendAll || !sendMinimum;
|
||||||
hasJointDefaultPoseFlags = hasJointData;
|
hasJointDefaultPoseFlags = hasJointData;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
const size_t byteArraySize = AvatarDataPacket::MAX_CONSTANT_HEADER_SIZE +
|
const size_t byteArraySize = AvatarDataPacket::MAX_CONSTANT_HEADER_SIZE +
|
||||||
(hasFaceTrackerInfo ? AvatarDataPacket::maxFaceTrackerInfoSize(_headData->getNumSummedBlendshapeCoefficients()) : 0) +
|
(hasFaceTrackerInfo ? AvatarDataPacket::maxFaceTrackerInfoSize(_headData->getBlendshapeCoefficients().size()) : 0) +
|
||||||
(hasJointData ? AvatarDataPacket::maxJointDataSize(_jointData.size()) : 0) +
|
(hasJointData ? AvatarDataPacket::maxJointDataSize(_jointData.size()) : 0) +
|
||||||
(hasJointDefaultPoseFlags ? AvatarDataPacket::maxJointDefaultPoseFlagsSize(_jointData.size()) : 0);
|
(hasJointDefaultPoseFlags ? AvatarDataPacket::maxJointDefaultPoseFlagsSize(_jointData.size()) : 0);
|
||||||
|
|
||||||
|
@ -442,7 +443,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
||||||
auto startSection = destinationBuffer;
|
auto startSection = destinationBuffer;
|
||||||
auto data = reinterpret_cast<AvatarDataPacket::AdditionalFlags*>(destinationBuffer);
|
auto data = reinterpret_cast<AvatarDataPacket::AdditionalFlags*>(destinationBuffer);
|
||||||
|
|
||||||
uint8_t flags { 0 };
|
uint16_t flags { 0 };
|
||||||
|
|
||||||
setSemiNibbleAt(flags, KEY_STATE_START_BIT, _keyState);
|
setSemiNibbleAt(flags, KEY_STATE_START_BIT, _keyState);
|
||||||
|
|
||||||
|
@ -450,20 +451,33 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
||||||
bool isFingerPointing = _handState & IS_FINGER_POINTING_FLAG;
|
bool isFingerPointing = _handState & IS_FINGER_POINTING_FLAG;
|
||||||
setSemiNibbleAt(flags, HAND_STATE_START_BIT, _handState & ~IS_FINGER_POINTING_FLAG);
|
setSemiNibbleAt(flags, HAND_STATE_START_BIT, _handState & ~IS_FINGER_POINTING_FLAG);
|
||||||
if (isFingerPointing) {
|
if (isFingerPointing) {
|
||||||
setAtBit(flags, HAND_STATE_FINGER_POINTING_BIT);
|
setAtBit16(flags, HAND_STATE_FINGER_POINTING_BIT);
|
||||||
}
|
}
|
||||||
// face tracker state
|
// face tracker state
|
||||||
if (_headData->_isFaceTrackerConnected) {
|
if (_headData->_isFaceTrackerConnected) {
|
||||||
setAtBit(flags, IS_FACE_TRACKER_CONNECTED);
|
setAtBit16(flags, IS_FACE_TRACKER_CONNECTED);
|
||||||
}
|
}
|
||||||
// eye tracker state
|
// eye tracker state
|
||||||
if (_headData->_isEyeTrackerConnected) {
|
if (_headData->_isEyeTrackerConnected) {
|
||||||
setAtBit(flags, IS_EYE_TRACKER_CONNECTED);
|
setAtBit16(flags, IS_EYE_TRACKER_CONNECTED);
|
||||||
}
|
}
|
||||||
// referential state
|
// referential state
|
||||||
if (!parentID.isNull()) {
|
if (!parentID.isNull()) {
|
||||||
setAtBit(flags, HAS_REFERENTIAL);
|
setAtBit16(flags, HAS_REFERENTIAL);
|
||||||
}
|
}
|
||||||
|
// audio face movement
|
||||||
|
if (_headData->getHasAudioEnabledFaceMovement()) {
|
||||||
|
setAtBit16(flags, AUDIO_ENABLED_FACE_MOVEMENT);
|
||||||
|
}
|
||||||
|
// procedural eye face movement
|
||||||
|
if (_headData->getHasProceduralEyeFaceMovement()) {
|
||||||
|
setAtBit16(flags, PROCEDURAL_EYE_FACE_MOVEMENT);
|
||||||
|
}
|
||||||
|
// procedural blink face movement
|
||||||
|
if (_headData->getHasProceduralBlinkFaceMovement()) {
|
||||||
|
setAtBit16(flags, PROCEDURAL_BLINK_FACE_MOVEMENT);
|
||||||
|
}
|
||||||
|
|
||||||
data->flags = flags;
|
data->flags = flags;
|
||||||
destinationBuffer += sizeof(AvatarDataPacket::AdditionalFlags);
|
destinationBuffer += sizeof(AvatarDataPacket::AdditionalFlags);
|
||||||
|
|
||||||
|
@ -506,8 +520,9 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
||||||
if (hasFaceTrackerInfo) {
|
if (hasFaceTrackerInfo) {
|
||||||
auto startSection = destinationBuffer;
|
auto startSection = destinationBuffer;
|
||||||
auto faceTrackerInfo = reinterpret_cast<AvatarDataPacket::FaceTrackerInfo*>(destinationBuffer);
|
auto faceTrackerInfo = reinterpret_cast<AvatarDataPacket::FaceTrackerInfo*>(destinationBuffer);
|
||||||
const auto& blendshapeCoefficients = _headData->getSummedBlendshapeCoefficients();
|
const auto& blendshapeCoefficients = _headData->getBlendshapeCoefficients();
|
||||||
|
// note: we don't use the blink and average loudness, we just use the numBlendShapes and
|
||||||
|
// compute the procedural info on the client side.
|
||||||
faceTrackerInfo->leftEyeBlink = _headData->_leftEyeBlink;
|
faceTrackerInfo->leftEyeBlink = _headData->_leftEyeBlink;
|
||||||
faceTrackerInfo->rightEyeBlink = _headData->_rightEyeBlink;
|
faceTrackerInfo->rightEyeBlink = _headData->_rightEyeBlink;
|
||||||
faceTrackerInfo->averageLoudness = _headData->_averageLoudness;
|
faceTrackerInfo->averageLoudness = _headData->_averageLoudness;
|
||||||
|
@ -972,7 +987,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
|
||||||
|
|
||||||
PACKET_READ_CHECK(AdditionalFlags, sizeof(AvatarDataPacket::AdditionalFlags));
|
PACKET_READ_CHECK(AdditionalFlags, sizeof(AvatarDataPacket::AdditionalFlags));
|
||||||
auto data = reinterpret_cast<const AvatarDataPacket::AdditionalFlags*>(sourceBuffer);
|
auto data = reinterpret_cast<const AvatarDataPacket::AdditionalFlags*>(sourceBuffer);
|
||||||
uint8_t bitItems = data->flags;
|
uint16_t bitItems = data->flags;
|
||||||
|
|
||||||
// key state, stored as a semi-nibble in the bitItems
|
// key state, stored as a semi-nibble in the bitItems
|
||||||
auto newKeyState = (KeyState)getSemiNibbleAt(bitItems, KEY_STATE_START_BIT);
|
auto newKeyState = (KeyState)getSemiNibbleAt(bitItems, KEY_STATE_START_BIT);
|
||||||
|
@@ -980,26 +995,38 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
    // hand state, stored as a semi-nibble plus a bit in the bitItems
    // we store the hand state as well as other items in a shared bitset. The hand state is an octal, but is split
    // into two sections to maintain backward compatibility. The bits are ordered as such (0-7 left to right).
-   // +---+-----+-----+--+
-   // |x,x|H0,H1|x,x,x|H2|
-   // +---+-----+-----+--+
+   // AA 6/1/18 added three more flags bits 8,9, and 10 for procedural audio, blink, and eye saccade enabled
+   // +---+-----+-----+--+--+--+--+-----+
+   // |x,x|H0,H1|x,x,x|H2|Au|Bl|Ey|xxxxx|
+   // +---+-----+-----+--+--+--+--+-----+
    // Hand state - H0,H1,H2 is found in the 3rd, 4th, and 8th bits
    auto newHandState = getSemiNibbleAt(bitItems, HAND_STATE_START_BIT)
-       + (oneAtBit(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);
+       + (oneAtBit16(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);

-   auto newFaceTrackerConnected = oneAtBit(bitItems, IS_FACE_TRACKER_CONNECTED);
-   auto newEyeTrackerConnected = oneAtBit(bitItems, IS_EYE_TRACKER_CONNECTED);
+   auto newFaceTrackerConnected = oneAtBit16(bitItems, IS_FACE_TRACKER_CONNECTED);
+   auto newEyeTrackerConnected = oneAtBit16(bitItems, IS_EYE_TRACKER_CONNECTED);

+   auto newHasAudioEnabledFaceMovement = oneAtBit16(bitItems, AUDIO_ENABLED_FACE_MOVEMENT);
+   auto newHasProceduralEyeFaceMovement = oneAtBit16(bitItems, PROCEDURAL_EYE_FACE_MOVEMENT);
+   auto newHasProceduralBlinkFaceMovement = oneAtBit16(bitItems, PROCEDURAL_BLINK_FACE_MOVEMENT);

    bool keyStateChanged = (_keyState != newKeyState);
    bool handStateChanged = (_handState != newHandState);
    bool faceStateChanged = (_headData->_isFaceTrackerConnected != newFaceTrackerConnected);
    bool eyeStateChanged = (_headData->_isEyeTrackerConnected != newEyeTrackerConnected);
-   bool somethingChanged = keyStateChanged || handStateChanged || faceStateChanged || eyeStateChanged;
+   bool audioEnableFaceMovementChanged = (_headData->getHasAudioEnabledFaceMovement() != newHasAudioEnabledFaceMovement);
+   bool proceduralEyeFaceMovementChanged = (_headData->getHasProceduralEyeFaceMovement() != newHasProceduralEyeFaceMovement);
+   bool proceduralBlinkFaceMovementChanged = (_headData->getHasProceduralBlinkFaceMovement() != newHasProceduralBlinkFaceMovement);
+   bool somethingChanged = keyStateChanged || handStateChanged || faceStateChanged || eyeStateChanged || audioEnableFaceMovementChanged || proceduralEyeFaceMovementChanged || proceduralBlinkFaceMovementChanged;

    _keyState = newKeyState;
    _handState = newHandState;
    _headData->_isFaceTrackerConnected = newFaceTrackerConnected;
    _headData->_isEyeTrackerConnected = newEyeTrackerConnected;
+   _headData->setHasAudioEnabledFaceMovement(newHasAudioEnabledFaceMovement);
+   _headData->setHasProceduralEyeFaceMovement(newHasProceduralEyeFaceMovement);
+   _headData->setHasProceduralBlinkFaceMovement(newHasProceduralBlinkFaceMovement);

    sourceBuffer += sizeof(AvatarDataPacket::AdditionalFlags);
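Two helper shapes carry this whole hunk: single flag bits and two-bit "semi-nibbles". A minimal sketch of the read-side helpers, assuming the 16-bit variants mirror their 8-bit predecessors (the real definitions live in the shared utilities, not in this diff):

    #include <cstdint>

    // Hypothetical stand-ins for the shared-library helpers used above.
    inline bool oneAtBit16(uint16_t bitItems, int bitIndex) {
        return (bitItems & (1 << bitIndex)) != 0;   // test a single flag bit
    }

    inline int getSemiNibbleAt(uint16_t bitItems, int bitIndex) {
        return (bitItems >> bitIndex) & 0x3;        // extract two adjacent bits (a "semi-nibble")
    }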
@@ -1060,23 +1087,21 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {

        PACKET_READ_CHECK(FaceTrackerInfo, sizeof(AvatarDataPacket::FaceTrackerInfo));
        auto faceTrackerInfo = reinterpret_cast<const AvatarDataPacket::FaceTrackerInfo*>(sourceBuffer);
-       sourceBuffer += sizeof(AvatarDataPacket::FaceTrackerInfo);
-
-       _headData->_leftEyeBlink = faceTrackerInfo->leftEyeBlink;
-       _headData->_rightEyeBlink = faceTrackerInfo->rightEyeBlink;
-       _headData->_averageLoudness = faceTrackerInfo->averageLoudness;
-       _headData->_browAudioLift = faceTrackerInfo->browAudioLift;

        int numCoefficients = faceTrackerInfo->numBlendshapeCoefficients;
        const int coefficientsSize = sizeof(float) * numCoefficients;
+       sourceBuffer += sizeof(AvatarDataPacket::FaceTrackerInfo);

        PACKET_READ_CHECK(FaceTrackerCoefficients, coefficientsSize);
        _headData->_blendshapeCoefficients.resize(numCoefficients);  // make sure there's room for the copy!
-       _headData->_transientBlendshapeCoefficients.resize(numCoefficients);
+       //only copy the blendshapes to headData, not the procedural face info
        memcpy(_headData->_blendshapeCoefficients.data(), sourceBuffer, coefficientsSize);
        sourceBuffer += coefficientsSize;

        int numBytesRead = sourceBuffer - startSection;
        _faceTrackerRate.increment(numBytesRead);
        _faceTrackerUpdateRate.increment();
+   } else {
+       _headData->_blendshapeCoefficients.fill(0, _headData->_blendshapeCoefficients.size());
    }

    if (hasJointData) {
@@ -79,20 +79,30 @@ const quint32 AVATAR_MOTION_SCRIPTABLE_BITS =
 // Bitset of state flags - we store the key state, hand state, Faceshift, eye tracking, and existence of
 // referential data in this bit set. The hand state is an octal, but is split into two sections to maintain
 // backward compatibility. The bits are ordered as such (0-7 left to right).
-// +-----+-----+-+-+-+--+
-// |K0,K1|H0,H1|F|E|R|H2|
-// +-----+-----+-+-+-+--+
+// AA 6/1/18 added three more flags bits 8,9, and 10 for procedural audio, blink, and eye saccade enabled
+//
+// +-----+-----+-+-+-+--+--+--+--+-----+
+// |K0,K1|H0,H1|F|E|R|H2|Au|Bl|Ey|xxxxx|
+// +-----+-----+-+-+-+--+--+--+--+-----+
+//
 // Key state - K0,K1 is found in the 1st and 2nd bits
 // Hand state - H0,H1,H2 is found in the 3rd, 4th, and 8th bits
 // Face tracker - F is found in the 5th bit
 // Eye tracker - E is found in the 6th bit
 // Referential Data - R is found in the 7th bit
+// Procedural audio to mouth movement is enabled 8th bit
+// Procedural Blink is enabled 9th bit
+// Procedural Eyelid is enabled 10th bit

 const int KEY_STATE_START_BIT = 0; // 1st and 2nd bits
 const int HAND_STATE_START_BIT = 2; // 3rd and 4th bits
 const int IS_FACE_TRACKER_CONNECTED = 4; // 5th bit
 const int IS_EYE_TRACKER_CONNECTED = 5; // 6th bit (was CHAT_CIRCLING)
 const int HAS_REFERENTIAL = 6; // 7th bit
 const int HAND_STATE_FINGER_POINTING_BIT = 7; // 8th bit
+const int AUDIO_ENABLED_FACE_MOVEMENT = 8; // 9th bit
+const int PROCEDURAL_EYE_FACE_MOVEMENT = 9; // 10th bit
+const int PROCEDURAL_BLINK_FACE_MOVEMENT = 10; // 11th bit

 const char HAND_STATE_NULL = 0;
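A worked example of how these constants compose into one wire value, using the setAtBit16 sketch shown earlier (values follow directly from the bit positions above):

    uint16_t flags = 0;
    setAtBit16(flags, IS_FACE_TRACKER_CONNECTED);    // bit 4  -> flags == 0x0010
    setAtBit16(flags, HAS_REFERENTIAL);              // bit 6  -> flags == 0x0050
    setAtBit16(flags, AUDIO_ENABLED_FACE_MOVEMENT);  // bit 8  -> flags == 0x0150
    // Bits 11-15 are the "xxxxx" spare region in the diagram; needing bits 8-10
    // at all is why the field had to grow from uint8_t to uint16_t below.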
@@ -200,9 +210,9 @@ namespace AvatarDataPacket {
    static_assert(sizeof(SensorToWorldMatrix) == SENSOR_TO_WORLD_SIZE, "AvatarDataPacket::SensorToWorldMatrix size doesn't match.");

    PACKED_BEGIN struct AdditionalFlags {
-       uint8_t flags;  // additional flags: hand state, key state, eye tracking
+       uint16_t flags; // additional flags: hand state, key state, eye tracking
    } PACKED_END;
-   const size_t ADDITIONAL_FLAGS_SIZE = 1;
+   const size_t ADDITIONAL_FLAGS_SIZE = 2;
    static_assert(sizeof(AdditionalFlags) == ADDITIONAL_FLAGS_SIZE, "AvatarDataPacket::AdditionalFlags size doesn't match.");

    // only present if HAS_REFERENTIAL flag is set in AvatarInfo.flags
@@ -501,6 +511,11 @@ public:

    float getDomainLimitedScale() const;

+   virtual bool getHasScriptedBlendshapes() const { return false; }
+   virtual bool getHasProceduralBlinkFaceMovement() const { return true; }
+   virtual bool getHasProceduralEyeFaceMovement() const { return true; }
+   virtual bool getHasAudioEnabledFaceMovement() const { return false; }
+
    /**jsdoc
     * Returns the minimum scale allowed for this avatar in the current domain.
     * This value can change as the user changes avatars or when changing domains.
@@ -69,6 +69,24 @@ public:
    }
    bool lookAtPositionChangedSince(quint64 time) { return _lookAtPositionChanged >= time; }

+   bool getHasProceduralEyeFaceMovement() const { return _hasProceduralEyeFaceMovement; }
+
+   void setHasProceduralEyeFaceMovement(const bool hasProceduralEyeFaceMovement) {
+       _hasProceduralEyeFaceMovement = hasProceduralEyeFaceMovement;
+   }
+
+   bool getHasProceduralBlinkFaceMovement() const { return _hasProceduralBlinkFaceMovement; }
+
+   void setHasProceduralBlinkFaceMovement(const bool hasProceduralBlinkFaceMovement) {
+       _hasProceduralBlinkFaceMovement = hasProceduralBlinkFaceMovement;
+   }
+
+   bool getHasAudioEnabledFaceMovement() const { return _hasAudioEnabledFaceMovement; }
+
+   void setHasAudioEnabledFaceMovement(const bool hasAudioEnabledFaceMovement) {
+       _hasAudioEnabledFaceMovement = hasAudioEnabledFaceMovement;
+   }
+
    friend class AvatarData;

    QJsonObject toJson() const;

@@ -83,6 +101,9 @@ protected:
    glm::vec3 _lookAtPosition;
    quint64 _lookAtPositionChanged { 0 };

+   bool _hasAudioEnabledFaceMovement { true };
+   bool _hasProceduralBlinkFaceMovement { true };
+   bool _hasProceduralEyeFaceMovement { true };
    bool _isFaceTrackerConnected { false };
    bool _isEyeTrackerConnected { false };
    float _leftEyeBlink { 0.0f };
@@ -22,12 +22,16 @@
 #include <SharedUtil.h>
 #include <TextureMeta.h>

+#include <OwningBuffer.h>
+
 #include "ModelBakingLoggingCategory.h"

 const QString BAKED_TEXTURE_KTX_EXT = ".ktx";
 const QString BAKED_TEXTURE_BCN_SUFFIX = "_bcn.ktx";
 const QString BAKED_META_TEXTURE_SUFFIX = ".texmeta.json";

+bool TextureBaker::_compressionEnabled = true;
+
 TextureBaker::TextureBaker(const QUrl& textureURL, image::TextureUsage::Type textureType,
                            const QDir& outputDirectory, const QString& metaTexturePathPrefix,
                            const QString& baseFilename, const QByteArray& textureContent) :
@@ -124,42 +128,45 @@ void TextureBaker::processTexture() {

    TextureMeta meta;

+   auto originalCopyFilePath = _outputDirectory.absoluteFilePath(_textureURL.fileName());
    {
-       auto filePath = _outputDirectory.absoluteFilePath(_textureURL.fileName());
-       QFile file { filePath };
+       QFile file { originalCopyFilePath };
        if (!file.open(QIODevice::WriteOnly) || file.write(_originalTexture) == -1) {
            handleError("Could not write original texture for " + _textureURL.toString());
            return;
        }
-       _outputFiles.push_back(filePath);
+       // IMPORTANT: _originalTexture is empty past this point
+       _originalTexture.clear();
+       _outputFiles.push_back(originalCopyFilePath);
        meta.original = _metaTexturePathPrefix +_textureURL.fileName();
    }

-   // IMPORTANT: _originalTexture is empty past this point
-   auto processedTexture = image::processImage(std::move(_originalTexture), _textureURL.toString().toStdString(),
-                                               ABSOLUTE_MAX_TEXTURE_NUM_PIXELS, _textureType, _abortProcessing);
-   processedTexture->setSourceHash(hash);
-
-   if (shouldStop()) {
+   auto buffer = std::static_pointer_cast<QIODevice>(std::make_shared<QFile>(originalCopyFilePath));
+   if (!buffer->open(QIODevice::ReadOnly)) {
+       handleError("Could not open original file at " + originalCopyFilePath);
        return;
    }

-   if (!processedTexture) {
-       handleError("Could not process texture " + _textureURL.toString());
-       return;
-   }
-
-   auto memKTX = gpu::Texture::serialize(*processedTexture);
-
-   if (!memKTX) {
-       handleError("Could not serialize " + _textureURL.toString() + " to KTX");
-       return;
-   }
-
-   // attempt to write the baked texture to the destination file path
-   if (memKTX->_header.isCompressed()) {
+   // Compressed KTX
+   if (_compressionEnabled) {
+       auto processedTexture = image::processImage(buffer, _textureURL.toString().toStdString(),
+                                                   ABSOLUTE_MAX_TEXTURE_NUM_PIXELS, _textureType, true, _abortProcessing);
+       if (!processedTexture) {
+           handleError("Could not process texture " + _textureURL.toString());
+           return;
+       }
+       processedTexture->setSourceHash(hash);
+
+       if (shouldStop()) {
+           return;
+       }
+
+       auto memKTX = gpu::Texture::serialize(*processedTexture);
+       if (!memKTX) {
+           handleError("Could not serialize " + _textureURL.toString() + " to KTX");
+           return;
+       }
+
        const char* name = khronos::gl::texture::toString(memKTX->_header.getGLInternaFormat());
        if (name == nullptr) {
            handleError("Could not determine internal format for compressed KTX: " + _textureURL.toString());
@@ -178,21 +185,45 @@ void TextureBaker::processTexture() {
        }
        _outputFiles.push_back(filePath);
        meta.availableTextureTypes[memKTX->_header.getGLInternaFormat()] = _metaTexturePathPrefix + fileName;
-   } else {
+   }
+
+   // Uncompressed KTX
+   if (_textureType == image::TextureUsage::Type::CUBE_TEXTURE) {
+       buffer->reset();
+       auto processedTexture = image::processImage(std::move(buffer), _textureURL.toString().toStdString(),
+                                                   ABSOLUTE_MAX_TEXTURE_NUM_PIXELS, _textureType, false, _abortProcessing);
+       if (!processedTexture) {
+           handleError("Could not process texture " + _textureURL.toString());
+           return;
+       }
+       processedTexture->setSourceHash(hash);
+
+       if (shouldStop()) {
+           return;
+       }
+
+       auto memKTX = gpu::Texture::serialize(*processedTexture);
+       if (!memKTX) {
+           handleError("Could not serialize " + _textureURL.toString() + " to KTX");
+           return;
+       }
+
        const char* data = reinterpret_cast<const char*>(memKTX->_storage->data());
        const size_t length = memKTX->_storage->size();

        auto fileName = _baseFilename + ".ktx";
        auto filePath = _outputDirectory.absoluteFilePath(fileName);
-       QFile ktxTextureFile { filePath };
-       if (!ktxTextureFile.open(QIODevice::WriteOnly) || ktxTextureFile.write(data, length) == -1) {
-           handleError("Could not write ktx texture for " + _textureURL.toString());
+       QFile bakedTextureFile { filePath };
+       if (!bakedTextureFile.open(QIODevice::WriteOnly) || bakedTextureFile.write(data, length) == -1) {
+           handleError("Could not write baked texture for " + _textureURL.toString());
            return;
        }
        _outputFiles.push_back(filePath);
+       meta.uncompressed = _metaTexturePathPrefix + fileName;
+   } else {
+       buffer.reset();
    }

    {
        auto data = meta.serialize();
        _metaTextureFileName = _outputDirectory.absoluteFilePath(_baseFilename + BAKED_META_TEXTURE_SUFFIX);
@@ -41,6 +41,8 @@ public:

    virtual void setWasAborted(bool wasAborted) override;

+   static void setCompressionEnabled(bool enabled) { _compressionEnabled = enabled; }
+
 public slots:
    virtual void bake() override;
    virtual void abort() override;

@@ -65,6 +67,8 @@ private:
    QString _metaTexturePathPrefix;

    std::atomic<bool> _abortProcessing { false };

+   static bool _compressionEnabled;
 };

 #endif // hifi_TextureBaker_h
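With the per-texture-type toggles removed from the image library, a host process flips one switch before queuing any bakes. A hypothetical call site, sketching how a baking process such as the asset server could opt in:

    // Process-wide toggle; affects every TextureBaker created afterwards.
    TextureBaker::setCompressionEnabled(true);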
@@ -151,11 +151,9 @@ void Basic2DWindowOpenGLDisplayPlugin::compositeExtra() {
        batch.setModelTransform(stickTransform);
        batch.draw(gpu::TRIANGLE_STRIP, 4);

-       if (!virtualPadManager.getLeftVirtualPad()->isBeingTouched()) {
-           batch.setResourceTexture(0, _virtualPadJumpBtnTexture);
-           batch.setModelTransform(jumpTransform);
-           batch.draw(gpu::TRIANGLE_STRIP, 4);
-       }
+       batch.setResourceTexture(0, _virtualPadJumpBtnTexture);
+       batch.setModelTransform(jumpTransform);
+       batch.draw(gpu::TRIANGLE_STRIP, 4);
    });
 }
 #endif
@@ -46,12 +46,9 @@ bool DEV_DECIMATE_TEXTURES = false;
 std::atomic<size_t> DECIMATED_TEXTURE_COUNT{ 0 };
 std::atomic<size_t> RECTIFIED_TEXTURE_COUNT{ 0 };

-static const auto HDR_FORMAT = gpu::Element::COLOR_R11G11B10;
-
-static std::atomic<bool> compressColorTextures { false };
-static std::atomic<bool> compressNormalTextures { false };
-static std::atomic<bool> compressGrayscaleTextures { false };
-static std::atomic<bool> compressCubeTextures { false };
+// we use a ref here to work around static order initialization
+// possibly causing the element not to be constructed yet
+static const auto& HDR_FORMAT = gpu::Element::COLOR_R11G11B10;

 uint rectifyDimension(const uint& dimension) {
    if (dimension == 0) {
@@ -126,112 +123,63 @@ TextureUsage::TextureLoader TextureUsage::getTextureLoaderForType(Type type, con
 }

 gpu::TexturePointer TextureUsage::createStrict2DTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
-                                                                 const std::atomic<bool>& abortProcessing) {
-    return process2DTextureColorFromImage(std::move(srcImage), srcImageName, true, abortProcessing);
+                                                                 bool compress, const std::atomic<bool>& abortProcessing) {
+    return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, true, abortProcessing);
 }

 gpu::TexturePointer TextureUsage::create2DTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
-                                                           const std::atomic<bool>& abortProcessing) {
-    return process2DTextureColorFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
+                                                           bool compress, const std::atomic<bool>& abortProcessing) {
+    return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
 }

 gpu::TexturePointer TextureUsage::createAlbedoTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
-                                                               const std::atomic<bool>& abortProcessing) {
-    return process2DTextureColorFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
+                                                               bool compress, const std::atomic<bool>& abortProcessing) {
+    return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
 }

 gpu::TexturePointer TextureUsage::createEmissiveTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
-                                                                 const std::atomic<bool>& abortProcessing) {
-    return process2DTextureColorFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
+                                                                 bool compress, const std::atomic<bool>& abortProcessing) {
+    return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
 }

 gpu::TexturePointer TextureUsage::createLightmapTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
-                                                                 const std::atomic<bool>& abortProcessing) {
-    return process2DTextureColorFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
+                                                                 bool compress, const std::atomic<bool>& abortProcessing) {
+    return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
 }

 gpu::TexturePointer TextureUsage::createNormalTextureFromNormalImage(QImage&& srcImage, const std::string& srcImageName,
-                                                                     const std::atomic<bool>& abortProcessing) {
-    return process2DTextureNormalMapFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
+                                                                     bool compress, const std::atomic<bool>& abortProcessing) {
+    return process2DTextureNormalMapFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
 }

 gpu::TexturePointer TextureUsage::createNormalTextureFromBumpImage(QImage&& srcImage, const std::string& srcImageName,
-                                                                   const std::atomic<bool>& abortProcessing) {
-    return process2DTextureNormalMapFromImage(std::move(srcImage), srcImageName, true, abortProcessing);
+                                                                   bool compress, const std::atomic<bool>& abortProcessing) {
+    return process2DTextureNormalMapFromImage(std::move(srcImage), srcImageName, compress, true, abortProcessing);
 }

 gpu::TexturePointer TextureUsage::createRoughnessTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
-                                                                  const std::atomic<bool>& abortProcessing) {
-    return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
+                                                                  bool compress, const std::atomic<bool>& abortProcessing) {
+    return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
 }

 gpu::TexturePointer TextureUsage::createRoughnessTextureFromGlossImage(QImage&& srcImage, const std::string& srcImageName,
-                                                                       const std::atomic<bool>& abortProcessing) {
-    return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, true, abortProcessing);
+                                                                       bool compress, const std::atomic<bool>& abortProcessing) {
+    return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, compress, true, abortProcessing);
 }

 gpu::TexturePointer TextureUsage::createMetallicTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
-                                                                 const std::atomic<bool>& abortProcessing) {
-    return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
+                                                                 bool compress, const std::atomic<bool>& abortProcessing) {
+    return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
 }

 gpu::TexturePointer TextureUsage::createCubeTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
-                                                             const std::atomic<bool>& abortProcessing) {
-    return processCubeTextureColorFromImage(std::move(srcImage), srcImageName, true, abortProcessing);
+                                                             bool compress, const std::atomic<bool>& abortProcessing) {
+    return processCubeTextureColorFromImage(std::move(srcImage), srcImageName, compress, true, abortProcessing);
 }

 gpu::TexturePointer TextureUsage::createCubeTextureFromImageWithoutIrradiance(QImage&& srcImage, const std::string& srcImageName,
-                                                                              const std::atomic<bool>& abortProcessing) {
-    return processCubeTextureColorFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
-}
-
-
-bool isColorTexturesCompressionEnabled() {
-#if CPU_MIPMAPS
-    return compressColorTextures.load();
-#else
-    return false;
-#endif
-}
-
-bool isNormalTexturesCompressionEnabled() {
-#if CPU_MIPMAPS
-    return compressNormalTextures.load();
-#else
-    return false;
-#endif
-}
-
-bool isGrayscaleTexturesCompressionEnabled() {
-#if CPU_MIPMAPS
-    return compressGrayscaleTextures.load();
-#else
-    return false;
-#endif
-}
-
-bool isCubeTexturesCompressionEnabled() {
-#if CPU_MIPMAPS
-    return compressCubeTextures.load();
-#else
-    return false;
-#endif
-}
-
-void setColorTexturesCompressionEnabled(bool enabled) {
-    compressColorTextures.store(enabled);
-}
-
-void setNormalTexturesCompressionEnabled(bool enabled) {
-    compressNormalTextures.store(enabled);
-}
-
-void setGrayscaleTexturesCompressionEnabled(bool enabled) {
-    compressGrayscaleTextures.store(enabled);
-}
-
-void setCubeTexturesCompressionEnabled(bool enabled) {
-    compressCubeTextures.store(enabled);
+                                                                              bool compress, const std::atomic<bool>& abortProcessing) {
+    return processCubeTextureColorFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
 }
@@ -253,17 +201,11 @@ uint32 packR11G11B10F(const glm::vec3& color) {
    return glm::packF2x11_1x10(ucolor);
 }

-QImage processRawImageData(QByteArray&& content, const std::string& filename) {
-    // Take a local copy to force move construction
-    // https://github.com/isocpp/CppCoreGuidelines/blob/master/CppCoreGuidelines.md#f18-for-consume-parameters-pass-by-x-and-stdmove-the-parameter
-    QByteArray localCopy = std::move(content);
-
+QImage processRawImageData(QIODevice& content, const std::string& filename) {
    // Help the QImage loader by extracting the image file format from the url filename ext.
    // Some tga are not created properly without it.
    auto filenameExtension = filename.substr(filename.find_last_of('.') + 1);
-   QBuffer buffer;
-   buffer.setData(localCopy);
-   QImageReader imageReader(&buffer, filenameExtension.c_str());
+   QImageReader imageReader(&content, filenameExtension.c_str());

    if (imageReader.canRead()) {
        return imageReader.read();
@@ -271,8 +213,8 @@ QImage processRawImageData(QByteArray&& content, const std::string& filename) {
        // Extension could be incorrect, try to detect the format from the content
        QImageReader newImageReader;
        newImageReader.setDecideFormatFromContent(true);
-       buffer.setData(localCopy);
-       newImageReader.setDevice(&buffer);
+       content.reset();
+       newImageReader.setDevice(&content);

        if (newImageReader.canRead()) {
            qCWarning(imagelogging) << "Image file" << filename.c_str() << "has extension" << filenameExtension.c_str()
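Since processRawImageData now reads from any QIODevice, a caller can hand it an open file directly instead of buffering the bytes first. A minimal sketch under stated assumptions (the path is invented; the function is file-local to Image.cpp):

    #include <QtCore/QFile>
    #include <QtGui/QImage>

    QFile file("skybox.tga");   // hypothetical input
    if (file.open(QIODevice::ReadOnly)) {
        QImage image = processRawImageData(file, "skybox.tga");
        // On a misleading extension the function rewinds the device
        // (content.reset()) and retries with format detection from content.
    }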
@@ -284,11 +226,14 @@ QImage processRawImageData(QByteArray&& content, const std::string& filename) {
    return QImage();
 }

-gpu::TexturePointer processImage(QByteArray&& content, const std::string& filename,
+gpu::TexturePointer processImage(std::shared_ptr<QIODevice> content, const std::string& filename,
                                  int maxNumPixels, TextureUsage::Type textureType,
-                                 const std::atomic<bool>& abortProcessing) {
+                                 bool compress, const std::atomic<bool>& abortProcessing) {

-   QImage image = processRawImageData(std::move(content), filename);
+   QImage image = processRawImageData(*content.get(), filename);
+   // Texture content can take up a lot of memory. Here we release our ownership of that content
+   // in case it can be released.
+   content.reset();

    int imageWidth = image.width();
    int imageHeight = image.height();

@@ -314,7 +259,7 @@ gpu::TexturePointer processImage(QByteArray&& content, const std::string& filena
    }

    auto loader = TextureUsage::getTextureLoaderForType(textureType);
-   auto texture = loader(std::move(image), filename, abortProcessing);
+   auto texture = loader(std::move(image), filename, compress, abortProcessing);

    return texture;
 }
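The reworked entry point in use. A sketch under stated assumptions: the file path is invented, and ALBEDO_TEXTURE is assumed to be one of the TextureUsage::Type values; passing the device by shared_ptr is what lets processImage drop it as soon as decoding finishes:

    #include <atomic>
    #include <memory>
    #include <QtCore/QFile>

    std::atomic<bool> abortFlag { false };
    auto device = std::make_shared<QFile>("albedo.png");   // hypothetical texture
    device->open(QIODevice::ReadOnly);

    auto texture = image::processImage(device, "albedo.png",
                                       ABSOLUTE_MAX_TEXTURE_NUM_PIXELS,
                                       image::TextureUsage::ALBEDO_TEXTURE,
                                       true /* compress */, abortFlag);
    // processImage resets its own copy of the shared_ptr early; our copy
    // keeps the QFile alive until this scope ends.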
@@ -804,7 +749,7 @@ void processTextureAlpha(const QImage& srcImage, bool& validAlpha, bool& alphaAs
    validAlpha = (numOpaques != NUM_PIXELS);
 }

-gpu::TexturePointer TextureUsage::process2DTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName,
+gpu::TexturePointer TextureUsage::process2DTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
                                                                  bool isStrict, const std::atomic<bool>& abortProcessing) {
    PROFILE_RANGE(resource_parse, "process2DTextureColorFromImage");
    QImage image = processSourceImage(std::move(srcImage), false);

@@ -825,7 +770,7 @@ gpu::TexturePointer TextureUsage::process2DTextureColorFromImage(QImage&& srcIma
    if ((image.width() > 0) && (image.height() > 0)) {
        gpu::Element formatMip;
        gpu::Element formatGPU;
-       if (isColorTexturesCompressionEnabled()) {
+       if (compress) {
            if (validAlpha) {
                // NOTE: This disables BC1a compression because it was producing odd artifacts on text textures
                // for the tutorial. Instead we use BC3 (which is larger) but doesn't produce the same artifacts).

@@ -833,6 +778,7 @@ gpu::TexturePointer TextureUsage::process2DTextureColorFromImage(QImage&& srcIma
            } else {
                formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_SRGB;
            }
+           formatMip = formatGPU;
        } else {
 #ifdef USE_GLES
            // GLES does not support GL_BGRA
@@ -941,7 +887,8 @@ QImage processBumpMap(QImage&& image) {
    return result;
 }
 gpu::TexturePointer TextureUsage::process2DTextureNormalMapFromImage(QImage&& srcImage, const std::string& srcImageName,
-                                                                     bool isBumpMap, const std::atomic<bool>& abortProcessing) {
+                                                                     bool compress, bool isBumpMap,
+                                                                     const std::atomic<bool>& abortProcessing) {
    PROFILE_RANGE(resource_parse, "process2DTextureNormalMapFromImage");
    QImage image = processSourceImage(std::move(srcImage), false);

@@ -958,7 +905,7 @@ gpu::TexturePointer TextureUsage::process2DTextureNormalMapFromImage(QImage&& sr
    if ((image.width() > 0) && (image.height() > 0)) {
        gpu::Element formatMip;
        gpu::Element formatGPU;
-       if (isNormalTexturesCompressionEnabled()) {
+       if (compress) {
            formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_XY;
        } else {
 #ifdef USE_GLES
@@ -980,7 +927,7 @@ gpu::TexturePointer TextureUsage::process2DTextureNormalMapFromImage(QImage&& sr
 }

 gpu::TexturePointer TextureUsage::process2DTextureGrayscaleFromImage(QImage&& srcImage, const std::string& srcImageName,
-                                                                     bool isInvertedPixels,
+                                                                     bool compress, bool isInvertedPixels,
                                                                      const std::atomic<bool>& abortProcessing) {
    PROFILE_RANGE(resource_parse, "process2DTextureGrayscaleFromImage");
    QImage image = processSourceImage(std::move(srcImage), false);

@@ -998,7 +945,7 @@ gpu::TexturePointer TextureUsage::process2DTextureGrayscaleFromImage(QImage&& sr
    if ((image.width() > 0) && (image.height() > 0)) {
        gpu::Element formatMip;
        gpu::Element formatGPU;
-       if (isGrayscaleTexturesCompressionEnabled()) {
+       if (compress) {
            formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_RED;
        } else {
 #ifdef USE_GLES
@@ -1345,7 +1292,7 @@ QImage convertToHDRFormat(QImage&& srcImage, gpu::Element format) {
 }

 gpu::TexturePointer TextureUsage::processCubeTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName,
-                                                                   bool generateIrradiance,
+                                                                   bool compress, bool generateIrradiance,
                                                                    const std::atomic<bool>& abortProcessing) {
    PROFILE_RANGE(resource_parse, "processCubeTextureColorFromImage");

@@ -1373,7 +1320,7 @@ gpu::TexturePointer TextureUsage::processCubeTextureColorFromImage(QImage&& srcI
    gpu::Element formatMip;
    gpu::Element formatGPU;
-   if (isCubeTexturesCompressionEnabled()) {
+   if (compress) {
        formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_HDR_RGB;
    } else {
 #ifdef USE_GLES
@@ -41,60 +41,50 @@ enum Type {
    UNUSED_TEXTURE
 };

-using TextureLoader = std::function<gpu::TexturePointer(QImage&&, const std::string&, const std::atomic<bool>&)>;
+using TextureLoader = std::function<gpu::TexturePointer(QImage&&, const std::string&, bool, const std::atomic<bool>&)>;
 TextureLoader getTextureLoaderForType(Type type, const QVariantMap& options = QVariantMap());

 gpu::TexturePointer create2DTextureFromImage(QImage&& image, const std::string& srcImageName,
-                                             const std::atomic<bool>& abortProcessing);
+                                             bool compress, const std::atomic<bool>& abortProcessing);
 gpu::TexturePointer createStrict2DTextureFromImage(QImage&& image, const std::string& srcImageName,
-                                                   const std::atomic<bool>& abortProcessing);
+                                                   bool compress, const std::atomic<bool>& abortProcessing);
 gpu::TexturePointer createAlbedoTextureFromImage(QImage&& image, const std::string& srcImageName,
-                                                 const std::atomic<bool>& abortProcessing);
+                                                 bool compress, const std::atomic<bool>& abortProcessing);
 gpu::TexturePointer createEmissiveTextureFromImage(QImage&& image, const std::string& srcImageName,
-                                                   const std::atomic<bool>& abortProcessing);
+                                                   bool compress, const std::atomic<bool>& abortProcessing);
 gpu::TexturePointer createNormalTextureFromNormalImage(QImage&& image, const std::string& srcImageName,
-                                                       const std::atomic<bool>& abortProcessing);
+                                                       bool compress, const std::atomic<bool>& abortProcessing);
 gpu::TexturePointer createNormalTextureFromBumpImage(QImage&& image, const std::string& srcImageName,
-                                                     const std::atomic<bool>& abortProcessing);
+                                                     bool compress, const std::atomic<bool>& abortProcessing);
 gpu::TexturePointer createRoughnessTextureFromImage(QImage&& image, const std::string& srcImageName,
-                                                    const std::atomic<bool>& abortProcessing);
+                                                    bool compress, const std::atomic<bool>& abortProcessing);
 gpu::TexturePointer createRoughnessTextureFromGlossImage(QImage&& image, const std::string& srcImageName,
-                                                         const std::atomic<bool>& abortProcessing);
+                                                         bool compress, const std::atomic<bool>& abortProcessing);
 gpu::TexturePointer createMetallicTextureFromImage(QImage&& image, const std::string& srcImageName,
-                                                   const std::atomic<bool>& abortProcessing);
+                                                   bool compress, const std::atomic<bool>& abortProcessing);
 gpu::TexturePointer createCubeTextureFromImage(QImage&& image, const std::string& srcImageName,
-                                               const std::atomic<bool>& abortProcessing);
+                                               bool compress, const std::atomic<bool>& abortProcessing);
 gpu::TexturePointer createCubeTextureFromImageWithoutIrradiance(QImage&& image, const std::string& srcImageName,
-                                                                const std::atomic<bool>& abortProcessing);
+                                                                bool compress, const std::atomic<bool>& abortProcessing);
 gpu::TexturePointer createLightmapTextureFromImage(QImage&& image, const std::string& srcImageName,
-                                                   const std::atomic<bool>& abortProcessing);
+                                                   bool compress, const std::atomic<bool>& abortProcessing);

-gpu::TexturePointer process2DTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName, bool isStrict,
-                                                   const std::atomic<bool>& abortProcessing);
+gpu::TexturePointer process2DTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
+                                                   bool isStrict, const std::atomic<bool>& abortProcessing);
-gpu::TexturePointer process2DTextureNormalMapFromImage(QImage&& srcImage, const std::string& srcImageName, bool isBumpMap,
-                                                       const std::atomic<bool>& abortProcessing);
+gpu::TexturePointer process2DTextureNormalMapFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
+                                                       bool isBumpMap, const std::atomic<bool>& abortProcessing);
-gpu::TexturePointer process2DTextureGrayscaleFromImage(QImage&& srcImage, const std::string& srcImageName, bool isInvertedPixels,
-                                                       const std::atomic<bool>& abortProcessing);
+gpu::TexturePointer process2DTextureGrayscaleFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
+                                                       bool isInvertedPixels, const std::atomic<bool>& abortProcessing);
-gpu::TexturePointer processCubeTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName, bool generateIrradiance,
-                                                     const std::atomic<bool>& abortProcessing);
+gpu::TexturePointer processCubeTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
+                                                     bool generateIrradiance, const std::atomic<bool>& abortProcessing);

 } // namespace TextureUsage

 const QStringList getSupportedFormats();

-bool isColorTexturesCompressionEnabled();
-bool isNormalTexturesCompressionEnabled();
-bool isGrayscaleTexturesCompressionEnabled();
-bool isCubeTexturesCompressionEnabled();
-
-void setColorTexturesCompressionEnabled(bool enabled);
-void setNormalTexturesCompressionEnabled(bool enabled);
-void setGrayscaleTexturesCompressionEnabled(bool enabled);
-void setCubeTexturesCompressionEnabled(bool enabled);
-
-gpu::TexturePointer processImage(QByteArray&& content, const std::string& url,
+gpu::TexturePointer processImage(std::shared_ptr<QIODevice> content, const std::string& url,
                                  int maxNumPixels, TextureUsage::Type textureType,
-                                 const std::atomic<bool>& abortProcessing = false);
+                                 bool compress = false, const std::atomic<bool>& abortProcessing = false);

 } // namespace image
@@ -33,6 +33,9 @@ bool TextureMeta::deserialize(const QByteArray& data, TextureMeta* meta) {
    if (root.contains("original")) {
        meta->original = root["original"].toString();
    }
+   if (root.contains("uncompressed")) {
+       meta->uncompressed = root["uncompressed"].toString();
+   }
    if (root.contains("compressed")) {
        auto compressed = root["compressed"].toObject();
        for (auto it = compressed.constBegin(); it != compressed.constEnd(); it++) {

@@ -57,6 +60,7 @@ QByteArray TextureMeta::serialize() {
        compressed[name] = kv.second.toString();
    }
    root["original"] = original.toString();
+   root["uncompressed"] = uncompressed.toString();
    root["compressed"] = compressed;
    doc.setObject(root);
@@ -35,6 +35,7 @@ struct TextureMeta {
    QByteArray serialize();

    QUrl original;
+   QUrl uncompressed;
    std::unordered_map<khronos::gl::texture::InternalFormat, QUrl> availableTextureTypes;
 };
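Put together, a baked texture's .texmeta.json sidecar can now point at three kinds of payloads. A hypothetical sketch of how the struct is populated (URLs invented for illustration; the "compressed" entries come from availableTextureTypes, keyed by GL internal-format name):

    TextureMeta meta;
    meta.original = QUrl("skybox.png");      // hypothetical relative URL
    meta.uncompressed = QUrl("skybox.ktx");  // the new plain-KTX entry
    // availableTextureTypes entries land under the "compressed" JSON object.
    QByteArray json = meta.serialize();
    // roughly: {"original":"skybox.png","uncompressed":"skybox.ktx","compressed":{...}}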
@@ -50,6 +50,8 @@

 #include <TextureMeta.h>

+#include <OwningBuffer.h>
+
 Q_LOGGING_CATEGORY(trace_resource_parse_image, "trace.resource.parse.image")
 Q_LOGGING_CATEGORY(trace_resource_parse_image_raw, "trace.resource.parse.image.raw")
 Q_LOGGING_CATEGORY(trace_resource_parse_image_ktx, "trace.resource.parse.image.ktx")

@@ -277,7 +279,7 @@ gpu::TexturePointer TextureCache::getImageTexture(const QString& path, image::Te
        return nullptr;
    }
    auto loader = image::TextureUsage::getTextureLoaderForType(type, options);
-   return gpu::TexturePointer(loader(std::move(image), path.toStdString(), false));
+   return gpu::TexturePointer(loader(std::move(image), path.toStdString(), false, false));
 }

 QSharedPointer<Resource> TextureCache::createResource(const QUrl& url, const QSharedPointer<Resource>& fallback,

@@ -964,7 +966,6 @@ void NetworkTexture::loadMetaContent(const QByteArray& content) {
        return;
    }

-
    auto& backend = DependencyManager::get<TextureCache>()->getGPUContext()->getBackend();
    for (auto pair : meta.availableTextureTypes) {
        gpu::Element elFormat;

@@ -990,6 +991,21 @@ void NetworkTexture::loadMetaContent(const QByteArray& content) {
        }
    }

+#ifndef Q_OS_ANDROID
+   if (!meta.uncompressed.isEmpty()) {
+       _currentlyLoadingResourceType = ResourceType::KTX;
+       _activeUrl = _activeUrl.resolved(meta.uncompressed);
+
+       auto textureCache = DependencyManager::get<TextureCache>();
+       auto self = _self.lock();
+       if (!self) {
+           return;
+       }
+       QMetaObject::invokeMethod(this, "attemptRequest", Qt::QueuedConnection);
+       return;
+   }
+#endif
+
    if (!meta.original.isEmpty()) {
        _currentlyLoadingResourceType = ResourceType::ORIGINAL;
        _activeUrl = _activeUrl.resolved(meta.original);

@@ -1143,7 +1159,8 @@ void ImageReader::read() {
    PROFILE_RANGE_EX(resource_parse_image_raw, __FUNCTION__, 0xffff0000, 0);

    // IMPORTANT: _content is empty past this point
-   texture = image::processImage(std::move(_content), _url.toString().toStdString(), _maxNumPixels, networkTexture->getTextureType());
+   auto buffer = std::shared_ptr<QIODevice>((QIODevice*)new OwningBuffer(std::move(_content)));
+   texture = image::processImage(std::move(buffer), _url.toString().toStdString(), _maxNumPixels, networkTexture->getTextureType());

    if (!texture) {
        qCWarning(modelnetworking) << "Could not process:" << _url;
@@ -40,7 +40,7 @@ PacketVersion versionForPacketType(PacketType packetType) {
    case PacketType::AvatarData:
    case PacketType::BulkAvatarData:
    case PacketType::KillAvatar:
-       return static_cast<PacketVersion>(AvatarMixerPacketVersion::FixMannequinDefaultAvatarFeet);
+       return static_cast<PacketVersion>(AvatarMixerPacketVersion::ProceduralFaceMovementFlagsAndBlendshapes);
    case PacketType::MessagesData:
        return static_cast<PacketVersion>(MessageDataVersion::TextOrBinaryData);
    // ICE packets
@@ -283,7 +283,8 @@ enum class AvatarMixerPacketVersion : PacketVersion {
    UpdatedMannequinDefaultAvatar,
    AvatarJointDefaultPoseFlags,
    FBXReaderNodeReparenting,
-   FixMannequinDefaultAvatarFeet
+   FixMannequinDefaultAvatarFeet,
+   ProceduralFaceMovementFlagsAndBlendshapes
 };

 enum class DomainConnectRequestVersion : PacketVersion {
@@ -111,7 +111,7 @@ public:
    virtual PhysicsMotionType getMotionType() const { return _motionType; }

    void setMass(float mass);
-   virtual float getMass() const;
+   float getMass() const;

    void setBodyLinearVelocity(const glm::vec3& velocity) const;
    void setBodyAngularVelocity(const glm::vec3& velocity) const;
@@ -105,6 +105,10 @@ void PhysicsEngine::addObjectToDynamicsWorld(ObjectMotionState* motionState) {
         }
         case MOTION_TYPE_DYNAMIC: {
             mass = motionState->getMass();
+            if (mass != mass || mass < 1.0f) {
+                qCDebug(physics) << "mass is too low, setting to 1.0 Kg --" << mass;
+                mass = 1.0f;
+            }
             btCollisionShape* shape = const_cast<btCollisionShape*>(motionState->getShape());
             assert(shape);
             shape->calculateLocalInertia(mass, inertia);
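The guard added above packs two checks into one condition: "mass != mass" is the classic IEEE-754 NaN test (NaN is the only value that compares unequal to itself), and anything below 1 kg is clamped up before Bullet computes inertia. A runnable sketch of the same sanitization, separate from the engine:

    #include <cmath>
    #include <iostream>

    // Clamp a rigid-body mass the way the hunk above does.
    float sanitizeMass(float mass) {
        if (mass != mass || mass < 1.0f) { // NaN, or lighter than 1 kg
            return 1.0f;
        }
        return mass;
    }

    int main() {
        std::cout << sanitizeMass(std::nanf("")) << "\n"; // 1
        std::cout << sanitizeMass(0.2f) << "\n";          // 1
        std::cout << sanitizeMass(70.0f) << "\n";         // 70
        return 0;
    }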
libraries/shared/src/ApplicationVersion.cpp (new file)
@@ -0,0 +1,94 @@
//
//  ApplicationVersion.cpp
//  libraries/shared/src
//
//  Created by Stephen Birarda on 6/8/18.
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "ApplicationVersion.h"

#include <cassert>

#include <QtCore/QDebug>
#include <QtCore/QRegExp>
#include <QtCore/QStringList>

ApplicationVersion::ApplicationVersion(const QString& versionString) :
    versionString(versionString)
{
    // attempt to regex out a semantic version from the string,
    // handling both x.y.z and x.y formats
    QRegExp semanticRegex("([\\d]+)\\.([\\d]+)(?:\\.([\\d]+))?");

    int pos = semanticRegex.indexIn(versionString);
    if (pos != -1) {
        isSemantic = true;
        auto captures = semanticRegex.capturedTexts();

        major = captures[1].toInt();
        minor = captures[2].toInt();

        if (captures.length() > 3) {
            patch = captures[3].toInt();
        } else {
            // the patch is implicitly 0 if it was not included
            patch = 0;
        }
    } else {
        // if we didn't have a semantic style, we assume that we just have a build number
        build = versionString.toInt();
    }
}

bool ApplicationVersion::operator==(const ApplicationVersion& other) const {
    if (isSemantic && other.isSemantic) {
        return major == other.major && minor == other.minor && patch == other.patch;
    } else if (!isSemantic && !other.isSemantic) {
        return build == other.build;
    } else {
        assert(isSemantic == other.isSemantic);
        return false;
    }
}

bool ApplicationVersion::operator<(const ApplicationVersion& other) const {
    if (isSemantic && other.isSemantic) {
        if (major == other.major) {
            if (minor == other.minor) {
                return patch < other.patch;
            } else {
                return minor < other.minor;
            }
        } else {
            return major < other.major;
        }
    } else if (!isSemantic && !other.isSemantic) {
        return build < other.build;
    } else {
        assert(isSemantic == other.isSemantic);
        return false;
    }
}

bool ApplicationVersion::operator>(const ApplicationVersion& other) const {
    if (isSemantic && other.isSemantic) {
        if (major == other.major) {
            if (minor == other.minor) {
                return patch > other.patch;
            } else {
                return minor > other.minor;
            }
        } else {
            return major > other.major;
        }
    } else if (!isSemantic && !other.isSemantic) {
        return build > other.build;
    } else {
        assert(isSemantic == other.isSemantic);
        return false;
    }
}
libraries/shared/src/ApplicationVersion.h (new file)
@@ -0,0 +1,41 @@
//
//  ApplicationVersion.h
//  libraries/shared/src
//
//  Created by Stephen Birarda on 6/8/18.
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_ApplicationVersion_h
#define hifi_ApplicationVersion_h

#include <QtCore/QString>

class ApplicationVersion {
public:
    ApplicationVersion(const QString& versionString);

    bool operator==(const ApplicationVersion& other) const;
    bool operator!=(const ApplicationVersion& other) const { return !(*this == other); }

    bool operator <(const ApplicationVersion& other) const;
    bool operator >(const ApplicationVersion& other) const;

    bool operator >=(const ApplicationVersion& other) const { return (*this == other) || (*this > other); }
    bool operator <=(const ApplicationVersion& other) const { return (*this == other) || (*this < other); }

    int major = -1;
    int minor = -1;
    int patch = -1;

    int build = -1;

    bool isSemantic { false };

    QString versionString;
};

#endif // hifi_ApplicationVersion_h
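A quick usage sketch of the new class (behavior follows the definitions above; this snippet is not part of the commit):

    #include <iostream>
    #include "ApplicationVersion.h"

    int main() {
        ApplicationVersion stable("1.2");        // parsed as 1.2.0
        ApplicationVersion newer("1.2.3");
        ApplicationVersion nightly("8291");      // no dots: plain build number

        std::cout << (stable < newer) << "\n";   // 1 -- patch 0 < patch 3
        std::cout << (stable == newer) << "\n";  // 0
        // Mixing a semantic version with a build number trips the assert in
        // debug builds and compares as neither equal nor ordered in release:
        std::cout << (stable == nightly) << "\n"; // 0
        return 0;
    }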
libraries/shared/src/OwningBuffer.h (new file)
@@ -0,0 +1,29 @@
//
//  OwningBuffer.h
//  shared/src
//
//  Created by Ryan Huffman on 5/31/2018.
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_OwningBuffer_h
#define hifi_OwningBuffer_h

#include <QBuffer>

class OwningBuffer : public QBuffer {
public:
    OwningBuffer(const QByteArray& content) : _content(content) {
        setData(_content);
    }
    OwningBuffer(QByteArray&& content) : _content(std::move(content)) {
        setData(_content);
    }

private:
    QByteArray _content;
};

#endif // hifi_OwningBuffer_h
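Why a QBuffer subclass at all: a plain QBuffer only references the QByteArray it is handed, while OwningBuffer moves the bytes into a member so the device stays valid after the caller's array goes away, which is exactly what the ImageReader::read() hunk relies on when it gives up _content. A minimal usage sketch (not from the commit):

    #include <QtCore/QByteArray>
    #include <QtCore/QDebug>
    #include <memory>
    #include "OwningBuffer.h"

    std::shared_ptr<QIODevice> makeDevice() {
        QByteArray content("pixel data...");
        // The buffer takes ownership; "content" is typically left empty here,
        // just like _content in ImageReader::read().
        auto device = std::make_shared<OwningBuffer>(std::move(content));
        device->open(QIODevice::ReadOnly);
        return device; // safe: the bytes live inside the OwningBuffer
    }

    int main() {
        auto device = makeDevice();
        qDebug() << device->readAll(); // "pixel data..."
        return 0;
    }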
@@ -297,14 +297,23 @@ void setAtBit(unsigned char& byte, int bitIndex) {
     byte |= (1 << (7 - bitIndex));
 }

+bool oneAtBit16(unsigned short word, int bitIndex) {
+    return (word >> (15 - bitIndex) & 1);
+}
+
+void setAtBit16(unsigned short& word, int bitIndex) {
+    word |= (1 << (15 - bitIndex));
+}
+
 void clearAtBit(unsigned char& byte, int bitIndex) {
     if (oneAtBit(byte, bitIndex)) {
         byte -= (1 << (7 - bitIndex));
     }
 }

-int getSemiNibbleAt(unsigned char byte, int bitIndex) {
-    return (byte >> (6 - bitIndex) & 3); // semi-nibbles store 00, 01, 10, or 11
+int getSemiNibbleAt(unsigned short word, int bitIndex) {
+    return (word >> (14 - bitIndex) & 3); // semi-nibbles store 00, 01, 10, or 11
 }

 int getNthBit(unsigned char byte, int ordinal) {
@@ -326,9 +335,9 @@ int getNthBit(unsigned char byte, int ordinal) {
     return ERROR_RESULT;
 }

-void setSemiNibbleAt(unsigned char& byte, int bitIndex, int value) {
+void setSemiNibbleAt(unsigned short& word, int bitIndex, int value) {
     //assert(value <= 3 && value >= 0);
-    byte |= ((value & 3) << (6 - bitIndex)); // semi-nibbles store 00, 01, 10, or 11
+    word |= ((value & 3) << (14 - bitIndex)); // semi-nibbles store 00, 01, 10, or 11
 }

 bool isInEnvironment(const char* environment) {
@@ -163,9 +163,11 @@ void printVoxelCode(unsigned char* voxelCode);
 int numberOfOnes(unsigned char byte);
 bool oneAtBit(unsigned char byte, int bitIndex);
 void setAtBit(unsigned char& byte, int bitIndex);
+bool oneAtBit16(unsigned short word, int bitIndex);
+void setAtBit16(unsigned short& word, int bitIndex);
 void clearAtBit(unsigned char& byte, int bitIndex);
-int getSemiNibbleAt(unsigned char byte, int bitIndex);
-void setSemiNibbleAt(unsigned char& byte, int bitIndex, int value);
+int getSemiNibbleAt(unsigned short word, int bitIndex);
+void setSemiNibbleAt(unsigned short& word, int bitIndex, int value);

 int getNthBit(unsigned char byte, int ordinal); /// determines the bit placement 0-7 of the ordinal set bit
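The semi-nibble helpers now pack two-bit values into a 16-bit word (eight slots instead of four), still indexed from the most significant end. A standalone sketch of the widened helpers, copied from the definitions above:

    #include <iostream>

    // Two-bit "semi-nibbles" packed MSB-first into a 16-bit word.
    void setSemiNibbleAt(unsigned short& word, int bitIndex, int value) {
        word |= ((value & 3) << (14 - bitIndex));
    }

    int getSemiNibbleAt(unsigned short word, int bitIndex) {
        return (word >> (14 - bitIndex)) & 3;
    }

    int main() {
        unsigned short word = 0;
        // Eight two-bit slots live at bit indices 0, 2, 4, ..., 14.
        setSemiNibbleAt(word, 0, 3);    // slot 0 (most significant)
        setSemiNibbleAt(word, 14, 2);   // slot 7 (least significant)
        std::cout << getSemiNibbleAt(word, 0) << " "    // 3
                  << getSemiNibbleAt(word, 14) << "\n"; // 2
        return 0;
    }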
scripts/developer/facialExpressions.js (new file)
@@ -0,0 +1,374 @@
//
//  facialExpressions.js
//  A script to set different emotions using blend shapes
//
//  Author: Elisa Lupin-Jimenez
//  Copyright High Fidelity 2018
//
//  Licensed under the Apache 2.0 License
//  See accompanying license file or http://apache.org/
//
//  All assets are under CC Attribution Non-Commercial
//  http://creativecommons.org/licenses/
//

(function() {

    var TABLET_BUTTON_NAME = "EMOTIONS";
    // TODO: ADD HTML LANDING PAGE

    var TRANSITION_TIME_SECONDS = 0.25;

    var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
    var icon = "https://hifi-content.s3.amazonaws.com/elisalj/emoji_scripts/icons/emoji-i.svg";
    var activeIcon = "https://hifi-content.s3.amazonaws.com/elisalj/emoji_scripts/icons/emoji-a.svg";
    var isActive = true;

    var controllerMappingName;
    var controllerMapping;

    var tabletButton = tablet.addButton({
        icon: icon,
        activeIcon: activeIcon,
        text: TABLET_BUTTON_NAME,
        isActive: true
    });

    var toggle = function() {
        isActive = !isActive;
        tabletButton.editProperties({isActive: isActive});
        if (isActive) {
            Controller.enableMapping(controllerMappingName);
        } else {
            setEmotion(DEFAULT);
            Controller.disableMapping(controllerMappingName);
        }
    };

    tabletButton.clicked.connect(toggle);

    var DEFAULT = {
        "EyeOpen_L": 0.00,
        "EyeOpen_R": 0.00,
        "EyeBlink_L": 0.00,
        "EyeBlink_R": 0.00,
        "EyeSquint_L": 0.00,
        "EyeSquint_R": 0.00,
        "BrowsD_L": 0.00,
        "BrowsD_R": 0.00,
        "BrowsU_L": 0.00,
        "BrowsU_C": 0.00,
        "JawOpen": 0.00,
        "JawFwd": 0.00,
        "MouthFrown_L": 0.00,
        "MouthFrown_R": 0.00,
        "MouthSmile_L": 0.00,
        "MouthSmile_R": 0.00,
        "MouthDimple_L": 0.00,
        "MouthDimple_R": 0.00,
        "LipsUpperClose": 0.00,
        "LipsLowerClose": 0.00,
        "LipsLowerOpen": 0.00,
        "ChinUpperRaise": 0.00,
        "Sneer": 0.00,
        "Puff": 0.00
    };

    var SMILE = {
        "EyeOpen_L": 0.00,
        "EyeOpen_R": 0.00,
        "EyeBlink_L": 0.30,
        "EyeBlink_R": 0.30,
        "EyeSquint_L": 0.90,
        "EyeSquint_R": 0.90,
        "BrowsD_L": 1.00,
        "BrowsD_R": 1.00,
        "BrowsU_L": 0.00,
        "BrowsU_C": 0.00,
        "JawOpen": 0.00,
        "JawFwd": 0.00,
        "MouthFrown_L": 0.00,
        "MouthFrown_R": 0.00,
        "MouthSmile_L": 1.00,
        "MouthSmile_R": 1.00,
        "MouthDimple_L": 1.00,
        "MouthDimple_R": 1.00,
        "LipsUpperClose": 0.40,
        "LipsLowerClose": 0.30,
        "LipsLowerOpen": 0.25,
        "ChinUpperRaise": 0.35,
        "Sneer": 0.00,
        "Puff": 0.00
    };

    var LAUGH = {
        "EyeOpen_L": 0.00,
        "EyeOpen_R": 0.00,
        "EyeBlink_L": 0.45,
        "EyeBlink_R": 0.45,
        "EyeSquint_L": 0.75,
        "EyeSquint_R": 0.75,
        "BrowsD_L": 0.00,
        "BrowsD_R": 0.00,
        "BrowsU_L": 0.00,
        "BrowsU_C": 0.50,
        "JawOpen": 0.50,
        "JawFwd": 0.00,
        "MouthFrown_L": 0.00,
        "MouthFrown_R": 0.00,
        "MouthSmile_L": 1.00,
        "MouthSmile_R": 1.00,
        "MouthDimple_L": 1.00,
        "MouthDimple_R": 1.00,
        "LipsUpperClose": 0.00,
        "LipsLowerClose": 0.00,
        "LipsLowerOpen": 0.00,
        "ChinUpperRaise": 0.30,
        "Sneer": 1.00,
        "Puff": 0.30
    };

    var FLIRT = {
        "EyeOpen_L": 0.00,
        "EyeOpen_R": 0.00,
        "EyeBlink_L": 0.50,
        "EyeBlink_R": 0.50,
        "EyeSquint_L": 0.25,
        "EyeSquint_R": 0.25,
        "BrowsD_L": 0.00,
        "BrowsD_R": 1.00,
        "BrowsU_L": 0.55,
        "BrowsU_C": 0.00,
        "JawOpen": 0.00,
        "JawFwd": 0.00,
        "MouthFrown_L": 0.00,
        "MouthFrown_R": 0.00,
        "MouthSmile_L": 0.50,
        "MouthSmile_R": 0.00,
        "MouthDimple_L": 1.00,
        "MouthDimple_R": 1.00,
        "LipsUpperClose": 0.00,
        "LipsLowerClose": 0.00,
        "LipsLowerOpen": 0.00,
        "ChinUpperRaise": 0.00,
        "Sneer": 0.00,
        "Puff": 0.00
    };

    var SAD = {
        "EyeOpen_L": 0.00,
        "EyeOpen_R": 0.00,
        "EyeBlink_L": 0.30,
        "EyeBlink_R": 0.30,
        "EyeSquint_L": 0.30,
        "EyeSquint_R": 0.30,
        "BrowsD_L": 0.00,
        "BrowsD_R": 0.00,
        "BrowsU_L": 0.00,
        "BrowsU_C": 0.50,
        "JawOpen": 0.00,
        "JawFwd": 0.80,
        "MouthFrown_L": 0.80,
        "MouthFrown_R": 0.80,
        "MouthSmile_L": 0.00,
        "MouthSmile_R": 0.00,
        "MouthDimple_L": 0.00,
        "MouthDimple_R": 0.00,
        "LipsUpperClose": 0.00,
        "LipsLowerClose": 0.50,
        "LipsLowerOpen": 0.00,
        "ChinUpperRaise": 0.00,
        "Sneer": 0.00,
        "Puff": 0.00
    };

    var ANGRY = {
        "EyeOpen_L": 1.00,
        "EyeOpen_R": 1.00,
        "EyeBlink_L": 0.00,
        "EyeBlink_R": 0.00,
        "EyeSquint_L": 1.00,
        "EyeSquint_R": 1.00,
        "BrowsD_L": 1.00,
        "BrowsD_R": 1.00,
        "BrowsU_L": 0.00,
        "BrowsU_C": 0.00,
        "JawOpen": 0.00,
        "JawFwd": 0.00,
        "MouthFrown_L": 0.50,
        "MouthFrown_R": 0.50,
        "MouthSmile_L": 0.00,
        "MouthSmile_R": 0.00,
        "MouthDimple_L": 0.00,
        "MouthDimple_R": 0.00,
        "LipsUpperClose": 0.50,
        "LipsLowerClose": 0.50,
        "LipsLowerOpen": 0.00,
        "ChinUpperRaise": 0.00,
        "Sneer": 0.50,
        "Puff": 0.00
    };

    var FEAR = {
        "EyeOpen_L": 1.00,
        "EyeOpen_R": 1.00,
        "EyeBlink_L": 0.00,
        "EyeBlink_R": 0.00,
        "EyeSquint_L": 0.00,
        "EyeSquint_R": 0.00,
        "BrowsD_L": 0.00,
        "BrowsD_R": 0.00,
        "BrowsU_L": 0.00,
        "BrowsU_C": 1.00,
        "JawOpen": 0.15,
        "JawFwd": 0.00,
        "MouthFrown_L": 0.30,
        "MouthFrown_R": 0.30,
        "MouthSmile_L": 0.00,
        "MouthSmile_R": 0.00,
        "MouthDimple_L": 0.00,
        "MouthDimple_R": 0.00,
        "LipsUpperClose": 0.00,
        "LipsLowerClose": 0.00,
        "LipsLowerOpen": 0.00,
        "ChinUpperRaise": 0.00,
        "Sneer": 0.00,
        "Puff": 0.00
    };

    var DISGUST = {
        "EyeOpen_L": 0.00,
        "EyeOpen_R": 0.00,
        "EyeBlink_L": 0.25,
        "EyeBlink_R": 0.25,
        "EyeSquint_L": 1.00,
        "EyeSquint_R": 1.00,
        "BrowsD_L": 1.00,
        "BrowsD_R": 1.00,
        "BrowsU_L": 0.00,
        "BrowsU_C": 0.00,
        "JawOpen": 0.00,
        "JawFwd": 0.00,
        "MouthFrown_L": 1.00,
        "MouthFrown_R": 1.00,
        "MouthSmile_L": 0.00,
        "MouthSmile_R": 0.00,
        "MouthDimple_L": 0.00,
        "MouthDimple_R": 0.00,
        "LipsUpperClose": 0.00,
        "LipsLowerClose": 0.75,
        "LipsLowerOpen": 0.00,
        "ChinUpperRaise": 0.75,
        "Sneer": 1.00,
        "Puff": 0.00
    };


    function mixValue(valueA, valueB, percentage) {
        return valueA + ((valueB - valueA) * percentage);
    }

    var lastEmotionUsed = DEFAULT;
    var emotion = DEFAULT;
    var isChangingEmotion = false;
    var changingEmotionPercentage = 0.0;

    Script.update.connect(function(deltaTime) {
        if (!isChangingEmotion) {
            return;
        }
        changingEmotionPercentage += deltaTime / TRANSITION_TIME_SECONDS;
        if (changingEmotionPercentage >= 1.0) {
            changingEmotionPercentage = 1.0;
            isChangingEmotion = false;
            if (emotion === DEFAULT) {
                MyAvatar.hasScriptedBlendshapes = false;
            }
        }
        for (var blendshape in emotion) {
            MyAvatar.setBlendshape(blendshape,
                mixValue(lastEmotionUsed[blendshape], emotion[blendshape], changingEmotionPercentage));
        }
    });

    function setEmotion(currentEmotion) {
        if (emotion !== lastEmotionUsed) {
            lastEmotionUsed = emotion;
        }
        if (currentEmotion !== lastEmotionUsed) {
            changingEmotionPercentage = 0.0;
            emotion = currentEmotion;
            isChangingEmotion = true;
            MyAvatar.hasScriptedBlendshapes = true;
        }
    }


    controllerMappingName = 'Hifi-FacialExpressions-Mapping';
    controllerMapping = Controller.newMapping(controllerMappingName);

    controllerMapping.from(Controller.Hardware.Keyboard.H).to(function(value) {
        if (value !== 0) {
            setEmotion(SMILE);
        }
    });

    controllerMapping.from(Controller.Hardware.Keyboard.J).to(function(value) {
        if (value !== 0) {
            setEmotion(LAUGH);
        }
    });

    controllerMapping.from(Controller.Hardware.Keyboard.K).to(function(value) {
        if (value !== 0) {
            setEmotion(FLIRT);
        }
    });

    controllerMapping.from(Controller.Hardware.Keyboard.L).to(function(value) {
        if (value !== 0) {
            setEmotion(SAD);
        }
    });

    controllerMapping.from(Controller.Hardware.Keyboard.V).to(function(value) {
        if (value !== 0) {
            setEmotion(ANGRY);
        }
    });

    controllerMapping.from(Controller.Hardware.Keyboard.B).to(function(value) {
        if (value !== 0) {
            setEmotion(FEAR);
        }
    });

    controllerMapping.from(Controller.Hardware.Keyboard.M).to(function(value) {
        if (value !== 0) {
            setEmotion(DISGUST);
        }
    });

    controllerMapping.from(Controller.Hardware.Keyboard.N).to(function(value) {
        if (value !== 0) {
            setEmotion(DEFAULT);
        }
    });

    Controller.enableMapping(controllerMappingName);

    Script.scriptEnding.connect(function() {
        tabletButton.clicked.disconnect(toggle);
        tablet.removeButton(tabletButton);
        Controller.disableMapping(controllerMappingName);

        if (emotion !== DEFAULT || isChangingEmotion) {
            isChangingEmotion = false;
            for (var blendshape in DEFAULT) {
                MyAvatar.setBlendshape(blendshape, DEFAULT[blendshape]);
            }
            MyAvatar.hasScriptedBlendshapes = false;
        }
    });

}());
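The transition logic in this script is a straight linear interpolation driven by frame time: each update advances a 0-to-1 percentage and mixes the previous preset toward the target. A compact C++ sketch of the same easing loop (names and values are illustrative, not the script's):

    #include <algorithm>
    #include <iostream>
    #include <map>
    #include <string>

    // Linear mix, identical to the script's mixValue().
    float mixValue(float a, float b, float t) { return a + (b - a) * t; }

    int main() {
        const float TRANSITION_TIME_SECONDS = 0.25f;
        std::map<std::string, float> last = { {"MouthSmile_L", 0.0f} };   // previous preset
        std::map<std::string, float> target = { {"MouthSmile_L", 1.0f} }; // e.g. SMILE

        float percentage = 0.0f;
        const float deltaTime = 1.0f / 60.0f; // one 60 Hz frame

        // Same stepping the script performs in its Script.update handler.
        while (percentage < 1.0f) {
            percentage = std::min(1.0f, percentage + deltaTime / TRANSITION_TIME_SECONDS);
            for (const auto& entry : target) {
                float mixed = mixValue(last[entry.first], entry.second, percentage);
                std::cout << entry.first << " = " << mixed << "\n";
            }
        }
        return 0;
    }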
@@ -60,7 +60,14 @@ function getBuildInfo() {
         }
     }

-    const DEFAULT_BUILD_INFO = { releaseType: "", buildIdentifier: "dev" };
+    const DEFAULT_BUILD_INFO = {
+        releaseType: "",
+        buildIdentifier: "dev",
+        buildNumber: "0",
+        stableBuild: "0",
+        organization: "High Fidelity - dev"
+    };
+
     var buildInfo = DEFAULT_BUILD_INFO;

     if (buildInfoPath) {
@@ -768,33 +775,25 @@ function onContentLoaded() {
     // maybeShowSplash();

     if (buildInfo.releaseType == 'PRODUCTION' && !argv.noUpdater) {
-        var currentVersion = null;
-        try {
-            currentVersion = parseInt(buildInfo.buildIdentifier);
-        } catch (e) {
-        }
-
-        if (currentVersion !== null) {
-            const CHECK_FOR_UPDATES_INTERVAL_SECONDS = 60 * 30;
-            var hasShownUpdateNotification = false;
-            const updateChecker = new updater.UpdateChecker(currentVersion, CHECK_FOR_UPDATES_INTERVAL_SECONDS);
-            updateChecker.on('update-available', function(latestVersion, url) {
-                if (!hasShownUpdateNotification) {
-                    notifier.notify({
-                        icon: notificationIcon,
-                        title: 'An update is available!',
-                        message: 'High Fidelity version ' + latestVersion + ' is available',
-                        wait: true,
-                        url: url
-                    });
-                    hasShownUpdateNotification = true;
-                }
-            });
-            notifier.on('click', function(notifierObject, options) {
-                log.debug("Got click", options.url);
-                shell.openExternal(options.url);
-            });
-        }
+        const CHECK_FOR_UPDATES_INTERVAL_SECONDS = 60 * 30;
+        var hasShownUpdateNotification = false;
+        const updateChecker = new updater.UpdateChecker(buildInfo, CHECK_FOR_UPDATES_INTERVAL_SECONDS);
+        updateChecker.on('update-available', function(latestVersion, url) {
+            if (!hasShownUpdateNotification) {
+                notifier.notify({
+                    icon: notificationIcon,
+                    title: 'An update is available!',
+                    message: 'High Fidelity version ' + latestVersion + ' is available',
+                    wait: true,
+                    url: url
+                });
+                hasShownUpdateNotification = true;
+            }
+        });
+        notifier.on('click', function(notifierObject, options) {
+            log.debug("Got click", options.url);
+            shell.openExternal(options.url);
+        });
     }

     deleteOldFiles(logPath, DELETE_LOG_FILES_OLDER_THAN_X_SECONDS, LOG_FILE_REGEX);
@@ -8,10 +8,48 @@ const os = require('os');
 const platform = os.type() == 'Windows_NT' ? 'windows' : 'mac';

 const BUILDS_URL = 'https://highfidelity.com/builds.xml';
+const DEV_BUILDS_URL = 'https://highfidelity.com/dev-builds.xml';
+
+// returns 1 if A is greater, 0 if equal, -1 if A is lesser
+function semanticVersionCompare(versionA, versionB) {
+    var versionAParts = versionA.split('.');
+    var versionBParts = versionB.split('.');
+
+    // make sure each version has 3 parts
+    var partsLength = versionAParts.length;
+    while (partsLength < 3) {
+        partsLength = versionAParts.push(0);
+    }
+
+    partsLength = versionBParts.length;
+    while (partsLength < 3) {
+        partsLength = versionBParts.push(0);
+    }
+
+    // map all of the parts to numbers
+    versionAParts = versionAParts.map(Number);
+    versionBParts = versionBParts.map(Number);
+
+    for (var i = 0; i < 3; ++i) {
+        if (versionAParts[i] == versionBParts[i]) {
+            continue;
+        } else if (versionAParts[i] > versionBParts[i]) {
+            return 1;
+        } else {
+            return -1;
+        }
+    }
+
+    return 0;
+}

-function UpdateChecker(currentVersion, checkForUpdatesEveryXSeconds) {
-    this.currentVersion = currentVersion;
-    log.debug('cur', currentVersion);
+function UpdateChecker(buildInfo, checkForUpdatesEveryXSeconds) {
+    this.stableBuild = (buildInfo.stableBuild == "1");
+
+    this.buildsURL = this.stableBuild ? BUILDS_URL : DEV_BUILDS_URL;
+    this.currentVersion = this.stableBuild ? buildInfo.buildIdentifier : parseInt(buildInfo.buildNumber);
+
+    log.debug('Current version is', this.currentVersion);
+
     setInterval(this.checkForUpdates.bind(this), checkForUpdatesEveryXSeconds * 1000);
     this.checkForUpdates();
@@ -20,7 +58,7 @@ util.inherits(UpdateChecker, events.EventEmitter);
 UpdateChecker.prototype = extend(UpdateChecker.prototype, {
     checkForUpdates: function() {
         log.debug("Checking for updates");
-        request(BUILDS_URL, (error, response, body) => {
+        request(this.buildsURL, (error, response, body) => {
             if (error) {
                 log.debug("Error", error);
                 return;
@@ -29,12 +67,32 @@ UpdateChecker.prototype = extend(UpdateChecker.prototype, {
             try {
                 var $ = cheerio.load(body, { xmlMode: true });
                 const latestBuild = $('project[name="interface"] platform[name="' + platform + '"]').children().first();
-                const latestVersion = parseInt(latestBuild.find('version').text());
-                log.debug("Latest version is:", latestVersion, this.currentVersion);
-                if (latestVersion > this.currentVersion) {
+
+                var latestVersion = 0;
+
+                if (this.stableBuild) {
+                    latestVersion = latestBuild.find('stable_version').text();
+                } else {
+                    latestVersion = parseInt(latestBuild.find('version').text());
+                }
+
+                log.debug("Latest available update version is:", latestVersion);
+
+                updateAvailable = false;
+
+                if (this.stableBuild) {
+                    // compare the semantic versions to see if the update is newer
+                    updateAvailable = (semanticVersionCompare(latestVersion, this.currentVersion) == 1);
+                } else {
+                    // for master builds we just compare the versions as integers
+                    updateAvailable = latestVersion > this.currentVersion;
+                }
+
+                if (updateAvailable) {
                     const url = latestBuild.find('url').text();
                     this.emit('update-available', latestVersion, url);
                 }
             } catch (e) {
                 log.warn("Error when checking for updates", e);
             }
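The compare pads the shorter version to three numeric parts before walking them left to right, so "1.2" is treated as 1.2.0 and loses to "1.2.1". A C++ mirror of the same arithmetic (a sketch following the launcher helper above, not a drop-in replacement for it):

    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    // Returns 1 if a > b, 0 if equal, -1 if a < b, padding each version
    // to three numeric parts like the launcher's semanticVersionCompare.
    int semanticVersionCompare(const std::string& a, const std::string& b) {
        auto parse = [](const std::string& v) {
            std::vector<int> parts;
            std::stringstream stream(v);
            std::string piece;
            while (std::getline(stream, piece, '.')) {
                parts.push_back(std::stoi(piece));
            }
            parts.resize(3, 0); // missing parts are implicitly 0
            return parts;
        };
        auto pa = parse(a), pb = parse(b);
        for (int i = 0; i < 3; ++i) {
            if (pa[i] != pb[i]) { return pa[i] > pb[i] ? 1 : -1; }
        }
        return 0;
    }

    int main() {
        std::cout << semanticVersionCompare("1.2", "1.2.1") << "\n"; // -1: 1.2 pads to 1.2.0
        std::cout << semanticVersionCompare("2.0.0", "2.0") << "\n"; // 0
        return 0;
    }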
tools/dissectors/hf-domain.lua (new file)
@@ -0,0 +1,23 @@
-- create the domain protocol
p_hf_domain = Proto("hf-domain", "HF Domain Protocol")

-- domain packet fields
local f_domain_id = ProtoField.guid("hf_domain.domain_id", "Domain ID")
local f_domain_local_id = ProtoField.uint16("hf_domain.domain_local_id", "Domain Local ID")

p_hf_domain.fields = {
    f_domain_id, f_domain_local_id
}

function p_hf_domain.dissector(buf, pinfo, tree)
    pinfo.cols.protocol = p_hf_domain.name

    domain_subtree = tree:add(p_hf_domain, buf())

    local i = 0

    domain_subtree:add(f_domain_id, buf(i, 16))
    i = i + 16

    domain_subtree:add_le(f_domain_local_id, buf(i, 2))
end
@@ -4,11 +4,21 @@ p_hf_entity = Proto("hf-entity", "HF Entity Protocol")
 -- entity packet fields
 local f_entity_sequence_number = ProtoField.uint16("hf_entity.sequence_number", "Sequence Number")
 local f_entity_timestamp = ProtoField.uint64("hf_entity.timestamp", "Timestamp")
-local f_octal_code_bytes = ProtoField.uint8("hf_entity.octal_code_bytes", "Octal Code Bytes")
+local f_octal_code_three_bit_sections = ProtoField.uint8("hf_entity.octal_code_three_bit_sections", "Octal Code Three Bit Sections")
+local f_octal_code = ProtoField.bytes("hf_entity.octal_code", "Octal Code")
 local f_entity_id = ProtoField.guid("hf_entity.entity_id", "Entity ID")
+local f_last_edited = ProtoField.uint64("hf_entity.last_edited", "Last Edited")
+local f_coded_property_type = ProtoField.bytes("hf_entity.coded_property_type", "Coded Property Type")
+local f_property_type = ProtoField.uint32("hf_entity.property_type", "Property Type")
+local f_coded_update_delta = ProtoField.bytes("hf_entity.f_coded_update_delta", "Coded Update Delta")
+local f_update_delta = ProtoField.uint32("hf_entity.update_delta", "Update Delta")

 p_hf_entity.fields = {
-    f_entity_sequence_number, f_entity_timestamp, f_octal_code_bytes, f_entity_id
+    f_entity_sequence_number, f_entity_timestamp,
+    f_octal_code_three_bit_sections, f_octal_code,
+    f_last_edited, f_entity_id,
+    f_coded_property_type, f_property_type,
+    f_coded_update_delta, f_update_delta
 }

 function p_hf_entity.dissector(buf, pinfo, tree)
@@ -16,21 +26,72 @@ function p_hf_entity.dissector(buf, pinfo, tree)

     entity_subtree = tree:add(p_hf_entity, buf())

-    i = 0
+    local i = 0

     entity_subtree:add_le(f_entity_sequence_number, buf(i, 2))
     i = i + 2

-    entity_subtree:add_le(f_entity_timestamp, buf(i, 4))
-    i = i + 4
+    entity_subtree:add_le(f_entity_timestamp, buf(i, 8))
+    i = i + 8

-    -- figure out the number of bytes the octal code takes
-    local octal_code_bytes = buf(i, 1):le_uint()
-    entity_subtree:add_le(f_octal_code_bytes, buf(i, 1))
+    -- figure out the number of three bit sections in the octal code
+    local octal_code_three_bit_sections = buf(i, 1):le_uint()
+    entity_subtree:add_le(f_octal_code_three_bit_sections, buf(i, 1))
+    i = i + 1

-    -- skip over the octal code
-    i = i + 1 + octal_code_bytes
+    -- read the bytes for the octal code
+    local octal_code_bytes = math.ceil((octal_code_three_bit_sections * 3) / 8)
+    entity_subtree:add_le(f_octal_code, buf(i, octal_code_bytes))
+    i = i + octal_code_bytes
+
+    -- read the last edited timestamp
+    entity_subtree:add_le(f_last_edited, buf(i, 8))
+    i = i + 8

     -- read the entity ID
     entity_subtree:add(f_entity_id, buf(i, 16))
+    i = i + 16
+
+    -- figure out the property type and the size of the coded value
+    local property_type, coded_property_bytes = number_of_coded_bytes(buf(i))
+    entity_subtree:add(f_coded_property_type, buf(i, coded_property_bytes))
+    entity_subtree:add(f_property_type, property_type)
+    i = i + coded_property_bytes
+
+    -- figure out the update delta and the size of the coded value
+    local update_delta, coded_update_delta_bytes = number_of_coded_bytes(buf(i))
+    entity_subtree:add(f_coded_update_delta, buf(i, coded_update_delta_bytes))
+    entity_subtree:add(f_update_delta, update_delta)
+    i = i + coded_update_delta_bytes
+end
+
+function number_of_coded_bytes(buf)
+    local coded_buffer = buf(0, 4):le_uint() -- max 64 bit value means max 10 header bits
+
+    -- first figure out the total number of bytes for the coded value based
+    -- on the bits in the header
+    local total_coded_bytes = 1
+
+    for bit = 0, 10, 1 do
+        local header_bit = bit32.extract(coded_buffer, bit)
+
+        if header_bit == 1 then
+            total_coded_bytes = total_coded_bytes + 1
+        else
+            break
+        end
+    end
+
+    -- pull out the bits and write them to our decoded value
+    local decoded_value = 0
+    local decoded_position = 0
+    local total_bits = total_coded_bytes * 8
+
+    for bit = total_coded_bytes, total_bits - 1, 1 do
+        local value_bit = bit32.extract(coded_buffer, total_bits - bit - 1)
+        decoded_value = bit32.replace(decoded_value, value_bit, decoded_position)
+        decoded_position = decoded_position + 1
+    end
+
+    return decoded_value, total_coded_bytes
 end
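In this variable-length encoding, a run of consecutive 1-bits acts as a unary count of how many bytes the coded value occupies, terminated by a 0; the dissector then reassembles the value by reading the buffer's bits in reverse order. A C++ mirror of the dissector's arithmetic, a sketch that follows the Lua above rather than the engine's ByteCountCoded template, and which (like the Lua) only inspects the first four bytes:

    #include <cstdint>
    #include <iostream>
    #include <utility>

    // Returns { decoded value, total bytes the coded value occupies }.
    std::pair<uint32_t, int> decodeCodedValue(uint32_t codedBuffer) {
        // Count the run of 1-bits starting at the LSB: each adds one byte.
        int totalCodedBytes = 1;
        for (int bit = 0; bit <= 10; ++bit) {
            if ((codedBuffer >> bit) & 1) {
                ++totalCodedBytes;
            } else {
                break;
            }
        }

        // Re-read the remaining bit positions back-to-front into the value,
        // exactly as the Lua's bit32.extract/bit32.replace loop does.
        uint32_t decodedValue = 0;
        int decodedPosition = 0;
        int totalBits = totalCodedBytes * 8;
        for (int bit = totalCodedBytes; bit < totalBits; ++bit) {
            uint32_t valueBit = (codedBuffer >> (totalBits - bit - 1)) & 1;
            decodedValue |= (valueBit << decodedPosition);
            ++decodedPosition;
        }
        return { decodedValue, totalCodedBytes };
    }

    int main() {
        // 0x34 = 0b00110100: header bit 0 is 0, so the value fits in one byte.
        auto result = decodeCodedValue(0x34);
        std::cout << result.first << " in " << result.second << " byte(s)\n"; // 22 in 1 byte(s)
        return 0;
    }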
@@ -118,6 +118,10 @@ local packet_types = {
     [54] = "AssetGetInfoReply"
 }

+local unsourced_packet_types = {
+    ["DomainList"] = true
+}
+
 function p_hfudt.dissector(buf, pinfo, tree)

     -- make sure this isn't a STUN packet - those don't follow HFUDT format
@@ -230,54 +234,63 @@ function p_hfudt.dissector(buf, pinfo, tree)

     -- if the message bit is set, handle the second word
     if message_bit == 1 then
         payload_offset = 12

         local second_word = buf(4, 4):le_uint()

         -- read message position from upper 2 bits
         local message_position = bit32.rshift(second_word, 30)
         local position = subtree:add(f_message_position, message_position)

         if message_positions[message_position] ~= nil then
             -- if we know this position then add the name
             position:append_text(" (".. message_positions[message_position] .. ")")
         end

         -- read message number from lower 30 bits
         subtree:add(f_message_number, bit32.band(second_word, 0x3FFFFFFF))

         -- read the message part number
         subtree:add(f_message_part_number, buf(8, 4):le_uint())
     end

     -- read the type
     local packet_type = buf(payload_offset, 1):le_uint()
     local ptype = subtree:add_le(f_type, buf(payload_offset, 1))
-    if packet_types[packet_type] ~= nil then
-        subtree:add(f_type_text, packet_types[packet_type])
+    local packet_type_text = packet_types[packet_type]
+    if packet_type_text ~= nil then
+        subtree:add(f_type_text, packet_type_text)
         -- if we know this packet type then add the name
-        ptype:append_text(" (".. packet_types[packet_type] .. ")")
+        ptype:append_text(" (".. packet_type_text .. ")")
     end

     -- read the version
     subtree:add_le(f_version, buf(payload_offset + 1, 1))

-    -- read node local ID
-    local sender_id = buf(payload_offset + 2, 2)
-    subtree:add_le(f_sender_id, sender_id)
-
-    local i = payload_offset + 4
-
-    -- read HMAC MD5 hash
-    subtree:add(f_hmac_hash, buf(i, 16))
-    i = i + 16
+    local i = payload_offset + 2
+
+    if unsourced_packet_types[packet_type_text] == nil then
+        -- read node local ID
+        local sender_id = buf(payload_offset + 2, 2)
+        subtree:add_le(f_sender_id, sender_id)
+        i = i + 2
+
+        -- read HMAC MD5 hash
+        subtree:add(f_hmac_hash, buf(i, 16))
+        i = i + 16
+    end
+
+    -- Domain packets
+    if packet_type_text == "DomainList" then
+        Dissector.get("hf-domain"):call(buf(i):tvb(), pinfo, tree)
+    end

     -- AvatarData or BulkAvatarDataPacket
-    if packet_types[packet_type] == "AvatarData" or packet_types[packet_type] == "BulkAvatarDataPacket" then
+    if packet_type_text == "AvatarData" or packet_type_text == "BulkAvatarData" then
         Dissector.get("hf-avatar"):call(buf(i):tvb(), pinfo, tree)
     end

-    if packet_types[packet_type] == "EntityEdit" then
+    if packet_type_text == "EntityEdit" then
         Dissector.get("hf-entity"):call(buf(i):tvb(), pinfo, tree)
     end
 end
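The offset bookkeeping the dissector performs is simple enough to state directly: type (1 byte) and version (1 byte) always follow the UDT header; sourced packets then carry a 2-byte local node ID and a 16-byte HMAC-MD5, while unsourced packets such as DomainList go straight to the payload. A tiny C++ sketch of that arithmetic (illustrative only):

    #include <iostream>

    // Where the packet payload starts, given the HFUDT header size and
    // whether the packet type is sourced, following the dissector above.
    int payloadStart(int payloadOffset, bool sourced) {
        int i = payloadOffset + 2; // skip type + version
        if (sourced) {
            i += 2;                // local node ID
            i += 16;               // HMAC-MD5 hash
        }
        return i;
    }

    int main() {
        std::cout << payloadStart(4, true) << "\n";  // 24: sourced, no message bit
        std::cout << payloadStart(4, false) << "\n"; // 6: unsourced (e.g. DomainList)
        return 0;
    }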
@@ -25,12 +25,6 @@ Oven* Oven::_staticInstance { nullptr };
 Oven::Oven() {
     _staticInstance = this;

-    // enable compression in image library
-    image::setColorTexturesCompressionEnabled(true);
-    image::setGrayscaleTexturesCompressionEnabled(true);
-    image::setNormalTexturesCompressionEnabled(true);
-    image::setCubeTexturesCompressionEnabled(true);
-
     // setup our worker threads
     setupWorkerThreads(QThread::idealThreadCount());
@@ -15,6 +15,7 @@
 #include <QtCore/QUrl>

 #include <image/Image.h>
+#include <TextureBaker.h>

 #include "BakerCLI.h"
@@ -47,10 +48,7 @@ OvenCLIApplication::OvenCLIApplication(int argc, char* argv[]) :

     if (parser.isSet(CLI_DISABLE_TEXTURE_COMPRESSION_PARAMETER)) {
         qDebug() << "Disabling texture compression";
-        image::setColorTexturesCompressionEnabled(false);
-        image::setGrayscaleTexturesCompressionEnabled(false);
-        image::setNormalTexturesCompressionEnabled(false);
-        image::setCubeTexturesCompressionEnabled(false);
+        TextureBaker::setCompressionEnabled(false);
     }

     QMetaObject::invokeMethod(cli, "bakeFile", Qt::QueuedConnection, Q_ARG(QUrl, inputUrl),