Merge remote-tracking branch 'upstream/master' into modelTextures

This commit is contained in:
SamGondelman 2018-06-13 11:53:31 -07:00
commit c4670322ab
47 changed files with 1271 additions and 486 deletions

View file

@@ -4,12 +4,15 @@ android {
compileSdkVersion 26
//buildToolsVersion '27.0.3'
def appVersionCode = Integer.valueOf(RELEASE_NUMBER ?: 1)
def appVersionName = RELEASE_NUMBER ?: "1.0"
defaultConfig {
applicationId "io.highfidelity.hifiinterface"
minSdkVersion 24
targetSdkVersion 26
versionCode 1
versionName "1.0"
versionCode appVersionCode
versionName appVersionName
ndk { abiFilters 'arm64-v8a' }
externalNativeBuild {
cmake {

View file

@@ -291,18 +291,6 @@ AssetServer::AssetServer(ReceivedMessage& message) :
_bakingTaskPool(this),
_filesizeLimit(AssetUtils::MAX_UPLOAD_SIZE)
{
// store the current state of image compression so we can reset it when this assignment is complete
_wasColorTextureCompressionEnabled = image::isColorTexturesCompressionEnabled();
_wasGrayscaleTextureCompressionEnabled = image::isGrayscaleTexturesCompressionEnabled();
_wasNormalTextureCompressionEnabled = image::isNormalTexturesCompressionEnabled();
_wasCubeTextureCompressionEnabled = image::isCubeTexturesCompressionEnabled();
// enable compression in image library
image::setColorTexturesCompressionEnabled(true);
image::setGrayscaleTexturesCompressionEnabled(true);
image::setNormalTexturesCompressionEnabled(true);
image::setCubeTexturesCompressionEnabled(true);
BAKEABLE_TEXTURE_EXTENSIONS = image::getSupportedFormats();
qDebug() << "Supported baking texture formats:" << BAKEABLE_MODEL_EXTENSIONS;
@@ -354,12 +342,6 @@ void AssetServer::aboutToFinish() {
while (_pendingBakes.size() > 0) {
QCoreApplication::processEvents();
}
// re-set defaults in image library
image::setColorTexturesCompressionEnabled(_wasCubeTextureCompressionEnabled);
image::setGrayscaleTexturesCompressionEnabled(_wasGrayscaleTextureCompressionEnabled);
image::setNormalTexturesCompressionEnabled(_wasNormalTextureCompressionEnabled);
image::setCubeTexturesCompressionEnabled(_wasCubeTextureCompressionEnabled);
}
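The removed lines above were the old mechanism: save the image library's four global compression toggles, force them on for the assignment, and restore them in aboutToFinish(). Compression is now a property of the bake itself. A minimal sketch of the replacement pattern, assuming the static TextureBaker::setCompressionEnabled hook added later in this diff:

    #include "TextureBaker.h"

    // One process-wide switch for bakes replaces four image-library toggles
    // that previously had to be saved and restored per assignment.
    void enableBakingCompression() {
        TextureBaker::setCompressionEnabled(true);
    }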
void AssetServer::run() {

View file

@@ -167,11 +167,6 @@ private:
using RequestQueue = QVector<QPair<QSharedPointer<ReceivedMessage>, SharedNodePointer>>;
RequestQueue _queuedRequests;
bool _wasColorTextureCompressionEnabled { false };
bool _wasGrayscaleTextureCompressionEnabled { false };
bool _wasNormalTextureCompressionEnabled { false };
bool _wasCubeTextureCompressionEnabled { false };
uint64_t _filesizeLimit;
};

View file

@@ -4,8 +4,8 @@ set(EXTERNAL_NAME serverless-content)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://cdn.highfidelity.com/content-sets/serverless-tutorial-RC68.zip
URL_MD5 a068f74d4045e257cfa7926fe6e38ad5
URL http://cdn.highfidelity.com/content-sets/serverless-tutorial-RC68-v2.zip
URL_MD5 f7d290471baf7f5694c147217b8fc548
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""

View file

@@ -95,7 +95,7 @@ macro(SET_PACKAGING_PARAMETERS)
endif ()
execute_process(
COMMAND git log -1 --format=${_GIT_LOG_FORMAT}
COMMAND git log -1 --abbrev=7 --format=${_GIT_LOG_FORMAT}
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE _GIT_LOG_OUTPUT
ERROR_VARIABLE _GIT_LOG_ERROR

View file

@@ -1,5 +1,7 @@
{
"releaseType": "@RELEASE_TYPE@",
"buildNumber": "@BUILD_NUMBER@",
"stableBuild": "@STABLE_BUILD@",
"buildIdentifier": "@BUILD_VERSION@",
"organization": "@BUILD_ORGANIZATION@"
"organization": "@BUILD_ORGANIZATION@"
}

View file

@@ -905,7 +905,6 @@ bool setupEssentials(int& argc, char** argv, bool runningMarkerExisted) {
DependencyManager::set<DiscoverabilityManager>();
DependencyManager::set<SceneScriptingInterface>();
DependencyManager::set<OffscreenUi>();
DependencyManager::set<AutoUpdater>();
DependencyManager::set<Midi>();
DependencyManager::set<PathUtils>();
DependencyManager::set<InterfaceDynamicFactory>();
@@ -1439,17 +1438,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// add firstRun flag from settings to launch event
Setting::Handle<bool> firstRun { Settings::firstRun, true };
// once the settings have been loaded, check if we need to flip the default for UserActivityLogger
auto& userActivityLogger = UserActivityLogger::getInstance();
if (!userActivityLogger.isDisabledSettingSet()) {
// the user activity logger is opt-out for Interface
// but it's defaulted to disabled for other targets
// so we need to enable it here if it has never been disabled by the user
userActivityLogger.disable(false);
}
QString machineFingerPrint = uuidStringWithoutCurlyBraces(FingerprintUtils::getMachineFingerprint());
auto& userActivityLogger = UserActivityLogger::getInstance();
if (userActivityLogger.isEnabled()) {
// sessionRunTime will be reset soon by loadSettings. Grab it now to get previous session value.
// The value will be 0 if the user blew away settings this session, which is both a feature and a bug.
@@ -1784,10 +1775,12 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// If launched from Steam, let it handle updates
const QString HIFI_NO_UPDATER_COMMAND_LINE_KEY = "--no-updater";
bool noUpdater = arguments().indexOf(HIFI_NO_UPDATER_COMMAND_LINE_KEY) != -1;
if (!noUpdater) {
bool buildCanUpdate = BuildInfo::BUILD_TYPE == BuildInfo::BuildType::Stable
|| BuildInfo::BUILD_TYPE == BuildInfo::BuildType::Master;
if (!noUpdater && buildCanUpdate) {
constexpr auto INSTALLER_TYPE_CLIENT_ONLY = "client_only";
auto applicationUpdater = DependencyManager::get<AutoUpdater>();
auto applicationUpdater = DependencyManager::set<AutoUpdater>();
AutoUpdater::InstallerType type = installerType == INSTALLER_TYPE_CLIENT_ONLY
? AutoUpdater::InstallerType::CLIENT_ONLY : AutoUpdater::InstallerType::FULL;
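Note the switch from DependencyManager::get to DependencyManager::set here: the AutoUpdater is no longer registered unconditionally in setupEssentials, and is only constructed on builds that can actually update. A sketch of the lazy-registration pattern this relies on (set constructs and registers the dependency; get returns a null shared pointer if set was never called):

    // constructed only when updating is possible
    if (!noUpdater && buildCanUpdate) {
        auto updater = DependencyManager::set<AutoUpdater>();
        updater->checkForUpdate();
    }
    // elsewhere, callers must now tolerate a null result
    if (auto updater = DependencyManager::get<AutoUpdater>()) {
        updater->openLatestUpdateURL();
    }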

View file

@@ -124,7 +124,7 @@ Menu::Menu() {
});
// Edit > Delete
auto deleteAction =addActionToQMenuAndActionHash(editMenu, "Delete", QKeySequence::Delete);
auto deleteAction = addActionToQMenuAndActionHash(editMenu, "Delete", QKeySequence::Delete);
connect(deleteAction, &QAction::triggered, [] {
QKeyEvent* keyEvent = new QKeyEvent(QEvent::KeyPress, Qt::Key_Delete, Qt::ControlModifier);
QCoreApplication::postEvent(QCoreApplication::instance(), keyEvent);

View file

@@ -21,17 +21,6 @@ AvatarMotionState::AvatarMotionState(AvatarSharedPointer avatar, const btCollisi
_type = MOTIONSTATE_TYPE_AVATAR;
}
void AvatarMotionState::handleEasyChanges(uint32_t& flags) {
ObjectMotionState::handleEasyChanges(flags);
if (flags & Simulation::DIRTY_PHYSICS_ACTIVATION && !_body->isActive()) {
_body->activate();
}
}
bool AvatarMotionState::handleHardAndEasyChanges(uint32_t& flags, PhysicsEngine* engine) {
return ObjectMotionState::handleHardAndEasyChanges(flags, engine);
}
AvatarMotionState::~AvatarMotionState() {
assert(_avatar);
_avatar = nullptr;
@@ -57,9 +46,6 @@ PhysicsMotionType AvatarMotionState::computePhysicsMotionType() const {
const btCollisionShape* AvatarMotionState::computeNewShape() {
ShapeInfo shapeInfo;
std::static_pointer_cast<Avatar>(_avatar)->computeShapeInfo(shapeInfo);
glm::vec3 halfExtents = shapeInfo.getHalfExtents();
halfExtents.y = 0.0f;
_diameter = 2.0f * glm::length(halfExtents);
return getShapeManager()->getShape(shapeInfo);
}
@@ -74,31 +60,25 @@ void AvatarMotionState::getWorldTransform(btTransform& worldTrans) const {
worldTrans.setRotation(glmToBullet(getObjectRotation()));
if (_body) {
_body->setLinearVelocity(glmToBullet(getObjectLinearVelocity()));
_body->setAngularVelocity(glmToBullet(getObjectAngularVelocity()));
_body->setAngularVelocity(glmToBullet(getObjectLinearVelocity()));
}
}
// virtual
void AvatarMotionState::setWorldTransform(const btTransform& worldTrans) {
// HACK: The PhysicsEngine does not actually move OTHER avatars -- instead it slaves their local RigidBody to the transform
// as specified by a remote simulation. However, to give the remote simulation time to respond to our own objects we tie
// the other avatar's body to its true position with a simple spring. This is a HACK that will have to be improved later.
const float SPRING_TIMESCALE = 0.5f;
float tau = PHYSICS_ENGINE_FIXED_SUBSTEP / SPRING_TIMESCALE;
btVector3 currentPosition = worldTrans.getOrigin();
btVector3 offsetToTarget = glmToBullet(getObjectPosition()) - currentPosition;
float distance = offsetToTarget.length();
if ((1.0f - tau) * distance > _diameter) {
// the avatar body is far from its target --> slam position
btTransform newTransform;
newTransform.setOrigin(currentPosition + offsetToTarget);
newTransform.setRotation(glmToBullet(getObjectRotation()));
_body->setWorldTransform(newTransform);
_body->setLinearVelocity(glmToBullet(getObjectLinearVelocity()));
_body->setAngularVelocity(glmToBullet(getObjectAngularVelocity()));
} else {
// the avatar body is near its target --> slam velocity
btVector3 velocity = glmToBullet(getObjectLinearVelocity()) + (1.0f / SPRING_TIMESCALE) * offsetToTarget;
_body->setLinearVelocity(velocity);
_body->setAngularVelocity(glmToBullet(getObjectAngularVelocity()));
}
btVector3 targetPosition = glmToBullet(getObjectPosition());
btTransform newTransform;
newTransform.setOrigin((1.0f - tau) * currentPosition + tau * targetPosition);
newTransform.setRotation(glmToBullet(getObjectRotation()));
_body->setWorldTransform(newTransform);
_body->setLinearVelocity(glmToBullet(getObjectLinearVelocity()));
_body->setAngularVelocity(glmToBullet(getObjectLinearVelocity()));
}
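The spring HACK above reduces to a per-substep exponential approach. A worked sketch, assuming the engine's usual 90 Hz fixed substep:

    // x' = (1 - tau) * x + tau * x_target, with tau = dt / SPRING_TIMESCALE.
    // At dt = 1/90 s and SPRING_TIMESCALE = 0.5 s, tau ~= 0.022, so each
    // substep closes ~2.2% of the remaining offset: an exponential approach
    // to the remote avatar's reported position with a ~0.5 s time constant.
    btVector3 springStep(const btVector3& current, const btVector3& target) {
        const float SPRING_TIMESCALE = 0.5f;
        float tau = PHYSICS_ENGINE_FIXED_SUBSTEP / SPRING_TIMESCALE;
        return (1.0f - tau) * current + tau * target;
    }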
// These pure virtual methods must be implemented for each MotionState type
@@ -165,8 +145,3 @@ void AvatarMotionState::computeCollisionGroupAndMask(int32_t& group, int32_t& ma
mask = Physics::getDefaultCollisionMask(group);
}
// virtual
float AvatarMotionState::getMass() const {
return std::static_pointer_cast<Avatar>(_avatar)->computeMass();
}

View file

@@ -23,9 +23,6 @@ class AvatarMotionState : public ObjectMotionState {
public:
AvatarMotionState(AvatarSharedPointer avatar, const btCollisionShape* shape);
virtual void handleEasyChanges(uint32_t& flags) override;
virtual bool handleHardAndEasyChanges(uint32_t& flags, PhysicsEngine* engine) override;
virtual PhysicsMotionType getMotionType() const override { return _motionType; }
virtual uint32_t getIncomingDirtyFlags() override;
@@ -67,8 +64,6 @@ public:
virtual void computeCollisionGroupAndMask(int32_t& group, int32_t& mask) const override;
virtual float getMass() const override;
friend class AvatarManager;
friend class Avatar;
@@ -81,7 +76,6 @@ protected:
virtual const btCollisionShape* computeNewShape() override;
AvatarSharedPointer _avatar;
float _diameter { 0.0f };
uint32_t _dirtyFlags;
};

View file

@@ -2117,6 +2117,31 @@ bool MyAvatar::shouldRenderHead(const RenderArgs* renderArgs) const {
return !defaultMode || !firstPerson || !insideHead;
}
void MyAvatar::setHasScriptedBlendshapes(bool hasScriptedBlendshapes) {
if (hasScriptedBlendshapes == _hasScriptedBlendShapes) {
return;
}
if (!hasScriptedBlendshapes) {
// send a forced avatarData update to make sure the script can send neutral blendshapes on unload
// without having to wait for the update loop, make sure _hasScriptedBlendShapes is still true
// before sending the update, or else it won't send the neutral blendshapes to the receiving clients
sendAvatarDataPacket(true);
}
_hasScriptedBlendShapes = hasScriptedBlendshapes;
}
void MyAvatar::setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement) {
_headData->setHasProceduralBlinkFaceMovement(hasProceduralBlinkFaceMovement);
}
void MyAvatar::setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement) {
_headData->setHasProceduralEyeFaceMovement(hasProceduralEyeFaceMovement);
}
void MyAvatar::setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement) {
_headData->setHasAudioEnabledFaceMovement(hasAudioEnabledFaceMovement);
}
void MyAvatar::updateOrientation(float deltaTime) {
// Smoothly rotate body with arrow keys

View file

@@ -86,6 +86,10 @@ class MyAvatar : public Avatar {
* @property {number} audioListenerModeCamera=1 - The audio listening position is at the camera. <em>Read-only.</em>
* @property {number} audioListenerModeCustom=2 - The audio listening position is at the position set by the
* <code>customListenPosition</code> and <code>customListenOrientation</code> property values. <em>Read-only.</em>
* @property {boolean} hasScriptedBlendshapes=false - If set to true, blendshapes will be transmitted over the network.
* @property {boolean} hasProceduralBlinkFaceMovement=true - If set to true, procedural blinking will be turned on.
* @property {boolean} hasProceduralEyeFaceMovement=true - If set to true, procedural eye movement will be turned on.
* @property {boolean} hasAudioEnabledFaceMovement=true - If set to true, voice audio will move the mouth blendshapes while <code>MyAvatar.hasScriptedBlendshapes</code> is enabled.
* @property {Vec3} customListenPosition=Vec3.ZERO - The listening position used when the <code>audioListenerMode</code>
* property value is <code>audioListenerModeCustom</code>.
* @property {Quat} customListenOrientation=Quat.IDENTITY - The listening orientation used when the
@@ -187,6 +191,10 @@ class MyAvatar : public Avatar {
Q_PROPERTY(AudioListenerMode audioListenerModeHead READ getAudioListenerModeHead)
Q_PROPERTY(AudioListenerMode audioListenerModeCamera READ getAudioListenerModeCamera)
Q_PROPERTY(AudioListenerMode audioListenerModeCustom READ getAudioListenerModeCustom)
Q_PROPERTY(bool hasScriptedBlendshapes READ getHasScriptedBlendshapes WRITE setHasScriptedBlendshapes)
Q_PROPERTY(bool hasProceduralBlinkFaceMovement READ getHasProceduralBlinkFaceMovement WRITE setHasProceduralBlinkFaceMovement)
Q_PROPERTY(bool hasProceduralEyeFaceMovement READ getHasProceduralEyeFaceMovement WRITE setHasProceduralEyeFaceMovement)
Q_PROPERTY(bool hasAudioEnabledFaceMovement READ getHasAudioEnabledFaceMovement WRITE setHasAudioEnabledFaceMovement)
//TODO: make gravity feature work Q_PROPERTY(glm::vec3 gravity READ getGravity WRITE setGravity)
Q_PROPERTY(glm::vec3 leftHandPosition READ getLeftHandPosition)
@@ -1380,6 +1388,14 @@ private:
virtual bool shouldRenderHead(const RenderArgs* renderArgs) const override;
void setShouldRenderLocally(bool shouldRender) { _shouldRender = shouldRender; setEnableMeshVisible(shouldRender); }
bool getShouldRenderLocally() const { return _shouldRender; }
void setHasScriptedBlendshapes(bool hasScriptedBlendshapes);
bool getHasScriptedBlendshapes() const override { return _hasScriptedBlendShapes; }
void setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement);
bool getHasProceduralBlinkFaceMovement() const override { return _headData->getHasProceduralBlinkFaceMovement(); }
void setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement);
bool getHasProceduralEyeFaceMovement() const override { return _headData->getHasProceduralEyeFaceMovement(); }
void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
bool getHasAudioEnabledFaceMovement() const override { return _headData->getHasAudioEnabledFaceMovement(); }
bool isMyAvatar() const override { return true; }
virtual int parseDataFromBuffer(const QByteArray& buffer) override;
virtual glm::vec3 getSkeletonPosition() const override;
@@ -1488,6 +1504,7 @@ private:
bool _hmdRollControlEnabled { true };
float _hmdRollControlDeadZone { ROLL_CONTROL_DEAD_ZONE_DEFAULT };
float _hmdRollControlRate { ROLL_CONTROL_RATE_DEFAULT };
std::atomic<bool> _hasScriptedBlendShapes { false };
// working copy -- see AvatarData for thread-safe _sensorToWorldMatrixCache, used for outward facing access
glm::mat4 _sensorToWorldMatrix { glm::mat4() };

View file

@@ -46,32 +46,18 @@ void MyHead::simulate(float deltaTime) {
auto player = DependencyManager::get<recording::Deck>();
// Only use face trackers when not playing back a recording.
if (!player->isPlaying()) {
FaceTracker* faceTracker = qApp->getActiveFaceTracker();
_isFaceTrackerConnected = faceTracker != nullptr && !faceTracker->isMuted();
auto faceTracker = qApp->getActiveFaceTracker();
const bool hasActualFaceTrackerConnected = faceTracker && !faceTracker->isMuted();
_isFaceTrackerConnected = hasActualFaceTrackerConnected || _owningAvatar->getHasScriptedBlendshapes();
if (_isFaceTrackerConnected) {
_transientBlendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
if (typeid(*faceTracker) == typeid(DdeFaceTracker)) {
if (Menu::getInstance()->isOptionChecked(MenuOption::UseAudioForMouth)) {
calculateMouthShapes(deltaTime);
const int JAW_OPEN_BLENDSHAPE = 21;
const int MMMM_BLENDSHAPE = 34;
const int FUNNEL_BLENDSHAPE = 40;
const int SMILE_LEFT_BLENDSHAPE = 28;
const int SMILE_RIGHT_BLENDSHAPE = 29;
_transientBlendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen;
_transientBlendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4;
_transientBlendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4;
_transientBlendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2;
_transientBlendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3;
}
applyEyelidOffset(getFinalOrientationInWorldFrame());
if (hasActualFaceTrackerConnected) {
_blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
}
}
auto eyeTracker = DependencyManager::get<EyeTracker>();
_isEyeTrackerConnected = eyeTracker->isTracking();
// if eye tracker is connected we should get the data here.
}
Parent::simulate(deltaTime);
}

View file

@@ -81,6 +81,13 @@ int main(int argc, const char* argv[]) {
// Instance UserActivityLogger now that the settings are loaded
auto& ual = UserActivityLogger::getInstance();
// once the settings have been loaded, check if we need to flip the default for UserActivityLogger
if (!ual.isDisabledSettingSet()) {
// the user activity logger is opt-out for Interface
// but it's defaulted to disabled for other targets
// so we need to enable it here if it has never been disabled by the user
ual.disable(false);
}
qDebug() << "UserActivityLogger is enabled:" << ual.isEnabled();
if (ual.isEnabled()) {

View file

@@ -21,19 +21,31 @@ UpdateDialog::UpdateDialog(QQuickItem* parent) :
OffscreenQmlDialog(parent)
{
auto applicationUpdater = DependencyManager::get<AutoUpdater>();
int currentVersion = QCoreApplication::applicationVersion().toInt();
int latestVersion = applicationUpdater.data()->getBuildData().lastKey();
_updateAvailableDetails = "v" + QString::number(latestVersion) + " released on "
+ QString(applicationUpdater.data()->getBuildData()[latestVersion]["releaseTime"]).replace(" ", " ");
if (applicationUpdater) {
_releaseNotes = "";
for (int i = latestVersion; i > currentVersion; i--) {
if (applicationUpdater.data()->getBuildData().contains(i)) {
QString releaseNotes = applicationUpdater.data()->getBuildData()[i]["releaseNotes"];
releaseNotes.remove("<br />");
releaseNotes.remove(QRegExp("^\n+"));
_releaseNotes += "\n" + QString().sprintf("%d", i) + "\n" + releaseNotes + "\n";
auto buildData = applicationUpdater.data()->getBuildData();
ApplicationVersion latestVersion = buildData.lastKey();
_updateAvailableDetails = "v" + latestVersion.versionString + " released on "
+ QString(buildData[latestVersion]["releaseTime"]).replace(" ", " ");
_releaseNotes = "";
auto it = buildData.end();
while (it != buildData.begin()) {
--it;
if (applicationUpdater->getCurrentVersion() < it.key()) {
// grab the release notes for this later version
QString releaseNotes = it.value()["releaseNotes"];
releaseNotes.remove("<br />");
releaseNotes.remove(QRegExp("^\n+"));
_releaseNotes += "\n" + it.key().versionString + "\n" + releaseNotes + "\n";
} else {
break;
}
}
}
}
@@ -47,5 +59,5 @@ const QString& UpdateDialog::releaseNotes() const {
void UpdateDialog::triggerUpgrade() {
auto applicationUpdater = DependencyManager::get<AutoUpdater>();
applicationUpdater.data()->performAutoUpdate(applicationUpdater.data()->getBuildData().lastKey());
applicationUpdater.data()->openLatestUpdateURL();
}

View file

@@ -11,13 +11,16 @@
#include "AutoUpdater.h"
#include <BuildInfo.h>
#include <NetworkAccessManager.h>
#include <SharedUtil.h>
#include <unordered_map>
AutoUpdater::AutoUpdater() {
#include <ApplicationVersion.h>
#include <BuildInfo.h>
#include <NetworkAccessManager.h>
#include <SharedUtil.h>
AutoUpdater::AutoUpdater() :
_currentVersion(BuildInfo::BUILD_TYPE == BuildInfo::BuildType::Stable ? BuildInfo::VERSION : BuildInfo::BUILD_NUMBER)
{
#if defined Q_OS_WIN32
_operatingSystem = "windows";
#elif defined Q_OS_MAC
@@ -33,9 +36,22 @@ void AutoUpdater::checkForUpdate() {
this->getLatestVersionData();
}
const QUrl BUILDS_XML_URL("https://highfidelity.com/builds.xml");
const QUrl MASTER_BUILDS_XML_URL("https://highfidelity.com/dev-builds.xml");
void AutoUpdater::getLatestVersionData() {
QNetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkRequest latestVersionRequest(BUILDS_XML_URL);
QUrl buildsURL;
if (BuildInfo::BUILD_TYPE == BuildInfo::BuildType::Stable) {
buildsURL = BUILDS_XML_URL;
} else if (BuildInfo::BUILD_TYPE == BuildInfo::BuildType::Master) {
buildsURL = MASTER_BUILDS_XML_URL;
}
QNetworkRequest latestVersionRequest(buildsURL);
latestVersionRequest.setAttribute(QNetworkRequest::FollowRedirectsAttribute, true);
latestVersionRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
QNetworkReply* reply = networkAccessManager.get(latestVersionRequest);
@@ -52,12 +68,22 @@ void AutoUpdater::parseLatestVersionData() {
QString clientOnly;
};
int version { 0 };
QString version;
QString downloadUrl;
QString releaseTime;
QString releaseNotes;
QString commitSha;
QString pullRequestNumber;
QString versionKey;
// stable builds look at the stable_version node (semantic version)
// master builds look at the version node (build number)
if (BuildInfo::BUILD_TYPE == BuildInfo::BuildType::Stable) {
versionKey = "stable_version";
} else if (BuildInfo::BUILD_TYPE == BuildInfo::BuildType::Master) {
versionKey = "version";
}
while (xml.readNextStartElement()) {
if (xml.name() == "projects") {
@@ -77,8 +103,8 @@
QHash<QString, InstallerURLs> campaignInstallers;
while (xml.readNextStartElement()) {
if (xml.name() == "version") {
version = xml.readElementText().toInt();
if (xml.name() == versionKey) {
version = xml.readElementText();
} else if (xml.name() == "url") {
downloadUrl = xml.readElementText();
} else if (xml.name() == "installers") {
@@ -159,31 +185,31 @@ void AutoUpdater::parseLatestVersionData() {
}
void AutoUpdater::checkVersionAndNotify() {
if (BuildInfo::BUILD_TYPE != BuildInfo::BuildType::Stable || _builds.empty()) {
// No version checking is required in nightly/PR/dev builds or when no build
// data was found for the platform
if (_builds.empty()) {
// no build data was found for this platform
return;
}
int latestVersionAvailable = _builds.lastKey();
if (QCoreApplication::applicationVersion().toInt() < latestVersionAvailable) {
qDebug() << "Checking if update version" << _builds.lastKey().versionString
<< "is newer than current version" << _currentVersion.versionString;
if (_builds.lastKey() > _currentVersion) {
emit newVersionIsAvailable();
}
}
void AutoUpdater::performAutoUpdate(int version) {
// NOTE: This is not yet auto updating - however this is a checkpoint towards that end
// Next PR will handle the automatic download, upgrading and application restart
const QMap<QString, QString>& chosenVersion = _builds.value(version);
void AutoUpdater::openLatestUpdateURL() {
const QMap<QString, QString>& chosenVersion = _builds.last();
const QUrl& downloadUrl = chosenVersion.value("downloadUrl");
QDesktopServices::openUrl(downloadUrl);
QCoreApplication::quit();
}
void AutoUpdater::downloadUpdateVersion(int version) {
void AutoUpdater::downloadUpdateVersion(const QString& version) {
emit newVersionIsDownloaded();
}
void AutoUpdater::appendBuildData(int versionNumber,
void AutoUpdater::appendBuildData(const QString& versionNumber,
const QString& downloadURL,
const QString& releaseTime,
const QString& releaseNotes,
@@ -194,6 +220,6 @@ void AutoUpdater::appendBuildData(int versionNumber,
thisBuildDetails.insert("releaseTime", releaseTime);
thisBuildDetails.insert("releaseNotes", releaseNotes);
thisBuildDetails.insert("pullRequestNumber", pullRequestNumber);
_builds.insert(versionNumber, thisBuildDetails);
_builds.insert(ApplicationVersion(versionNumber), thisBuildDetails);
}
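The map key change from int to ApplicationVersion is what lets one code path serve both build types: stable builds compare semantic versions while master builds compare build numbers, and QMap keeps keys ordered so lastKey() is always the newest build. A sketch of the comparison, with the ApplicationVersion API assumed from its use in this diff (QString constructor, versionString member, ordering operators) and the version values purely illustrative:

    #include <QDebug>
    #include <ApplicationVersion.h>

    void sketchVersionCheck() {
        ApplicationVersion current("0.71.0");    // illustrative
        ApplicationVersion candidate("0.71.1");  // illustrative
        if (current < candidate) {
            qDebug() << "update available:" << candidate.versionString;
        }
    }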

View file

@@ -26,10 +26,9 @@
#include <QtNetwork/QNetworkReply>
#include <QtNetwork/QNetworkRequest>
#include <ApplicationVersion.h>
#include <DependencyManager.h>
const QUrl BUILDS_XML_URL("https://highfidelity.com/builds.xml");
class AutoUpdater : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
@@ -43,25 +42,29 @@ public:
};
void checkForUpdate();
const QMap<int, QMap<QString, QString>>& getBuildData() { return _builds; }
void performAutoUpdate(int version);
const QMap<ApplicationVersion, QMap<QString, QString>>& getBuildData() { return _builds; }
void openLatestUpdateURL();
void setInstallerType(InstallerType type) { _installerType = type; }
void setInstallerCampaign(QString campaign) { _installerCampaign = campaign; }
const ApplicationVersion& getCurrentVersion() const { return _currentVersion; }
signals:
void latestVersionDataParsed();
void newVersionIsAvailable();
void newVersionIsDownloaded();
private:
QMap<int, QMap<QString, QString>> _builds;
QMap<ApplicationVersion, QMap<QString, QString>> _builds;
QString _operatingSystem;
InstallerType _installerType { InstallerType::FULL };
QString _installerCampaign { "" };
ApplicationVersion _currentVersion;
void getLatestVersionData();
void downloadUpdateVersion(int version);
void appendBuildData(int versionNumber,
void downloadUpdateVersion(const QString& version);
void appendBuildData(const QString& versionNumber,
const QString& downloadURL,
const QString& releaseTime,
const QString& releaseNotes,

View file

@@ -861,6 +861,7 @@ bool Avatar::shouldRenderHead(const RenderArgs* renderArgs) const {
return true;
}
// virtual
void Avatar::simulateAttachments(float deltaTime) {
assert(_attachmentModels.size() == _attachmentModelsTexturesLoaded.size());
PerformanceTimer perfTimer("attachments");
@@ -1543,14 +1544,12 @@ void Avatar::updateDisplayNameAlpha(bool showDisplayName) {
}
}
// virtual
void Avatar::computeShapeInfo(ShapeInfo& shapeInfo) {
float uniformScale = getModelScale();
float radius = uniformScale * _skeletonModel->getBoundingCapsuleRadius();
float height = uniformScale * _skeletonModel->getBoundingCapsuleHeight();
shapeInfo.setCapsuleY(radius, 0.5f * height);
glm::vec3 offset = uniformScale * _skeletonModel->getBoundingCapsuleOffset();
shapeInfo.setOffset(offset);
shapeInfo.setCapsuleY(uniformScale * _skeletonModel->getBoundingCapsuleRadius(),
0.5f * uniformScale * _skeletonModel->getBoundingCapsuleHeight());
shapeInfo.setOffset(uniformScale * _skeletonModel->getBoundingCapsuleOffset());
}
void Avatar::getCapsule(glm::vec3& start, glm::vec3& end, float& radius) {
@@ -1573,8 +1572,9 @@ float Avatar::computeMass() {
return _density * TWO_PI * radius * radius * (glm::length(end - start) + 2.0f * radius / 3.0f);
}
// virtual
void Avatar::rebuildCollisionShape() {
addPhysicsFlags(Simulation::DIRTY_SHAPE | Simulation::DIRTY_MASS);
addPhysicsFlags(Simulation::DIRTY_SHAPE);
}
void Avatar::setPhysicsCallback(AvatarPhysicsCallback cb) {

View file

@@ -20,6 +20,7 @@
#include <trackers/FaceTracker.h>
#include <trackers/EyeTracker.h>
#include <Rig.h>
#include "Logging.h"
#include "Avatar.h"
@@ -58,25 +59,30 @@ void Head::simulate(float deltaTime) {
_longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));
}
if (!_isFaceTrackerConnected) {
if (!_isEyeTrackerConnected) {
// Update eye saccades
const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
const float AVERAGE_SACCADE_INTERVAL = 6.0f;
const float MICROSACCADE_MAGNITUDE = 0.002f;
const float SACCADE_MAGNITUDE = 0.04f;
const float NOMINAL_FRAME_RATE = 60.0f;
if (!_isEyeTrackerConnected) {
// Update eye saccades
const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
const float AVERAGE_SACCADE_INTERVAL = 6.0f;
const float MICROSACCADE_MAGNITUDE = 0.002f;
const float SACCADE_MAGNITUDE = 0.04f;
const float NOMINAL_FRAME_RATE = 60.0f;
if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
_saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
} else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
_saccadeTarget = SACCADE_MAGNITUDE * randVector();
}
_saccade += (_saccadeTarget - _saccade) * pow(0.5f, NOMINAL_FRAME_RATE * deltaTime);
} else {
_saccade = glm::vec3();
if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
_saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
} else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
_saccadeTarget = SACCADE_MAGNITUDE * randVector();
}
_saccade += (_saccadeTarget - _saccade) * pow(0.5f, NOMINAL_FRAME_RATE * deltaTime);
} else {
_saccade = glm::vec3();
}
const float BLINK_SPEED = 10.0f;
const float BLINK_SPEED_VARIABILITY = 1.0f;
const float BLINK_START_VARIABILITY = 0.25f;
const float FULLY_OPEN = 0.0f;
const float FULLY_CLOSED = 1.0f;
if (getHasProceduralBlinkFaceMovement()) {
// Detect transition from talking to not; force blink after that and a delay
bool forceBlink = false;
const float TALKING_LOUDNESS = 100.0f;
@@ -88,29 +94,12 @@
forceBlink = true;
}
// Update audio attack data for facial animation (eyebrows and mouth)
float audioAttackAveragingRate = (10.0f - deltaTime * NORMAL_HZ) / 10.0f; // --> 0.9 at 60 Hz
_audioAttack = audioAttackAveragingRate * _audioAttack +
(1.0f - audioAttackAveragingRate) * fabs((audioLoudness - _longTermAverageLoudness) - _lastLoudness);
_lastLoudness = (audioLoudness - _longTermAverageLoudness);
const float BROW_LIFT_THRESHOLD = 100.0f;
if (_audioAttack > BROW_LIFT_THRESHOLD) {
_browAudioLift += sqrtf(_audioAttack) * 0.01f;
}
_browAudioLift = glm::clamp(_browAudioLift *= 0.7f, 0.0f, 1.0f);
const float BLINK_SPEED = 10.0f;
const float BLINK_SPEED_VARIABILITY = 1.0f;
const float BLINK_START_VARIABILITY = 0.25f;
const float FULLY_OPEN = 0.0f;
const float FULLY_CLOSED = 1.0f;
if (_leftEyeBlinkVelocity == 0.0f && _rightEyeBlinkVelocity == 0.0f) {
// no blinking when brows are raised; blink less with increasing loudness
const float BASE_BLINK_RATE = 15.0f / 60.0f;
const float ROOT_LOUDNESS_TO_BLINK_INTERVAL = 0.25f;
if (forceBlink || (_browAudioLift < EPSILON && shouldDo(glm::max(1.0f, sqrt(fabs(_averageLoudness - _longTermAverageLoudness)) *
ROOT_LOUDNESS_TO_BLINK_INTERVAL) / BASE_BLINK_RATE, deltaTime))) {
ROOT_LOUDNESS_TO_BLINK_INTERVAL) / BASE_BLINK_RATE, deltaTime))) {
_leftEyeBlinkVelocity = BLINK_SPEED + randFloat() * BLINK_SPEED_VARIABILITY;
_rightEyeBlinkVelocity = BLINK_SPEED + randFloat() * BLINK_SPEED_VARIABILITY;
if (randFloat() < 0.5f) {
@@ -136,22 +125,45 @@
_rightEyeBlinkVelocity = 0.0f;
}
}
} else {
_rightEyeBlink = FULLY_OPEN;
_leftEyeBlink = FULLY_OPEN;
}
// use data to update fake Faceshift blendshape coefficients
if (getHasAudioEnabledFaceMovement()) {
// Update audio attack data for facial animation (eyebrows and mouth)
float audioAttackAveragingRate = (10.0f - deltaTime * NORMAL_HZ) / 10.0f; // --> 0.9 at 60 Hz
_audioAttack = audioAttackAveragingRate * _audioAttack +
(1.0f - audioAttackAveragingRate) * fabs((audioLoudness - _longTermAverageLoudness) - _lastLoudness);
_lastLoudness = (audioLoudness - _longTermAverageLoudness);
const float BROW_LIFT_THRESHOLD = 100.0f;
if (_audioAttack > BROW_LIFT_THRESHOLD) {
_browAudioLift += sqrtf(_audioAttack) * 0.01f;
}
_browAudioLift = glm::clamp(_browAudioLift *= 0.7f, 0.0f, 1.0f);
calculateMouthShapes(deltaTime);
FaceTracker::updateFakeCoefficients(_leftEyeBlink,
_rightEyeBlink,
_browAudioLift,
_audioJawOpen,
_mouth2,
_mouth3,
_mouth4,
_transientBlendshapeCoefficients);
applyEyelidOffset(getOrientation());
} else {
_saccade = glm::vec3();
_audioJawOpen = 0.0f;
_browAudioLift = 0.0f;
_mouth2 = 0.0f;
_mouth3 = 0.0f;
_mouth4 = 0.0f;
_mouthTime = 0.0f;
}
FaceTracker::updateFakeCoefficients(_leftEyeBlink,
_rightEyeBlink,
_browAudioLift,
_audioJawOpen,
_mouth2,
_mouth3,
_mouth4,
_transientBlendshapeCoefficients);
if (getHasProceduralEyeFaceMovement()) {
applyEyelidOffset(getOrientation());
}
_leftEyePosition = _rightEyePosition = getPosition();
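For reference, the saccade filter near the top of simulate() is a per-frame exponential chase. Worked at the nominal rate:

    // blend = 0.5^(NOMINAL_FRAME_RATE * dt) = 0.5^(60 * 1/60) = 0.5
    // _saccade += (_saccadeTarget - _saccade) * blend
    // At 60 Hz the gaze closes half of the remaining offset to the saccade
    // target each frame; targets are re-rolled on average once per second
    // (microsaccades) and once every six seconds (full saccades).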

View file

@@ -300,14 +300,15 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
tranlationChangedSince(lastSentTime) ||
parentInfoChangedSince(lastSentTime));
hasFaceTrackerInfo = !dropFaceTracking && hasFaceTracker() && (sendAll || faceTrackerInfoChangedSince(lastSentTime));
hasFaceTrackerInfo = !dropFaceTracking && (hasFaceTracker() || getHasScriptedBlendshapes()) &&
(sendAll || faceTrackerInfoChangedSince(lastSentTime));
hasJointData = sendAll || !sendMinimum;
hasJointDefaultPoseFlags = hasJointData;
}
const size_t byteArraySize = AvatarDataPacket::MAX_CONSTANT_HEADER_SIZE +
(hasFaceTrackerInfo ? AvatarDataPacket::maxFaceTrackerInfoSize(_headData->getNumSummedBlendshapeCoefficients()) : 0) +
(hasFaceTrackerInfo ? AvatarDataPacket::maxFaceTrackerInfoSize(_headData->getBlendshapeCoefficients().size()) : 0) +
(hasJointData ? AvatarDataPacket::maxJointDataSize(_jointData.size()) : 0) +
(hasJointDefaultPoseFlags ? AvatarDataPacket::maxJointDefaultPoseFlagsSize(_jointData.size()) : 0);
@@ -442,7 +443,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::AdditionalFlags*>(destinationBuffer);
uint8_t flags { 0 };
uint16_t flags { 0 };
setSemiNibbleAt(flags, KEY_STATE_START_BIT, _keyState);
@@ -450,20 +451,33 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
bool isFingerPointing = _handState & IS_FINGER_POINTING_FLAG;
setSemiNibbleAt(flags, HAND_STATE_START_BIT, _handState & ~IS_FINGER_POINTING_FLAG);
if (isFingerPointing) {
setAtBit(flags, HAND_STATE_FINGER_POINTING_BIT);
setAtBit16(flags, HAND_STATE_FINGER_POINTING_BIT);
}
// face tracker state
if (_headData->_isFaceTrackerConnected) {
setAtBit(flags, IS_FACE_TRACKER_CONNECTED);
setAtBit16(flags, IS_FACE_TRACKER_CONNECTED);
}
// eye tracker state
if (_headData->_isEyeTrackerConnected) {
setAtBit(flags, IS_EYE_TRACKER_CONNECTED);
setAtBit16(flags, IS_EYE_TRACKER_CONNECTED);
}
// referential state
if (!parentID.isNull()) {
setAtBit(flags, HAS_REFERENTIAL);
setAtBit16(flags, HAS_REFERENTIAL);
}
// audio face movement
if (_headData->getHasAudioEnabledFaceMovement()) {
setAtBit16(flags, AUDIO_ENABLED_FACE_MOVEMENT);
}
// procedural eye face movement
if (_headData->getHasProceduralEyeFaceMovement()) {
setAtBit16(flags, PROCEDURAL_EYE_FACE_MOVEMENT);
}
// procedural blink face movement
if (_headData->getHasProceduralBlinkFaceMovement()) {
setAtBit16(flags, PROCEDURAL_BLINK_FACE_MOVEMENT);
}
data->flags = flags;
destinationBuffer += sizeof(AvatarDataPacket::AdditionalFlags);
@@ -506,8 +520,9 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
if (hasFaceTrackerInfo) {
auto startSection = destinationBuffer;
auto faceTrackerInfo = reinterpret_cast<AvatarDataPacket::FaceTrackerInfo*>(destinationBuffer);
const auto& blendshapeCoefficients = _headData->getSummedBlendshapeCoefficients();
const auto& blendshapeCoefficients = _headData->getBlendshapeCoefficients();
// note: we don't use the blink and average loudness, we just use the numBlendShapes and
// compute the procedural info on the client side.
faceTrackerInfo->leftEyeBlink = _headData->_leftEyeBlink;
faceTrackerInfo->rightEyeBlink = _headData->_rightEyeBlink;
faceTrackerInfo->averageLoudness = _headData->_averageLoudness;
@@ -972,7 +987,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
PACKET_READ_CHECK(AdditionalFlags, sizeof(AvatarDataPacket::AdditionalFlags));
auto data = reinterpret_cast<const AvatarDataPacket::AdditionalFlags*>(sourceBuffer);
uint8_t bitItems = data->flags;
uint16_t bitItems = data->flags;
// key state, stored as a semi-nibble in the bitItems
auto newKeyState = (KeyState)getSemiNibbleAt(bitItems, KEY_STATE_START_BIT);
@@ -980,26 +995,38 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
// hand state, stored as a semi-nibble plus a bit in the bitItems
// we store the hand state as well as other items in a shared bitset. The hand state is an octal, but is split
// into two sections to maintain backward compatibility. The bits are ordered as such (0-7 left to right).
// +---+-----+-----+--+
// |x,x|H0,H1|x,x,x|H2|
// +---+-----+-----+--+
// AA 6/1/18 added three more flag bits 8, 9, and 10 for procedural audio, eye saccade, and blink enabled
// +---+-----+-----+--+--+--+--+-----+
// |x,x|H0,H1|x,x,x|H2|Au|Ey|Bl|xxxxx|
// +---+-----+-----+--+--+--+--+-----+
// Hand state - H0,H1,H2 is found in the 3rd, 4th, and 8th bits
auto newHandState = getSemiNibbleAt(bitItems, HAND_STATE_START_BIT)
+ (oneAtBit(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);
+ (oneAtBit16(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);
auto newFaceTrackerConnected = oneAtBit(bitItems, IS_FACE_TRACKER_CONNECTED);
auto newEyeTrackerConnected = oneAtBit(bitItems, IS_EYE_TRACKER_CONNECTED);
auto newFaceTrackerConnected = oneAtBit16(bitItems, IS_FACE_TRACKER_CONNECTED);
auto newEyeTrackerConnected = oneAtBit16(bitItems, IS_EYE_TRACKER_CONNECTED);
auto newHasAudioEnabledFaceMovement = oneAtBit16(bitItems, AUDIO_ENABLED_FACE_MOVEMENT);
auto newHasProceduralEyeFaceMovement = oneAtBit16(bitItems, PROCEDURAL_EYE_FACE_MOVEMENT);
auto newHasProceduralBlinkFaceMovement = oneAtBit16(bitItems, PROCEDURAL_BLINK_FACE_MOVEMENT);
bool keyStateChanged = (_keyState != newKeyState);
bool handStateChanged = (_handState != newHandState);
bool faceStateChanged = (_headData->_isFaceTrackerConnected != newFaceTrackerConnected);
bool eyeStateChanged = (_headData->_isEyeTrackerConnected != newEyeTrackerConnected);
bool somethingChanged = keyStateChanged || handStateChanged || faceStateChanged || eyeStateChanged;
bool audioEnableFaceMovementChanged = (_headData->getHasAudioEnabledFaceMovement() != newHasAudioEnabledFaceMovement);
bool proceduralEyeFaceMovementChanged = (_headData->getHasProceduralEyeFaceMovement() != newHasProceduralEyeFaceMovement);
bool proceduralBlinkFaceMovementChanged = (_headData->getHasProceduralBlinkFaceMovement() != newHasProceduralBlinkFaceMovement);
bool somethingChanged = keyStateChanged || handStateChanged || faceStateChanged || eyeStateChanged || audioEnableFaceMovementChanged || proceduralEyeFaceMovementChanged || proceduralBlinkFaceMovementChanged;
_keyState = newKeyState;
_handState = newHandState;
_headData->_isFaceTrackerConnected = newFaceTrackerConnected;
_headData->_isEyeTrackerConnected = newEyeTrackerConnected;
_headData->setHasAudioEnabledFaceMovement(newHasAudioEnabledFaceMovement);
_headData->setHasProceduralEyeFaceMovement(newHasProceduralEyeFaceMovement);
_headData->setHasProceduralBlinkFaceMovement(newHasProceduralBlinkFaceMovement);
sourceBuffer += sizeof(AvatarDataPacket::AdditionalFlags);
@@ -1060,23 +1087,21 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
PACKET_READ_CHECK(FaceTrackerInfo, sizeof(AvatarDataPacket::FaceTrackerInfo));
auto faceTrackerInfo = reinterpret_cast<const AvatarDataPacket::FaceTrackerInfo*>(sourceBuffer);
sourceBuffer += sizeof(AvatarDataPacket::FaceTrackerInfo);
_headData->_leftEyeBlink = faceTrackerInfo->leftEyeBlink;
_headData->_rightEyeBlink = faceTrackerInfo->rightEyeBlink;
_headData->_averageLoudness = faceTrackerInfo->averageLoudness;
_headData->_browAudioLift = faceTrackerInfo->browAudioLift;
int numCoefficients = faceTrackerInfo->numBlendshapeCoefficients;
const int coefficientsSize = sizeof(float) * numCoefficients;
sourceBuffer += sizeof(AvatarDataPacket::FaceTrackerInfo);
PACKET_READ_CHECK(FaceTrackerCoefficients, coefficientsSize);
_headData->_blendshapeCoefficients.resize(numCoefficients); // make sure there's room for the copy!
_headData->_transientBlendshapeCoefficients.resize(numCoefficients);
//only copy the blendshapes to headData, not the procedural face info
memcpy(_headData->_blendshapeCoefficients.data(), sourceBuffer, coefficientsSize);
sourceBuffer += coefficientsSize;
int numBytesRead = sourceBuffer - startSection;
_faceTrackerRate.increment(numBytesRead);
_faceTrackerUpdateRate.increment();
} else {
_headData->_blendshapeCoefficients.fill(0, _headData->_blendshapeCoefficients.size());
}
if (hasJointData) {

View file

@@ -79,20 +79,30 @@ const quint32 AVATAR_MOTION_SCRIPTABLE_BITS =
// Bitset of state flags - we store the key state, hand state, Faceshift, eye tracking, and existence of
// referential data in this bit set. The hand state is an octal, but is split into two sections to maintain
// backward compatibility. The bits are ordered as such (0-7 left to right).
// +-----+-----+-+-+-+--+
// |K0,K1|H0,H1|F|E|R|H2|
// +-----+-----+-+-+-+--+
// AA 6/1/18 added three more flag bits 8, 9, and 10 for procedural audio, eye saccade, and blink enabled
//
// +-----+-----+-+-+-+--+--+--+--+-----+
// |K0,K1|H0,H1|F|E|R|H2|Au|Ey|Bl|xxxxx|
// +-----+-----+-+-+-+--+--+--+--+-----+
//
// Key state - K0,K1 is found in the 1st and 2nd bits
// Hand state - H0,H1,H2 is found in the 3rd, 4th, and 8th bits
// Face tracker - F is found in the 5th bit
// Eye tracker - E is found in the 6th bit
// Referential Data - R is found in the 7th bit
// Procedural audio-to-mouth movement - Au is found in the 9th bit
// Procedural eye saccade - Ey is found in the 10th bit
// Procedural blink - Bl is found in the 11th bit
const int KEY_STATE_START_BIT = 0; // 1st and 2nd bits
const int HAND_STATE_START_BIT = 2; // 3rd and 4th bits
const int IS_FACE_TRACKER_CONNECTED = 4; // 5th bit
const int IS_EYE_TRACKER_CONNECTED = 5; // 6th bit (was CHAT_CIRCLING)
const int HAS_REFERENTIAL = 6; // 7th bit
const int HAND_STATE_FINGER_POINTING_BIT = 7; // 8th bit
const int AUDIO_ENABLED_FACE_MOVEMENT = 8; // 9th bit
const int PROCEDURAL_EYE_FACE_MOVEMENT = 9; // 10th bit
const int PROCEDURAL_BLINK_FACE_MOVEMENT = 10; // 11th bit
const char HAND_STATE_NULL = 0;
@@ -200,9 +210,9 @@ namespace AvatarDataPacket {
static_assert(sizeof(SensorToWorldMatrix) == SENSOR_TO_WORLD_SIZE, "AvatarDataPacket::SensorToWorldMatrix size doesn't match.");
PACKED_BEGIN struct AdditionalFlags {
uint8_t flags; // additional flags: hand state, key state, eye tracking
uint16_t flags; // additional flags: hand state, key state, eye tracking
} PACKED_END;
const size_t ADDITIONAL_FLAGS_SIZE = 1;
const size_t ADDITIONAL_FLAGS_SIZE = 2;
static_assert(sizeof(AdditionalFlags) == ADDITIONAL_FLAGS_SIZE, "AvatarDataPacket::AdditionalFlags size doesn't match.");
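With the flags field widened to uint16_t, ADDITIONAL_FLAGS_SIZE doubles to 2 and the 16-bit helpers (setAtBit16 / oneAtBit16) take over from the old 8-bit ones. A sketch of packing and unpacking, using only constants and helpers that appear in this diff (keyState stands in for the avatar's current KeyState value):

    uint16_t flags { 0 };
    setSemiNibbleAt(flags, KEY_STATE_START_BIT, keyState);       // K0,K1
    setAtBit16(flags, IS_FACE_TRACKER_CONNECTED);                // F
    setAtBit16(flags, AUDIO_ENABLED_FACE_MOVEMENT);              // Au
    // ... and on receipt:
    bool hasFaceTracker = oneAtBit16(flags, IS_FACE_TRACKER_CONNECTED);
    bool hasAudioFace   = oneAtBit16(flags, AUDIO_ENABLED_FACE_MOVEMENT);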
// only present if HAS_REFERENTIAL flag is set in AvatarInfo.flags
@@ -501,6 +511,11 @@ public:
float getDomainLimitedScale() const;
virtual bool getHasScriptedBlendshapes() const { return false; }
virtual bool getHasProceduralBlinkFaceMovement() const { return true; }
virtual bool getHasProceduralEyeFaceMovement() const { return true; }
virtual bool getHasAudioEnabledFaceMovement() const { return false; }
/**jsdoc
* Returns the minimum scale allowed for this avatar in the current domain.
* This value can change as the user changes avatars or when changing domains.

View file

@@ -69,6 +69,24 @@ public:
}
bool lookAtPositionChangedSince(quint64 time) { return _lookAtPositionChanged >= time; }
bool getHasProceduralEyeFaceMovement() const { return _hasProceduralEyeFaceMovement; }
void setHasProceduralEyeFaceMovement(const bool hasProceduralEyeFaceMovement) {
_hasProceduralEyeFaceMovement = hasProceduralEyeFaceMovement;
}
bool getHasProceduralBlinkFaceMovement() const { return _hasProceduralBlinkFaceMovement; }
void setHasProceduralBlinkFaceMovement(const bool hasProceduralBlinkFaceMovement) {
_hasProceduralBlinkFaceMovement = hasProceduralBlinkFaceMovement;
}
bool getHasAudioEnabledFaceMovement() const { return _hasAudioEnabledFaceMovement; }
void setHasAudioEnabledFaceMovement(const bool hasAudioEnabledFaceMovement) {
_hasAudioEnabledFaceMovement = hasAudioEnabledFaceMovement;
}
friend class AvatarData;
QJsonObject toJson() const;
@@ -83,6 +101,9 @@ protected:
glm::vec3 _lookAtPosition;
quint64 _lookAtPositionChanged { 0 };
bool _hasAudioEnabledFaceMovement { true };
bool _hasProceduralBlinkFaceMovement { true };
bool _hasProceduralEyeFaceMovement { true };
bool _isFaceTrackerConnected { false };
bool _isEyeTrackerConnected { false };
float _leftEyeBlink { 0.0f };

View file

@@ -22,12 +22,16 @@
#include <SharedUtil.h>
#include <TextureMeta.h>
#include <OwningBuffer.h>
#include "ModelBakingLoggingCategory.h"
const QString BAKED_TEXTURE_KTX_EXT = ".ktx";
const QString BAKED_TEXTURE_BCN_SUFFIX = "_bcn.ktx";
const QString BAKED_META_TEXTURE_SUFFIX = ".texmeta.json";
bool TextureBaker::_compressionEnabled = true;
TextureBaker::TextureBaker(const QUrl& textureURL, image::TextureUsage::Type textureType,
const QDir& outputDirectory, const QString& metaTexturePathPrefix,
const QString& baseFilename, const QByteArray& textureContent) :
@@ -124,42 +128,45 @@ void TextureBaker::processTexture() {
TextureMeta meta;
auto originalCopyFilePath = _outputDirectory.absoluteFilePath(_textureURL.fileName());
{
auto filePath = _outputDirectory.absoluteFilePath(_textureURL.fileName());
QFile file { filePath };
QFile file { originalCopyFilePath };
if (!file.open(QIODevice::WriteOnly) || file.write(_originalTexture) == -1) {
handleError("Could not write original texture for " + _textureURL.toString());
return;
}
_outputFiles.push_back(filePath);
// IMPORTANT: _originalTexture is empty past this point
_originalTexture.clear();
_outputFiles.push_back(originalCopyFilePath);
meta.original = _metaTexturePathPrefix +_textureURL.fileName();
}
// IMPORTANT: _originalTexture is empty past this point
auto processedTexture = image::processImage(std::move(_originalTexture), _textureURL.toString().toStdString(),
ABSOLUTE_MAX_TEXTURE_NUM_PIXELS, _textureType, _abortProcessing);
processedTexture->setSourceHash(hash);
if (shouldStop()) {
auto buffer = std::static_pointer_cast<QIODevice>(std::make_shared<QFile>(originalCopyFilePath));
if (!buffer->open(QIODevice::ReadOnly)) {
handleError("Could not open original file at " + originalCopyFilePath);
return;
}
if (!processedTexture) {
handleError("Could not process texture " + _textureURL.toString());
return;
}
// Compressed KTX
if (_compressionEnabled) {
auto processedTexture = image::processImage(buffer, _textureURL.toString().toStdString(),
ABSOLUTE_MAX_TEXTURE_NUM_PIXELS, _textureType, true, _abortProcessing);
if (!processedTexture) {
handleError("Could not process texture " + _textureURL.toString());
return;
}
processedTexture->setSourceHash(hash);
auto memKTX = gpu::Texture::serialize(*processedTexture);
if (shouldStop()) {
return;
}
if (!memKTX) {
handleError("Could not serialize " + _textureURL.toString() + " to KTX");
return;
}
auto memKTX = gpu::Texture::serialize(*processedTexture);
if (!memKTX) {
handleError("Could not serialize " + _textureURL.toString() + " to KTX");
return;
}
// attempt to write the baked texture to the destination file path
if (memKTX->_header.isCompressed()) {
const char* name = khronos::gl::texture::toString(memKTX->_header.getGLInternaFormat());
if (name == nullptr) {
handleError("Could not determine internal format for compressed KTX: " + _textureURL.toString());
@@ -178,21 +185,45 @@
}
_outputFiles.push_back(filePath);
meta.availableTextureTypes[memKTX->_header.getGLInternaFormat()] = _metaTexturePathPrefix + fileName;
} else {
}
// Uncompressed KTX
if (_textureType == image::TextureUsage::Type::CUBE_TEXTURE) {
buffer->reset();
auto processedTexture = image::processImage(std::move(buffer), _textureURL.toString().toStdString(),
ABSOLUTE_MAX_TEXTURE_NUM_PIXELS, _textureType, false, _abortProcessing);
if (!processedTexture) {
handleError("Could not process texture " + _textureURL.toString());
return;
}
processedTexture->setSourceHash(hash);
if (shouldStop()) {
return;
}
auto memKTX = gpu::Texture::serialize(*processedTexture);
if (!memKTX) {
handleError("Could not serialize " + _textureURL.toString() + " to KTX");
return;
}
const char* data = reinterpret_cast<const char*>(memKTX->_storage->data());
const size_t length = memKTX->_storage->size();
auto fileName = _baseFilename + ".ktx";
auto filePath = _outputDirectory.absoluteFilePath(fileName);
QFile ktxTextureFile { filePath };
if (!ktxTextureFile.open(QIODevice::WriteOnly) || ktxTextureFile.write(data, length) == -1) {
handleError("Could not write ktx texture for " + _textureURL.toString());
QFile bakedTextureFile { filePath };
if (!bakedTextureFile.open(QIODevice::WriteOnly) || bakedTextureFile.write(data, length) == -1) {
handleError("Could not write baked texture for " + _textureURL.toString());
return;
}
_outputFiles.push_back(filePath);
meta.uncompressed = _metaTexturePathPrefix + fileName;
} else {
buffer.reset();
}
{
auto data = meta.serialize();
_metaTextureFileName = _outputDirectory.absoluteFilePath(_baseFilename + BAKED_META_TEXTURE_SUFFIX);

View file

@@ -41,6 +41,8 @@ public:
virtual void setWasAborted(bool wasAborted) override;
static void setCompressionEnabled(bool enabled) { _compressionEnabled = enabled; }
public slots:
virtual void bake() override;
virtual void abort() override;
@@ -65,6 +67,8 @@ private:
QString _metaTexturePathPrefix;
std::atomic<bool> _abortProcessing { false };
static bool _compressionEnabled;
};
#endif // hifi_TextureBaker_h

View file

@@ -151,11 +151,9 @@ void Basic2DWindowOpenGLDisplayPlugin::compositeExtra() {
batch.setModelTransform(stickTransform);
batch.draw(gpu::TRIANGLE_STRIP, 4);
if (!virtualPadManager.getLeftVirtualPad()->isBeingTouched()) {
batch.setResourceTexture(0, _virtualPadJumpBtnTexture);
batch.setModelTransform(jumpTransform);
batch.draw(gpu::TRIANGLE_STRIP, 4);
}
batch.setResourceTexture(0, _virtualPadJumpBtnTexture);
batch.setModelTransform(jumpTransform);
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
}
#endif

View file

@@ -46,12 +46,9 @@ bool DEV_DECIMATE_TEXTURES = false;
std::atomic<size_t> DECIMATED_TEXTURE_COUNT{ 0 };
std::atomic<size_t> RECTIFIED_TEXTURE_COUNT{ 0 };
static const auto HDR_FORMAT = gpu::Element::COLOR_R11G11B10;
static std::atomic<bool> compressColorTextures { false };
static std::atomic<bool> compressNormalTextures { false };
static std::atomic<bool> compressGrayscaleTextures { false };
static std::atomic<bool> compressCubeTextures { false };
// we use a ref here to work around static order initialization
// possibly causing the element not to be constructed yet
static const auto& HDR_FORMAT = gpu::Element::COLOR_R11G11B10;
uint rectifyDimension(const uint& dimension) {
if (dimension == 0) {
@@ -126,112 +123,63 @@ TextureUsage::TextureLoader TextureUsage::getTextureLoaderForType(Type type, con
}
gpu::TexturePointer TextureUsage::createStrict2DTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing) {
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, true, abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing) {
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, true, abortProcessing);
}
gpu::TexturePointer TextureUsage::create2DTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing) {
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing) {
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
}
gpu::TexturePointer TextureUsage::createAlbedoTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing) {
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing) {
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
}
gpu::TexturePointer TextureUsage::createEmissiveTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing) {
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing) {
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
}
gpu::TexturePointer TextureUsage::createLightmapTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing) {
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing) {
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
}
gpu::TexturePointer TextureUsage::createNormalTextureFromNormalImage(QImage&& srcImage, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing) {
return process2DTextureNormalMapFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing) {
return process2DTextureNormalMapFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
}
gpu::TexturePointer TextureUsage::createNormalTextureFromBumpImage(QImage&& srcImage, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing) {
return process2DTextureNormalMapFromImage(std::move(srcImage), srcImageName, true, abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing) {
return process2DTextureNormalMapFromImage(std::move(srcImage), srcImageName, compress, true, abortProcessing);
}
gpu::TexturePointer TextureUsage::createRoughnessTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing) {
return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing) {
return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
}
gpu::TexturePointer TextureUsage::createRoughnessTextureFromGlossImage(QImage&& srcImage, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing) {
return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, true, abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing) {
return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, compress, true, abortProcessing);
}
gpu::TexturePointer TextureUsage::createMetallicTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing) {
return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing) {
return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
}
gpu::TexturePointer TextureUsage::createCubeTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing) {
return processCubeTextureColorFromImage(std::move(srcImage), srcImageName, true, abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing) {
return processCubeTextureColorFromImage(std::move(srcImage), srcImageName, compress, true, abortProcessing);
}
gpu::TexturePointer TextureUsage::createCubeTextureFromImageWithoutIrradiance(QImage&& srcImage, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing) {
return processCubeTextureColorFromImage(std::move(srcImage), srcImageName, false, abortProcessing);
}
bool isColorTexturesCompressionEnabled() {
#if CPU_MIPMAPS
return compressColorTextures.load();
#else
return false;
#endif
}
bool isNormalTexturesCompressionEnabled() {
#if CPU_MIPMAPS
return compressNormalTextures.load();
#else
return false;
#endif
}
bool isGrayscaleTexturesCompressionEnabled() {
#if CPU_MIPMAPS
return compressGrayscaleTextures.load();
#else
return false;
#endif
}
bool isCubeTexturesCompressionEnabled() {
#if CPU_MIPMAPS
return compressCubeTextures.load();
#else
return false;
#endif
}
void setColorTexturesCompressionEnabled(bool enabled) {
compressColorTextures.store(enabled);
}
void setNormalTexturesCompressionEnabled(bool enabled) {
compressNormalTextures.store(enabled);
}
void setGrayscaleTexturesCompressionEnabled(bool enabled) {
compressGrayscaleTextures.store(enabled);
}
void setCubeTexturesCompressionEnabled(bool enabled) {
compressCubeTextures.store(enabled);
bool compress, const std::atomic<bool>& abortProcessing) {
return processCubeTextureColorFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
}
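Every TextureUsage::create*TextureFromImage entry point now takes an explicit compress flag in place of the four process-wide atomics deleted above, so compression intent travels with the call. A sketch of an updated call site (the file name and abort flag are illustrative):

    gpu::TexturePointer bakeAlbedo(QImage&& img, const std::atomic<bool>& abortFlag) {
        return image::TextureUsage::createAlbedoTextureFromImage(
            std::move(img), "albedo.png", /*compress=*/true, abortFlag);
    }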
static float denormalize(float value, const float minValue) {
@ -253,17 +201,11 @@ uint32 packR11G11B10F(const glm::vec3& color) {
return glm::packF2x11_1x10(ucolor);
}
QImage processRawImageData(QByteArray&& content, const std::string& filename) {
// Take a local copy to force move construction
// https://github.com/isocpp/CppCoreGuidelines/blob/master/CppCoreGuidelines.md#f18-for-consume-parameters-pass-by-x-and-stdmove-the-parameter
QByteArray localCopy = std::move(content);
QImage processRawImageData(QIODevice& content, const std::string& filename) {
// Help the QImage loader by extracting the image file format from the url filename ext.
// Some tga are not created properly without it.
auto filenameExtension = filename.substr(filename.find_last_of('.') + 1);
QBuffer buffer;
buffer.setData(localCopy);
QImageReader imageReader(&buffer, filenameExtension.c_str());
QImageReader imageReader(&content, filenameExtension.c_str());
if (imageReader.canRead()) {
return imageReader.read();
@ -271,8 +213,8 @@ QImage processRawImageData(QByteArray&& content, const std::string& filename) {
// Extension could be incorrect, try to detect the format from the content
QImageReader newImageReader;
newImageReader.setDecideFormatFromContent(true);
buffer.setData(localCopy);
newImageReader.setDevice(&buffer);
content.reset();
newImageReader.setDevice(&content);
if (newImageReader.canRead()) {
qCWarning(imagelogging) << "Image file" << filename.c_str() << "has extension" << filenameExtension.c_str()
@ -284,11 +226,14 @@ QImage processRawImageData(QByteArray&& content, const std::string& filename) {
return QImage();
}
gpu::TexturePointer processImage(QByteArray&& content, const std::string& filename,
gpu::TexturePointer processImage(std::shared_ptr<QIODevice> content, const std::string& filename,
int maxNumPixels, TextureUsage::Type textureType,
const std::atomic<bool>& abortProcessing) {
bool compress, const std::atomic<bool>& abortProcessing) {
QImage image = processRawImageData(std::move(content), filename);
QImage image = processRawImageData(*content.get(), filename);
// Texture content can take up a lot of memory. Release our ownership here so the
// bytes can be freed as soon as no other holder remains.
content.reset();
int imageWidth = image.width();
int imageHeight = image.height();
@ -314,7 +259,7 @@ gpu::TexturePointer processImage(QByteArray&& content, const std::string& filena
}
auto loader = TextureUsage::getTextureLoaderForType(textureType);
auto texture = loader(std::move(image), filename, abortProcessing);
auto texture = loader(std::move(image), filename, compress, abortProcessing);
return texture;
}
@ -804,7 +749,7 @@ void processTextureAlpha(const QImage& srcImage, bool& validAlpha, bool& alphaAs
validAlpha = (numOpaques != NUM_PIXELS);
}
gpu::TexturePointer TextureUsage::process2DTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName,
gpu::TexturePointer TextureUsage::process2DTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
bool isStrict, const std::atomic<bool>& abortProcessing) {
PROFILE_RANGE(resource_parse, "process2DTextureColorFromImage");
QImage image = processSourceImage(std::move(srcImage), false);
@ -825,7 +770,7 @@ gpu::TexturePointer TextureUsage::process2DTextureColorFromImage(QImage&& srcIma
if ((image.width() > 0) && (image.height() > 0)) {
gpu::Element formatMip;
gpu::Element formatGPU;
if (isColorTexturesCompressionEnabled()) {
if (compress) {
if (validAlpha) {
// NOTE: This disables BC1a compression because it was producing odd artifacts on text textures
// for the tutorial. Instead we use BC3, which is larger but doesn't produce the same artifacts.
@ -833,6 +778,7 @@ gpu::TexturePointer TextureUsage::process2DTextureColorFromImage(QImage&& srcIma
} else {
formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_SRGB;
}
formatMip = formatGPU;
} else {
#ifdef USE_GLES
// GLES does not support GL_BGRA
@ -941,7 +887,8 @@ QImage processBumpMap(QImage&& image) {
return result;
}
gpu::TexturePointer TextureUsage::process2DTextureNormalMapFromImage(QImage&& srcImage, const std::string& srcImageName,
bool isBumpMap, const std::atomic<bool>& abortProcessing) {
bool compress, bool isBumpMap,
const std::atomic<bool>& abortProcessing) {
PROFILE_RANGE(resource_parse, "process2DTextureNormalMapFromImage");
QImage image = processSourceImage(std::move(srcImage), false);
@ -958,7 +905,7 @@ gpu::TexturePointer TextureUsage::process2DTextureNormalMapFromImage(QImage&& sr
if ((image.width() > 0) && (image.height() > 0)) {
gpu::Element formatMip;
gpu::Element formatGPU;
if (isNormalTexturesCompressionEnabled()) {
if (compress) {
formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_XY;
} else {
#ifdef USE_GLES
@ -980,7 +927,7 @@ gpu::TexturePointer TextureUsage::process2DTextureNormalMapFromImage(QImage&& sr
}
gpu::TexturePointer TextureUsage::process2DTextureGrayscaleFromImage(QImage&& srcImage, const std::string& srcImageName,
bool isInvertedPixels,
bool compress, bool isInvertedPixels,
const std::atomic<bool>& abortProcessing) {
PROFILE_RANGE(resource_parse, "process2DTextureGrayscaleFromImage");
QImage image = processSourceImage(std::move(srcImage), false);
@ -998,7 +945,7 @@ gpu::TexturePointer TextureUsage::process2DTextureGrayscaleFromImage(QImage&& sr
if ((image.width() > 0) && (image.height() > 0)) {
gpu::Element formatMip;
gpu::Element formatGPU;
if (isGrayscaleTexturesCompressionEnabled()) {
if (compress) {
formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_RED;
} else {
#ifdef USE_GLES
@ -1345,7 +1292,7 @@ QImage convertToHDRFormat(QImage&& srcImage, gpu::Element format) {
}
gpu::TexturePointer TextureUsage::processCubeTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName,
bool generateIrradiance,
bool compress, bool generateIrradiance,
const std::atomic<bool>& abortProcessing) {
PROFILE_RANGE(resource_parse, "processCubeTextureColorFromImage");
@ -1373,7 +1320,7 @@ gpu::TexturePointer TextureUsage::processCubeTextureColorFromImage(QImage&& srcI
gpu::Element formatMip;
gpu::Element formatGPU;
if (isCubeTexturesCompressionEnabled()) {
if (compress) {
formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_HDR_RGB;
} else {
#ifdef USE_GLES

View file

@ -41,60 +41,50 @@ enum Type {
UNUSED_TEXTURE
};
using TextureLoader = std::function<gpu::TexturePointer(QImage&&, const std::string&, const std::atomic<bool>&)>;
using TextureLoader = std::function<gpu::TexturePointer(QImage&&, const std::string&, bool, const std::atomic<bool>&)>;
TextureLoader getTextureLoaderForType(Type type, const QVariantMap& options = QVariantMap());
gpu::TexturePointer create2DTextureFromImage(QImage&& image, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createStrict2DTextureFromImage(QImage&& image, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createAlbedoTextureFromImage(QImage&& image, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createEmissiveTextureFromImage(QImage&& image, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createNormalTextureFromNormalImage(QImage&& image, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createNormalTextureFromBumpImage(QImage&& image, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createRoughnessTextureFromImage(QImage&& image, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createRoughnessTextureFromGlossImage(QImage&& image, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createMetallicTextureFromImage(QImage&& image, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createCubeTextureFromImage(QImage&& image, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createCubeTextureFromImageWithoutIrradiance(QImage&& image, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer createLightmapTextureFromImage(QImage&& image, const std::string& srcImageName,
const std::atomic<bool>& abortProcessing);
bool compress, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer process2DTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName, bool isStrict,
const std::atomic<bool>& abortProcessing);
gpu::TexturePointer process2DTextureNormalMapFromImage(QImage&& srcImage, const std::string& srcImageName, bool isBumpMap,
const std::atomic<bool>& abortProcessing);
gpu::TexturePointer process2DTextureGrayscaleFromImage(QImage&& srcImage, const std::string& srcImageName, bool isInvertedPixels,
const std::atomic<bool>& abortProcessing);
gpu::TexturePointer processCubeTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName, bool generateIrradiance,
const std::atomic<bool>& abortProcessing);
gpu::TexturePointer process2DTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
bool isStrict, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer process2DTextureNormalMapFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
bool isBumpMap, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer process2DTextureGrayscaleFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
bool isInvertedPixels, const std::atomic<bool>& abortProcessing);
gpu::TexturePointer processCubeTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
bool generateIrradiance, const std::atomic<bool>& abortProcessing);
} // namespace TextureUsage
const QStringList getSupportedFormats();
bool isColorTexturesCompressionEnabled();
bool isNormalTexturesCompressionEnabled();
bool isGrayscaleTexturesCompressionEnabled();
bool isCubeTexturesCompressionEnabled();
void setColorTexturesCompressionEnabled(bool enabled);
void setNormalTexturesCompressionEnabled(bool enabled);
void setGrayscaleTexturesCompressionEnabled(bool enabled);
void setCubeTexturesCompressionEnabled(bool enabled);
gpu::TexturePointer processImage(QByteArray&& content, const std::string& url,
gpu::TexturePointer processImage(std::shared_ptr<QIODevice> content, const std::string& url,
int maxNumPixels, TextureUsage::Type textureType,
const std::atomic<bool>& abortProcessing = false);
bool compress = false, const std::atomic<bool>& abortProcessing = false);
} // namespace image
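A call-site sketch for the reworked entry point; OwningBuffer (added later in this diff) supplies the owning QIODevice, and the texture type and pixel cap here are illustrative assumptions:

#include <memory>
#include <OwningBuffer.h>
#include <image/Image.h>

gpu::TexturePointer loadTexture(QByteArray&& bytes, const std::string& url) {
    auto device = std::shared_ptr<QIODevice>(new OwningBuffer(std::move(bytes)));
    const int maxNumPixels = 8192 * 8192; // illustrative cap
    return image::processImage(std::move(device), url, maxNumPixels,
                               image::TextureUsage::ALBEDO_TEXTURE,
                               true /* compress */);
}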

View file

@ -33,6 +33,9 @@ bool TextureMeta::deserialize(const QByteArray& data, TextureMeta* meta) {
if (root.contains("original")) {
meta->original = root["original"].toString();
}
if (root.contains("uncompressed")) {
meta->uncompressed = root["uncompressed"].toString();
}
if (root.contains("compressed")) {
auto compressed = root["compressed"].toObject();
for (auto it = compressed.constBegin(); it != compressed.constEnd(); it++) {
@ -57,6 +60,7 @@ QByteArray TextureMeta::serialize() {
compressed[name] = kv.second.toString();
}
root["original"] = original.toString();
root["uncompressed"] = uncompressed.toString();
root["compressed"] = compressed;
doc.setObject(root);

View file

@ -35,6 +35,7 @@ struct TextureMeta {
QByteArray serialize();
QUrl original;
QUrl uncompressed;
std::unordered_map<khronos::gl::texture::InternalFormat, QUrl> availableTextureTypes;
};
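A round-trip sketch of the meta format with the new field (URLs are illustrative):

#include <TextureMeta.h>

void roundTripMeta() {
    TextureMeta meta;
    meta.original = QUrl("albedo.png");
    meta.uncompressed = QUrl("albedo.ktx"); // the field introduced by this change

    QByteArray json = meta.serialize();     // emits "original", "uncompressed", "compressed"

    TextureMeta parsed;
    if (TextureMeta::deserialize(json, &parsed)) {
        Q_ASSERT(parsed.uncompressed == meta.uncompressed);
    }
}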

View file

@ -50,6 +50,8 @@
#include <TextureMeta.h>
#include <OwningBuffer.h>
Q_LOGGING_CATEGORY(trace_resource_parse_image, "trace.resource.parse.image")
Q_LOGGING_CATEGORY(trace_resource_parse_image_raw, "trace.resource.parse.image.raw")
Q_LOGGING_CATEGORY(trace_resource_parse_image_ktx, "trace.resource.parse.image.ktx")
@ -277,7 +279,7 @@ gpu::TexturePointer TextureCache::getImageTexture(const QString& path, image::Te
return nullptr;
}
auto loader = image::TextureUsage::getTextureLoaderForType(type, options);
return gpu::TexturePointer(loader(std::move(image), path.toStdString(), false));
return gpu::TexturePointer(loader(std::move(image), path.toStdString(), false, false));
}
QSharedPointer<Resource> TextureCache::createResource(const QUrl& url, const QSharedPointer<Resource>& fallback,
@ -964,7 +966,6 @@ void NetworkTexture::loadMetaContent(const QByteArray& content) {
return;
}
auto& backend = DependencyManager::get<TextureCache>()->getGPUContext()->getBackend();
for (auto pair : meta.availableTextureTypes) {
gpu::Element elFormat;
@ -990,6 +991,21 @@ void NetworkTexture::loadMetaContent(const QByteArray& content) {
}
}
#ifndef Q_OS_ANDROID
if (!meta.uncompressed.isEmpty()) {
_currentlyLoadingResourceType = ResourceType::KTX;
_activeUrl = _activeUrl.resolved(meta.uncompressed);
auto textureCache = DependencyManager::get<TextureCache>();
auto self = _self.lock();
if (!self) {
return;
}
QMetaObject::invokeMethod(this, "attemptRequest", Qt::QueuedConnection);
return;
}
#endif
if (!meta.original.isEmpty()) {
_currentlyLoadingResourceType = ResourceType::ORIGINAL;
_activeUrl = _activeUrl.resolved(meta.original);
@ -1143,7 +1159,8 @@ void ImageReader::read() {
PROFILE_RANGE_EX(resource_parse_image_raw, __FUNCTION__, 0xffff0000, 0);
// IMPORTANT: _content is empty past this point
texture = image::processImage(std::move(_content), _url.toString().toStdString(), _maxNumPixels, networkTexture->getTextureType());
auto buffer = std::shared_ptr<QIODevice>((QIODevice*)new OwningBuffer(std::move(_content)));
texture = image::processImage(std::move(buffer), _url.toString().toStdString(), _maxNumPixels, networkTexture->getTextureType());
if (!texture) {
qCWarning(modelnetworking) << "Could not process:" << _url;

View file

@ -40,7 +40,7 @@ PacketVersion versionForPacketType(PacketType packetType) {
case PacketType::AvatarData:
case PacketType::BulkAvatarData:
case PacketType::KillAvatar:
return static_cast<PacketVersion>(AvatarMixerPacketVersion::FixMannequinDefaultAvatarFeet);
return static_cast<PacketVersion>(AvatarMixerPacketVersion::ProceduralFaceMovementFlagsAndBlendshapes);
case PacketType::MessagesData:
return static_cast<PacketVersion>(MessageDataVersion::TextOrBinaryData);
// ICE packets

View file

@ -283,7 +283,8 @@ enum class AvatarMixerPacketVersion : PacketVersion {
UpdatedMannequinDefaultAvatar,
AvatarJointDefaultPoseFlags,
FBXReaderNodeReparenting,
FixMannequinDefaultAvatarFeet
FixMannequinDefaultAvatarFeet,
ProceduralFaceMovementFlagsAndBlendshapes
};
enum class DomainConnectRequestVersion : PacketVersion {

View file

@ -111,7 +111,7 @@ public:
virtual PhysicsMotionType getMotionType() const { return _motionType; }
void setMass(float mass);
virtual float getMass() const;
float getMass() const;
void setBodyLinearVelocity(const glm::vec3& velocity) const;
void setBodyAngularVelocity(const glm::vec3& velocity) const;

View file

@ -105,6 +105,10 @@ void PhysicsEngine::addObjectToDynamicsWorld(ObjectMotionState* motionState) {
}
case MOTION_TYPE_DYNAMIC: {
mass = motionState->getMass();
if (mass != mass || mass < 1.0f) {
    // guard against NaN (mass != mass is only true for NaN) and implausibly
    // small masses before Bullet computes local inertia from them
    qCDebug(physics) << "mass is NaN or too low, setting to 1.0 kg --" << mass;
    mass = 1.0f;
}
btCollisionShape* shape = const_cast<btCollisionShape*>(motionState->getShape());
assert(shape);
shape->calculateLocalInertia(mass, inertia);

View file

@ -0,0 +1,94 @@
//
// ApplicationVersion.cpp
// libraries/shared/src
//
// Created by Stephen Birarda on 6/8/18.
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "ApplicationVersion.h"
#include <cassert>
#include <QtCore/QDebug>
#include <QtCore/QRegExp>
#include <QtCore/QStringList>
ApplicationVersion::ApplicationVersion(const QString& versionString) :
versionString(versionString)
{
// attempt to regex out a semantic version from the string
// handling both x.y.z and x.y formats
QRegExp semanticRegex("([\\d]+)\\.([\\d]+)(?:\\.([\\d]+))?");
int pos = semanticRegex.indexIn(versionString);
if (pos != -1) {
isSemantic = true;
auto captures = semanticRegex.capturedTexts();
major = captures[1].toInt();
minor = captures[2].toInt();
if (captures.length() > 3) {
patch = captures[3].toInt();
} else {
// the patch is implicitly 0 if it was not included
patch = 0;
}
} else {
// if we didn't have a semantic version, we assume that we just have a build number
build = versionString.toInt();
}
}
bool ApplicationVersion::operator==(const ApplicationVersion& other) const {
if (isSemantic && other.isSemantic) {
return major == other.major && minor == other.minor && patch == other.patch;
} else if (!isSemantic && !other.isSemantic) {
return build == other.build;
} else {
assert(isSemantic == other.isSemantic);
return false;
}
}
bool ApplicationVersion::operator<(const ApplicationVersion& other) const {
if (isSemantic && other.isSemantic) {
if (major == other.major) {
if (minor == other.minor) {
return patch < other.patch;
} else {
return minor < other.minor;
}
} else {
return major < other.major;
}
} else if (!isSemantic && !other.isSemantic) {
return build < other.build;
} else {
assert(isSemantic == other.isSemantic);
return false;
}
}
bool ApplicationVersion::operator>(const ApplicationVersion& other) const {
if (isSemantic && other.isSemantic) {
if (major == other.major) {
if (minor == other.minor) {
return patch > other.patch;
} else {
return minor > other.minor;
}
} else {
return major > other.major;
}
} else if (!isSemantic && !other.isSemantic) {
return build > other.build;
} else {
assert(isSemantic == other.isSemantic);
return false;
}
}

View file

@ -0,0 +1,41 @@
//
// ApplicationVersion.h
// libraries/shared/src
//
// Created by Stephen Birarda on 6/8/18.
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_ApplicationVersion_h
#define hifi_ApplicationVersion_h
#include <QtCore/QString>
class ApplicationVersion {
public:
ApplicationVersion(const QString& versionString);
bool operator==(const ApplicationVersion& other) const;
bool operator!=(const ApplicationVersion& other) const { return !(*this == other); }
bool operator <(const ApplicationVersion& other) const;
bool operator >(const ApplicationVersion& other) const;
bool operator >=(const ApplicationVersion& other) const { return (*this == other) || (*this > other); }
bool operator <=(const ApplicationVersion& other) const { return (*this == other) || (*this < other); }
int major = -1;
int minor = -1;
int patch = -1;
int build = -1;
bool isSemantic { false };
QString versionString;
};
#endif // hifi_ApplicationVersion_h
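A usage sketch with hypothetical version strings, following the comparison rules implemented above:

#include "ApplicationVersion.h"

void compareVersions() {
    ApplicationVersion stable("1.2");      // semantic; patch is implicitly 0
    ApplicationVersion candidate("1.2.3"); // semantic
    ApplicationVersion nightly("6917");    // no semantic match, so parsed as a build number

    bool shouldUpdate = candidate > stable; // true: 1.2.3 > 1.2.0
    bool mixedKinds = nightly > stable;     // false: semantic and build versions never order
                                            // (and the mismatch asserts in debug builds)
}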

View file

@ -0,0 +1,29 @@
//
// OwningBuffer.h
// shared/src
//
// Created by Ryan Huffman on 5/31/2018.
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_OwningBuffer_h
#define hifi_OwningBuffer_h
#include <QBuffer>
class OwningBuffer : public QBuffer {
public:
OwningBuffer(const QByteArray& content) : _content(content) {
setData(_content);
}
OwningBuffer(QByteArray&& content) : _content(std::move(content)) {
setData(_content);
}
private:
QByteArray _content;
};
#endif // hifi_OwningBuffer_h
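A sketch of the intended pattern, mirroring the ImageReader change earlier in this diff: hand consumers a QIODevice whose backing bytes cannot disappear underneath them.

#include <memory>
#include <QtCore/QIODevice>
#include <OwningBuffer.h>

std::shared_ptr<QIODevice> wrapContent(QByteArray&& content) {
    // The buffer takes ownership, so the bytes stay valid for as long as
    // any reader holds the device, even though the caller's array is now empty.
    return std::shared_ptr<QIODevice>(new OwningBuffer(std::move(content)));
}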

View file

@ -297,14 +297,23 @@ void setAtBit(unsigned char& byte, int bitIndex) {
byte |= (1 << (7 - bitIndex));
}
bool oneAtBit16(unsigned short word, int bitIndex) {
return (word >> (15 - bitIndex) & 1);
}
void setAtBit16(unsigned short& word, int bitIndex) {
word |= (1 << (15 - bitIndex));
}
void clearAtBit(unsigned char& byte, int bitIndex) {
if (oneAtBit(byte, bitIndex)) {
byte -= (1 << (7 - bitIndex));
}
}
int getSemiNibbleAt(unsigned char byte, int bitIndex) {
return (byte >> (6 - bitIndex) & 3); // semi-nibbles store 00, 01, 10, or 11
int getSemiNibbleAt(unsigned short word, int bitIndex) {
return (word >> (14 - bitIndex) & 3); // semi-nibbles store 00, 01, 10, or 11
}
int getNthBit(unsigned char byte, int ordinal) {
@ -326,9 +335,9 @@ int getNthBit(unsigned char byte, int ordinal) {
return ERROR_RESULT;
}
void setSemiNibbleAt(unsigned char& byte, int bitIndex, int value) {
void setSemiNibbleAt(unsigned short& word, int bitIndex, int value) {
//assert(value <= 3 && value >= 0);
byte |= ((value & 3) << (6 - bitIndex)); // semi-nibbles store 00, 01, 10, or 11
word |= ((value & 3) << (14 - bitIndex)); // semi-nibbles store 00, 01, 10, or 11
}
bool isInEnvironment(const char* environment) {

View file

@ -163,9 +163,11 @@ void printVoxelCode(unsigned char* voxelCode);
int numberOfOnes(unsigned char byte);
bool oneAtBit(unsigned char byte, int bitIndex);
void setAtBit(unsigned char& byte, int bitIndex);
bool oneAtBit16(unsigned short word, int bitIndex);
void setAtBit16(unsigned short& word, int bitIndex);
void clearAtBit(unsigned char& byte, int bitIndex);
int getSemiNibbleAt(unsigned char byte, int bitIndex);
void setSemiNibbleAt(unsigned char& byte, int bitIndex, int value);
int getSemiNibbleAt(unsigned short word, int bitIndex);
void setSemiNibbleAt(unsigned short& word, int bitIndex, int value);
int getNthBit(unsigned char byte, int ordinal); /// determines the bit placement 0-7 of the ordinal set bit

View file

@ -0,0 +1,374 @@
//
// facialExpressions.js
// A script to set different emotions using blend shapes
//
// Author: Elisa Lupin-Jimenez
// Copyright High Fidelity 2018
//
// Licensed under the Apache 2.0 License
// See accompanying license file or http://apache.org/
//
// All assets are under CC Attribution Non-Commercial
// http://creativecommons.org/licenses/
//
(function() {
var TABLET_BUTTON_NAME = "EMOTIONS";
// TODO: ADD HTML LANDING PAGE
var TRANSITION_TIME_SECONDS = 0.25;
var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
var icon = "https://hifi-content.s3.amazonaws.com/elisalj/emoji_scripts/icons/emoji-i.svg";
var activeIcon = "https://hifi-content.s3.amazonaws.com/elisalj/emoji_scripts/icons/emoji-a.svg";
var isActive = true;
var controllerMappingName;
var controllerMapping;
var tabletButton = tablet.addButton({
icon: icon,
activeIcon: activeIcon,
text: TABLET_BUTTON_NAME,
isActive: true
});
var toggle = function() {
isActive = !isActive;
tabletButton.editProperties({isActive: isActive});
if (isActive) {
Controller.enableMapping(controllerMappingName);
} else {
setEmotion(DEFAULT);
Controller.disableMapping(controllerMappingName);
}
};
tabletButton.clicked.connect(toggle);
var DEFAULT = {
"EyeOpen_L": 0.00,
"EyeOpen_R": 0.00,
"EyeBlink_L": 0.00,
"EyeBlink_R": 0.00,
"EyeSquint_L": 0.00,
"EyeSquint_R": 0.00,
"BrowsD_L": 0.00,
"BrowsD_R": 0.00,
"BrowsU_L": 0.00,
"BrowsU_C": 0.00,
"JawOpen": 0.00,
"JawFwd": 0.00,
"MouthFrown_L": 0.00,
"MouthFrown_R": 0.00,
"MouthSmile_L": 0.00,
"MouthSmile_R": 0.00,
"MouthDimple_L": 0.00,
"MouthDimple_R": 0.00,
"LipsUpperClose": 0.00,
"LipsLowerClose": 0.00,
"LipsLowerOpen": 0.00,
"ChinUpperRaise": 0.00,
"Sneer": 0.00,
"Puff": 0.00
};
var SMILE = {
"EyeOpen_L": 0.00,
"EyeOpen_R": 0.00,
"EyeBlink_L": 0.30,
"EyeBlink_R": 0.30,
"EyeSquint_L": 0.90,
"EyeSquint_R": 0.90,
"BrowsD_L": 1.00,
"BrowsD_R": 1.00,
"BrowsU_L": 0.00,
"BrowsU_C": 0.00,
"JawOpen": 0.00,
"JawFwd": 0.00,
"MouthFrown_L": 0.00,
"MouthFrown_R": 0.00,
"MouthSmile_L": 1.00,
"MouthSmile_R": 1.00,
"MouthDimple_L": 1.00,
"MouthDimple_R": 1.00,
"LipsUpperClose": 0.40,
"LipsLowerClose": 0.30,
"LipsLowerOpen": 0.25,
"ChinUpperRaise": 0.35,
"Sneer": 0.00,
"Puff": 0.00
};
var LAUGH = {
"EyeOpen_L": 0.00,
"EyeOpen_R": 0.00,
"EyeBlink_L": 0.45,
"EyeBlink_R": 0.45,
"EyeSquint_L": 0.75,
"EyeSquint_R": 0.75,
"BrowsD_L": 0.00,
"BrowsD_R": 0.00,
"BrowsU_L": 0.00,
"BrowsU_C": 0.50,
"JawOpen": 0.50,
"JawFwd": 0.00,
"MouthFrown_L": 0.00,
"MouthFrown_R": 0.00,
"MouthSmile_L": 1.00,
"MouthSmile_R": 1.00,
"MouthDimple_L": 1.00,
"MouthDimple_R": 1.00,
"LipsUpperClose": 0.00,
"LipsLowerClose": 0.00,
"LipsLowerOpen": 0.00,
"ChinUpperRaise": 0.30,
"Sneer": 1.00,
"Puff": 0.30
};
var FLIRT = {
"EyeOpen_L": 0.00,
"EyeOpen_R": 0.00,
"EyeBlink_L": 0.50,
"EyeBlink_R": 0.50,
"EyeSquint_L": 0.25,
"EyeSquint_R": 0.25,
"BrowsD_L": 0.00,
"BrowsD_R": 1.00,
"BrowsU_L": 0.55,
"BrowsU_C": 0.00,
"JawOpen": 0.00,
"JawFwd": 0.00,
"MouthFrown_L": 0.00,
"MouthFrown_R": 0.00,
"MouthSmile_L": 0.50,
"MouthSmile_R": 0.00,
"MouthDimple_L": 1.00,
"MouthDimple_R": 1.00,
"LipsUpperClose": 0.00,
"LipsLowerClose": 0.00,
"LipsLowerOpen": 0.00,
"ChinUpperRaise": 0.00,
"Sneer": 0.00,
"Puff": 0.00
};
var SAD = {
"EyeOpen_L": 0.00,
"EyeOpen_R": 0.00,
"EyeBlink_L": 0.30,
"EyeBlink_R": 0.30,
"EyeSquint_L": 0.30,
"EyeSquint_R": 0.30,
"BrowsD_L": 0.00,
"BrowsD_R": 0.00,
"BrowsU_L": 0.00,
"BrowsU_C": 0.50,
"JawOpen": 0.00,
"JawFwd": 0.80,
"MouthFrown_L": 0.80,
"MouthFrown_R": 0.80,
"MouthSmile_L": 0.00,
"MouthSmile_R": 0.00,
"MouthDimple_L": 0.00,
"MouthDimple_R": 0.00,
"LipsUpperClose": 0.00,
"LipsLowerClose": 0.50,
"LipsLowerOpen": 0.00,
"ChinUpperRaise": 0.00,
"Sneer": 0.00,
"Puff": 0.00
};
var ANGRY = {
"EyeOpen_L": 1.00,
"EyeOpen_R": 1.00,
"EyeBlink_L": 0.00,
"EyeBlink_R": 0.00,
"EyeSquint_L": 1.00,
"EyeSquint_R": 1.00,
"BrowsD_L": 1.00,
"BrowsD_R": 1.00,
"BrowsU_L": 0.00,
"BrowsU_C": 0.00,
"JawOpen": 0.00,
"JawFwd": 0.00,
"MouthFrown_L": 0.50,
"MouthFrown_R": 0.50,
"MouthSmile_L": 0.00,
"MouthSmile_R": 0.00,
"MouthDimple_L": 0.00,
"MouthDimple_R": 0.00,
"LipsUpperClose": 0.50,
"LipsLowerClose": 0.50,
"LipsLowerOpen": 0.00,
"ChinUpperRaise": 0.00,
"Sneer": 0.50,
"Puff": 0.00
};
var FEAR = {
"EyeOpen_L": 1.00,
"EyeOpen_R": 1.00,
"EyeBlink_L": 0.00,
"EyeBlink_R": 0.00,
"EyeSquint_L": 0.00,
"EyeSquint_R": 0.00,
"BrowsD_L": 0.00,
"BrowsD_R": 0.00,
"BrowsU_L": 0.00,
"BrowsU_C": 1.00,
"JawOpen": 0.15,
"JawFwd": 0.00,
"MouthFrown_L": 0.30,
"MouthFrown_R": 0.30,
"MouthSmile_L": 0.00,
"MouthSmile_R": 0.00,
"MouthDimple_L": 0.00,
"MouthDimple_R": 0.00,
"LipsUpperClose": 0.00,
"LipsLowerClose": 0.00,
"LipsLowerOpen": 0.00,
"ChinUpperRaise": 0.00,
"Sneer": 0.00,
"Puff": 0.00
};
var DISGUST = {
"EyeOpen_L": 0.00,
"EyeOpen_R": 0.00,
"EyeBlink_L": 0.25,
"EyeBlink_R": 0.25,
"EyeSquint_L": 1.00,
"EyeSquint_R": 1.00,
"BrowsD_L": 1.00,
"BrowsD_R": 1.00,
"BrowsU_L": 0.00,
"BrowsU_C": 0.00,
"JawOpen": 0.00,
"JawFwd": 0.00,
"MouthFrown_L": 1.00,
"MouthFrown_R": 1.00,
"MouthSmile_L": 0.00,
"MouthSmile_R": 0.00,
"MouthDimple_L": 0.00,
"MouthDimple_R": 0.00,
"LipsUpperClose": 0.00,
"LipsLowerClose": 0.75,
"LipsLowerOpen": 0.00,
"ChinUpperRaise": 0.75,
"Sneer": 1.00,
"Puff": 0.00
};
function mixValue(valueA, valueB, percentage) {
return valueA + ((valueB - valueA) * percentage);
}
var lastEmotionUsed = DEFAULT;
var emotion = DEFAULT;
var isChangingEmotion = false;
var changingEmotionPercentage = 0.0;
Script.update.connect(function(deltaTime) {
if (!isChangingEmotion) {
return;
}
changingEmotionPercentage += deltaTime / TRANSITION_TIME_SECONDS;
if (changingEmotionPercentage >= 1.0) {
changingEmotionPercentage = 1.0;
isChangingEmotion = false;
if (emotion === DEFAULT) {
MyAvatar.hasScriptedBlendshapes = false;
}
}
for (var blendshape in emotion) {
MyAvatar.setBlendshape(blendshape,
mixValue(lastEmotionUsed[blendshape], emotion[blendshape], changingEmotionPercentage));
}
});
function setEmotion(currentEmotion) {
if (emotion !== lastEmotionUsed) {
lastEmotionUsed = emotion;
}
if (currentEmotion !== lastEmotionUsed) {
changingEmotionPercentage = 0.0;
emotion = currentEmotion;
isChangingEmotion = true;
MyAvatar.hasScriptedBlendshapes = true;
}
}
controllerMappingName = 'Hifi-FacialExpressions-Mapping';
controllerMapping = Controller.newMapping(controllerMappingName);
controllerMapping.from(Controller.Hardware.Keyboard.H).to(function(value) {
if (value !== 0) {
setEmotion(SMILE);
}
});
controllerMapping.from(Controller.Hardware.Keyboard.J).to(function(value) {
if (value !== 0) {
setEmotion(LAUGH);
}
});
controllerMapping.from(Controller.Hardware.Keyboard.K).to(function(value) {
if (value !== 0) {
setEmotion(FLIRT);
}
});
controllerMapping.from(Controller.Hardware.Keyboard.L).to(function(value) {
if (value !== 0) {
setEmotion(SAD);
}
});
controllerMapping.from(Controller.Hardware.Keyboard.V).to(function(value) {
if (value !== 0) {
setEmotion(ANGRY);
}
});
controllerMapping.from(Controller.Hardware.Keyboard.B).to(function(value) {
if (value !== 0) {
setEmotion(FEAR);
}
});
controllerMapping.from(Controller.Hardware.Keyboard.M).to(function(value) {
if (value !== 0) {
setEmotion(DISGUST);
}
});
controllerMapping.from(Controller.Hardware.Keyboard.N).to(function(value) {
if (value !== 0) {
setEmotion(DEFAULT);
}
});
Controller.enableMapping(controllerMappingName);
Script.scriptEnding.connect(function() {
tabletButton.clicked.disconnect(toggle);
tablet.removeButton(tabletButton);
Controller.disableMapping(controllerMappingName);
if (emotion !== DEFAULT || isChangingEmotion) {
isChangingEmotion = false;
for (var blendshape in DEFAULT) {
MyAvatar.setBlendshape(blendshape, DEFAULT[blendshape]);
}
MyAvatar.hasScriptedBlendshapes = false;
}
});
}());

View file

@ -60,7 +60,14 @@ function getBuildInfo() {
}
}
const DEFAULT_BUILD_INFO = { releaseType: "", buildIdentifier: "dev" };
const DEFAULT_BUILD_INFO = {
releaseType: "",
buildIdentifier: "dev",
buildNumber: "0",
stableBuild: "0",
organization: "High Fidelity - dev"
};
var buildInfo = DEFAULT_BUILD_INFO;
if (buildInfoPath) {
@ -768,33 +775,25 @@ function onContentLoaded() {
// maybeShowSplash();
if (buildInfo.releaseType == 'PRODUCTION' && !argv.noUpdater) {
var currentVersion = null;
try {
currentVersion = parseInt(buildInfo.buildIdentifier);
} catch (e) {
}
if (currentVersion !== null) {
const CHECK_FOR_UPDATES_INTERVAL_SECONDS = 60 * 30;
var hasShownUpdateNotification = false;
const updateChecker = new updater.UpdateChecker(currentVersion, CHECK_FOR_UPDATES_INTERVAL_SECONDS);
updateChecker.on('update-available', function(latestVersion, url) {
if (!hasShownUpdateNotification) {
notifier.notify({
icon: notificationIcon,
title: 'An update is available!',
message: 'High Fidelity version ' + latestVersion + ' is available',
wait: true,
url: url
});
hasShownUpdateNotification = true;
}
});
notifier.on('click', function(notifierObject, options) {
log.debug("Got click", options.url);
shell.openExternal(options.url);
});
}
const CHECK_FOR_UPDATES_INTERVAL_SECONDS = 60 * 30;
var hasShownUpdateNotification = false;
const updateChecker = new updater.UpdateChecker(buildInfo, CHECK_FOR_UPDATES_INTERVAL_SECONDS);
updateChecker.on('update-available', function(latestVersion, url) {
if (!hasShownUpdateNotification) {
notifier.notify({
icon: notificationIcon,
title: 'An update is available!',
message: 'High Fidelity version ' + latestVersion + ' is available',
wait: true,
url: url
});
hasShownUpdateNotification = true;
}
});
notifier.on('click', function(notifierObject, options) {
log.debug("Got click", options.url);
shell.openExternal(options.url);
});
}
deleteOldFiles(logPath, DELETE_LOG_FILES_OLDER_THAN_X_SECONDS, LOG_FILE_REGEX);

View file

@ -8,10 +8,48 @@ const os = require('os');
const platform = os.type() == 'Windows_NT' ? 'windows' : 'mac';
const BUILDS_URL = 'https://highfidelity.com/builds.xml';
const DEV_BUILDS_URL = 'https://highfidelity.com/dev-builds.xml';
function UpdateChecker(currentVersion, checkForUpdatesEveryXSeconds) {
this.currentVersion = currentVersion;
log.debug('cur', currentVersion);
// returns 1 if A is greater, 0 if equal, -1 if A is lesser
function semanticVersionCompare(versionA, versionB) {
var versionAParts = versionA.split('.');
var versionBParts = versionB.split('.');
// make sure each version has 3 parts
var partsLength = versionAParts.length;
while (partsLength < 3) {
partsLength = versionAParts.push(0);
}
partsLength = versionBParts.length;
while (partsLength < 3) {
partsLength = versionBParts.push(0);
}
// map all of the parts to numbers
versionAParts = versionAParts.map(Number);
versionBParts = versionBParts.map(Number);
for (var i = 0; i < 3; ++i) {
if (versionAParts[i] == versionBParts[i]) {
continue;
} else if (versionAParts[i] > versionBParts[i]) {
return 1;
} else {
return -1;
}
}
return 0;
}
function UpdateChecker(buildInfo, checkForUpdatesEveryXSeconds) {
this.stableBuild = (buildInfo.stableBuild == "1");
this.buildsURL = this.stableBuild ? BUILDS_URL : DEV_BUILDS_URL;
this.currentVersion = this.stableBuild ? buildInfo.buildIdentifier : parseInt(buildInfo.buildNumber);
log.debug('Current version is', this.currentVersion);
setInterval(this.checkForUpdates.bind(this), checkForUpdatesEveryXSeconds * 1000);
this.checkForUpdates();
@ -20,7 +58,7 @@ util.inherits(UpdateChecker, events.EventEmitter);
UpdateChecker.prototype = extend(UpdateChecker.prototype, {
checkForUpdates: function() {
log.debug("Checking for updates");
request(BUILDS_URL, (error, response, body) => {
request(this.buildsURL, (error, response, body) => {
if (error) {
log.debug("Error", error);
return;
@ -29,12 +67,32 @@ UpdateChecker.prototype = extend(UpdateChecker.prototype, {
try {
var $ = cheerio.load(body, { xmlMode: true });
const latestBuild = $('project[name="interface"] platform[name="' + platform + '"]').children().first();
const latestVersion = parseInt(latestBuild.find('version').text());
log.debug("Latest version is:", latestVersion, this.currentVersion);
if (latestVersion > this.currentVersion) {
var latestVersion = 0;
if (this.stableBuild) {
latestVersion = latestBuild.find('stable_version').text();
} else {
latestVersion = parseInt(latestBuild.find('version').text());
}
log.debug("Latest available update version is:", latestVersion);
var updateAvailable = false;
if (this.stableBuild) {
// compare the semantic versions to see if the update is newer
updateAvailable = (semanticVersionCompare(latestVersion, this.currentVersion) == 1);
} else {
// for master builds we just compare the versions as integers
updateAvailable = latestVersion > this.currentVersion;
}
if (updateAvailable) {
const url = latestBuild.find('url').text();
this.emit('update-available', latestVersion, url);
}
} catch (e) {
log.warn("Error when checking for updates", e);
}

View file

@ -0,0 +1,23 @@
-- create the domain protocol
p_hf_domain = Proto("hf-domain", "HF Domain Protocol")
-- domain packet fields
local f_domain_id = ProtoField.guid("hf_domain.domain_id", "Domain ID")
local f_domain_local_id = ProtoField.uint16("hf_domain.domain_local_id", "Domain Local ID")
p_hf_domain.fields = {
f_domain_id, f_domain_local_id
}
function p_hf_domain.dissector(buf, pinfo, tree)
pinfo.cols.protocol = p_hf_domain.name
domain_subtree = tree:add(p_hf_domain, buf())
local i = 0
domain_subtree:add(f_domain_id, buf(i, 16))
i = i + 16
domain_subtree:add_le(f_domain_local_id, buf(i, 2))
end
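For reference, a hypothetical C++ mirror of the prologue this dissector walks: a 16-byte domain UUID followed by a little-endian 16-bit local ID.

#include <array>
#include <cstdint>

#pragma pack(push, 1)
struct DomainListPrologue {
    std::array<uint8_t, 16> domainId; // RFC 4122 UUID bytes
    uint16_t domainLocalId;           // little-endian on the wire
};
#pragma pack(pop)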

View file

@ -4,11 +4,21 @@ p_hf_entity = Proto("hf-entity", "HF Entity Protocol")
-- entity packet fields
local f_entity_sequence_number = ProtoField.uint16("hf_entity.sequence_number", "Sequence Number")
local f_entity_timestamp = ProtoField.uint64("hf_entity.timestamp", "Timestamp")
local f_octal_code_bytes = ProtoField.uint8("hf_entity.octal_code_bytes", "Octal Code Bytes")
local f_octal_code_three_bit_sections = ProtoField.uint8("hf_entity.octal_code_three_bit_sections", "Octal Code Three Bit Sections")
local f_octal_code = ProtoField.bytes("hf_entity.octal_code", "Octal Code")
local f_entity_id = ProtoField.guid("hf_entity.entity_id", "Entity ID")
local f_last_edited = ProtoField.uint64("hf_entity.last_edited", "Last Edited")
local f_coded_property_type = ProtoField.bytes("hf_entity.coded_property_type", "Coded Property Type")
local f_property_type = ProtoField.uint32("hf_entity.property_type", "Property Type")
local f_coded_update_delta = ProtoField.bytes("hf_entity.f_coded_update_delta", "Coded Update Delta")
local f_update_delta = ProtoField.uint32("hf_entity.update_delta", "Update Delta")
p_hf_entity.fields = {
f_entity_sequence_number, f_entity_timestamp, f_octal_code_bytes, f_entity_id
f_entity_sequence_number, f_entity_timestamp,
f_octal_code_three_bit_sections, f_octal_code,
f_last_edited, f_entity_id,
f_coded_property_type, f_property_type,
f_coded_update_delta, f_update_delta
}
function p_hf_entity.dissector(buf, pinfo, tree)
@ -16,21 +26,72 @@ function p_hf_entity.dissector(buf, pinfo, tree)
entity_subtree = tree:add(p_hf_entity, buf())
i = 0
local i = 0
entity_subtree:add_le(f_entity_sequence_number, buf(i, 2))
i = i + 2
entity_subtree:add_le(f_entity_timestamp, buf(i, 4))
i = i + 4
entity_subtree:add_le(f_entity_timestamp, buf(i, 8))
i = i + 8
-- figure out the number of bytes the octal code takes
local octal_code_bytes = buf(i, 1):le_uint()
entity_subtree:add_le(f_octal_code_bytes, buf(i, 1))
-- figure out the number of three bit sections in the octal code
local octal_code_three_bit_sections = buf(i, 1):le_uint()
entity_subtree:add_le(f_octal_code_three_bit_sections, buf(i, 1))
i = i + 1
-- skip over the octal code
i = i + 1 + octal_code_bytes
-- read the bytes for the octal code
local octal_code_bytes = math.ceil((octal_code_three_bit_sections * 3) / 8)
entity_subtree:add_le(f_octal_code, buf(i, octal_code_bytes))
i = i + octal_code_bytes
-- read the last edited timestamp
entity_subtree:add_le(f_last_edited, buf(i, 8))
i = i + 8
-- read the entity ID
entity_subtree:add(f_entity_id, buf(i, 16))
i = i + 16
-- figure out the property type and the size of the coded value
local property_type, coded_property_bytes = number_of_coded_bytes(buf(i))
entity_subtree:add(f_coded_property_type, buf(i, coded_property_bytes))
entity_subtree:add(f_property_type, property_type)
i = i + coded_property_bytes
-- figure out the update delta and the size of the coded value
local update_delta, coded_update_delta_bytes = number_of_coded_bytes(buf(i))
entity_subtree:add(f_coded_update_delta, buf(i, coded_update_delta_bytes))
entity_subtree:add(f_update_delta, update_delta)
i = i + coded_update_delta_bytes
end
function number_of_coded_bytes(buf)
local coded_buffer = buf(0, 4):le_uint() -- max 64 bit value means max 10 header bits
-- first figure out the total number of bytes for the coded value based
-- on the bits in the header
local total_coded_bytes = 1
for bit = 0, 10, 1 do
local header_bit = bit32.extract(coded_buffer, bit)
if header_bit == 1 then
total_coded_bytes = total_coded_bytes + 1
else
break
end
end
-- pull out the bits and write them to our decoded value
local decoded_value = 0
local decoded_position = 0
local total_bits = total_coded_bytes * 8
for bit = total_coded_bytes, total_bits - 1, 1 do
local value_bit = bit32.extract(coded_buffer, total_bits - bit - 1)
decoded_value = bit32.replace(decoded_value, value_bit, decoded_position)
decoded_position = decoded_position + 1
end
return decoded_value, total_coded_bytes
end
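For readers more comfortable in C++, a sketch mirroring the Lua decoder above bit for bit; codedBuffer is the same little-endian 32-bit read of the packet bytes, so like the Lua it only covers values whose coded form fits in four bytes.

#include <cassert>
#include <cstdint>
#include <utility>

// Leading 1-bits announce extra bytes; the remaining bits carry the value,
// written here into the result from its least significant bit upward.
std::pair<uint32_t, int> decodeByteCountCoded(uint32_t codedBuffer) {
    int totalCodedBytes = 1;
    for (int bit = 0; bit <= 10; ++bit) {
        if ((codedBuffer >> bit) & 1u) {
            ++totalCodedBytes;
        } else {
            break;
        }
    }
    assert(totalCodedBytes <= 4); // a 32-bit window holds at most 4 coded bytes

    uint32_t decoded = 0;
    int decodedPosition = 0;
    const int totalBits = totalCodedBytes * 8;
    for (int bit = totalCodedBytes; bit < totalBits; ++bit) {
        uint32_t valueBit = (codedBuffer >> (totalBits - bit - 1)) & 1u;
        decoded |= valueBit << decodedPosition;
        ++decodedPosition;
    }
    return { decoded, totalCodedBytes };
}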

View file

@ -118,6 +118,10 @@ local packet_types = {
[54] = "AssetGetInfoReply"
}
local unsourced_packet_types = {
["DomainList"] = true
}
function p_hfudt.dissector(buf, pinfo, tree)
-- make sure this isn't a STUN packet - those don't follow HFUDT format
@ -230,54 +234,63 @@ function p_hfudt.dissector(buf, pinfo, tree)
-- if the message bit is set, handle the second word
if message_bit == 1 then
payload_offset = 12
payload_offset = 12
local second_word = buf(4, 4):le_uint()
local second_word = buf(4, 4):le_uint()
-- read message position from upper 2 bits
local message_position = bit32.rshift(second_word, 30)
local position = subtree:add(f_message_position, message_position)
-- read message position from upper 2 bits
local message_position = bit32.rshift(second_word, 30)
local position = subtree:add(f_message_position, message_position)
if message_positions[message_position] ~= nil then
-- if we know this position then add the name
position:append_text(" (".. message_positions[message_position] .. ")")
end
if message_positions[message_position] ~= nil then
-- if we know this position then add the name
position:append_text(" (".. message_positions[message_position] .. ")")
end
-- read message number from lower 30 bits
subtree:add(f_message_number, bit32.band(second_word, 0x3FFFFFFF))
-- read message number from lower 30 bits
subtree:add(f_message_number, bit32.band(second_word, 0x3FFFFFFF))
-- read the message part number
subtree:add(f_message_part_number, buf(8, 4):le_uint())
-- read the message part number
subtree:add(f_message_part_number, buf(8, 4):le_uint())
end
-- read the type
local packet_type = buf(payload_offset, 1):le_uint()
local ptype = subtree:add_le(f_type, buf(payload_offset, 1))
if packet_types[packet_type] ~= nil then
subtree:add(f_type_text, packet_types[packet_type])
local packet_type_text = packet_types[packet_type]
if packet_type_text ~= nil then
subtree:add(f_type_text, packet_type_text)
-- if we know this packet type then add the name
ptype:append_text(" (".. packet_types[packet_type] .. ")")
ptype:append_text(" (".. packet_type_text .. ")")
end
-- read the version
subtree:add_le(f_version, buf(payload_offset + 1, 1))
-- read node local ID
local sender_id = buf(payload_offset + 2, 2)
subtree:add_le(f_sender_id, sender_id)
local i = payload_offset + 2
local i = payload_offset + 4
if unsourced_packet_types[packet_type_text] == nil then
-- read node local ID
local sender_id = buf(payload_offset + 2, 2)
subtree:add_le(f_sender_id, sender_id)
i = i + 2
-- read HMAC MD5 hash
subtree:add(f_hmac_hash, buf(i, 16))
i = i + 16
-- read HMAC MD5 hash
subtree:add(f_hmac_hash, buf(i, 16))
i = i + 16
end
-- Domain packets
if packet_type_text == "DomainList" then
Dissector.get("hf-domain"):call(buf(i):tvb(), pinfo, tree)
end
-- AvatarData or BulkAvatarDataPacket
if packet_types[packet_type] == "AvatarData" or packet_types[packet_type] == "BulkAvatarDataPacket" then
if packet_type_text == "AvatarData" or packet_type_text == "BulkAvatarData" then
Dissector.get("hf-avatar"):call(buf(i):tvb(), pinfo, tree)
end
if packet_types[packet_type] == "EntityEdit" then
if packet_type_text == "EntityEdit" then
Dissector.get("hf-entity"):call(buf(i):tvb(), pinfo, tree)
end
end

View file

@ -25,12 +25,6 @@ Oven* Oven::_staticInstance { nullptr };
Oven::Oven() {
_staticInstance = this;
// enable compression in image library
image::setColorTexturesCompressionEnabled(true);
image::setGrayscaleTexturesCompressionEnabled(true);
image::setNormalTexturesCompressionEnabled(true);
image::setCubeTexturesCompressionEnabled(true);
// setup our worker threads
setupWorkerThreads(QThread::idealThreadCount());

View file

@ -15,6 +15,7 @@
#include <QtCore/QUrl>
#include <image/Image.h>
#include <TextureBaker.h>
#include "BakerCLI.h"
@ -47,10 +48,7 @@ OvenCLIApplication::OvenCLIApplication(int argc, char* argv[]) :
if (parser.isSet(CLI_DISABLE_TEXTURE_COMPRESSION_PARAMETER)) {
qDebug() << "Disabling texture compression";
image::setColorTexturesCompressionEnabled(false);
image::setGrayscaleTexturesCompressionEnabled(false);
image::setNormalTexturesCompressionEnabled(false);
image::setCubeTexturesCompressionEnabled(false);
TextureBaker::setCompressionEnabled(false);
}
QMetaObject::invokeMethod(cli, "bakeFile", Qt::QueuedConnection, Q_ARG(QUrl, inputUrl),