Merge branch 'master' of https://github.com/highfidelity/hifi into deference

Andrzej Kapolka committed 2014-09-19 10:30:40 -07:00
commit 08fda3bbef
23 changed files with 183 additions and 96 deletions

View file

@@ -67,7 +67,7 @@
 "type": "checkbox",
 "label": "Enable Positional Filter",
 "help": "If enabled, positional audio stream uses lowpass filter",
-"default": true
+"default": false
 }
 }
 }

View file

@@ -12,6 +12,10 @@
 // Set the following variables to the values needed
 var filename = "http://s3-us-west-1.amazonaws.com/highfidelity-public/ozan/bartender.rec";
 var playFromCurrentLocation = true;
+var useDisplayName = true;
+var useAttachments = true;
+var useHeadModel = true;
+var useSkeletonModel = true;
 // ID of the agent. Two agents can't have the same ID.
 var id = 0;
@@ -45,10 +49,14 @@ COLORS[HIDE] = { red: HIDE, green: 0, blue: 0 };
 controlVoxelPosition.x += id * controlVoxelSize;
-Avatar.setPlayFromCurrentLocation(playFromCurrentLocation);
 Avatar.loadRecording(filename);
+Avatar.setPlayFromCurrentLocation(playFromCurrentLocation);
+Avatar.setPlayerUseDisplayName(useDisplayName);
+Avatar.setPlayerUseAttachments(useAttachments);
+Avatar.setPlayerUseHeadModel(useHeadModel);
+Avatar.setPlayerUseSkeletonModel(useSkeletonModel);
 function setupVoxelViewer() {
 var voxelViewerOffset = 10;
 var voxelViewerPosition = JSON.parse(JSON.stringify(controlVoxelPosition));

View file

@@ -12,7 +12,14 @@
 Script.include("toolBars.js");
 var recordingFile = "recording.rec";
-var playFromCurrentLocation = true;
+function setPlayerOptions() {
+    MyAvatar.setPlayFromCurrentLocation(true);
+    MyAvatar.setPlayerUseDisplayName(false);
+    MyAvatar.setPlayerUseAttachments(false);
+    MyAvatar.setPlayerUseHeadModel(false);
+    MyAvatar.setPlayerUseSkeletonModel(false);
+}
 var windowDimensions = Controller.getViewportDimensions();
 var TOOL_ICON_URL = "http://s3-us-west-1.amazonaws.com/highfidelity-public/images/tools/";
@@ -186,7 +193,7 @@ function mousePressEvent(event) {
 toolBar.setAlpha(ALPHA_ON, saveIcon);
 toolBar.setAlpha(ALPHA_ON, loadIcon);
 } else if (MyAvatar.playerLength() > 0) {
-MyAvatar.setPlayFromCurrentLocation(playFromCurrentLocation);
+setPlayerOptions();
 MyAvatar.setPlayerLoop(false);
 MyAvatar.startPlaying();
 toolBar.setAlpha(ALPHA_OFF, recordIcon);
@@ -201,7 +208,7 @@ function mousePressEvent(event) {
 toolBar.setAlpha(ALPHA_ON, saveIcon);
 toolBar.setAlpha(ALPHA_ON, loadIcon);
 } else if (MyAvatar.playerLength() > 0) {
-MyAvatar.setPlayFromCurrentLocation(playFromCurrentLocation);
+setPlayerOptions();
 MyAvatar.setPlayerLoop(true);
 MyAvatar.startPlaying();
 toolBar.setAlpha(ALPHA_OFF, recordIcon);

View file

@@ -1110,7 +1110,7 @@ function keyPressEvent(event) {
 print("Color = " + (whichColor + 1));
 swatchesSound.play(whichColor);
 moveTools();
-} else if (event.text == "0") {
+} else if (event.text == "0" && voxelToolSelected) {
 // Create a brand new 1 meter voxel in front of your avatar
 var newPosition = getNewVoxelPosition();
 var newVoxel = {

View file

@@ -16,24 +16,21 @@ var debug = false;
 var movingWithHead = false;
 var headStartPosition, headStartDeltaPitch, headStartFinalPitch, headStartRoll, headStartYaw;
-var HEAD_MOVE_DEAD_ZONE = 0.10;
-var HEAD_STRAFE_DEAD_ZONE = 0.0;
-var HEAD_ROTATE_DEAD_ZONE = 0.0;
-//var HEAD_THRUST_FWD_SCALE = 12000.0;
-//var HEAD_THRUST_STRAFE_SCALE = 0.0;
-var HEAD_YAW_RATE = 1.0;
+var HEAD_MOVE_DEAD_ZONE = 0.05;
+var HEAD_STRAFE_DEAD_ZONE = 0.03;
+var HEAD_ROTATE_DEAD_ZONE = 10.0;
+var HEAD_YAW_RATE = 1.5;
 var HEAD_PITCH_RATE = 1.0;
-//var HEAD_ROLL_THRUST_SCALE = 75.0;
-//var HEAD_PITCH_LIFT_THRUST = 3.0;
-var WALL_BOUNCE = 4000.0;
+var WALL_BOUNCE = 10000.0;
+var FIXED_WALK_VELOCITY = 1.5;
 // Modify these values to tweak the strength of the motion.
 // A larger *FACTOR increases the speed.
 // A lower SHORT_TIMESCALE makes the motor achieve full speed faster.
-var HEAD_VELOCITY_FWD_FACTOR = 20.0;
-var HEAD_VELOCITY_LEFT_FACTOR = 20.0;
+var HEAD_VELOCITY_FWD_FACTOR = 10.0;
+var HEAD_VELOCITY_LEFT_FACTOR = 0.0;
 var HEAD_VELOCITY_UP_FACTOR = 20.0;
-var SHORT_TIMESCALE = 0.125;
+var SHORT_TIMESCALE = 0.01;
 var VERY_LARGE_TIMESCALE = 1000000.0;
 var xAxis = {x:1.0, y:0.0, z:0.0 };
@@ -43,9 +40,10 @@ var zAxis = {x:0.0, y:0.0, z:1.0 };
 // If these values are set to something
 var maxVelocity = 1.25;
 var noFly = true;
+var fixedWalkVelocity = true;
 //var roomLimits = { xMin: 618, xMax: 635.5, zMin: 528, zMax: 552.5 };
-var roomLimits = { xMin: -1, xMax: 0, zMin: 0, zMax: 0 };
+var roomLimits = { xMin: 193.0, xMax: 206.5, zMin: 251.4, zMax: 269.5 };
 function isInRoom(position) {
 var BUFFER = 2.0;
@@ -71,25 +69,49 @@ function moveWithHead(deltaTime) {
 var deltaPitch = MyAvatar.getHeadDeltaPitch() - headStartDeltaPitch;
 var deltaRoll = MyAvatar.getHeadFinalRoll() - headStartRoll;
 var velocity = MyAvatar.getVelocity();
-var bodyLocalCurrentHeadVector = Vec3.subtract(MyAvatar.getHeadPosition(), MyAvatar.position);
-bodyLocalCurrentHeadVector = Vec3.multiplyQbyV(Quat.angleAxis(-deltaYaw, {x:0, y: 1, z:0}), bodyLocalCurrentHeadVector);
+var position = MyAvatar.position;
+var neckPosition = MyAvatar.getNeckPosition();
+var bodyLocalCurrentHeadVector = Vec3.subtract(neckPosition, position);
+bodyLocalCurrentHeadVector = Vec3.multiplyQbyV(Quat.inverse(MyAvatar.orientation), bodyLocalCurrentHeadVector);
 var headDelta = Vec3.subtract(bodyLocalCurrentHeadVector, headStartPosition);
-headDelta = Vec3.multiplyQbyV(Quat.inverse(Camera.getOrientation()), headDelta);
 headDelta.y = 0.0; // Don't respond to any of the vertical component of head motion
+headDelta = Vec3.multiplyQbyV(MyAvatar.orientation, headDelta);
+headDelta = Vec3.multiplyQbyV(Quat.inverse(Camera.getOrientation()), headDelta);
+var length = Vec3.length(headDelta);
+if (length > 1.0) {
+    // Needs fixed! Right now sometimes reported neck position jumps to a bad value
+    headDelta.x = headDelta.y = headDelta.z = 0.0;
+    length = 0.0;
+    return;
+}
 // Thrust based on leaning forward and side-to-side
 var targetVelocity = {x:0.0, y:0.0, z:0.0};
+if (length > HEAD_MOVE_DEAD_ZONE) {
+    //headDelta = Vec3.normalize(headDelta);
+    //targetVelocity = Vec3.multiply(headDelta, FIXED_WALK_VELOCITY);
+    targetVelocity = Vec3.multiply(headDelta, HEAD_VELOCITY_FWD_FACTOR);
+}
+/*
 if (Math.abs(headDelta.z) > HEAD_MOVE_DEAD_ZONE) {
-targetVelocity = Vec3.multiply(zAxis, -headDelta.z * HEAD_VELOCITY_FWD_FACTOR);
+if (fixedWalkVelocity) {
+targetVelocity = Vec3.multiply(zAxis, headDelta.z > 0 ? FIXED_WALK_VELOCITY : -FIXED_WALK_VELOCITY);
+} else {
+targetVelocity = Vec3.multiply(zAxis, headDelta.z * HEAD_VELOCITY_FWD_FACTOR);
+}
 }
 if (Math.abs(headDelta.x) > HEAD_STRAFE_DEAD_ZONE) {
-var deltaVelocity = Vec3.multiply(xAxis, -headDelta.x * HEAD_VELOCITY_LEFT_FACTOR);
+var deltaVelocity = Vec3.multiply(xAxis, headDelta.x * HEAD_VELOCITY_LEFT_FACTOR);
 targetVelocity = Vec3.sum(targetVelocity, deltaVelocity);
 }
+*/
 if (Math.abs(deltaYaw) > HEAD_ROTATE_DEAD_ZONE) {
 var orientation = Quat.multiply(Quat.angleAxis((deltaYaw + deltaRoll) * HEAD_YAW_RATE * deltaTime, yAxis), MyAvatar.orientation);
 MyAvatar.orientation = orientation;
 }
 // Thrust Up/Down based on head pitch
 if (!noFly) {
 var deltaVelocity = Vec3.multiply(yAxis, headDelta.y * HEAD_VELOCITY_UP_FACTOR);
@@ -121,7 +143,7 @@ function moveWithHead(deltaTime) {
 if (movingWithHead && Vec3.length(thrust) > 0.0) {
 // reduce the timescale of the motor so that it won't defeat the thrust code
 Vec3.print("adebug room containment thrust = ", thrust);
-motorTimescale = 4.0 * SHORT_TIMESCALE;
+motorTimescale = 1000000.0;
 }
 }
 MyAvatar.motorTimescale = motorTimescale;
@@ -130,7 +152,8 @@
 Controller.keyPressEvent.connect(function(event) {
 if (event.text == "SPACE" && !movingWithHead) {
 movingWithHead = true;
-headStartPosition = Vec3.subtract(MyAvatar.getHeadPosition(), MyAvatar.position);
+headStartPosition = Vec3.subtract(MyAvatar.getNeckPosition(), MyAvatar.position);
+headStartPosition = Vec3.multiplyQbyV(Quat.inverse(MyAvatar.orientation), headStartPosition);
 headStartDeltaPitch = MyAvatar.getHeadDeltaPitch();
 headStartFinalPitch = MyAvatar.getHeadFinalPitch();
 headStartRoll = MyAvatar.getHeadFinalRoll();
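The core of this script change is that the head offset is now measured from the neck joint and expressed in the avatar's local frame before being compared with the offset captured when head-driven movement began. A minimal JavaScript sketch of that idea, assuming the hifi scripting API (MyAvatar, Vec3, Quat) used above; the function names are illustrative and not part of the script:

// Capture the neck offset, rotated into the avatar's local frame, when
// head-driven movement starts (on SPACE in the script above).
function captureStartOffset() {
    var offset = Vec3.subtract(MyAvatar.getNeckPosition(), MyAvatar.position);
    return Vec3.multiplyQbyV(Quat.inverse(MyAvatar.orientation), offset);
}

// Per frame: how far the head has leaned away from the captured starting
// offset, ignoring the vertical component so crouching does not drive motion.
function currentLean(headStartPosition) {
    var offset = Vec3.subtract(MyAvatar.getNeckPosition(), MyAvatar.position);
    offset = Vec3.multiplyQbyV(Quat.inverse(MyAvatar.orientation), offset);
    var lean = Vec3.subtract(offset, headStartPosition);
    lean.y = 0.0;
    return lean;
}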

View file

@@ -220,6 +220,10 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
 listenPort = atoi(portStr);
 }
+// call the OAuthWebviewHandler static getter so that its instance lives in our thread
+// make sure it is ready before the NodeList might need it
+OAuthWebViewHandler::getInstance();
 // start the nodeThread so its event loop is running
 _nodeThread->start();
@@ -418,11 +422,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
 connect(_window, &MainWindow::windowGeometryChanged,
 _runningScriptsWidget, &RunningScriptsWidget::setBoundary);
-// call the OAuthWebviewHandler static getter so that its instance lives in our thread
-OAuthWebViewHandler::getInstance();
-// make sure the High Fidelity root CA is in our list of trusted certs
-OAuthWebViewHandler::addHighFidelityRootCAToSSLConfig();
 _trayIcon->show();
 #ifdef HAVE_RTMIDI

View file

@@ -97,6 +97,12 @@ glm::vec3 Avatar::getChestPosition() const {
 return _skeletonModel.getNeckPosition(neckPosition) ? (_position + neckPosition) * 0.5f : _position;
 }
+glm::vec3 Avatar::getNeckPosition() const {
+    glm::vec3 neckPosition;
+    return _skeletonModel.getNeckPosition(neckPosition) ? neckPosition : _position;
+}
 glm::quat Avatar::getWorldAlignedOrientation () const {
 return computeRotationFromBodyToWorldUp() * getOrientation();
 }

View file

@@ -155,6 +155,8 @@ public:
 Q_INVOKABLE void setJointModelPositionAndOrientation(const QString& name, const glm::vec3 position,
 const glm::quat& rotation);
+Q_INVOKABLE glm::vec3 getNeckPosition() const;
 Q_INVOKABLE glm::vec3 getVelocity() const { return _velocity; }
 Q_INVOKABLE glm::vec3 getAcceleration() const { return _acceleration; }
 Q_INVOKABLE glm::vec3 getAngularVelocity() const { return _angularVelocity; }

View file

@@ -32,6 +32,7 @@ Head::Head(Avatar* owningAvatar) :
 _eyePosition(0.0f, 0.0f, 0.0f),
 _scale(1.0f),
 _lastLoudness(0.0f),
+_longTermAverageLoudness(-1.0f),
 _audioAttack(0.0f),
 _angularVelocity(0,0,0),
 _renderLookatVectors(false),
@@ -62,7 +63,7 @@
 }
 void Head::simulate(float deltaTime, bool isMine, bool billboard) {
-// Update audio trailing average for rendering facial animations
 if (isMine) {
 MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
@@ -78,6 +79,18 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
 }
 }
 }
+// Update audio trailing average for rendering facial animations
+const float AUDIO_AVERAGING_SECS = 0.05f;
+const float AUDIO_LONG_TERM_AVERAGING_SECS = 30.f;
+_averageLoudness = glm::mix(_averageLoudness, _audioLoudness, glm::min(deltaTime / AUDIO_AVERAGING_SECS, 1.0f));
+if (_longTermAverageLoudness == -1.0) {
+    _longTermAverageLoudness = _averageLoudness;
+} else {
+    _longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));
+}
+float deltaLoudness = glm::max(0.0f, _averageLoudness - _longTermAverageLoudness);
+//qDebug() << "deltaLoudness: " << deltaLoudness;
 if (!(_isFaceshiftConnected || billboard)) {
 // Update eye saccades
@@ -93,9 +106,6 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
 }
 _saccade += (_saccadeTarget - _saccade) * 0.50f;
-const float AUDIO_AVERAGING_SECS = 0.05f;
-_averageLoudness = glm::mix(_averageLoudness, _audioLoudness, glm::min(deltaTime / AUDIO_AVERAGING_SECS, 1.0f));
 // Detect transition from talking to not; force blink after that and a delay
 bool forceBlink = false;
 const float TALKING_LOUDNESS = 100.0f;
@@ -108,8 +118,8 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
 }
 // Update audio attack data for facial animation (eyebrows and mouth)
-_audioAttack = 0.9f * _audioAttack + 0.1f * fabs(_audioLoudness - _lastLoudness);
-_lastLoudness = _audioLoudness;
+_audioAttack = 0.9f * _audioAttack + 0.1f * fabs((_audioLoudness - _longTermAverageLoudness) - _lastLoudness);
+_lastLoudness = (_audioLoudness - _longTermAverageLoudness);
 const float BROW_LIFT_THRESHOLD = 100.0f;
 if (_audioAttack > BROW_LIFT_THRESHOLD) {
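The loudness tracking added above is a pair of exponential moving averages: a fast one (0.05 s timescale) follows the current level while a slow one (30 s) tracks the background baseline, so the eyebrow and mouth animation now reacts to loudness relative to the recent average rather than to the absolute value. A minimal JavaScript sketch of the smoothing step used by both averages; the names are illustrative:

// Move average toward sample with weight deltaTime / timescale, capped at 1
// so a long frame cannot overshoot (equivalent to the glm::mix calls above).
function updateAverage(average, sample, deltaTime, timescaleSecs) {
    var alpha = Math.min(deltaTime / timescaleSecs, 1.0);
    return average + (sample - average) * alpha;
}

// Per frame:
// shortTerm = updateAverage(shortTerm, loudness, dt, 0.05);
// longTerm  = updateAverage(longTerm, shortTerm, dt, 30.0);
// var deltaLoudness = Math.max(0.0, shortTerm - longTerm);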

View file

@@ -123,6 +123,7 @@ private:
 float _scale;
 float _lastLoudness;
+float _longTermAverageLoudness;
 float _audioAttack;
 glm::vec3 _angularVelocity;
 bool _renderLookatVectors;

View file

@@ -15,6 +15,8 @@
 #include <LimitedNodeList.h>
 #include <OAuthNetworkAccessManager.h>
+#include "Application.h"
 #include "DataWebDialog.h"
 DataWebDialog::DataWebDialog() {
@@ -23,6 +25,12 @@ DataWebDialog::DataWebDialog() {
 // use an OAuthNetworkAccessManager instead of regular QNetworkAccessManager so our requests are authed
 page()->setNetworkAccessManager(OAuthNetworkAccessManager::getInstance());
+// have the page delegate external links so they can be captured by the Application in case they are a hifi link
+page()->setLinkDelegationPolicy(QWebPage::DelegateExternalLinks);
+// have the Application handle external links
+connect(this, &QWebView::linkClicked, Application::getInstance(), &Application::openUrl);
 }
 DataWebDialog* DataWebDialog::dialogForPath(const QString& path) {

View file

@@ -27,7 +27,7 @@ OAuthWebViewHandler::OAuthWebViewHandler() :
 _webViewRedisplayTimer(),
 _lastAuthorizationURL()
 {
+addHighFidelityRootCAToSSLConfig();
 }
 const char HIGH_FIDELITY_CA[] = "-----BEGIN CERTIFICATE-----\n"

View file

@@ -75,6 +75,8 @@ public:
 - (_b1 * _ym1)
 - (_b2 * _ym2);
+y = (y >= -EPSILON && y < EPSILON) ? 0.0f : y; // clamp to 0
 // update delay line
 _xm2 = _xm1;
 _xm1 = x;
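The added line flushes filter outputs within EPSILON of zero to exactly zero before they feed back through the delay line, so the recursive state settles to silence instead of decaying indefinitely; a common reason for such clamps is to keep the feedback path out of the denormal range. A direct restatement in JavaScript, assuming EPSILON is the small constant defined elsewhere in the filter header:

// Flush near-zero samples to exactly zero before they re-enter the
// filter's recursive state.
function flushToZero(y, EPSILON) {
    return (y >= -EPSILON && y < EPSILON) ? 0.0 : y;
}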

View file

@@ -40,8 +40,8 @@ void AudioSourceTone::updateCoefficients() {
 void AudioSourceTone::initialize() {
 const float32_t FREQUENCY_220_HZ = 220.0f;
-const float32_t GAIN_MINUS_3DB = 0.708f;
-setParameters(SAMPLE_RATE, FREQUENCY_220_HZ, GAIN_MINUS_3DB);
+const float32_t GAIN_MINUS_6DB = 0.501f;
+setParameters(SAMPLE_RATE, FREQUENCY_220_HZ, GAIN_MINUS_6DB);
 }
 void AudioSourceTone::setParameters(const float32_t sampleRate, const float32_t frequency, const float32_t amplitude) {
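The renamed constant follows from the standard decibel-to-amplitude conversion, amplitude = 10^(dB / 20). A one-line JavaScript check (the function name is illustrative):

// Convert a gain in decibels to a linear amplitude factor.
function dbToAmplitude(db) {
    return Math.pow(10, db / 20);
}
// dbToAmplitude(-3) ~= 0.708  (the old GAIN_MINUS_3DB)
// dbToAmplitude(-6) ~= 0.501  (the new GAIN_MINUS_6DB)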

View file

@@ -661,6 +661,30 @@ void AvatarData::setPlayerLoop(bool loop) {
 }
 }
+void AvatarData::setPlayerUseDisplayName(bool useDisplayName) {
+    if(_player) {
+        _player->useDisplayName(useDisplayName);
+    }
+}
+void AvatarData::setPlayerUseAttachments(bool useAttachments) {
+    if(_player) {
+        _player->useAttachements(useAttachments);
+    }
+}
+void AvatarData::setPlayerUseHeadModel(bool useHeadModel) {
+    if(_player) {
+        _player->useHeadModel(useHeadModel);
+    }
+}
+void AvatarData::setPlayerUseSkeletonModel(bool useSkeletonModel) {
+    if(_player) {
+        _player->useSkeletonModel(useSkeletonModel);
+    }
+}
 void AvatarData::play() {
 if (isPlaying()) {
 if (QThread::currentThread() != thread()) {

View file

@@ -302,6 +302,10 @@ public slots:
 void startPlaying();
 void setPlayFromCurrentLocation(bool playFromCurrentLocation);
 void setPlayerLoop(bool loop);
+void setPlayerUseDisplayName(bool useDisplayName);
+void setPlayerUseAttachments(bool useAttachments);
+void setPlayerUseHeadModel(bool useHeadModel);
+void setPlayerUseSkeletonModel(bool useSkeletonModel);
 void play();
 void stopPlaying();

View file

@@ -157,10 +157,9 @@ void Player::loopRecording() {
 setupAudioThread();
 _currentFrame = 0;
 _timer.restart();
 }
-void Player::loadFromFile(QString file) {
+void Player::loadFromFile(const QString& file) {
 if (_recording) {
 _recording->clear();
 } else {
@@ -204,7 +203,7 @@ void Player::play() {
 head->setFinalPitch(eulers.x);
 head->setFinalYaw(eulers.y);
 head->setFinalRoll(eulers.z);
-head->setLookAtPosition(currentFrame.getLookAtPosition());
+head->setLookAtPosition(context->position + context->orientation * currentFrame.getLookAtPosition());
 } else {
 qDebug() << "WARNING: Player couldn't find head data.";
 }
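With this change the per-frame look-at point is treated as relative to the playback context and mapped into world space before being applied, so a recording replayed at a different position or orientation still looks at the right spot. A minimal JavaScript sketch of that mapping, assuming the hifi Vec3/Quat scripting helpers; the function name is illustrative:

// Map a point expressed in the playback context's frame into world space:
// rotate by the context orientation, then offset by the context position.
function contextToWorld(contextPosition, contextOrientation, localPoint) {
    return Vec3.sum(contextPosition, Vec3.multiplyQbyV(contextOrientation, localPoint));
}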

View file

@@ -37,7 +37,7 @@ public:
 public slots:
 void startPlaying();
 void stopPlaying();
-void loadFromFile(QString file);
+void loadFromFile(const QString& file);
 void loadRecording(RecordingPointer recording);
 void play();

View file

@@ -86,7 +86,7 @@ void Recorder::stopRecording() {
 qDebug().nospace() << "Recorded " << _recording->getFrameNumber() << " during " << _recording->getLength() << " msec (" << _recording->getFrameNumber() / (_recording->getLength() / 1000.0f) << " fps)";
 }
-void Recorder::saveToFile(QString file) {
+void Recorder::saveToFile(const QString& file) {
 if (_recording->isEmpty()) {
 qDebug() << "Cannot save recording to file, recording is empty.";
 }

View file

@@ -38,7 +38,7 @@ public:
 public slots:
 void startRecording();
 void stopRecording();
-void saveToFile(QString file);
+void saveToFile(const QString& file);
 void record();
 void record(char* samples, int size);

View file

@@ -20,7 +20,6 @@
 #include <QEventLoop>
 #include <QFile>
 #include <QFileInfo>
-#include <QMessageBox>
 #include <QPair>
 #include "AvatarData.h"
@@ -74,7 +73,7 @@ void Recording::addFrame(int timestamp, RecordingFrame &frame) {
 _frames << frame;
 }
-void Recording::addAudioPacket(QByteArray byteArray) {
+void Recording::addAudioPacket(const QByteArray& byteArray) {
 if (!_audio) {
 _audio = new Sound(byteArray);
 return;
@@ -89,7 +88,7 @@ void Recording::clear() {
 _audio = NULL;
 }
-void writeVec3(QDataStream& stream, glm::vec3 value) {
+void writeVec3(QDataStream& stream, const glm::vec3& value) {
 unsigned char buffer[sizeof(value)];
 memcpy(buffer, &value, sizeof(value));
 stream.writeRawData(reinterpret_cast<char*>(buffer), sizeof(value));
@@ -102,7 +101,7 @@ bool readVec3(QDataStream& stream, glm::vec3& value) {
 return true;
 }
-void writeQuat(QDataStream& stream, glm::quat value) {
+void writeQuat(QDataStream& stream, const glm::quat& value) {
 unsigned char buffer[256];
 int writtenToBuffer = packOrientationQuatToBytes(buffer, value);
 stream.writeRawData(reinterpret_cast<char*>(buffer), writtenToBuffer);
@@ -136,7 +135,7 @@ bool readFloat(QDataStream& stream, float& value, int radix) {
 return true;
 }
-void writeRecordingToFile(RecordingPointer recording, QString filename) {
+void writeRecordingToFile(RecordingPointer recording, const QString& filename) {
 if (!recording || recording->getFrameNumber() < 1) {
 qDebug() << "Can't save empty recording";
 return;
@@ -329,7 +328,7 @@ void writeRecordingToFile(RecordingPointer recording, QString filename) {
 fileStream << recording->_audio->getByteArray();
-qint64 writtingTime = timer.restart();
+qint64 writingTime = timer.restart();
 // Write data length and CRC-16
 quint32 dataLength = file.pos() - dataOffset;
 file.seek(dataOffset); // Go to beginning of data for checksum
@@ -374,10 +373,10 @@
 }
 qint64 checksumTime = timer.elapsed();
-qDebug() << "Wrote" << file.size() << "bytes in" << writtingTime + checksumTime << "ms. (" << checksumTime << "ms for checksum)";
+qDebug() << "Wrote" << file.size() << "bytes in" << writingTime + checksumTime << "ms. (" << checksumTime << "ms for checksum)";
 }
-RecordingPointer readRecordingFromFile(RecordingPointer recording, QString filename) {
+RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString& filename) {
 QByteArray byteArray;
 QUrl url(filename);
 QElapsedTimer timer;
@@ -416,10 +415,6 @@ RecordingPointer readRecordingFromFile(RecordingPointer recording, QString filen
 if (filename.endsWith(".rec") || filename.endsWith(".REC")) {
 qDebug() << "Old .rec format";
-QMessageBox::warning(NULL,
-QString("Old recording format"),
-QString("Converting your file to the new format."),
-QMessageBox::Ok);
 readRecordingFromRecFile(recording, filename, byteArray);
 return recording;
 } else if (!filename.endsWith(".hfr") && !filename.endsWith(".HFR")) {
@@ -641,7 +636,7 @@ RecordingPointer readRecordingFromFile(RecordingPointer recording, QString filen
 }
-RecordingPointer readRecordingFromRecFile(RecordingPointer recording, QString filename, QByteArray byteArray) {
+RecordingPointer readRecordingFromRecFile(RecordingPointer recording, const QString& filename, const QByteArray& byteArray) {
 QElapsedTimer timer;
 timer.start();
@@ -786,21 +781,18 @@ RecordingPointer readRecordingFromRecFile(RecordingPointer recording, QString fi
 qDebug() << "Read " << byteArray.size() << " bytes in " << timer.elapsed() << " ms.";
 // Set new filename
-if (filename.startsWith("http") || filename.startsWith("https") || filename.startsWith("ftp")) {
-filename = QUrl(filename).fileName();
+QString newFilename = filename;
+if (newFilename.startsWith("http") || newFilename.startsWith("https") || newFilename.startsWith("ftp")) {
+newFilename = QUrl(newFilename).fileName();
 }
-if (filename.endsWith(".rec") || filename.endsWith(".REC")) {
-filename.chop(qstrlen(".rec"));
+if (newFilename.endsWith(".rec") || newFilename.endsWith(".REC")) {
+newFilename.chop(qstrlen(".rec"));
 }
-filename.append(".hfr");
-filename = QFileInfo(filename).absoluteFilePath();
+newFilename.append(".hfr");
+newFilename = QFileInfo(newFilename).absoluteFilePath();
 // Set recording to new format
-writeRecordingToFile(recording, filename);
-QMessageBox::warning(NULL,
-QString("New recording location"),
-QString("The new recording was saved at:\n" + filename),
-QMessageBox::Ok);
-qDebug() << "Recording has been successfully converted at" << filename;
+writeRecordingToFile(recording, newFilename);
+qDebug() << "Recording has been successfully converted at" << newFilename;
 return recording;
 }

View file

@@ -62,7 +62,7 @@
 protected:
 void addFrame(int timestamp, RecordingFrame& frame);
-void addAudioPacket(QByteArray byteArray);
+void addAudioPacket(const QByteArray& byteArray);
 void clear();
 private:
@@ -74,9 +74,10 @@
 friend class Recorder;
 friend class Player;
-friend void writeRecordingToFile(RecordingPointer recording, QString file);
-friend RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file);
-friend RecordingPointer readRecordingFromRecFile(RecordingPointer recording, QString filename, QByteArray byteArray);
+friend void writeRecordingToFile(RecordingPointer recording, const QString& file);
+friend RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString& file);
+friend RecordingPointer readRecordingFromRecFile(RecordingPointer recording, const QString& filename,
+                                                 const QByteArray& byteArray);
 };
 /// Stores the different values associated to one recording frame
@@ -95,13 +96,13 @@
 protected:
 void setBlendshapeCoefficients(QVector<float> blendshapeCoefficients);
 void setJointRotations(QVector<glm::quat> jointRotations) { _jointRotations = jointRotations; }
-void setTranslation(glm::vec3 translation) { _translation = translation; }
-void setRotation(glm::quat rotation) { _rotation = rotation; }
+void setTranslation(const glm::vec3& translation) { _translation = translation; }
+void setRotation(const glm::quat& rotation) { _rotation = rotation; }
 void setScale(float scale) { _scale = scale; }
 void setHeadRotation(glm::quat headRotation) { _headRotation = headRotation; }
 void setLeanSideways(float leanSideways) { _leanSideways = leanSideways; }
 void setLeanForward(float leanForward) { _leanForward = leanForward; }
-void setLookAtPosition(glm::vec3 lookAtPosition) { _lookAtPosition = lookAtPosition; }
+void setLookAtPosition(const glm::vec3& lookAtPosition) { _lookAtPosition = lookAtPosition; }
 private:
 QVector<float> _blendshapeCoefficients;
@@ -115,13 +116,14 @@
 glm::vec3 _lookAtPosition;
 friend class Recorder;
-friend void writeRecordingToFile(RecordingPointer recording, QString file);
-friend RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file);
-friend RecordingPointer readRecordingFromRecFile(RecordingPointer recording, QString filename, QByteArray byteArray);
+friend void writeRecordingToFile(RecordingPointer recording, const QString& file);
+friend RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString& file);
+friend RecordingPointer readRecordingFromRecFile(RecordingPointer recording, const QString& filename,
+                                                 const QByteArray& byteArray);
 };
-void writeRecordingToFile(RecordingPointer recording, QString filename);
-RecordingPointer readRecordingFromFile(RecordingPointer recording, QString filename);
-RecordingPointer readRecordingFromRecFile(RecordingPointer recording, QString filename, QByteArray byteArray);
+void writeRecordingToFile(RecordingPointer recording, const QString& filename);
+RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString& filename);
+RecordingPointer readRecordingFromRecFile(RecordingPointer recording, const QString& filename, const QByteArray& byteArray);
 #endif // hifi_Recording_h

View file

@@ -1769,7 +1769,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
 lowestWeight = weights[k];
 }
 }
-if (k == 4) {
+if (k == 4 && weight > lowestWeight) {
 // no space for an additional weight; we must replace the lowest
 weights[lowestIndex] = weight;
 extracted.mesh.clusterIndices[it.value()][lowestIndex] = i;
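Before this change, once a vertex already had four skin weights the smallest one was replaced unconditionally, so a weaker influence could evict a stronger one; the added weight > lowestWeight check only performs the replacement when the incoming weight actually exceeds the current minimum. A minimal JavaScript sketch of the intended behavior; the function name and parameters are illustrative, with the four-influence limit passed in as maxInfluences:

// Keep at most maxInfluences skin weights per vertex; a new influence only
// replaces the smallest existing one if it is actually stronger.
function addInfluence(weights, jointIndices, jointIndex, weight, maxInfluences) {
    if (weights.length < maxInfluences) {
        weights.push(weight);
        jointIndices.push(jointIndex);
        return;
    }
    var lowest = 0;
    for (var k = 1; k < weights.length; k++) {
        if (weights[k] < weights[lowest]) {
            lowest = k;
        }
    }
    if (weight > weights[lowest]) {  // the fix: never evict a stronger weight
        weights[lowest] = weight;
        jointIndices[lowest] = jointIndex;
    }
}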