Merge branch 'master' of https://github.com/highfidelity/hifi into deference

Andrzej Kapolka 2014-09-19 10:30:40 -07:00
commit 08fda3bbef
23 changed files with 183 additions and 96 deletions

View file

@ -67,7 +67,7 @@
"type": "checkbox",
"label": "Enable Positional Filter",
"help": "If enabled, positional audio stream uses lowpass filter",
"default": true
"default": false
}
}
}

View file

@ -12,6 +12,10 @@
// Set the following variables to the values needed
var filename = "http://s3-us-west-1.amazonaws.com/highfidelity-public/ozan/bartender.rec";
var playFromCurrentLocation = true;
var useDisplayName = true;
var useAttachments = true;
var useHeadModel = true;
var useSkeletonModel = true;
// ID of the agent. Two agents can't have the same ID.
var id = 0;
@ -44,11 +48,15 @@ COLORS[SHOW] = { red: SHOW, green: 0, blue: 0 };
COLORS[HIDE] = { red: HIDE, green: 0, blue: 0 };
controlVoxelPosition.x += id * controlVoxelSize;
Avatar.setPlayFromCurrentLocation(playFromCurrentLocation);
Avatar.loadRecording(filename);
Avatar.setPlayFromCurrentLocation(playFromCurrentLocation);
Avatar.setPlayerUseDisplayName(useDisplayName);
Avatar.setPlayerUseAttachments(useAttachments);
Avatar.setPlayerUseHeadModel(useHeadModel);
Avatar.setPlayerUseSkeletonModel(useSkeletonModel);
function setupVoxelViewer() {
var voxelViewerOffset = 10;
var voxelViewerPosition = JSON.parse(JSON.stringify(controlVoxelPosition));

View file

@ -12,7 +12,14 @@
Script.include("toolBars.js");
var recordingFile = "recording.rec";
var playFromCurrentLocation = true;
function setPlayerOptions() {
MyAvatar.setPlayFromCurrentLocation(true);
MyAvatar.setPlayerUseDisplayName(false);
MyAvatar.setPlayerUseAttachments(false);
MyAvatar.setPlayerUseHeadModel(false);
MyAvatar.setPlayerUseSkeletonModel(false);
}
var windowDimensions = Controller.getViewportDimensions();
var TOOL_ICON_URL = "http://s3-us-west-1.amazonaws.com/highfidelity-public/images/tools/";
@ -186,7 +193,7 @@ function mousePressEvent(event) {
toolBar.setAlpha(ALPHA_ON, saveIcon);
toolBar.setAlpha(ALPHA_ON, loadIcon);
} else if (MyAvatar.playerLength() > 0) {
MyAvatar.setPlayFromCurrentLocation(playFromCurrentLocation);
setPlayerOptions();
MyAvatar.setPlayerLoop(false);
MyAvatar.startPlaying();
toolBar.setAlpha(ALPHA_OFF, recordIcon);
@ -201,7 +208,7 @@ function mousePressEvent(event) {
toolBar.setAlpha(ALPHA_ON, saveIcon);
toolBar.setAlpha(ALPHA_ON, loadIcon);
} else if (MyAvatar.playerLength() > 0) {
MyAvatar.setPlayFromCurrentLocation(playFromCurrentLocation);
setPlayerOptions();
MyAvatar.setPlayerLoop(true);
MyAvatar.startPlaying();
toolBar.setAlpha(ALPHA_OFF, recordIcon);

View file

@ -1110,7 +1110,7 @@ function keyPressEvent(event) {
print("Color = " + (whichColor + 1));
swatchesSound.play(whichColor);
moveTools();
} else if (event.text == "0") {
} else if (event.text == "0" && voxelToolSelected) {
// Create a brand new 1 meter voxel in front of your avatar
var newPosition = getNewVoxelPosition();
var newVoxel = {

View file

@ -16,24 +16,21 @@ var debug = false;
var movingWithHead = false;
var headStartPosition, headStartDeltaPitch, headStartFinalPitch, headStartRoll, headStartYaw;
var HEAD_MOVE_DEAD_ZONE = 0.10;
var HEAD_STRAFE_DEAD_ZONE = 0.0;
var HEAD_ROTATE_DEAD_ZONE = 0.0;
//var HEAD_THRUST_FWD_SCALE = 12000.0;
//var HEAD_THRUST_STRAFE_SCALE = 0.0;
var HEAD_YAW_RATE = 1.0;
var HEAD_MOVE_DEAD_ZONE = 0.05;
var HEAD_STRAFE_DEAD_ZONE = 0.03;
var HEAD_ROTATE_DEAD_ZONE = 10.0;
var HEAD_YAW_RATE = 1.5;
var HEAD_PITCH_RATE = 1.0;
//var HEAD_ROLL_THRUST_SCALE = 75.0;
//var HEAD_PITCH_LIFT_THRUST = 3.0;
var WALL_BOUNCE = 4000.0;
var WALL_BOUNCE = 10000.0;
var FIXED_WALK_VELOCITY = 1.5;
// Modify these values to tweak the strength of the motion.
// A larger *FACTOR increases the speed.
// A lower SHORT_TIMESCALE makes the motor achieve full speed faster.
var HEAD_VELOCITY_FWD_FACTOR = 20.0;
var HEAD_VELOCITY_LEFT_FACTOR = 20.0;
var HEAD_VELOCITY_FWD_FACTOR = 10.0;
var HEAD_VELOCITY_LEFT_FACTOR = 0.0;
var HEAD_VELOCITY_UP_FACTOR = 20.0;
var SHORT_TIMESCALE = 0.125;
var SHORT_TIMESCALE = 0.01;
var VERY_LARGE_TIMESCALE = 1000000.0;
var xAxis = {x:1.0, y:0.0, z:0.0 };
@ -43,9 +40,10 @@ var zAxis = {x:0.0, y:0.0, z:1.0 };
// If these values are set to something
var maxVelocity = 1.25;
var noFly = true;
var fixedWalkVelocity = true;
//var roomLimits = { xMin: 618, xMax: 635.5, zMin: 528, zMax: 552.5 };
var roomLimits = { xMin: -1, xMax: 0, zMin: 0, zMax: 0 };
var roomLimits = { xMin: 193.0, xMax: 206.5, zMin: 251.4, zMax: 269.5 };
function isInRoom(position) {
var BUFFER = 2.0;
@ -71,25 +69,49 @@ function moveWithHead(deltaTime) {
var deltaPitch = MyAvatar.getHeadDeltaPitch() - headStartDeltaPitch;
var deltaRoll = MyAvatar.getHeadFinalRoll() - headStartRoll;
var velocity = MyAvatar.getVelocity();
var bodyLocalCurrentHeadVector = Vec3.subtract(MyAvatar.getHeadPosition(), MyAvatar.position);
bodyLocalCurrentHeadVector = Vec3.multiplyQbyV(Quat.angleAxis(-deltaYaw, {x:0, y: 1, z:0}), bodyLocalCurrentHeadVector);
var position = MyAvatar.position;
var neckPosition = MyAvatar.getNeckPosition();
var bodyLocalCurrentHeadVector = Vec3.subtract(neckPosition, position);
bodyLocalCurrentHeadVector = Vec3.multiplyQbyV(Quat.inverse(MyAvatar.orientation), bodyLocalCurrentHeadVector);
var headDelta = Vec3.subtract(bodyLocalCurrentHeadVector, headStartPosition);
headDelta = Vec3.multiplyQbyV(Quat.inverse(Camera.getOrientation()), headDelta);
headDelta.y = 0.0; // Don't respond to any of the vertical component of head motion
headDelta = Vec3.multiplyQbyV(MyAvatar.orientation, headDelta);
headDelta = Vec3.multiplyQbyV(Quat.inverse(Camera.getOrientation()), headDelta);
var length = Vec3.length(headDelta);
if (length > 1.0) {
// Needs to be fixed! Right now the reported neck position sometimes jumps to a bad value
headDelta.x = headDelta.y = headDelta.z = 0.0;
length = 0.0;
return;
}
// Thrust based on leaning forward and side-to-side
var targetVelocity = {x:0.0, y:0.0, z:0.0};
if (length > HEAD_MOVE_DEAD_ZONE) {
//headDelta = Vec3.normalize(headDelta);
//targetVelocity = Vec3.multiply(headDelta, FIXED_WALK_VELOCITY);
targetVelocity = Vec3.multiply(headDelta, HEAD_VELOCITY_FWD_FACTOR);
}
/*
if (Math.abs(headDelta.z) > HEAD_MOVE_DEAD_ZONE) {
targetVelocity = Vec3.multiply(zAxis, -headDelta.z * HEAD_VELOCITY_FWD_FACTOR);
if (fixedWalkVelocity) {
targetVelocity = Vec3.multiply(zAxis, headDelta.z > 0 ? FIXED_WALK_VELOCITY : -FIXED_WALK_VELOCITY);
} else {
targetVelocity = Vec3.multiply(zAxis, headDelta.z * HEAD_VELOCITY_FWD_FACTOR);
}
}
if (Math.abs(headDelta.x) > HEAD_STRAFE_DEAD_ZONE) {
var deltaVelocity = Vec3.multiply(xAxis, -headDelta.x * HEAD_VELOCITY_LEFT_FACTOR);
var deltaVelocity = Vec3.multiply(xAxis, headDelta.x * HEAD_VELOCITY_LEFT_FACTOR);
targetVelocity = Vec3.sum(targetVelocity, deltaVelocity);
}
*/
if (Math.abs(deltaYaw) > HEAD_ROTATE_DEAD_ZONE) {
var orientation = Quat.multiply(Quat.angleAxis((deltaYaw + deltaRoll) * HEAD_YAW_RATE * deltaTime, yAxis), MyAvatar.orientation);
MyAvatar.orientation = orientation;
}
// Thrust Up/Down based on head pitch
if (!noFly) {
var deltaVelocity = Vec3.multiply(yAxis, headDelta.y * HEAD_VELOCITY_UP_FACTOR);
@ -121,7 +143,7 @@ function moveWithHead(deltaTime) {
if (movingWithHead && Vec3.length(thrust) > 0.0) {
// reduce the timescale of the motor so that it won't defeat the thrust code
Vec3.print("adebug room containment thrust = ", thrust);
motorTimescale = 4.0 * SHORT_TIMESCALE;
motorTimescale = 1000000.0;
}
}
MyAvatar.motorTimescale = motorTimescale;
@ -130,7 +152,8 @@ function moveWithHead(deltaTime) {
Controller.keyPressEvent.connect(function(event) {
if (event.text == "SPACE" && !movingWithHead) {
movingWithHead = true;
headStartPosition = Vec3.subtract(MyAvatar.getHeadPosition(), MyAvatar.position);
headStartPosition = Vec3.subtract(MyAvatar.getNeckPosition(), MyAvatar.position);
headStartPosition = Vec3.multiplyQbyV(Quat.inverse(MyAvatar.orientation), headStartPosition);
headStartDeltaPitch = MyAvatar.getHeadDeltaPitch();
headStartFinalPitch = MyAvatar.getHeadFinalPitch();
headStartRoll = MyAvatar.getHeadFinalRoll();
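
Not part of the diff above: the headMove.js changes turn the head's displacement from its calibrated start pose into a target velocity, rejecting samples that jump more than a meter, ignoring leans inside HEAD_MOVE_DEAD_ZONE, and scaling the rest by HEAD_VELOCITY_FWD_FACTOR before handing it to the avatar motor. A minimal sketch of that mapping, written in C++ for illustration (the script itself is JavaScript; the Vec3f type and function name are assumptions, not part of the commit):

#include <cmath>

struct Vec3f { float x, y, z; };

// Desired velocity for a given head offset from the calibration pose.
// deadZone and gain play the roles of HEAD_MOVE_DEAD_ZONE and HEAD_VELOCITY_FWD_FACTOR.
Vec3f headLeanToVelocity(const Vec3f& headDelta, float deadZone, float gain) {
    float length = std::sqrt(headDelta.x * headDelta.x +
                             headDelta.y * headDelta.y +
                             headDelta.z * headDelta.z);
    if (length > 1.0f) {            // reject obviously bad neck-position samples
        return {0.0f, 0.0f, 0.0f};
    }
    if (length <= deadZone) {       // ignore small leans
        return {0.0f, 0.0f, 0.0f};
    }
    return {headDelta.x * gain, headDelta.y * gain, headDelta.z * gain};
}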

View file

@ -219,6 +219,10 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
if (portStr) {
listenPort = atoi(portStr);
}
// call the OAuthWebviewHandler static getter so that its instance lives in our thread
// make sure it is ready before the NodeList might need it
OAuthWebViewHandler::getInstance();
// start the nodeThread so its event loop is running
_nodeThread->start();
@ -418,11 +422,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
connect(_window, &MainWindow::windowGeometryChanged,
_runningScriptsWidget, &RunningScriptsWidget::setBoundary);
// call the OAuthWebviewHandler static getter so that its instance lives in our thread
OAuthWebViewHandler::getInstance();
// make sure the High Fidelity root CA is in our list of trusted certs
OAuthWebViewHandler::addHighFidelityRootCAToSSLConfig();
_trayIcon->show();
#ifdef HAVE_RTMIDI

View file

@ -97,6 +97,12 @@ glm::vec3 Avatar::getChestPosition() const {
return _skeletonModel.getNeckPosition(neckPosition) ? (_position + neckPosition) * 0.5f : _position;
}
glm::vec3 Avatar::getNeckPosition() const {
glm::vec3 neckPosition;
return _skeletonModel.getNeckPosition(neckPosition) ? neckPosition : _position;
}
glm::quat Avatar::getWorldAlignedOrientation () const {
return computeRotationFromBodyToWorldUp() * getOrientation();
}

View file

@ -154,6 +154,8 @@ public:
Q_INVOKABLE void setJointModelPositionAndOrientation(int index, const glm::vec3 position, const glm::quat& rotation);
Q_INVOKABLE void setJointModelPositionAndOrientation(const QString& name, const glm::vec3 position,
const glm::quat& rotation);
Q_INVOKABLE glm::vec3 getNeckPosition() const;
Q_INVOKABLE glm::vec3 getVelocity() const { return _velocity; }
Q_INVOKABLE glm::vec3 getAcceleration() const { return _acceleration; }

View file

@ -32,6 +32,7 @@ Head::Head(Avatar* owningAvatar) :
_eyePosition(0.0f, 0.0f, 0.0f),
_scale(1.0f),
_lastLoudness(0.0f),
_longTermAverageLoudness(-1.0f),
_audioAttack(0.0f),
_angularVelocity(0,0,0),
_renderLookatVectors(false),
@ -62,7 +63,7 @@ void Head::reset() {
}
void Head::simulate(float deltaTime, bool isMine, bool billboard) {
// Update audio trailing average for rendering facial animations
if (isMine) {
MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
@ -78,6 +79,18 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
}
}
}
// Update audio trailing average for rendering facial animations
const float AUDIO_AVERAGING_SECS = 0.05f;
const float AUDIO_LONG_TERM_AVERAGING_SECS = 30.f;
_averageLoudness = glm::mix(_averageLoudness, _audioLoudness, glm::min(deltaTime / AUDIO_AVERAGING_SECS, 1.0f));
if (_longTermAverageLoudness == -1.0) {
_longTermAverageLoudness = _averageLoudness;
} else {
_longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));
}
float deltaLoudness = glm::max(0.0f, _averageLoudness - _longTermAverageLoudness);
//qDebug() << "deltaLoudness: " << deltaLoudness;
if (!(_isFaceshiftConnected || billboard)) {
// Update eye saccades
@ -92,9 +105,6 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
_saccadeTarget = SACCADE_MAGNITUDE * randVector();
}
_saccade += (_saccadeTarget - _saccade) * 0.50f;
const float AUDIO_AVERAGING_SECS = 0.05f;
_averageLoudness = glm::mix(_averageLoudness, _audioLoudness, glm::min(deltaTime / AUDIO_AVERAGING_SECS, 1.0f));
// Detect transition from talking to not; force blink after that and a delay
bool forceBlink = false;
@ -108,8 +118,8 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
}
// Update audio attack data for facial animation (eyebrows and mouth)
_audioAttack = 0.9f * _audioAttack + 0.1f * fabs(_audioLoudness - _lastLoudness);
_lastLoudness = _audioLoudness;
_audioAttack = 0.9f * _audioAttack + 0.1f * fabs((_audioLoudness - _longTermAverageLoudness) - _lastLoudness);
_lastLoudness = (_audioLoudness - _longTermAverageLoudness);
const float BROW_LIFT_THRESHOLD = 100.0f;
if (_audioAttack > BROW_LIFT_THRESHOLD) {
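
Not part of the diff above: the Head.cpp hunk stops driving facial animation from raw loudness and instead measures loudness against a slowly adapting baseline. _averageLoudness is a fast (~50 ms) exponential moving average, _longTermAverageLoudness a very slow (~30 s) one seeded from the first sample, and the brow/mouth code reacts to the positive difference between them. A standalone sketch of that scheme, with illustrative names that are not the engine's:

#include <algorithm>

struct LoudnessTracker {
    float shortTerm = 0.0f;        // ~50 ms time constant, like _averageLoudness
    float longTerm = -1.0f;        // ~30 s time constant, like _longTermAverageLoudness

    // Returns the loudness above the long-term baseline, clamped at zero.
    float update(float loudness, float deltaTime) {
        const float SHORT_SECS = 0.05f;
        const float LONG_SECS = 30.0f;
        float shortMix = std::min(deltaTime / SHORT_SECS, 1.0f);
        shortTerm += (loudness - shortTerm) * shortMix;
        if (longTerm < 0.0f) {
            longTerm = shortTerm;  // first sample seeds the baseline
        } else {
            float longMix = std::min(deltaTime / LONG_SECS, 1.0f);
            longTerm += (shortTerm - longTerm) * longMix;
        }
        return std::max(0.0f, shortTerm - longTerm);
    }
};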

View file

@ -123,6 +123,7 @@ private:
float _scale;
float _lastLoudness;
float _longTermAverageLoudness;
float _audioAttack;
glm::vec3 _angularVelocity;
bool _renderLookatVectors;

View file

@ -15,6 +15,8 @@
#include <LimitedNodeList.h>
#include <OAuthNetworkAccessManager.h>
#include "Application.h"
#include "DataWebDialog.h"
DataWebDialog::DataWebDialog() {
@ -23,6 +25,12 @@ DataWebDialog::DataWebDialog() {
// use an OAuthNetworkAccessManager instead of regular QNetworkAccessManager so our requests are authed
page()->setNetworkAccessManager(OAuthNetworkAccessManager::getInstance());
// have the page delegate external links so they can be captured by the Application in case they are a hifi link
page()->setLinkDelegationPolicy(QWebPage::DelegateExternalLinks);
// have the Application handle external links
connect(this, &QWebView::linkClicked, Application::getInstance(), &Application::openUrl);
}
DataWebDialog* DataWebDialog::dialogForPath(const QString& path) {

View file

@ -27,7 +27,7 @@ OAuthWebViewHandler::OAuthWebViewHandler() :
_webViewRedisplayTimer(),
_lastAuthorizationURL()
{
addHighFidelityRootCAToSSLConfig();
}
const char HIGH_FIDELITY_CA[] = "-----BEGIN CERTIFICATE-----\n"

View file

@ -75,6 +75,8 @@ public:
- (_b1 * _ym1)
- (_b2 * _ym2);
y = (y >= -EPSILON && y < EPSILON) ? 0.0f : y; // clamp to 0
// update delay line
_xm2 = _xm1;
_xm1 = x;
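
Not part of the diff above: the AudioFilter hunk adds a flush-to-zero guard on the biquad output before the delay line is updated, presumably so very small values (denormals) cannot linger in the feedback path. A minimal sketch of one sample of such a filter step, with an assumed EPSILON (the real constant is defined elsewhere in the header):

struct Biquad {
    float a0, a1, a2, b1, b2;                  // coefficients
    float xm1 = 0, xm2 = 0, ym1 = 0, ym2 = 0;  // delay line

    float process(float x) {
        const float EPSILON = 1e-9f;           // assumed threshold, illustrative only
        float y = a0 * x + a1 * xm1 + a2 * xm2
                - b1 * ym1 - b2 * ym2;
        if (y >= -EPSILON && y < EPSILON) {    // flush tiny outputs to exactly zero
            y = 0.0f;
        }
        xm2 = xm1; xm1 = x;                    // update delay line
        ym2 = ym1; ym1 = y;
        return y;
    }
};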

View file

@ -40,8 +40,8 @@ void AudioSourceTone::updateCoefficients() {
void AudioSourceTone::initialize() {
const float32_t FREQUENCY_220_HZ = 220.0f;
const float32_t GAIN_MINUS_3DB = 0.708f;
setParameters(SAMPLE_RATE, FREQUENCY_220_HZ, GAIN_MINUS_3DB);
const float32_t GAIN_MINUS_6DB = 0.501f;
setParameters(SAMPLE_RATE, FREQUENCY_220_HZ, GAIN_MINUS_6DB);
}
void AudioSourceTone::setParameters(const float32_t sampleRate, const float32_t frequency, const float32_t amplitude) {
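
Not part of the diff above: the constants in this hunk are linear amplitudes for decibel gains, via amplitude = 10^(dB / 20), which is where 0.708 (-3 dB) and 0.501 (-6 dB) come from. A quick check:

#include <cmath>
#include <cstdio>

int main() {
    std::printf("-3 dB -> %.3f\n", std::pow(10.0, -3.0 / 20.0));  // ~0.708
    std::printf("-6 dB -> %.3f\n", std::pow(10.0, -6.0 / 20.0));  // ~0.501
    return 0;
}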

View file

@ -661,6 +661,30 @@ void AvatarData::setPlayerLoop(bool loop) {
}
}
void AvatarData::setPlayerUseDisplayName(bool useDisplayName) {
if(_player) {
_player->useDisplayName(useDisplayName);
}
}
void AvatarData::setPlayerUseAttachments(bool useAttachments) {
if(_player) {
_player->useAttachements(useAttachments);
}
}
void AvatarData::setPlayerUseHeadModel(bool useHeadModel) {
if(_player) {
_player->useHeadModel(useHeadModel);
}
}
void AvatarData::setPlayerUseSkeletonModel(bool useSkeletonModel) {
if(_player) {
_player->useSkeletonModel(useSkeletonModel);
}
}
void AvatarData::play() {
if (isPlaying()) {
if (QThread::currentThread() != thread()) {

View file

@ -302,6 +302,10 @@ public slots:
void startPlaying();
void setPlayFromCurrentLocation(bool playFromCurrentLocation);
void setPlayerLoop(bool loop);
void setPlayerUseDisplayName(bool useDisplayName);
void setPlayerUseAttachments(bool useAttachments);
void setPlayerUseHeadModel(bool useHeadModel);
void setPlayerUseSkeletonModel(bool useSkeletonModel);
void play();
void stopPlaying();

View file

@ -17,15 +17,15 @@
#include "Player.h"
Player::Player(AvatarData* avatar) :
_recording(new Recording()),
_avatar(avatar),
_audioThread(NULL),
_playFromCurrentPosition(true),
_loop(false),
_useAttachments(true),
_useDisplayName(true),
_useHeadURL(true),
_useSkeletonURL(true)
_recording(new Recording()),
_avatar(avatar),
_audioThread(NULL),
_playFromCurrentPosition(true),
_loop(false),
_useAttachments(true),
_useDisplayName(true),
_useHeadURL(true),
_useSkeletonURL(true)
{
_timer.invalidate();
_options.setLoop(false);
@ -157,10 +157,9 @@ void Player::loopRecording() {
setupAudioThread();
_currentFrame = 0;
_timer.restart();
}
void Player::loadFromFile(QString file) {
void Player::loadFromFile(const QString& file) {
if (_recording) {
_recording->clear();
} else {
@ -204,7 +203,7 @@ void Player::play() {
head->setFinalPitch(eulers.x);
head->setFinalYaw(eulers.y);
head->setFinalRoll(eulers.z);
head->setLookAtPosition(currentFrame.getLookAtPosition());
head->setLookAtPosition(context->position + context->orientation * currentFrame.getLookAtPosition());
} else {
qDebug() << "WARNING: Player couldn't find head data.";
}
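
Not part of the diff above: the setLookAtPosition() change transforms the recorded look-at point, which is stored in the recording's local frame, into world space using the playback context's position and orientation, so the gaze stays correct when a recording is played back from a different location. A minimal sketch of that transform with glm (function name is illustrative):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

glm::vec3 localToWorld(const glm::vec3& localPoint,
                       const glm::vec3& contextPosition,
                       const glm::quat& contextOrientation) {
    // rotate into the context's orientation, then offset by its position
    return contextPosition + contextOrientation * localPoint;
}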

View file

@ -34,10 +34,10 @@ public:
RecordingPointer getRecording() const { return _recording; }
public slots:
public slots:
void startPlaying();
void stopPlaying();
void loadFromFile(QString file);
void loadFromFile(const QString& file);
void loadRecording(RecordingPointer recording);
void play();

View file

@ -86,7 +86,7 @@ void Recorder::stopRecording() {
qDebug().nospace() << "Recorded " << _recording->getFrameNumber() << " during " << _recording->getLength() << " msec (" << _recording->getFrameNumber() / (_recording->getLength() / 1000.0f) << " fps)";
}
void Recorder::saveToFile(QString file) {
void Recorder::saveToFile(const QString& file) {
if (_recording->isEmpty()) {
qDebug() << "Cannot save recording to file, recording is empty.";
}

View file

@ -38,7 +38,7 @@ public:
public slots:
void startRecording();
void stopRecording();
void saveToFile(QString file);
void saveToFile(const QString& file);
void record();
void record(char* samples, int size);

View file

@ -20,7 +20,6 @@
#include <QEventLoop>
#include <QFile>
#include <QFileInfo>
#include <QMessageBox>
#include <QPair>
#include "AvatarData.h"
@ -74,7 +73,7 @@ void Recording::addFrame(int timestamp, RecordingFrame &frame) {
_frames << frame;
}
void Recording::addAudioPacket(QByteArray byteArray) {
void Recording::addAudioPacket(const QByteArray& byteArray) {
if (!_audio) {
_audio = new Sound(byteArray);
return;
@ -89,7 +88,7 @@ void Recording::clear() {
_audio = NULL;
}
void writeVec3(QDataStream& stream, glm::vec3 value) {
void writeVec3(QDataStream& stream, const glm::vec3& value) {
unsigned char buffer[sizeof(value)];
memcpy(buffer, &value, sizeof(value));
stream.writeRawData(reinterpret_cast<char*>(buffer), sizeof(value));
@ -102,7 +101,7 @@ bool readVec3(QDataStream& stream, glm::vec3& value) {
return true;
}
void writeQuat(QDataStream& stream, glm::quat value) {
void writeQuat(QDataStream& stream, const glm::quat& value) {
unsigned char buffer[256];
int writtenToBuffer = packOrientationQuatToBytes(buffer, value);
stream.writeRawData(reinterpret_cast<char*>(buffer), writtenToBuffer);
@ -136,7 +135,7 @@ bool readFloat(QDataStream& stream, float& value, int radix) {
return true;
}
void writeRecordingToFile(RecordingPointer recording, QString filename) {
void writeRecordingToFile(RecordingPointer recording, const QString& filename) {
if (!recording || recording->getFrameNumber() < 1) {
qDebug() << "Can't save empty recording";
return;
@ -329,7 +328,7 @@ void writeRecordingToFile(RecordingPointer recording, QString filename) {
fileStream << recording->_audio->getByteArray();
qint64 writtingTime = timer.restart();
qint64 writingTime = timer.restart();
// Write data length and CRC-16
quint32 dataLength = file.pos() - dataOffset;
file.seek(dataOffset); // Go to beginning of data for checksum
@ -374,10 +373,10 @@ void writeRecordingToFile(RecordingPointer recording, QString filename) {
}
qint64 checksumTime = timer.elapsed();
qDebug() << "Wrote" << file.size() << "bytes in" << writtingTime + checksumTime << "ms. (" << checksumTime << "ms for checksum)";
qDebug() << "Wrote" << file.size() << "bytes in" << writingTime + checksumTime << "ms. (" << checksumTime << "ms for checksum)";
}
RecordingPointer readRecordingFromFile(RecordingPointer recording, QString filename) {
RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString& filename) {
QByteArray byteArray;
QUrl url(filename);
QElapsedTimer timer;
@ -416,10 +415,6 @@ RecordingPointer readRecordingFromFile(RecordingPointer recording, QString filen
if (filename.endsWith(".rec") || filename.endsWith(".REC")) {
qDebug() << "Old .rec format";
QMessageBox::warning(NULL,
QString("Old recording format"),
QString("Converting your file to the new format."),
QMessageBox::Ok);
readRecordingFromRecFile(recording, filename, byteArray);
return recording;
} else if (!filename.endsWith(".hfr") && !filename.endsWith(".HFR")) {
@ -641,7 +636,7 @@ RecordingPointer readRecordingFromFile(RecordingPointer recording, QString filen
}
RecordingPointer readRecordingFromRecFile(RecordingPointer recording, QString filename, QByteArray byteArray) {
RecordingPointer readRecordingFromRecFile(RecordingPointer recording, const QString& filename, const QByteArray& byteArray) {
QElapsedTimer timer;
timer.start();
@ -786,21 +781,18 @@ RecordingPointer readRecordingFromRecFile(RecordingPointer recording, QString fi
qDebug() << "Read " << byteArray.size() << " bytes in " << timer.elapsed() << " ms.";
// Set new filename
if (filename.startsWith("http") || filename.startsWith("https") || filename.startsWith("ftp")) {
filename = QUrl(filename).fileName();
QString newFilename = filename;
if (newFilename.startsWith("http") || newFilename.startsWith("https") || newFilename.startsWith("ftp")) {
newFilename = QUrl(newFilename).fileName();
}
if (filename.endsWith(".rec") || filename.endsWith(".REC")) {
filename.chop(qstrlen(".rec"));
if (newFilename.endsWith(".rec") || newFilename.endsWith(".REC")) {
newFilename.chop(qstrlen(".rec"));
}
filename.append(".hfr");
filename = QFileInfo(filename).absoluteFilePath();
newFilename.append(".hfr");
newFilename = QFileInfo(newFilename).absoluteFilePath();
// Set recording to new format
writeRecordingToFile(recording, filename);
QMessageBox::warning(NULL,
QString("New recording location"),
QString("The new recording was saved at:\n" + filename),
QMessageBox::Ok);
qDebug() << "Recording has been successfully converted at" << filename;
writeRecordingToFile(recording, newFilename);
qDebug() << "Recording has been successfully converted at" << newFilename;
return recording;
}
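
Not part of the diff above: writeRecordingToFile() records a data length and CRC-16 by seeking back over the data region it just wrote. The exact field layout of the .hfr format is not shown in this diff, so the following is only a sketch of that pattern, assuming the file was opened QIODevice::ReadWrite; qChecksum() is Qt's built-in CRC-16:

#include <QFile>
#include <QDataStream>
#include <QtGlobal>

void appendLengthAndChecksum(QFile& file, QDataStream& fileStream, qint64 dataOffset) {
    quint32 dataLength = quint32(file.pos() - dataOffset);

    file.seek(dataOffset);                       // go back to the beginning of the data
    QByteArray data = file.read(dataLength);     // re-read what was just written
    quint16 crc = qChecksum(data.constData(), data.size());

    file.seek(file.size());                      // return to the end of the file
    fileStream << dataLength << crc;             // illustrative trailer: length then CRC-16
}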

View file

@ -62,7 +62,7 @@ public:
protected:
void addFrame(int timestamp, RecordingFrame& frame);
void addAudioPacket(QByteArray byteArray);
void addAudioPacket(const QByteArray& byteArray);
void clear();
private:
@ -74,9 +74,10 @@ private:
friend class Recorder;
friend class Player;
friend void writeRecordingToFile(RecordingPointer recording, QString file);
friend RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file);
friend RecordingPointer readRecordingFromRecFile(RecordingPointer recording, QString filename, QByteArray byteArray);
friend void writeRecordingToFile(RecordingPointer recording, const QString& file);
friend RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString& file);
friend RecordingPointer readRecordingFromRecFile(RecordingPointer recording, const QString& filename,
const QByteArray& byteArray);
};
/// Stores the different values associated to one recording frame
@ -95,13 +96,13 @@ public:
protected:
void setBlendshapeCoefficients(QVector<float> blendshapeCoefficients);
void setJointRotations(QVector<glm::quat> jointRotations) { _jointRotations = jointRotations; }
void setTranslation(glm::vec3 translation) { _translation = translation; }
void setRotation(glm::quat rotation) { _rotation = rotation; }
void setTranslation(const glm::vec3& translation) { _translation = translation; }
void setRotation(const glm::quat& rotation) { _rotation = rotation; }
void setScale(float scale) { _scale = scale; }
void setHeadRotation(glm::quat headRotation) { _headRotation = headRotation; }
void setLeanSideways(float leanSideways) { _leanSideways = leanSideways; }
void setLeanForward(float leanForward) { _leanForward = leanForward; }
void setLookAtPosition(glm::vec3 lookAtPosition) { _lookAtPosition = lookAtPosition; }
void setLookAtPosition(const glm::vec3& lookAtPosition) { _lookAtPosition = lookAtPosition; }
private:
QVector<float> _blendshapeCoefficients;
@ -115,13 +116,14 @@ private:
glm::vec3 _lookAtPosition;
friend class Recorder;
friend void writeRecordingToFile(RecordingPointer recording, QString file);
friend RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file);
friend RecordingPointer readRecordingFromRecFile(RecordingPointer recording, QString filename, QByteArray byteArray);
friend void writeRecordingToFile(RecordingPointer recording, const QString& file);
friend RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString& file);
friend RecordingPointer readRecordingFromRecFile(RecordingPointer recording, const QString& filename,
const QByteArray& byteArray);
};
void writeRecordingToFile(RecordingPointer recording, QString filename);
RecordingPointer readRecordingFromFile(RecordingPointer recording, QString filename);
RecordingPointer readRecordingFromRecFile(RecordingPointer recording, QString filename, QByteArray byteArray);
void writeRecordingToFile(RecordingPointer recording, const QString& filename);
RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString& filename);
RecordingPointer readRecordingFromRecFile(RecordingPointer recording, const QString& filename, const QByteArray& byteArray);
#endif // hifi_Recording_h

View file

@ -1769,7 +1769,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
lowestWeight = weights[k];
}
}
if (k == 4) {
if (k == 4 && weight > lowestWeight) {
// no space for an additional weight; we must replace the lowest
weights[lowestIndex] = weight;
extracted.mesh.clusterIndices[it.value()][lowestIndex] = i;
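
Not part of the diff above: the FBXReader fix only evicts the lowest of the four stored skinning weights when the incoming weight is actually larger, instead of replacing it unconditionally. A self-contained sketch of keeping the four largest weights per vertex with that guard (the struct and slot-filling convention are illustrative, not the reader's actual data layout):

struct VertexWeights {
    int   indices[4] = { 0, 0, 0, 0 };
    float weights[4] = { 0.f, 0.f, 0.f, 0.f };

    void add(int clusterIndex, float weight) {
        // fill an empty slot first (a zero weight marks an unused slot here)
        for (int k = 0; k < 4; ++k) {
            if (weights[k] == 0.f) {
                indices[k] = clusterIndex;
                weights[k] = weight;
                return;
            }
        }
        // all four slots taken: find the lowest and replace it only if the new weight is larger
        int lowest = 0;
        for (int k = 1; k < 4; ++k) {
            if (weights[k] < weights[lowest]) {
                lowest = k;
            }
        }
        if (weight > weights[lowest]) {
            indices[lowest] = clusterIndex;
            weights[lowest] = weight;
        }
    }
};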