Merge pull request #3296 from Atlante45/record_feature

Record feature
AndrewMeadows 2014-08-18 16:42:26 -07:00
commit 22f8f7779e
20 changed files with 716 additions and 25 deletions

View file

@@ -1051,9 +1051,22 @@ void Application::keyPressEvent(QKeyEvent* event) {
case Qt::Key_R:
if (isShifted) {
Menu::getInstance()->triggerOption(MenuOption::FrustumRenderMode);
} else if (isMeta) {
if (_myAvatar->isRecording()) {
_myAvatar->stopRecording();
} else {
_myAvatar->startRecording();
_audio.setRecorder(_myAvatar->getRecorder());
}
} else {
if (_myAvatar->isPlaying()) {
_myAvatar->stopPlaying();
} else {
_myAvatar->startPlaying();
_audio.setPlayer(_myAvatar->getPlayer());
}
}
break;
break;
case Qt::Key_Percent:
Menu::getInstance()->triggerOption(MenuOption::Stats);
break;
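For context, isShifted and isMeta are computed earlier in Application::keyPressEvent() from the Qt event modifiers (not shown in this hunk); a minimal sketch of those presumed flags, so the new branches read as Shift+R = frustum render mode, Ctrl/Cmd+R = toggle recording, bare R = toggle playback:

// Presumed modifier flags from earlier in keyPressEvent() (assumption, not part of this hunk):
bool isShifted = event->modifiers().testFlag(Qt::ShiftModifier);
bool isMeta = event->modifiers().testFlag(Qt::ControlModifier); // Qt reports Cmd as ControlModifier on OS X by default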

View file

@@ -82,7 +82,6 @@ Audio::Audio(QObject* parent) :
_noiseGateSampleCounter(0),
_noiseGateOpen(false),
_noiseGateEnabled(true),
_peqEnabled(false),
_toneInjectionEnabled(false),
_noiseGateFramesToClose(0),
_totalInputAudioSamples(0),
@@ -102,6 +101,7 @@ Audio::Audio(QObject* parent) :
_scopeOutputOffset(0),
_framesPerScope(DEFAULT_FRAMES_PER_SCOPE),
_samplesPerScope(NETWORK_SAMPLES_PER_FRAME * _framesPerScope),
_peqEnabled(false),
_scopeInput(0),
_scopeOutputLeft(0),
_scopeOutputRight(0),
@@ -475,7 +475,7 @@ void Audio::handleAudioInput() {
int16_t* ioBuffer = (int16_t*)inputByteArray.data();
_peq.render( ioBuffer, ioBuffer, inputByteArray.size() / sizeof(int16_t) );
_peq.render(ioBuffer, ioBuffer, inputByteArray.size() / sizeof(int16_t));
}
if (Menu::getInstance()->isOptionChecked(MenuOption::EchoLocalAudio) && !_muted && _audioOutput) {
@@ -676,6 +676,11 @@ void Audio::handleAudioInput() {
NodeList* nodeList = NodeList::getInstance();
SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
if (_recorder && _recorder.data()->isRecording()) {
_recorder.data()->record(reinterpret_cast<char*>(networkAudioSamples), numNetworkBytes);
}
if (audioMixer && audioMixer->getActiveSocket()) {
MyAvatar* interfaceAvatar = Application::getInstance()->getAvatar();
glm::vec3 headPosition = interfaceAvatar->getHead()->getPosition();
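The capture hook above dereferences the weak recorder pointer with data(); an equivalent guard that first promotes it to a strong reference is sketched below (an assumption about intent, not code from this change):

// Promote the weak pointer before use so the Recorder cannot be destroyed mid-call.
RecorderPointer recorder = _recorder.toStrongRef();   // null if the owning MyAvatar dropped it
if (recorder && recorder->isRecording()) {
    recorder->record(reinterpret_cast<char*>(networkAudioSamples), numNetworkBytes);
}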

View file

@@ -17,6 +17,7 @@
#include "InterfaceConfig.h"
#include "AudioStreamStats.h"
#include "Recorder.h"
#include "RingBufferHistory.h"
#include "MovingMinMaxAvg.h"
#include "AudioFilter.h"
@@ -102,6 +103,9 @@ public:
float getAudioOutputMsecsUnplayed() const;
float getAudioOutputAverageMsecsUnplayed() const { return (float)_audioOutputMsecsUnplayedStats.getWindowAverage(); }
void setRecorder(RecorderPointer recorder) { _recorder = recorder; }
void setPlayer(PlayerPointer player) { _player = player; }
public slots:
void start();
@@ -308,6 +312,9 @@ private:
MovingMinMaxAvg<quint64> _packetSentTimeGaps;
AudioOutputIODevice _audioOutputIODevice;
WeakRecorderPointer _recorder;
WeakPlayerPointer _player;
};

interface/src/Recorder.cpp (new file, 314 lines)
View file

@@ -0,0 +1,314 @@
//
// Recorder.cpp
//
//
// Created by Clement on 8/7/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <GLMHelpers.h>
#include <QMetaObject>
#include "Recorder.h"
void RecordingFrame::setBlendshapeCoefficients(QVector<float> blendshapeCoefficients) {
_blendshapeCoefficients = blendshapeCoefficients;
}
void RecordingFrame::setJointRotations(QVector<glm::quat> jointRotations) {
_jointRotations = jointRotations;
}
void RecordingFrame::setTranslation(glm::vec3 translation) {
_translation = translation;
}
void RecordingFrame::setRotation(glm::quat rotation) {
_rotation = rotation;
}
void RecordingFrame::setScale(float scale) {
_scale = scale;
}
void RecordingFrame::setHeadRotation(glm::quat headRotation) {
_headRotation = headRotation;
}
void RecordingFrame::setLeanSideways(float leanSideways) {
_leanSideways = leanSideways;
}
void RecordingFrame::setLeanForward(float leanForward) {
_leanForward = leanForward;
}
Recording::Recording() : _audio(NULL) {
}
Recording::~Recording() {
delete _audio;
}
void Recording::addFrame(int timestamp, RecordingFrame &frame) {
_timestamps << timestamp;
_frames << frame;
}
void Recording::addAudioPacket(QByteArray byteArray) {
if (!_audio) {
_audio = new Sound(byteArray);
return;
}
_audio->append(byteArray);
}
void Recording::clear() {
_timestamps.clear();
_frames.clear();
delete _audio;
_audio = NULL;
}
Recorder::Recorder(AvatarData* avatar) :
_recording(new Recording()),
_avatar(avatar)
{
_timer.invalidate();
}
bool Recorder::isRecording() const {
return _timer.isValid();
}
qint64 Recorder::elapsed() const {
if (isRecording()) {
return _timer.elapsed();
} else {
return 0;
}
}
void Recorder::startRecording() {
qDebug() << "Recorder::startRecording()";
_recording->clear();
_timer.start();
RecordingFrame frame;
frame.setBlendshapeCoefficients(_avatar->getHeadData()->getBlendshapeCoefficients());
frame.setJointRotations(_avatar->getJointRotations());
frame.setTranslation(_avatar->getPosition());
frame.setRotation(_avatar->getOrientation());
frame.setScale(_avatar->getTargetScale());
const HeadData* head = _avatar->getHeadData();
glm::quat rotation = glm::quat(glm::radians(glm::vec3(head->getFinalPitch(),
head->getFinalYaw(),
head->getFinalRoll())));
frame.setHeadRotation(rotation);
frame.setLeanForward(_avatar->getHeadData()->getLeanForward());
frame.setLeanSideways(_avatar->getHeadData()->getLeanSideways());
_recording->addFrame(0, frame);
}
void Recorder::stopRecording() {
qDebug() << "Recorder::stopRecording()";
_timer.invalidate();
qDebug().nospace() << "Recorded " << _recording->getFrameNumber() << " frames during " << _recording->getLength() << " msec (" << _recording->getFrameNumber() / (_recording->getLength() / 1000.0f) << " fps)";
}
void Recorder::saveToFile(QString file) {
if (_recording->isEmpty()) {
qDebug() << "Cannot save recording to file, recording is empty.";
}
writeRecordingToFile(_recording, file);
}
void Recorder::record() {
if (isRecording()) {
const RecordingFrame& referenceFrame = _recording->getFrame(0);
RecordingFrame frame;
frame.setBlendshapeCoefficients(_avatar->getHeadData()->getBlendshapeCoefficients());
frame.setJointRotations(_avatar->getJointRotations());
frame.setTranslation(_avatar->getPosition() - referenceFrame.getTranslation());
frame.setRotation(glm::inverse(referenceFrame.getRotation()) * _avatar->getOrientation());
frame.setScale(_avatar->getTargetScale() / referenceFrame.getScale());
const HeadData* head = _avatar->getHeadData();
glm::quat rotation = glm::quat(glm::radians(glm::vec3(head->getFinalPitch(),
head->getFinalYaw(),
head->getFinalRoll())));
frame.setHeadRotation(rotation);
frame.setLeanForward(_avatar->getHeadData()->getLeanForward());
frame.setLeanSideways(_avatar->getHeadData()->getLeanSideways());
_recording->addFrame(_timer.elapsed(), frame);
}
}
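A small worked example of the delta encoding used above: frame 0, written by startRecording(), stores the absolute pose, and every later frame stores its pose relative to that reference; Player::play() further down recomposes the absolute values from the pair.

// Reference frame 0:  translation (1, 0, 5), scale 1.0, orientation qRef
// Avatar currently:   translation (1, 2, 5), scale 2.0, orientation q
// Stored in frame N:  translation (0, 2, 0)          = current - reference
//                     rotation    inverse(qRef) * q  = orientation delta from the reference
//                     scale       2.0                = current / reference
// Playback rebuilds:  reference + translation, qRef * rotation, reference scale * stored scale.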
void Recorder::record(char* samples, int size) {
QByteArray byteArray(samples, size);
_recording->addAudioPacket(byteArray);
}
Player::Player(AvatarData* avatar) :
_recording(new Recording()),
_avatar(avatar),
_audioThread(NULL)
{
_timer.invalidate();
_options.setLoop(false);
_options.setVolume(1.0f);
}
bool Player::isPlaying() const {
return _timer.isValid();
}
qint64 Player::elapsed() const {
if (isPlaying()) {
return _timer.elapsed();
} else {
return 0;
}
}
glm::quat Player::getHeadRotation() {
if (!computeCurrentFrame()) {
qWarning() << "Incorrect use of Player::getHeadRotation()";
return glm::quat();
}
if (_currentFrame == 0) {
return _recording->getFrame(_currentFrame).getHeadRotation();
}
return _recording->getFrame(0).getHeadRotation() *
_recording->getFrame(_currentFrame).getHeadRotation();
}
float Player::getLeanSideways() {
if (!computeCurrentFrame()) {
qWarning() << "Incorrect use of Player::getLeanSideways()";
return 0.0f;
}
return _recording->getFrame(_currentFrame).getLeanSideways();
}
float Player::getLeanForward() {
if (!computeCurrentFrame()) {
qWarning() << "Incorrect use of Player::getLeanForward()";
return 0.0f;
}
return _recording->getFrame(_currentFrame).getLeanForward();
}
void Player::startPlaying() {
if (_recording && _recording->getFrameNumber() > 0) {
qDebug() << "Recorder::startPlaying()";
_currentFrame = 0;
_options.setPosition(_avatar->getPosition());
_options.setOrientation(_avatar->getOrientation());
_injector.reset(new AudioInjector(_recording->getAudio(), _options));
_audioThread = new QThread();
_injector->moveToThread(_audioThread);
_audioThread->start();
QMetaObject::invokeMethod(_injector.data(), "injectAudio", Qt::QueuedConnection);
_timer.start();
}
}
void Player::stopPlaying() {
if (!isPlaying()) {
return;
}
qDebug() << "Recorder::stopPlaying()";
_timer.invalidate();
_avatar->clearJointsData();
// Cleanup audio thread
_injector->stop();
_injector.clear();
_audioThread->exit();
_audioThread->deleteLater();
}
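The teardown above schedules the audio thread for deletion immediately after asking it to exit; a slightly more defensive variant, assuming the usual QThread shutdown sequence (a sketch, not what this change does):

_injector->stop();
_injector.clear();
_audioThread->quit();
_audioThread->wait();          // block until the thread's event loop has actually finished
_audioThread->deleteLater();
_audioThread = NULL;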
void Player::loadFromFile(QString file) {
if (_recording) {
_recording->clear();
} else {
_recording = RecordingPointer(new Recording());
}
readRecordingFromFile(_recording, file);
}
void Player::loadRecording(RecordingPointer recording) {
_recording = recording;
}
void Player::play() {
computeCurrentFrame();
if (_currentFrame < 0 || _currentFrame >= _recording->getFrameNumber()) {
// If it's the end of the recording, stop playing
stopPlaying();
return;
}
if (_currentFrame == 0) {
_avatar->setPosition(_recording->getFrame(_currentFrame).getTranslation());
_avatar->setOrientation(_recording->getFrame(_currentFrame).getRotation());
_avatar->setTargetScale(_recording->getFrame(_currentFrame).getScale());
_avatar->setJointRotations(_recording->getFrame(_currentFrame).getJointRotations());
HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
head->setBlendshapeCoefficients(_recording->getFrame(_currentFrame).getBlendshapeCoefficients());
} else {
_avatar->setPosition(_recording->getFrame(0).getTranslation() +
_recording->getFrame(_currentFrame).getTranslation());
_avatar->setOrientation(_recording->getFrame(0).getRotation() *
_recording->getFrame(_currentFrame).getRotation());
_avatar->setTargetScale(_recording->getFrame(0).getScale() *
_recording->getFrame(_currentFrame).getScale());
_avatar->setJointRotations(_recording->getFrame(_currentFrame).getJointRotations());
HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
head->setBlendshapeCoefficients(_recording->getFrame(_currentFrame).getBlendshapeCoefficients());
}
}
bool Player::computeCurrentFrame() {
if (!isPlaying()) {
_currentFrame = -1;
return false;
}
if (_currentFrame < 0) {
_currentFrame = 0;
}
while (_currentFrame < _recording->getFrameNumber() &&
_recording->getFrameTimestamp(_currentFrame) < _timer.elapsed()) {
++_currentFrame;
}
return true;
}
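Worked example of the frame search above, assuming four recorded frames:

// timestamps: [0, 33, 66, 100] ms, _timer.elapsed() == 70 ms
// 0 < 70, 33 < 70, 66 < 70, 100 >= 70  =>  _currentFrame stops at index 3,
// the first frame whose timestamp has not yet been reached.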
void writeRecordingToFile(RecordingPointer recording, QString file) {
// TODO
qDebug() << "Writing recording to " << file;
}
RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file) {
// TODO
qDebug() << "Reading recording from " << file;
return recording;
}

interface/src/Recorder.h (new file, 170 lines)
View file

@@ -0,0 +1,170 @@
//
// Recorder.h
//
//
// Created by Clement on 8/7/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_Recorder_h
#define hifi_Recorder_h
#include <QBitArray>
#include <QElapsedTimer>
#include <QHash>
#include <QSharedPointer>
#include <QVector>
#include <QWeakPointer>
#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>
#include <AudioInjector.h>
#include <AvatarData.h>
#include <SharedUtil.h>
#include <Sound.h>
class Recorder;
class Recording;
class Player;
typedef QSharedPointer<Recording> RecordingPointer;
typedef QSharedPointer<Recorder> RecorderPointer;
typedef QWeakPointer<Recorder> WeakRecorderPointer;
typedef QSharedPointer<Player> PlayerPointer;
typedef QWeakPointer<Player> WeakPlayerPointer;
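These shared/weak pointer pairs encode the ownership model used throughout this change: MyAvatar holds the strong RecorderPointer and PlayerPointer, while Audio only observes them through the weak variants. A minimal illustrative sketch (avatar is a placeholder AvatarData*):

RecorderPointer recorder(new Recorder(avatar));   // strong reference, as in MyAvatar::_recorder
WeakRecorderPointer observer = recorder;          // non-owning reference, as in Audio::_recorder
// observer.toStrongRef() yields a valid pointer only while a strong reference is still alive.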
/// Stores the values associated with one recording frame
class RecordingFrame {
public:
QVector<float> getBlendshapeCoefficients() const { return _blendshapeCoefficients; }
QVector<glm::quat> getJointRotations() const { return _jointRotations; }
glm::vec3 getTranslation() const { return _translation; }
glm::quat getRotation() const { return _rotation; }
float getScale() const { return _scale; }
glm::quat getHeadRotation() const { return _headRotation; }
float getLeanSideways() const { return _leanSideways; }
float getLeanForward() const { return _leanForward; }
protected:
void setBlendshapeCoefficients(QVector<float> blendshapeCoefficients);
void setJointRotations(QVector<glm::quat> jointRotations);
void setTranslation(glm::vec3 translation);
void setRotation(glm::quat rotation);
void setScale(float scale);
void setHeadRotation(glm::quat headRotation);
void setLeanSideways(float leanSideways);
void setLeanForward(float leanForward);
private:
QVector<float> _blendshapeCoefficients;
QVector<glm::quat> _jointRotations;
glm::vec3 _translation;
glm::quat _rotation;
float _scale;
glm::quat _headRotation;
float _leanSideways;
float _leanForward;
friend class Recorder;
friend void writeRecordingToFile(RecordingPointer recording, QString file);
friend RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file);
};
/// Stores a recording
class Recording {
public:
Recording();
~Recording();
bool isEmpty() const { return _timestamps.isEmpty(); }
int getLength() const { return _timestamps.last(); } // in ms
int getFrameNumber() const { return _frames.size(); }
qint32 getFrameTimestamp(int i) const { return _timestamps[i]; }
const RecordingFrame& getFrame(int i) const { return _frames[i]; }
Sound* getAudio() const { return _audio; }
protected:
void addFrame(int timestamp, RecordingFrame& frame);
void addAudioPacket(QByteArray byteArray);
void clear();
private:
QVector<qint32> _timestamps;
QVector<RecordingFrame> _frames;
Sound* _audio;
friend class Recorder;
friend class Player;
friend void writeRecordingToFile(RecordingPointer recording, QString file);
friend RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file);
};
/// Records avatar motion and audio into a Recording
class Recorder {
public:
Recorder(AvatarData* avatar);
bool isRecording() const;
qint64 elapsed() const;
RecordingPointer getRecording() const { return _recording; }
public slots:
void startRecording();
void stopRecording();
void saveToFile(QString file);
void record();
void record(char* samples, int size);
private:
QElapsedTimer _timer;
RecordingPointer _recording;
AvatarData* _avatar;
};
/// Plays back a recording
class Player {
public:
Player(AvatarData* avatar);
bool isPlaying() const;
qint64 elapsed() const;
// These should only be called if isPlaying() returns true
glm::quat getHeadRotation();
float getLeanSideways();
float getLeanForward();
public slots:
void startPlaying();
void stopPlaying();
void loadFromFile(QString file);
void loadRecording(RecordingPointer recording);
void play();
private:
bool computeCurrentFrame();
QElapsedTimer _timer;
RecordingPointer _recording;
int _currentFrame;
QSharedPointer<AudioInjector> _injector;
AudioInjectorOptions _options;
AvatarData* _avatar;
QThread* _audioThread;
};
void writeRecordingToFile(RecordingPointer recording, QString file);
RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file);
#endif // hifi_Recorder_h

View file

@@ -29,6 +29,7 @@
#include "Menu.h"
#include "ModelReferential.h"
#include "Physics.h"
#include "Recorder.h"
#include "world.h"
#include "devices/OculusManager.h"
#include "renderer/TextureCache.h"
@@ -725,6 +726,17 @@ bool Avatar::findCollisions(const QVector<const Shape*>& shapes, CollisionList&
return collided;
}
QVector<glm::quat> Avatar::getJointRotations() const {
if (QThread::currentThread() != thread()) {
return AvatarData::getJointRotations();
}
QVector<glm::quat> jointRotations(_skeletonModel.getJointStateCount());
for (int i = 0; i < _skeletonModel.getJointStateCount(); ++i) {
_skeletonModel.getJointState(i, jointRotations[i]);
}
return jointRotations;
}
glm::quat Avatar::getJointRotation(int index) const {
if (QThread::currentThread() != thread()) {
return AvatarData::getJointRotation(index);

View file

@@ -23,6 +23,7 @@
#include "Hand.h"
#include "Head.h"
#include "InterfaceConfig.h"
#include "Recorder.h"
#include "SkeletonModel.h"
#include "world.h"
@@ -121,6 +122,7 @@ public:
virtual bool isMyAvatar() { return false; }
virtual QVector<glm::quat> getJointRotations() const;
virtual glm::quat getJointRotation(int index) const;
virtual int getJointIndex(const QString& name) const;
virtual QStringList getJointNames() const;
@@ -220,8 +222,6 @@ private:
void renderBillboard();
float getBillboardSize() const;
};
#endif // hifi_Avatar_h

View file

@@ -64,13 +64,18 @@ void Head::reset() {
void Head::simulate(float deltaTime, bool isMine, bool billboard) {
// Update audio trailing average for rendering facial animations
if (isMine) {
FaceTracker* faceTracker = Application::getInstance()->getActiveFaceTracker();
if ((_isFaceshiftConnected = faceTracker)) {
_blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
_isFaceshiftConnected = true;
} else if (Application::getInstance()->getDDE()->isActive()) {
faceTracker = Application::getInstance()->getDDE();
_blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
// Only use face trackers when not playing back a recording.
if (!myAvatar->isPlaying()) {
FaceTracker* faceTracker = Application::getInstance()->getActiveFaceTracker();
if ((_isFaceshiftConnected = faceTracker)) {
_blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
_isFaceshiftConnected = true;
} else if (Application::getInstance()->getDDE()->isActive()) {
faceTracker = Application::getInstance()->getDDE();
_blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
}
}
}

View file

@@ -48,8 +48,6 @@ public:
void setAverageLoudness(float averageLoudness) { _averageLoudness = averageLoudness; }
void setReturnToCenter (bool returnHeadToCenter) { _returnHeadToCenter = returnHeadToCenter; }
void setRenderLookatVectors(bool onOff) { _renderLookatVectors = onOff; }
void setLeanSideways(float leanSideways) { _leanSideways = leanSideways; }
void setLeanForward(float leanForward) { _leanForward = leanForward; }
/// \return orientationBase+Delta
glm::quat getFinalOrientationInLocalFrame() const;
@@ -57,7 +55,6 @@ public:
/// \return orientationBody * (orientationBase+Delta)
glm::quat getFinalOrientationInWorldFrame() const;
/// \return orientationBody * orientationBasePitch
glm::quat getCameraOrientation () const;
@@ -71,8 +68,6 @@ public:
glm::vec3 getRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }
glm::vec3 getUpDirection() const { return getOrientation() * IDENTITY_UP; }
glm::vec3 getFrontDirection() const { return getOrientation() * IDENTITY_FRONT; }
float getLeanSideways() const { return _leanSideways; }
float getLeanForward() const { return _leanForward; }
float getFinalLeanSideways() const { return _leanSideways + _deltaLeanSideways; }
float getFinalLeanForward() const { return _leanForward + _deltaLeanForward; }

View file

@@ -35,6 +35,7 @@
#include "ModelReferential.h"
#include "MyAvatar.h"
#include "Physics.h"
#include "Recorder.h"
#include "devices/Faceshift.h"
#include "devices/OculusManager.h"
#include "ui/TextRenderer.h"
@@ -136,6 +137,12 @@ void MyAvatar::update(float deltaTime) {
void MyAvatar::simulate(float deltaTime) {
PerformanceTimer perfTimer("simulate");
// Play back recording
if (_player && _player->isPlaying()) {
_player->play();
}
if (_scale != _targetScale) {
float scale = (1.0f - SMOOTHING_RATIO) * _scale + SMOOTHING_RATIO * _targetScale;
setScale(scale);
@@ -148,7 +155,7 @@ void MyAvatar::simulate(float deltaTime) {
updateOrientation(deltaTime);
updatePosition(deltaTime);
}
{
PerformanceTimer perfTimer("hand");
// update avatar skeleton and simulate hand and head
@@ -252,6 +259,11 @@ void MyAvatar::simulate(float deltaTime) {
}
}
// Record avatars movements.
if (_recorder && _recorder->isRecording()) {
_recorder->record();
}
// consider updating our billboard
maybeUpdateBillboard();
}
@@ -260,7 +272,9 @@ void MyAvatar::simulate(float deltaTime) {
void MyAvatar::updateFromTrackers(float deltaTime) {
glm::vec3 estimatedPosition, estimatedRotation;
if (Application::getInstance()->getPrioVR()->hasHeadRotation()) {
if (isPlaying()) {
estimatedRotation = glm::degrees(safeEulerAngles(_player->getHeadRotation()));
} else if (Application::getInstance()->getPrioVR()->hasHeadRotation()) {
estimatedRotation = glm::degrees(safeEulerAngles(Application::getInstance()->getPrioVR()->getHeadRotation()));
estimatedRotation.x *= -1.0f;
estimatedRotation.z *= -1.0f;
@@ -302,7 +316,7 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
Head* head = getHead();
if (OculusManager::isConnected()) {
if (OculusManager::isConnected() || isPlaying()) {
head->setDeltaPitch(estimatedRotation.x);
head->setDeltaYaw(estimatedRotation.y);
} else {
@@ -312,6 +326,11 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
}
head->setDeltaRoll(estimatedRotation.z);
if (isPlaying()) {
head->setLeanSideways(_player->getLeanSideways());
head->setLeanForward(_player->getLeanForward());
return;
}
// the priovr can give us exact lean
if (Application::getInstance()->getPrioVR()->isActive()) {
glm::vec3 eulers = glm::degrees(safeEulerAngles(Application::getInstance()->getPrioVR()->getTorsoRotation()));
@@ -319,7 +338,6 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
head->setLeanForward(eulers.x);
return;
}
// Update torso lean distance based on accelerometer data
const float TORSO_LENGTH = 0.5f;
glm::vec3 relativePosition = estimatedPosition - glm::vec3(0.0f, -TORSO_LENGTH, 0.0f);
@@ -490,6 +508,45 @@ bool MyAvatar::setJointReferential(int id, int jointIndex) {
}
}
bool MyAvatar::isRecording() const {
return _recorder && _recorder->isRecording();
}
RecorderPointer MyAvatar::startRecording() {
if (!_recorder) {
_recorder = RecorderPointer(new Recorder(this));
}
_recorder->startRecording();
return _recorder;
}
void MyAvatar::stopRecording() {
if (_recorder) {
_recorder->stopRecording();
}
}
bool MyAvatar::isPlaying() const {
return _player && _player->isPlaying();
}
PlayerPointer MyAvatar::startPlaying() {
if (!_player) {
_player = PlayerPointer(new Player(this));
}
if (_recorder) {
_player->loadRecording(_recorder->getRecording());
_player->startPlaying();
}
return _player;
}
void MyAvatar::stopPlaying() {
if (_player) {
_player->stopPlaying();
}
}
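Taken together, a minimal usage sketch of this API, mirroring the Application::keyPressEvent wiring above (myAvatar and audio stand in for _myAvatar and _audio; note that startPlaying() only begins playback if a recorder already exists, since it loads that recorder's recording, with loadRecording() and loadFromFile() as the alternative entry points):

// Toggle recording (Ctrl/Cmd+R in the key handler):
if (!myAvatar->isRecording()) {
    audio.setRecorder(myAvatar->startRecording());   // startRecording() returns the recorder it creates
} else {
    myAvatar->stopRecording();
}

// Toggle playback of the last recording (bare R in the key handler):
if (!myAvatar->isPlaying()) {
    audio.setPlayer(myAvatar->startPlaying());        // startPlaying() returns the player
} else {
    myAvatar->stopPlaying();
}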
void MyAvatar::setLocalGravity(glm::vec3 gravity) {
_motionBehaviors |= AVATAR_MOTION_OBEY_LOCAL_GRAVITY;
// Environmental and Local gravities are incompatible. Since Local is being set here
@@ -872,6 +929,14 @@ glm::vec3 MyAvatar::getUprightHeadPosition() const {
const float JOINT_PRIORITY = 2.0f;
void MyAvatar::setJointRotations(QVector<glm::quat> jointRotations) {
for (int i = 0; i < jointRotations.size(); ++i) {
if (i < _jointData.size()) {
_skeletonModel.setJointState(i, true, jointRotations[i], JOINT_PRIORITY + 1.0f);
}
}
}
void MyAvatar::setJointData(int index, const glm::quat& rotation) {
Avatar::setJointData(index, rotation);
if (QThread::currentThread() == thread()) {
@@ -886,6 +951,15 @@ void MyAvatar::clearJointData(int index) {
}
}
void MyAvatar::clearJointsData() {
for (int i = 0; i < _jointData.size(); ++i) {
Avatar::clearJointData(i);
if (QThread::currentThread() == thread()) {
_skeletonModel.clearJointState(i);
}
}
}
void MyAvatar::setFaceModelURL(const QUrl& faceModelURL) {
Avatar::setFaceModelURL(faceModelURL);
_billboardValid = false;

View file

@@ -112,8 +112,10 @@ public:
void updateLookAtTargetAvatar();
void clearLookAtTargetAvatar();
virtual void setJointRotations(QVector<glm::quat> jointRotations);
virtual void setJointData(int index, const glm::quat& rotation);
virtual void clearJointData(int index);
virtual void clearJointsData();
virtual void setFaceModelURL(const QUrl& faceModelURL);
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL);
virtual void setAttachmentData(const QVector<AttachmentData>& attachmentData);
@@ -155,6 +157,17 @@ public slots:
bool setModelReferential(int id);
bool setJointReferential(int id, int jointIndex);
const RecorderPointer getRecorder() const { return _recorder; }
bool isRecording() const;
RecorderPointer startRecording();
void stopRecording();
const PlayerPointer getPlayer() const { return _player; }
bool isPlaying() const;
PlayerPointer startPlaying();
void stopPlaying();
signals:
void transformChanged();
@@ -192,6 +205,9 @@ private:
QList<AnimationHandlePointer> _animationHandles;
PhysicsSimulation _physicsSimulation;
RecorderPointer _recorder;
PlayerPointer _player;
// private methods
float computeDistanceToFloor(const glm::vec3& startPoint);
void updateOrientation(float deltaTime);

View file

@@ -62,9 +62,15 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
Model::simulate(deltaTime, fullUpdate);
if (!(isActive() && _owningAvatar->isMyAvatar())) {
if (!isActive() || !_owningAvatar->isMyAvatar()) {
return; // only simulate for own avatar
}
MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
if (myAvatar->isPlaying()) {
// Don't take inputs if playing back a recording.
return;
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
PrioVR* prioVR = Application::getInstance()->getPrioVR();

View file

@@ -692,6 +692,14 @@ bool Model::getVisibleJointState(int index, glm::quat& rotation) const {
return !state.rotationIsDefault(rotation);
}
void Model::clearJointState(int index) {
if (index != -1 && index < _jointStates.size()) {
JointState& state = _jointStates[index];
state.setRotationInConstrainedFrame(glm::quat());
state._animationPriority = 0.0f;
}
}
void Model::setJointState(int index, bool valid, const glm::quat& rotation, float priority) {
if (index != -1 && index < _jointStates.size()) {
JointState& state = _jointStates[index];

View file

@@ -118,6 +118,9 @@ public:
/// \return whether or not the joint state is "valid" (that is, non-default)
bool getVisibleJointState(int index, glm::quat& rotation) const;
/// Clears the joint state at the specified index.
void clearJointState(int index);
/// Sets the joint state at the specified index.
void setJointState(int index, bool valid, const glm::quat& rotation = glm::quat(), float priority = 1.0f);

View file

@@ -37,8 +37,8 @@ public:
float getVolume() const { return _volume; }
void setVolume(float volume) { _volume = volume; }
float getLoop() const { return _loop; }
void setLoop(float loop) { _loop = loop; }
bool getLoop() const { return _loop; }
void setLoop(bool loop) { _loop = loop; }
const glm::quat& getOrientation() const { return _orientation; }
void setOrientation(const glm::quat& orientation) { _orientation = orientation; }

View file

@@ -82,6 +82,17 @@ Sound::Sound(const QUrl& sampleURL, QObject* parent) :
connect(soundDownload, SIGNAL(error(QNetworkReply::NetworkError)), this, SLOT(replyError(QNetworkReply::NetworkError)));
}
Sound::Sound(const QByteArray byteArray, QObject* parent) :
QObject(parent),
_byteArray(byteArray),
_hasDownloaded(true)
{
}
void Sound::append(const QByteArray byteArray) {
_byteArray.append(byteArray);
}
void Sound::replyFinished() {
QNetworkReply* reply = reinterpret_cast<QNetworkReply*>(sender());

View file

@@ -22,6 +22,8 @@ class Sound : public QObject {
public:
Sound(const QUrl& sampleURL, QObject* parent = NULL);
Sound(float volume, float frequency, float duration, float decay, QObject* parent = NULL);
Sound(const QByteArray byteArray, QObject* parent = NULL);
void append(const QByteArray byteArray);
bool hasDownloaded() const { return _hasDownloaded; }

View file

@@ -683,6 +683,41 @@ glm::quat AvatarData::getJointRotation(const QString& name) const {
return getJointRotation(getJointIndex(name));
}
QVector<glm::quat> AvatarData::getJointRotations() const {
if (QThread::currentThread() != thread()) {
QVector<glm::quat> result;
QMetaObject::invokeMethod(const_cast<AvatarData*>(this),
"getJointRotation", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(QVector<glm::quat>, result));
return result;
}
QVector<glm::quat> jointRotations(_jointData.size());
for (int i = 0; i < _jointData.size(); ++i) {
jointRotations[i] = _jointData[i].rotation;
}
return jointRotations;
}
void AvatarData::setJointRotations(QVector<glm::quat> jointRotations) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(const_cast<AvatarData*>(this),
"setJointRotations", Qt::BlockingQueuedConnection,
Q_ARG(QVector<glm::quat>, jointRotations));
return;
}
for (int i = 0; i < jointRotations.size(); ++i) {
if (i < _jointData.size()) {
setJointData(i, jointRotations[i]);
}
}
}
void AvatarData::clearJointsData() {
for (int i = 0; i < _jointData.size(); ++i) {
clearJointData(i);
}
}
bool AvatarData::hasIdentityChangedAfterParsing(const QByteArray &packet) {
QDataStream packetStream(packet);
packetStream.skipRawData(numBytesForPacketHeader(packet));

View file

@@ -210,7 +210,12 @@ public:
Q_INVOKABLE void clearJointData(const QString& name);
Q_INVOKABLE bool isJointDataValid(const QString& name) const;
Q_INVOKABLE glm::quat getJointRotation(const QString& name) const;
Q_INVOKABLE virtual QVector<glm::quat> getJointRotations() const;
Q_INVOKABLE virtual void setJointRotations(QVector<glm::quat> jointRotations);
Q_INVOKABLE virtual void clearJointsData();
/// Returns the index of the joint with the specified name, or -1 if not found/unknown.
Q_INVOKABLE virtual int getJointIndex(const QString& name) const { return _jointIndices.value(name) - 1; }

View file

@@ -56,6 +56,7 @@ public:
void setBlendshape(QString name, float val);
const QVector<float>& getBlendshapeCoefficients() const { return _blendshapeCoefficients; }
void setBlendshapeCoefficients(const QVector<float>& blendshapeCoefficients) { _blendshapeCoefficients = blendshapeCoefficients; }
float getPupilDilation() const { return _pupilDilation; }
void setPupilDilation(float pupilDilation) { _pupilDilation = pupilDilation; }
@@ -68,6 +69,15 @@ public:
const glm::vec3& getLookAtPosition() const { return _lookAtPosition; }
void setLookAtPosition(const glm::vec3& lookAtPosition) { _lookAtPosition = lookAtPosition; }
float getLeanSideways() const { return _leanSideways; }
float getLeanForward() const { return _leanForward; }
virtual float getFinalLeanSideways() const { return _leanSideways; }
virtual float getFinalLeanForward() const { return _leanForward; }
void setLeanSideways(float leanSideways) { _leanSideways = leanSideways; }
void setLeanForward(float leanForward) { _leanForward = leanForward; }
friend class AvatarData;
protected: