More recording work
parent 792f779bbf · commit 73a3a13c59
11 changed files with 158 additions and 50 deletions

@@ -1048,10 +1048,19 @@ void Application::keyPressEvent(QKeyEvent* event) {
            break;
        case Qt::Key_R:
            if (isShifted) {
                Menu::getInstance()->triggerOption(MenuOption::FrustumRenderMode);
                if (_myAvatar->isRecording()) {
                    _myAvatar->stopRecording();
                } else {
                    _myAvatar->startRecording();
                }
            } else {
                if (_myAvatar->isPlaying()) {
                    _myAvatar->stopPlaying();
                } else {
                    _myAvatar->startPlaying();
                }
            }
            break;
            break;
        case Qt::Key_Percent:
            Menu::getInstance()->triggerOption(MenuOption::Stats);
            break;

@@ -45,14 +45,6 @@ void RecordingFrame::setLeanForward(float leanForward) {
    _leanForward = leanForward;
}

void RecordingFrame::setEstimatedEyePitch(float estimatedEyePitch) {
    _estimatedEyePitch = estimatedEyePitch;
}

void RecordingFrame::setEstimatedEyeYaw(float estimatedEyeYaw) {
    _estimatedEyeYaw = estimatedEyeYaw;
}

void Recording::addFrame(int timestamp, RecordingFrame &frame) {
    _timestamps << timestamp;
    _frames << frame;
@@ -93,15 +85,13 @@ void Recorder::startRecording() {
    frame.setRotation(_avatar->getOrientation());
    frame.setScale(_avatar->getTargetScale());

    // TODO
    const HeadData* head = _avatar->getHeadData();
    glm::quat rotation = glm::quat(glm::radians(glm::vec3(head->getFinalPitch(),
                                                          head->getFinalYaw(),
                                                          head->getFinalRoll())));
    frame.setHeadRotation(rotation);
    // TODO
    //frame.setEstimatedEyePitch();
    //frame.setEstimatedEyeYaw();
    frame.setLeanForward(_avatar->getHeadData()->getLeanForward());
    frame.setLeanSideways(_avatar->getHeadData()->getLeanSideways());

    _recording->addFrame(0, frame);
}

@@ -130,16 +120,16 @@ void Recorder::record() {
        frame.setTranslation(_avatar->getPosition() - referenceFrame.getTranslation());
        frame.setRotation(glm::inverse(referenceFrame.getRotation()) * _avatar->getOrientation());
        frame.setScale(_avatar->getTargetScale() / referenceFrame.getScale());
        // TODO
        //frame.setHeadTranslation();

        const HeadData* head = _avatar->getHeadData();
        glm::quat rotation = glm::quat(glm::radians(glm::vec3(head->getFinalPitch(),
                                                              head->getFinalYaw(),
                                                              head->getFinalRoll())));
        frame.setHeadRotation(glm::inverse(referenceFrame.getHeadRotation()) * rotation);
        // TODO
        //frame.setEstimatedEyePitch();
        //frame.setEstimatedEyeYaw();
        frame.setHeadRotation(rotation);
        frame.setLeanForward(_avatar->getHeadData()->getLeanForward());
        frame.setLeanSideways(_avatar->getHeadData()->getLeanSideways());

        _recording->addFrame(_timer.elapsed(), frame);
    }
}

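The record() hunk above stores each frame relative to the first captured frame: translation as an offset, rotation as inverse(reference) * current, and scale as a ratio. As a reading aid only, and not part of the commit (PoseDelta, encodeRelative and decodeRelative are invented names), this is the encode/decode convention that Player::play() and the Player getters further down appear to undo, assuming glm:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Hypothetical helper types; not from the commit.
struct PoseDelta {
    glm::vec3 translation; // position - reference position
    glm::quat rotation;    // inverse(reference rotation) * current rotation
    float scale;           // current scale / reference scale
};

PoseDelta encodeRelative(const glm::vec3& pos, const glm::quat& rot, float scale,
                         const glm::vec3& refPos, const glm::quat& refRot, float refScale) {
    // Mirrors the frame.setTranslation/setRotation/setScale calls in record().
    return { pos - refPos, glm::inverse(refRot) * rot, scale / refScale };
}

void decodeRelative(const PoseDelta& d, const glm::vec3& refPos, const glm::quat& refRot,
                    float refScale, glm::vec3& posOut, glm::quat& rotOut, float& scaleOut) {
    // Mirrors how play() recombines frame 0 with the current frame.
    posOut = refPos + d.translation;
    rotOut = refRot * d.rotation;
    scaleOut = refScale * d.scale;
}
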
@@ -164,22 +154,82 @@ qint64 Player::elapsed() const {

QVector<float> Player::getBlendshapeCoefficients() {
    computeCurrentFrame();
    return _recording->getFrame(_currentFrame).getBlendshapeCoefficients();
    if (_currentFrame >= 0 && _currentFrame <= _recording->getFrameNumber()) {
        if (_currentFrame == _recording->getFrameNumber()) {
            return _recording->getFrame(_currentFrame - 1).getBlendshapeCoefficients();
        }

        return _recording->getFrame(_currentFrame).getBlendshapeCoefficients();
    }
    qWarning() << "Incorrect use of Player::getBlendshapeCoefficients()";
    return QVector<float>();
}

QVector<glm::quat> Player::getJointRotations() {
    computeCurrentFrame();
    return _recording->getFrame(_currentFrame).getJointRotations();
    if (_currentFrame >= 0 && _currentFrame <= _recording->getFrameNumber()) {
        if (_currentFrame == _recording->getFrameNumber()) {
            return _recording->getFrame(_currentFrame - 1).getJointRotations();
        }

        return _recording->getFrame(_currentFrame).getJointRotations();
    }
    qWarning() << "Incorrect use of Player::getJointRotations()";
    return QVector<glm::quat>();
}

glm::vec3 Player::getPosition() {
    computeCurrentFrame();
    if (_currentFrame >= 0 && _currentFrame <= _recording->getFrameNumber()) {
        if (_currentFrame == _recording->getFrameNumber()) {
            return _recording->getFrame(0).getTranslation() +
                   _recording->getFrame(_currentFrame - 1).getTranslation();
        }
        if (_currentFrame == 0) {
            return _recording->getFrame(_currentFrame).getTranslation();
        }

        return _recording->getFrame(0).getTranslation() +
               _recording->getFrame(_currentFrame).getTranslation();
    }
    qWarning() << "Incorrect use of Player::getTranslation()";
    return glm::vec3();
}

glm::quat Player::getRotation() {
    computeCurrentFrame();
    return _recording->getFrame(_currentFrame).getRotation();
    if (_currentFrame >= 0 && _currentFrame <= _recording->getFrameNumber()) {
        if (_currentFrame == _recording->getFrameNumber()) {
            return _recording->getFrame(0).getRotation() *
                   _recording->getFrame(_currentFrame - 1).getRotation();
        }
        if (_currentFrame == 0) {
            return _recording->getFrame(_currentFrame).getRotation();
        }

        return _recording->getFrame(0).getRotation() *
               _recording->getFrame(_currentFrame).getRotation();
    }
    qWarning() << "Incorrect use of Player::getRotation()";
    return glm::quat();
}

float Player::getScale() {
    computeCurrentFrame();
    return _recording->getFrame(_currentFrame).getScale();
    if (_currentFrame >= 0 && _currentFrame <= _recording->getFrameNumber()) {
        if (_currentFrame == _recording->getFrameNumber()) {
            return _recording->getFrame(0).getScale() *
                   _recording->getFrame(_currentFrame - 1).getScale();
        }
        if (_currentFrame == 0) {
            return _recording->getFrame(_currentFrame).getScale();
        }

        return _recording->getFrame(0).getScale() *
               _recording->getFrame(_currentFrame).getScale();
    }
    qWarning() << "Incorrect use of Player::getScale()";
    return 1.0f;
}

glm::quat Player::getHeadRotation() {

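The Player getters above repeat one bounds check: _currentFrame is valid in [0, getFrameNumber()], and when it equals getFrameNumber() (the timer has run past the last timestamp) the last stored frame is reused; anything else is logged as incorrect use. A minimal sketch of that rule, not part of the commit and assuming getFrameNumber() is the count of stored frames, as the loop in computeCurrentFrame() below suggests:

// Hypothetical helper; the commit inlines this logic in each getter instead.
// Returns the frame index to read, or -1 when the caller should warn and
// return a default value.
int effectiveFrameIndex(int currentFrame, int frameCount) {
    if (currentFrame < 0 || currentFrame > frameCount) {
        return -1;
    }
    return (currentFrame == frameCount) ? frameCount - 1 : currentFrame;
}

Position, rotation and scale additionally fold frame 0 back in, matching the relative encoding done in Recorder::record().
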
@@ -200,14 +250,30 @@ glm::quat Player::getHeadRotation() {
    return glm::quat();
}

float Player::getEstimatedEyePitch() {
float Player::getLeanSideways() {
    computeCurrentFrame();
    return _recording->getFrame(_currentFrame).getEstimatedEyePitch();
    if (_currentFrame >= 0 && _currentFrame <= _recording->getFrameNumber()) {
        if (_currentFrame == _recording->getFrameNumber()) {
            return _recording->getFrame(_currentFrame - 1).getLeanSideways();
        }

        return _recording->getFrame(_currentFrame).getLeanSideways();
    }
    qWarning() << "Incorrect use of Player::getLeanSideways()";
    return 0.0f;
}

float Player::getEstimatedEyeYaw() {
float Player::getLeanForward() {
    computeCurrentFrame();
    return _recording->getFrame(_currentFrame).getEstimatedEyeYaw();
    if (_currentFrame >= 0 && _currentFrame <= _recording->getFrameNumber()) {
        if (_currentFrame == _recording->getFrameNumber()) {
            return _recording->getFrame(_currentFrame - 1).getLeanForward();
        }

        return _recording->getFrame(_currentFrame).getLeanForward();
    }
    qWarning() << "Incorrect use of Player::getLeanForward()";
    return 0.0f;
}

@@ -249,10 +315,10 @@ void Player::play() {
        _avatar->setPosition(_recording->getFrame(_currentFrame).getTranslation());
        _avatar->setOrientation(_recording->getFrame(_currentFrame).getRotation());
        _avatar->setTargetScale(_recording->getFrame(_currentFrame).getScale());
        _avatar->setJointRotations(_recording->getFrame(_currentFrame).getJointRotations());
        HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
        head->setBlendshapeCoefficients(_recording->getFrame(_currentFrame).getBlendshapeCoefficients());
        // TODO
        // HEAD: Coeff, Translation, estimated eye rotations
        // BODY: Joint Rotations
    } else {
        _avatar->setPosition(_recording->getFrame(0).getTranslation() +
@@ -261,10 +327,10 @@ void Player::play() {
                             _recording->getFrame(_currentFrame).getRotation());
        _avatar->setTargetScale(_recording->getFrame(0).getScale() *
                                _recording->getFrame(_currentFrame).getScale());
        _avatar->setJointRotations(_recording->getFrame(_currentFrame).getJointRotations());
        HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
        head->setBlendshapeCoefficients(_recording->getFrame(_currentFrame).getBlendshapeCoefficients());
        // TODO
        // HEAD: Coeff, Translation, estimated eye rotations
        // BODY: Joint Rotations
    }
}

@@ -282,7 +348,6 @@ void Player::computeCurrentFrame() {

        while (_currentFrame < _recording->getFrameNumber() &&
               _recording->getFrameTimestamp(_currentFrame) < _timer.elapsed()) {
            qDebug() << "Loop";
            ++_currentFrame;
        }
    }

@@ -46,8 +46,6 @@ public:
    glm::quat getHeadRotation() const { return _headRotation; }
    float getLeanSideways() const { return _leanSideways; }
    float getLeanForward() const { return _leanForward; }
    float getEstimatedEyePitch() const { return _estimatedEyePitch; }
    float getEstimatedEyeYaw() const { return _estimatedEyeYaw; }

protected:
    void setBlendshapeCoefficients(QVector<float> blendshapeCoefficients);
@@ -58,8 +56,6 @@ protected:
    void setHeadRotation(glm::quat headRotation);
    void setLeanSideways(float leanSideways);
    void setLeanForward(float leanForward);
    void setEstimatedEyePitch(float estimatedEyePitch);
    void setEstimatedEyeYaw(float estimatedEyeYaw);

private:
    QVector<float> _blendshapeCoefficients;
@@ -70,8 +66,6 @@ private:
    glm::quat _headRotation;
    float _leanSideways;
    float _leanForward;
    float _estimatedEyePitch;
    float _estimatedEyeYaw;

    friend class Recorder;
    friend void writeRecordingToFile(Recording& recording, QString file);
@@ -137,12 +131,12 @@ public:
    // Those should only be called if isPlaying() returns true
    QVector<float> getBlendshapeCoefficients();
    QVector<glm::quat> getJointRotations();
    glm::vec3 getPosition();
    glm::quat getRotation();
    float getScale();
    glm::vec3 getHeadTranslation();
    glm::quat getHeadRotation();
    float getEstimatedEyePitch();
    float getEstimatedEyeYaw();
    float getLeanSideways();
    float getLeanForward();

public slots:
    void startPlaying();

@@ -735,6 +735,7 @@ QVector<glm::quat> Avatar::getJointRotations() const {
    for (int i = 0; i < _skeletonModel.getJointStateCount(); ++i) {
        _skeletonModel.getJointState(i, jointRotations[i]);
    }
    qDebug() << "Get Joints";
    return jointRotations;
}

@@ -48,8 +48,6 @@ public:
    void setAverageLoudness(float averageLoudness) { _averageLoudness = averageLoudness; }
    void setReturnToCenter (bool returnHeadToCenter) { _returnHeadToCenter = returnHeadToCenter; }
    void setRenderLookatVectors(bool onOff) { _renderLookatVectors = onOff; }
    void setLeanSideways(float leanSideways) { _leanSideways = leanSideways; }
    void setLeanForward(float leanForward) { _leanForward = leanForward; }

    /// \return orientationBase+Delta
    glm::quat getFinalOrientationInLocalFrame() const;
@@ -57,7 +55,6 @@ public:
    /// \return orientationBody * (orientationBase+Delta)
    glm::quat getFinalOrientationInWorldFrame() const;

    /// \return orientationBody * orientationBasePitch
    glm::quat getCameraOrientation () const;

@@ -71,8 +68,6 @@ public:
    glm::vec3 getRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }
    glm::vec3 getUpDirection() const { return getOrientation() * IDENTITY_UP; }
    glm::vec3 getFrontDirection() const { return getOrientation() * IDENTITY_FRONT; }
    float getLeanSideways() const { return _leanSideways; }
    float getLeanForward() const { return _leanForward; }
    float getFinalLeanSideways() const { return _leanSideways + _deltaLeanSideways; }
    float getFinalLeanForward() const { return _leanForward + _deltaLeanForward; }

@@ -263,7 +263,6 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
    glm::vec3 estimatedPosition, estimatedRotation;

    if (isPlaying()) {
        //estimatedPosition = _player->getHeadTranslation();
        estimatedRotation = glm::degrees(safeEulerAngles(_player->getHeadRotation()));
    } else if (Application::getInstance()->getPrioVR()->hasHeadRotation()) {
        estimatedRotation = glm::degrees(safeEulerAngles(Application::getInstance()->getPrioVR()->getHeadRotation()));
@@ -311,14 +310,18 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
    }
    head->setDeltaRoll(estimatedRotation.z);

    if (isPlaying()) {
        head->setLeanSideways(_player->getLeanSideways());
        head->setLeanForward(_player->getLeanForward());
        return;
    }
    // the priovr can give us exact lean
    if (Application::getInstance()->getPrioVR()->isActive() && !isPlaying()) {
    if (Application::getInstance()->getPrioVR()->isActive()) {
        glm::vec3 eulers = glm::degrees(safeEulerAngles(Application::getInstance()->getPrioVR()->getTorsoRotation()));
        head->setLeanSideways(eulers.z);
        head->setLeanForward(eulers.x);
        return;
    }

    // Update torso lean distance based on accelerometer data
    const float TORSO_LENGTH = 0.5f;
    glm::vec3 relativePosition = estimatedPosition - glm::vec3(0.0f, -TORSO_LENGTH, 0.0f);

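The hunk above sets an order of precedence for the head lean in updateFromTrackers(): recorded playback first, then the PrioVR torso rotation, and only then the accelerometer-based estimate further down. A compressed sketch of that early-return chain, not from the commit (pickLeanSource and the enum are invented names):

// Hypothetical summary of the precedence encoded by the early returns above.
enum class LeanSource { Playback, PrioVR, Accelerometer };

LeanSource pickLeanSource(bool isPlaying, bool prioVRActive) {
    if (isPlaying) {
        return LeanSource::Playback;      // lean comes from the recording
    }
    if (prioVRActive) {
        return LeanSource::PrioVR;        // "the priovr can give us exact lean"
    }
    return LeanSource::Accelerometer;     // fall through to the estimated lean
}
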
@@ -910,6 +913,14 @@ glm::vec3 MyAvatar::getUprightHeadPosition() const {

const float JOINT_PRIORITY = 2.0f;

void MyAvatar::setJointRotations(QVector<glm::quat> jointRotations) {
    for (int i = 0; i < jointRotations.size(); ++i) {
        if (i < _jointData.size()) {
            _skeletonModel.setJointState(i, true, jointRotations[i]);
        }
    }
}

void MyAvatar::setJointData(int index, const glm::quat& rotation) {
    Avatar::setJointData(index, rotation);
    if (QThread::currentThread() == thread()) {

@@ -112,6 +112,7 @@ public:
    void updateLookAtTargetAvatar();
    void clearLookAtTargetAvatar();

    virtual void setJointRotations(QVector<glm::quat> jointRotations);
    virtual void setJointData(int index, const glm::quat& rotation);
    virtual void clearJointData(int index);
    virtual void setFaceModelURL(const QUrl& faceModelURL);
@@ -155,10 +156,12 @@ public slots:
    bool setModelReferential(int id);
    bool setJointReferential(int id, int jointIndex);

    const RecorderPointer getRecorder() const { return _recorder; }
    bool isRecording() const;
    RecorderPointer startRecording();
    void stopRecording();

    const PlayerPointer getPlayer() const { return _player; }
    bool isPlaying() const;
    PlayerPointer startPlaying();
    void stopPlaying();

@@ -59,9 +59,15 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {

    Model::simulate(deltaTime, fullUpdate);

    if (!(isActive() && _owningAvatar->isMyAvatar())) {
    if (!isActive() || !_owningAvatar->isMyAvatar()) {
        return; // only simulate for own avatar
    }

    MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
    if (myAvatar->isPlaying()) {
        // Don't take inputs if playing back a recording.
        return;
    }

    const FBXGeometry& geometry = _geometry->getFBXGeometry();
    PrioVR* prioVR = Application::getInstance()->getPrioVR();

@@ -698,6 +698,20 @@ QVector<glm::quat> AvatarData::getJointRotations() const {
    return jointRotations;
}

void AvatarData::setJointRotations(QVector<glm::quat> jointRotations) {
    if (QThread::currentThread() != thread()) {
        QVector<glm::quat> result;
        QMetaObject::invokeMethod(const_cast<AvatarData*>(this),
                                  "setJointRotation", Qt::BlockingQueuedConnection,
                                  Q_ARG(QVector<glm::quat>, jointRotations));
    }
    for (int i = 0; i < jointRotations.size(); ++i) {
        if (i < _jointData.size()) {
            setJointData(i, jointRotations[i]);
        }
    }
}

bool AvatarData::hasIdentityChangedAfterParsing(const QByteArray &packet) {
    QDataStream packetStream(packet);
    packetStream.skipRawData(numBytesForPacketHeader(packet));

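AvatarData::setJointRotations above guards against being called from a thread other than the one that owns the object by re-invoking itself through Qt's meta-object system with a blocking queued connection. A generic sketch of that pattern only, not the commit's code (the class and member names here are invented, and a custom argument type must be registered with qRegisterMetaType before it can cross threads):

#include <QMetaObject>
#include <QObject>
#include <QThread>
#include <QVector>

class JointTarget : public QObject {
    Q_OBJECT
public:
    Q_INVOKABLE void setJointAngles(QVector<float> anglesDegrees) {
        if (QThread::currentThread() != thread()) {
            // Hop to the owning thread (which must be running an event loop)
            // and block until the call has run there.
            // Requires: qRegisterMetaType<QVector<float>>("QVector<float>");
            QMetaObject::invokeMethod(this, "setJointAngles", Qt::BlockingQueuedConnection,
                                      Q_ARG(QVector<float>, anglesDegrees));
            return; // without this, the body would also run on the calling thread
        }
        _anglesDegrees = anglesDegrees; // now guaranteed to run on the owning thread
    }

private:
    QVector<float> _anglesDegrees;
};
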
@@ -211,7 +211,8 @@ public:
    Q_INVOKABLE bool isJointDataValid(const QString& name) const;
    Q_INVOKABLE glm::quat getJointRotation(const QString& name) const;

    QVector<glm::quat> getJointRotations() const;
    Q_INVOKABLE virtual QVector<glm::quat> getJointRotations() const;
    Q_INVOKABLE virtual void setJointRotations(QVector<glm::quat> jointRotations);

    /// Returns the index of the joint with the specified name, or -1 if not found/unknown.
    Q_INVOKABLE virtual int getJointIndex(const QString& name) const { return _jointIndices.value(name) - 1; }

@@ -69,6 +69,15 @@ public:
    const glm::vec3& getLookAtPosition() const { return _lookAtPosition; }
    void setLookAtPosition(const glm::vec3& lookAtPosition) { _lookAtPosition = lookAtPosition; }

    float getLeanSideways() const { return _leanSideways; }
    float getLeanForward() const { return _leanForward; }
    virtual float getFinalLeanSideways() const { return _leanSideways; }
    virtual float getFinalLeanForward() const { return _leanForward; }

    void setLeanSideways(float leanSideways) { _leanSideways = leanSideways; }
    void setLeanForward(float leanForward) { _leanForward = leanForward; }

    friend class AvatarData;

protected: