Merge pull request #6360 from samcake/controllers

Recording and playback of the Avatar animation update; not quite finished yet.

Commit aea911ebc2
7 changed files with 74 additions and 221 deletions
@@ -14,7 +14,7 @@
            ]
        },
        { "from": "Standard.RX", "to": "Actions.Yaw" },

        { "from": "Standard.RY",
            "when": "Application.Grounded",
            "to": "Actions.Up",
@@ -1132,14 +1132,6 @@ void MyAvatar::setJointRotations(QVector<glm::quat> jointRotations) {
    }
}

void MyAvatar::setJointTranslations(QVector<glm::vec3> jointTranslations) {
    int numStates = glm::min(_skeletonModel.getJointStateCount(), jointTranslations.size());
    for (int i = 0; i < numStates; ++i) {
        // HACK: ATM only Recorder calls setJointTranslations() so we hardcode its priority here
        _skeletonModel.setJointTranslation(i, true, jointTranslations[i], RECORDER_PRIORITY);
    }
}

void MyAvatar::setJointData(int index, const glm::quat& rotation, const glm::vec3& translation) {
    if (QThread::currentThread() != thread()) {
        QMetaObject::invokeMethod(this, "setJointData", Q_ARG(int, index), Q_ARG(const glm::quat&, rotation),
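MyAvatar::setJointData above bounces the call onto the object's own thread before touching the skeleton. A minimal sketch of that Qt re-dispatch pattern, using an illustrative JointSink class and setJointValue slot (not names from this codebase):

    #include <QMetaObject>
    #include <QObject>
    #include <QThread>

    class JointSink : public QObject {         // illustrative receiver, not an engine class
        Q_OBJECT
    public slots:
        void setJointValue(int index, float value) {
            if (QThread::currentThread() != thread()) {
                // Wrong thread: queue the same call onto the thread that owns this object.
                QMetaObject::invokeMethod(this, "setJointValue",
                                          Q_ARG(int, index), Q_ARG(float, value));
                return;
            }
            // Running on the owning thread now; safe to mutate this object's state.
            Q_UNUSED(index);
            Q_UNUSED(value);
        }
    };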
@@ -192,7 +192,6 @@ public:
    void clearLookAtTargetAvatar();

    virtual void setJointRotations(QVector<glm::quat> jointRotations) override;
    virtual void setJointTranslations(QVector<glm::vec3> jointTranslations) override;
    virtual void setJointData(int index, const glm::quat& rotation, const glm::vec3& translation) override;
    virtual void setJointRotation(int index, const glm::quat& rotation) override;
    virtual void setJointTranslation(int index, const glm::vec3& translation) override;
@@ -246,24 +246,38 @@ void Player::play() {
                              nextFrame.getScale(),
                              _frameInterpolationFactor);
    _avatar->setTargetScale(context->scale * scale);

    QVector<glm::quat> jointRotations(currentFrame.getJointRotations().size());
    for (int i = 0; i < currentFrame.getJointRotations().size(); ++i) {
        jointRotations[i] = safeMix(currentFrame.getJointRotations()[i],
                                    nextFrame.getJointRotations()[i],
                                    _frameInterpolationFactor);

    // Joint array playback
    // FIXME: This is still using a deprecated path to assign the joint orientation since setting the full RawJointData array doesn't
    // work for Avatar. We need to fix this, working with the animation team.
    const auto& prevJointArray = currentFrame.getJointArray();
    const auto& nextJointArray = nextFrame.getJointArray();
    QVector<JointData> jointArray(prevJointArray.size());
    QVector<glm::quat> jointRotations(prevJointArray.size()); // FIXME: remove once the setRawJointData is fixed
    QVector<glm::vec3> jointTranslations(prevJointArray.size()); // FIXME: remove once the setRawJointData is fixed

    for (int i = 0; i < jointArray.size(); i++) {
        const auto& prevJoint = prevJointArray[i];
        const auto& nextJoint = nextJointArray[i];
        auto& joint = jointArray[i];

        // Rotation
        joint.rotationSet = prevJoint.rotationSet || nextJoint.rotationSet;
        if (joint.rotationSet) {
            joint.rotation = safeMix(prevJoint.rotation, nextJoint.rotation, _frameInterpolationFactor);
            jointRotations[i] = joint.rotation; // FIXME: remove once the setRawJointData is fixed
        }

        joint.translationSet = prevJoint.translationSet || nextJoint.translationSet;
        if (joint.translationSet) {
            joint.translation = glm::mix(prevJoint.translation, nextJoint.translation, _frameInterpolationFactor);
            jointTranslations[i] = joint.translation; // FIXME: remove once the setRawJointData is fixed
        }
    }

    QVector<glm::vec3> jointTranslations(currentFrame.getJointTranslations().size());
    for (int i = 0; i < currentFrame.getJointTranslations().size(); ++i) {
        jointTranslations[i] =
            currentFrame.getJointTranslations()[i] * (1.0f - _frameInterpolationFactor) +
            nextFrame.getJointTranslations()[i] * _frameInterpolationFactor;
    }

    _avatar->setJointRotations(jointRotations);
    _avatar->setJointTranslations(jointTranslations);
    // _avatar->setRawJointData(jointArray); // FIXME: Enable once the setRawJointData is fixed
    _avatar->setJointRotations(jointRotations); // FIXME: remove once the setRawJointData is fixed
    // _avatar->setJointTranslations(jointTranslations); // FIXME: remove once the setRawJointData is fixed

    HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
    if (head) {
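For reference, the playback path above blends each joint between the previous and next recorded frames. Below is a minimal, self-contained sketch of that per-joint blend, assuming a simplified JointSample struct and GLM; safeMixQuat is only a stand-in for the engine's shortest-path quaternion mix:

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    struct JointSample {                       // simplified stand-in for JointData
        glm::quat rotation;
        glm::vec3 translation;
        bool rotationSet = false;
        bool translationSet = false;
    };

    // Stand-in for the engine's safeMix: shortest-path spherical interpolation.
    static glm::quat safeMixQuat(const glm::quat& a, const glm::quat& b, float alpha) {
        glm::quat target = (glm::dot(a, b) < 0.0f) ? -b : b;
        return glm::normalize(glm::slerp(a, target, alpha));
    }

    // Blend one joint between the previous and next frame, keeping the "set" flags.
    static JointSample blendJoint(const JointSample& prev, const JointSample& next, float alpha) {
        JointSample out;
        out.rotationSet = prev.rotationSet || next.rotationSet;
        if (out.rotationSet) {
            out.rotation = safeMixQuat(prev.rotation, next.rotation, alpha);
        }
        out.translationSet = prev.translationSet || next.translationSet;
        if (out.translationSet) {
            out.translation = glm::mix(prev.translation, next.translation, alpha);
        }
        return out;
    }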
@@ -423,3 +437,4 @@ bool Player::computeCurrentFrame() {
    }
    return true;
}
@@ -100,12 +100,15 @@ void Recorder::record() {
    const RecordingContext& context = _recording->getContext();
    RecordingFrame frame;
    frame.setBlendshapeCoefficients(_avatar->getHeadData()->getBlendshapeCoefficients());
    frame.setJointRotations(_avatar->getJointRotations());

    // Capture the full skeleton joint data
    auto& jointData = _avatar->getRawJointData();
    frame.setJointArray(jointData);

    frame.setTranslation(context.orientationInv * (_avatar->getPosition() - context.position));
    frame.setRotation(context.orientationInv * _avatar->getOrientation());
    frame.setScale(_avatar->getTargetScale() / context.scale);

    const HeadData* head = _avatar->getHeadData();
    if (head) {
        glm::vec3 rotationDegrees = glm::vec3(head->getFinalPitch(),
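For reference, the recorder stores the avatar transform relative to the recording context: position and orientation are expressed in the context's local frame, and scale as a ratio. A minimal sketch of that relative capture, using illustrative struct and function names rather than the engine's types:

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    struct CaptureContext {                    // illustrative stand-in for RecordingContext
        glm::vec3 position;
        glm::quat orientation;
        glm::quat orientationInv;              // cached inverse of 'orientation'
        float scale = 1.0f;
    };

    struct CapturedFrame {                     // illustrative stand-in for RecordingFrame
        glm::vec3 translation;
        glm::quat rotation;
        float scale = 1.0f;
    };

    // Express the avatar's world transform in the context's local frame,
    // mirroring the translation/rotation/scale lines in the hunk above.
    CapturedFrame captureRelative(const CaptureContext& ctx, const glm::vec3& avatarPosition,
                                  const glm::quat& avatarOrientation, float avatarScale) {
        CapturedFrame frame;
        frame.translation = ctx.orientationInv * (avatarPosition - ctx.position);
        frame.rotation = ctx.orientationInv * avatarOrientation;
        frame.scale = avatarScale / ctx.scale;
        return frame;
    }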
@@ -123,7 +126,7 @@ void Recorder::record() {
    if (wantDebug) {
        qCDebug(avatars) << "Recording frame #" << _recording->getFrameNumber();
        qCDebug(avatars) << "Blendshapes:" << frame.getBlendshapeCoefficients().size();
        qCDebug(avatars) << "JointRotations:" << frame.getJointRotations().size();
        qCDebug(avatars) << "JointArray:" << frame.getJointArray().size();
        qCDebug(avatars) << "Translation:" << frame.getTranslation();
        qCDebug(avatars) << "Rotation:" << frame.getRotation();
        qCDebug(avatars) << "Scale:" << frame.getScale();
@@ -229,22 +229,27 @@ void writeRecordingToFile(RecordingPointer recording, const QString& filename) {
            ++maskIndex;
        }

        // Joint Rotations
        const auto& jointArray = frame.getJointArray();
        if (i == 0) {
            numJoints = frame.getJointRotations().size();
            numJoints = jointArray.size();
            stream << numJoints;
            mask.resize(mask.size() + numJoints);
            // 2 fields per joint
            mask.resize(mask.size() + numJoints * 2);
        }
        for (quint32 j = 0; j < numJoints; ++j) {
            if (i == 0 ||
                frame._jointRotations[j] != previousFrame._jointRotations[j]) {
                writeQuat(stream, frame._jointRotations[j]);
        // TODO -- handle translations
        for (quint32 j = 0; j < numJoints; j++) {
            const auto& joint = jointArray[j];
            if (true) { //(joint.rotationSet) {
                writeQuat(stream, joint.rotation);
                mask.setBit(maskIndex);
            }
            maskIndex++;
            if (joint.translationSet) {
                writeVec3(stream, joint.translation);
                mask.setBit(maskIndex);
            }
            maskIndex++;
        }

        // Translation
        if (i == 0) {
            mask.resize(mask.size() + 1);
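The writer above reserves two mask bits per joint (rotation, then translation) and only streams fields that are actually set; the reader consumes the bits in the same order. A minimal sketch of that delta-encoding pattern with Qt types, assuming a simplified JointSample struct and hypothetical writeQuat/readQuat/writeVec3/readVec3 helpers that just stream the components:

    #include <QBitArray>
    #include <QByteArray>
    #include <QDataStream>
    #include <QIODevice>
    #include <QVector>
    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    struct JointSample {                       // simplified stand-in for JointData
        glm::quat rotation;
        glm::vec3 translation;
        bool rotationSet = false;
        bool translationSet = false;
    };

    // Hypothetical helpers: stream the raw components.
    static void writeQuat(QDataStream& s, const glm::quat& q) { s << q.x << q.y << q.z << q.w; }
    static void writeVec3(QDataStream& s, const glm::vec3& v) { s << v.x << v.y << v.z; }
    static bool readQuat(QDataStream& s, glm::quat& q) {
        s >> q.x >> q.y >> q.z >> q.w;
        return s.status() == QDataStream::Ok;
    }
    static bool readVec3(QDataStream& s, glm::vec3& v) {
        s >> v.x >> v.y >> v.z;
        return s.status() == QDataStream::Ok;
    }

    // Write one frame's joints; each joint owns two mask bits (rotation, translation).
    static QByteArray writeJointFrame(const QVector<JointSample>& joints, QBitArray& mask) {
        QByteArray buffer;
        QDataStream stream(&buffer, QIODevice::WriteOnly);
        mask.resize(joints.size() * 2);        // 2 fields per joint
        int maskIndex = 0;
        for (const auto& joint : joints) {
            if (joint.rotationSet) {
                writeQuat(stream, joint.rotation);
                mask.setBit(maskIndex);
            }
            ++maskIndex;
            if (joint.translationSet) {
                writeVec3(stream, joint.translation);
                mask.setBit(maskIndex);
            }
            ++maskIndex;
        }
        return buffer;
    }

    // Read it back: a field is pulled from the stream only when its mask bit is set.
    static void readJointFrame(const QByteArray& buffer, const QBitArray& mask,
                               QVector<JointSample>& joints) {
        QDataStream stream(buffer);
        int maskIndex = 0;
        for (auto& joint : joints) {
            joint.rotationSet = mask.testBit(maskIndex++) && readQuat(stream, joint.rotation);
            joint.translationSet = mask.testBit(maskIndex++) && readVec3(stream, joint.translation);
        }
    }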
@@ -408,11 +413,7 @@ RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString
        file.close();
    }

    if (filename.endsWith(".rec") || filename.endsWith(".REC")) {
        qCDebug(avatars) << "Old .rec format";
        readRecordingFromRecFile(recording, filename, byteArray);
        return recording;
    } else if (!filename.endsWith(".hfr") && !filename.endsWith(".HFR")) {
    if (!filename.endsWith(".hfr") && !filename.endsWith(".HFR")) {
        qCDebug(avatars) << "File extension not recognized";
    }
@@ -552,19 +553,28 @@ RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString
            stream >> frame._blendshapeCoefficients[j];
        }
    }
    // Joint Rotations
    // Joint Array
    if (i == 0) {
        stream >> numJoints;
    }
    frame._jointRotations.resize(numJoints);

    frame._jointArray.resize(numJoints);
    for (quint32 j = 0; j < numJoints; ++j) {
        if (!mask[maskIndex++] || !readQuat(stream, frame._jointRotations[j])) {
            frame._jointRotations[j] = previousFrame._jointRotations[j];
        auto& joint = frame._jointArray[j];

        if (mask[maskIndex++] && readQuat(stream, joint.rotation)) {
            joint.rotationSet = true;
        } else {
            joint.rotationSet = false;
        }

        if (mask[maskIndex++] && readVec3(stream, joint.translation)) {
            joint.translationSet = true;
        } else {
            joint.translationSet = false;
        }
    }

    // TODO -- handle translations

    if (!mask[maskIndex++] || !readVec3(stream, frame._translation)) {
        frame._translation = previousFrame._translation;
    }
@@ -649,167 +659,3 @@ RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString
    return recording;
}

RecordingPointer readRecordingFromRecFile(RecordingPointer recording, const QString& filename, const QByteArray& byteArray) {
    QElapsedTimer timer;
    timer.start();

    if (!recording) {
        recording = QSharedPointer<Recording>::create();
    }

    QDataStream fileStream(byteArray);

    fileStream >> recording->_timestamps;
    RecordingFrame baseFrame;

    // Blendshape coefficients
    fileStream >> baseFrame._blendshapeCoefficients;

    // Joint Rotations
    int jointRotationSize;
    fileStream >> jointRotationSize;
    baseFrame._jointRotations.resize(jointRotationSize);
    for (int i = 0; i < jointRotationSize; ++i) {
        fileStream >> baseFrame._jointRotations[i].x >> baseFrame._jointRotations[i].y >> baseFrame._jointRotations[i].z >> baseFrame._jointRotations[i].w;
    }

    // TODO -- handle translations

    fileStream >> baseFrame._translation.x >> baseFrame._translation.y >> baseFrame._translation.z;
    fileStream >> baseFrame._rotation.x >> baseFrame._rotation.y >> baseFrame._rotation.z >> baseFrame._rotation.w;
    fileStream >> baseFrame._scale;
    fileStream >> baseFrame._headRotation.x >> baseFrame._headRotation.y >> baseFrame._headRotation.z >> baseFrame._headRotation.w;
    fileStream >> baseFrame._leanSideways;
    fileStream >> baseFrame._leanForward;

    // Fake context
    RecordingContext& context = recording->getContext();
    context.globalTimestamp = usecTimestampNow();
    context.domain = DependencyManager::get<NodeList>()->getDomainHandler().getHostname();
    context.position = glm::vec3(144.5f, 3.3f, 181.3f);
    context.orientation = glm::angleAxis(glm::radians(-92.5f), glm::vec3(0, 1, 0));
    context.scale = baseFrame._scale;
    context.headModel = "http://public.highfidelity.io/models/heads/Emily_v4.fst";
    context.skeletonModel = "http://public.highfidelity.io/models/skeletons/EmilyCutMesh_A.fst";
    context.displayName = "Leslie";
    context.attachments.clear();
    AttachmentData data;
    data.modelURL = "http://public.highfidelity.io/models/attachments/fbx.fst";
    data.jointName = "RightHand";
    data.translation = glm::vec3(0.04f, 0.07f, 0.0f);
    data.rotation = glm::angleAxis(glm::radians(102.0f), glm::vec3(0, 1, 0));
    data.scale = 0.20f;
    context.attachments << data;

    context.orientationInv = glm::inverse(context.orientation);

    baseFrame._translation = glm::vec3();
    baseFrame._rotation = glm::quat();
    baseFrame._scale = 1.0f;

    recording->_frames << baseFrame;

    for (int i = 1; i < recording->_timestamps.size(); ++i) {
        QBitArray mask;
        QByteArray buffer;
        QDataStream stream(&buffer, QIODevice::ReadOnly);
        RecordingFrame frame;
        RecordingFrame& previousFrame = recording->_frames.last();

        fileStream >> mask;
        fileStream >> buffer;
        int maskIndex = 0;

        // Blendshape Coefficients
        frame._blendshapeCoefficients.resize(baseFrame._blendshapeCoefficients.size());
        for (int i = 0; i < baseFrame._blendshapeCoefficients.size(); ++i) {
            if (mask[maskIndex++]) {
                stream >> frame._blendshapeCoefficients[i];
            } else {
                frame._blendshapeCoefficients[i] = previousFrame._blendshapeCoefficients[i];
            }
        }

        // Joint Rotations
        frame._jointRotations.resize(baseFrame._jointRotations.size());
        for (int i = 0; i < baseFrame._jointRotations.size(); ++i) {
            if (mask[maskIndex++]) {
                stream >> frame._jointRotations[i].x >> frame._jointRotations[i].y >> frame._jointRotations[i].z >> frame._jointRotations[i].w;
            } else {
                frame._jointRotations[i] = previousFrame._jointRotations[i];
            }
        }

        // TODO -- handle translations

        if (mask[maskIndex++]) {
            stream >> frame._translation.x >> frame._translation.y >> frame._translation.z;
            frame._translation = context.orientationInv * frame._translation;
        } else {
            frame._translation = previousFrame._translation;
        }

        if (mask[maskIndex++]) {
            stream >> frame._rotation.x >> frame._rotation.y >> frame._rotation.z >> frame._rotation.w;
        } else {
            frame._rotation = previousFrame._rotation;
        }

        if (mask[maskIndex++]) {
            stream >> frame._scale;
        } else {
            frame._scale = previousFrame._scale;
        }

        if (mask[maskIndex++]) {
            stream >> frame._headRotation.x >> frame._headRotation.y >> frame._headRotation.z >> frame._headRotation.w;
        } else {
            frame._headRotation = previousFrame._headRotation;
        }

        if (mask[maskIndex++]) {
            stream >> frame._leanSideways;
        } else {
            frame._leanSideways = previousFrame._leanSideways;
        }

        if (mask[maskIndex++]) {
            stream >> frame._leanForward;
        } else {
            frame._leanForward = previousFrame._leanForward;
        }

        recording->_frames << frame;
    }

    QByteArray audioArray;
    fileStream >> audioArray;

    // Cut down audio if necessary
    int SAMPLE_SIZE = 2; // 16 bits
    int MSEC_PER_SEC = 1000;
    int audioLength = recording->getLength() * SAMPLE_SIZE * (AudioConstants::SAMPLE_RATE / MSEC_PER_SEC);
    audioArray.chop(audioArray.size() - audioLength);

    recording->addAudioPacket(audioArray);

    qCDebug(avatars) << "Read " << byteArray.size() << " bytes in " << timer.elapsed() << " ms.";

    // Set new filename
    QString newFilename = filename;
    if (newFilename.startsWith("http") || newFilename.startsWith("https") || newFilename.startsWith("ftp")) {
        newFilename = QUrl(newFilename).fileName();
    }
    if (newFilename.endsWith(".rec") || newFilename.endsWith(".REC")) {
        newFilename.chop(qstrlen(".rec"));
    }
    newFilename.append(".hfr");
    newFilename = QFileInfo(newFilename).absoluteFilePath();

    // Set recording to new format
    writeRecordingToFile(recording, newFilename);
    qCDebug(avatars) << "Recording has been successfully converted at" << newFilename;
    return recording;
}
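One detail of the converter above: the audio buffer is trimmed to match the recording length using integer arithmetic. A small sketch of that computation, assuming the length is in milliseconds and a mono 16-bit stream (the 24000 Hz rate in the comment is only illustrative):

    // Bytes of mono 16-bit audio needed to cover lengthMs milliseconds of recording.
    int audioBytesForLength(int lengthMs, int sampleRateHz) {
        const int SAMPLE_SIZE = 2;             // 16-bit samples
        const int MSEC_PER_SEC = 1000;
        return lengthMs * SAMPLE_SIZE * (sampleRateHz / MSEC_PER_SEC);
    }
    // e.g. 30000 ms at an illustrative 24000 Hz: 30000 * 2 * 24 = 1440000 bytes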
@@ -25,6 +25,7 @@ class AttachmentData;
class Recording;
class RecordingFrame;
class Sound;
class JointData;

typedef QSharedPointer<Recording> RecordingPointer;
@@ -82,8 +83,7 @@ private:
class RecordingFrame {
public:
    QVector<float> getBlendshapeCoefficients() const { return _blendshapeCoefficients; }
    QVector<glm::quat> getJointRotations() const { return _jointRotations; }
    QVector<glm::vec3> getJointTranslations() const { return _jointTranslations; }
    QVector<JointData> getJointArray() const { return _jointArray; }
    glm::vec3 getTranslation() const { return _translation; }
    glm::quat getRotation() const { return _rotation; }
    float getScale() const { return _scale; }
@@ -94,8 +94,7 @@ public:

protected:
    void setBlendshapeCoefficients(QVector<float> blendshapeCoefficients);
    void setJointRotations(QVector<glm::quat> jointRotations) { _jointRotations = jointRotations; }
    void setJointTranslations(QVector<glm::vec3> jointTranslations) { _jointTranslations = jointTranslations; }
    void setJointArray(const QVector<JointData>& jointArray) { _jointArray = jointArray; }
    void setTranslation(const glm::vec3& translation) { _translation = translation; }
    void setRotation(const glm::quat& rotation) { _rotation = rotation; }
    void setScale(float scale) { _scale = scale; }
@@ -106,8 +105,8 @@ protected:

private:
    QVector<float> _blendshapeCoefficients;
    QVector<glm::quat> _jointRotations;
    QVector<glm::vec3> _jointTranslations;
    QVector<JointData> _jointArray;

    glm::vec3 _translation;
    glm::quat _rotation;
    float _scale;
@@ -125,6 +124,5 @@ private:

void writeRecordingToFile(RecordingPointer recording, const QString& filename);
RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString& filename);
RecordingPointer readRecordingFromRecFile(RecordingPointer recording, const QString& filename, const QByteArray& byteArray);

#endif // hifi_Recording_h