Cleaning the code to remove the old .rec format and the use of separate joint Rotations/Translations arrays

Sam Gateau 2015-11-09 23:12:04 -08:00
parent 77e21b7190
commit b47c5dbff2
4 changed files with 14 additions and 248 deletions

View file

@@ -247,11 +247,14 @@ void Player::play() {
_frameInterpolationFactor);
_avatar->setTargetScale(context->scale * scale);
// Joint array playback
// FIXME: This is still using a deprecated path to assign the joint orientation, since setting the full RawJointData array doesn't
// work for Avatar. We need to fix this by working with the animation team
const auto& prevJointArray = currentFrame.getJointArray();
const auto& nextJointArray = nextFrame.getJointArray();
QVector<JointData> jointArray(prevJointArray.size());
QVector<glm::quat> jointRotations(prevJointArray.size());
QVector<glm::vec3> jointTranslations(prevJointArray.size());
QVector<glm::quat> jointRotations(prevJointArray.size()); // FIXME: remove once the setRawJointData is fixed
QVector<glm::vec3> jointTranslations(prevJointArray.size()); // FIXME: remove once the setRawJointData is fixed
for (int i = 0; i < jointArray.size(); i++) {
const auto& prevJoint = prevJointArray[i];
@@ -262,35 +265,19 @@ void Player::play() {
joint.rotationSet = prevJoint.rotationSet || nextJoint.rotationSet;
if (joint.rotationSet) {
joint.rotation = safeMix(prevJoint.rotation, nextJoint.rotation, _frameInterpolationFactor);
jointRotations[i] = joint.rotation;
jointRotations[i] = joint.rotation; // FIXME: remove once the setRawJointData is fixed
}
joint.translationSet = prevJoint.translationSet || nextJoint.translationSet;
if (joint.translationSet) {
joint.translation = glm::mix(prevJoint.translation, nextJoint.translation, _frameInterpolationFactor);
jointTranslations[i] = joint.translation;
jointTranslations[i] = joint.translation; // FIXME: remove once the setRawJointData is fixed
}
}
// _avatar->setRawJointData(jointArray);
_avatar->setJointRotations(jointRotations);
// _avatar->setJointTranslations(jointTranslations);
/* QVector<glm::quat> jointRotations(currentFrame.getJointRotations().size());
for (int i = 0; i < currentFrame.getJointRotations().size(); ++i) {
jointRotations[i] = safeMix(currentFrame.getJointRotations()[i],
nextFrame.getJointRotations()[i],
_frameInterpolationFactor);
}
*/
/* QVector<glm::vec3> jointTranslations(currentFrame.getJointTranslations().size());
for (int i = 0; i < currentFrame.getJointTranslations().size(); ++i) {
jointTranslations[i] = glm::mix(currentFrame.getJointTranslations()[i],
nextFrame.getJointTranslations()[i],
_frameInterpolationFactor);
}
*/
// _avatar->setJointRotations(jointRotations);
// _avatar->setJointTranslations(jointTranslations);
// _avatar->setRawJointData(jointArray); // FIXME: Enable once the setRawJointData is fixed
_avatar->setJointRotations(jointRotations); // FIXME: remove once the setRawJointData is fixed
_avatar->setJointTranslations(jointTranslations); // FIXME: remove once the setRawJointData is fixed
HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
if (head) {

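For context on the hunks above: playback now interpolates a single QVector<JointData> between the previous and next recorded frames (safeMix for rotations, glm::mix for translations) instead of filling parallel rotation and translation vectors. The following is a minimal, hypothetical sketch of that blending step, not part of the commit; JointData is reduced to the four fields the diff touches, and safeMix is approximated with glm::slerp since the engine's helper is outside this diff.

#include <QVector>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Reduced stand-in for the engine's JointData: only the fields used in the hunk above.
struct JointData {
    glm::quat rotation;
    bool rotationSet { false };
    glm::vec3 translation;
    bool translationSet { false };
};

// Approximation of the engine's safeMix(): shortest-path quaternion interpolation.
static glm::quat safeMix(const glm::quat& a, const glm::quat& b, float t) {
    return glm::slerp(a, glm::dot(a, b) < 0.0f ? -b : b, t);
}

// Blend the previous and next recorded joint arrays into one array that could be
// handed to setRawJointData() once that path works for Avatar.
QVector<JointData> blendJointArrays(const QVector<JointData>& prev,
                                    const QVector<JointData>& next,
                                    float factor) {
    QVector<JointData> result(prev.size());
    for (int i = 0; i < result.size(); i++) {
        const auto& p = prev[i];
        const auto& n = next[i];
        auto& joint = result[i];
        joint.rotationSet = p.rotationSet || n.rotationSet;
        if (joint.rotationSet) {
            joint.rotation = safeMix(p.rotation, n.rotation, factor);
        }
        joint.translationSet = p.translationSet || n.translationSet;
        if (joint.translationSet) {
            joint.translation = glm::mix(p.translation, n.translation, factor);
        }
    }
    return result;
}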
View file

@@ -100,13 +100,10 @@ void Recorder::record() {
const RecordingContext& context = _recording->getContext();
RecordingFrame frame;
frame.setBlendshapeCoefficients(_avatar->getHeadData()->getBlendshapeCoefficients());
// FIXME: here we need to make sure the correct joint data on the AvatarData to get correct play back.
// This should be fixed by a fix coming from Howard soon
auto& jointData = _avatar->getRawJointData();
// Capture the full skeleton joint data
auto& jointData = _avatar->getRawJointData();
frame.setJointArray(jointData);
// frame.setJointRotations(_avatar->::AvatarData::getJointRotations());
// frame.setJointTranslations(_avatar->::AvatarData::getJointTranslations());
frame.setTranslation(context.orientationInv * (_avatar->getPosition() - context.position));
frame.setRotation(context.orientationInv * _avatar->getOrientation());
@@ -129,8 +126,7 @@ void Recorder::record() {
if (wantDebug) {
qCDebug(avatars) << "Recording frame #" << _recording->getFrameNumber();
qCDebug(avatars) << "Blendshapes:" << frame.getBlendshapeCoefficients().size();
qCDebug(avatars) << "JointRotations:" << frame.getJointRotations().size();
qCDebug(avatars) << "JointRotations:" << frame.getJointTranslations().size();
qCDebug(avatars) << "JointArray:" << frame.getJointArray().size();
qCDebug(avatars) << "Translation:" << frame.getTranslation();
qCDebug(avatars) << "Rotation:" << frame.getRotation();
qCDebug(avatars) << "Scale:" << frame.getScale();

View file

@@ -250,31 +250,6 @@ void writeRecordingToFile(RecordingPointer recording, const QString& filename) {
maskIndex++;
}
/* // Joint Rotations
if (i == 0) {
numJoints = frame.getJointRotations().size();
stream << numJoints;
mask.resize(mask.size() + numJoints);
}
for (quint32 j = 0; j < numJoints; ++j) {
if (i == 0 ||
frame._jointRotations[j] != previousFrame._jointRotations[j]) {
writeQuat(stream, frame._jointRotations[j]);
mask.setBit(maskIndex);
}
maskIndex++;
}
// Joint Translations
for (quint32 j = 0; j < numJoints; ++j) {
if (i == 0 ||
frame._jointTranslations[j] != previousFrame._jointTranslations[j]) {
writeVec3(stream, frame._jointTranslations[j]);
mask.setBit(maskIndex);
}
maskIndex++;
} */
// Translation
if (i == 0) {
mask.resize(mask.size() + 1);
@@ -438,11 +413,7 @@ RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString
file.close();
}
if (filename.endsWith(".rec") || filename.endsWith(".REC")) {
qCDebug(avatars) << "Old .rec format";
readRecordingFromRecFile(recording, filename, byteArray);
return recording;
} else if (!filename.endsWith(".hfr") && !filename.endsWith(".HFR")) {
if (!filename.endsWith(".hfr") && !filename.endsWith(".HFR")) {
qCDebug(avatars) << "File extension not recognized";
}
@@ -604,23 +575,6 @@ RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString
}
}
/*
frame._jointRotations.resize(numJoints);
for (quint32 j = 0; j < numJoints; ++j) {
if (!mask[maskIndex++] || !readQuat(stream, frame._jointRotations[j])) {
frame._jointRotations[j] = previousFrame._jointRotations[j];
}
}
// Joint Translations
/*frame._jointTranslations.resize(numJoints);
for (quint32 j = 0; j < numJoints; ++j) {
if (!mask[maskIndex++] || !readVec3(stream, frame._jointTranslations[j])) {
frame._jointTranslations[j] = previousFrame._jointTranslations[j];
}
}
*/
if (!mask[maskIndex++] || !readVec3(stream, frame._translation)) {
frame._translation = previousFrame._translation;
}
@@ -705,167 +659,3 @@ RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString
return recording;
}
RecordingPointer readRecordingFromRecFile(RecordingPointer recording, const QString& filename, const QByteArray& byteArray) {
QElapsedTimer timer;
timer.start();
if (!recording) {
recording = QSharedPointer<Recording>::create();
}
QDataStream fileStream(byteArray);
fileStream >> recording->_timestamps;
RecordingFrame baseFrame;
// Blendshape coefficients
fileStream >> baseFrame._blendshapeCoefficients;
// Joint Rotations
int jointRotationSize;
fileStream >> jointRotationSize;
baseFrame._jointRotations.resize(jointRotationSize);
for (int i = 0; i < jointRotationSize; ++i) {
fileStream >> baseFrame._jointRotations[i].x >> baseFrame._jointRotations[i].y >> baseFrame._jointRotations[i].z >> baseFrame._jointRotations[i].w;
}
// TODO -- handle translations
fileStream >> baseFrame._translation.x >> baseFrame._translation.y >> baseFrame._translation.z;
fileStream >> baseFrame._rotation.x >> baseFrame._rotation.y >> baseFrame._rotation.z >> baseFrame._rotation.w;
fileStream >> baseFrame._scale;
fileStream >> baseFrame._headRotation.x >> baseFrame._headRotation.y >> baseFrame._headRotation.z >> baseFrame._headRotation.w;
fileStream >> baseFrame._leanSideways;
fileStream >> baseFrame._leanForward;
// Fake context
RecordingContext& context = recording->getContext();
context.globalTimestamp = usecTimestampNow();
context.domain = DependencyManager::get<NodeList>()->getDomainHandler().getHostname();
context.position = glm::vec3(144.5f, 3.3f, 181.3f);
context.orientation = glm::angleAxis(glm::radians(-92.5f), glm::vec3(0, 1, 0));;
context.scale = baseFrame._scale;
context.headModel = "http://public.highfidelity.io/models/heads/Emily_v4.fst";
context.skeletonModel = "http://public.highfidelity.io/models/skeletons/EmilyCutMesh_A.fst";
context.displayName = "Leslie";
context.attachments.clear();
AttachmentData data;
data.modelURL = "http://public.highfidelity.io/models/attachments/fbx.fst";
data.jointName = "RightHand" ;
data.translation = glm::vec3(0.04f, 0.07f, 0.0f);
data.rotation = glm::angleAxis(glm::radians(102.0f), glm::vec3(0, 1, 0));
data.scale = 0.20f;
context.attachments << data;
context.orientationInv = glm::inverse(context.orientation);
baseFrame._translation = glm::vec3();
baseFrame._rotation = glm::quat();
baseFrame._scale = 1.0f;
recording->_frames << baseFrame;
for (int i = 1; i < recording->_timestamps.size(); ++i) {
QBitArray mask;
QByteArray buffer;
QDataStream stream(&buffer, QIODevice::ReadOnly);
RecordingFrame frame;
RecordingFrame& previousFrame = recording->_frames.last();
fileStream >> mask;
fileStream >> buffer;
int maskIndex = 0;
// Blendshape Coefficients
frame._blendshapeCoefficients.resize(baseFrame._blendshapeCoefficients.size());
for (int i = 0; i < baseFrame._blendshapeCoefficients.size(); ++i) {
if (mask[maskIndex++]) {
stream >> frame._blendshapeCoefficients[i];
} else {
frame._blendshapeCoefficients[i] = previousFrame._blendshapeCoefficients[i];
}
}
// Joint Rotations
frame._jointRotations.resize(baseFrame._jointRotations.size());
for (int i = 0; i < baseFrame._jointRotations.size(); ++i) {
if (mask[maskIndex++]) {
stream >> frame._jointRotations[i].x >> frame._jointRotations[i].y >> frame._jointRotations[i].z >> frame._jointRotations[i].w;
} else {
frame._jointRotations[i] = previousFrame._jointRotations[i];
}
}
// TODO -- handle translations
if (mask[maskIndex++]) {
stream >> frame._translation.x >> frame._translation.y >> frame._translation.z;
frame._translation = context.orientationInv * frame._translation;
} else {
frame._translation = previousFrame._translation;
}
if (mask[maskIndex++]) {
stream >> frame._rotation.x >> frame._rotation.y >> frame._rotation.z >> frame._rotation.w;
} else {
frame._rotation = previousFrame._rotation;
}
if (mask[maskIndex++]) {
stream >> frame._scale;
} else {
frame._scale = previousFrame._scale;
}
if (mask[maskIndex++]) {
stream >> frame._headRotation.x >> frame._headRotation.y >> frame._headRotation.z >> frame._headRotation.w;
} else {
frame._headRotation = previousFrame._headRotation;
}
if (mask[maskIndex++]) {
stream >> frame._leanSideways;
} else {
frame._leanSideways = previousFrame._leanSideways;
}
if (mask[maskIndex++]) {
stream >> frame._leanForward;
} else {
frame._leanForward = previousFrame._leanForward;
}
recording->_frames << frame;
}
QByteArray audioArray;
fileStream >> audioArray;
// Cut down audio if necessary
int SAMPLE_SIZE = 2; // 16 bits
int MSEC_PER_SEC = 1000;
int audioLength = recording->getLength() * SAMPLE_SIZE * (AudioConstants::SAMPLE_RATE / MSEC_PER_SEC);
audioArray.chop(audioArray.size() - audioLength);
recording->addAudioPacket(audioArray);
qCDebug(avatars) << "Read " << byteArray.size() << " bytes in " << timer.elapsed() << " ms.";
// Set new filename
QString newFilename = filename;
if (newFilename.startsWith("http") || newFilename.startsWith("https") || newFilename.startsWith("ftp")) {
newFilename = QUrl(newFilename).fileName();
}
if (newFilename.endsWith(".rec") || newFilename.endsWith(".REC")) {
newFilename.chop(qstrlen(".rec"));
}
newFilename.append(".hfr");
newFilename = QFileInfo(newFilename).absoluteFilePath();
// Set recording to new format
writeRecordingToFile(recording, newFilename);
qCDebug(avatars) << "Recording has been successfully converted at" << newFilename;
return recording;
}
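For reference, the delta encoding used throughout writeRecordingToFile() and readRecordingFromFile() above (including the removed joint rotation/translation blocks) follows one pattern: a field is written only when it differs from the previous frame, and a QBitArray mask records which fields are present. Below is a minimal, hypothetical sketch of that pattern for a single float field; the function names are made up and only Qt is assumed.

#include <QBitArray>
#include <QDataStream>

// Write one float with delta encoding: emit it only if it changed since the previous
// frame (or if this is the first frame), and flag its presence in the mask.
// The caller is expected to resize the mask up front, as writeRecordingToFile() does.
void writeDeltaFloat(QDataStream& stream, QBitArray& mask, int& maskIndex,
                     bool firstFrame, float value, float previousValue) {
    if (firstFrame || value != previousValue) {
        stream << value;        // value is present in the payload
        mask.setBit(maskIndex); // mark it as present
    }
    maskIndex++;
}

// Read the same field back: if the mask bit is set the payload holds a new value,
// otherwise carry the previous frame's value forward.
float readDeltaFloat(QDataStream& stream, const QBitArray& mask, int& maskIndex,
                     float previousValue) {
    float value = previousValue;
    if (mask[maskIndex++]) {
        stream >> value;
    }
    return value;
}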

View file

@@ -84,8 +84,6 @@ class RecordingFrame {
public:
QVector<float> getBlendshapeCoefficients() const { return _blendshapeCoefficients; }
QVector<JointData> getJointArray() const { return _jointArray; }
QVector<glm::quat> getJointRotations() const { return _jointRotations; }
QVector<glm::vec3> getJointTranslations() const { return _jointTranslations; }
glm::vec3 getTranslation() const { return _translation; }
glm::quat getRotation() const { return _rotation; }
float getScale() const { return _scale; }
@@ -97,8 +95,6 @@ public:
protected:
void setBlendshapeCoefficients(QVector<float> blendshapeCoefficients);
void setJointArray(const QVector<JointData>& jointArray) { _jointArray = jointArray; }
void setJointRotations(QVector<glm::quat> jointRotations) { _jointRotations = jointRotations; }
void setJointTranslations(QVector<glm::vec3> jointTranslations) { _jointTranslations = jointTranslations; }
void setTranslation(const glm::vec3& translation) { _translation = translation; }
void setRotation(const glm::quat& rotation) { _rotation = rotation; }
void setScale(float scale) { _scale = scale; }
@@ -109,8 +105,6 @@ protected:
private:
QVector<float> _blendshapeCoefficients;
QVector<glm::quat> _jointRotations;
QVector<glm::vec3> _jointTranslations;
QVector<JointData> _jointArray;
glm::vec3 _translation;
@@ -130,6 +124,5 @@ private:
void writeRecordingToFile(RecordingPointer recording, const QString& filename);
RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString& filename);
RecordingPointer readRecordingFromRecFile(RecordingPointer recording, const QString& filename, const QByteArray& byteArray);
#endif // hifi_Recording_h