trying to get somewhere....

samcake 2015-11-12 17:54:35 -08:00
parent da8270d50b
commit 1fd37b51a2
4 changed files with 54 additions and 20 deletions
examples/utilities/record
interface/src/avatar
libraries/avatars/src


@@ -176,6 +176,8 @@ function formatTime(time) {
var SEC_PER_MIN = 60;
var MSEC_PER_SEC = 1000;
time = time * (MSEC_PER_SEC * SEC_PER_MIN * MIN_PER_HOUR);
var hours = Math.floor(time / (MSEC_PER_SEC * SEC_PER_MIN * MIN_PER_HOUR));
time -= hours * (MSEC_PER_SEC * SEC_PER_MIN * MIN_PER_HOUR);
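
With the time = time * (MSEC_PER_SEC * SEC_PER_MIN * MIN_PER_HOUR) step in place, formatTime effectively treats its argument as a count of hours: an input of 1.5 is first expanded to 1.5 * 1000 * 60 * 60 = 5,400,000 ms, which the following lines break down into 1 hour, 30 minutes and 0 seconds (assuming MIN_PER_HOUR is the usual 60 defined earlier in the function).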


@@ -608,7 +608,7 @@ float MyAvatar::recorderElapsed() {
if (!_recorder) {
return 0;
}
return (float)_recorder->position() / MSECS_PER_SECOND;
return (float)_recorder->position();
}
QMetaObject::Connection _audioClientRecorderConnection;
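
With the division by MSECS_PER_SECOND removed, recorderElapsed() now returns _recorder->position() unchanged, so its unit is whatever Recorder::position() reports. A minimal call-site sketch, under the assumption that position() still counts milliseconds and that a caller wants seconds back:

    // Hypothetical call-site conversion, not part of the commit: recover
    // seconds outside recorderElapsed() if position() is still in milliseconds.
    const float MSECS_PER_SECOND = 1000.0f;   // assumed value

    float elapsedSeconds(MyAvatar& avatar) {
        return avatar.recorderElapsed() / MSECS_PER_SECOND;
    }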


@@ -804,12 +804,12 @@ float AvatarData::playerElapsed() {
return 0;
}
if (QThread::currentThread() != thread()) {
qint64 result;
float result;
QMetaObject::invokeMethod(this, "playerElapsed", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(qint64, result));
Q_RETURN_ARG(float, result));
return result;
}
return (float)_player->position() / MSECS_PER_SECOND;
return (float)_player->position();
}
float AvatarData::playerLength() {
@@ -817,12 +817,12 @@ float AvatarData::playerLength() {
return 0;
}
if (QThread::currentThread() != thread()) {
qint64 result;
float result;
QMetaObject::invokeMethod(this, "playerLength", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(qint64, result));
Q_RETURN_ARG(float, result));
return result;
}
return _player->length() / MSECS_PER_SECOND;
return _player->length();
}
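
Both playerElapsed() and playerLength() follow the same Qt idiom: when called from a thread other than the one that owns the object, they re-invoke themselves on the owning thread over a blocking queued connection and collect the result through Q_RETURN_ARG. The type named in Q_RETURN_ARG must match the method's actual return type or the invocation fails at runtime, which is why the temporaries change from qint64 to float along with the signatures. A minimal standalone sketch of the idiom (an illustration, not hifi code):

    #include <QtCore/QObject>
    #include <QtCore/QThread>

    class Player : public QObject {
        Q_OBJECT
    public:
        // Q_INVOKABLE lets QMetaObject::invokeMethod() find the method by name.
        Q_INVOKABLE float elapsed() {
            if (QThread::currentThread() != thread()) {
                float result;
                // Blocks the caller until the owning thread (which must be
                // running an event loop) has executed elapsed() and filled result.
                QMetaObject::invokeMethod(this, "elapsed", Qt::BlockingQueuedConnection,
                    Q_RETURN_ARG(float, result));
                return result;
            }
            return _positionMs;   // already on the owning thread
        }

    private:
        float _positionMs { 0.0f };
    };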
void AvatarData::loadRecording(const QString& filename) {
@@ -870,7 +870,7 @@ void AvatarData::setPlayerTime(float time) {
return;
}
_player->seek(time * MSECS_PER_SECOND);
_player->seek(time);
}
void AvatarData::setPlayFromCurrentLocation(bool playFromCurrentLocation) {
@@ -1532,7 +1532,7 @@ Transform AvatarData::getTransform() const {
static const QString JSON_AVATAR_BASIS = QStringLiteral("basisTransform");
static const QString JSON_AVATAR_RELATIVE = QStringLiteral("relativeTransform");
static const QString JSON_AVATAR_JOINT_ROTATIONS = QStringLiteral("jointRotations");
static const QString JSON_AVATAR_JOINT_ARRAY = QStringLiteral("jointArray");
static const QString JSON_AVATAR_HEAD = QStringLiteral("head");
static const QString JSON_AVATAR_HEAD_ROTATION = QStringLiteral("rotation");
static const QString JSON_AVATAR_HEAD_BLENDSHAPE_COEFFICIENTS = QStringLiteral("blendShapes");
@ -1544,6 +1544,24 @@ static const QString JSON_AVATAR_BODY_MODEL = QStringLiteral("bodyModel");
static const QString JSON_AVATAR_DISPLAY_NAME = QStringLiteral("displayName");
static const QString JSON_AVATAR_ATTACHEMENTS = QStringLiteral("attachments");
QJsonValue toJsonValue(const JointData& joint) {
QJsonArray result;
result.push_back(toJsonValue(joint.rotation));
result.push_back(toJsonValue(joint.translation));
return result;
}
JointData jointDataFromJsonValue(const QJsonValue& json) {
JointData result;
if (json.isArray()) {
QJsonArray array = json.toArray();
result.rotation = quatFromJsonValue(array[0]);
result.rotationSet = true;
result.translation = vec3FromJsonValue(array[1]);
result.translationSet = false;
}
return result;
}
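
The two helpers encode a JointData as a two-element JSON array, rotation first and translation second. A small round-trip sketch of how they might be used (an illustration, not code from the commit; it assumes the glm types and the quat/vec3 JSON helpers referenced above):

    // Illustrative round trip of one joint through the new helpers.
    JointData makeExampleJoint() {
        JointData joint;
        joint.rotation = glm::angleAxis(glm::radians(90.0f), glm::vec3(0.0f, 1.0f, 0.0f));
        joint.translation = glm::vec3(0.0f, 1.5f, 0.0f);
        return joint;
    }

    void roundTripExample() {
        JointData original = makeExampleJoint();
        QJsonValue encoded = toJsonValue(original);           // [rotation, translation]
        JointData decoded = jointDataFromJsonValue(encoded);
        // decoded.rotation and decoded.translation mirror the originals; as
        // written above, rotationSet comes back true while translationSet
        // is left false.
    }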
// Every frame will store both a basis for the recording and a relative transform
// This allows the application to decide whether playback should be relative to an avatar's
@@ -1575,13 +1593,16 @@ QByteArray avatarStateToFrame(const AvatarData* _avatar) {
root[JSON_AVATAR_RELATIVE] = Transform::toJson(relativeTransform);
root[JSON_AVATAR_BASIS] = Transform::toJson(*recordingBasis);
}
} else {
root[JSON_AVATAR_RELATIVE] = Transform::toJson(_avatar->getTransform());
}
QJsonArray jointRotations;
for (const auto& jointRotation : _avatar->getJointRotations()) {
jointRotations.push_back(toJsonValue(jointRotation));
// Skeleton pose
QJsonArray jointArray;
for (const auto& joint : _avatar->getRawJointData()) {
jointArray.push_back(toJsonValue(joint));
}
root[JSON_AVATAR_JOINT_ROTATIONS] = jointRotations;
root[JSON_AVATAR_JOINT_ARRAY] = jointArray;
const HeadData* head = _avatar->getHeadData();
if (head) {
@@ -1646,21 +1667,29 @@ void avatarStateFromFrame(const QByteArray& frameData, AvatarData* _avatar) {
_avatar->setTargetScale(worldTransform.getScale().x);
}
#if 0
if (root.contains(JSON_AVATAR_ATTACHEMENTS)) {
// FIXME de-serialize attachment data
}
// Joint rotations are relative to the avatar, so they require no basis correction
if (root.contains(JSON_AVATAR_JOINT_ROTATIONS)) {
QVector<quat> jointRotations;
QJsonArray jointRotationsJson = root[JSON_AVATAR_JOINT_ROTATIONS].toArray();
jointRotations.reserve(jointRotationsJson.size());
for (const auto& jointRotationJson : jointRotationsJson) {
jointRotations.push_back(quatFromJsonValue(jointRotationJson));
if (root.contains(JSON_AVATAR_JOINT_ARRAY)) {
QVector<JointData> jointArray;
QJsonArray jointArrayJson = root[JSON_AVATAR_JOINT_ARRAY].toArray();
jointArray.reserve(jointArrayJson.size());
for (const auto& jointJson : jointArrayJson) {
jointArray.push_back(jointDataFromJsonValue(jointJson));
}
QVector<glm::quat> jointRotations;
jointRotations.reserve(jointArray.size());
for (const auto& joint : jointArray) {
jointRotations.push_back(joint.rotation);
}
_avatar->setJointRotations(jointRotations);
}
#if 0
// Most head data is relative to the avatar, and needs no basis correction,
// but the lookat vector does need correction
HeadData* head = _avatar->_headData;
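
The hunk above reads its keys from a root QJsonObject produced earlier in avatarStateFromFrame(), outside the visible context. A hypothetical framing of that step, assuming the frame payload is plain-text JSON (the commit does not show the encoding actually used):

    #include <QtCore/QByteArray>
    #include <QtCore/QJsonDocument>
    #include <QtCore/QJsonObject>

    // Hypothetical: turn a frame's byte payload into the root object that the
    // deserialization code reads JSON_AVATAR_JOINT_ARRAY and the other keys from.
    QJsonObject frameRoot(const QByteArray& frameData) {
        QJsonParseError error;
        QJsonDocument document = QJsonDocument::fromJson(frameData, &error);
        if (error.error != QJsonParseError::NoError || !document.isObject()) {
            return QJsonObject();   // empty root: nothing to apply
        }
        return document.object();
    }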


@@ -457,6 +457,9 @@ public:
bool translationSet = false;
};
QJsonValue toJsonValue(const JointData& joint);
JointData jointDataFromJsonValue(const QJsonValue& q);
class AttachmentData {
public:
QUrl modelURL;