Merge pull request #6387 from samcake/controllers

Recording : Updating the recorder.js and a few fixes
This commit is contained in:
Brad Davis 2015-11-13 17:56:57 -08:00
commit 7a34d4f567
6 changed files with 64 additions and 28 deletions

View file

@@ -176,14 +176,13 @@ function formatTime(time) {
var SEC_PER_MIN = 60;
var MSEC_PER_SEC = 1000;
var hours = Math.floor(time / (MSEC_PER_SEC * SEC_PER_MIN * MIN_PER_HOUR));
time -= hours * (MSEC_PER_SEC * SEC_PER_MIN * MIN_PER_HOUR);
var hours = Math.floor(time / (SEC_PER_MIN * MIN_PER_HOUR));
time -= hours * (SEC_PER_MIN * MIN_PER_HOUR);
var minutes = Math.floor(time / (MSEC_PER_SEC * SEC_PER_MIN));
time -= minutes * (MSEC_PER_SEC * SEC_PER_MIN);
var minutes = Math.floor(time / (SEC_PER_MIN));
time -= minutes * (SEC_PER_MIN);
var seconds = Math.floor(time / MSEC_PER_SEC);
seconds = time / MSEC_PER_SEC;
var seconds = time;
var text = "";
text += (hours > 0) ? hours + ":" :

View file

@@ -608,7 +608,7 @@ float MyAvatar::recorderElapsed() {
if (!_recorder) {
return 0;
}
return (float)_recorder->position() / MSECS_PER_SECOND;
return (float)_recorder->position() / (float) MSECS_PER_SECOND;
}
QMetaObject::Connection _audioClientRecorderConnection;

View file

@@ -804,12 +804,12 @@ float AvatarData::playerElapsed() {
return 0;
}
if (QThread::currentThread() != thread()) {
qint64 result;
float result;
QMetaObject::invokeMethod(this, "playerElapsed", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(qint64, result));
Q_RETURN_ARG(float, result));
return result;
}
return (float)_player->position() / MSECS_PER_SECOND;
return (float)_player->position() / (float) MSECS_PER_SECOND;
}
float AvatarData::playerLength() {
@@ -817,12 +817,12 @@ float AvatarData::playerLength() {
return 0;
}
if (QThread::currentThread() != thread()) {
qint64 result;
float result;
QMetaObject::invokeMethod(this, "playerLength", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(qint64, result));
Q_RETURN_ARG(float, result));
return result;
}
return _player->length() / MSECS_PER_SECOND;
return (float)_player->length() / (float) MSECS_PER_SECOND;
}
void AvatarData::loadRecording(const QString& filename) {
@@ -1513,7 +1513,8 @@ void AvatarData::setRecordingBasis(std::shared_ptr<Transform> recordingBasis) {
recordingBasis = std::make_shared<Transform>();
recordingBasis->setRotation(getOrientation());
recordingBasis->setTranslation(getPosition());
recordingBasis->setScale(getTargetScale());
// TODO: find a different way to record/playback the Scale of the avatar
//recordingBasis->setScale(getTargetScale());
}
_recordingBasis = recordingBasis;
}
@@ -1532,7 +1533,7 @@ Transform AvatarData::getTransform() const {
static const QString JSON_AVATAR_BASIS = QStringLiteral("basisTransform");
static const QString JSON_AVATAR_RELATIVE = QStringLiteral("relativeTransform");
static const QString JSON_AVATAR_JOINT_ROTATIONS = QStringLiteral("jointRotations");
static const QString JSON_AVATAR_JOINT_ARRAY = QStringLiteral("jointArray");
static const QString JSON_AVATAR_HEAD = QStringLiteral("head");
static const QString JSON_AVATAR_HEAD_ROTATION = QStringLiteral("rotation");
static const QString JSON_AVATAR_HEAD_BLENDSHAPE_COEFFICIENTS = QStringLiteral("blendShapes");
@@ -1544,6 +1545,24 @@ static const QString JSON_AVATAR_BODY_MODEL = QStringLiteral("bodyModel");
static const QString JSON_AVATAR_DISPLAY_NAME = QStringLiteral("displayName");
static const QString JSON_AVATAR_ATTACHEMENTS = QStringLiteral("attachments");
// Serializes a JointData as a two-element JSON array: [rotation, translation],
// using the existing quat/vec3 toJsonValue overloads for each component.
QJsonValue toJsonValue(const JointData& joint) {
    QJsonArray out;
    out.push_back(toJsonValue(joint.rotation));
    out.push_back(toJsonValue(joint.translation));
    return out;
}
// Deserializes a JointData from the two-element JSON array produced by
// toJsonValue(const JointData&): [rotation, translation].
// Returns a default-constructed JointData when the value is not an array.
JointData jointDataFromJsonValue(const QJsonValue& json) {
JointData result;
if (json.isArray()) {
QJsonArray array = json.toArray();
result.rotation = quatFromJsonValue(array[0]);
result.rotationSet = true;
result.translation = vec3FromJsonValue(array[1]);
// NOTE(review): translation is parsed but translationSet stays false, unlike
// rotationSet above. Playback in this change applies joint rotations only,
// so this may be deliberate — confirm before flipping it to true.
result.translationSet = false;
}
return result;
}
// Every frame will store both a basis for the recording and a relative transform
// This allows the application to decide whether playback should be relative to an avatar's
@@ -1575,13 +1594,16 @@ QByteArray avatarStateToFrame(const AvatarData* _avatar) {
root[JSON_AVATAR_RELATIVE] = Transform::toJson(relativeTransform);
root[JSON_AVATAR_BASIS] = Transform::toJson(*recordingBasis);
}
} else {
root[JSON_AVATAR_RELATIVE] = Transform::toJson(_avatar->getTransform());
}
QJsonArray jointRotations;
for (const auto& jointRotation : _avatar->getJointRotations()) {
jointRotations.push_back(toJsonValue(jointRotation));
// Skeleton pose
QJsonArray jointArray;
for (const auto& joint : _avatar->getRawJointData()) {
jointArray.push_back(toJsonValue(joint));
}
root[JSON_AVATAR_JOINT_ROTATIONS] = jointRotations;
root[JSON_AVATAR_JOINT_ARRAY] = jointArray;
const HeadData* head = _avatar->getHeadData();
if (head) {
@@ -1643,24 +1665,34 @@ void avatarStateFromFrame(const QByteArray& frameData, AvatarData* _avatar) {
auto worldTransform = currentBasis->worldTransform(relativeTransform);
_avatar->setPosition(worldTransform.getTranslation());
_avatar->setOrientation(worldTransform.getRotation());
_avatar->setTargetScale(worldTransform.getScale().x);
// TODO: find a way to record/playback the Scale of the avatar
//_avatar->setTargetScale(worldTransform.getScale().x);
}
#if 0
if (root.contains(JSON_AVATAR_ATTACHEMENTS)) {
// FIXME de-serialize attachment data
}
// Joint rotations are relative to the avatar, so they require no basis correction
if (root.contains(JSON_AVATAR_JOINT_ROTATIONS)) {
QVector<quat> jointRotations;
QJsonArray jointRotationsJson = root[JSON_AVATAR_JOINT_ROTATIONS].toArray();
jointRotations.reserve(jointRotationsJson.size());
for (const auto& jointRotationJson : jointRotationsJson) {
jointRotations.push_back(quatFromJsonValue(jointRotationJson));
if (root.contains(JSON_AVATAR_JOINT_ARRAY)) {
QVector<JointData> jointArray;
QJsonArray jointArrayJson = root[JSON_AVATAR_JOINT_ARRAY].toArray();
jointArray.reserve(jointArrayJson.size());
for (const auto& jointJson : jointArrayJson) {
jointArray.push_back(jointDataFromJsonValue(jointJson));
}
QVector<glm::quat> jointRotations;
jointRotations.reserve(jointArray.size());
for (const auto& joint : jointArray) {
jointRotations.push_back(joint.rotation);
}
_avatar->setJointRotations(jointRotations);
}
#if 0
// Most head data is relative to the avatar, and needs no basis correction,
// but the lookat vector does need correction
HeadData* head = _avatar->_headData;

View file

@@ -457,6 +457,9 @@ public:
bool translationSet = false;
};
QJsonValue toJsonValue(const JointData& joint);
JointData jointDataFromJsonValue(const QJsonValue& q);
class AttachmentData {
public:
QUrl modelURL;

View file

@@ -25,6 +25,8 @@ void Deck::queueClip(ClipPointer clip, Time timeOffset) {
// FIXME if the time offset is not zero, wrap the clip in a OffsetClip wrapper
_clips.push_back(clip);
_length = std::max(_length, clip->duration());
}
void Deck::play() {

View file

@@ -27,7 +27,7 @@ public:
static const FrameType TYPE_INVALID = 0xFFFF;
static const FrameType TYPE_HEADER = 0x0;
FrameType type { TYPE_INVALID };
Time timeOffset { 0 };
Time timeOffset { 0 }; // milliseconds
QByteArray data;
Frame() {}