Philip Rosedale 2014-09-18 10:48:44 -07:00
commit 3f9eff2a90
25 changed files with 1499 additions and 1181 deletions

View file

@@ -210,14 +210,14 @@ function mousePressEvent(event) {
}
} else if (saveIcon === toolBar.clicked(clickedOverlay)) {
if (!MyAvatar.isRecording() && !MyAvatar.isPlaying() && MyAvatar.playerLength() != 0) {
recordingFile = Window.save("Save recording to file", ".", "*.rec");
recordingFile = Window.save("Save recording to file", ".", "Recordings (*.hfr)");
if (!(recordingFile === "null" || recordingFile === null || recordingFile === "")) {
MyAvatar.saveRecording(recordingFile);
}
}
} else if (loadIcon === toolBar.clicked(clickedOverlay)) {
if (!MyAvatar.isRecording() && !MyAvatar.isPlaying()) {
recordingFile = Window.browse("Load recording from file", ".", "*.rec");
recordingFile = Window.browse("Load recording from file", ".", "Recordings (*.hfr *.rec *.HFR *.REC)");
if (!(recordingFile === "null" || recordingFile === null || recordingFile === "")) {
MyAvatar.loadRecording(recordingFile);
}

View file

@@ -68,6 +68,7 @@
#include <UUID.h>
#include "Application.h"
#include "ui/DataWebDialog.h"
#include "InterfaceVersion.h"
#include "Menu.h"
#include "ModelUploader.h"

View file

@@ -273,6 +273,7 @@ Menu::Menu() :
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ScriptedMotorControl, 0, true,
avatar, SLOT(updateMotionBehavior()));
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ChatCircling, 0, false);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::NamesAboveHeads, 0, true);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::GlowWhenSpeaking, 0, true);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::BlueSpeechSphere, 0, true);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ObeyEnvironmentalGravity, Qt::SHIFT | Qt::Key_G, false,
@@ -1216,13 +1217,17 @@ void Menu::displayNameLocationResponse(const QString& errorString) {
void Menu::toggleLocationList() {
if (!_userLocationsDialog) {
_userLocationsDialog = new UserLocationsDialog(Application::getInstance()->getWindow());
_userLocationsDialog = DataWebDialog::dialogForPath("/locations");
}
if (_userLocationsDialog->isVisible()) {
_userLocationsDialog->hide();
} else {
if (!_userLocationsDialog->isVisible()) {
_userLocationsDialog->show();
}
_userLocationsDialog->raise();
_userLocationsDialog->activateWindow();
_userLocationsDialog->showNormal();
}
void Menu::nameLocation() {

View file

@@ -28,12 +28,12 @@
#endif
#include "location/LocationManager.h"
#include "ui/PreferencesDialog.h"
#include "ui/ChatWindow.h"
#include "ui/DataWebDialog.h"
#include "ui/JSConsole.h"
#include "ui/LoginDialog.h"
#include "ui/PreferencesDialog.h"
#include "ui/ScriptEditorWindow.h"
#include "ui/UserLocationsDialog.h"
const float ADJUST_LOD_DOWN_FPS = 40.0;
const float ADJUST_LOD_UP_FPS = 55.0;
@@ -273,7 +273,7 @@ private:
QDialog* _jsConsole;
OctreeStatsDialog* _octreeStatsDialog;
LodToolsDialog* _lodToolsDialog;
UserLocationsDialog* _userLocationsDialog;
QPointer<DataWebDialog> _userLocationsDialog;
#ifdef Q_OS_MAC
SpeechRecognizer _speechRecognizer;
#endif
@@ -395,6 +395,7 @@ namespace MenuOption {
const QString FullscreenMirror = "Fullscreen Mirror";
const QString GlowMode = "Cycle Glow Mode";
const QString GlowWhenSpeaking = "Glow When Speaking";
const QString NamesAboveHeads = "Names Above Heads";
const QString GoToUser = "Go To User";
const QString HeadMouse = "Head Mouse";
const QString IncreaseAvatarSize = "Increase Avatar Size";

View file

@@ -1,251 +0,0 @@
//
// UserLocationsModel.cpp
// interface/src
//
// Created by Ryan Huffman on 06/24/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QDebug>
#include <QJsonArray>
#include <QJsonDocument>
#include <QMessageBox>
#include "AccountManager.h"
#include "Application.h"
#include "UserLocationsModel.h"
static const QString LOCATIONS_GET = "/api/v1/locations";
static const QString LOCATION_UPDATE_OR_DELETE = "/api/v1/locations/%1";
UserLocation::UserLocation(const QString& id, const QString& name, const QString& address) :
_id(id),
_name(name),
_address(address),
_previousName(name),
_updating(false) {
}
void UserLocation::requestRename(const QString& newName) {
if (!_updating && newName.toLower() != _name) {
_updating = true;
JSONCallbackParameters callbackParams(this, "handleRenameResponse", this, "handleRenameError");
QJsonObject jsonNameObject;
jsonNameObject.insert("name", newName);
QJsonObject locationObject;
locationObject.insert("location", jsonNameObject);
QJsonDocument jsonDocument(jsonNameObject);
AccountManager::getInstance().authenticatedRequest(LOCATION_UPDATE_OR_DELETE.arg(_id),
QNetworkAccessManager::PutOperation,
callbackParams,
jsonDocument.toJson());
_previousName = _name;
_name = newName;
emit updated(_name);
}
}
void UserLocation::handleRenameResponse(const QJsonObject& responseData) {
_updating = false;
QJsonValue status = responseData["status"];
if (!status.isUndefined() && status.toString() == "success") {
qDebug() << responseData;
QString updatedName = responseData["data"].toObject()["location"].toObject()["name"].toString();
qDebug() << "The updated name is" << updatedName;
_name = updatedName;
} else {
_name = _previousName;
QString msg = "There was an error renaming location '" + _name + "'";
QJsonValue data = responseData["data"];
if (!data.isUndefined()) {
QJsonValue nameError = data.toObject()["name"];
if (!nameError.isUndefined()) {
msg += ": " + nameError.toString();
}
}
qDebug() << msg;
QMessageBox::warning(Application::getInstance()->getWindow(), "Error", msg);
}
emit updated(_name);
}
void UserLocation::handleRenameError(QNetworkReply& errorReply) {
_updating = false;
QString msg = "There was an error renaming location '" + _name + "': " + errorReply.errorString();
qDebug() << msg;
QMessageBox::warning(Application::getInstance()->getWindow(), "Error", msg);
emit updated(_name);
}
void UserLocation::requestDelete() {
if (!_updating) {
_updating = true;
JSONCallbackParameters callbackParams(this, "handleDeleteResponse", this, "handleDeleteError");
AccountManager::getInstance().authenticatedRequest(LOCATION_UPDATE_OR_DELETE.arg(_id),
QNetworkAccessManager::DeleteOperation,
callbackParams);
}
}
void UserLocation::handleDeleteResponse(const QJsonObject& responseData) {
_updating = false;
QJsonValue status = responseData["status"];
if (!status.isUndefined() && status.toString() == "success") {
emit deleted(_name);
} else {
QString msg = "There was an error deleting location '" + _name + "'";
qDebug() << msg;
QMessageBox::warning(Application::getInstance()->getWindow(), "Error", msg);
}
}
void UserLocation::handleDeleteError(QNetworkReply& errorReply) {
_updating = false;
QString msg = "There was an error deleting location '" + _name + "': " + errorReply.errorString();
qDebug() << msg;
QMessageBox::warning(Application::getInstance()->getWindow(), "Error", msg);
}
UserLocationsModel::UserLocationsModel(QObject* parent) :
QAbstractListModel(parent),
_updating(false) {
refresh();
}
UserLocationsModel::~UserLocationsModel() {
qDeleteAll(_locations);
_locations.clear();
}
void UserLocationsModel::update() {
beginResetModel();
endResetModel();
}
void UserLocationsModel::deleteLocation(const QModelIndex& index) {
UserLocation* location = _locations[index.row()];
location->requestDelete();
}
void UserLocationsModel::renameLocation(const QModelIndex& index, const QString& newName) {
UserLocation* location = _locations[index.row()];
location->requestRename(newName);
}
void UserLocationsModel::refresh() {
if (!_updating) {
beginResetModel();
qDeleteAll(_locations);
_locations.clear();
_updating = true;
endResetModel();
JSONCallbackParameters callbackParams(this, "handleLocationsResponse");
AccountManager::getInstance().authenticatedRequest(LOCATIONS_GET,
QNetworkAccessManager::GetOperation,
callbackParams);
}
}
void UserLocationsModel::handleLocationsResponse(const QJsonObject& responseData) {
_updating = false;
QJsonValue status = responseData["status"];
if (!status.isUndefined() && status.toString() == "success") {
beginResetModel();
QJsonArray locations = responseData["data"].toObject()["locations"].toArray();
for (QJsonArray::const_iterator it = locations.constBegin(); it != locations.constEnd(); it++) {
QJsonObject location = (*it).toObject();
QString locationAddress = "hifi://" + location["domain"].toObject()["name"].toString()
+ location["path"].toString();
UserLocation* userLocation = new UserLocation(location["id"].toString(), location["name"].toString(),
locationAddress);
_locations.append(userLocation);
connect(userLocation, &UserLocation::deleted, this, &UserLocationsModel::removeLocation);
connect(userLocation, &UserLocation::updated, this, &UserLocationsModel::update);
}
endResetModel();
} else {
qDebug() << "Error loading location data";
}
}
void UserLocationsModel::removeLocation(const QString& name) {
beginResetModel();
for (QList<UserLocation*>::iterator it = _locations.begin(); it != _locations.end(); it++) {
if ((*it)->name() == name) {
_locations.erase(it);
break;
}
}
endResetModel();
}
int UserLocationsModel::rowCount(const QModelIndex& parent) const {
if (parent.isValid()) {
return 0;
}
if (_updating) {
return 1;
}
return _locations.length();
}
QVariant UserLocationsModel::data(const QModelIndex& index, int role) const {
if (role == Qt::DisplayRole) {
if (_updating) {
return QVariant("Updating...");
} else if (index.row() > _locations.length()) {
return QVariant();
} else if (index.column() == NameColumn) {
return _locations[index.row()]->name();
} else if (index.column() == AddressColumn) {
return QVariant(_locations[index.row()]->address());
}
}
return QVariant();
}
QVariant UserLocationsModel::headerData(int section, Qt::Orientation orientation, int role) const {
if (orientation == Qt::Horizontal && role == Qt::DisplayRole) {
switch (section) {
case NameColumn: return "Name";
case AddressColumn: return "Address";
default: return QVariant();
}
}
return QVariant();
}
Qt::ItemFlags UserLocationsModel::flags(const QModelIndex& index) const {
if (index.row() < _locations.length()) {
UserLocation* ul = _locations[index.row()];
if (ul->isUpdating()) {
return Qt::NoItemFlags;
}
}
return QAbstractListModel::flags(index);
}

View file

@@ -1,82 +0,0 @@
//
// UserLocationsModel.h
// interface/src
//
// Created by Ryan Huffman on 06/24/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_UserLocationsModel_h
#define hifi_UserLocationsModel_h
#include <QAbstractListModel>
#include <QModelIndex>
#include <QVariant>
class UserLocation : public QObject {
Q_OBJECT
public:
UserLocation(const QString& id, const QString& name, const QString& address);
bool isUpdating() { return _updating; }
void requestRename(const QString& newName);
void requestDelete();
const QString& id() { return _id; }
const QString& name() { return _name; }
const QString& address() { return _address; }
public slots:
void handleRenameResponse(const QJsonObject& responseData);
void handleRenameError(QNetworkReply& errorReply);
void handleDeleteResponse(const QJsonObject& responseData);
void handleDeleteError(QNetworkReply& errorReply);
signals:
void updated(const QString& name);
void deleted(const QString& name);
private:
QString _id;
QString _name;
QString _address;
QString _previousName;
bool _updating;
};
class UserLocationsModel : public QAbstractListModel {
Q_OBJECT
public:
UserLocationsModel(QObject* parent = NULL);
~UserLocationsModel();
virtual int rowCount(const QModelIndex& parent = QModelIndex()) const;
virtual QVariant data(const QModelIndex& index, int role = Qt::DisplayRole) const;
virtual int columnCount(const QModelIndex& parent = QModelIndex()) const { return 2; };
virtual QVariant headerData(int section, Qt::Orientation orientation, int role = Qt::DisplayRole) const;
virtual Qt::ItemFlags flags(const QModelIndex& index) const;
void deleteLocation(const QModelIndex& index);
void renameLocation(const QModelIndex& index, const QString& newName);
enum Columns {
NameColumn = 0,
AddressColumn
};
public slots:
void refresh();
void update();
void handleLocationsResponse(const QJsonObject& responseData);
void removeLocation(const QString& name);
private:
bool _updating;
QList<UserLocation*> _locations;
};
#endif // hifi_UserLocationsModel_h

View file

@@ -1033,6 +1033,11 @@ float Avatar::getPelvisToHeadLength() const {
}
void Avatar::setShowDisplayName(bool showDisplayName) {
if (!Menu::getInstance()->isOptionChecked(MenuOption::NamesAboveHeads)) {
_displayNameAlpha = 0.0f;
return;
}
// For myAvatar, the alpha update is not done (called in simulate for other avatars)
if (Application::getInstance()->getAvatar() == this) {
if (showDisplayName) {

View file

@@ -59,8 +59,8 @@ float MAX_KEYBOARD_MOTOR_SPEED = MAX_AVATAR_SPEED;
float DEFAULT_KEYBOARD_MOTOR_TIMESCALE = 0.25f;
float MIN_SCRIPTED_MOTOR_TIMESCALE = 0.005f;
float DEFAULT_SCRIPTED_MOTOR_TIMESCALE = 1.0e6f;
const int SCRIPTED_MOTOR_AVATAR_FRAME = 0;
const int SCRIPTED_MOTOR_CAMERA_FRAME = 1;
const int SCRIPTED_MOTOR_CAMERA_FRAME = 0;
const int SCRIPTED_MOTOR_AVATAR_FRAME = 1;
const int SCRIPTED_MOTOR_WORLD_FRAME = 2;
MyAvatar::MyAvatar() :
@@ -1062,10 +1062,10 @@ void MyAvatar::setScriptedMotorTimescale(float timescale) {
}
void MyAvatar::setScriptedMotorFrame(QString frame) {
if (frame.toLower() == "avatar") {
_scriptedMotorFrame = SCRIPTED_MOTOR_AVATAR_FRAME;
} else if (frame.toLower() == "camera") {
if (frame.toLower() == "camera") {
_scriptedMotorFrame = SCRIPTED_MOTOR_CAMERA_FRAME;
} else if (frame.toLower() == "avatar") {
_scriptedMotorFrame = SCRIPTED_MOTOR_AVATAR_FRAME;
} else if (frame.toLower() == "world") {
_scriptedMotorFrame = SCRIPTED_MOTOR_WORLD_FRAME;
}
@@ -1267,11 +1267,25 @@ glm::vec3 MyAvatar::applyKeyboardMotor(float deltaTime, const glm::vec3& localVe
}
glm::vec3 MyAvatar::applyScriptedMotor(float deltaTime, const glm::vec3& localVelocity) {
// NOTE: localVelocity is in camera-frame because that's the frame of the default avatar motor
if (! (_motionBehaviors & AVATAR_MOTION_SCRIPTED_MOTOR_ENABLED)) {
return localVelocity;
}
glm::vec3 deltaVelocity(0.0f);
if (_scriptedMotorFrame == SCRIPTED_MOTOR_CAMERA_FRAME) {
// camera frame
deltaVelocity = _scriptedMotorVelocity - localVelocity;
} else if (_scriptedMotorFrame == SCRIPTED_MOTOR_AVATAR_FRAME) {
// avatar frame
glm::quat rotation = glm::inverse(getHead()->getCameraOrientation()) * getOrientation();
deltaVelocity = rotation * _scriptedMotorVelocity - localVelocity;
} else {
// world-frame
glm::quat rotation = glm::inverse(getHead()->getCameraOrientation());
deltaVelocity = rotation * _scriptedMotorVelocity - localVelocity;
}
float motorEfficiency = glm::clamp(deltaTime / _scriptedMotorTimescale, 0.0f, 1.0f);
return localVelocity + motorEfficiency * (_scriptedMotorVelocity - localVelocity);
return localVelocity + motorEfficiency * deltaVelocity;
}
const float NEARBY_FLOOR_THRESHOLD = 5.0f;
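As an aside (not part of this commit's diff), the reordered frame constants above are selected through setScriptedMotorFrame() in the hunks higher up; the strings are matched case-insensitively and an unrecognized string leaves the frame unchanged. A hypothetical call site, assuming a MyAvatar* named myAvatar:
myAvatar->setScriptedMotorFrame("camera"); // SCRIPTED_MOTOR_CAMERA_FRAME (now 0): scripted velocity used directly in the camera frame
myAvatar->setScriptedMotorFrame("avatar"); // SCRIPTED_MOTOR_AVATAR_FRAME (now 1): scripted velocity rotated from the avatar frame into the camera frame
myAvatar->setScriptedMotorFrame("world"); // SCRIPTED_MOTOR_WORLD_FRAME (2): scripted velocity rotated from the world frame into the camera frame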

View file

@@ -0,0 +1,39 @@
//
// DataWebDialog.cpp
// interface/src/ui
//
// Created by Stephen Birarda on 2014-09-17.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <qwebview.h>
#include <AccountManager.h>
#include <LimitedNodeList.h>
#include <OAuthNetworkAccessManager.h>
#include "DataWebDialog.h"
DataWebDialog::DataWebDialog() {
// make sure the dialog deletes itself when it closes
setAttribute(Qt::WA_DeleteOnClose);
// use an OAuthNetworkAccessManager instead of regular QNetworkAccessManager so our requests are authed
page()->setNetworkAccessManager(OAuthNetworkAccessManager::getInstance());
}
DataWebDialog* DataWebDialog::dialogForPath(const QString& path) {
DataWebDialog* dialogWebView = new DataWebDialog();
QUrl dataWebUrl(DEFAULT_NODE_AUTH_URL);
dataWebUrl.setPath(path);
qDebug() << "Opening a data web dialog for" << dataWebUrl.toString();
dialogWebView->load(dataWebUrl);
return dialogWebView;
}

View file

@@ -0,0 +1,25 @@
//
// DataWebDialog.h
// interface/src/ui
//
// Created by Stephen Birarda on 2014-09-17.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_DataWebDialog_h
#define hifi_DataWebDialog_h
#include <qobject.h>
#include <qwebview.h>
class DataWebDialog : public QWebView {
Q_OBJECT
public:
DataWebDialog();
static DataWebDialog* dialogForPath(const QString& path);
};
#endif // hifi_DataWebDialog_h

View file

@@ -1,79 +0,0 @@
//
// UserLocationsDialog.cpp
// interface/src/ui
//
// Created by Ryan Huffman on 06/24/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QDebug>
#include <QInputDialog>
#include <QPushButton>
#include <AddressManager.h>
#include "Menu.h"
#include "UserLocationsDialog.h"
UserLocationsDialog::UserLocationsDialog(QWidget* parent) :
QDialog(parent),
_ui(),
_proxyModel(this),
_userLocationsModel(this) {
_ui.setupUi(this);
_proxyModel.setSourceModel(&_userLocationsModel);
_proxyModel.setDynamicSortFilter(true);
_ui.locationsTreeView->setModel(&_proxyModel);
_ui.locationsTreeView->setSortingEnabled(true);
_ui.locationsTreeView->sortByColumn(UserLocationsModel::NameColumn, Qt::AscendingOrder);
connect(_ui.locationsTreeView->selectionModel(), &QItemSelectionModel::selectionChanged,
this, &UserLocationsDialog::updateEnabled);
connect(&_userLocationsModel, &UserLocationsModel::modelReset, this, &UserLocationsDialog::updateEnabled);
connect(&_userLocationsModel, &UserLocationsModel::modelReset, &_proxyModel, &QSortFilterProxyModel::invalidate);
connect(_ui.locationsTreeView, &QTreeView::doubleClicked, this, &UserLocationsDialog::goToModelIndex);
connect(_ui.deleteButton, &QPushButton::clicked, this, &UserLocationsDialog::deleteSelection);
connect(_ui.renameButton, &QPushButton::clicked, this, &UserLocationsDialog::renameSelection);
connect(_ui.refreshButton, &QPushButton::clicked, &_userLocationsModel, &UserLocationsModel::refresh);
this->setWindowTitle("My Locations");
}
void UserLocationsDialog::updateEnabled() {
bool enabled = _ui.locationsTreeView->selectionModel()->hasSelection();
_ui.renameButton->setEnabled(enabled);
_ui.deleteButton->setEnabled(enabled);
}
void UserLocationsDialog::goToModelIndex(const QModelIndex& index) {
QVariant address = _proxyModel.data(index.sibling(index.row(), UserLocationsModel::AddressColumn));
AddressManager::getInstance().handleLookupString(address.toString());
}
void UserLocationsDialog::deleteSelection() {
QModelIndex selection = _ui.locationsTreeView->selectionModel()->currentIndex();
selection = _proxyModel.mapToSource(selection);
if (selection.isValid()) {
_userLocationsModel.deleteLocation(selection);
}
}
void UserLocationsDialog::renameSelection() {
QModelIndex selection = _ui.locationsTreeView->selectionModel()->currentIndex();
selection = _proxyModel.mapToSource(selection);
if (selection.isValid()) {
bool ok;
QString name = _userLocationsModel.data(selection.sibling(selection.row(), UserLocationsModel::NameColumn)).toString();
QString newName = QInputDialog::getText(this, "Rename '" + name + "'", "Set name to:", QLineEdit::Normal, name, &ok);
if (ok && !newName.isEmpty()) {
_userLocationsModel.renameLocation(selection, newName);
}
}
}

View file

@@ -1,35 +0,0 @@
//
// UserLocationsDialog.h
// interface/src/ui
//
// Created by Ryan Huffman on 06/24/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_UserLocationsDialog_h
#define hifi_UserLocationsDialog_h
#include "ui_userLocationsDialog.h"
#include "UserLocationsModel.h"
class UserLocationsDialog : public QDialog {
Q_OBJECT
public:
UserLocationsDialog(QWidget* parent = NULL);
protected slots:
void updateEnabled();
void goToModelIndex(const QModelIndex& index);
void deleteSelection();
void renameSelection();
private:
Ui::UserLocationsDialog _ui;
QSortFilterProxyModel _proxyModel;
UserLocationsModel _userLocationsModel;
};
#endif // hifi_UserLocationsDialog_h

View file

@@ -49,10 +49,11 @@ typedef unsigned long long quint64;
#include <Node.h>
#include "HandData.h"
#include "HeadData.h"
#include "Player.h"
#include "Recorder.h"
#include "Referential.h"
#include "HeadData.h"
#include "HandData.h"
// avatar motion behaviors
const quint32 AVATAR_MOTION_KEYBOARD_MOTOR_ENABLED = 1U << 0;

View file

@@ -0,0 +1,236 @@
//
// Player.cpp
//
//
// Created by Clement on 9/17/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <GLMHelpers.h>
#include <NodeList.h>
#include <StreamUtils.h>
#include "AvatarData.h"
#include "Player.h"
Player::Player(AvatarData* avatar) :
_recording(new Recording()),
_avatar(avatar),
_audioThread(NULL),
_playFromCurrentPosition(true),
_loop(false),
_useAttachments(true),
_useDisplayName(true),
_useHeadURL(true),
_useSkeletonURL(true)
{
_timer.invalidate();
_options.setLoop(false);
_options.setVolume(1.0f);
}
bool Player::isPlaying() const {
return _timer.isValid();
}
qint64 Player::elapsed() const {
if (isPlaying()) {
return _timer.elapsed();
} else {
return 0;
}
}
void Player::startPlaying() {
if (_recording && _recording->getFrameNumber() > 0) {
_currentContext.globalTimestamp = usecTimestampNow();
_currentContext.domain = NodeList::getInstance()->getDomainHandler().getHostname();
_currentContext.position = _avatar->getPosition();
_currentContext.orientation = _avatar->getOrientation();
_currentContext.scale = _avatar->getTargetScale();
_currentContext.headModel = _avatar->getFaceModelURL().toString();
_currentContext.skeletonModel = _avatar->getSkeletonModelURL().toString();
_currentContext.displayName = _avatar->getDisplayName();
_currentContext.attachments = _avatar->getAttachmentData();
_currentContext.orientationInv = glm::inverse(_currentContext.orientation);
RecordingContext& context = _recording->getContext();
if (_useAttachments) {
_avatar->setAttachmentData(context.attachments);
}
if (_useDisplayName) {
_avatar->setDisplayName(context.displayName);
}
if (_useHeadURL) {
_avatar->setFaceModelURL(context.headModel);
}
if (_useSkeletonURL) {
_avatar->setSkeletonModelURL(context.skeletonModel);
}
bool wantDebug = false;
if (wantDebug) {
qDebug() << "Player::startPlaying(): Recording Context";
qDebug() << "Domain:" << _currentContext.domain;
qDebug() << "Position:" << _currentContext.position;
qDebug() << "Orientation:" << _currentContext.orientation;
qDebug() << "Scale:" << _currentContext.scale;
qDebug() << "Head URL:" << _currentContext.headModel;
qDebug() << "Skeleton URL:" << _currentContext.skeletonModel;
qDebug() << "Display Name:" << _currentContext.displayName;
qDebug() << "Num Attachments:" << _currentContext.attachments.size();
for (int i = 0; i < _currentContext.attachments.size(); ++i) {
qDebug() << "Model URL:" << _currentContext.attachments[i].modelURL;
qDebug() << "Joint Name:" << _currentContext.attachments[i].jointName;
qDebug() << "Translation:" << _currentContext.attachments[i].translation;
qDebug() << "Rotation:" << _currentContext.attachments[i].rotation;
qDebug() << "Scale:" << _currentContext.attachments[i].scale;
}
}
// Fake faceshift connection
_avatar->setForceFaceshiftConnected(true);
qDebug() << "Recorder::startPlaying()";
_currentFrame = 0;
setupAudioThread();
_timer.start();
}
}
void Player::stopPlaying() {
if (!isPlaying()) {
return;
}
_timer.invalidate();
cleanupAudioThread();
_avatar->clearJointsData();
// Turn off fake faceshift connection
_avatar->setForceFaceshiftConnected(false);
if (_useAttachments) {
_avatar->setAttachmentData(_currentContext.attachments);
}
if (_useDisplayName) {
_avatar->setDisplayName(_currentContext.displayName);
}
if (_useHeadURL) {
_avatar->setFaceModelURL(_currentContext.headModel);
}
if (_useSkeletonURL) {
_avatar->setSkeletonModelURL(_currentContext.skeletonModel);
}
qDebug() << "Recorder::stopPlaying()";
}
void Player::setupAudioThread() {
_audioThread = new QThread();
_options.setPosition(_avatar->getPosition());
_options.setOrientation(_avatar->getOrientation());
_injector.reset(new AudioInjector(_recording->getAudio(), _options), &QObject::deleteLater);
_injector->moveToThread(_audioThread);
_audioThread->start();
QMetaObject::invokeMethod(_injector.data(), "injectAudio", Qt::QueuedConnection);
}
void Player::cleanupAudioThread() {
_injector->stop();
QObject::connect(_injector.data(), &AudioInjector::finished,
_injector.data(), &AudioInjector::deleteLater);
QObject::connect(_injector.data(), &AudioInjector::destroyed,
_audioThread, &QThread::quit);
QObject::connect(_audioThread, &QThread::finished,
_audioThread, &QThread::deleteLater);
_injector.clear();
_audioThread = NULL;
}
void Player::loopRecording() {
cleanupAudioThread();
setupAudioThread();
_currentFrame = 0;
_timer.restart();
}
void Player::loadFromFile(QString file) {
if (_recording) {
_recording->clear();
} else {
_recording = RecordingPointer(new Recording());
}
readRecordingFromFile(_recording, file);
}
void Player::loadRecording(RecordingPointer recording) {
_recording = recording;
}
void Player::play() {
computeCurrentFrame();
if (_currentFrame < 0 || (_currentFrame >= _recording->getFrameNumber() - 1)) {
if (_loop) {
loopRecording();
} else {
stopPlaying();
}
return;
}
const RecordingContext* context = &_recording->getContext();
if (_playFromCurrentPosition) {
context = &_currentContext;
}
const RecordingFrame& currentFrame = _recording->getFrame(_currentFrame);
_avatar->setPosition(context->position + context->orientation * currentFrame.getTranslation());
_avatar->setOrientation(context->orientation * currentFrame.getRotation());
_avatar->setTargetScale(context->scale * currentFrame.getScale());
_avatar->setJointRotations(currentFrame.getJointRotations());
HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
if (head) {
head->setBlendshapeCoefficients(currentFrame.getBlendshapeCoefficients());
head->setLeanSideways(currentFrame.getLeanSideways());
head->setLeanForward(currentFrame.getLeanForward());
glm::vec3 eulers = glm::degrees(safeEulerAngles(currentFrame.getHeadRotation()));
head->setFinalPitch(eulers.x);
head->setFinalYaw(eulers.y);
head->setFinalRoll(eulers.z);
head->setLookAtPosition(currentFrame.getLookAtPosition());
} else {
qDebug() << "WARNING: Player couldn't find head data.";
}
_options.setPosition(_avatar->getPosition());
_options.setOrientation(_avatar->getOrientation());
_injector->setOptions(_options);
}
void Player::setPlayFromCurrentLocation(bool playFromCurrentLocation) {
_playFromCurrentPosition = playFromCurrentLocation;
}
bool Player::computeCurrentFrame() {
if (!isPlaying()) {
_currentFrame = -1;
return false;
}
if (_currentFrame < 0) {
_currentFrame = 0;
}
while (_currentFrame < _recording->getFrameNumber() - 1 &&
_recording->getFrameTimestamp(_currentFrame) < _timer.elapsed()) {
++_currentFrame;
}
return true;
}
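As a reading aid (not part of the diff), the position handling in play() above is the inverse of what Recorder::record stores in the Recorder.cpp hunk further down: each frame is kept relative to the recording context, and playback re-applies either that recorded context or _currentContext when _playFromCurrentPosition is set. A rough sketch of the round trip, with avatarPosition and playContext as hypothetical stand-ins:
// Recording side (Recorder::record): store the frame relative to the capture context.
glm::vec3 storedTranslation = context.orientationInv * (avatarPosition - context.position);
// Playback side (Player::play): re-apply a context to recover a world-space position.
glm::vec3 playbackPosition = playContext.position + playContext.orientation * storedTranslation;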

View file

@@ -0,0 +1,78 @@
//
// Player.h
//
//
// Created by Clement on 9/17/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_Player_h
#define hifi_Player_h
#include <AudioInjector.h>
#include <QElapsedTimer>
#include "Recording.h"
class AvatarData;
class Player;
typedef QSharedPointer<Player> PlayerPointer;
typedef QWeakPointer<Player> WeakPlayerPointer;
/// Plays back a recording
class Player {
public:
Player(AvatarData* avatar);
bool isPlaying() const;
qint64 elapsed() const;
RecordingPointer getRecording() const { return _recording; }
public slots:
void startPlaying();
void stopPlaying();
void loadFromFile(QString file);
void loadRecording(RecordingPointer recording);
void play();
void setPlayFromCurrentLocation(bool playFromCurrentPosition);
void setLoop(bool loop) { _loop = loop; }
void useAttachements(bool useAttachments) { _useAttachments = useAttachments; }
void useDisplayName(bool useDisplayName) { _useDisplayName = useDisplayName; }
void useHeadModel(bool useHeadURL) { _useHeadURL = useHeadURL; }
void useSkeletonModel(bool useSkeletonURL) { _useSkeletonURL = useSkeletonURL; }
private:
void setupAudioThread();
void cleanupAudioThread();
void loopRecording();
bool computeCurrentFrame();
QElapsedTimer _timer;
RecordingPointer _recording;
int _currentFrame;
QSharedPointer<AudioInjector> _injector;
AudioInjectorOptions _options;
AvatarData* _avatar;
QThread* _audioThread;
RecordingContext _currentContext;
bool _playFromCurrentPosition;
bool _loop;
bool _useAttachments;
bool _useDisplayName;
bool _useHeadURL;
bool _useSkeletonURL;
};
#endif // hifi_Player_h

View file

@@ -9,94 +9,14 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <GLMHelpers.h>
#include <NetworkAccessManager.h>
#include <QEventLoop>
#include <QFile>
#include <QMetaObject>
#include <QObject>
#include <GLMHelpers.h>
#include <NodeList.h>
#include <StreamUtils.h>
#include "AvatarData.h"
#include "Recorder.h"
void RecordingFrame::setBlendshapeCoefficients(QVector<float> blendshapeCoefficients) {
_blendshapeCoefficients = blendshapeCoefficients;
}
void RecordingFrame::setJointRotations(QVector<glm::quat> jointRotations) {
_jointRotations = jointRotations;
}
void RecordingFrame::setTranslation(glm::vec3 translation) {
_translation = translation;
}
void RecordingFrame::setRotation(glm::quat rotation) {
_rotation = rotation;
}
void RecordingFrame::setScale(float scale) {
_scale = scale;
}
void RecordingFrame::setHeadRotation(glm::quat headRotation) {
_headRotation = headRotation;
}
void RecordingFrame::setLeanSideways(float leanSideways) {
_leanSideways = leanSideways;
}
void RecordingFrame::setLeanForward(float leanForward) {
_leanForward = leanForward;
}
Recording::Recording() : _audio(NULL) {
}
Recording::~Recording() {
delete _audio;
}
int Recording::getLength() const {
if (_timestamps.isEmpty()) {
return 0;
}
return _timestamps.last();
}
qint32 Recording::getFrameTimestamp(int i) const {
if (i >= _timestamps.size()) {
return getLength();
}
return _timestamps[i];
}
const RecordingFrame& Recording::getFrame(int i) const {
assert(i < _timestamps.size());
return _frames[i];
}
void Recording::addFrame(int timestamp, RecordingFrame &frame) {
_timestamps << timestamp;
_frames << frame;
}
void Recording::addAudioPacket(QByteArray byteArray) {
if (!_audio) {
_audio = new Sound(byteArray);
}
_audio->append(byteArray);
}
void Recording::clear() {
_timestamps.clear();
_frames.clear();
delete _audio;
_audio = NULL;
}
Recorder::Recorder(AvatarData* avatar) :
_recording(new Recording()),
_avatar(avatar)
@@ -119,24 +39,44 @@ qint64 Recorder::elapsed() const {
void Recorder::startRecording() {
qDebug() << "Recorder::startRecording()";
_recording->clear();
RecordingContext& context = _recording->getContext();
context.globalTimestamp = usecTimestampNow();
context.domain = NodeList::getInstance()->getDomainHandler().getHostname();
context.position = _avatar->getPosition();
context.orientation = _avatar->getOrientation();
context.scale = _avatar->getTargetScale();
context.headModel = _avatar->getFaceModelURL().toString();
context.skeletonModel = _avatar->getSkeletonModelURL().toString();
context.displayName = _avatar->getDisplayName();
context.attachments = _avatar->getAttachmentData();
context.orientationInv = glm::inverse(context.orientation);
bool wantDebug = false;
if (wantDebug) {
qDebug() << "Recorder::startRecording(): Recording Context";
qDebug() << "Global timestamp:" << context.globalTimestamp;
qDebug() << "Domain:" << context.domain;
qDebug() << "Position:" << context.position;
qDebug() << "Orientation:" << context.orientation;
qDebug() << "Scale:" << context.scale;
qDebug() << "Head URL:" << context.headModel;
qDebug() << "Skeleton URL:" << context.skeletonModel;
qDebug() << "Display Name:" << context.displayName;
qDebug() << "Num Attachments:" << context.attachments.size();
for (int i = 0; i < context.attachments.size(); ++i) {
qDebug() << "Model URL:" << context.attachments[i].modelURL;
qDebug() << "Joint Name:" << context.attachments[i].jointName;
qDebug() << "Translation:" << context.attachments[i].translation;
qDebug() << "Rotation:" << context.attachments[i].rotation;
qDebug() << "Scale:" << context.attachments[i].scale;
}
}
_timer.start();
RecordingFrame frame;
frame.setBlendshapeCoefficients(_avatar->getHeadData()->getBlendshapeCoefficients());
frame.setJointRotations(_avatar->getJointRotations());
frame.setTranslation(_avatar->getPosition());
frame.setRotation(_avatar->getOrientation());
frame.setScale(_avatar->getTargetScale());
const HeadData* head = _avatar->getHeadData();
glm::quat rotation = glm::quat(glm::radians(glm::vec3(head->getFinalPitch(),
head->getFinalYaw(),
head->getFinalRoll())));
frame.setHeadRotation(rotation);
frame.setLeanForward(_avatar->getHeadData()->getLeanForward());
frame.setLeanSideways(_avatar->getHeadData()->getLeanSideways());
_recording->addFrame(0, frame);
record();
}
void Recorder::stopRecording() {
@@ -156,22 +96,41 @@ void Recorder::saveToFile(QString file) {
void Recorder::record() {
if (isRecording()) {
const RecordingFrame& referenceFrame = _recording->getFrame(0);
const RecordingContext& context = _recording->getContext();
RecordingFrame frame;
frame.setBlendshapeCoefficients(_avatar->getHeadData()->getBlendshapeCoefficients());
frame.setJointRotations(_avatar->getJointRotations());
frame.setTranslation(_avatar->getPosition() - referenceFrame.getTranslation());
frame.setRotation(glm::inverse(referenceFrame.getRotation()) * _avatar->getOrientation());
frame.setScale(_avatar->getTargetScale() / referenceFrame.getScale());
frame.setTranslation(context.orientationInv * (_avatar->getPosition() - context.position));
frame.setRotation(context.orientationInv * _avatar->getOrientation());
frame.setScale(_avatar->getTargetScale() / context.scale);
const HeadData* head = _avatar->getHeadData();
glm::quat rotation = glm::quat(glm::radians(glm::vec3(head->getFinalPitch(),
head->getFinalYaw(),
head->getFinalRoll())));
frame.setHeadRotation(rotation);
frame.setLeanForward(_avatar->getHeadData()->getLeanForward());
frame.setLeanSideways(_avatar->getHeadData()->getLeanSideways());
if (head) {
glm::vec3 rotationDegrees = glm::vec3(head->getFinalPitch(),
head->getFinalYaw(),
head->getFinalRoll());
frame.setHeadRotation(glm::quat(glm::radians(rotationDegrees)));
frame.setLeanForward(head->getLeanForward());
frame.setLeanSideways(head->getLeanSideways());
glm::vec3 relativeLookAt = context.orientationInv *
(head->getLookAtPosition() - context.position);
frame.setLookAtPosition(relativeLookAt);
}
bool wantDebug = false;
if (wantDebug) {
qDebug() << "Recording frame #" << _recording->getFrameNumber();
qDebug() << "Blendshapes:" << frame.getBlendshapeCoefficients().size();
qDebug() << "JointRotations:" << frame.getJointRotations().size();
qDebug() << "Translation:" << frame.getTranslation();
qDebug() << "Rotation:" << frame.getRotation();
qDebug() << "Scale:" << frame.getScale();
qDebug() << "Head rotation:" << frame.getHeadRotation();
qDebug() << "Lean Forward:" << frame.getLeanForward();
qDebug() << "Lean Sideways:" << frame.getLeanSideways();
qDebug() << "LookAtPosition:" << frame.getLookAtPosition();
}
_recording->addFrame(_timer.elapsed(), frame);
}
@@ -181,473 +140,3 @@ void Recorder::record(char* samples, int size) {
QByteArray byteArray(samples, size);
_recording->addAudioPacket(byteArray);
}
Player::Player(AvatarData* avatar) :
_recording(new Recording()),
_avatar(avatar),
_audioThread(NULL),
_startingScale(1.0f),
_playFromCurrentPosition(true),
_loop(false)
{
_timer.invalidate();
_options.setLoop(false);
_options.setVolume(1.0f);
}
bool Player::isPlaying() const {
return _timer.isValid();
}
qint64 Player::elapsed() const {
if (isPlaying()) {
return _timer.elapsed();
} else {
return 0;
}
}
glm::quat Player::getHeadRotation() {
if (!computeCurrentFrame()) {
qWarning() << "Incorrect use of Player::getHeadRotation()";
return glm::quat();
}
if (_currentFrame == 0) {
return _recording->getFrame(_currentFrame).getHeadRotation();
}
return _recording->getFrame(0).getHeadRotation() *
_recording->getFrame(_currentFrame).getHeadRotation();
}
float Player::getLeanSideways() {
if (!computeCurrentFrame()) {
qWarning() << "Incorrect use of Player::getLeanSideways()";
return 0.0f;
}
return _recording->getFrame(_currentFrame).getLeanSideways();
}
float Player::getLeanForward() {
if (!computeCurrentFrame()) {
qWarning() << "Incorrect use of Player::getLeanForward()";
return 0.0f;
}
return _recording->getFrame(_currentFrame).getLeanForward();
}
void Player::startPlaying() {
if (_recording && _recording->getFrameNumber() > 0) {
qDebug() << "Recorder::startPlaying()";
_currentFrame = 0;
// Setup audio thread
_audioThread = new QThread();
_options.setPosition(_avatar->getPosition());
_options.setOrientation(_avatar->getOrientation());
_injector.reset(new AudioInjector(_recording->getAudio(), _options), &QObject::deleteLater);
_injector->moveToThread(_audioThread);
_audioThread->start();
QMetaObject::invokeMethod(_injector.data(), "injectAudio", Qt::QueuedConnection);
// Fake faceshift connection
_avatar->setForceFaceshiftConnected(true);
if (_playFromCurrentPosition) {
_startingPosition = _avatar->getPosition();
_startingRotation = _avatar->getOrientation();
_startingScale = _avatar->getTargetScale();
} else {
_startingPosition = _recording->getFrame(0).getTranslation();
_startingRotation = _recording->getFrame(0).getRotation();
_startingScale = _recording->getFrame(0).getScale();
}
_timer.start();
}
}
void Player::stopPlaying() {
if (!isPlaying()) {
return;
}
_timer.invalidate();
_avatar->clearJointsData();
// Cleanup audio thread
_injector->stop();
QObject::connect(_injector.data(), &AudioInjector::finished,
_injector.data(), &AudioInjector::deleteLater);
QObject::connect(_injector.data(), &AudioInjector::destroyed,
_audioThread, &QThread::quit);
QObject::connect(_audioThread, &QThread::finished,
_audioThread, &QThread::deleteLater);
_injector.clear();
_audioThread = NULL;
// Turn off fake faceshift connection
_avatar->setForceFaceshiftConnected(false);
qDebug() << "Recorder::stopPlaying()";
}
void Player::loadFromFile(QString file) {
if (_recording) {
_recording->clear();
} else {
_recording = RecordingPointer(new Recording());
}
readRecordingFromFile(_recording, file);
}
void Player::loadRecording(RecordingPointer recording) {
_recording = recording;
}
void Player::play() {
computeCurrentFrame();
if (_currentFrame < 0 || (_currentFrame >= _recording->getFrameNumber() - 1)) {
// If it's the end of the recording, stop playing
stopPlaying();
if (_loop) {
startPlaying();
}
return;
}
if (_currentFrame == 0) {
// Don't play frame 0
// only meant to store absolute values
return;
}
_avatar->setPosition(_startingPosition +
glm::inverse(_recording->getFrame(0).getRotation()) * _startingRotation *
_recording->getFrame(_currentFrame).getTranslation());
_avatar->setOrientation(_startingRotation *
_recording->getFrame(_currentFrame).getRotation());
_avatar->setTargetScale(_startingScale *
_recording->getFrame(_currentFrame).getScale());
_avatar->setJointRotations(_recording->getFrame(_currentFrame).getJointRotations());
HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
if (head) {
head->setBlendshapeCoefficients(_recording->getFrame(_currentFrame).getBlendshapeCoefficients());
head->setLeanSideways(_recording->getFrame(_currentFrame).getLeanSideways());
head->setLeanForward(_recording->getFrame(_currentFrame).getLeanForward());
glm::vec3 eulers = glm::degrees(safeEulerAngles(_recording->getFrame(_currentFrame).getHeadRotation()));
head->setFinalPitch(eulers.x);
head->setFinalYaw(eulers.y);
head->setFinalRoll(eulers.z);
}
_options.setPosition(_avatar->getPosition());
_options.setOrientation(_avatar->getOrientation());
_injector->setOptions(_options);
}
void Player::setPlayFromCurrentLocation(bool playFromCurrentLocation) {
_playFromCurrentPosition = playFromCurrentLocation;
}
void Player::setLoop(bool loop) {
_loop = loop;
}
bool Player::computeCurrentFrame() {
if (!isPlaying()) {
_currentFrame = -1;
return false;
}
if (_currentFrame < 0) {
_currentFrame = 0;
}
while (_currentFrame < _recording->getFrameNumber() - 1 &&
_recording->getFrameTimestamp(_currentFrame) < _timer.elapsed()) {
++_currentFrame;
}
return true;
}
void writeRecordingToFile(RecordingPointer recording, QString filename) {
if (!recording || recording->getFrameNumber() < 1) {
qDebug() << "Can't save empty recording";
return;
}
qDebug() << "Writing recording to " << filename << ".";
QElapsedTimer timer;
QFile file(filename);
if (!file.open(QIODevice::WriteOnly)){
return;
}
timer.start();
QDataStream fileStream(&file);
fileStream << recording->_timestamps;
RecordingFrame& baseFrame = recording->_frames[0];
int totalLength = 0;
// Blendshape coefficients
fileStream << baseFrame._blendshapeCoefficients;
totalLength += baseFrame._blendshapeCoefficients.size();
// Joint Rotations
int jointRotationSize = baseFrame._jointRotations.size();
fileStream << jointRotationSize;
for (int i = 0; i < jointRotationSize; ++i) {
fileStream << baseFrame._jointRotations[i].x << baseFrame._jointRotations[i].y << baseFrame._jointRotations[i].z << baseFrame._jointRotations[i].w;
}
totalLength += jointRotationSize;
// Translation
fileStream << baseFrame._translation.x << baseFrame._translation.y << baseFrame._translation.z;
totalLength += 1;
// Rotation
fileStream << baseFrame._rotation.x << baseFrame._rotation.y << baseFrame._rotation.z << baseFrame._rotation.w;
totalLength += 1;
// Scale
fileStream << baseFrame._scale;
totalLength += 1;
// Head Rotation
fileStream << baseFrame._headRotation.x << baseFrame._headRotation.y << baseFrame._headRotation.z << baseFrame._headRotation.w;
totalLength += 1;
// Lean Sideways
fileStream << baseFrame._leanSideways;
totalLength += 1;
// Lean Forward
fileStream << baseFrame._leanForward;
totalLength += 1;
for (int i = 1; i < recording->_timestamps.size(); ++i) {
QBitArray mask(totalLength);
int maskIndex = 0;
QByteArray buffer;
QDataStream stream(&buffer, QIODevice::WriteOnly);
RecordingFrame& previousFrame = recording->_frames[i - 1];
RecordingFrame& frame = recording->_frames[i];
// Blendshape coefficients
for (int i = 0; i < frame._blendshapeCoefficients.size(); ++i) {
if (frame._blendshapeCoefficients[i] != previousFrame._blendshapeCoefficients[i]) {
stream << frame._blendshapeCoefficients[i];
mask.setBit(maskIndex);
}
maskIndex++;
}
// Joint Rotations
for (int i = 0; i < frame._jointRotations.size(); ++i) {
if (frame._jointRotations[i] != previousFrame._jointRotations[i]) {
stream << frame._jointRotations[i].x << frame._jointRotations[i].y << frame._jointRotations[i].z << frame._jointRotations[i].w;
mask.setBit(maskIndex);
}
maskIndex++;
}
// Translation
if (frame._translation != previousFrame._translation) {
stream << frame._translation.x << frame._translation.y << frame._translation.z;
mask.setBit(maskIndex);
}
maskIndex++;
// Rotation
if (frame._rotation != previousFrame._rotation) {
stream << frame._rotation.x << frame._rotation.y << frame._rotation.z << frame._rotation.w;
mask.setBit(maskIndex);
}
maskIndex++;
// Scale
if (frame._scale != previousFrame._scale) {
stream << frame._scale;
mask.setBit(maskIndex);
}
maskIndex++;
// Head Rotation
if (frame._headRotation != previousFrame._headRotation) {
stream << frame._headRotation.x << frame._headRotation.y << frame._headRotation.z << frame._headRotation.w;
mask.setBit(maskIndex);
}
maskIndex++;
// Lean Sideways
if (frame._leanSideways != previousFrame._leanSideways) {
stream << frame._leanSideways;
mask.setBit(maskIndex);
}
maskIndex++;
// Lean Forward
if (frame._leanForward != previousFrame._leanForward) {
stream << frame._leanForward;
mask.setBit(maskIndex);
}
maskIndex++;
fileStream << mask;
fileStream << buffer;
}
fileStream << recording->_audio->getByteArray();
qDebug() << "Wrote " << file.size() << " bytes in " << timer.elapsed() << " ms.";
}
RecordingPointer readRecordingFromFile(RecordingPointer recording, QString filename) {
QElapsedTimer timer;
timer.start();
QByteArray byteArray;
QUrl url(filename);
if (url.scheme() == "http" || url.scheme() == "https" || url.scheme() == "ftp") {
qDebug() << "Downloading recording at" << url;
NetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkReply* reply = networkAccessManager.get(QNetworkRequest(url));
QEventLoop loop;
QObject::connect(reply, SIGNAL(finished()), &loop, SLOT(quit()));
loop.exec();
if (reply->error() != QNetworkReply::NoError) {
qDebug() << "Error while downloading recording: " << reply->error();
reply->deleteLater();
return recording;
}
byteArray = reply->readAll();
reply->deleteLater();
} else {
qDebug() << "Reading recording from " << filename << ".";
QFile file(filename);
if (!file.open(QIODevice::ReadOnly)){
return recording;
}
byteArray = file.readAll();
file.close();
}
if (!recording) {
recording.reset(new Recording());
}
QDataStream fileStream(byteArray);
fileStream >> recording->_timestamps;
RecordingFrame baseFrame;
// Blendshape coefficients
fileStream >> baseFrame._blendshapeCoefficients;
// Joint Rotations
int jointRotationSize;
fileStream >> jointRotationSize;
baseFrame._jointRotations.resize(jointRotationSize);
for (int i = 0; i < jointRotationSize; ++i) {
fileStream >> baseFrame._jointRotations[i].x >> baseFrame._jointRotations[i].y >> baseFrame._jointRotations[i].z >> baseFrame._jointRotations[i].w;
}
fileStream >> baseFrame._translation.x >> baseFrame._translation.y >> baseFrame._translation.z;
fileStream >> baseFrame._rotation.x >> baseFrame._rotation.y >> baseFrame._rotation.z >> baseFrame._rotation.w;
fileStream >> baseFrame._scale;
fileStream >> baseFrame._headRotation.x >> baseFrame._headRotation.y >> baseFrame._headRotation.z >> baseFrame._headRotation.w;
fileStream >> baseFrame._leanSideways;
fileStream >> baseFrame._leanForward;
recording->_frames << baseFrame;
for (int i = 1; i < recording->_timestamps.size(); ++i) {
QBitArray mask;
QByteArray buffer;
QDataStream stream(&buffer, QIODevice::ReadOnly);
RecordingFrame frame;
RecordingFrame& previousFrame = recording->_frames.last();
fileStream >> mask;
fileStream >> buffer;
int maskIndex = 0;
// Blendshape Coefficients
frame._blendshapeCoefficients.resize(baseFrame._blendshapeCoefficients.size());
for (int i = 0; i < baseFrame._blendshapeCoefficients.size(); ++i) {
if (mask[maskIndex++]) {
stream >> frame._blendshapeCoefficients[i];
} else {
frame._blendshapeCoefficients[i] = previousFrame._blendshapeCoefficients[i];
}
}
// Joint Rotations
frame._jointRotations.resize(baseFrame._jointRotations.size());
for (int i = 0; i < baseFrame._jointRotations.size(); ++i) {
if (mask[maskIndex++]) {
stream >> frame._jointRotations[i].x >> frame._jointRotations[i].y >> frame._jointRotations[i].z >> frame._jointRotations[i].w;
} else {
frame._jointRotations[i] = previousFrame._jointRotations[i];
}
}
if (mask[maskIndex++]) {
stream >> frame._translation.x >> frame._translation.y >> frame._translation.z;
} else {
frame._translation = previousFrame._translation;
}
if (mask[maskIndex++]) {
stream >> frame._rotation.x >> frame._rotation.y >> frame._rotation.z >> frame._rotation.w;
} else {
frame._rotation = previousFrame._rotation;
}
if (mask[maskIndex++]) {
stream >> frame._scale;
} else {
frame._scale = previousFrame._scale;
}
if (mask[maskIndex++]) {
stream >> frame._headRotation.x >> frame._headRotation.y >> frame._headRotation.z >> frame._headRotation.w;
} else {
frame._headRotation = previousFrame._headRotation;
}
if (mask[maskIndex++]) {
stream >> frame._leanSideways;
} else {
frame._leanSideways = previousFrame._leanSideways;
}
if (mask[maskIndex++]) {
stream >> frame._leanForward;
} else {
frame._leanForward = previousFrame._leanForward;
}
recording->_frames << frame;
}
QByteArray audioArray;
fileStream >> audioArray;
recording->addAudioPacket(audioArray);
qDebug() << "Read " << byteArray.size() << " bytes in " << timer.elapsed() << " ms.";
return recording;
}

View file

@@ -12,98 +12,18 @@
#ifndef hifi_Recorder_h
#define hifi_Recorder_h
#include <QBitArray>
#include <QElapsedTimer>
#include <QHash>
#include <QSharedPointer>
#include <QVector>
#include <QWeakPointer>
#include "Recording.h"
#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>
#include <AudioInjector.h>
#include <SharedUtil.h>
#include <Sound.h>
template<class C>
class QSharedPointer;
class AttachmentData;
class AvatarData;
class Recorder;
class Recording;
class Player;
typedef QSharedPointer<Recording> RecordingPointer;
typedef QSharedPointer<Recorder> RecorderPointer;
typedef QWeakPointer<Recorder> WeakRecorderPointer;
typedef QSharedPointer<Player> PlayerPointer;
typedef QWeakPointer<Player> WeakPlayerPointer;
/// Stores the different values associated to one recording frame
class RecordingFrame {
public:
QVector<float> getBlendshapeCoefficients() const { return _blendshapeCoefficients; }
QVector<glm::quat> getJointRotations() const { return _jointRotations; }
glm::vec3 getTranslation() const { return _translation; }
glm::quat getRotation() const { return _rotation; }
float getScale() const { return _scale; }
glm::quat getHeadRotation() const { return _headRotation; }
float getLeanSideways() const { return _leanSideways; }
float getLeanForward() const { return _leanForward; }
protected:
void setBlendshapeCoefficients(QVector<float> blendshapeCoefficients);
void setJointRotations(QVector<glm::quat> jointRotations);
void setTranslation(glm::vec3 translation);
void setRotation(glm::quat rotation);
void setScale(float scale);
void setHeadRotation(glm::quat headRotation);
void setLeanSideways(float leanSideways);
void setLeanForward(float leanForward);
private:
QVector<float> _blendshapeCoefficients;
QVector<glm::quat> _jointRotations;
glm::vec3 _translation;
glm::quat _rotation;
float _scale;
glm::quat _headRotation;
float _leanSideways;
float _leanForward;
friend class Recorder;
friend void writeRecordingToFile(RecordingPointer recording, QString file);
friend RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file);
};
/// Stores a recording
class Recording {
public:
Recording();
~Recording();
bool isEmpty() const { return _timestamps.isEmpty(); }
int getLength() const; // in ms
int getFrameNumber() const { return _frames.size(); }
qint32 getFrameTimestamp(int i) const;
const RecordingFrame& getFrame(int i) const;
Sound* getAudio() const { return _audio; }
protected:
void addFrame(int timestamp, RecordingFrame& frame);
void addAudioPacket(QByteArray byteArray);
void clear();
private:
QVector<qint32> _timestamps;
QVector<RecordingFrame> _frames;
Sound* _audio;
friend class Recorder;
friend class Player;
friend void writeRecordingToFile(RecordingPointer recording, QString file);
friend RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file);
};
/// Records a recording
class Recorder {
@@ -129,54 +49,5 @@ private:
AvatarData* _avatar;
};
/// Plays back a recording
class Player {
public:
Player(AvatarData* avatar);
bool isPlaying() const;
qint64 elapsed() const;
RecordingPointer getRecording() const { return _recording; }
// Those should only be called if isPlaying() returns true
glm::quat getHeadRotation();
float getLeanSideways();
float getLeanForward();
public slots:
void startPlaying();
void stopPlaying();
void loadFromFile(QString file);
void loadRecording(RecordingPointer recording);
void play();
void setPlayFromCurrentLocation(bool playFromCurrentLocation);
void setLoop(bool loop);
private:
bool computeCurrentFrame();
QElapsedTimer _timer;
RecordingPointer _recording;
int _currentFrame;
QSharedPointer<AudioInjector> _injector;
AudioInjectorOptions _options;
AvatarData* _avatar;
QThread* _audioThread;
glm::vec3 _startingPosition;
glm::quat _startingRotation;
float _startingScale;
bool _playFromCurrentPosition;
bool _loop;
};
void writeRecordingToFile(RecordingPointer recording, QString file);
RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file);
#endif // hifi_Recorder_h

View file

@@ -0,0 +1,806 @@
//
// Recording.cpp
//
//
// Created by Clement on 9/17/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <GLMHelpers.h>
#include <NetworkAccessManager.h>
#include <NodeList.h>
#include <Sound.h>
#include <StreamUtils.h>
#include <QBitArray>
#include <QElapsedTimer>
#include <QEventLoop>
#include <QFile>
#include <QFileInfo>
#include <QMessageBox>
#include <QPair>
#include "AvatarData.h"
#include "Recording.h"
// HFR file format magic number (Inspired by PNG)
// (decimal) 17 72 70 82 13 10 26 10
// (hexadecimal) 11 48 46 52 0d 0a 1a 0a
// (ASCII C notation) \021 H F R \r \n \032 \n
static const int MAGIC_NUMBER_SIZE = 8;
static const char MAGIC_NUMBER[MAGIC_NUMBER_SIZE] = {17, 72, 70, 82, 13, 10, 26, 10};
// Version (Major, Minor)
static const QPair<quint8, quint8> VERSION(0, 1);
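// Illustration only, not part of this commit: a reader of this header would verify the
// magic number and version before trusting the rest of the file. A minimal sketch that
// mirrors the write order used by writeRecordingToFile() below; the function name
// isValidHfrHeader() is hypothetical and memcmp() requires <cstring>.
bool isValidHfrHeader(QFile& file, QDataStream& fileStream) {
    char magic[MAGIC_NUMBER_SIZE];
    if (file.read(magic, MAGIC_NUMBER_SIZE) != MAGIC_NUMBER_SIZE ||
        memcmp(magic, MAGIC_NUMBER, MAGIC_NUMBER_SIZE) != 0) {
        return false; // not an HFR file
    }
    QPair<quint8, quint8> version;
    fileStream >> version;
    if (version != VERSION) {
        return false; // unsupported format version
    }
    quint16 dataOffset;
    fileStream >> dataOffset; // absolute offset of the CONTEXT section
    return file.seek(dataOffset); // skips the data length, CRC-16 and metadata fields
}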
int SCALE_RADIX = 10;
int BLENDSHAPE_RADIX = 15;
int LEAN_RADIX = 7;
void RecordingFrame::setBlendshapeCoefficients(QVector<float> blendshapeCoefficients) {
_blendshapeCoefficients = blendshapeCoefficients;
}
Recording::Recording() : _audio(NULL) {
}
Recording::~Recording() {
delete _audio;
}
int Recording::getLength() const {
if (_timestamps.isEmpty()) {
return 0;
}
return _timestamps.last();
}
qint32 Recording::getFrameTimestamp(int i) const {
if (i >= _timestamps.size()) {
return getLength();
}
return _timestamps[i];
}
const RecordingFrame& Recording::getFrame(int i) const {
assert(i < _timestamps.size());
return _frames[i];
}
void Recording::addFrame(int timestamp, RecordingFrame &frame) {
_timestamps << timestamp;
_frames << frame;
}
void Recording::addAudioPacket(QByteArray byteArray) {
if (!_audio) {
_audio = new Sound(byteArray);
return;
}
_audio->append(byteArray);
}
void Recording::clear() {
_timestamps.clear();
_frames.clear();
delete _audio;
_audio = NULL;
}
void writeVec3(QDataStream& stream, glm::vec3 value) {
unsigned char buffer[sizeof(value)];
memcpy(buffer, &value, sizeof(value));
stream.writeRawData(reinterpret_cast<char*>(buffer), sizeof(value));
}
bool readVec3(QDataStream& stream, glm::vec3& value) {
unsigned char buffer[sizeof(value)];
stream.readRawData(reinterpret_cast<char*>(buffer), sizeof(value));
memcpy(&value, buffer, sizeof(value));
return true;
}
void writeQuat(QDataStream& stream, glm::quat value) {
unsigned char buffer[256];
int writtenToBuffer = packOrientationQuatToBytes(buffer, value);
stream.writeRawData(reinterpret_cast<char*>(buffer), writtenToBuffer);
}
bool readQuat(QDataStream& stream, glm::quat& value) {
int quatByteSize = 4 * 2; // 4 floats * 2 bytes
unsigned char buffer[256];
stream.readRawData(reinterpret_cast<char*>(buffer), quatByteSize);
int readFromBuffer = unpackOrientationQuatFromBytes(buffer, value);
if (readFromBuffer != quatByteSize) {
return false;
}
return true;
}
void writeFloat(QDataStream& stream, float value, int radix) {
unsigned char buffer[256];
int writtenToBuffer = packFloatScalarToSignedTwoByteFixed(buffer, value, radix);
stream.writeRawData(reinterpret_cast<char*>(buffer), writtenToBuffer);
}
bool readFloat(QDataStream& stream, float& value, int radix) {
int floatByteSize = 2; // 1 floats * 2 bytes
int16_t buffer[256];
stream.readRawData(reinterpret_cast<char*>(buffer), floatByteSize);
int readFromBuffer = unpackFloatScalarFromSignedTwoByteFixed(buffer, &value, radix);
if (readFromBuffer != floatByteSize) {
return false;
}
return true;
}
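// Note (illustration, not part of this commit): the radix arguments used with writeFloat()
// and readFloat() are fixed-point shifts, assuming packFloatScalarToSignedTwoByteFixed()
// stores roughly round(value * 2^radix) in an int16_t. Under that assumption:
//   SCALE_RADIX = 10      -> resolution ~1/1024,  range roughly +/-32
//   BLENDSHAPE_RADIX = 15 -> resolution ~1/32768, range roughly +/-1
//   LEAN_RADIX = 7        -> resolution ~1/128,   range roughly +/-256
// e.g. writeFloat(stream, 1.0f, SCALE_RADIX) would store the 16-bit value 1024.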
void writeRecordingToFile(RecordingPointer recording, QString filename) {
if (!recording || recording->getFrameNumber() < 1) {
qDebug() << "Can't save empty recording";
return;
}
QElapsedTimer timer;
QFile file(filename);
if (!file.open(QIODevice::ReadWrite | QIODevice::Truncate)){
qDebug() << "Couldn't open " << filename;
return;
}
timer.start();
qDebug() << "Writing recording to " << filename << ".";
QDataStream fileStream(&file);
// HEADER
file.write(MAGIC_NUMBER, MAGIC_NUMBER_SIZE); // Magic number
fileStream << VERSION; // File format version
const qint64 dataOffsetPos = file.pos();
fileStream << (quint16)0; // Save two empty bytes for the data offset
const qint64 dataLengthPos = file.pos();
fileStream << (quint32)0; // Save four empty bytes for the data length
const quint64 crc16Pos = file.pos();
fileStream << (quint16)0; // Save two empty bytes for the CRC-16
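// With QDataStream's default big-endian encoding, the writes above are expected to
// produce the following header layout (a sketch for reference, not a normative spec):
//   bytes  0-7   magic number "\021HFR\r\n\032\n"
//   bytes  8-9   file format version (major, minor)
//   bytes 10-11  data offset (quint16), patched just below
//   bytes 12-15  data length (quint32), patched once the data is written
//   bytes 16-17  CRC-16 of the data section, patched once the data is written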
// METADATA
// TODO
// Write data offset
quint16 dataOffset = file.pos();
file.seek(dataOffsetPos);
fileStream << dataOffset;
file.seek(dataOffset);
// CONTEXT
RecordingContext& context = recording->getContext();
// Global Timestamp
fileStream << context.globalTimestamp;
// Domain
fileStream << context.domain;
// Position
writeVec3(fileStream, context.position);
// Orientation
writeQuat(fileStream, context.orientation);
// Scale
writeFloat(fileStream, context.scale, SCALE_RADIX);
// Head model
fileStream << context.headModel;
// Skeleton model
fileStream << context.skeletonModel;
// Display name
fileStream << context.displayName;
// Attachments
fileStream << (quint8)context.attachments.size();
foreach (AttachmentData data, context.attachments) {
// Model
fileStream << data.modelURL.toString();
// Joint name
fileStream << data.jointName;
// Position
writeVec3(fileStream, data.translation);
// Orientation
writeQuat(fileStream, data.rotation);
// Scale
writeFloat(fileStream, data.scale, SCALE_RADIX);
}
// RECORDING
fileStream << recording->_timestamps;
QBitArray mask;
quint32 numBlendshapes = 0;
quint32 numJoints = 0;
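// Frames are delta-encoded: the mask carries one bit per field (blendshapes, joint
// rotations, then the scalar/vector fields), and a value is written to the frame
// buffer only when its bit is set, i.e. when it differs from the previous frame.
// Frame 0 writes everything. The recurring pattern, sketched for one float field:
//     if (i == 0 || value != previousValue) {
//         writeFloat(stream, value, radix);
//         mask.setBit(maskIndex);
//     }
//     maskIndex++;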
for (int i = 0; i < recording->_timestamps.size(); ++i) {
mask.fill(false);
int maskIndex = 0;
QByteArray buffer;
QDataStream stream(&buffer, QIODevice::WriteOnly);
RecordingFrame& previousFrame = recording->_frames[(i != 0) ? i - 1 : i];
RecordingFrame& frame = recording->_frames[i];
// Blendshape Coefficients
if (i == 0) {
numBlendshapes = frame.getBlendshapeCoefficients().size();
stream << numBlendshapes;
mask.resize(mask.size() + numBlendshapes);
}
for (int j = 0; j < numBlendshapes; ++j) {
if (i == 0 ||
frame._blendshapeCoefficients[j] != previousFrame._blendshapeCoefficients[j]) {
writeFloat(stream, frame.getBlendshapeCoefficients()[j], BLENDSHAPE_RADIX);
mask.setBit(maskIndex);
}
++maskIndex;
}
// Joint Rotations
if (i == 0) {
numJoints = frame.getJointRotations().size();
stream << numJoints;
mask.resize(mask.size() + numJoints);
}
for (int j = 0; j < numJoints; ++j) {
if (i == 0 ||
frame._jointRotations[j] != previousFrame._jointRotations[j]) {
writeQuat(stream, frame._jointRotations[j]);
mask.setBit(maskIndex);
}
maskIndex++;
}
// Translation
if (i == 0) {
mask.resize(mask.size() + 1);
}
if (i == 0 || frame._translation != previousFrame._translation) {
writeVec3(stream, frame._translation);
mask.setBit(maskIndex);
}
maskIndex++;
// Rotation
if (i == 0) {
mask.resize(mask.size() + 1);
}
if (i == 0 || frame._rotation != previousFrame._rotation) {
writeQuat(stream, frame._rotation);
mask.setBit(maskIndex);
}
maskIndex++;
// Scale
if (i == 0) {
mask.resize(mask.size() + 1);
}
if (i == 0 || frame._scale != previousFrame._scale) {
writeFloat(stream, frame._scale, SCALE_RADIX);
mask.setBit(maskIndex);
}
maskIndex++;
// Head Rotation
if (i == 0) {
mask.resize(mask.size() + 1);
}
if (i == 0 || frame._headRotation != previousFrame._headRotation) {
writeQuat(stream, frame._headRotation);
mask.setBit(maskIndex);
}
maskIndex++;
// Lean Sideways
if (i == 0) {
mask.resize(mask.size() + 1);
}
if (i == 0 || frame._leanSideways != previousFrame._leanSideways) {
writeFloat(stream, frame._leanSideways, LEAN_RADIX);
mask.setBit(maskIndex);
}
maskIndex++;
// Lean Forward
if (i == 0) {
mask.resize(mask.size() + 1);
}
if (i == 0 || frame._leanForward != previousFrame._leanForward) {
writeFloat(stream, frame._leanForward, LEAN_RADIX);
mask.setBit(maskIndex);
}
maskIndex++;
// LookAt Position
if (i == 0) {
mask.resize(mask.size() + 1);
}
if (i == 0 || frame._lookAtPosition != previousFrame._lookAtPosition) {
writeVec3(stream, frame._lookAtPosition);
mask.setBit(maskIndex);
}
maskIndex++;
fileStream << mask;
fileStream << buffer;
}
fileStream << recording->_audio->getByteArray();
qint64 writingTime = timer.restart();
// Write data length and CRC-16
quint32 dataLength = file.pos() - dataOffset;
file.seek(dataOffset); // Go to beginning of data for checksum
quint16 crc16 = qChecksum(file.readAll().constData(), dataLength);
file.seek(dataLengthPos);
fileStream << dataLength;
file.seek(crc16Pos);
fileStream << crc16;
file.seek(dataOffset + dataLength);
bool wantDebug = true;
if (wantDebug) {
qDebug() << "[DEBUG] WRITE recording";
qDebug() << "Header:";
qDebug() << "File Format version:" << VERSION;
qDebug() << "Data length:" << dataLength;
qDebug() << "Data offset:" << dataOffset;
qDebug() << "CRC-16:" << crc16;
qDebug() << "Context block:";
qDebug() << "Global timestamp:" << context.globalTimestamp;
qDebug() << "Domain:" << context.domain;
qDebug() << "Position:" << context.position;
qDebug() << "Orientation:" << context.orientation;
qDebug() << "Scale:" << context.scale;
qDebug() << "Head Model:" << context.headModel;
qDebug() << "Skeleton Model:" << context.skeletonModel;
qDebug() << "Display Name:" << context.displayName;
qDebug() << "Num Attachments:" << context.attachments.size();
for (int i = 0; i < context.attachments.size(); ++i) {
qDebug() << "Model URL:" << context.attachments[i].modelURL;
qDebug() << "Joint Name:" << context.attachments[i].jointName;
qDebug() << "Translation:" << context.attachments[i].translation;
qDebug() << "Rotation:" << context.attachments[i].rotation;
qDebug() << "Scale:" << context.attachments[i].scale;
}
qDebug() << "Recording:";
qDebug() << "Total frames:" << recording->getFrameNumber();
qDebug() << "Audio array:" << recording->getAudio()->getByteArray().size();
}
qint64 checksumTime = timer.elapsed();
qDebug() << "Wrote" << file.size() << "bytes in" << writtingTime + checksumTime << "ms. (" << checksumTime << "ms for checksum)";
}
RecordingPointer readRecordingFromFile(RecordingPointer recording, QString filename) {
QByteArray byteArray;
QUrl url(filename);
QElapsedTimer timer;
timer.start(); // timer used for debug information (download/parsing time)
// Acquire the data and place it in byteArray
// Return if data unavailable
if (url.scheme() == "http" || url.scheme() == "https" || url.scheme() == "ftp") {
// Download file if necessary
qDebug() << "Downloading recording at" << url;
NetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkReply* reply = networkAccessManager.get(QNetworkRequest(url));
QEventLoop loop;
QObject::connect(reply, SIGNAL(finished()), &loop, SLOT(quit()));
loop.exec(); // wait for file
if (reply->error() != QNetworkReply::NoError) {
qDebug() << "Error while downloading recording: " << reply->error();
reply->deleteLater();
return recording;
}
byteArray = reply->readAll();
reply->deleteLater();
// print debug + restart timer
qDebug() << "Downloaded " << byteArray.size() << " bytes in " << timer.restart() << " ms.";
} else {
// If local file, just read it.
qDebug() << "Reading recording from " << filename << ".";
QFile file(filename);
if (!file.open(QIODevice::ReadOnly)){
qDebug() << "Could not open local file: " << url;
return recording;
}
byteArray = file.readAll();
file.close();
}
if (filename.endsWith(".rec") || filename.endsWith(".REC")) {
qDebug() << "Old .rec format";
QMessageBox::warning(NULL,
QString("Old recording format"),
QString("Converting your file to the new format."),
QMessageBox::Ok);
readRecordingFromRecFile(recording, filename, byteArray);
return recording;
} else if (!filename.endsWith(".hfr") && !filename.endsWith(".HFR")) {
qDebug() << "File extension not recognized";
}
// Allocate a new recording if none was passed in
if (!recording) {
recording.reset(new Recording());
}
QDataStream fileStream(byteArray);
// HEADER
QByteArray magicNumber(MAGIC_NUMBER, MAGIC_NUMBER_SIZE);
if (!byteArray.startsWith(magicNumber)) {
qDebug() << "ERROR: This is not a .HFR file. (Magic Number incorrect)";
return recording;
}
fileStream.skipRawData(MAGIC_NUMBER_SIZE);
QPair<quint8, quint8> version;
fileStream >> version; // File format version
if (version != VERSION) {
qDebug() << "ERROR: This file format version is not supported.";
return recording;
}
quint16 dataOffset = 0;
fileStream >> dataOffset;
quint32 dataLength = 0;
fileStream >> dataLength;
quint16 crc16 = 0;
fileStream >> crc16;
// Check checksum
quint16 computedCRC16 = qChecksum(byteArray.constData() + dataOffset, dataLength);
if (computedCRC16 != crc16) {
qDebug() << "Checksum does not match. Bailling!";
recording.clear();
return recording;
}
// METADATA
// TODO
// CONTEXT
RecordingContext& context = recording->getContext();
// Global Timestamp
fileStream >> context.globalTimestamp;
// Domain
fileStream >> context.domain;
// Position
if (!readVec3(fileStream, context.position)) {
qDebug() << "Couldn't read file correctly. (Invalid vec3)";
recording.clear();
return recording;
}
// Orientation
if (!readQuat(fileStream, context.orientation)) {
qDebug() << "Couldn't read file correctly. (Invalid quat)";
recording.clear();
return recording;
}
// Scale
if (!readFloat(fileStream, context.scale, SCALE_RADIX)) {
qDebug() << "Couldn't read file correctly. (Invalid float)";
recording.clear();
return recording;
}
// Head model
fileStream >> context.headModel;
// Skeleton model
fileStream >> context.skeletonModel;
// Display Name
fileStream >> context.displayName;
// Attachments
quint8 numAttachments = 0;
fileStream >> numAttachments;
for (int i = 0; i < numAttachments; ++i) {
AttachmentData data;
// Model
QString modelURL;
fileStream >> modelURL;
data.modelURL = modelURL;
// Joint name
fileStream >> data.jointName;
// Translation
if (!readVec3(fileStream, data.translation)) {
qDebug() << "Couldn't read attachment correctly. (Invalid vec3)";
continue;
}
// Rotation
if (!readQuat(fileStream, data.rotation)) {
qDebug() << "Couldn't read attachment correctly. (Invalid quat)";
continue;
}
// Scale
if (!readFloat(fileStream, data.scale, SCALE_RADIX)) {
qDebug() << "Couldn't read attachment correctly. (Invalid float)";
continue;
}
context.attachments << data;
}
quint32 numBlendshapes = 0;
quint32 numJoints = 0;
// RECORDING
fileStream >> recording->_timestamps;
for (int i = 0; i < recording->_timestamps.size(); ++i) {
QBitArray mask;
QByteArray buffer;
QDataStream stream(&buffer, QIODevice::ReadOnly);
RecordingFrame frame;
RecordingFrame& previousFrame = (i == 0) ? frame : recording->_frames.last();
fileStream >> mask;
fileStream >> buffer;
int maskIndex = 0;
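// Decoding mirrors the delta encoding above: a set mask bit means a fresh value
// follows in the buffer; a cleared bit (or a failed read) means the field is
// copied from the previous frame.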
// Blendshape Coefficients
if (i == 0) {
stream >> numBlendshapes;
}
frame._blendshapeCoefficients.resize(numBlendshapes);
for (int j = 0; j < numBlendshapes; ++j) {
if (!mask[maskIndex++] || !readFloat(stream, frame._blendshapeCoefficients[j], BLENDSHAPE_RADIX)) {
frame._blendshapeCoefficients[j] = previousFrame._blendshapeCoefficients[j];
}
}
// Joint Rotations
if (i == 0) {
stream >> numJoints;
}
frame._jointRotations.resize(numJoints);
for (int j = 0; j < numJoints; ++j) {
if (!mask[maskIndex++] || !readQuat(stream, frame._jointRotations[j])) {
frame._jointRotations[j] = previousFrame._jointRotations[j];
}
}
if (!mask[maskIndex++] || !readVec3(stream, frame._translation)) {
frame._translation = previousFrame._translation;
}
if (!mask[maskIndex++] || !readQuat(stream, frame._rotation)) {
frame._rotation = previousFrame._rotation;
}
if (!mask[maskIndex++] || !readFloat(stream, frame._scale, SCALE_RADIX)) {
frame._scale = previousFrame._scale;
}
if (!mask[maskIndex++] || !readQuat(stream, frame._headRotation)) {
frame._headRotation = previousFrame._headRotation;
}
if (!mask[maskIndex++] || !readFloat(stream, frame._leanSideways, LEAN_RADIX)) {
frame._leanSideways = previousFrame._leanSideways;
}
if (!mask[maskIndex++] || !readFloat(stream, frame._leanForward, LEAN_RADIX)) {
frame._leanForward = previousFrame._leanForward;
}
if (!mask[maskIndex++] || !readVec3(stream, frame._lookAtPosition)) {
frame._lookAtPosition = previousFrame._lookAtPosition;
}
recording->_frames << frame;
}
QByteArray audioArray;
fileStream >> audioArray;
recording->addAudioPacket(audioArray);
bool wantDebug = true;
if (wantDebug) {
qDebug() << "[DEBUG] READ recording";
qDebug() << "Header:";
qDebug() << "File Format version:" << VERSION;
qDebug() << "Data length:" << dataLength;
qDebug() << "Data offset:" << dataOffset;
qDebug() << "CRC-16:" << crc16;
qDebug() << "Context block:";
qDebug() << "Global timestamp:" << context.globalTimestamp;
qDebug() << "Domain:" << context.domain;
qDebug() << "Position:" << context.position;
qDebug() << "Orientation:" << context.orientation;
qDebug() << "Scale:" << context.scale;
qDebug() << "Head Model:" << context.headModel;
qDebug() << "Skeleton Model:" << context.skeletonModel;
qDebug() << "Display Name:" << context.displayName;
qDebug() << "Num Attachments:" << numAttachments;
for (int i = 0; i < numAttachments; ++i) {
qDebug() << "Model URL:" << context.attachments[i].modelURL;
qDebug() << "Joint Name:" << context.attachments[i].jointName;
qDebug() << "Translation:" << context.attachments[i].translation;
qDebug() << "Rotation:" << context.attachments[i].rotation;
qDebug() << "Scale:" << context.attachments[i].scale;
}
qDebug() << "Recording:";
qDebug() << "Total frames:" << recording->getFrameNumber();
qDebug() << "Audio array:" << recording->getAudio()->getByteArray().size();
}
qDebug() << "Read " << byteArray.size() << " bytes in " << timer.elapsed() << " ms.";
return recording;
}
RecordingPointer readRecordingFromRecFile(RecordingPointer recording, QString filename, QByteArray byteArray) {
QElapsedTimer timer;
timer.start();
if (!recording) {
recording.reset(new Recording());
}
QDataStream fileStream(byteArray);
fileStream >> recording->_timestamps;
RecordingFrame baseFrame;
// Blendshape coefficients
fileStream >> baseFrame._blendshapeCoefficients;
// Joint Rotations
int jointRotationSize;
fileStream >> jointRotationSize;
baseFrame._jointRotations.resize(jointRotationSize);
for (int i = 0; i < jointRotationSize; ++i) {
fileStream >> baseFrame._jointRotations[i].x >> baseFrame._jointRotations[i].y >> baseFrame._jointRotations[i].z >> baseFrame._jointRotations[i].w;
}
fileStream >> baseFrame._translation.x >> baseFrame._translation.y >> baseFrame._translation.z;
fileStream >> baseFrame._rotation.x >> baseFrame._rotation.y >> baseFrame._rotation.z >> baseFrame._rotation.w;
fileStream >> baseFrame._scale;
fileStream >> baseFrame._headRotation.x >> baseFrame._headRotation.y >> baseFrame._headRotation.z >> baseFrame._headRotation.w;
fileStream >> baseFrame._leanSideways;
fileStream >> baseFrame._leanForward;
// Fake context
RecordingContext& context = recording->getContext();
context.globalTimestamp = usecTimestampNow();
context.domain = NodeList::getInstance()->getDomainHandler().getHostname();
context.position = glm::vec3(144.5f, 3.3f, 181.3f);
context.orientation = glm::angleAxis(glm::radians(-92.5f), glm::vec3(0, 1, 0));
context.scale = baseFrame._scale;
context.headModel = "http://public.highfidelity.io/models/heads/Emily_v4.fst";
context.skeletonModel = "http://public.highfidelity.io/models/skeletons/EmilyCutMesh_A.fst";
context.displayName = "Leslie";
context.attachments.clear();
AttachmentData data;
data.modelURL = "http://public.highfidelity.io/models/attachments/fbx.fst";
data.jointName = "RightHand" ;
data.translation = glm::vec3(0.04f, 0.07f, 0.0f);
data.rotation = glm::angleAxis(glm::radians(102.0f), glm::vec3(0, 1, 0));
data.scale = 0.20f;
context.attachments << data;
context.orientationInv = glm::inverse(context.orientation);
baseFrame._translation = glm::vec3();
baseFrame._rotation = glm::quat();
baseFrame._scale = 1.0f;
recording->_frames << baseFrame;
for (int i = 1; i < recording->_timestamps.size(); ++i) {
QBitArray mask;
QByteArray buffer;
QDataStream stream(&buffer, QIODevice::ReadOnly);
RecordingFrame frame;
RecordingFrame& previousFrame = recording->_frames.last();
fileStream >> mask;
fileStream >> buffer;
int maskIndex = 0;
// Blendshape Coefficients
frame._blendshapeCoefficients.resize(baseFrame._blendshapeCoefficients.size());
for (int j = 0; j < baseFrame._blendshapeCoefficients.size(); ++j) {
if (mask[maskIndex++]) {
stream >> frame._blendshapeCoefficients[j];
} else {
frame._blendshapeCoefficients[j] = previousFrame._blendshapeCoefficients[j];
}
}
// Joint Rotations
frame._jointRotations.resize(baseFrame._jointRotations.size());
for (int j = 0; j < baseFrame._jointRotations.size(); ++j) {
if (mask[maskIndex++]) {
stream >> frame._jointRotations[j].x >> frame._jointRotations[j].y >> frame._jointRotations[j].z >> frame._jointRotations[j].w;
} else {
frame._jointRotations[j] = previousFrame._jointRotations[j];
}
}
if (mask[maskIndex++]) {
stream >> frame._translation.x >> frame._translation.y >> frame._translation.z;
frame._translation = context.orientationInv * frame._translation;
} else {
frame._translation = previousFrame._translation;
}
if (mask[maskIndex++]) {
stream >> frame._rotation.x >> frame._rotation.y >> frame._rotation.z >> frame._rotation.w;
} else {
frame._rotation = previousFrame._rotation;
}
if (mask[maskIndex++]) {
stream >> frame._scale;
} else {
frame._scale = previousFrame._scale;
}
if (mask[maskIndex++]) {
stream >> frame._headRotation.x >> frame._headRotation.y >> frame._headRotation.z >> frame._headRotation.w;
} else {
frame._headRotation = previousFrame._headRotation;
}
if (mask[maskIndex++]) {
stream >> frame._leanSideways;
} else {
frame._leanSideways = previousFrame._leanSideways;
}
if (mask[maskIndex++]) {
stream >> frame._leanForward;
} else {
frame._leanForward = previousFrame._leanForward;
}
recording->_frames << frame;
}
QByteArray audioArray;
fileStream >> audioArray;
// Cut down audio if necessary
int SAMPLE_RATE = 48000; // 48 kHz
int SAMPLE_SIZE = 2; // 16 bits
int MSEC_PER_SEC = 1000;
int audioLength = recording->getLength() * SAMPLE_SIZE * (SAMPLE_RATE / MSEC_PER_SEC);
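// getLength() is in ms, so this keeps SAMPLE_RATE / MSEC_PER_SEC = 48 samples per ms
// at SAMPLE_SIZE = 2 bytes each (assuming mono 16-bit audio); e.g. a 10,000 ms
// recording keeps 10,000 * 48 * 2 = 960,000 bytes.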
audioArray.chop(audioArray.size() - audioLength);
recording->addAudioPacket(audioArray);
qDebug() << "Read " << byteArray.size() << " bytes in " << timer.elapsed() << " ms.";
// Set new filename
if (filename.startsWith("http") || filename.startsWith("https") || filename.startsWith("ftp")) {
filename = QUrl(filename).fileName();
}
if (filename.endsWith(".rec") || filename.endsWith(".REC")) {
filename.chop(qstrlen(".rec"));
}
filename.append(".hfr");
filename = QFileInfo(filename).absoluteFilePath();
// Set recording to new format
writeRecordingToFile(recording, filename);
QMessageBox::warning(NULL,
QString("New recording location"),
QString("The new recording was saved at:\n" + filename),
QMessageBox::Ok);
qDebug() << "Recording has been successfully converted at" << filename;
return recording;
}

View file

@@ -0,0 +1,127 @@
//
// Recording.h
//
//
// Created by Clement on 9/17/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_Recording_h
#define hifi_Recording_h
#include <QString>
#include <QVector>
#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>
template<class C>
class QSharedPointer;
class AttachmentData;
class Recording;
class RecordingFrame;
class Sound;
typedef QSharedPointer<Recording> RecordingPointer;
/// Stores a recording's static data
class RecordingContext {
public:
quint64 globalTimestamp;
QString domain;
glm::vec3 position;
glm::quat orientation;
float scale;
QString headModel;
QString skeletonModel;
QString displayName;
QVector<AttachmentData> attachments;
// This avoids recomputation every frame while recording.
glm::quat orientationInv;
};
/// Stores a recording
class Recording {
public:
Recording();
~Recording();
bool isEmpty() const { return _timestamps.isEmpty(); }
int getLength() const; // in ms
RecordingContext& getContext() { return _context; }
int getFrameNumber() const { return _frames.size(); }
qint32 getFrameTimestamp(int i) const;
const RecordingFrame& getFrame(int i) const;
Sound* getAudio() const { return _audio; }
protected:
void addFrame(int timestamp, RecordingFrame& frame);
void addAudioPacket(QByteArray byteArray);
void clear();
private:
RecordingContext _context;
QVector<qint32> _timestamps;
QVector<RecordingFrame> _frames;
Sound* _audio;
friend class Recorder;
friend class Player;
friend void writeRecordingToFile(RecordingPointer recording, QString file);
friend RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file);
friend RecordingPointer readRecordingFromRecFile(RecordingPointer recording, QString filename, QByteArray byteArray);
};
/// Stores the different values associated with one recording frame
class RecordingFrame {
public:
QVector<float> getBlendshapeCoefficients() const { return _blendshapeCoefficients; }
QVector<glm::quat> getJointRotations() const { return _jointRotations; }
glm::vec3 getTranslation() const { return _translation; }
glm::quat getRotation() const { return _rotation; }
float getScale() const { return _scale; }
glm::quat getHeadRotation() const { return _headRotation; }
float getLeanSideways() const { return _leanSideways; }
float getLeanForward() const { return _leanForward; }
glm::vec3 getLookAtPosition() const { return _lookAtPosition; }
protected:
void setBlendshapeCoefficients(QVector<float> blendshapeCoefficients);
void setJointRotations(QVector<glm::quat> jointRotations) { _jointRotations = jointRotations; }
void setTranslation(glm::vec3 translation) { _translation = translation; }
void setRotation(glm::quat rotation) { _rotation = rotation; }
void setScale(float scale) { _scale = scale; }
void setHeadRotation(glm::quat headRotation) { _headRotation = headRotation; }
void setLeanSideways(float leanSideways) { _leanSideways = leanSideways; }
void setLeanForward(float leanForward) { _leanForward = leanForward; }
void setLookAtPosition(glm::vec3 lookAtPosition) { _lookAtPosition = lookAtPosition; }
private:
QVector<float> _blendshapeCoefficients;
QVector<glm::quat> _jointRotations;
glm::vec3 _translation;
glm::quat _rotation;
float _scale;
glm::quat _headRotation;
float _leanSideways;
float _leanForward;
glm::vec3 _lookAtPosition;
friend class Recorder;
friend void writeRecordingToFile(RecordingPointer recording, QString file);
friend RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file);
friend RecordingPointer readRecordingFromRecFile(RecordingPointer recording, QString filename, QByteArray byteArray);
};
void writeRecordingToFile(RecordingPointer recording, QString filename);
RecordingPointer readRecordingFromFile(RecordingPointer recording, QString filename);
RecordingPointer readRecordingFromRecFile(RecordingPointer recording, QString filename, QByteArray byteArray);
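// A minimal usage sketch (hypothetical calling code; an empty RecordingPointer asks
// the reader to allocate the Recording itself):
//     RecordingPointer recording;
//     recording = readRecordingFromFile(recording, "/path/to/session.hfr");
//     if (recording && !recording->isEmpty()) {
//         writeRecordingToFile(recording, "/path/to/copy.hfr");
//     }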
#endif // hifi_Recording_h

View file

@@ -83,7 +83,7 @@ int Referential::pack(unsigned char* destinationBuffer) const {
int Referential::unpack(const unsigned char* sourceBuffer) {
const unsigned char* startPosition = sourceBuffer;
_type = (Type)*sourceBuffer++;
if (_type < 0 || _type >= NUM_TYPE) {
if (_type < 0 || _type >= NUM_TYPES) {
_type = UNKNOWN;
}
memcpy(&_version, sourceBuffer, sizeof(_version));

View file

@@ -26,7 +26,7 @@ public:
JOINT,
AVATAR,
NUM_TYPE
NUM_TYPES
};
Referential(const unsigned char*& sourceBuffer, AvatarData* avatar);

View file

@@ -25,8 +25,6 @@
const bool VERBOSE_HTTP_REQUEST_DEBUGGING = false;
const QByteArray ACCESS_TOKEN_AUTHORIZATION_HEADER = "Authorization";
AccountManager& AccountManager::getInstance() {
static AccountManager sharedInstance;
return sharedInstance;

View file

@@ -37,6 +37,8 @@ public:
QString updateSlot;
};
const QByteArray ACCESS_TOKEN_AUTHORIZATION_HEADER = "Authorization";
class AccountManager : public QObject {
Q_OBJECT
public:

View file

@@ -0,0 +1,43 @@
//
// OAuthNetworkAccessManager.cpp
// libraries/networking/src
//
// Created by Stephen Birarda on 2014-09-18.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QNetworkAccessManager>
#include <QNetworkRequest>
#include <QThreadStorage>
#include "AccountManager.h"
#include "OAuthNetworkAccessManager.h"
QThreadStorage<OAuthNetworkAccessManager*> oauthNetworkAccessManagers;
OAuthNetworkAccessManager* OAuthNetworkAccessManager::getInstance() {
if (!oauthNetworkAccessManagers.hasLocalData()) {
oauthNetworkAccessManagers.setLocalData(new OAuthNetworkAccessManager());
}
return oauthNetworkAccessManagers.localData();
}
QNetworkReply* OAuthNetworkAccessManager::createRequest(QNetworkAccessManager::Operation op, const QNetworkRequest& req,
QIODevice* outgoingData) {
AccountManager& accountManager = AccountManager::getInstance();
if (accountManager.hasValidAccessToken()) {
QNetworkRequest authenticatedRequest(req);
authenticatedRequest.setRawHeader(ACCESS_TOKEN_AUTHORIZATION_HEADER,
accountManager.getAccountInfo().getAccessToken().authorizationHeaderValue());
return QNetworkAccessManager::createRequest(op, authenticatedRequest, outgoingData);
} else {
return QNetworkAccessManager::createRequest(op, req, outgoingData);
}
}

View file

@@ -0,0 +1,24 @@
//
// OAuthNetworkAccessManager.h
// libraries/networking/src
//
// Created by Stephen Birarda on 2014-09-18.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_OAuthNetworkAccessManager_h
#define hifi_OAuthNetworkAccessManager_h
#include <QNetworkAccessManager>
class OAuthNetworkAccessManager : public QNetworkAccessManager {
public:
static OAuthNetworkAccessManager* getInstance();
protected:
virtual QNetworkReply* createRequest(Operation op, const QNetworkRequest& req, QIODevice* outgoingData = 0);
};
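// A minimal usage sketch (hypothetical calling code and URL): requests made through
// this manager automatically carry the account's OAuth access token when one is set.
//     QNetworkReply* reply = OAuthNetworkAccessManager::getInstance()->get(
//         QNetworkRequest(QUrl("https://metaverse.example.com/api/v1/user/locations")));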
#endif // hifi_OAuthNetworkAccessManager_h