Fixing issues found in AC playback test
commit 98cc7408b1 (parent fd3599acc5)
8 changed files with 67 additions and 49 deletions
@@ -177,16 +177,24 @@ void Agent::run() {
     _scriptEngine->setParent(this); // be the parent of the script engine so it gets moved when we do

     // setup an Avatar for the script to use
-    ScriptableAvatar scriptedAvatar(_scriptEngine.get());
-    scriptedAvatar.setForceFaceTrackerConnected(true);
+    auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
+    connect(_scriptEngine.get(), SIGNAL(update(float)), scriptedAvatar.data(), SLOT(update(float)), Qt::ConnectionType::QueuedConnection);
+    scriptedAvatar->setForceFaceTrackerConnected(true);

     // call model URL setters with empty URLs so our avatar, if user, will have the default models
-    scriptedAvatar.setFaceModelURL(QUrl());
-    scriptedAvatar.setSkeletonModelURL(QUrl());
+    scriptedAvatar->setFaceModelURL(QUrl());
+    scriptedAvatar->setSkeletonModelURL(QUrl());

     // give this AvatarData object to the script engine
-    auto scriptedAvatarPtr = &scriptedAvatar;
-    setAvatarData(&scriptedAvatar, "Avatar");
+    _scriptEngine->registerGlobalObject("Avatar", scriptedAvatar.data());
+
+    using namespace recording;
+    static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
+    // FIXME how to deal with driving multiple avatars locally?
+    Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [this, scriptedAvatar](Frame::ConstPointer frame) {
+        AvatarData::fromFrame(frame->data, *scriptedAvatar);
+    });

     using namespace recording;
     static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::AUDIO_FRAME_NAME);
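Note: the hunk above replaces the stack-allocated ScriptableAvatar with a shared instance owned by DependencyManager (registered once in AssignmentClient, fetched here with DependencyManager::get<ScriptableAvatar>()). Below is a minimal, self-contained sketch of that set-once / get-anywhere pattern; ToyDependencyManager and ToyScriptableAvatar are illustrative stand-ins, not the engine's real classes.

    #include <cassert>
    #include <memory>
    #include <typeindex>
    #include <unordered_map>
    #include <utility>

    // Toy stand-in for the registry: one shared instance per type.
    class ToyDependencyManager {
    public:
        template <typename T, typename... Args>
        static std::shared_ptr<T> set(Args&&... args) {
            auto instance = std::make_shared<T>(std::forward<Args>(args)...);
            registry()[std::type_index(typeid(T))] = instance;
            return instance;
        }
        template <typename T>
        static std::shared_ptr<T> get() {
            auto it = registry().find(std::type_index(typeid(T)));
            return it == registry().end() ? nullptr
                                          : std::static_pointer_cast<T>(it->second);
        }
    private:
        static std::unordered_map<std::type_index, std::shared_ptr<void>>& registry() {
            static std::unordered_map<std::type_index, std::shared_ptr<void>> map;
            return map;
        }
    };

    struct ToyScriptableAvatar {
        bool faceTrackerConnected = false;
    };

    int main() {
        // Startup code (the AssignmentClient constructor in this commit) registers the instance...
        ToyDependencyManager::set<ToyScriptableAvatar>();
        // ...and later code (Agent::run above) fetches the same object instead of
        // constructing its own local ScriptableAvatar.
        auto avatar = ToyDependencyManager::get<ToyScriptableAvatar>();
        avatar->faceTrackerConnected = true;
        assert(ToyDependencyManager::get<ToyScriptableAvatar>()->faceTrackerConnected);
        return 0;
    }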
@@ -194,8 +202,8 @@ void Agent::run() {
         const QByteArray& audio = frame->data;
         static quint16 audioSequenceNumber{ 0 };
         Transform audioTransform;
-        audioTransform.setTranslation(scriptedAvatar.getPosition());
-        audioTransform.setRotation(scriptedAvatar.getOrientation());
+        audioTransform.setTranslation(scriptedAvatar->getPosition());
+        audioTransform.setRotation(scriptedAvatar->getOrientation());
         AbstractAudioInterface::emitAudioPacket(audio.data(), audio.size(), audioSequenceNumber, audioTransform, PacketType::MicrophoneAudioNoEcho);
     });

@@ -240,7 +248,8 @@ void Agent::run() {

     _scriptEngine->run();

-    Frame::registerFrameHandler(AUDIO_FRAME_TYPE, [](Frame::ConstPointer frame) {});
+    Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
+    Frame::clearFrameHandler(AVATAR_FRAME_TYPE);

     setFinished(true);
 }

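Note: the hunk above replaces the old workaround of "clearing" the audio handler by overwriting it with an empty lambda. Both the audio and avatar handlers capture this and the scripted avatar, so they must be removed once the script finishes or a late playback frame would call into torn-down state. A toy sketch of that register/dispatch/clear lifecycle follows, using hypothetical stand-ins rather than the real recording library.

    #include <cstdint>
    #include <functional>
    #include <iostream>
    #include <map>
    #include <vector>

    using FrameType = uint32_t;
    using FrameHandler = std::function<void(const std::vector<uint8_t>&)>;

    static std::map<FrameType, FrameHandler> g_handlers;  // toy handler registry

    void registerFrameHandler(FrameType type, FrameHandler handler) { g_handlers[type] = std::move(handler); }
    void clearFrameHandler(FrameType type) { g_handlers.erase(type); }
    void dispatchFrame(FrameType type, const std::vector<uint8_t>& data) {
        auto it = g_handlers.find(type);
        if (it != g_handlers.end()) { it->second(data); }
    }

    int main() {
        const FrameType AVATAR_FRAME_TYPE = 1;
        registerFrameHandler(AVATAR_FRAME_TYPE, [](const std::vector<uint8_t>& data) {
            std::cout << "avatar frame, " << data.size() << " bytes\n";
        });
        dispatchFrame(AVATAR_FRAME_TYPE, {1, 2, 3});  // handled while the script runs

        clearFrameHandler(AVATAR_FRAME_TYPE);         // teardown, as in the new code
        dispatchFrame(AVATAR_FRAME_TYPE, {4, 5, 6});  // silently ignored after cleanup
        return 0;
    }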
@@ -278,40 +287,30 @@ void Agent::setIsAvatar(bool isAvatar) {
     }
 }

-void Agent::setAvatarData(AvatarData* avatarData, const QString& objectName) {
-    _avatarData = avatarData;
-    _scriptEngine->registerGlobalObject(objectName, avatarData);
-
-    using namespace recording;
-    static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
-    // FIXME how to deal with driving multiple avatars locally?
-    Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [this](Frame::ConstPointer frame) {
-        AvatarData::fromFrame(frame->data, *_avatarData);
-    });
-}
-
 void Agent::sendAvatarIdentityPacket() {
-    if (_isAvatar && _avatarData) {
-        _avatarData->sendIdentityPacket();
+    if (_isAvatar) {
+        auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
+        scriptedAvatar->sendIdentityPacket();
     }
 }

 void Agent::sendAvatarBillboardPacket() {
-    if (_isAvatar && _avatarData) {
-        _avatarData->sendBillboardPacket();
+    if (_isAvatar) {
+        auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
+        scriptedAvatar->sendBillboardPacket();
     }
 }


 void Agent::processAgentAvatarAndAudio(float deltaTime) {
-    if (!_scriptEngine->isFinished() && _isAvatar && _avatarData) {
-
+    if (!_scriptEngine->isFinished() && _isAvatar) {
+        auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
         const int SCRIPT_AUDIO_BUFFER_SAMPLES = floor(((SCRIPT_DATA_CALLBACK_USECS * AudioConstants::SAMPLE_RATE)
             / (1000 * 1000)) + 0.5);
         const int SCRIPT_AUDIO_BUFFER_BYTES = SCRIPT_AUDIO_BUFFER_SAMPLES * sizeof(int16_t);

-        QByteArray avatarByteArray = _avatarData->toByteArray(true, randFloat() < AVATAR_SEND_FULL_UPDATE_RATIO);
-        _avatarData->doneEncoding(true);
+        QByteArray avatarByteArray = scriptedAvatar->toByteArray(true, randFloat() < AVATAR_SEND_FULL_UPDATE_RATIO);
+        scriptedAvatar->doneEncoding(true);

         static AvatarDataSequenceNumber sequenceNumber = 0;
         auto avatarPacket = NLPacket::create(PacketType::AvatarData, avatarByteArray.size() + sizeof(sequenceNumber));

@@ -376,8 +375,8 @@ void Agent::processAgentAvatarAndAudio(float deltaTime) {
             audioPacket->writePrimitive(SCRIPT_AUDIO_BUFFER_SAMPLES);

             // use the orientation and position of this avatar for the source of this audio
-            audioPacket->writePrimitive(_avatarData->getPosition());
-            glm::quat headOrientation = _avatarData->getHeadOrientation();
+            audioPacket->writePrimitive(scriptedAvatar->getPosition());
+            glm::quat headOrientation = scriptedAvatar->getHeadOrientation();
             audioPacket->writePrimitive(headOrientation);

         }else if (nextSoundOutput) {

@@ -385,8 +384,8 @@ void Agent::processAgentAvatarAndAudio(float deltaTime) {
             audioPacket->writePrimitive((quint8)0);

             // use the orientation and position of this avatar for the source of this audio
-            audioPacket->writePrimitive(_avatarData->getPosition());
-            glm::quat headOrientation = _avatarData->getHeadOrientation();
+            audioPacket->writePrimitive(scriptedAvatar->getPosition());
+            glm::quat headOrientation = scriptedAvatar->getHeadOrientation();
             audioPacket->writePrimitive(headOrientation);

             // write the raw audio data

@@ -70,13 +70,11 @@ private:
     MixedAudioStream _receivedAudioStream;
     float _lastReceivedAudioLoudness;

-    void setAvatarData(AvatarData* avatarData, const QString& objectName);
     void setAvatarSound(Sound* avatarSound) { _avatarSound = avatarSound; }

     void sendAvatarIdentityPacket();
     void sendAvatarBillboardPacket();

-    AvatarData* _avatarData = nullptr;
     bool _isListeningToAudioStream = false;
     Sound* _avatarSound = nullptr;
     int _numAvatarSoundSentBytes = 0;

@@ -35,6 +35,7 @@
 #include "AssignmentActionFactory.h"

 #include "AssignmentClient.h"
+#include "avatars/ScriptableAvatar.h"

 const QString ASSIGNMENT_CLIENT_TARGET_NAME = "assignment-client";
 const long long ASSIGNMENT_REQUEST_INTERVAL_MSECS = 1 * 1000;

@@ -48,6 +49,7 @@ AssignmentClient::AssignmentClient(Assignment::Type requestAssignmentType, QStri

     QSettings::setDefaultFormat(QSettings::IniFormat);

+    auto scriptableAvatar = DependencyManager::set<ScriptableAvatar>();
     auto addressManager = DependencyManager::set<AddressManager>();

     // create a NodeList as an unassigned client, must be after addressManager

@@ -15,10 +15,6 @@

 #include "ScriptableAvatar.h"

-ScriptableAvatar::ScriptableAvatar(ScriptEngine* scriptEngine) : _scriptEngine(scriptEngine), _animation(NULL) {
-    connect(_scriptEngine, SIGNAL(update(float)), this, SLOT(update(float)));
-}
-
 // hold and priority unused but kept so that client side JS can run.
 void ScriptableAvatar::startAnimation(const QString& url, float fps, float priority,
                                       bool loop, bool hold, float firstFrame, float lastFrame, const QStringList& maskedJoints) {

@@ -16,11 +16,10 @@
 #include <AvatarData.h>
 #include <ScriptEngine.h>

-class ScriptableAvatar : public AvatarData {
+class ScriptableAvatar : public AvatarData, public Dependency{
     Q_OBJECT
 public:
-    ScriptableAvatar(ScriptEngine* scriptEngine);

     /// Allows scripts to run animations.
     Q_INVOKABLE void startAnimation(const QString& url, float fps = 30.0f, float priority = 1.0f, bool loop = false,
         bool hold = false, float firstFrame = 0.0f, float lastFrame = FLT_MAX, const QStringList& maskedJoints = QStringList());

@@ -31,7 +30,6 @@ private slots:
     void update(float deltatime);

 private:
-    ScriptEngine* _scriptEngine;
     AnimationPointer _animation;
     AnimationDetails _animationDetails;
     QStringList _maskedJoints;

examples/tests/playbackAcTest.js (new file, 24 additions)
@@ -0,0 +1,24 @@
+"use strict";
+
+var origin = {x: 512, y: 512, z: 512};
+var millisecondsToWaitBeforeStarting = 2 * 1000; // To give the various servers a chance to start.
+var millisecondsToWaitBeforeEnding = 30 * 1000;
+
+Avatar.skeletonModelURL = "https://hifi-public.s3.amazonaws.com/marketplace/contents/dd03b8e3-52fb-4ab3-9ac9-3b17e00cd85d/98baa90b3b66803c5d7bd4537fca6993.fst"; //lovejoy
+Avatar.displayName = "AC Avatar";
+Agent.isAvatar = true;
+
+Script.setTimeout(function () {
+    Avatar.position = origin;
+    Recording.loadRecording("d:/hifi.rec");
+    Recording.setPlayerLoop(true);
+    Recording.startPlaying();
+}, millisecondsToWaitBeforeStarting);
+
+
+Script.setTimeout(function () {
+    print("Stopping script");
+    Agent.isAvatar = false;
+    Recording.stopPlaying();
+    Script.stop();
+}, millisecondsToWaitBeforeEnding);

@@ -72,16 +72,17 @@ FileFrameHeaderList parseFrameHeaders(uchar* const start, const qint64& size) {
         results.push_back(header);
     }
     qDebug() << "Parsed source data into " << results.size() << " frames";
-    int i = 0;
-    for (const auto& frameHeader : results) {
-        qDebug() << "Frame " << i++ << " time " << frameHeader.timeOffset;
-    }
+    // int i = 0;
+    // for (const auto& frameHeader : results) {
+    //     qDebug() << "Frame " << i++ << " time " << frameHeader.timeOffset << " Type " << frameHeader.type;
+    // }
     return results;
 }


 FileClip::FileClip(const QString& fileName) : _file(fileName) {
     auto size = _file.size();
+    qDebug() << "Opening file of size: " << size;
     bool opened = _file.open(QIODevice::ReadOnly);
     if (!opened) {
         qCWarning(recordingLog) << "Unable to open file " << fileName;

@@ -117,7 +117,7 @@ Transform Transform::fromJson(const QJsonValue& json) {
         result.setTranslation(vec3FromJsonValue(obj[JSON_TRANSLATION]));
     }
     if (obj.contains(JSON_SCALE)) {
-        result.setScale(vec3FromJsonValue(obj[JSON_TRANSLATION]));
+        result.setScale(vec3FromJsonValue(obj[JSON_SCALE]));
     }
     return result;
 }
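Note: the final hunk fixes a copy/paste slip in Transform::fromJson: the scale vector was being read from the translation key, so a non-unit scale stored in a recording was silently replaced by the translation on load. A small Qt-based sketch of the corrected lookup follows; the key names and the vec3 helper are illustrative assumptions, not the engine's actual constants.

    #include <QDebug>
    #include <QJsonArray>
    #include <QJsonDocument>
    #include <QJsonObject>

    struct Vec3 { double x, y, z; };

    // Illustrative stand-in for the engine's vec3FromJsonValue helper.
    static Vec3 vec3FromJsonValue(const QJsonValue& value) {
        const QJsonArray a = value.toArray();
        return { a[0].toDouble(), a[1].toDouble(), a[2].toDouble() };
    }

    int main() {
        // Hypothetical key names; the real constants live in Transform.cpp.
        const QByteArray json = R"({ "translation": [1, 2, 3], "scale": [2, 2, 2] })";
        const QJsonObject obj = QJsonDocument::fromJson(json).object();

        // Before the fix the scale was read from "translation" and came back as (1, 2, 3);
        // reading it from its own key restores the stored value.
        Vec3 scale = vec3FromJsonValue(obj["scale"]);
        qDebug() << "scale =" << scale.x << scale.y << scale.z;  // 2 2 2
        return 0;
    }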