content/hifi-content/DomainContent/Event/Scripts/actorRecordingAC.js

//
// actorRecordingAC.js
//
// Based on BetterClientSimulationBotFromRecording.js
// by Brad Hefta-Gaub on 2/6/17.
//
// Created by Thijs Wenker on 4/14/17
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// FIXME - currently, setting an avatar while playing a recording doesn't work; the assignment is ignored
var USE_AVATAR_MODEL_FROM_RECORDING = false;
var USE_DISPLAY_NAME_FROM_RECORDING = false;
// please enable this when possible (RC38 and later)
var USE_LOADRECORDING_CALLBACK = false;
var WANT_DEBUGGING = false;
var INVISIBLE_AVATAR_URL = 'https://hifi-content.s3.amazonaws.com/ozan/dev/avatars/invisible_avatar/invisible_avatar.fst';
var COORDINATOR_CHANNEL = 'COORDINATOR-AC';
var ACTOR_CHANNEL_PREFIX = 'ACTOR-PLAYBACK-AC_';
var actorChannel = ACTOR_CHANNEL_PREFIX + Agent.sessionUUID;
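// Control-flow overview (inferred from the handlers below): the coordinator broadcasts 'init' on
// COORDINATOR_CHANNEL, each actor answers with 'request', and the coordinator then sends this actor a
// JSON payload on its private actorChannel containing (at least) position, avatarURL, displayName and
// recordingURL fields.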
var initialized = false;
var playFromCurrentLocation = true;
var loop = true;
Agent.isAvatar = true;
Avatar.skeletonModelURL = INVISIBLE_AVATAR_URL;
Avatar.position = {x: 0, y: 0, z: 0};
Avatar.orientation = Quat.fromPitchYawRollDegrees(0, 0, 0);
Avatar.scale = 1.0;
// Make the agent "listen" to the audio stream to add audio-mixer load. Technically this isn't needed
// while playing a recording, but a non-recording bot does need it, so we leave it enabled.
Agent.isListeningToAudioStream = true;
Messages.subscribe(actorChannel);
Messages.subscribe(COORDINATOR_CHANNEL);
Messages.messageReceived.connect(function (channel, message, senderID) {
    if (channel === COORDINATOR_CHANNEL) {
        if (message === 'init') {
            print('init, sending request!');
            Messages.sendMessage(COORDINATOR_CHANNEL, 'request');
        } else {
            try {
                // coordinator JSON payloads are parsed but not currently acted on
                var jsonData = JSON.parse(message);
                if (Recording.isPlaying()) {
                    // placeholder - nothing to do yet while a recording is playing
                }
            } catch (e) {
                // ignore coordinator messages that aren't valid JSON
            }
        }
    } else if (channel === actorChannel) {
        try {
            print('parsing actor!');
            var actor = JSON.parse(message);
            if (Recording.isPlaying()) {
                print('Stopping recording which is already playing');
                Recording.stopPlaying();
            }
            if (playFromCurrentLocation) {
                Avatar.position = actor.position;
            }
            // Make the agent "listen" to the audio stream to add audio-mixer load. Technically this isn't needed
            // while playing a recording, but a non-recording bot does need it, so we leave it enabled.
            Agent.isListeningToAudioStream = true;
            Avatar.skeletonModelURL = USE_AVATAR_MODEL_FROM_RECORDING ? INVISIBLE_AVATAR_URL : actor.avatarURL;
            if (!USE_DISPLAY_NAME_FROM_RECORDING) {
                print('setting displayName to: ' + actor.displayName);
                Avatar.displayName = actor.displayName;
            }
            // FIXME: new recording load method
            var loadRecordingSuccessful = function () {
                print('loading successful, playing:');
                Recording.setPlayFromCurrentLocation(playFromCurrentLocation);
                Recording.setPlayerLoop(loop);
                Recording.setPlayerUseDisplayName(USE_DISPLAY_NAME_FROM_RECORDING);
                Recording.setPlayerUseAttachments(true);
                Recording.setPlayerUseHeadModel(false);
                // FIXME - this would allow you to override the recording avatar, but that's not currently working
                Recording.setPlayerUseSkeletonModel(USE_AVATAR_MODEL_FROM_RECORDING);
                Recording.startPlaying();
                Vec3.print("Playing from ", Avatar.position);
            };
            if (USE_LOADRECORDING_CALLBACK) {
                Recording.loadRecording(actor.recordingURL, function (success) {
                    if (success) {
                        loadRecordingSuccessful();
                    } else {
                        print('Loading recording failed, trying again.');
                        Messages.sendMessage(COORDINATOR_CHANNEL, 'request');
                    }
                });
            } else {
                Recording.loadRecording(actor.recordingURL);
                // allow 5 seconds for the recording to load before starting playback
                Script.setTimeout(loadRecordingSuccessful, 5000);
            }
            initialized = true;
        } catch (e) {
            print('Failed to parse JSON: ' + message);
        }
    }
});
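// For reference, a minimal coordinator-side sketch (not part of this script; field names are inferred
// from the handler above and the URLs are placeholders):
// Messages.sendMessage(ACTOR_CHANNEL_PREFIX + actorSessionUUID, JSON.stringify({
//     position: { x: 0, y: 0, z: 0 },
//     avatarURL: 'https://example.com/avatar.fst',
//     displayName: 'Actor 1',
//     recordingURL: 'https://example.com/performance.hfr'
// }));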
Script.setTimeout(function () {
    if (!initialized) {
        Messages.sendMessage(COORDINATOR_CHANNEL, 'request');
    }
}, 1000);
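// The update() handler below runs once per script update frame (roughly 60 Hz for an assignment-client
// agent, so 300 frames is about 5 seconds of waiting for the audio mixer to connect). After the counter
// runs out, and only if WANT_DEBUGGING is enabled, the avatar transform and outbound data rates are
// printed roughly every 100 frames.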
var count = 300; // number of update frames to wait for the audio mixer to connect
function update(event) {
    if (!initialized) {
        return;
    }
    if (count > 0) {
        count--;
        return;
    }
    if (count === 0) {
        count--;
    } else if (WANT_DEBUGGING) {
        count = 100;
        Vec3.print("Avatar at: ", Avatar.position);
        Quat.print("Avatar head orientation: ", Avatar.headOrientation);
        print("outbound:"
            + " GP: " + Avatar.getDataRate("globalPositionOutbound").toFixed(2) + "\n"
            + " LP: " + Avatar.getDataRate("localPositionOutbound").toFixed(2) + "\n"
            + " BB: " + Avatar.getDataRate("avatarBoundingBoxOutbound").toFixed(2) + "\n"
            + " AO: " + Avatar.getDataRate("avatarOrientationOutbound").toFixed(2) + "\n"
            + " AS: " + Avatar.getDataRate("avatarScaleOutbound").toFixed(2) + "\n"
            + " LA: " + Avatar.getDataRate("lookAtPositionOutbound").toFixed(2) + "\n"
            + " AL: " + Avatar.getDataRate("audioLoudnessOutbound").toFixed(2) + "\n"
            + " SW: " + Avatar.getDataRate("sensorToWorkMatrixOutbound").toFixed(2) + "\n"
            + " AF: " + Avatar.getDataRate("additionalFlagsOutbound").toFixed(2) + "\n"
            + " PI: " + Avatar.getDataRate("parentInfoOutbound").toFixed(2) + "\n"
            + " FT: " + Avatar.getDataRate("faceTrackerOutbound").toFixed(2) + "\n"
            + " JD: " + Avatar.getDataRate("jointDataOutbound").toFixed(2));
    }
    if (!Recording.isPlaying()) {
        Script.update.disconnect(update);
    }
}
Script.update.connect(update);
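// Deployment note (assumption, not part of the original script): this is intended to run as an
// assignment-client Agent script, e.g. added as a persistent script in the domain server's settings,
// alongside a coordinator script that hands out recordings over COORDINATOR_CHANNEL.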