Merge pull request #7023 from ericrius1/acSoundSearcher

AC Sound Searcher
James B. Pollack 2016-02-08 17:54:15 -08:00
commit ad36e84e24
4 changed files with 282 additions and 1 deletion


@@ -0,0 +1,218 @@
//
// ACAudioSearchAndInject.js
// audio
//
// Created by Eric Levin 2/1/2016
// Copyright 2016 High Fidelity, Inc.
// This AC script searches for special sound entities near avatars and plays those sounds based on information specified in each entity's
// userData field (see acAudioSearchCompatibleEntitySpawner.js for an example)
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
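//
// As a rough illustration, using the values from the spawner example further down, a compatible entity's userData might look like:
//
// {
//     "soundKey": {
//         "url": "http://hifi-content.s3.amazonaws.com/DomainContent/Junkyard/Sounds/ClothSail/cloth_sail3.L.wav",
//         "volume": 0.3,
//         "loop": false,
//         "playbackGap": 2000,
//         "playbackGapRange": 500
//     }
// }
//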
Script.include("https://rawgit.com/highfidelity/hifi/master/examples/libraries/utils.js");
var SOUND_DATA_KEY = "soundKey";
var QUERY_RADIUS = 50;
EntityViewer.setKeyholeRadius(QUERY_RADIUS);
Entities.setPacketsPerSecond(6000);
Agent.isAvatar = true;
var DEFAULT_SOUND_DATA = {
volume: 0.5,
loop: false,
playbackGap: 1000, // in ms
playbackGapRange: 0 // in ms
};
var MIN_PLAYBACK_GAP = 0;
var UPDATE_TIME = 100;
var EXPIRATION_TIME = 5000;
var soundEntityMap = {};
var soundUrls = {};
var avatarPositions = [];
function update() {
var avatars = AvatarList.getAvatarIdentifiers();
for (var i = 0; i < avatars.length; i++) {
var avatar = AvatarList.getAvatar(avatars[i]);
var avatarPosition = avatar.position;
if (!avatarPosition) {
continue;
}
EntityViewer.setPosition(avatarPosition);
EntityViewer.queryOctree();
avatarPositions.push(avatarPosition);
}
Script.setTimeout(function() {
avatarPositions.forEach(function(avatarPosition) {
var entities = Entities.findEntities(avatarPosition, QUERY_RADIUS);
handleFoundSoundEntities(entities);
});
// Now wipe the list for the next query
avatarPositions = [];
}, UPDATE_TIME);
handleActiveSoundEntities();
}
function handleActiveSoundEntities() {
// Go through all our sound entities; if one has passed its expiration time, remove it from the map
for (var potentialSoundEntity in soundEntityMap) {
if (!soundEntityMap.hasOwnProperty(potentialSoundEntity)) {
// The current property is not a direct property of soundEntityMap so ignore it
continue;
}
var soundEntity = potentialSoundEntity;
var soundProperties = soundEntityMap[soundEntity];
soundProperties.timeWithoutAvatarInRange += UPDATE_TIME;
if (soundProperties.timeWithoutAvatarInRange > EXPIRATION_TIME && soundProperties.soundInjector) {
// An avatar hasn't been within range of this sound entity recently, so remove it from map
soundProperties.soundInjector.stop();
delete soundEntityMap[soundEntity];
} else if (soundProperties.isDownloaded) {
// If this sound hasn't expired yet, we want to potentially play it!
if (soundProperties.readyToPlay) {
var newPosition = Entities.getEntityProperties(soundEntity, "position").position;
if (!soundProperties.soundInjector) {
soundProperties.soundInjector = Audio.playSound(soundProperties.sound, {
volume: soundProperties.volume,
position: newPosition,
loop: soundProperties.loop
});
} else {
soundProperties.soundInjector.restart();
}
soundProperties.readyToPlay = false;
} else if (soundProperties.sound && soundProperties.loop === false) {
// We need to check all of our entities that are not looping but have an interval associated with them
// to see if it's time for them to play again
soundProperties.timeSinceLastPlay += UPDATE_TIME;
if (soundProperties.timeSinceLastPlay > soundProperties.clipDuration + soundProperties.currentPlaybackGap) {
soundProperties.readyToPlay = true;
soundProperties.timeSinceLastPlay = 0;
// Now let's get our new current interval
soundProperties.currentPlaybackGap = soundProperties.playbackGap + randFloat(-soundProperties.playbackGapRange, soundProperties.playbackGapRange);
soundProperties.currentPlaybackGap = Math.max(MIN_PLAYBACK_GAP, soundProperties.currentPlaybackGap);
}
}
}
}
}
function handleFoundSoundEntities(entities) {
entities.forEach(function(entity) {
var soundData = getEntityCustomData(SOUND_DATA_KEY, entity);
if (soundData && soundData.url) {
// Check the sound entity map; if this entity isn't in it yet, add it
if (!soundEntityMap[entity]) {
var soundProperties = {
url: soundData.url,
volume: soundData.volume || DEFAULT_SOUND_DATA.volume,
loop: soundData.loop || DEFAULT_SOUND_DATA.loop,
playbackGap: soundData.playbackGap || DEFAULT_SOUND_DATA.playbackGap,
playbackGapRange: soundData.playbackGapRange || DEFAULT_SOUND_DATA.playbackGapRange,
readyToPlay: false,
position: Entities.getEntityProperties(entity, "position").position,
timeSinceLastPlay: 0,
timeWithoutAvatarInRange: 0,
isDownloaded: false
};
soundProperties.currentPlaybackGap = soundProperties.playbackGap + randFloat(-soundProperties.playbackGapRange, soundProperties.playbackGapRange);
soundProperties.currentPlaybackGap = Math.max(MIN_PLAYBACK_GAP, soundProperties.currentPlaybackGap);
soundEntityMap[entity] = soundProperties;
if (!soundUrls[soundData.url]) {
// We need to download sound before we add it to our map
var sound = SoundCache.getSound(soundData.url);
// Only add it to map once it's downloaded
soundUrls[soundData.url] = sound;
sound.ready.connect(function() {
soundProperties.sound = sound;
soundProperties.readyToPlay = true;
soundProperties.isDownloaded = true;
soundProperties.clipDuration = sound.duration * 1000;
soundEntityMap[entity] = soundProperties;
});
} else {
// We already have sound downloaded, so just add it to map right away
soundProperties.sound = soundUrls[soundData.url];
soundProperties.clipDuration = soundProperties.sound.duration * 1000;
soundProperties.readyToPlay = true;
soundProperties.isDownloaded = true;
soundEntityMap[entity] = soundProperties;
}
} else {
// If this sound is in our map already, reset its timeWithoutAvatarInRange
// Also check whether the entity has been updated with new sound data; if so, update our copy
soundEntityMap[entity].timeWithoutAvatarInRange = 0;
checkForSoundPropertyChanges(soundEntityMap[entity], soundData);
}
}
});
}
function checkForSoundPropertyChanges(currentProps, newProps) {
var needsNewInjector = false;
if (currentProps.playbackGap !== newProps.playbackGap && !currentProps.loop) {
// playbackGap only applies to non-looping sounds
currentProps.playbackGap = newProps.playbackGap;
currentProps.currentPlaybackGap = currentProps.playbackGap + randFloat(-currentProps.playbackGapRange, currentProps.playbackGapRange);
currentProps.currentPlaybackGap = Math.max(MIN_PLAYBACK_GAP, currentProps.currentPlaybackGap);
currentProps.readyToPlay = true;
}
if (currentProps.playbackGapRange !== newProps.playbackGapRange) {
currentProps.playbackGapRange = newProps.playbackGapRange;
currentProps.currentPlaybackGap = currentProps.playbackGap + randFloat(-currentProps.playbackGapRange, currentProps.playbackGapRange);
currentProps.currentPlaybackGap = Math.max(MIN_PLAYBACK_GAP, currentProps.currentPlaybackGap);
currentProps.readyToPlay = true;
}
if (currentProps.volume !== newProps.volume) {
currentProps.volume = newProps.volume;
needsNewInjector = true;
}
if (currentProps.url !== newProps.url) {
currentProps.url = newProps.url;
currentProps.sound = null;
if (!soundUrls[currentProps.url]) {
var sound = SoundCache.getSound(currentProps.url);
currentProps.isDownloaded = false;
sound.ready.connect(function() {
currentProps.sound = sound;
currentProps.clipDuration = sound.duration * 1000;
currentProps.isDownloaded = true;
});
} else {
// We already have this sound cached, so grab it from the url map
currentProps.sound = soundUrls[currentProps.url];
currentProps.clipDuration = currentProps.sound.duration * 1000;
}
needsNewInjector = true;
}
if (currentProps.loop !== newProps.loop) {
currentProps.loop = newProps.loop;
needsNewInjector = true;
}
if (needsNewInjector) {
// If an injector is already playing (for example a looping one), stop it so the new settings are applied
if (currentProps.soundInjector) {
currentProps.soundInjector.stop();
}
currentProps.soundInjector = null;
currentProps.readyToPlay = true;
}
}
Script.setInterval(update, UPDATE_TIME);
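Any entity can opt into this behavior by carrying the soundKey field in its userData. The spawner script below creates a new box entity that way; as a minimal sketch, an existing entity could be tagged similarly, assuming the setEntityCustomData helper from the included utils.js and a hypothetical targetEntityID and soundURL:

    Script.include("../../libraries/utils.js");
    // Sketch only: targetEntityID and soundURL are placeholders, not values from this change
    setEntityCustomData("soundKey", targetEntityID, {
        url: soundURL,
        volume: 0.5,
        loop: false,
        playbackGap: 1000,     // ms to wait between plays of a non-looping clip
        playbackGapRange: 0    // ms of random variation added to playbackGap
    });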


@@ -0,0 +1,57 @@
//
// acAudioSearchCompatibleEntitySpawner.js
// audio/acAudioSearching
//
// Created by Eric Levin 2/2/2016
// Copyright 2016 High Fidelity, Inc.
// This is a client script which spawns entities with a field in their userData compatible with the ACAudioSearchAndInject.js script
// These entities specify data about the sound they want to play, such as the url, volume, and whether or not to loop
// The position of the entity determines where the sound plays from
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("../../libraries/utils.js");
var orientation = Camera.getOrientation();
orientation = Quat.safeEulerAngles(orientation);
orientation.x = 0;
orientation = Quat.fromVec3Degrees(orientation);
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(orientation)));
// http://hifi-public.s3.amazonaws.com/ryan/demo/0619_Fireplace__Tree_B.L.wav
var SOUND_DATA_KEY = "soundKey";
var userData = {
soundKey: {
url: "http://hifi-content.s3.amazonaws.com/DomainContent/Junkyard/Sounds/ClothSail/cloth_sail3.L.wav",
volume: 0.3,
loop: false,
playbackGap: 2000, // In ms - time to wait in between clip plays
playbackGapRange: 500 // In ms - the range to wait in between clip plays
}
};
var entityProps = {
type: "Box",
position: center,
color: {
red: 200,
green: 10,
blue: 200
},
dimensions: {
x: 0.1,
y: 0.1,
z: 0.1
},
userData: JSON.stringify(userData)
};
var soundEntity = Entities.addEntity(entityProps);
function cleanup() {
Entities.deleteEntity(soundEntity);
}
Script.scriptEnding.connect(cleanup);
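With the settings above, the AC script replays this non-looping clip every clipDuration + currentPlaybackGap milliseconds, redrawing currentPlaybackGap each time. A rough sketch of that computation using these spawner values and an assumed 3-second clip (randFloat comes from utils.js):

    var playbackGap = 2000;       // ms, from the userData above
    var playbackGapRange = 500;   // ms, from the userData above
    var clipDuration = 3000;      // ms; assumed clip length, for illustration only
    var currentPlaybackGap = Math.max(0, playbackGap + randFloat(-playbackGapRange, playbackGapRange));
    // currentPlaybackGap lands in [1500, 2500], so the clip restarts roughly every 4.5 to 5.5 seconds
    print("next replay in about " + (clipDuration + currentPlaybackGap) + " ms");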


@@ -258,6 +258,8 @@ void Sound::interpretAsWav(const QByteArray& inputAudioByteArray, QByteArray& ou
qCDebug(audio) << "Error reading WAV file";
}
_duration = (float) (outputAudioByteArraySize / (fileHeader.wave.sampleRate * fileHeader.wave.numChannels * fileHeader.wave.bitsPerSample / 8.0f));
} else {
qCDebug(audio) << "Could not read wav audio file header.";
return;
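The new _duration is simply the WAV payload size divided by the stream's byte rate (sampleRate * numChannels * bitsPerSample / 8). A quick sanity check of that arithmetic with assumed values, not taken from this change:

    // 16-bit stereo at 44100 Hz gives a byte rate of 44100 * 2 * 16 / 8 = 176400 bytes per second
    var sampleRate = 44100;
    var numChannels = 2;
    var bitsPerSample = 16;
    var payloadBytes = 1764000;   // assumed payload size, for illustration only
    var durationSeconds = payloadBytes / (sampleRate * numChannels * bitsPerSample / 8);   // 1764000 / 176400 = 10 seconds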


@@ -22,12 +22,15 @@ class Sound : public Resource {
Q_OBJECT
Q_PROPERTY(bool downloaded READ isReady)
Q_PROPERTY(float duration READ getDuration)
public:
Sound(const QUrl& url, bool isStereo = false);
bool isStereo() const { return _isStereo; }
bool isReady() const { return _isReady; }
float getDuration() { return _duration; }
const QByteArray& getByteArray() { return _byteArray; }
signals:
@@ -37,6 +40,7 @@ private:
QByteArray _byteArray;
bool _isStereo;
bool _isReady;
float _duration; // In seconds
void downSample(const QByteArray& rawAudioByteArray);
void interpretAsWav(const QByteArray& inputAudioByteArray, QByteArray& outputAudioByteArray);
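The duration Q_PROPERTY is what lets scripts (like the AC script above) read a clip's length directly. A minimal sketch of that usage, with a placeholder URL:

    var sound = SoundCache.getSound("http://example.com/some-clip.wav");   // placeholder URL
    sound.ready.connect(function() {
        // duration is reported in seconds; the AC script multiplies by 1000 to work in ms
        print("clip length: " + sound.duration * 1000 + " ms");
    });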