Merge pull request #13637 from ctrlaltdavid/M08513-a

Audio API JSDoc
MiladNazeri 2018-08-03 16:38:03 -07:00 committed by GitHub
commit a7a8214983
8 changed files with 323 additions and 89 deletions

View file

@@ -26,7 +26,7 @@ class Audio : public AudioScriptingInterface, protected ReadWriteLockable {
SINGLETON_DEPENDENCY
/**jsdoc
* The Audio API features tools to help control audio contexts and settings.
* The <code>Audio</code> API provides facilities to interact with audio inputs and outputs and to play sounds.
*
* @namespace Audio
*
@@ -35,14 +35,23 @@ class Audio : public AudioScriptingInterface, protected ReadWriteLockable {
* @hifi-server-entity
* @hifi-assignment-client
*
* @property {boolean} muted
* @property {boolean} noiseReduction
* @property {number} inputVolume
* @property {number} inputLevel <em>Read-only.</em>
* @property {string} context <em>Read-only.</em>
* @property {} devices <em>Read-only.</em>
* @property {boolean} muted - <code>true</code> if the audio input is muted, otherwise <code>false</code>.
* @property {boolean} noiseReduction - <code>true</code> if noise reduction is enabled, otherwise <code>false</code>. When
* enabled, the input audio signal is blocked (fully attenuated) when it falls below an adaptive threshold set just
* above the noise floor.
* @property {number} inputLevel - The loudness of the audio input, range <code>0.0</code> (no sound) &ndash;
* <code>1.0</code> (the onset of clipping). <em>Read-only.</em>
* @property {number} inputVolume - Adjusts the volume of the input audio; range <code>0.0</code> &ndash; <code>1.0</code>.
* The value actually applied depends on the input device: for example, the volume can't be changed on some devices,
* and others might only support values of <code>0.0</code> and <code>1.0</code>.
* @property {boolean} isStereoInput - <code>true</code> if the input audio is being used in stereo, otherwise
* <code>false</code>. Some devices do not support stereo, in which case the value is always <code>false</code>.
* @property {string} context - The current context of the audio: either <code>"Desktop"</code> or <code>"HMD"</code>.
* <em>Read-only.</em>
* @property {object} devices <em>Read-only.</em> <strong>Deprecated:</strong> This property is deprecated and will be
* removed.
*/
Q_PROPERTY(bool muted READ isMuted WRITE setMuted NOTIFY mutedChanged)
Q_PROPERTY(bool noiseReduction READ noiseReductionEnabled WRITE enableNoiseReduction NOTIFY noiseReductionChanged)
Q_PROPERTY(float inputVolume READ getInputVolume WRITE setInputVolume NOTIFY inputVolumeChanged)
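A minimal usage sketch (not part of this commit) exercising the properties documented above; the volume value is illustrative.

// Report the current state of the audio input.
print("Muted: " + Audio.muted);
print("Noise reduction: " + Audio.noiseReduction);
print("Input level: " + Audio.inputLevel);
print("Audio context: " + Audio.context);

// Request a lower input volume; the value actually applied depends on the device.
Audio.inputVolume = 0.5;
print("Input volume now: " + Audio.inputVolume);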
@@ -69,45 +78,91 @@ public:
/**jsdoc
* @function Audio.setInputDevice
* @param {} device
* @param {object} device
* @param {boolean} isHMD
* @deprecated This function is deprecated and will be removed.
*/
Q_INVOKABLE void setInputDevice(const QAudioDeviceInfo& device, bool isHMD);
/**jsdoc
* @function Audio.setOutputDevice
* @param {} device
* @param {object} device
* @param {boolean} isHMD
* @deprecated This function is deprecated and will be removed.
*/
Q_INVOKABLE void setOutputDevice(const QAudioDeviceInfo& device, bool isHMD);
/**jsdoc
* Enable or disable reverberation. Reverberation is done by the client, on the post-mix audio. The reverberation options
* come either from the domain's audio zone, if used &mdash; configured on the server &mdash; or from values set by
* {@link Audio.setReverbOptions|setReverbOptions}.
* @function Audio.setReverb
* @param {boolean} enable
*/
* @param {boolean} enable - <code>true</code> to enable reverberation, <code>false</code> to disable.
* @example <caption>Enable reverberation for a short while.</caption>
* var sound = SoundCache.getSound(Script.resourcesPath() + "sounds/sample.wav");
* var injector;
* var injectorOptions = {
* position: MyAvatar.position
* };
*
* Script.setTimeout(function () {
* print("Reverb OFF");
* Audio.setReverb(false);
* injector = Audio.playSound(sound, injectorOptions);
* }, 1000);
*
* Script.setTimeout(function () {
* var reverbOptions = new AudioEffectOptions();
* reverbOptions.roomSize = 100;
* Audio.setReverbOptions(reverbOptions);
* print("Reverb ON");
* Audio.setReverb(true);
* }, 4000);
*
* Script.setTimeout(function () {
* print("Reverb OFF");
* Audio.setReverb(false);
* }, 8000);
*/
Q_INVOKABLE void setReverb(bool enable);
/**jsdoc
* Configure reverberation options. Use {@link Audio.setReverb|setReverb} to enable or disable reverberation.
* @function Audio.setReverbOptions
* @param {AudioEffectOptions} options
* @param {AudioEffectOptions} options - The reverberation options.
*/
Q_INVOKABLE void setReverbOptions(const AudioEffectOptions* options);
/**jsdoc
* Starts making an audio recording of the audio being played in-world (i.e., not local-only audio) to a file in WAV format.
* @function Audio.startRecording
* @param {string} filename
* @returns {boolean}
* @param {string} filename - The path and name of the file to make the recording in. Should have a <code>.wav</code>
* extension. The file is overwritten if it already exists.
* @returns {boolean} <code>true</code> if the specified file could be opened and audio recording has started, otherwise
* <code>false</code>.
* @example <caption>Make a 10 second audio recording.</caption>
* var filename = File.getTempDir() + "/audio.wav";
* if (Audio.startRecording(filename)) {
* Script.setTimeout(function () {
* Audio.stopRecording();
* print("Audio recording made in: " + filename);
* }, 10000);
*
* } else {
* print("Could not make an audio recording in: " + filename);
* }
*/
Q_INVOKABLE bool startRecording(const QString& filename);
/**jsdoc
* Finish making an audio recording started with {@link Audio.startRecording|startRecording}.
* @function Audio.stopRecording
*/
Q_INVOKABLE void stopRecording();
/**jsdoc
* Check whether an audio recording is currently being made.
* @function Audio.getRecording
* @returns {boolean}
* @returns {boolean} <code>true</code> if an audio recording is currently being made, otherwise <code>false</code>.
*/
Q_INVOKABLE bool getRecording();
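A small sketch (not part of this commit) using getRecording to avoid starting a second recording; the file path follows the startRecording example above.

var filename = File.getTempDir() + "/audio.wav";
if (Audio.getRecording()) {
    print("A recording is already in progress.");
} else if (Audio.startRecording(filename)) {
    print("Recording to: " + filename);
}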
@@ -116,40 +171,54 @@ signals:
/**jsdoc
* @function Audio.nop
* @returns {Signal}
* @deprecated This signal is deprecated and will be removed.
*/
void nop();
/**jsdoc
* Triggered when the audio input is muted or unmuted.
* @function Audio.mutedChanged
* @param {boolean} isMuted
* @param {boolean} isMuted - <code>true</code> if the audio input is muted, otherwise <code>false</code>.
* @returns {Signal}
* @example <caption>Report when audio input is muted or unmuted</caption>
* Audio.mutedChanged.connect(function (isMuted) {
* print("Audio muted: " + isMuted);
* });
*/
void mutedChanged(bool isMuted);
/**jsdoc
* Triggered when the audio input noise reduction is enabled or disabled.
* @function Audio.noiseReductionChanged
* @param {boolean} isEnabled
* @param {boolean} isEnabled - <code>true</code> if audio input noise reduction is enabled, otherwise <code>false</code>.
* @returns {Signal}
*/
void noiseReductionChanged(bool isEnabled);
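A sketch (not part of this commit) mirroring the mutedChanged example above.

Audio.noiseReductionChanged.connect(function (isEnabled) {
    print("Noise reduction enabled: " + isEnabled);
});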
/**jsdoc
* Triggered when the input audio volume changes.
* @function Audio.inputVolumeChanged
* @param {number} volume
* @param {number} volume - The requested volume to be applied to the audio input, range <code>0.0</code> &ndash;
* <code>1.0</code>. The resulting value of <code>Audio.inputVolume</code> depends on the capabilities of the device:
* for example, the volume can't be changed on some devices, and others might only support values of <code>0.0</code>
* and <code>1.0</code>.
* @returns {Signal}
*/
void inputVolumeChanged(float volume);
/**jsdoc
* Triggered when the input audio level changes.
* @function Audio.inputLevelChanged
* @param {number} level
* @param {number} level - The loudness of the input audio, range <code>0.0</code> (no sound) &ndash; <code>1.0</code> (the
* onset of clipping).
* @returns {Signal}
*/
void inputLevelChanged(float level);
/**jsdoc
* Triggered when the current context of the audio changes.
* @function Audio.contextChanged
* @param {string} context
* @param {string} context - The current context of the audio: either <code>"Desktop"</code> or <code>"HMD"</code>.
* @returns {Signal}
*/
void contextChanged(const QString& context);
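A sketch (not part of this commit) reporting when the audio context switches between Desktop and HMD.

Audio.contextChanged.connect(function (context) {
    print("Audio context is now: " + context);
});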
@@ -158,7 +227,7 @@ public slots:
/**jsdoc
* @function Audio.onContextChanged
* @returns {Signal}
* @deprecated This function is deprecated and will be removed.
*/
void onContextChanged();

View file

@@ -59,28 +59,29 @@ static void setOption(QScriptValue arguments, const QString name, float defaultV
}
/**jsdoc
* Reverberation options that can be used to initialize an {@link AudioEffectOptions} object when it is created.
* @typedef {object} AudioEffectOptions.ReverbOptions
* @property {number} bandwidth
* @property {number} preDelay
* @property {number} lateDelay
* @property {number} reverbTime
* @property {number} earlyDiffusion
* @property {number} lateDiffusion
* @property {number} roomSize
* @property {number} density
* @property {number} bassMult
* @property {number} bassFreq
* @property {number} highGain
* @property {number} highFreq
* @property {number} modRate
* @property {number} modDepth
* @property {number} earlyGain
* @property {number} lateGain
* @property {number} earlyMixLeft
* @property {number} earlyMixRight
* @property {number} lateMixLeft
* @property {number} lateMixRight
* @property {number} wetDryMix
* @property {number} bandwidth=10000 - The corner frequency (Hz) of the low-pass filter at reverb input.
* @property {number} preDelay=20 - The delay (milliseconds) between dry signal and the onset of early reflections.
* @property {number} lateDelay=0 - The delay (milliseconds) between early reflections and the onset of reverb tail.
* @property {number} reverbTime=2 - The time (seconds) for the reverb tail to decay by 60dB, also known as RT60.
* @property {number} earlyDiffusion=100 - Adjusts the buildup of echo density in the early reflections, normally 100%.
* @property {number} lateDiffusion=100 - Adjusts the buildup of echo density in the reverb tail, normally 100%.
* @property {number} roomSize=50 - The apparent room size, from small (0%) to large (100%).
* @property {number} density=100 - Adjusts the echo density in the reverb tail, normally 100%.
* @property {number} bassMult=1.5 - Adjusts the bass-frequency reverb time, as a multiple of reverbTime.
* @property {number} bassFreq=250 - The crossover frequency (Hz) for the onset of bassMult.
* @property {number} highGain=-6 - Reduces the high-frequency reverb time, as attenuation (dB).
* @property {number} highFreq=3000 - The crossover frequency (Hz) for the onset of highGain.
* @property {number} modRate=2.3 - The rate of modulation (Hz) of the LFO-modulated delay lines.
* @property {number} modDepth=50 - The depth of modulation (percent) of the LFO-modulated delay lines.
* @property {number} earlyGain=0 - Adjusts the relative level (dB) of the early reflections.
* @property {number} lateGain=0 - Adjusts the relative level (dB) of the reverb tail.
* @property {number} earlyMixLeft=20 - The apparent distance of the source (percent) in the early reflections.
* @property {number} earlyMixRight=20 - The apparent distance of the source (percent) in the early reflections.
* @property {number} lateMixLeft=90 - The apparent distance of the source (percent) in the reverb tail.
* @property {number} lateMixRight=90 - The apparent distance of the source (percent) in the reverb tail.
* @property {number} wetDryMix=50 - Adjusts the wet/dry ratio, from completely dry (0%) to completely wet (100%).
*/
AudioEffectOptions::AudioEffectOptions(QScriptValue arguments) {
setOption(arguments, BANDWIDTH_HANDLE, BANDWIDTH_DEFAULT, _bandwidth);
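A sketch (not part of this commit) constructing an AudioEffectOptions object from a partial set of reverb options; unspecified options keep the defaults listed above and the chosen values are illustrative.

var reverbOptions = new AudioEffectOptions({
    roomSize: 80,      // Larger apparent room.
    reverbTime: 3,     // Longer RT60 decay.
    wetDryMix: 40
});
Audio.setReverbOptions(reverbOptions);
Audio.setReverb(true);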

View file

@@ -16,35 +16,39 @@
#include <QtScript/QScriptEngine>
/**jsdoc
* Audio effect options used by the {@link Audio} API.
*
* <p>Create using <code>new AudioEffectOptions(reverbOptions)</code>.</p>
*
* @class AudioEffectOptions
* @param {AudioEffectOptions.ReverbOptions} [reverbOptions=null]
* @param {AudioEffectOptions.ReverbOptions} [reverbOptions=null] - Reverberation options.
*
* @hifi-interface
* @hifi-client-entity
* @hifi-server-entity
* @hifi-assignment-client
*
* @property {number} bandwidth=10000
* @property {number} preDelay=20
* @property {number} lateDelay=0
* @property {number} reverbTime=2
* @property {number} earlyDiffusion=100
* @property {number} lateDiffusion=100
* @property {number} roomSize=50
* @property {number} density=100
* @property {number} bassMult=1.5
* @property {number} bassFreq=250
* @property {number} highGain=-6
* @property {number} highFreq=3000
* @property {number} modRate=2.3
* @property {number} modDepth=50
* @property {number} earlyGain=0
* @property {number} lateGain=0
* @property {number} earlyMixLeft=20
* @property {number} earlyMixRight=20
* @property {number} lateMixLeft=90
* @property {number} lateMixRight=90
* @property {number} wetDryMix=50
* @property {number} bandwidth=10000 - The corner frequency (Hz) of the low-pass filter at reverb input.
* @property {number} preDelay=20 - The delay (milliseconds) between dry signal and the onset of early reflections.
* @property {number} lateDelay=0 - The delay (milliseconds) between early reflections and the onset of reverb tail.
* @property {number} reverbTime=2 - The time (seconds) for the reverb tail to decay by 60dB, also known as RT60.
* @property {number} earlyDiffusion=100 - Adjusts the buildup of echo density in the early reflections, normally 100%.
* @property {number} lateDiffusion=100 - Adjusts the buildup of echo density in the reverb tail, normally 100%.
* @property {number} roomSize=50 - The apparent room size, from small (0%) to large (100%).
* @property {number} density=100 - Adjusts the echo density in the reverb tail, normally 100%.
* @property {number} bassMult=1.5 - Adjusts the bass-frequency reverb time, as a multiple of reverbTime.
* @property {number} bassFreq=250 - The crossover frequency (Hz) for the onset of bassMult.
* @property {number} highGain=-6 - Reduces the high-frequency reverb time, as attenuation (dB).
* @property {number} highFreq=3000 - The crossover frequency (Hz) for the onset of highGain.
* @property {number} modRate=2.3 - The rate of modulation (Hz) of the LFO-modulated delay lines.
* @property {number} modDepth=50 - The depth of modulation (percent) of the LFO-modulated delay lines.
* @property {number} earlyGain=0 - Adjusts the relative level (dB) of the early reflections.
* @property {number} lateGain=0 - Adjusts the relative level (dB) of the reverb tail.
* @property {number} earlyMixLeft=20 - The apparent distance of the source (percent) in the early reflections.
* @property {number} earlyMixRight=20 - The apparent distance of the source (percent) in the early reflections.
* @property {number} lateMixLeft=90 - The apparent distance of the source (percent) in the reverb tail.
* @property {number} lateMixRight=90 - The apparent distance of the source (percent) in the reverb tail.
* @property {number} wetDryMix=50 - Adjusts the wet/dry ratio, from completely dry (0%) to completely wet (100%).
*/
class AudioEffectOptions : public QObject {

View file

@@ -45,6 +45,23 @@ QScriptValue injectorOptionsToScriptValue(QScriptEngine* engine, const AudioInje
return obj;
}
/**jsdoc
* Configures how an audio injector plays its audio.
* @typedef {object} AudioInjector.AudioInjectorOptions
* @property {Vec3} position=Vec3.ZERO - The position in the domain to play the sound.
* @property {Quat} orientation=Quat.IDENTITY - The orientation in the domain to play the sound in.
* @property {number} volume=1.0 - Playback volume, between <code>0.0</code> and <code>1.0</code>.
* @property {number} pitch=1.0 - Alter the pitch of the sound, within +/- 2 octaves. The value is the relative sample rate to
* resample the sound at, range <code>0.0625</code> &ndash; <code>16.0</code>. A value of <code>0.0625</code> lowers the
* pitch by 2 octaves; <code>1.0</code> is no change in pitch; <code>16.0</code> raises the pitch by 2 octaves.
* @property {boolean} loop=false - If <code>true</code>, the sound is played repeatedly until playback is stopped.
* @property {number} secondOffset=0 - Starts playback from a specified time (seconds) within the sound file, &ge;
* <code>0</code>.
* @property {boolean} localOnly=false - If <code>true</code>, the sound is played back locally on the client rather than to
* others via the audio mixer.
* @property {boolean} ignorePenumbra=false - <strong>Deprecated:</strong> This property is deprecated and will be
* removed.
*/
void injectorOptionsFromScriptValue(const QScriptValue& object, AudioInjectorOptions& injectorOptions) {
if (!object.isObject()) {
qWarning() << "Audio injector options is not an object.";
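A sketch (not part of this commit) assembling an AudioInjectorOptions object from the properties above and passing it to Audio.playSound; the sound path matches the other examples and the values are illustrative.

var sound = SoundCache.getSound(Script.resourcesPath() + "sounds/sample.wav");

Script.setTimeout(function () { // Give the sound time to load.
    var injectorOptions = {
        position: MyAvatar.position,
        volume: 0.5,
        loop: true,
        secondOffset: 2  // Start 2 s into the sound.
    };
    var injector = Audio.playSound(sound, injectorOptions);
}, 1000);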

View file

@@ -79,6 +79,14 @@ private:
typedef QSharedPointer<Sound> SharedSoundPointer;
/**jsdoc
* An audio resource, created by {@link SoundCache.getSound}, to be played back using {@link Audio.playSound}.
* <p>Supported formats:</p>
* <ul>
* <li>WAV: 16-bit uncompressed WAV at any sample rate, with 1 (mono), 2 (stereo), or 4 (ambisonic) channels.</li>
* <li>MP3: Mono or stereo, at any sample rate.</li>
* <li>RAW: 48 kHz 16-bit mono or stereo. The filename must include <code>".stereo"</code> to be interpreted as stereo.</li>
* </ul>
*
* @class SoundObject
*
* @hifi-interface
@@ -86,8 +94,9 @@ typedef QSharedPointer<Sound> SharedSoundPointer;
* @hifi-server-entity
* @hifi-assignment-client
*
* @property {boolean} downloaded
* @property {number} duration
* @property {boolean} downloaded - <code>true</code> if the sound has been downloaded and is ready to be played, otherwise
* <code>false</code>.
* @property {number} duration - The duration of the sound, in seconds.
*/
class SoundScriptingInterface : public QObject {
Q_OBJECT
@@ -103,6 +112,7 @@ public:
float getDuration() { return _sound->getDuration(); }
/**jsdoc
* Triggered when the sound has been downloaded and is ready to be played.
* @function SoundObject.ready
* @returns {Signal}
*/
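A sketch (not part of this commit) using the ready signal and the downloaded and duration properties; the sound path matches the other examples.

var sound = SoundCache.getSound(Script.resourcesPath() + "sounds/sample.wav");

sound.ready.connect(function () {
    print("Downloaded: " + sound.downloaded);
    print("Duration: " + sound.duration + " s");
});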

View file

@@ -48,9 +48,11 @@ public:
SoundCacheScriptingInterface();
/**jsdoc
* Loads the content of an audio file into a {@link SoundObject}, ready for playback by {@link Audio.playSound}.
* @function SoundCache.getSound
* @param {string} url
* @returns {SoundObject}
* @param {string} url - The URL of the audio file to load &mdash; Web, ATP, or file. See {@link SoundObject} for supported
* formats.
* @returns {SoundObject} The sound ready for playback.
*/
Q_INVOKABLE SharedSoundPointer getSound(const QUrl& url);
};

View file

@@ -23,6 +23,7 @@ class AudioScriptingInterface : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
// JSDoc for property is in Audio.h.
Q_PROPERTY(bool isStereoInput READ isStereoInput WRITE setStereoInput NOTIFY isStereoInputChanged)
public:
@@ -35,91 +36,121 @@ protected:
// these methods are protected to stop C++ callers from calling, but invokable from script
/**jsdoc
* Starts playing &mdash; "injecting" &mdash; the content of an audio file. The sound is played globally (sent to the audio
* mixer) so that everyone hears it, unless the <code>injectorOptions</code> has <code>localOnly</code> set to
* <code>true</code> in which case only the client hears the sound played. No sound is played if sent to the audio mixer
* but the client is not connected to an audio mixer. The {@link AudioInjector} object returned by the function can be used
* to control the playback and get information about its current state.
* @function Audio.playSound
* @param {} sound
* @param {} [injectorOptions=null]
* @returns {object}
* @param {SoundObject} sound - The content of an audio file, loaded using {@link SoundCache.getSound}. See
* {@link SoundObject} for supported formats.
* @param {AudioInjector.AudioInjectorOptions} [injectorOptions={}] - Audio injector configuration.
* @returns {AudioInjector} The audio injector that plays the audio file.
* @example <caption>Play a sound.</caption>
* var sound = SoundCache.getSound(Script.resourcesPath() + "sounds/sample.wav");
* var injector;
* var injectorOptions = {
* position: MyAvatar.position
* };
*
* Script.setTimeout(function () { // Give the sound time to load.
* injector = Audio.playSound(sound, injectorOptions);
* }, 1000);
*/
Q_INVOKABLE ScriptAudioInjector* playSound(SharedSoundPointer sound, const AudioInjectorOptions& injectorOptions = AudioInjectorOptions());
/**jsdoc
* Start playing the content of an audio file locally (i.e., the sound isn't sent to the audio mixer). This is the same as
* calling {@link Audio.playSound} with {@link AudioInjector.AudioInjectorOptions} <code>localOnly</code> set to
* <code>true</code> and the specified <code>position</code>.
* @function Audio.playSystemSound
* @param {} sound
* @param {} position
* @returns {object}
* @param {SoundObject} sound - The content of an audio file, loaded using {@link SoundCache.getSound}. See
* {@link SoundObject} for supported formats.
* @param {Vec3} position - The position in the domain to play the sound.
* @returns {AudioInjector} The audio injector that plays the audio file.
*/
// FIXME: there is no way to play a positionless sound
Q_INVOKABLE ScriptAudioInjector* playSystemSound(SharedSoundPointer sound, const QVector3D& position);
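A sketch (not part of this commit) playing a sound locally at the avatar's position; the sound path matches the other examples.

var sound = SoundCache.getSound(Script.resourcesPath() + "sounds/sample.wav");

Script.setTimeout(function () { // Give the sound time to load.
    var injector = Audio.playSystemSound(sound, MyAvatar.position);
}, 1000);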
/**jsdoc
* Set whether or not the audio input should be used in stereo. If the audio input does not support stereo then setting a
* value of <code>true</code> has no effect.
* @function Audio.setStereoInput
* @param {boolean} stereo
* @param {boolean} stereo - <code>true</code> if the audio input should be used in stereo, otherwise <code>false</code>.
*/
Q_INVOKABLE void setStereoInput(bool stereo);
/**jsdoc
* Get whether or not the audio input is used in stereo.
* @function Audio.isStereoInput
* @returns {boolean}
* @returns {boolean} <code>true</code> if the audio input is used in stereo, otherwise <code>false</code>.
*/
Q_INVOKABLE bool isStereoInput();
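A sketch (not part of this commit) requesting stereo input and reporting the result; whether stereo is actually used depends on the device, as noted above.

Audio.isStereoInputChanged.connect(function (isStereo) {
    print("Stereo input: " + isStereo);
});

Audio.setStereoInput(true);
print("Using stereo input: " + Audio.isStereoInput());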
signals:
/**jsdoc
* The client has been muted by the mixer.
* Triggered when the client is muted by the mixer because the loudness of their background noise has reached the
* threshold set for the domain in the server settings.
* @function Audio.mutedByMixer
* @returns {Signal}
*/
void mutedByMixer();
/**jsdoc
* The entire environment has been muted by the mixer.
* Triggered when the client is muted by the mixer because they're within a certain radius (50m) of someone who requested
* the mute through Developer &gt; Audio &gt; Mute Environment.
* @function Audio.environmentMuted
* @returns {Signal}
*/
void environmentMuted();
/**jsdoc
* The client has received its first packet from the audio mixer.
* Triggered when the client receives its first packet from the audio mixer.
* @function Audio.receivedFirstPacket
* @returns {Signal}
*/
void receivedFirstPacket();
/**jsdoc
* The client has been disconnected from the audio mixer.
* Triggered when the client is disconnected from the audio mixer.
* @function Audio.disconnected
* @returns {Signal}
*/
void disconnected();
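A sketch (not part of this commit) reporting audio mixer connection events.

Audio.receivedFirstPacket.connect(function () {
    print("Connected to the audio mixer.");
});

Audio.disconnected.connect(function () {
    print("Disconnected from the audio mixer.");
});

Audio.mutedByMixer.connect(function () {
    print("Muted by the audio mixer.");
});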
/**jsdoc
* The noise gate has opened.
* Triggered when the noise gate is opened: the input audio signal is no longer blocked (fully attenuated) because it has
* risen above an adaptive threshold set just above the noise floor. Only occurs if <code>Audio.noiseReduction</code> is
* <code>true</code>.
* @function Audio.noiseGateOpened
* @returns {Signal}
*/
void noiseGateOpened();
/**jsdoc
* The noise gate has closed.
* Triggered when the noise gate is closed: the input audio signal is blocked (fully attenuated) because it has fallen
* below an adaptive threshold set just above the noise floor. Only occurs if <code>Audio.noiseReduction</code> is
* <code>true</code>.
* @function Audio.noiseGateClosed
* @returns {Signal}
*/
void noiseGateClosed();
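A sketch (not part of this commit) reporting noise gate activity; these signals fire only while Audio.noiseReduction is enabled.

Audio.noiseGateOpened.connect(function () {
    print("Noise gate opened - mic audio is passing through.");
});

Audio.noiseGateClosed.connect(function () {
    print("Noise gate closed - mic audio is blocked.");
});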
/**jsdoc
* A frame of mic input audio has been received and processed.
* Triggered when a frame of audio input is processed.
* @function Audio.inputReceived
* @param {} inputSamples
* @param {Int16Array} inputSamples - The audio input processed.
* @returns {Signal}
*/
void inputReceived(const QByteArray& inputSamples);
/**jsdoc
* @function Audio.isStereoInputChanged
* @param {boolean} isStereo
* @returns {Signal}
*/
* Triggered when the input audio use changes between mono and stereo.
* @function Audio.isStereoInputChanged
* @param {boolean} isStereo - <code>true</code> if the input audio is stereo, otherwise <code>false</code>.
* @returns {Signal}
*/
void isStereoInputChanged(bool isStereo);
private:

View file

@@ -16,6 +16,22 @@
#include <AudioInjector.h>
/**jsdoc
* Plays &mdash; "injects" &mdash; the content of an audio file. Used in the {@link Audio} API.
*
* @class AudioInjector
*
* @hifi-interface
* @hifi-client-entity
* @hifi-server-entity
* @hifi-assignment-client
*
* @property {boolean} playing - <code>true</code> if the audio is currently playing, otherwise <code>false</code>.
* <em>Read-only.</em>
* @property {number} loudness - The loudness in the last frame of audio, range <code>0.0</code> &ndash; <code>1.0</code>.
* <em>Read-only.</em>
* @property {AudioInjector.AudioInjectorOptions} options - Configures how the injector plays the audio.
*/
class ScriptAudioInjector : public QObject {
Q_OBJECT
@@ -26,19 +42,103 @@ public:
ScriptAudioInjector(const AudioInjectorPointer& injector);
~ScriptAudioInjector();
public slots:
/**jsdoc
* Stop current playback, if any, and start playing from the beginning.
* @function AudioInjector.restart
*/
void restart() { _injector->restart(); }
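A sketch (not part of this commit) restarting playback partway through a sound, in the style of the stop example below; the timings are illustrative.

var sound = SoundCache.getSound(Script.resourcesPath() + "sounds/sample.wav");
var injector;

Script.setTimeout(function () { // Give the sound time to load.
    injector = Audio.playSound(sound, { position: MyAvatar.position });
}, 1000);

Script.setTimeout(function () {
    injector.restart(); // Play again from the beginning.
}, 3000);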
/**jsdoc
* Stop audio playback.
* @function AudioInjector.stop
* @example <caption>Stop playing a sound before it finishes.</caption>
* var sound = SoundCache.getSound(Script.resourcesPath() + "sounds/sample.wav");
* var injector;
* var injectorOptions = {
* position: MyAvatar.position
* };
*
* Script.setTimeout(function () { // Give the sound time to load.
* injector = Audio.playSound(sound, injectorOptions);
* }, 1000);
*
* Script.setTimeout(function () {
* injector.stop();
* }, 2000);
*/
void stop() { _injector->stop(); }
/**jsdoc
* Get the current configuration of the audio injector.
* @function AudioInjector.getOptions
* @returns {AudioInjector.AudioInjectorOptions} Configuration of how the injector plays the audio.
*/
const AudioInjectorOptions& getOptions() const { return _injector->getOptions(); }
/**jsdoc
* Configure how the injector plays the audio.
* @function AudioInjector.setOptions
* @param {AudioInjector.AudioInjectorOptions} options - Configuration of how the injector plays the audio.
*/
void setOptions(const AudioInjectorOptions& options) { _injector->setOptions(options); }
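A sketch (not part of this commit) adjusting an injector's options mid-playback; whether a particular option can be changed while playing may depend on the option, and the values are illustrative.

var sound = SoundCache.getSound(Script.resourcesPath() + "sounds/sample.wav");
var injector;

Script.setTimeout(function () { // Give the sound time to load.
    injector = Audio.playSound(sound, { position: MyAvatar.position });
}, 1000);

Script.setTimeout(function () {
    var options = injector.getOptions();
    options.volume = 0.25; // Quieten the remaining playback.
    injector.setOptions(options);
}, 2000);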
/**jsdoc
* Get the loudness of the most recent frame of audio played.
* @function AudioInjector.getLoudness
* @returns {number} The loudness of the most recent frame of audio played, range <code>0.0</code> &ndash; <code>1.0</code>.
*/
float getLoudness() const { return _injector->getLoudness(); }
/**jsdoc
* Get whether or not the audio is currently playing.
* @function AudioInjector.isPlaying
* @returns {boolean} <code>true</code> if the audio is currently playing, otherwise <code>false</code>.
* @example <caption>See if a sound is playing.</caption>
* var sound = SoundCache.getSound(Script.resourcesPath() + "sounds/sample.wav");
* var injector;
* var injectorOptions = {
* position: MyAvatar.position
* };
*
* Script.setTimeout(function () { // Give the sound time to load.
* injector = Audio.playSound(sound, injectorOptions);
* }, 1000);
*
* Script.setTimeout(function () {
* print("Sound is playing: " + injector.isPlaying());
* }, 2000);
*/
bool isPlaying() const { return _injector->isPlaying(); }
signals:
/**jsdoc
* Triggered when the audio has finished playing.
* @function AudioInjector.finished
* @returns {Signal}
* @example <caption>Report when a sound has finished playing.</caption>
* var sound = SoundCache.getSound(Script.resourcesPath() + "sounds/sample.wav");
* var injector;
* var injectorOptions = {
* position: MyAvatar.position
* };
*
* Script.setTimeout(function () { // Give the sound time to load.
* injector = Audio.playSound(sound, injectorOptions);
* injector.finished.connect(function () {
* print("Finished playing sound");
* });
* }, 1000);
*/
void finished();
protected slots:
/**jsdoc
* Stop audio playback. (Synonym of {@link AudioInjector.stop|stop}.)
* @function AudioInjector.stopInjectorImmediately
*/
void stopInjectorImmediately();
private:
AudioInjectorPointer _injector;