Merge branch 'master' of github.com:highfidelity/hifi into audio-buffer-starvation-detection

Conflicts:
	interface/src/Audio.cpp
	interface/src/Audio.h
	interface/src/Menu.cpp
	interface/src/ui/PreferencesDialog.cpp
This commit is contained in:
Ryan Huffman 2015-01-07 09:46:07 -08:00
commit a07d92b6ea
167 changed files with 2049 additions and 4115 deletions

View file

@ -6,6 +6,7 @@
* [OpenSSL](https://www.openssl.org/related/binaries.html) ~> 1.0.1g
* IMPORTANT: OpenSSL 1.0.1g is critical to avoid a security vulnerability.
* [Intel Threading Building Blocks](https://www.threadingbuildingblocks.org/) ~> 4.3
* [Bullet Physics Engine](http://bulletphysics.org) ~> 2.82
### OS Specific Build Guides
* [BUILD_OSX.md](BUILD_OSX.md) - additional instructions for OS X.

View file

@ -17,7 +17,6 @@
#include <QtNetwork/QNetworkRequest>
#include <QtNetwork/QNetworkReply>
#include <AudioRingBuffer.h>
#include <AvatarData.h>
#include <NetworkAccessManager.h>
#include <NodeList.h>
@ -37,7 +36,7 @@ static const int RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES = 10;
Agent::Agent(const QByteArray& packet) :
ThreadedAssignment(packet),
_entityEditSender(),
_receivedAudioStream(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES,
_receivedAudioStream(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES,
InboundAudioStream::Settings(0, false, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, false,
DEFAULT_WINDOW_STARVE_THRESHOLD, DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES,
DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION, false)),

View file

@ -273,8 +273,8 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
// Mono input to stereo output (item 1 above)
int OUTPUT_SAMPLES_PER_INPUT_SAMPLE = 2;
int inputSampleCount = NETWORK_BUFFER_LENGTH_SAMPLES_STEREO / OUTPUT_SAMPLES_PER_INPUT_SAMPLE;
int maxOutputIndex = NETWORK_BUFFER_LENGTH_SAMPLES_STEREO;
int inputSampleCount = AudioConstants::NETWORK_FRAME_SAMPLES_STEREO / OUTPUT_SAMPLES_PER_INPUT_SAMPLE;
int maxOutputIndex = AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
// attenuation and fade applied to all samples (item 2 above)
float attenuationAndFade = attenuationCoefficient * repeatedFrameFadeFactor;
@ -352,9 +352,10 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
float attenuationAndFade = attenuationCoefficient * repeatedFrameFadeFactor;
for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s++) {
for (int s = 0; s < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; s++) {
_preMixSamples[s] = glm::clamp(_preMixSamples[s] + (int)(streamPopOutput[s / stereoDivider] * attenuationAndFade),
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
AudioConstants::MIN_SAMPLE_VALUE,
AudioConstants::MAX_SAMPLE_VALUE);
}
}
@ -416,14 +417,15 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
AudioFilterHSF1s& penumbraFilter = listenerNodeData->getListenerSourcePairData(streamUUID)->getPenumbraFilter();
// set the gain on both filter channels
penumbraFilter.setParameters(0, 0, SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainL, penumbraFilterSlope);
penumbraFilter.setParameters(0, 1, SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainR, penumbraFilterSlope);
penumbraFilter.render(_preMixSamples, _preMixSamples, NETWORK_BUFFER_LENGTH_SAMPLES_STEREO / 2);
penumbraFilter.setParameters(0, 0, AudioConstants::SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainL, penumbraFilterSlope);
penumbraFilter.setParameters(0, 1, AudioConstants::SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainR, penumbraFilterSlope);
penumbraFilter.render(_preMixSamples, _preMixSamples, AudioConstants::NETWORK_FRAME_SAMPLES_STEREO / 2);
}
// Actually mix the _preMixSamples into the _mixSamples here.
for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s++) {
_mixSamples[s] = glm::clamp(_mixSamples[s] + _preMixSamples[s], MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
for (int s = 0; s < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; s++) {
_mixSamples[s] = glm::clamp(_mixSamples[s] + _preMixSamples[s], AudioConstants::MIN_SAMPLE_VALUE,
AudioConstants::MAX_SAMPLE_VALUE);
}
return 1;
@ -702,7 +704,7 @@ void AudioMixer::run() {
char clientMixBuffer[MAX_PACKET_SIZE];
int usecToSleep = BUFFER_SEND_INTERVAL_USECS;
int usecToSleep = AudioConstants::NETWORK_FRAME_USECS;
const int TRAILING_AVERAGE_FRAMES = 100;
int framesSinceCutoffEvent = TRAILING_AVERAGE_FRAMES;
@ -721,7 +723,7 @@ void AudioMixer::run() {
}
_trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio)
+ (usecToSleep * CURRENT_FRAME_RATIO / (float) BUFFER_SEND_INTERVAL_USECS);
+ (usecToSleep * CURRENT_FRAME_RATIO / (float) AudioConstants::NETWORK_FRAME_USECS);
float lastCutoffRatio = _performanceThrottlingRatio;
bool hasRatioChanged = false;
@ -800,8 +802,8 @@ void AudioMixer::run() {
mixDataAt += sizeof(quint16);
// pack mixed audio samples
memcpy(mixDataAt, _mixSamples, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
mixDataAt += NETWORK_BUFFER_LENGTH_BYTES_STEREO;
memcpy(mixDataAt, _mixSamples, AudioConstants::NETWORK_FRAME_BYTES_STEREO);
mixDataAt += AudioConstants::NETWORK_FRAME_BYTES_STEREO;
} else {
// pack header
int numBytesPacketHeader = populatePacketHeader(clientMixBuffer, PacketTypeSilentAudioFrame);
@ -813,7 +815,7 @@ void AudioMixer::run() {
mixDataAt += sizeof(quint16);
// pack number of silent audio samples
quint16 numSilentSamples = NETWORK_BUFFER_LENGTH_SAMPLES_STEREO;
quint16 numSilentSamples = AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
memcpy(mixDataAt, &numSilentSamples, sizeof(quint16));
mixDataAt += sizeof(quint16);
}
@ -844,7 +846,7 @@ void AudioMixer::run() {
break;
}
usecToSleep = (++nextFrame * BUFFER_SEND_INTERVAL_USECS) - timer.nsecsElapsed() / 1000; // ns to us
usecToSleep = (++nextFrame * AudioConstants::NETWORK_FRAME_USECS) - timer.nsecsElapsed() / 1000; // ns to us
if (usecToSleep > 0) {
usleep(usecToSleep);

View file

@ -55,11 +55,11 @@ private:
// used on a per stream basis to run the filter on before mixing, large enough to handle the historical
// data from a phase delay as well as an entire network buffer
int16_t _preMixSamples[NETWORK_BUFFER_LENGTH_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)];
int16_t _preMixSamples[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)];
// client samples capacity is larger than what will be sent to optimize mixing
// we are MMX adding 4 samples at a time so we need client samples to have an extra 4
int16_t _mixSamples[NETWORK_BUFFER_LENGTH_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)];
int16_t _mixSamples[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)];
void perSecondActions();

View file

@ -24,7 +24,7 @@
class PerListenerSourcePairData {
public:
PerListenerSourcePairData() {
_penumbraFilter.initialize(SAMPLE_RATE, NETWORK_BUFFER_LENGTH_SAMPLES_STEREO / 2);
_penumbraFilter.initialize(AudioConstants::SAMPLE_RATE, AudioConstants::NETWORK_FRAME_SAMPLES_STEREO / 2);
};
AudioFilterHSF1s& getPenumbraFilter() { return _penumbraFilter; }

View file

@ -40,7 +40,9 @@ int AvatarAudioStream::parseStreamProperties(PacketType type, const QByteArray&
// if isStereo value has changed, restart the ring buffer with new frame size
if (isStereo != _isStereo) {
_ringBuffer.resizeForFrameSize(isStereo ? NETWORK_BUFFER_LENGTH_SAMPLES_STEREO : NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
_ringBuffer.resizeForFrameSize(isStereo
? AudioConstants::NETWORK_FRAME_SAMPLES_STEREO
: AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
_isStereo = isStereo;
}

View file

@ -279,7 +279,7 @@ int OctreeSendThread::handlePacketSend(OctreeQueryNode* nodeData, int& trueBytes
return packetsSent;
}
/// Version of voxel distributor that sends the deepest LOD level at once
/// Version of octree element distributor that sends the deepest LOD level at once
int OctreeSendThread::packetDistributor(OctreeQueryNode* nodeData, bool viewFrustumChanged) {
OctreeServer::didPacketDistributor(this);
@ -595,7 +595,7 @@ int OctreeSendThread::packetDistributor(OctreeQueryNode* nodeData, bool viewFrus
//int elapsedCompressTimeMsecs = endCompressTimeMsecs - startCompressTimeMsecs;
// if after sending packets we've emptied our bag, then we want to remember that we've sent all
// the voxels from the current view frustum
// the octree elements from the current view frustum
if (nodeData->elementBag.isEmpty()) {
nodeData->updateLastKnownViewFrustum();
nodeData->setViewSent(true);

View file

@ -364,7 +364,7 @@ function makeTableInputs(setting) {
_.each(setting.columns, function(col) {
html += "<td class='" + Settings.DATA_COL_CLASS + "'name='" + col.name + "'>\
<input type='text' class='form-control' placeholder='" + (col.placeholder ? col.placeholder : "") + "'\
value='" + (col.default ? col.default : "") + "'>\
value='" + (col.default ? col.default : "") + "' data-default='" + (col.default ? col.default : "") + "'>\
</td>"
})
@ -504,7 +504,7 @@ function addTableRow(add_glyphicon) {
})
input_clone.find('input').each(function(){
$(this).val('')
$(this).val($(this).attr('data-default'));
});
if (isArray) {
@ -525,9 +525,9 @@ function deleteTableRow(delete_glyphicon) {
var table = $(row).closest('table')
var isArray = table.data('setting-type') === 'array'
row.empty();
if (!isArray) {
// this is a hash row, so we empty it but leave the hidden input blank so it is cleared when we save
row.empty()
row.html("<input type='hidden' class='form-control' name='"
+ row.attr('name') + "' data-changed='true' value=''>");
} else {
@ -538,7 +538,6 @@ function deleteTableRow(delete_glyphicon) {
row.remove()
} else {
// this is the last row, we can't remove it completely since we need to post an empty array
row.empty()
row.removeClass(Settings.DATA_ROW_CLASS).removeClass(Settings.NEW_ROW_CLASS)
row.addClass('empty-array-row')
@ -592,7 +591,7 @@ function updateDataChangedForSiblingRows(row, forceTrue) {
var initialPanelSettingJSON = Settings.initialValues[panelParentID][tableShortName]
// if they are equal, we don't need data-changed
isTrue = _.isEqual(panelSettingJSON, initialPanelSettingJSON)
isTrue = !_.isEqual(panelSettingJSON, initialPanelSettingJSON)
} else {
isTrue = true
}

View file

@ -1,139 +0,0 @@
//
// avatarLocalLight.js
//
// Created by Tony Peng on July 2nd, 2014
// Copyright 2014 High Fidelity, Inc.
//
// Set the local light direction and color on the avatar
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var localLightDirections = [ {x: 1.0, y:0.0, z: 0.0}, {x: 0.0, y:0.0, z: 1.0} ];
var localLightColors = [ {x: 0.4, y:0.335, z: 0.266}, {x: 0.4, y:0.335, z: 0.266} ];
var currentSelection = 0;
var currentNumLights = 2;
var maxNumLights = 2;
var currentNumAvatars = 0;
var changeDelta = 0.1;
var lightsDirty = true;
function keyPressEvent(event) {
var choice = parseInt(event.text);
if (event.text == "1") {
currentSelection = 0;
print("light election = " + currentSelection);
}
else if (event.text == "2" ) {
currentSelection = 1;
print("light selection = " + currentSelection);
}
else if (event.text == "3" ) {
currentSelection = 2;
print("light selection = " + currentSelection);
}
else if (event.text == "4" ) {
currentSelection = 3;
print("light selection = " + currentSelection);
}
else if (event.text == "5" ) {
localLightColors[currentSelection].x += changeDelta;
if ( localLightColors[currentSelection].x > 1.0) {
localLightColors[currentSelection].x = 0.0;
}
lightsDirty = true;
print("CHANGE RED light " + currentSelection + " color (" + localLightColors[currentSelection].x + ", " + localLightColors[currentSelection].y + ", " + localLightColors[currentSelection].z + " )" );
}
else if (event.text == "6" ) {
localLightColors[currentSelection].y += changeDelta;
if ( localLightColors[currentSelection].y > 1.0) {
localLightColors[currentSelection].y = 0.0;
}
lightsDirty = true;
print("CHANGE GREEN light " + currentSelection + " color (" + localLightColors[currentSelection].x + ", " + localLightColors[currentSelection].y + ", " + localLightColors[currentSelection].z + " )" );
}
else if (event.text == "7" ) {
localLightColors[currentSelection].z += changeDelta;
if ( localLightColors[currentSelection].z > 1.0) {
localLightColors[currentSelection].z = 0.0;
}
lightsDirty = true;
print("CHANGE BLUE light " + currentSelection + " color (" + localLightColors[currentSelection].x + ", " + localLightColors[currentSelection].y + ", " + localLightColors[currentSelection].z + " )" );
}
else if (event.text == "8" ) {
localLightDirections[currentSelection].x += changeDelta;
if (localLightDirections[currentSelection].x > 1.0) {
localLightDirections[currentSelection].x = -1.0;
}
lightsDirty = true;
print("PLUS X light " + currentSelection + " direction (" + localLightDirections[currentSelection].x + ", " + localLightDirections[currentSelection].y + ", " + localLightDirections[currentSelection].z + " )" );
}
else if (event.text == "9" ) {
localLightDirections[currentSelection].x -= changeDelta;
if (localLightDirections[currentSelection].x < -1.0) {
localLightDirections[currentSelection].x = 1.0;
}
lightsDirty = true;
print("MINUS X light " + currentSelection + " direction (" + localLightDirections[currentSelection].x + ", " + localLightDirections[currentSelection].y + ", " + localLightDirections[currentSelection].z + " )" );
}
else if (event.text == "0" ) {
localLightDirections[currentSelection].y += changeDelta;
if (localLightDirections[currentSelection].y > 1.0) {
localLightDirections[currentSelection].y = -1.0;
}
lightsDirty = true;
print("PLUS Y light " + currentSelection + " direction (" + localLightDirections[currentSelection].x + ", " + localLightDirections[currentSelection].y + ", " + localLightDirections[currentSelection].z + " )" );
}
else if (event.text == "-" ) {
localLightDirections[currentSelection].y -= changeDelta;
if (localLightDirections[currentSelection].y < -1.0) {
localLightDirections[currentSelection].y = 1.0;
}
lightsDirty = true;
print("MINUS Y light " + currentSelection + " direction (" + localLightDirections[currentSelection].x + ", " + localLightDirections[currentSelection].y + ", " + localLightDirections[currentSelection].z + " )" );
}
else if (event.text == "," ) {
if (currentNumLights + 1 <= maxNumLights) {
++currentNumLights;
lightsDirty = true;
}
print("ADD LIGHT, number of lights " + currentNumLights);
}
else if (event.text == "." ) {
if (currentNumLights - 1 >= 0 ) {
--currentNumLights;
lightsDirty = true;
}
print("REMOVE LIGHT, number of lights " + currentNumLights);
}
}
function updateLocalLights()
{
if (lightsDirty) {
var localLights = [];
for (var i = 0; i < currentNumLights; i++) {
localLights.push({ direction: localLightDirections[i], color: localLightColors[i] });
}
AvatarManager.setLocalLights(localLights);
lightsDirty = false;
}
}
// main
Script.update.connect(updateLocalLights);
Controller.keyPressEvent.connect(keyPressEvent);

View file

@ -10,7 +10,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("../../libraries/globals.js");
function length(v) {
return Math.sqrt(v.x * v.x + v.y * v.y + v.z * v.z);

View file

@ -10,7 +10,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("../../libraries/globals.js");
function length(v) {
return Math.sqrt(v.x * v.x + v.y * v.y + v.z * v.z);

View file

@ -15,8 +15,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("libraries/toolBars.js");
Script.include("../../libraries/globals.js");
Script.include("../../libraries/toolBars.js");
const LEFT_PALM = 0;
const LEFT_TIP = 1;
@ -192,15 +192,18 @@ function cleanupFrisbees() {
}
function checkControllerSide(hand) {
// print("cCS");
// If I don't currently have a frisbee in my hand, then try to catch closest one
if (!hand.holdingFrisbee && hand.grabButtonPressed()) {
var closestEntity = Entities.findClosestEntity(hand.palmPosition(), CATCH_RADIUS);
var modelUrl = Entities.getEntityProperties(closestEntity).modelURL;
print("lol2"+closestEntity.isKnownID);
if (closestEntity.isKnownID && validFrisbeeURL(Entities.getEntityProperties(closestEntity).modelURL)) {
print("lol");
Entities.editEntity(closestEntity, {modelScale: 1, inHand: true, position: hand.holdPosition(), shouldDie: true});
Entities.deleteEntity(closestEntity);
debugPrint(hand.message + " HAND- CAUGHT SOMETHING!!");
print("lol");
var properties = {
type: "Model",
position: hand.holdPosition(),
@ -208,10 +211,10 @@ function checkControllerSide(hand) {
gravity: { x: 0, y: 0, z: 0},
inHand: true,
dimensions: { x: FRISBEE_RADIUS, y: FRISBEE_RADIUS / 5, z: FRISBEE_RADIUS },
damping: 0.00001,
damping: 0.999,
modelURL: modelUrl,
modelScale: FRISBEE_MODEL_SCALE,
modelRotation: hand.holdRotation(),
scale: FRISBEE_MODEL_SCALE,
rotation: hand.holdRotation(),
lifetime: FRISBEE_LIFETIME
};
@ -235,10 +238,10 @@ function checkControllerSide(hand) {
gravity: { x: 0, y: 0, z: 0},
inHand: true,
dimensions: { x: FRISBEE_RADIUS, y: FRISBEE_RADIUS / 5, z: FRISBEE_RADIUS },
damping: 0.00001,
damping: 0,
modelURL: frisbeeURL(),
modelScale: FRISBEE_MODEL_SCALE,
modelRotation: hand.holdRotation(),
scale: FRISBEE_MODEL_SCALE,
rotation: hand.holdRotation(),
lifetime: FRISBEE_LIFETIME
};
@ -270,7 +273,7 @@ function checkControllerSide(hand) {
inHand: false,
lifetime: FRISBEE_LIFETIME,
gravity: { x: 0, y: -GRAVITY_STRENGTH, z: 0},
modelRotation: hand.holdRotation()
rotation: hand.holdRotation()
};
Entities.editEntity(hand.entity, properties);
@ -304,7 +307,7 @@ function hydraCheck() {
var numberOfSpatialControls = Controller.getNumberOfSpatialControls();
var controllersPerTrigger = numberOfSpatialControls / numberOfTriggers;
hydrasConnected = (numberOfButtons == 12 && numberOfTriggers == 2 && controllersPerTrigger == 2);
return hydrasConnected;
return true;//hydrasConnected;
}
function checkController(deltaTime) {
@ -314,6 +317,7 @@ function checkController(deltaTime) {
}
// this is expected for hydras
if (hydraCheck()) {
///print("testrr ");
checkControllerSide(leftHand);
checkControllerSide(rightHand);
}
@ -333,7 +337,7 @@ function controlFrisbees(deltaTime) {
killSimulations.push(frisbee);
continue;
}
Entities.editEntity(simulatedFrisbees[frisbee], {modelRotation: Quat.multiply(properties.modelRotation, Quat.fromPitchYawRollDegrees(0, speed * deltaTime * SPIN_MULTIPLIER, 0))});
Entities.editEntity(simulatedFrisbees[frisbee], {rotation: Quat.multiply(properties.modelRotation, Quat.fromPitchYawRollDegrees(0, speed * deltaTime * SPIN_MULTIPLIER, 0))});
}
for (var i = killSimulations.length - 1; i >= 0; i--) {
@ -444,4 +448,4 @@ Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Menu.menuItemEvent.connect(menuItemEvent);
Script.scriptEnding.connect(scriptEnding);
Script.update.connect(checkController);
Script.update.connect(controlFrisbees);
Script.update.connect(controlFrisbees);

View file

@ -14,7 +14,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("../../libraries/globals.js");
function getRandomFloat(min, max) {
return Math.random() * (max - min) + min;
@ -194,8 +194,8 @@ function playLoadSound() {
Audio.playSound(loadSound, audioOptions);
}
//MyAvatar.attach(gunModel, "RightHand", {x: -0.02, y: -.14, z: 0.07}, Quat.fromPitchYawRollDegrees(-70, -151, 72), 0.20);
MyAvatar.attach(gunModel, "LeftHand", {x: -0.02, y: -.14, z: 0.07}, Quat.fromPitchYawRollDegrees(-70, -151, 72), 0.20);
MyAvatar.attach(gunModel, "RightHand", {x:0.02, y: 0.11, z: 0.04}, Quat.fromPitchYawRollDegrees(-0, -160, -79), 0.20);
//MyAvatar.attach(gunModel, "LeftHand", {x: -0.02, y: -.14, z: 0.07}, Quat.fromPitchYawRollDegrees(-70, -151, 72), 0.20);
// Give a bit of time to load before playing sound
Script.setTimeout(playLoadSound, 2000);

View file

@ -9,7 +9,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("../../libraries/globals.js");
var rightHandAnimation = HIFI_PUBLIC_BUCKET + "animations/RightHandAnimPhilip.fbx";
var leftHandAnimation = HIFI_PUBLIC_BUCKET + "animations/LeftHandAnimPhilip.fbx";

View file

@ -15,7 +15,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("../../libraries/globals.js");
const KBD_UPPERCASE_DEFAULT = 0;
const KBD_LOWERCASE_DEFAULT = 1;

View file

@ -1,30 +0,0 @@
//
// count.js
// examples
//
// Created by Brad Hefta-Gaub on 12/31/13.
// Copyright 2013 High Fidelity, Inc.
//
// This is an example script that runs in a loop and displays a counter to the log
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
//
var count = 0;
function displayCount(deltaTime) {
print("count =" + count + " deltaTime=" + deltaTime);
count++;
}
function scriptEnding() {
print("SCRIPT ENDNG!!!\n");
}
// register the call back so it fires before each data send
Script.update.connect(displayCount);
// register our scriptEnding callback
Script.scriptEnding.connect(scriptEnding);

View file

@ -77,7 +77,6 @@ var modelURLs = [
HIFI_PUBLIC_BUCKET + "models/entities/2-Terrain:%20Alder.fbx",
HIFI_PUBLIC_BUCKET + "models/entities/2-Terrain:%20Bush1.fbx",
HIFI_PUBLIC_BUCKET + "models/entities/2-Terrain:%20Bush6.fbx",
HIFI_PUBLIC_BUCKET + "meshes/newInvader16x16-large-purple.svo",
HIFI_PUBLIC_BUCKET + "models/entities/3-Buildings-1-Rustic-Shed.fbx",
HIFI_PUBLIC_BUCKET + "models/entities/3-Buildings-1-Rustic-Shed2.fbx",
HIFI_PUBLIC_BUCKET + "models/entities/3-Buildings-1-Rustic-Shed4.fbx",
@ -192,8 +191,7 @@ var toolBar = (function () {
});
newTextButton = toolBar.addTool({
//imageURL: toolIconUrl + "add-text.svg",
imageURL: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/tools/add-text.svg", // temporarily
imageURL: toolIconUrl + "add-text.svg",
subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
width: toolWidth,
height: toolHeight,

View file

@ -56,7 +56,6 @@ var modelURLs = [
HIFI_PUBLIC_BUCKET + "models/entities/2-Terrain:%20Alder.fbx",
HIFI_PUBLIC_BUCKET + "models/entities/2-Terrain:%20Bush1.fbx",
HIFI_PUBLIC_BUCKET + "models/entities/2-Terrain:%20Bush6.fbx",
HIFI_PUBLIC_BUCKET + "meshes/newInvader16x16-large-purple.svo",
HIFI_PUBLIC_BUCKET + "models/entities/3-Buildings-1-Rustic-Shed.fbx",
HIFI_PUBLIC_BUCKET + "models/entities/3-Buildings-1-Rustic-Shed2.fbx",
HIFI_PUBLIC_BUCKET + "models/entities/3-Buildings-1-Rustic-Shed4.fbx",
@ -1215,8 +1214,7 @@ var toolBar = (function () {
});
newTextButton = toolBar.addTool({
//imageURL: toolIconUrl + "add-text.svg",
imageURL: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/tools/add-text.svg", // temporarily
imageURL: toolIconUrl + "add-text.svg",
subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
width: toolWidth,
height: toolHeight,

View file

@ -1,202 +0,0 @@
//
// particleBirds.js
// examples
//
// Created by Benjamin Arnold on May 29, 2014
// Copyright 2014 High Fidelity, Inc.
//
// This sample script creates a swarm of tweeting bird entities that fly around the avatar.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
// Multiply vector by scalar
function vScalarMult(v, s) {
var rval = { x: v.x * s, y: v.y * s, z: v.z * s };
return rval;
}
function printVector(v) {
print(v.x + ", " + v.y + ", " + v.z + "\n");
}
// Create a random vector with individual lengths between a,b
function randVector(a, b) {
var rval = { x: a + Math.random() * (b - a), y: a + Math.random() * (b - a), z: a + Math.random() * (b - a) };
return rval;
}
// Returns a vector which is fraction of the way between a and b
function vInterpolate(a, b, fraction) {
var rval = { x: a.x + (b.x - a.x) * fraction, y: a.y + (b.y - a.y) * fraction, z: a.z + (b.z - a.z) * fraction };
return rval;
}
var startTimeInSeconds = new Date().getTime() / 1000;
var birdLifetime = 20; // lifetime of the birds in seconds!
var range = 1.0; // Over what distance in meters do you want the flock to fly around
var frame = 0;
var CHANCE_OF_MOVING = 0.1;
var CHANCE_OF_TWEETING = 0.05;
var BIRD_GRAVITY = -0.1;
var BIRD_FLAP_SPEED = 10.0;
var BIRD_VELOCITY = 0.5;
var myPosition = MyAvatar.position;
var range = 1.0; // Distance around avatar where I can move
// This is our Bird object
function Bird (particleID, tweetSound, targetPosition) {
this.particleID = particleID;
this.tweetSound = tweetSound;
this.previousFlapOffset = 0;
this.targetPosition = targetPosition;
this.moving = false;
this.tweeting = -1;
}
// Array of birds
var birds = [];
function addBird()
{
// Decide what kind of bird we are
var tweet;
var color;
var size;
var which = Math.random();
if (which < 0.2) {
tweet = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Animals/bushtit_1.raw");
color = { red: 100, green: 50, blue: 120 };
size = 0.08;
} else if (which < 0.4) {
tweet = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Animals/rosyfacedlovebird.raw");
color = { red: 100, green: 150, blue: 75 };
size = 0.09;
} else if (which < 0.6) {
tweet = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Animals/saysphoebe.raw");
color = { red: 84, green: 121, blue: 36 };
size = 0.05;
} else if (which < 0.8) {
tweet = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Animals/mexicanWhipoorwill.raw");
color = { red: 23, green: 197, blue: 230 };
size = 0.12;
} else {
tweet = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Animals/westernscreechowl.raw");
color = { red: 50, green: 67, blue: 144 };
size = 0.15;
}
var properties = {
type: "Sphere",
lifetime: birdLifetime,
position: Vec3.sum(randVector(-range, range), myPosition),
velocity: { x: 0, y: 0, z: 0 },
gravity: { x: 0, y: BIRD_GRAVITY, z: 0 },
dimensions: { x: size * 2, y: size * 2, z: size * 2 },
color: color
};
birds.push(new Bird(Entities.addEntity(properties), tweet, properties.position));
}
var numBirds = 30;
// Generate the birds
for (var i = 0; i < numBirds; i++) {
addBird();
}
// Main update function
function updateBirds(deltaTime) {
// Check to see if we've been running long enough that our birds are dead
var nowTimeInSeconds = new Date().getTime() / 1000;
if ((nowTimeInSeconds - startTimeInSeconds) >= birdLifetime) {
print("our birds are dying, stop our script");
Script.stop();
return;
}
frame++;
// Only update every third frame
if ((frame % 3) == 0) {
myPosition = MyAvatar.position;
// Update all the birds
for (var i = 0; i < numBirds; i++) {
particleID = birds[i].particleID;
var properties = Entities.getEntityProperties(particleID);
// Tweeting behavior
if (birds[i].tweeting == 0) {
if (Math.random() < CHANCE_OF_TWEETING) {
Audio.playSound(birds[i].tweetSound, {
position: properties.position,
volume: 0.75
});
birds[i].tweeting = 10;
}
} else {
birds[i].tweeting -= 1;
}
// Begin movement by getting a target
if (birds[i].moving == false) {
if (Math.random() < CHANCE_OF_MOVING) {
var targetPosition = Vec3.sum(randVector(-range, range), myPosition);
if (targetPosition.x < 0) {
targetPosition.x = 0;
}
if (targetPosition.y < 0) {
targetPosition.y = 0;
}
if (targetPosition.z < 0) {
targetPosition.z = 0;
}
if (targetPosition.x > TREE_SCALE) {
targetPosition.x = TREE_SCALE;
}
if (targetPosition.y > TREE_SCALE) {
targetPosition.y = TREE_SCALE;
}
if (targetPosition.z > TREE_SCALE) {
targetPosition.z = TREE_SCALE;
}
birds[i].targetPosition = targetPosition;
birds[i].moving = true;
}
}
// If we are moving, move towards the target
if (birds[i].moving) {
var desiredVelocity = Vec3.subtract(birds[i].targetPosition, properties.position);
desiredVelocity = vScalarMult(Vec3.normalize(desiredVelocity), BIRD_VELOCITY);
properties.velocity = vInterpolate(properties.velocity, desiredVelocity, 0.2);
// If we are near the target, we should get a new target
if (Vec3.length(Vec3.subtract(properties.position, birds[i].targetPosition)) < (properties.dimensions.x / 5.0)) {
birds[i].moving = false;
}
}
// Use a cosine wave offset to make it look like its flapping.
var offset = Math.cos(nowTimeInSeconds * BIRD_FLAP_SPEED) * properties.dimensions.x;
properties.position.y = properties.position.y + (offset - birds[i].previousFlapOffset);
// Change position relative to previous offset.
birds[i].previousFlapOffset = offset;
// Update the particle
Entities.editEntity(particleID, properties);
}
}
}
// register the call back so it fires before each data send
Script.update.connect(updateBirds);

View file

@ -13,7 +13,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("../libraries/globals.js");
var sound = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Animals/mexicanWhipoorwill.raw");
var CHANCE_OF_PLAYING_SOUND = 0.01;

View file

@ -4,6 +4,10 @@
//
// Copyright 2014 High Fidelity, Inc.
//
//
// Gives the ability to be set various reverb settings.
//
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
@ -14,7 +18,7 @@ var audioOptions = new AudioEffectOptions({
roomSize: 50,
// Seconds
reverbTime: 4,
reverbTime: 10,
// Between 0 - 1
damping: 0.50,

View file

@ -12,7 +12,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("../libraries/globals.js");
function getRandomFloat(min, max) {
return Math.random() * (max - min) + min;

View file

@ -20,7 +20,7 @@
//
//For procedural walk animation
Script.include("libraries/globals.js");
Script.include("../libraries/globals.js");
Script.include(HIFI_PUBLIC_BUCKET + "scripts/proceduralAnimationAPI.js");
var procAnimAPI = new ProcAnimAPI();

View file

@ -11,7 +11,7 @@
//
//For procedural walk animation
Script.include("libraries/globals.js");
Script.include("../libraries/globals.js");
Script.include(HIFI_PUBLIC_BUCKET + "scripts/proceduralAnimationAPI.js");
var procAnimAPI = new ProcAnimAPI();

View file

@ -12,7 +12,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("../libraries/globals.js");
function getRandomFloat(min, max) {
return Math.random() * (max - min) + min;

View file

@ -82,9 +82,9 @@ function addButterfly() {
damping: 0.00001,
dimensions: dimensions,
color: color,
animationURL: "https://s3-us-west-1.amazonaws.com/highfidelity-public/models/content/butterfly/butterfly.fbx",
animationURL: "http://public.highfidelity.io/models/content/butterfly/butterfly.fbx",
animationSettings: "{\"firstFrame\":0,\"fps\":" + newFrameRate + ",\"frameIndex\":0,\"hold\":false,\"lastFrame\":10000,\"loop\":true,\"running\":true,\"startAutomatically\":false}",
modelURL: "https://s3-us-west-1.amazonaws.com/highfidelity-public/models/content/butterfly/butterfly.fbx"
modelURL: "http://public.highfidelity.io/models/content/butterfly/butterfly.fbx"
};
butterflies.push(Entities.addEntity(properties));
}

View file

@ -11,7 +11,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("../libraries/globals.js");
var count = 0;
var moveUntil = 2000;

View file

@ -11,7 +11,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("../libraries/globals.js");
// The "Swatches" example of this script will create 9 different image overlays, that use the color feature to
// display different colors as color swatches. The overlays can be clicked on, to change the "selectedSwatch" variable

View file

@ -9,7 +9,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("../libraries/globals.js");
var modelURL = HIFI_PUBLIC_BUCKET + "models/entities/radio/Speakers.fbx";
var soundURL = HIFI_PUBLIC_BUCKET + "sounds/family.stereo.raw";

View file

@ -11,7 +11,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("../libraries/globals.js");
var iteration = 0;
@ -59,7 +59,7 @@ var middleY = gameAt.y + (gameSize.y/2);
var invaderSize = 0.4;
var shipSize = 0.25;
var missileSize = 0.1;
var missileSize = 1.0;
var myShip;
var myShipProperties;
@ -101,7 +101,7 @@ var soundInMyHead = true;
// models...
var invaderModels = new Array();
invaderModels[0] = {
modelURL: HIFI_PUBLIC_BUCKET + "meshes/newInvader16x16-large-purple.svo",
modelURL: HIFI_PUBLIC_BUCKET + "meshes/space_invader.fbx",
modelScale: 450,
modelTranslation: { x: -1.3, y: -1.3, z: -1.3 },
};
@ -141,7 +141,7 @@ function initializeMyShip() {
damping: 0,
dimensions: { x: shipSize * 2, y: shipSize * 2, z: shipSize * 2 },
color: { red: 0, green: 255, blue: 0 },
modelURL: HIFI_PUBLIC_BUCKET + "meshes/myCannon16x16.svo",
modelURL: HIFI_PUBLIC_BUCKET + "meshes/space_invader.fbx",
lifetime: itemLifetimes
};
myShip = Entities.addEntity(myShipProperties);
@ -360,7 +360,7 @@ function keyPressEvent(key) {
myShipProperties.position.x = gameAt.x + gameSize.x;
}
moveShipTo(myShipProperties.position);
} else if (key.text == " ") {
} else if (key.text == "f") {
fireMissile();
} else if (key.text == "q") {
endGame();

View file

@ -58,7 +58,7 @@ var button1 = Overlays.addOverlay("image", { // green button
width: 40,
height: 35,
subImage: { x: 0, y: 0, width: 39, height: 35 },
imageURL: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/thumb.png",
imageURL: "https://public.highfidelity.io/images/thumb.png",
color: readyColor,
visible: true
});
@ -69,7 +69,7 @@ var button2 = Overlays.addOverlay("image", { // red button
width: 40,
height: 35,
subImage: { x: 0, y: 0, width: 39, height: 35 },
imageURL: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/thumb.png",
imageURL: "https://public.highfidelity.io/images/thumb.png",
color: { red: 250, green: 2, blue: 2},
visible: true,
});

View file

@ -1,986 +0,0 @@
//
// follow-through-and-overlapping-action.js
//
// Simple demonstration showing the visual effect of adding the Disney
// follow through and overlapping action animation technique to avatar movement.
//
// Designed and created by David Wooldridge and Ozan Serim, August 2014
//
// Version 1.001
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// location of overlay images
var pathToOverlays = "https://s3.amazonaws.com/hifi-public/ArmSwingScript/ArmSwingOverlays/";
// animation
var LEFT = 1;
var RIGHT = 2;
var DIRECTION = 0;
// min max
var weightMin = 1;
var weightMax = 20;
var jointEffectMax = 5;
// animation effect variables
var handEffect = 3.4; // 0 to jointEffectMax
var forearmEffect = 2.5; // 0 to jointEffectMax
var limbWeight = 8; // will only use nearest integer, as defines an array length
var effectStrength = 1; // 0 to 1 - overall effect strength
// Overshoot: false uses upper arm as driver for forearm and hand
// true uses upper arm for forearm and lower arm as driver for hand.
var overShoot = false;
// animate self (tap the 'r' key)
var animateSelf = false;
var selfAnimateFrequency = 7.5;
// overlay values
var handleValue = 0;
var controlPosX = Window.innerWidth / 2 - 500;
var controlPosY = 0;
var minSliderX = controlPosX + 18;
var sliderRangeX = 190;
var minHandleX = controlPosX - 50;
var handleRangeX = 350 / 2;
var handlePosition = 0;
// background overlay
var controllerBackground = Overlays.addOverlay("image", {
bounds: {x: controlPosX, y: controlPosY, width: 250, height: 380},
imageURL: pathToOverlays + "flourish-augmentation-control-overlay.png",
color: {red: 255, green: 255, blue: 255},
alpha: 1
});
var controllerRadioSelectedBackground = Overlays.addOverlay("image", {
bounds: {x: controlPosX, y: controlPosY, width: 250, height: 380},
imageURL: pathToOverlays + "flourish-augmentation-control-radio-selected-overlay.png",
color: {red: 255, green: 255, blue: 255},
alpha: 1,
visible: false
});
// handle overlay
var applyMotionHandle = Overlays.addOverlay("image", {
bounds: {x: minHandleX+handleRangeX-39, y: controlPosY+232,
width: 79, height: 100},
imageURL: pathToOverlays + "flourish-augmentation-handle-overlay.png",
color: {red: 255, green: 255, blue: 255},
alpha: 1
});
// slider overlays
var handEffectSlider = Overlays.addOverlay("image", {
bounds: {x: minSliderX + (handEffect / jointEffectMax * sliderRangeX),
y: controlPosY + 46, width: 25, height: 25},
imageURL: pathToOverlays + "ddao-slider-handle.png",
color: {red: 255, green: 255, blue: 255},
alpha: 1
});
var forearmEffectSlider = Overlays.addOverlay("image", {
bounds: {x: minSliderX + (forearmEffect / jointEffectMax * sliderRangeX), y: controlPosY + 86,
width: 25, height: 25},
imageURL: pathToOverlays + "ddao-slider-handle.png",
color: {red: 255, green: 255, blue: 255},
alpha: 1
});
var limbWeightSlider = Overlays.addOverlay("image", {
bounds: {x: minSliderX + (limbWeight / weightMax * sliderRangeX), y: controlPosY+126,
width: 25, height: 25},
imageURL: pathToOverlays + "ddao-slider-handle.png",
color: {red: 255, green: 255, blue: 255},
alpha: 1
});
var effectStrengthSlider = Overlays.addOverlay("image", {
bounds: {x: minSliderX + (effectStrength * sliderRangeX), y: controlPosY+206,
width: 25, height: 25},
imageURL: pathToOverlays + "ddao-slider-handle.png",
color: {red: 255, green: 255, blue: 255},
alpha: 1
});
// main loop - using averaging filters to add limb element follow-through
var upperArmPDampingFilter = [];
upperArmPDampingFilter.length = parseInt(limbWeight); // sets amount of damping for upper arm pitch
var forearmPDampingFilter = [];
forearmPDampingFilter.length = parseInt(limbWeight) + 2; // sets amount of damping for lower arm pitch
var cumulativeTime = 0;
// Per-frame animation driver: blends three right-arm pose keyframes, then
// low-pass filters the upper-arm (and optionally forearm) pitch so the lighter
// joints lag behind — producing the "follow through / overlapping action" look.
Script.update.connect(function(deltaTime) {
// used for self animating (press r to invoke)
cumulativeTime += deltaTime;
// blend three keyframes using handle position to determine progress between keyframes
var animationProgress = handleValue;
if(animateSelf) {
// self-animate mode: drive progress with a sine wave remapped to [0, 1]
animationProgress = Math.sin(cumulativeTime * selfAnimateFrequency);
animationProgress++;
animationProgress /= 2;
}
var keyframeOneWeight = 0;
var keyframeTwoWeight = 0;
var keyframeThreeWeight = 0;
if(movingHandle || animateSelf) {
// animating: crossfade between keyframes two and three
keyframeOneWeight = 0;
keyframeTwoWeight = animationProgress;
keyframeThreeWeight = 1 - animationProgress;
}
else if(!movingHandle) {
// idle
keyframeOneWeight = 1;
keyframeTwoWeight = 0;
keyframeThreeWeight = 0;
}
// joints[8..11] are shoulders, upperArms, lowerArms, hands (see keyframe data)
var shoulderPitch =
keyframeOneWeight * keyFrameOne.joints[8].pitchOffset +
keyframeTwoWeight * keyFrameTwo.joints[8].pitchOffset +
keyframeThreeWeight * keyFrameThree.joints[8].pitchOffset;
var upperArmPitch =
keyframeOneWeight * keyFrameOne.joints[9].pitchOffset +
keyframeTwoWeight * keyFrameTwo.joints[9].pitchOffset +
keyframeThreeWeight * keyFrameThree.joints[9].pitchOffset;
// get the change in upper arm pitch and use to add weight effect to forearm (always) and hand (only for overShoot)
var deltaUpperArmPitch = effectStrength *
(upperArmPDampingFilter[upperArmPDampingFilter.length - 1] -
upperArmPDampingFilter[0]);
var forearmPitch =
keyframeOneWeight * keyFrameOne.joints[10].pitchOffset +
keyframeTwoWeight * keyFrameTwo.joints[10].pitchOffset +
keyframeThreeWeight * keyFrameThree.joints[10].pitchOffset -
(deltaUpperArmPitch/(jointEffectMax - forearmEffect));
// there are two methods for calculating the hand follow through
var handPitch = 0;
if(overShoot) {
// get the change in forearm pitch and use to add weight effect to hand
var deltaForearmPitch = effectStrength *
(forearmPDampingFilter[forearmPDampingFilter.length - 1] -
forearmPDampingFilter[0]);
handPitch =
keyframeOneWeight * keyFrameOne.joints[11].pitchOffset +
keyframeTwoWeight * keyFrameTwo.joints[11].pitchOffset +
keyframeThreeWeight * keyFrameThree.joints[11].pitchOffset +
(deltaForearmPitch /(jointEffectMax - handEffect)); // hand driven by forearm
} else {
handPitch =
keyframeOneWeight * keyFrameOne.joints[11].pitchOffset +
keyframeTwoWeight * keyFrameTwo.joints[11].pitchOffset +
keyframeThreeWeight * keyFrameThree.joints[11].pitchOffset -
(deltaUpperArmPitch /(jointEffectMax - handEffect)); // hand driven by upper arm
}
// yaw and roll are straight keyframe blends (no follow-through filtering)
var shoulderYaw =
keyframeOneWeight * keyFrameOne.joints[8].yawOffset +
keyframeTwoWeight * keyFrameTwo.joints[8].yawOffset +
keyframeThreeWeight * keyFrameThree.joints[8].yawOffset;
var upperArmYaw =
keyframeOneWeight * keyFrameOne.joints[9].yawOffset +
keyframeTwoWeight * keyFrameTwo.joints[9].yawOffset +
keyframeThreeWeight * keyFrameThree.joints[9].yawOffset;
var lowerArmYaw =
keyframeOneWeight * keyFrameOne.joints[10].yawOffset +
keyframeTwoWeight * keyFrameTwo.joints[10].yawOffset +
keyframeThreeWeight * keyFrameThree.joints[10].yawOffset;
var handYaw =
keyframeOneWeight * keyFrameOne.joints[11].yawOffset +
keyframeTwoWeight * keyFrameTwo.joints[11].yawOffset +
keyframeThreeWeight * keyFrameThree.joints[11].yawOffset;
var shoulderRoll =
keyframeOneWeight * keyFrameOne.joints[8].rollOffset +
keyframeTwoWeight * keyFrameTwo.joints[8].rollOffset +
keyframeThreeWeight * keyFrameThree.joints[8].rollOffset;
var upperArmRoll =
keyframeOneWeight * keyFrameOne.joints[9].rollOffset +
keyframeTwoWeight * keyFrameTwo.joints[9].rollOffset +
keyframeThreeWeight * keyFrameThree.joints[9].rollOffset;
var lowerArmRoll =
keyframeOneWeight * keyFrameOne.joints[10].rollOffset +
keyframeTwoWeight * keyFrameTwo.joints[10].rollOffset +
keyframeThreeWeight * keyFrameThree.joints[10].rollOffset;
var handRoll =
keyframeOneWeight * keyFrameOne.joints[11].rollOffset +
keyframeTwoWeight * keyFrameTwo.joints[11].rollOffset +
keyframeThreeWeight * keyFrameThree.joints[11].rollOffset;
// filter upper arm pitch (moving average over ~limbWeight samples; a heavier
// limb weight means a longer filter window and therefore more lag)
upperArmPDampingFilter.push(upperArmPitch);
upperArmPDampingFilter.shift();
var upperArmPitchFiltered = 0;
for(ea in upperArmPDampingFilter) upperArmPitchFiltered += upperArmPDampingFilter[ea];
upperArmPitchFiltered /= upperArmPDampingFilter.length;
upperArmPitch = (effectStrength * upperArmPitchFiltered) + ((1 - effectStrength) * upperArmPitch);
// filter forearm pitch only if it is being used for hand follow-through
if(overShoot) {
forearmPDampingFilter.push(forearmPitch);
forearmPDampingFilter.shift();
var forearmPitchFiltered = 0;
for(ea in forearmPDampingFilter) forearmPitchFiltered += forearmPDampingFilter[ea];
forearmPitchFiltered /= forearmPDampingFilter.length;
forearmPitch = (effectStrength*forearmPitchFiltered) + ((1-effectStrength) * forearmPitch);
}
// apply the new rotation data to the joints
MyAvatar.setJointData("RightShoulder", Quat.fromPitchYawRollDegrees(shoulderPitch, shoulderYaw, shoulderRoll));
MyAvatar.setJointData("RightArm", Quat.fromPitchYawRollDegrees(upperArmPitch, -upperArmYaw, upperArmRoll));
MyAvatar.setJointData("RightForeArm", Quat.fromPitchYawRollDegrees(forearmPitch, lowerArmYaw, lowerArmRoll));
MyAvatar.setJointData("RightHand", Quat.fromPitchYawRollDegrees(handPitch, handYaw, handRoll));
});
// mouse handling
var movingHandEffectSlider = false;
var movingForearmEffectSlider = false;
var movingLimbWeightSlider = false;
var movingDampingSlider = false;
var movingEffectStrengthSlider = false;
var movingHandle = false;
// Route a mouse press to whichever UI overlay was hit: start a slider/handle
// drag, or toggle the overshoot radio button.
function mousePressEvent(event) {
    var hit = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
    // Hit-box of the radio button within the controller background image.
    var insideRadio = event.x > 477 && event.x < 497 && event.y > 338 && event.y < 360;
    if (hit === applyMotionHandle) {
        movingHandle = true;
    } else if (hit === handEffectSlider) {
        movingHandEffectSlider = true;
    } else if (hit === forearmEffectSlider) {
        movingForearmEffectSlider = true;
    } else if (hit === limbWeightSlider) {
        movingLimbWeightSlider = true;
    } else if (hit === effectStrengthSlider) {
        movingEffectStrengthSlider = true;
    } else if (hit === controllerRadioSelectedBackground && insideRadio) {
        // Radio off: upper arm drives both forearm and hand.
        overShoot = false;
        Overlays.editOverlay(controllerBackground, {visible: true});
        Overlays.editOverlay(controllerRadioSelectedBackground, {visible: false});
    } else if (hit === controllerBackground && insideRadio) {
        // Radio on: forearm drives the hand (overshoot mode).
        overShoot = true;
        Overlays.editOverlay(controllerBackground, {visible: false});
        Overlays.editOverlay(controllerRadioSelectedBackground, {visible: true});
    }
}
// Drag handler: while a control is grabbed, convert the mouse x position into
// a normalised [0..1] (or [-1..1] for the main handle) value, update the bound
// parameter, and move the corresponding overlay to track the cursor.
function mouseMoveEvent(event) {
if(movingHandle) {
// Main handle: normalised to [-1, 1] around the track center.
var thumbClickOffsetX = event.x - minHandleX;
var thumbPositionNormalised = (thumbClickOffsetX - handleRangeX) / handleRangeX;
if(thumbPositionNormalised <= -1) thumbPositionNormalised = -1;
else if(thumbPositionNormalised > 1) thumbPositionNormalised = 1;
if(thumbPositionNormalised < 0) DIRECTION = LEFT;
else DIRECTION = RIGHT;
// handleValue is the [0, 1] animation progress read by the update loop.
handleValue = (thumbPositionNormalised + 1) / 2;
// The -39 offset appears to compensate for the handle image width — TODO confirm.
var handleX = (thumbPositionNormalised * handleRangeX) + handleRangeX - 39;
Overlays.editOverlay(applyMotionHandle, {x: handleX + minHandleX});
return;
}
else if(movingHandEffectSlider) {
var thumbClickOffsetX = event.x - minSliderX;
var thumbPositionNormalised = thumbClickOffsetX / sliderRangeX;
if(thumbPositionNormalised < 0) thumbPositionNormalised = 0;
if(thumbPositionNormalised > 1) thumbPositionNormalised = 1;
// NOTE(review): the -0.08 calibration offset means handEffect can go slightly
// negative at the far left — presumably intentional tuning; verify.
handEffect = (thumbPositionNormalised - 0.08) * jointEffectMax;
var sliderX = thumbPositionNormalised * sliderRangeX ;
Overlays.editOverlay(handEffectSlider, {x: sliderX + minSliderX});
}
else if(movingForearmEffectSlider) {
var thumbClickOffsetX = event.x - minSliderX;
var thumbPositionNormalised = thumbClickOffsetX / sliderRangeX;
if(thumbPositionNormalised < 0) thumbPositionNormalised = 0;
if(thumbPositionNormalised > 1) thumbPositionNormalised = 1;
// Same shape as the hand slider but with a -0.1 calibration offset.
forearmEffect = (thumbPositionNormalised - 0.1) * jointEffectMax;
var sliderX = thumbPositionNormalised * sliderRangeX ;
Overlays.editOverlay(forearmEffectSlider, {x: sliderX + minSliderX});
}
else if(movingLimbWeightSlider) {
var thumbClickOffsetX = event.x - minSliderX;
var thumbPositionNormalised = thumbClickOffsetX / sliderRangeX;
if(thumbPositionNormalised<0) thumbPositionNormalised = 0;
if(thumbPositionNormalised>1) thumbPositionNormalised = 1;
limbWeight = thumbPositionNormalised * weightMax;
if(limbWeight < weightMin) limbWeight = weightMin;
// Resizing the averaging filter changes how much the upper arm lags.
upperArmPDampingFilter.length = parseInt(limbWeight);
var sliderX = thumbPositionNormalised * sliderRangeX ;
Overlays.editOverlay(limbWeightSlider, {x: sliderX + minSliderX});
}
else if(movingEffectStrengthSlider) {
var thumbClickOffsetX = event.x - minSliderX;
var thumbPositionNormalised = thumbClickOffsetX / sliderRangeX;
if(thumbPositionNormalised < 0) thumbPositionNormalised = 0;
if(thumbPositionNormalised > 1) thumbPositionNormalised = 1;
// Overall dry/wet mix of the follow-through effect.
effectStrength = thumbPositionNormalised;
var sliderX = thumbPositionNormalised * sliderRangeX ;
Overlays.editOverlay(effectStrengthSlider, {x: sliderX + minSliderX});
return;
}
}
// End any drag in progress; the main handle additionally snaps back to rest.
function mouseReleaseEvent(event) {
    if (movingHandle) {
        movingHandle = false;
        handleValue = 0;
        // Re-center the handle (39px compensates for the handle image width).
        Overlays.editOverlay(applyMotionHandle, {x: minHandleX + handleRangeX - 39});
        return;
    }
    if (movingHandEffectSlider) {
        movingHandEffectSlider = false;
    } else if (movingForearmEffectSlider) {
        movingForearmEffectSlider = false;
    } else if (movingLimbWeightSlider) {
        movingLimbWeightSlider = false;
    } else if (movingEffectStrengthSlider) {
        movingEffectStrengthSlider = false;
    } else if (movingDampingSlider) {
        movingDampingSlider = false;
    }
}
// set up mouse and keyboard callbacks
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Controller.keyPressEvent.connect(keyPressEvent);
// keyboard command
// Keyboard controls: q = dump current tuning values, r = toggle self-animation,
// [ / ] = raise / lower the self-animation frequency by 0.5.
function keyPressEvent(event) {
    var key = event.text;
    if (key == "q") {
        print('hand effect = ' + handEffect + '\n');
        print('forearmEffect = ' + forearmEffect + '\n');
        print('limbWeight = ' + limbWeight + '\n');
        print('effectStrength = ' + effectStrength + '\n');
    } else if (key == "r") {
        animateSelf = !animateSelf;
    } else if (key == "[") {
        selfAnimateFrequency += 0.5;
        print('selfAnimateFrequency = ' + selfAnimateFrequency);
    } else if (key == "]") {
        selfAnimateFrequency -= 0.5;
        print('selfAnimateFrequency = ' + selfAnimateFrequency);
    }
}
// zero out all joints
// Clear every joint override so the avatar returns to its default pose.
function resetJoints() {
    var jointNames = MyAvatar.getJointNames();
    for (var j = 0; j < jointNames.length; j++) {
        MyAvatar.clearJointData(jointNames[j]);
    }
}
// Script ending
// Shutdown cleanup: remove every UI overlay and clear all joint overrides.
Script.scriptEnding.connect(function() {
// delete the overlays
Overlays.deleteOverlay(controllerBackground);
Overlays.deleteOverlay(controllerRadioSelectedBackground);
Overlays.deleteOverlay(handEffectSlider);
Overlays.deleteOverlay(forearmEffectSlider);
Overlays.deleteOverlay(limbWeightSlider);
Overlays.deleteOverlay(effectStrengthSlider);
Overlays.deleteOverlay(applyMotionHandle);
// leave the avi in zeroed out stance
resetJoints();
});
// animation data. animation keyframes produced using walk.js
MyAvatar.setJointData("LeftArm", Quat.fromPitchYawRollDegrees(80,0,0));
var keyFrameOne =
{
"name":"FemaleStandingOne",
"settings":{
"baseFrequency":70,
"flyingHipsPitch":60,
"takeFlightVelocity":40,
"maxBankingAngle":40
},
"adjusters":{
"legsSeparation":{
"strength":-0.03679245283018867,
"separationAngle":50
},
"stride":{
"strength":0,
"upperLegsPitch":30,
"lowerLegsPitch":15,
"upperLegsPitchOffset":0.2,
"lowerLegsPitchOffset":1.5
}
},
"joints":[
{
"name":"hips",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0,
"thrust":0,
"bob":0,
"sway":0,
"thrustPhase":180,
"bobPhase":0,
"swayPhase":-90,
"thrustOffset":0,
"bobOffset":0,
"swayOffset":0
},
{
"name":"upperLegs",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"lowerLegs",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"feet",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"toes",
"pitch":2.0377358490566038,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":4.415094339622641,
"yawOffset":0,
"rollOffset":0
},
{
"name":"spine",
"pitch":1.660377358490566,
"yaw":0,
"roll":0,
"pitchPhase":-180,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"spine1",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"spine2",
"pitch":2.1132075471698113,
"yaw":0,
"roll":0,
"pitchPhase":-0.6792452830188722,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"shoulders",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0.6792452830188678,
"yawOffset":-5.20754716981132,
"rollOffset":-2.9433962264150937
},
{
"name":"upperArms",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":77.77358490566039,
"yawOffset":9.169811320754715,
"rollOffset":0
},
{
"name":"lowerArms",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"hands",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":1.6981132075471694,
"yawOffset":-1.0188679245283017,
"rollOffset":1.0188679245283017
},
{
"name":"head",
"pitch":0,
"yaw":1.7358490566037734,
"roll":1.5094339622641508,
"pitchPhase":-90.33962264150944,
"yawPhase":94.41509433962267,
"rollPhase":0,
"pitchOffset":1.6981132075471694,
"yawOffset":0,
"rollOffset":0
}
]
};
var keyFrameTwo =
{
"name":"FemaleStandingOne",
"settings":{
"baseFrequency":70,
"flyingHipsPitch":60,
"takeFlightVelocity":40,
"maxBankingAngle":40
},
"adjusters":{
"legsSeparation":{
"strength":-0.03679245283018867,
"separationAngle":50
},
"stride":{
"strength":0,
"upperLegsPitch":30,
"lowerLegsPitch":15,
"upperLegsPitchOffset":0.2,
"lowerLegsPitchOffset":1.5
}
},
"joints":[
{
"name":"hips",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0,
"thrust":0,
"bob":0,
"sway":0,
"thrustPhase":180,
"bobPhase":0,
"swayPhase":-90,
"thrustOffset":0,
"bobOffset":0,
"swayOffset":0
},
{
"name":"upperLegs",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"lowerLegs",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"feet",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"toes",
"pitch":2.0377358490566038,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":4.415094339622641,
"yawOffset":0,
"rollOffset":0
},
{
"name":"spine",
"pitch":1.660377358490566,
"yaw":0,
"roll":0,
"pitchPhase":-180,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"spine1",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"spine2",
"pitch":2.1132075471698113,
"yaw":0,
"roll":0,
"pitchPhase":-0.6792452830188722,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"shoulders",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0.6792452830188678,
"yawOffset":-5.20754716981132,
"rollOffset":-2.9433962264150937
},
{
"name":"upperArms",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":49.584905660377345,
"yawOffset":9.169811320754715,
"rollOffset":0
},
{
"name":"lowerArms",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"hands",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":-13,
"yawOffset":-1.0188679245283017,
"rollOffset":1.0188679245283017
},
{
"name":"head",
"pitch":0,
"yaw":1.7358490566037734,
"roll":1.5094339622641508,
"pitchPhase":-90.33962264150944,
"yawPhase":94.41509433962267,
"rollPhase":0,
"pitchOffset":1.6981132075471694,
"yawOffset":0,
"rollOffset":0
}
]
};
var keyFrameThree =
{
"name":"FemaleStandingOne",
"settings":{
"baseFrequency":70,
"flyingHipsPitch":60,
"takeFlightVelocity":40,
"maxBankingAngle":40
},
"adjusters":{
"legsSeparation":{
"strength":-0.03679245283018867,
"separationAngle":50
},
"stride":{
"strength":0,
"upperLegsPitch":30,
"lowerLegsPitch":15,
"upperLegsPitchOffset":0.2,
"lowerLegsPitchOffset":1.5
}
},
"joints":[
{
"name":"hips",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0,
"thrust":0,
"bob":0,
"sway":0,
"thrustPhase":180,
"bobPhase":0,
"swayPhase":-90,
"thrustOffset":0,
"bobOffset":0,
"swayOffset":0
},
{
"name":"upperLegs",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"lowerLegs",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"feet",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"toes",
"pitch":2.0377358490566038,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":4.415094339622641,
"yawOffset":0,
"rollOffset":0
},
{
"name":"spine",
"pitch":1.660377358490566,
"yaw":0,
"roll":0,
"pitchPhase":-180,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"spine1",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"spine2",
"pitch":2.1132075471698113,
"yaw":0,
"roll":0,
"pitchPhase":-0.6792452830188722,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"shoulders",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":-21.0566037735849,
"yawOffset":-5.20754716981132,
"rollOffset":-2.9433962264150937
},
{
"name":"upperArms",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":-33.28301886792452,
"yawOffset":9.169811320754715,
"rollOffset":0
},
{
"name":"lowerArms",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":0,
"yawOffset":0,
"rollOffset":0
},
{
"name":"hands",
"pitch":0,
"yaw":0,
"roll":0,
"pitchPhase":0,
"yawPhase":0,
"rollPhase":0,
"pitchOffset":-13,
"yawOffset":-1.0188679245283017,
"rollOffset":1.0188679245283017
},
{
"name":"head",
"pitch":0,
"yaw":1.7358490566037734,
"roll":1.5094339622641508,
"pitchPhase":-90.33962264150944,
"yawPhase":94.41509433962267,
"rollPhase":0,
"pitchOffset":1.6981132075471694,
"yawOffset":0,
"rollOffset":0
}
]
};

View file

@ -1,77 +0,0 @@
//
// fountain.js
// examples
//
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Euclidean length of a 3-vector {x, y, z}.
function vLength(v) {
    var sumOfSquares = v.x * v.x + v.y * v.y + v.z * v.z;
    return Math.sqrt(sumOfSquares);
}
// Print a vector's components as "x, y, z" followed by a newline.
function printVector(v) {
    var line = [v.x, v.y, v.z].join(", ");
    print(line + "\n");
}
// Create a random vector with individual lengths between a,b
// Random vector whose components are each drawn uniformly from [a, b).
function randVector(a, b) {
    var span = b - a;
    return {
        x: a + Math.random() * span,
        y: a + Math.random() * span,
        z: a + Math.random() * span
    };
}
// Component-wise difference: returns a new vector a - b.
function vMinus(a, b) {
    var diff = {};
    diff.x = a.x - b.x;
    diff.y = a.y - b.y;
    diff.z = a.z - b.z;
    return diff;
}
// Component-wise sum: returns a new vector a + b.
function vPlus(a, b) {
    var sum = {};
    sum.x = a.x + b.x;
    sum.y = a.y + b.y;
    sum.z = a.z + b.z;
    return sum;
}
// Copy b's components into a, mutating a in place. Returns nothing.
function vCopy(a, b) {
a.x = b.x;
a.y = b.y;
a.z = b.z;
return;
}
// Returns a vector which is fraction of the way between a and b
// Linear interpolation between a and b: returns a + (b - a) * fraction,
// computed component-wise (fraction 0 gives a, fraction 1 gives b).
function vInterpolate(a, b, fraction) {
    var mix = {};
    mix.x = a.x + (b.x - a.x) * fraction;
    mix.y = a.y + (b.y - a.y) * fraction;
    mix.z = a.z + (b.z - a.z) * fraction;
    return mix;
}
var position = { x: 5.0, y: 0.6, z: 5.0 };
//Voxels.setVoxel(position.x, 0, position.z, 0.5, 0, 0, 255);
var totalEntities = 0;
// Per-frame callback: with 10% probability per frame, spawn one short-lived
// blue droplet sphere at the fountain position with a random upward velocity.
// Stops the whole script after more than 100 droplets have been created.
function makeFountain(deltaTime) {
if (Math.random() < 0.10) {
//print("Made entity!\n");
// Random droplet radius in [0.02, 0.07).
var radius = (0.02 + (Math.random() * 0.05));
var properties = {
type: "Sphere",
position: position,
dimensions: { x: radius, y: radius, z: radius},
color: { red: 0, green: 0, blue: 128 },
// Random sideways spread, 1-3 m/s upward.
velocity: { x: (Math.random() * 1.0 - 0.5),
y: (1.0 + (Math.random() * 2.0)),
z: (Math.random() * 1.0 - 0.5) },
gravity: { x: 0, y: -0.1, z: 0 },
damping: 0.25,
lifetime: 1
}
Entities.addEntity(properties);
totalEntities++;
}
if (totalEntities > 100) {
Script.stop();
}
}
// register the call back so it fires before each data send
Script.update.connect(makeFountain);

View file

@ -1,328 +0,0 @@
//
// grenadeLauncher.js
// examples
// Created by Ben Arnold on 7/11/14.
// This is a modified version of gun.js by Brad Hefta-Gaub.
//
// Copyright 2013 High Fidelity, Inc.
//
// This is an example script that turns the hydra controllers and mouse into a entity gun.
// It reads the controller, watches for trigger pulls, and launches entities.
// When entities collide with voxels they blow big holes out of the voxels.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
// Uniform random float in [min, max).
function getRandomFloat(min, max) {
    var range = max - min;
    return min + Math.random() * range;
}
var lastX = 0;
var lastY = 0;
var yawFromMouse = 0;
var pitchFromMouse = 0;
var isMouseDown = false;
var BULLET_VELOCITY = 3.0;
var MIN_THROWER_DELAY = 1000;
var MAX_THROWER_DELAY = 1000;
var LEFT_BUTTON_1 = 1;
var LEFT_BUTTON_3 = 3;
var RELOAD_INTERVAL = 5;
var showScore = false;
// Load some sound to use for loading and firing
var fireSound = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Guns/GUN-SHOT2.raw");
var loadSound = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Guns/Gun_Reload_Weapon22.raw");
var impactSound = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Guns/BulletImpact2.raw");
var targetHitSound = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Space%20Invaders/hit.raw");
var targetLaunchSound = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Space%20Invaders/shoot.raw");
var gunModel = "http://public.highfidelity.io/models/attachments/HaloGun.fst";
// Shared playback options for every gun sound; `position` is set per shot
// before each Audio.playSound call.
// Fix: the original read `var audioOptions { ... }` — the missing `=` is a
// SyntaxError that would prevent the entire script from loading.
var audioOptions = {
    volume: 0.9
};
var shotsFired = 0;
var shotTime = new Date();
// initialize our triggers
var triggerPulled = new Array();
var numberOfTriggers = Controller.getNumberOfTriggers();
for (t = 0; t < numberOfTriggers; t++) {
triggerPulled[t] = false;
}
var isLaunchButtonPressed = false;
var score = 0;
// Create a reticle image in center of screen
var screenSize = Controller.getViewportDimensions();
var reticle = Overlays.addOverlay("image", {
x: screenSize.x / 2 - 16,
y: screenSize.y / 2 - 16,
width: 32,
height: 32,
imageURL: HIFI_PUBLIC_BUCKET + "images/reticle.png",
color: { red: 255, green: 255, blue: 255},
alpha: 1
});
if (showScore) {
var text = Overlays.addOverlay("text", {
x: screenSize.x / 2 - 100,
y: screenSize.y / 2 - 50,
width: 150,
height: 50,
color: { red: 0, green: 0, blue: 0},
textColor: { red: 255, green: 0, blue: 0},
topMargin: 4,
leftMargin: 4,
text: "Score: " + score
});
}
// Print a labeled vector: "<label> x, y, z".
function printVector(string, vector) {
    var coords = vector.x + ", " + vector.y + ", " + vector.z;
    print(string + " " + coords);
}
// Spawn one grenade entity at `position` travelling with `velocity`, play the
// fire sound at the muzzle, and play the reload sound every RELOAD_INTERVAL
// shots (tracked via the module-level shotsFired counter).
function shootBullet(position, velocity) {
    var BULLET_SIZE = 0.1;
    var BULLET_GRAVITY = -3.0;
    // Reasonable lifetime so a stray grenade is less likely to accidentally
    // blow up far-away voxels.
    var grenade = {
        type: "Sphere",
        position: position,
        collisionsWillMove: true,
        dimensions: { x: BULLET_SIZE, y: BULLET_SIZE, z: BULLET_SIZE },
        color: { red: 10, green: 10, blue: 10 },
        velocity: velocity,
        gravity: { x: 0, y: BULLET_GRAVITY, z: 0 },
        lifetime: 10.0,
        damping: 0
    };
    Entities.addEntity(grenade);
    // Play firing sounds at the launch point.
    audioOptions.position = position;
    Audio.playSound(fireSound, audioOptions);
    shotsFired++;
    if ((shotsFired % RELOAD_INTERVAL) == 0) {
        Audio.playSound(loadSound, audioOptions);
    }
}
// Launch a target sphere from ~3m in front of the camera, thrown forward and
// upward in a randomly yawed (±20°) direction, then record the launch time
// (read by entityCollisionWithEntity) and play the launch sound.
function shootTarget() {
var TARGET_SIZE = 0.25;
var TARGET_GRAVITY = -0.6;
var TARGET_UP_VELOCITY = 3.0;
var TARGET_FWD_VELOCITY = 5.0;
var DISTANCE_TO_LAUNCH_FROM = 3.0;
var camera = Camera.getPosition();
//printVector("camera", camera);
// Random yaw of up to ±20° applied on top of the camera's orientation.
var targetDirection = Quat.angleAxis(getRandomFloat(-20.0, 20.0), { x:0, y:1, z:0 });
targetDirection = Quat.multiply(Camera.getOrientation(), targetDirection);
var forwardVector = Quat.getFront(targetDirection);
//printVector("forwardVector", forwardVector);
var newPosition = Vec3.sum(camera, Vec3.multiply(forwardVector, DISTANCE_TO_LAUNCH_FROM));
//printVector("newPosition", newPosition);
// Forward throw plus a fixed upward kick.
var velocity = Vec3.multiply(forwardVector, TARGET_FWD_VELOCITY);
velocity.y += TARGET_UP_VELOCITY;
//printVector("velocity", velocity);
Entities.addEntity(
{ type: "Sphere",
position: newPosition,
collisionsWillMove: true,
dimensions: { x: TARGET_SIZE, y: TARGET_SIZE, z: TARGET_SIZE },
color: { red: 0, green: 200, blue: 200 },
velocity: velocity,
gravity: { x: 0, y: TARGET_GRAVITY, z: 0 },
lifetime: 1000.0,
damping: 0.0001 });
// Record start time
shotTime = new Date();
// Play target shoot sound
audioOptions.position = newPosition;
Audio.playSound(targetLaunchSound, audioOptions);
}
// Collision callback: a bullet hit a target. Increment the score (updating
// the score overlay if shown), delete both entities, and play the hit sound
// near the shooter's camera.
function entityCollisionWithEntity(entity1, entity2, collision) {
score++;
if (showScore) {
Overlays.editOverlay(text, { text: "Score: " + score } );
}
// Record shot time
// (elapsed ms since launch — only used by the commented-out debug prints)
var endTime = new Date();
var msecs = endTime.valueOf() - shotTime.valueOf();
//print("hit, msecs = " + msecs);
//Vec3.print("penetration = ", collision.penetration);
//Vec3.print("contactPoint = ", collision.contactPoint);
Entities.deleteEntity(entity1);
Entities.deleteEntity(entity2);
// play the sound near the camera so the shooter can hear it
audioOptions.position = Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()));
Audio.playSound(targetHitSound, audioOptions);
}
// Keyboard shortcuts: "t" launches a target after a random delay,
// "." fires a bullet from the camera, "r" replays the reload sound.
function keyPressEvent(event) {
    // if our tools are off, then don't do anything
    switch (event.text) {
        case "t":
            var delay = MIN_THROWER_DELAY + Math.random() * MAX_THROWER_DELAY;
            Script.setTimeout(shootTarget, delay);
            break;
        case ".":
            shootFromMouse();
            break;
        case "r":
            playLoadSound();
            break;
    }
}
// Plays the reload sound just in front of the camera so the shooter hears it.
function playLoadSound() {
    var justAhead = Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()));
    audioOptions.position = justAhead;
    Audio.playSound(loadSound, audioOptions);
}
// Attach the gun model to the left hand with a hand-tuned offset, rotation,
// and 0.20 scale; the right-hand variant is kept commented for reference.
//MyAvatar.attach(gunModel, "RightHand", {x: -0.02, y: -.14, z: 0.07}, Quat.fromPitchYawRollDegrees(-70, -151, 72), 0.20);
MyAvatar.attach(gunModel, "LeftHand", {x: -0.02, y: -.14, z: 0.07}, Quat.fromPitchYawRollDegrees(-70, -151, 72), 0.20);
// Give a bit of time to load before playing sound
Script.setTimeout(playLoadSound, 2000);
// Per-frame update: applies accumulated mouse-look yaw/pitch to the avatar,
// edge-detects the hydra LEFT_BUTTON_3 to schedule target launches, and fires
// a bullet from a hand whenever that hand's trigger crosses the pull threshold.
// @param deltaTime seconds since the previous frame (unused here)
function update(deltaTime) {
    // Check for mouseLook movement, update rotation
    // rotate body yaw for yaw received from controller or mouse
    var newOrientation = Quat.multiply(MyAvatar.orientation, Quat.fromVec3Radians( { x: 0, y: yawFromMouse, z: 0 } ));
    MyAvatar.orientation = newOrientation;
    yawFromMouse = 0;
    // apply pitch from controller or mouse
    var newPitch = MyAvatar.headPitch + pitchFromMouse;
    MyAvatar.headPitch = newPitch;
    pitchFromMouse = 0;
    // Check hydra controller for launch button press
    // (edge-detected so holding the button schedules only one launch)
    if (!isLaunchButtonPressed && Controller.isButtonPressed(LEFT_BUTTON_3)) {
        isLaunchButtonPressed = true;
        var time = MIN_THROWER_DELAY + Math.random() * MAX_THROWER_DELAY;
        Script.setTimeout(shootTarget, time);
    } else if (isLaunchButtonPressed && !Controller.isButtonPressed(LEFT_BUTTON_3)) {
        isLaunchButtonPressed = false;
    }
    // Check hydra controller for trigger press
    var numberOfTriggers = Controller.getNumberOfTriggers();
    var numberOfSpatialControls = Controller.getNumberOfSpatialControls();
    var controllersPerTrigger = numberOfSpatialControls / numberOfTriggers;
    // this is expected for hydras: 2 triggers, 2 spatial controls per trigger
    if (numberOfTriggers == 2 && controllersPerTrigger == 2) {
        for (var t = 0; t < numberOfTriggers; t++) {
            var shootABullet = false;
            var triggerValue = Controller.getTriggerValue(t);
            // Hysteresis: a pulled trigger must drop below 0.1 before it can
            // fire again; an idle trigger must exceed 0.9 to fire.
            if (triggerPulled[t]) {
                // must release to at least 0.1
                if (triggerValue < 0.1) {
                    triggerPulled[t] = false; // unpulled
                }
            } else {
                // must pull to at least 0.9
                if (triggerValue > 0.9) {
                    triggerPulled[t] = true; // pulled
                    shootABullet = true;
                }
            }
            if (shootABullet) {
                // Palm and fingertip are consecutive spatial controls; their
                // difference gives the "barrel" direction of the hand.
                var palmController = t * controllersPerTrigger;
                var palmPosition = Controller.getSpatialControlPosition(palmController);
                var fingerTipController = palmController + 1;
                var fingerTipPosition = Controller.getSpatialControlPosition(fingerTipController);
                var palmToFingerTipVector =
                    { x: (fingerTipPosition.x - palmPosition.x),
                      y: (fingerTipPosition.y - palmPosition.y),
                      z: (fingerTipPosition.z - palmPosition.z) };
                // just off the front of the finger tip
                var position = { x: fingerTipPosition.x + palmToFingerTipVector.x/2,
                                 y: fingerTipPosition.y + palmToFingerTipVector.y/2,
                                 z: fingerTipPosition.z + palmToFingerTipVector.z/2};
                var linearVelocity = 25;
                var velocity = { x: palmToFingerTipVector.x * linearVelocity,
                                 y: palmToFingerTipVector.y * linearVelocity,
                                 z: palmToFingerTipVector.z * linearVelocity };
                shootBullet(position, velocity);
            }
        }
    }
}
// Records the mouse-down state and position; mouseMoveEvent uses the last
// position to compute look deltas while a hydra button is held.
function mousePressEvent(event) {
    isMouseDown = true;
    lastX = event.x;
    lastY = event.y;
    //audioOptions.position = Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()));
    //Audio.playSound(loadSound, audioOptions);
}
// Fires a bullet from a point two meters ahead of the camera, travelling
// along the camera's view direction at BULLET_VELOCITY.
function shootFromMouse() {
    var DISTANCE_FROM_CAMERA = 2.0;
    var lookDirection = Quat.getFront(Camera.getOrientation());
    var muzzle = Vec3.sum(Camera.getPosition(), Vec3.multiply(lookDirection, DISTANCE_FROM_CAMERA));
    shootBullet(muzzle, Vec3.multiply(lookDirection, BULLET_VELOCITY));
}
// Clears the mouse-down state set by mousePressEvent.
function mouseReleaseEvent(event) {
    // position
    isMouseDown = false;
}
// Accumulates mouse-look deltas while hydra LEFT_BUTTON_1 is held; the
// per-frame update() consumes and resets yawFromMouse/pitchFromMouse.
function mouseMoveEvent(event) {
    //Move the camera if LEFT_BUTTON_1 is pressed
    if (!Controller.isButtonPressed(LEFT_BUTTON_1)) {
        return;
    }
    var MOUSE_YAW_SCALE = -0.25;
    var MOUSE_PITCH_SCALE = -12.5;
    var FIXED_MOUSE_TIMESTEP = 0.016;
    var deltaX = event.x - lastX;
    var deltaY = event.y - lastY;
    yawFromMouse += deltaX * MOUSE_YAW_SCALE * FIXED_MOUSE_TIMESTEP;
    pitchFromMouse += deltaY * MOUSE_PITCH_SCALE * FIXED_MOUSE_TIMESTEP;
    lastX = event.x;
    lastY = event.y;
}
// Cleanup on script shutdown: removes the HUD overlays and detaches the gun
// model from the avatar.
function scriptEnding() {
    Overlays.deleteOverlay(reticle);
    Overlays.deleteOverlay(text);
    MyAvatar.detachOne(gunModel);
}
// Wire the handlers above into the entity, script, and controller event
// streams; these stay connected until the script ends.
Entities.entityCollisionWithEntity.connect(entityCollisionWithEntity);
Script.scriptEnding.connect(scriptEnding);
Script.update.connect(update);
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.keyPressEvent.connect(keyPressEvent);

View file

@ -590,7 +590,7 @@ SelectionDisplay = (function () {
});
var yawHandle = Overlays.addOverlay("billboard", {
url: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/rotate-arrow-west-north.png",
url: "https://public.highfidelity.io/images/rotate-arrow-west-north.png",
position: { x:0, y: 0, z: 0},
color: rotateHandleColor,
alpha: rotateHandleAlpha,
@ -603,7 +603,7 @@ SelectionDisplay = (function () {
var pitchHandle = Overlays.addOverlay("billboard", {
url: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/rotate-arrow-west-north.png",
url: "https://public.highfidelity.io/images/rotate-arrow-west-north.png",
position: { x:0, y: 0, z: 0},
color: rotateHandleColor,
alpha: rotateHandleAlpha,
@ -616,7 +616,7 @@ SelectionDisplay = (function () {
var rollHandle = Overlays.addOverlay("billboard", {
url: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/rotate-arrow-west-north.png",
url: "https://public.highfidelity.io/images/rotate-arrow-west-north.png",
position: { x:0, y: 0, z: 0},
color: rotateHandleColor,
alpha: rotateHandleAlpha,
@ -835,8 +835,8 @@ SelectionDisplay = (function () {
rollCenter = { x: boundsCenter.x, y: boundsCenter.y, z: far };
Overlays.editOverlay(pitchHandle, { url: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/rotate-arrow-west-south.png" });
Overlays.editOverlay(rollHandle, { url: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/rotate-arrow-west-south.png" });
Overlays.editOverlay(pitchHandle, { url: "https://public.highfidelity.io/images/rotate-arrow-west-south.png" });
Overlays.editOverlay(rollHandle, { url: "https://public.highfidelity.io/images/rotate-arrow-west-south.png" });
} else {
@ -867,8 +867,8 @@ SelectionDisplay = (function () {
pitchCenter = { x: right, y: boundsCenter.y, z: boundsCenter.z };
rollCenter = { x: boundsCenter.x, y: boundsCenter.y, z: near};
Overlays.editOverlay(pitchHandle, { url: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/rotate-arrow-west-north.png" });
Overlays.editOverlay(rollHandle, { url: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/rotate-arrow-west-north.png" });
Overlays.editOverlay(pitchHandle, { url: "https://public.highfidelity.io/images/rotate-arrow-west-north.png" });
Overlays.editOverlay(rollHandle, { url: "https://public.highfidelity.io/images/rotate-arrow-west-north.png" });
}
} else {
@ -899,8 +899,8 @@ SelectionDisplay = (function () {
pitchCenter = { x: left, y: boundsCenter.y, z: boundsCenter.z };
rollCenter = { x: boundsCenter.x, y: boundsCenter.y, z: far};
Overlays.editOverlay(pitchHandle, { url: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/rotate-arrow-west-north.png" });
Overlays.editOverlay(rollHandle, { url: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/rotate-arrow-west-north.png" });
Overlays.editOverlay(pitchHandle, { url: "https://public.highfidelity.io/images/rotate-arrow-west-north.png" });
Overlays.editOverlay(rollHandle, { url: "https://public.highfidelity.io/images/rotate-arrow-west-north.png" });
} else {
@ -928,8 +928,8 @@ SelectionDisplay = (function () {
rollCenter = { x: boundsCenter.x, y: boundsCenter.y, z: near };
pitchCenter = { x: left, y: boundsCenter.y, z: boundsCenter.z};
Overlays.editOverlay(pitchHandle, { url: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/rotate-arrow-west-north.png" });
Overlays.editOverlay(rollHandle, { url: "https://s3-us-west-1.amazonaws.com/highfidelity-public/images/rotate-arrow-west-north.png" });
Overlays.editOverlay(pitchHandle, { url: "https://public.highfidelity.io/images/rotate-arrow-west-north.png" });
Overlays.editOverlay(rollHandle, { url: "https://public.highfidelity.io/images/rotate-arrow-west-north.png" });
}
}

View file

@ -0,0 +1,104 @@
//
// Unittest.js
// examples
//
// Created by Ryan Huffman on 5/4/14
// Copyright 2014 High Fidelity, Inc.
//
// This provides very basic unit testing functionality.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Runs a single named unit test: builds a UnitTest around `func`, executes
// it, and prints a pass line (with the assertion count) or a failure line
// with the thrown exception's name and message.
// @param name label printed before the test runs
// @param func test body; should call the UnitTest assert* helpers
test = function(name, func) {
    print("Running test: " + name);
    var unitTest = new UnitTest(name, func);
    try {
        unitTest.run();
        print(" Success: " + unitTest.numAssertions + " assertions passed");
    } catch (error) {
        print(" Failure: " + error.name + " " + error.message);
    }
};
// Exception thrown when an assertion fails; carries the caller's message
// plus the actual/expected values.
// NOTE(review): the print below looks like leftover debug output.
AssertionException = function(expected, actual, message) {
    print("Creating exception");
    this.message = message + "\n: " + actual + " != " + expected;
    this.name = 'AssertionException';
};
// Exception thrown by raises() when the tested function was expected to
// throw but returned normally.
// NOTE(review): the print below looks like leftover debug output.
UnthrownException = function(message) {
    print("Creating exception");
    this.message = message + "\n";
    this.name = 'UnthrownException';
};
UnitTest = function(name, func) {
this.numAssertions = 0;
this.func = func;
};
// Executes the test body with `this` bound to this UnitTest, so the body can
// reach the assert* helpers via `this`. Assertion exceptions propagate to
// the caller (test()).
UnitTest.prototype.run = function() {
    this.func();
};
// Asserts expected != actual (loose inequality); counts the assertion and
// throws AssertionException when the two compare equal.
UnitTest.prototype.assertNotEquals = function(expected, actual, message) {
    this.numAssertions++;
    if (expected != actual) {
        return;
    }
    throw new AssertionException(expected, actual, message);
};
// Asserts expected == actual (loose equality); counts the assertion and
// throws AssertionException when the two differ.
UnitTest.prototype.assertEquals = function(expected, actual, message) {
    this.numAssertions++;
    if (expected == actual) {
        return;
    }
    throw new AssertionException(expected, actual, message);
};
// Asserts that `actual` contains `expected`, using indexOf (so it works for
// both strings and arrays).
UnitTest.prototype.assertContains = function (expected, actual, message) {
    this.numAssertions++;
    if (actual.indexOf(expected) == -1) {
        throw new AssertionException(expected, actual, message);
    }
};
// Asserts that actual[property] is defined. Note: a property explicitly set
// to undefined also fails this check.
UnitTest.prototype.assertHasProperty = function(property, actual, message) {
    this.numAssertions++;
    if (actual[property] === undefined) {
        throw new AssertionException(property, actual, message);
    }
};
// Asserts value is exactly null (strict equality, so undefined fails too).
UnitTest.prototype.assertNull = function(value, message) {
    this.numAssertions++;
    if (value !== null) {
        throw new AssertionException(value, null, message);
    }
}
// Asserts two arrays have the same length and strictly-equal elements at
// every index; a per-element failure message is prefixed with the index.
UnitTest.prototype.arrayEqual = function(array1, array2, message) {
    this.numAssertions++;
    if (array1.length !== array2.length) {
        throw new AssertionException(array1.length , array2.length , message);
    }
    var idx = 0;
    while (idx < array1.length) {
        if (array1[idx] !== array2[idx]) {
            throw new AssertionException(array1[idx], array2[idx], idx + " " + message);
        }
        ++idx;
    }
}
// Asserts that `func` throws; if it returns without throwing, an
// UnthrownException carrying `message` is raised instead.
UnitTest.prototype.raises = function(func, message) {
    this.numAssertions++;
    try {
        func();
    } catch (error) {
        return;
    }
    throw new UnthrownException(message);
}

View file

@ -1,121 +0,0 @@
//
// myBalance.js
// examples
//
// Created by Stojce Slavkovski on June 5, 2014
// Copyright 2014 High Fidelity, Inc.
//
// Show wallet ₵ balance
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
// Fallbacks so the file parses outside Interface; inside Interface these
// globals are supplied by the scripting engine.
var Controller = Controller || {};
var Overlays = Overlays || {};
var Script = Script || {};
var Account = Account || {};
(function () {
    "use strict";

    // Layout constants (pixels) and state for the wallet-balance HUD: an
    // icon box plus a text overlay pinned to the top-right of the viewport.
    var iconUrl = HIFI_PUBLIC_BUCKET + 'images/tools/',
        overlayWidth = 150,
        overlayHeight = 50,
        overlayTopOffset = 15,
        overlayRightOffset = 140,
        textRightOffset = 105,
        maxIntegers = 5,
        // balance decreased -> flash blue
        downColor = {
            red: 0,
            green: 0,
            blue: 255
        },
        // balance increased -> flash green
        upColor = {
            red: 0,
            green: 255,
            blue: 0
        },
        // resting text color
        normalColor = {
            red: 204,
            green: 204,
            blue: 204
        },
        // -1 guarantees the first updateBalance() call refreshes the display
        balance = -1,
        walletBox = Overlays.addOverlay("image", {
            x: 0,
            y: overlayTopOffset,
            width: 122,
            height: 32,
            imageURL: iconUrl + "wallet.svg",
            alpha: 1
        }),
        textOverlay = Overlays.addOverlay("text", {
            x: 0,
            y: overlayTopOffset,
            topMargin: 10,
            font: {
                size: 16
            },
            color: normalColor
        });

    // Removes both overlays when the script shuts down.
    function scriptEnding() {
        Overlays.deleteOverlay(walletBox);
        Overlays.deleteOverlay(textOverlay);
    }

    // Re-anchors the overlays to the current viewport width each frame and
    // hides them while the user is logged out.
    function update(deltaTime) {
        var xPos = Controller.getViewportDimensions().x;
        Overlays.editOverlay(walletBox, {
            x: xPos - overlayRightOffset,
            visible: Account.isLoggedIn()
        });
        Overlays.editOverlay(textOverlay, {
            x: xPos - textRightOffset,
            visible: Account.isLoggedIn()
        });
    }

    // Formats `balance` with comma thousands separators; the decimal-place
    // count is chosen so integer plus fractional digits roughly fill
    // maxIntegers positions.
    function formatedBalance() {
        var integers = balance.toFixed(0).length,
            decimals = Math.abs(maxIntegers - integers) + 2;
        var x = balance.toFixed(decimals).split('.'),
            x1 = x[0],
            x2 = x.length > 1 ? '.' + x[1] : '';
        var rgx = /(\d+)(\d{3})/;
        while (rgx.test(x1)) {
            x1 = x1.replace(rgx, '$1' + ',' + '$2');
        }
        return x1 + x2;
    }

    // Displays a new balance, flashing the text green on an increase or blue
    // on a decrease, then fading back to the normal color after one second.
    function updateBalance(newBalance) {
        if (balance === newBalance) {
            return;
        }
        var change = newBalance - balance,
            textColor = change < 0 ? downColor : upColor;
        balance = newBalance;
        Overlays.editOverlay(textOverlay, {
            text: formatedBalance(),
            color: textColor
        });
        Script.setTimeout(function () {
            Overlays.editOverlay(textOverlay, {
                color: normalColor
            });
        }, 1000);
    }

    // Seed the display with the current balance, then track changes.
    updateBalance(Account.getBalance());
    Account.balanceChanged.connect(updateBalance);
    Script.scriptEnding.connect(scriptEnding);
    Script.update.connect(update);
}());

View file

@ -1,27 +0,0 @@
//
// playSound.js
// examples
//
// Copyright 2014 High Fidelity, Inc.
// Plays a sample audio file at the avatar's current location
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
Script.include("libraries/globals.js");
// First, load a sample sound from a URL
var bird = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Animals/bushtit_1.raw");
// Called every frame; with 1% probability per frame, plays the bird sample
// at the avatar's current position at half volume.
function maybePlaySound(deltaTime) {
    if (Math.random() >= 0.01) {
        return;
    }
    // Set the location and other info for the sound to play
    var playbackOptions = {
        position: MyAvatar.position,
        volume: 0.5
    };
    Audio.playSound(bird, playbackOptions);
}
// Connect a call back that happens every frame
Script.update.connect(maybePlaySound);

View file

@ -1,85 +0,0 @@
//
// seeingVoxelsExample.js
// examples
//
// Created by Brad Hefta-Gaub on 2/26/14
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Camera-sweep state: yaw oscillates between yawMin and yawMax degrees,
// stepping by yawDirection at random intervals; count tracks frames.
var count = 0;
var yawDirection = -1;
var yaw = 45;
var yawMax = 70;
var yawMin = 20;
// Fixed vantage point far from the origin to view voxels from.
var vantagePoint = {x: 5000, y: 500, z: 5000};
// Set true at the bottom of the file only when running inside Interface.
var isLocal = false;
// set up our VoxelViewer with a position and orientation
var orientation = Quat.fromPitchYawRollDegrees(0, yaw, 0);
// Returns a uniformly-distributed integer in the inclusive range [min, max].
function getRandomInt(min, max) {
    var span = max - min + 1;
    return min + Math.floor(Math.random() * span);
}
// One-time setup on the first frame: when local, move the avatar to the
// vantage point; otherwise position the assignment-client VoxelViewer there,
// kick off an octree query, and mark the agent as an avatar.
function init() {
    if (isLocal) {
        MyAvatar.position = vantagePoint;
        MyAvatar.orientation = orientation;
    } else {
        VoxelViewer.setPosition(vantagePoint);
        VoxelViewer.setOrientation(orientation);
        VoxelViewer.queryOctree();
        Agent.isAvatar = true;
    }
}
// Per-frame callback: initializes on the first call, then at random 5-15
// frame intervals sweeps the yaw back and forth between yawMin/yawMax and
// (in the remote case) re-queries the octree, logging every 10000 frames.
function keepLooking(deltaTime) {
    //print("count =" + count);
    if (count == 0) {
        init();
    }
    count++;
    if (count % getRandomInt(5, 15) == 0) {
        yaw += yawDirection;
        orientation = Quat.fromPitchYawRollDegrees(0, yaw, 0);
        // reverse the sweep at either yaw limit
        if (yaw > yawMax || yaw < yawMin) {
            yawDirection = yawDirection * -1;
        }
        if (count % 10000 == 0) {
            print("calling VoxelViewer.queryOctree()... count=" + count + " yaw=" + yaw);
        }
        if (isLocal) {
            MyAvatar.orientation = orientation;
        } else {
            VoxelViewer.setOrientation(orientation);
            VoxelViewer.queryOctree();
            if (count % 10000 == 0) {
                print("VoxelViewer.getOctreeElementsCount()=" + VoxelViewer.getOctreeElementsCount());
            }
        }
    }
}
// Logged on shutdown so it is obvious in the console when the script ended.
function scriptEnding() {
    print("SCRIPT ENDING!!!\n"); // fixed typo: was "ENDNG"
}
// register the call back so it fires before each data send
Script.update.connect(keepLooking);
// register our scriptEnding callback
Script.scriptEnding.connect(scriptEnding);
// test for local: Menu only exists inside Interface, so on an assignment
// client this call fails and the line after it never runs, leaving
// isLocal false.
Menu.isOptionChecked("Voxels");
isLocal = true; // will only get here on local client

View file

@ -1,94 +0,0 @@
//
// testingVoxelSeeingRestart.js
// hifi
//
// Created by Brad Hefta-Gaub on 2/26/14
// Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Camera-sweep state: yaw oscillates between yawMin and yawMax degrees,
// stepping by yawDirection at random intervals; count tracks frames.
var count = 0;
var yawDirection = -1;
var yaw = 45;
var yawMax = 70;
var yawMin = 20;
// Fixed vantage point far from the origin to view voxels from.
var vantagePoint = {x: 5000, y: 500, z: 5000};
// Set true at the bottom of the file only when running inside Interface.
var isLocal = false;
// set up our VoxelViewer with a position and orientation
var orientation = Quat.fromPitchYawRollDegrees(0, yaw, 0);
// Uniform random integer in [min, max], inclusive of both endpoints.
function getRandomInt(min, max) {
    var width = max - min + 1;
    return min + Math.floor(Math.random() * width);
}
// One-time setup on the first frame: when local, move the avatar to the
// vantage point; otherwise position the assignment-client VoxelViewer there,
// kick off an octree query, and mark the agent as an avatar.
function init() {
    if (isLocal) {
        MyAvatar.position = vantagePoint;
        MyAvatar.orientation = orientation;
    } else {
        VoxelViewer.setPosition(vantagePoint);
        VoxelViewer.setOrientation(orientation);
        VoxelViewer.queryOctree();
        Agent.isAvatar = true;
    }
}
// Per-frame callback: initializes on the first call, sweeps the yaw between
// yawMin/yawMax at random 5-15 frame intervals (re-querying the octree in
// the remote case), and roughly once a second rolls a 5% chance to stop the
// script entirely - this exercises the restart path the file is named for.
function keepLooking(deltaTime) {
    //print("count =" + count);
    if (count == 0) {
        init();
    }
    count++;
    if (count % getRandomInt(5, 15) == 0) {
        yaw += yawDirection;
        orientation = Quat.fromPitchYawRollDegrees(0, yaw, 0);
        // reverse the sweep at either yaw limit
        if (yaw > yawMax || yaw < yawMin) {
            yawDirection = yawDirection * -1;
        }
        //if (count % 10000 == 0) {
        // print("calling VoxelViewer.queryOctree()... count=" + count + " yaw=" + yaw);
        //}
        if (isLocal) {
            MyAvatar.orientation = orientation;
        } else {
            VoxelViewer.setOrientation(orientation);
            VoxelViewer.queryOctree();
            //if (count % 10000 == 0) {
            // print("VoxelViewer.getOctreeElementsCount()=" + VoxelViewer.getOctreeElementsCount());
            //}
        }
    }
    // approximately every second, consider stopping
    if (count % 60 == 0) {
        print("considering stop.... elementCount:" + VoxelViewer.getOctreeElementsCount());
        var stopProbability = 0.05; // 5% chance of stopping
        if (Math.random() < stopProbability) {
            print("stopping.... elementCount:" + VoxelViewer.getOctreeElementsCount());
            Script.stop();
        }
    }
}
// Logged on shutdown so it is obvious in the console when the script ended.
function scriptEnding() {
    print("SCRIPT ENDING!!!\n"); // fixed typo: was "ENDNG"
}
// register the call back so it fires before each data send
Script.update.connect(keepLooking);
// register our scriptEnding callback
Script.scriptEnding.connect(scriptEnding);
// test for local: Menu only exists inside Interface, so on an assignment
// client this call fails and the line after it never runs, leaving
// isLocal false.
Menu.isOptionChecked("Voxels");
isLocal = true; // will only get here on local client

View file

@ -29,7 +29,7 @@ var RIGHT_BUTTON_FWD = 11;
var RIGHT_BUTTON_3 = 9;
var BALL_RADIUS = 0.08;
var GRAVITY_STRENGTH = 0.5;
var GRAVITY_STRENGTH = 1.0;
var HELD_COLOR = { red: 240, green: 0, blue: 0 };
var THROWN_COLOR = { red: 128, green: 0, blue: 0 };
@ -136,7 +136,7 @@ function checkControllerSide(whichSide) {
velocity: { x: 0, y: 0, z: 0},
gravity: { x: 0, y: 0, z: 0},
inHand: true,
radius: { x: BALL_RADIUS * 2, y: BALL_RADIUS * 2, z: BALL_RADIUS * 2 },
dimensions: { x: BALL_RADIUS * 2, y: BALL_RADIUS * 2, z: BALL_RADIUS * 2 },
damping: 0.00001,
color: HELD_COLOR,
@ -185,6 +185,7 @@ function checkControllerSide(whichSide) {
velocity: { x: tipVelocity.x * THROWN_VELOCITY_SCALING,
y: tipVelocity.y * THROWN_VELOCITY_SCALING,
z: tipVelocity.z * THROWN_VELOCITY_SCALING } ,
collisionsWillMove: true,
inHand: false,
color: THROWN_COLOR,
lifetime: 10,

View file

@ -4,21 +4,21 @@
// Creates a red 0.2 meter diameter ball right in front of your avatar that lives for 60 seconds
//
var radius = 0.1;
var diameter = 0.2;
var position = Vec3.sum(MyAvatar.position, Quat.getFront(MyAvatar.orientation));
var properties = {
type: "Sphere",
position: position,
velocity: { x: 0, y: 0, z: 0},
gravity: { x: 0, y: -0.05, z: 0},
radius: radius,
dimensions: { x: diameter, y: diameter, z: diameter };
damping: 0.00001,
color: { red: 200, green: 0, blue: 0 },
lifetime: 60
};
var newEntity = Entities.addEntity(properties);
position.x -= radius * 1.0;
position.x -= 0.5 * diameter;
properties.position = position;
var newEntityTwo = Entities.addEntity(properties);

View file

@ -11,7 +11,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("Test.js");
Script.include("../../libraries/unitTest.js");
test("Test default request values", function(finished) {
var req = new XMLHttpRequest();

View file

@ -11,7 +11,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("../../libraries/globals.js");
var sound = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/220Sine.wav");

View file

@ -11,7 +11,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("../../libraries/globals.js");
// A few sample files you may want to try:

View file

@ -9,9 +9,9 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
Script.include("../../libraries/globals.js");
var soundClip = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Voxels/voxel create 3.raw");
var soundClip = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Guitars/Guitar+-+Nylon+A.raw");
var currentTime = 1.570079; // pi/2
var deltaTime = 0.05;
@ -38,6 +38,6 @@ function playSound() {
});
}
Script.setInterval(playSound, 250);
Script.setInterval(playSound, 850);

View file

@ -3,7 +3,7 @@ Instructions for adding the RtMidi library to Interface
Stephen Birarda, June 30, 2014
1. Download the RtMidi tarball from High Fidelity S3.
http://highfidelity-public.s3.amazonaws.com/dependencies/rtmidi-2.1.0.tar.gz
http://public.highfidelity.io/dependencies/rtmidi-2.1.0.tar.gz
2. Copy RtMidi.h to externals/rtmidi/include.

View file

@ -84,6 +84,8 @@
#include "ModelUploader.h"
#include "Util.h"
#include "audio/AudioToolBox.h"
#include "devices/DdeFaceTracker.h"
#include "devices/Faceshift.h"
#include "devices/Leapmotion.h"
@ -172,7 +174,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_touchAvgY(0.0f),
_isTouchPressed(false),
_mousePressed(false),
_audio(),
_enableProcessOctreeThread(true),
_octreeProcessor(),
_packetsPerSecond(0),
@ -246,9 +247,10 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
// put the audio processing on a separate thread
QThread* audioThread = new QThread(this);
_audio.moveToThread(audioThread);
connect(audioThread, SIGNAL(started()), &_audio, SLOT(start()));
Audio::SharedPointer audioIO = DependencyManager::get<Audio>();
audioIO->moveToThread(audioThread);
connect(audioThread, &QThread::started, audioIO.data(), &Audio::start);
audioThread->start();
@ -299,11 +301,11 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
// once the event loop has started, check and signal for an access token
QMetaObject::invokeMethod(&accountManager, "checkAndSignalForAccessToken", Qt::QueuedConnection);
AddressManager& addressManager = AddressManager::getInstance();
AddressManager::SharedPointer addressManager = DependencyManager::get<AddressManager>();
// use our MyAvatar position and quat for address manager path
addressManager.setPositionGetter(getPositionForPath);
addressManager.setOrientationGetter(getOrientationForPath);
addressManager->setPositionGetter(getPositionForPath);
addressManager->setOrientationGetter(getOrientationForPath);
_settings = new QSettings(this);
_numChangedSettings = 0;
@ -402,7 +404,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_trayIcon->show();
// set the local loopback interface for local sounds from audio scripts
AudioScriptingInterface::getInstance().setLocalAudioInterface(&_audio);
AudioScriptingInterface::getInstance().setLocalAudioInterface(audioIO.data());
#ifdef HAVE_RTMIDI
// setup the MIDIManager
@ -441,13 +443,15 @@ Application::~Application() {
// kill any audio injectors that are still around
AudioScriptingInterface::getInstance().stopAllInjectors();
Audio::SharedPointer audioIO = DependencyManager::get<Audio>();
// stop the audio process
QMetaObject::invokeMethod(&_audio, "stop");
QMetaObject::invokeMethod(audioIO.data(), "stop", Qt::BlockingQueuedConnection);
// ask the audio thread to quit and wait until it is done
_audio.thread()->quit();
_audio.thread()->wait();
audioIO->thread()->quit();
audioIO->thread()->wait();
_octreeProcessor.terminate();
_entityEditSender.terminate();
@ -797,7 +801,7 @@ bool Application::event(QEvent* event) {
QFileOpenEvent* fileEvent = static_cast<QFileOpenEvent*>(event);
if (!fileEvent->url().isEmpty()) {
AddressManager::getInstance().handleLookupString(fileEvent->url().toString());
DependencyManager::get<AddressManager>()->handleLookupString(fileEvent->url().toString());
}
return false;
@ -1246,7 +1250,7 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
_mousePressed = true;
if (mouseOnScreen()) {
if (_audio.mousePressEvent(getMouseX(), getMouseY())) {
if (DependencyManager::get<AudioToolBox>()->mousePressEvent(getMouseX(), getMouseY())) {
// stop propagation
return;
}
@ -1684,27 +1688,17 @@ void Application::init() {
_lastTimeUpdated.start();
Menu::getInstance()->loadSettings();
_audio.setReceivedAudioStreamSettings(Menu::getInstance()->getReceivedAudioStreamSettings());
// when --url in command line, teleport to location
const QString HIFI_URL_COMMAND_LINE_KEY = "--url";
int urlIndex = arguments().indexOf(HIFI_URL_COMMAND_LINE_KEY);
QString addressLookupString;
if (urlIndex != -1) {
AddressManager::getInstance().handleLookupString(arguments().value(urlIndex + 1));
} else {
// check if we have a URL in settings to load to jump back to
// we load this separate from the other settings so we don't double lookup a URL
QSettings* interfaceSettings = lockSettings();
QVariant addressVariant = interfaceSettings->value(SETTINGS_ADDRESS_KEY);
QString addressString = addressVariant.isNull()
? DEFAULT_HIFI_ADDRESS : addressVariant.toUrl().toString();
unlockSettings();
AddressManager::getInstance().handleLookupString(addressString);
addressLookupString = arguments().value(urlIndex + 1);
}
DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
qDebug() << "Loaded settings";
#ifdef __APPLE__
@ -1731,10 +1725,11 @@ void Application::init() {
_entities.setViewFrustum(getViewFrustum());
EntityTree* entityTree = _entities.getTree();
_entityCollisionSystem.init(&_entityEditSender, entityTree, &_audio, &_avatarManager);
_entityCollisionSystem.init(&_entityEditSender, entityTree, &_avatarManager);
entityTree->setSimulation(&_entityCollisionSystem);
connect(&_entityCollisionSystem, &EntityCollisionSystem::entityCollisionWithEntity,
ScriptEngine::getEntityScriptingInterface(), &EntityScriptingInterface::entityCollisionWithEntity);
@ -1753,7 +1748,6 @@ void Application::init() {
_metavoxels.init();
GLCanvas::SharedPointer glCanvas = DependencyManager::get<GLCanvas>();
_audio.init(glCanvas.data());
_rearMirrorTools = new RearMirrorTools(glCanvas.data(), _mirrorViewRect, _settings);
connect(_rearMirrorTools, SIGNAL(closeView()), SLOT(closeMirrorView()));
@ -1761,9 +1755,6 @@ void Application::init() {
connect(_rearMirrorTools, SIGNAL(shrinkView()), SLOT(shrinkMirrorView()));
connect(_rearMirrorTools, SIGNAL(resetView()), SLOT(resetSensors()));
connect(getAudio(), &Audio::muteToggled, AudioDeviceScriptingInterface::getInstance(),
&AudioDeviceScriptingInterface::muteToggled, Qt::DirectConnection);
// save settings when avatar changes
connect(_myAvatar, &MyAvatar::transformChanged, this, &Application::bumpSettings);
@ -2023,6 +2014,8 @@ void Application::updateCursor(float deltaTime) {
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::updateCursor()");
static QPoint lastMousePos = QPoint();
_lastMouseMove = (lastMousePos == QCursor::pos()) ? _lastMouseMove : usecTimestampNow();
bool hideMouse = false;
bool underMouse = QGuiApplication::topLevelAt(QCursor::pos()) ==
Application::getInstance()->getWindow()->windowHandle();
@ -2035,6 +2028,7 @@ void Application::updateCursor(float deltaTime) {
}
setCursorVisible(!hideMouse);
lastMousePos = QCursor::pos();
}
void Application::setCursorVisible(bool visible) {
@ -2162,7 +2156,7 @@ void Application::update(float deltaTime) {
if (sinceLastNack > TOO_LONG_SINCE_LAST_SEND_DOWNSTREAM_AUDIO_STATS) {
_lastSendDownstreamAudioStats = now;
QMetaObject::invokeMethod(&_audio, "sendDownstreamAudioStatsPacket", Qt::QueuedConnection);
QMetaObject::invokeMethod(DependencyManager::get<Audio>().data(), "sendDownstreamAudioStatsPacket", Qt::QueuedConnection);
}
}
}
@ -2505,8 +2499,8 @@ void Application::loadViewFrustum(Camera& camera, ViewFrustum& viewFrustum) {
}
glm::vec3 Application::getSunDirection() {
return glm::normalize(_environment.getClosestData(_myCamera.getPosition()).getSunLocation(_myCamera.getPosition()) -
_myCamera.getPosition());
// Sun direction is in fact just the location of the sun relative to the origin
return glm::normalize(_environment.getClosestData(_myCamera.getPosition()).getSunLocation(_myCamera.getPosition()));
}
void Application::updateShadowMap() {
@ -2514,7 +2508,7 @@ void Application::updateShadowMap() {
QOpenGLFramebufferObject* fbo = DependencyManager::get<TextureCache>()->getShadowFramebufferObject();
fbo->bind();
glEnable(GL_DEPTH_TEST);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glClear(GL_DEPTH_BUFFER_BIT);
glm::vec3 lightDirection = -getSunDirection();
glm::quat rotation = rotationBetween(IDENTITY_FRONT, lightDirection);
@ -2539,8 +2533,10 @@ void Application::updateShadowMap() {
const glm::vec2& coord = MAP_COORDS[i];
glViewport(coord.s * fbo->width(), coord.t * fbo->height(), targetSize, targetSize);
// if simple shadow then since the resolution is twice as much as with cascaded, cover 2 regions with the map, not just one
int regionIncrement = (matrixCount == 1 ? 2 : 1);
float nearScale = SHADOW_MATRIX_DISTANCES[i] * frustumScale;
float farScale = SHADOW_MATRIX_DISTANCES[i + 1] * frustumScale;
float farScale = SHADOW_MATRIX_DISTANCES[i + regionIncrement] * frustumScale;
glm::vec3 points[] = {
glm::mix(_viewFrustum.getNearTopLeft(), _viewFrustum.getFarTopLeft(), nearScale),
glm::mix(_viewFrustum.getNearTopRight(), _viewFrustum.getFarTopRight(), nearScale),
@ -2626,7 +2622,7 @@ void Application::updateShadowMap() {
{
PerformanceTimer perfTimer("entities");
// _entities.render(RenderArgs::SHADOW_RENDER_MODE);
_entities.render(RenderArgs::SHADOW_RENDER_MODE);
}
// render JS/scriptable overlays
@ -3318,7 +3314,7 @@ void Application::resetSensors() {
_myAvatar->reset();
QMetaObject::invokeMethod(&_audio, "reset", Qt::QueuedConnection);
QMetaObject::invokeMethod(DependencyManager::get<Audio>().data(), "reset", Qt::QueuedConnection);
}
static void setShortcutsEnabled(QWidget* widget, bool enabled) {
@ -3348,7 +3344,7 @@ void Application::updateWindowTitle(){
QString connectionStatus = nodeList->getDomainHandler().isConnected() ? "" : " (NOT CONNECTED) ";
QString username = AccountManager::getInstance().getAccountInfo().getUsername();
QString title = QString() + (!username.isEmpty() ? username + " @ " : QString())
+ AddressManager::getInstance().getCurrentDomain() + connectionStatus + buildVersion;
+ DependencyManager::get<AddressManager>()->getCurrentDomain() + connectionStatus + buildVersion;
#ifndef WIN32
// crashes with vs2013/win32
@ -3369,7 +3365,7 @@ void Application::updateLocationInServer() {
QJsonObject locationObject;
QString pathString = AddressManager::getInstance().currentPath();
QString pathString = DependencyManager::get<AddressManager>()->currentPath();
const QString LOCATION_KEY_IN_ROOT = "location";
const QString PATH_KEY_IN_LOCATION = "path";
@ -3441,7 +3437,7 @@ void Application::nodeKilled(SharedNodePointer node) {
_entityEditSender.nodeKilled(node);
if (node->getType() == NodeType::AudioMixer) {
QMetaObject::invokeMethod(&_audio, "audioMixerKilled");
QMetaObject::invokeMethod(DependencyManager::get<Audio>().data(), "audioMixerKilled");
}
if (node->getType() == NodeType::EntityServer) {
@ -3853,7 +3849,7 @@ void Application::uploadEntity() {
void Application::openUrl(const QUrl& url) {
if (!url.isEmpty()) {
if (url.scheme() == HIFI_URL_SCHEME) {
AddressManager::getInstance().handleLookupString(url.toString());
DependencyManager::get<AddressManager>()->handleLookupString(url.toString());
} else {
// address manager did not handle - ask QDesktopServices to handle
QDesktopServices::openUrl(url);

View file

@ -164,7 +164,6 @@ public:
MyAvatar* getAvatar() { return _myAvatar; }
const MyAvatar* getAvatar() const { return _myAvatar; }
Audio* getAudio() { return &_audio; }
Camera* getCamera() { return &_myCamera; }
ViewFrustum* getViewFrustum() { return &_viewFrustum; }
ViewFrustum* getDisplayViewFrustum() { return &_displayViewFrustum; }
@ -520,9 +519,8 @@ private:
QSet<int> _keysPressed;
Audio _audio;
bool _enableProcessOctreeThread;
OctreePacketProcessor _octreeProcessor;
EntityEditPacketSender _entityEditSender;

File diff suppressed because it is too large Load diff

View file

@ -15,18 +15,6 @@
#include <fstream>
#include <vector>
#include "InterfaceConfig.h"
#include "AudioStreamStats.h"
#include "Recorder.h"
#include "RingBufferHistory.h"
#include "MovingMinMaxAvg.h"
#include "AudioRingBuffer.h"
#include "AudioFormat.h"
#include "AudioBuffer.h"
#include "AudioSourceTone.h"
#include "AudioSourceNoise.h"
#include "AudioGain.h"
#include <QAudio>
#include <QAudioInput>
#include <QElapsedTimer>
@ -38,12 +26,26 @@
#include <QByteArray>
#include <AbstractAudioInterface.h>
#include <AudioRingBuffer.h>
#include <DependencyManager.h>
#include <StDev.h>
#include "InterfaceConfig.h"
#include "audio/AudioIOStats.h"
#include "audio/AudioNoiseGate.h"
#include "AudioStreamStats.h"
#include "Recorder.h"
#include "RingBufferHistory.h"
#include "AudioRingBuffer.h"
#include "AudioFormat.h"
#include "AudioBuffer.h"
#include "AudioSourceTone.h"
#include "AudioSourceNoise.h"
#include "AudioGain.h"
#include "MixedProcessedAudioStream.h"
#include "AudioEffectOptions.h"
#include <AudioRingBuffer.h>
#include <StDev.h>
#ifdef _WIN32
#pragma warning( push )
@ -59,8 +61,6 @@ extern "C" {
static const int NUM_AUDIO_CHANNELS = 2;
static const int MAX_16_BIT_AUDIO_SAMPLE = 32767;
static const int DEFAULT_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES = 3;
static const int MIN_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES = 1;
static const int MAX_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES = 20;
@ -74,6 +74,7 @@ class QIODevice;
class Audio : public AbstractAudioInterface {
Q_OBJECT
SINGLETON_DEPENDENCY(Audio)
public:
class AudioOutputIODevice : public QIODevice {
@ -92,46 +93,26 @@ public:
Audio* _audio;
int _unfulfilledReads;
};
const MixedProcessedAudioStream& getReceivedAudioStream() const { return _receivedAudioStream; }
friend class AudioOutputIODevice;
float getLastInputLoudness() const { return glm::max(_lastInputLoudness - _inputGate.getMeasuredFloor(), 0.0f); }
// setup for audio I/O
Audio(QObject* parent = 0);
float getLastInputLoudness() const { return glm::max(_lastInputLoudness - _noiseGateMeasuredFloor, 0.0f); }
float getTimeSinceLastClip() const { return _timeSinceLastClip; }
float getAudioAverageInputLoudness() const { return _lastInputLoudness; }
void setNoiseGateEnabled(bool noiseGateEnabled) { _noiseGateEnabled = noiseGateEnabled; }
virtual void startCollisionSound(float magnitude, float frequency, float noise, float duration, bool flashScreen);
virtual void startDrumSound(float volume, float frequency, float duration, float decay);
void setReceivedAudioStreamSettings(const InboundAudioStream::Settings& settings) { _receivedAudioStream.setSettings(settings); }
int getDesiredJitterBufferFrames() const { return _receivedAudioStream.getDesiredJitterBufferFrames(); }
float getCollisionSoundMagnitude() { return _collisionSoundMagnitude; }
bool isMuted() { return _muted; }
bool getCollisionFlashesScreen() { return _collisionFlashesScreen; }
void setIsStereoInput(bool isStereoInput);
bool getMuted() { return _muted; }
void init(QGLWidget *parent = 0);
bool mousePressEvent(int x, int y);
void renderToolBox(int x, int y, bool boxed);
void renderScope(int width, int height);
void renderStats(const float* color, int width, int height);
int getNetworkSampleRate() { return SAMPLE_RATE; }
int getNetworkBufferLengthSamplesPerChannel() { return NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL; }
const AudioIOStats& getStats() const { return _stats; }
float getInputRingBufferMsecsAvailable() const;
float getInputRingBufferAverageMsecsAvailable() const { return (float)_inputRingBufferMsecsAvailableStats.getWindowAverage(); }
float getAudioOutputMsecsUnplayed() const;
float getAudioOutputAverageMsecsUnplayed() const { return (float)_audioOutputMsecsUnplayedStats.getWindowAverage(); }
void setRecorder(RecorderPointer recorder) { _recorder = recorder; }
@ -146,40 +127,38 @@ public:
int getOutputStarveDetectionThreshold() { return _outputStarveDetectionThreshold; }
void setOutputStarveDetectionThreshold(int threshold) { _outputStarveDetectionThreshold = threshold; }
static const float CALLBACK_ACCELERATOR_RATIO;
public slots:
void start();
void stop();
void addReceivedAudioToStream(const QByteArray& audioByteArray);
void parseAudioStreamStatsPacket(const QByteArray& packet);
void parseAudioEnvironmentData(const QByteArray& packet);
void sendDownstreamAudioStatsPacket() { _stats.sendDownstreamAudioStatsPacket(); }
void parseAudioStreamStatsPacket(const QByteArray& packet) { _stats.parseAudioStreamStatsPacket(packet); }
void handleAudioInput();
void reset();
void resetStats();
void audioMixerKilled();
void toggleMute();
void toggleAudioNoiseReduction();
void toggleAudioSourceInject();
void selectAudioSourcePinkNoise();
void selectAudioSourceSine440();
void toggleScope();
void toggleScopePause();
void toggleStats();
void toggleStatsShowInjectedStreams();
void toggleStereoInput();
void selectAudioScopeFiveFrames();
void selectAudioScopeTwentyFrames();
void selectAudioScopeFiftyFrames();
void addStereoSilenceToScope(int silentSamplesPerChannel);
void addLastFrameRepeatedWithFadeToScope(int samplesPerChannel);
void addStereoSamplesToScope(const QByteArray& samples);
void toggleAudioNoiseReduction() { _isNoiseGateEnabled = !_isNoiseGateEnabled; }
void toggleLocalEcho() { _shouldEchoLocally = !_shouldEchoLocally; }
void toggleServerEcho() { _shouldEchoToServer = !_shouldEchoToServer; }
void toggleStereoInput() { setIsStereoInput(!_isStereoInput); }
void processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer);
void sendMuteEnvironmentPacket();
void setOutputBufferSize(int numFrames);
virtual bool outputLocalInjector(bool isStereo, qreal volume, AudioInjector* injector);
void sendDownstreamAudioStatsPacket();
bool switchInputToAudioDevice(const QString& inputDeviceName);
bool switchOutputToAudioDevice(const QString& outputDeviceName);
QString getDeviceName(QAudio::Mode mode) const { return (mode == QAudio::AudioInput) ?
@ -192,17 +171,14 @@ public slots:
void setReverb(bool reverb) { _reverb = reverb; }
void setReverbOptions(const AudioEffectOptions* options);
const AudioStreamStats& getAudioMixerAvatarStreamAudioStats() const { return _audioMixerAvatarStreamAudioStats; }
const QHash<QUuid, AudioStreamStats>& getAudioMixerInjectedStreamAudioStatsMap() const { return _audioMixerInjectedStreamAudioStatsMap; }
void outputNotify();
signals:
bool muteToggled();
void preProcessOriginalInboundAudio(unsigned int sampleTime, QByteArray& samples, const QAudioFormat& format);
void processInboundAudio(unsigned int sampleTime, const QByteArray& samples, const QAudioFormat& format);
void processLocalAudio(unsigned int sampleTime, const QByteArray& samples, const QAudioFormat& format);
void inputReceived(const QByteArray& inputSamples);
protected:
Audio();
private:
void outputFormatChanged();
@ -213,17 +189,15 @@ private:
QAudioFormat _inputFormat;
QIODevice* _inputDevice;
int _numInputCallbackBytes;
int16_t _localProceduralSamples[NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL];
int16_t _localProceduralSamples[AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL];
QAudioOutput* _audioOutput;
QAudioFormat _desiredOutputFormat;
QAudioFormat _outputFormat;
int _outputFrameSize;
int16_t _outputProcessingBuffer[NETWORK_BUFFER_LENGTH_SAMPLES_STEREO];
int16_t _outputProcessingBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
int _numOutputCallbackBytes;
QAudioOutput* _loopbackAudioOutput;
QIODevice* _loopbackOutputDevice;
QAudioOutput* _proceduralAudioOutput;
QIODevice* _proceduralOutputDevice;
AudioRingBuffer _inputRingBuffer;
MixedProcessedAudioStream _receivedAudioStream;
bool _isStereoInput;
@ -243,106 +217,37 @@ private:
QElapsedTimer _timeSinceLastReceived;
float _averagedLatency;
float _lastInputLoudness;
int _inputFrameCounter;
float _quietestFrame;
float _loudestFrame;
float _timeSinceLastClip;
float _dcOffset;
float _noiseGateMeasuredFloor;
float* _noiseSampleFrames;
int _noiseGateSampleCounter;
bool _noiseGateOpen;
bool _noiseGateEnabled;
bool _audioSourceInjectEnabled;
int _noiseGateFramesToClose;
int _totalInputAudioSamples;
float _collisionSoundMagnitude;
float _collisionSoundFrequency;
float _collisionSoundNoise;
float _collisionSoundDuration;
bool _collisionFlashesScreen;
// Drum sound generator
float _drumSoundVolume;
float _drumSoundFrequency;
float _drumSoundDuration;
float _drumSoundDecay;
int _drumSoundSample;
int _proceduralEffectSample;
bool _muted;
bool _localEcho;
bool _shouldEchoLocally;
bool _shouldEchoToServer;
bool _isNoiseGateEnabled;
bool _audioSourceInjectEnabled;
bool _reverb;
AudioEffectOptions _scriptReverbOptions;
AudioEffectOptions _zoneReverbOptions;
AudioEffectOptions* _reverbOptions;
ty_gverb* _gverbLocal;
ty_gverb* _gverb;
GLuint _micTextureId;
GLuint _muteTextureId;
GLuint _boxTextureId;
QRect _iconBounds;
float _iconColor;
qint64 _iconPulseTimeReference;
// Process procedural audio by
// 1. Echo to the local procedural output device
// 2. Mix with the audio input
void processProceduralAudio(int16_t* monoInput, int numSamples);
// Adds Reverb
void initGverb();
void updateGverbOptions();
void addReverb(ty_gverb* gverb, int16_t* samples, int numSamples, QAudioFormat& format, bool noEcho = false);
void addReverb(ty_gverb* gverb, int16_t* samples, int numSamples, QAudioFormat& format);
void handleLocalEchoAndReverb(QByteArray& inputByteArray);
// Add sounds that we want the user to not hear themselves, by adding on top of mic input signal
void addProceduralSounds(int16_t* monoInput, int numSamples);
bool switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceInfo);
bool switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo);
// Callback acceleration dependent calculations
static const float CALLBACK_ACCELERATOR_RATIO;
int calculateNumberOfInputCallbackBytes(const QAudioFormat& format) const;
int calculateNumberOfFrameSamples(int numBytes) const;
float calculateDeviceToNetworkInputRatio(int numBytes) const;
// Audio scope methods for allocation/deallocation
void allocateScope();
void freeScope();
void reallocateScope(int frames);
// Audio scope methods for data acquisition
int addBufferToScope(QByteArray* byteArray, int frameOffset, const int16_t* source, int sourceSamples,
unsigned int sourceChannel, unsigned int sourceNumberOfChannels, float fade = 1.0f);
int addSilenceToScope(QByteArray* byteArray, int frameOffset, int silentSamples);
// Audio scope methods for rendering
void renderBackground(const float* color, int x, int y, int width, int height);
void renderGrid(const float* color, int x, int y, int width, int height, int rows, int cols);
void renderLineStrip(const float* color, int x, int y, int n, int offset, const QByteArray* byteArray);
// audio stats methods for rendering
void renderAudioStreamStats(const AudioStreamStats& streamStats, int horizontalOffset, int& verticalOffset,
float scale, float rotation, int font, const float* color, bool isDownstreamStats = false);
// Audio scope data
static const unsigned int NETWORK_SAMPLES_PER_FRAME = NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL;
static const unsigned int DEFAULT_FRAMES_PER_SCOPE = 5;
static const unsigned int SCOPE_WIDTH = NETWORK_SAMPLES_PER_FRAME * DEFAULT_FRAMES_PER_SCOPE;
static const unsigned int MULTIPLIER_SCOPE_HEIGHT = 20;
static const unsigned int SCOPE_HEIGHT = 2 * 15 * MULTIPLIER_SCOPE_HEIGHT;
bool _scopeEnabled;
bool _scopeEnabledPause;
int _scopeInputOffset;
int _scopeOutputOffset;
int _framesPerScope;
int _samplesPerScope;
// Input framebuffer
AudioBufferFloat32 _inputFrameBuffer;
@ -359,38 +264,16 @@ private:
// Tone source
bool _toneSourceEnabled;
AudioSourceTone _toneSource;
QMutex _guard;
QByteArray* _scopeInput;
QByteArray* _scopeOutputLeft;
QByteArray* _scopeOutputRight;
QByteArray _scopeLastFrame;
#ifdef _WIN32
static const unsigned int STATS_WIDTH = 1500;
#else
static const unsigned int STATS_WIDTH = 650;
#endif
static const unsigned int STATS_HEIGHT_PER_LINE = 20;
bool _statsEnabled;
bool _statsShowInjectedStreams;
AudioStreamStats _audioMixerAvatarStreamAudioStats;
QHash<QUuid, AudioStreamStats> _audioMixerInjectedStreamAudioStatsMap;
quint16 _outgoingAvatarAudioSequenceNumber;
MovingMinMaxAvg<float> _audioInputMsecsReadStats;
MovingMinMaxAvg<float> _inputRingBufferMsecsAvailableStats;
MovingMinMaxAvg<float> _audioOutputMsecsUnplayedStats;
quint64 _lastSentAudioPacket;
MovingMinMaxAvg<quint64> _packetSentTimeGaps;
AudioOutputIODevice _audioOutputIODevice;
WeakRecorderPointer _recorder;
AudioIOStats _stats;
AudioNoiseGate _inputGate;
};

View file

@ -54,13 +54,16 @@ void DatagramProcessor::processDatagrams() {
case PacketTypeMixedAudio:
case PacketTypeSilentAudioFrame: {
if (incomingType == PacketTypeAudioStreamStats) {
QMetaObject::invokeMethod(&application->_audio, "parseAudioStreamStatsPacket", Qt::QueuedConnection,
QMetaObject::invokeMethod(DependencyManager::get<Audio>().data(), "parseAudioStreamStatsPacket",
Qt::QueuedConnection,
Q_ARG(QByteArray, incomingPacket));
} else if (incomingType == PacketTypeAudioEnvironment) {
QMetaObject::invokeMethod(&application->_audio, "parseAudioEnvironmentData", Qt::QueuedConnection,
QMetaObject::invokeMethod(DependencyManager::get<Audio>().data(), "parseAudioEnvironmentData",
Qt::QueuedConnection,
Q_ARG(QByteArray, incomingPacket));
} else {
QMetaObject::invokeMethod(&application->_audio, "addReceivedAudioToStream", Qt::QueuedConnection,
QMetaObject::invokeMethod(DependencyManager::get<Audio>().data(), "addReceivedAudioToStream",
Qt::QueuedConnection,
Q_ARG(QByteArray, incomingPacket));
}
@ -127,7 +130,7 @@ void DatagramProcessor::processDatagrams() {
}
case PacketTypeNoisyMute:
case PacketTypeMuteEnvironment: {
bool mute = !Application::getInstance()->getAudio()->getMuted();
bool mute = !DependencyManager::get<Audio>()->isMuted();
if (incomingType == PacketTypeMuteEnvironment) {
glm::vec3 position;
@ -142,7 +145,7 @@ void DatagramProcessor::processDatagrams() {
}
if (mute) {
Application::getInstance()->getAudio()->toggleMute();
DependencyManager::get<Audio>()->toggleMute();
if (incomingType == PacketTypeMuteEnvironment) {
AudioScriptingInterface::getInstance().environmentMuted();
} else {

View file

@ -12,6 +12,7 @@
#ifndef hifi_GLCanvas_h
#define hifi_GLCanvas_h
#include <QDebug>
#include <QGLWidget>
#include <QTimer>

View file

@ -42,6 +42,8 @@
#include "Application.h"
#include "AccountManager.h"
#include "Audio.h"
#include "audio/AudioIOStatsRenderer.h"
#include "audio/AudioScope.h"
#include "devices/Faceshift.h"
#include "devices/OculusManager.h"
#include "devices/Visage.h"
@ -83,7 +85,6 @@ const QString DEFAULT_FACESHIFT_HOSTNAME = "localhost";
const float DEFAULT_AVATAR_LOD_DISTANCE_MULTIPLIER = 1.0f;
const int ONE_SECOND_OF_FRAMES = 60;
const int FIVE_SECONDS_OF_FRAMES = 5 * ONE_SECOND_OF_FRAMES;
const float MUTE_RADIUS = 50;
const QString CONSOLE_TITLE = "Scripting Console";
const float CONSOLE_WINDOW_OPACITY = 0.95f;
@ -155,11 +156,6 @@ Menu::Menu() :
// connect to the appropriate signal of the AccountManager so that we can change the Login/Logout menu item
connect(&accountManager, &AccountManager::profileChanged, this, &Menu::toggleLoginMenuItem);
connect(&accountManager, &AccountManager::logoutComplete, this, &Menu::toggleLoginMenuItem);
// connect to signal of account manager so we can tell user when the user/place they looked at is offline
AddressManager& addressManager = AddressManager::getInstance();
connect(&addressManager, &AddressManager::lookupResultIsOffline, this, &Menu::displayAddressOfflineMessage);
connect(&addressManager, &AddressManager::lookupResultIsNotFound, this, &Menu::displayAddressNotFoundMessage);
addDisabledActionAndSeparator(fileMenu, "Scripts");
addActionToQMenuAndActionHash(fileMenu, MenuOption::LoadScript, Qt::CTRL | Qt::Key_O, appInstance, SLOT(loadDialog()));
@ -527,81 +523,85 @@ Menu::Menu() :
SLOT(cycleFrustumRenderMode()));
updateFrustumRenderModeAction();
Audio::SharedPointer audioIO = DependencyManager::get<Audio>();
QMenu* audioDebugMenu = developerMenu->addMenu("Audio");
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioNoiseReduction,
0,
true,
appInstance->getAudio(),
audioIO.data(),
SLOT(toggleAudioNoiseReduction()));
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::EchoServerAudio);
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::EchoLocalAudio);
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::EchoServerAudio, 0, false,
audioIO.data(), SLOT(toggleServerEcho()));
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::EchoLocalAudio, 0, false,
audioIO.data(), SLOT(toggleLocalEcho()));
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::StereoAudio, 0, false,
appInstance->getAudio(), SLOT(toggleStereoInput()));
audioIO.data(), SLOT(toggleStereoInput()));
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::MuteAudio,
Qt::CTRL | Qt::Key_M,
false,
appInstance->getAudio(),
audioIO.data(),
SLOT(toggleMute()));
addActionToQMenuAndActionHash(audioDebugMenu,
MenuOption::MuteEnvironment,
0,
this,
SLOT(muteEnvironment()));
audioIO.data(),
SLOT(sendMuteEnvironmentPacket()));
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioSourceInject,
0,
false,
appInstance->getAudio(),
audioIO.data(),
SLOT(toggleAudioSourceInject()));
QMenu* audioSourceMenu = audioDebugMenu->addMenu("Generated Audio Source");
{
QAction *pinkNoise = addCheckableActionToQMenuAndActionHash(audioSourceMenu, MenuOption::AudioSourcePinkNoise,
0,
false,
appInstance->getAudio(),
audioIO.data(),
SLOT(selectAudioSourcePinkNoise()));
QAction *sine440 = addCheckableActionToQMenuAndActionHash(audioSourceMenu, MenuOption::AudioSourceSine440,
0,
true,
appInstance->getAudio(),
audioIO.data(),
SLOT(selectAudioSourceSine440()));
QActionGroup* audioSourceGroup = new QActionGroup(audioSourceMenu);
audioSourceGroup->addAction(pinkNoise);
audioSourceGroup->addAction(sine440);
}
AudioScope::SharedPointer scope = DependencyManager::get<AudioScope>();
QMenu* audioScopeMenu = audioDebugMenu->addMenu("Audio Scope");
addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScope,
Qt::CTRL | Qt::Key_P, false,
appInstance->getAudio(),
SLOT(toggleScope()));
scope.data(),
SLOT(toggle()));
addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopePause,
Qt::CTRL | Qt::SHIFT | Qt::Key_P ,
false,
appInstance->getAudio(),
SLOT(toggleScopePause()));
scope.data(),
SLOT(togglePause()));
addDisabledActionAndSeparator(audioScopeMenu, "Display Frames");
{
QAction *fiveFrames = addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopeFiveFrames,
0,
true,
appInstance->getAudio(),
scope.data(),
SLOT(selectAudioScopeFiveFrames()));
QAction *twentyFrames = addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopeTwentyFrames,
0,
false,
appInstance->getAudio(),
scope.data(),
SLOT(selectAudioScopeTwentyFrames()));
QAction *fiftyFrames = addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopeFiftyFrames,
0,
false,
appInstance->getAudio(),
scope.data(),
SLOT(selectAudioScopeFiftyFrames()));
QActionGroup* audioScopeFramesGroup = new QActionGroup(audioScopeMenu);
@ -609,20 +609,21 @@ Menu::Menu() :
audioScopeFramesGroup->addAction(twentyFrames);
audioScopeFramesGroup->addAction(fiftyFrames);
}
AudioIOStatsRenderer::SharedPointer statsRenderer = DependencyManager::get<AudioIOStatsRenderer>();
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioStats,
Qt::CTRL | Qt::Key_A,
false,
appInstance->getAudio(),
SLOT(toggleStats()));
Qt::CTRL | Qt::Key_A,
false,
statsRenderer.data(),
SLOT(toggle()));
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioStatsShowInjectedStreams,
0,
false,
appInstance->getAudio(),
SLOT(toggleStatsShowInjectedStreams()));
statsRenderer.data(),
SLOT(toggleShowInjectedStreams()));
connect(appInstance->getAudio(), SIGNAL(muteToggled()), this, SLOT(audioMuteToggled()));
connect(audioIO.data(), SIGNAL(muteToggled()), this, SLOT(audioMuteToggled()));
#ifndef Q_OS_MAC
QMenu* helpMenu = addMenu("Help");
@ -763,7 +764,7 @@ void Menu::saveSettings(QSettings* settings) {
scanMenuBar(&saveAction, settings);
Application::getInstance()->getAvatar()->saveData(settings);
settings->setValue(SETTINGS_ADDRESS_KEY, AddressManager::getInstance().currentAddress());
DependencyManager::get<AddressManager>()->storeCurrentAddress();
if (lockedSettings) {
Application::getInstance()->unlockSettings();
@ -1147,40 +1148,6 @@ void Menu::toggleAddressBar() {
}
}
void Menu::displayAddressOfflineMessage() {
QMessageBox::information(Application::getInstance()->getWindow(), "Address offline",
"That user or place is currently offline.");
}
void Menu::displayAddressNotFoundMessage() {
QMessageBox::information(Application::getInstance()->getWindow(), "Address not found",
"There is no address information for that user or place.");
}
void Menu::muteEnvironment() {
int headerSize = numBytesForPacketHeaderGivenPacketType(PacketTypeMuteEnvironment);
int packetSize = headerSize + sizeof(glm::vec3) + sizeof(float);
glm::vec3 position = Application::getInstance()->getAvatar()->getPosition();
char* packet = (char*)malloc(packetSize);
populatePacketHeader(packet, PacketTypeMuteEnvironment);
memcpy(packet + headerSize, &position, sizeof(glm::vec3));
memcpy(packet + headerSize + sizeof(glm::vec3), &MUTE_RADIUS, sizeof(float));
QByteArray mutePacket(packet, packetSize);
// grab our audio mixer from the NodeList, if it exists
SharedNodePointer audioMixer = NodeList::getInstance()->soloNodeOfType(NodeType::AudioMixer);
if (audioMixer) {
// send off this mute packet
NodeList::getInstance()->writeDatagram(mutePacket, audioMixer);
}
free(packet);
}
void Menu::changeVSync() {
Application::getInstance()->setVSyncEnabled(isOptionChecked(MenuOption::RenderTargetFramerateVSyncOn));
}
@ -1197,7 +1164,7 @@ void Menu::displayNameLocationResponse(const QString& errorString) {
void Menu::toggleLocationList() {
if (!_userLocationsDialog) {
JavascriptObjectMap locationObjectMap;
locationObjectMap.insert("InterfaceLocation", &AddressManager::getInstance());
locationObjectMap.insert("InterfaceLocation", DependencyManager::get<AddressManager>().data());
_userLocationsDialog = DataWebDialog::dialogForPath("/user/locations", locationObjectMap);
}
@ -1241,7 +1208,7 @@ void Menu::nameLocation() {
if (!_newLocationDialog) {
JavascriptObjectMap locationObjectMap;
locationObjectMap.insert("InterfaceLocation", &AddressManager::getInstance());
locationObjectMap.insert("InterfaceLocation", DependencyManager::get<AddressManager>().data());
_newLocationDialog = DataWebDialog::dialogForPath("/user/locations/new", locationObjectMap);
}
@ -1371,7 +1338,7 @@ void Menu::toggleToolWindow() {
void Menu::audioMuteToggled() {
QAction *muteAction = _actionHash.value(MenuOption::MuteAudio);
if (muteAction) {
muteAction->setChecked(Application::getInstance()->getAudio()->getMuted());
muteAction->setChecked(DependencyManager::get<Audio>()->isMuted());
}
}

View file

@ -51,8 +51,6 @@ const float ADJUST_LOD_MAX_SIZE_SCALE = DEFAULT_OCTREE_SIZE_SCALE;
const float MINIMUM_AVATAR_LOD_DISTANCE_MULTIPLIER = 0.1f;
const float MAXIMUM_AVATAR_LOD_DISTANCE_MULTIPLIER = 15.0f;
const QString SETTINGS_ADDRESS_KEY = "address";
enum FrustumDrawMode {
FRUSTUM_DRAW_MODE_ALL,
FRUSTUM_DRAW_MODE_VECTORS,
@ -229,9 +227,6 @@ private slots:
void toggleChat();
void audioMuteToggled();
void displayNameLocationResponse(const QString& errorString);
void displayAddressOfflineMessage();
void displayAddressNotFoundMessage();
void muteEnvironment();
void changeVSync();
private:

View file

@ -0,0 +1,134 @@
//
// AudioStats.cpp
// interface/src/audio
//
// Created by Stephen Birarda on 2014-12-16.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InterfaceConfig.h"
#include <AudioConstants.h>
#include <MixedProcessedAudioStream.h>
#include <NodeList.h>
#include <PositionalAudioStream.h>
#include "Audio.h"
#include "AudioIOStats.h"
const int FRAMES_AVAILABLE_STATS_WINDOW_SECONDS = 10;
const int APPROXIMATELY_30_SECONDS_OF_AUDIO_PACKETS = (int)(30.0f * 1000.0f / AudioConstants::NETWORK_FRAME_MSECS);
AudioIOStats::AudioIOStats(MixedProcessedAudioStream* receivedAudioStream) :
_receivedAudioStream(receivedAudioStream),
_audioInputMsecsReadStats(MSECS_PER_SECOND / (float)AudioConstants::NETWORK_FRAME_MSECS * Audio::CALLBACK_ACCELERATOR_RATIO, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS),
_inputRingBufferMsecsAvailableStats(1, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS),
_audioOutputMsecsUnplayedStats(1, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS),
_lastSentAudioPacket(0),
_packetSentTimeGaps(1, APPROXIMATELY_30_SECONDS_OF_AUDIO_PACKETS)
{
}
AudioStreamStats AudioIOStats::getMixerDownstreamStats() const {
return _receivedAudioStream->getAudioStreamStats();
}
void AudioIOStats::reset() {
_receivedAudioStream->resetStats();
_mixerAvatarStreamStats = AudioStreamStats();
_mixerInjectedStreamStatsMap.clear();
_audioInputMsecsReadStats.reset();
_inputRingBufferMsecsAvailableStats.reset();
_audioOutputMsecsUnplayedStats.reset();
_packetSentTimeGaps.reset();
}
void AudioIOStats::sentPacket() {
// first time this is 0
if (_lastSentAudioPacket == 0) {
_lastSentAudioPacket = usecTimestampNow();
} else {
quint64 now = usecTimestampNow();
quint64 gap = now - _lastSentAudioPacket;
_packetSentTimeGaps.update(gap);
_lastSentAudioPacket = now;
}
}
void AudioIOStats::parseAudioStreamStatsPacket(const QByteArray& packet) {
int numBytesPacketHeader = numBytesForPacketHeader(packet);
const char* dataAt = packet.constData() + numBytesPacketHeader;
// parse the appendFlag, clear injected audio stream stats if 0
quint8 appendFlag = *(reinterpret_cast<const quint16*>(dataAt));
dataAt += sizeof(quint8);
if (!appendFlag) {
_mixerInjectedStreamStatsMap.clear();
}
// parse the number of stream stats structs to follow
quint16 numStreamStats = *(reinterpret_cast<const quint16*>(dataAt));
dataAt += sizeof(quint16);
// parse the stream stats
AudioStreamStats streamStats;
for (quint16 i = 0; i < numStreamStats; i++) {
memcpy(&streamStats, dataAt, sizeof(AudioStreamStats));
dataAt += sizeof(AudioStreamStats);
if (streamStats._streamType == PositionalAudioStream::Microphone) {
_mixerAvatarStreamStats = streamStats;
} else {
_mixerInjectedStreamStatsMap[streamStats._streamIdentifier] = streamStats;
}
}
}
void AudioIOStats::sendDownstreamAudioStatsPacket() {
Audio::SharedPointer audioIO = DependencyManager::get<Audio>();
// since this function is called every second, we'll sample for some of our stats here
_inputRingBufferMsecsAvailableStats.update(audioIO->getInputRingBufferMsecsAvailable());
_audioOutputMsecsUnplayedStats.update(audioIO->getAudioOutputMsecsUnplayed());
// also, call _receivedAudioStream's per-second callback
_receivedAudioStream->perSecondCallbackForUpdatingStats();
char packet[MAX_PACKET_SIZE];
// pack header
int numBytesPacketHeader = populatePacketHeader(packet, PacketTypeAudioStreamStats);
char* dataAt = packet + numBytesPacketHeader;
// pack append flag
quint8 appendFlag = 0;
memcpy(dataAt, &appendFlag, sizeof(quint8));
dataAt += sizeof(quint8);
// pack number of stats packed
quint16 numStreamStatsToPack = 1;
memcpy(dataAt, &numStreamStatsToPack, sizeof(quint16));
dataAt += sizeof(quint16);
// pack downstream audio stream stats
AudioStreamStats stats = _receivedAudioStream->getAudioStreamStats();
memcpy(dataAt, &stats, sizeof(AudioStreamStats));
dataAt += sizeof(AudioStreamStats);
// send packet
NodeList* nodeList = NodeList::getInstance();
SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
nodeList->writeDatagram(packet, dataAt - packet, audioMixer);
}

View file

@ -0,0 +1,60 @@
//
// AudioIOStats.h
// interface/src/audio
//
// Created by Stephen Birarda on 2014-12-16.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AudioIOStats_h
#define hifi_AudioIOStats_h
#include "MovingMinMaxAvg.h"
#include <QObject>
#include <AudioStreamStats.h>
class MixedProcessedAudioStream;
class AudioIOStats : public QObject {
Q_OBJECT
public:
AudioIOStats(MixedProcessedAudioStream* receivedAudioStream);
void reset();
void updateInputMsecsRead(float msecsRead) { _audioInputMsecsReadStats.update(msecsRead); }
void sentPacket();
AudioStreamStats getMixerDownstreamStats() const;
const AudioStreamStats& getMixerAvatarStreamStats() const { return _mixerAvatarStreamStats; }
const QHash<QUuid, AudioStreamStats>& getMixerInjectedStreamStatsMap() const { return _mixerInjectedStreamStatsMap; }
const MovingMinMaxAvg<float>& getAudioInputMsecsReadStats() const { return _audioInputMsecsReadStats; }
const MovingMinMaxAvg<float>& getInputRungBufferMsecsAvailableStats() const { return _inputRingBufferMsecsAvailableStats; }
const MovingMinMaxAvg<float>& getAudioOutputMsecsUnplayedStats() const { return _audioOutputMsecsUnplayedStats; }
const MovingMinMaxAvg<quint64>& getPacketSentTimeGaps() const { return _packetSentTimeGaps; }
void sendDownstreamAudioStatsPacket();
void parseAudioStreamStatsPacket(const QByteArray& packet);
private:
MixedProcessedAudioStream* _receivedAudioStream;
MovingMinMaxAvg<float> _audioInputMsecsReadStats;
MovingMinMaxAvg<float> _inputRingBufferMsecsAvailableStats;
MovingMinMaxAvg<float> _audioOutputMsecsUnplayedStats;
AudioStreamStats _mixerAvatarStreamStats;
QHash<QUuid, AudioStreamStats> _mixerInjectedStreamStatsMap;
quint64 _lastSentAudioPacket;
MovingMinMaxAvg<quint64> _packetSentTimeGaps;
};
#endif // hifi_AudioIOStats_h

View file

@ -0,0 +1,238 @@
//
// AudioIOStatsRenderer.cpp
// interface/src/audio
//
// Created by Stephen Birarda on 2014-12-16.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InterfaceConfig.h"
#include <AudioConstants.h>
#include <DependencyManager.h>
#include <NodeList.h>
#include <Util.h>
#include "Audio.h"
#include "AudioIOStats.h"
#include "AudioIOStatsRenderer.h"
AudioIOStatsRenderer::AudioIOStatsRenderer() :
_stats(NULL),
_isEnabled(false),
_shouldShowInjectedStreams(false)
{
// grab the stats object from the audio I/O singleton
_stats = &DependencyManager::get<Audio>()->getStats();
}
// Width of the stats overlay panel in pixels.
// NOTE(review): presumably the wider Windows value compensates for different
// font metrics on that platform — confirm against drawText's font handling.
#ifdef _WIN32
const unsigned int STATS_WIDTH = 1500;
#else
const unsigned int STATS_WIDTH = 650;
#endif
// Vertical advance, in pixels, for each rendered line of stats text.
const unsigned int STATS_HEIGHT_PER_LINE = 20;
// Renders the audio I/O statistics overlay: an end-to-end latency breakdown,
// upstream packet-gap stats, and upstream/downstream stream stats reported by
// the audio-mixer. No-op while the overlay is toggled off.
//   color  - RGBA text color passed through to drawText
//   width  - viewport width in pixels (panel is horizontally centered in it)
//   height - viewport height in pixels (panel is vertically centered in it)
void AudioIOStatsRenderer::render(const float* color, int width, int height) {
    if (!_isEnabled) {
        return;
    }
    // Center using the fixed (injected-streams-hidden) height so toggling
    // injected-stream display doesn't move the panel's anchor point.
    const int linesWhenCentered = _shouldShowInjectedStreams ? 34 : 27;
    const int CENTERED_BACKGROUND_HEIGHT = STATS_HEIGHT_PER_LINE * linesWhenCentered;
    // 7 extra lines are drawn for each injected stream when those are shown.
    int lines = _shouldShowInjectedStreams ? _stats->getMixerInjectedStreamStatsMap().size() * 7 + 27 : 27;
    int statsHeight = STATS_HEIGHT_PER_LINE * lines;
    static const float backgroundColor[4] = { 0.2f, 0.2f, 0.2f, 0.6f };
    // Clamp to the top-left corner if the viewport is smaller than the panel.
    int x = std::max((width - (int)STATS_WIDTH) / 2, 0);
    int y = std::max((height - CENTERED_BACKGROUND_HEIGHT) / 2, 0);
    int backgroundHeight = statsHeight;
    // Semi-transparent background quad (legacy immediate-mode GL).
    glColor4fv(backgroundColor);
    glBegin(GL_QUADS);
    glVertex2i(x, y);
    glVertex2i(x + STATS_WIDTH, y);
    glVertex2i(x + STATS_WIDTH, y + backgroundHeight);
    glVertex2i(x , y + backgroundHeight);
    glEnd();
    glColor4f(1, 1, 1, 1);
    int horizontalOffset = x + 5;
    int verticalOffset = y;
    float scale = 0.10f;
    float rotation = 0.0f;
    int font = 2;
    char latencyStatString[512];
    float audioInputBufferLatency = 0.0f, inputRingBufferLatency = 0.0f, networkRoundtripLatency = 0.0f, mixerRingBufferLatency = 0.0f, outputRingBufferLatency = 0.0f, audioOutputBufferLatency = 0.0f;
    AudioStreamStats downstreamAudioStreamStats = _stats->getMixerDownstreamStats();
    SharedNodePointer audioMixerNodePointer = NodeList::getInstance()->soloNodeOfType(NodeType::AudioMixer);
    if (!audioMixerNodePointer.isNull()) {
        audioInputBufferLatency = _stats->getAudioInputMsecsReadStats().getWindowAverage();
        inputRingBufferLatency = (float) _stats->getInputRungBufferMsecsAvailableStats().getWindowAverage();
        // ping is a full roundtrip; halves are attributed to each direction below
        networkRoundtripLatency = audioMixerNodePointer->getPingMs();
        mixerRingBufferLatency = _stats->getMixerAvatarStreamStats()._framesAvailableAverage * AudioConstants::NETWORK_FRAME_MSECS;
        outputRingBufferLatency = downstreamAudioStreamStats._framesAvailableAverage * AudioConstants::NETWORK_FRAME_MSECS;
        audioOutputBufferLatency = _stats->getAudioOutputMsecsUnplayedStats().getWindowAverage();
    }
    float totalLatency = audioInputBufferLatency + inputRingBufferLatency + networkRoundtripLatency + mixerRingBufferLatency + outputRingBufferLatency + audioOutputBufferLatency;
    // snprintf (rather than sprintf) bounds every write to the fixed-size
    // stack buffers, guarding against overflow from unexpectedly wide values.
    snprintf(latencyStatString, sizeof(latencyStatString), " Audio input buffer: %7.2fms - avg msecs of samples read to the input ring buffer in last 10s", audioInputBufferLatency);
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
    snprintf(latencyStatString, sizeof(latencyStatString), " Input ring buffer: %7.2fms - avg msecs of samples in input ring buffer in last 10s", inputRingBufferLatency);
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
    snprintf(latencyStatString, sizeof(latencyStatString), " Network to mixer: %7.2fms - half of last ping value calculated by the node list", networkRoundtripLatency / 2.0f);
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
    snprintf(latencyStatString, sizeof(latencyStatString), " AudioMixer ring buffer: %7.2fms - avg msecs of samples in audio mixer's ring buffer in last 10s", mixerRingBufferLatency);
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
    snprintf(latencyStatString, sizeof(latencyStatString), " Network to client: %7.2fms - half of last ping value calculated by the node list", networkRoundtripLatency / 2.0f);
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
    snprintf(latencyStatString, sizeof(latencyStatString), " Output ring buffer: %7.2fms - avg msecs of samples in output ring buffer in last 10s", outputRingBufferLatency);
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
    snprintf(latencyStatString, sizeof(latencyStatString), " Audio output buffer: %7.2fms - avg msecs of samples in audio output buffer in last 10s", audioOutputBufferLatency);
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
    snprintf(latencyStatString, sizeof(latencyStatString), " TOTAL: %7.2fms\n", totalLatency);
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
    verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
    char clientUpstreamMicLabelString[] = "Upstream Mic Audio Packets Sent Gaps (by client):";
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, clientUpstreamMicLabelString, color);
    const MovingMinMaxAvg<quint64>& packetSentTimeGaps = _stats->getPacketSentTimeGaps();
    char stringBuffer[512];
    snprintf(stringBuffer, sizeof(stringBuffer), " Inter-packet timegaps (overall) | min: %9s, max: %9s, avg: %9s",
        formatUsecTime(packetSentTimeGaps.getMin()).toLatin1().data(),
        formatUsecTime(packetSentTimeGaps.getMax()).toLatin1().data(),
        formatUsecTime(packetSentTimeGaps.getAverage()).toLatin1().data());
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
    snprintf(stringBuffer, sizeof(stringBuffer), " Inter-packet timegaps (last 30s) | min: %9s, max: %9s, avg: %9s",
        formatUsecTime(packetSentTimeGaps.getWindowMin()).toLatin1().data(),
        formatUsecTime(packetSentTimeGaps.getWindowMax()).toLatin1().data(),
        formatUsecTime(packetSentTimeGaps.getWindowAverage()).toLatin1().data());
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
    verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
    char upstreamMicLabelString[] = "Upstream mic audio stats (received and reported by audio-mixer):";
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, upstreamMicLabelString, color);
    renderAudioStreamStats(&_stats->getMixerAvatarStreamStats(), horizontalOffset, verticalOffset,
        scale, rotation, font, color);
    verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
    char downstreamLabelString[] = "Downstream mixed audio stats:";
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, downstreamLabelString, color);
    AudioStreamStats downstreamStats = _stats->getMixerDownstreamStats();
    renderAudioStreamStats(&downstreamStats, horizontalOffset, verticalOffset,
        scale, rotation, font, color, true);
    if (_shouldShowInjectedStreams) {
        foreach(const AudioStreamStats& injectedStreamAudioStats, _stats->getMixerInjectedStreamStatsMap()) {
            verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
            char upstreamInjectedLabelString[512];
            // stream IDs are runtime strings — bounded write is essential here
            snprintf(upstreamInjectedLabelString, sizeof(upstreamInjectedLabelString), "Upstream injected audio stats: stream ID: %s",
                injectedStreamAudioStats._streamIdentifier.toString().toLatin1().data());
            verticalOffset += STATS_HEIGHT_PER_LINE;
            drawText(horizontalOffset, verticalOffset, scale, rotation, font, upstreamInjectedLabelString, color);
            renderAudioStreamStats(&injectedStreamAudioStats, horizontalOffset, verticalOffset, scale, rotation, font, color);
        }
    }
}
// Draws the block of statistics common to every audio stream: packet loss,
// ring buffer frame counts, starve/overflow counters, and inter-packet time
// gaps. Advances verticalOffset by one line per row drawn.
//   streamStats       - stats for the stream being displayed (must be non-null)
//   isDownstreamStats - when true, "+N" terms are appended to the frame counts
//                       (NOTE(review): presumably folding device-side buffer
//                       contents into the displayed totals — confirm)
void AudioIOStatsRenderer::renderAudioStreamStats(const AudioStreamStats* streamStats, int horizontalOffset, int& verticalOffset,
    float scale, float rotation, int font, const float* color, bool isDownstreamStats) {
    char stringBuffer[512];
    // snprintf (rather than sprintf) bounds every write to the fixed-size buffer
    snprintf(stringBuffer, sizeof(stringBuffer), " Packet loss | overall: %5.2f%% (%d lost), last_30s: %5.2f%% (%d lost)",
        streamStats->_packetStreamStats.getLostRate() * 100.0f,
        streamStats->_packetStreamStats._lost,
        streamStats->_packetStreamWindowStats.getLostRate() * 100.0f,
        streamStats->_packetStreamWindowStats._lost);
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
    if (isDownstreamStats) {
        snprintf(stringBuffer, sizeof(stringBuffer), " Ringbuffer frames | desired: %u, avg_available(10s): %u+%d, available: %u+%d",
            streamStats->_desiredJitterBufferFrames,
            streamStats->_framesAvailableAverage,
            (int)(_stats->getAudioInputMsecsReadStats().getWindowAverage() / AudioConstants::NETWORK_FRAME_MSECS),
            streamStats->_framesAvailable,
            (int)(_stats->getAudioOutputMsecsUnplayedStats().getCurrentIntervalLastSample()
                / AudioConstants::NETWORK_FRAME_MSECS));
    } else {
        snprintf(stringBuffer, sizeof(stringBuffer), " Ringbuffer frames | desired: %u, avg_available(10s): %u, available: %u",
            streamStats->_desiredJitterBufferFrames,
            streamStats->_framesAvailableAverage,
            streamStats->_framesAvailable);
    }
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
    snprintf(stringBuffer, sizeof(stringBuffer), " Ringbuffer stats | starves: %u, prev_starve_lasted: %u, frames_dropped: %u, overflows: %u",
        streamStats->_starveCount,
        streamStats->_consecutiveNotMixedCount,
        streamStats->_framesDropped,
        streamStats->_overflowCount);
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
    snprintf(stringBuffer, sizeof(stringBuffer), " Inter-packet timegaps (overall) | min: %9s, max: %9s, avg: %9s",
        formatUsecTime(streamStats->_timeGapMin).toLatin1().data(),
        formatUsecTime(streamStats->_timeGapMax).toLatin1().data(),
        formatUsecTime(streamStats->_timeGapAverage).toLatin1().data());
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
    snprintf(stringBuffer, sizeof(stringBuffer), " Inter-packet timegaps (last 30s) | min: %9s, max: %9s, avg: %9s",
        formatUsecTime(streamStats->_timeGapWindowMin).toLatin1().data(),
        formatUsecTime(streamStats->_timeGapWindowMax).toLatin1().data(),
        formatUsecTime(streamStats->_timeGapWindowAverage).toLatin1().data());
    verticalOffset += STATS_HEIGHT_PER_LINE;
    drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
}

View file

@ -0,0 +1,45 @@
//
// AudioIOStatsRenderer.h
// interface/src/audio
//
// Created by Stephen Birarda on 2014-12-16.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AudioIOStatsRenderer_h
#define hifi_AudioIOStatsRenderer_h
#include <QObject>
#include <DependencyManager.h>
class AudioIOStats;
class AudioStreamStats;
// On-screen overlay renderer for audio I/O statistics (latency breakdown,
// packet time gaps, and upstream/downstream stream stats). Reads its data
// from the AudioIOStats owned by the audio I/O singleton; registered as a
// singleton dependency itself.
class AudioIOStatsRenderer : public QObject {
    Q_OBJECT
    SINGLETON_DEPENDENCY(AudioIOStatsRenderer)
public:
    // Draws the stats panel centered in a width x height viewport; no-op
    // while the overlay is disabled.
    void render(const float* color, int width, int height);
public slots:
    // Shows/hides the whole overlay.
    void toggle() { _isEnabled = !_isEnabled; }
    // Shows/hides the per-injected-stream stats section of the overlay.
    void toggleShowInjectedStreams() { _shouldShowInjectedStreams = !_shouldShowInjectedStreams; }
protected:
    AudioIOStatsRenderer();
private:
    // audio stats methods for rendering
    void renderAudioStreamStats(const AudioStreamStats* streamStats, int horizontalOffset, int& verticalOffset,
        float scale, float rotation, int font, const float* color, bool isDownstreamStats = false);
    const AudioIOStats* _stats;    // source of all displayed data; not owned
    bool _isEnabled;               // overlay visibility flag
    bool _shouldShowInjectedStreams;
};
#endif // hifi_AudioIOStatsRenderer_h

View file

@ -0,0 +1,143 @@
//
// AudioNoiseGate.cpp
// interface/src/audio
//
// Created by Stephen Birarda on 2014-12-16.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <string.h>
#include <AudioConstants.h>
#include "AudioNoiseGate.h"
// Samples at or above this fraction of the maximum sample value count as clipping.
const float AudioNoiseGate::CLIPPING_THRESHOLD = 0.90f;
// Initializes the gate closed, with no measured noise floor or DC offset;
// both are learned from the incoming frames in gateSamples().
AudioNoiseGate::AudioNoiseGate() :
    _inputFrameCounter(0),
    _lastLoudness(0.0f),
    _quietestFrame(std::numeric_limits<float>::max()),
    _loudestFrame(0.0f),
    _didClipInLastFrame(false),
    _dcOffset(0.0f),
    _measuredFloor(0.0f),
    _sampleCounter(0),
    _isOpen(false),
    _framesToClose(0)
{
}
// Applies DC-offset removal, clipping detection, and a noise gate to one
// frame of mono int16 samples, in place. When the gate is closed the frame
// is zeroed (silence).
//   samples    - pointer to the frame's samples; modified in place
//   numSamples - number of samples in the frame
void AudioNoiseGate::gateSamples(int16_t* samples, int numSamples) {
    //
    // Impose Noise Gate
    //
    // The Noise Gate is used to reject constant background noise by measuring the noise
    // floor observed at the microphone and then opening the 'gate' to allow microphone
    // signals to be transmitted when the microphone samples average level exceeds a multiple
    // of the noise floor.
    //
    // NOISE_GATE_HEIGHT: How loud you have to speak relative to noise background to open the gate.
    //                    Make this value lower for more sensitivity and less rejection of noise.
    // NOISE_GATE_WIDTH: The number of samples in an audio frame for which the height must be exceeded
    //                   to open the gate.
    // NOISE_GATE_CLOSE_FRAME_DELAY: Once the noise is below the gate height for the frame, how many frames
    //                               will we wait before closing the gate.
    // NOISE_GATE_FRAMES_TO_AVERAGE: How many audio frames should we average together to compute noise floor.
    //                               More means better rejection but also can reject continuous things like singing.
    // NUMBER_OF_NOISE_SAMPLE_FRAMES: How often should we re-evaluate the noise floor?

    if (numSamples <= 0) {
        // nothing to gate; also guards the divisions by numSamples below
        return;
    }

    float loudness = 0;
    float thisSample = 0;
    int samplesOverNoiseGate = 0;

    const float NOISE_GATE_HEIGHT = 7.0f;
    const int NOISE_GATE_WIDTH = 5;
    const int NOISE_GATE_CLOSE_FRAME_DELAY = 5;
    const int NOISE_GATE_FRAMES_TO_AVERAGE = 5;
    const float DC_OFFSET_AVERAGING = 0.99f;

    // Check clipping, adjust DC offset, and check if should open noise gate
    float measuredDcOffset = 0.0f;
    _didClipInLastFrame = false;

    for (int i = 0; i < numSamples; i++) {
        // accumulate the raw (pre-correction) sample for this frame's DC measurement
        measuredDcOffset += samples[i];
        samples[i] -= (int16_t) _dcOffset;
        thisSample = fabsf(samples[i]);

        if (thisSample >= ((float) AudioConstants::MAX_SAMPLE_VALUE * CLIPPING_THRESHOLD)) {
            _didClipInLastFrame = true;
        }

        loudness += thisSample;
        // Noise Reduction: Count peaks above the average loudness
        if (thisSample > (_measuredFloor * NOISE_GATE_HEIGHT)) {
            samplesOverNoiseGate++;
        }
    }

    measuredDcOffset /= numSamples;
    if (_dcOffset == 0.0f) {
        // On first frame, copy over measured offset
        _dcOffset = measuredDcOffset;
    } else {
        // otherwise, low-pass filter the running offset toward the new measurement
        _dcOffset = DC_OFFSET_AVERAGING * _dcOffset + (1.0f - DC_OFFSET_AVERAGING) * measuredDcOffset;
    }

    _lastLoudness = fabs(loudness / numSamples);
    if (_quietestFrame > _lastLoudness) {
        _quietestFrame = _lastLoudness;
    }
    if (_loudestFrame < _lastLoudness) {
        _loudestFrame = _lastLoudness;
    }

    // periodically reset the quietest/loudest trackers so they can follow
    // changes in the acoustic environment
    const int FRAMES_FOR_NOISE_DETECTION = 400;
    if (_inputFrameCounter++ > FRAMES_FOR_NOISE_DETECTION) {
        _quietestFrame = std::numeric_limits<float>::max();
        _loudestFrame = 0.0f;
        _inputFrameCounter = 0;
    }

    // If Noise Gate is enabled, check and turn the gate on and off
    float averageOfAllSampleFrames = 0.0f;
    _sampleFrames[_sampleCounter++] = _lastLoudness;
    if (_sampleCounter == NUMBER_OF_NOISE_SAMPLE_FRAMES) {
        // re-estimate the noise floor as the quietest average over any
        // NOISE_GATE_FRAMES_TO_AVERAGE-frame window of the sample history
        float smallestSample = std::numeric_limits<float>::max();
        for (int i = 0; i <= NUMBER_OF_NOISE_SAMPLE_FRAMES - NOISE_GATE_FRAMES_TO_AVERAGE; i += NOISE_GATE_FRAMES_TO_AVERAGE) {
            float thisAverage = 0.0f;
            for (int j = i; j < i + NOISE_GATE_FRAMES_TO_AVERAGE; j++) {
                thisAverage += _sampleFrames[j];
                averageOfAllSampleFrames += _sampleFrames[j];
            }
            thisAverage /= NOISE_GATE_FRAMES_TO_AVERAGE;

            if (thisAverage < smallestSample) {
                smallestSample = thisAverage;
            }
        }
        averageOfAllSampleFrames /= NUMBER_OF_NOISE_SAMPLE_FRAMES;
        _measuredFloor = smallestSample;
        _sampleCounter = 0;
    }

    if (samplesOverNoiseGate > NOISE_GATE_WIDTH) {
        _isOpen = true;
        _framesToClose = NOISE_GATE_CLOSE_FRAME_DELAY;
    } else if (_framesToClose > 0 && --_framesToClose == 0) {
        // only count down while the close delay is pending; the unconditional
        // decrement previously drove the counter negative without bound once
        // the gate had already closed
        _isOpen = false;
    }

    if (!_isOpen) {
        // gate closed: replace the frame with silence
        memset(samples, 0, numSamples * sizeof(int16_t));
        _lastLoudness = 0;
    }
}

Some files were not shown because too many files have changed in this diff Show more