Merge branch 'master' of https://github.com/worklist/hifi
Commit 0508726c40
21 changed files with 6019 additions and 3885 deletions
@@ -465,6 +465,63 @@ int AudioMixer::prepareMixForListeningNode(Node* node) {
    return streamsMixed;
}

void AudioMixer::sendAudioEnvironmentPacket(SharedNodePointer node) {
    static char clientEnvBuffer[MAX_PACKET_SIZE];

    // Send stream properties
    bool hasReverb = false;
    float reverbTime, wetLevel;
    // find reverb properties
    for (int i = 0; i < _zoneReverbSettings.size(); ++i) {
        AudioMixerClientData* data = static_cast<AudioMixerClientData*>(node->getLinkedData());
        glm::vec3 streamPosition = data->getAvatarAudioStream()->getPosition();
        if (_audioZones[_zoneReverbSettings[i].zone].contains(streamPosition)) {
            hasReverb = true;
            reverbTime = _zoneReverbSettings[i].reverbTime;
            wetLevel = _zoneReverbSettings[i].wetLevel;
            break;
        }
    }
    AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
    AvatarAudioStream* stream = nodeData->getAvatarAudioStream();
    bool dataChanged = (stream->hasReverb() != hasReverb) ||
        (stream->hasReverb() && (stream->getRevebTime() != reverbTime ||
                                 stream->getWetLevel() != wetLevel));
    if (dataChanged) {
        // Update stream
        if (hasReverb) {
            stream->setReverb(reverbTime, wetLevel);
        } else {
            stream->clearReverb();
        }
    }

    // Send at change or every so often
    float CHANCE_OF_SEND = 0.01f;
    bool sendData = dataChanged || (randFloat() < CHANCE_OF_SEND);

    if (sendData) {
        int numBytesEnvPacketHeader = populatePacketHeader(clientEnvBuffer, PacketTypeAudioEnvironment);
        char* envDataAt = clientEnvBuffer + numBytesEnvPacketHeader;

        unsigned char bitset = 0;
        if (hasReverb) {
            setAtBit(bitset, HAS_REVERB_BIT);
        }

        memcpy(envDataAt, &bitset, sizeof(unsigned char));
        envDataAt += sizeof(unsigned char);

        if (hasReverb) {
            memcpy(envDataAt, &reverbTime, sizeof(float));
            envDataAt += sizeof(float);
            memcpy(envDataAt, &wetLevel, sizeof(float));
            envDataAt += sizeof(float);
        }
        NodeList::getInstance()->writeDatagram(clientEnvBuffer, envDataAt - clientEnvBuffer, node);
    }
}

void AudioMixer::readPendingDatagram(const QByteArray& receivedPacket, const HifiSockAddr& senderSockAddr) {
    NodeList* nodeList = NodeList::getInstance();
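The payload sendAudioEnvironmentPacket() writes after the packet header is one flag byte followed by two optional floats. A minimal reader sketch, not part of this commit (it assumes the flag sits in bit 0, matching setAtBit(bitset, HAS_REVERB_BIT), and a little-endian host; names are hypothetical):

    // Hypothetical decoder for the AudioEnvironment payload written above.
    // `payload` is an ArrayBuffer starting just past the packet header.
    function decodeAudioEnvironment(payload) {
        var view = new DataView(payload);
        var bitset = view.getUint8(0);
        var env = { hasReverb: (bitset & 0x1) !== 0 };  // assumes HAS_REVERB_BIT == 0
        if (env.hasReverb) {
            env.reverbTime = view.getFloat32(1, true);  // little-endian float
            env.wetLevel = view.getFloat32(5, true);
        }
        return env;
    }

Because dataChanged only triggers on actual changes, CHANCE_OF_SEND (1% per mix iteration) acts as a keep-alive so late joiners and lossy links eventually receive the current environment settings.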
@@ -642,7 +699,6 @@ void AudioMixer::run() {
    timer.start();

    char clientMixBuffer[MAX_PACKET_SIZE];
    char clientEnvBuffer[MAX_PACKET_SIZE];

    int usecToSleep = BUFFER_SEND_INTERVAL_USECS;

@@ -759,58 +815,6 @@ void AudioMixer::run() {
                // pack mixed audio samples
                memcpy(mixDataAt, _mixSamples, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
                mixDataAt += NETWORK_BUFFER_LENGTH_BYTES_STEREO;

                // Send stream properties
                bool hasReverb = false;
                float reverbTime, wetLevel;
                // find reverb properties
                for (int i = 0; i < _zoneReverbSettings.size(); ++i) {
                    AudioMixerClientData* data = static_cast<AudioMixerClientData*>(node->getLinkedData());
                    glm::vec3 streamPosition = data->getAvatarAudioStream()->getPosition();
                    if (_audioZones[_zoneReverbSettings[i].zone].contains(streamPosition)) {
                        hasReverb = true;
                        reverbTime = _zoneReverbSettings[i].reverbTime;
                        wetLevel = _zoneReverbSettings[i].wetLevel;
                        break;
                    }
                }
                AvatarAudioStream* stream = nodeData->getAvatarAudioStream();
                bool dataChanged = (stream->hasReverb() != hasReverb) ||
                    (stream->hasReverb() && (stream->getRevebTime() != reverbTime ||
                                             stream->getWetLevel() != wetLevel));
                if (dataChanged) {
                    // Update stream
                    if (hasReverb) {
                        stream->setReverb(reverbTime, wetLevel);
                    } else {
                        stream->clearReverb();
                    }
                }

                // Send at change or every so often
                float CHANCE_OF_SEND = 0.01f;
                bool sendData = dataChanged || (randFloat() < CHANCE_OF_SEND);

                if (sendData) {
                    int numBytesEnvPacketHeader = populatePacketHeader(clientEnvBuffer, PacketTypeAudioEnvironment);
                    char* envDataAt = clientEnvBuffer + numBytesEnvPacketHeader;

                    unsigned char bitset = 0;
                    if (hasReverb) {
                        setAtBit(bitset, HAS_REVERB_BIT);
                    }

                    memcpy(envDataAt, &bitset, sizeof(unsigned char));
                    envDataAt += sizeof(unsigned char);

                    if (hasReverb) {
                        memcpy(envDataAt, &reverbTime, sizeof(float));
                        envDataAt += sizeof(float);
                        memcpy(envDataAt, &wetLevel, sizeof(float));
                        envDataAt += sizeof(float);
                    }
                    nodeList->writeDatagram(clientEnvBuffer, envDataAt - clientEnvBuffer, node);
                }
            } else {
                // pack header
                int numBytesPacketHeader = populatePacketHeader(clientMixBuffer, PacketTypeSilentAudioFrame);

@@ -826,6 +830,9 @@ void AudioMixer::run() {
                memcpy(mixDataAt, &numSilentSamples, sizeof(quint16));
                mixDataAt += sizeof(quint16);
            }

            // Send audio environment
            sendAudioEnvironmentPacket(node);

            // send mixed audio packet
            nodeList->writeDatagram(clientMixBuffer, mixDataAt - clientMixBuffer, node);

@@ -49,6 +49,9 @@ private:

    /// prepares and sends a mix to one Node
    int prepareMixForListeningNode(Node* node);

    /// Send Audio Environment packet for a single node
    void sendAudioEnvironmentPacket(SharedNodePointer node);

    // used on a per stream basis to run the filter on before mixing, large enough to handle the historical
    // data from a phase delay as well as an entire network buffer
412  examples/libraries/walkApi.js  (new file)
@@ -0,0 +1,412 @@
//
//  walkObjects.js
//
//  version 1.001
//
//  Created by David Wooldridge, Autumn 2014
//
//  Motion, state and Transition objects for use by the walk.js script v1.1
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// constructor for the Motion object
Motion = function() {

    this.setGender = function(gender) {

        this.avatarGender = gender;

        switch(this.avatarGender) {

            case MALE:
                this.selWalk = walkAssets.maleStandardWalk;
                this.selStand = walkAssets.maleStandOne;
                this.selFlyUp = walkAssets.maleFlyingUp;
                this.selFly = walkAssets.maleFlying;
                this.selFlyDown = walkAssets.maleFlyingDown;
                this.selSideStepLeft = walkAssets.maleSideStepLeft;
                this.selSideStepRight = walkAssets.maleSideStepRight;
                this.curAnim = this.selStand;
                return;

            case FEMALE:
                this.selWalk = walkAssets.femaleStandardWalk;
                this.selStand = walkAssets.femaleStandOne;
                this.selFlyUp = walkAssets.femaleFlyingUp;
                this.selFly = walkAssets.femaleFlying;
                this.selFlyDown = walkAssets.femaleFlyingDown;
                this.selSideStepLeft = walkAssets.femaleSideStepLeft;
                this.selSideStepRight = walkAssets.femaleSideStepRight;
                this.curAnim = this.selStand;
                return;
        }
    }

    this.hydraCheck = function() {

        // function courtesy of Thijs Wenker, frisbee.js
        var numberOfButtons = Controller.getNumberOfButtons();
        var numberOfTriggers = Controller.getNumberOfTriggers();
        var numberOfSpatialControls = Controller.getNumberOfSpatialControls();
        var controllersPerTrigger = numberOfSpatialControls / numberOfTriggers;
        var hydrasConnected = (numberOfButtons == 12 && numberOfTriggers == 2 && controllersPerTrigger == 2);
        return hydrasConnected;
    }

    // settings
    this.armsFree = this.hydraCheck(); // automatically set to true when Hydras are connected - temporary fix
    this.makesFootStepSounds = true;
    this.avatarGender = MALE;
    this.motionPitchMax = 60;
    this.motionRollMax = 40;

    // timing
    this.frameStartTime = 0; // used for measuring frame execution times
    this.frameExecutionTimeMax = 0; // keep track of the longest frame execution time
    this.cumulativeTime = 0.0;
    this.lastWalkStartTime = 0;

    // selected animations
    this.selWalk = walkAssets.maleStandardWalk;
    this.selStand = walkAssets.maleStandOne;
    this.selFlyUp = walkAssets.maleFlyingUp;
    this.selFly = walkAssets.maleFlying;
    this.selFlyDown = walkAssets.maleFlyingDown;
    this.selSideStepLeft = walkAssets.maleSideStepLeft;
    this.selSideStepRight = walkAssets.maleSideStepRight;

    // the currently selected animation, joint and transition
    this.curAnim = this.selStand;
    this.curJointIndex = 0;
    this.curTransition = null;

    // zero out avi's joints, curl the fingers nicely then take some measurements
    this.avatarJointNames = MyAvatar.getJointNames();
    if (!this.armsFree) {

        for (var i = 0; i < this.avatarJointNames.length; i++) {

            if (i > 17 && i < 34) {
                // left hand fingers
                MyAvatar.setJointData(this.avatarJointNames[i], Quat.fromPitchYawRollDegrees(16, 0, 0));
            } else if (i > 33 && i < 38) {
                // left hand thumb
                MyAvatar.setJointData(this.avatarJointNames[i], Quat.fromPitchYawRollDegrees(4, 0, 0));
            } else if (i > 41 && i < 58) {
                // right hand fingers
                MyAvatar.setJointData(this.avatarJointNames[i], Quat.fromPitchYawRollDegrees(16, 0, 0));
            } else if (i > 57 && i < 62) {
                // right hand thumb
                MyAvatar.setJointData(this.avatarJointNames[i], Quat.fromPitchYawRollDegrees(4, 0, 0));
            } else {
                // zero out the remaining joints
                MyAvatar.clearJointData(this.avatarJointNames[i]);
            }
        }
    }

    this.footRPos = MyAvatar.getJointPosition("RightFoot");
    this.hipsToFeet = MyAvatar.getJointPosition("Hips").y - this.footRPos.y;

    // walkwheel (foot / ground speed matching)
    this.direction = FORWARDS;
    this.nextStep = RIGHT;
    this.nFrames = 0;
    this.strideLength = this.selWalk.calibration.strideLengthForwards;
    this.walkWheelPos = 0;

    this.advanceWalkWheel = function(angle) {
        this.walkWheelPos += angle;
        if (this.walkWheelPos >= 360) {
            this.walkWheelPos = this.walkWheelPos % 360;
        }
    }

    // last frame history
    this.lastDirection = 0;
    this.lastVelocity = 0;
    this.lastStrideLength = 0; // kept for use during transitions

}; // end Motion constructor

// finite state machine
state = (function () {

    return {

        // the finite list of states
        STANDING: 1,
        WALKING: 2,
        SIDE_STEP: 3,
        FLYING: 4,
        EDIT_WALK_STYLES: 5,
        EDIT_WALK_TWEAKS: 6,
        EDIT_WALK_JOINTS: 7,
        EDIT_STANDING: 8,
        EDIT_FLYING: 9,
        EDIT_FLYING_UP: 10,
        EDIT_FLYING_DOWN: 11,
        EDIT_SIDESTEP_LEFT: 12,
        EDIT_SIDESTEP_RIGHT: 14,
        currentState: 1, // STANDING - object-literal 'this' cannot reference sibling keys here

        // status vars
        powerOn: true,
        minimised: true,
        editing: false,
        editingTranslation: false,

        setInternalState: function(newInternalState) {

            switch (newInternalState) {

                case this.WALKING:
                    this.currentState = this.WALKING;
                    this.editing = false;
                    motion.lastWalkStartTime = new Date().getTime();
                    walkInterface.updateMenu();
                    return;

                case this.FLYING:
                    this.currentState = this.FLYING;
                    this.editing = false;
                    motion.lastWalkStartTime = 0;
                    walkInterface.updateMenu();
                    return;

                case this.SIDE_STEP:
                    this.currentState = this.SIDE_STEP;
                    this.editing = false;
                    motion.lastWalkStartTime = new Date().getTime();
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_WALK_STYLES:
                    this.currentState = this.EDIT_WALK_STYLES;
                    this.editing = true;
                    motion.lastWalkStartTime = new Date().getTime();
                    motion.curAnim = motion.selWalk;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_WALK_TWEAKS:
                    this.currentState = this.EDIT_WALK_TWEAKS;
                    this.editing = true;
                    motion.lastWalkStartTime = new Date().getTime();
                    motion.curAnim = motion.selWalk;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_WALK_JOINTS:
                    this.currentState = this.EDIT_WALK_JOINTS;
                    this.editing = true;
                    motion.lastWalkStartTime = new Date().getTime();
                    motion.curAnim = motion.selWalk;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_STANDING:
                    this.currentState = this.EDIT_STANDING;
                    this.editing = true;
                    motion.lastWalkStartTime = 0;
                    motion.curAnim = motion.selStand;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_SIDESTEP_LEFT:
                    this.currentState = this.EDIT_SIDESTEP_LEFT;
                    this.editing = true;
                    motion.lastWalkStartTime = new Date().getTime();
                    motion.curAnim = motion.selSideStepLeft;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_SIDESTEP_RIGHT:
                    this.currentState = this.EDIT_SIDESTEP_RIGHT;
                    this.editing = true;
                    motion.lastWalkStartTime = new Date().getTime();
                    motion.curAnim = motion.selSideStepRight;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_FLYING:
                    this.currentState = this.EDIT_FLYING;
                    this.editing = true;
                    motion.lastWalkStartTime = 0;
                    motion.curAnim = motion.selFly;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_FLYING_UP:
                    this.currentState = this.EDIT_FLYING_UP;
                    this.editing = true;
                    motion.lastWalkStartTime = 0;
                    motion.curAnim = motion.selFlyUp;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_FLYING_DOWN:
                    this.currentState = this.EDIT_FLYING_DOWN;
                    this.editing = true;
                    motion.lastWalkStartTime = 0;
                    motion.curAnim = motion.selFlyDown;
                    walkInterface.updateMenu();
                    return;

                case this.STANDING:
                default:
                    this.currentState = this.STANDING;
                    this.editing = false;
                    motion.lastWalkStartTime = 0;
                    motion.curAnim = motion.selStand;
                    walkInterface.updateMenu();

                    // initialisation - runs at script startup only
                    if (motion.strideLength === 0) {

                        motion.setGender(MALE);
                        if (motion.direction === BACKWARDS) {
                            motion.strideLength = motion.selWalk.calibration.strideLengthBackwards;
                        } else {
                            motion.strideLength = motion.selWalk.calibration.strideLengthForwards;
                        }
                    }
                    return;
            }
        }
    }
})(); // end state object literal

// constructor for animation Transition
Transition = function(lastAnimation, nextAnimation, reachPoses, transitionDuration, easingLower, easingUpper) {

    this.lastAnim = lastAnimation; // name of last animation
    this.nextAnimation = nextAnimation; // name of next animation
    if (lastAnimation === motion.selWalk ||
        nextAnimation === motion.selSideStepLeft ||
        nextAnimation === motion.selSideStepRight) {
        // boolean - is the last animation a walking animation?
        this.walkingAtStart = true;
    } else {
        this.walkingAtStart = false;
    }
    if (nextAnimation === motion.selWalk ||
        nextAnimation === motion.selSideStepLeft ||
        nextAnimation === motion.selSideStepRight) {
        // boolean - is the next animation a walking animation?
        this.walkingAtEnd = true;
    } else {
        this.walkingAtEnd = false;
    }
    this.reachPoses = reachPoses; // placeholder / stub: array of reach poses for squash and stretch techniques
    this.transitionDuration = transitionDuration; // length of transition (seconds)
    this.easingLower = easingLower; // Bezier curve handle (normalised)
    this.easingUpper = easingUpper; // Bezier curve handle (normalised)
    this.startTime = new Date().getTime(); // starting timestamp (milliseconds)
    this.progress = 0; // how far are we through the transition?
    this.walkWheelIncrement = 3; // how much to turn the walkwheel each frame when transitioning to / from walking
    this.walkWheelAdvance = 0; // how many degrees the walk wheel has been advanced during the transition
    this.walkStopAngle = 0; // what angle should we stop the walk cycle?

}; // end Transition constructor
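For illustration, a transition from standing into walking might be created like this (a hypothetical sketch; the actual call sites are in walk.js, and the easing handles shown are made-up values):

    // Hypothetical: ease from the current stand pose into the selected walk
    // over 0.25 s, with normalised Bezier handles shaping the blend curve.
    motion.curTransition = new Transition(
        motion.selStand,        // lastAnimation
        motion.selWalk,         // nextAnimation
        [],                     // reachPoses (stub in this version)
        0.25,                   // transitionDuration, seconds
        {x: 0.5, y: 0.0},       // easingLower Bezier handle
        {x: 0.5, y: 1.0});      // easingUpper Bezier handle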
walkAssets = (function () {

    // path to the sounds used for the footsteps
    var _pathToSounds = 'https://s3.amazonaws.com/hifi-public/sounds/Footsteps/';

    // read in the sounds
    var _footsteps = [];
    _footsteps.push(new Sound(_pathToSounds + "FootstepW2Left-12db.wav"));
    _footsteps.push(new Sound(_pathToSounds + "FootstepW2Right-12db.wav"));
    _footsteps.push(new Sound(_pathToSounds + "FootstepW3Left-12db.wav"));
    _footsteps.push(new Sound(_pathToSounds + "FootstepW3Right-12db.wav"));
    _footsteps.push(new Sound(_pathToSounds + "FootstepW5Left-12db.wav"));
    _footsteps.push(new Sound(_pathToSounds + "FootstepW5Right-12db.wav"));

    // load the animation datafiles
    Script.include(pathToAssets + "animations/dd-female-standard-walk-animation.js");
    Script.include(pathToAssets + "animations/dd-female-flying-up-animation.js");
    Script.include(pathToAssets + "animations/dd-female-flying-animation.js");
    Script.include(pathToAssets + "animations/dd-female-flying-down-animation.js");
    Script.include(pathToAssets + "animations/dd-female-standing-one-animation.js");
    Script.include(pathToAssets + "animations/dd-female-sidestep-left-animation.js");
    Script.include(pathToAssets + "animations/dd-female-sidestep-right-animation.js");
    Script.include(pathToAssets + "animations/dd-male-standard-walk-animation.js");
    Script.include(pathToAssets + "animations/dd-male-flying-up-animation.js");
    Script.include(pathToAssets + "animations/dd-male-flying-animation.js");
    Script.include(pathToAssets + "animations/dd-male-flying-down-animation.js");
    Script.include(pathToAssets + "animations/dd-male-standing-one-animation.js");
    Script.include(pathToAssets + "animations/dd-male-sidestep-left-animation.js");
    Script.include(pathToAssets + "animations/dd-male-sidestep-right-animation.js");

    // read in the animation files
    var _FemaleStandardWalkFile = new FemaleStandardWalk();
    var _femaleStandardWalk = _FemaleStandardWalkFile.loadAnimation();
    var _FemaleFlyingUpFile = new FemaleFlyingUp();
    var _femaleFlyingUp = _FemaleFlyingUpFile.loadAnimation();
    var _FemaleFlyingFile = new FemaleFlying();
    var _femaleFlying = _FemaleFlyingFile.loadAnimation();
    var _FemaleFlyingDownFile = new FemaleFlyingDown();
    var _femaleFlyingDown = _FemaleFlyingDownFile.loadAnimation();
    var _FemaleStandOneFile = new FemaleStandingOne();
    var _femaleStandOne = _FemaleStandOneFile.loadAnimation();
    var _FemaleSideStepLeftFile = new FemaleSideStepLeft();
    var _femaleSideStepLeft = _FemaleSideStepLeftFile.loadAnimation();
    var _FemaleSideStepRightFile = new FemaleSideStepRight();
    var _femaleSideStepRight = _FemaleSideStepRightFile.loadAnimation();
    var _MaleStandardWalkFile = new MaleStandardWalk(filter);
    var _maleStandardWalk = _MaleStandardWalkFile.loadAnimation();
    var _MaleFlyingUpFile = new MaleFlyingUp();
    var _maleFlyingUp = _MaleFlyingUpFile.loadAnimation();
    var _MaleFlyingFile = new MaleFlying();
    var _maleFlying = _MaleFlyingFile.loadAnimation();
    var _MaleFlyingDownFile = new MaleFlyingDown();
    var _maleFlyingDown = _MaleFlyingDownFile.loadAnimation();
    var _MaleStandOneFile = new MaleStandingOne();
    var _maleStandOne = _MaleStandOneFile.loadAnimation();
    var _MaleSideStepLeftFile = new MaleSideStepLeft();
    var _maleSideStepLeft = _MaleSideStepLeftFile.loadAnimation();
    var _MaleSideStepRightFile = new MaleSideStepRight();
    var _maleSideStepRight = _MaleSideStepRightFile.loadAnimation();

    return {

        // expose the sound assets
        footsteps: _footsteps,

        // expose the animation assets
        femaleStandardWalk: _femaleStandardWalk,
        femaleFlyingUp: _femaleFlyingUp,
        femaleFlying: _femaleFlying,
        femaleFlyingDown: _femaleFlyingDown,
        femaleStandOne: _femaleStandOne,
        femaleSideStepLeft: _femaleSideStepLeft,
        femaleSideStepRight: _femaleSideStepRight,
        maleStandardWalk: _maleStandardWalk,
        maleFlyingUp: _maleFlyingUp,
        maleFlying: _maleFlying,
        maleFlyingDown: _maleFlyingDown,
        maleStandOne: _maleStandOne,
        maleSideStepLeft: _maleSideStepLeft,
        maleSideStepRight: _maleSideStepRight
    }
})();
225  examples/libraries/walkFilters.js  (new file)
@@ -0,0 +1,225 @@
//
//  walkFilters.js
//
//  version 1.001
//
//  Created by David Wooldridge, Autumn 2014
//
//  Provides a variety of filters for use by the walk.js script v1.1
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

AveragingFilter = function(length) {

    this.pastValues = [];

    for (var i = 0; i < length; i++) {
        this.pastValues.push(0);
    }

    // single arg is the nextInputValue
    this.process = function() {

        if (this.pastValues.length === 0 && arguments[0]) {
            return arguments[0];
        } else if (arguments[0]) {
            // apply quick and simple LP filtering
            this.pastValues.push(arguments[0]);
            this.pastValues.shift();
            var nextOutputValue = 0;
            for (var ea in this.pastValues) nextOutputValue += this.pastValues[ea];
            return nextOutputValue / this.pastValues.length;
        } else {
            return 0;
        }
    };
};
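The averaging (moving-mean) filter simply returns the mean of the last `length` inputs. A quick usage sketch, with hypothetical values:

    // Smooth a noisy per-frame reading over the last 10 frames.
    var speedFilter = new AveragingFilter(10);
    // each frame, feed in the raw value and use the smoothed result:
    var smoothedSpeed = speedFilter.process(rawSpeed); // rawSpeed: any per-frame scalar

Note that process() tests its argument for truthiness rather than for undefined, so a zero input yields an immediate 0 rather than a decaying average.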
// 2nd order Butterworth LP filter - calculate coeffs here: http://www-users.cs.york.ac.uk/~fisher/mkfilter/trad.html
// provides LP filtering with a more stable frequency / phase response
ButterworthFilter = function(cutOff) {

    // coefficients hard-coded for a cut off frequency of 5Hz (the cutOff parameter is currently unused)
    this.gain = 20.20612010;
    this.coeffOne = -0.4775922501;
    this.coeffTwo = 1.2796324250;

    // initialise the arrays
    this.xv = [];
    this.yv = [];
    for (var i = 0; i < 3; i++) {
        this.xv.push(0);
        this.yv.push(0);
    }

    // process values
    this.process = function(nextInputValue) {

        this.xv[0] = this.xv[1];
        this.xv[1] = this.xv[2];
        this.xv[2] = nextInputValue / this.gain;

        this.yv[0] = this.yv[1];
        this.yv[1] = this.yv[2];
        this.yv[2] = (this.xv[0] + this.xv[2]) +
                      2 * this.xv[1] +
                     (this.coeffOne * this.yv[0]) +
                     (this.coeffTwo * this.yv[1]);

        return this.yv[2];
    };
}; // end Butterworth filter constructor
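Restated from the code above, the recurrence is the standard second-order Butterworth difference equation (G is the gain, c1 and c2 the feedback coefficients):

    y_n = \frac{x_n + 2x_{n-1} + x_{n-2}}{G} + c_1\,y_{n-2} + c_2\,y_{n-1}

with G = 20.20612010, c_1 = -0.4775922501 and c_2 = 1.2796324250, which match mkfilter's output for a 2nd-order low-pass at 5 Hz (presumably relative to the script's frame rate as the sample rate; the source does not state this explicitly).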
// Add harmonics to a given sine wave to form square, sawtooth or triangle waves
// Geometric wave synthesis fundamentals taken from: http://hyperphysics.phy-astr.gsu.edu/hbase/audio/geowv.html
WaveSynth = function(waveShape, numHarmonics, smoothing) {

    this.numHarmonics = numHarmonics;
    this.waveShape = waveShape;
    this.averagingFilter = new AveragingFilter(smoothing);

    // NB: frequency in radians
    this.shapeWave = function(frequency) {

        // make some shapes
        var harmonics = 0;
        var multiplier = 0;
        var iterations = this.numHarmonics * 2 + 2;
        if (this.waveShape === TRIANGLE) {
            iterations++;
        }

        for (var n = 2; n < iterations; n++) {

            switch (this.waveShape) {

                case SAWTOOTH: {
                    multiplier = 1 / n;
                    harmonics += multiplier * Math.sin(n * frequency);
                    break;
                }

                case TRIANGLE: {
                    if (n % 2 === 1) {
                        multiplier = 1 / (n * n);
                        // multiply (4n-1)th harmonics by -1
                        if (n === 3 || n === 7 || n === 11 || n === 15) {
                            multiplier *= -1;
                        }
                        harmonics += multiplier * Math.sin(n * frequency);
                    }
                    break;
                }

                case SQUARE: {
                    if (n % 2 === 1) {
                        multiplier = 1 / n;
                        harmonics += multiplier * Math.sin(n * frequency);
                    }
                    break;
                }
            }
        }

        // smooth the result and return
        return this.averagingFilter.process(harmonics);
    };
};
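Each call sums the Fourier-series harmonics of the requested shape above the fundamental: 1/n amplitudes for sawtooth, odd-n 1/n for square, and odd-n 1/n² with alternating signs for triangle. A usage sketch (the argument values are illustrative; TRIANGLE and the motion global are assumed to be defined by walk.js):

    // Shape the walk cycle's phase into a softened triangle wave,
    // smoothing the summed harmonics with a 5-sample averaging filter.
    var synth = new WaveSynth(TRIANGLE, 8, 5);
    var wavePhase = filter.degToRad(motion.walkWheelPos); // cycle position in radians
    var shaped = synth.shapeWave(wavePhase);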
// Create a wave shape by summing pre-calculated sinusoidal harmonics
HarmonicsFilter = function(magnitudes, phaseAngles) {

    this.magnitudes = magnitudes;
    this.phaseAngles = phaseAngles;

    this.calculate = function(twoPiFT) {

        var harmonics = 0;
        var numHarmonics = magnitudes.length;

        for (var n = 0; n < numHarmonics; n++) {
            harmonics += this.magnitudes[n] * Math.cos(n * twoPiFT - this.phaseAngles[n]);
        }
        return harmonics;
    };
};
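calculate() evaluates a truncated Fourier series at phase 2πft: the sum over n of magnitudes[n] · cos(n · 2πft − phaseAngles[n]). A brief sketch; the coefficient values below are invented for illustration, whereas real ones would be measured from animation data:

    // Reconstruct a periodic joint rotation from pre-measured harmonics.
    var hipFilter = new HarmonicsFilter(
        [0.0, 1.0, 0.35, 0.12],   // magnitudes (n = 0 is the DC term)
        [0.0, 0.0, 1.57, 0.8]);   // phase angles, radians
    var angle = hipFilter.calculate(filter.degToRad(motion.walkWheelPos));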
// the main filter object literal
filter = (function() {

    // Bezier private functions
    function _B1(t) { return t * t * t; }
    function _B2(t) { return 3 * t * t * (1 - t); }
    function _B3(t) { return 3 * t * (1 - t) * (1 - t); }
    function _B4(t) { return (1 - t) * (1 - t) * (1 - t); }

    return {

        // helper methods
        degToRad: function(degrees) {
            return degrees * Math.PI / 180;
        },

        radToDeg: function(radians) {
            return radians * 180 / Math.PI;
        },

        // these filters need instantiating, as they hold arrays of previous values
        createAveragingFilter: function(length) {
            return new AveragingFilter(length);
        },

        createButterworthFilter: function(cutoff) {
            return new ButterworthFilter(cutoff);
        },

        createWaveSynth: function(waveShape, numHarmonics, smoothing) {
            return new WaveSynth(waveShape, numHarmonics, smoothing);
        },

        createHarmonicsFilter: function(magnitudes, phaseAngles) {
            return new HarmonicsFilter(magnitudes, phaseAngles);
        },

        // the following filters do not need separate instances, as they hold no previous values
        bezier: function(percent, C1, C2, C3, C4) {
            // Bezier functions for more natural transitions
            // based on a script by Dan Pupius (www.pupius.net): http://13thparallel.com/archive/bezier-curves/
            var pos = {x: 0, y: 0};
            pos.x = C1.x * _B1(percent) + C2.x * _B2(percent) + C3.x * _B3(percent) + C4.x * _B4(percent);
            pos.y = C1.y * _B1(percent) + C2.y * _B2(percent) + C3.y * _B3(percent) + C4.y * _B4(percent);
            return pos;
        },

        // simple clipping filter (clips the bottom of the wave only; special case for the hips y-axis skeleton offset)
        clipTrough: function(inputValue, peak, strength) {
            var outputValue = inputValue * strength;
            if (outputValue < -peak) {
                outputValue = -peak;
            }
            return outputValue;
        }
    }
})();
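Note how the basis functions are ordered: _B1(t) = t³ weights C1, so at percent = 1 the result is C1 and at percent = 0 it is C4; the curve is traversed from C4 towards C1 as percent grows. A usage sketch with made-up control points:

    // Ease a transition: slow start near C4, fast finish near C1.
    var eased = filter.bezier(transitionProgress,   // transitionProgress in [0, 1]
        {x: 1, y: 1},       // C1: curve end
        {x: 1, y: 0.1},     // C2: easing handle (easingUpper)
        {x: 0.9, y: 0},     // C3: easing handle (easingLower)
        {x: 0, y: 0});      // C4: curve start
    var blendFactor = eased.y;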
2690  examples/libraries/walkInterface.js  (new file; diff suppressed because it is too large)
6177  examples/walk.js  (diff suppressed because it is too large)
@@ -724,11 +724,11 @@ void Application::paintGL() {
        displaySide(*whichCamera);
        glPopMatrix();

        if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
            renderRearViewMirror(_mirrorViewRect);
        } else if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
        if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
            _rearMirrorTools->render(true);
        } else if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
            renderRearViewMirror(_mirrorViewRect);
        }

        _glowEffect.render();

@@ -786,7 +786,7 @@ void Application::updateProjectionMatrix(Camera& camera, bool updateViewFrustum)
    // Tell our viewFrustum about this change, using the application camera
    if (updateViewFrustum) {
        loadViewFrustum(camera, _viewFrustum);
        computeOffAxisFrustum(left, right, bottom, top, nearVal, farVal, nearClipPlane, farClipPlane);
        _viewFrustum.computeOffAxisFrustum(left, right, bottom, top, nearVal, farVal, nearClipPlane, farClipPlane);

        // If we're in Display Frustum mode, then we want to use the slightly adjusted near/far clip values of the
        // _viewFrustumOffsetCamera, so that we can see more of the application content in the application's frustum

@@ -2009,25 +2009,17 @@ void Application::init() {

void Application::closeMirrorView() {
    if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
        Menu::getInstance()->triggerOption(MenuOption::Mirror);;
        Menu::getInstance()->triggerOption(MenuOption::Mirror);
    }
}

void Application::restoreMirrorView() {
    if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
        Menu::getInstance()->triggerOption(MenuOption::Mirror);;
    }

    if (!Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
        Menu::getInstance()->triggerOption(MenuOption::FullscreenMirror);
    }
}

void Application::shrinkMirrorView() {
    if (!Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
        Menu::getInstance()->triggerOption(MenuOption::Mirror);;
    }

    if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
        Menu::getInstance()->triggerOption(MenuOption::FullscreenMirror);
    }

@@ -4314,8 +4306,6 @@ bool Application::isVSyncOn() const {
    if (wglewGetExtension("WGL_EXT_swap_control")) {
        int swapInterval = wglGetSwapIntervalEXT();
        return (swapInterval > 0);
    } else {
        return true;
    }
#elif defined(Q_OS_LINUX)
    // TODO: write the proper code for linux

@@ -4326,10 +4316,9 @@ bool Application::isVSyncOn() const {
    } else {
        return true;
    }
    */
#else
    return true;
    */
#endif
    return true;
}

bool Application::isVSyncEditable() const {

@@ -4344,7 +4333,6 @@ bool Application::isVSyncEditable() const {
        return true;
    }
    */
#else
#endif
    return false;
}
@@ -516,6 +516,33 @@ void Audio::initGverb() {
    gverb_set_taillevel(_gverb, DB_CO(_reverbOptions->getTailLevel()));
}

void Audio::updateGverbOptions() {
    bool reverbChanged = false;
    if (_receivedAudioStream.hasReverb()) {

        if (_zoneReverbOptions.getReverbTime() != _receivedAudioStream.getRevebTime()) {
            _zoneReverbOptions.setReverbTime(_receivedAudioStream.getRevebTime());
            reverbChanged = true;
        }
        if (_zoneReverbOptions.getWetLevel() != _receivedAudioStream.getWetLevel()) {
            _zoneReverbOptions.setWetLevel(_receivedAudioStream.getWetLevel());
            reverbChanged = true;
        }

        if (_reverbOptions != &_zoneReverbOptions) {
            _reverbOptions = &_zoneReverbOptions;
            reverbChanged = true;
        }
    } else if (_reverbOptions != &_scriptReverbOptions) {
        _reverbOptions = &_scriptReverbOptions;
        reverbChanged = true;
    }

    if (reverbChanged) {
        initGverb();
    }
}

void Audio::setReverbOptions(const AudioEffectOptions* options) {
    // Save the new options
    _scriptReverbOptions.setMaxRoomSize(options->getMaxRoomSize());

@@ -547,14 +574,14 @@ void Audio::addReverb(int16_t* samplesData, int numSamples, QAudioFormat& audioF
    gverb_do(_gverb, value, &lValue, &rValue);

    // Mix, accounting for clipping, the left and right channels. Ignore the rest.
    for (unsigned int j = sample; j < sample + audioFormat.channelCount(); j++) {
    for (int j = sample; j < sample + audioFormat.channelCount(); j++) {
        if (j == sample) {
            // left channel
            int lResult = glm::clamp((int)(samplesData[j] * dryFraction + lValue * wetFraction), -32768, 32767);
            int lResult = glm::clamp((int)(samplesData[j] * dryFraction + lValue * wetFraction), MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
            samplesData[j] = (int16_t)lResult;
        } else if (j == (sample + 1)) {
            // right channel
            int rResult = glm::clamp((int)(samplesData[j] * dryFraction + rValue * wetFraction), -32768, 32767);
            int rResult = glm::clamp((int)(samplesData[j] * dryFraction + rValue * wetFraction), MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
            samplesData[j] = (int16_t)rResult;
        } else {
            // ignore channels above 2

@@ -563,6 +590,60 @@ void Audio::addReverb(int16_t* samplesData, int numSamples, QAudioFormat& audioF
    }
}

void Audio::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
    bool hasEcho = Menu::getInstance()->isOptionChecked(MenuOption::EchoLocalAudio);
    // If there is server echo, reverb will be applied to the received audio stream, so there is no need to apply it here.
    bool hasLocalReverb = (_reverb || _receivedAudioStream.hasReverb()) &&
                          !Menu::getInstance()->isOptionChecked(MenuOption::EchoServerAudio);
    if (_muted || !_audioOutput || (!hasEcho && !hasLocalReverb)) {
        return;
    }

    // if this person wants local loopback, add that to the locally injected audio;
    // if there is reverb, apply it to the local audio and subtract the original samples

    if (!_loopbackOutputDevice && _loopbackAudioOutput) {
        // we didn't have the loopback output device going so set that up now
        _loopbackOutputDevice = _loopbackAudioOutput->start();
    }

    QByteArray loopBackByteArray(inputByteArray);
    if (_inputFormat != _outputFormat) {
        float loopbackOutputToInputRatio = (_outputFormat.sampleRate() / (float) _inputFormat.sampleRate()) *
            (_outputFormat.channelCount() / _inputFormat.channelCount());
        loopBackByteArray.resize(inputByteArray.size() * loopbackOutputToInputRatio);
        loopBackByteArray.fill(0);
        linearResampling(reinterpret_cast<int16_t*>(inputByteArray.data()),
                         reinterpret_cast<int16_t*>(loopBackByteArray.data()),
                         inputByteArray.size() / sizeof(int16_t), loopBackByteArray.size() / sizeof(int16_t),
                         _inputFormat, _outputFormat);
    }

    if (hasLocalReverb) {
        QByteArray loopbackCopy;
        if (!hasEcho) {
            loopbackCopy = loopBackByteArray;
        }

        int16_t* loopbackSamples = reinterpret_cast<int16_t*>(loopBackByteArray.data());
        int numLoopbackSamples = loopBackByteArray.size() / sizeof(int16_t);
        updateGverbOptions();
        addReverb(loopbackSamples, numLoopbackSamples, _outputFormat);

        if (!hasEcho) {
            int16_t* loopbackCopySamples = reinterpret_cast<int16_t*>(loopbackCopy.data());
            for (int i = 0; i < numLoopbackSamples; ++i) {
                loopbackSamples[i] = glm::clamp((int)loopbackSamples[i] - loopbackCopySamples[i],
                                                MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
            }
        }
    }

    if (_loopbackOutputDevice) {
        _loopbackOutputDevice->write(loopBackByteArray);
    }
}

void Audio::handleAudioInput() {
    static char audioDataPacket[MAX_PACKET_SIZE];
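The subtraction step in handleLocalEchoAndReverb() above is what makes "reverb without echo" work: the dry signal is reverberated in place, then the pre-reverb copy is subtracted so only the wet tail reaches the speakers. The same idea in a short standalone sketch (plain JS arrays standing in for the int16 buffers; the clamp bounds assume the int16 range used by MIN_SAMPLE_VALUE / MAX_SAMPLE_VALUE):

    // Keep only the wet (reverb) component of a processed buffer.
    function wetOnly(processed, dryCopy) {
        var out = new Array(processed.length);
        for (var i = 0; i < processed.length; i++) {
            var wet = processed[i] - dryCopy[i];                  // remove the dry signal
            out[i] = Math.max(-32768, Math.min(32767, wet));      // clamp to int16
        }
        return out;
    }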
@@ -607,34 +688,8 @@ void Audio::handleAudioInput() {

        _inputFrameBuffer.copyFrames(1, inputFrameCount, inputFrameData, true /*copy out*/);
    }

    if (Menu::getInstance()->isOptionChecked(MenuOption::EchoLocalAudio) && !_muted && _audioOutput) {
        // if this person wants local loopback add that to the locally injected audio

        if (!_loopbackOutputDevice && _loopbackAudioOutput) {
            // we didn't have the loopback output device going so set that up now
            _loopbackOutputDevice = _loopbackAudioOutput->start();
        }

        if (_inputFormat == _outputFormat) {
            if (_loopbackOutputDevice) {
                _loopbackOutputDevice->write(inputByteArray);
            }
        } else {
            float loopbackOutputToInputRatio = (_outputFormat.sampleRate() / (float) _inputFormat.sampleRate())
                * (_outputFormat.channelCount() / _inputFormat.channelCount());

            QByteArray loopBackByteArray(inputByteArray.size() * loopbackOutputToInputRatio, 0);

            linearResampling((int16_t*) inputByteArray.data(), (int16_t*) loopBackByteArray.data(),
                             inputByteArray.size() / sizeof(int16_t),
                             loopBackByteArray.size() / sizeof(int16_t), _inputFormat, _outputFormat);

            if (_loopbackOutputDevice) {
                _loopbackOutputDevice->write(loopBackByteArray);
            }
        }
    }

    handleLocalEchoAndReverb(inputByteArray);

    _inputRingBuffer.writeData(inputByteArray.data(), inputByteArray.size());

@@ -971,30 +1026,7 @@ void Audio::processReceivedSamples(const QByteArray& inputBuffer, QByteArray& ou
        _desiredOutputFormat, _outputFormat);

    if (_reverb || _receivedAudioStream.hasReverb()) {
        bool reverbChanged = false;
        if (_receivedAudioStream.hasReverb()) {

            if (_zoneReverbOptions.getReverbTime() != _receivedAudioStream.getRevebTime()) {
                _zoneReverbOptions.setReverbTime(_receivedAudioStream.getRevebTime());
                reverbChanged = true;
            }
            if (_zoneReverbOptions.getWetLevel() != _receivedAudioStream.getWetLevel()) {
                _zoneReverbOptions.setWetLevel(_receivedAudioStream.getWetLevel());
                reverbChanged = true;
            }

            if (_reverbOptions != &_zoneReverbOptions) {
                _reverbOptions = &_zoneReverbOptions;
                reverbChanged = true;
            }
        } else if (_reverbOptions != &_scriptReverbOptions) {
            _reverbOptions = &_scriptReverbOptions;
            reverbChanged = true;
        }

        if (reverbChanged) {
            initGverb();
        }
        updateGverbOptions();
        addReverb((int16_t*)outputBuffer.data(), numDeviceOutputSamples, _outputFormat);
    }
}

@@ -1345,8 +1377,11 @@ void Audio::handleAudioByteArray(const QByteArray& audioByteArray, const AudioIn
        QAudioOutput* localSoundOutput = new QAudioOutput(getNamedAudioDeviceForMode(QAudio::AudioOutput, _outputAudioDeviceName), localFormat, this);

        QIODevice* localIODevice = localSoundOutput->start();
        qDebug() << "Writing" << audioByteArray.size() << "to" << localIODevice;
        localIODevice->write(audioByteArray);
        if (localIODevice) {
            localIODevice->write(audioByteArray);
        } else {
            qDebug() << "Unable to handle audio byte array. Error:" << localSoundOutput->error();
        }
    } else {
        qDebug() << "Audio::handleAudioByteArray called with an empty byte array. Sound is likely still downloading.";
    }
|
@ -272,8 +272,11 @@ private:
|
|||
|
||||
// Adds Reverb
|
||||
void initGverb();
|
||||
void updateGverbOptions();
|
||||
void addReverb(int16_t* samples, int numSamples, QAudioFormat& format);
|
||||
|
||||
void handleLocalEchoAndReverb(QByteArray& inputByteArray);
|
||||
|
||||
// Add sounds that we want the user to not hear themselves, by adding on top of mic input signal
|
||||
void addProceduralSounds(int16_t* monoInput, int numSamples);
|
||||
|
||||
|
|
|
@@ -386,7 +386,7 @@ Menu::Menu() :

#if defined(Q_OS_MAC)
#else
    QAction* vsyncAction = addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::RenderTargetFramerateVSyncOn, 0, true, this, SLOT(changeVSync()));
    addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::RenderTargetFramerateVSyncOn, 0, true, this, SLOT(changeVSync()));
#endif
}

@@ -49,7 +49,7 @@ const float PITCH_SPEED = 100.0f; // degrees/sec
const float COLLISION_RADIUS_SCALAR = 1.2f; // pertains to avatar-to-avatar collisions
const float COLLISION_RADIUS_SCALE = 0.125f;

const float MAX_WALKING_SPEED = 4.5f;
const float MAX_WALKING_SPEED = 2.5f; // human walking speed
const float MAX_BOOST_SPEED = 0.5f * MAX_WALKING_SPEED; // keyboard motor gets additive boost below this speed
const float MIN_AVATAR_SPEED = 0.05f; // speed is set to zero below this
@@ -106,7 +106,7 @@ void Batch::setInputStream(Slot startChannel, const BufferStream& stream) {
    const Buffers& buffers = stream.getBuffers();
    const Offsets& offsets = stream.getOffsets();
    const Offsets& strides = stream.getStrides();
    for (int i = 0; i < buffers.size(); i++) {
    for (unsigned int i = 0; i < buffers.size(); i++) {
        setInputBuffer(startChannel + i, buffers[i], offsets[i], strides[i]);
    }
}

@@ -31,15 +31,15 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =

    (&::gpu::GLBackend::do_glEnable),
    (&::gpu::GLBackend::do_glDisable),

    (&::gpu::GLBackend::do_glEnableClientState),
    (&::gpu::GLBackend::do_glDisableClientState),

    (&::gpu::GLBackend::do_glCullFace),
    (&::gpu::GLBackend::do_glAlphaFunc),

    (&::gpu::GLBackend::do_glDepthFunc),
    (&::gpu::GLBackend::do_glDepthMask),
    (&::gpu::GLBackend::do_glDepthFunc),
    (&::gpu::GLBackend::do_glDepthMask),
    (&::gpu::GLBackend::do_glDepthRange),

    (&::gpu::GLBackend::do_glBindBuffer),

@@ -57,18 +57,18 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
    (&::gpu::GLBackend::do_glPushMatrix),
    (&::gpu::GLBackend::do_glPopMatrix),
    (&::gpu::GLBackend::do_glMultMatrixf),
    (&::gpu::GLBackend::do_glLoadMatrixf),
    (&::gpu::GLBackend::do_glLoadIdentity),
    (&::gpu::GLBackend::do_glRotatef),
    (&::gpu::GLBackend::do_glScalef),
    (&::gpu::GLBackend::do_glTranslatef),
    (&::gpu::GLBackend::do_glLoadMatrixf),
    (&::gpu::GLBackend::do_glLoadIdentity),
    (&::gpu::GLBackend::do_glRotatef),
    (&::gpu::GLBackend::do_glScalef),
    (&::gpu::GLBackend::do_glTranslatef),

    (&::gpu::GLBackend::do_glDrawArrays),
    (&::gpu::GLBackend::do_glDrawArrays),
    (&::gpu::GLBackend::do_glDrawRangeElements),

    (&::gpu::GLBackend::do_glColorPointer),
    (&::gpu::GLBackend::do_glNormalPointer),
    (&::gpu::GLBackend::do_glTexCoordPointer),

    (&::gpu::GLBackend::do_glColorPointer),
    (&::gpu::GLBackend::do_glNormalPointer),
    (&::gpu::GLBackend::do_glTexCoordPointer),
    (&::gpu::GLBackend::do_glVertexPointer),

    (&::gpu::GLBackend::do_glVertexAttribPointer),

@@ -77,7 +77,7 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =

    (&::gpu::GLBackend::do_glColor4f),

    (&::gpu::GLBackend::do_glMaterialf),
    (&::gpu::GLBackend::do_glMaterialf),
    (&::gpu::GLBackend::do_glMaterialfv),
};

@@ -112,9 +112,8 @@ static const GLenum _elementTypeToGLType[NUM_TYPES] = {

GLBackend::GLBackend() :

    _inputFormat(0),
    _inputAttributeActivation(0),
    _needInputFormatUpdate(true),
    _inputFormat(0),

    _inputBuffersState(0),
    _inputBuffers(_inputBuffersState.size(), BufferPointer(0)),

@@ -122,7 +121,8 @@ GLBackend::GLBackend() :
    _inputBufferStrides(_inputBuffersState.size(), 0),

    _indexBuffer(0),
    _indexBufferOffset(0)
    _indexBufferOffset(0),
    _inputAttributeActivation(0)
{

}

@@ -138,7 +138,7 @@ void GLBackend::renderBatch(Batch& batch) {

    GLBackend backend;

    for (int i = 0; i < numCommands; i++) {
    for (unsigned int i = 0; i < numCommands; i++) {
        CommandCall call = _commandCalls[(*command)];
        (backend.*(call))(batch, *offset);
        command++;

@@ -203,7 +203,7 @@ void GLBackend::do_drawIndexed(Batch& batch, uint32 paramOffset) {

    GLenum glType = _elementTypeToGLType[_indexBufferType];

    glDrawElements(mode, numIndices, glType, (GLvoid*)(startIndex + _indexBufferOffset));
    glDrawElements(mode, numIndices, glType, reinterpret_cast<GLvoid*>(startIndex + _indexBufferOffset));
    CHECK_GL_ERROR();
}

@@ -265,7 +265,7 @@ void GLBackend::updateInput() {
    }

    // Manage Activation what was and what is expected now
    for (int i = 0; i < newActivation.size(); i++) {
    for (unsigned int i = 0; i < newActivation.size(); i++) {
        bool newState = newActivation[i];
        if (newState != _inputAttributeActivation[i]) {
#if defined(SUPPORT_LEGACY_OPENGL)

@@ -314,7 +314,7 @@ void GLBackend::updateInput() {
    CHECK_GL_ERROR();
    _inputBuffersState[bufferNum] = false;

    for (int i = 0; i < channel._slots.size(); i++) {
    for (unsigned int i = 0; i < channel._slots.size(); i++) {
        const Stream::Attribute& attrib = attributes.at(channel._slots[i]);
        GLuint slot = attrib._slot;
        GLuint count = attrib._element.getDimensionCount();

@@ -325,16 +325,16 @@ void GLBackend::updateInput() {
    if (slot < NUM_CLASSIC_ATTRIBS) {
        switch (slot) {
        case Stream::POSITION:
            glVertexPointer(count, type, stride, (GLvoid*)pointer);
            glVertexPointer(count, type, stride, reinterpret_cast<GLvoid*>(pointer));
            break;
        case Stream::NORMAL:
            glNormalPointer(type, stride, (GLvoid*)pointer);
            glNormalPointer(type, stride, reinterpret_cast<GLvoid*>(pointer));
            break;
        case Stream::COLOR:
            glColorPointer(count, type, stride, (GLvoid*)pointer);
            glColorPointer(count, type, stride, reinterpret_cast<GLvoid*>(pointer));
            break;
        case Stream::TEXCOORD:
            glTexCoordPointer(count, type, stride, (GLvoid*)pointer);
            glTexCoordPointer(count, type, stride, reinterpret_cast<GLvoid*>(pointer));
            break;
        };
    } else {

@@ -342,7 +342,8 @@ void GLBackend::updateInput() {
    {
#endif
        GLboolean isNormalized = attrib._element.isNormalized();
        glVertexAttribPointer(slot, count, type, isNormalized, stride, (GLvoid*)pointer);
        glVertexAttribPointer(slot, count, type, isNormalized, stride,
                              reinterpret_cast<GLvoid*>(pointer));
    }
    CHECK_GL_ERROR();
    }
@@ -8,6 +8,8 @@
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "Context.h"
#include "Resource.h"

#include <QDebug>

@@ -479,9 +479,7 @@ bool Model::findRayIntersectionAgainstSubMeshes(const glm::vec3& origin, const g
    float bestDistance = std::numeric_limits<float>::max();
    float distanceToSubMesh;
    BoxFace subMeshFace;
    BoxFace bestSubMeshFace;
    int subMeshIndex = 0;
    int bestSubMeshIndex = -1;

    // If we hit the model's box, then consider the submeshes...
    foreach(const AABox& subMeshBox, _calculatedMeshBoxes) {

@@ -489,10 +487,9 @@ bool Model::findRayIntersectionAgainstSubMeshes(const glm::vec3& origin, const g

        if (subMeshBox.findRayIntersection(origin, direction, distanceToSubMesh, subMeshFace)) {
            if (distanceToSubMesh < bestDistance) {
                bestSubMeshIndex = subMeshIndex;
                bestDistance = distanceToSubMesh;
                bestSubMeshFace = subMeshFace;
                intersectedSomething = true;
                face = subMeshFace;
                extraInfo = geometry.getModelNameOfMesh(subMeshIndex);
            }
        }
|
@ -984,7 +984,9 @@ void ApplicationOverlay::renderAudioMeter() {
|
|||
const int AUDIO_METER_X = MIRROR_VIEW_LEFT_PADDING + MUTE_ICON_SIZE + AUDIO_METER_INSET + AUDIO_METER_GAP;
|
||||
|
||||
int audioMeterY;
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
|
||||
bool boxed = Menu::getInstance()->isOptionChecked(MenuOption::Mirror) &&
|
||||
!Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror);
|
||||
if (boxed) {
|
||||
audioMeterY = MIRROR_VIEW_HEIGHT + AUDIO_METER_GAP + MUTE_ICON_PADDING;
|
||||
} else {
|
||||
audioMeterY = AUDIO_METER_GAP + MUTE_ICON_PADDING;
|
||||
|
@ -1022,9 +1024,7 @@ void ApplicationOverlay::renderAudioMeter() {
|
|||
renderCollisionOverlay(glWidget->width(), glWidget->height(), magnitude, 1.0f);
|
||||
}
|
||||
|
||||
audio->renderToolBox(MIRROR_VIEW_LEFT_PADDING + AUDIO_METER_GAP,
|
||||
audioMeterY,
|
||||
Menu::getInstance()->isOptionChecked(MenuOption::Mirror));
|
||||
audio->renderToolBox(MIRROR_VIEW_LEFT_PADDING + AUDIO_METER_GAP, audioMeterY, boxed);
|
||||
|
||||
audio->renderScope(glWidget->width(), glWidget->height());
|
||||
|
||||
|
|
|
@ -156,6 +156,9 @@ void RearMirrorTools::displayIcon(QRect bounds, QRect iconBounds, GLuint texture
|
|||
}
|
||||
glEnd();
|
||||
glPopMatrix();
|
||||
|
||||
glMatrixMode(GL_MODELVIEW);
|
||||
|
||||
glBindTexture(GL_TEXTURE_2D, 0);
|
||||
glDisable(GL_TEXTURE_2D);
|
||||
}
|
||||
|
|
|
@ -129,7 +129,7 @@ int TextRenderer::draw(int x, int y, const char* str) {
|
|||
leftBottom.x, rightTop.y, ls, tt, };
|
||||
|
||||
const int NUM_COLOR_SCALARS_PER_GLYPH = 4;
|
||||
unsigned int colorBuffer[NUM_COLOR_SCALARS_PER_GLYPH] = { compactColor, compactColor, compactColor, compactColor };
|
||||
int colorBuffer[NUM_COLOR_SCALARS_PER_GLYPH] = { compactColor, compactColor, compactColor, compactColor };
|
||||
|
||||
gpu::Buffer::Size offset = sizeof(vertexBuffer) * _numGlyphsBatched;
|
||||
gpu::Buffer::Size colorOffset = sizeof(colorBuffer) * _numGlyphsBatched;
|
||||
|
@ -181,9 +181,9 @@ TextRenderer::TextRenderer(const Properties& properties) :
|
|||
_color(properties.color),
|
||||
_glyphsBuffer(new gpu::Buffer()),
|
||||
_glyphsColorBuffer(new gpu::Buffer()),
|
||||
_numGlyphsBatched(0),
|
||||
_glyphsStreamFormat(new gpu::Stream::Format()),
|
||||
_glyphsStream(new gpu::BufferStream())
|
||||
_glyphsStream(new gpu::BufferStream()),
|
||||
_numGlyphsBatched(0)
|
||||
{
|
||||
_glyphsStreamFormat->setAttribute(gpu::Stream::POSITION, 0, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::POS_XYZ), 0);
|
||||
const int NUM_POS_COORDS = 2;
|
||||
|
|
|
@ -297,7 +297,7 @@ void Player::play() {
|
|||
_injector->setOptions(_options);
|
||||
}
|
||||
|
||||
void Player::setCurrentFrame(unsigned int currentFrame) {
|
||||
void Player::setCurrentFrame(int currentFrame) {
|
||||
if (_recording && currentFrame >= _recording->getFrameNumber()) {
|
||||
stopPlaying();
|
||||
return;
|
||||
|
@ -314,7 +314,7 @@ void Player::setCurrentFrame(unsigned int currentFrame) {
|
|||
}
|
||||
}
|
||||
|
||||
void Player::setCurrentTime(unsigned int currentTime) {
|
||||
void Player::setCurrentTime(int currentTime) {
|
||||
if (currentTime >= _recording->getLength()) {
|
||||
stopPlaying();
|
||||
return;
|
||||
|
@ -393,7 +393,7 @@ bool Player::computeCurrentFrame() {
|
|||
_currentFrame = 0;
|
||||
}
|
||||
|
||||
quint64 elapsed = glm::clamp(Player::elapsed() - _audioOffset, (qint64)0, (qint64)_recording->getLength());
|
||||
qint64 elapsed = glm::clamp(Player::elapsed() - _audioOffset, (qint64)0, (qint64)_recording->getLength());
|
||||
while(_currentFrame >= 0 &&
|
||||
_recording->getFrameTimestamp(_currentFrame) > elapsed) {
|
||||
--_currentFrame;
|
||||
|
|
|
@ -44,8 +44,8 @@ public slots:
|
|||
void loadRecording(RecordingPointer recording);
|
||||
void play();
|
||||
|
||||
void setCurrentFrame(unsigned int currentFrame);
|
||||
void setCurrentTime(unsigned int currentTime);
|
||||
void setCurrentFrame(int currentFrame);
|
||||
void setCurrentTime(int currentTime);
|
||||
|
||||
void setVolume(float volume);
|
||||
void setAudioOffset(int audioOffset);
|
||||
|
@ -87,4 +87,4 @@ private:
|
|||
bool _useSkeletonURL;
|
||||
};
|
||||
|
||||
#endif // hifi_Player_h
|
||||
#endif // hifi_Player_h
|
||||
|
|
|
@@ -209,8 +209,15 @@ bool LimitedNodeList::packetVersionAndHashMatch(const QByteArray& packet) {
        if (hashFromPacketHeader(packet) == hashForPacketAndConnectionUUID(packet, sendingNode->getConnectionSecret())) {
            return true;
        } else {
            qDebug() << "Packet hash mismatch on" << checkType << "- Sender"
            static QMultiMap<QUuid, PacketType> hashDebugSuppressMap;

            QUuid senderUUID = uuidFromPacketHeader(packet);
            if (!hashDebugSuppressMap.contains(senderUUID, checkType)) {
                qDebug() << "Packet hash mismatch on" << checkType << "- Sender"
                    << uuidFromPacketHeader(packet);

                hashDebugSuppressMap.insert(senderUUID, checkType);
            }
        }
    } else {
        static QString repeatedMessage
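The QMultiMap acts as a once-per-(sender, packet-type) log guard, so a misbehaving peer can no longer flood the log with one line per packet; the first mismatch per pair is still reported. The same pattern in a compact JS sketch (illustrative only; the real code is the Qt/C++ above):

    // Log each distinct (sender, type) pair only once.
    var seenMismatches = {};
    function logHashMismatchOnce(senderUUID, packetType) {
        var key = senderUUID + ":" + packetType;
        if (!seenMismatches[key]) {
            print("Packet hash mismatch on " + packetType + " - Sender " + senderUUID);
            seenMismatches[key] = true;
        }
    }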