spatialized audio demo for multiple local sources

Stephen Birarda 2013-01-28 18:05:16 -08:00
parent fd30c92d8b
commit 5ef81d4a11
8 changed files with 98 additions and 94 deletions

Binary file not shown.

BIN Resources/audio/jeska.raw Normal file

Binary file not shown.

audio.cpp

@@ -9,6 +9,7 @@
#include <iostream>
#include <fstream>
#include "audio.h"
#include "util.h"
bool Audio::initialized;
PaError Audio::err;
@@ -48,60 +49,57 @@ int audioCallback (const void *inputBuffer,
int16_t *outputLeft = ((int16_t **) outputBuffer)[0];
int16_t *outputRight = ((int16_t **) outputBuffer)[1];
// float yawRatio = 0;
// int numSamplesDelay = abs(floor(yawRatio * PHASE_DELAY_AT_90));
//
// if (numSamplesDelay > PHASE_DELAY_AT_90) {
// numSamplesDelay = PHASE_DELAY_AT_90;
// }
//
// int16_t *leadingBuffer = yawRatio > 0 ? outputLeft : outputRight;
// int16_t *trailingBuffer = yawRatio > 0 ? outputRight : outputLeft;
//
int16_t *samplesToQueue = new int16_t[BUFFER_LENGTH_BYTES];
memset(outputLeft, 0, BUFFER_LENGTH_BYTES);
memset(outputRight, 0, BUFFER_LENGTH_BYTES);
for (int s = 0; s < AUDIO_SOURCES; s++) {
int wrapAroundSamples = (BUFFER_LENGTH_BYTES / sizeof(int16_t)) - (data->sources[s].lengthInSamples - data->sources[s].samplePointer);
if (wrapAroundSamples <= 0) {
memcpy(samplesToQueue, data->sources[s].audioData + data->sources[s].samplePointer, BUFFER_LENGTH_BYTES);
data->sources[s].samplePointer += (BUFFER_LENGTH_BYTES / sizeof(int16_t));
} else {
memcpy(samplesToQueue, data->sources[s].audioData + data->sources[s].samplePointer, (data->sources[s].lengthInSamples - data->sources[s].samplePointer) * sizeof(int16_t));
memcpy(samplesToQueue + (data->sources[s].lengthInSamples - data->sources[s].samplePointer), data->sources[s].audioData, wrapAroundSamples * sizeof(int16_t));
data->sources[s].samplePointer = wrapAroundSamples;
}
for (int s = 0; s < NUM_AUDIO_SOURCES; s++) {
glm::vec3 headPos = data->linkedHead->getPos();
glm::vec3 sourcePos = data->sources[s].position;
int startPointer = data->sources[s].samplePointer;
int wrapAroundSamples = (BUFFER_LENGTH_BYTES / sizeof(int16_t)) - (data->sources[s].lengthInSamples - data->sources[s].samplePointer);
if (wrapAroundSamples <= 0) {
memcpy(data->samplesToQueue, data->sources[s].audioData + data->sources[s].samplePointer, BUFFER_LENGTH_BYTES);
data->sources[s].samplePointer += (BUFFER_LENGTH_BYTES / sizeof(int16_t));
} else {
memcpy(data->samplesToQueue, data->sources[s].audioData + data->sources[s].samplePointer, (data->sources[s].lengthInSamples - data->sources[s].samplePointer) * sizeof(int16_t));
memcpy(data->samplesToQueue + (data->sources[s].lengthInSamples - data->sources[s].samplePointer), data->sources[s].audioData, wrapAroundSamples * sizeof(int16_t));
data->sources[s].samplePointer = wrapAroundSamples;
}
float distance = sqrtf(powf(-headPos[0] - sourcePos[0], 2) + powf(-headPos[2] - sourcePos[2], 2));
float amplitudeRatio = powf(0.5, cbrtf(distance * 10));
float distanceAmpRatio = powf(0.5, cbrtf(distance * 10));
float angleToSource = angle_to(headPos * -1.f, sourcePos, data->linkedHead->getRenderYaw(), data->linkedHead->getYaw()) * M_PI/180;
float sinRatio = sqrt(fabsf(sinf(angleToSource)));
int numSamplesDelay = PHASE_DELAY_AT_90 * sinRatio;
float phaseAmpRatio = 1.f - (AMPLITUDE_RATIO_AT_90 * sinRatio);
std::cout << "S: " << numSamplesDelay << " A: " << angleToSource << " S: " << sinRatio << " AR: " << phaseAmpRatio << "\n";
int16_t *leadingOutput = angleToSource > 0 ? outputLeft : outputRight;
int16_t *trailingOutput = angleToSource > 0 ? outputRight : outputLeft;
for (int i = 0; i < BUFFER_LENGTH_BYTES / sizeof(int16_t); i++) {
samplesToQueue[i] *= amplitudeRatio;
outputLeft[i] = s == 0 ? samplesToQueue[i] : outputLeft[i] + samplesToQueue[i];
}
if (wrapAroundSamples > 0) {
delete[] samplesToQueue;
data->samplesToQueue[i] *= distanceAmpRatio / NUM_AUDIO_SOURCES;
leadingOutput[i] += data->samplesToQueue[i];
if (i >= numSamplesDelay) {
trailingOutput[i] += data->samplesToQueue[i - numSamplesDelay];
} else {
int sampleIndex = startPointer - numSamplesDelay + i;
if (sampleIndex < 0) {
sampleIndex += data->sources[s].lengthInSamples;
}
trailingOutput[i] += data->sources[s].audioData[sampleIndex] * (distanceAmpRatio * phaseAmpRatio / NUM_AUDIO_SOURCES);
}
}
}
for (int f = 0; f < BUFFER_LENGTH_BYTES; f++) {
outputLeft[f] = (int) floor(outputLeft[f] / AUDIO_SOURCES);
outputRight[f] = outputLeft[f];
}
// int offsetBytes = numSamplesDelay * sizeof(int16_t);
// memcpy(trailingBuffer, data->sources[1].delayBuffer + (PHASE_DELAY_AT_90 - numSamplesDelay), offsetBytes);
// memcpy(trailingBuffer + numSamplesDelay, samplesToQueue, BUFFER_LENGTH_BYTES - offsetBytes);
// // copy PHASE_DELAY_AT_90 samples to delayBuffer in case we need it next go around
// memcpy(data->sources[1].delayBuffer, data->sources[1].audioData + data->sources[1].samplePointer - PHASE_DELAY_AT_90, PHASE_DELAY_AT_90 * sizeof(int16_t));
return paContinue;
}
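A note on the source loop above: each source is a looping clip, so the callback copies one buffer's worth of samples out of it and, when the read would run past the end of the clip, splits the copy and wraps back to the start. A minimal stand-alone sketch of that wrap-around read, with hypothetical names and mono 16-bit samples assumed:

#include <cstdint>
#include <cstring>

// Sketch of the wrap-around read done per source in audioCallback (hypothetical
// helper; the commit does this inline with memcpy on data->samplesToQueue).
// clip/clipLength describe the looping source, cursor is its sample pointer,
// out receives exactly outSamples samples; the updated cursor is returned.
static int readLooped(const int16_t* clip, int clipLength, int cursor,
                      int16_t* out, int outSamples) {
    int remaining = clipLength - cursor;            // samples left before the clip end
    if (remaining >= outSamples) {
        memcpy(out, clip + cursor, outSamples * sizeof(int16_t));
        return cursor + outSamples;
    }
    // split copy: tail of the clip, then wrap to the beginning
    memcpy(out, clip + cursor, remaining * sizeof(int16_t));
    memcpy(out + remaining, clip, (outSamples - remaining) * sizeof(int16_t));
    return outSamples - remaining;                  // cursor just past the wrapped part
}

The commit also switches to a single data->samplesToQueue scratch buffer allocated once in AudioData, instead of the per-callback new/delete in the removed code, which avoids allocating inside the real-time audio callback.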
@@ -127,6 +125,12 @@ bool Audio::init(Head* mainHead)
err = Pa_Initialize();
if (err != paNoError) goto error;
data->sources[0].position = glm::vec3(6, 0, -1);
readFile("jeska.raw", &data->sources[0]);
data->sources[1].position = glm::vec3(6, 0, 6);
readFile("grayson.raw", &data->sources[1]);
err = Pa_OpenDefaultStream(&stream,
NULL, // input channels
2, // output channels
@@ -137,12 +141,6 @@ bool Audio::init(Head* mainHead)
(void *) data); // user data to be passed to callback
if (err != paNoError) goto error;
data->sources[0].position = glm::vec3(3, 0, -1);
readFile("love.raw", &data->sources[0]);
data->sources[1].position = glm::vec3(-1, 0, 3);
readFile("grayson.raw", &data->sources[1]);
initialized = true;
// start the stream now that sources are good to go
@@ -161,22 +159,17 @@ error:
void Audio::sourceSetup()
{
if (initialized) {
// render gl objects on screen for our sources
glPushMatrix();
glTranslatef(data->sources[0].position[0], data->sources[0].position[1], data->sources[0].position[2]);
glColor3f(1, 0, 0);
glutSolidCube(0.5);
glPopMatrix();
glPushMatrix();
glTranslatef(data->sources[1].position[0], data->sources[1].position[1], data->sources[1].position[2]);
glColor3f(0, 0, 1);
glutSolidCube(0.5);
glPopMatrix();
for (int s = 0; s < NUM_AUDIO_SOURCES; s++) {
// render gl objects on screen for our sources
glPushMatrix();
glTranslatef(data->sources[s].position[0], data->sources[s].position[1], data->sources[s].position[2]);
glColor3f((s == 0 ? 1 : 0), (s == 1 ? 1 : 0), (s == 2 ? 1 : 0));
glutSolidCube(0.5);
glPopMatrix();
}
}
}
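The spatialization in audioCallback boils down to three numbers per source: a distance attenuation, an interaural sample delay that grows with the angle to the source, and an extra attenuation for the far ear. A sketch of just that math, pulled out of the callback; angle_to() is assumed to return degrees, matching the M_PI/180 conversion above:

#include <cmath>

#define PHASE_DELAY_AT_90 20        // samples of interaural delay at +/-90 degrees
#define AMPLITUDE_RATIO_AT_90 0.5f  // far-ear attenuation at +/-90 degrees

struct SpatialParams {
    float distanceAmpRatio;   // overall gain from listener-to-source distance
    int   numSamplesDelay;    // delay applied to the trailing (far) ear
    float phaseAmpRatio;      // extra gain applied to the trailing ear
};

// Hypothetical helper mirroring the per-source math in audioCallback.
static SpatialParams computeSpatialParams(float distance, float angleToSourceDegrees) {
    SpatialParams p;
    p.distanceAmpRatio = powf(0.5f, cbrtf(distance * 10.f));
    float angle = angleToSourceDegrees * (float) M_PI / 180.f;
    float sinRatio = sqrtf(fabsf(sinf(angle)));      // 0 straight ahead, 1 at +/-90 degrees
    p.numSamplesDelay = (int) (PHASE_DELAY_AT_90 * sinRatio);
    p.phaseAmpRatio = 1.f - AMPLITUDE_RATIO_AT_90 * sinRatio;
    return p;
}

With these constants a source 90 degrees to the side reaches the far ear 20 samples later and at half amplitude, and a source one unit away is attenuated to roughly 0.5^cbrt(10), about 0.22 of full scale, before panning.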

audio.h

@@ -15,7 +15,8 @@
#define BUFFER_LENGTH_BYTES 1024
#define PHASE_DELAY_AT_90 20
#define AUDIO_SOURCES 2
#define AMPLITUDE_RATIO_AT_90 0.5
#define NUM_AUDIO_SOURCES 2
class Audio {
public:
@@ -31,20 +32,14 @@ private:
struct AudioSource {
glm::vec3 position;
int16_t *audioData;
int16_t *delayBuffer;
int lengthInSamples;
int samplePointer;
AudioSource() {
samplePointer = 0;
// alloc memory for sample delay buffer
delayBuffer = new int16_t[PHASE_DELAY_AT_90];
memset(delayBuffer, 0, sizeof(int16_t) * PHASE_DELAY_AT_90);
samplePointer = 0;
};
~AudioSource() {
delete[] delayBuffer;
delete[] audioData;
}
};
@@ -52,21 +47,25 @@ private:
static void readFile(const char *filename, struct AudioSource *source);
static bool initialized;
static struct AudioData {
static struct AudioData {
Head* linkedHead;
AudioSource sources[AUDIO_SOURCES];
AudioSource sources[NUM_AUDIO_SOURCES];
int16_t *samplesToQueue;
AudioData() {
sources[0] = AudioSource();
sources[1] = AudioSource();
sources[2] = AudioSource();
samplesToQueue = new int16_t[BUFFER_LENGTH_BYTES / sizeof(int16_t)];
}
// ~AudioData() {
~AudioData() {
// delete sources[0];
// delete sources[1];
// delete sources[2];
// }
delete[] samplesToQueue;
}
} *data;
// protects constructor so that public init method is used
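readFile(), declared above but not part of this diff, fills an AudioSource's audioData and lengthInSamples from one of the .raw clips. Assuming the clips are headerless mono 16-bit PCM, which is what the playback code implies, a plausible loader looks like the sketch below; the real implementation may differ:

#include <cstdint>
#include <fstream>

// Hypothetical loader for the headerless 16-bit PCM clips (jeska.raw, grayson.raw, love.raw).
// Only illustrates the assumed file format; not the readFile() from this codebase.
static void loadRawClip(const char* filename, int16_t*& audioData, int& lengthInSamples) {
    std::ifstream in(filename, std::ios::binary | std::ios::ate);
    std::streamsize bytes = in.tellg();              // file size = sample count * sizeof(int16_t)
    in.seekg(0, std::ios::beg);
    lengthInSamples = (int) (bytes / sizeof(int16_t));
    audioData = new int16_t[lengthInSamples];
    in.read(reinterpret_cast<char*>(audioData), lengthInSamples * sizeof(int16_t));
}

The clips are copied alongside the app by the CopyFiles entries added to the project file below, which is presumably why readFile is called with bare filenames.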

head.cpp

@@ -33,6 +33,7 @@ Head::Head()
interPupilDistance = 0.6;
interBrowDistance = 0.75;
NominalPupilSize = 0.10;
Yaw = 0.0;
EyebrowPitch[0] = EyebrowPitch[1] = BrowPitchAngle[0];
EyebrowRoll[0] = 30;
EyebrowRoll[1] = -30;

head.h

@@ -23,6 +23,7 @@ class Head {
float PitchRate;
float YawRate;
float RollRate;
float renderYaw;
float EyeballPitch[2];
float EyeballYaw[2];
float EyebrowPitch[2];
@@ -59,6 +60,7 @@ public:
void setPitch(float p) {Pitch = p; }
void setYaw(float y) {Yaw = y; }
void setRoll(float r) {Roll = r; };
void setRenderYaw(float y) {renderYaw = y;}
void setLeanForward(float dist);
void setLeanSideways(float dist);
void addPitch(float p) {Pitch -= p; }
@@ -68,6 +70,7 @@ public:
float getPitch() {return Pitch;}
float getRoll() {return Roll;}
float getYaw() {return Yaw;}
float getRenderYaw() {return renderYaw;}
void render();
void simulate(float);
// Send and receive network data
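The new renderYaw accessors are what angle_to() consumes in the audio callback and in display_stats(). angle_to() itself lives in util.h and is not part of this diff; a hypothetical version consistent with its call sites (positions as glm::vec3, yaws and result in degrees) might look like this:

#include <cmath>
#include <glm/glm.hpp>

// Hypothetical angle_to(): bearing from the listener position to a point, relative to
// the direction the head is facing (render yaw plus head yaw), in degrees. This is
// only a guess at the util.h function's behavior based on how it is called here.
static float angle_to(glm::vec3 head, glm::vec3 point, float renderYaw, float headYaw) {
    float bearing = atan2f(point.x - head.x, point.z - head.z) * 180.f / (float) M_PI;
    float angle = bearing - (renderYaw + headYaw);
    while (angle > 180.f)   angle -= 360.f;          // wrap into (-180, 180]
    while (angle <= -180.f) angle += 360.f;
    return angle;
}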

main.cpp

@@ -119,8 +119,7 @@ int steps_per_frame = 0;
float yaw =0.f; // The yaw, pitch for the avatar head
float pitch = 0.f; //
float start_yaw = 116;
float render_yaw = start_yaw;
float start_yaw = 122;
float render_pitch = 0.f;
float render_yaw_rate = 0.f;
float render_pitch_rate = 0.f;
@@ -131,7 +130,7 @@ GLfloat fwd_vec[] = {0.0, 0.0, 1.0};
//GLfloat start_location[] = { WORLD_SIZE*1.5, -WORLD_SIZE/2.0, -WORLD_SIZE/3.0};
//GLfloat start_location[] = { 0.1, -0.15, 0.1};
GLfloat start_location[] = {6.1, -2.0, 1.4};
GLfloat start_location[] = {6.1, 0, 1.4};
GLfloat location[] = {start_location[0], start_location[1], start_location[2]};
float fwd_vel = 0.0f;
@@ -259,9 +258,8 @@ void display_stats(void)
char adc[200];
sprintf(adc, "location = %3.1f,%3.1f,%3.1f, angle_to(origin) = %3.1f, head yaw = %3.1f, render_yaw = %3.1f",
-location[0], -location[1], -location[2],
angle_to(myHead.getPos()*-1.f, glm::vec3(0,0,0), render_yaw, myHead.getYaw()),
myHead.getYaw(), render_yaw
);
angle_to(myHead.getPos()*-1.f, glm::vec3(0,0,0), myHead.getRenderYaw(), myHead.getYaw()),
myHead.getYaw(), myHead.getRenderYaw());
drawtext(10, 50, 0.10, 0, 1.0, 0, adc);
@@ -313,6 +311,8 @@ void init(void)
{
int i;
myHead.setRenderYaw(start_yaw);
if (audio_on) {
if (serial_on) {
Audio::init(&myHead);
@@ -398,7 +398,7 @@ void reset_sensors()
//
// Reset serial I/O sensors
//
render_yaw = start_yaw;
myHead.setRenderYaw(start_yaw);
yaw = render_yaw_rate = 0;
pitch = render_pitch = render_pitch_rate = 0;
lateral_vel = 0;
@@ -455,12 +455,13 @@ void update_pos(float frametime)
*/
// Update render direction (pitch/yaw) based on measured gyro rates
const int MIN_YAW_RATE = 300;
const int MIN_YAW_RATE = 3000;
const float YAW_SENSITIVITY = 0.03;
const int MIN_PITCH_RATE = 300;
const int MIN_PITCH_RATE = 3000;
const float PITCH_SENSITIVITY = 0.04;
if (fabs(measured_yaw_rate) > MIN_YAW_RATE)
if (fabs(measured_yaw_rate) > MIN_YAW_RATE)
{
if (measured_yaw_rate > 0)
render_yaw_rate -= (measured_yaw_rate - MIN_YAW_RATE) * YAW_SENSITIVITY * frametime;
@@ -474,7 +475,8 @@ void update_pos(float frametime)
else
render_pitch_rate += (measured_pitch_rate + MIN_PITCH_RATE) * PITCH_SENSITIVITY * frametime;
}
render_yaw += render_yaw_rate;
myHead.setRenderYaw(myHead.getRenderYaw() + render_yaw_rate);
render_pitch += render_pitch_rate;
// Decay render_pitch toward zero because we never look constantly up/down
@@ -485,6 +487,7 @@ void update_pos(float frametime)
render_yaw_rate *= (1.f - 7.0*frametime);
// Update slide left/right based on accelerometer reading
/*
const int MIN_LATERAL_ACCEL = 20;
const float LATERAL_SENSITIVITY = 0.001;
if (fabs(measured_lateral_accel) > MIN_LATERAL_ACCEL)
@@ -493,12 +496,13 @@ void update_pos(float frametime)
lateral_vel += (measured_lateral_accel - MIN_LATERAL_ACCEL) * LATERAL_SENSITIVITY * frametime;
else
lateral_vel += (measured_lateral_accel + MIN_LATERAL_ACCEL) * LATERAL_SENSITIVITY * frametime;
}
}*/
//slide += lateral_vel;
lateral_vel *= (1.f - 4.0*frametime);
// Update fwd/back based on accelerometer reading
/*
const int MIN_FWD_ACCEL = 20;
const float FWD_SENSITIVITY = 0.001;
@@ -509,15 +513,15 @@ void update_pos(float frametime)
else
fwd_vel += (measured_fwd_accel + MIN_FWD_ACCEL) * FWD_SENSITIVITY * frametime;
}
}*/
// Decrease forward velocity
fwd_vel *= (1.f - 4.0*frametime);
// Update forward vector based on pitch and yaw
fwd_vec[0] = -sinf(render_yaw*PI/180);
fwd_vec[0] = -sinf(myHead.getRenderYaw()*PI/180);
fwd_vec[1] = sinf(render_pitch*PI/180);
fwd_vec[2] = cosf(render_yaw*PI/180);
fwd_vec[2] = cosf(myHead.getRenderYaw()*PI/180);
// Advance location forward
location[0] += fwd_vec[0]*fwd_vel;
@@ -570,7 +574,7 @@ void display(void)
// Rotate, translate to camera location
glRotatef(render_pitch, 1, 0, 0);
glRotatef(render_yaw, 0, 1, 0);
glRotatef(myHead.getRenderYaw(), 0, 1, 0);
glTranslatef(location[0], location[1], location[2]);
// Draw cloud of dots
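With render_yaw folded into Head, the camera forward vector in update_pos() is rebuilt each frame from the head's render yaw and pitch, both in degrees. A self-contained sketch of that conversion, using the same sign conventions as the update_pos() changes above:

#include <cmath>

// Rebuild the forward vector from render yaw/pitch in degrees, matching update_pos():
// -sin(yaw) on x, sin(pitch) on y, cos(yaw) on z.
static void forwardFromYawPitch(float renderYawDegrees, float renderPitchDegrees, float fwd[3]) {
    const float DEG_TO_RAD = 3.14159265f / 180.f;
    fwd[0] = -sinf(renderYawDegrees * DEG_TO_RAD);
    fwd[1] =  sinf(renderPitchDegrees * DEG_TO_RAD);
    fwd[2] =  cosf(renderYawDegrees * DEG_TO_RAD);
}

The avatar location is then advanced along this vector by fwd_vel, exactly as in the lines above.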

interface.xcodeproj/project.pbxproj

@@ -52,6 +52,7 @@
532C803B16AF3B1900B1A969 /* libopencv_videostab.2.4.3.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 532C802616AF3B1900B1A969 /* libopencv_videostab.2.4.3.dylib */; };
532C803C16AF3B1900B1A969 /* libportaudio.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 532C802816AF3B1900B1A969 /* libportaudio.a */; };
533B578716B2160C00FCABF1 /* grayson.raw in CopyFiles */ = {isa = PBXBuildFile; fileRef = 533B578516B2160600FCABF1 /* grayson.raw */; };
533BF9D516B31A4700AC31BB /* jeska.raw in CopyFiles */ = {isa = PBXBuildFile; fileRef = 533BF9D316B31A3B00AC31BB /* jeska.raw */; };
538BA8A316B1B71E000BF99C /* love.raw in CopyFiles */ = {isa = PBXBuildFile; fileRef = 538BA8A216B1B719000BF99C /* love.raw */; };
B6BDADE115F44A9D002A07DF /* CoreServices.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B6BDADDE15F444DB002A07DF /* CoreServices.framework */; };
B6BDADE215F44AA5002A07DF /* CoreAudio.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B6BDADD815F444C1002A07DF /* CoreAudio.framework */; };
@@ -73,6 +74,7 @@
533B578716B2160C00FCABF1 /* grayson.raw in CopyFiles */,
532C7CCE16AF301E00B1A969 /* int-texture256-v4.png in CopyFiles */,
538BA8A316B1B71E000BF99C /* love.raw in CopyFiles */,
533BF9D516B31A4700AC31BB /* jeska.raw in CopyFiles */,
532C7CCF16AF301E00B1A969 /* int-texture256-v5.png in CopyFiles */,
532C7CD016AF301E00B1A969 /* philip-image.png in CopyFiles */,
532C7CD116AF301E00B1A969 /* pngtest8rgba.png in CopyFiles */,
@@ -505,6 +507,7 @@
532C802816AF3B1900B1A969 /* libportaudio.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; path = libportaudio.a; sourceTree = "<group>"; };
532C802916AF3B1900B1A969 /* portaudio.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = portaudio.h; sourceTree = "<group>"; };
533B578516B2160600FCABF1 /* grayson.raw */ = {isa = PBXFileReference; lastKnownFileType = file; path = grayson.raw; sourceTree = "<group>"; };
533BF9D316B31A3B00AC31BB /* jeska.raw */ = {isa = PBXFileReference; lastKnownFileType = file; path = jeska.raw; sourceTree = "<group>"; };
538BA8A216B1B719000BF99C /* love.raw */ = {isa = PBXFileReference; lastKnownFileType = file; path = love.raw; sourceTree = "<group>"; };
8DD76F6C0486A84900D96B5E /* interface */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.executable"; includeInIndex = 0; path = interface; sourceTree = BUILT_PRODUCTS_DIR; };
B6BDADD815F444C1002A07DF /* CoreAudio.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreAudio.framework; path = System/Library/Frameworks/CoreAudio.framework; sourceTree = SDKROOT; };
@@ -1286,6 +1289,7 @@
536E784516B0A1C900A2F6F3 /* audio */ = {
isa = PBXGroup;
children = (
533BF9D316B31A3B00AC31BB /* jeska.raw */,
533B578516B2160600FCABF1 /* grayson.raw */,
538BA8A216B1B719000BF99C /* love.raw */,
);