Merge branch 'master' of https://github.com/worklist/hifi into 19461
commit 841d491b2f

14 changed files with 177 additions and 71 deletions
@@ -164,7 +164,6 @@ void AudioMixer::addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuf
        if ((s / 2) < numSamplesDelay) {
            // pull the earlier sample for the delayed channel
            int earlierSample = (*bufferToAdd)[(s / 2) - numSamplesDelay] * attenuationCoefficient * weakChannelAmplitudeRatio;

            _clientSamples[s + delayedChannelOffset] = glm::clamp(_clientSamples[s + delayedChannelOffset] + earlierSample,
                                                                  MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
        }
@@ -175,11 +174,11 @@ void AudioMixer::addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuf
                                                                  MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);

        if ((s / 2) + numSamplesDelay < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL) {
-           // place the curernt sample at the right spot in the delayed channel
-           int16_t clampedSample = glm::clamp((int) (_clientSamples[s + numSamplesDelay + delayedChannelOffset]
+           // place the current sample at the right spot in the delayed channel
+           int16_t clampedSample = glm::clamp((int) (_clientSamples[s + (numSamplesDelay * 2) + delayedChannelOffset]
                                                       + (currentSample * weakChannelAmplitudeRatio)),
                                                      MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
-           _clientSamples[s + numSamplesDelay + delayedChannelOffset] = clampedSample;
+           _clientSamples[s + (numSamplesDelay * 2) + delayedChannelOffset] = clampedSample;
        }
    }
}
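Note on the "* 2" fix above: _clientSamples is an interleaved stereo buffer, so delaying one channel by numSamplesDelay frames means writing numSamplesDelay * 2 int16_t slots further along. A minimal standalone sketch of that indexing and the clamped mix, using illustrative names and a frame-based helper rather than the mixer's actual types:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Illustrative clamp limits; the mixer has its own MIN_SAMPLE_VALUE / MAX_SAMPLE_VALUE.
    const int MIN_SAMPLE = -32768;
    const int MAX_SAMPLE = 32767;

    // In an interleaved stereo buffer, frame f of channel c lives at index f * 2 + c,
    // so delaying one channel by delayFrames means writing delayFrames * 2 slots later.
    void mixDelayedSample(std::vector<int16_t>& stereo, int frame, int delayFrames,
                          int delayedChannelOffset, int sample, float weakRatio) {
        size_t index = static_cast<size_t>((frame + delayFrames) * 2 + delayedChannelOffset);
        if (index < stereo.size()) {
            int mixed = stereo[index] + static_cast<int>(sample * weakRatio);
            stereo[index] = static_cast<int16_t>(std::min(MAX_SAMPLE, std::max(MIN_SAMPLE, mixed)));
        }
    }

    int main() {
        std::vector<int16_t> stereo(20, 0);                // 10 interleaved L/R frames
        mixDelayedSample(stereo, 2, 3, 1, 1000, 0.5f);     // frame 2, 3-frame delay, right channel
        printf("wrote %d at interleaved index %d\n", stereo[(2 + 3) * 2 + 1], (2 + 3) * 2 + 1);
        return 0;
    }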
@@ -52,6 +52,8 @@ Audio::Audio(Oscilloscope* scope, int16_t initialJitterBufferSamples, QObject* p
    _outputFormat(),
    _outputDevice(NULL),
    _numOutputCallbackBytes(0),
+   _loopbackAudioOutput(NULL),
+   _loopbackOutputDevice(NULL),
    _inputRingBuffer(0),
    _ringBuffer(NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL),
    _scope(scope),
@@ -201,19 +203,20 @@ void linearResampling(int16_t* sourceSamples, int16_t* destinationSamples,
        // upsample from 24 to 48
        // for now this only supports a stereo to stereo conversion - this is our case for network audio to output
        int sourceIndex = 0;
        int destinationToSourceFactor = (1 / sourceToDestinationFactor);
+       int dtsSampleRateFactor = (destinationAudioFormat.sampleRate() / sourceAudioFormat.sampleRate());

-       for (int i = 0; i < numDestinationSamples; i += destinationAudioFormat.channelCount() * destinationToSourceFactor) {
+       for (int i = 0; i < numDestinationSamples; i += destinationAudioFormat.channelCount() * dtsSampleRateFactor) {
            sourceIndex = (i / destinationToSourceFactor);

            // fill the L/R channels and make the rest silent
-           for (int j = i; j < i + (destinationToSourceFactor * destinationAudioFormat.channelCount()); j++) {
+           for (int j = i; j < i + (dtsSampleRateFactor * destinationAudioFormat.channelCount()); j++) {
                if (j % destinationAudioFormat.channelCount() == 0) {
                    // left channel
                    destinationSamples[j] = sourceSamples[sourceIndex];
                } else if (j % destinationAudioFormat.channelCount() == 1) {
                    // right channel
-                   destinationSamples[j] = sourceSamples[sourceIndex + 1];
+                   destinationSamples[j] = sourceSamples[sourceIndex + (sourceAudioFormat.channelCount() > 1 ? 1 : 0)];
                } else {
                    // channels above 2, fill with silence
                    destinationSamples[j] = 0;
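The fix above steps the destination loop by the sample-rate ratio (dtsSampleRateFactor) instead of destinationToSourceFactor. A standalone sketch of this kind of hold-style stereo upsampling, with assumed rates (24 kHz to 48 kHz) and a made-up four-sample buffer:

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Hold-style upsampling of interleaved stereo: each source frame is repeated
    // rateFactor times in the destination (24 kHz -> 48 kHz gives rateFactor = 2).
    void upsampleStereo(const std::vector<int16_t>& src, std::vector<int16_t>& dst, int rateFactor) {
        const size_t channels = 2;
        for (size_t i = 0; i + channels * rateFactor <= dst.size(); i += channels * rateFactor) {
            size_t sourceIndex = i / rateFactor;            // start of the matching source frame
            for (size_t j = i; j < i + channels * rateFactor; j++) {
                dst[j] = (j % channels == 0) ? src[sourceIndex]        // left sample
                                             : src[sourceIndex + 1];   // right sample
            }
        }
    }

    int main() {
        std::vector<int16_t> src = {10, -10, 20, -20};     // two stereo frames at 24 kHz
        std::vector<int16_t> dst(src.size() * 2);          // four stereo frames at 48 kHz
        upsampleStereo(src, dst, 48000 / 24000);
        for (int16_t s : dst) {
            printf("%d ", s);
        }
        printf("\n");
        return 0;
    }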
@@ -263,9 +266,13 @@ void Audio::start() {
    _inputDevice = _audioInput->start();
    connect(_inputDevice, SIGNAL(readyRead()), this, SLOT(handleAudioInput()));

+   // setup our general output device for audio-mixer audio
    _audioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
    _outputDevice = _audioOutput->start();
+
+   // setup a loopback audio output device
+   _loopbackAudioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);

    gettimeofday(&_lastReceiveTime, NULL);
}

@@ -290,6 +297,30 @@ void Audio::handleAudioInput() {

    QByteArray inputByteArray = _inputDevice->readAll();

+   if (Menu::getInstance()->isOptionChecked(MenuOption::EchoLocalAudio)) {
+       // if this person wants local loopback add that to the locally injected audio
+
+       if (!_loopbackOutputDevice) {
+           // we didn't have the loopback output device going so set that up now
+           _loopbackOutputDevice = _loopbackAudioOutput->start();
+       }
+
+       if (_inputFormat == _outputFormat) {
+           _loopbackOutputDevice->write(inputByteArray);
+       } else {
+           static float loopbackOutputToInputRatio = (_outputFormat.sampleRate() / (float) _inputFormat.sampleRate())
+               * (_outputFormat.channelCount() / _inputFormat.channelCount());
+
+           QByteArray loopBackByteArray(inputByteArray.size() * loopbackOutputToInputRatio, 0);
+
+           linearResampling((int16_t*) inputByteArray.data(), (int16_t*) loopBackByteArray.data(),
+                            inputByteArray.size() / sizeof(int16_t),
+                            loopBackByteArray.size() / sizeof(int16_t), _inputFormat, _outputFormat);
+
+           _loopbackOutputDevice->write(loopBackByteArray);
+       }
+   }
+
    _inputRingBuffer.writeData(inputByteArray.data(), inputByteArray.size());

    while (_inputRingBuffer.samplesAvailable() > inputSamplesRequired) {
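The resampled loopback buffer is sized by the ratio of output to input stream density: sample rate times channel count. Note the diff computes the channel ratio with integer division. A small sketch of the sizing arithmetic with assumed formats (48 kHz stereo out, 24 kHz mono in) and an arbitrary 512-sample capture chunk; the real code reads these values from QAudioFormat:

    #include <cstdint>
    #include <cstdio>

    // Sketch of the loopback buffer sizing: bytes scale with the ratio of sample rates
    // and the ratio of channel counts. Formats below are assumptions for illustration.
    int main() {
        int inputSampleRate = 24000, inputChannels = 1;    // assumed capture format
        int outputSampleRate = 48000, outputChannels = 2;  // assumed playback format

        float ratio = (outputSampleRate / (float) inputSampleRate)
                    * (outputChannels / (float) inputChannels);

        int inputBytes = 512 * (int) sizeof(int16_t) * inputChannels;   // arbitrary capture chunk
        int loopbackBytes = (int) (inputBytes * ratio);

        printf("ratio = %.2f, %d input bytes -> %d loopback bytes\n", ratio, inputBytes, loopbackBytes);
        return 0;
    }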
@@ -324,11 +355,6 @@ void Audio::handleAudioInput() {
                Q_ARG(QByteArray, QByteArray((char*) monoAudioSamples,
                                             NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL)),
                Q_ARG(bool, false), Q_ARG(bool, true));
-
-           if (Menu::getInstance()->isOptionChecked(MenuOption::EchoLocalAudio)) {
-               // if this person wants local loopback add that to the locally injected audio
-               memcpy(_localInjectedSamples, monoAudioSamples, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL);
-           }
        } else {
            // our input loudness is 0, since we're muted
            _lastInputLoudness = 0;
@@ -438,8 +464,8 @@ void Audio::addReceivedAudioToBuffer(const QByteArray& audioByteArray) {
        for (int i = 0; i < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL; i++) {
            ringBufferSamples[i * 2] = glm::clamp(ringBufferSamples[i * 2] + _localInjectedSamples[i],
                                                  MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
-           ringBufferSamples[(i * 2) + 1] += glm::clamp(ringBufferSamples[(i * 2) + 1] + _localInjectedSamples[i],
-                                                        MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
+           ringBufferSamples[(i * 2) + 1] = glm::clamp(ringBufferSamples[(i * 2) + 1] + _localInjectedSamples[i],
+                                                       MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
        }

        // copy the packet from the RB to the output
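The second-channel fix replaces += with =: the old form added the ring-buffer sample to an already clamped sum of itself and the injected sample, double-counting it and storing an unclamped result. A tiny standalone illustration with arbitrary values:

    #include <algorithm>
    #include <cstdio>

    // Stand-in for glm::clamp(x, MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE) on 16-bit samples.
    int clampSample(int value) {
        return std::min(32767, std::max(-32768, value));
    }

    int main() {
        int ringSample = 20000;
        int injected = 15000;

        int buggy = ringSample + clampSample(ringSample + injected);   // old '+=' behaviour
        int fixed = clampSample(ringSample + injected);                // new '=' behaviour

        printf("buggy: %d (outside int16_t range), fixed: %d (clamped)\n", buggy, fixed);
        return 0;
    }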
@@ -78,8 +78,11 @@ private:
    QAudioFormat _outputFormat;
    QIODevice* _outputDevice;
    int _numOutputCallbackBytes;
+   QAudioOutput* _loopbackAudioOutput;
+   QIODevice* _loopbackOutputDevice;
    AudioRingBuffer _inputRingBuffer;
    AudioRingBuffer _ringBuffer;

    Oscilloscope* _scope;
    StDev _stdev;
    timeval _lastReceiveTime;

@@ -374,7 +374,10 @@ Menu::Menu() :
    addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayLeapHands, 0, true);
    addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::LeapDrive, 0, false);
    addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHandTargets, 0, false);
    addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::BallFromHand, 0, false);
+   addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::VoxelDrumming, 0, false);
+   addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::PlaySlaps, 0, false);


    QMenu* trackingOptionsMenu = developerMenu->addMenu("Tracking Options");

@@ -247,6 +247,8 @@ namespace MenuOption {
    const QString ShowAllLocalVoxels = "Show All Local Voxels";
    const QString ShowTrueColors = "Show TRUE Colors";
    const QString SimulateLeapHand = "Simulate Leap Hand";
+   const QString VoxelDrumming = "Voxel Drumming";
+   const QString PlaySlaps = "Play Slaps";
    const QString SkeletonTracking = "Skeleton Tracking";
    const QString SuppressShortTimings = "Suppress Timings Less than 10ms";
    const QString LEDTracking = "LED Tracking";

@@ -17,6 +17,8 @@
#include "Util.h"
#include "renderer/ProgramObject.h"

+//#define DEBUG_HAND
+
using namespace std;

const float FINGERTIP_VOXEL_SIZE = 0.05;
@@ -98,6 +100,9 @@ void Hand::simulateToyBall(PalmData& palm, const glm::vec3& fingerTipPosition, f
                glm::vec3 newVelocity = NO_VELOCITY;

                // update the particle with it's new state...
+#ifdef DEBUG_HAND
+               qDebug("Update caught particle!\n");
+#endif
                caughtParticle->updateParticle(newPosition,
                                               closestParticle->getRadius(),
                                               closestParticle->getXColor(),
@@ -143,6 +148,9 @@ void Hand::simulateToyBall(PalmData& palm, const glm::vec3& fingerTipPosition, f
                // create the ball, call MakeParticle, and use the resulting ParticleEditHandle to
                // manage the newly created particle.
                // Create a particle on the particle server
+#ifdef DEBUG_HAND
+               qDebug("Created New Ball\n");
+#endif
                glm::vec3 ballPosition = ballFromHand ? palm.getPosition() : fingerTipPosition;
                _ballParticleEditHandles[handID] = Application::getInstance()->makeParticle(
                                                                ballPosition / (float)TREE_SCALE,
@@ -156,6 +164,9 @@ void Hand::simulateToyBall(PalmData& palm, const glm::vec3& fingerTipPosition, f
            }
        } else {
            // Ball is in hand
+#ifdef DEBUG_HAND
+           qDebug("Ball in hand\n");
+#endif
            glm::vec3 ballPosition = ballFromHand ? palm.getPosition() : fingerTipPosition;
            _ballParticleEditHandles[handID]->updateParticle(ballPosition / (float)TREE_SCALE,
                                                             TOY_BALL_RADIUS / (float) TREE_SCALE,
@@ -172,16 +183,18 @@ void Hand::simulateToyBall(PalmData& palm, const glm::vec3& fingerTipPosition, f

            _toyBallInHand[handID] = false;
            glm::vec3 ballPosition = ballFromHand ? palm.getPosition() : fingerTipPosition;
-           glm::vec3 handVelocity = palm.getRawVelocity();
-           glm::vec3 fingerTipVelocity = palm.getTipVelocity();
+           glm::vec3 ballVelocity = ballFromHand ? palm.getRawVelocity() : palm.getTipVelocity();
            glm::quat avatarRotation = _owningAvatar->getOrientation();
-           glm::vec3 toyBallVelocity = avatarRotation * fingerTipVelocity;
+           ballVelocity = avatarRotation * ballVelocity;

            // ball is no longer in hand...
+#ifdef DEBUG_HAND
+           qDebug("Threw ball, v = %.3f\n", glm::length(ballVelocity));
+#endif
            _ballParticleEditHandles[handID]->updateParticle(ballPosition / (float)TREE_SCALE,
                                                             TOY_BALL_RADIUS / (float) TREE_SCALE,
                                                             TOY_BALL_ON_SERVER_COLOR[_whichBallColor[handID]],
-                                                            toyBallVelocity / (float)TREE_SCALE,
+                                                            ballVelocity / (float)TREE_SCALE,
                                                             TOY_BALL_GRAVITY / (float) TREE_SCALE,
                                                             TOY_BALL_DAMPING,
                                                             NOT_IN_HAND,
@@ -272,32 +285,35 @@ void Hand::simulate(float deltaTime, bool isMine) {
                    _lastFingerDeleteVoxel = fingerTipPosition;
                }
            }
-           // Check if the finger is intersecting with a voxel in the client voxel tree
-           VoxelTreeElement* fingerNode = Application::getInstance()->getVoxels()->getVoxelEnclosing(
-                                                               glm::vec3(fingerTipPosition / (float)TREE_SCALE));
-           if (fingerNode) {
-               if (!palm.getIsCollidingWithVoxel()) {
-                   // Collision has just started
-                   palm.setIsCollidingWithVoxel(true);
-                   handleVoxelCollision(&palm, fingerTipPosition, fingerNode, deltaTime);
-                   // Set highlight voxel
-                   VoxelDetail voxel;
-                   glm::vec3 pos = fingerNode->getCorner();
-                   voxel.x = pos.x;
-                   voxel.y = pos.y;
-                   voxel.z = pos.z;
-                   voxel.s = fingerNode->getScale();
-                   voxel.red = fingerNode->getColor()[0];
-                   voxel.green = fingerNode->getColor()[1];
-                   voxel.blue = fingerNode->getColor()[2];
-                   Application::getInstance()->setHighlightVoxel(voxel);
-                   Application::getInstance()->setIsHighlightVoxel(true);
-               }
-           } else {
-               if (palm.getIsCollidingWithVoxel()) {
-                   // Collision has just ended
-                   palm.setIsCollidingWithVoxel(false);
-                   Application::getInstance()->setIsHighlightVoxel(false);

+           // Voxel Drumming with fingertips if enabled
+           if (Menu::getInstance()->isOptionChecked(MenuOption::VoxelDrumming)) {
+               VoxelTreeElement* fingerNode = Application::getInstance()->getVoxels()->getVoxelEnclosing(
+                                                               glm::vec3(fingerTipPosition / (float)TREE_SCALE));
+               if (fingerNode) {
+                   if (!palm.getIsCollidingWithVoxel()) {
+                       // Collision has just started
+                       palm.setIsCollidingWithVoxel(true);
+                       handleVoxelCollision(&palm, fingerTipPosition, fingerNode, deltaTime);
+                       // Set highlight voxel
+                       VoxelDetail voxel;
+                       glm::vec3 pos = fingerNode->getCorner();
+                       voxel.x = pos.x;
+                       voxel.y = pos.y;
+                       voxel.z = pos.z;
+                       voxel.s = fingerNode->getScale();
+                       voxel.red = fingerNode->getColor()[0];
+                       voxel.green = fingerNode->getColor()[1];
+                       voxel.blue = fingerNode->getColor()[2];
+                       Application::getInstance()->setHighlightVoxel(voxel);
+                       Application::getInstance()->setIsHighlightVoxel(true);
+                   }
+               } else {
+                   if (palm.getIsCollidingWithVoxel()) {
+                       // Collision has just ended
+                       palm.setIsCollidingWithVoxel(false);
+                       Application::getInstance()->setIsHighlightVoxel(false);
+                   }
+               }
            }
        }
@@ -325,9 +341,37 @@ void Hand::updateCollisions() {
    for (NodeList::iterator node = nodeList->begin(); node != nodeList->end(); node++) {
        if (node->getLinkedData() && node->getType() == NODE_TYPE_AGENT) {
            Avatar* otherAvatar = (Avatar*)node->getLinkedData();
+           if (Menu::getInstance()->isOptionChecked(MenuOption::PlaySlaps)) {
+               // Check for palm collisions
+               glm::vec3 myPalmPosition = palm.getPosition();
+               float palmCollisionDistance = 0.1f;
+               palm.setIsCollidingWithPalm(false);
+               // If 'Play Slaps' is enabled, look for palm-to-palm collisions and make sound
+               for (int j = 0; j < otherAvatar->getHand().getNumPalms(); j++) {
+                   PalmData& otherPalm = otherAvatar->getHand().getPalms()[j];
+                   if (!otherPalm.isActive()) {
+                       continue;
+                   }
+                   glm::vec3 otherPalmPosition = otherPalm.getPosition();
+                   if (glm::length(otherPalmPosition - myPalmPosition) < palmCollisionDistance) {
+                       palm.setIsCollidingWithPalm(true);
+                       const float PALM_COLLIDE_VOLUME = 1.f;
+                       const float PALM_COLLIDE_FREQUENCY = 150.f;
+                       const float PALM_COLLIDE_DURATION_MAX = 2.f;
+                       const float PALM_COLLIDE_DECAY_PER_SAMPLE = 0.005f;
+                       Application::getInstance()->getAudio()->startDrumSound(PALM_COLLIDE_VOLUME,
+                                                                              PALM_COLLIDE_FREQUENCY,
+                                                                              PALM_COLLIDE_DURATION_MAX,
+                                                                              PALM_COLLIDE_DECAY_PER_SAMPLE);
+
+
+                   }
+               }
+           }
            glm::vec3 avatarPenetration;
            if (otherAvatar->findSpherePenetration(palm.getPosition(), scaledPalmRadius, avatarPenetration)) {
                totalPenetration = addPenetrations(totalPenetration, avatarPenetration);
+               // Check for collisions with the other avatar's leap palms
            }
        }
    }
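The new 'Play Slaps' block marks a palm as colliding when any of another avatar's active palms comes within a fixed 0.1 m, then fires a drum sound. A minimal sketch of just the distance test, using glm and made-up data (isSlapping and its inputs are illustrative, not engine types):

    #include <cstdio>
    #include <vector>
    #include <glm/glm.hpp>

    // Return true if any other palm position is within collisionDistance of myPalm.
    bool isSlapping(const glm::vec3& myPalm, const std::vector<glm::vec3>& otherPalms,
                    float collisionDistance = 0.1f) {
        for (const glm::vec3& other : otherPalms) {
            if (glm::length(other - myPalm) < collisionDistance) {
                return true;
            }
        }
        return false;
    }

    int main() {
        glm::vec3 myPalm(0.0f, 1.0f, 0.0f);
        std::vector<glm::vec3> others = { glm::vec3(0.05f, 1.0f, 0.0f), glm::vec3(2.0f, 1.0f, 0.0f) };
        printf("slap detected: %s\n", isSlapping(myPalm, others) ? "yes" : "no");
        return 0;
    }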
@@ -534,7 +578,11 @@ void Hand::renderLeapHands() {
        PalmData& palm = getPalms()[i];
        if (palm.isActive()) {
            const float palmThickness = 0.02f;
-           glColor4f(handColor.r, handColor.g, handColor.b, 0.25);
+           if (palm.getIsCollidingWithPalm()) {
+               glColor4f(1, 0, 0, 0.50);
+           } else {
+               glColor4f(handColor.r, handColor.g, handColor.b, 0.25);
+           }
            glm::vec3 tip = palm.getPosition();
            glm::vec3 root = palm.getPosition() + palm.getNormal() * palmThickness;
            const float radiusA = 0.05f;

@@ -79,7 +79,6 @@ private:
    std::vector<HandBall> _leapFingerRootBalls;

    glm::vec3 _lastFingerAddVoxel, _lastFingerDeleteVoxel;
-   bool _isCollidingWithVoxel;
    VoxelDetail _collidingVoxel;

    glm::vec3 _collisionCenter;

@@ -426,6 +426,31 @@ static TextRenderer* textRenderer() {
    return renderer;
}

+void MyAvatar::renderDebugBodyPoints() {
+    glm::vec3 torsoPosition(getPosition());
+    glm::vec3 headPosition(getHead().getEyePosition());
+    float torsoToHead = glm::length(headPosition - torsoPosition);
+    glm::vec3 position;
+    printf("head-above-torso %.2f, scale = %0.2f\n", torsoToHead, getScale());
+
+    // Torso Sphere
+    position = torsoPosition;
+    glPushMatrix();
+    glColor4f(0, 1, 0, .5f);
+    glTranslatef(position.x, position.y, position.z);
+    glutSolidSphere(0.2, 10, 10);
+    glPopMatrix();
+
+    // Head Sphere
+    position = headPosition;
+    glPushMatrix();
+    glColor4f(0, 1, 0, .5f);
+    glTranslatef(position.x, position.y, position.z);
+    glutSolidSphere(0.15, 10, 10);
+    glPopMatrix();
+
+
+}
void MyAvatar::render(bool forceRenderHead) {

    // render body
@@ -438,6 +463,9 @@ void MyAvatar::render(bool forceRenderHead) {
        glPopMatrix();
    }

+   //renderDebugBodyPoints();
+
+
    if (!_chatMessage.empty()) {
        int width = 0;
        int lastWidth = 0;

@@ -30,6 +30,7 @@ public:
    void simulate(float deltaTime, Transmitter* transmitter);
    void updateFromGyrosAndOrWebcam(bool turnWithHead);
    void render(bool forceRenderHead);
+   void renderDebugBodyPoints();

    // setters
    void setMousePressed(bool mousePressed) { _mousePressed = mousePressed; }

@@ -79,12 +79,6 @@ void SixenseManager::update(float deltaTime) {
        palm->setTrigger(data.trigger);
        palm->setJoystick(data.joystick_x, data.joystick_y);

-       // Vibrate if needed
-       if (palm->getIsCollidingWithVoxel()) {
-           //printf("vibrate!\n");
-           //vibrate(data.controller_index, 100, 1);
-       }
-
        glm::vec3 position(data.pos[0], data.pos[1], data.pos[2]);
        // Adjust for distance between acquisition 'orb' and the user's torso
        // (distance to the right of body center, distance below torso, distance behind torso)

@@ -38,22 +38,20 @@ PalmData& HandData::addNewPalm() {
    return _palms.back();
}

+const int SIXENSE_CONTROLLER_ID_LEFT_HAND = 0;
+const int SIXENSE_CONTROLLER_ID_RIGHT_HAND = 1;
+
void HandData::getLeftRightPalmIndices(int& leftPalmIndex, int& rightPalmIndex) const {
    leftPalmIndex = -1;
-   float leftPalmX = FLT_MAX;
    rightPalmIndex = -1;
-   float rightPalmX = -FLT_MAX;
    for (int i = 0; i < _palms.size(); i++) {
        const PalmData& palm = _palms[i];
        if (palm.isActive()) {
-           float x = palm.getRawPosition().x;
-           if (x < leftPalmX) {
+           if (palm.getSixenseID() == SIXENSE_CONTROLLER_ID_LEFT_HAND) {
                leftPalmIndex = i;
-               leftPalmX = x;
            }
-           if (x > rightPalmX) {
+           if (palm.getSixenseID() == SIXENSE_CONTROLLER_ID_RIGHT_HAND) {
                rightPalmIndex = i;
-               rightPalmX = x;
            }
        }
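The rewritten getLeftRightPalmIndices stops inferring handedness from raw x position and instead keys off the Sixense controller ID stored on each palm. A compact sketch of the same selection over a stand-in struct (PalmInfo is made up; the real code uses PalmData):

    #include <cstdio>
    #include <vector>

    const int SIXENSE_CONTROLLER_ID_LEFT_HAND = 0;
    const int SIXENSE_CONTROLLER_ID_RIGHT_HAND = 1;

    struct PalmInfo {        // stand-in for PalmData
        bool active;
        int sixenseID;
    };

    // Pick palm indices by controller ID rather than by comparing x positions.
    void getLeftRightPalmIndices(const std::vector<PalmInfo>& palms, int& left, int& right) {
        left = -1;
        right = -1;
        for (size_t i = 0; i < palms.size(); i++) {
            if (!palms[i].active) {
                continue;
            }
            if (palms[i].sixenseID == SIXENSE_CONTROLLER_ID_LEFT_HAND) {
                left = (int) i;
            }
            if (palms[i].sixenseID == SIXENSE_CONTROLLER_ID_RIGHT_HAND) {
                right = (int) i;
            }
        }
    }

    int main() {
        std::vector<PalmInfo> palms = { {true, 1}, {true, 0}, {false, 0} };
        int left, right;
        getLeftRightPalmIndices(palms, left, right);
        printf("left palm index = %d, right palm index = %d\n", left, right);
        return 0;
    }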
@@ -71,7 +69,8 @@ _leapID(LEAPID_INVALID),
_sixenseID(SIXENSEID_INVALID),
_numFramesWithoutData(0),
_owningHandData(owningHandData),
-_isCollidingWithVoxel(false)
+_isCollidingWithVoxel(false),
+_isCollidingWithPalm(false)
{
    for (int i = 0; i < NUM_FINGERS_PER_HAND; ++i) {
        _fingers.push_back(FingerData(this, owningHandData));
@@ -182,6 +182,9 @@ public:
    bool getIsCollidingWithVoxel() { return _isCollidingWithVoxel; }
    void setIsCollidingWithVoxel(bool isCollidingWithVoxel) { _isCollidingWithVoxel = isCollidingWithVoxel; }

+   bool getIsCollidingWithPalm() { return _isCollidingWithPalm; }
+   void setIsCollidingWithPalm(bool isCollidingWithPalm) { _isCollidingWithPalm = isCollidingWithPalm; }
+
private:
    std::vector<FingerData> _fingers;
    glm::quat _rawRotation;
@@ -205,6 +208,7 @@ private:
    HandData* _owningHandData;

    bool _isCollidingWithVoxel; /// Whether the finger of this palm is inside a leaf voxel
+   bool _isCollidingWithPalm;

};

@@ -17,14 +17,14 @@ OctreeScriptingInterface::OctreeScriptingInterface(OctreeEditPacketSender* packe

OctreeScriptingInterface::~OctreeScriptingInterface() {
    //printf("OctreeScriptingInterface::~OctreeScriptingInterface()\n");
-   if (_managedJuridiciontListerner) {
-       //printf("OctreeScriptingInterface::~OctreeScriptingInterface() _managedJuridiciontListerner... _jurisdictionListener->terminate()\n");
+   if (_managedJurisdictionListener) {
+       //printf("OctreeScriptingInterface::~OctreeScriptingInterface() _managedJurisdictionListener... _jurisdictionListener->terminate()\n");
        _jurisdictionListener->terminate();
-       //printf("OctreeScriptingInterface::~OctreeScriptingInterface() _managedJuridiciontListerner... deleting _jurisdictionListener\n");
+       //printf("OctreeScriptingInterface::~OctreeScriptingInterface() _managedJurisdictionListener... deleting _jurisdictionListener\n");
        delete _jurisdictionListener;
    }
    if (_managedPacketSender) {
-       //printf("OctreeScriptingInterface::~OctreeScriptingInterface() _managedJuridiciontListerner... _packetSender->terminate()\n");
+       //printf("OctreeScriptingInterface::~OctreeScriptingInterface() _managedJurisdictionListener... _packetSender->terminate()\n");
        _packetSender->terminate();
        //printf("OctreeScriptingInterface::~OctreeScriptingInterface() _managedPacketSender... deleting _packetSender\n");
        delete _packetSender;
@@ -42,11 +42,11 @@ void OctreeScriptingInterface::setJurisdictionListener(JurisdictionListener* jur
void OctreeScriptingInterface::init() {
    //printf("OctreeScriptingInterface::init()\n");
    if (_jurisdictionListener) {
-       _managedJuridiciontListerner = false;
+       _managedJurisdictionListener = false;
    } else {
-       _managedJuridiciontListerner = true;
+       _managedJurisdictionListener = true;
        _jurisdictionListener = new JurisdictionListener(getServerNodeType());
-       //printf("OctreeScriptingInterface::init() _managedJuridiciontListerner=true, creating _jurisdictionListener=%p\n", _jurisdictionListener);
+       //printf("OctreeScriptingInterface::init() _managedJurisdictionListener=true, creating _jurisdictionListener=%p\n", _jurisdictionListener);
        _jurisdictionListener->initialize(true);
    }

@@ -89,7 +89,7 @@ protected:
    OctreeEditPacketSender* _packetSender;
    JurisdictionListener* _jurisdictionListener;
    bool _managedPacketSender;
-   bool _managedJuridiciontListerner;
+   bool _managedJurisdictionListener;
};

#endif /* defined(__hifi__OctreeScriptingInterface__) */