Transmitter V2 drives the hand, and rendering levels with 'l' shows the hand transmitter data. Physics will need to be tuned.
This commit is contained in: commit 1b039c3755 (parent 29889725ef)

6 changed files with 116 additions and 112 deletions
@@ -774,11 +774,20 @@ void Application::idle() {
     if (diffclock(&_lastTimeIdle, &check) > IDLE_SIMULATE_MSECS) {
 
         float deltaTime = 1.f/_fps;
 
-        // update behaviors for avatar hand movement: handControl takes mouse values as input,
-        // and gives back 3D values modulated for smooth transitioning between interaction modes.
-        _handControl.update(_mouseX, _mouseY);
-        _myAvatar.setHandMovementValues(_handControl.getValues());
+        // Use Transmitter Hand to move hand if connected, else use mouse
+        if (_myAvatar.transmitterV2IsConnected()) {
+            const float HAND_FORCE_SCALING = 0.05f;
+            const float* handAcceleration = _myAvatar.getTransmitterHandLastAcceleration();
+            _myAvatar.setHandMovementValues(glm::vec3(-handAcceleration[0] * HAND_FORCE_SCALING,
+                                                      handAcceleration[1] * HAND_FORCE_SCALING,
+                                                      handAcceleration[2] * HAND_FORCE_SCALING));
+        } else {
+            // update behaviors for avatar hand movement: handControl takes mouse values as input,
+            // and gives back 3D values modulated for smooth transitioning between interaction modes.
+            _handControl.update(_mouseX, _mouseY);
+            _myAvatar.setHandMovementValues(_handControl.getValues());
+        }
 
         // tell my avatar if the mouse is being pressed...
         _myAvatar.setMousePressed(_mousePressed);
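For reference, a minimal standalone sketch (not part of this commit) of how the new branch above maps raw hand acceleration into a hand-movement offset. HAND_FORCE_SCALING, the negated X axis, and the glm::vec3 packing are taken from the diff; the sample input values and the helper name handMovementFromAcceleration are illustrative only.

// Sketch only: mirrors the transmitter branch of Application::idle() shown above.
// Assumes glm is available, as it is in this codebase.
#include <cstdio>
#include <glm/glm.hpp>

glm::vec3 handMovementFromAcceleration(const float* handAcceleration) {
    const float HAND_FORCE_SCALING = 0.05f;                     // constant from the diff
    return glm::vec3(-handAcceleration[0] * HAND_FORCE_SCALING, // X is mirrored
                      handAcceleration[1] * HAND_FORCE_SCALING,
                      handAcceleration[2] * HAND_FORCE_SCALING);
}

int main() {
    // Illustrative reading in m/s^2 (after the conversion done in processTransmitterDataV2).
    float handAcceleration[3] = { 1.0f, -9.8f, 0.5f };
    glm::vec3 offset = handMovementFromAcceleration(handAcceleration);
    printf("hand movement offset: %f %f %f\n", offset.x, offset.y, offset.z);
    return 0;
}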
@@ -1668,6 +1677,10 @@ void Application::displayOverlay() {
     // Show detected levels from the serial I/O ADC channel sensors
     if (_displayLevels) _serialPort.renderLevels(_glWidget->width(), _glWidget->height());
 
+    // Show hand transmitter data if detected
+    if (_myAvatar.transmitterV2IsConnected()) {
+        _myAvatar.transmitterV2RenderLevels(_glWidget->width(), _glWidget->height());
+    }
     // Display stats and log text onscreen
     glLineWidth(1.0f);
     glPointSize(1.0f);
@@ -2020,13 +2033,17 @@ void* Application::networkReceive(void* args) {
                 app->_myAvatar.processTransmitterData(app->_incomingPacket, bytesReceived);
                 break;
             case PACKET_HEADER_TRANSMITTER_DATA_V2:
+                /*
                 float rotationRates[3];
                 float accelerations[3];
 
                 memcpy(rotationRates, app->_incomingPacket + 2, sizeof(rotationRates));
                 memcpy(accelerations, app->_incomingPacket + 3 + sizeof(rotationRates), sizeof(accelerations));
 
-                printf("The rotation: %f, %f, %f\n", rotationRates[0], rotationRates[1], rotationRates[2]);
+                printf("Acceleration: %f, %f, %f\n", accelerations[0], accelerations[1], accelerations[2]);
+                */
+                app->_myAvatar.processTransmitterDataV2(app->_incomingPacket, bytesReceived);
+
                 break;
             case PACKET_HEADER_MIXED_AUDIO:
                 app->_audio.addReceivedAudioToBuffer(app->_incomingPacket, bytesReceived);
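For reference, a minimal standalone sketch (not part of this commit) of the PACKET_HEADER_TRANSMITTER_DATA_V2 payload layout implied by the handler above and by Avatar::processTransmitterDataV2 further down: a few leading header bytes, then three float rotation rates, then three float accelerations. Only the memcpy offsets and the size arithmetic are taken from the diff; the packet-building side and the sample values are assumptions for illustration.

// Sketch only: build and re-parse a V2-style packet using the same offsets as the diff.
#include <cstdio>
#include <cstring>

int main() {
    float rotationRates[3] = { 0.1f, 0.2f, 0.3f };   // illustrative values (rad/sec)
    float accelerations[3] = { 0.0f, 1.0f, 0.0f };   // illustrative values (g)

    // Total size matches the check in processTransmitterDataV2:
    // 3 + sizeof(rotationRates) + sizeof(accelerations) bytes.
    unsigned char packet[3 + sizeof(rotationRates) + sizeof(accelerations)] = { 0 };
    memcpy(packet + 2, rotationRates, sizeof(rotationRates));
    memcpy(packet + 3 + sizeof(rotationRates), accelerations, sizeof(accelerations));

    // Receiving side, mirroring the memcpy offsets used above.
    float rxRotation[3];
    float rxAcceleration[3];
    memcpy(rxRotation, packet + 2, sizeof(rxRotation));
    memcpy(rxAcceleration, packet + 3 + sizeof(rxRotation), sizeof(rxAcceleration));

    printf("rotation rates: %f %f %f\n", rxRotation[0], rxRotation[1], rxRotation[2]);
    printf("accelerations:  %f %f %f\n", rxAcceleration[0], rxAcceleration[1], rxAcceleration[2]);
    return 0;
}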
@@ -10,6 +10,7 @@
 #include <vector>
 #include <lodepng.h>
 #include <SharedUtil.h>
+#include "world.h"
 #include "Avatar.h"
 #include "Head.h"
 #include "Log.h"
@@ -82,6 +83,7 @@ Avatar::Avatar(bool isMine) {
     _transmitterPackets = 0;
     _transmitterIsFirstData = true;
     _transmitterInitialReading = glm::vec3(0.f, 0.f, 0.f);
+    _transmitterV2IsConnected = false;
     _speed = 0.0;
     _pelvisStandingHeight = 0.0f;
     _displayingHead = true;
@@ -131,6 +133,7 @@ Avatar::Avatar(const Avatar &otherAvatar) {
     _transmitterHz = otherAvatar._transmitterHz;
     _transmitterInitialReading = otherAvatar._transmitterInitialReading;
     _transmitterPackets = otherAvatar._transmitterPackets;
+    _transmitterV2IsConnected = otherAvatar._transmitterV2IsConnected;
     _TEST_bigSphereRadius = otherAvatar._TEST_bigSphereRadius;
     _TEST_bigSpherePosition = otherAvatar._TEST_bigSpherePosition;
     _movedHandOffset = otherAvatar._movedHandOffset;
@@ -610,110 +613,6 @@ void Avatar::updateHandMovementAndTouching(float deltaTime) {
 
 void Avatar::updateHead(float deltaTime) {
 
-    /*
-    // Decay head back to center if turned on
-    if (_isMine && _returnHeadToCenter) {
-        // Decay back toward center
-        _headPitch *= (1.0f - DECAY * _head.returnSpringScale * 2 * deltaTime);
-        _headYaw   *= (1.0f - DECAY * _head.returnSpringScale * 2 * deltaTime);
-        _headRoll  *= (1.0f - DECAY * _head.returnSpringScale * 2 * deltaTime);
-    }
-
-    // For invensense gyro, decay only slightly when roughly centered
-    if (_isMine) {
-        const float RETURN_RANGE = 15.0;
-        const float RETURN_STRENGTH = 2.0;
-        if (fabs(_headPitch) < RETURN_RANGE) { _headPitch *= (1.0f - RETURN_STRENGTH * deltaTime); }
-        if (fabs(_headYaw)   < RETURN_RANGE) { _headYaw   *= (1.0f - RETURN_STRENGTH * deltaTime); }
-        if (fabs(_headRoll)  < RETURN_RANGE) { _headRoll  *= (1.0f - RETURN_STRENGTH * deltaTime); }
-    }
-
-    if (_head.noise) {
-        // Move toward new target
-        _headPitch += (_head.pitchTarget - _headPitch) * 10 * deltaTime; // (1.f - DECAY*deltaTime)*Pitch + ;
-        _headYaw   += (_head.yawTarget   - _headYaw  ) * 10 * deltaTime; // (1.f - DECAY*deltaTime);
-        _headRoll *= 1.f - (DECAY * deltaTime);
-    }
-
-    _head.leanForward  *= (1.f - DECAY * 30 * deltaTime);
-    _head.leanSideways *= (1.f - DECAY * 30 * deltaTime);
-
-    // Update where the avatar's eyes are
-    //
-    // First, decide if we are making eye contact or not
-    if (randFloat() < 0.005) {
-        _head.eyeContact = !_head.eyeContact;
-        _head.eyeContact = 1;
-        if (!_head.eyeContact) {
-            // If we just stopped making eye contact, move the eyes markedly away
-            _head.eyeballPitch[0] = _head.eyeballPitch[1] = _head.eyeballPitch[0] + 5.0 + (randFloat() - 0.5) * 10;
-            _head.eyeballYaw  [0] = _head.eyeballYaw  [1] = _head.eyeballYaw  [0] + 5.0 + (randFloat() - 0.5) * 5;
-        } else {
-            // If now making eye contact, turn head to look right at viewer
-            SetNewHeadTarget(0,0);
-        }
-    }
-
-    const float DEGREES_BETWEEN_VIEWER_EYES = 3;
-    const float DEGREES_TO_VIEWER_MOUTH = 7;
-
-    if (_head.eyeContact) {
-        // Should we pick a new eye contact target?
-        if (randFloat() < 0.01) {
-            // Choose where to look next
-            if (randFloat() < 0.1) {
-                _head.eyeContactTarget = MOUTH;
-            } else {
-                if (randFloat() < 0.5) _head.eyeContactTarget = LEFT_EYE; else _head.eyeContactTarget = RIGHT_EYE;
-            }
-        }
-        // Set eyeball pitch and yaw to make contact
-        float eye_target_yaw_adjust = 0;
-        float eye_target_pitch_adjust = 0;
-        if (_head.eyeContactTarget == LEFT_EYE)  eye_target_yaw_adjust   = DEGREES_BETWEEN_VIEWER_EYES;
-        if (_head.eyeContactTarget == RIGHT_EYE) eye_target_yaw_adjust   = -DEGREES_BETWEEN_VIEWER_EYES;
-        if (_head.eyeContactTarget == MOUTH)     eye_target_pitch_adjust = DEGREES_TO_VIEWER_MOUTH;
-
-        _head.eyeballPitch[0] = _head.eyeballPitch[1] = -_headPitch + eye_target_pitch_adjust;
-        _head.eyeballYaw[0]   = _head.eyeballYaw[1]   = -_headYaw   + eye_target_yaw_adjust;
-    }
-
-    if (_head.noise)
-    {
-        _headPitch += (randFloat() - 0.5) * 0.2 * _head.noiseEnvelope;
-        _headYaw   += (randFloat() - 0.5) * 0.3 * _head.noiseEnvelope;
-        //PupilSize += (randFloat() - 0.5) * 0.001 * NoiseEnvelope;
-
-        if (randFloat() < 0.005) _head.mouthWidth = MouthWidthChoices[rand() % 3];
-
-        if (!_head.eyeContact) {
-            if (randFloat() < 0.01) _head.eyeballPitch[0] = _head.eyeballPitch[1] = (randFloat() - 0.5) * 20;
-            if (randFloat() < 0.01) _head.eyeballYaw[0]   = _head.eyeballYaw[1]   = (randFloat() - 0.5) * 10;
-        }
-
-        if ((randFloat() < 0.005) && (fabs(_head.pitchTarget - _headPitch) < 1.0) && (fabs(_head.yawTarget - _headYaw) < 1.0)) {
-            SetNewHeadTarget((randFloat() - 0.5) * 20.0, (randFloat() - 0.5) * 45.0);
-        }
-
-        if (0) {
-            // Pick new target
-            _head.pitchTarget = (randFloat() - 0.5) * 45;
-            _head.yawTarget   = (randFloat() - 0.5) * 22;
-        }
-        if (randFloat() < 0.01)
-        {
-            _head.eyebrowPitch[0] = _head.eyebrowPitch[1] = BrowPitchAngle[rand() % 3];
-            _head.eyebrowRoll [0] = _head.eyebrowRoll[1]  = BrowRollAngle[rand() % 5];
-            _head.eyebrowRoll [1] *= -1;
-        }
-    }
-
-    // Update audio trailing average for rendering facial animations
-    const float AUDIO_AVERAGING_SECS = 0.05;
-    _head.averageLoudness = (1.f - deltaTime / AUDIO_AVERAGING_SECS) * _head.averageLoudness +
-                            (deltaTime / AUDIO_AVERAGING_SECS) * _audioLoudness;
-    */
 }
 
 
@@ -1425,6 +1324,84 @@ void Avatar::processTransmitterData(unsigned char* packetData, int numBytes) {
 
         }
     }
 }
+
+//
+// Process UDP data from version 2 Transmitter acting as Hand
+//
+void Avatar::processTransmitterDataV2(unsigned char* packetData, int numBytes) {
+    if (numBytes == 3 + sizeof(_transmitterHandLastRotationRates) +
+                        sizeof(_transmitterHandLastAcceleration)) {
+        memcpy(_transmitterHandLastRotationRates, packetData + 2,
+               sizeof(_transmitterHandLastRotationRates));
+        memcpy(_transmitterHandLastAcceleration, packetData + 3 +
+               sizeof(_transmitterHandLastRotationRates),
+               sizeof(_transmitterHandLastAcceleration));
+        // Convert from transmitter units to internal units
+        for (int i = 0; i < 3; i++) {
+            _transmitterHandLastRotationRates[i] *= 180.f / PI;
+            _transmitterHandLastAcceleration[i] *= GRAVITY_EARTH;
+        }
+        if (!_transmitterV2IsConnected) {
+            printf("Transmitter V2 Connected.\n");
+            _transmitterV2IsConnected = true;
+        }
+    } else {
+        printf("Transmitter V2 packet read error.\n");
+    }
+}
+
+void Avatar::transmitterV2RenderLevels(int width, int height) {
+
+    char val[50];
+    const int LEVEL_CORNER_X = 10;
+    const int LEVEL_CORNER_Y = 400;
+
+    // Draw the numeric degree/sec values from the gyros
+    sprintf(val, "Yaw %4.1f", _transmitterHandLastRotationRates[1]);
+    drawtext(LEVEL_CORNER_X, LEVEL_CORNER_Y, 0.10, 0, 1.0, 1, val, 0, 1, 0);
+    sprintf(val, "Pitch %4.1f", _transmitterHandLastRotationRates[0]);
+    drawtext(LEVEL_CORNER_X, LEVEL_CORNER_Y + 15, 0.10, 0, 1.0, 1, val, 0, 1, 0);
+    sprintf(val, "Roll %4.1f", _transmitterHandLastRotationRates[2]);
+    drawtext(LEVEL_CORNER_X, LEVEL_CORNER_Y + 30, 0.10, 0, 1.0, 1, val, 0, 1, 0);
+    sprintf(val, "X %4.3f", _transmitterHandLastAcceleration[0]);
+    drawtext(LEVEL_CORNER_X, LEVEL_CORNER_Y + 45, 0.10, 0, 1.0, 1, val, 0, 1, 0);
+    sprintf(val, "Y %4.3f", _transmitterHandLastAcceleration[1]);
+    drawtext(LEVEL_CORNER_X, LEVEL_CORNER_Y + 60, 0.10, 0, 1.0, 1, val, 0, 1, 0);
+    sprintf(val, "Z %4.3f", _transmitterHandLastAcceleration[2]);
+    drawtext(LEVEL_CORNER_X, LEVEL_CORNER_Y + 75, 0.10, 0, 1.0, 1, val, 0, 1, 0);
+
+    // Draw the levels as horizontal lines
+    const int LEVEL_CENTER = 150;
+    const float ACCEL_VIEW_SCALING = 50.f;
+    glLineWidth(2.0);
+    glColor4f(1, 1, 1, 1);
+    glBegin(GL_LINES);
+    // Gyro rates
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y - 3);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER + _transmitterHandLastRotationRates[1], LEVEL_CORNER_Y - 3);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y + 12);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER + _transmitterHandLastRotationRates[0], LEVEL_CORNER_Y + 12);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y + 27);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER + _transmitterHandLastRotationRates[2], LEVEL_CORNER_Y + 27);
+    // Acceleration
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y + 42);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER + (int)(_transmitterHandLastAcceleration[0] * ACCEL_VIEW_SCALING),
+               LEVEL_CORNER_Y + 42);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y + 57);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER + (int)(_transmitterHandLastAcceleration[1] * ACCEL_VIEW_SCALING),
+               LEVEL_CORNER_Y + 57);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y + 72);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER + (int)(_transmitterHandLastAcceleration[2] * ACCEL_VIEW_SCALING),
+               LEVEL_CORNER_Y + 72);
+
+    glEnd();
+    // Draw green vertical centerline
+    glColor4f(0, 1, 0, 0.5);
+    glBegin(GL_LINES);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y - 6);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y + 30);
+    glEnd();
+}
+
+
 
 void Avatar::setHeadFromGyros(glm::vec3* eulerAngles, glm::vec3* angularVelocity, float deltaTime, float smoothingTime) {
     //
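For reference, a minimal standalone sketch (not part of this commit) of the unit conversion performed in processTransmitterDataV2 above: rotation rates arrive in radians/sec and are converted to degrees/sec, accelerations arrive in g and are converted to m/s^2. Only the conversion factors come from the diff; the input values are made up.

// Sketch only: the transmitter-to-internal unit conversion from processTransmitterDataV2.
#include <cstdio>

int main() {
    const float PI = 3.14159265f;          // matches the project define
    const float GRAVITY_EARTH = 9.80665f;  // matches the new define in world.h

    float rotationRates[3] = { 0.5f, -1.0f, 0.25f };  // rad/sec, illustrative
    float accelerations[3] = { 0.0f,  1.0f, 0.10f };  // g, illustrative

    for (int i = 0; i < 3; i++) {
        rotationRates[i] *= 180.f / PI;      // now degrees/sec
        accelerations[i] *= GRAVITY_EARTH;   // now m/s^2
    }
    // e.g. 0.5 rad/sec -> about 28.6 deg/sec, and 1 g -> 9.80665 m/s^2
    printf("rates (deg/sec): %f %f %f\n", rotationRates[0], rotationRates[1], rotationRates[2]);
    printf("accel (m/s^2):   %f %f %f\n", accelerations[0], accelerations[1], accelerations[2]);
    return 0;
}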
@@ -127,6 +127,12 @@ public:
 
     // Related to getting transmitter UDP data used to animate the avatar hand
     void processTransmitterData(unsigned char * packetData, int numBytes);
+    void processTransmitterDataV2(unsigned char * packetData, int numBytes);
+    const bool transmitterV2IsConnected() const { return _transmitterV2IsConnected; };
+    const float* getTransmitterHandLastAcceleration() const { return _transmitterHandLastAcceleration; };
+    const float* getTransmitterHandLastRotationRates() const { return _transmitterHandLastRotationRates; };
+    void transmitterV2RenderLevels(int width, int height);
+
     float getTransmitterHz() { return _transmitterHz; };
 
     void writeAvatarDataToFile();
@@ -181,6 +187,9 @@ private:
     float _transmitterHz;
     int _transmitterPackets;
     glm::vec3 _transmitterInitialReading;
+    float _transmitterHandLastRotationRates[3];
+    float _transmitterHandLastAcceleration[3];
+    bool _transmitterV2IsConnected;
     float _pelvisStandingHeight;
     float _height;
     Balls* _balls;
@@ -125,7 +125,7 @@ void Oscilloscope::render(int x, int y) {
     glColor3f(0.0f, 1.0f ,1.0f);
     glDrawArrays(GL_LINE_STRIP, MAX_SAMPLES * 1, usedWidth);
     glColor3f(0.0f, 1.0f ,1.0f);
     glDrawArrays(GL_LINE_STRIP, MAX_SAMPLES * 2, usedWidth);
     glDisableClientState(GL_VERTEX_ARRAY);
     glPopMatrix();
 }
@@ -187,7 +187,7 @@ void SerialInterface::readData() {
     convertHexToInt(sensorBuffer + 10, accelYRate);
     convertHexToInt(sensorBuffer + 14, accelXRate);
 
-    const float LSB_TO_METERS_PER_SECOND2 = 1.f / 16384.f * 9.80665f;
+    const float LSB_TO_METERS_PER_SECOND2 = 1.f / 16384.f * GRAVITY_EARTH;
     // From MPU-9150 register map, with setting on
     // highest resolution = +/- 2G
 
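For context on the constant above (a back-of-the-envelope check, not part of the commit): at the MPU-9150's +/- 2G full-scale setting there are 16384 LSB per g, so

    1 LSB = (1 / 16384) g = 9.80665 / 16384 m/s^2 ≈ 0.0006 m/s^2

and a raw reading of 16384 corresponds to exactly 1 g, i.e. 9.80665 m/s^2.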
@@ -15,5 +15,6 @@
 const float WORLD_SIZE = 10.0;
 #define PI 3.14159265
 #define PIf 3.14159265f
+#define GRAVITY_EARTH 9.80665f;
 
 #endif