Merge branch 'master' of https://github.com/highfidelity/hifi into metavoxels
This commit is contained in: commit 7af4e4e261
16 changed files with 427 additions and 137 deletions
@@ -31,8 +31,7 @@ var toolWidth = 50;
var LASER_WIDTH = 4;
var LASER_COLOR = { red: 255, green: 0, blue: 0 };
-var LASER_LENGTH_FACTOR = 500
-;
+var LASER_LENGTH_FACTOR = 500;
var MIN_ANGULAR_SIZE = 2;
var MAX_ANGULAR_SIZE = 45;
@@ -31,7 +31,7 @@ var count=0; // iterations
var enableFlyTowardPoints = true; // some birds have a point they want to fly to
var enabledClustedFlyTowardPoints = true; // the flyToward points will be generally near each other
-var flyToFrames = 10; // number of frames the bird would like to attempt to fly to it's flyTo point
+var flyToFrames = 100; // number of frames the bird would like to attempt to fly to it's flyTo point
var PROBABILITY_OF_FLY_TOWARD_CHANGE = 0.01; // chance the bird will decide to change its flyTo point
var PROBABILITY_EACH_BIRD_WILL_FLY_TOWARD = 0.2; // chance the bird will decide to flyTo, otherwise it follows
var flyingToCount = 0; // count of birds currently flying to someplace
@@ -56,11 +56,11 @@ var PROBABILITY_TO_LEAD = 0.1; // probability a bird will choose to lead
var birds = new Array(); // array of bird state data

-var flockStartPosition = { x: 100, y: 10, z: 100};
+var flockStartPosition = MyAvatar.position;
var flockStartVelocity = { x: 0, y: 0, z: 0};
var flockStartThrust = { x: 0, y: 0, z: 0}; // slightly upward against gravity
var INITIAL_XY_VELOCITY_SCALE = 2;
-var birdRadius = 0.0625;
+var birdRadius = 0.0925;
var baseBirdColor = { red: 0, green: 255, blue: 255 };
var glidingColor = { red: 255, green: 0, blue: 0 };
var thrustUpwardColor = { red: 0, green: 255, blue: 0 };
@@ -723,7 +723,6 @@ void Application::resizeGL(int width, int height) {
    resetCamerasOnResizeGL(_myCamera, width, height);

    glViewport(0, 0, width, height); // shouldn't this account for the menu???
    _applicationOverlay.resize();

    updateProjectionMatrix();
    glLoadIdentity();
@@ -2786,6 +2785,7 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
    bool mirrorMode = (whichCamera.getInterpolatedMode() == CAMERA_MODE_MIRROR);
    {
        PerformanceTimer perfTimer("avatars");

        _avatarManager.renderAvatars(mirrorMode ? Avatar::MIRROR_RENDER_MODE : Avatar::NORMAL_RENDER_MODE, selfAvatarOnly);
    }
@@ -60,7 +60,7 @@ BuckyBalls::BuckyBalls() {

void BuckyBalls::grab(PalmData& palm, float deltaTime) {
    float penetration;
-    glm::vec3 fingerTipPosition = palm.getFingerTipPosition();
+    glm::vec3 fingerTipPosition = palm.getTipPosition();

    if (palm.getControllerButtons() & BUTTON_FWD) {
        if (!_bballIsGrabbed[palm.getSixenseID()]) {
@@ -60,9 +60,9 @@ Avatar::Avatar() :
    _mouseRayDirection(0.0f, 0.0f, 0.0f),
    _moving(false),
    _collisionGroups(0),
    _numLocalLights(2),
    _initialized(false),
    _shouldRenderBillboard(true),
    _numLocalLights(1)
    _shouldRenderBillboard(true)
{
    // we may have been created in the network thread, but we live in the main thread
    moveToThread(Application::getInstance()->thread());
@@ -89,14 +89,14 @@ void Avatar::init() {
        _localLightDirections[i] = glm::vec3(0.0f, 0.0f, 0.0f);
    }

-    glm::vec3 darkGrayColor(0.3f, 0.3f, 0.3f);
+    glm::vec3 darkGrayColor(0.4f, 0.4f, 0.4f);
    glm::vec3 greenColor(0.0f, 1.0f, 0.0f);
    glm::vec3 directionX(1.0f, 0.0f, 0.0f);
    glm::vec3 directionY(0.0f, 1.0f, 0.0f);

    // initialize local lights
    _localLightColors[0] = darkGrayColor;
-    _localLightColors[1] = greenColor;
+    _localLightColors[1] = darkGrayColor;

    _localLightDirections[0] = directionX;
    _localLightDirections[1] = directionY;
@@ -970,6 +970,11 @@ glm::quat Avatar::getJointCombinedRotation(const QString& name) const {
    return rotation;
}

void Avatar::scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const {
    //Scale a world space vector as if it was relative to the position
    positionToScale = _position + _scale * (positionToScale - _position);
}

void Avatar::setFaceModelURL(const QUrl& faceModelURL) {
    AvatarData::setFaceModelURL(faceModelURL);
    const QUrl DEFAULT_FACE_MODEL_URL = QUrl::fromLocalFile(Application::resourcesPath() + "meshes/defaultAvatar_head.fst");
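Note: the new Avatar::scaleVectorRelativeToPosition() helper rescales a world-space point about the avatar's own position, so offsets measured on an unscaled avatar stay attached once the avatar is scaled. A minimal standalone sketch of the same math with a hypothetical name (plain glm, no Avatar class):

    #include <glm/glm.hpp>

    // Rescale a world-space point about an origin, mirroring
    // positionToScale = _position + _scale * (positionToScale - _position).
    // Example: origin (0, 1, 0), scale 2, point (0, 1.5, 0) -> (0, 2, 0); the 0.5 m
    // offset above the origin becomes 1 m once the avatar is twice as large.
    glm::vec3 scaleAboutOrigin(const glm::vec3& point, const glm::vec3& origin, float scale) {
        return origin + scale * (point - origin);
    }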
@@ -152,6 +152,10 @@ public:

    glm::vec3 getAcceleration() const { return _acceleration; }
    glm::vec3 getAngularVelocity() const { return _angularVelocity; }

    /// Scales a world space position vector relative to the avatar position and scale
    /// \param vector position to be scaled. Will store the result
    void scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const;

public slots:
    void updateCollisionGroups();
@@ -130,6 +130,9 @@ void Hand::render(bool isMine, Model::RenderMode renderMode) {
void Hand::renderHandTargets(bool isMine) {
    glPushMatrix();

    MyAvatar* myAvatar = Application::getInstance()->getAvatar();
    const float avatarScale = Application::getInstance()->getAvatar()->getScale();

    const float alpha = 1.0f;
    const glm::vec3 handColor(1.0, 0.0, 0.0); // Color the hand targets red to be different than skin
@@ -142,7 +145,7 @@ void Hand::renderHandTargets(bool isMine) {
        if (!palm.isActive()) {
            continue;
        }
-        glm::vec3 targetPosition = palm.getFingerTipPosition();
+        glm::vec3 targetPosition = palm.getTipPosition();
        glPushMatrix();
        glTranslatef(targetPosition.x, targetPosition.y, targetPosition.z);
@@ -153,18 +156,23 @@ void Hand::renderHandTargets(bool isMine) {
        }
    }

-    const float PALM_BALL_RADIUS = 0.03f;
-    const float PALM_DISK_RADIUS = 0.06f;
-    const float PALM_DISK_THICKNESS = 0.01f;
-    const float PALM_FINGER_ROD_RADIUS = 0.003f;
+    const float PALM_BALL_RADIUS = 0.03f * avatarScale;
+    const float PALM_DISK_RADIUS = 0.06f * avatarScale;
+    const float PALM_DISK_THICKNESS = 0.01f * avatarScale;
+    const float PALM_FINGER_ROD_RADIUS = 0.003f * avatarScale;

    // Draw the palm ball and disk
    for (size_t i = 0; i < getNumPalms(); ++i) {
        PalmData& palm = getPalms()[i];
        if (palm.isActive()) {
            glColor4f(handColor.r, handColor.g, handColor.b, alpha);
-            glm::vec3 tip = palm.getFingerTipPosition();
+            glm::vec3 tip = palm.getTipPosition();
            glm::vec3 root = palm.getPosition();

            //Scale the positions based on avatar scale
            myAvatar->scaleVectorRelativeToPosition(tip);
            myAvatar->scaleVectorRelativeToPosition(root);

            Avatar::renderJointConnectingCone(root, tip, PALM_FINGER_ROD_RADIUS, PALM_FINGER_ROD_RADIUS);
            // Render sphere at palm/finger root
            glm::vec3 offsetFromPalm = root + palm.getNormal() * PALM_DISK_THICKNESS;
@@ -269,8 +269,7 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
    // We only need to render the overlays to a texture once, then we just render the texture on the hemisphere
    // PrioVR will only work if renderOverlay is called, calibration is connected to Application::renderingOverlay()
    applicationOverlay.renderOverlay(true);
-    const bool displayOverlays = Menu::getInstance()->isOptionChecked(MenuOption::UserInterface);

    //Bind our framebuffer object. If we are rendering the glow effect, we let the glow effect shader take care of it
    if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
        Application::getInstance()->getGlowEffect()->prepare();
@@ -325,9 +324,7 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p

        Application::getInstance()->displaySide(*_camera);

-        if (displayOverlays) {
-            applicationOverlay.displayOverlayTextureOculus(*_camera);
-        }
+        applicationOverlay.displayOverlayTextureOculus(*_camera);
    }

    //Wait till time-warp to reduce latency
@@ -443,7 +440,15 @@ void OculusManager::getEulerAngles(float& yaw, float& pitch, float& roll) {
        ovrPosef pose = ss.Predicted.Pose;
        Quatf orientation = Quatf(pose.Orientation);
        orientation.GetEulerAngles<Axis_Y, Axis_X, Axis_Z, Rotate_CCW, Handed_R>(&yaw, &pitch, &roll);
    } else {
        yaw = 0.0f;
        pitch = 0.0f;
        roll = 0.0f;
    }
#else
    yaw = 0.0f;
    pitch = 0.0f;
    roll = 0.0f;
#endif
}
@@ -459,3 +464,50 @@ QSize OculusManager::getRenderTargetSize() {
#endif
}

//Renders sixense laser pointers for UI selection in the oculus
void OculusManager::renderLaserPointers() {
#ifdef HAVE_LIBOVR
    const float PALM_TIP_ROD_RADIUS = 0.002f;

    MyAvatar* myAvatar = Application::getInstance()->getAvatar();

    //If the Oculus is enabled, we will draw a blue cursor ray

    for (size_t i = 0; i < myAvatar->getHand()->getNumPalms(); ++i) {
        PalmData& palm = myAvatar->getHand()->getPalms()[i];
        if (palm.isActive()) {
            glColor4f(0, 1, 1, 1);
            glm::vec3 tip = getLaserPointerTipPosition(&palm);
            glm::vec3 root = palm.getPosition();

            //Scale the root vector with the avatar scale
            myAvatar->scaleVectorRelativeToPosition(root);

            Avatar::renderJointConnectingCone(root, tip, PALM_TIP_ROD_RADIUS, PALM_TIP_ROD_RADIUS);
        }
    }
#endif
}

//Gets the tip position for the laser pointer
glm::vec3 OculusManager::getLaserPointerTipPosition(const PalmData* palm) {
#ifdef HAVE_LIBOVR
    const ApplicationOverlay& applicationOverlay = Application::getInstance()->getApplicationOverlay();
    const float PALM_TIP_ROD_LENGTH_MULT = 40.0f;

    glm::vec3 direction = glm::normalize(palm->getTipPosition() - palm->getPosition());

    glm::vec3 position = palm->getPosition();
    //scale the position with the avatar
    Application::getInstance()->getAvatar()->scaleVectorRelativeToPosition(position);

    glm::vec3 result;
    if (applicationOverlay.calculateRayUICollisionPoint(position, direction, result)) {
        return result;
    }

    return palm->getPosition();
#endif
    return glm::vec3(0.0f);
}
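Note: getLaserPointerTipPosition() casts a ray from the palm along the palm-to-tip direction, asks the overlay where that ray hits the UI, and falls back to the palm position when there is no hit. A standalone sketch of that flow; intersectUI is a hypothetical stand-in for ApplicationOverlay::calculateRayUICollisionPoint():

    #include <functional>
    #include <glm/glm.hpp>

    // Sketch: choose the laser tip by intersecting the palm ray with the UI surface.
    glm::vec3 laserTip(const glm::vec3& palmPos, const glm::vec3& palmTip,
                       const std::function<bool(const glm::vec3&, const glm::vec3&, glm::vec3&)>& intersectUI) {
        glm::vec3 direction = glm::normalize(palmTip - palmPos);
        glm::vec3 hit;
        if (intersectUI(palmPos, direction, hit)) {
            return hit;      // the laser ends on the UI surface
        }
        return palmPos;      // no hit: fall back to the palm position, as the diff does
    }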
@@ -23,6 +23,7 @@
const float DEFAULT_OCULUS_UI_ANGULAR_SIZE = 72.0f;

class Camera;
class PalmData;

/// Handles interaction with the Oculus Rift.
class OculusManager {
@@ -41,6 +42,10 @@ public:
    /// param \roll[out] roll in radians
    static void getEulerAngles(float& yaw, float& pitch, float& roll);
    static QSize getRenderTargetSize();

    /// Renders a laser pointer for UI picking
    static void renderLaserPointers();
    static glm::vec3 getLaserPointerTipPosition(const PalmData* palm);

private:
#ifdef HAVE_LIBOVR
@@ -15,6 +15,7 @@

#include "Application.h"
#include "SixenseManager.h"
#include "devices/OculusManager.h"
#include "UserActivityLogger.h"

#ifdef HAVE_SIXENSE
@@ -172,8 +173,10 @@ void SixenseManager::update(float deltaTime) {
            // Use a velocity sensitive filter to damp small motions and preserve large ones with
            // no latency.
            float velocityFilter = glm::clamp(1.0f - glm::length(rawVelocity), 0.0f, 1.0f);
-            palm->setRawPosition(palm->getRawPosition() * velocityFilter + position * (1.0f - velocityFilter));
-            palm->setRawRotation(safeMix(palm->getRawRotation(), rotation, 1.0f - velocityFilter));
+            position = palm->getRawPosition() * velocityFilter + position * (1.0f - velocityFilter);
+            rotation = safeMix(palm->getRawRotation(), rotation, 1.0f - velocityFilter);
+            palm->setRawPosition(position);
+            palm->setRawRotation(rotation);
        } else {
            palm->setRawPosition(position);
            palm->setRawRotation(rotation);
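Note: the filter weights the previous raw pose more heavily when the controller is nearly still (velocityFilter close to 1) and passes fast motion straight through (velocityFilter close to 0), so jitter is damped without adding latency to large movements. A standalone sketch of the same blend on a single scalar, with hypothetical names:

    #include <algorithm>

    // Velocity-sensitive low-pass filter: small motions are damped, large ones pass through.
    float filterSample(float previous, float current, float speed) {
        float velocityFilter = std::min(std::max(1.0f - speed, 0.0f), 1.0f); // still -> 1, fast -> 0
        return previous * velocityFilter + current * (1.0f - velocityFilter);
    }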
@@ -366,9 +369,7 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
    MyAvatar* avatar = application->getAvatar();
    QGLWidget* widget = application->getGLWidget();
    QPoint pos;
-    // Get directon relative to avatar orientation
-    glm::vec3 direction = glm::inverse(avatar->getOrientation()) * palm->getFingerDirection();

    Qt::MouseButton bumperButton;
    Qt::MouseButton triggerButton;
@@ -380,19 +381,27 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
        triggerButton = Qt::LeftButton;
    }

-    // Get the angles, scaled between (-0.5,0.5)
-    float xAngle = (atan2(direction.z, direction.x) + M_PI_2);
-    float yAngle = 0.5f - ((atan2(direction.z, direction.y) + M_PI_2));
+    if (OculusManager::isConnected()) {
+        pos = application->getApplicationOverlay().getOculusPalmClickLocation(palm);
+    } else {
+        // Get directon relative to avatar orientation
+        glm::vec3 direction = glm::inverse(avatar->getOrientation()) * palm->getFingerDirection();

-    // Get the pixel range over which the xAngle and yAngle are scaled
-    float cursorRange = widget->width() * getCursorPixelRangeMult();
+        // Get the angles, scaled between (-0.5,0.5)
+        float xAngle = (atan2(direction.z, direction.x) + M_PI_2);
+        float yAngle = 0.5f - ((atan2(direction.z, direction.y) + M_PI_2));

-    pos.setX(widget->width() / 2.0f + cursorRange * xAngle);
-    pos.setY(widget->height() / 2.0f + cursorRange * yAngle);
+        // Get the pixel range over which the xAngle and yAngle are scaled
+        float cursorRange = widget->width() * getCursorPixelRangeMult();

+        pos.setX(widget->width() / 2.0f + cursorRange * xAngle);
+        pos.setY(widget->height() / 2.0f + cursorRange * yAngle);
+    }

    //If we are off screen then we should stop processing, and if a trigger or bumper is pressed,
    //we should unpress them.
-    if (pos.x() < 0 || pos.x() > widget->width() || pos.y() < 0 || pos.y() > widget->height()) {
+    if (pos.x() == INT_MAX) {
        if (_bumperPressed[index]) {
            QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, bumperButton, bumperButton, 0);
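Note: with this change the Oculus path gets its cursor position from getOculusPalmClickLocation(), while the desktop path still maps the palm's finger direction to pixels: two atan2 angles, roughly in (-0.5, 0.5), are spread over a cursor range centred on the screen. A standalone sketch of that direction-to-pixel mapping with hypothetical names:

    #include <cmath>
    #include <glm/glm.hpp>

    // Map an avatar-relative finger direction to a screen position (sketch of the non-Oculus branch).
    void directionToPixel(const glm::vec3& direction, int width, int height, float cursorRange,
                          float& xPixel, float& yPixel) {
        const float HALF_PI = 1.57079632679f;
        float xAngle = atan2f(direction.z, direction.x) + HALF_PI;
        float yAngle = 0.5f - (atan2f(direction.z, direction.y) + HALF_PI);
        xPixel = width / 2.0f + cursorRange * xAngle;   // spread the angle around the screen centre
        yPixel = height / 2.0f + cursorRange * yAngle;
    }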
@@ -40,6 +40,7 @@ ApplicationOverlay::ApplicationOverlay() :
    _framebufferObject(NULL),
    _textureFov(DEFAULT_OCULUS_UI_ANGULAR_SIZE * RADIANS_PER_DEGREE),
    _alpha(1.0f),
+    _oculusuiRadius(1.0f),
    _crosshairTexture(0) {

    memset(_reticleActive, 0, sizeof(_reticleActive));
@@ -164,7 +165,7 @@ void ApplicationOverlay::displayOverlayTexture() {
}

void ApplicationOverlay::computeOculusPickRay(float x, float y, glm::vec3& direction) const {
-    glm::quat rot = Application::getInstance()->getAvatar()->getOrientation();
+    MyAvatar* myAvatar = Application::getInstance()->getAvatar();

    //invert y direction
    y = 1.0 - y;
@@ -176,8 +177,11 @@ void ApplicationOverlay::computeOculusPickRay(float x, float y, glm::vec3& direc
    float dist = sqrt(x * x + y * y);
    float z = -sqrt(1.0f - dist * dist);

+    glm::vec3 relativePosition = myAvatar->getHead()->calculateAverageEyePosition() +
+        glm::normalize(myAvatar->getOrientation() * glm::vec3(x, y, z));

-    //Rotate the UI pick ray by the avatar orientation
-    direction = glm::normalize(rot * glm::vec3(x, y, z));
+    direction = glm::normalize(relativePosition - Application::getInstance()->getCamera()->getPosition());
}

// Calculates the click location on the screen by taking into account any
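Note: instead of rotating the hemisphere point directly into a direction, the new code places that point one unit out from the average eye position and aims the ray from the camera toward it, so the pick ray stays correct when camera and eyes are not co-located. A standalone sketch of that construction, assuming x and y have already been scaled onto the unit hemisphere:

    #include <cmath>
    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    // Sketch: world-space pick direction for a point (x, y) on the spherical overlay.
    glm::vec3 oculusPickDirection(float x, float y, const glm::quat& avatarOrientation,
                                  const glm::vec3& eyePosition, const glm::vec3& cameraPosition) {
        float z = -sqrtf(1.0f - (x * x + y * y));                 // depth of the point on the unit sphere
        glm::vec3 overlayPoint = eyePosition + glm::normalize(avatarOrientation * glm::vec3(x, y, z));
        return glm::normalize(overlayPoint - cameraPosition);     // aim from the camera at the overlay point
    }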
@@ -186,7 +190,7 @@ void ApplicationOverlay::getClickLocation(int &x, int &y) const {
    int dx;
    int dy;
    const float xRange = MAGNIFY_WIDTH * MAGNIFY_MULT / 2.0f;
-    const float yRange = MAGNIFY_WIDTH * MAGNIFY_MULT / 2.0f;
+    const float yRange = MAGNIFY_HEIGHT * MAGNIFY_MULT / 2.0f;

    //Loop through all magnification windows
    for (int i = 0; i < NUMBER_OF_MAGNIFIERS; i++) {
@@ -204,6 +208,126 @@ void ApplicationOverlay::getClickLocation(int &x, int &y) const {
    }
}

//Checks if the given ray intersects the sphere at the origin. result will store a multiplier that should
//be multiplied by dir and added to origin to get the location of the collision
bool raySphereIntersect(const glm::vec3 &dir, const glm::vec3 &origin, float r, float* result)
{
    //Source: http://wiki.cgsociety.org/index.php/Ray_Sphere_Intersection

    //Compute A, B and C coefficients
    float a = glm::dot(dir, dir);
    float b = 2 * glm::dot(dir, origin);
    float c = glm::dot(origin, origin) - (r * r);

    //Find discriminant
    float disc = b * b - 4 * a * c;

    // if discriminant is negative there are no real roots, so return
    // false as ray misses sphere
    if (disc < 0) {
        return false;
    }

    // compute q as described above
    float distSqrt = sqrtf(disc);
    float q;
    if (b < 0) {
        q = (-b - distSqrt) / 2.0;
    } else {
        q = (-b + distSqrt) / 2.0;
    }

    // compute t0 and t1
    float t0 = q / a;
    float t1 = c / q;

    // make sure t0 is smaller than t1
    if (t0 > t1) {
        // if t0 is bigger than t1 swap them around
        float temp = t0;
        t0 = t1;
        t1 = temp;
    }

    // if t1 is less than zero, the object is in the ray's negative direction
    // and consequently the ray misses the sphere
    if (t1 < 0) {
        return false;
    }

    // if t0 is less than zero, the intersection point is at t1
    if (t0 < 0) {
        *result = t1;
        return true;
    } else { // else the intersection point is at t0
        *result = t0;
        return true;
    }
}
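Note: raySphereIntersect() solves the quadratic |origin + t*dir|^2 = r^2 and stores the nearest non-negative root in *result. A quick hypothetical usage check of the expected behaviour (assuming the function is visible where this runs):

    // A ray starting at (0, 0, -2) and pointing along +z toward a unit sphere at the
    // origin should first hit the sphere at t = 1, i.e. at the point (0, 0, -1).
    float t = 0.0f;
    bool hit = raySphereIntersect(glm::vec3(0.0f, 0.0f, 1.0f),   // dir
                                  glm::vec3(0.0f, 0.0f, -2.0f),  // origin
                                  1.0f, &t);
    // hit == true and t == 1.0f, so origin + dir * t == (0, 0, -1).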
//Caculate the click location using one of the sixense controllers. Scale is not applied
QPoint ApplicationOverlay::getOculusPalmClickLocation(const PalmData *palm) const {

    Application* application = Application::getInstance();
    QGLWidget* glWidget = application->getGLWidget();
    MyAvatar* myAvatar = application->getAvatar();

    glm::vec3 tip = OculusManager::getLaserPointerTipPosition(palm);
    glm::vec3 eyePos = myAvatar->getHead()->calculateAverageEyePosition();
    glm::quat orientation = glm::inverse(myAvatar->getOrientation());
    glm::vec3 dir = orientation * glm::normalize(application->getCamera()->getPosition() - tip); //direction of ray goes towards camera
    glm::vec3 tipPos = orientation * (tip - eyePos);

    QPoint rv;

    float t;

    //We back the ray up by dir to ensure that it will not start inside the UI.
    glm::vec3 adjustedPos = tipPos - dir;
    //Find intersection of crosshair ray.
    if (raySphereIntersect(dir, adjustedPos, _oculusuiRadius * myAvatar->getScale(), &t)){
        glm::vec3 collisionPos = adjustedPos + dir * t;
        //Normalize it in case its not a radius of 1
        collisionPos = glm::normalize(collisionPos);
        //If we hit the back hemisphere, mark it as not a collision
        if (collisionPos.z > 0) {
            rv.setX(INT_MAX);
            rv.setY(INT_MAX);
        } else {

            float u = asin(collisionPos.x) / (_textureFov)+0.5f;
            float v = 1.0 - (asin(collisionPos.y) / (_textureFov)+0.5f);

            rv.setX(u * glWidget->width());
            rv.setY(v * glWidget->height());
        }
    } else {
        //if they did not click on the overlay, just set the coords to INT_MAX
        rv.setX(INT_MAX);
        rv.setY(INT_MAX);
    }
    return rv;
}
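Note: once the crosshair ray hits the normalized overlay sphere, the hit point is mapped back to overlay pixels with the inverse of the hemisphere projection: asin() recovers the angular offset, which is divided by the texture's angular size and re-centred. A standalone sketch of that mapping with hypothetical names:

    #include <cmath>
    #include <glm/glm.hpp>

    // Sketch: convert a point on the unit overlay sphere to overlay pixel coordinates,
    // mirroring the asin() mapping above; back-hemisphere points are rejected by the caller.
    void spherePointToPixel(const glm::vec3& pointOnUnitSphere, float textureFov,
                            int widgetWidth, int widgetHeight, float& xPixel, float& yPixel) {
        float u = asinf(pointOnUnitSphere.x) / textureFov + 0.5f;
        float v = 1.0f - (asinf(pointOnUnitSphere.y) / textureFov + 0.5f);
        xPixel = u * widgetWidth;
        yPixel = v * widgetHeight;
    }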
//Finds the collision point of a world space ray
bool ApplicationOverlay::calculateRayUICollisionPoint(const glm::vec3& position, const glm::vec3& direction, glm::vec3& result) const {
    Application* application = Application::getInstance();
    MyAvatar* myAvatar = application->getAvatar();

    glm::quat orientation = myAvatar->getOrientation();

    glm::vec3 relativePosition = orientation * (position - myAvatar->getHead()->calculateAverageEyePosition());
    glm::vec3 relativeDirection = orientation * direction;

    float t;
    if (raySphereIntersect(relativeDirection, relativePosition, _oculusuiRadius * myAvatar->getScale(), &t)){
        result = position + direction * t;
        return true;
    }

    return false;
}

// Draws the FBO texture for Oculus rift.
void ApplicationOverlay::displayOverlayTextureOculus(Camera& whichCamera) {
@@ -214,41 +338,37 @@ void ApplicationOverlay::displayOverlayTextureOculus(Camera& whichCamera) {
    Application* application = Application::getInstance();

    MyAvatar* myAvatar = application->getAvatar();
    const glm::vec3& viewMatrixTranslation = application->getViewMatrixTranslation();

    //Render the sixense lasers
    OculusManager::renderLaserPointers();

    glActiveTexture(GL_TEXTURE0);

    glEnable(GL_BLEND);
    glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
    glBindTexture(GL_TEXTURE_2D, getFramebufferObject()->texture());
    glEnable(GL_DEPTH_TEST);
    glDisable(GL_LIGHTING);
    glEnable(GL_TEXTURE_2D);

    glBindTexture(GL_TEXTURE_2D, getFramebufferObject()->texture());

    glMatrixMode(GL_MODELVIEW);

    glPushMatrix();
    glLoadIdentity();
    // Transform to world space
    glm::quat rotation = whichCamera.getRotation();
    glm::vec3 axis2 = glm::axis(rotation);
    glRotatef(-glm::degrees(glm::angle(rotation)), axis2.x, axis2.y, axis2.z);
    glTranslatef(viewMatrixTranslation.x, viewMatrixTranslation.y, viewMatrixTranslation.z);

    // Translate to the front of the camera
    glm::vec3 pos = whichCamera.getPosition();
    glm::quat rot = myAvatar->getOrientation();
    glm::vec3 axis = glm::axis(rot);

    glTranslatef(pos.x, pos.y, pos.z);
    glRotatef(glm::degrees(glm::angle(rot)), axis.x, axis.y, axis.z);

    glDepthMask(GL_TRUE);

    glEnable(GL_ALPHA_TEST);
    glAlphaFunc(GL_GREATER, 0.01f);

    //Update and draw the magnifiers

    glPushMatrix();
    const glm::quat& orientation = myAvatar->getOrientation();
    const glm::vec3& position = myAvatar->getHead()->calculateAverageEyePosition();

    glm::mat4 rotation = glm::toMat4(orientation);

    glTranslatef(position.x, position.y, position.z);
    glMultMatrixf(&rotation[0][0]);
    for (int i = 0; i < NUMBER_OF_MAGNIFIERS; i++) {

        if (_magActive[i]) {
@@ -268,6 +388,7 @@ void ApplicationOverlay::displayOverlayTextureOculus(Camera& whichCamera) {
            renderMagnifier(_magX[i], _magY[i], _magSizeMult[i], i != MOUSE);
        }
    }
    glPopMatrix();

    glDepthMask(GL_FALSE);
    glDisable(GL_ALPHA_TEST);
@@ -275,10 +396,8 @@ void ApplicationOverlay::displayOverlayTextureOculus(Camera& whichCamera) {
    glColor4f(1.0f, 1.0f, 1.0f, _alpha);

    renderTexturedHemisphere();

-    renderControllerPointersOculus();

    glPopMatrix();
+    renderPointersOculus(whichCamera.getPosition());

    glDepthMask(GL_TRUE);
    glBindTexture(GL_TEXTURE_2D, 0);
@@ -333,7 +452,6 @@ void ApplicationOverlay::displayOverlayTexture3DTV(Camera& whichCamera, float as
    glColor4f(1.0f, 1.0f, 1.0f, _alpha);

    //Render
    // fov -= RADIANS_PER_DEGREE * 2.5f; //reduce by 5 degrees so it fits in the view
    const GLfloat distance = 1.0f;

    const GLfloat halfQuadHeight = distance * tan(fov);
@@ -405,7 +523,6 @@ void ApplicationOverlay::renderPointers() {
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, _crosshairTexture);

    if (OculusManager::isConnected() && application->getLastMouseMoveType() == QEvent::MouseMove) {
        //If we are in oculus, render reticle later
        _reticleActive[MOUSE] = true;
@@ -414,7 +531,6 @@ void ApplicationOverlay::renderPointers() {
        _mouseY[MOUSE] = application->getMouseY();
        _magX[MOUSE] = _mouseX[MOUSE];
        _magY[MOUSE] = _mouseY[MOUSE];

        _reticleActive[LEFT_CONTROLLER] = false;
        _reticleActive[RIGHT_CONTROLLER] = false;
@@ -503,13 +619,32 @@ void ApplicationOverlay::renderControllerPointers() {
        } else {
            bumperPressed[index] = false;
        }

        //if we have the oculus, we should make the cursor smaller since it will be
        //magnified
        if (OculusManager::isConnected()) {

            QPoint point = getOculusPalmClickLocation(palmData);

            _mouseX[index] = point.x();
            _mouseY[index] = point.y();

            //When button 2 is pressed we drag the mag window
            if (isPressed[index]) {
                _magActive[index] = true;
                _magX[index] = point.x();
                _magY[index] = point.y();
            }

            // If oculus is enabled, we draw the crosshairs later
            continue;
        }

        // Get directon relative to avatar orientation
        glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * palmData->getFingerDirection();

        // Get the angles, scaled between (-0.5,0.5)
-        float xAngle = (atan2(direction.z, direction.x) + M_PI_2) ;
+        float xAngle = (atan2(direction.z, direction.x) + M_PI_2);
        float yAngle = 0.5f - ((atan2(direction.z, direction.y) + M_PI_2));

        // Get the pixel range over which the xAngle and yAngle are scaled
@@ -524,24 +659,7 @@ void ApplicationOverlay::renderControllerPointers() {
            continue;
        }
        _reticleActive[index] = true;

-        //if we have the oculus, we should make the cursor smaller since it will be
-        //magnified
-        if (OculusManager::isConnected()) {
-
-            _mouseX[index] = mouseX;
-            _mouseY[index] = mouseY;
-
-            //When button 2 is pressed we drag the mag window
-            if (isPressed[index]) {
-                _magActive[index] = true;
-                _magX[index] = mouseX;
-                _magY[index] = mouseY;
-            }
-
-            // If oculus is enabled, we draw the crosshairs later
-            continue;
-        }

        const float reticleSize = 40.0f;
@@ -561,9 +679,11 @@ void ApplicationOverlay::renderControllerPointers() {
    }
}

-void ApplicationOverlay::renderControllerPointersOculus() {
+void ApplicationOverlay::renderPointersOculus(const glm::vec3& eyePos) {

    Application* application = Application::getInstance();
    QGLWidget* glWidget = application->getGLWidget();
    glm::vec3 cursorVerts[4];

    const int widgetWidth = glWidget->width();
    const int widgetHeight = glWidget->height();
@@ -572,19 +692,86 @@ void ApplicationOverlay::renderControllerPointersOculus() {

    glBindTexture(GL_TEXTURE_2D, _crosshairTexture);
    glDisable(GL_DEPTH_TEST);

    for (int i = 0; i < NUMBER_OF_MAGNIFIERS; i++) {
    glMatrixMode(GL_MODELVIEW);
    MyAvatar* myAvatar = application->getAvatar();

        //Dont render the reticle if its inactive
        if (!_reticleActive[i]) {
            continue;
        }
    //Controller Pointers
    for (int i = 0; i < (int)myAvatar->getHand()->getNumPalms(); i++) {

        float mouseX = (float)_mouseX[i];
        float mouseY = (float)_mouseY[i];
        PalmData& palm = myAvatar->getHand()->getPalms()[i];
        if (palm.isActive()) {
            glm::vec3 tip = OculusManager::getLaserPointerTipPosition(&palm);
            glm::vec3 tipPos = (tip - eyePos);

            float length = glm::length(eyePos - tip);
            float size = 0.03f * length;

            glm::vec3 up = glm::vec3(0.0, 1.0, 0.0) * size;
            glm::vec3 right = glm::vec3(1.0, 0.0, 0.0) * size;

            cursorVerts[0] = -right + up;
            cursorVerts[1] = right + up;
            cursorVerts[2] = right - up;
            cursorVerts[3] = -right - up;

            glPushMatrix();

            // objToCamProj is the vector in world coordinates from the
            // local origin to the camera projected in the XZ plane
            glm::vec3 cursorToCameraXZ(-tipPos.x, 0, -tipPos.z);
            cursorToCameraXZ = glm::normalize(cursorToCameraXZ);

            //Translate the cursor to the tip of the oculus ray
            glTranslatef(tip.x, tip.y, tip.z);

            glm::vec3 direction(0, 0, 1);
            // easy fix to determine wether the angle is negative or positive
            // for positive angles upAux will be a vector pointing in the
            // positive y direction, otherwise upAux will point downwards
            // effectively reversing the rotation.
            glm::vec3 upAux = glm::cross(direction, cursorToCameraXZ);

            // compute the angle
            float angleCosine = glm::dot(direction, cursorToCameraXZ);

            //Rotate in XZ direction
            glRotatef(acos(angleCosine) * DEGREES_PER_RADIAN, upAux[0], upAux[1], upAux[2]);

            glm::vec3 cursorToCamera = glm::normalize(-tipPos);

            // Compute the angle between cursorToCameraXZ and cursorToCamera,
            angleCosine = glm::dot(cursorToCameraXZ, cursorToCamera);

            //Rotate in Y direction
            if (cursorToCamera.y < 0) {
                glRotatef(acos(angleCosine) * DEGREES_PER_RADIAN, 1, 0, 0);
            } else {
                glRotatef(acos(angleCosine) * DEGREES_PER_RADIAN, -1, 0, 0);
            }

            glBegin(GL_QUADS);

            glColor4f(RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], _alpha);

            glTexCoord2f(0.0f, 0.0f); glVertex3f(cursorVerts[0].x, cursorVerts[0].y, cursorVerts[0].z);
            glTexCoord2f(1.0f, 0.0f); glVertex3f(cursorVerts[1].x, cursorVerts[1].y, cursorVerts[1].z);
            glTexCoord2f(1.0f, 1.0f); glVertex3f(cursorVerts[2].x, cursorVerts[2].y, cursorVerts[2].z);
            glTexCoord2f(0.0f, 1.0f); glVertex3f(cursorVerts[3].x, cursorVerts[3].y, cursorVerts[3].z);

            glEnd();

            glPopMatrix();
        }
    }

    //Mouse Pointer
    if (_reticleActive[MOUSE]) {

        float mouseX = (float)_mouseX[MOUSE];
        float mouseY = (float)_mouseY[MOUSE];
        mouseX -= reticleSize / 2;
        mouseY += reticleSize / 2;

        //Get new UV coordinates from our magnification window
        float newULeft = mouseX / widgetWidth;
        float newURight = (mouseX + reticleSize) / widgetWidth;
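Note: each controller cursor is billboarded toward the viewer in two stages: a rotation in the XZ plane about the axis cross(forward, toCameraXZ), then a pitch by the angle between the XZ-projected and full camera directions, applied about +X or -X depending on the sign of toCamera.y. A standalone sketch of computing those two angles for a cursor at tipPos relative to the eye (hypothetical function name):

    #include <cmath>
    #include <glm/glm.hpp>

    // Sketch: the two billboard angles used to face the Oculus cursor quad toward the viewer.
    void billboardAngles(const glm::vec3& tipPos, glm::vec3& yawAxis, float& yawDegrees, float& pitchDegrees) {
        const float DEGREES_PER_RADIAN = 57.2957795f;
        const glm::vec3 forward(0.0f, 0.0f, 1.0f);                 // the quad's rest normal

        glm::vec3 toCameraXZ = glm::normalize(glm::vec3(-tipPos.x, 0.0f, -tipPos.z));
        yawAxis = glm::cross(forward, toCameraXZ);                 // points up or down, fixing the sign
        yawDegrees = acosf(glm::dot(forward, toCameraXZ)) * DEGREES_PER_RADIAN;

        glm::vec3 toCamera = glm::normalize(-tipPos);
        pitchDegrees = acosf(glm::dot(toCameraXZ, toCamera)) * DEGREES_PER_RADIAN;
        // The caller rotates about +X when toCamera.y < 0 and about -X otherwise, as in the diff.
    }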
@@ -614,15 +801,22 @@ void ApplicationOverlay::renderControllerPointersOculus() {
        glBegin(GL_QUADS);

        glColor4f(RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], _alpha);

-        glTexCoord2f(0.0f, 0.0f); glVertex3f(lX, tY, -tlZ);
-        glTexCoord2f(1.0f, 0.0f); glVertex3f(rX, tY, -trZ);
-        glTexCoord2f(1.0f, 1.0f); glVertex3f(rX, bY, -brZ);
-        glTexCoord2f(0.0f, 1.0f); glVertex3f(lX, bY, -blZ);
+        const glm::quat& orientation = myAvatar->getOrientation();
+        cursorVerts[0] = orientation * glm::vec3(lX, tY, -tlZ) + eyePos;
+        cursorVerts[1] = orientation * glm::vec3(rX, tY, -trZ) + eyePos;
+        cursorVerts[2] = orientation * glm::vec3(rX, bY, -brZ) + eyePos;
+        cursorVerts[3] = orientation * glm::vec3(lX, bY, -blZ) + eyePos;
+
+        glTexCoord2f(0.0f, 0.0f); glVertex3f(cursorVerts[0].x, cursorVerts[0].y, cursorVerts[0].z);
+        glTexCoord2f(1.0f, 0.0f); glVertex3f(cursorVerts[1].x, cursorVerts[1].y, cursorVerts[1].z);
+        glTexCoord2f(1.0f, 1.0f); glVertex3f(cursorVerts[2].x, cursorVerts[2].y, cursorVerts[2].z);
+        glTexCoord2f(0.0f, 1.0f); glVertex3f(cursorVerts[3].x, cursorVerts[3].y, cursorVerts[3].z);

        glEnd();

    }

    glEnable(GL_DEPTH_TEST);
}
@@ -663,18 +857,22 @@ void ApplicationOverlay::renderMagnifier(int mouseX, int mouseY, float sizeMult,
    float newVTop = 1.0 - (newMouseY - newHeight) / widgetHeight;

    // Project our position onto the hemisphere using the UV coordinates
-    float lX = sin((newULeft - 0.5f) * _textureFov);
-    float rX = sin((newURight - 0.5f) * _textureFov);
-    float bY = sin((newVBottom - 0.5f) * _textureFov);
-    float tY = sin((newVTop - 0.5f) * _textureFov);
+    float radius = _oculusuiRadius * application->getAvatar()->getScale();
+    float radius2 = radius * radius;
+
+    float lX = radius * sin((newULeft - 0.5f) * _textureFov);
+    float rX = radius * sin((newURight - 0.5f) * _textureFov);
+    float bY = radius * sin((newVBottom - 0.5f) * _textureFov);
+    float tY = radius * sin((newVTop - 0.5f) * _textureFov);

    float blZ, tlZ, brZ, trZ;

    float dist;
    float discriminant;

    //Bottom Left
    dist = sqrt(lX * lX + bY * bY);
-    discriminant = 1.0f - dist * dist;
+    discriminant = radius2 - dist * dist;
    if (discriminant > 0) {
        blZ = sqrt(discriminant);
    } else {
@@ -682,7 +880,7 @@ void ApplicationOverlay::renderMagnifier(int mouseX, int mouseY, float sizeMult,
    }
    //Top Left
    dist = sqrt(lX * lX + tY * tY);
-    discriminant = 1.0f - dist * dist;
+    discriminant = radius2 - dist * dist;
    if (discriminant > 0) {
        tlZ = sqrt(discriminant);
    } else {
@@ -690,7 +888,7 @@ void ApplicationOverlay::renderMagnifier(int mouseX, int mouseY, float sizeMult,
    }
    //Bottom Right
    dist = sqrt(rX * rX + bY * bY);
-    discriminant = 1.0f - dist * dist;
+    discriminant = radius2 - dist * dist;
    if (discriminant > 0) {
        brZ = sqrt(discriminant);
    } else {
@@ -698,7 +896,7 @@ void ApplicationOverlay::renderMagnifier(int mouseX, int mouseY, float sizeMult,
    }
    //Top Right
    dist = sqrt(rX * rX + tY * tY);
-    discriminant = 1.0f - dist * dist;
+    discriminant = radius2 - dist * dist;
    if (discriminant > 0) {
        trZ = sqrt(discriminant);
    } else {
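Note: with the radius folded in, each corner of the magnifier quad is projected onto a hemisphere of radius r: the lateral offsets are r*sin of the angular offset, and the depth is whatever remains on the sphere, z = sqrt(r^2 - x^2 - y^2). A standalone sketch of that projection for one corner, with a hypothetical name (the fallback for a non-positive discriminant is elided in these hunks):

    #include <cmath>

    // Project one UV corner of the magnifier onto a hemisphere of the given radius.
    // u and v are texture coordinates in [0, 1]; textureFov is the overlay's angular size in radians.
    void projectCornerOntoHemisphere(float u, float v, float textureFov, float radius,
                                     float& x, float& y, float& z) {
        x = radius * sinf((u - 0.5f) * textureFov);
        y = radius * sinf((v - 0.5f) * textureFov);
        float discriminant = radius * radius - (x * x + y * y);
        z = (discriminant > 0.0f) ? sqrtf(discriminant) : 0.0f;   // clamp corners that fall off the sphere
    }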
@@ -988,9 +1186,25 @@ void ApplicationOverlay::renderTexturedHemisphere() {

    glVertexPointer(3, GL_FLOAT, sizeof(TextureVertex), (void*)0);
    glTexCoordPointer(2, GL_FLOAT, sizeof(TextureVertex), (void*)12);

    glPushMatrix();
    Application* application = Application::getInstance();
    MyAvatar* myAvatar = application->getAvatar();
    const glm::quat& orientation = myAvatar->getOrientation();
    const glm::vec3& position = myAvatar->getHead()->calculateAverageEyePosition();

    glm::mat4 rotation = glm::toMat4(orientation);

    glTranslatef(position.x, position.y, position.z);
    glMultMatrixf(&rotation[0][0]);

    const float scale = _oculusuiRadius * myAvatar->getScale();
    glScalef(scale, scale, scale);

    glDrawRangeElements(GL_TRIANGLES, 0, vertices - 1, indices, GL_UNSIGNED_SHORT, 0);

    glPopMatrix();

    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
@@ -999,17 +1213,13 @@ void ApplicationOverlay::renderTexturedHemisphere() {

}

void ApplicationOverlay::resize() {
    if (_framebufferObject != NULL) {
        delete _framebufferObject;
        _framebufferObject = NULL;
    }
    // _framebufferObject is recreated at the correct size the next time it is accessed via getFramebufferObject().
}

QOpenGLFramebufferObject* ApplicationOverlay::getFramebufferObject() {
-    if (!_framebufferObject) {
-        _framebufferObject = new QOpenGLFramebufferObject(Application::getInstance()->getGLWidget()->size());
+    QSize size = Application::getInstance()->getGLWidget()->size();
+    if (!_framebufferObject || _framebufferObject->size() != size) {
+        delete _framebufferObject;
+        _framebufferObject = new QOpenGLFramebufferObject(size);
        glBindTexture(GL_TEXTURE_2D, _framebufferObject->texture());
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
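Note: getFramebufferObject() now recreates the overlay framebuffer lazily whenever the widget size has changed, so resize() only has to drop the old object. A minimal sketch of the same lazy-recreate pattern using the Qt types from the diff (hypothetical helper name):

    #include <QOpenGLFramebufferObject>
    #include <QSize>

    // Sketch: keep a framebuffer object that always matches the current widget size.
    QOpenGLFramebufferObject* ensureFramebuffer(QOpenGLFramebufferObject*& fbo, const QSize& size) {
        if (!fbo || fbo->size() != size) {
            delete fbo;                                // deleting a null pointer is a no-op
            fbo = new QOpenGLFramebufferObject(size);  // texture filter setup elided
        }
        return fbo;
    }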
@@ -32,7 +32,9 @@ public:
    void displayOverlayTexture3DTV(Camera& whichCamera, float aspectRatio, float fov);
    void computeOculusPickRay(float x, float y, glm::vec3& direction) const;
    void getClickLocation(int &x, int &y) const;
    void resize();
+    QPoint getOculusPalmClickLocation(const PalmData *palm) const;
+    bool calculateRayUICollisionPoint(const glm::vec3& position, const glm::vec3& direction, glm::vec3& result) const;

    // Getters
    QOpenGLFramebufferObject* getFramebufferObject();
@@ -49,7 +51,7 @@ private:

    void renderPointers();
    void renderControllerPointers();
-    void renderControllerPointersOculus();
+    void renderPointersOculus(const glm::vec3& eyePos);
    void renderMagnifier(int mouseX, int mouseY, float sizeMult, bool showBorder) const;
    void renderAudioMeter();
    void renderStatsAndLogs();
@@ -69,6 +71,7 @@ private:
    float _magSizeMult[NUMBER_OF_MAGNIFIERS];

    float _alpha;
+    float _oculusuiRadius;

    GLuint _crosshairTexture;
};
@@ -108,12 +108,6 @@ glm::quat HandData::getBaseOrientation() const {
glm::vec3 HandData::getBasePosition() const {
    return _owningAvatarData->getPosition();
}

-glm::vec3 PalmData::getFingerTipPosition() const {
-    glm::vec3 fingerOffset(0.0f, 0.0f, 0.3f);
-    glm::vec3 palmOffset(0.0f, -0.08f, 0.0f);
-    return getPosition() + _owningHandData->localToWorldDirection(_rawRotation * (fingerOffset + palmOffset));
-}

glm::vec3 PalmData::getFingerDirection() const {
    const glm::vec3 LOCAL_FINGER_DIRECTION(0.0f, 0.0f, 1.0f);
@@ -140,7 +140,6 @@ public:
    void getBallHoldPosition(glm::vec3& position) const;

    // return world-frame:
-    glm::vec3 getFingerTipPosition() const;
    glm::vec3 getFingerDirection() const;
    glm::vec3 getNormal() const;
@@ -34,7 +34,6 @@ OctreeEditPacketSender::OctreeEditPacketSender() :
    _maxPendingMessages(DEFAULT_MAX_PENDING_MESSAGES),
    _releaseQueuedMessagesPending(false),
    _serverJurisdictions(NULL),
-    _sequenceNumber(0),
    _maxPacketSize(MAX_PACKET_SIZE) {
}
@@ -88,7 +87,7 @@ bool OctreeEditPacketSender::serversExist() const {

// This method is called when the edit packet layer has determined that it has a fully formed packet destined for
// a known nodeID.
-void OctreeEditPacketSender::queuePacketToNode(const QUuid& nodeUUID, const unsigned char* buffer, ssize_t length) {
+void OctreeEditPacketSender::queuePacketToNode(const QUuid& nodeUUID, unsigned char* buffer, ssize_t length) {
    NodeList* nodeList = NodeList::getInstance();

    foreach (const SharedNodePointer& node, nodeList->getNodeHash()) {
@@ -96,13 +95,18 @@ void OctreeEditPacketSender::queuePacketToNode(const QUuid& nodeUUID, const unsi
        if (node->getType() == getMyNodeType() &&
            ((node->getUUID() == nodeUUID) || (nodeUUID.isNull()))) {
            if (node->getActiveSocket()) {

+                // pack sequence number
+                int numBytesPacketHeader = numBytesForPacketHeader(reinterpret_cast<char*>(buffer));
+                unsigned char* sequenceAt = buffer + numBytesPacketHeader;
+                quint16 sequence = _outgoingSequenceNumbers[nodeUUID]++;
+                memcpy(sequenceAt, &sequence, sizeof(quint16));

                // send packet
                QByteArray packet(reinterpret_cast<const char*>(buffer), length);
                queuePacketForSending(node, packet);

-                // extract sequence number and add packet to history
-                int numBytesPacketHeader = numBytesForPacketHeader(packet);
-                const char* dataAt = reinterpret_cast<const char*>(packet.data()) + numBytesPacketHeader;
-                unsigned short int sequence = *((unsigned short int*)dataAt);
+                // add packet to history
                _sentPacketHistories[nodeUUID].packetSent(sequence, packet);

                // debugging output...
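Note: the sequence number is now stamped into the packet at send time, using a per-destination counter, and the packet is recorded in that node's SentPacketHistory under the same number. A standalone sketch of writing a 16-bit sequence number into the slot reserved just after the packet header; headerBytes is a hypothetical stand-in for numBytesForPacketHeader():

    #include <cstring>

    // Sketch: stamp a per-node sequence number into an edit packet just past its header.
    unsigned short stampSequenceNumber(unsigned char* packet, int headerBytes,
                                       unsigned short& nodeSequenceCounter) {
        unsigned short sequence = nodeSequenceCounter++;                  // one counter per destination node
        memcpy(packet + headerBytes, &sequence, sizeof(unsigned short));  // overwrite the reserved slot
        return sequence;                                                  // also used to index the history
    }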
@@ -312,11 +316,8 @@ void OctreeEditPacketSender::releaseQueuedPacket(EditPacketBuffer& packetBuffer)
void OctreeEditPacketSender::initializePacket(EditPacketBuffer& packetBuffer, PacketType type) {
    packetBuffer._currentSize = populatePacketHeader(reinterpret_cast<char*>(&packetBuffer._currentBuffer[0]), type);

-    // pack in sequence numbers
-    unsigned short int* sequenceAt = (unsigned short int*)&packetBuffer._currentBuffer[packetBuffer._currentSize];
-    *sequenceAt = _sequenceNumber;
-    packetBuffer._currentSize += sizeof(unsigned short int); // nudge past sequence
-    _sequenceNumber++;
+    // skip over sequence number for now; will be packed when packet is ready to be sent out
+    packetBuffer._currentSize += sizeof(quint16);

    // pack in timestamp
    quint64 now = usecTimestampNow();
@@ -373,5 +374,6 @@ void OctreeEditPacketSender::nodeKilled(SharedNodePointer node) {
    // TODO: add locks
    QUuid nodeUUID = node->getUUID();
    _pendingEditPackets.remove(nodeUUID);
+    _outgoingSequenceNumbers.remove(nodeUUID);
    _sentPacketHistories.remove(nodeUUID);
}
@@ -21,7 +21,7 @@
/// Used for construction of edit packets
class EditPacketBuffer {
public:
-    EditPacketBuffer() : _nodeUUID(), _currentType(PacketTypeUnknown), _currentSize(0) { }
+    EditPacketBuffer() : _nodeUUID(), _currentType(PacketTypeUnknown), _currentSize(0) { }
    EditPacketBuffer(PacketType type, unsigned char* codeColorBuffer, ssize_t length, const QUuid nodeUUID = QUuid());
    QUuid _nodeUUID;
    PacketType _currentType;
@@ -100,7 +100,7 @@ public:

protected:
    bool _shouldSend;
-    void queuePacketToNode(const QUuid& nodeID, const unsigned char* buffer, ssize_t length);
+    void queuePacketToNode(const QUuid& nodeID, unsigned char* buffer, ssize_t length);
    void queuePendingPacketToNodes(PacketType type, unsigned char* buffer, ssize_t length);
    void queuePacketToNodes(unsigned char* buffer, ssize_t length);
    void initializePacket(EditPacketBuffer& packetBuffer, PacketType type);
@@ -120,12 +120,12 @@ protected:

    NodeToJurisdictionMap* _serverJurisdictions;

-    unsigned short int _sequenceNumber;
    int _maxPacketSize;

    QMutex _releaseQueuedPacketMutex;

    // TODO: add locks for this and _pendingEditPackets
    QHash<QUuid, SentPacketHistory> _sentPacketHistories;
+    QHash<QUuid, quint16> _outgoingSequenceNumbers;
};
#endif // hifi_OctreeEditPacketSender_h