Stephen Birarda 2014-10-09 14:36:05 -07:00
commit 6cef4dbce4
21 changed files with 473 additions and 384 deletions

View file

@ -23,7 +23,7 @@ var leapHands = (function () {
fingers,
NUM_FINGERS = 5, // 0 = thumb; ...; 4 = pinky
THUMB = 0,
NUM_FINGER_JOINTS = 3, // 0 = metacarpal(hand)-proximal(finger) joint; ...; 2 = intermediate-distal(tip) joint
NUM_FINGER_JOINTS = 3, // 0 = metacarpal(hand)-proximal(finger) joint; ...; 2 = intermediate-distal joint
MAX_HAND_INACTIVE_COUNT = 20,
calibrationStatus,
UNCALIBRATED = 0,
@ -226,8 +226,6 @@ var leapHands = (function () {
function setUp() {
// TODO: Leap Motion controller joint naming doesn't match up with skeleton joint naming; numbers are out by 1.
hands = [
{
jointName: "LeftHand",
@ -246,6 +244,9 @@ var leapHands = (function () {
{ controller: Controller.createInputController("Spatial", "joint_R_wrist") }
];
// The Leap controller's first joint is the hand-metacarpal joint but this joint's data is not used because it's too
// dependent on the model skeleton exactly matching the Leap skeleton; using just the second and subsequent joints
// seems to work better over all.
fingers = [{}, {}];
fingers[0] = [
[
@ -407,19 +408,26 @@ var leapHands = (function () {
MyAvatar.setJointModelPositionAndOrientation(hands[h].jointName, handOffset, handRotation, true);
// Finger joints ...
// TODO: 2.0 * scale factors should not be necessary; Leap Motion controller code needs investigating.
for (i = 0; i < NUM_FINGERS; i += 1) {
for (j = 0; j < NUM_FINGER_JOINTS; j += 1) {
if (fingers[h][i][j].controller !== null) {
locRotation = fingers[h][i][j].controller.getLocRotation();
if (i === THUMB) {
MyAvatar.setJointData(fingers[h][i][j].jointName,
Quat.fromPitchYawRollRadians(2.0 * side * locRotation.y, 2.0 * -locRotation.z,
2.0 * side * -locRotation.x));
locRotation = {
x: side * locRotation.y,
y: side * -locRotation.z,
z: side * -locRotation.x,
w: locRotation.w
};
} else {
MyAvatar.setJointData(fingers[h][i][j].jointName,
Quat.fromPitchYawRollRadians(2.0 * -locRotation.x, 0.0, 2.0 * -locRotation.y));
locRotation = {
x: -locRotation.x,
y: -locRotation.z,
z: -locRotation.y,
w: locRotation.w
};
}
MyAvatar.setJointData(fingers[h][i][j].jointName, locRotation);
}
}
}
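The finger-joint change above drops the Euler-angle conversion (with its 2.0 scale-factor workaround) and instead remaps the controller's local rotation components directly into the avatar joint's frame before handing the quaternion to setJointData. A minimal glm sketch of that remapping; remapFingerRotation is a hypothetical helper name, and side is the per-hand sign used in the script:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Hypothetical helper mirroring the script logic above: permute and negate the
// controller-space quaternion components so they line up with the avatar's joint frame.
// 'side' is the per-hand sign from the script; 'isThumb' selects the thumb mapping.
glm::quat remapFingerRotation(const glm::quat& locRotation, float side, bool isThumb) {
    if (isThumb) {
        return glm::quat(locRotation.w,            // w
                         side * locRotation.y,     // x
                         side * -locRotation.z,    // y
                         side * -locRotation.x);   // z
    }
    return glm::quat(locRotation.w,                // w
                     -locRotation.x,               // x
                     -locRotation.z,               // y
                     -locRotation.y);              // z
}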

View file

@ -13,16 +13,19 @@
uniform float pointScale;
void main(void) {
// the interpolated normal
varying vec4 normal;
// standard diffuse lighting
gl_FrontColor = vec4(gl_Color.rgb * (gl_LightModel.ambient.rgb + gl_LightSource[0].ambient.rgb +
gl_LightSource[0].diffuse.rgb * max(0.0, dot(gl_NormalMatrix * gl_Normal, gl_LightSource[0].position.xyz))),
0.0);
void main(void) {
// transform and store the normal for interpolation
normal = vec4(normalize(gl_NormalMatrix * gl_Normal), 0.0);
// extract the first three components of the vertex for position
gl_Position = gl_ModelViewProjectionMatrix * vec4(gl_Vertex.xyz, 1.0);
// the final component is the size in world space
gl_PointSize = pointScale * gl_Vertex.w / gl_Position.w;
// copy the color for interpolation
gl_FrontColor = vec4(gl_Color.rgb, 0.0);
}

View file

@ -153,7 +153,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_lastQueriedViewFrustum(),
_lastQueriedTime(usecTimestampNow()),
_mirrorViewRect(QRect(MIRROR_VIEW_LEFT_PADDING, MIRROR_VIEW_TOP_PADDING, MIRROR_VIEW_WIDTH, MIRROR_VIEW_HEIGHT)),
_cameraPushback(0.0f),
_scaleMirror(1.0f),
_rotateMirror(0.0f),
_raiseMirror(0.0f),
@ -605,37 +604,36 @@ void Application::paintGL() {
glEnable(GL_LINE_SMOOTH);
if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON) {
_myCamera.setTightness(0.0f); // In first person, camera follows (untweaked) head exactly without delay
if (!OculusManager::isConnected()) {
_myCamera.setTargetPosition(_myAvatar->getHead()->getEyePosition());
_myCamera.setTargetRotation(_myAvatar->getHead()->getCameraOrientation());
_myCamera.setPosition(_myAvatar->getHead()->getEyePosition());
_myCamera.setRotation(_myAvatar->getHead()->getCameraOrientation());
}
// OculusManager::display() updates camera position and rotation a bit further on.
} else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
// Note: the camera distance is set in Camera::setMode(), so we don't have to do it here.
_myCamera.setTightness(0.0f); // Camera is directly connected to head without smoothing
_myCamera.setTargetPosition(_myAvatar->getUprightHeadPosition());
static const float THIRD_PERSON_CAMERA_DISTANCE = 1.5f;
_myCamera.setPosition(_myAvatar->getUprightHeadPosition() +
_myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, 1.0f) * THIRD_PERSON_CAMERA_DISTANCE * _myAvatar->getScale());
if (OculusManager::isConnected()) {
_myCamera.setTargetRotation(_myAvatar->getWorldAlignedOrientation());
_myCamera.setRotation(_myAvatar->getWorldAlignedOrientation());
} else {
_myCamera.setTargetRotation(_myAvatar->getHead()->getOrientation());
_myCamera.setRotation(_myAvatar->getHead()->getOrientation());
}
} else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
_myCamera.setTightness(0.0f);
// Only behave like a true mirror when in the Oculus Rift
if (OculusManager::isConnected()) {
_myCamera.setDistance(MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
_myCamera.setTargetRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
_myCamera.setTargetPosition(_myAvatar->getHead()->getEyePosition() + glm::vec3(0, _raiseMirror * _myAvatar->getScale(), 0));
_myCamera.setRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
_myCamera.setPosition(_myAvatar->getHead()->getEyePosition() +
glm::vec3(0, _raiseMirror * _myAvatar->getScale(), 0) +
(_myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, _rotateMirror, 0.0f))) *
glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
} else {
_myCamera.setTightness(0.0f);
glm::vec3 eyePosition = _myAvatar->getHead()->getEyePosition();
float headHeight = eyePosition.y - _myAvatar->getPosition().y;
_myCamera.setDistance(MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
_myCamera.setTargetPosition(_myAvatar->getPosition() + glm::vec3(0, headHeight + (_raiseMirror * _myAvatar->getScale()), 0));
_myCamera.setTargetRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
_myCamera.setRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
_myCamera.setPosition(_myAvatar->getHead()->getEyePosition() +
glm::vec3(0, _raiseMirror * _myAvatar->getScale(), 0) +
(_myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, _rotateMirror, 0.0f))) *
glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
}
}
@ -659,11 +657,13 @@ void Application::paintGL() {
ViewFrustumOffset viewFrustumOffset = Menu::getInstance()->getViewFrustumOffset();
// set the camera to third-person view but offset so we can see the frustum
_viewFrustumOffsetCamera.setTargetPosition(_myCamera.getTargetPosition());
_viewFrustumOffsetCamera.setTargetRotation(_myCamera.getTargetRotation() * glm::quat(glm::radians(glm::vec3(
viewFrustumOffset.pitch, viewFrustumOffset.yaw, viewFrustumOffset.roll))));
_viewFrustumOffsetCamera.setUpShift(viewFrustumOffset.up);
_viewFrustumOffsetCamera.setDistance(viewFrustumOffset.distance);
glm::quat frustumRotation = glm::quat(glm::radians(glm::vec3(viewFrustumOffset.pitch, viewFrustumOffset.yaw, viewFrustumOffset.roll)));
_viewFrustumOffsetCamera.setPosition(_myCamera.getPosition() +
frustumRotation * glm::vec3(0.0f, viewFrustumOffset.up, -viewFrustumOffset.distance));
_viewFrustumOffsetCamera.setRotation(_myCamera.getRotation() * frustumRotation);
_viewFrustumOffsetCamera.initialize(); // force immediate snap to ideal position and orientation
_viewFrustumOffsetCamera.update(1.f/_fps);
whichCamera = &_viewFrustumOffsetCamera;
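With upShift, distance, and tightness removed, the frustum-offset camera is now placed purely from the main camera's transform. A self-contained sketch of that placement (ViewFrustumOffset fields are the ones read from the menu above; this is illustrative, not the full paintGL() path):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

struct ViewFrustumOffset { float yaw, pitch, roll, distance, up; };  // fields as used above

// Offset the debug camera from the main camera by the menu-configured frustum offset,
// matching the computation in Application::paintGL() above.
void computeFrustumOffsetCamera(const glm::vec3& cameraPosition, const glm::quat& cameraRotation,
                                const ViewFrustumOffset& offset,
                                glm::vec3& offsetPosition, glm::quat& offsetRotation) {
    glm::quat frustumRotation = glm::quat(glm::radians(glm::vec3(offset.pitch, offset.yaw, offset.roll)));
    offsetPosition = cameraPosition + frustumRotation * glm::vec3(0.0f, offset.up, -offset.distance);
    offsetRotation = cameraRotation * frustumRotation;
}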
@ -1779,10 +1779,8 @@ void Application::init() {
// TODO: move _myAvatar out of Application. Move relevant code to MyAvatar or AvatarManager
_avatarManager.init();
_myCamera.setMode(CAMERA_MODE_FIRST_PERSON);
_myCamera.setModeShiftPeriod(1.0f);
_mirrorCamera.setMode(CAMERA_MODE_MIRROR);
_mirrorCamera.setModeShiftPeriod(0.0f);
OculusManager::connect();
if (OculusManager::isConnected()) {
@ -2122,17 +2120,14 @@ void Application::cameraMenuChanged() {
if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
if (_myCamera.getMode() != CAMERA_MODE_MIRROR) {
_myCamera.setMode(CAMERA_MODE_MIRROR);
_myCamera.setModeShiftPeriod(0.0f);
}
} else if (Menu::getInstance()->isOptionChecked(MenuOption::FirstPerson)) {
if (_myCamera.getMode() != CAMERA_MODE_FIRST_PERSON) {
_myCamera.setMode(CAMERA_MODE_FIRST_PERSON);
_myCamera.setModeShiftPeriod(modeShiftPeriod);
}
} else {
if (_myCamera.getMode() != CAMERA_MODE_THIRD_PERSON) {
_myCamera.setMode(CAMERA_MODE_THIRD_PERSON);
_myCamera.setModeShiftPeriod(modeShiftPeriod);
}
}
}
@ -2962,7 +2957,7 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
}
}
bool mirrorMode = (whichCamera.getInterpolatedMode() == CAMERA_MODE_MIRROR);
bool mirrorMode = (whichCamera.getMode() == CAMERA_MODE_MIRROR);
{
PerformanceTimer perfTimer("avatars");
_avatarManager.renderAvatars(mirrorMode ? Avatar::MIRROR_RENDER_MODE : Avatar::NORMAL_RENDER_MODE,
@ -3101,30 +3096,30 @@ void Application::renderRearViewMirror(const QRect& region, bool billboard) {
bool eyeRelativeCamera = false;
if (billboard) {
_mirrorCamera.setFieldOfView(BILLBOARD_FIELD_OF_VIEW); // degrees
_mirrorCamera.setDistance(BILLBOARD_DISTANCE * _myAvatar->getScale());
_mirrorCamera.setTargetPosition(_myAvatar->getPosition());
_mirrorCamera.setPosition(_myAvatar->getPosition() +
_myAvatar->getOrientation() * glm::vec3(0.f, 0.f, -1.0f) * BILLBOARD_DISTANCE * _myAvatar->getScale());
} else if (_rearMirrorTools->getZoomLevel() == BODY) {
_mirrorCamera.setFieldOfView(MIRROR_FIELD_OF_VIEW); // degrees
_mirrorCamera.setDistance(MIRROR_REARVIEW_BODY_DISTANCE * _myAvatar->getScale());
_mirrorCamera.setTargetPosition(_myAvatar->getChestPosition());
_mirrorCamera.setPosition(_myAvatar->getChestPosition() +
_myAvatar->getOrientation() * glm::vec3(0.f, 0.f, -1.0f) * MIRROR_REARVIEW_BODY_DISTANCE * _myAvatar->getScale());
} else { // HEAD zoom level
_mirrorCamera.setFieldOfView(MIRROR_FIELD_OF_VIEW); // degrees
_mirrorCamera.setDistance(MIRROR_REARVIEW_DISTANCE * _myAvatar->getScale());
if (_myAvatar->getSkeletonModel().isActive() && _myAvatar->getHead()->getFaceModel().isActive()) {
// as a hack until we have a better way of dealing with coordinate precision issues, reposition the
// face/body so that the average eye position lies at the origin
eyeRelativeCamera = true;
_mirrorCamera.setTargetPosition(glm::vec3());
_mirrorCamera.setPosition(_myAvatar->getOrientation() * glm::vec3(0.f, 0.f, -1.0f) * MIRROR_REARVIEW_DISTANCE * _myAvatar->getScale());
} else {
_mirrorCamera.setTargetPosition(_myAvatar->getHead()->getEyePosition());
_mirrorCamera.setPosition(_myAvatar->getHead()->getEyePosition() +
_myAvatar->getOrientation() * glm::vec3(0.f, 0.f, -1.0f) * MIRROR_REARVIEW_DISTANCE * _myAvatar->getScale());
}
}
_mirrorCamera.setAspectRatio((float)region.width() / region.height());
_mirrorCamera.setTargetRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI, 0.0f)));
_mirrorCamera.setRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI, 0.0f)));
_mirrorCamera.update(1.0f/_fps);
// set the bounds of rear mirror view
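All three rear-view branches above now share one placement pattern: put the camera a fixed distance in front of a reference point along the avatar's facing direction and turn it 180 degrees to look back (the final rotation uses the avatar's world-aligned orientation). A hedged sketch of that shared pattern, with placeMirrorCamera as an illustrative helper name:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtc/constants.hpp>

// Illustrative helper: place a mirror camera 'distance' in front of 'reference' along the
// avatar's facing direction, rotated pi radians about Y so it looks back at the avatar.
void placeMirrorCamera(const glm::vec3& reference, const glm::quat& avatarOrientation,
                       float distance, glm::vec3& mirrorPosition, glm::quat& mirrorRotation) {
    mirrorPosition = reference + avatarOrientation * glm::vec3(0.0f, 0.0f, -1.0f) * distance;
    mirrorRotation = avatarOrientation * glm::quat(glm::vec3(0.0f, glm::pi<float>(), 0.0f));
}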

View file

@ -519,7 +519,6 @@ private:
QRect _mirrorViewRect;
RearMirrorTools* _rearMirrorTools;
float _cameraPushback;
glm::mat4 _untranslatedViewMatrix;
glm::vec3 _viewMatrixTranslation;
glm::mat4 _projectionMatrix;

View file

@ -20,102 +20,21 @@
#include "Util.h"
#include "devices/OculusManager.h"
const float CAMERA_FIRST_PERSON_MODE_UP_SHIFT = 0.0f;
const float CAMERA_FIRST_PERSON_MODE_DISTANCE = 0.0f;
const float CAMERA_FIRST_PERSON_MODE_TIGHTNESS = 100.0f;
const float CAMERA_INDEPENDENT_MODE_UP_SHIFT = 0.0f;
const float CAMERA_INDEPENDENT_MODE_DISTANCE = 0.0f;
const float CAMERA_INDEPENDENT_MODE_TIGHTNESS = 100.0f;
const float CAMERA_THIRD_PERSON_MODE_UP_SHIFT = -0.2f;
const float CAMERA_THIRD_PERSON_MODE_DISTANCE = 1.5f;
const float CAMERA_THIRD_PERSON_MODE_TIGHTNESS = 8.0f;
const float CAMERA_MIRROR_MODE_UP_SHIFT = 0.0f;
const float CAMERA_MIRROR_MODE_DISTANCE = 0.17f;
const float CAMERA_MIRROR_MODE_TIGHTNESS = 100.0f;
Camera::Camera() :
_needsToInitialize(true),
_mode(CAMERA_MODE_THIRD_PERSON),
_prevMode(CAMERA_MODE_THIRD_PERSON),
_frustumNeedsReshape(true),
_position(0.0f, 0.0f, 0.0f),
_idealPosition(0.0f, 0.0f, 0.0f),
_targetPosition(0.0f, 0.0f, 0.0f),
_fieldOfView(DEFAULT_FIELD_OF_VIEW_DEGREES),
_aspectRatio(16.0f/9.0f),
_nearClip(DEFAULT_NEAR_CLIP), // default
_farClip(DEFAULT_FAR_CLIP), // default
_upShift(0.0f),
_distance(0.0f),
_tightness(10.0f), // default
_previousUpShift(0.0f),
_previousDistance(0.0f),
_previousTightness(0.0f),
_newUpShift(0.0f),
_newDistance(0.0f),
_newTightness(0.0f),
_modeShift(1.0f),
_linearModeShift(0.0f),
_modeShiftPeriod(1.0f),
_scale(1.0f),
_lookingAt(0.0f, 0.0f, 0.0f),
_isKeepLookingAt(false)
_scale(1.0f)
{
}
void Camera::update(float deltaTime) {
if (_mode != CAMERA_MODE_NULL) {
// use iterative forces to push the camera towards the target position and angle
updateFollowMode(deltaTime);
}
}
// use iterative forces to keep the camera at the desired position and angle
void Camera::updateFollowMode(float deltaTime) {
if (_linearModeShift < 1.0f) {
_linearModeShift += deltaTime / _modeShiftPeriod;
if (_needsToInitialize || _linearModeShift > 1.0f) {
_linearModeShift = 1.0f;
_modeShift = 1.0f;
_upShift = _newUpShift;
_distance = _newDistance;
_tightness = _newTightness;
} else {
_modeShift = ONE_HALF - ONE_HALF * cosf(_linearModeShift * PI );
_upShift = _previousUpShift * (1.0f - _modeShift) + _newUpShift * _modeShift;
_distance = _previousDistance * (1.0f - _modeShift) + _newDistance * _modeShift;
_tightness = _previousTightness * (1.0f - _modeShift) + _newTightness * _modeShift;
}
}
// derive t from tightness
float t = _tightness * _modeShift * deltaTime;
if (t > 1.0f) {
t = 1.0f;
}
// handle keepLookingAt
if (_isKeepLookingAt) {
lookAt(_lookingAt);
}
// Update position and rotation, setting directly if tightness is 0.0
if (_needsToInitialize || (_tightness == 0.0f)) {
_rotation = _targetRotation;
_idealPosition = _targetPosition + _scale * (_rotation * glm::vec3(0.0f, _upShift, _distance));
_position = _idealPosition;
_needsToInitialize = false;
} else {
// pull rotation towards ideal
_rotation = safeMix(_rotation, _targetRotation, t);
_idealPosition = _targetPosition + _scale * (_rotation * glm::vec3(0.0f, _upShift, _distance));
_position += (_idealPosition - _position) * t;
}
void Camera::update(float deltaTime) {
return;
}
float Camera::getFarClip() const {
@ -124,137 +43,46 @@ float Camera::getFarClip() const {
: std::numeric_limits<int16_t>::max() - 1;
}
void Camera::setModeShiftPeriod (float period) {
const float MIN_PERIOD = 0.001f;
const float MAX_PERIOD = 3.0f;
_modeShiftPeriod = glm::clamp(period, MIN_PERIOD, MAX_PERIOD);
// if a zero period was requested, we clearly want to snap immediately to the target
if (period == 0.0f) {
update(MAX_PERIOD);
}
}
void Camera::setMode(CameraMode m) {
_prevMode = _mode;
void Camera::setMode(CameraMode m) {
_mode = m;
_modeShift = 0.0;
_linearModeShift = 0.0;
_previousUpShift = _upShift;
_previousDistance = _distance;
_previousTightness = _tightness;
if (_mode == CAMERA_MODE_THIRD_PERSON) {
_newUpShift = CAMERA_THIRD_PERSON_MODE_UP_SHIFT;
_newDistance = CAMERA_THIRD_PERSON_MODE_DISTANCE;
_newTightness = CAMERA_THIRD_PERSON_MODE_TIGHTNESS;
} else if (_mode == CAMERA_MODE_FIRST_PERSON) {
_newUpShift = CAMERA_FIRST_PERSON_MODE_UP_SHIFT;
_newDistance = CAMERA_FIRST_PERSON_MODE_DISTANCE;
_newTightness = CAMERA_FIRST_PERSON_MODE_TIGHTNESS;
} else if (_mode == CAMERA_MODE_MIRROR) {
_newUpShift = CAMERA_MIRROR_MODE_UP_SHIFT;
_newDistance = CAMERA_MIRROR_MODE_DISTANCE;
_newTightness = CAMERA_MIRROR_MODE_TIGHTNESS;
} else if (_mode == CAMERA_MODE_INDEPENDENT) {
_newUpShift = CAMERA_INDEPENDENT_MODE_UP_SHIFT;
_newDistance = CAMERA_INDEPENDENT_MODE_DISTANCE;
_newTightness = CAMERA_INDEPENDENT_MODE_TIGHTNESS;
}
}
void Camera::setTargetPosition(const glm::vec3& t) {
_targetPosition = t;
// handle keepLookingAt
if (_isKeepLookingAt) {
lookAt(_lookingAt);
}
}
void Camera::setTargetRotation( const glm::quat& targetRotation ) {
_targetRotation = targetRotation;
}
void Camera::setFieldOfView(float f) {
_fieldOfView = f;
_frustumNeedsReshape = true;
}
void Camera::setAspectRatio(float a) {
_aspectRatio = a;
_frustumNeedsReshape = true;
_aspectRatio = a;
}
void Camera::setNearClip(float n) {
_nearClip = n;
_frustumNeedsReshape = true;
_nearClip = n;
}
void Camera::setFarClip(float f) {
_farClip = f;
_frustumNeedsReshape = true;
_farClip = f;
}
void Camera::setEyeOffsetPosition(const glm::vec3& p) {
_eyeOffsetPosition = p;
_frustumNeedsReshape = true;
}
void Camera::setEyeOffsetOrientation(const glm::quat& o) {
_eyeOffsetOrientation = o;
_frustumNeedsReshape = true;
}
void Camera::setScale(float s) {
_scale = s;
_needsToInitialize = true;
_frustumNeedsReshape = true;
}
void Camera::initialize() {
_needsToInitialize = true;
_modeShift = 0.0;
}
// call to find out if the view frustum needs to be reshaped
bool Camera::getFrustumNeedsReshape() const {
return _frustumNeedsReshape;
}
// call this when deciding whether to render the head or not
CameraMode Camera::getInterpolatedMode() const {
const float SHIFT_THRESHOLD_INTO_FIRST_PERSON = 0.7f;
const float SHIFT_THRESHOLD_OUT_OF_FIRST_PERSON = 0.6f;
if ((_mode == CAMERA_MODE_FIRST_PERSON && _linearModeShift < SHIFT_THRESHOLD_INTO_FIRST_PERSON) ||
(_prevMode == CAMERA_MODE_FIRST_PERSON && _linearModeShift < SHIFT_THRESHOLD_OUT_OF_FIRST_PERSON)) {
return _prevMode;
}
return _mode;
}
// call this after reshaping the view frustum
void Camera::setFrustumWasReshaped() {
_frustumNeedsReshape = false;
}
void Camera::lookAt(const glm::vec3& lookAt) {
glm::vec3 up = IDENTITY_UP;
glm::mat4 lookAtMatrix = glm::lookAt(_targetPosition, lookAt, up);
glm::quat rotation = glm::quat_cast(lookAtMatrix);
rotation.w = -rotation.w; // Rosedale approved
setTargetRotation(rotation);
}
void Camera::keepLookingAt(const glm::vec3& point) {
lookAt(point);
_isKeepLookingAt = true;
_lookingAt = point;
}
CameraScriptableObject::CameraScriptableObject(Camera* camera, ViewFrustum* viewFrustum) :
_camera(camera), _viewFrustum(viewFrustum)

View file

@ -35,14 +35,9 @@ public:
void initialize(); // instantly put the camera at the ideal position and rotation.
void update( float deltaTime );
void setUpShift(float u) { _upShift = u; }
void setDistance(float d) { _distance = d; }
void setPosition(const glm::vec3& p) { _position = p; }
void setTargetPosition(const glm::vec3& t);
void setTightness(float t) { _tightness = t; }
void setTargetRotation(const glm::quat& rotation);
void setModeShiftPeriod(float r);
void setRotation(const glm::quat& rotation) { _rotation = rotation; };
void setMode(CameraMode m);
void setFieldOfView(float f);
void setAspectRatio(float a);
@ -55,10 +50,6 @@ public:
const glm::vec3& getPosition() const { return _position; }
const glm::quat& getRotation() const { return _rotation; }
CameraMode getMode() const { return _mode; }
float getModeShiftPeriod() const { return _modeShiftPeriod; }
float getDistance() const { return _distance; }
const glm::vec3& getTargetPosition() const { return _targetPosition; }
const glm::quat& getTargetRotation() const { return _targetRotation; }
float getFieldOfView() const { return _fieldOfView; }
float getAspectRatio() const { return _aspectRatio; }
float getNearClip() const { return _scale * _nearClip; }
@ -67,31 +58,11 @@ public:
const glm::quat& getEyeOffsetOrientation() const { return _eyeOffsetOrientation; }
float getScale() const { return _scale; }
CameraMode getInterpolatedMode() const;
bool getFrustumNeedsReshape() const; // call to find out if the view frustum needs to be reshaped
void setFrustumWasReshaped(); // call this after reshaping the view frustum.
// These only work on independent cameras
/// one time change to what the camera is looking at
void lookAt(const glm::vec3& value);
/// fix what the camera is looking at, and keep the camera looking at this even if position changes
void keepLookingAt(const glm::vec3& value);
/// stops the keep looking at feature, doesn't change what's being looked at, but will stop camera from
/// continuing to update its orientation to keep looking at the item
void stopLooking() { _isKeepLookingAt = false; }
private:
bool _needsToInitialize;
CameraMode _mode;
CameraMode _prevMode;
bool _frustumNeedsReshape;
glm::vec3 _position;
glm::vec3 _idealPosition;
glm::vec3 _targetPosition;
float _fieldOfView; // degrees
float _aspectRatio;
float _nearClip;
@ -99,25 +70,8 @@ private:
glm::vec3 _eyeOffsetPosition;
glm::quat _eyeOffsetOrientation;
glm::quat _rotation;
glm::quat _targetRotation;
float _upShift;
float _distance;
float _tightness;
float _previousUpShift;
float _previousDistance;
float _previousTightness;
float _newUpShift;
float _newDistance;
float _newTightness;
float _modeShift;
float _linearModeShift;
float _modeShiftPeriod;
float _scale;
glm::vec3 _lookingAt;
bool _isKeepLookingAt;
void updateFollowMode(float deltaTime);
float _scale;
};
@ -129,25 +83,13 @@ public:
public slots:
QString getMode() const;
void setMode(const QString& mode);
void setModeShiftPeriod(float r) {_camera->setModeShiftPeriod(r); }
void setPosition(const glm::vec3& value) { _camera->setTargetPosition(value);}
void setPosition(const glm::vec3& value) { _camera->setPosition(value);}
glm::vec3 getPosition() const { return _camera->getPosition(); }
void setOrientation(const glm::quat& value) { _camera->setTargetRotation(value); }
void setOrientation(const glm::quat& value) { _camera->setRotation(value); }
glm::quat getOrientation() const { return _camera->getRotation(); }
// These only work on independent cameras
/// one time change to what the camera is looking at
void lookAt(const glm::vec3& value) { _camera->lookAt(value);}
/// fix what the camera is looking at, and keep the camera looking at this even if position changes
void keepLookingAt(const glm::vec3& value) { _camera->keepLookingAt(value);}
/// stops the keep looking at feature, doesn't change what's being looked at, but will stop camera from
/// continuing to update its orientation to keep looking at the item
void stopLooking() { _camera->stopLooking();}
PickRay computePickRay(float x, float y);
private:

View file

@ -423,6 +423,10 @@ Menu::Menu() :
addCheckableActionToQMenuAndActionHash(voxelOptionsMenu, MenuOption::DontFadeOnVoxelServerChanges);
addCheckableActionToQMenuAndActionHash(voxelOptionsMenu, MenuOption::DisableAutoAdjustLOD);
QMenu* metavoxelOptionsMenu = developerMenu->addMenu("Metavoxels");
addCheckableActionToQMenuAndActionHash(metavoxelOptionsMenu, MenuOption::DisplayHermiteData, 0, false,
Application::getInstance()->getMetavoxels(), SLOT(refreshVoxelData()));
QMenu* handOptionsMenu = developerMenu->addMenu("Hands");
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlignForearmsWithWrists, 0, false);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlternateIK, 0, false);

View file

@ -374,6 +374,7 @@ namespace MenuOption {
const QString DisplayFrustum = "Display Frustum";
const QString DisplayHands = "Show Hand Info";
const QString DisplayHandTargets = "Show Hand Targets";
const QString DisplayHermiteData = "Display Hermite Data";
const QString DisplayModelBounds = "Display Model Bounds";
const QString DisplayModelElementChildProxies = "Display Model Element Children";
const QString DisplayModelElementProxy = "Display Model Element Bounds";

View file

@ -137,6 +137,18 @@ void MetavoxelSystem::render() {
emit rendering();
}
void MetavoxelSystem::refreshVoxelData() {
foreach (const SharedNodePointer& node, NodeList::getInstance()->getNodeHash()) {
if (node->getType() == NodeType::MetavoxelServer) {
QMutexLocker locker(&node->getMutex());
MetavoxelSystemClient* client = static_cast<MetavoxelSystemClient*>(node->getLinkedData());
if (client) {
QMetaObject::invokeMethod(client, "refreshVoxelData");
}
}
}
}
class RayHeightfieldIntersectionVisitor : public RayIntersectionVisitor {
public:
@ -577,6 +589,14 @@ void Augmenter::run() {
QMetaObject::invokeMethod(node->getLinkedData(), "setAugmentedData", Q_ARG(const MetavoxelData&, _data));
}
void MetavoxelSystemClient::refreshVoxelData() {
// make it look as if all the colors have changed
MetavoxelData oldData = getAugmentedData();
oldData.touch(AttributeRegistry::getInstance()->getVoxelColorAttribute());
QThreadPool::globalInstance()->start(new Augmenter(_node, _data, oldData, _remoteDataLOD));
}
void MetavoxelSystemClient::dataChanged(const MetavoxelData& oldData) {
MetavoxelClient::dataChanged(oldData);
QThreadPool::globalInstance()->start(new Augmenter(_node, _data, getAugmentedData(), _remoteDataLOD));
@ -970,12 +990,14 @@ void VoxelPoint::setNormal(const glm::vec3& normal) {
this->normal[2] = (char)(normal.z * 127.0f);
}
VoxelBuffer::VoxelBuffer(const QVector<VoxelPoint>& vertices, const QVector<int>& indices,
VoxelBuffer::VoxelBuffer(const QVector<VoxelPoint>& vertices, const QVector<int>& indices, const QVector<glm::vec3>& hermite,
const QVector<SharedObjectPointer>& materials) :
_vertices(vertices),
_indices(indices),
_hermite(hermite),
_vertexCount(vertices.size()),
_indexCount(indices.size()),
_hermiteCount(hermite.size()),
_indexBuffer(QOpenGLBuffer::IndexBuffer),
_materials(materials) {
}
@ -1095,6 +1117,39 @@ void VoxelBuffer::render(bool cursor) {
_vertexBuffer.release();
_indexBuffer.release();
if (_hermiteCount > 0 && Menu::getInstance()->isOptionChecked(MenuOption::DisplayHermiteData)) {
if (!_hermiteBuffer.isCreated()) {
_hermiteBuffer.create();
_hermiteBuffer.bind();
_hermiteBuffer.allocate(_hermite.constData(), _hermite.size() * sizeof(glm::vec3));
_hermite.clear();
} else {
_hermiteBuffer.bind();
}
glDisableClientState(GL_COLOR_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
glVertexPointer(3, GL_FLOAT, 0, 0);
Application::getInstance()->getDeferredLightingEffect()->getSimpleProgram().bind();
glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
glNormal3f(0.0f, 1.0f, 0.0f);
glLineWidth(1.0f);
glDrawArrays(GL_LINES, 0, _hermiteCount);
DefaultMetavoxelRendererImplementation::getBaseVoxelProgram().bind();
glEnableClientState(GL_COLOR_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
_hermiteBuffer.release();
}
}
BufferDataAttribute::BufferDataAttribute(const QString& name) :
@ -1117,7 +1172,10 @@ AttributeValue BufferDataAttribute::inherit(const AttributeValue& parentValue) c
void DefaultMetavoxelRendererImplementation::init() {
if (!_pointProgram.isLinked()) {
_pointProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() + "shaders/metavoxel_point.vert");
_pointProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() +
"shaders/metavoxel_point.vert");
_pointProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() +
"shaders/metavoxel_voxel_base.frag");
_pointProgram.link();
_pointProgram.bind();
@ -1560,18 +1618,19 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
if (!info.isLeaf) {
return DEFAULT_ORDER;
}
VoxelBuffer* buffer = NULL;
BufferData* buffer = NULL;
VoxelColorDataPointer color = info.inputValues.at(0).getInlineValue<VoxelColorDataPointer>();
VoxelMaterialDataPointer material = info.inputValues.at(1).getInlineValue<VoxelMaterialDataPointer>();
VoxelHermiteDataPointer hermite = info.inputValues.at(2).getInlineValue<VoxelHermiteDataPointer>();
if (color && material && hermite) {
if (color && hermite) {
QVector<VoxelPoint> vertices;
QVector<int> indices;
QVector<glm::vec3> hermiteSegments;
// see http://www.frankpetterson.com/publications/dualcontour/dualcontour.pdf for a description of the
// dual contour algorithm for generating meshes from voxel data using Hermite-tagged edges
const QVector<QRgb>& colorContents = color->getContents();
const QByteArray& materialContents = material->getContents();
const QVector<QRgb>& hermiteContents = hermite->getContents();
int size = color->getSize();
int area = size * size;
@ -1589,7 +1648,7 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
int hermiteStride = hermite->getSize() * VoxelHermiteData::EDGE_COUNT;
int hermiteArea = hermiteStride * hermite->getSize();
const char* materialData = materialContents.constData();
const char* materialData = material ? material->getContents().constData() : NULL;
// as we scan down the cube generating vertices between grid points, we remember the indices of the last
// (element, line, section--x, y, z) so that we can connect generated vertices as quads
@ -1605,6 +1664,7 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
float highest = size - 1.0f;
float scale = info.size / highest;
const int ALPHA_OFFSET = 24;
bool displayHermite = Menu::getInstance()->isOptionChecked(MenuOption::DisplayHermiteData);
for (int z = 0; z < expanded; z++) {
const QRgb* colorY = colorZ;
for (int y = 0; y < expanded; y++) {
@ -1667,7 +1727,8 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
int clampedX = qMax(x - 1, 0), clampedY = qMax(y - 1, 0), clampedZ = qMax(z - 1, 0);
const QRgb* hermiteBase = hermiteData + clampedZ * hermiteArea + clampedY * hermiteStride +
clampedX * VoxelHermiteData::EDGE_COUNT;
const char* materialBase = materialData + clampedZ * area + clampedY * size + clampedX;
const char* materialBase = materialData ?
(materialData + clampedZ * area + clampedY * size + clampedX) : NULL;
int crossingCount = 0;
if (middleX) {
if (alpha0 != alpha1) {
@ -1676,10 +1737,10 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
crossing.normal = unpackNormal(hermite);
if (alpha0 == 0) {
crossing.color = colorX[1];
crossing.material = materialBase[1];
crossing.material = materialBase ? materialBase[1] : 0;
} else {
crossing.color = colorX[0];
crossing.material = materialBase[0];
crossing.material = materialBase ? materialBase[0] : 0;
}
crossing.point = glm::vec3(qAlpha(hermite) * EIGHT_BIT_MAXIMUM_RECIPROCAL, 0.0f, 0.0f);
crossing.axis = 0;
@ -1691,10 +1752,10 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
crossing.normal = unpackNormal(hermite);
if (alpha1 == 0) {
crossing.color = colorX[offset3];
crossing.material = materialBase[offset3];
crossing.material = materialBase ? materialBase[offset3] : 0;
} else {
crossing.color = colorX[1];
crossing.material = materialBase[1];
crossing.material = materialBase ? materialBase[1] : 0;
}
crossing.point = glm::vec3(1.0f, qAlpha(hermite) * EIGHT_BIT_MAXIMUM_RECIPROCAL, 0.0f);
crossing.axis = 1;
@ -1705,10 +1766,10 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
crossing.normal = unpackNormal(hermite);
if (alpha2 == 0) {
crossing.color = colorX[offset3];
crossing.material = materialBase[offset3];
crossing.material = materialBase ? materialBase[offset3] : 0;
} else {
crossing.color = colorX[size];
crossing.material = materialBase[size];
crossing.material = materialBase ? materialBase[size] : 0;
}
crossing.point = glm::vec3(qAlpha(hermite) * EIGHT_BIT_MAXIMUM_RECIPROCAL, 1.0f, 0.0f);
crossing.axis = 0;
@ -1720,10 +1781,10 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
crossing.normal = unpackNormal(hermite);
if (alpha3 == 0) {
crossing.color = colorX[offset7];
crossing.material = materialBase[offset7];
crossing.material = materialBase ? materialBase[offset7] : 0;
} else {
crossing.color = colorX[offset3];
crossing.material = materialBase[offset3];
crossing.material = materialBase ? materialBase[offset3] : 0;
}
crossing.point = glm::vec3(1.0f, 1.0f, qAlpha(hermite) * EIGHT_BIT_MAXIMUM_RECIPROCAL);
crossing.axis = 2;
@ -1734,10 +1795,10 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
crossing.normal = unpackNormal(hermite);
if (alpha5 == 0) {
crossing.color = colorX[offset7];
crossing.material = materialBase[offset7];
crossing.material = materialBase ? materialBase[offset7] : 0;
} else {
crossing.color = colorX[offset5];
crossing.material = materialBase[offset5];
crossing.material = materialBase ? materialBase[offset5] : 0;
}
crossing.point = glm::vec3(1.0f, qAlpha(hermite) * EIGHT_BIT_MAXIMUM_RECIPROCAL, 1.0f);
crossing.axis = 1;
@ -1748,10 +1809,10 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
crossing.normal = unpackNormal(hermite);
if (alpha6 == 0) {
crossing.color = colorX[offset7];
crossing.material = materialBase[offset7];
crossing.material = materialBase ? materialBase[offset7] : 0;
} else {
crossing.color = colorX[offset6];
crossing.material = materialBase[offset6];
crossing.material = materialBase ? materialBase[offset6] : 0;
}
crossing.point = glm::vec3(qAlpha(hermite) * EIGHT_BIT_MAXIMUM_RECIPROCAL, 1.0f, 1.0f);
crossing.axis = 0;
@ -1765,10 +1826,10 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
crossing.normal = unpackNormal(hermite);
if (alpha1 == 0) {
crossing.color = colorX[offset5];
crossing.material = materialBase[offset5];
crossing.material = materialBase ? materialBase[offset5] : 0;
} else {
crossing.color = colorX[1];
crossing.material = materialBase[1];
crossing.material = materialBase ? materialBase[1] : 0;
}
crossing.point = glm::vec3(1.0f, 0.0f, qAlpha(hermite) * EIGHT_BIT_MAXIMUM_RECIPROCAL);
crossing.axis = 2;
@ -1779,10 +1840,10 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
crossing.normal = unpackNormal(hermite);
if (alpha4 == 0) {
crossing.color = colorX[offset5];
crossing.material = materialBase[offset5];
crossing.material = materialBase ? materialBase[offset5] : 0;
} else {
crossing.color = colorX[area];
crossing.material = materialBase[area];
crossing.material = materialBase ? materialBase[area] : 0;
}
crossing.point = glm::vec3(qAlpha(hermite) * EIGHT_BIT_MAXIMUM_RECIPROCAL, 0.0f, 1.0f);
crossing.axis = 0;
@ -1796,10 +1857,10 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
crossing.normal = unpackNormal(hermite);
if (alpha0 == 0) {
crossing.color = colorX[size];
crossing.material = materialBase[size];
crossing.material = materialBase ? materialBase[size] : 0;
} else {
crossing.color = colorX[0];
crossing.material = materialBase[0];
crossing.material = materialBase ? materialBase[0] : 0;
}
crossing.point = glm::vec3(0.0f, qAlpha(hermite) * EIGHT_BIT_MAXIMUM_RECIPROCAL, 0.0f);
crossing.axis = 1;
@ -1811,10 +1872,10 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
crossing.normal = unpackNormal(hermite);
if (alpha2 == 0) {
crossing.color = colorX[offset6];
crossing.material = materialBase[offset6];
crossing.material = materialBase ? materialBase[offset6] : 0;
} else {
crossing.color = colorX[size];
crossing.material = materialBase[size];
crossing.material = materialBase ? materialBase[size] : 0;
}
crossing.point = glm::vec3(0.0f, 1.0f, qAlpha(hermite) * EIGHT_BIT_MAXIMUM_RECIPROCAL);
crossing.axis = 2;
@ -1825,10 +1886,10 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
crossing.normal = unpackNormal(hermite);
if (alpha4 == 0) {
crossing.color = colorX[offset6];
crossing.material = materialBase[offset6];
crossing.material = materialBase ? materialBase[offset6] : 0;
} else {
crossing.color = colorX[area];
crossing.material = materialBase[area];
crossing.material = materialBase ? materialBase[area] : 0;
}
crossing.point = glm::vec3(0.0f, qAlpha(hermite) * EIGHT_BIT_MAXIMUM_RECIPROCAL, 1.0f);
crossing.axis = 1;
@ -1841,10 +1902,10 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
crossing.normal = unpackNormal(hermite);
if (alpha0 == 0) {
crossing.color = colorX[area];
crossing.material = materialBase[area];
crossing.material = materialBase ? materialBase[area] : 0;
} else {
crossing.color = colorX[0];
crossing.material = materialBase[0];
crossing.material = materialBase ? materialBase[0] : 0;
}
crossing.point = glm::vec3(0.0f, 0.0f, qAlpha(hermite) * EIGHT_BIT_MAXIMUM_RECIPROCAL);
crossing.axis = 2;
@ -1866,6 +1927,13 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
green += qGreen(crossing.color);
blue += qBlue(crossing.color);
if (displayHermite) {
glm::vec3 start = info.minimum + (glm::vec3(clampedX, clampedY, clampedZ) +
crossing.point) * scale;
hermiteSegments.append(start);
hermiteSegments.append(start + crossing.normal * scale);
}
// when assigning a material, search for its presence and, if not found,
// place it in the first empty slot
if (crossing.material != 0) {
@ -2090,8 +2158,8 @@ int VoxelAugmentVisitor::visit(MetavoxelInfo& info) {
colorZ += area;
}
}
buffer = new VoxelBuffer(vertices, indices, material->getMaterials());
buffer = new VoxelBuffer(vertices, indices, hermiteSegments,
material ? material->getMaterials() : QVector<SharedObjectPointer>());
}
BufferDataPointer pointer(buffer);
info.outputValues[0] = AttributeValue(_outputs.at(0), encodeInline(pointer));

View file

@ -56,6 +56,10 @@ signals:
void rendering();
public slots:
void refreshVoxelData();
protected:
virtual MetavoxelClient* createClient(const SharedNodePointer& node);
@ -99,9 +103,11 @@ public:
MetavoxelData getAugmentedData();
void setRenderedAugmentedData(const MetavoxelData& data) { _renderedAugmentedData = data; }
virtual int parseData(const QByteArray& packet);
Q_INVOKABLE void refreshVoxelData();
protected:
virtual void dataChanged(const MetavoxelData& oldData);
@ -234,7 +240,7 @@ public:
class VoxelBuffer : public BufferData {
public:
VoxelBuffer(const QVector<VoxelPoint>& vertices, const QVector<int>& indices,
VoxelBuffer(const QVector<VoxelPoint>& vertices, const QVector<int>& indices, const QVector<glm::vec3>& hermite,
const QVector<SharedObjectPointer>& materials = QVector<SharedObjectPointer>());
virtual void render(bool cursor = false);
@ -243,10 +249,13 @@ private:
QVector<VoxelPoint> _vertices;
QVector<int> _indices;
QVector<glm::vec3> _hermite;
int _vertexCount;
int _indexCount;
int _hermiteCount;
QOpenGLBuffer _vertexBuffer;
QOpenGLBuffer _indexBuffer;
QOpenGLBuffer _hermiteBuffer;
QVector<SharedObjectPointer> _materials;
QVector<NetworkTexturePointer> _networkTextures;
};
@ -272,6 +281,9 @@ public:
static void init();
static ProgramObject& getPointProgram() { return _pointProgram; }
static int getPointScaleLocation() { return _pointScaleLocation; }
static ProgramObject& getBaseHeightfieldProgram() { return _baseHeightfieldProgram; }
static int getBaseHeightScaleLocation() { return _baseHeightScaleLocation; }
static int getBaseColorScaleLocation() { return _baseColorScaleLocation; }

View file

@ -1126,7 +1126,7 @@ void MyAvatar::renderBody(RenderMode renderMode, bool postLighting, float glowLe
// Render head so long as the camera isn't inside it
const Camera *camera = Application::getInstance()->getCamera();
const glm::vec3 cameraPos = camera->getPosition() + (camera->getRotation() * glm::vec3(0.0f, 0.0f, 1.0f)) * camera->getDistance();
const glm::vec3 cameraPos = camera->getPosition();
if (shouldRenderHead(cameraPos, renderMode)) {
getHead()->render(1.0f, modelRenderMode, postLighting);

View file

@ -29,6 +29,10 @@ Joystick::Joystick(const QString& name, SDL_Joystick* sdlJoystick) :
#endif
Joystick::~Joystick() {
closeJoystick();
}
void Joystick::closeJoystick() {
#ifdef HAVE_SDL
SDL_JoystickClose(_sdlJoystick);
#endif
@ -38,14 +42,12 @@ void Joystick::update() {
#ifdef HAVE_SDL
// update our current values, emit a signal when there is a change
for (int j = 0; j < getNumAxes(); j++) {
float value = glm::round(SDL_JoystickGetAxis(_sdlJoystick, j) + 0.5f) / std::numeric_limits<short>::max();
const float DEAD_ZONE = 0.1f;
float cleanValue = glm::abs(value) < DEAD_ZONE ? 0.0f : value;
if (_axes[j] != cleanValue) {
float newValue = glm::round(SDL_JoystickGetAxis(_sdlJoystick, j) + 0.5f) / std::numeric_limits<short>::max();
if (_axes[j] != newValue) {
float oldValue = _axes[j];
_axes[j] = cleanValue;
emit axisValueChanged(j, cleanValue, oldValue);
_axes[j] = newValue;
emit axisValueChanged(j, newValue, oldValue);
}
}
for (int j = 0; j < getNumButtons(); j++) {

View file

@ -37,6 +37,12 @@ public:
void update();
void closeJoystick();
#ifdef HAVE_SDL
void setSDLJoystick(SDL_Joystick* sdlJoystick) { _sdlJoystick = sdlJoystick; }
#endif
const QString& getName() const { return _name; }
const QVector<float>& getAxes() const { return _axes; }

View file

@ -311,10 +311,6 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
ovrPosef eyeRenderPose[ovrEye_Count];
_camera->setTightness(0.0f); // In first person, camera follows (untweaked) head exactly without delay
_camera->setDistance(0.0f);
_camera->setUpShift(0.0f);
glMatrixMode(GL_PROJECTION);
glPushMatrix();
@ -348,8 +344,8 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
orientation.z = eyeRenderPose[eye].Orientation.z;
orientation.w = eyeRenderPose[eye].Orientation.w;
_camera->setTargetRotation(bodyOrientation * orientation);
_camera->setTargetPosition(position + trackerPosition);
_camera->setRotation(bodyOrientation * orientation);
_camera->setPosition(position + trackerPosition);
// Store the latest left and right eye render locations for things that need to know
glm::vec3 thisEyePosition = position + trackerPosition +
@ -413,8 +409,8 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
glBindTexture(GL_TEXTURE_2D, 0);
// Update camera for use by rest of Interface.
whichCamera.setTargetPosition((_leftEyePosition + _rightEyePosition) / 2.f);
whichCamera.setTargetRotation(_camera->getTargetRotation());
whichCamera.setPosition((_leftEyePosition + _rightEyePosition) / 2.f);
whichCamera.setRotation(_camera->getRotation());
#endif
}

View file

@ -27,17 +27,10 @@ JoystickScriptingInterface& JoystickScriptingInterface::getInstance() {
JoystickScriptingInterface::JoystickScriptingInterface() :
_openJoysticks(),
_availableDeviceNames()
_availableDeviceNames(),
_isInitialized(false)
{
#ifdef HAVE_SDL
SDL_Init(SDL_INIT_JOYSTICK);
int joystickCount = SDL_NumJoysticks();
for (int i = 0; i < joystickCount; i++) {
_availableDeviceNames << SDL_JoystickName(i);
}
#endif
reset();
}
JoystickScriptingInterface::~JoystickScriptingInterface() {
@ -45,18 +38,53 @@ JoystickScriptingInterface::~JoystickScriptingInterface() {
#ifdef HAVE_SDL
SDL_Quit();
_isInitialized = false;
#endif
}
void JoystickScriptingInterface::reset() {
#ifdef HAVE_SDL
if (_isInitialized) {
_isInitialized = false;
// close all the open joysticks before we quit
foreach(Joystick* openJoystick, _openJoysticks) {
openJoystick->closeJoystick();
}
SDL_Quit();
}
bool initSuccess = (SDL_Init(SDL_INIT_JOYSTICK) == 0);
if (initSuccess) {
int joystickCount = SDL_NumJoysticks();
for (int i = 0; i < joystickCount; i++) {
_availableDeviceNames << SDL_JoystickName(i);
}
foreach(const QString& joystickName, _openJoysticks.keys()) {
_openJoysticks[joystickName]->setSDLJoystick(openSDLJoystickWithName(joystickName));
}
_isInitialized = true;
}
#endif
}
void JoystickScriptingInterface::update() {
#ifdef HAVE_SDL
PerformanceTimer perfTimer("JoystickScriptingInterface::update");
SDL_JoystickUpdate();
foreach(Joystick* joystick, _openJoysticks) {
joystick->update();
if (_isInitialized) {
PerformanceTimer perfTimer("JoystickScriptingInterface::update");
SDL_JoystickUpdate();
foreach(Joystick* joystick, _openJoysticks) {
joystick->update();
}
}
#endif
}
@ -64,17 +92,13 @@ Joystick* JoystickScriptingInterface::joystickWithName(const QString& name) {
Joystick* matchingJoystick = _openJoysticks.value(name);
#ifdef HAVE_SDL
if (!matchingJoystick) {
// we haven't opened a joystick with this name yet - enumerate our SDL devices and see if it exists
int joystickCount = SDL_NumJoysticks();
SDL_Joystick* openSDLJoystick = openSDLJoystickWithName(name);
for (int i = 0; i < joystickCount; i++) {
if (SDL_JoystickName(i) == name) {
matchingJoystick = _openJoysticks.insert(name, new Joystick(name, SDL_JoystickOpen(i))).value();
break;
}
if (openSDLJoystick) {
matchingJoystick = _openJoysticks.insert(name, new Joystick(name, openSDLJoystick)).value();
} else {
qDebug() << "No matching joystick found with name" << name << "- returning NULL pointer.";
}
qDebug() << "No matching joystick found with name" << name << "- returning NULL pointer.";
}
#endif
@ -82,3 +106,20 @@ Joystick* JoystickScriptingInterface::joystickWithName(const QString& name) {
}
#ifdef HAVE_SDL
SDL_Joystick* JoystickScriptingInterface::openSDLJoystickWithName(const QString &name) {
// we haven't opened a joystick with this name yet - enumerate our SDL devices and see if it exists
int joystickCount = SDL_NumJoysticks();
for (int i = 0; i < joystickCount; i++) {
if (SDL_JoystickName(i) == name) {
return SDL_JoystickOpen(i);
break;
}
}
return NULL;
}
#endif

View file

@ -31,13 +31,19 @@ public:
public slots:
Joystick* joystickWithName(const QString& name);
void reset();
private:
#ifdef HAVE_SDL
SDL_Joystick* openSDLJoystickWithName(const QString& name);
#endif
JoystickScriptingInterface();
~JoystickScriptingInterface();
QMap<QString, Joystick*> _openJoysticks;
QStringList _availableDeviceNames;
bool _isInitialized;
};
#endif // hifi_JoystickScriptingInterface_h

View file

@ -225,14 +225,9 @@ border-color: palette(dark); border-style: solid; border-left-width: 1px; borde
<height>60</height>
</size>
</property>
<property name="font">
<font>
<family>Helvetica,Arial,sans-serif</family>
<pointsize>14</pointsize>
</font>
</property>
<property name="styleSheet">
<string notr="true"/>
<string notr="true">font-family: Arial;
font-size: 14px;</string>
</property>
<property name="frameShape">
<enum>QFrame::NoFrame</enum>

View file

@ -115,6 +115,11 @@ void EntityCollisionSystem::updateCollisionWithEntities(EntityItem* entityA) {
return; // bail early if this entity is to be ignored...
}
// don't collide entities with unknown IDs,
if (!entityA->isKnownID()) {
return;
}
glm::vec3 penetration;
EntityItem* entityB = NULL;
@ -133,6 +138,11 @@ void EntityCollisionSystem::updateCollisionWithEntities(EntityItem* entityA) {
penetration = collision->_penetration;
entityB = static_cast<EntityItem*>(collision->_extraData);
// don't collide entities with unknown IDs,
if (!entityB->isKnownID()) {
continue; // skip this loop pass if the entity has an unknown ID
}
// NOTE: 'penetration' is the depth that 'entityA' overlaps 'entityB'. It points from A into B.
glm::vec3 penetrationInTreeUnits = penetration / (float)(TREE_SCALE);
@ -267,6 +277,12 @@ void EntityCollisionSystem::updateCollisionWithAvatars(EntityItem* entity) {
}
void EntityCollisionSystem::applyHardCollision(EntityItem* entity, const CollisionInfo& collisionInfo) {
// don't collide entities with unknown IDs,
if (!entity->isKnownID()) {
return;
}
// HALTING_* params are determined using expected acceleration of gravity over some timescale.
// This is a HACK for entities that bounce in a 1.0 gravitational field and should eventually be made more universal.
const float HALTING_ENTITY_PERIOD = 0.0167f; // ~1/60th of a second

View file

@ -1616,8 +1616,74 @@ bool VoxelColorAttribute::merge(void*& parent, void* children[], bool postRead)
maxSize = qMax(maxSize, pointer->getSize());
}
}
*(VoxelColorDataPointer*)&parent = VoxelColorDataPointer();
return maxSize == 0;
if (maxSize == 0) {
*(VoxelColorDataPointer*)&parent = VoxelColorDataPointer();
return true;
}
int size = maxSize;
int area = size * size;
QVector<QRgb> contents(area * size);
int halfSize = size / 2;
int halfSizeComplement = size - halfSize;
for (int i = 0; i < MERGE_COUNT; i++) {
VoxelColorDataPointer child = decodeInline<VoxelColorDataPointer>(children[i]);
if (!child) {
continue;
}
const QVector<QRgb>& childContents = child->getContents();
int childSize = child->getSize();
int childArea = childSize * childSize;
const int INDEX_MASK = 1;
int xIndex = i & INDEX_MASK;
const int Y_SHIFT = 1;
int yIndex = (i >> Y_SHIFT) & INDEX_MASK;
int Z_SHIFT = 2;
int zIndex = (i >> Z_SHIFT) & INDEX_MASK;
QRgb* dest = contents.data() + (zIndex * halfSize * area) + (yIndex * halfSize * size) + (xIndex * halfSize);
const QRgb* src = childContents.data();
const int MAX_ALPHA = 255;
if (childSize == size) {
// simple case: one destination value for four child values
for (int z = 0; z < halfSizeComplement; z++) {
int offset4 = (z == halfSize) ? 0 : childArea;
for (int y = 0; y < halfSizeComplement; y++) {
int offset2 = (y == halfSize) ? 0 : childSize;
int offset6 = offset4 + offset2;
for (QRgb* end = dest + halfSizeComplement; dest != end; ) {
int offset1 = (dest == end - 1) ? 0 : 1;
QRgb v0 = src[0], v1 = src[offset1], v2 = src[offset2], v3 = src[offset2 + offset1], v4 = src[offset4],
v5 = src[offset4 + offset1], v6 = src[offset6], v7 = src[offset6 + offset1];
src += (1 + offset1);
int a0 = qAlpha(v0), a1 = qAlpha(v1), a2 = qAlpha(v2), a3 = qAlpha(v3),
a4 = qAlpha(v4), a5 = qAlpha(v5), a6 = qAlpha(v6), a7 = qAlpha(v7);
if (a0 == 0) {
*dest++ = qRgba(0, 0, 0, 0);
continue;
}
int alphaTotal = a0 + a1 + a2 + a3 + a4 + a5 + a6 + a7;
*dest++ = qRgba(
(qRed(v0) * a0 + qRed(v1) * a1 + qRed(v2) * a2 + qRed(v3) * a3 +
qRed(v4) * a4 + qRed(v5) * a5 + qRed(v6) * a6 + qRed(v7) * a7) / alphaTotal,
(qGreen(v0) * a0 + qGreen(v1) * a1 + qGreen(v2) * a2 + qGreen(v3) * a3 +
qGreen(v4) * a4 + qGreen(v5) * a5 + qGreen(v6) * a6 + qGreen(v7) * a7) / alphaTotal,
(qBlue(v0) * a0 + qBlue(v1) * a1 + qBlue(v2) * a2 + qBlue(v3) * a3 +
qBlue(v4) * a4 + qBlue(v5) * a5 + qBlue(v6) * a6 + qBlue(v7) * a7) / alphaTotal,
MAX_ALPHA);
}
dest += halfSize;
src += offset2;
}
dest += halfSize * size;
src += offset4;
}
} else {
// more complex: N destination values for four child values
// ...
}
}
*(VoxelColorDataPointer*)&parent = VoxelColorDataPointer(new VoxelColorData(contents, size));
return false;
}
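The color merge above now builds a real parent block instead of always clearing it: each child's 2x2x2 texel groups are reduced to one parent texel by alpha-weighted averaging. A standalone sketch of that reduction for a single destination texel (Qt's QRgb helpers, as in the loop above):

#include <QColor>

// Combine eight child color samples into one parent sample, weighting each channel by its
// alpha, as in VoxelColorAttribute::merge() above. A transparent first sample (or an entirely
// transparent block) produces a clear parent texel.
QRgb mergeColorSamples(const QRgb samples[8]) {
    const int MAX_ALPHA = 255;
    int alpha[8];
    int alphaTotal = 0;
    for (int i = 0; i < 8; i++) {
        alpha[i] = qAlpha(samples[i]);
        alphaTotal += alpha[i];
    }
    if (alpha[0] == 0 || alphaTotal == 0) {
        return qRgba(0, 0, 0, 0);
    }
    int red = 0, green = 0, blue = 0;
    for (int i = 0; i < 8; i++) {
        red += qRed(samples[i]) * alpha[i];
        green += qGreen(samples[i]) * alpha[i];
        blue += qBlue(samples[i]) * alpha[i];
    }
    return qRgba(red / alphaTotal, green / alphaTotal, blue / alphaTotal, MAX_ALPHA);
}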
const int VOXEL_MATERIAL_HEADER_SIZE = sizeof(qint32) * 6;
@ -2020,8 +2086,87 @@ bool VoxelHermiteAttribute::merge(void*& parent, void* children[], bool postRead
maxSize = qMax(maxSize, pointer->getSize());
}
}
*(VoxelHermiteDataPointer*)&parent = VoxelHermiteDataPointer();
return maxSize == 0;
if (maxSize == 0) {
*(VoxelHermiteDataPointer*)&parent = VoxelHermiteDataPointer();
return true;
}
int size = maxSize;
int area = size * size;
QVector<QRgb> contents(area * size * VoxelHermiteData::EDGE_COUNT);
int halfSize = size / 2;
int halfSizeComplement = size - halfSize;
for (int i = 0; i < MERGE_COUNT; i++) {
VoxelHermiteDataPointer child = decodeInline<VoxelHermiteDataPointer>(children[i]);
if (!child) {
continue;
}
const QVector<QRgb>& childContents = child->getContents();
int childSize = child->getSize();
int childArea = childSize * childSize;
const int INDEX_MASK = 1;
int xIndex = i & INDEX_MASK;
const int Y_SHIFT = 1;
int yIndex = (i >> Y_SHIFT) & INDEX_MASK;
int Z_SHIFT = 2;
int zIndex = (i >> Z_SHIFT) & INDEX_MASK;
QRgb* dest = contents.data() + ((zIndex * halfSize * area) + (yIndex * halfSize * size) + (xIndex * halfSize)) *
VoxelHermiteData::EDGE_COUNT;
const QRgb* src = childContents.data();
if (childSize == size) {
// simple case: one destination value for four child values
for (int z = 0; z < halfSizeComplement; z++) {
int offset4 = (z == halfSize) ? 0 : (childArea * VoxelHermiteData::EDGE_COUNT);
for (int y = 0; y < halfSizeComplement; y++) {
int offset2 = (y == halfSize) ? 0 : (childSize * VoxelHermiteData::EDGE_COUNT);
int offset6 = offset4 + offset2;
for (QRgb* end = dest + halfSizeComplement * VoxelHermiteData::EDGE_COUNT; dest != end;
dest += VoxelHermiteData::EDGE_COUNT) {
int offset1 = (dest == end - VoxelHermiteData::EDGE_COUNT) ? 0 : VoxelHermiteData::EDGE_COUNT;
for (int i = 0; i < VoxelHermiteData::EDGE_COUNT; i++) {
QRgb v[] = { src[i], src[offset1 + i], src[offset2 + i], src[offset2 + offset1 + i],
src[offset4 + i], src[offset4 + offset1 + i], src[offset6 + i], src[offset6 + offset1 + i] };
glm::vec3 n[] = { unpackNormal(v[0]), unpackNormal(v[1]), unpackNormal(v[2]), unpackNormal(v[3]),
unpackNormal(v[4]), unpackNormal(v[5]), unpackNormal(v[6]), unpackNormal(v[7]) };
float l[] = { glm::length(n[0]), glm::length(n[1]), glm::length(n[2]), glm::length(n[3]),
glm::length(n[4]), glm::length(n[5]), glm::length(n[6]), glm::length(n[7]) };
float lengthTotal = l[0] + l[1] + l[2] + l[3] + l[4] + l[5] + l[6] + l[7];
if (lengthTotal == 0.0f) {
dest[i] = qRgba(0, 0, 0, 0);
continue;
}
glm::vec3 combinedNormal = n[0] * l[0] + n[1] * l[1] + n[2] * l[2] + n[3] * l[3] + n[4] * l[4] +
n[5] * l[5] + n[6] * l[6] + n[7] * l[7];
float combinedLength = glm::length(combinedNormal);
if (combinedLength > 0.0f) {
combinedNormal /= combinedLength;
}
float combinedOffset = 0.0f;
int mask = 1 << i;
for (int j = 0; j < MERGE_COUNT; j++) {
float offset = qAlpha(v[j]) * (0.5f / EIGHT_BIT_MAXIMUM);
if (j & mask) {
offset += 0.5f;
}
combinedOffset += offset * l[j];
}
dest[i] = packNormal(combinedNormal, EIGHT_BIT_MAXIMUM * combinedOffset / lengthTotal);
}
src += (VoxelHermiteData::EDGE_COUNT + offset1);
}
dest += (halfSize * VoxelHermiteData::EDGE_COUNT);
src += offset2;
}
dest += (halfSize * size * VoxelHermiteData::EDGE_COUNT);
src += offset4;
}
} else {
// more complex: N destination values for four child values
// ...
}
}
*(VoxelHermiteDataPointer*)&parent = VoxelHermiteDataPointer(new VoxelHermiteData(contents, size));
return false;
}
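The Hermite merge follows the same downsampling structure, but each edge sample is weighted by the length of its packed gradient so that empty edges contribute nothing, and the crossing offsets are averaged with the same weights. A hedged sketch of the core blend for one edge; unpackNormalSketch/packNormalSketch are hypothetical stand-ins for the codebase's helpers (normal packed into RGB, crossing offset into alpha), included only to keep the sketch self-contained:

#include <QColor>
#include <glm/glm.hpp>

// Hypothetical stand-ins for the real pack/unpack helpers used above.
static glm::vec3 unpackNormalSketch(QRgb value) {
    return glm::vec3((char)qRed(value), (char)qGreen(value), (char)qBlue(value)) / 127.0f;
}
static QRgb packNormalSketch(const glm::vec3& normal, int alpha) {
    return qRgba((char)(normal.x * 127.0f) & 0xFF, (char)(normal.y * 127.0f) & 0xFF,
        (char)(normal.z * 127.0f) & 0xFF, alpha);
}

// Blend eight packed Hermite samples for one edge: normals and crossing offsets are both
// averaged with weights equal to each sample's gradient length, as in the merge loop above.
static QRgb blendHermiteSamples(const QRgb samples[8], const float offsets[8]) {
    glm::vec3 combinedNormal(0.0f);
    float combinedOffset = 0.0f;
    float lengthTotal = 0.0f;
    for (int j = 0; j < 8; j++) {
        glm::vec3 normal = unpackNormalSketch(samples[j]);
        float length = glm::length(normal);
        combinedNormal += normal * length;
        combinedOffset += offsets[j] * length;
        lengthTotal += length;
    }
    if (lengthTotal == 0.0f) {
        return qRgba(0, 0, 0, 0);   // no crossings recorded on this edge
    }
    float combinedLength = glm::length(combinedNormal);
    if (combinedLength > 0.0f) {
        combinedNormal /= combinedLength;
    }
    const float EIGHT_BIT_MAXIMUM = 255.0f;
    return packNormalSketch(combinedNormal, (int)(EIGHT_BIT_MAXIMUM * combinedOffset / lengthTotal));
}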
SharedObjectAttribute::SharedObjectAttribute(const QString& name, const QMetaObject* metaObject,

View file

@ -374,6 +374,13 @@ void MetavoxelData::clear(const AttributePointer& attribute) {
}
}
void MetavoxelData::touch(const AttributePointer& attribute) {
MetavoxelNode* root = _roots.value(attribute);
if (root) {
setRoot(attribute, root->touch(attribute));
}
}
class FirstRaySpannerIntersectionVisitor : public RaySpannerIntersectionVisitor {
public:
@ -1249,6 +1256,16 @@ void MetavoxelNode::countNodes(const AttributePointer& attribute, const glm::vec
}
}
MetavoxelNode* MetavoxelNode::touch(const AttributePointer& attribute) const {
MetavoxelNode* node = new MetavoxelNode(getAttributeValue(attribute));
for (int i = 0; i < CHILD_COUNT; i++) {
if (_children[i]) {
node->setChild(i, _children[i]->touch(attribute));
}
}
return node;
}
MetavoxelInfo::MetavoxelInfo(MetavoxelInfo* parentInfo, int inputValuesSize, int outputValuesSize) :
parentInfo(parentInfo),
inputValues(inputValuesSize),

View file

@ -112,6 +112,9 @@ public:
/// Clears all data in the specified attribute layer.
void clear(const AttributePointer& attribute);
/// "Touches" all data in the specified attribute layer, making it look as if it has changed.
void touch(const AttributePointer& attribute);
/// Convenience function that finds the first spanner intersecting the provided ray.
SharedObjectPointer findFirstRaySpannerIntersection(const glm::vec3& origin, const glm::vec3& direction,
const AttributePointer& attribute, float& distance, const MetavoxelLOD& lod = MetavoxelLOD());
@ -254,6 +257,8 @@ public:
void countNodes(const AttributePointer& attribute, const glm::vec3& minimum,
float size, const MetavoxelLOD& lod, int& internalNodes, int& leaves) const;
MetavoxelNode* touch(const AttributePointer& attribute) const;
private:
Q_DISABLE_COPY(MetavoxelNode)