Merge branch 'master' into light_mod

James B. Pollack 2015-12-16 09:56:17 -08:00
commit b09575dc40
28 changed files with 380 additions and 481 deletions

View file

@ -10,24 +10,30 @@ macro(SETUP_HIFI_PLUGIN)
setup_hifi_library(${ARGV})
add_dependencies(interface ${TARGET_NAME})
set_target_properties(${TARGET_NAME} PROPERTIES FOLDER "Plugins")
if (APPLE)
if (APPLE)
set(PLUGIN_PATH "interface.app/Contents/MacOS/plugins")
else()
set(PLUGIN_PATH "plugins")
endif()
IF(${CMAKE_SYSTEM_NAME} MATCHES "Linux")
set(PLUGIN_FULL_PATH "${CMAKE_BINARY_DIR}/interface/${PLUGIN_PATH}/")
else()
set(PLUGIN_FULL_PATH "${CMAKE_BINARY_DIR}/interface/$<CONFIGURATION>/${PLUGIN_PATH}/")
endif()
# create the destination for the plugin binaries
add_custom_command(
TARGET ${TARGET_NAME} POST_BUILD
COMMAND "${CMAKE_COMMAND}" -E make_directory
"${CMAKE_BINARY_DIR}/interface/$<CONFIGURATION>/${PLUGIN_PATH}/"
${PLUGIN_FULL_PATH}
)
add_custom_command(TARGET ${DIR} POST_BUILD
COMMAND "${CMAKE_COMMAND}" -E copy
"$<TARGET_FILE:${TARGET_NAME}>"
"${CMAKE_BINARY_DIR}/interface/$<CONFIGURATION>/${PLUGIN_PATH}/"
${PLUGIN_FULL_PATH}
)
endmacro()
endmacro()

View file

@ -46,7 +46,7 @@
var BAT_MODEL = "atp:c47deaae09cca927f6bc9cca0e8bbe77fc618f8c3f2b49899406a63a59f885cb.fbx";
var BAT_COLLISION_HULL = "atp:9eafceb7510c41d50661130090de7e0632aa4da236ebda84a0059a4be2130e0c.obj";
var SCRIPT_URL = "http://rawgit.com/birarda/hifi/baseball/examples/baseball/bat.js"
var SCRIPT_RELATIVE_PATH = "bat.js"
var batUserData = {
grabbableKey: {
@ -69,7 +69,7 @@
velocity: { x: 0, y: 0.05, z: 0}, // workaround for gravity not taking effect on add
gravity: { x: 0, y: -9.81, z: 0},
rotation: Quat.fromPitchYawRollDegrees(0.0, 0.0, -90.0),
script: SCRIPT_URL,
script: Script.resolvePath(SCRIPT_RELATIVE_PATH),
userData: JSON.stringify(batUserData)
});
};
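Note: the script URL change above swaps a hard-coded rawgit URL for Script.resolvePath(), which resolves a relative filename against the location of the currently running script, so the entity script follows wherever this spawner script is hosted. A minimal sketch of the pattern (the URL and entity properties below are illustrative assumptions, not taken from this commit):

// If this spawner script were served from http://example.com/baseball/ (hypothetical),
// Script.resolvePath("bat.js") would yield http://example.com/baseball/bat.js.
var SCRIPT_RELATIVE_PATH = "bat.js";
var batScriptURL = Script.resolvePath(SCRIPT_RELATIVE_PATH);
var bat = Entities.addEntity({
    type: "Model",
    modelURL: BAT_MODEL,                   // defined earlier in this file
    script: batScriptURL,                  // travels with the spawner script
    userData: JSON.stringify(batUserData)
});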

View file

@ -0,0 +1,39 @@
//
// reticleTest.js
// examples/controllers
//
// Created by Brad Hefta-Gaub on 2015/12/15
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var mappingJSON = {
"name": "com.highfidelity.testing.reticleWithJoystick",
"channels": [
{ "from": "Standard.RT", "to": "Actions.ReticleClick", "filters": "constrainToInteger" },
{ "from": "Standard.RX", "to": "Actions.ReticleX",
"filters":
[
{ "type": "pulse", "interval": 0.05 },
{ "type": "scale", "scale": 20 }
]
},
{ "from": "Standard.RY", "to": "Actions.ReticleY",
"filters":
[
{ "type": "pulse", "interval": 0.05 },
{ "type": "scale", "scale": 20 }
]
},
]
};
mapping = Controller.parseMapping(JSON.stringify(mappingJSON));
mapping.enable();
Script.scriptEnding.connect(function(){
mapping.disable();
});
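The mapping above moves the reticle by routing the right stick straight to Actions.ReticleX and Actions.ReticleY. This commit also introduces bisected aliases for those axes (ReticleLeft/ReticleRight/ReticleUp/ReticleDown; see the controller Action changes later in this diff), which the UserInputMapper folds back together with fixBisectedAxis(). A sketch of a mapping built on the aliases instead, driven from the D-pad; the channel choices are assumptions for illustration, not part of this commit:

var dpadReticleJSON = {
    "name": "com.highfidelity.testing.reticleWithDpad",
    "channels": [
        // Each direction is a positive-only channel; fixBisectedAxis() recombines
        // the Left/Right and Up/Down pairs into ReticleX / ReticleY.
        { "from": "Standard.DL", "to": "Actions.ReticleLeft", "filters": [ { "type": "scale", "scale": 20 } ] },
        { "from": "Standard.DR", "to": "Actions.ReticleRight", "filters": [ { "type": "scale", "scale": 20 } ] },
        { "from": "Standard.DU", "to": "Actions.ReticleUp", "filters": [ { "type": "scale", "scale": 20 } ] },
        { "from": "Standard.DD", "to": "Actions.ReticleDown", "filters": [ { "type": "scale", "scale": 20 } ] }
    ]
};
var dpadReticleMapping = Controller.parseMapping(JSON.stringify(dpadReticleJSON));
dpadReticleMapping.enable();
Script.scriptEnding.connect(function() {
    dpadReticleMapping.disable();
});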

examples/tPose.js (new file, 84 lines added)
View file

@ -0,0 +1,84 @@
//
// tPose.js
// examples
//
// Created by Anthony Thibault on 12/10/2015
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Example of how to put the avatar into its default T-pose.
//
// TODO: CHANGE
var buttonImageUrl = "https://s3.amazonaws.com/hifi-public/images/tools/tpose.svg";
var windowDimensions = Controller.getViewportDimensions();
var buttonWidth = 37;
var buttonHeight = 46;
var buttonPadding = 10;
var buttonPositionX = windowDimensions.x - buttonPadding - buttonWidth;
var buttonPositionY = (windowDimensions.y - buttonHeight) / 2 - (buttonHeight + buttonPadding);
var tPoseEnterImageOverlay = {
x: buttonPositionX,
y: buttonPositionY,
width: buttonWidth,
height: buttonHeight,
subImage: { x: 0, y: buttonHeight, width: buttonWidth, height: buttonHeight },
imageURL: buttonImageUrl,
visible: true,
alpha: 1.0
};
var tPoseExitImageOverlay = {
x: buttonPositionX,
y: buttonPositionY,
width: buttonWidth,
height: buttonHeight,
subImage: { x: buttonWidth, y: buttonHeight, width: buttonWidth, height: buttonHeight },
imageURL: buttonImageUrl,
visible: false,
alpha: 1.0
};
var tPoseEnterButton = Overlays.addOverlay("image", tPoseEnterImageOverlay);
var tPoseExitButton = Overlays.addOverlay("image", tPoseExitImageOverlay);
var tPose = false;
function enterDefaultPose() {
tPose = true;
var i, l = MyAvatar.getJointNames().length;
var rot, trans;
for (i = 0; i < l; i++) {
rot = MyAvatar.getDefaultJointRotation(i);
trans = MyAvatar.getDefaultJointTranslation(i);
MyAvatar.setJointData(i, rot, trans);
}
Overlays.editOverlay(tPoseEnterButton, { visible: false });
Overlays.editOverlay(tPoseExitButton, { visible: true });
}
function exitDefaultPose() {
tPose = false;
MyAvatar.clearJointsData();
Overlays.editOverlay(tPoseEnterButton, { visible: true });
Overlays.editOverlay(tPoseExitButton, { visible: false });
}
Controller.mousePressEvent.connect(function (event) {
var clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y });
if (clickedOverlay == tPoseEnterButton) {
enterDefaultPose();
} else if (clickedOverlay == tPoseExitButton) {
exitDefaultPose();
}
});
Script.scriptEnding.connect(function() {
Overlays.deleteOverlay(tPoseEnterButton);
Overlays.deleteOverlay(tPoseExitButton);
});
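tPose.js resets every joint by walking the full joint list. Because the new getDefaultJointRotation()/getDefaultJointTranslation() calls (added to Avatar later in this diff) take a joint index, the same approach works for a single joint; looking the index up by name via MyAvatar.getJointIndex() is assumed to be available from script here:

// Hypothetical snippet: snap only the head back to its default pose.
var headIndex = MyAvatar.getJointIndex("Head");   // joint name is an assumption
if (headIndex !== -1) {
    var defaultRotation = MyAvatar.getDefaultJointRotation(headIndex);
    var defaultTranslation = MyAvatar.getDefaultJointTranslation(headIndex);
    MyAvatar.setJointData(headIndex, defaultRotation, defaultTranslation);
}
// MyAvatar.clearJointData(headIndex) would later hand the joint back to the animation system.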

View file

@ -11,7 +11,7 @@
{ "from": "Keyboard.S", "when": "Keyboard.Shift", "to": "Actions.PITCH_DOWN" },
{ "from": "Keyboard.W", "when": "Keyboard.Shift", "to": "Actions.PITCH_UP" },
{ "comment" : "Mouse turn need to be small continuous increments",
"from": { "makeAxis" : [
[ "Keyboard.MouseMoveLeft" ],
@ -75,7 +75,6 @@
{ "from": "Keyboard.S", "to": "Actions.LONGITUDINAL_BACKWARD" },
{ "from": "Keyboard.C", "to": "Actions.VERTICAL_DOWN" },
{ "from": "Keyboard.E", "to": "Actions.VERTICAL_UP" },
{ "from": "Keyboard.Left", "when": "Keyboard.RightMouseButton", "to": "Actions.LATERAL_LEFT" },
{ "from": "Keyboard.Right", "when": "Keyboard.RightMouseButton", "to": "Actions.LATERAL_RIGHT" },
{ "from": "Keyboard.Left", "when": "Keyboard.Shift", "to": "Actions.LATERAL_LEFT" },

View file

@ -13,8 +13,8 @@
{ "type": "scale", "scale": 22.5 }
]
},
{ "from": "Standard.RX", "to": "Actions.Yaw" },
{ "from": "Standard.RX", "to": "Actions.Yaw" },
{ "from": "Standard.RY",
"when": "Application.Grounded",
"to": "Actions.Up",
@ -24,6 +24,7 @@
"invert"
]
},
{ "from": "Standard.RY", "to": "Actions.Up", "filters": "invert"},
{ "from": [ "Standard.DU", "Standard.DL", "Standard.DR", "Standard.DD" ], "to": "Standard.LeftPrimaryThumb" },

View file

@ -382,7 +382,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
_scaleMirror(1.0f),
_rotateMirror(0.0f),
_raiseMirror(0.0f),
_lastMouseMoveWasSimulated(false),
_enableProcessOctreeThread(true),
_runningScriptsWidget(NULL),
_runningScriptsWidgetWasVisible(false),
@ -664,6 +663,21 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
// Setup the userInputMapper with the actions
auto userInputMapper = DependencyManager::get<UserInputMapper>();
connect(userInputMapper.data(), &UserInputMapper::actionEvent, [this](int action, float state) {
if (action == controller::toInt(controller::Action::RETICLE_CLICK)) {
auto globalPos = QCursor::pos();
auto localPos = _glWidget->mapFromGlobal(globalPos);
if (state) {
QMouseEvent mousePress(QEvent::MouseButtonPress, localPos, Qt::LeftButton, Qt::LeftButton, Qt::NoModifier);
sendEvent(_glWidget, &mousePress);
_reticleClickPressed = true;
} else {
QMouseEvent mouseRelease(QEvent::MouseButtonRelease, localPos, Qt::LeftButton, Qt::NoButton, Qt::NoModifier);
sendEvent(_glWidget, &mouseRelease);
_reticleClickPressed = false;
}
return; // nothing else to do
}
if (state) {
if (action == controller::toInt(controller::Action::TOGGLE_MUTE)) {
DependencyManager::get<AudioClient>()->toggleMute();
@ -671,6 +685,14 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
cycleCamera();
} else if (action == controller::toInt(controller::Action::CONTEXT_MENU)) {
VrMenu::toggle(); // show context menu even on non-stereo displays
} else if (action == controller::toInt(controller::Action::RETICLE_X)) {
auto globalPos = QCursor::pos();
globalPos.setX(globalPos.x() + state);
QCursor::setPos(globalPos);
} else if (action == controller::toInt(controller::Action::RETICLE_Y)) {
auto globalPos = QCursor::pos();
globalPos.setY(globalPos.y() + state);
QCursor::setPos(globalPos);
}
}
});
@ -692,8 +714,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
// Setup the keyboardMouseDevice and the user input mapper with the default bindings
userInputMapper->registerDevice(_keyboardMouseDevice->getInputDevice());
userInputMapper->loadDefaultMapping(userInputMapper->getStandardDeviceID());
// check first run...
@ -745,15 +765,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
setActiveEyeTracker();
#endif
_oldHandMouseX[0] = -1;
_oldHandMouseY[0] = -1;
_oldHandMouseX[1] = -1;
_oldHandMouseY[1] = -1;
_oldHandLeftClick[0] = false;
_oldHandRightClick[0] = false;
_oldHandLeftClick[1] = false;
_oldHandRightClick[1] = false;
auto applicationUpdater = DependencyManager::get<AutoUpdater>();
connect(applicationUpdater.data(), &AutoUpdater::newVersionIsAvailable, dialogsManager.data(), &DialogsManager::showUpdateDialog);
applicationUpdater->checkForUpdate();
@ -1231,15 +1242,28 @@ void Application::paintGL() {
}
} else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
if (isHMDMode()) {
auto mirrorBodyOrientation = myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f));
glm::quat hmdRotation = extractRotation(myAvatar->getHMDSensorMatrix());
_myCamera.setRotation(myAvatar->getWorldAlignedOrientation()
* glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)) * hmdRotation);
// Mirror HMD yaw and roll
glm::vec3 mirrorHmdEulers = glm::eulerAngles(hmdRotation);
mirrorHmdEulers.y = -mirrorHmdEulers.y;
mirrorHmdEulers.z = -mirrorHmdEulers.z;
glm::quat mirrorHmdRotation = glm::quat(mirrorHmdEulers);
glm::quat worldMirrorRotation = mirrorBodyOrientation * mirrorHmdRotation;
_myCamera.setRotation(worldMirrorRotation);
glm::vec3 hmdOffset = extractTranslation(myAvatar->getHMDSensorMatrix());
// Mirror HMD lateral offsets
hmdOffset.x = -hmdOffset.x;
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ glm::vec3(0, _raiseMirror * myAvatar->getAvatarScale(), 0)
+ (myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, _rotateMirror, 0.0f))) *
glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror
+ (myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f))) * hmdOffset);
+ mirrorBodyOrientation * glm::vec3(0.0f, 0.0f, 1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror
+ mirrorBodyOrientation * hmdOffset);
} else {
_myCamera.setRotation(myAvatar->getWorldAlignedOrientation()
* glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
@ -1273,6 +1297,10 @@ void Application::paintGL() {
// Primary rendering pass
auto framebufferCache = DependencyManager::get<FramebufferCache>();
const QSize size = framebufferCache->getFrameBufferSize();
// Final framebuffer that will be handed to the display plugin
auto finalFramebuffer = framebufferCache->getFramebuffer();
{
PROFILE_RANGE(__FUNCTION__ "/mainRender");
PerformanceTimer perfTimer("mainRender");
@ -1326,9 +1354,63 @@ void Application::paintGL() {
}
displaySide(&renderArgs, _myCamera);
renderArgs._context->enableStereo(false);
gpu::doInBatch(renderArgs._context, [](gpu::Batch& batch) {
batch.setFramebuffer(nullptr);
});
// Blit primary to final FBO
auto primaryFbo = framebufferCache->getPrimaryFramebuffer();
if (renderArgs._renderMode == RenderArgs::MIRROR_RENDER_MODE) {
if (displayPlugin->isStereo()) {
gpu::doInBatch(renderArgs._context, [=](gpu::Batch& batch) {
gpu::Vec4i srcRectLeft;
srcRectLeft.z = size.width() / 2;
srcRectLeft.w = size.height();
gpu::Vec4i srcRectRight;
srcRectRight.x = size.width() / 2;
srcRectRight.z = size.width();
srcRectRight.w = size.height();
gpu::Vec4i destRectLeft;
destRectLeft.x = srcRectLeft.z;
destRectLeft.z = srcRectLeft.x;
destRectLeft.y = srcRectLeft.y;
destRectLeft.w = srcRectLeft.w;
gpu::Vec4i destRectRight;
destRectRight.x = srcRectRight.z;
destRectRight.z = srcRectRight.x;
destRectRight.y = srcRectRight.y;
destRectRight.w = srcRectRight.w;
batch.setFramebuffer(finalFramebuffer);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(0.0f, 0.0f, 1.0f, 0.0f));
// Blit left to right and right to left in stereo
batch.blit(primaryFbo, srcRectRight, finalFramebuffer, destRectLeft);
batch.blit(primaryFbo, srcRectLeft, finalFramebuffer, destRectRight);
});
} else {
gpu::doInBatch(renderArgs._context, [=](gpu::Batch& batch) {
gpu::Vec4i srcRect;
srcRect.z = size.width();
srcRect.w = size.height();
gpu::Vec4i destRect;
destRect.x = size.width();
destRect.y = 0;
destRect.z = 0;
destRect.w = size.height();
batch.setFramebuffer(finalFramebuffer);
batch.blit(primaryFbo, srcRect, finalFramebuffer, destRect);
});
}
} else {
gpu::doInBatch(renderArgs._context, [=](gpu::Batch& batch) {
gpu::Vec4i rect;
rect.z = size.width();
rect.w = size.height();
batch.setFramebuffer(finalFramebuffer);
batch.blit(primaryFbo, rect, finalFramebuffer, rect);
});
}
}
// Overlay Composition, needs to occur after screen space effects have completed
@ -1336,7 +1418,7 @@ void Application::paintGL() {
{
PROFILE_RANGE(__FUNCTION__ "/compositor");
PerformanceTimer perfTimer("compositor");
auto primaryFbo = framebufferCache->getPrimaryFramebuffer();
auto primaryFbo = finalFramebuffer;
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(primaryFbo));
if (displayPlugin->isStereo()) {
QRect currentViewport(QPoint(0, 0), QSize(size.width() / 2, size.height()));
@ -1361,23 +1443,12 @@ void Application::paintGL() {
{
PROFILE_RANGE(__FUNCTION__ "/pluginOutput");
PerformanceTimer perfTimer("pluginOutput");
auto primaryFramebuffer = framebufferCache->getPrimaryFramebuffer();
auto scratchFramebuffer = framebufferCache->getFramebuffer();
gpu::doInBatch(renderArgs._context, [=](gpu::Batch& batch) {
gpu::Vec4i rect;
rect.z = size.width();
rect.w = size.height();
batch.setFramebuffer(scratchFramebuffer);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(0.0f, 0.0f, 0.0f, 0.0f));
batch.blit(primaryFramebuffer, rect, scratchFramebuffer, rect);
batch.setFramebuffer(nullptr);
});
auto finalTexturePointer = scratchFramebuffer->getRenderBuffer(0);
auto finalTexturePointer = finalFramebuffer->getRenderBuffer(0);
GLuint finalTexture = gpu::GLBackend::getTextureID(finalTexturePointer);
Q_ASSERT(0 != finalTexture);
Q_ASSERT(!_lockedFramebufferMap.contains(finalTexture));
_lockedFramebufferMap[finalTexture] = scratchFramebuffer;
_lockedFramebufferMap[finalTexture] = finalFramebuffer;
Q_ASSERT(isCurrentContext(_offscreenContext->getContext()));
{
@ -1940,8 +2011,6 @@ void Application::focusOutEvent(QFocusEvent* event) {
void Application::mouseMoveEvent(QMouseEvent* event, unsigned int deviceID) {
PROFILE_RANGE(__FUNCTION__);
// Used by application overlay to determine how to draw cursor(s)
_lastMouseMoveWasSimulated = deviceID > 0;
if (_aboutToQuit) {
return;
@ -1969,11 +2038,20 @@ void Application::mouseMoveEvent(QMouseEvent* event, unsigned int deviceID) {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
QPointF transformedPos = offscreenUi->mapToVirtualScreen(event->localPos(), _glWidget);
auto button = event->button();
auto buttons = event->buttons();
// If the ReticleClick action is active, treat the LeftMouseButton as being held
if (_reticleClickPressed) {
if (button == Qt::NoButton) {
button = Qt::LeftButton;
}
buttons |= Qt::LeftButton;
}
QMouseEvent mappedEvent(event->type(),
transformedPos,
event->screenPos(), event->button(),
event->buttons(), event->modifiers());
event->screenPos(), button,
buttons, event->modifiers());
getEntities()->mouseMoveEvent(&mappedEvent, deviceID);
_controllerScriptingInterface->emitMouseMoveEvent(&mappedEvent, deviceID); // send events to any registered scripts
@ -2877,13 +2955,6 @@ void Application::update(float deltaTime) {
Hand* hand = DependencyManager::get<AvatarManager>()->getMyAvatar()->getHand();
setPalmData(hand, leftHand, deltaTime, HandData::LeftHand, userInputMapper->getActionState(controller::Action::LEFT_HAND_CLICK));
setPalmData(hand, rightHand, deltaTime, HandData::RightHand, userInputMapper->getActionState(controller::Action::RIGHT_HAND_CLICK));
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableHandMouseInput)) {
emulateMouse(hand, userInputMapper->getActionState(controller::Action::LEFT_HAND_CLICK),
userInputMapper->getActionState(controller::Action::SHIFT), HandData::LeftHand);
emulateMouse(hand, userInputMapper->getActionState(controller::Action::RIGHT_HAND_CLICK),
userInputMapper->getActionState(controller::Action::SHIFT), HandData::RightHand);
}
updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...
updateDialogs(deltaTime); // update various stats dialogs if present
@ -4047,7 +4118,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
ClipboardScriptingInterface* clipboardScriptable = new ClipboardScriptingInterface();
scriptEngine->registerGlobalObject("Clipboard", clipboardScriptable);
connect(scriptEngine, SIGNAL(finished(const QString&)), clipboardScriptable, SLOT(deleteLater()));
connect(scriptEngine, &ScriptEngine::finished, clipboardScriptable, &ClipboardScriptingInterface::deleteLater);
connect(scriptEngine, &ScriptEngine::finished, this, &Application::scriptFinished, Qt::DirectConnection);
@ -5062,125 +5133,6 @@ void Application::setPalmData(Hand* hand, const controller::Pose& pose, float de
});
}
void Application::emulateMouse(Hand* hand, float click, float shift, HandData::Hand whichHand) {
auto palms = hand->getCopyOfPalms();
// Locate the palm, if it exists and is active
PalmData* palm;
bool foundHand = false;
for (size_t j = 0; j < palms.size(); j++) {
if (palms[j].whichHand() == whichHand) {
palm = &(palms[j]);
foundHand = true;
break;
}
}
if (!foundHand || !palm->isActive()) {
return;
}
// Process the mouse events
QPoint pos;
// FIXME - this mouse emulation stuff needs to be reworked for new controller input plugins
unsigned int deviceID = whichHand == HandData::LeftHand ? CONTROLLER_0_EVENT : CONTROLLER_1_EVENT;
int index = (int)whichHand; // FIXME - hack attack
if (isHMDMode()) {
pos = getApplicationCompositor().getPalmClickLocation(palm);
} else {
// Get directon relative to avatar orientation
glm::vec3 direction = glm::inverse(getMyAvatar()->getOrientation()) * palm->getFingerDirection();
// Get the angles, scaled between (-0.5,0.5)
float xAngle = (atan2f(direction.z, direction.x) + (float)M_PI_2);
float yAngle = 0.5f - ((atan2f(direction.z, direction.y) + (float)M_PI_2));
auto canvasSize = getCanvasSize();
// Get the pixel range over which the xAngle and yAngle are scaled
float cursorRange = canvasSize.x * controller::InputDevice::getCursorPixelRangeMult();
pos.setX(canvasSize.x / 2.0f + cursorRange * xAngle);
pos.setY(canvasSize.y / 2.0f + cursorRange * yAngle);
}
//If we are off screen then we should stop processing, and if a trigger or bumper is pressed,
//we should unpress them.
if (pos.x() == INT_MAX) {
if (_oldHandLeftClick[index]) {
QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, Qt::LeftButton, Qt::LeftButton, 0);
mouseReleaseEvent(&mouseEvent, deviceID);
_oldHandLeftClick[index] = false;
}
if (_oldHandRightClick[index]) {
QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, Qt::RightButton, Qt::RightButton, 0);
mouseReleaseEvent(&mouseEvent, deviceID);
_oldHandRightClick[index] = false;
}
return;
}
//If position has changed, emit a mouse move to the application
if (pos.x() != _oldHandMouseX[index] || pos.y() != _oldHandMouseY[index]) {
QMouseEvent mouseEvent(QEvent::MouseMove, pos, Qt::NoButton, Qt::NoButton, 0);
// Only send the mouse event if the opposite left button isnt held down.
// Is this check necessary?
if (!_oldHandLeftClick[(int)(!index)]) {
mouseMoveEvent(&mouseEvent, deviceID);
}
}
_oldHandMouseX[index] = pos.x();
_oldHandMouseY[index] = pos.y();
//We need separate coordinates for clicks, since we need to check if
//a magnification window was clicked on
int clickX = pos.x();
int clickY = pos.y();
//Set pos to the new click location, which may be the same if no magnification window is open
pos.setX(clickX);
pos.setY(clickY);
// Right click
if (shift == 1.0f && click == 1.0f) {
if (!_oldHandRightClick[index]) {
_oldHandRightClick[index] = true;
QMouseEvent mouseEvent(QEvent::MouseButtonPress, pos, Qt::RightButton, Qt::RightButton, 0);
mousePressEvent(&mouseEvent, deviceID);
}
} else if (_oldHandRightClick[index]) {
QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, Qt::RightButton, Qt::RightButton, 0);
mouseReleaseEvent(&mouseEvent, deviceID);
_oldHandRightClick[index] = false;
}
// Left click
if (shift != 1.0f && click == 1.0f) {
if (!_oldHandLeftClick[index]) {
_oldHandLeftClick[index] = true;
QMouseEvent mouseEvent(QEvent::MouseButtonPress, pos, Qt::LeftButton, Qt::LeftButton, 0);
mousePressEvent(&mouseEvent, deviceID);
}
} else if (_oldHandLeftClick[index]) {
QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, Qt::LeftButton, Qt::LeftButton, 0);
mouseReleaseEvent(&mouseEvent, deviceID);
_oldHandLeftClick[index] = false;
}
}
void Application::crashApplication() {
qCDebug(interfaceapp) << "Intentionally crashed Interface";
QObject* object = nullptr;

View file

@ -145,7 +145,6 @@ public:
ivec2 getMouse() const;
ivec2 getTrueMouse() const;
bool getLastMouseMoveWasSimulated() const { return _lastMouseMoveWasSimulated; }
FaceTracker* getActiveFaceTracker();
FaceTracker* getSelectedFaceTracker();
@ -361,7 +360,6 @@ private:
void update(float deltaTime);
void setPalmData(Hand* hand, const controller::Pose& pose, float deltaTime, HandData::Hand whichHand, float triggerValue);
void emulateMouse(Hand* hand, float click, float shift, HandData::Hand whichHand);
// Various helper functions called during update()
void updateLOD();
@ -476,8 +474,6 @@ private:
Environment _environment;
bool _lastMouseMoveWasSimulated;
QSet<int> _keysPressed;
bool _enableProcessOctreeThread;
@ -537,14 +533,6 @@ private:
ApplicationCompositor _compositor;
OverlayConductor _overlayConductor;
// FIXME - Hand Controller to mouse emulation helpers. This is crufty and should be moved
// into the input plugins or something.
int _oldHandMouseX[(int)HandData::NUMBER_OF_HANDS];
int _oldHandMouseY[(int)HandData::NUMBER_OF_HANDS];
bool _oldHandLeftClick[(int)HandData::NUMBER_OF_HANDS];
bool _oldHandRightClick[(int)HandData::NUMBER_OF_HANDS];
DialogsManagerScriptingInterface* _dialogsManagerScriptingInterface = new DialogsManagerScriptingInterface();
EntityItemID _keyboardFocusedItem;
@ -559,6 +547,8 @@ private:
bool _inPaint = false;
bool _isGLInitialized { false };
bool _physicsEnabled { false };
bool _reticleClickPressed { false };
};
#endif // hifi_Application_h

View file

@ -261,9 +261,6 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::NamesAboveHeads, 0, true,
NULL, NULL, UNSPECIFIED_POSITION, "Advanced");
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::BlueSpeechSphere, 0, true,
NULL, NULL, UNSPECIFIED_POSITION, "Advanced");
MenuWrapper* viewMenu = addMenu("View");
addActionToQMenuAndActionHash(viewMenu, MenuOption::ReloadContent, 0, qApp, SLOT(reloadResourceCaches()),
QAction::NoRole, UNSPECIFIED_POSITION, "Advanced");
@ -480,7 +477,6 @@ Menu::Menu() {
MenuWrapper* handOptionsMenu = developerMenu->addMenu("Hands");
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHandTargets, 0, false);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::EnableHandMouseInput, 0, false);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::LowVelocityFilter, 0, true,
qApp, SLOT(setLowVelocityFilter(bool)));

View file

@ -168,7 +168,6 @@ namespace MenuOption {
const QString Back = "Back";
const QString BandwidthDetails = "Bandwidth Details";
const QString BinaryEyelidControl = "Binary Eyelid Control";
const QString BlueSpeechSphere = "Blue Sphere While Speaking";
const QString BookmarkLocation = "Bookmark Location";
const QString Bookmarks = "Bookmarks";
const QString CachesSize = "RAM Caches Size";
@ -220,7 +219,6 @@ namespace MenuOption {
const QString FrameTimer = "Show Timer";
const QString FullscreenMirror = "Fullscreen Mirror";
const QString GlowWhenSpeaking = "Glow When Speaking";
const QString EnableHandMouseInput = "Enable Hand Controller Mouse Input";
const QString IncreaseAvatarSize = "Increase Avatar Size";
const QString IndependentMode = "Independent Mode";
const QString InputMenu = "Avatar>Input Devices";

View file

@ -495,39 +495,6 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
}
}
}
// quick check before falling into the code below:
// (a 10 degree breadth of an almost 2 meter avatar kicks in at about 12m)
const float MIN_VOICE_SPHERE_DISTANCE = 12.0f;
if (Menu::getInstance()->isOptionChecked(MenuOption::BlueSpeechSphere)
&& distanceToTarget > MIN_VOICE_SPHERE_DISTANCE) {
PROFILE_RANGE_BATCH(batch, __FUNCTION__":renderVoiceSphere");
// render voice intensity sphere for avatars that are farther away
const float MAX_SPHERE_ANGLE = 10.0f * RADIANS_PER_DEGREE;
const float MIN_SPHERE_ANGLE = 0.5f * RADIANS_PER_DEGREE;
const float MIN_SPHERE_SIZE = 0.01f;
const float SPHERE_LOUDNESS_SCALING = 0.0005f;
const float SPHERE_COLOR[] = { 0.5f, 0.8f, 0.8f };
float height = getSkeletonHeight();
glm::vec3 delta = height * (getHead()->getCameraOrientation() * IDENTITY_UP) / 2.0f;
float angle = abs(angleBetween(toTarget + delta, toTarget - delta));
float sphereRadius = getHead()->getAverageLoudness() * SPHERE_LOUDNESS_SCALING;
if (renderArgs->_renderMode == RenderArgs::DEFAULT_RENDER_MODE && (sphereRadius > MIN_SPHERE_SIZE) &&
(angle < MAX_SPHERE_ANGLE) && (angle > MIN_SPHERE_ANGLE)) {
batch.setModelTransform(Transform());
Transform transform;
transform.setTranslation(getPosition());
transform.setScale(height);
transform.postScale(sphereRadius);
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphereInstance(batch,
transform,
glm::vec4(SPHERE_COLOR[0], SPHERE_COLOR[1], SPHERE_COLOR[2], 1.0f - angle / MAX_SPHERE_ANGLE));
}
}
}
const float DISPLAYNAME_DISTANCE = 20.0f;
@ -868,6 +835,18 @@ glm::vec3 Avatar::getJointTranslation(int index) const {
return translation;
}
glm::quat Avatar::getDefaultJointRotation(int index) const {
glm::quat rotation;
_skeletonModel.getRelativeDefaultJointRotation(index, rotation);
return rotation;
}
glm::vec3 Avatar::getDefaultJointTranslation(int index) const {
glm::vec3 translation;
_skeletonModel.getRelativeDefaultJointTranslation(index, translation);
return translation;
}
glm::quat Avatar::getAbsoluteJointRotationInObjectFrame(int index) const {
glm::quat rotation;
_skeletonModel.getAbsoluteJointRotationInRigFrame(index, rotation);

View file

@ -108,6 +108,9 @@ public:
virtual int getJointIndex(const QString& name) const override;
virtual QStringList getJointNames() const override;
Q_INVOKABLE virtual glm::quat getDefaultJointRotation(int index) const;
Q_INVOKABLE virtual glm::vec3 getDefaultJointTranslation(int index) const;
virtual glm::quat getAbsoluteJointRotationInObjectFrame(int index) const override;
virtual glm::vec3 getAbsoluteJointTranslationInObjectFrame(int index) const override;

View file

@ -441,14 +441,6 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
estimatedPosition = tracker->getHeadTranslation();
_trackedHeadPosition = estimatedPosition;
estimatedRotation = glm::degrees(safeEulerAngles(tracker->getHeadRotation()));
if (qApp->getCamera()->getMode() == CAMERA_MODE_MIRROR) {
// Invert yaw and roll when in mirror mode
// NOTE: this is kinda a hack, it's the same hack we use to make the head tilt. But it's not really a mirror
// it just makes you feel like you're looking in a mirror because the body movements of the avatar appear to
// match your body movements.
YAW(estimatedRotation) *= -1.0f;
ROLL(estimatedRotation) *= -1.0f;
}
}
// Rotate the body if the head is turned beyond the screen
@ -489,14 +481,6 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
const float TORSO_LENGTH = 0.5f;
glm::vec3 relativePosition = estimatedPosition - glm::vec3(0.0f, -TORSO_LENGTH, 0.0f);
// Invert left/right lean when in mirror mode
// NOTE: this is kinda a hack, it's the same hack we use to make the head tilt. But it's not really a mirror
// it just makes you feel like you're looking in a mirror because the body movements of the avatar appear to
// match your body movements.
if ((inHmd || inFacetracker) && qApp->getCamera()->getMode() == CAMERA_MODE_MIRROR) {
relativePosition.x = -relativePosition.x;
}
const float MAX_LEAN = 45.0f;
head->setLeanSideways(glm::clamp(glm::degrees(atanf(relativePosition.x * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
@ -966,7 +950,11 @@ void MyAvatar::clearJointData(int index) {
}
void MyAvatar::clearJointsData() {
//clearJointAnimationPriorities();
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "clearJointsData");
return;
}
_rig->clearJointStates();
}
void MyAvatar::setFaceModelURL(const QUrl& faceModelURL) {
@ -1401,12 +1389,6 @@ void MyAvatar::updateOrientation(float deltaTime) {
// ... so they need to be converted to degrees before we do math...
glm::vec3 euler = glm::eulerAngles(localOrientation) * DEGREES_PER_RADIAN;
//Invert yaw and roll when in mirror mode
if (qApp->getCamera()->getMode() == CAMERA_MODE_MIRROR) {
YAW(euler) *= -1.0f;
ROLL(euler) *= -1.0f;
}
Head* head = getHead();
head->setBaseYaw(YAW(euler));
head->setBasePitch(PITCH(euler));

View file

@ -115,10 +115,6 @@ bool raySphereIntersect(const glm::vec3 &dir, const glm::vec3 &origin, float r,
ApplicationCompositor::ApplicationCompositor() :
_alphaPropertyAnimation(new QPropertyAnimation(this, "alpha"))
{
memset(_reticleActive, 0, sizeof(_reticleActive));
memset(_magActive, 0, sizeof(_reticleActive));
memset(_magSizeMult, 0, sizeof(_magSizeMult));
auto geometryCache = DependencyManager::get<GeometryCache>();
_reticleQuad = geometryCache->allocateID();
@ -219,9 +215,6 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
batch.setResourceTexture(0, overlayFramebuffer->getRenderBuffer(0));
geometryCache->renderUnitQuad(batch, vec4(vec3(1), _alpha));
// Doesn't actually render
renderPointers(batch);
//draw the mouse pointer
// Get the mouse coordinates and convert to NDC [-1, 1]
vec2 canvasSize = qApp->getCanvasSize();
@ -306,8 +299,7 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
}
#endif
// Doesn't actually render
renderPointers(batch);
vec3 reticleScale = vec3(Cursor::Manager::instance().getScale() * reticleSize);
bindCursorTexture(batch);
@ -316,34 +308,13 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
glm::mat4 overlayXfm;
_modelTransform.getMatrix(overlayXfm);
// Only render the hand pointers if the EnableHandMouseInput is enabled
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableHandMouseInput)) {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
auto palms = myAvatar->getHand()->getCopyOfPalms();
for (const auto& palm : palms) {
if (palm.isActive()) {
glm::vec2 polar = getPolarCoordinates(palm);
// Convert to quaternion
mat4 pointerXfm = glm::mat4_cast(quat(vec3(polar.y, -polar.x, 0.0f))) * glm::translate(mat4(), vec3(0, 0, -1));
mat4 reticleXfm = overlayXfm * pointerXfm;
reticleXfm = glm::scale(reticleXfm, reticleScale);
batch.setModelTransform(reticleXfm);
// Render reticle at location
geometryCache->renderUnitQuad(batch, glm::vec4(1), _reticleQuad);
}
}
}
//Mouse Pointer
if (_reticleActive[MOUSE]) {
glm::vec2 projection = screenToSpherical(glm::vec2(_reticlePosition[MOUSE].x(),
_reticlePosition[MOUSE].y()));
mat4 pointerXfm = glm::mat4_cast(quat(vec3(-projection.y, projection.x, 0.0f))) * glm::translate(mat4(), vec3(0, 0, -1));
mat4 reticleXfm = overlayXfm * pointerXfm;
reticleXfm = glm::scale(reticleXfm, reticleScale);
batch.setModelTransform(reticleXfm);
geometryCache->renderUnitQuad(batch, glm::vec4(1), _reticleQuad);
}
glm::vec2 projection = screenToSpherical(qApp->getTrueMouse());
mat4 pointerXfm = glm::mat4_cast(quat(vec3(-projection.y, projection.x, 0.0f))) * glm::translate(mat4(), vec3(0, 0, -1));
mat4 reticleXfm = overlayXfm * pointerXfm;
reticleXfm = glm::scale(reticleXfm, reticleScale);
batch.setModelTransform(reticleXfm);
geometryCache->renderUnitQuad(batch, glm::vec4(1), _reticleQuad);
});
}
@ -423,124 +394,6 @@ bool ApplicationCompositor::calculateRayUICollisionPoint(const glm::vec3& positi
return false;
}
//Renders optional pointers
void ApplicationCompositor::renderPointers(gpu::Batch& batch) {
if (qApp->isHMDMode() && !qApp->getLastMouseMoveWasSimulated()) {
//If we are in oculus, render reticle later
auto trueMouse = qApp->getTrueMouse();
trueMouse /= qApp->getCanvasSize();
QPoint position = QPoint(qApp->getTrueMouse().x, qApp->getTrueMouse().y);
_reticlePosition[MOUSE] = position;
_reticleActive[MOUSE] = true;
_magActive[MOUSE] = _magnifier;
_reticleActive[LEFT_CONTROLLER] = false;
_reticleActive[RIGHT_CONTROLLER] = false;
} else if (qApp->getLastMouseMoveWasSimulated()
&& Menu::getInstance()->isOptionChecked(MenuOption::EnableHandMouseInput)) {
//only render controller pointer if we aren't already rendering a mouse pointer
_reticleActive[MOUSE] = false;
_magActive[MOUSE] = false;
renderControllerPointers(batch);
}
}
// FIXME - this is old code that likely needs to be removed and/or reworked to support the new input control model
void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
//Static variables used for storing controller state
static quint64 pressedTime[NUMBER_OF_RETICLES] = { 0ULL, 0ULL, 0ULL };
static bool isPressed[NUMBER_OF_RETICLES] = { false, false, false };
static bool stateWhenPressed[NUMBER_OF_RETICLES] = { false, false, false };
const HandData* handData = DependencyManager::get<AvatarManager>()->getMyAvatar()->getHandData();
auto palms = handData->getCopyOfPalms();
for (unsigned int palmIndex = 2; palmIndex < 4; palmIndex++) {
const int index = palmIndex - 1;
const PalmData* palmData = NULL;
if (palmIndex >= palms.size()) {
return;
}
if (palms[palmIndex].isActive()) {
palmData = &palms[palmIndex];
} else {
continue;
}
if (isPressed[index]) {
isPressed[index] = false;
//If the button was only pressed for < 250 ms
//then disable it.
const int MAX_BUTTON_PRESS_TIME = 250 * MSECS_TO_USECS;
if (usecTimestampNow() < pressedTime[index] + MAX_BUTTON_PRESS_TIME) {
_magActive[index] = !stateWhenPressed[index];
}
}
//if we have the oculus, we should make the cursor smaller since it will be
//magnified
if (qApp->isHMDMode()) {
QPoint point = getPalmClickLocation(palmData);
_reticlePosition[index] = point;
//When button 2 is pressed we drag the mag window
if (isPressed[index]) {
_magActive[index] = true;
}
// If oculus is enabled, we draw the crosshairs later
continue;
}
auto canvasSize = qApp->getCanvasSize();
int mouseX, mouseY;
// Get directon relative to avatar orientation
glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * palmData->getFingerDirection();
// Get the angles, scaled between (-0.5,0.5)
float xAngle = (atan2f(direction.z, direction.x) + PI_OVER_TWO);
float yAngle = 0.5f - ((atan2f(direction.z, direction.y) + (float)PI_OVER_TWO));
// Get the pixel range over which the xAngle and yAngle are scaled
float cursorRange = canvasSize.x * controller::InputDevice::getCursorPixelRangeMult();
mouseX = (canvasSize.x / 2.0f + cursorRange * xAngle);
mouseY = (canvasSize.y / 2.0f + cursorRange * yAngle);
//If the cursor is out of the screen then don't render it
if (mouseX < 0 || mouseX >= (int)canvasSize.x || mouseY < 0 || mouseY >= (int)canvasSize.y) {
_reticleActive[index] = false;
continue;
}
_reticleActive[index] = true;
const float reticleSize = 40.0f;
mouseX -= reticleSize / 2.0f;
mouseY += reticleSize / 2.0f;
glm::vec2 topLeft(mouseX, mouseY);
glm::vec2 bottomRight(mouseX + reticleSize, mouseY - reticleSize);
glm::vec2 texCoordTopLeft(0.0f, 0.0f);
glm::vec2 texCoordBottomRight(1.0f, 1.0f);
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight,
glm::vec4(RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], 1.0f));
}
}
void ApplicationCompositor::buildHemiVertices(
const float fov, const float aspectRatio, const int slices, const int stacks) {
static float textureFOV = 0.0f, textureAspectRatio = 1.0f;

View file

@ -92,9 +92,6 @@ private:
void drawSphereSection(gpu::Batch& batch);
void updateTooltips();
void renderPointers(gpu::Batch& batch);
void renderControllerPointers(gpu::Batch& batch);
vec2 getPolarCoordinates(const PalmData& palm) const;
// Support for hovering and tooltips
@ -109,11 +106,6 @@ private:
float _textureAspectRatio{ 1.0f };
int _hemiVerticesID{ GeometryCache::UNKNOWN_ID };
enum Reticles { MOUSE, LEFT_CONTROLLER, RIGHT_CONTROLLER, NUMBER_OF_RETICLES };
bool _reticleActive[NUMBER_OF_RETICLES];
QPoint _reticlePosition[NUMBER_OF_RETICLES];
bool _magActive[NUMBER_OF_RETICLES];
float _magSizeMult[NUMBER_OF_RETICLES];
bool _magnifier{ true };
float _alpha{ 1.0f };

View file

@ -283,12 +283,14 @@ bool Rig::getJointStateTranslation(int index, glm::vec3& translation) const {
void Rig::clearJointState(int index) {
if (isIndexValid(index)) {
_internalPoseSet._overrideFlags[index] = false;
_internalPoseSet._overridePoses[index] = _animSkeleton->getRelativeDefaultPose(index);
}
}
void Rig::clearJointStates() {
_internalPoseSet._overrideFlags.clear();
_internalPoseSet._overrideFlags.resize(_animSkeleton->getNumJoints());
_internalPoseSet._overridePoses = _animSkeleton->getRelativeDefaultPoses();
}
void Rig::clearJointAnimationPriority(int index) {
@ -464,6 +466,25 @@ const AnimPoseVec& Rig::getAbsoluteDefaultPoses() const {
return _absoluteDefaultPoses;
}
bool Rig::getRelativeDefaultJointRotation(int index, glm::quat& rotationOut) const {
if (_animSkeleton && index >= 0 && index < _animSkeleton->getNumJoints()) {
rotationOut = _animSkeleton->getRelativeDefaultPose(index).rot;
return true;
} else {
return false;
}
}
bool Rig::getRelativeDefaultJointTranslation(int index, glm::vec3& translationOut) const {
if (_animSkeleton && index >= 0 && index < _animSkeleton->getNumJoints()) {
translationOut = _animSkeleton->getRelativeDefaultPose(index).trans;
return true;
} else {
return false;
}
}
// animation reference speeds.
static const std::vector<float> FORWARD_SPEEDS = { 0.4f, 1.4f, 4.5f }; // m/s
static const std::vector<float> BACKWARD_SPEEDS = { 0.6f, 1.45f }; // m/s

View file

@ -200,6 +200,10 @@ public:
// rig space
const AnimPoseVec& getAbsoluteDefaultPoses() const;
// geometry space
bool getRelativeDefaultJointRotation(int index, glm::quat& rotationOut) const;
bool getRelativeDefaultJointTranslation(int index, glm::vec3& translationOut) const;
void copyJointsIntoJointData(QVector<JointData>& jointDataVec) const;
void copyJointsFromJointData(const QVector<JointData>& jointDataVec);

View file

@ -62,6 +62,14 @@ namespace controller {
makeButtonPair(Action::TOGGLE_MUTE, "ToggleMute"),
makeButtonPair(Action::CYCLE_CAMERA, "CycleCamera"),
makeAxisPair(Action::RETICLE_CLICK, "ReticleClick"),
makeAxisPair(Action::RETICLE_X, "ReticleX"),
makeAxisPair(Action::RETICLE_Y, "ReticleY"),
makeAxisPair(Action::RETICLE_LEFT, "ReticleLeft"),
makeAxisPair(Action::RETICLE_RIGHT, "ReticleRight"),
makeAxisPair(Action::RETICLE_UP, "ReticleUp"),
makeAxisPair(Action::RETICLE_DOWN, "ReticleDown"),
// Aliases and bisected versions
makeAxisPair(Action::LONGITUDINAL_BACKWARD, "Backward"),
makeAxisPair(Action::LONGITUDINAL_FORWARD, "Forward"),

View file

@ -55,30 +55,41 @@ enum class Action {
SHIFT,
// Biseced aliases for TRANSLATE_Z
// Pointer/Reticle control
RETICLE_CLICK,
RETICLE_X,
RETICLE_Y,
// Bisected aliases for RETICLE_X/RETICLE_Y
RETICLE_LEFT,
RETICLE_RIGHT,
RETICLE_UP,
RETICLE_DOWN,
// Bisected aliases for TRANSLATE_Z
LONGITUDINAL_BACKWARD,
LONGITUDINAL_FORWARD,
// Biseced aliases for TRANSLATE_X
// Bisected aliases for TRANSLATE_X
LATERAL_LEFT,
LATERAL_RIGHT,
// Biseced aliases for TRANSLATE_Y
// Bisected aliases for TRANSLATE_Y
VERTICAL_DOWN,
VERTICAL_UP,
// Biseced aliases for ROTATE_Y
// Bisected aliases for ROTATE_Y
YAW_LEFT,
YAW_RIGHT,
// Biseced aliases for ROTATE_X
// Bisected aliases for ROTATE_X
PITCH_DOWN,
PITCH_UP,
// Biseced aliases for TRANSLATE_CAMERA_Z
// Bisected aliases for TRANSLATE_CAMERA_Z
BOOM_IN,
BOOM_OUT,
NUM_ACTIONS,
};

View file

@ -255,6 +255,9 @@ void UserInputMapper::update(float deltaTime) {
fixBisectedAxis(_actionStates[toInt(Action::ROTATE_Y)], _actionStates[toInt(Action::YAW_LEFT)], _actionStates[toInt(Action::YAW_RIGHT)]);
fixBisectedAxis(_actionStates[toInt(Action::ROTATE_X)], _actionStates[toInt(Action::PITCH_UP)], _actionStates[toInt(Action::PITCH_DOWN)]);
fixBisectedAxis(_actionStates[toInt(Action::RETICLE_X)], _actionStates[toInt(Action::RETICLE_LEFT)], _actionStates[toInt(Action::RETICLE_RIGHT)]);
fixBisectedAxis(_actionStates[toInt(Action::RETICLE_Y)], _actionStates[toInt(Action::RETICLE_UP)], _actionStates[toInt(Action::RETICLE_DOWN)]);
static const float EPSILON = 0.01f;
for (auto i = 0; i < toInt(Action::NUM_ACTIONS); i++) {
_actionStates[i] *= _actionScales[i];
@ -319,42 +322,12 @@ QVector<QString> UserInputMapper::getActionNames() const {
}
return result;
}
/*
void UserInputMapper::assignDefaulActionScales() {
_actionScales[toInt(Action::LONGITUDINAL_BACKWARD)] = 1.0f; // 1m per unit
_actionScales[toInt(Action::LONGITUDINAL_FORWARD)] = 1.0f; // 1m per unit
_actionScales[toInt(Action::LATERAL_LEFT)] = 1.0f; // 1m per unit
_actionScales[toInt(Action::LATERAL_RIGHT)] = 1.0f; // 1m per unit
_actionScales[toInt(Action::VERTICAL_DOWN)] = 1.0f; // 1m per unit
_actionScales[toInt(Action::VERTICAL_UP)] = 1.0f; // 1m per unit
_actionScales[toInt(Action::YAW_LEFT)] = 1.0f; // 1 degree per unit
_actionScales[toInt(Action::YAW_RIGHT)] = 1.0f; // 1 degree per unit
_actionScales[toInt(Action::PITCH_DOWN)] = 1.0f; // 1 degree per unit
_actionScales[toInt(Action::PITCH_UP)] = 1.0f; // 1 degree per unit
_actionScales[toInt(Action::BOOM_IN)] = 0.5f; // .5m per unit
_actionScales[toInt(Action::BOOM_OUT)] = 0.5f; // .5m per unit
_actionScales[toInt(Action::LEFT_HAND)] = 1.0f; // default
_actionScales[toInt(Action::RIGHT_HAND)] = 1.0f; // default
_actionScales[toInt(Action::LEFT_HAND_CLICK)] = 1.0f; // on
_actionScales[toInt(Action::RIGHT_HAND_CLICK)] = 1.0f; // on
_actionScales[toInt(Action::SHIFT)] = 1.0f; // on
_actionScales[toInt(Action::ACTION1)] = 1.0f; // default
_actionScales[toInt(Action::ACTION2)] = 1.0f; // default
_actionScales[toInt(Action::TRANSLATE_X)] = 1.0f; // default
_actionScales[toInt(Action::TRANSLATE_Y)] = 1.0f; // default
_actionScales[toInt(Action::TRANSLATE_Z)] = 1.0f; // default
_actionScales[toInt(Action::ROLL)] = 1.0f; // default
_actionScales[toInt(Action::PITCH)] = 1.0f; // default
_actionScales[toInt(Action::YAW)] = 1.0f; // default
}
*/
static int actionMetaTypeId = qRegisterMetaType<Action>();
static int inputMetaTypeId = qRegisterMetaType<Input>();
static int inputPairMetaTypeId = qRegisterMetaType<Input::NamedPair>();
static int poseMetaTypeId = qRegisterMetaType<controller::Pose>("Pose");
QScriptValue inputToScriptValue(QScriptEngine* engine, const Input& input);
void inputFromScriptValue(const QScriptValue& object, Input& input);
QScriptValue actionToScriptValue(QScriptEngine* engine, const Action& action);

View file

@ -194,7 +194,7 @@ AnimationPointer ModelEntityItem::getAnimation(const QString& url) {
void ModelEntityItem::mapJoints(const QStringList& modelJointNames) {
// if we don't have animation, or we're already joint mapped then bail early
if (!hasAnimation() || _jointMappingCompleted) {
if (!hasAnimation() || jointsMapped()) {
return;
}
@ -208,6 +208,7 @@ void ModelEntityItem::mapJoints(const QStringList& modelJointNames) {
_jointMapping[i] = animationJointNames.indexOf(modelJointNames[i]);
}
_jointMappingCompleted = true;
_jointMappingURL = _animationProperties.getURL();
}
}
}

View file

@ -104,7 +104,7 @@ public:
void mapJoints(const QStringList& modelJointNames);
void getAnimationFrame(bool& newFrame, QVector<glm::quat>& rotationsResult, QVector<glm::vec3>& translationsResult);
bool jointsMapped() const { return _jointMappingCompleted; }
bool jointsMapped() const { return _jointMappingURL == getAnimationURL() && _jointMappingCompleted; }
bool getAnimationIsPlaying() const { return _animationLoop.getRunning(); }
float getAnimationCurrentFrame() const { return _animationLoop.getCurrentFrame(); }
@ -146,6 +146,7 @@ protected:
// used on client side
bool _jointMappingCompleted;
QVector<int> _jointMapping;
QString _jointMappingURL;
static AnimationPointer getAnimation(const QString& url);
static QMap<QString, AnimationPointer> _loadedAnimations;

View file

@ -217,7 +217,6 @@ void OBJReader::parseMaterialLibrary(QIODevice* device) {
materials[matName] = currentMaterial;
matName = tokenizer.getDatum();
currentMaterial = materials[matName];
currentMaterial.diffuseTextureFilename = "test";
#ifdef WANT_DEBUG
qCDebug(modelformat) << "OBJ Reader Starting new material definition " << matName;
#endif
@ -461,6 +460,9 @@ FBXGeometry* OBJReader::readOBJ(QByteArray& model, const QVariantHash& mapping,
}
if (!textName.isEmpty()) {
#ifdef WANT_DEBUG
qCDebug(modelformat) << "OBJ Reader found a default texture: " << textName;
#endif
preDefinedMaterial.diffuseTextureFilename = textName;
}
materials[SMART_DEFAULT_MATERIAL_NAME] = preDefinedMaterial;
@ -553,9 +555,7 @@ FBXGeometry* OBJReader::readOBJ(QByteArray& model, const QVariantHash& mapping,
model::MaterialPointer modelMaterial = fbxMaterial._material;
if (!objMaterial.diffuseTextureFilename.isEmpty()) {
FBXTexture texture;
QUrl url = _url.resolved(QUrl(objMaterial.diffuseTextureFilename));
// TODO -- something to get textures working again
fbxMaterial.diffuseTexture.filename = objMaterial.diffuseTextureFilename;
}
modelMaterial->setEmissive(fbxMaterial.emissiveColor);

View file

@ -99,6 +99,9 @@ void KeyboardMouseDevice::mouseMoveEvent(QMouseEvent* event, unsigned int device
_inputDevice->_axisStateMap[MOUSE_AXIS_Y_POS] = (currentMove.y() < 0 ? -currentMove.y() : 0.0f);
_inputDevice->_axisStateMap[MOUSE_AXIS_Y_NEG] = (currentMove.y() > 0 ? currentMove.y() : 0.0f);
// FIXME - this will show large jumps when the cursor leaves and re-enters the application window,
// because we don't receive MouseEvents while the cursor is outside of it.
_lastCursor = currentPos;
_mouseMoved = true;

View file

@ -47,18 +47,7 @@ void ProceduralSkybox::render(gpu::Batch& batch, const ViewFrustum& viewFrustum,
Skybox::render(batch, viewFrustum, skybox);
}
static gpu::BufferPointer theBuffer;
static gpu::Stream::FormatPointer theFormat;
if (skybox._procedural && skybox._procedural->_enabled && skybox._procedural->ready()) {
if (!theBuffer) {
const float CLIP = 1.0f;
const glm::vec2 vertices[4] = { { -CLIP, -CLIP }, { CLIP, -CLIP }, { -CLIP, CLIP }, { CLIP, CLIP } };
theBuffer = std::make_shared<gpu::Buffer>(sizeof(vertices), (const gpu::Byte*) vertices);
theFormat = std::make_shared<gpu::Stream::Format>();
theFormat->setAttribute(gpu::Stream::POSITION, gpu::Stream::POSITION, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::XYZ));
}
glm::mat4 projMat;
viewFrustum.evalProjectionMatrix(projMat);
@ -67,8 +56,6 @@ void ProceduralSkybox::render(gpu::Batch& batch, const ViewFrustum& viewFrustum,
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewTransform);
batch.setModelTransform(Transform()); // only for Mac
batch.setInputBuffer(gpu::Stream::POSITION, theBuffer, 0, 8);
batch.setInputFormat(theFormat);
if (skybox.getCubemap() && skybox.getCubemap()->isDefined()) {
batch.setResourceTexture(0, skybox.getCubemap());

View file

@ -11,21 +11,26 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Inputs.slh@>
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
out vec3 _normal;
void main(void) {
void main(void) {
const float depth = 0.0;
const vec4 UNIT_QUAD[4] = vec4[4](
vec4(-1.0, -1.0, depth, 1.0),
vec4(1.0, -1.0, depth, 1.0),
vec4(-1.0, 1.0, depth, 1.0),
vec4(1.0, 1.0, depth, 1.0)
);
vec4 inPosition = UNIT_QUAD[gl_VertexID];
// standard transform
TransformCamera cam = getTransformCamera();
vec3 clipDir = vec3(inPosition.xy, 0.0);
vec3 eyeDir;
<$transformClipToEyeDir(cam, clipDir, eyeDir)$>
<$transformEyeToWorldDir(cam, eyeDir, _normal)$>

View file

@ -772,12 +772,20 @@ bool Model::getJointTranslation(int jointIndex, glm::vec3& translation) const {
return _rig->getJointTranslation(jointIndex, translation);
}
bool Model::getAbsoluteJointRotationInRigFrame(int jointIndex, glm::quat& rotation) const {
return _rig->getAbsoluteJointRotationInRigFrame(jointIndex, rotation);
bool Model::getAbsoluteJointRotationInRigFrame(int jointIndex, glm::quat& rotationOut) const {
return _rig->getAbsoluteJointRotationInRigFrame(jointIndex, rotationOut);
}
bool Model::getAbsoluteJointTranslationInRigFrame(int jointIndex, glm::vec3& translation) const {
return _rig->getAbsoluteJointTranslationInRigFrame(jointIndex, translation);
bool Model::getAbsoluteJointTranslationInRigFrame(int jointIndex, glm::vec3& translationOut) const {
return _rig->getAbsoluteJointTranslationInRigFrame(jointIndex, translationOut);
}
bool Model::getRelativeDefaultJointRotation(int jointIndex, glm::quat& rotationOut) const {
return _rig->getRelativeDefaultJointRotation(jointIndex, rotationOut);
}
bool Model::getRelativeDefaultJointTranslation(int jointIndex, glm::vec3& translationOut) const {
return _rig->getRelativeDefaultJointTranslation(jointIndex, translationOut);
}
bool Model::getJointCombinedRotation(int jointIndex, glm::quat& rotation) const {

View file

@ -167,8 +167,11 @@ public:
bool getJointTranslation(int jointIndex, glm::vec3& translation) const;
// model frame
bool getAbsoluteJointRotationInRigFrame(int jointIndex, glm::quat& rotation) const;
bool getAbsoluteJointTranslationInRigFrame(int jointIndex, glm::vec3& translation) const;
bool getAbsoluteJointRotationInRigFrame(int jointIndex, glm::quat& rotationOut) const;
bool getAbsoluteJointTranslationInRigFrame(int jointIndex, glm::vec3& translationOut) const;
bool getRelativeDefaultJointRotation(int jointIndex, glm::quat& rotationOut) const;
bool getRelativeDefaultJointTranslation(int jointIndex, glm::vec3& translationOut) const;
/// Returns the index of the parent of the indexed joint, or -1 if not found.
int getParentJointIndex(int jointIndex) const;