Merge branch 'master' of https://github.com/highfidelity/hifi into daft

This commit is contained in:
Sam Gateau 2015-06-12 17:17:32 +02:00
commit 09e12f93b3
62 changed files with 1885 additions and 657 deletions


@ -53,7 +53,7 @@ else ()
endif ()
endif(WIN32)
if (NOT MSVC12)
if ((NOT MSVC12) AND (NOT MSVC14))
include(CheckCXXCompilerFlag)
CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11)
CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X)


@ -66,6 +66,9 @@ AssignmentClient::AssignmentClient(Assignment::Type requestAssignmentType, QStri
// set the logging target to the CHILD_TARGET_NAME
LogHandler::getInstance().setTargetName(ASSIGNMENT_CLIENT_TARGET_NAME);
// make sure we output process IDs for a child AC otherwise it's insane to parse
LogHandler::getInstance().setShouldOutputPID(true);
// setup our _requestAssignment member variable from the passed arguments
_requestAssignment = Assignment(Assignment::RequestCommand, requestAssignmentType, assignmentPool);


@ -39,9 +39,9 @@ AssignmentClientMonitor::AssignmentClientMonitor(const unsigned int numAssignmen
_walletUUID(walletUUID),
_assignmentServerHostname(assignmentServerHostname),
_assignmentServerPort(assignmentServerPort)
{
qDebug() << "_requestAssignmentType =" << _requestAssignmentType;
// start the Logging class with the parent's target name
LogHandler::getInstance().setTargetName(ASSIGNMENT_CLIENT_MONITOR_TARGET_NAME);
@ -77,13 +77,13 @@ void AssignmentClientMonitor::simultaneousWaitOnChildren(int waitMsecs) {
while(_childProcesses.size() > 0 && !waitTimer.hasExpired(waitMsecs)) {
// continue processing events so we can handle a process finishing up
QCoreApplication::processEvents();
}
}
}
void AssignmentClientMonitor::childProcessFinished() {
QProcess* childProcess = qobject_cast<QProcess*>(sender());
qint64 processID = _childProcesses.key(childProcess);
if (processID > 0) {
qDebug() << "Child process" << processID << "has finished. Removing from internal map.";
_childProcesses.remove(processID);
@ -98,17 +98,17 @@ void AssignmentClientMonitor::stopChildProcesses() {
qDebug() << "Attempting to terminate child process" << childProcess->processId();
childProcess->terminate();
}
simultaneousWaitOnChildren(WAIT_FOR_CHILD_MSECS);
if (_childProcesses.size() > 0) {
// ask even more firmly
foreach(QProcess* childProcess, _childProcesses) {
qDebug() << "Attempting to kill child process" << childProcess->processId();
childProcess->kill();
}
simultaneousWaitOnChildren(WAIT_FOR_CHILD_MSECS);
}
}
@ -122,7 +122,7 @@ void AssignmentClientMonitor::aboutToQuit() {
void AssignmentClientMonitor::spawnChildClient() {
QProcess* assignmentClient = new QProcess(this);
// unparse the parts of the command-line that the child cares about
QStringList _childArguments;
if (_assignmentPool != "") {
@ -153,7 +153,7 @@ void AssignmentClientMonitor::spawnChildClient() {
// make sure that the output from the child process appears in our output
assignmentClient->setProcessChannelMode(QProcess::ForwardedChannels);
assignmentClient->start(QCoreApplication::applicationFilePath(), _childArguments);
// make sure we hear that this process has finished when it does
@ -194,7 +194,7 @@ void AssignmentClientMonitor::checkSpares() {
qDebug() << "asking child" << aSpareId << "to exit.";
SharedNodePointer childNode = nodeList->nodeWithUUID(aSpareId);
childNode->activateLocalSocket();
QByteArray diePacket = nodeList->byteArrayWithPopulatedHeader(PacketTypeStopNode);
nodeList->writeUnverifiedDatagram(diePacket, childNode);
}
@ -239,7 +239,7 @@ void AssignmentClientMonitor::readPendingDatagrams() {
// update our records about how to reach this child
matchingNode->setLocalSocket(senderSockAddr);
QVariantMap packetVariantMap =
JSONBreakableMarshal::fromStringBuffer(receivedPacket.mid(numBytesForPacketHeader(receivedPacket)));
QJsonObject unpackedStatsJSON = QJsonObject::fromVariantMap(packetVariantMap);


@ -850,7 +850,9 @@ void AudioMixer::run() {
++_numStatFrames;
// since we're a while loop we need to help Qt's event processing
QCoreApplication::processEvents();
QCoreApplication::sendPostedEvents(this, 0);
if (_isFinished) {
break;


@ -14,12 +14,12 @@ macro(SETUP_EXTERNALS_BINARY_DIR)
# get a short name for the generator to use in the path
STRING(REGEX REPLACE " " "-" CMAKE_GENERATOR_FOLDER_NAME ${CMAKE_GENERATOR})
if (MSVC12)
set(CMAKE_GENERATOR_FOLDER_NAME "vc12")
else ()
if (CMAKE_GENERATOR_FOLDER_NAME STREQUAL "Unix-Makefiles")
set(CMAKE_GENERATOR_FOLDER_NAME "makefiles")
endif ()
elseif (MSVC14)
set(CMAKE_GENERATOR_FOLDER_NAME "vc14")
elseif(CMAKE_GENERATOR_FOLDER_NAME STREQUAL "Unix-Makefiles")
set(CMAKE_GENERATOR_FOLDER_NAME "makefiles")
endif ()
set(EXTERNALS_BINARY_ROOT_DIR "${CMAKE_CURRENT_BINARY_DIR}/ext")


@ -3,6 +3,7 @@
// examples
//
// Created by Stephen Birarda on 06/08/15.
// Added disconnect HRS 6/11/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
@ -10,14 +11,19 @@
//
// setup the local sound we're going to use
var connectSound = SoundCache.getSound("file://" + Paths.resources + "sounds/short1.wav");
var connectSound = SoundCache.getSound("file://" + Paths.resources + "sounds/hello.wav");
var disconnectSound = SoundCache.getSound("file://" + Paths.resources + "sounds/goodbye.wav");
// setup the options needed for that sound
var connectSoundOptions = {
localOnly: true
}
};
// play the sound locally once we get the first audio packet from a mixer
Audio.receivedFirstPacket.connect(function(){
Audio.playSound(connectSound, connectSoundOptions);
});
Audio.disconnected.connect(function(){
Audio.playSound(disconnectSound, connectSoundOptions);
});
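A small variation for reference (a hedged sketch, not part of this commit): the same disconnected signal could drive a positional sound audible to others instead of a local-only one. The position/volume option names mirror the commented-out playSound call in grab.js further down; MyAvatar.position and the volume value are assumptions here.

// Hypothetical variant: play the goodbye sound at the avatar rather than locally.
Audio.disconnected.connect(function() {
    Audio.playSound(disconnectSound, { position: MyAvatar.position, volume: 0.5 });
});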


@ -0,0 +1,96 @@
//
// controllerScriptingExamples.js
// examples
//
// Created by Sam Gondelman on 6/2/15
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Assumes you only have the default keyboard connected
// Resets every device to its default key bindings:
Controller.resetAllDeviceBindings();
// Query all actions
print("All Actions: \n" + Controller.getAllActions());
// Each action stores:
// action: int representation of enum
print("Action 5 int: \n" + Controller.getAllActions()[5].action);
// actionName: string representation of enum
print("Action 5 name: \n" + Controller.getAllActions()[5].actionName);
// inputChannels: list of all inputchannels that control that action
print("Action 5 input channels: \n" + Controller.getAllActions()[5].inputChannels + "\n");
// Each input channel stores:
// action: Action that this InputChannel maps to
print("Input channel action: \n" + Controller.getAllActions()[5].inputChannels[0].action);
// scale: sensitivity of input
print("Input channel scale: \n" + Controller.getAllActions()[5].inputChannels[0].scale);
// input and modifier: Inputs
print("Input channel input and modifier: \n" + Controller.getAllActions()[5].inputChannels[0].input + "\n" + Controller.getAllActions()[5].inputChannels[0].modifier + "\n");
// Each Input stores:
// device: device of input
print("Input device: \n" + Controller.getAllActions()[5].inputChannels[0].input.device);
// channel: channel of input
print("Input channel: \n" + Controller.getAllActions()[5].inputChannels[0].input.channel);
// type: type of input (Unknown, Button, Axis, Joint)
print("Input type: \n" + Controller.getAllActions()[5].inputChannels[0].input.type);
// id: id of input
print("Input id: \n" + Controller.getAllActions()[5].inputChannels[0].input.id + "\n");
// You can get the name of a device from its id
print("Device 1 name: \n" + Controller.getDeviceName(Controller.getAllActions()[5].inputChannels[0].input.id));
// You can also get all of a device's input channels
print("Device 1's input channels: \n" + Controller.getAllInputsForDevice(1) + "\n");
// Modifying properties:
// The following code will switch the "w" and "s" key functionality and adjust their scales
var s = Controller.getAllActions()[0].inputChannels[0];
var w = Controller.getAllActions()[1].inputChannels[0];
// You must remove an input controller before modifying it so the old input controller isn't registered anymore
// removeInputChannel and addInputChannel return true if successful, false otherwise
Controller.removeInputChannel(s);
Controller.removeInputChannel(w);
print(s.scale);
s.action = 1;
s.scale = .01;
w.action = 0;
w.scale = 10000;
Controller.addInputChannel(s);
Controller.addInputChannel(w);
print(s.scale);
// You can get all the available inputs for any device
// Each AvailableInput has:
// input: the Input itself
// inputName: string representing the input
var availableInputs = Controller.getAvailableInputs(1);
for (i = 0; i < availableInputs.length; i++) {
print(availableInputs[i].inputName);
}
// You can modify key bindings by using these available inputs
// This will replace e (up) with 6
var e = Controller.getAllActions()[5].inputChannels[0];
Controller.removeInputChannel(e);
e.input = availableInputs[6].input;
Controller.addInputChannel(e);
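A follow-on sketch (not from the commit itself): rather than addressing actions by hard-coded index as above, the same calls can be walked to print every action name with its current bindings. Only APIs already demonstrated in this example are used.

// Sketch: list every action and each input channel bound to it.
var allActions = Controller.getAllActions();
for (var i = 0; i < allActions.length; i++) {
    var channels = allActions[i].inputChannels;
    for (var j = 0; j < channels.length; j++) {
        print(allActions[i].actionName + " <- device " + channels[j].input.device +
              " channel " + channels[j].input.channel + " scale " + channels[j].scale);
    }
}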


@ -4,7 +4,7 @@
// Created by Eric Levin on May 1, 2015
// Copyright 2015 High Fidelity, Inc.
//
// Grabs physically moveable entities with the mouse, by applying a spring force.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
@ -20,7 +20,7 @@ var ANGULAR_DAMPING_RATE = 0.40;
// NOTE: to improve readability global variable names start with 'g'
var gIsGrabbing = false;
var gGrabbedEntity = null;
var gPrevMouse = {x: 0, y: 0};
var gActionID = null;
var gEntityProperties;
var gStartPosition;
var gStartRotation;
@ -31,20 +31,20 @@ var gPlaneNormal = ZERO_VEC3;
// gMaxGrabDistance is a function of the size of the object.
var gMaxGrabDistance;
// gGrabMode defines the degrees of freedom of the grab target positions
// relative to gGrabStartPosition. Options include:
// xzPlane (default)
// verticalCylinder (SHIFT)
// rotate (CONTROL)
// Modes to eventually support?:
// xyPlane
// yzPlane
// polar
// elevationAzimuth
var gGrabMode = "xzplane";
// gGrabOffset allows the user to grab an object off-center. It points from ray's intersection
// with the move-plane to object center (at the moment the grab is initiated). Future target positions
// are relative to the ray's intersection by the same offset.
var gGrabOffset = { x: 0, y: 0, z: 0 };
@ -53,13 +53,14 @@ var gTargetRotation;
var gLiftKey = false; // SHIFT
var gRotateKey = false; // CONTROL
var gInitialMouse = { x: 0, y: 0 };
var gPreviousMouse = { x: 0, y: 0 };
var gMouseCursorLocation = { x: 0, y: 0 };
var gMouseAtRotateStart = { x: 0, y: 0 };
var gBeaconHeight = 0.10;
var gAngularVelocity = ZERO_VEC3;
// var gAngularVelocity = ZERO_VEC3;
// TODO: play sounds again when we aren't leaking AudioInjector threads
// var grabSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/CloseClamp.wav");
@ -140,6 +141,10 @@ function mouseIntersectionWithPlane(pointOnPlane, planeNormal, event) {
}
function computeNewGrabPlane() {
if (!gIsGrabbing) {
return;
}
var maybeResetMousePosition = false;
if (gGrabMode !== "rotate") {
gMouseAtRotateStart = gMouseCursorLocation;
@ -162,7 +167,7 @@ function computeNewGrabPlane() {
var xzOffset = Vec3.subtract(gPointOnPlane, Camera.getPosition());
xzOffset.y = 0;
gXzDistanceToGrab = Vec3.length(xzOffset);
if (gGrabMode !== "rotate" && maybeResetMousePosition) {
// we reset the mouse position whenever we stop rotating
Window.setCursorPosition(gMouseAtRotateStart.x, gMouseAtRotateStart.y);
@ -173,6 +178,7 @@ function mousePressEvent(event) {
if (!event.isLeftButton) {
return;
}
gInitialMouse = {x: event.x, y: event.y };
gPreviousMouse = {x: event.x, y: event.y };
var pickRay = Camera.computePickRay(event.x, event.y);
@ -189,12 +195,13 @@ function mousePressEvent(event) {
var clickedEntity = pickResults.entityID;
var entityProperties = Entities.getEntityProperties(clickedEntity)
var objectPosition = entityProperties.position;
gStartPosition = entityProperties.position;
gStartRotation = entityProperties.rotation;
var cameraPosition = Camera.getPosition();
gBeaconHeight = Vec3.length(entityProperties.dimensions);
gMaxGrabDistance = gBeaconHeight / MAX_SOLID_ANGLE;
if (Vec3.distance(objectPosition, cameraPosition) > gMaxGrabDistance) {
if (Vec3.distance(gStartPosition, cameraPosition) > gMaxGrabDistance) {
// don't allow grabs of things far away
return;
}
@ -205,20 +212,20 @@ function mousePressEvent(event) {
gGrabbedEntity = clickedEntity;
gCurrentPosition = entityProperties.position;
gOriginalGravity = entityProperties.gravity;
gTargetPosition = objectPosition;
gTargetPosition = gStartPosition;
// compute the grab point
var nearestPoint = Vec3.subtract(objectPosition, cameraPosition);
var nearestPoint = Vec3.subtract(gStartPosition, cameraPosition);
var distanceToGrab = Vec3.dot(nearestPoint, pickRay.direction);
nearestPoint = Vec3.multiply(distanceToGrab, pickRay.direction);
gPointOnPlane = Vec3.sum(cameraPosition, nearestPoint);
// compute the grab offset
gGrabOffset = Vec3.subtract(objectPosition, gPointOnPlane);
gGrabOffset = Vec3.subtract(gStartPosition, gPointOnPlane);
computeNewGrabPlane();
updateDropLine(objectPosition);
updateDropLine(gStartPosition);
// TODO: play sounds again when we aren't leaking AudioInjector threads
//Audio.playSound(grabSound, { position: entityProperties.position, volume: VOLUME });
@ -231,6 +238,8 @@ function mouseReleaseEvent() {
}
gIsGrabbing = false
Entities.deleteAction(gGrabbedEntity, gActionID);
gActionID = null;
Overlays.editOverlay(gBeacon, { visible: false });
@ -250,18 +259,24 @@ function mouseMoveEvent(event) {
gOriginalGravity = entityProperties.gravity;
}
var actionArgs = {};
if (gGrabMode === "rotate") {
var deltaMouse = { x: 0, y: 0 };
var dx = event.x - gPreviousMouse.x;
var dy = event.y - gPreviousMouse.y;
var dx = event.x - gInitialMouse.x;
var dy = event.y - gInitialMouse.y;
var orientation = Camera.getOrientation();
var dragOffset = Vec3.multiply(dx, Quat.getRight(orientation));
dragOffset = Vec3.sum(dragOffset, Vec3.multiply(-dy, Quat.getUp(orientation)));
var axis = Vec3.cross(dragOffset, Quat.getFront(orientation));
var axis = Vec3.normalize(axis);
var ROTATE_STRENGTH = 8.0; // magic number tuned by hand
gAngularVelocity = Vec3.multiply(ROTATE_STRENGTH, axis);
axis = Vec3.normalize(axis);
var ROTATE_STRENGTH = 0.4; // magic number tuned by hand
var angle = ROTATE_STRENGTH * Math.sqrt((dx * dx) + (dy * dy));
var deltaQ = Quat.angleAxis(angle, axis);
// var qZero = entityProperties.rotation;
var qZero = gStartRotation;
var qOne = Quat.multiply(deltaQ, qZero);
actionArgs = {targetRotation: qOne, angularTimeScale: 0.1};
} else {
var newTargetPosition;
if (gGrabMode === "verticalCylinder") {
@ -284,9 +299,18 @@ function mouseMoveEvent(event) {
}
}
gTargetPosition = Vec3.sum(newTargetPosition, gGrabOffset);
actionArgs = {targetPosition: gTargetPosition, linearTimeScale: 0.1};
}
gPreviousMouse = { x: event.x, y: event.y };
gMouseCursorLocation = { x: Window.getCursorPositionX(), y: Window.getCursorPositionY() };
if (!gActionID) {
gActionID = Entities.addAction("spring", gGrabbedEntity, actionArgs);
} else {
Entities.updateAction(gGrabbedEntity, gActionID, actionArgs);
}
updateDropLine(gTargetPosition);
}
function keyReleaseEvent(event) {
@ -309,38 +333,8 @@ function keyPressEvent(event) {
computeNewGrabPlane();
}
function update(deltaTime) {
if (!gIsGrabbing) {
return;
}
var entityProperties = Entities.getEntityProperties(gGrabbedEntity);
gCurrentPosition = entityProperties.position;
if (gGrabMode === "rotate") {
gAngularVelocity = Vec3.subtract(gAngularVelocity, Vec3.multiply(gAngularVelocity, ANGULAR_DAMPING_RATE));
Entities.editEntity(gGrabbedEntity, { angularVelocity: gAngularVelocity, });
}
// always push toward linear grab position, even when rotating
var newVelocity = ZERO_VEC3;
var dPosition = Vec3.subtract(gTargetPosition, gCurrentPosition);
var delta = Vec3.length(dPosition);
if (delta > CLOSE_ENOUGH) {
var MAX_POSITION_DELTA = 4.0;
if (delta > MAX_POSITION_DELTA) {
dPosition = Vec3.multiply(dPosition, MAX_POSITION_DELTA / delta);
}
// desired speed is proportional to displacement by the inverse of timescale
// (for critically damped motion)
newVelocity = Vec3.multiply(dPosition, INV_MOVE_TIMESCALE);
}
Entities.editEntity(gGrabbedEntity, { velocity: newVelocity, });
updateDropLine(gTargetPosition);
}
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Controller.keyPressEvent.connect(keyPressEvent);
Controller.keyReleaseEvent.connect(keyReleaseEvent);
Script.update.connect(update);
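For readers skimming the diff: the reworked grab.js now leans on the spring action lifecycle (add once, retarget on each mouse move, delete on release) instead of setting velocities every frame. A condensed, hedged sketch of that pattern, where entityID stands in for an entity you already have:

// Hedged sketch of the spring-action lifecycle used by the new grab code.
var springID = Entities.addAction("spring", entityID, {
    targetPosition: { x: 0, y: 1, z: 0 },
    linearTimeScale: 0.1
});
// ...on later mouse moves, retarget instead of recreating:
Entities.updateAction(entityID, springID, {
    targetRotation: Quat.fromPitchYawRollDegrees(0, 90, 0),
    angularTimeScale: 0.1
});
// ...and when the grab ends:
Entities.deleteAction(entityID, springID);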

examples/stick.js (new file, 92 lines)

@ -0,0 +1,92 @@
// stick.js
// examples
//
// Created by Seth Alves on 2015-6-10
// Copyright 2015 High Fidelity, Inc.
//
// Allow avatar to hold a stick
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var hand = "left";
var nullActionID = "00000000-0000-0000-0000-000000000000";
var controllerID;
var controllerActive;
var stickID = null;
var actionID = nullActionID;
// sometimes if this is run immediately the stick doesn't get created, so use a timer.
Script.setTimeout(function() {
stickID = Entities.addEntity({
type: "Model",
modelURL: "https://hifi-public.s3.amazonaws.com/eric/models/stick.fbx",
compoundShapeURL: "https://hifi-public.s3.amazonaws.com/eric/models/stick.obj",
dimensions: {x: .11, y: .11, z: .59},
position: MyAvatar.getRightPalmPosition(), // initial position doesn't matter, as long as it's close
rotation: MyAvatar.orientation,
damping: .1,
collisionsWillMove: true
});
actionID = Entities.addAction("hold", stickID, {relativePosition: {x: 0.0, y: 0.0, z: -0.9},
hand: hand,
timeScale: 0.15});
}, 3000);
function cleanUp() {
Entities.deleteEntity(stickID);
}
function positionStick(stickOrientation) {
var baseOffset = {x: 0.0, y: 0.0, z: -0.9};
var offset = Vec3.multiplyQbyV(stickOrientation, baseOffset);
Entities.updateAction(stickID, actionID, {relativePosition: offset,
relativeRotation: stickOrientation});
}
function mouseMoveEvent(event) {
if (!stickID || actionID == nullActionID) {
return;
}
var windowCenterX = Window.innerWidth / 2;
var windowCenterY = Window.innerHeight / 2;
var mouseXCenterOffset = event.x - windowCenterX;
var mouseYCenterOffset = event.y - windowCenterY;
var mouseXRatio = mouseXCenterOffset / windowCenterX;
var mouseYRatio = mouseYCenterOffset / windowCenterY;
var stickOrientation = Quat.fromPitchYawRollDegrees(mouseYRatio * -90, mouseXRatio * -90, 0);
positionStick(stickOrientation);
}
function initControls(){
if (hand == "right") {
controllerID = 3; // right handed
} else {
controllerID = 4; // left handed
}
}
function update(deltaTime){
var palmPosition = Controller.getSpatialControlPosition(controllerID);
controllerActive = (Vec3.length(palmPosition) > 0);
if(!controllerActive){
return;
}
stickOrientation = Controller.getSpatialControlRawRotation(controllerID);
var adjustment = Quat.fromPitchYawRollDegrees(180, 0, 0);
stickOrientation = Quat.multiply(stickOrientation, adjustment);
positionStick(stickOrientation);
}
Script.scriptEnding.connect(cleanUp);
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Script.update.connect(update);

5 binary files changed (not shown): three new images of 3.5 KiB, 6.2 KiB, and 4.6 KiB, plus two other binary files.


@ -57,6 +57,7 @@
#include <AccountManager.h>
#include <AddressManager.h>
#include <CursorManager.h>
#include <AmbientOcclusionEffect.h>
#include <AudioInjector.h>
#include <DeferredLightingEffect.h>
@ -100,6 +101,7 @@
#include "ModelPackager.h"
#include "Util.h"
#include "InterfaceLogging.h"
#include "InterfaceActionFactory.h"
#include "avatar/AvatarManager.h"
@ -257,6 +259,7 @@ bool setupEssentials(int& argc, char** argv) {
DependencyManager::registerInheritance<LimitedNodeList, NodeList>();
DependencyManager::registerInheritance<AvatarHashMap, AvatarManager>();
DependencyManager::registerInheritance<EntityActionFactoryInterface, InterfaceActionFactory>();
Setting::init();
@ -293,7 +296,8 @@ bool setupEssentials(int& argc, char** argv) {
auto discoverabilityManager = DependencyManager::set<DiscoverabilityManager>();
auto sceneScriptingInterface = DependencyManager::set<SceneScriptingInterface>();
auto offscreenUi = DependencyManager::set<OffscreenUi>();
auto pathUtils = DependencyManager::set<PathUtils>();
auto actionFactory = DependencyManager::set<InterfaceActionFactory>();
return true;
}
@ -421,6 +425,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
connect(audioIO.data(), &AudioClient::muteToggled, this, &Application::audioMuteToggled);
connect(audioIO.data(), &AudioClient::receivedFirstPacket,
&AudioScriptingInterface::getInstance(), &AudioScriptingInterface::receivedFirstPacket);
connect(audioIO.data(), &AudioClient::disconnected,
&AudioScriptingInterface::getInstance(), &AudioScriptingInterface::disconnected);
audioThread->start();
@ -949,6 +955,7 @@ void Application::paintGL() {
glPushMatrix();
glLoadIdentity();
displaySide(&renderArgs, _myCamera);
_applicationOverlay.displayOverlayTexture(&renderArgs);
glPopMatrix();
if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
@ -965,8 +972,6 @@ void Application::paintGL() {
0, 0, _glWidget->getDeviceSize().width(), _glWidget->getDeviceSize().height(),
GL_COLOR_BUFFER_BIT, GL_NEAREST);
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
_applicationOverlay.displayOverlayTexture();
}
if (!OculusManager::isConnected() || OculusManager::allowSwap()) {
@ -1237,9 +1242,20 @@ void Application::keyPressEvent(QKeyEvent* event) {
}
break;
case Qt::Key_Apostrophe:
resetSensors();
case Qt::Key_Apostrophe: {
if (isMeta) {
auto cursor = Cursor::Manager::instance().getCursor();
auto curIcon = cursor->getIcon();
if (curIcon == Cursor::Icon::DEFAULT) {
cursor->setIcon(Cursor::Icon::LINK);
} else {
cursor->setIcon(Cursor::Icon::DEFAULT);
}
} else {
resetSensors();
}
break;
}
case Qt::Key_A:
if (isShifted) {
@ -1363,12 +1379,27 @@ void Application::keyPressEvent(QKeyEvent* event) {
case Qt::Key_Slash:
Menu::getInstance()->triggerOption(MenuOption::Stats);
break;
case Qt::Key_Plus:
_myAvatar->increaseSize();
case Qt::Key_Plus: {
if (isMeta && event->modifiers().testFlag(Qt::KeypadModifier)) {
auto& cursorManager = Cursor::Manager::instance();
cursorManager.setScale(cursorManager.getScale() * 1.1f);
} else {
_myAvatar->increaseSize();
}
break;
case Qt::Key_Minus:
_myAvatar->decreaseSize();
}
case Qt::Key_Minus: {
if (isMeta && event->modifiers().testFlag(Qt::KeypadModifier)) {
auto& cursorManager = Cursor::Manager::instance();
cursorManager.setScale(cursorManager.getScale() / 1.1f);
} else {
_myAvatar->decreaseSize();
}
break;
}
case Qt::Key_Equal:
_myAvatar->resetSize();
break;


@ -149,6 +149,7 @@ public:
static glm::quat getOrientationForPath() { return getInstance()->_myAvatar->getOrientation(); }
static glm::vec3 getPositionForAudio() { return getInstance()->_myAvatar->getHead()->getPosition(); }
static glm::quat getOrientationForAudio() { return getInstance()->_myAvatar->getHead()->getFinalOrientationInWorldFrame(); }
static UserInputMapper* getUserInputMapper() { return &getInstance()->_userInputMapper; }
static void initPlugins();
static void shutdownPlugins();


@ -0,0 +1,49 @@
//
// InterfaceActionFactory.cpp
// interface/src/
//
// Created by Seth Alves on 2015-6-2
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <avatar/AvatarActionHold.h>
#include <ObjectActionPullToPoint.h>
#include <ObjectActionSpring.h>
#include "InterfaceActionFactory.h"
EntityActionPointer InterfaceActionFactory::factory(EntitySimulation* simulation,
EntityActionType type,
QUuid id,
EntityItemPointer ownerEntity,
QVariantMap arguments) {
EntityActionPointer action = nullptr;
switch (type) {
case ACTION_TYPE_NONE:
return nullptr;
case ACTION_TYPE_PULL_TO_POINT:
action = (EntityActionPointer) new ObjectActionPullToPoint(id, ownerEntity);
break;
case ACTION_TYPE_SPRING:
action = (EntityActionPointer) new ObjectActionSpring(id, ownerEntity);
break;
case ACTION_TYPE_HOLD:
action = (EntityActionPointer) new AvatarActionHold(id, ownerEntity);
break;
}
bool ok = action->updateArguments(arguments);
if (ok) {
ownerEntity->addAction(simulation, action);
return action;
}
action = nullptr;
return action;
}


@ -0,0 +1,28 @@
//
// InterfaceActionFactory.h
// interface/src/
//
// Created by Seth Alves on 2015-6-10
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_InterfaceActionFactory_h
#define hifi_InterfaceActionFactory_h
#include "EntityActionFactoryInterface.h"
class InterfaceActionFactory : public EntityActionFactoryInterface {
public:
InterfaceActionFactory() : EntityActionFactoryInterface() { }
virtual ~InterfaceActionFactory() { }
virtual EntityActionPointer factory(EntitySimulation* simulation,
EntityActionType type,
QUuid id,
EntityItemPointer ownerEntity,
QVariantMap arguments);
};
#endif // hifi_InterfaceActionFactory_h


@ -0,0 +1,108 @@
//
// AvatarActionHold.cpp
// interface/src/avatar/
//
// Created by Seth Alves 2015-6-9
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "avatar/MyAvatar.h"
#include "avatar/AvatarManager.h"
#include "AvatarActionHold.h"
AvatarActionHold::AvatarActionHold(QUuid id, EntityItemPointer ownerEntity) :
ObjectActionSpring(id, ownerEntity) {
#if WANT_DEBUG
qDebug() << "AvatarActionHold::AvatarActionHold";
#endif
}
AvatarActionHold::~AvatarActionHold() {
#if WANT_DEBUG
qDebug() << "AvatarActionHold::~AvatarActionHold";
#endif
}
void AvatarActionHold::updateActionWorker(float deltaTimeStep) {
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::vec3 palmPosition;
if (_hand == "right") {
palmPosition = myAvatar->getRightPalmPosition();
} else {
palmPosition = myAvatar->getLeftPalmPosition();
}
auto rotation = myAvatar->getWorldAlignedOrientation();
auto offset = rotation * _relativePosition;
auto position = palmPosition + offset;
rotation *= _relativeRotation;
lockForWrite();
_positionalTarget = position;
_rotationalTarget = rotation;
unlock();
ObjectActionSpring::updateActionWorker(deltaTimeStep);
}
bool AvatarActionHold::updateArguments(QVariantMap arguments) {
bool rPOk = true;
glm::vec3 relativePosition =
EntityActionInterface::extractVec3Argument("hold", arguments, "relativePosition", rPOk, false);
bool rROk = true;
glm::quat relativeRotation =
EntityActionInterface::extractQuatArgument("hold", arguments, "relativeRotation", rROk, false);
bool tSOk = true;
float timeScale =
EntityActionInterface::extractFloatArgument("hold", arguments, "timeScale", tSOk, false);
bool hOk = true;
QString hand =
EntityActionInterface::extractStringArgument("hold", arguments, "hand", hOk, false);
lockForWrite();
if (rPOk) {
_relativePosition = relativePosition;
} else if (!_parametersSet) {
_relativePosition = glm::vec3(0.0f, 0.0f, 1.0f);
}
if (rROk) {
_relativeRotation = relativeRotation;
} else if (!_parametersSet) {
_relativeRotation = glm::quat(0.0f, 0.0f, 0.0f, 1.0f);
}
if (tSOk) {
_linearTimeScale = timeScale;
_angularTimeScale = timeScale;
} else if (!_parametersSet) {
_linearTimeScale = 0.2;
_angularTimeScale = 0.2;
}
if (hOk) {
hand = hand.toLower();
if (hand == "left") {
_hand = "left";
} else if (hand == "right") {
_hand = "right";
} else {
qDebug() << "hold action -- invalid hand argument:" << hand;
_hand = "right";
}
} else if (!_parametersSet) {
_hand = "right";
}
_parametersSet = true;
_positionalTargetSet = true;
_rotationalTargetSet = true;
_active = true;
unlock();
return true;
}
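From script, the arguments parsed above are the same ones examples/stick.js passes to Entities.addAction("hold", ...). A minimal hedged sketch, assuming an existing entityID; anything omitted falls back to the defaults assigned in updateArguments while _parametersSet is still false:

// Hedged sketch: create a bare hold action, then adjust the grip afterwards.
var holdID = Entities.addAction("hold", entityID, { hand: "left" });
Entities.updateAction(entityID, holdID, {
    relativePosition: { x: 0, y: 0, z: -0.5 },
    relativeRotation: Quat.fromPitchYawRollDegrees(180, 0, 0),
    timeScale: 0.15
});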


@ -0,0 +1,35 @@
//
// AvatarActionHold.h
// interface/src/avatar/
//
// Created by Seth Alves 2015-6-9
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AvatarActionHold_h
#define hifi_AvatarActionHold_h
#include <QUuid>
#include <EntityItem.h>
#include <ObjectActionSpring.h>
class AvatarActionHold : public ObjectActionSpring {
public:
AvatarActionHold(QUuid id, EntityItemPointer ownerEntity);
virtual ~AvatarActionHold();
virtual bool updateArguments(QVariantMap arguments);
virtual void updateActionWorker(float deltaTimeStep);
private:
glm::vec3 _relativePosition;
glm::quat _relativeRotation;
QString _hand;
bool _parametersSet = false;
};
#endif // hifi_AvatarActionHold_h


@ -159,9 +159,25 @@ void KeyboardMouseDevice::registerToUserInputMapper(UserInputMapper& mapper) {
// Grab the current free device ID
_deviceID = mapper.getFreeDeviceID();
auto proxy = UserInputMapper::DeviceProxy::Pointer(new UserInputMapper::DeviceProxy());
auto proxy = UserInputMapper::DeviceProxy::Pointer(new UserInputMapper::DeviceProxy("Keyboard"));
proxy->getButton = [this] (const UserInputMapper::Input& input, int timestamp) -> bool { return this->getButton(input._channel); };
proxy->getAxis = [this] (const UserInputMapper::Input& input, int timestamp) -> float { return this->getAxis(input._channel); };
proxy->getAvailabeInputs = [this] () -> QVector<UserInputMapper::InputPair> {
QVector<UserInputMapper::InputPair> availableInputs;
for (int i = (int) Qt::Key_0; i <= (int) Qt::Key_9; i++) {
availableInputs.append(UserInputMapper::InputPair(makeInput(Qt::Key(i)), QKeySequence(Qt::Key(i)).toString()));
}
for (int i = (int) Qt::Key_A; i <= (int) Qt::Key_Z; i++) {
availableInputs.append(UserInputMapper::InputPair(makeInput(Qt::Key(i)), QKeySequence(Qt::Key(i)).toString()));
}
availableInputs.append(UserInputMapper::InputPair(makeInput(Qt::Key_Space), QKeySequence(Qt::Key_Space).toString()));
return availableInputs;
};
proxy->resetDeviceBindings = [this, &mapper] () -> bool {
mapper.removeAllInputChannelsForDevice(_deviceID);
this->assignDefaultInputMapping(mapper);
return true;
};
mapper.registerDevice(_deviceID, proxy);
}


@ -615,12 +615,10 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
renderArgs->_renderSide = RenderArgs::MONO;
qApp->displaySide(renderArgs, *_camera, false);
qApp->getApplicationOverlay().displayOverlayTextureHmd(*_camera);
qApp->getApplicationOverlay().displayOverlayTextureHmd(renderArgs, *_camera);
});
_activeEye = ovrEye_Count;
glPopMatrix();
gpu::FramebufferPointer finalFbo;
//Bind the output texture from the glow shader. If glow effect is disabled, we just grab the texture
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
@ -631,6 +629,7 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
finalFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
glBindFramebuffer(GL_FRAMEBUFFER, 0);
}
glPopMatrix();
glMatrixMode(GL_PROJECTION);
glPopMatrix();


@ -12,6 +12,7 @@
#include "InterfaceConfig.h"
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <GlowEffect.h>
#include "gpu/GLBackend.h"
@ -106,21 +107,20 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
_activeEye = &eye;
glViewport(portalX, portalY, portalW, portalH);
glScissor(portalX, portalY, portalW, portalH);
glm::mat4 projection = glm::frustum<float>(eye.left, eye.right, eye.bottom, eye.top, nearZ, farZ);
float fov = atan(1.0f / projection[1][1]);
projection = glm::translate(projection, vec3(eye.modelTranslation, 0, 0));
eyeCamera.setProjection(projection);
glMatrixMode(GL_PROJECTION);
glLoadIdentity(); // reset projection matrix
glFrustum(eye.left, eye.right, eye.bottom, eye.top, nearZ, farZ); // set left view frustum
GLfloat p[4][4];
// Really?
glGetFloatv(GL_PROJECTION_MATRIX, &(p[0][0]));
float cotangent = p[1][1];
GLfloat fov = atan(1.0f / cotangent);
glTranslatef(eye.modelTranslation, 0.0, 0.0); // translate to cancel parallax
glLoadMatrixf(glm::value_ptr(projection));
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
renderArgs->_renderSide = RenderArgs::MONO;
qApp->displaySide(renderArgs, eyeCamera, false);
qApp->getApplicationOverlay().displayOverlayTextureStereo(whichCamera, _aspect, fov);
qApp->getApplicationOverlay().displayOverlayTexture(renderArgs);
_activeEye = NULL;
}, [&]{
// render right side view
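A brief note on the fov extraction above (not part of the diff): glm::frustum(l, r, b, t, n, f) yields projection[1][1] = 2n / (t - b), and for a vertically symmetric frustum (t = n * tan(halfFov), b = -t) that equals 1 / tan(halfFov). So atan(1.0f / projection[1][1]) recovers the half vertical field of view in radians, the same quantity the removed glGetFloatv/cotangent path computed from p[1][1].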


@ -28,6 +28,94 @@ ControllerScriptingInterface::ControllerScriptingInterface() :
{
}
static int actionMetaTypeId = qRegisterMetaType<UserInputMapper::Action>();
static int inputChannelMetaTypeId = qRegisterMetaType<UserInputMapper::InputChannel>();
static int inputMetaTypeId = qRegisterMetaType<UserInputMapper::Input>();
static int inputPairMetaTypeId = qRegisterMetaType<UserInputMapper::InputPair>();
QScriptValue inputToScriptValue(QScriptEngine* engine, const UserInputMapper::Input& input);
void inputFromScriptValue(const QScriptValue& object, UserInputMapper::Input& input);
QScriptValue inputChannelToScriptValue(QScriptEngine* engine, const UserInputMapper::InputChannel& inputChannel);
void inputChannelFromScriptValue(const QScriptValue& object, UserInputMapper::InputChannel& inputChannel);
QScriptValue actionToScriptValue(QScriptEngine* engine, const UserInputMapper::Action& action);
void actionFromScriptValue(const QScriptValue& object, UserInputMapper::Action& action);
QScriptValue inputPairToScriptValue(QScriptEngine* engine, const UserInputMapper::InputPair& inputPair);
void inputPairFromScriptValue(const QScriptValue& object, UserInputMapper::InputPair& inputPair);
QScriptValue inputToScriptValue(QScriptEngine* engine, const UserInputMapper::Input& input) {
QScriptValue obj = engine->newObject();
obj.setProperty("device", input.getDevice());
obj.setProperty("channel", input.getChannel());
obj.setProperty("type", (unsigned short) input.getType());
obj.setProperty("id", input.getID());
return obj;
}
void inputFromScriptValue(const QScriptValue& object, UserInputMapper::Input& input) {
input.setDevice(object.property("device").toUInt16());
input.setChannel(object.property("channel").toUInt16());
input.setType(object.property("type").toUInt16());
input.setID(object.property("id").toInt32());
}
QScriptValue inputChannelToScriptValue(QScriptEngine* engine, const UserInputMapper::InputChannel& inputChannel) {
QScriptValue obj = engine->newObject();
obj.setProperty("input", inputToScriptValue(engine, inputChannel.getInput()));
obj.setProperty("modifier", inputToScriptValue(engine, inputChannel.getModifier()));
obj.setProperty("action", inputChannel.getAction());
obj.setProperty("scale", inputChannel.getScale());
return obj;
}
void inputChannelFromScriptValue(const QScriptValue& object, UserInputMapper::InputChannel& inputChannel) {
UserInputMapper::Input input;
UserInputMapper::Input modifier;
inputFromScriptValue(object.property("input"), input);
inputChannel.setInput(input);
inputFromScriptValue(object.property("modifier"), modifier);
inputChannel.setModifier(modifier);
inputChannel.setAction(UserInputMapper::Action(object.property("action").toVariant().toInt()));
inputChannel.setScale(object.property("scale").toVariant().toFloat());
}
QScriptValue actionToScriptValue(QScriptEngine* engine, const UserInputMapper::Action& action) {
QScriptValue obj = engine->newObject();
QVector<UserInputMapper::InputChannel> inputChannels = Application::getUserInputMapper()->getInputChannelsForAction(action);
QScriptValue _inputChannels = engine->newArray(inputChannels.size());
for (int i = 0; i < inputChannels.size(); i++) {
_inputChannels.setProperty(i, inputChannelToScriptValue(engine, inputChannels[i]));
}
obj.setProperty("action", (int) action);
obj.setProperty("actionName", Application::getUserInputMapper()->getActionName(action));
obj.setProperty("inputChannels", _inputChannels);
return obj;
}
void actionFromScriptValue(const QScriptValue& object, UserInputMapper::Action& action) {
action = UserInputMapper::Action(object.property("action").toVariant().toInt());
}
QScriptValue inputPairToScriptValue(QScriptEngine* engine, const UserInputMapper::InputPair& inputPair) {
QScriptValue obj = engine->newObject();
obj.setProperty("input", inputToScriptValue(engine, inputPair.first));
obj.setProperty("inputName", inputPair.second);
return obj;
}
void inputPairFromScriptValue(const QScriptValue& object, UserInputMapper::InputPair& inputPair) {
inputFromScriptValue(object.property("input"), inputPair.first);
inputPair.second = QString(object.property("inputName").toVariant().toString());
}
void ControllerScriptingInterface::registerControllerTypes(QScriptEngine* engine) {
qScriptRegisterSequenceMetaType<QVector<UserInputMapper::Action> >(engine);
qScriptRegisterSequenceMetaType<QVector<UserInputMapper::InputChannel> >(engine);
qScriptRegisterSequenceMetaType<QVector<UserInputMapper::InputPair> >(engine);
qScriptRegisterMetaType(engine, actionToScriptValue, actionFromScriptValue);
qScriptRegisterMetaType(engine, inputChannelToScriptValue, inputChannelFromScriptValue);
qScriptRegisterMetaType(engine, inputToScriptValue, inputFromScriptValue);
qScriptRegisterMetaType(engine, inputPairToScriptValue, inputPairFromScriptValue);
}
void ControllerScriptingInterface::handleMetaEvent(HFMetaEvent* event) {
if (event->type() == HFActionEvent::startType()) {
@ -337,6 +425,37 @@ void ControllerScriptingInterface::updateInputControllers() {
}
}
QVector<UserInputMapper::Action> ControllerScriptingInterface::getAllActions() {
return Application::getUserInputMapper()->getAllActions();
}
QVector<UserInputMapper::InputChannel> ControllerScriptingInterface::getInputChannelsForAction(UserInputMapper::Action action) {
return Application::getUserInputMapper()->getInputChannelsForAction(action);
}
QString ControllerScriptingInterface::getDeviceName(unsigned int device) {
return Application::getUserInputMapper()->getDeviceName((unsigned short) device);
}
QVector<UserInputMapper::InputChannel> ControllerScriptingInterface::getAllInputsForDevice(unsigned int device) {
return Application::getUserInputMapper()->getAllInputsForDevice(device);
}
bool ControllerScriptingInterface::addInputChannel(UserInputMapper::InputChannel inputChannel) {
return Application::getUserInputMapper()->addInputChannel(inputChannel._action, inputChannel._input, inputChannel._modifier, inputChannel._scale);
}
bool ControllerScriptingInterface::removeInputChannel(UserInputMapper::InputChannel inputChannel) {
return Application::getUserInputMapper()->removeInputChannel(inputChannel);
}
QVector<UserInputMapper::InputPair> ControllerScriptingInterface::getAvailableInputs(unsigned int device) {
return Application::getUserInputMapper()->getAvailableInputs((unsigned short) device);
}
void ControllerScriptingInterface::resetAllDeviceBindings() {
Application::getUserInputMapper()->resetAllDeviceBindings();
}
InputController::InputController(int deviceTrackerId, int subTrackerId, QObject* parent) :
AbstractInputController(),
@ -373,4 +492,4 @@ const unsigned int INPUTCONTROLLER_KEY_DEVICE_MASK = 16;
InputController::Key InputController::getKey() const {
return (((_deviceTrackerId & INPUTCONTROLLER_KEY_DEVICE_MASK) << INPUTCONTROLLER_KEY_DEVICE_OFFSET) | _subTrackerId);
}
}
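Because the converters above just read and write named properties, a script can also hand Controller.addInputChannel a plain object literal of the same shape. A hedged sketch follows; the numeric device, channel, type, id, and action values are placeholders, not values taken from this commit.

// Hedged sketch: an InputChannel-shaped literal matching inputChannelFromScriptValue
// and inputFromScriptValue above; input and modifier are {device, channel, type, id}.
var madeUpChannel = {
    input:    { device: 1, channel: 87, type: 1, id: 0 },
    modifier: { device: 0, channel: 0, type: 0, id: 0 },
    action: 0,
    scale: 1.0
};
Controller.addInputChannel(madeUpChannel);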


@ -14,10 +14,11 @@
#include <QtCore/QObject>
#include "ui/UserInputMapper.h"
#include <AbstractControllerScriptingInterface.h>
class PalmData;
class InputController : public AbstractInputController {
Q_OBJECT
@ -54,6 +55,9 @@ class ControllerScriptingInterface : public AbstractControllerScriptingInterface
public:
ControllerScriptingInterface();
virtual void registerControllerTypes(QScriptEngine* engine);
void emitKeyPressEvent(QKeyEvent* event) { emit keyPressEvent(KeyEvent(*event)); }
void emitKeyReleaseEvent(QKeyEvent* event) { emit keyReleaseEvent(KeyEvent(*event)); }
@ -79,6 +83,14 @@ public:
void updateInputControllers();
public slots:
Q_INVOKABLE virtual QVector<UserInputMapper::Action> getAllActions();
Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getInputChannelsForAction(UserInputMapper::Action action);
Q_INVOKABLE virtual QString getDeviceName(unsigned int device);
Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getAllInputsForDevice(unsigned int device);
Q_INVOKABLE virtual bool addInputChannel(UserInputMapper::InputChannel inputChannel);
Q_INVOKABLE virtual bool removeInputChannel(UserInputMapper::InputChannel inputChannel);
Q_INVOKABLE virtual QVector<UserInputMapper::InputPair> getAvailableInputs(unsigned int device);
Q_INVOKABLE virtual void resetAllDeviceBindings();
virtual bool isPrimaryButtonPressed() const;
virtual glm::vec2 getPrimaryJoystickPosition() const;


@ -14,13 +14,16 @@
#include <QOpenGLFramebufferObject>
#include <QOpenGLTexture>
#include <glm/gtc/type_ptr.hpp>
#include <avatar/AvatarManager.h>
#include <DeferredLightingEffect.h>
#include <GLMHelpers.h>
#include <PathUtils.h>
#include <gpu/GLBackend.h>
#include <GLMHelpers.h>
#include <PerfStat.h>
#include <OffscreenUi.h>
#include <CursorManager.h>
#include <PerfStat.h>
#include "AudioClient.h"
#include "audio/AudioIOStatsRenderer.h"
@ -33,6 +36,9 @@
#include "Util.h"
#include "ui/Stats.h"
#include "../../libraries/render-utils/standardTransformPNTC_vert.h"
#include "../../libraries/render-utils/standardDrawTexture_frag.h"
// Used to animate the magnification windows
const float MAG_SPEED = 0.08f;
@ -114,27 +120,6 @@ bool raySphereIntersect(const glm::vec3 &dir, const glm::vec3 &origin, float r,
}
}
void ApplicationOverlay::renderReticle(glm::quat orientation, float alpha) {
glPushMatrix(); {
glm::vec3 axis = glm::axis(orientation);
glRotatef(glm::degrees(glm::angle(orientation)), axis.x, axis.y, axis.z);
glm::vec3 topLeft = getPoint(reticleSize / 2.0f, -reticleSize / 2.0f);
glm::vec3 topRight = getPoint(-reticleSize / 2.0f, -reticleSize / 2.0f);
glm::vec3 bottomLeft = getPoint(reticleSize / 2.0f, reticleSize / 2.0f);
glm::vec3 bottomRight = getPoint(-reticleSize / 2.0f, reticleSize / 2.0f);
// TODO: this version of renderQuad() needs to take a color
glm::vec4 reticleColor = { RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], alpha };
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomLeft, bottomRight, topRight,
glm::vec2(0.0f, 0.0f), glm::vec2(1.0f, 0.0f),
glm::vec2(1.0f, 1.0f), glm::vec2(0.0f, 1.0f),
reticleColor, _reticleQuad);
} glPopMatrix();
}
ApplicationOverlay::ApplicationOverlay() :
_textureFov(glm::radians(DEFAULT_HMD_UI_ANGULAR_SIZE)),
_textureAspectRatio(1.0f),
@ -143,13 +128,13 @@ ApplicationOverlay::ApplicationOverlay() :
_alpha(1.0f),
_oculusUIRadius(1.0f),
_trailingAudioLoudness(0.0f),
_crosshairTexture(0),
_previousBorderWidth(-1),
_previousBorderHeight(-1),
_previousMagnifierBottomLeft(),
_previousMagnifierBottomRight(),
_previousMagnifierTopLeft(),
_previousMagnifierTopRight()
_previousMagnifierTopRight(),
_framebufferObject(nullptr)
{
memset(_reticleActive, 0, sizeof(_reticleActive));
memset(_magActive, 0, sizeof(_reticleActive));
@ -196,16 +181,17 @@ void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
//Handle fading and deactivation/activation of UI
// Render 2D overlay
glMatrixMode(GL_PROJECTION);
glDisable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
_overlays.buildFramebufferObject();
_overlays.bind();
buildFramebufferObject();
_framebufferObject->bind();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glViewport(0, 0, size.x, size.y);
glMatrixMode(GL_PROJECTION);
glPushMatrix(); {
const float NEAR_CLIP = -10000;
const float FAR_CLIP = 10000;
@ -227,6 +213,22 @@ void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
renderPointers();
renderDomainConnectionStatusBorder();
if (_newUiTexture) {
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBindTexture(GL_TEXTURE_2D, _newUiTexture);
DependencyManager::get<GeometryCache>()->renderUnitQuad();
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
glDisable(GL_BLEND);
}
glLoadIdentity();
glMatrixMode(GL_PROJECTION);
} glPopMatrix();
@ -236,258 +238,160 @@ void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
glEnable(GL_LIGHTING);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
_overlays.release();
_framebufferObject->release();
}
// A quick and dirty solution for compositing the old overlay
// texture with the new one
template <typename F>
void with_each_texture(GLuint firstPassTexture, GLuint secondPassTexture, F f) {
glEnable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
if (firstPassTexture) {
glBindTexture(GL_TEXTURE_2D, firstPassTexture);
f();
gpu::PipelinePointer ApplicationOverlay::getDrawPipeline() {
if (!_standardDrawPipeline) {
auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(standardTransformPNTC_vert)));
auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(standardDrawTexture_frag)));
auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));
gpu::Shader::makeProgram((*program));
auto state = gpu::StatePointer(new gpu::State());
// enable decal blend
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA);
_standardDrawPipeline.reset(gpu::Pipeline::create(program, state));
}
if (secondPassTexture) {
glBindTexture(GL_TEXTURE_2D, secondPassTexture);
f();
}
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
return _standardDrawPipeline;
}
void ApplicationOverlay::bindCursorTexture(gpu::Batch& batch, uint8_t cursorIndex) {
auto& cursorManager = Cursor::Manager::instance();
auto cursor = cursorManager.getCursor(cursorIndex);
auto iconId = cursor->getIcon();
if (!_cursors.count(iconId)) {
auto iconPath = cursorManager.getIconImage(cursor->getIcon());
_cursors[iconId] = DependencyManager::get<TextureCache>()->
getImageTexture(iconPath);
}
batch.setUniformTexture(0, _cursors[iconId]);
}
#define CURSOR_PIXEL_SIZE 32.0f
// Draws the FBO texture for the screen
void ApplicationOverlay::displayOverlayTexture() {
void ApplicationOverlay::displayOverlayTexture(RenderArgs* renderArgs) {
if (_alpha == 0.0f) {
return;
}
glMatrixMode(GL_PROJECTION);
glPushMatrix(); {
glLoadIdentity();
glDisable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glViewport(0, 0, qApp->getDeviceSize().width(), qApp->getDeviceSize().height());
renderArgs->_context->syncCache();
static const glm::vec2 topLeft(-1, 1);
static const glm::vec2 bottomRight(1, -1);
static const glm::vec2 texCoordTopLeft(0.0f, 1.0f);
static const glm::vec2 texCoordBottomRight(1.0f, 0.0f);
with_each_texture(_overlays.getTexture(), _newUiTexture, [&] {
DependencyManager::get<GeometryCache>()->renderQuad(
topLeft, bottomRight,
texCoordTopLeft, texCoordBottomRight,
glm::vec4(1.0f, 1.0f, 1.0f, _alpha));
});
gpu::Batch batch;
Transform model;
//DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, true);
batch.setPipeline(getDrawPipeline());
batch.setModelTransform(Transform());
batch.setProjectionTransform(mat4());
batch.setViewTransform(model);
batch._glBindTexture(GL_TEXTURE_2D, _framebufferObject->texture());
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
DependencyManager::get<GeometryCache>()->renderUnitQuad(batch, vec4(vec3(1), _alpha));
if (!_crosshairTexture) {
_crosshairTexture = DependencyManager::get<TextureCache>()->
getImageTexture(PathUtils::resourcesPath() + "images/sixense-reticle.png");
}
//draw the mouse pointer
glm::vec2 canvasSize = qApp->getCanvasSize();
glm::vec2 mouseSize = 32.0f / canvasSize;
auto mouseTopLeft = topLeft * mouseSize;
auto mouseBottomRight = bottomRight * mouseSize;
// Get the mouse coordinates and convert to NDC [-1, 1]
vec2 mousePosition = vec2(qApp->getMouseX(), qApp->getMouseY());
mousePosition /= canvasSize;
mousePosition *= 2.0f;
mousePosition -= 1.0f;
mousePosition.y *= -1.0f;
model.setTranslation(vec3(mousePosition, 0));
glm::vec2 mouseSize = CURSOR_PIXEL_SIZE / canvasSize;
model.setScale(vec3(mouseSize, 1.0f));
batch.setModelTransform(model);
bindCursorTexture(batch);
glm::vec4 reticleColor = { RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], 1.0f };
DependencyManager::get<GeometryCache>()->renderUnitQuad(batch, vec4(1));
renderArgs->_context->render(batch);
}
glEnable(GL_TEXTURE_2D);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_crosshairTexture));
glm::vec4 reticleColor = { RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], 1.0f };
DependencyManager::get<GeometryCache>()->renderQuad(
mouseTopLeft + mousePosition, mouseBottomRight + mousePosition,
texCoordTopLeft, texCoordBottomRight,
reticleColor);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glDisable(GL_TEXTURE_2D);
} glPopMatrix();
static gpu::BufferPointer _hemiVertices;
static gpu::BufferPointer _hemiIndices;
static int _hemiIndexCount{ 0 };
glm::vec2 getPolarCoordinates(const PalmData& palm) {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
auto avatarOrientation = myAvatar->getOrientation();
auto eyePos = myAvatar->getDefaultEyePosition();
glm::vec3 tip = myAvatar->getLaserPointerTipPosition(&palm);
// Direction of the tip relative to the eye
glm::vec3 tipDirection = tip - eyePos;
// orient into avatar space
tipDirection = glm::inverse(avatarOrientation) * tipDirection;
// Normalize for trig functions
tipDirection = glm::normalize(tipDirection);
// Convert to polar coordinates
glm::vec2 polar(glm::atan(tipDirection.x, -tipDirection.z), glm::asin(tipDirection.y));
return polar;
}
// Draws the FBO texture for Oculus rift.
void ApplicationOverlay::displayOverlayTextureHmd(Camera& whichCamera) {
void ApplicationOverlay::displayOverlayTextureHmd(RenderArgs* renderArgs, Camera& whichCamera) {
if (_alpha == 0.0f) {
return;
}
glEnable(GL_BLEND);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glEnable(GL_DEPTH_TEST);
glDepthMask(GL_TRUE);
glDisable(GL_LIGHTING);
glEnable(GL_ALPHA_TEST);
glAlphaFunc(GL_GREATER, 0.01f);
renderArgs->_context->syncCache();
gpu::Batch batch;
batch.setPipeline(getDrawPipeline());
batch._glDisable(GL_DEPTH_TEST);
batch._glDisable(GL_CULL_FACE);
batch._glBindTexture(GL_TEXTURE_2D, _framebufferObject->texture());
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
batch.setProjectionTransform(whichCamera.getProjection());
batch.setViewTransform(Transform());
//Update and draw the magnifiers
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
const glm::quat& orientation = myAvatar->getOrientation();
// Always display the HMD overlay relative to the camera position but
// remove the HMD pose offset. This results in an overlay that sticks with you
// even in third person mode, but isn't drawn at a fixed distance.
glm::vec3 position = whichCamera.getPosition();
position -= qApp->getCamera()->getHmdPosition();
const float scale = myAvatar->getScale() * _oculusUIRadius;
// glm::vec3 eyeOffset = setEyeOffsetPosition;
glMatrixMode(GL_MODELVIEW);
glPushMatrix(); {
glTranslatef(position.x, position.y, position.z);
glm::mat4 rotation = glm::toMat4(orientation);
glMultMatrixf(&rotation[0][0]);
glScalef(scale, scale, scale);
for (int i = 0; i < NUMBER_OF_RETICLES; i++) {
if (_magActive[i]) {
_magSizeMult[i] += MAG_SPEED;
if (_magSizeMult[i] > 1.0f) {
_magSizeMult[i] = 1.0f;
}
} else {
_magSizeMult[i] -= MAG_SPEED;
if (_magSizeMult[i] < 0.0f) {
_magSizeMult[i] = 0.0f;
}
}
if (_magSizeMult[i] > 0.0f) {
//Render magnifier, but dont show border for mouse magnifier
glm::vec2 projection = screenToOverlay(glm::vec2(_reticlePosition[MOUSE].x(),
_reticlePosition[MOUSE].y()));
with_each_texture(_overlays.getTexture(), 0, [&] {
renderMagnifier(projection, _magSizeMult[i], i != MOUSE);
});
}
}
glDepthMask(GL_FALSE);
glDisable(GL_ALPHA_TEST);
static float textureFOV = 0.0f, textureAspectRatio = 1.0f;
if (textureFOV != _textureFov ||
textureAspectRatio != _textureAspectRatio) {
textureFOV = _textureFov;
textureAspectRatio = _textureAspectRatio;
_overlays.buildVBO(_textureFov, _textureAspectRatio, 80, 80);
}
const quat& avatarOrientation = myAvatar->getOrientation();
quat hmdOrientation = qApp->getCamera()->getHmdRotation();
vec3 hmdPosition = glm::inverse(avatarOrientation) * qApp->getCamera()->getHmdPosition();
mat4 overlayXfm = glm::mat4_cast(glm::inverse(hmdOrientation)) * glm::translate(mat4(), -hmdPosition);
batch.setModelTransform(Transform(overlayXfm));
drawSphereSection(batch);
with_each_texture(_overlays.getTexture(), _newUiTexture, [&] {
_overlays.render();
});
if (!Application::getInstance()->isMouseHidden()) {
renderPointersOculus();
bindCursorTexture(batch);
auto geometryCache = DependencyManager::get<GeometryCache>();
vec3 reticleScale = vec3(Cursor::Manager::instance().getScale() * reticleSize);
//Controller Pointers
for (int i = 0; i < (int)myAvatar->getHand()->getNumPalms(); i++) {
PalmData& palm = myAvatar->getHand()->getPalms()[i];
if (palm.isActive()) {
glm::vec2 polar = getPolarCoordinates(palm);
// Convert to quaternion
mat4 pointerXfm = glm::mat4_cast(quat(vec3(polar.y, -polar.x, 0.0f))) * glm::translate(mat4(), vec3(0, 0, -1));
mat4 reticleXfm = overlayXfm * pointerXfm;
reticleXfm = glm::scale(reticleXfm, reticleScale);
batch.setModelTransform(reticleXfm);
// Render reticle at location
geometryCache->renderUnitQuad(batch, glm::vec4(1), _reticleQuad);
}
glDepthMask(GL_TRUE);
glDisable(GL_TEXTURE_2D);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glEnable(GL_LIGHTING);
} glPopMatrix();
}
//Mouse Pointer
if (_reticleActive[MOUSE]) {
glm::vec2 projection = screenToSpherical(glm::vec2(_reticlePosition[MOUSE].x(),
_reticlePosition[MOUSE].y()));
mat4 pointerXfm = glm::mat4_cast(quat(vec3(-projection.y, projection.x, 0.0f))) * glm::translate(mat4(), vec3(0, 0, -1));
mat4 reticleXfm = overlayXfm * pointerXfm;
reticleXfm = glm::scale(reticleXfm, reticleScale);
batch.setModelTransform(reticleXfm);
geometryCache->renderUnitQuad(batch, glm::vec4(1), _reticleQuad);
}
renderArgs->_context->render(batch);
}
// Draws the FBO texture for 3DTV.
void ApplicationOverlay::displayOverlayTextureStereo(Camera& whichCamera, float aspectRatio, float fov) {
if (_alpha == 0.0f) {
return;
}
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
const glm::vec3& viewMatrixTranslation = qApp->getViewMatrixTranslation();
glEnable(GL_BLEND);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glEnable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glLoadIdentity();
// Transform to world space
glm::quat rotation = whichCamera.getRotation();
glm::vec3 axis2 = glm::axis(rotation);
glRotatef(-glm::degrees(glm::angle(rotation)), axis2.x, axis2.y, axis2.z);
glTranslatef(viewMatrixTranslation.x, viewMatrixTranslation.y, viewMatrixTranslation.z);
// Translate to the front of the camera
glm::vec3 pos = whichCamera.getPosition();
glm::quat rot = myAvatar->getOrientation();
glm::vec3 axis = glm::axis(rot);
glTranslatef(pos.x, pos.y, pos.z);
glRotatef(glm::degrees(glm::angle(rot)), axis.x, axis.y, axis.z);
glm::vec4 overlayColor = {1.0f, 1.0f, 1.0f, _alpha};
//Render
const GLfloat distance = 1.0f;
const GLfloat halfQuadHeight = distance * tan(fov);
const GLfloat halfQuadWidth = halfQuadHeight * aspectRatio;
const GLfloat quadWidth = halfQuadWidth * 2.0f;
const GLfloat quadHeight = halfQuadHeight * 2.0f;
GLfloat x = -halfQuadWidth;
GLfloat y = -halfQuadHeight;
glDisable(GL_DEPTH_TEST);
with_each_texture(_overlays.getTexture(), _newUiTexture, [&] {
DependencyManager::get<GeometryCache>()->renderQuad(glm::vec3(x, y + quadHeight, -distance),
glm::vec3(x + quadWidth, y + quadHeight, -distance),
glm::vec3(x + quadWidth, y, -distance),
glm::vec3(x, y, -distance),
glm::vec2(0.0f, 1.0f), glm::vec2(1.0f, 1.0f),
glm::vec2(1.0f, 0.0f), glm::vec2(0.0f, 0.0f),
overlayColor);
});
if (!_crosshairTexture) {
_crosshairTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() +
"images/sixense-reticle.png");
}
//draw the mouse pointer
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_crosshairTexture));
glm::vec2 canvasSize = qApp->getCanvasSize();
const float reticleSize = 40.0f / canvasSize.x * quadWidth;
x -= reticleSize / 2.0f;
y += reticleSize / 2.0f;
const float mouseX = (qApp->getMouseX() / (float)canvasSize.x) * quadWidth;
const float mouseY = (1.0 - (qApp->getMouseY() / (float)canvasSize.y)) * quadHeight;
glm::vec4 reticleColor = { RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], 1.0f };
DependencyManager::get<GeometryCache>()->renderQuad(glm::vec3(x + mouseX, y + mouseY, -distance),
glm::vec3(x + mouseX + reticleSize, y + mouseY, -distance),
glm::vec3(x + mouseX + reticleSize, y + mouseY - reticleSize, -distance),
glm::vec3(x + mouseX, y + mouseY - reticleSize, -distance),
glm::vec2(0.0f, 0.0f), glm::vec2(1.0f, 0.0f),
glm::vec2(1.0f, 1.0f), glm::vec2(0.0f, 1.0f),
reticleColor, _reticleQuad);
glEnable(GL_DEPTH_TEST);
glPopMatrix();
glDepthMask(GL_TRUE);
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glEnable(GL_LIGHTING);
}
void ApplicationOverlay::computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& origin, glm::vec3& direction) const {
cursorPos *= qApp->getCanvasSize();
@ -517,22 +421,6 @@ void ApplicationOverlay::computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& origi
direction = glm::normalize(intersectionWithUi - origin);
}
glm::vec2 getPolarCoordinates(const PalmData& palm) {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
auto avatarOrientation = myAvatar->getOrientation();
auto eyePos = myAvatar->getDefaultEyePosition();
glm::vec3 tip = myAvatar->getLaserPointerTipPosition(&palm);
// Direction of the tip relative to the eye
glm::vec3 tipDirection = tip - eyePos;
// orient into avatar space
tipDirection = glm::inverse(avatarOrientation) * tipDirection;
// Normalize for trig functions
tipDirection = glm::normalize(tipDirection);
// Convert to polar coordinates
glm::vec2 polar(glm::atan(tipDirection.x, -tipDirection.z), glm::asin(tipDirection.y));
return polar;
}
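As a side note, the polar pair above is (yaw, pitch) in avatar space. A minimal sketch of the inverse mapping, assuming the same atan/asin convention as getPolarCoordinates; this helper is illustrative only and not part of the change:
// Hypothetical inverse of the conversion above: polar.x is yaw (atan(x, -z)),
// polar.y is pitch (asin(y)); returns a unit direction in avatar space.
static glm::vec3 polarToDirection(const glm::vec2& polar) {
    float cosPitch = cosf(polar.y);
    return glm::vec3(cosPitch * sinf(polar.x),   // x
                     sinf(polar.y),              // y
                     -cosPitch * cosf(polar.x)); // z
}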
//Calculate the click location using one of the sixense controllers. Scale is not applied
QPoint ApplicationOverlay::getPalmClickLocation(const PalmData *palm) const {
QPoint rv;
@ -583,16 +471,12 @@ bool ApplicationOverlay::calculateRayUICollisionPoint(const glm::vec3& position,
//Renders optional pointers
void ApplicationOverlay::renderPointers() {
//lazily load crosshair texture
if (_crosshairTexture == 0) {
_crosshairTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/sixense-reticle.png");
}
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
//glEnable(GL_TEXTURE_2D);
//glEnable(GL_BLEND);
//glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_crosshairTexture));
//glActiveTexture(GL_TEXTURE0);
//bindCursorTexture();
if (qApp->isHMDMode() && !qApp->getLastMouseMoveWasSimulated() && !qApp->isMouseHidden()) {
//If we are in oculus, render reticle later
@ -637,8 +521,8 @@ void ApplicationOverlay::renderPointers() {
_magActive[MOUSE] = false;
renderControllerPointers();
}
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
//glBindTexture(GL_TEXTURE_2D, 0);
//glDisable(GL_TEXTURE_2D);
}
void ApplicationOverlay::renderControllerPointers() {
@ -751,43 +635,6 @@ void ApplicationOverlay::renderControllerPointers() {
}
}
void ApplicationOverlay::renderPointersOculus() {
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_crosshairTexture));
glDisable(GL_DEPTH_TEST);
glMatrixMode(GL_MODELVIEW);
//Controller Pointers
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
for (int i = 0; i < (int)myAvatar->getHand()->getNumPalms(); i++) {
PalmData& palm = myAvatar->getHand()->getPalms()[i];
if (palm.isActive()) {
glm::vec2 polar = getPolarCoordinates(palm);
// Convert to quaternion
glm::quat orientation = glm::quat(glm::vec3(polar.y, -polar.x, 0.0f));
// Render reticle at location
renderReticle(orientation, _alpha);
}
}
//Mouse Pointer
if (_reticleActive[MOUSE]) {
glm::vec2 projection = screenToSpherical(glm::vec2(_reticlePosition[MOUSE].x(),
_reticlePosition[MOUSE].y()));
glm::quat orientation(glm::vec3(-projection.y, projection.x, 0.0f));
renderReticle(orientation, _alpha);
}
glEnable(GL_DEPTH_TEST);
glDisable(GL_TEXTURE_2D);
glDisable(GL_BLEND);
}
//Renders a small magnification of the currently bound texture at the coordinates
void ApplicationOverlay::renderMagnifier(glm::vec2 magPos, float sizeMult, bool showBorder) {
if (!_magnifier) {
@ -1068,119 +915,109 @@ void ApplicationOverlay::renderDomainConnectionStatusBorder() {
}
}
ApplicationOverlay::TexturedHemisphere::TexturedHemisphere() :
_vertices(0),
_indices(0),
_framebufferObject(NULL),
_vbo(0, 0) {
}
ApplicationOverlay::TexturedHemisphere::~TexturedHemisphere() {
cleanupVBO();
if (_framebufferObject != NULL) {
delete _framebufferObject;
void ApplicationOverlay::buildHemiVertices(
const float fov, const float aspectRatio, const int slices, const int stacks) {
static float textureFOV = 0.0f, textureAspectRatio = 1.0f;
if (textureFOV == fov && textureAspectRatio == aspectRatio) {
return;
}
}
void ApplicationOverlay::TexturedHemisphere::bind() {
_framebufferObject->bind();
}
textureFOV = fov;
textureAspectRatio = aspectRatio;
auto geometryCache = DependencyManager::get<GeometryCache>();
_hemiVertices = gpu::BufferPointer(new gpu::Buffer());
_hemiIndices = gpu::BufferPointer(new gpu::Buffer());
void ApplicationOverlay::TexturedHemisphere::release() {
_framebufferObject->release();
}
void ApplicationOverlay::TexturedHemisphere::buildVBO(const float fov,
const float aspectRatio,
const int slices,
const int stacks) {
if (fov >= PI) {
qDebug() << "TexturedHemisphere::buildVBO(): FOV greater or equal than Pi will create issues";
}
// Cleanup old VBO if necessary
cleanupVBO();
//UV mapping source: http://www.mvps.org/directx/articles/spheremap.htm
// Compute number of vertices needed
_vertices = slices * stacks;
vec3 pos;
vec2 uv;
// Compute vertices positions and texture UV coordinate
TextureVertex* vertexData = new TextureVertex[_vertices];
TextureVertex* vertexPtr = &vertexData[0];
// Create and write to buffer
for (int i = 0; i < stacks; i++) {
float stacksRatio = (float)i / (float)(stacks - 1); // First stack is 0.0f, last stack is 1.0f
uv.y = (float)i / (float)(stacks - 1); // First stack is 0.0f, last stack is 1.0f
// abs(theta) <= fov / 2.0f
float pitch = -fov * (stacksRatio - 0.5f);
float pitch = -fov * (uv.y - 0.5f);
for (int j = 0; j < slices; j++) {
float slicesRatio = (float)j / (float)(slices - 1); // First slice is 0.0f, last slice is 1.0f
uv.x = (float)j / (float)(slices - 1); // First slice is 0.0f, last slice is 1.0f
// abs(phi) <= fov * aspectRatio / 2.0f
float yaw = -fov * aspectRatio * (slicesRatio - 0.5f);
vertexPtr->position = getPoint(yaw, pitch);
vertexPtr->uv.x = slicesRatio;
vertexPtr->uv.y = stacksRatio;
vertexPtr++;
float yaw = -fov * aspectRatio * (uv.x - 0.5f);
pos = getPoint(yaw, pitch);
static const vec4 color(1);
_hemiVertices->append(sizeof(pos), (gpu::Byte*)&pos);
_hemiVertices->append(sizeof(vec2), (gpu::Byte*)&uv);
_hemiVertices->append(sizeof(vec4), (gpu::Byte*)&color);
}
}
// Create and write to buffer
glGenBuffers(1, &_vbo.first);
glBindBuffer(GL_ARRAY_BUFFER, _vbo.first);
static const int BYTES_PER_VERTEX = sizeof(TextureVertex);
glBufferData(GL_ARRAY_BUFFER, _vertices * BYTES_PER_VERTEX, vertexData, GL_STATIC_DRAW);
delete[] vertexData;
// Compute number of indices needed
static const int VERTEX_PER_TRANGLE = 3;
static const int TRIANGLE_PER_RECTANGLE = 2;
int numberOfRectangles = (slices - 1) * (stacks - 1);
_indices = numberOfRectangles * TRIANGLE_PER_RECTANGLE * VERTEX_PER_TRANGLE;
_hemiIndexCount = numberOfRectangles * TRIANGLE_PER_RECTANGLE * VERTEX_PER_TRANGLE;
// Compute indices order
GLushort* indexData = new GLushort[_indices];
GLushort* indexPtr = indexData;
std::vector<GLushort> indices;
for (int i = 0; i < stacks - 1; i++) {
for (int j = 0; j < slices - 1; j++) {
GLushort bottomLeftIndex = i * slices + j;
GLushort bottomRightIndex = bottomLeftIndex + 1;
GLushort topLeftIndex = bottomLeftIndex + slices;
GLushort topRightIndex = topLeftIndex + 1;
*(indexPtr++) = topLeftIndex;
*(indexPtr++) = bottomLeftIndex;
*(indexPtr++) = topRightIndex;
*(indexPtr++) = topRightIndex;
*(indexPtr++) = bottomLeftIndex;
*(indexPtr++) = bottomRightIndex;
// FIXME make a z-order curve for better vertex cache locality
indices.push_back(topLeftIndex);
indices.push_back(bottomLeftIndex);
indices.push_back(topRightIndex);
indices.push_back(topRightIndex);
indices.push_back(bottomLeftIndex);
indices.push_back(bottomRightIndex);
}
}
// Create and write to buffer
glGenBuffers(1, &_vbo.second);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _vbo.second);
static const int BYTES_PER_INDEX = sizeof(GLushort);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, _indices * BYTES_PER_INDEX, indexData, GL_STATIC_DRAW);
delete[] indexData;
_hemiIndices->append(sizeof(GLushort) * indices.size(), (gpu::Byte*)&indices[0]);
}
void ApplicationOverlay::TexturedHemisphere::cleanupVBO() {
if (_vbo.first != 0) {
glDeleteBuffers(1, &_vbo.first);
_vbo.first = 0;
}
if (_vbo.second != 0) {
glDeleteBuffers(1, &_vbo.second);
_vbo.second = 0;
}
void ApplicationOverlay::drawSphereSection(gpu::Batch& batch) {
buildHemiVertices(_textureFov, _textureAspectRatio, 80, 80);
static const int VERTEX_DATA_SLOT = 0;
static const int TEXTURE_DATA_SLOT = 1;
static const int COLOR_DATA_SLOT = 2;
gpu::Stream::FormatPointer streamFormat(new gpu::Stream::Format()); // 1 for everyone
streamFormat->setAttribute(gpu::Stream::POSITION, VERTEX_DATA_SLOT, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), 0);
streamFormat->setAttribute(gpu::Stream::TEXCOORD, TEXTURE_DATA_SLOT, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::UV));
streamFormat->setAttribute(gpu::Stream::COLOR, COLOR_DATA_SLOT, gpu::Element(gpu::VEC4, gpu::FLOAT, gpu::RGBA));
batch.setInputFormat(streamFormat);
static const int VERTEX_STRIDE = sizeof(vec3) + sizeof(vec2) + sizeof(vec4);
gpu::BufferView posView(_hemiVertices, 0, _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::POSITION)._element);
gpu::BufferView uvView(_hemiVertices, sizeof(vec3), _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::TEXCOORD)._element);
gpu::BufferView colView(_hemiVertices, sizeof(vec3) + sizeof(vec2), _hemiVertices->getSize(), VERTEX_STRIDE, streamFormat->getAttributes().at(gpu::Stream::COLOR)._element);
batch.setInputBuffer(VERTEX_DATA_SLOT, posView);
batch.setInputBuffer(TEXTURE_DATA_SLOT, uvView);
batch.setInputBuffer(COLOR_DATA_SLOT, colView);
batch.setIndexBuffer(gpu::UINT16, _hemiIndices, 0);
batch.drawIndexed(gpu::TRIANGLES, _hemiIndexCount);
}
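For reference, the interleaved layout that the buffer views above assume can be pictured as the following struct; this is a sketch for clarity, not code from the change:
// One interleaved vertex as appended by buildHemiVertices:
struct HemiVertex {
    glm::vec3 position; // offset 0
    glm::vec2 uv;       // offset sizeof(vec3)
    glm::vec4 color;    // offset sizeof(vec3) + sizeof(vec2)
};                      // sizeof(HemiVertex) == VERTEX_STRIDE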
void ApplicationOverlay::TexturedHemisphere::buildFramebufferObject() {
GLuint ApplicationOverlay::getOverlayTexture() {
return _framebufferObject->texture();
}
void ApplicationOverlay::buildFramebufferObject() {
auto canvasSize = qApp->getCanvasSize();
QSize fboSize = QSize(canvasSize.x, canvasSize.y);
if (_framebufferObject != NULL && fboSize == _framebufferObject->size()) {
// Already build
// Already built
return;
}
@ -1189,7 +1026,7 @@ void ApplicationOverlay::TexturedHemisphere::buildFramebufferObject() {
}
_framebufferObject = new QOpenGLFramebufferObject(fboSize, QOpenGLFramebufferObject::Depth);
glBindTexture(GL_TEXTURE_2D, getTexture());
glBindTexture(GL_TEXTURE_2D, getOverlayTexture());
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER);
@ -1199,38 +1036,6 @@ void ApplicationOverlay::TexturedHemisphere::buildFramebufferObject() {
glBindTexture(GL_TEXTURE_2D, 0);
}
//Renders a hemisphere with texture coordinates.
void ApplicationOverlay::TexturedHemisphere::render() {
if (_framebufferObject == NULL || _vbo.first == 0 || _vbo.second == 0) {
qDebug() << "TexturedHemisphere::render(): Incorrect initialisation";
return;
}
glBindBuffer(GL_ARRAY_BUFFER, _vbo.first);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _vbo.second);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
static const int STRIDE = sizeof(TextureVertex);
static const void* VERTEX_POINTER = 0;
static const void* TEX_COORD_POINTER = (void*)sizeof(glm::vec3);
glVertexPointer(3, GL_FLOAT, STRIDE, VERTEX_POINTER);
glTexCoordPointer(2, GL_FLOAT, STRIDE, TEX_COORD_POINTER);
glDrawRangeElements(GL_TRIANGLES, 0, _vertices - 1, _indices, GL_UNSIGNED_SHORT, 0);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}
GLuint ApplicationOverlay::TexturedHemisphere::getTexture() {
return _framebufferObject->texture();
}
glm::vec2 ApplicationOverlay::directionToSpherical(const glm::vec3& direction) {
glm::vec2 result;
// Compute yaw

View file

@ -33,9 +33,9 @@ public:
~ApplicationOverlay();
void renderOverlay(RenderArgs* renderArgs);
void displayOverlayTexture();
void displayOverlayTextureStereo(Camera& whichCamera, float aspectRatio, float fov);
void displayOverlayTextureHmd(Camera& whichCamera);
void displayOverlayTexture(RenderArgs* renderArgs);
void displayOverlayTextureStereo(RenderArgs* renderArgs, Camera& whichCamera, float aspectRatio, float fov);
void displayOverlayTextureHmd(RenderArgs* renderArgs, Camera& whichCamera);
QPoint getPalmClickLocation(const PalmData *palm) const;
bool calculateRayUICollisionPoint(const glm::vec3& position, const glm::vec3& direction, glm::vec3& result) const;
@ -59,6 +59,7 @@ public:
glm::vec2 screenToOverlay(const glm::vec2 & screenPos) const;
glm::vec2 overlayToScreen(const glm::vec2 & overlayPos) const;
void computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& origin, glm::vec3& direction) const;
GLuint getOverlayTexture();
static glm::vec2 directionToSpherical(const glm::vec3 & direction);
static glm::vec3 sphericalToDirection(const glm::vec2 & sphericalPos);
@ -66,39 +67,12 @@ public:
static glm::vec2 sphericalToScreen(const glm::vec2 & sphericalPos);
private:
// Interleaved vertex data
struct TextureVertex {
glm::vec3 position;
glm::vec2 uv;
};
typedef QPair<GLuint, GLuint> VerticesIndices;
class TexturedHemisphere {
public:
TexturedHemisphere();
~TexturedHemisphere();
void bind();
void release();
GLuint getTexture();
void buildFramebufferObject();
void buildVBO(const float fov, const float aspectRatio, const int slices, const int stacks);
void render();
private:
void cleanupVBO();
GLuint _vertices;
GLuint _indices;
QOpenGLFramebufferObject* _framebufferObject;
VerticesIndices _vbo;
};
void buildHemiVertices(const float fov, const float aspectRatio, const int slices, const int stacks);
void drawSphereSection(gpu::Batch& batch);
float _hmdUIAngularSize = DEFAULT_HMD_UI_ANGULAR_SIZE;
void renderReticle(glm::quat orientation, float alpha);
void renderPointers();;
QOpenGLFramebufferObject* _framebufferObject;
void renderPointers();
void renderMagnifier(glm::vec2 magPos, float sizeMult, bool showBorder);
void renderControllerPointers();
@ -108,11 +82,14 @@ private:
void renderCameraToggle();
void renderStatsAndLogs();
void renderDomainConnectionStatusBorder();
void bindCursorTexture(gpu::Batch& batch, uint8_t cursorId = 0);
TexturedHemisphere _overlays;
void buildFramebufferObject();
float _textureFov;
float _textureAspectRatio;
int _hemiVerticesID{ GeometryCache::UNKNOWN_ID };
enum Reticles { MOUSE, LEFT_CONTROLLER, RIGHT_CONTROLLER, NUMBER_OF_RETICLES };
bool _reticleActive[NUMBER_OF_RETICLES];
@ -127,7 +104,8 @@ private:
float _trailingAudioLoudness;
gpu::TexturePointer _crosshairTexture;
QMap<uint16_t, gpu::TexturePointer> _cursors;
GLuint _newUiTexture{ 0 };
int _reticleQuad;
@ -146,6 +124,10 @@ private:
glm::vec3 _previousMagnifierTopLeft;
glm::vec3 _previousMagnifierTopRight;
gpu::PipelinePointer _standardDrawPipeline;
gpu::PipelinePointer getDrawPipeline();
};
#endif // hifi_ApplicationOverlay_h

View file

@ -13,7 +13,15 @@
// UserInputMapper Class
// Default constructor allocates the output size for the current hardcoded action channels
UserInputMapper::UserInputMapper() {
assignDefaulActionScales();
createActionNames();
}
bool UserInputMapper::registerDevice(uint16 deviceID, const DeviceProxy::Pointer& proxy){
proxy->_name += " (" + QString::number(deviceID) + ")";
_registeredDevices[deviceID] = proxy;
return true;
}
@ -27,6 +35,12 @@ UserInputMapper::DeviceProxy::Pointer UserInputMapper::getDeviceProxy(const Inpu
}
}
void UserInputMapper::resetAllDeviceBindings() {
for (auto device : _registeredDevices) {
device.second->resetDeviceBindings();
}
}
bool UserInputMapper::addInputChannel(Action action, const Input& input, float scale) {
return addInputChannel(action, input, Input(), scale);
}
@ -37,7 +51,7 @@ bool UserInputMapper::addInputChannel(Action action, const Input& input, const I
qDebug() << "UserInputMapper::addInputChannel: The input comes from a device #" << input.getDevice() << "is unknown. no inputChannel mapped.";
return false;
}
auto inputChannel = InputChannel(input, modifier, action, scale);
// Insert or replace the input to modifiers
@ -61,6 +75,37 @@ int UserInputMapper::addInputChannels(const InputChannels& channels) {
return nbAdded;
}
bool UserInputMapper::removeInputChannel(InputChannel inputChannel) {
// Remove from Input to Modifiers map
if (inputChannel.hasModifier()) {
_inputToModifiersMap.erase(inputChannel._input.getID());
}
// Remove from Action to Inputs map
std::pair<ActionToInputsMap::iterator, ActionToInputsMap::iterator> ret;
ret = _actionToInputsMap.equal_range(inputChannel._action);
for (ActionToInputsMap::iterator it=ret.first; it!=ret.second; ++it) {
if (it->second == inputChannel) {
_actionToInputsMap.erase(it);
return true;
}
}
return false;
}
void UserInputMapper::removeAllInputChannels() {
_inputToModifiersMap.clear();
_actionToInputsMap.clear();
}
void UserInputMapper::removeAllInputChannelsForDevice(uint16 device) {
QVector<InputChannel> channels = getAllInputsForDevice(device);
for (auto& channel : channels) {
removeInputChannel(channel);
}
}
int UserInputMapper::getInputChannels(InputChannels& channels) const {
for (auto& channel : _actionToInputsMap) {
channels.push_back(channel.second);
@ -69,6 +114,20 @@ int UserInputMapper::getInputChannels(InputChannels& channels) const {
return _actionToInputsMap.size();
}
QVector<UserInputMapper::InputChannel> UserInputMapper::getAllInputsForDevice(uint16 device) {
InputChannels allChannels;
getInputChannels(allChannels);
QVector<InputChannel> channels;
for (InputChannel inputChannel : allChannels) {
if (inputChannel._input._device == device) {
channels.push_back(inputChannel);
}
}
return channels;
}
void UserInputMapper::update(float deltaTime) {
// Reset the axis state for next loop
@ -130,6 +189,24 @@ void UserInputMapper::update(float deltaTime) {
}
}
QVector<UserInputMapper::Action> UserInputMapper::getAllActions() {
QVector<Action> actions;
for (auto i = 0; i < NUM_ACTIONS; i++) {
actions.append(Action(i));
}
return actions;
}
QVector<UserInputMapper::InputChannel> UserInputMapper::getInputChannelsForAction(UserInputMapper::Action action) {
QVector<InputChannel> inputChannels;
std::pair <ActionToInputsMap::iterator, ActionToInputsMap::iterator> ret;
ret = _actionToInputsMap.equal_range(action);
for (ActionToInputsMap::iterator it=ret.first; it!=ret.second; ++it) {
inputChannels.append(it->second);
}
return inputChannels;
}
void UserInputMapper::assignDefaulActionScales() {
_actionScales[LONGITUDINAL_BACKWARD] = 1.0f; // 1m per unit
_actionScales[LONGITUDINAL_FORWARD] = 1.0f; // 1m per unit
@ -144,3 +221,20 @@ void UserInputMapper::assignDefaulActionScales() {
_actionScales[BOOM_IN] = 1.0f; // 1m per unit
_actionScales[BOOM_OUT] = 1.0f; // 1m per unit
}
// This is only necessary as long as the actions are hardcoded
// Eventually you can just add the string when you add the action
void UserInputMapper::createActionNames() {
_actionNames[LONGITUDINAL_BACKWARD] = "LONGITUDINAL_BACKWARD";
_actionNames[LONGITUDINAL_FORWARD] = "LONGITUDINAL_FORWARD";
_actionNames[LATERAL_LEFT] = "LATERAL_LEFT";
_actionNames[LATERAL_RIGHT] = "LATERAL_RIGHT";
_actionNames[VERTICAL_DOWN] = "VERTICAL_DOWN";
_actionNames[VERTICAL_UP] = "VERTICAL_UP";
_actionNames[YAW_LEFT] = "YAW_LEFT";
_actionNames[YAW_RIGHT] = "YAW_RIGHT";
_actionNames[PITCH_DOWN] = "PITCH_DOWN";
_actionNames[PITCH_UP] = "PITCH_UP";
_actionNames[BOOM_IN] = "BOOM_IN";
_actionNames[BOOM_OUT] = "BOOM_OUT";
}
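To illustrate the registration flow introduced above, a hedged sketch of a hypothetical device registering itself and binding one button to an action; the device name and channel number are made up, and it assumes the addInputChannel overload shown earlier is publicly accessible:
// Illustration only -- not part of this change.
void registerExampleDevice(UserInputMapper& mapper) {
    auto deviceID = mapper.getFreeDeviceID();
    auto proxy = std::make_shared<UserInputMapper::DeviceProxy>(QString("ExampleDevice"));
    mapper.registerDevice(deviceID, proxy); // proxy name becomes "ExampleDevice (<id>)"
    // bind the device's button 0 to forward motion at full scale
    mapper.addInputChannel(UserInputMapper::LONGITUDINAL_FORWARD,
                           UserInputMapper::Input(deviceID, 0, UserInputMapper::ChannelType::BUTTON),
                           1.0f);
}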

View file

@ -21,6 +21,7 @@
class UserInputMapper : public QObject {
Q_OBJECT
Q_ENUMS(Action)
public:
typedef unsigned short uint16;
typedef unsigned int uint32;
@ -51,8 +52,13 @@ public:
uint16 getDevice() const { return _device; }
uint16 getChannel() const { return _channel; }
uint32 getID() const { return _id; }
ChannelType getType() const { return (ChannelType) _type; }
void setDevice(uint16 device) { _device = device; }
void setChannel(uint16 channel) { _channel = channel; }
void setType(uint16 type) { _type = type; }
void setID(uint32 ID) { _id = ID; }
bool isButton() const { return getType() == ChannelType::BUTTON; }
bool isAxis() const { return getType() == ChannelType::AXIS; }
bool isJoint() const { return getType() == ChannelType::JOINT; }
@ -64,6 +70,7 @@ public:
explicit Input(uint16 device, uint16 channel, ChannelType type) : _device(device), _channel(channel), _type(uint16(type)) {}
Input(const Input& src) : _id(src._id) {}
Input& operator = (const Input& src) { _id = src._id; return (*this); }
bool operator ==(const Input& right) const { return _id == right._id; }
bool operator < (const Input& src) const { return _id < src._id; }
};
@ -83,22 +90,32 @@ public:
typedef std::function<bool (const Input& input, int timestamp)> ButtonGetter;
typedef std::function<float (const Input& input, int timestamp)> AxisGetter;
typedef std::function<JointValue (const Input& input, int timestamp)> JointGetter;
typedef QPair<Input, QString> InputPair;
typedef std::function<QVector<InputPair> ()> AvailableInputGetter;
typedef std::function<bool ()> ResetBindings;
typedef QVector<InputPair> AvailableInput;
class DeviceProxy {
public:
DeviceProxy() {}
ButtonGetter getButton = [] (const Input& input, int timestamp) -> bool { return false; };
AxisGetter getAxis = [] (const Input& input, int timestamp) -> float { return 0.0f; };
JointGetter getJoint = [] (const Input& input, int timestamp) -> JointValue { return JointValue(); };
typedef std::shared_ptr<DeviceProxy> Pointer;
DeviceProxy(QString name) { _name = name; }
QString _name;
ButtonGetter getButton = [] (const Input& input, int timestamp) -> bool { return false; };
AxisGetter getAxis = [] (const Input& input, int timestamp) -> float { return 0.0f; };
JointGetter getJoint = [] (const Input& input, int timestamp) -> JointValue { return JointValue(); };
AvailableInputGetter getAvailabeInputs = [] () -> AvailableInput { return QVector<InputPair>(); };
ResetBindings resetDeviceBindings = [] () -> bool { return true; };
typedef std::shared_ptr<DeviceProxy> Pointer;
};
// getFreeDeviceID should be called before registering a device, to obtain an ID not used by another device.
uint16 getFreeDeviceID() { return _nextFreeDeviceID++; }
bool registerDevice(uint16 deviceID, const DeviceProxy::Pointer& device);
DeviceProxy::Pointer getDeviceProxy(const Input& input);
QString getDeviceName(uint16 deviceID) { return _registeredDevices[deviceID]->_name; }
QVector<InputPair> getAvailableInputs(uint16 deviceID) { return _registeredDevices[deviceID]->getAvailabeInputs(); }
void resetAllDeviceBindings();
// Actions are the output channels of the Mapper, that's what the InputChannel map to
// For now the Actions are hardcoded, this is bad, but we will fix that in the near future
@ -123,7 +140,12 @@ public:
NUM_ACTIONS,
};
std::vector<QString> _actionNames = std::vector<QString>(NUM_ACTIONS);
void createActionNames();
QVector<Action> getAllActions();
QString getActionName(Action action) { return UserInputMapper::_actionNames[(int) action]; }
float getActionState(Action action) const { return _actionStates[action]; }
void assignDefaulActionScales();
@ -140,27 +162,43 @@ public:
Input _modifier = Input(); // make it invalid by default, meaning no modifier
Action _action = LONGITUDINAL_BACKWARD;
float _scale = 0.0f;
Input getInput() const { return _input; }
Input getModifier() const { return _modifier; }
Action getAction() const { return _action; }
float getScale() const { return _scale; }
void setInput(Input input) { _input = input; }
void setModifier(Input modifier) { _modifier = modifier; }
void setAction(Action action) { _action = action; }
void setScale(float scale) { _scale = scale; }
InputChannel() {}
InputChannel(const Input& input, const Input& modifier, Action action, float scale = 1.0f) :
_input(input), _modifier(modifier), _action(action), _scale(scale) {}
InputChannel(const InputChannel& src) : InputChannel(src._input, src._modifier, src._action, src._scale) {}
InputChannel& operator = (const InputChannel& src) { _input = src._input; _modifier = src._modifier; _action = src._action; _scale = src._scale; return (*this); }
bool operator ==(const InputChannel& right) const { return _input == right._input && _modifier == right._modifier && _action == right._action && _scale == right._scale; }
bool hasModifier() { return _modifier.isValid(); }
};
typedef std::vector< InputChannel > InputChannels;
// Add a bunch of input channels, return the true number of channels that successfully were added
int addInputChannels(const InputChannels& channels);
// Remove the first found instance of the input channel from the input mapper, true if found
bool removeInputChannel(InputChannel channel);
void removeAllInputChannels();
void removeAllInputChannelsForDevice(uint16 device);
//Grab all the input channels currently in use, return the number
int getInputChannels(InputChannels& channels) const;
QVector<InputChannel> getAllInputsForDevice(uint16 device);
QVector<InputChannel> getInputChannelsForAction(UserInputMapper::Action action);
std::multimap<Action, InputChannel> getActionToInputsMap() { return _actionToInputsMap; }
// Update means go grab all the device input channels and update the output channel values
void update(float deltaTime);
// Default constructor allocates the output size for the current hardcoded action channels
UserInputMapper() { assignDefaulActionScales(); }
UserInputMapper();
protected:
typedef std::map<int, DeviceProxy::Pointer> DevicesMap;
@ -177,4 +215,12 @@ protected:
std::vector<float> _actionScales = std::vector<float>(NUM_ACTIONS, 1.0f);
};
Q_DECLARE_METATYPE(UserInputMapper::InputPair)
Q_DECLARE_METATYPE(QVector<UserInputMapper::InputPair>)
Q_DECLARE_METATYPE(UserInputMapper::Input)
Q_DECLARE_METATYPE(UserInputMapper::InputChannel)
Q_DECLARE_METATYPE(QVector<UserInputMapper::InputChannel>)
Q_DECLARE_METATYPE(UserInputMapper::Action)
Q_DECLARE_METATYPE(QVector<UserInputMapper::Action>)
#endif // hifi_UserInputMapper_h

View file

@ -156,6 +156,7 @@ void AudioClient::audioMixerKilled() {
_hasReceivedFirstPacket = false;
_outgoingAvatarAudioSequenceNumber = 0;
_stats.reset();
emit disconnected();
}
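Consumers can react to the new signal; a hedged sketch (the receiver class and slot are hypothetical):
// Illustration only -- not part of this change.
connect(DependencyManager::get<AudioClient>().data(), &AudioClient::disconnected,
        this, &ExampleListener::handleAudioDisconnected);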

View file

@ -186,6 +186,7 @@ signals:
void deviceChanged();
void receivedFirstPacket();
void disconnected();
protected:
AudioClient();

View file

@ -1012,7 +1012,9 @@ void EntityTreeRenderer::deletingEntity(const EntityItemID& entityID) {
void EntityTreeRenderer::addingEntity(const EntityItemID& entityID) {
checkAndCallPreload(entityID);
auto entity = static_cast<EntityTree*>(_tree)->findEntityByID(entityID);
addEntityToScene(entity);
if (entity) {
addEntityToScene(entity);
}
}
void EntityTreeRenderer::addEntityToScene(EntityItemPointer entity) {

View file

@ -33,7 +33,7 @@ void RenderableBoxEntityItem::render(RenderArgs* args) {
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
batch.setModelTransform(getTransformToCenter());
batch.setModelTransform(getTransformToCenter()); // we want to include the scale as well
DependencyManager::get<DeferredLightingEffect>()->renderSolidCube(batch, 1.0f, cubeColor);
RenderableDebugableEntityItem::render(this, args);

View file

@ -24,9 +24,7 @@ void RenderableDebugableEntityItem::renderBoundingBox(EntityItem* entity, Render
float puffedOut, glm::vec4& color) {
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
Transform transform = entity->getTransformToCenter();
//transform.postScale(entity->getDimensions());
batch.setModelTransform(transform);
batch.setModelTransform(entity->getTransformToCenter()); // we want to include the scale as well
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(batch, 1.0f + puffedOut, color);
}
@ -34,10 +32,9 @@ void RenderableDebugableEntityItem::render(EntityItem* entity, RenderArgs* args)
if (args->_debugFlags & RenderArgs::RENDER_DEBUG_SIMULATION_OWNERSHIP) {
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
Transform transform = entity->getTransformToCenter();
transform.postScale(entity->getDimensions());
batch.setModelTransform(transform);
batch.setModelTransform(entity->getTransformToCenter()); // we want to include the scale as well
auto nodeList = DependencyManager::get<NodeList>();
const QUuid& myNodeID = nodeList->getSessionUUID();
bool highlightSimulationOwnership = (entity->getSimulatorID() == myNodeID);

View file

@ -39,7 +39,7 @@ void RenderableSphereEntityItem::render(RenderArgs* args) {
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
batch.setModelTransform(getTransformToCenter());
batch.setModelTransform(getTransformToCenter()); // use a transform with scale, rotation, registration point and translation
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphere(batch, 0.5f, SLICES, STACKS, sphereColor);
RenderableDebugableEntityItem::render(this, args);

View file

@ -14,6 +14,7 @@
#include <gpu/GPUConfig.h>
#include <gpu/Batch.h>
#include <AbstractViewStateInterface.h>
#include <DeferredLightingEffect.h>
#include <DependencyManager.h>
#include <GeometryCache.h>
@ -100,10 +101,17 @@ void RenderableZoneEntityItem::render(RenderArgs* args) {
case SHAPE_TYPE_COMPOUND: {
PerformanceTimer perfTimer("zone->renderCompound");
updateGeometry();
if (_model && _model->isActive()) {
// FIX ME: this is no longer available... we need to switch to payloads
//_model->renderInScene(getLocalRenderAlpha(), args);
if (_model && _model->needsFixupInScene()) {
// check to see if the model was ready when we added it to the scene; if it wasn't,
// fix it up in the scene now
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();
render::PendingChanges pendingChanges;
_model->removeFromScene(scene, pendingChanges);
_model->addToScene(scene, pendingChanges);
scene->enqueuePendingChanges(pendingChanges);
_model->setVisibleInScene(getVisible(), scene);
}
break;
}
@ -131,6 +139,15 @@ void RenderableZoneEntityItem::render(RenderArgs* args) {
break;
}
}
if ((!_drawZoneBoundaries || getShapeType() != SHAPE_TYPE_COMPOUND) &&
_model && !_model->needsFixupInScene()) {
// If the model is in the scene but doesn't need to be, remove it.
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();
render::PendingChanges pendingChanges;
_model->removeFromScene(scene, pendingChanges);
scene->enqueuePendingChanges(pendingChanges);
}
}
bool RenderableZoneEntityItem::contains(const glm::vec3& point) const {
@ -145,3 +162,51 @@ bool RenderableZoneEntityItem::contains(const glm::vec3& point) const {
return false;
}
class RenderableZoneEntityItemMeta {
public:
RenderableZoneEntityItemMeta(EntityItemPointer entity) : entity(entity){ }
typedef render::Payload<RenderableZoneEntityItemMeta> Payload;
typedef Payload::DataPointer Pointer;
EntityItemPointer entity;
};
namespace render {
template <> const ItemKey payloadGetKey(const RenderableZoneEntityItemMeta::Pointer& payload) {
return ItemKey::Builder::opaqueShape();
}
template <> const Item::Bound payloadGetBound(const RenderableZoneEntityItemMeta::Pointer& payload) {
if (payload && payload->entity) {
return payload->entity->getAABox();
}
return render::Item::Bound();
}
template <> void payloadRender(const RenderableZoneEntityItemMeta::Pointer& payload, RenderArgs* args) {
if (args) {
if (payload && payload->entity) {
payload->entity->render(args);
}
}
}
}
bool RenderableZoneEntityItem::addToScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene,
render::PendingChanges& pendingChanges) {
_myMetaItem = scene->allocateID();
auto renderData = RenderableZoneEntityItemMeta::Pointer(new RenderableZoneEntityItemMeta(self));
auto renderPayload = render::PayloadPointer(new RenderableZoneEntityItemMeta::Payload(renderData));
pendingChanges.resetItem(_myMetaItem, renderPayload);
return true;
}
void RenderableZoneEntityItem::removeFromScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene,
render::PendingChanges& pendingChanges) {
pendingChanges.removeItem(_myMetaItem);
if (_model) {
_model->removeFromScene(scene, pendingChanges);
}
}

View file

@ -35,6 +35,9 @@ public:
virtual void render(RenderArgs* args);
virtual bool contains(const glm::vec3& point) const;
virtual bool addToScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);
virtual void removeFromScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);
private:
Model* getModel();
void initialSimulation();
@ -45,6 +48,8 @@ private:
Model* _model;
bool _needsInitialSimulation;
render::ItemID _myMetaItem;
};
#endif // hifi_RenderableZoneEntityItem_h

View file

@ -0,0 +1,33 @@
//
// EntityActionFactoryInterface.cpp
// libraries/entities/src
//
// Created by Seth Alves on 2015-6-2
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_EntityActionFactoryInterface_h
#define hifi_EntityActionFactoryInterface_h
#include <DependencyManager.h>
#include "EntityActionInterface.h"
class EntityActionFactoryInterface : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
public:
EntityActionFactoryInterface() { }
virtual ~EntityActionFactoryInterface() { }
virtual EntityActionPointer factory(EntitySimulation* simulation,
EntityActionType type,
QUuid id,
EntityItemPointer ownerEntity,
QVariantMap arguments) { assert(false); return nullptr; }
};
#endif // hifi_EntityActionFactoryInterface_h
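A hedged sketch of what a concrete factory might look like; the class name is hypothetical, and a real implementation would construct the appropriate action type and apply its arguments:
// Illustration only -- not part of this header.
class ExampleActionFactory : public EntityActionFactoryInterface {
public:
    virtual EntityActionPointer factory(EntitySimulation* simulation,
                                        EntityActionType type,
                                        QUuid id,
                                        EntityItemPointer ownerEntity,
                                        QVariantMap arguments) {
        // a real factory switches on type, builds e.g. a spring or
        // pull-to-point action, then calls updateArguments(arguments)
        return nullptr;
    }
};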

View file

@ -22,6 +22,12 @@ EntityActionType EntityActionInterface::actionTypeFromString(QString actionTypeS
if (normalizedActionTypeString == "pulltopoint") {
return ACTION_TYPE_PULL_TO_POINT;
}
if (normalizedActionTypeString == "spring") {
return ACTION_TYPE_SPRING;
}
if (normalizedActionTypeString == "hold") {
return ACTION_TYPE_HOLD;
}
qDebug() << "Warning -- EntityActionInterface::actionTypeFromString got unknown action-type name" << actionTypeString;
return ACTION_TYPE_NONE;
@ -33,31 +39,37 @@ QString EntityActionInterface::actionTypeToString(EntityActionType actionType) {
return "none";
case ACTION_TYPE_PULL_TO_POINT:
return "pullToPoint";
case ACTION_TYPE_SPRING:
return "spring";
case ACTION_TYPE_HOLD:
return "hold";
}
assert(false);
return "none";
}
glm::vec3 EntityActionInterface::extractVec3Argument(QString objectName, QVariantMap arguments,
QString argumentName, bool& ok) {
QString argumentName, bool& ok, bool required) {
if (!arguments.contains(argumentName)) {
qDebug() << objectName << "requires argument:" << argumentName;
if (required) {
qDebug() << objectName << "requires argument:" << argumentName;
}
ok = false;
return vec3();
return glm::vec3();
}
QVariant resultV = arguments[argumentName];
if (resultV.type() != (QVariant::Type) QMetaType::QVariantMap) {
qDebug() << objectName << "argument" << argumentName << "must be a map";
ok = false;
return vec3();
return glm::vec3();
}
QVariantMap resultVM = resultV.toMap();
if (!resultVM.contains("x") || !resultVM.contains("y") || !resultVM.contains("z")) {
qDebug() << objectName << "argument" << argumentName << "must be a map with keys of x, y, z";
ok = false;
return vec3();
return glm::vec3();
}
QVariant xV = resultVM["x"];
@ -73,17 +85,65 @@ glm::vec3 EntityActionInterface::extractVec3Argument(QString objectName, QVarian
if (!xOk || !yOk || !zOk) {
qDebug() << objectName << "argument" << argumentName << "must be a map with keys of x, y, z and values of type float.";
ok = false;
return vec3();
return glm::vec3();
}
return vec3(x, y, z);
return glm::vec3(x, y, z);
}
glm::quat EntityActionInterface::extractQuatArgument(QString objectName, QVariantMap arguments,
QString argumentName, bool& ok, bool required) {
if (!arguments.contains(argumentName)) {
if (required) {
qDebug() << objectName << "requires argument:" << argumentName;
}
ok = false;
return glm::quat();
}
QVariant resultV = arguments[argumentName];
if (resultV.type() != (QVariant::Type) QMetaType::QVariantMap) {
qDebug() << objectName << "argument" << argumentName << "must be a map, not" << resultV.typeName();
ok = false;
return glm::quat();
}
QVariantMap resultVM = resultV.toMap();
if (!resultVM.contains("x") || !resultVM.contains("y") || !resultVM.contains("z")) {
qDebug() << objectName << "argument" << argumentName << "must be a map with keys of x, y, z";
ok = false;
return glm::quat();
}
QVariant xV = resultVM["x"];
QVariant yV = resultVM["y"];
QVariant zV = resultVM["z"];
QVariant wV = resultVM["w"];
bool xOk = true;
bool yOk = true;
bool zOk = true;
bool wOk = true;
float x = xV.toFloat(&xOk);
float y = yV.toFloat(&yOk);
float z = zV.toFloat(&zOk);
float w = wV.toFloat(&wOk);
if (!xOk || !yOk || !zOk || !wOk) {
qDebug() << objectName << "argument" << argumentName
<< "must be a map with keys of x, y, z, w and values of type float.";
ok = false;
return glm::quat();
}
return glm::quat(w, x, y, z);
}
float EntityActionInterface::extractFloatArgument(QString objectName, QVariantMap arguments,
QString argumentName, bool& ok) {
QString argumentName, bool& ok, bool required) {
if (!arguments.contains(argumentName)) {
qDebug() << objectName << "requires argument:" << argumentName;
if (required) {
qDebug() << objectName << "requires argument:" << argumentName;
}
ok = false;
return 0.0f;
}
@ -99,3 +159,18 @@ float EntityActionInterface::extractFloatArgument(QString objectName, QVariantMa
return v;
}
QString EntityActionInterface::extractStringArgument(QString objectName, QVariantMap arguments,
QString argumentName, bool& ok, bool required) {
if (!arguments.contains(argumentName)) {
if (required) {
qDebug() << objectName << "requires argument:" << argumentName;
}
ok = false;
return "";
}
QVariant vV = arguments[argumentName];
QString v = vV.toString();
return v;
}
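As a usage sketch of the helpers above, a hypothetical action subclass could read a rotation argument like this; the keys must be x, y, z, w and the result is constructed as glm::quat(w, x, y, z). The class name is made up:
// Illustration only -- not part of this change.
bool ObjectActionExample::updateArguments(QVariantMap arguments) {
    bool ok = true;
    glm::quat target = extractQuatArgument("example action", arguments, "targetRotation", ok);
    return ok; // a real action would store the target under its lock
}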

View file

@ -14,12 +14,16 @@
#include <QUuid>
#include "EntityItem.h"
class EntitySimulation;
enum EntityActionType {
// keep these synchronized with actionTypeFromString and actionTypeToString
ACTION_TYPE_NONE,
ACTION_TYPE_PULL_TO_POINT
ACTION_TYPE_PULL_TO_POINT,
ACTION_TYPE_SPRING,
ACTION_TYPE_HOLD
};
@ -32,18 +36,35 @@ public:
virtual const EntityItemPointer& getOwnerEntity() const = 0;
virtual void setOwnerEntity(const EntityItemPointer ownerEntity) = 0;
virtual bool updateArguments(QVariantMap arguments) = 0;
// virtual QByteArray serialize() = 0;
// static EntityActionPointer deserialize(EntityItemPointer ownerEntity, QByteArray data);
static EntityActionType actionTypeFromString(QString actionTypeString);
static QString actionTypeToString(EntityActionType actionType);
protected:
virtual glm::vec3 getPosition() = 0;
virtual void setPosition(glm::vec3 position) = 0;
virtual glm::quat getRotation() = 0;
virtual void setRotation(glm::quat rotation) = 0;
virtual glm::vec3 getLinearVelocity() = 0;
virtual void setLinearVelocity(glm::vec3 linearVelocity) = 0;
virtual glm::vec3 getAngularVelocity() = 0;
virtual void setAngularVelocity(glm::vec3 angularVelocity) = 0;
// these look in the arguments map for a named argument. if it's not found or isn't well formed,
// ok will be set to false (note that it's never set to true -- set it to true before calling these).
// if required is true, failure to extract an argument will cause a warning to be printed.
static glm::vec3 extractVec3Argument (QString objectName, QVariantMap arguments,
QString argumentName, bool& ok, bool required = true);
static glm::quat extractQuatArgument (QString objectName, QVariantMap arguments,
QString argumentName, bool& ok, bool required = true);
static float extractFloatArgument(QString objectName, QVariantMap arguments,
QString argumentName, bool& ok, bool required = true);
static QString extractStringArgument(QString objectName, QVariantMap arguments,
QString argumentName, bool& ok, bool required = true);
static glm::vec3 extractVec3Argument(QString objectName, QVariantMap arguments, QString argumentName, bool& ok);
static float extractFloatArgument(QString objectName, QVariantMap arguments, QString argumentName, bool& ok);
};
typedef std::shared_ptr<EntityActionInterface> EntityActionPointer;
#endif // hifi_EntityActionInterface_h

View file

@ -28,13 +28,16 @@
#include "EntityItemID.h"
#include "EntityItemProperties.h"
#include "EntityItemPropertiesDefaults.h"
#include "EntityActionInterface.h"
#include "EntityTypes.h"
class EntitySimulation;
class EntityTreeElement;
class EntityTreeElementExtraEncodeData;
class EntityActionInterface;
typedef std::shared_ptr<EntityActionInterface> EntityActionPointer;
namespace render {
class Scene;
class PendingChanges;

View file

@ -17,6 +17,8 @@
#include "ZoneEntityItem.h"
#include "EntitiesLogging.h"
#include "EntitySimulation.h"
#include "EntityActionInterface.h"
#include "EntityActionFactoryInterface.h"
#include "EntityScriptingInterface.h"
@ -491,12 +493,19 @@ QUuid EntityScriptingInterface::addAction(const QString& actionTypeString,
const QUuid& entityID,
const QVariantMap& arguments) {
QUuid actionID = QUuid::createUuid();
auto actionFactory = DependencyManager::get<EntityActionFactoryInterface>();
bool success = actionWorker(entityID, [&](EntitySimulation* simulation, EntityItemPointer entity) {
// create this action even if the entity doesn't have physics info. it will often be the
// case that a script adds an action immediately after an object is created, and the physicsInfo
// is computed asynchronously.
// if (!entity->getPhysicsInfo()) {
// return false;
// }
EntityActionType actionType = EntityActionInterface::actionTypeFromString(actionTypeString);
if (actionType == ACTION_TYPE_NONE) {
return false;
}
if (simulation->actionFactory(actionType, actionID, entity, arguments)) {
if (actionFactory->factory(simulation, actionType, actionID, entity, arguments)) {
return true;
}
return false;

View file

@ -18,6 +18,7 @@
#include <PerfStat.h>
#include "EntityActionInterface.h"
#include "EntityItem.h"
#include "EntityTree.h"
@ -56,10 +57,6 @@ public:
friend class EntityTree;
virtual EntityActionPointer actionFactory(EntityActionType type,
QUuid id,
EntityItemPointer ownerEntity,
QVariantMap arguments) { return nullptr; }
virtual void addAction(EntityActionPointer action) { _actionsToAdd += action; }
virtual void removeAction(const QUuid actionID) { _actionsToRemove += actionID; }
virtual void removeActions(QList<QUuid> actionIDsToRemove) { _actionsToRemove += actionIDsToRemove; }

View file

@ -218,7 +218,7 @@ void AddressManager::goToAddressFromObject(const QVariantMap& dataObject, const
const QString DOMAIN_NETWORK_PORT_KEY = "network_port";
const QString DOMAIN_ICE_SERVER_ADDRESS_KEY = "ice_server_address";
DependencyManager::get<NodeList>()->flagTimeForConnectionStep(LimitedNodeList::ConnectionStep::HandleAddress);
DependencyManager::get<NodeList>()->flagTimeForConnectionStep(LimitedNodeList::ConnectionStep::HandleAddress);
const QString DOMAIN_ID_KEY = "id";
QString domainIDString = domainObject[DOMAIN_ID_KEY].toString();

View file

@ -24,7 +24,14 @@ ObjectAction::~ObjectAction() {
}
void ObjectAction::updateAction(btCollisionWorld* collisionWorld, btScalar deltaTimeStep) {
qDebug() << "ObjectAction::updateAction called";
if (!_active) {
return;
}
if (!_ownerEntity) {
qDebug() << "ObjectActionPullToPoint::updateAction no owner entity";
return;
}
updateActionWorker(deltaTimeStep);
}
void ObjectAction::debugDraw(btIDebugDraw* debugDrawer) {
@ -33,3 +40,87 @@ void ObjectAction::debugDraw(btIDebugDraw* debugDrawer) {
void ObjectAction::removeFromSimulation(EntitySimulation* simulation) const {
simulation->removeAction(_id);
}
btRigidBody* ObjectAction::getRigidBody() {
if (!_ownerEntity) {
return nullptr;
}
void* physicsInfo = _ownerEntity->getPhysicsInfo();
if (!physicsInfo) {
return nullptr;
}
ObjectMotionState* motionState = static_cast<ObjectMotionState*>(physicsInfo);
return motionState->getRigidBody();
}
glm::vec3 ObjectAction::getPosition() {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return glm::vec3(0.0f);
}
return bulletToGLM(rigidBody->getCenterOfMassPosition());
}
void ObjectAction::setPosition(glm::vec3 position) {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return;
}
// XXX
// void setWorldTransform (const btTransform &worldTrans)
assert(false);
rigidBody->activate();
}
glm::quat ObjectAction::getRotation() {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return glm::quat(0.0f, 0.0f, 0.0f, 1.0f);
}
return bulletToGLM(rigidBody->getOrientation());
}
void ObjectAction::setRotation(glm::quat rotation) {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return;
}
// XXX
// void setWorldTransform (const btTransform &worldTrans)
assert(false);
rigidBody->activate();
}
glm::vec3 ObjectAction::getLinearVelocity() {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return glm::vec3(0.0f);
}
return bulletToGLM(rigidBody->getLinearVelocity());
}
void ObjectAction::setLinearVelocity(glm::vec3 linearVelocity) {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return;
}
rigidBody->setLinearVelocity(glmToBullet(glm::vec3(0.0f)));
rigidBody->activate();
}
glm::vec3 ObjectAction::getAngularVelocity() {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return glm::vec3(0.0f);
}
return bulletToGLM(rigidBody->getAngularVelocity());
}
void ObjectAction::setAngularVelocity(glm::vec3 angularVelocity) {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return;
}
rigidBody->setAngularVelocity(glmToBullet(angularVelocity));
rigidBody->activate();
}

View file

@ -13,12 +13,17 @@
#ifndef hifi_ObjectAction_h
#define hifi_ObjectAction_h
#include <btBulletDynamicsCommon.h>
#include <QUuid>
#include <btBulletDynamicsCommon.h>
#include <EntityItem.h>
#include "ObjectMotionState.h"
#include "BulletUtil.h"
#include "EntityActionInterface.h"
class ObjectAction : public btActionInterface, public EntityActionInterface {
public:
ObjectAction(QUuid id, EntityItemPointer ownerEntity);
@ -30,6 +35,9 @@ public:
virtual void setOwnerEntity(const EntityItemPointer ownerEntity) { _ownerEntity = ownerEntity; }
virtual bool updateArguments(QVariantMap arguments) { return false; }
// this is called from updateAction and should be overridden by subclasses
virtual void updateActionWorker(float deltaTimeStep) {}
// these are from btActionInterface
virtual void updateAction(btCollisionWorld* collisionWorld, btScalar deltaTimeStep);
virtual void debugDraw(btIDebugDraw* debugDrawer);
@ -39,6 +47,16 @@ private:
QReadWriteLock _lock;
protected:
virtual btRigidBody* getRigidBody();
virtual glm::vec3 getPosition();
virtual void setPosition(glm::vec3 position);
virtual glm::quat getRotation();
virtual void setRotation(glm::quat rotation);
virtual glm::vec3 getLinearVelocity();
virtual void setLinearVelocity(glm::vec3 linearVelocity);
virtual glm::vec3 getAngularVelocity();
virtual void setAngularVelocity(glm::vec3 angularVelocity);
bool tryLockForRead() { return _lock.tryLockForRead(); }
void lockForWrite() { _lock.lockForWrite(); }
void unlock() { _lock.unlock(); }

View file

@ -9,9 +9,6 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "ObjectMotionState.h"
#include "BulletUtil.h"
#include "ObjectActionPullToPoint.h"
ObjectActionPullToPoint::ObjectActionPullToPoint(QUuid id, EntityItemPointer ownerEntity) :
@ -27,28 +24,34 @@ ObjectActionPullToPoint::~ObjectActionPullToPoint() {
#endif
}
void ObjectActionPullToPoint::updateAction(btCollisionWorld* collisionWorld, btScalar deltaTimeStep) {
void ObjectActionPullToPoint::updateActionWorker(btScalar deltaTimeStep) {
if (!tryLockForRead()) {
// don't risk hanging the thread running the physics simulation
return;
}
void* physicsInfo = _ownerEntity->getPhysicsInfo();
if (_active && physicsInfo) {
ObjectMotionState* motionState = static_cast<ObjectMotionState*>(physicsInfo);
btRigidBody* rigidBody = motionState->getRigidBody();
if (rigidBody) {
glm::vec3 offset = _target - bulletToGLM(rigidBody->getCenterOfMassPosition());
float offsetLength = glm::length(offset);
if (offsetLength > IGNORE_POSITION_DELTA) {
glm::vec3 newVelocity = glm::normalize(offset) * _speed;
rigidBody->setLinearVelocity(glmToBullet(newVelocity));
rigidBody->activate();
} else {
rigidBody->setLinearVelocity(glmToBullet(glm::vec3()));
}
}
void* physicsInfo = _ownerEntity->getPhysicsInfo();
if (!physicsInfo) {
unlock();
return;
}
ObjectMotionState* motionState = static_cast<ObjectMotionState*>(physicsInfo);
btRigidBody* rigidBody = motionState->getRigidBody();
if (!rigidBody) {
unlock();
return;
}
glm::vec3 offset = _target - bulletToGLM(rigidBody->getCenterOfMassPosition());
float offsetLength = glm::length(offset);
if (offsetLength > IGNORE_POSITION_DELTA) {
glm::vec3 newVelocity = glm::normalize(offset) * _speed;
rigidBody->setLinearVelocity(glmToBullet(newVelocity));
rigidBody->activate();
} else {
rigidBody->setLinearVelocity(glmToBullet(glm::vec3()));
}
unlock();
}

View file

@ -23,7 +23,7 @@ public:
virtual ~ObjectActionPullToPoint();
virtual bool updateArguments(QVariantMap arguments);
virtual void updateAction(btCollisionWorld* collisionWorld, btScalar deltaTimeStep);
virtual void updateActionWorker(float deltaTimeStep);
private:

View file

@ -0,0 +1,144 @@
//
// ObjectActionSpring.cpp
// libraries/physics/src
//
// Created by Seth Alves 2015-6-5
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "ObjectActionSpring.h"
ObjectActionSpring::ObjectActionSpring(QUuid id, EntityItemPointer ownerEntity) :
ObjectAction(id, ownerEntity) {
#if WANT_DEBUG
qDebug() << "ObjectActionSpring::ObjectActionSpring";
#endif
}
ObjectActionSpring::~ObjectActionSpring() {
#if WANT_DEBUG
qDebug() << "ObjectActionSpring::~ObjectActionSpring";
#endif
}
void ObjectActionSpring::updateActionWorker(btScalar deltaTimeStep) {
if (!tryLockForRead()) {
// don't risk hanging the thread running the physics simulation
qDebug() << "ObjectActionSpring::updateActionWorker lock failed";
return;
}
void* physicsInfo = _ownerEntity->getPhysicsInfo();
if (!physicsInfo) {
unlock();
return;
}
ObjectMotionState* motionState = static_cast<ObjectMotionState*>(physicsInfo);
btRigidBody* rigidBody = motionState->getRigidBody();
if (!rigidBody) {
unlock();
qDebug() << "ObjectActionSpring::updateActionWorker no rigidBody";
return;
}
// handle the linear part
if (_positionalTargetSet) {
glm::vec3 offset = _positionalTarget - bulletToGLM(rigidBody->getCenterOfMassPosition());
float offsetLength = glm::length(offset);
float speed = offsetLength / _linearTimeScale;
if (offsetLength > IGNORE_POSITION_DELTA) {
glm::vec3 newVelocity = glm::normalize(offset) * speed;
rigidBody->setLinearVelocity(glmToBullet(newVelocity));
rigidBody->activate();
} else {
rigidBody->setLinearVelocity(glmToBullet(glm::vec3(0.0f)));
}
}
// handle rotation
if (_rotationalTargetSet) {
glm::quat bodyRotation = bulletToGLM(rigidBody->getOrientation());
// if qZero and qOne are too close to each other, we can get NaN for angle.
auto alignmentDot = glm::dot(bodyRotation, _rotationalTarget);
const float almostOne = 0.99999f;
if (glm::abs(alignmentDot) < almostOne) {
glm::quat target = _rotationalTarget;
if (alignmentDot < 0) {
target = -target;
}
glm::quat qZeroInverse = glm::inverse(bodyRotation);
glm::quat deltaQ = target * qZeroInverse;
glm::vec3 axis = glm::axis(deltaQ);
float angle = glm::angle(deltaQ);
assert(!isNaN(angle));
glm::vec3 newAngularVelocity = (angle / _angularTimeScale) * glm::normalize(axis);
rigidBody->setAngularVelocity(glmToBullet(newAngularVelocity));
rigidBody->activate();
} else {
rigidBody->setAngularVelocity(glmToBullet(glm::vec3(0.0f)));
}
}
unlock();
}
bool ObjectActionSpring::updateArguments(QVariantMap arguments) {
// at least one target (position or rotation) is required; the time-scale arguments are optional
bool ptOk = true;
glm::vec3 positionalTarget =
EntityActionInterface::extractVec3Argument("spring action", arguments, "targetPosition", ptOk, false);
bool pscOk = true;
float linearTimeScale =
EntityActionInterface::extractFloatArgument("spring action", arguments, "linearTimeScale", pscOk, false);
if (ptOk && pscOk && linearTimeScale <= 0.0f) {
qDebug() << "spring action -- linearTimeScale must be greater than zero.";
return false;
}
bool rtOk = true;
glm::quat rotationalTarget =
EntityActionInterface::extractQuatArgument("spring action", arguments, "targetRotation", rtOk, false);
bool rscOk = true;
float angularTimeScale =
EntityActionInterface::extractFloatArgument("spring action", arguments, "angularTimeScale", rscOk, false);
if (!ptOk && !rtOk) {
qDebug() << "spring action requires either targetPosition or targetRotation argument";
return false;
}
lockForWrite();
_positionalTargetSet = _rotationalTargetSet = false;
if (ptOk) {
_positionalTarget = positionalTarget;
_positionalTargetSet = true;
if (pscOk) {
_linearTimeScale = linearTimeScale;
} else {
_linearTimeScale = 0.1f;
}
}
if (rtOk) {
_rotationalTarget = rotationalTarget;
_rotationalTargetSet = true;
if (rscOk) {
_angularTimeScale = angularTimeScale;
} else {
_angularTimeScale = 0.1f;
}
}
_active = true;
unlock();
return true;
}
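For reference, a hedged sketch of the kind of arguments map a caller might hand to this action; the key names match those read by updateArguments above, while the values are invented:
// Illustration only -- not part of this change.
QVariantMap exampleSpringArguments() {
    QVariantMap position;
    position["x"] = 0.0f;
    position["y"] = 1.0f;
    position["z"] = 0.0f;
    QVariantMap arguments;
    arguments["targetPosition"] = position;
    arguments["linearTimeScale"] = 0.1f; // must be > 0 when a position target is given
    return arguments;
}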

View file

@ -0,0 +1,39 @@
//
// ObjectActionSpring.h
// libraries/physics/src
//
// Created by Seth Alves 2015-6-5
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_ObjectActionSpring_h
#define hifi_ObjectActionSpring_h
#include <QUuid>
#include <EntityItem.h>
#include "ObjectAction.h"
class ObjectActionSpring : public ObjectAction {
public:
ObjectActionSpring(QUuid id, EntityItemPointer ownerEntity);
virtual ~ObjectActionSpring();
virtual bool updateArguments(QVariantMap arguments);
virtual void updateActionWorker(float deltaTimeStep);
protected:
glm::vec3 _positionalTarget;
float _linearTimeScale;
bool _positionalTargetSet;
glm::quat _rotationalTarget;
float _angularTimeScale;
bool _rotationalTargetSet;
};
#endif // hifi_ObjectActionSpring_h

View file

@ -9,10 +9,11 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "PhysicsHelpers.h"
#include "PhysicsLogging.h"
#include "ShapeManager.h"
#include "ObjectActionPullToPoint.h"
#include "PhysicalEntitySimulation.h"
@ -234,29 +235,6 @@ void PhysicalEntitySimulation::handleCollisionEvents(CollisionEvents& collisionE
}
}
EntityActionPointer PhysicalEntitySimulation::actionFactory(EntityActionType type,
QUuid id,
EntityItemPointer ownerEntity,
QVariantMap arguments) {
EntityActionPointer action = nullptr;
switch (type) {
case ACTION_TYPE_NONE:
return nullptr;
case ACTION_TYPE_PULL_TO_POINT:
action = (EntityActionPointer) new ObjectActionPullToPoint(id, ownerEntity);
break;
}
bool ok = action->updateArguments(arguments);
if (ok) {
ownerEntity->addAction(this, action);
return action;
}
action = nullptr;
return action;
}
void PhysicalEntitySimulation::applyActionChanges() {
if (_physicsEngine) {
foreach (EntityActionPointer actionToAdd, _actionsToAdd) {

View file

@ -32,10 +32,6 @@ public:
void init(EntityTree* tree, PhysicsEngine* engine, EntityEditPacketSender* packetSender);
virtual EntityActionPointer actionFactory(EntityActionType type,
QUuid id,
EntityItemPointer ownerEntity,
QVariantMap arguments);
virtual void applyActionChanges();
protected: // only called by EntitySimulation

View file

@ -1179,6 +1179,21 @@ void GeometryCache::renderQuad(gpu::Batch& batch, const glm::vec2& minCorner, co
batch.draw(gpu::QUADS, 4, 0);
}
void GeometryCache::renderUnitQuad(const glm::vec4& color, int id) {
gpu::Batch batch;
renderUnitQuad(batch, color, id);
gpu::GLBackend::renderBatch(batch);
}
void GeometryCache::renderUnitQuad(gpu::Batch& batch, const glm::vec4& color, int id) {
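// corners span the full [-1, 1] range, so with an identity transform the quad covers the viewport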
static const glm::vec2 topLeft(-1, 1);
static const glm::vec2 bottomRight(1, -1);
static const glm::vec2 texCoordTopLeft(0.0f, 1.0f);
static const glm::vec2 texCoordBottomRight(1.0f, 0.0f);
renderQuad(batch, topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, color, id);
}
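A call-site sketch (assumed, not part of this diff): the overload without a batch builds a temporary gpu::Batch and submits it through gpu::GLBackend::renderBatch, so a caller only needs the cache instance.
// assumes GeometryCache is obtained through DependencyManager, as elsewhere in this codebase
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->renderUnitQuad(glm::vec4(1.0f, 1.0f, 1.0f, 0.5f)); // full-viewport quad, translucent white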
void GeometryCache::renderQuad(const glm::vec2& minCorner, const glm::vec2& maxCorner,
const glm::vec2& texCoordMinCorner, const glm::vec2& texCoordMaxCorner,
const glm::vec4& color, int id) {

View file

@ -155,6 +155,9 @@ public:
void renderBevelCornersRect(int x, int y, int width, int height, int bevelDistance, const glm::vec4& color, int id = UNKNOWN_ID);
void renderBevelCornersRect(gpu::Batch& batch, int x, int y, int width, int height, int bevelDistance, const glm::vec4& color, int id = UNKNOWN_ID);
void renderUnitQuad(const glm::vec4& color = glm::vec4(1), int id = UNKNOWN_ID);
void renderUnitQuad(gpu::Batch& batch, const glm::vec4& color = glm::vec4(1), int id = UNKNOWN_ID);
void renderQuad(int x, int y, int width, int height, const glm::vec4& color, int id = UNKNOWN_ID)
{ renderQuad(glm::vec2(x,y), glm::vec2(x + width, y + height), color, id); }
void renderQuad(gpu::Batch& batch, int x, int y, int width, int height, const glm::vec4& color, int id = UNKNOWN_ID)

View file

@ -0,0 +1,24 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// standardDrawTexture.frag
// fragment shader
//
// Created by Sam Gateau on 6/10/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the texture
uniform sampler2D colorMap;
varying vec2 varTexcoord;
varying vec4 varColor;
void main(void) {
vec4 color = texture2D(colorMap, varTexcoord);
gl_FragColor = color * varColor;
}

View file

@ -0,0 +1,33 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// standardTransformPNTC.slv
// vertex shader
//
// Created by Sam Gateau on 6/10/2015.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
varying vec3 varNormal;
varying vec2 varTexcoord;
varying vec4 varColor;
void main(void) {
varTexcoord = gl_MultiTexCoord0.xy;
varColor = gl_Color;
// standard transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
<$transformModelToClipPos(cam, obj, gl_Vertex, gl_Position)$>
<$transformModelToEyeDir(cam, obj, gl_Normal, varNormal)$>
varNormal = normalize(varNormal);
}

View file

@ -251,7 +251,7 @@ public:
void render(RenderArgs* args) { _payload->render(args); }
// Shape Type Interface
const model::MaterialKey& getMaterialKey() const { return _payload->getMaterialKey(); }
const model::MaterialKey getMaterialKey() const { return _payload->getMaterialKey(); }
protected:
PayloadPointer _payload;

View file

@ -52,6 +52,8 @@ class AbstractControllerScriptingInterface : public QObject {
Q_OBJECT
public slots:
virtual void registerControllerTypes(QScriptEngine* engine) = 0;
virtual bool isPrimaryButtonPressed() const = 0;
virtual glm::vec2 getPrimaryJoystickPosition() const = 0;

View file

@ -39,6 +39,7 @@ signals:
void mutedByMixer();
void environmentMuted();
void receivedFirstPacket();
void disconnected();
private:
AudioScriptingInterface();

View file

@ -317,6 +317,7 @@ void ScriptEngine::init() {
registerAnimationTypes(this);
registerAvatarTypes(this);
registerAudioMetaTypes(this);
_controllerScriptingInterface->registerControllerTypes(this);
qScriptRegisterMetaType(this, EntityItemPropertiesToScriptValue, EntityItemPropertiesFromScriptValueHonorReadOnly);
qScriptRegisterMetaType(this, EntityItemIDtoScriptValue, EntityItemIDfromScriptValue);

View file

@ -0,0 +1,92 @@
//
// Created by Bradley Austin Davis on 2015/06/08
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "CursorManager.h"
#include <QCursor>
#include <QWidget>
#include <QUrl>
#include <PathUtils.h>
namespace Cursor {
void Instance::setIcon(uint16_t icon) {
_icon = icon;
}
uint16_t Instance::getIcon() const {
return _icon;
}
class MouseInstance : public Instance {
Source getType() const {
return Source::MOUSE;
}
ivec2 getScreenPosition() const {
return toGlm(QCursor::pos());
}
ivec2 getWindowPosition(QWidget* widget) const {
return toGlm(widget->mapFromGlobal(QCursor::pos()));
}
vec2 getRelativePosition(QWidget* widget) const {
vec2 pos = getWindowPosition(widget);
pos /= vec2(toGlm(widget->size()));
return pos;
}
};
static QMap<uint16_t, QString> ICONS;
static uint16_t _customIconId = Icon::USER_BASE;
Manager::Manager() {
ICONS[Icon::DEFAULT] = PathUtils::resourcesPath() + "images/arrow.png";
ICONS[Icon::LINK] = PathUtils::resourcesPath() + "images/reticleLink.png";
}
Manager& Manager::instance() {
static Manager instance;
return instance;
}
uint8_t Manager::getCount() {
return 1;
}
Instance* Manager::getCursor(uint8_t index) {
Q_ASSERT(index < getCount());
static MouseInstance mouseInstance;
if (index == 0) {
return &mouseInstance;
}
return nullptr;
}
uint16_t Manager::registerIcon(const QString& path) {
ICONS[_customIconId] = path;
return _customIconId++;
}
const QString& Manager::getIconImage(uint16_t icon) {
Q_ASSERT(ICONS.count(icon));
return ICONS[icon];
}
float Manager::getScale() {
return _scale;
}
void Manager::setScale(float scale) {
_scale = scale;
}
}
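A short usage sketch (assumed, not part of this commit) for the new cursor manager singleton; the icon image path is hypothetical.
auto& cursorManager = Cursor::Manager::instance();
uint16_t customIcon = cursorManager.registerIcon(PathUtils::resourcesPath() + "images/myCursor.png"); // hypothetical image
const QString& iconPath = cursorManager.getIconImage(customIcon); // returns the path registered above
cursorManager.setScale(2.0f); // stored scale, applied wherever cursor rendering consumes it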

View file

@ -0,0 +1,61 @@
//
// Created by Bradley Austin Davis on 2015/06/08
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include <stdint.h>
#include <GLMHelpers.h>
namespace Cursor {
enum class Source {
MOUSE,
LEFT_HAND,
RIGHT_HAND,
UNKNOWN,
};
enum Icon {
DEFAULT,
LINK,
GRAB,
// Add new system cursors here
// User cursors will have ids over this value
USER_BASE = 0xFF,
};
class Instance {
public:
virtual Source getType() const = 0;
virtual ivec2 getWindowPosition(QWidget* widget) const = 0;
virtual vec2 getRelativePosition(QWidget* widget) const = 0;
virtual ivec2 getScreenPosition() const = 0;
virtual void setIcon(uint16_t icon);
virtual uint16_t getIcon() const;
private:
uint16_t _icon { Icon::DEFAULT }; // default-initialize so a new cursor starts on the standard arrow icon
};
class Manager {
Manager();
Manager(const Manager& other) = delete;
public:
static Manager& instance();
uint8_t getCount();
float getScale();
void setScale(float scale);
Instance* getCursor(uint8_t index = 0);
uint16_t registerIcon(const QString& path);
const QString& getIconImage(uint16_t icon);
private:
float _scale{ 1.0f };
};
}