Merge branch 'master' of https://github.com/highfidelity/hifi into metavoxels

Andrzej Kapolka 2014-06-13 12:01:08 -07:00
commit 8480f08816
16 changed files with 956 additions and 844 deletions


@ -15,6 +15,7 @@
#include <AccountManager.h>
#include <Assignment.h>
#include <HifiConfigVariantMap.h>
#include <Logging.h>
#include <NodeList.h>
#include <PacketHeaders.h>
@ -41,73 +42,65 @@ AssignmentClient::AssignmentClient(int &argc, char **argv) :
setOrganizationDomain("highfidelity.io");
setApplicationName("assignment-client");
QSettings::setDefaultFormat(QSettings::IniFormat);
QStringList argumentList = arguments();
// registering the meta type is required for queued invokeMethod calls on Assignment subclasses
// set the logging target to ASSIGNMENT_CLIENT_TARGET_NAME
Logging::setTargetName(ASSIGNMENT_CLIENT_TARGET_NAME);
const QString ASSIGNMENT_TYPE_OVVERIDE_OPTION = "-t";
int argumentIndex = argumentList.indexOf(ASSIGNMENT_TYPE_OVVERIDE_OPTION);
const QVariantMap argumentVariantMap = HifiConfigVariantMap::mergeCLParametersWithJSONConfig(arguments());
const QString ASSIGNMENT_TYPE_OVERRIDE_OPTION = "t";
const QString ASSIGNMENT_POOL_OPTION = "pool";
const QString ASSIGNMENT_WALLET_DESTINATION_ID_OPTION = "wallet";
const QString CUSTOM_ASSIGNMENT_SERVER_HOSTNAME_OPTION = "a";
Assignment::Type requestAssignmentType = Assignment::AllTypes;
if (argumentIndex != -1) {
requestAssignmentType = (Assignment::Type) argumentList[argumentIndex + 1].toInt();
// check for an assignment type passed on the command line or in the config
if (argumentVariantMap.contains(ASSIGNMENT_TYPE_OVERRIDE_OPTION)) {
requestAssignmentType = (Assignment::Type) argumentVariantMap.value(ASSIGNMENT_TYPE_OVERRIDE_OPTION).toInt();
}
const QString ASSIGNMENT_POOL_OPTION = "--pool";
argumentIndex = argumentList.indexOf(ASSIGNMENT_POOL_OPTION);
QString assignmentPool;
if (argumentIndex != -1) {
assignmentPool = argumentList[argumentIndex + 1];
// check for an assignment pool passed on the command line or in the config
if (argumentVariantMap.contains(ASSIGNMENT_POOL_OPTION)) {
assignmentPool = argumentVariantMap.value(ASSIGNMENT_POOL_OPTION).toString();
}
// setup our _requestAssignment member variable from the passed arguments
_requestAssignment = Assignment(Assignment::RequestCommand, requestAssignmentType, assignmentPool);
// check if we were passed a wallet UUID on the command line
// check for a wallet UUID on the command line or in the config
// this would represent where the user running AC wants funds sent to
const QString ASSIGNMENT_WALLET_DESTINATION_ID_OPTION = "--wallet";
if ((argumentIndex = argumentList.indexOf(ASSIGNMENT_WALLET_DESTINATION_ID_OPTION)) != -1) {
QUuid walletUUID = QString(argumentList[argumentIndex + 1]);
if (argumentVariantMap.contains(ASSIGNMENT_WALLET_DESTINATION_ID_OPTION)) {
QUuid walletUUID = argumentVariantMap.value(ASSIGNMENT_WALLET_DESTINATION_ID_OPTION).toString();
qDebug() << "The destination wallet UUID for credits is" << uuidStringWithoutCurlyBraces(walletUUID);
_requestAssignment.setWalletUUID(walletUUID);
}
// create a NodeList as an unassigned client
NodeList* nodeList = NodeList::createInstance(NodeType::Unassigned);
// check for an overridden assignment server hostname
const QString CUSTOM_ASSIGNMENT_SERVER_HOSTNAME_OPTION = "-a";
argumentIndex = argumentList.indexOf(CUSTOM_ASSIGNMENT_SERVER_HOSTNAME_OPTION);
if (argumentIndex != -1) {
_assignmentServerHostname = argumentList[argumentIndex + 1];
if (argumentVariantMap.contains(CUSTOM_ASSIGNMENT_SERVER_HOSTNAME_OPTION)) {
_assignmentServerHostname = argumentVariantMap.value(CUSTOM_ASSIGNMENT_SERVER_HOSTNAME_OPTION).toString();
// set the custom assignment socket on our NodeList
HifiSockAddr customAssignmentSocket = HifiSockAddr(_assignmentServerHostname, DEFAULT_DOMAIN_SERVER_PORT);
nodeList->setAssignmentServerSocket(customAssignmentSocket);
}
// call a timer function every ASSIGNMENT_REQUEST_INTERVAL_MSECS to ask for assignment, if required
qDebug() << "Waiting for assignment -" << _requestAssignment;
QTimer* timer = new QTimer(this);
connect(timer, SIGNAL(timeout()), SLOT(sendAssignmentRequest()));
timer->start(ASSIGNMENT_REQUEST_INTERVAL_MSECS);
// connect our readPendingDatagrams method to the readyRead() signal of the socket
connect(&nodeList->getNodeSocket(), &QUdpSocket::readyRead, this, &AssignmentClient::readPendingDatagrams);
// connections to AccountManager for authentication
connect(&AccountManager::getInstance(), &AccountManager::authRequired,
this, &AssignmentClient::handleAuthenticationRequest);
@ -121,49 +114,49 @@ void AssignmentClient::sendAssignmentRequest() {
void AssignmentClient::readPendingDatagrams() {
NodeList* nodeList = NodeList::getInstance();
QByteArray receivedPacket;
HifiSockAddr senderSockAddr;
while (nodeList->getNodeSocket().hasPendingDatagrams()) {
receivedPacket.resize(nodeList->getNodeSocket().pendingDatagramSize());
nodeList->getNodeSocket().readDatagram(receivedPacket.data(), receivedPacket.size(),
senderSockAddr.getAddressPointer(), senderSockAddr.getPortPointer());
if (nodeList->packetVersionAndHashMatch(receivedPacket)) {
if (packetTypeForPacket(receivedPacket) == PacketTypeCreateAssignment) {
// construct the deployed assignment from the packet data
_currentAssignment = SharedAssignmentPointer(AssignmentFactory::unpackAssignment(receivedPacket));
if (_currentAssignment) {
qDebug() << "Received an assignment -" << *_currentAssignment;
// switch our DomainHandler hostname and port to whoever sent us the assignment
nodeList->getDomainHandler().setSockAddr(senderSockAddr, _assignmentServerHostname);
nodeList->getDomainHandler().setAssignmentUUID(_currentAssignment->getUUID());
qDebug() << "Destination IP for assignment is" << nodeList->getDomainHandler().getIP().toString();
// start the deployed assignment
AssignmentThread* workerThread = new AssignmentThread(_currentAssignment, this);
connect(workerThread, &QThread::started, _currentAssignment.data(), &ThreadedAssignment::run);
connect(_currentAssignment.data(), &ThreadedAssignment::finished, workerThread, &QThread::quit);
connect(_currentAssignment.data(), &ThreadedAssignment::finished,
this, &AssignmentClient::assignmentCompleted);
connect(workerThread, &QThread::finished, workerThread, &QThread::deleteLater);
_currentAssignment->moveToThread(workerThread);
// move the NodeList to the thread used for the _currentAssignment
nodeList->moveToThread(workerThread);
// let the assignment handle the incoming datagrams for its duration
disconnect(&nodeList->getNodeSocket(), 0, this, 0);
connect(&nodeList->getNodeSocket(), &QUdpSocket::readyRead, _currentAssignment.data(),
&ThreadedAssignment::readPendingDatagrams);
// Starts an event loop, and emits workerThread->started()
workerThread->start();
} else {
@ -180,15 +173,15 @@ void AssignmentClient::readPendingDatagrams() {
void AssignmentClient::handleAuthenticationRequest() {
const QString DATA_SERVER_USERNAME_ENV = "HIFI_AC_USERNAME";
const QString DATA_SERVER_PASSWORD_ENV = "HIFI_AC_PASSWORD";
// this node will be using an authentication server, let's make sure we have a username/password
QProcessEnvironment sysEnvironment = QProcessEnvironment::systemEnvironment();
QString username = sysEnvironment.value(DATA_SERVER_USERNAME_ENV);
QString password = sysEnvironment.value(DATA_SERVER_PASSWORD_ENV);
AccountManager& accountManager = AccountManager::getInstance();
if (!username.isEmpty() && !password.isEmpty()) {
// ask the account manager to log us in from the env variables
accountManager.requestAccessToken(username, password);
@ -196,7 +189,7 @@ void AssignmentClient::handleAuthenticationRequest() {
qDebug() << "Authentication was requested against" << qPrintable(accountManager.getAuthURL().toString())
<< "but both or one of" << qPrintable(DATA_SERVER_USERNAME_ENV)
<< "/" << qPrintable(DATA_SERVER_PASSWORD_ENV) << "are not set. Unable to authenticate.";
return;
}
}
@ -204,15 +197,15 @@ void AssignmentClient::handleAuthenticationRequest() {
void AssignmentClient::assignmentCompleted() {
// reset the logging target to ASSIGNMENT_CLIENT_TARGET_NAME
Logging::setTargetName(ASSIGNMENT_CLIENT_TARGET_NAME);
qDebug("Assignment finished or never started - waiting for new assignment.");
NodeList* nodeList = NodeList::getInstance();
// have us handle incoming NodeList datagrams again
disconnect(&nodeList->getNodeSocket(), 0, _currentAssignment.data(), 0);
connect(&nodeList->getNodeSocket(), &QUdpSocket::readyRead, this, &AssignmentClient::readPendingDatagrams);
// clear our current assignment shared pointer now that we're done with it
// if the assignment thread is still around it has its own shared pointer to the assignment
_currentAssignment.clear();
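
The reworked startup code above looks every option up in the QVariantMap returned by HifiConfigVariantMap::mergeCLParametersWithJSONConfig, so command-line flags and JSON config keys are read through the same contains()/value() pattern. A minimal sketch of that pattern (option names as above, defaults hypothetical):

#include <QDebug>
#include <QString>
#include <QUuid>
#include <QVariant>

// Sketch only: the contains()/value() lookup pattern AssignmentClient now uses.
// 'options' would be the map returned by HifiConfigVariantMap::mergeCLParametersWithJSONConfig(arguments()).
void readOptions(const QVariantMap& options) {
    int assignmentType = options.contains("t") ? options.value("t").toInt() : -1;   // -1 standing in for Assignment::AllTypes
    QString pool = options.value("pool").toString();                                // empty QString when the key is absent
    QUuid walletUUID = QUuid(options.value("wallet").toString());                   // null QUuid when the key is absent
    qDebug() << assignmentType << pool << walletUUID;
}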

File diff suppressed because it is too large.


@ -19,8 +19,8 @@
var damping = 0.9;
var position = { x: MyAvatar.position.x, y: MyAvatar.position.y, z: MyAvatar.position.z };
var joysticksCaptured = false;
var THRUST_CONTROLLER = 1;
var VIEW_CONTROLLER = 0;
var THRUST_CONTROLLER = 0;
var VIEW_CONTROLLER = 1;
var INITIAL_THRUST_MULTPLIER = 1.0;
var THRUST_INCREASE_RATE = 1.05;
var MAX_THRUST_MULTIPLIER = 75.0;


@ -150,6 +150,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_mouseX(0),
_mouseY(0),
_lastMouseMove(usecTimestampNow()),
_lastMouseMoveType(QEvent::MouseMove),
_mouseHidden(false),
_seenMouseMove(false),
_touchAvgX(0.0f),
@ -1096,6 +1097,9 @@ void Application::mouseMoveEvent(QMouseEvent* event) {
showMouse = false;
}
// Used by application overlay to determine how to draw cursor(s)
_lastMouseMoveType = event->type();
_controllerScriptingInterface.emitMouseMoveEvent(event); // send events to any registered scripts
// if one of our scripts have asked to capture this event, then stop processing it
@ -1375,6 +1379,9 @@ void Application::setEnable3DTVMode(bool enable3DTVMode) {
resizeGL(_glWidget->width(),_glWidget->height());
}
void Application::setEnableVRMode(bool enableVRMode) {
resizeGL(_glWidget->width(), _glWidget->height());
}
void Application::setRenderVoxels(bool voxelRender) {
_voxelEditSender.setShouldSend(voxelRender);


@ -206,6 +206,7 @@ public:
const glm::vec3& getMouseRayDirection() const { return _mouseRayDirection; }
int getMouseX() const { return _mouseX; }
int getMouseY() const { return _mouseY; }
unsigned int getLastMouseMoveType() const { return _lastMouseMoveType; }
Faceplus* getFaceplus() { return &_faceplus; }
Faceshift* getFaceshift() { return &_faceshift; }
Visage* getVisage() { return &_visage; }
@ -345,6 +346,7 @@ private slots:
void setFullscreen(bool fullscreen);
void setEnable3DTVMode(bool enable3DTVMode);
void setEnableVRMode(bool enableVRMode);
void cameraMenuChanged();
glm::vec2 getScaledScreenPoint(glm::vec2 projectedPoint);
@ -505,6 +507,7 @@ private:
int _mouseDragStartedX;
int _mouseDragStartedY;
quint64 _lastMouseMove;
unsigned int _lastMouseMoveType;
bool _mouseHidden;
bool _seenMouseMove;
@ -521,6 +524,7 @@ private:
QSet<int> _keysPressed;
GeometryCache _geometryCache;
AnimationCache _animationCache;
TextureCache _textureCache;


@ -254,6 +254,11 @@ Menu::Menu() :
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::FullscreenMirror, Qt::Key_H, false,
appInstance, SLOT(cameraMenuChanged()));
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::EnableVRMode, 0,
false,
appInstance,
SLOT(setEnableVRMode(bool)));
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Enable3DTVMode, 0,
false,
appInstance,
@ -382,7 +387,6 @@ Menu::Menu() :
QMenu* sixenseOptionsMenu = developerMenu->addMenu("Sixense Options");
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::SixenseMouseInput, 0, true);
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::SixenseLeftHanded, 0, false);
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::SixenseInvertInputButtons, 0, false);
QMenu* handOptionsMenu = developerMenu->addMenu("Hand Options");


@ -336,6 +336,7 @@ namespace MenuOption {
const QString EchoLocalAudio = "Echo Local Audio";
const QString EchoServerAudio = "Echo Server Audio";
const QString Enable3DTVMode = "Enable 3DTV Mode";
const QString EnableVRMode = "Enable VR Mode";
const QString ExpandMiscAvatarTiming = "Expand Misc MyAvatar Timing";
const QString ExpandAvatarUpdateTiming = "Expand MyAvatar update Timing";
const QString ExpandAvatarSimulateTiming = "Expand MyAvatar simulate Timing";
@ -400,7 +401,6 @@ namespace MenuOption {
const QString SettingsImport = "Import Settings";
const QString SimpleShadows = "Simple";
const QString SixenseInvertInputButtons = "Invert Sixense Mouse Input Buttons";
const QString SixenseLeftHanded = "Left Handed Sixense Mouse Input";
const QString SixenseMouseInput = "Enable Sixense Mouse Input";
const QString ShowBordersVoxelNodes = "Show Voxel Nodes";
const QString ShowBordersModelNodes = "Show Model Nodes";


@ -72,6 +72,14 @@ void OculusManager::connect() {
#endif
}
bool OculusManager::isConnected() {
#ifdef HAVE_LIBOVR
return _isConnected && Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode);
#else
return false;
#endif
}
void OculusManager::configureCamera(Camera& camera, int screenWidth, int screenHeight) {
#ifdef HAVE_LIBOVR
_stereoConfig.SetFullViewport(Viewport(0, 0, screenWidth, screenHeight));


@ -27,7 +27,7 @@ class OculusManager {
public:
static void connect();
static bool isConnected() { return _isConnected; }
static bool isConnected();
static void configureCamera(Camera& camera, int screenWidth, int screenHeight);


@ -39,10 +39,14 @@ SixenseManager::SixenseManager() {
sixenseInit();
#endif
_triggerPressed = false;
_bumperPressed = false;
_oldX = -1;
_oldY = -1;
_triggerPressed[0] = false;
_bumperPressed[0] = false;
_oldX[0] = -1;
_oldY[0] = -1;
_triggerPressed[1] = false;
_bumperPressed[1] = false;
_oldX[1] = -1;
_oldY[1] = -1;
}
SixenseManager::~SixenseManager() {
@ -114,10 +118,7 @@ void SixenseManager::update(float deltaTime) {
// Emulate the mouse so we can use scripts
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput)) {
// Check if we are on the correct palm
if ((Menu::getInstance()->isOptionChecked(MenuOption::SixenseLeftHanded) && numActiveControllers == 1) || numActiveControllers == 2) {
emulateMouse(palm);
}
emulateMouse(palm, numActiveControllers - 1);
}
// NOTE: Sixense API returns pos data in millimeters but we IMMEDIATELY convert to meters.
@ -328,30 +329,13 @@ void SixenseManager::updateCalibration(const sixenseControllerData* controllers)
}
//Injecting mouse movements and clicks
void SixenseManager::emulateMouse(PalmData *palm) {
void SixenseManager::emulateMouse(PalmData* palm, int index) {
MyAvatar* avatar = Application::getInstance()->getAvatar();
QGLWidget* widget = Application::getInstance()->getGLWidget();
QPoint pos;
// Get direction relative to avatar orientation
glm::vec3 direction = glm::inverse(avatar->getOrientation()) * palm->getFingerDirection();
// Get the angles, scaled between 0-1
float xAngle = (atan2(direction.z, direction.x) + M_PI_2) + 0.5f;
float yAngle = 1.0f - ((atan2(direction.z, direction.y) + M_PI_2) + 0.5f);
float cursorRange = Application::getInstance()->getGLWidget()->width();
pos.setX(cursorRange * xAngle);
pos.setY(cursorRange * yAngle);
//If position has changed, emit a mouse move to the application
if (pos.x() != _oldX || pos.y() != _oldY) {
QMouseEvent mouseEvent(static_cast<QEvent::Type>(CONTROLLER_MOVE_EVENT), pos, Qt::NoButton, Qt::NoButton, 0);
Application::getInstance()->mouseMoveEvent(&mouseEvent);
}
_oldX = pos.x();
_oldY = pos.y();
Qt::MouseButton bumperButton;
Qt::MouseButton triggerButton;
@ -362,42 +346,88 @@ void SixenseManager::emulateMouse(PalmData *palm) {
bumperButton = Qt::RightButton;
triggerButton = Qt::LeftButton;
}
// Get the angles, scaled between 0-1
float xAngle = (atan2(direction.z, direction.x) + M_PI_2) + 0.5f;
float yAngle = 1.0f - ((atan2(direction.z, direction.y) + M_PI_2) + 0.5f);
float cursorRange = widget->width();
pos.setX(cursorRange * xAngle);
pos.setY(cursorRange * yAngle);
//If we are off screen we should stop processing, and if a trigger or bumper is pressed,
//we should release it.
if (pos.x() < 0 || pos.x() > widget->width() || pos.y() < 0 || pos.y() > widget->height()) {
if (_bumperPressed[index]) {
QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, bumperButton, bumperButton, 0);
Application::getInstance()->mouseReleaseEvent(&mouseEvent);
_bumperPressed[index] = false;
}
if (_triggerPressed[index]) {
QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, triggerButton, triggerButton, 0);
Application::getInstance()->mouseReleaseEvent(&mouseEvent);
_triggerPressed[index] = false;
}
return;
}
//If position has changed, emit a mouse move to the application
if (pos.x() != _oldX[index] || pos.y() != _oldY[index]) {
QMouseEvent mouseEvent(static_cast<QEvent::Type>(CONTROLLER_MOVE_EVENT), pos, Qt::NoButton, Qt::NoButton, 0);
//Only send the mouse event if the opposite controller's left button isn't held down.
//This is specifically for edit voxels
if (triggerButton == Qt::LeftButton) {
if (!_triggerPressed[(int)(!index)]) {
Application::getInstance()->mouseMoveEvent(&mouseEvent);
}
} else {
if (!_bumperPressed[(int)(!index)]) {
Application::getInstance()->mouseMoveEvent(&mouseEvent);
}
}
}
_oldX[index] = pos.x();
_oldY[index] = pos.y();
//Check for bumper press ( Right Click )
if (palm->getControllerButtons() & BUTTON_FWD) {
if (!_bumperPressed) {
_bumperPressed = true;
if (!_bumperPressed[index]) {
_bumperPressed[index] = true;
QMouseEvent mouseEvent(QEvent::MouseButtonPress, pos, bumperButton, bumperButton, 0);
Application::getInstance()->mousePressEvent(&mouseEvent);
}
} else if (_bumperPressed) {
} else if (_bumperPressed[index]) {
QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, bumperButton, bumperButton, 0);
Application::getInstance()->mouseReleaseEvent(&mouseEvent);
_bumperPressed = false;
_bumperPressed[index] = false;
}
//Check for trigger press ( Left Click )
if (palm->getTrigger() == 1.0f) {
if (!_triggerPressed) {
_triggerPressed = true;
if (!_triggerPressed[index]) {
_triggerPressed[index] = true;
QMouseEvent mouseEvent(QEvent::MouseButtonPress, pos, triggerButton, triggerButton, 0);
Application::getInstance()->mousePressEvent(&mouseEvent);
}
} else if (_triggerPressed) {
} else if (_triggerPressed[index]) {
QMouseEvent mouseEvent(QEvent::MouseButtonRelease, pos, triggerButton, triggerButton, 0);
Application::getInstance()->mouseReleaseEvent(&mouseEvent);
_triggerPressed = false;
_triggerPressed[index] = false;
}
}
#endif // HAVE_SIXENSE
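
For orientation, the cursor math in emulateMouse above can be read as a small pure function: rotate the palm's finger direction into avatar space, turn it into two angles with atan2, and scale those (roughly normalized) angles by the widget width to get pixel coordinates. A hedged standalone sketch of just that mapping (function name hypothetical, formulas copied from the code above):

#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Sketch: finger direction -> emulated cursor position, as in SixenseManager::emulateMouse().
static glm::ivec2 controllerCursor(const glm::quat& avatarOrientation,
                                   const glm::vec3& fingerDirection,
                                   int widgetWidth) {
    glm::vec3 direction = glm::inverse(avatarOrientation) * fingerDirection;
    // Angles scaled to roughly 0..1 for directions in front of the avatar, per the code above.
    float xAngle = (atan2f(direction.z, direction.x) + (float)M_PI_2) + 0.5f;
    float yAngle = 1.0f - ((atan2f(direction.z, direction.y) + (float)M_PI_2) + 0.5f);
    float cursorRange = (float)widgetWidth;   // both axes use the widget width, matching the original
    return glm::ivec2((int)(cursorRange * xAngle), (int)(cursorRange * yAngle));
}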


@ -47,7 +47,7 @@ public slots:
private:
#ifdef HAVE_SIXENSE
void updateCalibration(const sixenseControllerData* controllers);
void emulateMouse(PalmData *palm);
void emulateMouse(PalmData* palm, int index);
int _calibrationState;
@ -70,11 +70,11 @@ private:
quint64 _lastMovement;
glm::vec3 _amountMoved;
// for mouse emulation
bool _triggerPressed;
bool _bumperPressed;
int _oldX;
int _oldY;
// for mouse emulation with the two controllers
bool _triggerPressed[2];
bool _bumperPressed[2];
int _oldX[2];
int _oldY[2];
};
#endif // hifi_SixenseManager_h


@ -78,7 +78,6 @@ void LocationManager::goTo(QString destination) {
_userData = QJsonObject();
_placeData = QJsonObject();
destination = QString(QUrl::toPercentEncoding(destination));
JSONCallbackParameters callbackParams;
callbackParams.jsonCallbackReceiver = this;
callbackParams.jsonCallbackMethod = "goToUserFromResponse";


@ -16,13 +16,24 @@
#include "Application.h"
#include "ApplicationOverlay.h"
#include "devices/OculusManager.h"
#include "ui/Stats.h"
// Fast helper functions
inline float max(float a, float b) {
return (a > b) ? a : b;
}
inline float min(float a, float b) {
return (a < b) ? a : b;
}
ApplicationOverlay::ApplicationOverlay() :
_framebufferObject(NULL),
_oculusAngle(65.0f * RADIANS_PER_DEGREE),
_distance(0.5f),
_textureFov(PI / 2.5f),
_uiType(HEMISPHERE) {
}
@ -35,82 +46,6 @@ ApplicationOverlay::~ApplicationOverlay() {
const float WHITE_TEXT[] = { 0.93f, 0.93f, 0.93f };
void renderControllerPointer() {
Application* application = Application::getInstance();
QGLWidget* glWidget = application->getGLWidget();
MyAvatar* myAvatar = application->getAvatar();
const HandData* handData = Application::getInstance()->getAvatar()->getHandData();
int numberOfPalms = handData->getNumPalms();
int palmIndex;
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseLeftHanded)) {
palmIndex = 2;
} else {
palmIndex = 3;
}
const PalmData* palmData = NULL;
if (palmIndex >= handData->getPalms().size()) {
return;
}
if (handData->getPalms()[palmIndex].isActive()) {
palmData = &handData->getPalms()[palmIndex];
} else {
return;
}
// Get direction relative to avatar orientation
glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * palmData->getFingerDirection();
// Get the angles, scaled between 0-1
float xAngle = (atan2(direction.z, direction.x) + M_PI_2) + 0.5f;
float yAngle = 1.0f - ((atan2(direction.z, direction.y) + M_PI_2) + 0.5f);
float cursorRange = glWidget->width();
int mouseX = cursorRange * xAngle;
int mouseY = cursorRange * yAngle;
if (mouseX < 0) {
mouseX = 0;
} else if (mouseX > glWidget->width()) {
mouseX = glWidget->width();
}
if (mouseY < 0) {
mouseY = 0;
} else if (mouseY > glWidget->width()) {
mouseY = glWidget->width();
}
const float pointerWidth = 40;
const float pointerHeight = 40;
const float crossPad = 16;
mouseX -= pointerWidth / 2.0f;
mouseY += pointerHeight / 2.0f;
glBegin(GL_QUADS);
glColor3f(0, 0, 1);
//Horizontal crosshair
glVertex2i(mouseX, mouseY - crossPad);
glVertex2i(mouseX + pointerWidth, mouseY - crossPad);
glVertex2i(mouseX + pointerWidth, mouseY - pointerHeight + crossPad);
glVertex2i(mouseX, mouseY - pointerHeight + crossPad);
//Vertical crosshair
glVertex2i(mouseX + crossPad, mouseY);
glVertex2i(mouseX + pointerWidth - crossPad, mouseY);
glVertex2i(mouseX + pointerWidth - crossPad, mouseY - pointerHeight);
glVertex2i(mouseX + crossPad, mouseY - pointerHeight);
glEnd();
}
// Renders the overlays either to a texture or to the screen
void ApplicationOverlay::renderOverlay(bool renderToTexture) {
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "ApplicationOverlay::displayOverlay()");
@ -120,14 +55,6 @@ void ApplicationOverlay::renderOverlay(bool renderToTexture) {
Overlays& overlays = application->getOverlays();
QGLWidget* glWidget = application->getGLWidget();
MyAvatar* myAvatar = application->getAvatar();
Audio* audio = application->getAudio();
const OctreePacketProcessor& octreePacketProcessor = application->getOctreePacketProcessor();
BandwidthMeter* bandwidthMeter = application->getBandwidthMeter();
NodeBounds& nodeBoundsDisplay = application->getNodeBoundsDisplay();
int mouseX = application->getMouseX();
int mouseY = application->getMouseY();
bool renderPointer = renderToTexture;
if (renderToTexture) {
getFramebufferObject()->bind();
@ -136,7 +63,7 @@ void ApplicationOverlay::renderOverlay(bool renderToTexture) {
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
// Render 2D overlay: I/O level bar graphs and text
// Render 2D overlay
glMatrixMode(GL_PROJECTION);
glPushMatrix();
@ -145,6 +72,461 @@ void ApplicationOverlay::renderOverlay(bool renderToTexture) {
glDisable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
renderAudioMeter();
if (Menu::getInstance()->isOptionChecked(MenuOption::HeadMouse)) {
myAvatar->renderHeadMouse(glWidget->width(), glWidget->height());
}
renderStatsAndLogs();
// give external parties a chance to hook in
emit application->renderingOverlay();
overlays.render2D();
renderPointers();
glPopMatrix();
glMatrixMode(GL_MODELVIEW);
glEnable(GL_DEPTH_TEST);
glEnable(GL_LIGHTING);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
if (renderToTexture) {
getFramebufferObject()->release();
}
}
// Draws the FBO texture for the screen
void ApplicationOverlay::displayOverlayTexture(Camera& whichCamera) {
Application* application = Application::getInstance();
QGLWidget* glWidget = application->getGLWidget();
glEnable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, getFramebufferObject()->texture());
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadIdentity();
gluOrtho2D(0, glWidget->width(), glWidget->height(), 0);
glDisable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
glBegin(GL_QUADS);
glTexCoord2f(0, 0); glVertex2i(0, glWidget->height());
glTexCoord2f(1, 0); glVertex2i(glWidget->width(), glWidget->height());
glTexCoord2f(1, 1); glVertex2i(glWidget->width(), 0);
glTexCoord2f(0, 1); glVertex2i(0, 0);
glEnd();
glPopMatrix();
glDisable(GL_TEXTURE_2D);
}
void ApplicationOverlay::computeOculusPickRay(float x, float y, glm::vec3& direction) const {
glm::quat rot = Application::getInstance()->getAvatar()->getOrientation();
//invert y direction
y = 1.0 - y;
//Get position on hemisphere UI
x = sin((x - 0.5f) * _textureFov);
y = sin((y - 0.5f) * _textureFov);
float dist = sqrt(x * x + y * y);
float z = -sqrt(1.0f - dist * dist);
//Rotate the UI pick ray by the avatar orientation
direction = glm::normalize(rot * glm::vec3(x, y, z));
}
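
As a sanity check on computeOculusPickRay above: the normalized overlay coordinates are pushed onto a unit hemisphere with x = sin((u - 0.5) * _textureFov), y = sin((v - 0.5) * _textureFov) after flipping v, and z = -sqrt(1 - x*x - y*y), then the result is rotated by the avatar orientation. The screen center (0.5, 0.5) therefore maps to (0, 0, -1), straight ahead. A small hedged sketch of the projection with the rotation left out (free function standing in for the member):

#include <cmath>
#include <glm/glm.hpp>

// Sketch: the hemisphere projection from computeOculusPickRay(), before the avatar rotation.
// textureFov is assumed to be PI / 2.5f, matching the constructor above.
static glm::vec3 overlayPickDirection(float x, float y, float textureFov) {
    y = 1.0f - y;                                // invert y, as in the original
    x = sinf((x - 0.5f) * textureFov);
    y = sinf((y - 0.5f) * textureFov);
    float dist = sqrtf(x * x + y * y);
    float z = -sqrtf(1.0f - dist * dist);
    return glm::normalize(glm::vec3(x, y, z));   // (0.5f, 0.5f) -> (0, 0, -1)
}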
// Draws the FBO texture for the Oculus Rift. TODO: Draw a curved texture instead of a plane.
void ApplicationOverlay::displayOverlayTextureOculus(Camera& whichCamera) {
Application* application = Application::getInstance();
QGLWidget* glWidget = application->getGLWidget();
MyAvatar* myAvatar = application->getAvatar();
const glm::vec3& viewMatrixTranslation = application->getViewMatrixTranslation();
// Get vertical FoV of the displayed overlay texture
const float halfVerticalAngle = _oculusAngle / 2.0f;
const float overlayAspectRatio = glWidget->width() / (float)glWidget->height();
const float halfOverlayHeight = _distance * tan(halfVerticalAngle);
const float overlayHeight = halfOverlayHeight * 2.0f;
// The more vertices, the better the curve
const int numHorizontalVertices = 20;
const int numVerticalVertices = 20;
// U texture coordinate width at each quad
const float quadTexWidth = 1.0f / (numHorizontalVertices - 1);
const float quadTexHeight = 1.0f / (numVerticalVertices - 1);
// Get horizontal angle and angle increment from vertical angle and aspect ratio
const float horizontalAngle = halfVerticalAngle * 2.0f * overlayAspectRatio;
const float angleIncrement = horizontalAngle / (numHorizontalVertices - 1);
const float halfHorizontalAngle = horizontalAngle / 2;
const float verticalAngleIncrement = _oculusAngle / (numVerticalVertices - 1);
glActiveTexture(GL_TEXTURE0);
glEnable(GL_BLEND);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glBindTexture(GL_TEXTURE_2D, getFramebufferObject()->texture());
glEnable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
glEnable(GL_TEXTURE_2D);
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glLoadIdentity();
// Transform to world space
glm::quat rotation = whichCamera.getRotation();
glm::vec3 axis2 = glm::axis(rotation);
glRotatef(-glm::degrees(glm::angle(rotation)), axis2.x, axis2.y, axis2.z);
glTranslatef(viewMatrixTranslation.x, viewMatrixTranslation.y, viewMatrixTranslation.z);
// Translate to the front of the camera
glm::vec3 pos = whichCamera.getPosition();
glm::quat rot = myAvatar->getOrientation();
glm::vec3 axis = glm::axis(rot);
glTranslatef(pos.x, pos.y, pos.z);
glRotatef(glm::degrees(glm::angle(rot)), axis.x, axis.y, axis.z);
glColor3f(1.0f, 1.0f, 1.0f);
glDepthMask(GL_TRUE);
glEnable(GL_ALPHA_TEST);
glAlphaFunc(GL_GREATER, 0.01f);
float leftX, rightX, leftZ, rightZ, topZ, bottomZ;
//Draw the magnifiers
for (int i = 0; i < _numMagnifiers; i++) {
renderMagnifier(_mouseX[i], _mouseY[i]);
}
glDepthMask(GL_FALSE);
glDisable(GL_ALPHA_TEST);
//TODO: Remove immediate mode in favor of VBO
if (_uiType == HEMISPHERE) {
renderTexturedHemisphere();
} else{
glBegin(GL_QUADS);
// Place the vertices in a semicircle curve around the camera
for (int i = 0; i < numHorizontalVertices - 1; i++) {
for (int j = 0; j < numVerticalVertices - 1; j++) {
// Calculate the X and Z coordinates from the angles and radius from camera
leftX = sin(angleIncrement * i - halfHorizontalAngle) * _distance;
rightX = sin(angleIncrement * (i + 1) - halfHorizontalAngle) * _distance;
leftZ = -cos(angleIncrement * i - halfHorizontalAngle) * _distance;
rightZ = -cos(angleIncrement * (i + 1) - halfHorizontalAngle) * _distance;
if (_uiType == 2) {
topZ = -cos((verticalAngleIncrement * (j + 1) - halfVerticalAngle) * overlayAspectRatio) * _distance;
bottomZ = -cos((verticalAngleIncrement * j - halfVerticalAngle) * overlayAspectRatio) * _distance;
} else {
topZ = -99999;
bottomZ = -99999;
}
glTexCoord2f(quadTexWidth * i, (j + 1) * quadTexHeight);
glVertex3f(leftX, (j + 1) * quadTexHeight * overlayHeight - halfOverlayHeight, max(topZ, leftZ));
glTexCoord2f(quadTexWidth * (i + 1), (j + 1) * quadTexHeight);
glVertex3f(rightX, (j + 1) * quadTexHeight * overlayHeight - halfOverlayHeight, max(topZ, rightZ));
glTexCoord2f(quadTexWidth * (i + 1), j * quadTexHeight);
glVertex3f(rightX, j * quadTexHeight * overlayHeight - halfOverlayHeight, max(bottomZ, rightZ));
glTexCoord2f(quadTexWidth * i, j * quadTexHeight);
glVertex3f(leftX, j * quadTexHeight * overlayHeight - halfOverlayHeight, max(bottomZ, leftZ));
}
}
glEnd();
}
glPopMatrix();
glDepthMask(GL_TRUE);
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glEnable(GL_LIGHTING);
}
//Renders optional pointers
void ApplicationOverlay::renderPointers() {
Application* application = Application::getInstance();
// Render a crosshair over the mouse when in Oculus
_numMagnifiers = 0;
int mouseX = application->getMouseX();
int mouseY = application->getMouseY();
if (OculusManager::isConnected() && application->getLastMouseMoveType() == QEvent::MouseMove) {
const float pointerWidth = 10;
const float pointerHeight = 10;
const float crossPad = 4;
_numMagnifiers = 1;
_mouseX[0] = application->getMouseX();
_mouseY[0] = application->getMouseY();
mouseX -= pointerWidth / 2.0f;
mouseY += pointerHeight / 2.0f;
glBegin(GL_QUADS);
glColor3f(1, 0, 0);
//Horizontal crosshair
glVertex2i(mouseX, mouseY - crossPad);
glVertex2i(mouseX + pointerWidth, mouseY - crossPad);
glVertex2i(mouseX + pointerWidth, mouseY - pointerHeight + crossPad);
glVertex2i(mouseX, mouseY - pointerHeight + crossPad);
//Vertical crosshair
glVertex2i(mouseX + crossPad, mouseY);
glVertex2i(mouseX + pointerWidth - crossPad, mouseY);
glVertex2i(mouseX + pointerWidth - crossPad, mouseY - pointerHeight);
glVertex2i(mouseX + crossPad, mouseY - pointerHeight);
glEnd();
} else if (application->getLastMouseMoveType() == CONTROLLER_MOVE_EVENT && Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput)) {
//only render controller pointer if we aren't already rendering a mouse pointer
renderControllerPointer();
}
}
void ApplicationOverlay::renderControllerPointer() {
Application* application = Application::getInstance();
QGLWidget* glWidget = application->getGLWidget();
MyAvatar* myAvatar = application->getAvatar();
const HandData* handData = Application::getInstance()->getAvatar()->getHandData();
int numberOfPalms = handData->getNumPalms();
for (unsigned int palmIndex = 2; palmIndex < 4; palmIndex++) {
const PalmData* palmData = NULL;
if (palmIndex >= handData->getPalms().size()) {
return;
}
if (handData->getPalms()[palmIndex].isActive()) {
palmData = &handData->getPalms()[palmIndex];
} else {
continue;
}
// Get direction relative to avatar orientation
glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * palmData->getFingerDirection();
// Get the angles, scaled between 0-1
float xAngle = (atan2(direction.z, direction.x) + M_PI_2) + 0.5f;
float yAngle = 1.0f - ((atan2(direction.z, direction.y) + M_PI_2) + 0.5f);
float cursorRange = glWidget->width();
int mouseX = cursorRange * xAngle;
int mouseY = cursorRange * yAngle;
//If the cursor is out of the screen then don't render it
if (mouseX < 0 || mouseX >= glWidget->width() || mouseY < 0 || mouseY >= glWidget->height()) {
continue;
}
float pointerWidth = 40;
float pointerHeight = 40;
float crossPad = 16;
//if we have the oculus, we should make the cursor smaller since it will be
//magnified
if (OculusManager::isConnected()) {
pointerWidth /= 4;
pointerHeight /= 4;
crossPad /= 4;
_mouseX[_numMagnifiers] = mouseX;
_mouseY[_numMagnifiers] = mouseY;
_numMagnifiers++;
}
mouseX -= pointerWidth / 2.0f;
mouseY += pointerHeight / 2.0f;
glBegin(GL_QUADS);
glColor3f(0.0f, 0.0f, 1.0f);
//Horizontal crosshair
glVertex2i(mouseX, mouseY - crossPad);
glVertex2i(mouseX + pointerWidth, mouseY - crossPad);
glVertex2i(mouseX + pointerWidth, mouseY - pointerHeight + crossPad);
glVertex2i(mouseX, mouseY - pointerHeight + crossPad);
//Vertical crosshair
glVertex2i(mouseX + crossPad, mouseY);
glVertex2i(mouseX + pointerWidth - crossPad, mouseY);
glVertex2i(mouseX + pointerWidth - crossPad, mouseY - pointerHeight);
glVertex2i(mouseX + crossPad, mouseY - pointerHeight);
glEnd();
}
}
//Renders a small magnification of the currently bound texture at the coordinates
void ApplicationOverlay::renderMagnifier(int mouseX, int mouseY)
{
Application* application = Application::getInstance();
QGLWidget* glWidget = application->getGLWidget();
MyAvatar* myAvatar = application->getAvatar();
const glm::vec3& viewMatrixTranslation = application->getViewMatrixTranslation();
float leftX, rightX, leftZ, rightZ, topZ, bottomZ;
const int widgetWidth = glWidget->width();
const int widgetHeight = glWidget->height();
const float magnification = 4.0f;
// Get vertical FoV of the displayed overlay texture
const float halfVerticalAngle = _oculusAngle / 2.0f;
const float overlayAspectRatio = glWidget->width() / (float)glWidget->height();
const float halfOverlayHeight = _distance * tan(halfVerticalAngle);
// Get horizontal angle and angle increment from vertical angle and aspect ratio
const float horizontalAngle = halfVerticalAngle * 2.0f * overlayAspectRatio;
const float halfHorizontalAngle = horizontalAngle / 2;
float magnifyWidth = 80.0f;
float magnifyHeight = 60.0f;
mouseX -= magnifyWidth / 2;
mouseY -= magnifyHeight / 2;
//clamp the magnification
if (mouseX < 0) {
magnifyWidth += mouseX;
mouseX = 0;
} else if (mouseX + magnifyWidth > widgetWidth) {
magnifyWidth = widgetWidth - mouseX;
}
if (mouseY < 0) {
magnifyHeight += mouseY;
mouseY = 0;
} else if (mouseY + magnifyHeight > widgetHeight) {
magnifyHeight = widgetHeight - mouseY;
}
const float halfMagnifyHeight = magnifyHeight / 2.0f;
float newWidth = magnifyWidth * magnification;
float newHeight = magnifyHeight * magnification;
// Magnification Texture Coordinates
float magnifyULeft = mouseX / (float)widgetWidth;
float magnifyURight = (mouseX + magnifyWidth) / (float)widgetWidth;
float magnifyVBottom = 1.0f - mouseY / (float)widgetHeight;
float magnifyVTop = 1.0f - (mouseY + magnifyHeight) / (float)widgetHeight;
// Coordinates of magnification overlay
float newMouseX = (mouseX + magnifyWidth / 2) - newWidth / 2.0f;
float newMouseY = (mouseY + magnifyHeight / 2) + newHeight / 2.0f;
// Get angle on the UI
float leftAngle = (newMouseX / (float)widgetWidth) * horizontalAngle - halfHorizontalAngle;
float rightAngle = ((newMouseX + newWidth) / (float)widgetWidth) * horizontalAngle - halfHorizontalAngle;
float bottomAngle = (newMouseY / (float)widgetHeight) * _oculusAngle - halfVerticalAngle;
float topAngle = ((newMouseY - newHeight) / (float)widgetHeight) * _oculusAngle - halfVerticalAngle;
// Get position on hemisphere using angle
if (_uiType == HEMISPHERE) {
//Get new UV coordinates from our magnification window
float newULeft = newMouseX / widgetWidth;
float newURight = (newMouseX + newWidth) / widgetWidth;
float newVBottom = 1.0 - newMouseY / widgetHeight;
float newVTop = 1.0 - (newMouseY - newHeight) / widgetHeight;
// Project our position onto the hemisphere using the UV coordinates
float lX = sin((newULeft - 0.5f) * _textureFov);
float rX = sin((newURight - 0.5f) * _textureFov);
float bY = sin((newVBottom - 0.5f) * _textureFov);
float tY = sin((newVTop - 0.5f) * _textureFov);
float dist;
//Bottom Left
dist = sqrt(lX * lX + bY * bY);
float blZ = sqrt(1.0f - dist * dist);
//Top Left
dist = sqrt(lX * lX + tY * tY);
float tlZ = sqrt(1.0f - dist * dist);
//Bottom Right
dist = sqrt(rX * rX + bY * bY);
float brZ = sqrt(1.0f - dist * dist);
//Top Right
dist = sqrt(rX * rX + tY * tY);
float trZ = sqrt(1.0f - dist * dist);
glBegin(GL_QUADS);
glTexCoord2f(magnifyULeft, magnifyVBottom); glVertex3f(lX, tY, -tlZ);
glTexCoord2f(magnifyURight, magnifyVBottom); glVertex3f(rX, tY, -trZ);
glTexCoord2f(magnifyURight, magnifyVTop); glVertex3f(rX, bY, -brZ);
glTexCoord2f(magnifyULeft, magnifyVTop); glVertex3f(lX, bY, -blZ);
glEnd();
} else {
leftX = sin(leftAngle) * _distance;
rightX = sin(rightAngle) * _distance;
leftZ = -cos(leftAngle) * _distance;
rightZ = -cos(rightAngle) * _distance;
if (_uiType == CURVED_SEMICIRCLE) {
topZ = -cos(topAngle * overlayAspectRatio) * _distance;
bottomZ = -cos(bottomAngle * overlayAspectRatio) * _distance;
} else {
// Don't want to use topZ or bottomZ for SEMICIRCLE
topZ = -99999;
bottomZ = -99999;
}
float bottomY = (1.0 - newMouseY / (float)widgetHeight) * halfOverlayHeight * 2.0f - halfOverlayHeight;
float topY = bottomY + (newHeight / widgetHeight) * halfOverlayHeight * 2;
//TODO: Remove immediate mode in favor of VBO
glBegin(GL_QUADS);
glTexCoord2f(magnifyULeft, magnifyVBottom); glVertex3f(leftX, topY, max(topZ, leftZ));
glTexCoord2f(magnifyURight, magnifyVBottom); glVertex3f(rightX, topY, max(topZ, rightZ));
glTexCoord2f(magnifyURight, magnifyVTop); glVertex3f(rightX, bottomY, max(bottomZ, rightZ));
glTexCoord2f(magnifyULeft, magnifyVTop); glVertex3f(leftX, bottomY, max(bottomZ, leftZ));
glEnd();
}
}
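
The clamp at the top of renderMagnifier keeps the magnified source rectangle inside the framebuffer: when the window centered on the cursor pokes past an edge, its origin is clamped and its width or height shrunk before the texture coordinates are derived from it. A minimal sketch of just that clamp (hypothetical helper, same arithmetic as above):

// Sketch: clamp an (x, y, w, h) magnifier window to a widgetWidth x widgetHeight surface.
static void clampMagnifierWindow(int& x, int& y, float& w, float& h, int widgetWidth, int widgetHeight) {
    if (x < 0) {
        w += x;                       // shrink by the amount hanging off the left edge
        x = 0;
    } else if (x + w > widgetWidth) {
        w = widgetWidth - x;          // shrink to fit against the right edge
    }
    if (y < 0) {
        h += y;
        y = 0;
    } else if (y + h > widgetHeight) {
        h = widgetHeight - y;
    }
}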
void ApplicationOverlay::renderAudioMeter() {
Application* application = Application::getInstance();
QGLWidget* glWidget = application->getGLWidget();
Audio* audio = application->getAudio();
// Display a single screen-size quad to create an alpha blended 'collision' flash
if (audio->getCollisionFlashesScreen()) {
float collisionSoundMagnitude = audio->getCollisionSoundMagnitude();
@ -263,11 +645,16 @@ void ApplicationOverlay::renderOverlay(bool renderToTexture) {
glVertex2i(AUDIO_METER_X + AUDIO_METER_INSET + audioLevel, audioMeterY + AUDIO_METER_HEIGHT - AUDIO_METER_INSET);
glVertex2i(AUDIO_METER_X + AUDIO_METER_INSET, audioMeterY + AUDIO_METER_HEIGHT - AUDIO_METER_INSET);
glEnd();
}
void ApplicationOverlay::renderStatsAndLogs() {
if (Menu::getInstance()->isOptionChecked(MenuOption::HeadMouse)) {
myAvatar->renderHeadMouse(glWidget->width(), glWidget->height());
}
Application* application = Application::getInstance();
QGLWidget* glWidget = application->getGLWidget();
const OctreePacketProcessor& octreePacketProcessor = application->getOctreePacketProcessor();
BandwidthMeter* bandwidthMeter = application->getBandwidthMeter();
NodeBounds& nodeBoundsDisplay = application->getNodeBoundsDisplay();
// Display stats and log text onscreen
glLineWidth(1.0f);
@ -298,347 +685,18 @@ void ApplicationOverlay::renderOverlay(bool renderToTexture) {
drawText(glWidget->width() - 100, glWidget->height() - timerBottom, 0.30f, 0.0f, 0, frameTimer, WHITE_TEXT);
}
nodeBoundsDisplay.drawOverlay();
// give external parties a chance to hook in
emit application->renderingOverlay();
overlays.render2D();
// Render a crosshair over the pointer when in Oculus
if (renderPointer) {
const float pointerWidth = 10;
const float pointerHeight = 10;
const float crossPad = 4;
mouseX -= pointerWidth / 2.0f;
mouseY += pointerHeight / 2.0f;
glBegin(GL_QUADS);
glColor3f(1, 0, 0);
//Horizontal crosshair
glVertex2i(mouseX, mouseY - crossPad);
glVertex2i(mouseX + pointerWidth, mouseY - crossPad);
glVertex2i(mouseX + pointerWidth, mouseY - pointerHeight + crossPad);
glVertex2i(mouseX, mouseY - pointerHeight + crossPad);
//Vertical crosshair
glVertex2i(mouseX + crossPad, mouseY);
glVertex2i(mouseX + pointerWidth - crossPad, mouseY);
glVertex2i(mouseX + pointerWidth - crossPad, mouseY - pointerHeight);
glVertex2i(mouseX + crossPad, mouseY - pointerHeight);
glEnd();
} else if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput)) {
//only render controller pointer if we aren't already rendering a mouse pointer
renderControllerPointer();
}
glPopMatrix();
glMatrixMode(GL_MODELVIEW);
glEnable(GL_DEPTH_TEST);
glEnable(GL_LIGHTING);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
if (renderToTexture) {
getFramebufferObject()->release();
}
}
// Draws the FBO texture for the screen
void ApplicationOverlay::displayOverlayTexture(Camera& whichCamera) {
Application* application = Application::getInstance();
QGLWidget* glWidget = application->getGLWidget();
glEnable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, getFramebufferObject()->texture());
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadIdentity();
gluOrtho2D(0, glWidget->width(), glWidget->height(), 0);
glDisable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
glBegin(GL_QUADS);
glTexCoord2f(0, 0); glVertex2i(0, glWidget->height());
glTexCoord2f(1, 0); glVertex2i(glWidget->width(), glWidget->height());
glTexCoord2f(1, 1); glVertex2i(glWidget->width(), 0);
glTexCoord2f(0, 1); glVertex2i(0, 0);
glEnd();
glPopMatrix();
glDisable(GL_TEXTURE_2D);
}
const float textureFov = PI / 2.5f;
void ApplicationOverlay::computeOculusPickRay(float x, float y, glm::vec3& direction) const {
glm::quat rot = Application::getInstance()->getAvatar()->getOrientation();
//invert y direction
y = 1.0 - y;
//Get position on hemisphere UI
x = sin((x - 0.5f) * textureFov);
y = sin((y - 0.5f) * textureFov);
float dist = sqrt(x * x + y * y);
float z = -sqrt(1.0f - dist * dist);
//Rotate the UI pick ray by the avatar orientation
direction = glm::normalize(rot * glm::vec3(x, y, z));
}
// Fast helper functions
inline float max(float a, float b) {
return (a > b) ? a : b;
}
inline float min(float a, float b) {
return (a < b) ? a : b;
}
// Draws the FBO texture for the Oculus Rift. TODO: Draw a curved texture instead of a plane.
void ApplicationOverlay::displayOverlayTextureOculus(Camera& whichCamera) {
Application* application = Application::getInstance();
QGLWidget* glWidget = application->getGLWidget();
MyAvatar* myAvatar = application->getAvatar();
const glm::vec3& viewMatrixTranslation = application->getViewMatrixTranslation();
int mouseX = application->getMouseX();
int mouseY = application->getMouseY();
const int widgetWidth = glWidget->width();
const int widgetHeight = glWidget->height();
float magnifyWidth = 80.0f;
float magnifyHeight = 60.0f;
const float magnification = 4.0f;
// Get vertical FoV of the displayed overlay texture
const float halfVerticalAngle = _oculusAngle / 2.0f;
const float overlayAspectRatio = glWidget->width() / (float)glWidget->height();
const float halfOverlayHeight = _distance * tan(halfVerticalAngle);
const float overlayHeight = halfOverlayHeight * 2.0f;
// The more vertices, the better the curve
const int numHorizontalVertices = 20;
const int numVerticalVertices = 20;
// U texture coordinate width at each quad
const float quadTexWidth = 1.0f / (numHorizontalVertices - 1);
const float quadTexHeight = 1.0f / (numVerticalVertices - 1);
// Get horizontal angle and angle increment from vertical angle and aspect ratio
const float horizontalAngle = halfVerticalAngle * 2.0f * overlayAspectRatio;
const float angleIncrement = horizontalAngle / (numHorizontalVertices - 1);
const float halfHorizontalAngle = horizontalAngle / 2;
const float verticalAngleIncrement = _oculusAngle / (numVerticalVertices - 1);
glActiveTexture(GL_TEXTURE0);
glEnable(GL_BLEND);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glBindTexture(GL_TEXTURE_2D, getFramebufferObject()->texture());
glEnable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
glEnable(GL_TEXTURE_2D);
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glLoadIdentity();
// Transform to world space
glm::quat rotation = whichCamera.getRotation();
glm::vec3 axis2 = glm::axis(rotation);
glRotatef(-glm::degrees(glm::angle(rotation)), axis2.x, axis2.y, axis2.z);
glTranslatef(viewMatrixTranslation.x, viewMatrixTranslation.y, viewMatrixTranslation.z);
// Translate to the front of the camera
glm::vec3 pos = whichCamera.getPosition();
glm::quat rot = myAvatar->getOrientation();
glm::vec3 axis = glm::axis(rot);
glTranslatef(pos.x, pos.y, pos.z);
glRotatef(glm::degrees(glm::angle(rot)), axis.x, axis.y, axis.z);
glColor3f(1.0f, 1.0f, 1.0f);
glDepthMask(GL_TRUE);
glEnable(GL_ALPHA_TEST);
glAlphaFunc(GL_GREATER, 0.01f);
//Draw the magnifying glass
mouseX -= magnifyWidth / 2;
mouseY -= magnifyHeight / 2;
//clamp the magnification
if (mouseX < 0) {
magnifyWidth += mouseX;
mouseX = 0;
} else if (mouseX + magnifyWidth > widgetWidth) {
magnifyWidth = widgetWidth - mouseX;
}
if (mouseY < 0) {
magnifyHeight += mouseY;
mouseY = 0;
} else if (mouseY + magnifyHeight > widgetHeight) {
magnifyHeight = widgetHeight - mouseY;
}
const float halfMagnifyHeight = magnifyHeight / 2.0f;
float newWidth = magnifyWidth * magnification;
float newHeight = magnifyHeight * magnification;
// Magnification Texture Coordinates
float magnifyULeft = mouseX / (float)widgetWidth;
float magnifyURight = (mouseX + magnifyWidth) / (float)widgetWidth;
float magnifyVBottom = 1.0f - mouseY / (float)widgetHeight;
float magnifyVTop = 1.0f - (mouseY + magnifyHeight) / (float)widgetHeight;
// Coordinates of magnification overlay
float newMouseX = (mouseX + magnifyWidth / 2) - newWidth / 2.0f;
float newMouseY = (mouseY + magnifyHeight / 2) + newHeight / 2.0f;
// Get angle on the UI
float leftAngle = (newMouseX / (float)widgetWidth) * horizontalAngle - halfHorizontalAngle;
float rightAngle = ((newMouseX + newWidth) / (float)widgetWidth) * horizontalAngle - halfHorizontalAngle;
float bottomAngle = (newMouseY / (float)widgetHeight) * _oculusAngle - halfVerticalAngle;
float topAngle = ((newMouseY - newHeight) / (float)widgetHeight) * _oculusAngle - halfVerticalAngle;
float leftX, rightX, leftZ, rightZ, topZ, bottomZ;
// Get position on hemisphere using angle
if (_uiType == HEMISPHERE) {
//Get new UV coordinates from our magnification window
float newULeft = newMouseX / widgetWidth;
float newURight = (newMouseX + newWidth) / widgetWidth;
float newVBottom = 1.0 - newMouseY / widgetHeight;
float newVTop = 1.0 - (newMouseY - newHeight) / widgetHeight;
// Project our position onto the hemisphere using the UV coordinates
float lX = sin((newULeft - 0.5f) * textureFov);
float rX = sin((newURight - 0.5f) * textureFov);
float bY = sin((newVBottom - 0.5f) * textureFov);
float tY = sin((newVTop - 0.5f) * textureFov);
float dist;
//Bottom Left
dist = sqrt(lX * lX + bY * bY);
float blZ = sqrt(1.0f - dist * dist);
//Top Left
dist = sqrt(lX * lX + tY * tY);
float tlZ = sqrt(1.0f - dist * dist);
//Bottom Right
dist = sqrt(rX * rX + bY * bY);
float brZ = sqrt(1.0f - dist * dist);
//Top Right
dist = sqrt(rX * rX + tY * tY);
float trZ = sqrt(1.0f - dist * dist);
glBegin(GL_QUADS);
glTexCoord2f(magnifyULeft, magnifyVBottom); glVertex3f(lX, tY, -tlZ);
glTexCoord2f(magnifyURight, magnifyVBottom); glVertex3f(rX, tY, -trZ);
glTexCoord2f(magnifyURight, magnifyVTop); glVertex3f(rX, bY, -brZ);
glTexCoord2f(magnifyULeft, magnifyVTop); glVertex3f(lX, bY, -blZ);
glEnd();
} else {
leftX = sin(leftAngle) * _distance;
rightX = sin(rightAngle) * _distance;
leftZ = -cos(leftAngle) * _distance;
rightZ = -cos(rightAngle) * _distance;
if (_uiType == CURVED_SEMICIRCLE) {
topZ = -cos(topAngle * overlayAspectRatio) * _distance;
bottomZ = -cos(bottomAngle * overlayAspectRatio) * _distance;
} else {
// Don't want to use topZ or bottomZ for SEMICIRCLE
topZ = -99999;
bottomZ = -99999;
}
float bottomY = (1.0 - newMouseY / (float)widgetHeight) * halfOverlayHeight * 2.0f - halfOverlayHeight;
float topY = bottomY + (newHeight / widgetHeight) * halfOverlayHeight * 2;
//TODO: Remove immediate mode in favor of VBO
glBegin(GL_QUADS);
glTexCoord2f(magnifyULeft, magnifyVBottom); glVertex3f(leftX, topY, max(topZ, leftZ));
glTexCoord2f(magnifyURight, magnifyVBottom); glVertex3f(rightX, topY, max(topZ, rightZ));
glTexCoord2f(magnifyURight, magnifyVTop); glVertex3f(rightX, bottomY, max(bottomZ, rightZ));
glTexCoord2f(magnifyULeft, magnifyVTop); glVertex3f(leftX, bottomY, max(bottomZ, leftZ));
glEnd();
}
glDepthMask(GL_FALSE);
glDisable(GL_ALPHA_TEST);
//TODO: Remove immediate mode in favor of VBO
if (_uiType == HEMISPHERE) {
renderTexturedHemisphere();
} else{
glBegin(GL_QUADS);
// Place the vertices in a semicircle curve around the camera
for (int i = 0; i < numHorizontalVertices - 1; i++) {
for (int j = 0; j < numVerticalVertices - 1; j++) {
// Calculate the X and Z coordinates from the angles and radius from camera
leftX = sin(angleIncrement * i - halfHorizontalAngle) * _distance;
rightX = sin(angleIncrement * (i + 1) - halfHorizontalAngle) * _distance;
leftZ = -cos(angleIncrement * i - halfHorizontalAngle) * _distance;
rightZ = -cos(angleIncrement * (i + 1) - halfHorizontalAngle) * _distance;
if (_uiType == 2) {
topZ = -cos((verticalAngleIncrement * (j + 1) - halfVerticalAngle) * overlayAspectRatio) * _distance;
bottomZ = -cos((verticalAngleIncrement * j - halfVerticalAngle) * overlayAspectRatio) * _distance;
} else {
topZ = -99999;
bottomZ = -99999;
}
glTexCoord2f(quadTexWidth * i, (j + 1) * quadTexHeight);
glVertex3f(leftX, (j + 1) * quadTexHeight * overlayHeight - halfOverlayHeight, max(topZ, leftZ));
glTexCoord2f(quadTexWidth * (i + 1), (j + 1) * quadTexHeight);
glVertex3f(rightX, (j + 1) * quadTexHeight * overlayHeight - halfOverlayHeight, max(topZ, rightZ));
glTexCoord2f(quadTexWidth * (i + 1), j * quadTexHeight);
glVertex3f(rightX, j * quadTexHeight * overlayHeight - halfOverlayHeight, max(bottomZ, rightZ));
glTexCoord2f(quadTexWidth * i, j * quadTexHeight);
glVertex3f(leftX, j * quadTexHeight * overlayHeight - halfOverlayHeight, max(bottomZ, leftZ));
}
}
glEnd();
}
glPopMatrix();
glDepthMask(GL_TRUE);
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glEnable(GL_LIGHTING);
}
//Renders a hemisphere with texture coordinates.
void ApplicationOverlay::renderTexturedHemisphere() {
const int slices = 80;
const int stacks = 80;
//UV mapping source: http://www.mvps.org/directx/articles/spheremap.htm
static VerticesIndices vbo(0, 0);
int vertices = slices * (stacks - 1) + 1;
int indices = slices * 2 * 3 * (stacks - 2) + slices * 3;
//We only generate the VBO once
if (vbo.first == 0) {
TextureVertex* vertexData = new TextureVertex[vertices];
TextureVertex* vertex = vertexData;
@ -652,8 +710,8 @@ void ApplicationOverlay::renderTexturedHemisphere() {
vertex->position.x = sinf(theta) * radius;
vertex->position.y = cosf(theta) * radius;
vertex->position.z = z;
vertex->uv.x = asin(vertex->position.x) / (textureFov) + 0.5f;
vertex->uv.y = asin(vertex->position.y) / (textureFov) + 0.5f;
vertex->uv.x = asin(vertex->position.x) / (_textureFov) + 0.5f;
vertex->uv.y = asin(vertex->position.y) / (_textureFov) + 0.5f;
vertex++;
}
}
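
The texture-coordinate change in renderTexturedHemisphere above, uv = asin(position) / _textureFov + 0.5, is the inverse of the sin((uv - 0.5) * _textureFov) mapping that computeOculusPickRay applies, so a pick ray computed from an overlay coordinate lands on the hemisphere vertex that samples that same coordinate. A quick hedged round-trip check (standalone, _textureFov assumed to be PI / 2.5f):

#include <cassert>
#include <cmath>

// Sketch: uv -> hemisphere coordinate -> uv should be the identity for the sin/asin pair above.
static void checkUvRoundTrip(float u, float textureFov) {
    float p = sinf((u - 0.5f) * textureFov);      // computeOculusPickRay direction component
    float uBack = asinf(p) / textureFov + 0.5f;   // renderTexturedHemisphere texture coordinate
    assert(fabsf(uBack - u) < 1e-5f);             // holds for u in [0, 1] since the angle stays within +/- pi/2
}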


@ -46,13 +46,22 @@ private:
typedef QPair<GLuint, GLuint> VerticesIndices;
void renderPointers();
void renderControllerPointer();
void renderMagnifier(int mouseX, int mouseY);
void renderAudioMeter();
void renderStatsAndLogs();
void renderTexturedHemisphere();
QOpenGLFramebufferObject* _framebufferObject;
float _trailingAudioLoudness;
float _oculusAngle;
float _distance;
float _textureFov;
UIType _uiType;
int _mouseX[2];
int _mouseY[2];
int _numMagnifiers;
};
#endif // hifi_ApplicationOverlay_h


@ -281,7 +281,7 @@ void NodeList::processSTUNResponse(const QByteArray& packet) {
int byteIndex = attributeStartIndex + NUM_BYTES_STUN_ATTR_TYPE_AND_LENGTH + NUM_BYTES_FAMILY_ALIGN;
uint8_t addressFamily = 0;
memcpy(&addressFamily, packet.data(), sizeof(addressFamily));
memcpy(&addressFamily, packet.data() + byteIndex, sizeof(addressFamily));
byteIndex += sizeof(addressFamily);
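
The one-line NodeList fix above matters because the address family sits at an offset inside the STUN attribute; reading from packet.data() instead of packet.data() + byteIndex was picking up the first byte of the packet rather than the field. The safe habit is to always read at the running index and then advance it, e.g. (hypothetical helper):

#include <cstdint>
#include <cstring>

// Sketch: read a one-byte field at the current offset into a raw buffer, then advance the offset.
static uint8_t readUint8At(const char* data, int& byteIndex) {
    uint8_t value = 0;
    memcpy(&value, data + byteIndex, sizeof(value));   // read at the offset, not at data[0]
    byteIndex += sizeof(value);                        // advance past the field just read
    return value;
}
// usage: uint8_t addressFamily = readUint8At(packet.data(), byteIndex);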


@ -19,71 +19,70 @@
#include "HifiConfigVariantMap.h"
QVariantMap HifiConfigVariantMap::mergeCLParametersWithJSONConfig(const QStringList& argumentList) {
QVariantMap mergedMap;
// Add anything in the CL parameter list to the variant map.
// Take anything with a dash in it as a key, and the values after it as the value.
const QString DASHED_KEY_REGEX_STRING = "(^-{1,2})([\\w-]+)";
QRegExp dashedKeyRegex(DASHED_KEY_REGEX_STRING);
int keyIndex = argumentList.indexOf(dashedKeyRegex);
int nextKeyIndex = 0;
// check if there is a config file to read where we can pull config info not passed on command line
const QString CONFIG_FILE_OPTION = "--config";
while (keyIndex != -1) {
if (argumentList[keyIndex] != CONFIG_FILE_OPTION) {
// we have a key - look forward to see how many values are associated with it
QString key = dashedKeyRegex.cap(2);
nextKeyIndex = argumentList.indexOf(dashedKeyRegex, keyIndex + 1);
if (nextKeyIndex == keyIndex + 1 || keyIndex == argumentList.size() - 1) {
// there's no value associated with this option, it's a boolean
// so add it to the variant map with NULL as value
mergedMap.insertMulti(key, QVariant());
// this option is simply a switch, so add it to the map with a value of `true`
mergedMap.insertMulti(key, QVariant(true));
} else {
int maxIndex = (nextKeyIndex == -1) ? argumentList.size() - 1: nextKeyIndex;
int maxIndex = (nextKeyIndex == -1) ? argumentList.size() : nextKeyIndex;
// there's at least one value associated with the option
// pull the first value to start
QString value = argumentList[keyIndex + 1];
// for any extra values, append them, with a space, to the value string
for (int i = keyIndex + 2; i <= maxIndex; i++) {
for (int i = keyIndex + 2; i < maxIndex; i++) {
value += " " + argumentList[i];
}
// add the finalized value to the merged map
mergedMap.insert(key, value);
}
keyIndex = nextKeyIndex;
} else {
keyIndex = argumentList.indexOf(dashedKeyRegex, keyIndex + 1);
}
}
int configIndex = argumentList.indexOf(CONFIG_FILE_OPTION);
if (configIndex != -1) {
// we have a config file - try and read it
QString configFilePath = argumentList[configIndex + 1];
QFile configFile(configFilePath);
if (configFile.exists()) {
qDebug() << "Reading JSON config file at" << configFilePath;
configFile.open(QIODevice::ReadOnly);
QJsonDocument configDocument = QJsonDocument::fromJson(configFile.readAll());
QJsonObject rootObject = configDocument.object();
// enumerate the keys of the configDocument object
foreach(const QString& key, rootObject.keys()) {
if (!mergedMap.contains(key)) {
// no match in existing list, add it
mergedMap.insert(key, QVariant(rootObject[key]));
@ -93,6 +92,6 @@ QVariantMap HifiConfigVariantMap::mergeCLParametersWithJSONConfig(const QStringL
qDebug() << "Could not find JSON config file at" << configFilePath;
}
}
return mergedMap;
}
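
Taken together, the loop above now treats a dashed argument with no following value as a switch (stored as true) and joins everything up to the next dashed argument, or to the end of the list thanks to the corrected maxIndex and loop bound, into one space-separated value. A hedged usage sketch with made-up option values:

#include <QCoreApplication>
#include <QDebug>
#include <QVariant>

#include "HifiConfigVariantMap.h"

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);

    // e.g. launched as: assignment-client -t 2 --pool render farm --verbose --config /tmp/ac.json
    QVariantMap merged = HifiConfigVariantMap::mergeCLParametersWithJSONConfig(app.arguments());

    qDebug() << merged.value("t").toInt();         // 2
    qDebug() << merged.value("pool").toString();   // "render farm" - extra values are space-joined
    qDebug() << merged.value("verbose").toBool();  // true - a dashed option with no value is a switch
    // keys that appear only in /tmp/ac.json are merged in without overriding command-line values
    return 0;
}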