merge from master

Seth Alves 2015-06-18 10:04:14 -07:00
commit b258aeaa3d
87 changed files with 3248 additions and 1697 deletions

View file

@ -9,8 +9,8 @@ if (WIN32)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://static.oculus.com/sdk-downloads/ovr_sdk_win_0.5.0.1.zip
URL_MD5 d3fc4c02db9be5ff08af4ef4c97b32f9
URL http://static.oculus.com/sdk-downloads/0.6.0.0/1431634088/ovr_sdk_win_0.6.0.0.zip
URL_MD5 a3dfdab037a854fdcf7e6033fa8d7028
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""

View file

@ -3,13 +3,13 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL https://github.com/boostorg/config/archive/boost-1.58.0.zip
URL_MD5 42fa673bae2b7645a22736445e80eb8d
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
LOG_DOWNLOAD 1
${EXTERNAL_NAME}
URL https://github.com/boostorg/config/archive/boost-1.58.0.zip
URL_MD5 42fa673bae2b7645a22736445e80eb8d
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
LOG_DOWNLOAD 1
)
ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)

View file

@ -3,13 +3,13 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://softlayer-dal.dl.sourceforge.net/project/oglplus/oglplus-0.61.x/oglplus-0.61.0.zip
URL_MD5 bb55038c36c660d2b6c7be380414fa60
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
LOG_DOWNLOAD 1
${EXTERNAL_NAME}
GIT_REPOSITORY https://github.com/jherico/oglplus.git
GIT_TAG 470d8e56fd6bf3913ceec03d82f42d3bafab2cbe
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
LOG_DOWNLOAD 1
)
ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)

View file

@ -0,0 +1,43 @@
macro(SETUP_HIFI_OPENGL)
if (APPLE)
# link in required OS X frameworks and include the right GL headers
find_library(OpenGL OpenGL)
target_link_libraries(${TARGET_NAME} ${OpenGL})
elseif (WIN32)
add_dependency_external_projects(glew)
find_package(GLEW REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${GLEW_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${GLEW_LIBRARIES} opengl32.lib)
if (USE_NSIGHT)
# try to find the Nsight package and add it to the build if we find it
find_package(NSIGHT)
if (NSIGHT_FOUND)
include_directories(${NSIGHT_INCLUDE_DIRS})
add_definitions(-DNSIGHT_FOUND)
target_link_libraries(${TARGET_NAME} "${NSIGHT_LIBRARIES}")
endif()
endif()
elseif(ANDROID)
target_link_libraries(${TARGET_NAME} "-lGLESv3" "-lEGL")
else()
find_package(OpenGL REQUIRED)
if (${OPENGL_INCLUDE_DIR})
include_directories(SYSTEM "${OPENGL_INCLUDE_DIR}")
endif()
target_link_libraries(${TARGET_NAME} "${OPENGL_LIBRARY}")
target_include_directories(${TARGET_NAME} PUBLIC ${OPENGL_INCLUDE_DIR})
endif()
endmacro()
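
The new SETUP_HIFI_OPENGL macro above only wires up link libraries and include paths per platform. As a hedged sketch of the compile-time counterpart (standard platform GL headers, not code taken from this repository), a target built with it would typically include something like:

#if defined(__APPLE__)
#include <OpenGL/gl.h>       // OS X OpenGL framework linked by the macro
#elif defined(_WIN32)
#include <GL/glew.h>         // GLEW from the glew external project, alongside opengl32.lib
#elif defined(ANDROID)
#include <GLES3/gl3.h>       // GLES3/EGL libraries linked on Android
#include <EGL/egl.h>
#else
#include <GL/gl.h>           // desktop GL located via find_package(OpenGL)
#endif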

View file

@ -11,7 +11,6 @@
Script.load("progress.js");
Script.load("edit.js");
Script.load("selectAudioDevice.js");
Script.load("controllers/hydra/hydraMove.js");
Script.load("inspect.js");
Script.load("lobby.js");
Script.load("notifications.js");

View file

@ -354,7 +354,8 @@
var elZoneAtmosphereScatteringWavelengthsZ = document.getElementById("property-zone-atmosphere-scattering-wavelengths-z");
var elZoneAtmosphereHasStars = document.getElementById("property-zone-atmosphere-has-stars");
var elPolyVoxSelections = document.querySelectorAll(".poly-vox-section");
var elPolyVoxSections = document.querySelectorAll(".poly-vox-section");
allSections.push(elPolyVoxSections);
var elVoxelVolumeSizeX = document.getElementById("property-voxel-volume-size-x");
var elVoxelVolumeSizeY = document.getElementById("property-voxel-volume-size-y");
var elVoxelVolumeSizeZ = document.getElementById("property-voxel-volume-size-z");
@ -602,6 +603,10 @@
elParticleLocalGravity.value = properties.localGravity.toFixed(2);
elParticleRadius.value = properties.particleRadius.toFixed(3);
} else if (properties.type == "PolyVox") {
for (var i = 0; i < elPolyVoxSections.length; i++) {
elPolyVoxSections[i].style.display = 'block';
}
elVoxelVolumeSizeX.value = properties.voxelVolumeSize.x.toFixed(2);
elVoxelVolumeSizeY.value = properties.voxelVolumeSize.y.toFixed(2);
elVoxelVolumeSizeZ.value = properties.voxelVolumeSize.z.toFixed(2);
@ -1014,9 +1019,9 @@
<div class="poly-vox-section property">
<div class="label">Voxel Volume Size</div>
<div class="value">
<div class="input-area">X <br><input class="coord" type='number' id="property-voxel-volume-size-x"></input></div>
<div class="input-area">Y <br><input class="coord" type='number' id="property-voxel-volume-size-y"></input></div>
<div class="input-area">Z <br><input class="coord" type='number' id="property-voxel-volume-size-z"></input></div>
<div class="input-area">X <br> <input class="coord" type='number' id="property-voxel-volume-size-x"></input></div>
<div class="input-area">Y <br><input class="coord" type='number' id="property-voxel-volume-size-y"></input></div>
<div class="input-area">Z <br><input class="coord" type='number' id="property-voxel-volume-size-z"></input></div>
</div>
<div class="label">Surface Extractor</div>

View file

@ -15,7 +15,7 @@
viewBox="0 0 1440 200"
id="svg4136"
inkscape:version="0.91 r13725"
sodipodi:docname="address-bar.svg">
sodipodi:docname="address-bar.002.svg">
<metadata
id="metadata4144">
<rdf:RDF>
@ -39,14 +39,14 @@
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="1840"
inkscape:window-width="1835"
inkscape:window-height="1057"
id="namedview4140"
showgrid="false"
inkscape:zoom="0.8671875"
inkscape:cx="707.02439"
inkscape:zoom="0.61319416"
inkscape:cx="132.58366"
inkscape:cy="52.468468"
inkscape:window-x="72"
inkscape:window-x="77"
inkscape:window-y="-8"
inkscape:window-maximized="1"
inkscape:current-layer="svg4136" />
@ -59,6 +59,15 @@
y="30"
rx="16.025024"
ry="17.019567" />
<rect
style="fill:#dadada;fill-opacity:1;stroke:#cbcbcb;stroke-width:0.33821851;stroke-linecap:square;stroke-linejoin:miter;stroke-miterlimit:10;stroke-dasharray:none;stroke-opacity:1"
id="rect4135"
width="292.86267"
height="139.66179"
x="150.32542"
y="30.169102"
rx="16.817432"
ry="20.612938" />
<circle
style="fill:#b8b8b8;fill-opacity:1;stroke:none;stroke-opacity:1"
id="path4146"
@ -69,4 +78,11 @@
d="m 100,36.000005 c -22.1,0 -40,17.9 -40,39.999995 0,30 40,88 40,88 0,0 40,-58 40,-88 0,-22.099995 -17.9,-39.999995 -40,-39.999995 z m 0,22 c 9.9,0 18,8.099995 18,17.999995 0,9.9 -8.1,18 -18,18 -9.9,0 -18,-8.1 -18,-18 0,-9.9 8.1,-17.999995 18,-17.999995 z"
id="path4138"
inkscape:connector-curvature="0" />
<rect
style="fill:#bdbdbd;fill-opacity:1;stroke:none;stroke-width:0.30000001;stroke-linecap:square;stroke-linejoin:miter;stroke-miterlimit:10;stroke-dasharray:none;stroke-opacity:1"
id="rect4136"
width="4"
height="100"
x="310.12924"
y="50" />
</svg>

Image preview omitted (2.2 KiB before, 2.8 KiB after).

Binary file added (369 B); contents not shown.

View file

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 18.1.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns:xl="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="133.1 714.2 21.3 33.4"
enable-background="new 133.1 714.2 21.3 33.4" xml:space="preserve">
<g>
<g>
<path fill="#535353" d="M133.1,714.2l21.3,16.7l-21.3,16.7V714.2z"/>
</g>
</g>
</svg>

Image preview omitted (new file, 501 B).

View file

@ -0,0 +1,50 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Generator: Adobe Illustrator 18.1.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
version="1.1"
id="Layer_1"
x="0px"
y="0px"
viewBox="133.1 714.2 21.3 33.4"
enable-background="new 133.1 714.2 21.3 33.4"
xml:space="preserve"
inkscape:version="0.91 r13725"
sodipodi:docname="left-arrow.svg"><metadata
id="metadata13"><rdf:RDF><cc:Work
rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" /></cc:Work></rdf:RDF></metadata><defs
id="defs11" /><sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="1835"
inkscape:window-height="1057"
id="namedview9"
showgrid="false"
inkscape:zoom="7.0658679"
inkscape:cx="10.65"
inkscape:cy="16.700001"
inkscape:window-x="77"
inkscape:window-y="-8"
inkscape:window-maximized="1"
inkscape:current-layer="Layer_1" /><g
id="g3"
transform="matrix(-1,0,0,1,287.5,0)"><g
id="g5"><path
d="m 133.1,714.2 21.3,16.7 -21.3,16.7 0,-33.4 z"
id="path7"
inkscape:connector-curvature="0"
style="fill:#535353" /></g></g></svg>

Image preview omitted (new file, 1.8 KiB).

Binary file added (369 B); contents not shown.

View file

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 18.1.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns:xl="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="133.1 714.2 21.3 33.4"
enable-background="new 133.1 714.2 21.3 33.4" xml:space="preserve">
<g>
<g>
<path fill="#7E7E7E" d="M133.1,714.2l21.3,16.7l-21.3,16.7V714.2z"/>
</g>
</g>
</svg>

Image preview omitted (new file, 501 B).

Binary file added (127 B); contents not shown.

View file

@ -0,0 +1,3 @@
<?xml version="1.0"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xl="http://www.w3.org/1999/xlink" version="1.1" viewBox="344 454 26 74" width="26pt" height="74pt"><metadata xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:date>2015-06-12 18:23Z</dc:date><!-- Produced by OmniGraffle Professional 5.4.4 --></metadata><defs></defs><g stroke="none" stroke-opacity="1" stroke-dasharray="none" fill="none" fill-opacity="1"><title>Canvas 1</title><rect fill="white" width="1728" height="1466"/><g><title> Navi Bar</title><line x1="356.58927" y1="466.42861" x2="356.58927" y2="515.4286" stroke="#b3b3b3" stroke-linecap="butt" stroke-linejoin="miter" stroke-width="3"/></g></g></svg>

Image preview omitted (new file, 778 B).

View file

@ -45,19 +45,64 @@ DialogContainer {
property int inputAreaHeight: 56.0 * root.scale // Height of the background's input area
property int inputAreaStep: (height - inputAreaHeight) / 2
Image {
id: backArrow
source: "../images/left-arrow.svg"
scale: 0.9
anchors {
fill: parent
leftMargin: parent.height + hifi.layout.spacing + 6
rightMargin: parent.height + hifi.layout.spacing * 60
topMargin: parent.inputAreaStep + parent.inputAreaStep + hifi.layout.spacing
bottomMargin: parent.inputAreaStep + parent.inputAreaStep + hifi.layout.spacing
}
MouseArea {
anchors.fill: parent
acceptedButtons: Qt.LeftButton
onClicked: {
addressBarDialog.loadBack()
}
}
}
Image {
id: forwardArrow
source: "../images/darkgreyarrow.svg"
anchors {
fill: parent
leftMargin: parent.height + hifi.layout.spacing * 9
rightMargin: parent.height + hifi.layout.spacing * 53
topMargin: parent.inputAreaStep + parent.inputAreaStep + hifi.layout.spacing
bottomMargin: parent.inputAreaStep + parent.inputAreaStep + hifi.layout.spacing
}
MouseArea {
anchors.fill: parent
acceptedButtons: Qt.LeftButton
onClicked: {
addressBarDialog.loadForward()
}
}
}
TextInput {
id: addressLine
anchors {
fill: parent
leftMargin: parent.height + hifi.layout.spacing * 2
leftMargin: parent.height + parent.height + hifi.layout.spacing * 5
rightMargin: hifi.layout.spacing * 2
topMargin: parent.inputAreaStep + hifi.layout.spacing
bottomMargin: parent.inputAreaStep + hifi.layout.spacing
}
font.pixelSize: hifi.fonts.pixelSize * root.scale
font.pixelSize: hifi.fonts.pixelSize * root.scale * 0.75
helperText: "Go to: place, @user, /path, network address"
@ -66,7 +111,7 @@ DialogContainer {
addressBarDialog.loadAddress(addressLine.text)
}
}
MouseArea {
// Drag the icon
width: parent.height
@ -82,6 +127,7 @@ DialogContainer {
}
}
/*
MouseArea {
// Drag the input rectangle
width: parent.width - parent.height
@ -95,7 +141,7 @@ DialogContainer {
maximumX: root.parent ? root.maximumX : 0
maximumY: root.parent ? root.maximumY + parent.inputAreaStep : 0
}
}
}*/
}
}

View file

@ -115,6 +115,7 @@
#include "devices/Faceshift.h"
#include "devices/Leapmotion.h"
#include "devices/RealSense.h"
#include "devices/SDL2Manager.h"
#include "devices/MIDIManager.h"
#include "devices/OculusManager.h"
#include "devices/TV3DManager.h"
@ -129,7 +130,6 @@
#include "scripting/AudioDeviceScriptingInterface.h"
#include "scripting/ClipboardScriptingInterface.h"
#include "scripting/HMDScriptingInterface.h"
#include "scripting/JoystickScriptingInterface.h"
#include "scripting/GlobalServicesScriptingInterface.h"
#include "scripting/LocationScriptingInterface.h"
#include "scripting/MenuScriptingInterface.h"
@ -196,7 +196,6 @@ const QString SKIP_FILENAME = QStandardPaths::writableLocation(QStandardPaths::D
const QString DEFAULT_SCRIPTS_JS_URL = "http://s3.amazonaws.com/hifi-public/scripts/defaultScripts.js";
Setting::Handle<int> maxOctreePacketsPerSecond("maxOctreePPS", DEFAULT_MAX_OCTREE_PPS);
#ifdef Q_OS_WIN
class MyNativeEventFilter : public QAbstractNativeEventFilter {
public:
@ -889,6 +888,9 @@ void Application::paintGL() {
}
glEnable(GL_LINE_SMOOTH);
Menu::getInstance()->setIsOptionChecked("First Person", _myAvatar->getBoomLength() <= MyAvatar::ZOOM_MIN);
Application::getInstance()->cameraMenuChanged();
if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON) {
// Always use the default eye position, not the actual head eye position.
@ -907,9 +909,8 @@ void Application::paintGL() {
}
} else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
static const float THIRD_PERSON_CAMERA_DISTANCE = 1.5f;
_myCamera.setPosition(_myAvatar->getDefaultEyePosition() +
_myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, 1.0f) * THIRD_PERSON_CAMERA_DISTANCE * _myAvatar->getScale());
_myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, 1.0f) * _myAvatar->getBoomLength() * _myAvatar->getScale());
if (OculusManager::isConnected()) {
_myCamera.setRotation(_myAvatar->getWorldAlignedOrientation());
} else {
@ -1492,6 +1493,8 @@ void Application::keyReleaseEvent(QKeyEvent* event) {
void Application::focusOutEvent(QFocusEvent* event) {
_keyboardMouseDevice.focusOutEvent(event);
SixenseManager::getInstance().focusOutEvent();
SDL2Manager::getInstance()->focusOutEvent();
// synthesize events for keys currently pressed, since we may not get their release events
foreach (int key, _keysPressed) {
@ -1927,11 +1930,15 @@ void Application::setEnableVRMode(bool enableVRMode) {
// attempt to reconnect the Oculus manager - it's possible this was a workaround
// for the sixense crash
OculusManager::disconnect();
OculusManager::connect();
OculusManager::connect(_glWidget->context()->contextHandle());
_glWidget->setFocus();
_glWidget->makeCurrent();
glClear(GL_COLOR_BUFFER_BIT);
}
OculusManager::recalibrate();
} else {
OculusManager::abandonCalibration();
OculusManager::disconnect();
_mirrorCamera.setHmdPosition(glm::vec3());
_mirrorCamera.setHmdRotation(glm::quat());
@ -2167,13 +2174,6 @@ void Application::init() {
_mirrorCamera.setMode(CAMERA_MODE_MIRROR);
OculusManager::connect();
if (OculusManager::isConnected()) {
QMetaObject::invokeMethod(Menu::getInstance()->getActionForOption(MenuOption::Fullscreen),
"trigger",
Qt::QueuedConnection);
}
TV3DManager::connect();
if (TV3DManager::isConnected()) {
QMetaObject::invokeMethod(Menu::getInstance()->getActionForOption(MenuOption::Fullscreen),
@ -2406,10 +2406,14 @@ void Application::cameraMenuChanged() {
} else if (Menu::getInstance()->isOptionChecked(MenuOption::FirstPerson)) {
if (_myCamera.getMode() != CAMERA_MODE_FIRST_PERSON) {
_myCamera.setMode(CAMERA_MODE_FIRST_PERSON);
_myAvatar->setBoomLength(MyAvatar::ZOOM_MIN);
}
} else {
if (_myCamera.getMode() != CAMERA_MODE_THIRD_PERSON) {
_myCamera.setMode(CAMERA_MODE_THIRD_PERSON);
if (_myAvatar->getBoomLength() == MyAvatar::ZOOM_MIN) {
_myAvatar->setBoomLength(MyAvatar::ZOOM_DEFAULT);
}
}
}
}
@ -2487,7 +2491,7 @@ void Application::update(float deltaTime) {
}
SixenseManager::getInstance().update(deltaTime);
JoystickScriptingInterface::getInstance().update();
SDL2Manager::getInstance()->update();
}
_userInputMapper.update(deltaTime);
@ -2508,7 +2512,8 @@ void Application::update(float deltaTime) {
_myAvatar->setDriveKeys(ROT_DOWN, _userInputMapper.getActionState(UserInputMapper::PITCH_DOWN));
_myAvatar->setDriveKeys(ROT_LEFT, _userInputMapper.getActionState(UserInputMapper::YAW_LEFT));
_myAvatar->setDriveKeys(ROT_RIGHT, _userInputMapper.getActionState(UserInputMapper::YAW_RIGHT));
_myAvatar->setDriveKeys(BOOM_IN, _userInputMapper.getActionState(UserInputMapper::BOOM_IN));
_myAvatar->setDriveKeys(BOOM_OUT, _userInputMapper.getActionState(UserInputMapper::BOOM_OUT));
updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...
@ -3260,6 +3265,9 @@ namespace render {
template <> const Item::Bound payloadGetBound(const BackgroundRenderData::Pointer& stuff) { return Item::Bound(); }
template <> void payloadRender(const BackgroundRenderData::Pointer& background, RenderArgs* args) {
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
// Background rendering decision
auto skyStage = DependencyManager::get<SceneScriptingInterface>()->getSkyStage();
auto skybox = model::SkyboxPointer();
@ -3327,7 +3335,8 @@ namespace render {
PerformanceTimer perfTimer("atmosphere");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::displaySide() ... atmosphere...");
background->_environment->renderAtmospheres(*(args->_viewFrustum));
background->_environment->renderAtmospheres(batch, *(args->_viewFrustum));
}
}
@ -3336,13 +3345,13 @@ namespace render {
skybox = skyStage->getSkybox();
if (skybox) {
gpu::Batch batch;
model::Skybox::render(batch, *(Application::getInstance()->getDisplayViewFrustum()), *skybox);
gpu::GLBackend::renderBatch(batch, true);
glUseProgram(0);
}
}
// FIX ME - If I don't call this renderBatch() here, then the atmosphere and skybox don't render, but it
// seems like these payloadRender() methods shouldn't be doing this. We need to investigate why the engine
// isn't rendering our batch
gpu::GLBackend::renderBatch(batch, true);
}
}
@ -4078,7 +4087,6 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
scriptEngine->registerGlobalObject("AvatarManager", DependencyManager::get<AvatarManager>().data());
scriptEngine->registerGlobalObject("Joysticks", &JoystickScriptingInterface::getInstance());
qScriptRegisterMetaType(scriptEngine, joystickToScriptValue, joystickFromScriptValue);
scriptEngine->registerGlobalObject("UndoStack", &_undoStackScriptingInterface);

View file

@ -52,6 +52,8 @@ enum DriveKeys {
ROT_RIGHT,
ROT_UP,
ROT_DOWN,
BOOM_IN,
BOOM_OUT,
MAX_DRIVE_KEYS
};

View file

@ -70,6 +70,10 @@ const int SCRIPTED_MOTOR_CAMERA_FRAME = 0;
const int SCRIPTED_MOTOR_AVATAR_FRAME = 1;
const int SCRIPTED_MOTOR_WORLD_FRAME = 2;
const float MyAvatar::ZOOM_MIN = 0.5f;
const float MyAvatar::ZOOM_MAX = 10.0f;
const float MyAvatar::ZOOM_DEFAULT = 1.5f;
MyAvatar::MyAvatar() :
Avatar(),
_turningKeyPressTime(0.0f),
@ -77,6 +81,7 @@ MyAvatar::MyAvatar() :
_wasPushing(false),
_isPushing(false),
_isBraking(false),
_boomLength(ZOOM_DEFAULT),
_trapDuration(0.0f),
_thrust(0.0f),
_keyboardMotorVelocity(0.0f),
@ -1347,6 +1352,9 @@ glm::vec3 MyAvatar::applyKeyboardMotor(float deltaTime, const glm::vec3& localVe
}
}
}
_boomLength += _driveKeys[BOOM_OUT] - _driveKeys[BOOM_IN];
_boomLength = glm::clamp<float>(_boomLength, ZOOM_MIN, ZOOM_MAX);
return newLocalVelocity;
}
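
The two lines added to applyKeyboardMotor() are the whole zoom model: the BOOM_IN and BOOM_OUT drive keys nudge _boomLength each frame, clamped to the zoom limits. A standalone sketch, assuming drive-key values in [0, 1] per frame (constants match MyAvatar.cpp; the helper is an illustration, not the class code):

#include <glm/glm.hpp>

const float ZOOM_MIN = 0.5f;    // limits from MyAvatar.cpp above
const float ZOOM_MAX = 10.0f;

float updateBoomLength(float boomLength, float boomOut, float boomIn) {
    boomLength += boomOut - boomIn;   // zoom out lengthens the boom, zoom in shortens it
    return glm::clamp(boomLength, ZOOM_MIN, ZOOM_MAX);
}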

View file

@ -162,6 +162,13 @@ public:
const RecorderPointer getRecorder() const { return _recorder; }
const PlayerPointer getPlayer() const { return _player; }
float getBoomLength() const { return _boomLength; }
void setBoomLength(float boomLength) { _boomLength = boomLength; }
static const float ZOOM_MIN;
static const float ZOOM_MAX;
static const float ZOOM_DEFAULT;
public slots:
void increaseSize();
void decreaseSize();
@ -210,6 +217,8 @@ private:
bool _wasPushing;
bool _isPushing;
bool _isBraking;
float _boomLength;
float _trapDuration; // seconds that avatar has been trapped by collisions
glm::vec3 _thrust; // impulse accumulator for outside sources

View file

@ -13,8 +13,11 @@
#include <glm/glm.hpp>
#include "Application.h"
#include "Joystick.h"
const float CONTROLLER_THRESHOLD = 0.25f;
#ifdef HAVE_SDL2
const float MAX_AXIS = 32768.0f;
@ -22,10 +25,7 @@ const float MAX_AXIS = 32768.0f;
Joystick::Joystick(SDL_JoystickID instanceId, const QString& name, SDL_GameController* sdlGameController) :
_sdlGameController(sdlGameController),
_sdlJoystick(SDL_GameControllerGetJoystick(_sdlGameController)),
_instanceId(instanceId),
_name(name),
_axes(QVector<float>(SDL_JoystickNumAxes(_sdlJoystick))),
_buttons(QVector<bool>(SDL_JoystickNumButtons(_sdlJoystick)))
_instanceId(instanceId)
{
}
@ -42,24 +42,204 @@ void Joystick::closeJoystick() {
#endif
}
void Joystick::update() {
for (auto axisState : _axisStateMap) {
if (fabsf(axisState.second) < CONTROLLER_THRESHOLD) {
_axisStateMap[axisState.first] = 0.0f;
}
}
}
void Joystick::focusOutEvent() {
_axisStateMap.clear();
_buttonPressedMap.clear();
};
#ifdef HAVE_SDL2
void Joystick::handleAxisEvent(const SDL_ControllerAxisEvent& event) {
if (_axes.size() <= event.axis) {
_axes.resize(event.axis + 1);
SDL_GameControllerAxis axis = (SDL_GameControllerAxis) event.axis;
switch (axis) {
case SDL_CONTROLLER_AXIS_LEFTX:
_axisStateMap[makeInput(LEFT_AXIS_X_POS).getChannel()] = (event.value > 0.0f) ? event.value / MAX_AXIS : 0.0f;
_axisStateMap[makeInput(LEFT_AXIS_X_NEG).getChannel()] = (event.value < 0.0f) ? -event.value / MAX_AXIS : 0.0f;
break;
case SDL_CONTROLLER_AXIS_LEFTY:
_axisStateMap[makeInput(LEFT_AXIS_Y_POS).getChannel()] = (event.value > 0.0f) ? event.value / MAX_AXIS : 0.0f;
_axisStateMap[makeInput(LEFT_AXIS_Y_NEG).getChannel()] = (event.value < 0.0f) ? -event.value / MAX_AXIS : 0.0f;
break;
case SDL_CONTROLLER_AXIS_RIGHTX:
_axisStateMap[makeInput(RIGHT_AXIS_X_POS).getChannel()] = (event.value > 0.0f) ? event.value / MAX_AXIS : 0.0f;
_axisStateMap[makeInput(RIGHT_AXIS_X_NEG).getChannel()] = (event.value < 0.0f) ? -event.value / MAX_AXIS : 0.0f;
break;
case SDL_CONTROLLER_AXIS_RIGHTY:
_axisStateMap[makeInput(RIGHT_AXIS_Y_POS).getChannel()] = (event.value > 0.0f) ? event.value / MAX_AXIS : 0.0f;
_axisStateMap[makeInput(RIGHT_AXIS_Y_NEG).getChannel()] = (event.value < 0.0f) ? -event.value / MAX_AXIS : 0.0f;
break;
case SDL_CONTROLLER_AXIS_TRIGGERRIGHT:
_axisStateMap[makeInput(RIGHT_SHOULDER).getChannel()] = event.value / MAX_AXIS;
break;
case SDL_CONTROLLER_AXIS_TRIGGERLEFT:
_axisStateMap[makeInput(LEFT_SHOULDER).getChannel()] = event.value / MAX_AXIS;
break;
default:
break;
}
float oldValue = _axes[event.axis];
float newValue = event.value / MAX_AXIS;
_axes[event.axis] = newValue;
emit axisValueChanged(event.axis, newValue, oldValue);
}
void Joystick::handleButtonEvent(const SDL_ControllerButtonEvent& event) {
bool oldValue = _buttons[event.button];
auto input = makeInput((SDL_GameControllerButton) event.button);
bool newValue = event.state == SDL_PRESSED;
_buttons[event.button] = newValue;
emit buttonStateChanged(event.button, newValue, oldValue);
if (newValue) {
_buttonPressedMap.insert(input.getChannel());
} else {
_buttonPressedMap.erase(input.getChannel());
}
}
#endif
void Joystick::registerToUserInputMapper(UserInputMapper& mapper) {
// Grab the current free device ID
_deviceID = mapper.getFreeDeviceID();
auto proxy = UserInputMapper::DeviceProxy::Pointer(new UserInputMapper::DeviceProxy(_name));
proxy->getButton = [this] (const UserInputMapper::Input& input, int timestamp) -> bool { return this->getButton(input.getChannel()); };
proxy->getAxis = [this] (const UserInputMapper::Input& input, int timestamp) -> float { return this->getAxis(input.getChannel()); };
proxy->getAvailabeInputs = [this] () -> QVector<UserInputMapper::InputPair> {
QVector<UserInputMapper::InputPair> availableInputs;
#ifdef HAVE_SDL2
availableInputs.append(UserInputMapper::InputPair(makeInput(SDL_CONTROLLER_BUTTON_A), "Bottom Button"));
availableInputs.append(UserInputMapper::InputPair(makeInput(SDL_CONTROLLER_BUTTON_B), "Right Button"));
availableInputs.append(UserInputMapper::InputPair(makeInput(SDL_CONTROLLER_BUTTON_X), "Left Button"));
availableInputs.append(UserInputMapper::InputPair(makeInput(SDL_CONTROLLER_BUTTON_Y), "Top Button"));
availableInputs.append(UserInputMapper::InputPair(makeInput(SDL_CONTROLLER_BUTTON_DPAD_UP), "DPad Up"));
availableInputs.append(UserInputMapper::InputPair(makeInput(SDL_CONTROLLER_BUTTON_DPAD_DOWN), "DPad Down"));
availableInputs.append(UserInputMapper::InputPair(makeInput(SDL_CONTROLLER_BUTTON_DPAD_LEFT), "DPad Left"));
availableInputs.append(UserInputMapper::InputPair(makeInput(SDL_CONTROLLER_BUTTON_DPAD_RIGHT), "DPad Right"));
availableInputs.append(UserInputMapper::InputPair(makeInput(SDL_CONTROLLER_BUTTON_LEFTSHOULDER), "L1"));
availableInputs.append(UserInputMapper::InputPair(makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), "R1"));
availableInputs.append(UserInputMapper::InputPair(makeInput(RIGHT_SHOULDER), "L2"));
availableInputs.append(UserInputMapper::InputPair(makeInput(LEFT_SHOULDER), "R2"));
availableInputs.append(UserInputMapper::InputPair(makeInput(LEFT_AXIS_Y_NEG), "Left Stick Up"));
availableInputs.append(UserInputMapper::InputPair(makeInput(LEFT_AXIS_Y_POS), "Left Stick Down"));
availableInputs.append(UserInputMapper::InputPair(makeInput(LEFT_AXIS_X_POS), "Left Stick Right"));
availableInputs.append(UserInputMapper::InputPair(makeInput(LEFT_AXIS_X_NEG), "Left Stick Left"));
availableInputs.append(UserInputMapper::InputPair(makeInput(RIGHT_AXIS_Y_NEG), "Right Stick Up"));
availableInputs.append(UserInputMapper::InputPair(makeInput(RIGHT_AXIS_Y_POS), "Right Stick Down"));
availableInputs.append(UserInputMapper::InputPair(makeInput(RIGHT_AXIS_X_POS), "Right Stick Right"));
availableInputs.append(UserInputMapper::InputPair(makeInput(RIGHT_AXIS_X_NEG), "Right Stick Left"));
#endif
return availableInputs;
};
proxy->resetDeviceBindings = [this, &mapper] () -> bool {
mapper.removeAllInputChannelsForDevice(_deviceID);
this->assignDefaultInputMapping(mapper);
return true;
};
mapper.registerDevice(_deviceID, proxy);
}
void Joystick::assignDefaultInputMapping(UserInputMapper& mapper) {
#ifdef HAVE_SDL2
const float JOYSTICK_MOVE_SPEED = 1.0f;
const float DPAD_MOVE_SPEED = 0.5f;
const float JOYSTICK_YAW_SPEED = 0.5f;
const float JOYSTICK_PITCH_SPEED = 0.25f;
const float BOOM_SPEED = 0.1f;
// Y axes are flipped (up is negative)
// Left Joystick: Movement, strafing
mapper.addInputChannel(UserInputMapper::LONGITUDINAL_FORWARD, makeInput(LEFT_AXIS_Y_NEG), JOYSTICK_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::LONGITUDINAL_BACKWARD, makeInput(LEFT_AXIS_Y_POS), JOYSTICK_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_RIGHT, makeInput(LEFT_AXIS_X_POS), JOYSTICK_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_LEFT, makeInput(LEFT_AXIS_X_NEG), JOYSTICK_MOVE_SPEED);
// Right Joystick: Camera orientation
mapper.addInputChannel(UserInputMapper::YAW_RIGHT, makeInput(RIGHT_AXIS_X_POS), JOYSTICK_YAW_SPEED);
mapper.addInputChannel(UserInputMapper::YAW_LEFT, makeInput(RIGHT_AXIS_X_NEG), JOYSTICK_YAW_SPEED);
mapper.addInputChannel(UserInputMapper::PITCH_UP, makeInput(RIGHT_AXIS_Y_NEG), JOYSTICK_PITCH_SPEED);
mapper.addInputChannel(UserInputMapper::PITCH_DOWN, makeInput(RIGHT_AXIS_Y_POS), JOYSTICK_PITCH_SPEED);
// Dpad movement
mapper.addInputChannel(UserInputMapper::LONGITUDINAL_FORWARD, makeInput(SDL_CONTROLLER_BUTTON_DPAD_UP), DPAD_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::LONGITUDINAL_BACKWARD, makeInput(SDL_CONTROLLER_BUTTON_DPAD_DOWN), DPAD_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_RIGHT, makeInput(SDL_CONTROLLER_BUTTON_DPAD_RIGHT), DPAD_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_LEFT, makeInput(SDL_CONTROLLER_BUTTON_DPAD_LEFT), DPAD_MOVE_SPEED);
// Button controls
mapper.addInputChannel(UserInputMapper::VERTICAL_UP, makeInput(SDL_CONTROLLER_BUTTON_Y), DPAD_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::VERTICAL_DOWN, makeInput(SDL_CONTROLLER_BUTTON_A), DPAD_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::YAW_LEFT, makeInput(SDL_CONTROLLER_BUTTON_X), JOYSTICK_YAW_SPEED);
mapper.addInputChannel(UserInputMapper::YAW_RIGHT, makeInput(SDL_CONTROLLER_BUTTON_B), JOYSTICK_YAW_SPEED);
// Zoom
mapper.addInputChannel(UserInputMapper::BOOM_IN, makeInput(RIGHT_SHOULDER), BOOM_SPEED);
mapper.addInputChannel(UserInputMapper::BOOM_OUT, makeInput(LEFT_SHOULDER), BOOM_SPEED);
// Hold front right shoulder button for precision controls
// Left Joystick: Movement, strafing
mapper.addInputChannel(UserInputMapper::LONGITUDINAL_FORWARD, makeInput(LEFT_AXIS_Y_NEG), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), JOYSTICK_MOVE_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::LONGITUDINAL_BACKWARD, makeInput(LEFT_AXIS_Y_POS), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), JOYSTICK_MOVE_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::LATERAL_RIGHT, makeInput(LEFT_AXIS_X_POS), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), JOYSTICK_MOVE_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::LATERAL_LEFT, makeInput(LEFT_AXIS_X_NEG), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), JOYSTICK_MOVE_SPEED/2.0f);
// Right Joystick: Camera orientation
mapper.addInputChannel(UserInputMapper::YAW_RIGHT, makeInput(RIGHT_AXIS_X_POS), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), JOYSTICK_YAW_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::YAW_LEFT, makeInput(RIGHT_AXIS_X_NEG), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), JOYSTICK_YAW_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::PITCH_UP, makeInput(RIGHT_AXIS_Y_NEG), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), JOYSTICK_PITCH_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::PITCH_DOWN, makeInput(RIGHT_AXIS_Y_POS), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), JOYSTICK_PITCH_SPEED/2.0f);
// Dpad movement
mapper.addInputChannel(UserInputMapper::LONGITUDINAL_FORWARD, makeInput(SDL_CONTROLLER_BUTTON_DPAD_UP), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), DPAD_MOVE_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::LONGITUDINAL_BACKWARD, makeInput(SDL_CONTROLLER_BUTTON_DPAD_DOWN), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), DPAD_MOVE_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::LATERAL_RIGHT, makeInput(SDL_CONTROLLER_BUTTON_DPAD_RIGHT), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), DPAD_MOVE_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::LATERAL_LEFT, makeInput(SDL_CONTROLLER_BUTTON_DPAD_LEFT), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), DPAD_MOVE_SPEED/2.0f);
// Button controls
mapper.addInputChannel(UserInputMapper::VERTICAL_UP, makeInput(SDL_CONTROLLER_BUTTON_Y), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), DPAD_MOVE_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::VERTICAL_DOWN, makeInput(SDL_CONTROLLER_BUTTON_A), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), DPAD_MOVE_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::YAW_LEFT, makeInput(SDL_CONTROLLER_BUTTON_X), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), JOYSTICK_YAW_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::YAW_RIGHT, makeInput(SDL_CONTROLLER_BUTTON_B), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), JOYSTICK_YAW_SPEED/2.0f);
// Zoom
mapper.addInputChannel(UserInputMapper::BOOM_IN, makeInput(RIGHT_SHOULDER), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), BOOM_SPEED/2.0f);
mapper.addInputChannel(UserInputMapper::BOOM_OUT, makeInput(LEFT_SHOULDER), makeInput(SDL_CONTROLLER_BUTTON_RIGHTSHOULDER), BOOM_SPEED/2.0f);
#endif
}
float Joystick::getButton(int channel) const {
if (!_buttonPressedMap.empty()) {
if (_buttonPressedMap.find(channel) != _buttonPressedMap.end()) {
return 1.0f;
} else {
return 0.0f;
}
}
return 0.0f;
}
float Joystick::getAxis(int channel) const {
auto axis = _axisStateMap.find(channel);
if (axis != _axisStateMap.end()) {
return (*axis).second;
} else {
return 0.0f;
}
}
#ifdef HAVE_SDL2
UserInputMapper::Input Joystick::makeInput(SDL_GameControllerButton button) {
return UserInputMapper::Input(_deviceID, button, UserInputMapper::ChannelType::BUTTON);
}
#endif
UserInputMapper::Input Joystick::makeInput(Joystick::JoystickAxisChannel axis) {
return UserInputMapper::Input(_deviceID, axis, UserInputMapper::ChannelType::AXIS);
}
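
Two small ideas carry most of the new axis handling above: a signed SDL axis value is normalized by MAX_AXIS and split into separate positive and negative channels, and near-zero state is flushed against CONTROLLER_THRESHOLD on every update. A self-contained sketch of both (constants match the diff; the SplitAxis struct and helper names are illustrative only):

#include <cmath>
#include <cstdint>

const float MAX_AXIS = 32768.0f;           // SDL axis range, as in Joystick.cpp
const float CONTROLLER_THRESHOLD = 0.25f;  // dead zone, as in Joystick.cpp

struct SplitAxis { float positive; float negative; };

// Map a signed SDL axis value onto two [0, 1] channels, one per direction.
SplitAxis splitAxis(int16_t rawValue) {
    SplitAxis out;
    out.positive = (rawValue > 0) ? rawValue / MAX_AXIS : 0.0f;
    out.negative = (rawValue < 0) ? -rawValue / MAX_AXIS : 0.0f;
    return out;
}

// Zero out values inside the dead zone, as Joystick::update() does per frame.
float applyDeadZone(float value) {
    return (std::fabs(value) < CONTROLLER_THRESHOLD) ? 0.0f : value;
}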

View file

@ -20,6 +20,8 @@
#undef main
#endif
#include "ui/UserInputMapper.h"
class Joystick : public QObject {
Q_OBJECT
@ -29,12 +31,40 @@ class Joystick : public QObject {
Q_PROPERTY(int instanceId READ getInstanceId)
#endif
Q_PROPERTY(int numAxes READ getNumAxes)
Q_PROPERTY(int numButtons READ getNumButtons)
public:
enum JoystickAxisChannel {
LEFT_AXIS_X_POS = 0,
LEFT_AXIS_X_NEG,
LEFT_AXIS_Y_POS,
LEFT_AXIS_Y_NEG,
RIGHT_AXIS_X_POS,
RIGHT_AXIS_X_NEG,
RIGHT_AXIS_Y_POS,
RIGHT_AXIS_Y_NEG,
RIGHT_SHOULDER,
LEFT_SHOULDER,
};
Joystick();
~Joystick();
typedef std::unordered_set<int> ButtonPressedMap;
typedef std::map<int, float> AxisStateMap;
float getButton(int channel) const;
float getAxis(int channel) const;
#ifdef HAVE_SDL2
UserInputMapper::Input makeInput(SDL_GameControllerButton button);
#endif
UserInputMapper::Input makeInput(Joystick::JoystickAxisChannel axis);
void registerToUserInputMapper(UserInputMapper& mapper);
void assignDefaultInputMapping(UserInputMapper& mapper);
void update();
void focusOutEvent();
#ifdef HAVE_SDL2
Joystick(SDL_JoystickID instanceId, const QString& name, SDL_GameController* sdlGameController);
#endif
@ -51,15 +81,8 @@ public:
int getInstanceId() const { return _instanceId; }
#endif
const QVector<float>& getAxes() const { return _axes; }
const QVector<bool>& getButtons() const { return _buttons; }
int getDeviceID() { return _deviceID; }
int getNumAxes() const { return _axes.size(); }
int getNumButtons() const { return _buttons.size(); }
signals:
void axisValueChanged(int axis, float newValue, float oldValue);
void buttonStateChanged(int button, float newValue, float oldValue);
private:
#ifdef HAVE_SDL2
SDL_GameController* _sdlGameController;
@ -68,8 +91,12 @@ private:
#endif
QString _name;
QVector<float> _axes;
QVector<bool> _buttons;
protected:
int _deviceID = 0;
ButtonPressedMap _buttonPressedMap;
AxisStateMap _axisStateMap;
};
#endif // hifi_JoystickTracker_h
#endif // hifi_Joystick_h
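
For context on how the reworked Joystick is meant to be driven (per-axis signals are gone in favor of UserInputMapper registration), here is a hedged sketch of the open-and-register flow an SDL2 manager might use. The SDL calls are standard SDL2 and the Joystick/UserInputMapper methods come from the headers above; the wrapper function itself is hypothetical:

#include <SDL.h>
#include "Joystick.h"
#include "ui/UserInputMapper.h"

// Hypothetical helper: open controller deviceIndex and wire it into the mapper.
Joystick* openAndRegisterController(int deviceIndex, UserInputMapper& mapper) {
    SDL_GameController* controller = SDL_GameControllerOpen(deviceIndex);
    if (!controller) {
        return nullptr;
    }
    SDL_JoystickID id = SDL_JoystickInstanceID(SDL_GameControllerGetJoystick(controller));
    Joystick* joystick = new Joystick(id, SDL_GameControllerName(controller), controller);
    joystick->registerToUserInputMapper(mapper);   // grabs a free device ID and exposes inputs
    joystick->assignDefaultInputMapping(mapper);   // installs the default channel bindings
    return joystick;
}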

View file

@ -160,8 +160,8 @@ void KeyboardMouseDevice::registerToUserInputMapper(UserInputMapper& mapper) {
_deviceID = mapper.getFreeDeviceID();
auto proxy = UserInputMapper::DeviceProxy::Pointer(new UserInputMapper::DeviceProxy("Keyboard"));
proxy->getButton = [this] (const UserInputMapper::Input& input, int timestamp) -> bool { return this->getButton(input._channel); };
proxy->getAxis = [this] (const UserInputMapper::Input& input, int timestamp) -> float { return this->getAxis(input._channel); };
proxy->getButton = [this] (const UserInputMapper::Input& input, int timestamp) -> bool { return this->getButton(input.getChannel()); };
proxy->getAxis = [this] (const UserInputMapper::Input& input, int timestamp) -> float { return this->getAxis(input.getChannel()); };
proxy->getAvailabeInputs = [this] () -> QVector<UserInputMapper::InputPair> {
QVector<UserInputMapper::InputPair> availableInputs;
for (int i = (int) Qt::Key_0; i <= (int) Qt::Key_9; i++) {
@ -170,7 +170,33 @@ void KeyboardMouseDevice::registerToUserInputMapper(UserInputMapper& mapper) {
for (int i = (int) Qt::Key_A; i <= (int) Qt::Key_Z; i++) {
availableInputs.append(UserInputMapper::InputPair(makeInput(Qt::Key(i)), QKeySequence(Qt::Key(i)).toString()));
}
for (int i = (int) Qt::Key_Left; i <= (int) Qt::Key_Down; i++) {
availableInputs.append(UserInputMapper::InputPair(makeInput(Qt::Key(i)), QKeySequence(Qt::Key(i)).toString()));
}
availableInputs.append(UserInputMapper::InputPair(makeInput(Qt::Key_Space), QKeySequence(Qt::Key_Space).toString()));
availableInputs.append(UserInputMapper::InputPair(makeInput(Qt::Key_Shift), "Shift"));
availableInputs.append(UserInputMapper::InputPair(makeInput(Qt::Key_PageUp), QKeySequence(Qt::Key_PageUp).toString()));
availableInputs.append(UserInputMapper::InputPair(makeInput(Qt::Key_PageDown), QKeySequence(Qt::Key_PageDown).toString()));
availableInputs.append(UserInputMapper::InputPair(makeInput(Qt::LeftButton), "Left Mouse Click"));
availableInputs.append(UserInputMapper::InputPair(makeInput(Qt::MiddleButton), "Middle Mouse Click"));
availableInputs.append(UserInputMapper::InputPair(makeInput(Qt::RightButton), "Right Mouse Click"));
availableInputs.append(UserInputMapper::InputPair(makeInput(MOUSE_AXIS_X_POS), "Mouse Move Right"));
availableInputs.append(UserInputMapper::InputPair(makeInput(MOUSE_AXIS_X_NEG), "Mouse Move Left"));
availableInputs.append(UserInputMapper::InputPair(makeInput(MOUSE_AXIS_Y_POS), "Mouse Move Up"));
availableInputs.append(UserInputMapper::InputPair(makeInput(MOUSE_AXIS_Y_NEG), "Mouse Move Down"));
availableInputs.append(UserInputMapper::InputPair(makeInput(MOUSE_AXIS_WHEEL_Y_POS), "Mouse Wheel Right"));
availableInputs.append(UserInputMapper::InputPair(makeInput(MOUSE_AXIS_WHEEL_Y_NEG), "Mouse Wheel Left"));
availableInputs.append(UserInputMapper::InputPair(makeInput(MOUSE_AXIS_WHEEL_X_POS), "Mouse Wheel Up"));
availableInputs.append(UserInputMapper::InputPair(makeInput(MOUSE_AXIS_WHEEL_X_NEG), "Mouse Wheel Down"));
availableInputs.append(UserInputMapper::InputPair(makeInput(TOUCH_AXIS_X_POS), "Touchpad Right"));
availableInputs.append(UserInputMapper::InputPair(makeInput(TOUCH_AXIS_X_NEG), "Touchpad Left"));
availableInputs.append(UserInputMapper::InputPair(makeInput(TOUCH_AXIS_Y_POS), "Touchpad Up"));
availableInputs.append(UserInputMapper::InputPair(makeInput(TOUCH_AXIS_Y_NEG), "Touchpad Down"));
return availableInputs;
};
proxy->resetDeviceBindings = [this, &mapper] () -> bool {
@ -189,7 +215,7 @@ void KeyboardMouseDevice::assignDefaultInputMapping(UserInputMapper& mapper) {
const float MOUSE_PITCH_SPEED = 0.25f;
const float TOUCH_YAW_SPEED = 0.5f;
const float TOUCH_PITCH_SPEED = 0.25f;
//const float BUTTON_BOOM_SPEED = 0.1f;
const float BUTTON_BOOM_SPEED = 0.1f;
// AWSD keys mapping
mapper.addInputChannel(UserInputMapper::LONGITUDINAL_BACKWARD, makeInput(Qt::Key_S), BUTTON_MOVE_SPEED);
@ -199,8 +225,8 @@ void KeyboardMouseDevice::assignDefaultInputMapping(UserInputMapper& mapper) {
mapper.addInputChannel(UserInputMapper::VERTICAL_DOWN, makeInput(Qt::Key_C), BUTTON_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::VERTICAL_UP, makeInput(Qt::Key_E), BUTTON_MOVE_SPEED);
// mapper.addInputChannel(UserInputMapper::BOOM_IN, makeInput(Qt::Key_W), makeInput(Qt::Key_Shift), BUTTON_BOOM_SPEED);
// mapper.addInputChannel(UserInputMapper::BOOM_OUT, makeInput(Qt::Key_S), makeInput(Qt::Key_Shift), BUTTON_BOOM_SPEED);
mapper.addInputChannel(UserInputMapper::BOOM_IN, makeInput(Qt::Key_W), makeInput(Qt::Key_Shift), BUTTON_BOOM_SPEED);
mapper.addInputChannel(UserInputMapper::BOOM_OUT, makeInput(Qt::Key_S), makeInput(Qt::Key_Shift), BUTTON_BOOM_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_LEFT, makeInput(Qt::Key_A), makeInput(Qt::RightButton), BUTTON_YAW_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_RIGHT, makeInput(Qt::Key_D), makeInput(Qt::RightButton), BUTTON_YAW_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_LEFT, makeInput(Qt::Key_A), makeInput(Qt::Key_Shift), BUTTON_YAW_SPEED);
@ -216,8 +242,8 @@ void KeyboardMouseDevice::assignDefaultInputMapping(UserInputMapper& mapper) {
mapper.addInputChannel(UserInputMapper::VERTICAL_DOWN, makeInput(Qt::Key_PageDown), BUTTON_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::VERTICAL_UP, makeInput(Qt::Key_PageUp), BUTTON_MOVE_SPEED);
// mapper.addInputChannel(UserInputMapper::BOOM_IN, makeInput(Qt::Key_Up), makeInput(Qt::Key_Shift), BUTTON_BOOM_SPEED);
// mapper.addInputChannel(UserInputMapper::BOOM_OUT, makeInput(Qt::Key_Down), makeInput(Qt::Key_Shift), BUTTON_BOOM_SPEED);
mapper.addInputChannel(UserInputMapper::BOOM_IN, makeInput(Qt::Key_Up), makeInput(Qt::Key_Shift), BUTTON_BOOM_SPEED);
mapper.addInputChannel(UserInputMapper::BOOM_OUT, makeInput(Qt::Key_Down), makeInput(Qt::Key_Shift), BUTTON_BOOM_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_LEFT, makeInput(Qt::Key_Left), makeInput(Qt::RightButton), BUTTON_YAW_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_RIGHT, makeInput(Qt::Key_Right), makeInput(Qt::RightButton), BUTTON_YAW_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_LEFT, makeInput(Qt::Key_Left), makeInput(Qt::Key_Shift), BUTTON_YAW_SPEED);
@ -246,8 +272,8 @@ void KeyboardMouseDevice::assignDefaultInputMapping(UserInputMapper& mapper) {
mapper.addInputChannel(UserInputMapper::YAW_RIGHT, makeInput(TOUCH_AXIS_X_POS), TOUCH_YAW_SPEED);
// Wheel move
//mapper.addInputChannel(UserInputMapper::BOOM_IN, makeInput(MOUSE_AXIS_WHEEL_Y_NEG), BUTTON_BOOM_SPEED);
//mapper.addInputChannel(UserInputMapper::BOOM_OUT, makeInput(MOUSE_AXIS_WHEEL_Y_POS), BUTTON_BOOM_SPEED);
mapper.addInputChannel(UserInputMapper::BOOM_IN, makeInput(MOUSE_AXIS_WHEEL_Y_NEG), BUTTON_BOOM_SPEED);
mapper.addInputChannel(UserInputMapper::BOOM_OUT, makeInput(MOUSE_AXIS_WHEEL_Y_POS), BUTTON_BOOM_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_LEFT, makeInput(MOUSE_AXIS_WHEEL_X_NEG), BUTTON_YAW_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_RIGHT, makeInput(MOUSE_AXIS_WHEEL_X_POS), BUTTON_YAW_SPEED);

View file

@ -25,6 +25,9 @@
#include <avatar/AvatarManager.h>
#include <avatar/MyAvatar.h>
#include <GlowEffect.h>
#include <GlWindow.h>
#include <gpu/GLBackend.h>
#include <OglplusHelpers.h>
#include <PathUtils.h>
#include <SharedUtil.h>
#include <UserActivityLogger.h>
@ -34,7 +37,6 @@
#include "InterfaceLogging.h"
#include "Application.h"
#include <gpu/GLBackend.h>
template <typename Function>
void for_each_eye(Function function) {
@ -53,27 +55,133 @@ void for_each_eye(const ovrHmd & hmd, Function function) {
}
}
#ifdef OVR_CLIENT_DISTORTION
ProgramObject OculusManager::_program;
int OculusManager::_textureLocation;
int OculusManager::_eyeToSourceUVScaleLocation;
int OculusManager::_eyeToSourceUVOffsetLocation;
int OculusManager::_eyeRotationStartLocation;
int OculusManager::_eyeRotationEndLocation;
int OculusManager::_positionAttributeLocation;
int OculusManager::_colorAttributeLocation;
int OculusManager::_texCoord0AttributeLocation;
int OculusManager::_texCoord1AttributeLocation;
int OculusManager::_texCoord2AttributeLocation;
ovrVector2f OculusManager::_UVScaleOffset[ovrEye_Count][2];
GLuint OculusManager::_vertices[ovrEye_Count] = { 0, 0 };
GLuint OculusManager::_indices[ovrEye_Count] = { 0, 0 };
GLsizei OculusManager::_meshSize[ovrEye_Count] = { 0, 0 };
ovrFrameTiming OculusManager::_hmdFrameTiming;
bool OculusManager::_programInitialized = false;
#endif
#ifdef Q_OS_WIN
// A base class for FBO wrappers that need to use the Oculus C
// API to manage textures via ovrHmd_CreateSwapTextureSetGL,
// ovrHmd_CreateMirrorTextureGL, etc
template <typename C>
struct RiftFramebufferWrapper : public FramebufferWrapper<C, char> {
ovrHmd hmd;
RiftFramebufferWrapper(const ovrHmd & hmd) : hmd(hmd) {
color = 0;
depth = 0;
};
void Resize(const uvec2 & size) {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
this->size = size;
initColor();
initDone();
}
protected:
virtual void initDepth() override final {
}
};
// A wrapper for constructing and using a swap texture set,
// where each frame you draw to a texture via the FBO,
// then submit it and increment to the next texture.
// The Oculus SDK manages the creation and destruction of
// the textures
struct SwapFramebufferWrapper : public RiftFramebufferWrapper<ovrSwapTextureSet*> {
SwapFramebufferWrapper(const ovrHmd & hmd)
: RiftFramebufferWrapper(hmd) {
}
~SwapFramebufferWrapper() {
if (color) {
ovrHmd_DestroySwapTextureSet(hmd, color);
color = nullptr;
}
}
void Increment() {
++color->CurrentIndex;
color->CurrentIndex %= color->TextureCount;
}
protected:
virtual void initColor() override {
if (color) {
ovrHmd_DestroySwapTextureSet(hmd, color);
color = nullptr;
}
ovrResult result = ovrHmd_CreateSwapTextureSetGL(hmd, GL_RGBA, size.x, size.y, &color);
Q_ASSERT(OVR_SUCCESS(result));
for (int i = 0; i < color->TextureCount; ++i) {
ovrGLTexture& ovrTex = (ovrGLTexture&)color->Textures[i];
glBindTexture(GL_TEXTURE_2D, ovrTex.OGL.TexId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
glBindTexture(GL_TEXTURE_2D, 0);
}
virtual void initDone() override {
}
virtual void onBind(oglplus::Framebuffer::Target target) override {
ovrGLTexture& tex = (ovrGLTexture&)(color->Textures[color->CurrentIndex]);
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex.OGL.TexId, 0);
}
virtual void onUnbind(oglplus::Framebuffer::Target target) override {
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
}
};
// We use a FBO to wrap the mirror texture because it makes it easier to
// render to the screen via glBlitFramebuffer
struct MirrorFramebufferWrapper : public RiftFramebufferWrapper<ovrGLTexture*> {
MirrorFramebufferWrapper(const ovrHmd & hmd)
: RiftFramebufferWrapper(hmd) {
}
virtual ~MirrorFramebufferWrapper() {
if (color) {
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
color = nullptr;
}
}
private:
void initColor() override {
if (color) {
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
color = nullptr;
}
ovrResult result = ovrHmd_CreateMirrorTextureGL(hmd, GL_RGBA, size.x, size.y, (ovrTexture**)&color);
Q_ASSERT(OVR_SUCCESS(result));
}
void initDone() override {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, color->OGL.TexId, 0);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
}
};
SwapFramebufferWrapper* OculusManager::_swapFbo{ nullptr };
MirrorFramebufferWrapper* OculusManager::_mirrorFbo{ nullptr };
ovrLayerEyeFov OculusManager::_sceneLayer;
#else
ovrTexture OculusManager::_eyeTextures[ovrEye_Count];
GlWindow* OculusManager::_outputWindow{ nullptr };
#endif
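
On Windows the 0.6 SDK replaces client-side distortion with swap texture sets: each frame you render into the texture at CurrentIndex, submit the layer, and advance the index, which is what SwapFramebufferWrapper::Increment does above. A minimal sketch of that round-robin using only SDK types and fields that appear in the diff (nextSwapTexture() is an illustrative helper, not repo code):

#include <OVR_CAPI_GL.h>   // Oculus 0.6 C API GL header, as used by the wrappers above

// Return the texture to render into this frame, then advance to the next one.
ovrGLTexture* nextSwapTexture(ovrSwapTextureSet* set) {
    ovrGLTexture* current = (ovrGLTexture*)&set->Textures[set->CurrentIndex];
    set->CurrentIndex = (set->CurrentIndex + 1) % set->TextureCount;
    return current;
}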
bool OculusManager::_isConnected = false;
ovrHmd OculusManager::_ovrHmd;
ovrFovPort OculusManager::_eyeFov[ovrEye_Count];
@ -104,146 +212,178 @@ bool OculusManager::_eyePerFrameMode = false;
ovrEyeType OculusManager::_lastEyeRendered = ovrEye_Count;
ovrSizei OculusManager::_recommendedTexSize = { 0, 0 };
float OculusManager::_offscreenRenderScale = 1.0;
void OculusManager::initSdk() {
ovr_Initialize();
_ovrHmd = ovrHmd_Create(0);
if (!_ovrHmd) {
_ovrHmd = ovrHmd_CreateDebug(ovrHmd_DK2);
}
}
void OculusManager::shutdownSdk() {
if (_ovrHmd) {
ovrHmd_Destroy(_ovrHmd);
_ovrHmd = nullptr;
ovr_Shutdown();
}
}
ovrRecti OculusManager::_eyeViewports[ovrEye_Count];
void OculusManager::init() {
#ifdef OVR_DIRECT_MODE
initSdk();
#endif
}
void OculusManager::deinit() {
#ifdef OVR_DIRECT_MODE
shutdownSdk();
#endif
}
void OculusManager::connect() {
#ifndef OVR_DIRECT_MODE
initSdk();
#endif
_calibrationState = UNCALIBRATED;
void OculusManager::connect(QOpenGLContext* shareContext) {
qCDebug(interfaceapp) << "Oculus SDK" << OVR_VERSION_STRING;
if (_ovrHmd) {
if (!_isConnected) {
UserActivityLogger::getInstance().connectedDevice("hmd", "oculus");
}
_isConnected = true;
for_each_eye([&](ovrEyeType eye) {
_eyeFov[eye] = _ovrHmd->DefaultEyeFov[eye];
});
ovrInitParams initParams; memset(&initParams, 0, sizeof(initParams));
ovrGLConfig cfg;
memset(&cfg, 0, sizeof(cfg));
cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
cfg.OGL.Header.BackBufferSize = _ovrHmd->Resolution;
cfg.OGL.Header.Multisample = 1;
int distortionCaps = 0
| ovrDistortionCap_Vignette
| ovrDistortionCap_Overdrive
| ovrDistortionCap_TimeWarp;
int configResult = ovrHmd_ConfigureRendering(_ovrHmd, &cfg.Config,
distortionCaps, _eyeFov, _eyeRenderDesc);
assert(configResult);
_recommendedTexSize = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left, _eyeFov[ovrEye_Left], 1.0f);
_renderTargetSize = { _recommendedTexSize.w * 2, _recommendedTexSize.h };
for_each_eye([&](ovrEyeType eye) {
//Get texture size
_eyeTextures[eye].Header.API = ovrRenderAPI_OpenGL;
_eyeTextures[eye].Header.TextureSize = _renderTargetSize;
_eyeTextures[eye].Header.RenderViewport.Pos = { 0, 0 };
});
_eyeTextures[ovrEye_Right].Header.RenderViewport.Pos.x = _recommendedTexSize.w;
ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
ovrHmd_ConfigureTracking(_ovrHmd, ovrTrackingCap_Orientation | ovrTrackingCap_Position |
ovrTrackingCap_MagYawCorrection,
ovrTrackingCap_Orientation);
if (!_camera) {
_camera = new Camera;
configureCamera(*_camera); // no need to use screen dimensions; they're ignored
}
#ifdef OVR_CLIENT_DISTORTION
if (!_programInitialized) {
// Shader program
_programInitialized = true;
_program.addShaderFromSourceFile(QGLShader::Vertex, PathUtils::resourcesPath() + "shaders/oculus.vert");
_program.addShaderFromSourceFile(QGLShader::Fragment, PathUtils::resourcesPath() + "shaders/oculus.frag");
_program.link();
// Uniforms
_textureLocation = _program.uniformLocation("texture");
_eyeToSourceUVScaleLocation = _program.uniformLocation("EyeToSourceUVScale");
_eyeToSourceUVOffsetLocation = _program.uniformLocation("EyeToSourceUVOffset");
_eyeRotationStartLocation = _program.uniformLocation("EyeRotationStart");
_eyeRotationEndLocation = _program.uniformLocation("EyeRotationEnd");
// Attributes
_positionAttributeLocation = _program.attributeLocation("position");
_colorAttributeLocation = _program.attributeLocation("color");
_texCoord0AttributeLocation = _program.attributeLocation("texCoord0");
_texCoord1AttributeLocation = _program.attributeLocation("texCoord1");
_texCoord2AttributeLocation = _program.attributeLocation("texCoord2");
}
//Generate the distortion VBOs
generateDistortionMesh();
#ifdef DEBUG
initParams.Flags |= ovrInit_Debug;
#endif
} else {
ovr_Initialize(&initParams);
#ifdef Q_OS_WIN
ovrResult res = ovrHmd_Create(0, &_ovrHmd);
#ifdef DEBUG
if (!OVR_SUCCESS(res)) {
res = ovrHmd_CreateDebug(ovrHmd_DK2, &_ovrHmd);
Q_ASSERT(OVR_SUCCESS(res));
}
#endif
#else
_ovrHmd = ovrHmd_Create(0);
#ifdef DEBUG
if (!_ovrHmd) {
_ovrHmd = ovrHmd_CreateDebug(ovrHmd_DK2);
}
#endif
#endif
if (!_ovrHmd) {
_isConnected = false;
// we're definitely not in "VR mode" so tell the menu that
Menu::getInstance()->getActionForOption(MenuOption::EnableVRMode)->setChecked(false);
ovr_Shutdown();
return;
}
_calibrationState = UNCALIBRATED;
if (!_isConnected) {
UserActivityLogger::getInstance().connectedDevice("hmd", "oculus");
}
_isConnected = true;
for_each_eye([&](ovrEyeType eye) {
_eyeFov[eye] = _ovrHmd->DefaultEyeFov[eye];
});
_recommendedTexSize = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left, _eyeFov[ovrEye_Left], 1.0f);
_renderTargetSize = { _recommendedTexSize.w * 2, _recommendedTexSize.h };
#ifdef Q_OS_WIN
_mirrorFbo = new MirrorFramebufferWrapper(_ovrHmd);
_swapFbo = new SwapFramebufferWrapper(_ovrHmd);
_swapFbo->Init(toGlm(_renderTargetSize));
_sceneLayer.ColorTexture[0] = _swapFbo->color;
_sceneLayer.ColorTexture[1] = nullptr;
_sceneLayer.Viewport[0].Pos = { 0, 0 };
_sceneLayer.Viewport[0].Size = _recommendedTexSize;
_sceneLayer.Viewport[1].Pos = { _recommendedTexSize.w, 0 };
_sceneLayer.Viewport[1].Size = _recommendedTexSize;
_sceneLayer.Header.Type = ovrLayerType_EyeFov;
_sceneLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
for_each_eye([&](ovrEyeType eye) {
_eyeViewports[eye] = _sceneLayer.Viewport[eye];
_sceneLayer.Fov[eye] = _eyeFov[eye];
});
#else
_outputWindow = new GlWindow(shareContext);
_outputWindow->show();
// _outputWindow->setFlags(Qt::FramelessWindowHint );
// _outputWindow->resize(_ovrHmd->Resolution.w, _ovrHmd->Resolution.h);
// _outputWindow->setPosition(_ovrHmd->WindowsPos.x, _ovrHmd->WindowsPos.y);
ivec2 desiredPosition = toGlm(_ovrHmd->WindowsPos);
foreach(QScreen* screen, qGuiApp->screens()) {
ivec2 screenPosition = toGlm(screen->geometry().topLeft());
if (screenPosition == desiredPosition) {
_outputWindow->setScreen(screen);
break;
}
}
_outputWindow->showFullScreen();
_outputWindow->makeCurrent();
ovrGLConfig cfg;
memset(&cfg, 0, sizeof(cfg));
cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
cfg.OGL.Header.BackBufferSize = _ovrHmd->Resolution;
cfg.OGL.Header.Multisample = 0;
int distortionCaps = 0
| ovrDistortionCap_Vignette
| ovrDistortionCap_Overdrive
| ovrDistortionCap_TimeWarp;
int configResult = ovrHmd_ConfigureRendering(_ovrHmd, &cfg.Config,
distortionCaps, _eyeFov, _eyeRenderDesc);
assert(configResult);
_outputWindow->doneCurrent();
for_each_eye([&](ovrEyeType eye) {
//Get texture size
_eyeTextures[eye].Header.API = ovrRenderAPI_OpenGL;
_eyeTextures[eye].Header.TextureSize = _renderTargetSize;
_eyeTextures[eye].Header.RenderViewport.Pos = { 0, 0 };
_eyeTextures[eye].Header.RenderViewport.Size = _renderTargetSize;
_eyeTextures[eye].Header.RenderViewport.Size.w /= 2;
});
_eyeTextures[ovrEye_Right].Header.RenderViewport.Pos.x = _recommendedTexSize.w;
for_each_eye([&](ovrEyeType eye) {
_eyeViewports[eye] = _eyeTextures[eye].Header.RenderViewport;
});
#endif
ovrHmd_SetEnabledCaps(_ovrHmd,
ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
ovrHmd_ConfigureTracking(_ovrHmd,
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection,
ovrTrackingCap_Orientation);
if (!_camera) {
_camera = new Camera;
configureCamera(*_camera); // no need to use screen dimensions; they're ignored
}
}
//Disconnects and deallocates the OR
void OculusManager::disconnect() {
if (_isConnected) {
#ifdef Q_OS_WIN
if (_swapFbo) {
delete _swapFbo;
_swapFbo = nullptr;
}
if (_mirrorFbo) {
delete _mirrorFbo;
_mirrorFbo = nullptr;
}
#else
_outputWindow->showNormal();
_outputWindow->deleteLater();
_outputWindow = nullptr;
#endif
if (_ovrHmd) {
ovrHmd_Destroy(_ovrHmd);
_ovrHmd = nullptr;
}
ovr_Shutdown();
_isConnected = false;
// Prepare to potentially have to dismiss the HSW again
// if the user re-enables VR
_hswDismissed = false;
#ifndef OVR_DIRECT_MODE
shutdownSdk();
#endif
#ifdef OVR_CLIENT_DISTORTION
//Free the distortion mesh data
for (int i = 0; i < ovrEye_Count; i++) {
if (_vertices[i] != 0) {
glDeleteBuffers(1, &(_vertices[i]));
_vertices[i] = 0;
}
if (_indices[i] != 0) {
glDeleteBuffers(1, &(_indices[i]));
_indices[i] = 0;
}
}
#endif
}
}
@ -346,7 +486,6 @@ void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) {
break;
default:
break;
}
}
@ -363,64 +502,8 @@ void OculusManager::abandonCalibration() {
}
}
#ifdef OVR_CLIENT_DISTORTION
void OculusManager::generateDistortionMesh() {
//Check if we already have the distortion mesh
if (_vertices[0] != 0) {
printf("WARNING: Tried to generate Oculus distortion mesh twice without freeing the VBOs.");
return;
}
for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
// Allocate and generate distortion mesh vertices
ovrDistortionMesh meshData;
ovrHmd_CreateDistortionMesh(_ovrHmd, _eyeRenderDesc[eyeNum].Eye, _eyeRenderDesc[eyeNum].Fov, _ovrHmd->DistortionCaps, &meshData);
// Parse the vertex data and create a render ready vertex buffer
DistortionVertex* pVBVerts = new DistortionVertex[meshData.VertexCount];
_meshSize[eyeNum] = meshData.IndexCount;
// Convert the oculus vertex data to the DistortionVertex format.
DistortionVertex* v = pVBVerts;
ovrDistortionVertex* ov = meshData.pVertexData;
for (unsigned int vertNum = 0; vertNum < meshData.VertexCount; vertNum++) {
v->pos.x = ov->ScreenPosNDC.x;
v->pos.y = ov->ScreenPosNDC.y;
v->texR.x = ov->TanEyeAnglesR.x;
v->texR.y = ov->TanEyeAnglesR.y;
v->texG.x = ov->TanEyeAnglesG.x;
v->texG.y = ov->TanEyeAnglesG.y;
v->texB.x = ov->TanEyeAnglesB.x;
v->texB.y = ov->TanEyeAnglesB.y;
v->color.r = v->color.g = v->color.b = (GLubyte)(ov->VignetteFactor * 255.99f);
v->color.a = (GLubyte)(ov->TimeWarpFactor * 255.99f);
v++;
ov++;
}
//vertices
glGenBuffers(1, &(_vertices[eyeNum]));
glBindBuffer(GL_ARRAY_BUFFER, _vertices[eyeNum]);
glBufferData(GL_ARRAY_BUFFER, sizeof(DistortionVertex) * meshData.VertexCount, pVBVerts, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
//indices
glGenBuffers(1, &(_indices[eyeNum]));
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indices[eyeNum]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unsigned short) * meshData.IndexCount, meshData.pIndexData, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
//Now that we have the VBOs we can get rid of the mesh data
delete [] pVBVerts;
ovrHmd_DestroyDistortionMesh(&meshData);
}
}
#endif
bool OculusManager::isConnected() {
return _isConnected && Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode);
return _isConnected;
}
//Begins the frame timing for oculus prediction purposes
@ -428,10 +511,6 @@ void OculusManager::beginFrameTiming() {
if (_frameTimingActive) {
printf("WARNING: Called OculusManager::beginFrameTiming() twice in a row, need to call OculusManager::endFrameTiming().");
}
#ifdef OVR_CLIENT_DISTORTION
_hmdFrameTiming = ovrHmd_BeginFrameTiming(_ovrHmd, _frameIndex);
#endif
_frameTimingActive = true;
}
@ -441,9 +520,6 @@ bool OculusManager::allowSwap() {
//Ends frame timing
void OculusManager::endFrameTiming() {
#ifdef OVR_CLIENT_DISTORTION
ovrHmd_EndFrameTiming(_ovrHmd);
#endif
_frameIndex++;
_frameTimingActive = false;
}
@ -474,39 +550,8 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
oldFrameIndex = _frameIndex;
#endif
// Every so often do some additional timing calculations and debug output
bool debugFrame = 0 == _frameIndex % 400;
#ifndef Q_OS_WIN
#if 0
// Try to measure the amount of time taken to do the distortion
    // (does not seem to work on OSX with SDK-based distortion)
    // FIXME can't use a static object here, because it will cause a crash when the
    // query attempts to destruct after the GL context is gone.
static bool timerActive = false;
static QOpenGLTimerQuery timerQuery;
if (!timerQuery.isCreated()) {
timerQuery.create();
}
if (timerActive && timerQuery.isResultAvailable()) {
auto result = timerQuery.waitForResult();
if (result) { qCDebug(interfaceapp) << "Distortion took " << result << "ns"; };
timerActive = false;
}
#endif
#ifdef OVR_DIRECT_MODE
static bool attached = false;
if (!attached) {
attached = true;
void * nativeWindowHandle = (void*)(size_t)glCanvas->effectiveWinId();
if (nullptr != nativeWindowHandle) {
ovrHmd_AttachToWindow(_ovrHmd, nativeWindowHandle, nullptr, nullptr);
}
}
#endif
#ifndef OVR_CLIENT_DISTORTION
// FIXME: we need a better way of responding to the HSW. In particular
// we need to ensure that it's only displayed once per session, rather than
// every time the user toggles VR mode, and we need to hook it up to actual
@ -523,7 +568,6 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
}
#endif
//beginFrameTiming must be called before display
if (!_frameTimingActive) {
printf("WARNING: Called OculusManager::display() without calling OculusManager::beginFrameTiming() first.");
@ -565,7 +609,9 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
static ovrVector3f eyeOffsets[2] = { { 0, 0, 0 }, { 0, 0, 0 } };
ovrPosef eyePoses[ovrEye_Count];
ovrHmd_GetEyePoses(_ovrHmd, _frameIndex, eyeOffsets, eyePoses, nullptr);
#ifndef Q_OS_WIN
ovrHmd_BeginFrame(_ovrHmd, _frameIndex);
#endif
static ovrPosef eyeRenderPose[ovrEye_Count];
//Render each eye into an fbo
for_each_eye(_ovrHmd, [&](ovrEyeType eye){
@ -607,10 +653,9 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
ovrRecti & vp = _eyeTextures[eye].Header.RenderViewport;
ovrRecti & vp = _eyeViewports[eye];
vp.Size.h = _recommendedTexSize.h * _offscreenRenderScale;
vp.Size.w = _recommendedTexSize.w * _offscreenRenderScale;
glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
renderArgs->_renderSide = RenderArgs::MONO;
@ -638,52 +683,53 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
// restore our normal viewport
glViewport(0, 0, deviceSize.width(), deviceSize.height());
#if 0
if (debugFrame && !timerActive) {
timerQuery.begin();
}
#endif
#ifdef Q_OS_WIN
auto srcFboSize = finalFbo->getSize();
#ifdef OVR_CLIENT_DISTORTION
//Wait till time-warp to reduce latency
ovr_WaitTillTime(_hmdFrameTiming.TimewarpPointSeconds);
#ifdef DEBUG_RENDER_WITHOUT_DISTORTION
auto fboSize = finalFbo->getSize();
// Blit to the oculus provided texture
glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
_swapFbo->Bound(oglplus::Framebuffer::Target::Draw, [&] {
glBlitFramebuffer(
0, 0, srcFboSize.x, srcFboSize.y,
0, 0, _swapFbo->size.x, _swapFbo->size.y,
GL_COLOR_BUFFER_BIT, GL_NEAREST);
});
// Blit to the onscreen window
auto destWindowSize = qApp->getDeviceSize();
glBlitFramebuffer(
0, 0, fboSize.x, fboSize.y,
0, 0, deviceSize.width(), deviceSize.height(),
0, 0, srcFboSize.x, srcFboSize.y,
0, 0, destWindowSize.width(), destWindowSize.height(),
GL_COLOR_BUFFER_BIT, GL_NEAREST);
#else
    //Clear the color buffer to ensure that there isn't any residual color
    //left over from when the Oculus Rift was not connected.
glClear(GL_COLOR_BUFFER_BIT);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(finalFbo->getRenderBuffer(0)));
//Renders the distorted mesh onto the screen
renderDistortionMesh(eyeRenderPose);
glBindTexture(GL_TEXTURE_2D, 0);
#endif
glCanvas->swapBuffers();
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
#else
// Submit the frame to the Oculus SDK for timewarp and distortion
for_each_eye([&](ovrEyeType eye) {
_sceneLayer.RenderPose[eye] = eyeRenderPose[eye];
});
auto header = &_sceneLayer.Header;
ovrResult res = ovrHmd_SubmitFrame(_ovrHmd, _frameIndex, nullptr, &header, 1);
Q_ASSERT(OVR_SUCCESS(res));
_swapFbo->Increment();
#else
GLsync syncObject = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
glFlush();
_outputWindow->makeCurrent();
// force the compositing context to wait for the texture
// rendering to complete before it starts the distortion rendering,
// but without triggering a CPU/GPU synchronization
glWaitSync(syncObject, 0, GL_TIMEOUT_IGNORED);
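        // (glWaitSync inserts the wait into the compositing context's GPU command stream only;
        // unlike glClientWaitSync it returns immediately on the CPU, which is what keeps this
        // hand-off free of a CPU/GPU synchronization.)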
GLuint textureId = gpu::GLBackend::getTextureID(finalFbo->getRenderBuffer(0));
for_each_eye([&](ovrEyeType eye) {
ovrGLTexture & glEyeTexture = reinterpret_cast<ovrGLTexture&>(_eyeTextures[eye]);
glEyeTexture.OGL.TexId = finalFbo->texture();
glEyeTexture.OGL.TexId = textureId;
});
// restore our normal viewport
ovrHmd_EndFrame(_ovrHmd, eyeRenderPose, _eyeTextures);
#endif
#if 0
if (debugFrame && !timerActive) {
timerQuery.end();
timerActive = true;
}
glCanvas->makeCurrent();
#endif
// No DK2, no message.
@ -773,8 +819,11 @@ void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) {
glEnable(GL_DEPTH_TEST);
_program.release();
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
#endif
//Tries to reconnect to the sensors
void OculusManager::reset() {
@ -783,21 +832,6 @@ void OculusManager::reset() {
}
}
//Gets the current predicted angles from the oculus sensors
void OculusManager::getEulerAngles(float& yaw, float& pitch, float& roll) {
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
if (ts.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) {
glm::vec3 euler = glm::eulerAngles(toGlm(ts.HeadPose.ThePose.Orientation));
yaw = euler.y;
pitch = euler.x;
roll = euler.z;
} else {
yaw = 0.0f;
pitch = 0.0f;
roll = 0.0f;
}
}
glm::vec3 OculusManager::getRelativePosition() {
ovrTrackingState trackingState = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
return toGlm(trackingState.HeadPose.ThePose.Position);
@ -828,6 +862,9 @@ void OculusManager::overrideOffAxisFrustum(float& left, float& right, float& bot
}
int OculusManager::getHMDScreen() {
#ifdef Q_OS_WIN
return -1;
#else
int hmdScreenIndex = -1; // unknown
    // TODO: it might be smarter to handle multiple HMDs connected in this case. But for now,
    // we will simply assume the initialization code that set up _ovrHmd picked the best HMD.
@ -878,5 +915,6 @@ int OculusManager::getHMDScreen() {
}
}
return hmdScreenIndex;
#endif
}

View file

@ -22,30 +22,16 @@
#include "RenderArgs.h"
class QOpenGLContext;
class Camera;
class GlWindow;
class PalmData;
class Text3DOverlay;
// Uncomment this to enable client-side distortion. NOT recommended, since
// the Oculus SDK will ideally provide the best practices for distortion in
// terms of performance and quality, and by using it we will get updated
// best practices for free with new runtime releases.
#define OVR_CLIENT_DISTORTION 1
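// A condensed sketch of how this toggle gates the display path in OculusManager.cpp
// (both branches appear in full earlier in this diff):
//
//     #ifdef OVR_CLIENT_DISTORTION
//         renderDistortionMesh(eyeRenderPose);                   // application-side mesh distortion
//     #else
//         ovrHmd_EndFrame(_ovrHmd, eyeRenderPose, _eyeTextures); // SDK-side distortion + timewarp
//     #endif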
// Direct HMD mode is currently only supported on Windows, and some Linux systems will
// misbehave if we try to enable the Oculus SDK at all, so isolate support for Direct
// mode to Windows only for now.
#ifdef Q_OS_WIN
// On Win32 platforms, enabling Direct HMD requires that the SDK be
// initialized before the GL context is set up, but this breaks v-sync
// for any application that has a Direct-mode-enabled Rift connected
// but is not rendering to it. For the time being I'm setting this as
// a macro-enabled mechanism which changes where the SDK is initialized.
// To enable Direct HMD mode, you can un-comment this, but with the
// caveat that it will break v-sync in NON-VR mode if you have an Oculus
// Rift connected and in Direct mode.
#define OVR_DIRECT_MODE 1
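// Likewise, OVR_DIRECT_MODE mainly decides where the SDK is attached and torn down;
// a condensed sketch of the pattern used in OculusManager.cpp (both call sites appear
// in full earlier in this diff):
//
//     #ifdef OVR_DIRECT_MODE
//         ovrHmd_AttachToWindow(_ovrHmd, nativeWindowHandle, nullptr, nullptr);
//     #endif
//
//     #ifndef OVR_DIRECT_MODE
//         shutdownSdk();   // only torn down here when Direct mode is off
//     #endif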
struct SwapFramebufferWrapper;
struct MirrorFramebufferWrapper;
#endif
@ -54,7 +40,7 @@ class OculusManager {
public:
static void init();
static void deinit();
static void connect();
static void connect(QOpenGLContext* shareContext);
static void disconnect();
static bool isConnected();
static void recalibrate();
@ -66,10 +52,6 @@ public:
static void display(QGLWidget * glCanvas, RenderArgs* renderArgs, const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera);
static void reset();
/// \param[out] yaw yaw in radians
/// \param[out] pitch pitch in radians
/// \param[out] roll roll in radians
static void getEulerAngles(float& yaw, float& pitch, float& roll);
static glm::vec3 getRelativePosition();
static glm::quat getOrientation();
static QSize getRenderTargetSize();
@ -85,44 +67,7 @@ public:
private:
static void initSdk();
static void shutdownSdk();
#ifdef OVR_CLIENT_DISTORTION
static void generateDistortionMesh();
static void renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]);
struct DistortionVertex {
glm::vec2 pos;
glm::vec2 texR;
glm::vec2 texG;
glm::vec2 texB;
struct {
GLubyte r;
GLubyte g;
GLubyte b;
GLubyte a;
} color;
};
static ProgramObject _program;
//Uniforms
static int _textureLocation;
static int _eyeToSourceUVScaleLocation;
static int _eyeToSourceUVOffsetLocation;
static int _eyeRotationStartLocation;
static int _eyeRotationEndLocation;
//Attributes
static int _positionAttributeLocation;
static int _colorAttributeLocation;
static int _texCoord0AttributeLocation;
static int _texCoord1AttributeLocation;
static int _texCoord2AttributeLocation;
static ovrVector2f _UVScaleOffset[ovrEye_Count][2];
static GLuint _vertices[ovrEye_Count];
static GLuint _indices[ovrEye_Count];
static GLsizei _meshSize[ovrEye_Count];
static ovrFrameTiming _hmdFrameTiming;
static bool _programInitialized;
#endif
static ovrTexture _eyeTextures[ovrEye_Count];
static bool _isConnected;
static glm::vec3 _eyePositions[ovrEye_Count];
static ovrHmd _ovrHmd;
@ -130,6 +75,7 @@ private:
static ovrVector3f _eyeOffset[ovrEye_Count];
static glm::mat4 _eyeProjection[ovrEye_Count];
static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count];
static ovrRecti _eyeViewports[ovrEye_Count];
static ovrSizei _renderTargetSize;
static unsigned int _frameIndex;
static bool _frameTimingActive;
@ -162,6 +108,14 @@ private:
static float _offscreenRenderScale;
static bool _eyePerFrameMode;
static ovrEyeType _lastEyeRendered;
#ifdef Q_OS_WIN
static SwapFramebufferWrapper* _swapFbo;
static MirrorFramebufferWrapper* _mirrorFbo;
static ovrLayerEyeFov _sceneLayer;
#else
static ovrTexture _eyeTextures[ovrEye_Count];
static GlWindow* _outputWindow;
#endif
};
@ -181,6 +135,10 @@ inline glm::vec2 toGlm(const ovrVector2f & ov) {
return glm::make_vec2(&ov.x);
}
inline glm::ivec2 toGlm(const ovrVector2i & ov) {
return glm::ivec2(ov.x, ov.y);
}
inline glm::uvec2 toGlm(const ovrSizei & ov) {
return glm::uvec2(ov.w, ov.h);
}

View file

@ -1,73 +1,66 @@
//
// JoystickScriptingInterface.cpp
// SDL2Manager.cpp
// interface/src/devices
//
// Created by Andrzej Kapolka on 5/15/14.
// Copyright 2014 High Fidelity, Inc.
// Created by Sam Gondelman on 6/5/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <qapplication.h>
#include <QtDebug>
#include <QScriptValue>
#ifdef HAVE_SDL2
#include <SDL.h>
#undef main
#endif
#include <HFActionEvent.h>
#include <HFBackEvent.h>
#include <PerfStat.h>
#include "Application.h"
#include "Application.h"
#include "JoystickScriptingInterface.h"
#include "SDL2Manager.h"
#ifdef HAVE_SDL2
SDL_JoystickID getInstanceId(SDL_GameController* controller) {
SDL_JoystickID SDL2Manager::getInstanceId(SDL_GameController* controller) {
SDL_Joystick* joystick = SDL_GameControllerGetJoystick(controller);
return SDL_JoystickInstanceID(joystick);
}
#endif
JoystickScriptingInterface& JoystickScriptingInterface::getInstance() {
static JoystickScriptingInterface sharedInstance;
return sharedInstance;
}
JoystickScriptingInterface::JoystickScriptingInterface() :
SDL2Manager::SDL2Manager() :
#ifdef HAVE_SDL2
_openJoysticks(),
_openJoysticks(),
#endif
_isInitialized(false)
_isInitialized(false)
{
#ifdef HAVE_SDL2
bool initSuccess = (SDL_Init(SDL_INIT_GAMECONTROLLER) == 0);
if (initSuccess) {
int joystickCount = SDL_NumJoysticks();
for (int i = 0; i < joystickCount; i++) {
SDL_GameController* controller = SDL_GameControllerOpen(i);
if (controller) {
SDL_JoystickID id = getInstanceId(controller);
Joystick* joystick = new Joystick(id, SDL_GameControllerName(controller), controller);
_openJoysticks[id] = joystick;
if (!_openJoysticks.contains(id)) {
Joystick* joystick = new Joystick(id, SDL_GameControllerName(controller), controller);
_openJoysticks[id] = joystick;
joystick->registerToUserInputMapper(*Application::getUserInputMapper());
joystick->assignDefaultInputMapping(*Application::getUserInputMapper());
emit joystickAdded(joystick);
}
}
}
_isInitialized = true;
} else {
qDebug() << "Error initializing SDL";
qDebug() << "Error initializing SDL2 Manager";
}
#endif
}
JoystickScriptingInterface::~JoystickScriptingInterface() {
SDL2Manager::~SDL2Manager() {
#ifdef HAVE_SDL2
qDeleteAll(_openJoysticks);
@ -75,34 +68,27 @@ JoystickScriptingInterface::~JoystickScriptingInterface() {
#endif
}
const QObjectList JoystickScriptingInterface::getAllJoysticks() const {
QObjectList objectList;
#ifdef HAVE_SDL2
const QList<Joystick*> joystickList = _openJoysticks.values();
for (int i = 0; i < joystickList.length(); i++) {
objectList << joystickList[i];
}
#endif
return objectList;
SDL2Manager* SDL2Manager::getInstance() {
static SDL2Manager sharedInstance;
return &sharedInstance;
}
Joystick* JoystickScriptingInterface::joystickWithName(const QString& name) {
void SDL2Manager::focusOutEvent() {
#ifdef HAVE_SDL2
QMap<SDL_JoystickID, Joystick*>::iterator iter = _openJoysticks.begin();
while (iter != _openJoysticks.end()) {
if (iter.value()->getName() == name) {
return iter.value();
}
iter++;
for (auto joystick : _openJoysticks) {
joystick->focusOutEvent();
}
#endif
return NULL;
}
void JoystickScriptingInterface::update() {
void SDL2Manager::update() {
#ifdef HAVE_SDL2
if (_isInitialized) {
PerformanceTimer perfTimer("JoystickScriptingInterface::update");
for (auto joystick : _openJoysticks) {
joystick->update();
}
PerformanceTimer perfTimer("SDL2Manager::update");
SDL_GameControllerUpdate();
SDL_Event event;
while (SDL_PollEvent(&event)) {
@ -120,16 +106,16 @@ void JoystickScriptingInterface::update() {
if (event.cbutton.button == SDL_CONTROLLER_BUTTON_BACK) {
// this will either start or stop a global back event
QEvent::Type backType = (event.type == SDL_CONTROLLERBUTTONDOWN)
? HFBackEvent::startType()
: HFBackEvent::endType();
? HFBackEvent::startType()
: HFBackEvent::endType();
HFBackEvent backEvent(backType);
qApp->sendEvent(qApp, &backEvent);
} else if (event.cbutton.button == SDL_CONTROLLER_BUTTON_A) {
// this will either start or stop a global action event
QEvent::Type actionType = (event.type == SDL_CONTROLLERBUTTONDOWN)
? HFActionEvent::startType()
: HFActionEvent::endType();
? HFActionEvent::startType()
: HFActionEvent::endType();
// global action events fire in the center of the screen
Application* app = Application::getInstance();
@ -141,14 +127,19 @@ void JoystickScriptingInterface::update() {
} else if (event.type == SDL_CONTROLLERDEVICEADDED) {
SDL_GameController* controller = SDL_GameControllerOpen(event.cdevice.which);
SDL_JoystickID id = getInstanceId(controller);
Joystick* joystick = new Joystick(id, SDL_GameControllerName(controller), controller);
_openJoysticks[id] = joystick;
emit joystickAdded(joystick);
if (!_openJoysticks.contains(id)) {
Joystick* joystick = new Joystick(id, SDL_GameControllerName(controller), controller);
_openJoysticks[id] = joystick;
joystick->registerToUserInputMapper(*Application::getUserInputMapper());
joystick->assignDefaultInputMapping(*Application::getUserInputMapper());
emit joystickAdded(joystick);
}
} else if (event.type == SDL_CONTROLLERDEVICEREMOVED) {
Joystick* joystick = _openJoysticks[event.cdevice.which];
_openJoysticks.remove(event.cdevice.which);
Application::getUserInputMapper()->removeDevice(joystick->getDeviceID());
emit joystickRemoved(joystick);
}
}

View file

@ -1,77 +1,46 @@
//
// JoystickScriptingInterface.h
// SDL2Manager.h
// interface/src/devices
//
// Created by Andrzej Kapolka on 5/15/14.
// Copyright 2014 High Fidelity, Inc.
// Created by Sam Gondelman on 6/5/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_JoystickScriptingInterface_h
#define hifi_JoystickScriptingInterface_h
#include <QObject>
#include <QVector>
#ifndef hifi__SDL2Manager_h
#define hifi__SDL2Manager_h
#ifdef HAVE_SDL2
#include <SDL.h>
#endif
#include "ui/UserInputMapper.h"
#include "devices/Joystick.h"
/// Handles joystick input through SDL.
class JoystickScriptingInterface : public QObject {
class SDL2Manager : public QObject {
Q_OBJECT
#ifdef HAVE_SDL2
Q_PROPERTY(int AXIS_INVALID READ axisInvalid)
Q_PROPERTY(int AXIS_LEFT_X READ axisLeftX)
Q_PROPERTY(int AXIS_LEFT_Y READ axisLeftY)
Q_PROPERTY(int AXIS_RIGHT_X READ axisRightX)
Q_PROPERTY(int AXIS_RIGHT_Y READ axisRightY)
Q_PROPERTY(int AXIS_TRIGGER_LEFT READ axisTriggerLeft)
Q_PROPERTY(int AXIS_TRIGGER_RIGHT READ axisTriggerRight)
Q_PROPERTY(int AXIS_MAX READ axisMax)
Q_PROPERTY(int BUTTON_INVALID READ buttonInvalid)
Q_PROPERTY(int BUTTON_FACE_BOTTOM READ buttonFaceBottom)
Q_PROPERTY(int BUTTON_FACE_RIGHT READ buttonFaceRight)
Q_PROPERTY(int BUTTON_FACE_LEFT READ buttonFaceLeft)
Q_PROPERTY(int BUTTON_FACE_TOP READ buttonFaceTop)
Q_PROPERTY(int BUTTON_BACK READ buttonBack)
Q_PROPERTY(int BUTTON_GUIDE READ buttonGuide)
Q_PROPERTY(int BUTTON_START READ buttonStart)
Q_PROPERTY(int BUTTON_LEFT_STICK READ buttonLeftStick)
Q_PROPERTY(int BUTTON_RIGHT_STICK READ buttonRightStick)
Q_PROPERTY(int BUTTON_LEFT_SHOULDER READ buttonLeftShoulder)
Q_PROPERTY(int BUTTON_RIGHT_SHOULDER READ buttonRightShoulder)
Q_PROPERTY(int BUTTON_DPAD_UP READ buttonDpadUp)
Q_PROPERTY(int BUTTON_DPAD_DOWN READ buttonDpadDown)
Q_PROPERTY(int BUTTON_DPAD_LEFT READ buttonDpadLeft)
Q_PROPERTY(int BUTTON_DPAD_RIGHT READ buttonDpadRight)
Q_PROPERTY(int BUTTON_MAX READ buttonMax)
Q_PROPERTY(int BUTTON_PRESSED READ buttonPressed)
Q_PROPERTY(int BUTTON_RELEASED READ buttonRelease)
#endif
public:
static JoystickScriptingInterface& getInstance();
SDL2Manager();
~SDL2Manager();
void focusOutEvent();
void update();
public slots:
Joystick* joystickWithName(const QString& name);
const QObjectList getAllJoysticks() const;
static SDL2Manager* getInstance();
signals:
void joystickAdded(Joystick* joystick);
void joystickRemoved(Joystick* joystick);
private:
#ifdef HAVE_SDL2
SDL_JoystickID getInstanceId(SDL_GameController* controller);
int axisInvalid() const { return SDL_CONTROLLER_AXIS_INVALID; }
int axisLeftX() const { return SDL_CONTROLLER_AXIS_LEFTX; }
int axisLeftY() const { return SDL_CONTROLLER_AXIS_LEFTY; }
@ -80,7 +49,7 @@ private:
int axisTriggerLeft() const { return SDL_CONTROLLER_AXIS_TRIGGERLEFT; }
int axisTriggerRight() const { return SDL_CONTROLLER_AXIS_TRIGGERRIGHT; }
int axisMax() const { return SDL_CONTROLLER_AXIS_MAX; }
int buttonInvalid() const { return SDL_CONTROLLER_BUTTON_INVALID; }
int buttonFaceBottom() const { return SDL_CONTROLLER_BUTTON_A; }
int buttonFaceRight() const { return SDL_CONTROLLER_BUTTON_B; }
@ -98,18 +67,15 @@ private:
int buttonDpadLeft() const { return SDL_CONTROLLER_BUTTON_DPAD_LEFT; }
int buttonDpadRight() const { return SDL_CONTROLLER_BUTTON_DPAD_RIGHT; }
int buttonMax() const { return SDL_CONTROLLER_BUTTON_MAX; }
int buttonPressed() const { return SDL_PRESSED; }
int buttonRelease() const { return SDL_RELEASED; }
#endif
JoystickScriptingInterface();
~JoystickScriptingInterface();
#ifdef HAVE_SDL2
QMap<SDL_JoystickID, Joystick*> _openJoysticks;
#endif
bool _isInitialized;
};
#endif // hifi_JoystickScriptingInterface_h
#endif // hifi__SDL2Manager_h

View file

@ -19,6 +19,10 @@
#include "UserActivityLogger.h"
#include "InterfaceLogging.h"
// These bits aren't used for buttons, so they can be used as masks:
const unsigned int LEFT_MASK = 0;
const unsigned int RIGHT_MASK = 1U << 1;
#ifdef HAVE_SIXENSE
const int CALIBRATION_STATE_IDLE = 0;
@ -61,6 +65,8 @@ SixenseManager::SixenseManager() :
_bumperPressed[1] = false;
_oldX[1] = -1;
_oldY[1] = -1;
_prevPalms[0] = nullptr;
_prevPalms[1] = nullptr;
}
SixenseManager::~SixenseManager() {
@ -147,6 +153,7 @@ void SixenseManager::update(float deltaTime) {
#ifdef HAVE_SIXENSE
Hand* hand = DependencyManager::get<AvatarManager>()->getMyAvatar()->getHand();
if (_isInitialized && _isEnabled) {
_buttonPressedMap.clear();
#ifdef __APPLE__
SixenseBaseFunction sixenseGetNumActiveControllers =
(SixenseBaseFunction) _sixenseLibrary->resolve("sixenseGetNumActiveControllers");
@ -154,12 +161,24 @@ void SixenseManager::update(float deltaTime) {
if (sixenseGetNumActiveControllers() == 0) {
_hydrasConnected = false;
if (_deviceID != 0) {
Application::getUserInputMapper()->removeDevice(_deviceID);
_deviceID = 0;
if (_prevPalms[0]) {
_prevPalms[0]->setActive(false);
}
if (_prevPalms[1]) {
_prevPalms[1]->setActive(false);
}
}
return;
}
PerformanceTimer perfTimer("sixense");
if (!_hydrasConnected) {
_hydrasConnected = true;
registerToUserInputMapper(*Application::getUserInputMapper());
getInstance().assignDefaultInputMapping(*Application::getUserInputMapper());
UserActivityLogger::getInstance().connectedDevice("spatial_controller", "hydra");
}
@ -198,6 +217,7 @@ void SixenseManager::update(float deltaTime) {
for (size_t j = 0; j < hand->getNumPalms(); j++) {
if (hand->getPalms()[j].getSixenseID() == data->controller_index) {
palm = &(hand->getPalms()[j]);
_prevPalms[numActiveControllers - 1] = palm;
foundHand = true;
}
}
@ -206,6 +226,7 @@ void SixenseManager::update(float deltaTime) {
hand->getPalms().push_back(newPalm);
palm = &(hand->getPalms()[hand->getNumPalms() - 1]);
palm->setSixenseID(data->controller_index);
_prevPalms[numActiveControllers - 1] = palm;
qCDebug(interfaceapp, "Found new Sixense controller, ID %i", data->controller_index);
}
@ -216,12 +237,14 @@ void SixenseManager::update(float deltaTime) {
palm->setActive(false); // if this isn't a Sixense ID palm, always make it inactive
}
// Read controller buttons and joystick into the hand
palm->setControllerButtons(data->buttons);
palm->setTrigger(data->trigger);
palm->setJoystick(data->joystick_x, data->joystick_y);
handleButtonEvent(data->buttons, numActiveControllers - 1);
handleAxisEvent(data->joystick_x, data->joystick_y, data->trigger, numActiveControllers - 1);
// Emulate the mouse so we can use scripts
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput) && !_controllersAtBase) {
emulateMouse(palm, numActiveControllers - 1);
@ -590,3 +613,143 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
#endif // HAVE_SIXENSE
void SixenseManager::focusOutEvent() {
_axisStateMap.clear();
_buttonPressedMap.clear();
};
void SixenseManager::handleAxisEvent(float stickX, float stickY, float trigger, int index) {
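    // Split each signed stick axis into two non-negative channels so that, for example,
    // stickY == -0.4f yields 0.0f on the AXIS_Y_POS channel and 0.4f on AXIS_Y_NEG.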
_axisStateMap[makeInput(AXIS_Y_POS, index).getChannel()] = (stickY > 0.0f) ? stickY : 0.0f;
_axisStateMap[makeInput(AXIS_Y_NEG, index).getChannel()] = (stickY < 0.0f) ? -stickY : 0.0f;
_axisStateMap[makeInput(AXIS_X_POS, index).getChannel()] = (stickX > 0.0f) ? stickX : 0.0f;
_axisStateMap[makeInput(AXIS_X_NEG, index).getChannel()] = (stickX < 0.0f) ? -stickX : 0.0f;
_axisStateMap[makeInput(BACK_TRIGGER, index).getChannel()] = trigger;
}
void SixenseManager::handleButtonEvent(unsigned int buttons, int index) {
if (buttons & BUTTON_0) {
_buttonPressedMap.insert(makeInput(BUTTON_0, index).getChannel());
}
if (buttons & BUTTON_1) {
_buttonPressedMap.insert(makeInput(BUTTON_1, index).getChannel());
}
if (buttons & BUTTON_2) {
_buttonPressedMap.insert(makeInput(BUTTON_2, index).getChannel());
}
if (buttons & BUTTON_3) {
_buttonPressedMap.insert(makeInput(BUTTON_3, index).getChannel());
}
if (buttons & BUTTON_4) {
_buttonPressedMap.insert(makeInput(BUTTON_4, index).getChannel());
}
if (buttons & BUTTON_FWD) {
_buttonPressedMap.insert(makeInput(BUTTON_FWD, index).getChannel());
}
if (buttons & BUTTON_TRIGGER) {
_buttonPressedMap.insert(makeInput(BUTTON_TRIGGER, index).getChannel());
}
}
void SixenseManager::registerToUserInputMapper(UserInputMapper& mapper) {
// Grab the current free device ID
_deviceID = mapper.getFreeDeviceID();
auto proxy = UserInputMapper::DeviceProxy::Pointer(new UserInputMapper::DeviceProxy("Hydra"));
proxy->getButton = [this] (const UserInputMapper::Input& input, int timestamp) -> bool { return this->getButton(input.getChannel()); };
proxy->getAxis = [this] (const UserInputMapper::Input& input, int timestamp) -> float { return this->getAxis(input.getChannel()); };
proxy->getAvailabeInputs = [this] () -> QVector<UserInputMapper::InputPair> {
QVector<UserInputMapper::InputPair> availableInputs;
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_0, 0), "Left Start"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_1, 0), "Left Button 1"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_2, 0), "Left Button 2"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_3, 0), "Left Button 3"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_4, 0), "Left Button 4"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_FWD, 0), "L1"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BACK_TRIGGER, 0), "L2"));
availableInputs.append(UserInputMapper::InputPair(makeInput(AXIS_Y_POS, 0), "Left Stick Up"));
availableInputs.append(UserInputMapper::InputPair(makeInput(AXIS_Y_NEG, 0), "Left Stick Down"));
availableInputs.append(UserInputMapper::InputPair(makeInput(AXIS_X_POS, 0), "Left Stick Right"));
availableInputs.append(UserInputMapper::InputPair(makeInput(AXIS_X_NEG, 0), "Left Stick Left"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_TRIGGER, 0), "Left Trigger Press"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_0, 1), "Right Start"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_1, 1), "Right Button 1"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_2, 1), "Right Button 2"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_3, 1), "Right Button 3"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_4, 1), "Right Button 4"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_FWD, 1), "R1"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BACK_TRIGGER, 1), "R2"));
availableInputs.append(UserInputMapper::InputPair(makeInput(AXIS_Y_POS, 1), "Right Stick Up"));
availableInputs.append(UserInputMapper::InputPair(makeInput(AXIS_Y_NEG, 1), "Right Stick Down"));
availableInputs.append(UserInputMapper::InputPair(makeInput(AXIS_X_POS, 1), "Right Stick Right"));
availableInputs.append(UserInputMapper::InputPair(makeInput(AXIS_X_NEG, 1), "Right Stick Left"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_TRIGGER, 1), "Right Trigger Press"));
return availableInputs;
};
proxy->resetDeviceBindings = [this, &mapper] () -> bool {
mapper.removeAllInputChannelsForDevice(_deviceID);
this->assignDefaultInputMapping(mapper);
return true;
};
mapper.registerDevice(_deviceID, proxy);
}
void SixenseManager::assignDefaultInputMapping(UserInputMapper& mapper) {
const float JOYSTICK_MOVE_SPEED = 1.0f;
const float JOYSTICK_YAW_SPEED = 0.5f;
const float JOYSTICK_PITCH_SPEED = 0.25f;
const float BUTTON_MOVE_SPEED = 1.0f;
const float BOOM_SPEED = 0.1f;
// Left Joystick: Movement, strafing
mapper.addInputChannel(UserInputMapper::LONGITUDINAL_FORWARD, makeInput(AXIS_Y_POS, 0), JOYSTICK_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::LONGITUDINAL_BACKWARD, makeInput(AXIS_Y_NEG, 0), JOYSTICK_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_RIGHT, makeInput(AXIS_X_POS, 0), JOYSTICK_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_LEFT, makeInput(AXIS_X_NEG, 0), JOYSTICK_MOVE_SPEED);
// Right Joystick: Camera orientation
mapper.addInputChannel(UserInputMapper::YAW_RIGHT, makeInput(AXIS_X_POS, 1), JOYSTICK_YAW_SPEED);
mapper.addInputChannel(UserInputMapper::YAW_LEFT, makeInput(AXIS_X_NEG, 1), JOYSTICK_YAW_SPEED);
mapper.addInputChannel(UserInputMapper::PITCH_UP, makeInput(AXIS_Y_POS, 1), JOYSTICK_PITCH_SPEED);
mapper.addInputChannel(UserInputMapper::PITCH_DOWN, makeInput(AXIS_Y_NEG, 1), JOYSTICK_PITCH_SPEED);
// Buttons
mapper.addInputChannel(UserInputMapper::BOOM_IN, makeInput(BUTTON_3, 0), BOOM_SPEED);
mapper.addInputChannel(UserInputMapper::BOOM_OUT, makeInput(BUTTON_1, 0), BOOM_SPEED);
mapper.addInputChannel(UserInputMapper::VERTICAL_UP, makeInput(BUTTON_3, 1), BUTTON_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::VERTICAL_DOWN, makeInput(BUTTON_1, 1), BUTTON_MOVE_SPEED);
}
float SixenseManager::getButton(int channel) const {
if (!_buttonPressedMap.empty()) {
if (_buttonPressedMap.find(channel) != _buttonPressedMap.end()) {
return 1.0f;
} else {
return 0.0f;
}
}
return 0.0f;
}
float SixenseManager::getAxis(int channel) const {
auto axis = _axisStateMap.find(channel);
if (axis != _axisStateMap.end()) {
return (*axis).second;
} else {
return 0.0f;
}
}
UserInputMapper::Input SixenseManager::makeInput(unsigned int button, int index) {
return UserInputMapper::Input(_deviceID, button | (index == 0 ? LEFT_MASK : RIGHT_MASK), UserInputMapper::ChannelType::BUTTON);
}
UserInputMapper::Input SixenseManager::makeInput(SixenseManager::JoystickAxisChannel axis, int index) {
return UserInputMapper::Input(_deviceID, axis | (index == 0 ? LEFT_MASK : RIGHT_MASK), UserInputMapper::ChannelType::AXIS);
}
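// Worked example of the mask composition above (values follow from the constants in
// SixenseManager.h, where BUTTON_FWD == 1U << 7 and RIGHT_MASK == 1U << 1):
//   left hand:  BUTTON_FWD | LEFT_MASK  == 0x80   (LEFT_MASK is 0, so the bits are unchanged)
//   right hand: BUTTON_FWD | RIGHT_MASK == 0x82
// Bit 1 is never used by the BUTTON_* or axis constants, so the left- and right-hand
// variants of the same control can never collide.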

View file

@ -13,6 +13,7 @@
#define hifi_SixenseManager_h
#include <QObject>
#include <unordered_set>
#ifdef HAVE_SIXENSE
#include <glm/glm.hpp>
@ -25,6 +26,8 @@
#endif
#include "ui/UserInputMapper.h"
class PalmData;
const unsigned int BUTTON_0 = 1U << 0; // the skinny button between 1 and 2
@ -33,6 +36,7 @@ const unsigned int BUTTON_2 = 1U << 6;
const unsigned int BUTTON_3 = 1U << 3;
const unsigned int BUTTON_4 = 1U << 4;
const unsigned int BUTTON_FWD = 1U << 7;
const unsigned int BUTTON_TRIGGER = 1U << 8;
// Event type that represents using the controller
const unsigned int CONTROLLER_0_EVENT = 1500U;
@ -45,6 +49,14 @@ const bool DEFAULT_INVERT_SIXENSE_MOUSE_BUTTONS = false;
class SixenseManager : public QObject {
Q_OBJECT
public:
enum JoystickAxisChannel {
AXIS_Y_POS = 1U << 0,
AXIS_Y_NEG = 1U << 3,
AXIS_X_POS = 1U << 4,
AXIS_X_NEG = 1U << 5,
BACK_TRIGGER = 1U << 6,
};
static SixenseManager& getInstance();
void initialize();
@ -60,6 +72,21 @@ public:
bool getInvertButtons() const { return _invertButtons; }
void setInvertButtons(bool invertSixenseButtons) { _invertButtons = invertSixenseButtons; }
typedef std::unordered_set<int> ButtonPressedMap;
typedef std::map<int, float> AxisStateMap;
float getButton(int channel) const;
float getAxis(int channel) const;
UserInputMapper::Input makeInput(unsigned int button, int index);
UserInputMapper::Input makeInput(JoystickAxisChannel axis, int index);
void registerToUserInputMapper(UserInputMapper& mapper);
void assignDefaultInputMapping(UserInputMapper& mapper);
void update();
void focusOutEvent();
public slots:
void toggleSixense(bool shouldEnable);
void setFilter(bool filter);
@ -69,6 +96,8 @@ private:
SixenseManager();
~SixenseManager();
void handleButtonEvent(unsigned int buttons, int index);
void handleAxisEvent(float x, float y, float trigger, int index);
#ifdef HAVE_SIXENSE
void updateCalibration(const sixenseControllerData* controllers);
void emulateMouse(PalmData* palm, int index);
@ -104,12 +133,19 @@ private:
bool _bumperPressed[2];
int _oldX[2];
int _oldY[2];
PalmData* _prevPalms[2];
bool _lowVelocityFilter;
bool _controllersAtBase;
float _reticleMoveSpeed = DEFAULT_SIXENSE_RETICLE_MOVE_SPEED;
bool _invertButtons = DEFAULT_INVERT_SIXENSE_MOUSE_BUTTONS;
protected:
int _deviceID = 0;
ButtonPressedMap _buttonPressedMap;
AxisStateMap _axisStateMap;
};
#endif // hifi_SixenseManager_h

View file

@ -35,6 +35,16 @@ void AddressBarDialog::loadAddress(const QString& address) {
}
}
void AddressBarDialog::loadBack() {
qDebug() << "Called LoadBack";
DependencyManager::get<AddressManager>()->goBack();
}
void AddressBarDialog::loadForward() {
qDebug() << "Called LoadForward";
DependencyManager::get<AddressManager>()->goForward();
}
void AddressBarDialog::displayAddressOfflineMessage() {
OffscreenUi::error("That user or place is currently offline");
}

View file

@ -28,6 +28,8 @@ protected:
void hide();
Q_INVOKABLE void loadAddress(const QString& address);
Q_INVOKABLE void loadBack();
Q_INVOKABLE void loadForward();
};
#endif

View file

@ -376,7 +376,7 @@ void ApplicationOverlay::displayOverlayTextureHmd(Camera& whichCamera) {
textureAspectRatio = _textureAspectRatio;
_overlays.buildVBO(_textureFov, _textureAspectRatio, 80, 80);
}
}
with_each_texture(_overlays.getTexture(), _newUiTexture, [&] {
_overlays.render();
@ -605,7 +605,6 @@ void ApplicationOverlay::renderPointers() {
_lastMouseMove = usecTimestampNow();
} else if (usecTimestampNow() - _lastMouseMove > MAX_IDLE_TIME * USECS_PER_SECOND) {
//float pitch = 0.0f, yaw = 0.0f, roll = 0.0f; // radians
//OculusManager::getEulerAngles(yaw, pitch, roll);
glm::quat orientation = qApp->getHeadOrientation(); // (glm::vec3(pitch, yaw, roll));
glm::vec3 result;

View file

@ -56,8 +56,6 @@ HMDToolsDialog::HMDToolsDialog(QWidget* parent) :
this->QDialog::setLayout(form);
_wasMoved = false;
_previousRect = Application::getInstance()->getWindow()->rect();
Application::getInstance()->getWindow()->activateWindow();
// watch for our application window moving screens. If it does we want to update our screen details
@ -136,24 +134,6 @@ void HMDToolsDialog::enterHDMMode() {
if (!_inHDMMode) {
_switchModeButton->setText("Leave HMD Mode");
_debugDetails->setText(getDebugDetails());
_hmdScreenNumber = OculusManager::getHMDScreen();
if (_hmdScreenNumber >= 0) {
QWindow* mainWindow = Application::getInstance()->getWindow()->windowHandle();
_hmdScreen = QGuiApplication::screens()[_hmdScreenNumber];
_previousRect = Application::getInstance()->getWindow()->rect();
_previousRect = QRect(mainWindow->mapToGlobal(_previousRect.topLeft()),
mainWindow->mapToGlobal(_previousRect.bottomRight()));
_previousScreen = mainWindow->screen();
QRect rect = QApplication::desktop()->screenGeometry(_hmdScreenNumber);
mainWindow->setScreen(_hmdScreen);
mainWindow->setGeometry(rect);
_wasMoved = true;
}
// if we're on a single screen setup, then hide our tools window when entering HMD mode
if (QApplication::desktop()->screenCount() == 1) {
@ -161,58 +141,21 @@ void HMDToolsDialog::enterHDMMode() {
}
Application::getInstance()->setEnableVRMode(true);
const int SLIGHT_DELAY = 500;
// If we go to fullscreen immediately, it ends up on the primary monitor,
// even though we've already moved the window. By adding this delay, the
// fullscreen target screen ends up correct.
QTimer::singleShot(SLIGHT_DELAY, this, [&]{
Application::getInstance()->setFullscreen(true);
activateWindowAfterEnterMode();
});
_inHDMMode = true;
}
}
void HMDToolsDialog::activateWindowAfterEnterMode() {
Application::getInstance()->getWindow()->activateWindow();
// center the cursor on the main application window
centerCursorOnWidget(Application::getInstance()->getWindow());
}
void HMDToolsDialog::leaveHDMMode() {
if (_inHDMMode) {
_switchModeButton->setText("Enter HMD Mode");
_debugDetails->setText(getDebugDetails());
Application::getInstance()->setEnableVRMode(false);
Application::getInstance()->setFullscreen(false);
Application::getInstance()->getWindow()->activateWindow();
if (_wasMoved) {
QWindow* mainWindow = Application::getInstance()->getWindow()->windowHandle();
mainWindow->setScreen(_previousScreen);
mainWindow->setGeometry(_previousRect);
const int SLIGHT_DELAY = 1500;
QTimer::singleShot(SLIGHT_DELAY, this, SLOT(moveWindowAfterLeaveMode()));
}
_wasMoved = false;
_inHDMMode = false;
}
}
void HMDToolsDialog::moveWindowAfterLeaveMode() {
QWindow* mainWindow = Application::getInstance()->getWindow()->windowHandle();
mainWindow->setScreen(_previousScreen);
mainWindow->setGeometry(_previousRect);
Application::getInstance()->getWindow()->activateWindow();
Application::getInstance()->resetSensors();
}
void HMDToolsDialog::reject() {
// Just regularly close upon ESC
close();
@ -244,13 +187,15 @@ void HMDToolsDialog::hideEvent(QHideEvent* event) {
void HMDToolsDialog::aboutToQuit() {
if (_inHDMMode) {
// FIXME this is ineffective because it doesn't trigger the menu to
// save the fact that VR Mode is not checked.
leaveHDMMode();
}
}
void HMDToolsDialog::screenCountChanged(int newCount) {
if (!OculusManager::isConnected()) {
OculusManager::connect();
//OculusManager::connect();
}
int hmdScreenNumber = OculusManager::getHMDScreen();

View file

@ -35,8 +35,6 @@ signals:
public slots:
void reject();
void switchModeClicked(bool checked);
void activateWindowAfterEnterMode();
void moveWindowAfterLeaveMode();
void applicationWindowScreenChanged(QScreen* screen);
void aboutToQuit();
void screenCountChanged(int newCount);
@ -51,8 +49,6 @@ private:
void enterHDMMode();
void leaveHDMMode();
bool _wasMoved;
QRect _previousRect;
QScreen* _previousScreen;
QScreen* _hmdScreen;
int _hmdScreenNumber;

View file

@ -106,6 +106,11 @@ void UserInputMapper::removeAllInputChannelsForDevice(uint16 device) {
}
}
void UserInputMapper::removeDevice(int device) {
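    // Called by the device managers in this change (SixenseManager, SDL2Manager) when
    // hardware disconnects, so the device's channels and registration don't linger.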
removeAllInputChannelsForDevice((uint16) device);
_registeredDevices.erase(device);
}
int UserInputMapper::getInputChannels(InputChannels& channels) const {
for (auto& channel : _actionToInputsMap) {
channels.push_back(channel.second);

View file

@ -189,6 +189,7 @@ public:
bool removeInputChannel(InputChannel channel);
void removeAllInputChannels();
void removeAllInputChannelsForDevice(uint16 device);
void removeDevice(int device);
//Grab all the input channels currently in use, return the number
int getInputChannels(InputChannels& channels) const;
QVector<InputChannel> getAllInputsForDevice(uint16 device);

View file

@ -43,72 +43,90 @@ void BillboardOverlay::render(RenderArgs* args) {
return;
}
glEnable(GL_ALPHA_TEST);
glAlphaFunc(GL_GREATER, 0.5f);
glm::quat rotation;
if (_isFacingAvatar) {
// rotate about vertical to face the camera
rotation = Application::getInstance()->getCamera()->getRotation();
rotation *= glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));
rotation *= getRotation();
} else {
rotation = getRotation();
}
glEnable(GL_TEXTURE_2D);
glDisable(GL_LIGHTING);
float imageWidth = _texture->getWidth();
float imageHeight = _texture->getHeight();
glBindTexture(GL_TEXTURE_2D, _texture->getID());
QRect fromImage;
if (_fromImage.isNull()) {
fromImage.setX(0);
fromImage.setY(0);
fromImage.setWidth(imageWidth);
fromImage.setHeight(imageHeight);
} else {
float scaleX = imageWidth / _texture->getOriginalWidth();
float scaleY = imageHeight / _texture->getOriginalHeight();
glPushMatrix(); {
glTranslatef(_position.x, _position.y, _position.z);
glm::quat rotation;
if (_isFacingAvatar) {
// rotate about vertical to face the camera
rotation = Application::getInstance()->getCamera()->getRotation();
rotation *= glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));
rotation *= getRotation();
} else {
rotation = getRotation();
}
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glScalef(_scale, _scale, _scale);
fromImage.setX(scaleX * _fromImage.x());
fromImage.setY(scaleY * _fromImage.y());
fromImage.setWidth(scaleX * _fromImage.width());
fromImage.setHeight(scaleY * _fromImage.height());
}
const float MAX_COLOR = 255.0f;
xColor color = getColor();
float alpha = getAlpha();
float maxSize = glm::max(fromImage.width(), fromImage.height());
float x = fromImage.width() / (2.0f * maxSize);
float y = -fromImage.height() / (2.0f * maxSize);
float imageWidth = _texture->getWidth();
float imageHeight = _texture->getHeight();
glm::vec2 topLeft(-x, -y);
glm::vec2 bottomRight(x, y);
glm::vec2 texCoordTopLeft(fromImage.x() / imageWidth, fromImage.y() / imageHeight);
glm::vec2 texCoordBottomRight((fromImage.x() + fromImage.width()) / imageWidth,
(fromImage.y() + fromImage.height()) / imageHeight);
QRect fromImage;
if (_fromImage.isNull()) {
fromImage.setX(0);
fromImage.setY(0);
fromImage.setWidth(imageWidth);
fromImage.setHeight(imageHeight);
} else {
float scaleX = imageWidth / _texture->getOriginalWidth();
float scaleY = imageHeight / _texture->getOriginalHeight();
const float MAX_COLOR = 255.0f;
xColor color = getColor();
float alpha = getAlpha();
fromImage.setX(scaleX * _fromImage.x());
fromImage.setY(scaleY * _fromImage.y());
fromImage.setWidth(scaleX * _fromImage.width());
fromImage.setHeight(scaleY * _fromImage.height());
}
auto batch = args->_batch;
float maxSize = glm::max(fromImage.width(), fromImage.height());
float x = fromImage.width() / (2.0f * maxSize);
float y = -fromImage.height() / (2.0f * maxSize);
if (batch) {
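        // New render-pipeline path: record the draw into the gpu::Batch supplied through
        // RenderArgs; the else branch below keeps the legacy immediate-mode GL path.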
Transform transform;
transform.setTranslation(_position);
transform.setRotation(rotation);
transform.setScale(_scale);
glm::vec2 topLeft(-x, -y);
glm::vec2 bottomRight(x, y);
glm::vec2 texCoordTopLeft(fromImage.x() / imageWidth, fromImage.y() / imageHeight);
glm::vec2 texCoordBottomRight((fromImage.x() + fromImage.width()) / imageWidth,
(fromImage.y() + fromImage.height()) / imageHeight);
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight,
batch->setModelTransform(transform);
batch->setUniformTexture(0, _texture->getGPUTexture());
DependencyManager::get<GeometryCache>()->renderQuad(*batch, topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight,
glm::vec4(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha));
batch->setUniformTexture(0, args->_whiteTexture); // restore default white color after me
} else {
glEnable(GL_ALPHA_TEST);
glAlphaFunc(GL_GREATER, 0.5f);
} glPopMatrix();
glEnable(GL_TEXTURE_2D);
glDisable(GL_LIGHTING);
glDisable(GL_TEXTURE_2D);
glEnable(GL_LIGHTING);
glDisable(GL_ALPHA_TEST);
glBindTexture(GL_TEXTURE_2D, _texture->getID());
glBindTexture(GL_TEXTURE_2D, 0);
glPushMatrix(); {
glTranslatef(_position.x, _position.y, _position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glScalef(_scale, _scale, _scale);
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight,
glm::vec4(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha));
} glPopMatrix();
glDisable(GL_TEXTURE_2D);
glEnable(GL_LIGHTING);
glDisable(GL_ALPHA_TEST);
glBindTexture(GL_TEXTURE_2D, 0);
}
}
void BillboardOverlay::setProperties(const QScriptValue &properties) {

View file

@ -35,13 +35,6 @@ void Cube3DOverlay::render(RenderArgs* args) {
return; // do nothing if we're not visible
}
float glowLevel = getGlowLevel();
Glower* glower = NULL;
if (glowLevel > 0.0f) {
glower = new Glower(glowLevel);
}
float alpha = getAlpha();
xColor color = getColor();
const float MAX_COLOR = 255.0f;
@ -49,91 +42,161 @@ void Cube3DOverlay::render(RenderArgs* args) {
//glDisable(GL_LIGHTING);
// TODO: handle registration point??
// TODO: handle registration point??
glm::vec3 position = getPosition();
glm::vec3 center = getCenter();
glm::vec3 dimensions = getDimensions();
glm::quat rotation = getRotation();
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glPushMatrix();
glm::vec3 positionToCenter = center - position;
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
if (_isSolid) {
if (_borderSize > 0) {
// Draw a cube at a larger size behind the main cube, creating
// a border effect.
// Disable writing to the depth mask so that the "border" cube will not
// occlude the main cube. This means the border could be covered by
// overlays that are further back and drawn later, but this is good
// enough for the use-case.
glDepthMask(GL_FALSE);
glPushMatrix();
glScalef(dimensions.x * _borderSize, dimensions.y * _borderSize, dimensions.z * _borderSize);
if (_drawOnHUD) {
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, glm::vec4(1.0f, 1.0f, 1.0f, alpha));
} else {
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, glm::vec4(1.0f, 1.0f, 1.0f, alpha));
}
auto batch = args->_batch;
glPopMatrix();
glDepthMask(GL_TRUE);
}
if (batch) {
Transform transform;
transform.setTranslation(position);
transform.setRotation(rotation);
if (_isSolid) {
// if (_borderSize > 0) {
// // Draw a cube at a larger size behind the main cube, creating
// // a border effect.
// // Disable writing to the depth mask so that the "border" cube will not
// // occlude the main cube. This means the border could be covered by
// // overlays that are further back and drawn later, but this is good
// // enough for the use-case.
// transform.setScale(dimensions * _borderSize);
// batch->setModelTransform(transform);
// DependencyManager::get<GeometryCache>()->renderSolidCube(*batch, 1.0f, glm::vec4(1.0f, 1.0f, 1.0f, alpha));
// }
transform.setScale(dimensions);
batch->setModelTransform(transform);
DependencyManager::get<GeometryCache>()->renderSolidCube(*batch, 1.0f, cubeColor);
} else {
if (getIsDashedLine()) {
transform.setScale(1.0f);
batch->setModelTransform(transform);
glm::vec3 halfDimensions = dimensions / 2.0f;
glm::vec3 bottomLeftNear(-halfDimensions.x, -halfDimensions.y, -halfDimensions.z);
glm::vec3 bottomRightNear(halfDimensions.x, -halfDimensions.y, -halfDimensions.z);
glm::vec3 topLeftNear(-halfDimensions.x, halfDimensions.y, -halfDimensions.z);
glm::vec3 topRightNear(halfDimensions.x, halfDimensions.y, -halfDimensions.z);
glm::vec3 bottomLeftFar(-halfDimensions.x, -halfDimensions.y, halfDimensions.z);
glm::vec3 bottomRightFar(halfDimensions.x, -halfDimensions.y, halfDimensions.z);
glm::vec3 topLeftFar(-halfDimensions.x, halfDimensions.y, halfDimensions.z);
glm::vec3 topRightFar(halfDimensions.x, halfDimensions.y, halfDimensions.z);
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->renderDashedLine(*batch, bottomLeftNear, bottomRightNear, cubeColor);
geometryCache->renderDashedLine(*batch, bottomRightNear, bottomRightFar, cubeColor);
geometryCache->renderDashedLine(*batch, bottomRightFar, bottomLeftFar, cubeColor);
geometryCache->renderDashedLine(*batch, bottomLeftFar, bottomLeftNear, cubeColor);
geometryCache->renderDashedLine(*batch, topLeftNear, topRightNear, cubeColor);
geometryCache->renderDashedLine(*batch, topRightNear, topRightFar, cubeColor);
geometryCache->renderDashedLine(*batch, topRightFar, topLeftFar, cubeColor);
geometryCache->renderDashedLine(*batch, topLeftFar, topLeftNear, cubeColor);
geometryCache->renderDashedLine(*batch, bottomLeftNear, topLeftNear, cubeColor);
geometryCache->renderDashedLine(*batch, bottomRightNear, topRightNear, cubeColor);
geometryCache->renderDashedLine(*batch, bottomLeftFar, topLeftFar, cubeColor);
geometryCache->renderDashedLine(*batch, bottomRightFar, topRightFar, cubeColor);
glPushMatrix();
glScalef(dimensions.x, dimensions.y, dimensions.z);
if (_drawOnHUD) {
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, cubeColor);
} else {
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, cubeColor);
}
glPopMatrix();
} else {
glLineWidth(_lineWidth);
if (getIsDashedLine()) {
glm::vec3 halfDimensions = dimensions / 2.0f;
glm::vec3 bottomLeftNear(-halfDimensions.x, -halfDimensions.y, -halfDimensions.z);
glm::vec3 bottomRightNear(halfDimensions.x, -halfDimensions.y, -halfDimensions.z);
glm::vec3 topLeftNear(-halfDimensions.x, halfDimensions.y, -halfDimensions.z);
glm::vec3 topRightNear(halfDimensions.x, halfDimensions.y, -halfDimensions.z);
glm::vec3 bottomLeftFar(-halfDimensions.x, -halfDimensions.y, halfDimensions.z);
glm::vec3 bottomRightFar(halfDimensions.x, -halfDimensions.y, halfDimensions.z);
glm::vec3 topLeftFar(-halfDimensions.x, halfDimensions.y, halfDimensions.z);
glm::vec3 topRightFar(halfDimensions.x, halfDimensions.y, halfDimensions.z);
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->renderDashedLine(bottomLeftNear, bottomRightNear, cubeColor);
geometryCache->renderDashedLine(bottomRightNear, bottomRightFar, cubeColor);
geometryCache->renderDashedLine(bottomRightFar, bottomLeftFar, cubeColor);
geometryCache->renderDashedLine(bottomLeftFar, bottomLeftNear, cubeColor);
geometryCache->renderDashedLine(topLeftNear, topRightNear, cubeColor);
geometryCache->renderDashedLine(topRightNear, topRightFar, cubeColor);
geometryCache->renderDashedLine(topRightFar, topLeftFar, cubeColor);
geometryCache->renderDashedLine(topLeftFar, topLeftNear, cubeColor);
geometryCache->renderDashedLine(bottomLeftNear, topLeftNear, cubeColor);
geometryCache->renderDashedLine(bottomRightNear, topRightNear, cubeColor);
geometryCache->renderDashedLine(bottomLeftFar, topLeftFar, cubeColor);
geometryCache->renderDashedLine(bottomRightFar, topRightFar, cubeColor);
} else {
glScalef(dimensions.x, dimensions.y, dimensions.z);
DependencyManager::get<GeometryCache>()->renderWireCube(1.0f, cubeColor);
}
transform.setScale(dimensions);
batch->setModelTransform(transform);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(*batch, 1.0f, cubeColor);
}
glPopMatrix();
glPopMatrix();
}
} else {
float glowLevel = getGlowLevel();
Glower* glower = NULL;
if (glowLevel > 0.0f) {
glower = new Glower(glowLevel);
}
if (glower) {
delete glower;
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glPushMatrix();
glm::vec3 positionToCenter = center - position;
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
if (_isSolid) {
if (_borderSize > 0) {
// Draw a cube at a larger size behind the main cube, creating
// a border effect.
// Disable writing to the depth mask so that the "border" cube will not
// occlude the main cube. This means the border could be covered by
// overlays that are further back and drawn later, but this is good
// enough for the use-case.
glDepthMask(GL_FALSE);
glPushMatrix();
glScalef(dimensions.x * _borderSize, dimensions.y * _borderSize, dimensions.z * _borderSize);
if (_drawOnHUD) {
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, glm::vec4(1.0f, 1.0f, 1.0f, alpha));
} else {
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, glm::vec4(1.0f, 1.0f, 1.0f, alpha));
}
glPopMatrix();
glDepthMask(GL_TRUE);
}
glPushMatrix();
glScalef(dimensions.x, dimensions.y, dimensions.z);
if (_drawOnHUD) {
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, cubeColor);
} else {
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, cubeColor);
}
glPopMatrix();
} else {
glLineWidth(_lineWidth);
if (getIsDashedLine()) {
glm::vec3 halfDimensions = dimensions / 2.0f;
glm::vec3 bottomLeftNear(-halfDimensions.x, -halfDimensions.y, -halfDimensions.z);
glm::vec3 bottomRightNear(halfDimensions.x, -halfDimensions.y, -halfDimensions.z);
glm::vec3 topLeftNear(-halfDimensions.x, halfDimensions.y, -halfDimensions.z);
glm::vec3 topRightNear(halfDimensions.x, halfDimensions.y, -halfDimensions.z);
glm::vec3 bottomLeftFar(-halfDimensions.x, -halfDimensions.y, halfDimensions.z);
glm::vec3 bottomRightFar(halfDimensions.x, -halfDimensions.y, halfDimensions.z);
glm::vec3 topLeftFar(-halfDimensions.x, halfDimensions.y, halfDimensions.z);
glm::vec3 topRightFar(halfDimensions.x, halfDimensions.y, halfDimensions.z);
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->renderDashedLine(bottomLeftNear, bottomRightNear, cubeColor);
geometryCache->renderDashedLine(bottomRightNear, bottomRightFar, cubeColor);
geometryCache->renderDashedLine(bottomRightFar, bottomLeftFar, cubeColor);
geometryCache->renderDashedLine(bottomLeftFar, bottomLeftNear, cubeColor);
geometryCache->renderDashedLine(topLeftNear, topRightNear, cubeColor);
geometryCache->renderDashedLine(topRightNear, topRightFar, cubeColor);
geometryCache->renderDashedLine(topRightFar, topLeftFar, cubeColor);
geometryCache->renderDashedLine(topLeftFar, topLeftNear, cubeColor);
geometryCache->renderDashedLine(bottomLeftNear, topLeftNear, cubeColor);
geometryCache->renderDashedLine(bottomRightNear, topRightNear, cubeColor);
geometryCache->renderDashedLine(bottomLeftFar, topLeftFar, cubeColor);
geometryCache->renderDashedLine(bottomRightFar, topRightFar, cubeColor);
} else {
glScalef(dimensions.x, dimensions.y, dimensions.z);
DependencyManager::get<GeometryCache>()->renderWireCube(1.0f, cubeColor);
}
}
glPopMatrix();
glPopMatrix();
if (glower) {
delete glower;
}
}
}

View file

@ -36,87 +36,128 @@ void Grid3DOverlay::render(RenderArgs* args) {
return; // do nothing if we're not visible
}
if (!_gridProgram.isLinked()) {
if (!_gridProgram.addShaderFromSourceFile(QGLShader::Vertex, PathUtils::resourcesPath() + "shaders/grid.vert")) {
qDebug() << "Failed to compile: " + _gridProgram.log();
return;
}
if (!_gridProgram.addShaderFromSourceFile(QGLShader::Fragment, PathUtils::resourcesPath() + "shaders/grid.frag")) {
qDebug() << "Failed to compile: " + _gridProgram.log();
return;
}
if (!_gridProgram.link()) {
qDebug() << "Failed to link: " + _gridProgram.log();
return;
}
}
// Render code largely taken from MetavoxelEditor::render()
glDisable(GL_LIGHTING);
glDepthMask(GL_FALSE);
glPushMatrix();
glm::quat rotation = getRotation();
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glLineWidth(1.5f);
// center the grid around the camera position on the plane
glm::vec3 rotated = glm::inverse(rotation) * Application::getInstance()->getCamera()->getPosition();
float spacing = _minorGridWidth;
float alpha = getAlpha();
xColor color = getColor();
glm::vec3 position = getPosition();
const int MINOR_GRID_DIVISIONS = 200;
const int MAJOR_GRID_DIVISIONS = 100;
const float MAX_COLOR = 255.0f;
// center the grid around the camera position on the plane
glm::vec3 rotated = glm::inverse(_rotation) * Application::getInstance()->getCamera()->getPosition();
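    // (Below, floorf(rotated / spacing) snaps that center to whole grid cells, so the
    // rendered lines stay aligned with world-space grid positions as the camera moves.)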
float spacing = _minorGridWidth;
float alpha = getAlpha();
xColor color = getColor();
glm::vec4 gridColor(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
_gridProgram.bind();
auto batch = args->_batch;
// Minor grid
glPushMatrix();
{
glTranslatef(_minorGridWidth * (floorf(rotated.x / spacing) - MINOR_GRID_DIVISIONS / 2),
spacing * (floorf(rotated.y / spacing) - MINOR_GRID_DIVISIONS / 2), position.z);
if (batch) {
Transform transform;
transform.setRotation(_rotation);
float scale = MINOR_GRID_DIVISIONS * spacing;
glScalef(scale, scale, scale);
DependencyManager::get<GeometryCache>()->renderGrid(MINOR_GRID_DIVISIONS, MINOR_GRID_DIVISIONS, gridColor);
// Minor grid
{
batch->_glLineWidth(1.0f);
auto position = glm::vec3(_minorGridWidth * (floorf(rotated.x / spacing) - MINOR_GRID_DIVISIONS / 2),
spacing * (floorf(rotated.y / spacing) - MINOR_GRID_DIVISIONS / 2),
_position.z);
float scale = MINOR_GRID_DIVISIONS * spacing;
transform.setTranslation(position);
transform.setScale(scale);
batch->setModelTransform(transform);
DependencyManager::get<GeometryCache>()->renderGrid(*batch, MINOR_GRID_DIVISIONS, MINOR_GRID_DIVISIONS, gridColor);
}
// Major grid
{
batch->_glLineWidth(4.0f);
spacing *= _majorGridEvery;
auto position = glm::vec3(spacing * (floorf(rotated.x / spacing) - MAJOR_GRID_DIVISIONS / 2),
spacing * (floorf(rotated.y / spacing) - MAJOR_GRID_DIVISIONS / 2),
_position.z);
float scale = MAJOR_GRID_DIVISIONS * spacing;
transform.setTranslation(position);
transform.setScale(scale);
batch->setModelTransform(transform);
DependencyManager::get<GeometryCache>()->renderGrid(*batch, MAJOR_GRID_DIVISIONS, MAJOR_GRID_DIVISIONS, gridColor);
}
} else {
if (!_gridProgram.isLinked()) {
if (!_gridProgram.addShaderFromSourceFile(QGLShader::Vertex, PathUtils::resourcesPath() + "shaders/grid.vert")) {
qDebug() << "Failed to compile: " + _gridProgram.log();
return;
}
if (!_gridProgram.addShaderFromSourceFile(QGLShader::Fragment, PathUtils::resourcesPath() + "shaders/grid.frag")) {
qDebug() << "Failed to compile: " + _gridProgram.log();
return;
}
if (!_gridProgram.link()) {
qDebug() << "Failed to link: " + _gridProgram.log();
return;
}
}
// Render code largely taken from MetavoxelEditor::render()
glDisable(GL_LIGHTING);
glDepthMask(GL_FALSE);
glPushMatrix();
glm::quat rotation = getRotation();
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glLineWidth(1.5f);
glm::vec3 position = getPosition();
_gridProgram.bind();
// Minor grid
glPushMatrix();
{
glTranslatef(_minorGridWidth * (floorf(rotated.x / spacing) - MINOR_GRID_DIVISIONS / 2),
spacing * (floorf(rotated.y / spacing) - MINOR_GRID_DIVISIONS / 2), position.z);
float scale = MINOR_GRID_DIVISIONS * spacing;
glScalef(scale, scale, scale);
DependencyManager::get<GeometryCache>()->renderGrid(MINOR_GRID_DIVISIONS, MINOR_GRID_DIVISIONS, gridColor);
}
glPopMatrix();
// Major grid
glPushMatrix();
{
glLineWidth(4.0f);
spacing *= _majorGridEvery;
glTranslatef(spacing * (floorf(rotated.x / spacing) - MAJOR_GRID_DIVISIONS / 2),
spacing * (floorf(rotated.y / spacing) - MAJOR_GRID_DIVISIONS / 2), position.z);
float scale = MAJOR_GRID_DIVISIONS * spacing;
glScalef(scale, scale, scale);
DependencyManager::get<GeometryCache>()->renderGrid(MAJOR_GRID_DIVISIONS, MAJOR_GRID_DIVISIONS, gridColor);
}
glPopMatrix();
_gridProgram.release();
glPopMatrix();
glEnable(GL_LIGHTING);
glDepthMask(GL_TRUE);
}
glPopMatrix();
// Major grid
glPushMatrix();
{
glLineWidth(4.0f);
spacing *= _majorGridEvery;
glTranslatef(spacing * (floorf(rotated.x / spacing) - MAJOR_GRID_DIVISIONS / 2),
spacing * (floorf(rotated.y / spacing) - MAJOR_GRID_DIVISIONS / 2), position.z);
float scale = MAJOR_GRID_DIVISIONS * spacing;
glScalef(scale, scale, scale);
DependencyManager::get<GeometryCache>()->renderGrid(MAJOR_GRID_DIVISIONS, MAJOR_GRID_DIVISIONS, gridColor);
}
glPopMatrix();
_gridProgram.release();
glPopMatrix();
glEnable(GL_LIGHTING);
glDepthMask(GL_TRUE);
}
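
A note on the batch path above: it swaps the glPushMatrix/glTranslatef/glScalef sequence for a Transform handed to the batch, and re-centers the grid on the camera by snapping the camera's grid-plane position to the spacing. A minimal sketch of that idiom, reusing the Transform, gpu::Batch, GeometryCache and DependencyManager names from the hunk (the helper itself is hypothetical and assumes the surrounding project headers):

// Hypothetical helper showing the batch/model-transform idiom used above.
void renderCenteredGrid(gpu::Batch& batch, const glm::quat& rotation,
                        const glm::vec3& cameraPos, float spacing, int divisions,
                        const glm::vec4& color) {
    // snap the camera's position (expressed in the grid's frame) to the spacing
    glm::vec3 local = glm::inverse(rotation) * cameraPos;
    glm::vec3 origin(spacing * (floorf(local.x / spacing) - divisions / 2),
                     spacing * (floorf(local.y / spacing) - divisions / 2),
                     0.0f); // the overlay uses its own _position.z here
    Transform transform;
    transform.setRotation(rotation);
    transform.setTranslation(origin);
    transform.setScale(divisions * spacing);
    batch.setModelTransform(transform); // no GL matrix stack involved
    DependencyManager::get<GeometryCache>()->renderGrid(batch, divisions, divisions, color);
}
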
void Grid3DOverlay::setProperties(const QScriptValue& properties) {

View file

@ -37,41 +37,57 @@ void Line3DOverlay::render(RenderArgs* args) {
return; // do nothing if we're not visible
}
float glowLevel = getGlowLevel();
Glower* glower = NULL;
if (glowLevel > 0.0f) {
glower = new Glower(glowLevel);
}
glPushMatrix();
glDisable(GL_LIGHTING);
glLineWidth(_lineWidth);
float alpha = getAlpha();
xColor color = getColor();
const float MAX_COLOR = 255.0f;
glm::vec4 colorv4(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
glm::vec3 position = getPosition();
glm::quat rotation = getRotation();
auto batch = args->_batch;
glTranslatef(position.x, position.y, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
if (batch) {
Transform transform;
transform.setTranslation(_position);
transform.setRotation(_rotation);
batch->setModelTransform(transform);
if (getIsDashedLine()) {
// TODO: add support for color to renderDashedLine()
DependencyManager::get<GeometryCache>()->renderDashedLine(_position, _end, colorv4, _geometryCacheID);
if (getIsDashedLine()) {
// TODO: add support for color to renderDashedLine()
DependencyManager::get<GeometryCache>()->renderDashedLine(*batch, _position, _end, colorv4, _geometryCacheID);
} else {
DependencyManager::get<GeometryCache>()->renderLine(*batch, _start, _end, colorv4, _geometryCacheID);
}
} else {
DependencyManager::get<GeometryCache>()->renderLine(_start, _end, colorv4, _geometryCacheID);
}
glEnable(GL_LIGHTING);
float glowLevel = getGlowLevel();
Glower* glower = NULL;
if (glowLevel > 0.0f) {
glower = new Glower(glowLevel);
}
glPopMatrix();
glPushMatrix();
if (glower) {
delete glower;
glDisable(GL_LIGHTING);
glLineWidth(_lineWidth);
glm::vec3 position = getPosition();
glm::quat rotation = getRotation();
glTranslatef(position.x, position.y, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
if (getIsDashedLine()) {
// TODO: add support for color to renderDashedLine()
DependencyManager::get<GeometryCache>()->renderDashedLine(_position, _end, colorv4, _geometryCacheID);
} else {
DependencyManager::get<GeometryCache>()->renderLine(_start, _end, colorv4, _geometryCacheID);
}
glEnable(GL_LIGHTING);
glPopMatrix();
if (glower) {
delete glower;
}
}
}

View file

@ -66,8 +66,8 @@ namespace render {
}
template <> void payloadRender(const Overlay::Pointer& overlay, RenderArgs* args) {
if (args) {
glPushMatrix();
if (overlay->getAnchor() == Overlay::MY_AVATAR) {
glPushMatrix();
MyAvatar* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::quat myAvatarRotation = avatar->getOrientation();
glm::vec3 myAvatarPosition = avatar->getPosition();
@ -78,9 +78,11 @@ namespace render {
glTranslatef(myAvatarPosition.x, myAvatarPosition.y, myAvatarPosition.z);
glRotatef(angle, axis.x, axis.y, axis.z);
glScalef(myAvatarScale, myAvatarScale, myAvatarScale);
overlay->render(args);
glPopMatrix();
} else {
overlay->render(args);
}
overlay->render(args);
glPopMatrix();
}
}
}

View file

@ -41,74 +41,116 @@ void Rectangle3DOverlay::render(RenderArgs* args) {
const float MAX_COLOR = 255.0f;
glm::vec4 rectangleColor(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
glDisable(GL_LIGHTING);
glm::vec3 position = getPosition();
glm::vec3 center = getCenter();
glm::vec2 dimensions = getDimensions();
glm::vec2 halfDimensions = dimensions * 0.5f;
glm::quat rotation = getRotation();
float glowLevel = getGlowLevel();
Glower* glower = NULL;
if (glowLevel > 0.0f) {
glower = new Glower(glowLevel);
}
auto batch = args->_batch;
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glPushMatrix();
glm::vec3 positionToCenter = center - position;
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
//glScalef(dimensions.x, dimensions.y, 1.0f);
if (batch) {
Transform transform;
transform.setTranslation(position);
transform.setRotation(rotation);
glLineWidth(_lineWidth);
batch->setModelTransform(transform);
if (getIsSolid()) {
glm::vec3 topLeft(-halfDimensions.x, -halfDimensions.y, 0.0f);
glm::vec3 bottomRight(halfDimensions.x, halfDimensions.y, 0.0f);
DependencyManager::get<GeometryCache>()->renderQuad(*batch, topLeft, bottomRight, rectangleColor);
} else {
auto geometryCache = DependencyManager::get<GeometryCache>();
if (getIsDashedLine()) {
glm::vec3 point1(-halfDimensions.x, -halfDimensions.y, 0.0f);
glm::vec3 point2(halfDimensions.x, -halfDimensions.y, 0.0f);
glm::vec3 point3(halfDimensions.x, halfDimensions.y, 0.0f);
glm::vec3 point4(-halfDimensions.x, halfDimensions.y, 0.0f);
// for our overlay, isSolid means we draw a solid "filled" rectangle, otherwise we just draw a border line...
if (getIsSolid()) {
glm::vec3 topLeft(-halfDimensions.x, -halfDimensions.y, 0.0f);
glm::vec3 bottomRight(halfDimensions.x, halfDimensions.y, 0.0f);
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, rectangleColor);
geometryCache->renderDashedLine(*batch, point1, point2, rectangleColor);
geometryCache->renderDashedLine(*batch, point2, point3, rectangleColor);
geometryCache->renderDashedLine(*batch, point3, point4, rectangleColor);
geometryCache->renderDashedLine(*batch, point4, point1, rectangleColor);
} else {
if (getIsDashedLine()) {
if (halfDimensions != _previousHalfDimensions) {
QVector<glm::vec3> border;
border << glm::vec3(-halfDimensions.x, -halfDimensions.y, 0.0f);
border << glm::vec3(halfDimensions.x, -halfDimensions.y, 0.0f);
border << glm::vec3(halfDimensions.x, halfDimensions.y, 0.0f);
border << glm::vec3(-halfDimensions.x, halfDimensions.y, 0.0f);
border << glm::vec3(-halfDimensions.x, -halfDimensions.y, 0.0f);
geometryCache->updateVertices(_geometryCacheID, border, rectangleColor);
glm::vec3 point1(-halfDimensions.x, -halfDimensions.y, 0.0f);
glm::vec3 point2(halfDimensions.x, -halfDimensions.y, 0.0f);
glm::vec3 point3(halfDimensions.x, halfDimensions.y, 0.0f);
glm::vec3 point4(-halfDimensions.x, halfDimensions.y, 0.0f);
geometryCache->renderDashedLine(point1, point2, rectangleColor);
geometryCache->renderDashedLine(point2, point3, rectangleColor);
geometryCache->renderDashedLine(point3, point4, rectangleColor);
geometryCache->renderDashedLine(point4, point1, rectangleColor);
} else {
if (halfDimensions != _previousHalfDimensions) {
QVector<glm::vec3> border;
border << glm::vec3(-halfDimensions.x, -halfDimensions.y, 0.0f);
border << glm::vec3(halfDimensions.x, -halfDimensions.y, 0.0f);
border << glm::vec3(halfDimensions.x, halfDimensions.y, 0.0f);
border << glm::vec3(-halfDimensions.x, halfDimensions.y, 0.0f);
border << glm::vec3(-halfDimensions.x, -halfDimensions.y, 0.0f);
geometryCache->updateVertices(_geometryCacheID, border, rectangleColor);
_previousHalfDimensions = halfDimensions;
}
geometryCache->renderVertices(gpu::LINE_STRIP, _geometryCacheID);
_previousHalfDimensions = halfDimensions;
}
geometryCache->renderVertices(*batch, gpu::LINE_STRIP, _geometryCacheID);
}
}
} else {
glDisable(GL_LIGHTING);
float glowLevel = getGlowLevel();
Glower* glower = NULL;
if (glowLevel > 0.0f) {
glower = new Glower(glowLevel);
}
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glPushMatrix();
glm::vec3 positionToCenter = center - position;
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
//glScalef(dimensions.x, dimensions.y, 1.0f);
glLineWidth(_lineWidth);
auto geometryCache = DependencyManager::get<GeometryCache>();
// for our overlay, isSolid means we draw a solid "filled" rectangle, otherwise we just draw a border line...
if (getIsSolid()) {
glm::vec3 topLeft(-halfDimensions.x, -halfDimensions.y, 0.0f);
glm::vec3 bottomRight(halfDimensions.x, halfDimensions.y, 0.0f);
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, rectangleColor);
} else {
if (getIsDashedLine()) {
glm::vec3 point1(-halfDimensions.x, -halfDimensions.y, 0.0f);
glm::vec3 point2(halfDimensions.x, -halfDimensions.y, 0.0f);
glm::vec3 point3(halfDimensions.x, halfDimensions.y, 0.0f);
glm::vec3 point4(-halfDimensions.x, halfDimensions.y, 0.0f);
geometryCache->renderDashedLine(point1, point2, rectangleColor);
geometryCache->renderDashedLine(point2, point3, rectangleColor);
geometryCache->renderDashedLine(point3, point4, rectangleColor);
geometryCache->renderDashedLine(point4, point1, rectangleColor);
} else {
if (halfDimensions != _previousHalfDimensions) {
QVector<glm::vec3> border;
border << glm::vec3(-halfDimensions.x, -halfDimensions.y, 0.0f);
border << glm::vec3(halfDimensions.x, -halfDimensions.y, 0.0f);
border << glm::vec3(halfDimensions.x, halfDimensions.y, 0.0f);
border << glm::vec3(-halfDimensions.x, halfDimensions.y, 0.0f);
border << glm::vec3(-halfDimensions.x, -halfDimensions.y, 0.0f);
geometryCache->updateVertices(_geometryCacheID, border, rectangleColor);
_previousHalfDimensions = halfDimensions;
}
geometryCache->renderVertices(gpu::LINE_STRIP, _geometryCacheID);
}
}
glPopMatrix();
glPopMatrix();
glPopMatrix();
if (glower) {
delete glower;
if (glower) {
delete glower;
}
}
}

View file

@ -39,33 +39,45 @@ void Sphere3DOverlay::render(RenderArgs* args) {
const float MAX_COLOR = 255.0f;
glm::vec4 sphereColor(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
glDisable(GL_LIGHTING);
glm::vec3 position = getPosition();
glm::vec3 center = getCenter();
glm::vec3 dimensions = getDimensions();
glm::quat rotation = getRotation();
auto batch = args->_batch;
float glowLevel = getGlowLevel();
Glower* glower = NULL;
if (glowLevel > 0.0f) {
glower = new Glower(glowLevel);
}
if (batch) {
Transform transform;
transform.setTranslation(_position);
transform.setRotation(_rotation);
transform.setScale(_dimensions);
batch->setModelTransform(transform);
DependencyManager::get<GeometryCache>()->renderSphere(*batch, 1.0f, SLICES, SLICES, sphereColor, _isSolid);
} else {
glDisable(GL_LIGHTING);
glm::vec3 position = getPosition();
glm::vec3 center = getCenter();
glm::vec3 dimensions = getDimensions();
glm::quat rotation = getRotation();
float glowLevel = getGlowLevel();
Glower* glower = NULL;
if (glowLevel > 0.0f) {
glower = new Glower(glowLevel);
}
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glPushMatrix();
glm::vec3 positionToCenter = center - position;
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
glScalef(dimensions.x, dimensions.y, dimensions.z);
DependencyManager::get<GeometryCache>()->renderSphere(1.0f, SLICES, SLICES, sphereColor, _isSolid);
glTranslatef(position.x, position.y, position.z);
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glPushMatrix();
glm::vec3 positionToCenter = center - position;
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
glScalef(dimensions.x, dimensions.y, dimensions.z);
DependencyManager::get<GeometryCache>()->renderSphere(1.0f, SLICES, SLICES, sphereColor, _isSolid);
glPopMatrix();
glPopMatrix();
glPopMatrix();
if (glower) {
delete glower;
if (glower) {
delete glower;
}
}
}

View file

@ -1020,7 +1020,10 @@ bool AudioClient::outputLocalInjector(bool isStereo, AudioInjector* injector) {
localOutput->moveToThread(injector->getLocalBuffer()->thread());
// have it be stopped when that local buffer is about to close
connect(injector->getLocalBuffer(), &AudioInjectorLocalBuffer::bufferEmpty, localOutput, &QAudioOutput::stop);
connect(localOutput, &QAudioOutput::stateChanged, this, &AudioClient::audioStateChanged);
connect(this, &AudioClient::audioFinished, localOutput, &QAudioOutput::stop);
connect(this, &AudioClient::audioFinished, injector, &AudioInjector::stop);
connect(injector->getLocalBuffer(), &QIODevice::aboutToClose, localOutput, &QAudioOutput::stop);
qCDebug(audioclient) << "Starting QAudioOutput for local injector" << localOutput;
@ -1338,3 +1341,9 @@ void AudioClient::saveSettings() {
windowSecondsForDesiredReduction.set(_receivedAudioStream.getWindowSecondsForDesiredReduction());
repetitionWithFade.set(_receivedAudioStream.getRepetitionWithFade());
}
void AudioClient::audioStateChanged(QAudio::State state) {
if (state == QAudio::IdleState) {
emit audioFinished();
}
}
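
The change above replaces the old bufferEmpty plumbing with state watching: QAudioOutput::stateChanged feeds audioStateChanged, which emits audioFinished once the device goes idle, and audioFinished stops both the QAudioOutput and the AudioInjector. A standalone sketch of that Qt pattern, using only QtMultimedia types (OutputWatcher is illustrative, not part of the codebase):

#include <QAudio>
#include <QAudioOutput>
#include <QObject>

// Watch a QAudioOutput and fire a "finished" signal once it goes idle,
// mirroring the AudioClient::audioStateChanged logic above.
class OutputWatcher : public QObject {
    Q_OBJECT
public:
    explicit OutputWatcher(QAudioOutput* output, QObject* parent = nullptr) : QObject(parent) {
        connect(output, &QAudioOutput::stateChanged, this, &OutputWatcher::onStateChanged);
        connect(this, &OutputWatcher::finished, output, &QAudioOutput::stop);
    }
signals:
    void finished();
private slots:
    void onStateChanged(QAudio::State state) {
        if (state == QAudio::IdleState) {
            emit finished(); // no more data to play; tear the output down
        }
    }
};
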

View file

@ -188,6 +188,8 @@ signals:
void receivedFirstPacket();
void disconnected();
void audioFinished();
protected:
AudioClient();
~AudioClient();
@ -196,6 +198,9 @@ protected:
deleteLater();
}
private slots:
void audioStateChanged(QAudio::State state);
private:
void outputFormatChanged();

View file

@ -60,7 +60,6 @@ void AudioInjector::setIsFinished(bool isFinished) {
if (_shouldDeleteAfterFinish) {
// we've been asked to delete after finishing, trigger a queued deleteLater here
qCDebug(audio) << "AudioInjector triggering delete from setIsFinished";
QMetaObject::invokeMethod(this, "deleteLater", Qt::QueuedConnection);
}
}
@ -122,9 +121,6 @@ void AudioInjector::injectLocally() {
success = _localAudioInterface->outputLocalInjector(_options.stereo, this);
// if we're not looping and the buffer tells us it is empty then emit finished
connect(_localBuffer, &AudioInjectorLocalBuffer::bufferEmpty, this, &AudioInjector::stop);
if (!success) {
qCDebug(audio) << "AudioInjector::injectLocally could not output locally via _localAudioInterface";
}

View file

@ -69,10 +69,7 @@ qint64 AudioInjectorLocalBuffer::readData(char* data, qint64 maxSize) {
_currentOffset += bytesRead;
}
if (!_shouldLoop && bytesRead == bytesToEnd) {
// we hit the end of the buffer, emit a signal
emit bufferEmpty();
} else if (_shouldLoop && _currentOffset == _rawAudioArray.size()) {
if (_shouldLoop && _currentOffset == _rawAudioArray.size()) {
_currentOffset = 0;
}

View file

@ -32,9 +32,6 @@ public:
void setCurrentOffset(int currentOffset) { _currentOffset = currentOffset; }
void setVolume(float volume) { _volume = glm::clamp(volume, 0.0f, 1.0f); }
signals:
void bufferEmpty();
private:
qint64 recursiveReadFromFront(char* data, qint64 maxSize);

View file

@ -48,6 +48,9 @@
#include "RenderablePolyVoxEntityItem.h"
#include "EntitiesRendererLogging.h"
#include "DependencyManager.h"
#include "AddressManager.h"
EntityTreeRenderer::EntityTreeRenderer(bool wantScripts, AbstractViewStateInterface* viewState,
AbstractScriptingServicesInterface* scriptingServices) :
OctreeRenderer(),
@ -911,15 +914,13 @@ void EntityTreeRenderer::mouseMoveEvent(QMouseEvent* event, unsigned int deviceI
bool precisionPicking = false; // for mouse moves we do not do precision picking
RayToEntityIntersectionResult rayPickResult = findRayIntersectionWorker(ray, Octree::TryLock, precisionPicking);
if (rayPickResult.intersects) {
//qCDebug(entitiesrenderer) << "mouseReleaseEvent over entity:" << rayPickResult.entityID;
QScriptValueList entityScriptArgs = createMouseEventArgs(rayPickResult.entityID, event, deviceID);
// load the entity script if needed...
QScriptValue entityScript = loadEntityScript(rayPickResult.entity);
if (entityScript.property("mouseMoveEvent").isValid()) {
entityScript.property("mouseMoveEvent").call(entityScript, entityScriptArgs);
}
//qCDebug(entitiesrenderer) << "mouseMoveEvent over entity:" << rayPickResult.entityID;
emit mouseMoveOnEntity(rayPickResult, event, deviceID);
if (entityScript.property("mouseMoveOnEntity").isValid()) {
entityScript.property("mouseMoveOnEntity").call(entityScript, entityScriptArgs);

View file

@ -129,7 +129,7 @@ void RenderablePolyVoxEntityItem::setVoxelVolumeSize(glm::vec3 voxelVolumeSize)
_volData->setBorderValue(255);
#ifdef WANT_DEBUG
qDebug() << " new size is" << _volData->getWidth() << _volData->getHeight() << _volData->getDepth();
qDebug() << " new voxel-space size is" << _volData->getWidth() << _volData->getHeight() << _volData->getDepth();
#endif
// I'm not sure this is needed... the docs say that each element is initialized with its default
@ -228,12 +228,15 @@ uint8_t RenderablePolyVoxEntityItem::getVoxel(int x, int y, int z) {
return _volData->getVoxelAt(x, y, z);
}
void RenderablePolyVoxEntityItem::setVoxel(int x, int y, int z, uint8_t toValue) {
void RenderablePolyVoxEntityItem::setVoxelInternal(int x, int y, int z, uint8_t toValue) {
// set a voxel without recompressing the voxel data
assert(_volData);
if (!inUserBounds(_volData, _voxelSurfaceStyle, x, y, z)) {
return;
}
updateOnCount(x, y, z, toValue);
if (_voxelSurfaceStyle == SURFACE_EDGED_CUBIC) {
_volData->setVoxelAt(x + 1, y + 1, z + 1, toValue);
} else {
@ -242,6 +245,14 @@ void RenderablePolyVoxEntityItem::setVoxel(int x, int y, int z, uint8_t toValue)
}
void RenderablePolyVoxEntityItem::setVoxel(int x, int y, int z, uint8_t toValue) {
if (_locked) {
return;
}
setVoxelInternal(x, y, z, toValue);
compressVolumeData();
}
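
setVoxel now pays a full compressVolumeData() after a single edit, while the bulk paths (setAll, setSphereInVolume, the decompress loop) write through setVoxelInternal so the volume is not recompressed per voxel. A hedged sketch of that batching shape, reusing the members shown in the hunk (setBoxInVolume itself is hypothetical):

// Hypothetical bulk edit: touch many voxels, recompress once.
void RenderablePolyVoxEntityItem::setBoxInVolume(glm::vec3 low, glm::vec3 size, uint8_t toValue) {
    if (_locked) {
        return; // locked entities reject edits, as in setVoxel/setAll above
    }
    for (int z = low.z; z < low.z + size.z; z++) {
        for (int y = low.y; y < low.y + size.y; y++) {
            for (int x = low.x; x < low.x + size.x; x++) {
                setVoxelInternal(x, y, z, toValue); // keeps _onCount up to date, no recompress
            }
        }
    }
    compressVolumeData(); // single recompress for the whole edit
}
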
void RenderablePolyVoxEntityItem::updateOnCount(int x, int y, int z, uint8_t toValue) {
// keep _onCount up to date
if (!inUserBounds(_volData, _voxelSurfaceStyle, x, y, z)) {
@ -263,11 +274,15 @@ void RenderablePolyVoxEntityItem::updateOnCount(int x, int y, int z, uint8_t toV
}
void RenderablePolyVoxEntityItem::setAll(uint8_t toValue) {
if (_locked) {
return;
}
for (int z = 0; z < _voxelVolumeSize.z; z++) {
for (int y = 0; y < _voxelVolumeSize.y; y++) {
for (int x = 0; x < _voxelVolumeSize.x; x++) {
updateOnCount(x, y, z, toValue);
setVoxel(x, y, z, toValue);
setVoxelInternal(x, y, z, toValue);
}
}
}
@ -275,11 +290,19 @@ void RenderablePolyVoxEntityItem::setAll(uint8_t toValue) {
}
void RenderablePolyVoxEntityItem::setVoxelInVolume(glm::vec3 position, uint8_t toValue) {
updateOnCount(position.x, position.y, position.z, toValue);
setVoxel(position.x, position.y, position.z, toValue);
if (_locked) {
return;
}
// same as setVoxel but takes a vector rather than three ints.
setVoxel((int)position.x, (int)position.y, (int)position.z, toValue);
}
void RenderablePolyVoxEntityItem::setSphereInVolume(glm::vec3 center, float radius, uint8_t toValue) {
if (_locked) {
return;
}
// This three-level for loop iterates over every voxel in the volume
for (int z = 0; z < _voxelVolumeSize.z; z++) {
for (int y = 0; y < _voxelVolumeSize.y; y++) {
@ -291,7 +314,7 @@ void RenderablePolyVoxEntityItem::setSphereInVolume(glm::vec3 center, float radi
// If the current voxel is less than 'radius' units from the center then we make it solid.
if (fDistToCenter <= radius) {
updateOnCount(x, y, z, toValue);
setVoxel(x, y, z, toValue);
setVoxelInternal(x, y, z, toValue);
}
}
}
@ -388,10 +411,7 @@ void RenderablePolyVoxEntityItem::render(RenderArgs* args) {
getModel();
}
Transform transform;
transform.setTranslation(getPosition() - getRegistrationPoint() * getDimensions());
transform.setRotation(getRotation());
transform.setScale(getDimensions() / _voxelVolumeSize);
Transform transform(voxelToWorldMatrix());
auto mesh = _modelGeometry.getMesh();
Q_ASSERT(args->_batch);
@ -522,6 +542,11 @@ void RenderablePolyVoxEntityItem::compressVolumeData() {
QByteArray newVoxelData;
QDataStream writer(&newVoxelData, QIODevice::WriteOnly | QIODevice::Truncate);
#ifdef WANT_DEBUG
qDebug() << "compressing voxel data of size:" << voxelXSize << voxelYSize << voxelZSize;
#endif
writer << voxelXSize << voxelYSize << voxelZSize;
QByteArray compressedData = qCompress(uncompressedData, 9);
@ -581,7 +606,7 @@ void RenderablePolyVoxEntityItem::decompressVolumeData() {
for (int x = 0; x < voxelXSize; x++) {
int uncompressedIndex = (z * voxelYSize * voxelXSize) + (y * voxelZSize) + x;
updateOnCount(x, y, z, uncompressedData[uncompressedIndex]);
setVoxel(x, y, z, uncompressedData[uncompressedIndex]);
setVoxelInternal(x, y, z, uncompressedData[uncompressedIndex]);
}
}
}

View file

@ -73,11 +73,12 @@ public:
virtual void setVoxelInVolume(glm::vec3 position, uint8_t toValue);
SIMPLE_RENDERABLE();
private:
// The PolyVoxEntityItem class has _voxelData which contains dimensions and compressed voxel data. The dimensions
// may not match _voxelVolumeSize.
void setVoxelInternal(int x, int y, int z, uint8_t toValue);
void compressVolumeData();
void decompressVolumeData();

View file

@ -95,6 +95,7 @@ bool DeleteEntityOperator::preRecursion(OctreeElement* element) {
EntityItemPointer theEntity = details.entity;
bool entityDeleted = entityTreeElement->removeEntityItem(theEntity); // remove it from the element
assert(entityDeleted);
(void)entityDeleted; // quiet unused-variable warning
_tree->setContainingElement(details.entity->getEntityItemID(), NULL); // update our id-to-element lookup
_foundCount++;
}

View file

@ -67,12 +67,12 @@ EntityItem::EntityItem(const EntityItemID& entityItemID) :
_simulatorIDChangedTime(0),
_marketplaceID(ENTITY_ITEM_DEFAULT_MARKETPLACE_ID),
_name(ENTITY_ITEM_DEFAULT_NAME),
_href(""),
_description(""),
_dirtyFlags(0),
_element(nullptr),
_physicsInfo(nullptr),
_simulated(false),
_href(""),
_description("")
_simulated(false)
{
quint64 now = usecTimestampNow();
_lastSimulated = now;

View file

@ -23,7 +23,7 @@
const glm::vec3 PolyVoxEntityItem::DEFAULT_VOXEL_VOLUME_SIZE = glm::vec3(32, 32, 32);
const float PolyVoxEntityItem::MAX_VOXEL_DIMENSION = 32.0f;
const QByteArray PolyVoxEntityItem::DEFAULT_VOXEL_DATA(qCompress(QByteArray(0), 9)); // XXX
const QByteArray PolyVoxEntityItem::DEFAULT_VOXEL_DATA(PolyVoxEntityItem::makeEmptyVoxelData());
const PolyVoxEntityItem::PolyVoxSurfaceStyle PolyVoxEntityItem::DEFAULT_VOXEL_SURFACE_STYLE =
PolyVoxEntityItem::SURFACE_MARCHING_CUBES;
@ -31,6 +31,25 @@ EntityItemPointer PolyVoxEntityItem::factory(const EntityItemID& entityID, const
return EntityItemPointer(new PolyVoxEntityItem(entityID, properties));
}
QByteArray PolyVoxEntityItem::makeEmptyVoxelData(quint16 voxelXSize, quint16 voxelYSize, quint16 voxelZSize) {
int rawSize = voxelXSize * voxelYSize * voxelZSize;
QByteArray uncompressedData = QByteArray(rawSize, '\0');
QByteArray newVoxelData;
QDataStream writer(&newVoxelData, QIODevice::WriteOnly | QIODevice::Truncate);
writer << voxelXSize << voxelYSize << voxelZSize;
QByteArray compressedData = qCompress(uncompressedData, 9);
writer << compressedData;
return newVoxelData;
}
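
makeEmptyVoxelData pins down the serialized layout of the voxel blob: a QDataStream carrying three quint16 dimensions followed by a qCompress'ed array holding one byte per voxel. A matching reader, as a sketch (readVoxelData is illustrative, not a project function):

#include <QByteArray>
#include <QDataStream>

// Decode the format written by makeEmptyVoxelData / compressVolumeData above.
bool readVoxelData(const QByteArray& voxelData,
                   quint16& xSize, quint16& ySize, quint16& zSize,
                   QByteArray& uncompressed) {
    QDataStream reader(voxelData); // read-only stream over the stored bytes
    QByteArray compressed;
    reader >> xSize >> ySize >> zSize;
    reader >> compressed;
    uncompressed = qUncompress(compressed);
    // one byte per voxel; a size mismatch means truncated or corrupt data
    return uncompressed.size() == xSize * ySize * zSize;
}
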
PolyVoxEntityItem::PolyVoxEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties) :
EntityItem(entityItemID),
_voxelVolumeSize(PolyVoxEntityItem::DEFAULT_VOXEL_VOLUME_SIZE),

View file

@ -12,14 +12,14 @@
#ifndef hifi_PolyVoxEntityItem_h
#define hifi_PolyVoxEntityItem_h
#include "EntityItem.h"
#include "EntityItem.h"
class PolyVoxEntityItem : public EntityItem {
public:
static EntityItemPointer factory(const EntityItemID& entityID, const EntityItemProperties& properties);
PolyVoxEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties);
ALLOW_INSTANTIATION // This class can be instantiated
// methods for getting/setting all properties of an entity
@ -29,22 +29,22 @@ class PolyVoxEntityItem : public EntityItem {
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
virtual EntityPropertyFlags getEntityProperties(EncodeBitstreamParams& params) const;
virtual void appendSubclassData(OctreePacketData* packetData, EncodeBitstreamParams& params,
virtual void appendSubclassData(OctreePacketData* packetData, EncodeBitstreamParams& params,
EntityTreeElementExtraEncodeData* modelTreeElementExtraEncodeData,
EntityPropertyFlags& requestedProperties,
EntityPropertyFlags& propertyFlags,
EntityPropertyFlags& propertiesDidntFit,
int& propertyCount,
int& propertyCount,
OctreeElement::AppendState& appendState) const;
virtual int readEntitySubclassDataFromBuffer(const unsigned char* data, int bytesLeftToRead,
virtual int readEntitySubclassDataFromBuffer(const unsigned char* data, int bytesLeftToRead,
ReadBitstreamToTreeParams& args,
EntityPropertyFlags& propertyFlags, bool overwriteLocalData);
// never have a ray intersection pick a PolyVoxEntityItem.
virtual bool supportsDetailedRayIntersection() const { return true; }
virtual bool findDetailedRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
bool& keepSearching, OctreeElement*& element, float& distance, BoxFace& face,
bool& keepSearching, OctreeElement*& element, float& distance, BoxFace& face,
void** intersectedObject, bool precisionPicking) const { return false; }
virtual void debugDump() const;
@ -58,12 +58,12 @@ class PolyVoxEntityItem : public EntityItem {
enum PolyVoxSurfaceStyle {
SURFACE_MARCHING_CUBES,
SURFACE_CUBIC,
SURFACE_EDGED_CUBIC
SURFACE_EDGED_CUBIC
};
virtual void setVoxelSurfaceStyle(PolyVoxSurfaceStyle voxelSurfaceStyle) { _voxelSurfaceStyle = voxelSurfaceStyle; }
virtual void setVoxelSurfaceStyle(uint16_t voxelSurfaceStyle) {
setVoxelSurfaceStyle((PolyVoxSurfaceStyle) voxelSurfaceStyle);
setVoxelSurfaceStyle((PolyVoxSurfaceStyle) voxelSurfaceStyle);
}
virtual PolyVoxSurfaceStyle getVoxelSurfaceStyle() const { return _voxelSurfaceStyle; }
@ -82,10 +82,11 @@ class PolyVoxEntityItem : public EntityItem {
virtual void setAll(uint8_t toValue) {}
virtual void setVoxelInVolume(glm::vec3 position, uint8_t toValue) {}
virtual uint8_t getVoxel(int x, int y, int z) { return 0; }
virtual void setVoxel(int x, int y, int z, uint8_t toValue) {}
static QByteArray makeEmptyVoxelData(quint16 voxelXSize = 16, quint16 voxelYSize = 16, quint16 voxelZSize = 16);
protected:
glm::vec3 _voxelVolumeSize; // this is always 3 bytes

View file

@ -150,6 +150,8 @@ public:
void _glUseProgram(GLuint program);
void _glUniform1f(GLint location, GLfloat v0);
void _glUniform2f(GLint location, GLfloat v0, GLfloat v1);
void _glUniform3f(GLint location, GLfloat v0, GLfloat v1, GLfloat v2);
void _glUniform3fv(GLint location, GLsizei count, const GLfloat* value);
void _glUniform4fv(GLint location, GLsizei count, const GLfloat* value);
void _glUniformMatrix4fv(GLint location, GLsizei count, GLboolean transpose, const GLfloat* value);
@ -210,6 +212,8 @@ public:
COMMAND_glUseProgram,
COMMAND_glUniform1f,
COMMAND_glUniform2f,
COMMAND_glUniform3f,
COMMAND_glUniform3fv,
COMMAND_glUniform4fv,
COMMAND_glUniformMatrix4fv,

View file

@ -61,6 +61,8 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
(&::gpu::GLBackend::do_glUseProgram),
(&::gpu::GLBackend::do_glUniform1f),
(&::gpu::GLBackend::do_glUniform2f),
(&::gpu::GLBackend::do_glUniform3f),
(&::gpu::GLBackend::do_glUniform3fv),
(&::gpu::GLBackend::do_glUniform4fv),
(&::gpu::GLBackend::do_glUniformMatrix4fv),
@ -462,6 +464,7 @@ void Batch::_glUniform2f(GLint location, GLfloat v0, GLfloat v1) {
DO_IT_NOW(_glUniform2f, 1);
}
void GLBackend::do_glUniform2f(Batch& batch, uint32 paramOffset) {
if (_pipeline._program == 0) {
// We should call updatePipeline() to bind the program but we are not doing that
@ -475,6 +478,56 @@ void GLBackend::do_glUniform2f(Batch& batch, uint32 paramOffset) {
(void) CHECK_GL_ERROR();
}
void Batch::_glUniform3f(GLint location, GLfloat v0, GLfloat v1, GLfloat v2) {
ADD_COMMAND_GL(glUniform3f);
_params.push_back(v2);
_params.push_back(v1);
_params.push_back(v0);
_params.push_back(location);
DO_IT_NOW(_glUniform3f, 1);
}
void GLBackend::do_glUniform3f(Batch& batch, uint32 paramOffset) {
if (_pipeline._program == 0) {
// We should call updatePipeline() to bind the program but we are not doing that
// because these uniform setters are deprecated and we don't want to create side effects
return;
}
glUniform3f(
batch._params[paramOffset + 3]._int,
batch._params[paramOffset + 2]._float,
batch._params[paramOffset + 1]._float,
batch._params[paramOffset + 0]._float);
(void) CHECK_GL_ERROR();
}
void Batch::_glUniform3fv(GLint location, GLsizei count, const GLfloat* value) {
ADD_COMMAND_GL(glUniform3fv);
const int VEC3_SIZE = 3 * sizeof(float);
_params.push_back(cacheData(count * VEC3_SIZE, value));
_params.push_back(count);
_params.push_back(location);
DO_IT_NOW(_glUniform3fv, 3);
}
void GLBackend::do_glUniform3fv(Batch& batch, uint32 paramOffset) {
if (_pipeline._program == 0) {
// We should call updatePipeline() to bind the program but we are not doing that
// because these uniform setters are deprecated and we don't want to create side effects
return;
}
glUniform3fv(
batch._params[paramOffset + 2]._int,
batch._params[paramOffset + 1]._uint,
(const GLfloat*)batch.editData(batch._params[paramOffset + 0]._uint));
(void) CHECK_GL_ERROR();
}
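
Both new commands follow the existing Batch convention: arguments are pushed in reverse at record time and read back with descending offsets at replay, so _glUniform3f pushes v2, v1, v0, location and do_glUniform3f reads paramOffset + 3 down to paramOffset + 0 to rebuild the natural argument order. A tiny standalone illustration of that record/replay pairing (the Param struct and values are simplified stand-ins, not the gpu::Batch internals):

#include <cstdio>
#include <vector>

struct Param { float f; int i; };

int main() {
    std::vector<Param> params;
    // record _glUniform3f(location = 7, v0 = 1.f, v1 = 2.f, v2 = 3.f): reverse push
    params.push_back({3.0f, 0}); // v2
    params.push_back({2.0f, 0}); // v1
    params.push_back({1.0f, 0}); // v0
    params.push_back({0.0f, 7}); // location
    // replay, as do_glUniform3f does: descending offsets restore the call order
    size_t paramOffset = 0;
    std::printf("glUniform3f(%d, %.1f, %.1f, %.1f)\n",
                params[paramOffset + 3].i,   // location
                params[paramOffset + 2].f,   // v0
                params[paramOffset + 1].f,   // v1
                params[paramOffset + 0].f);  // v2
    return 0;
}
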
void Batch::_glUniform4fv(GLint location, GLsizei count, const GLfloat* value) {
ADD_COMMAND_GL(glUniform4fv);

View file

@ -379,6 +379,8 @@ protected:
void do_glUseProgram(Batch& batch, uint32 paramOffset);
void do_glUniform1f(Batch& batch, uint32 paramOffset);
void do_glUniform2f(Batch& batch, uint32 paramOffset);
void do_glUniform3f(Batch& batch, uint32 paramOffset);
void do_glUniform3fv(Batch& batch, uint32 paramOffset);
void do_glUniform4fv(Batch& batch, uint32 paramOffset);
void do_glUniformMatrix4fv(Batch& batch, uint32 paramOffset);

View file

@ -61,7 +61,11 @@ void makeBindings(GLBackend::GLShader* shader) {
if (loc >= 0) {
glBindAttribLocation(glprogram, gpu::Stream::TEXCOORD, "texcoord");
}
loc = glGetAttribLocation(glprogram, "attribTexcoord");
if (loc >= 0) {
glBindAttribLocation(glprogram, gpu::Stream::TEXCOORD, "attribTexcoord");
}
loc = glGetAttribLocation(glprogram, "tangent");
if (loc >= 0) {
glBindAttribLocation(glprogram, gpu::Stream::TANGENT, "tangent");

View file

@ -75,6 +75,8 @@ void GLBackend::syncTransformStateCache() {
}
void GLBackend::updateTransform() {
GLint originalMatrixMode;
glGetIntegerv(GL_MATRIX_MODE, &originalMatrixMode);
// Check all the dirty flags and update the state accordingly
if (_transform._invalidProj) {
_transform._transformCamera._projection = _transform._projection;
@ -132,12 +134,13 @@ void GLBackend::updateTransform() {
(void) CHECK_GL_ERROR();
}
if (_transform._invalidModel || _transform._invalidView) {
if (_transform._lastMode != GL_MODELVIEW) {
glMatrixMode(GL_MODELVIEW);
_transform._lastMode = GL_MODELVIEW;
}
if (!_transform._model.isIdentity()) {
if (_transform._lastMode != GL_MODELVIEW) {
glMatrixMode(GL_MODELVIEW);
_transform._lastMode = GL_MODELVIEW;
}
Transform::Mat4 modelView;
if (!_transform._view.isIdentity()) {
Transform mvx;
@ -147,19 +150,12 @@ void GLBackend::updateTransform() {
_transform._model.getMatrix(modelView);
}
glLoadMatrixf(reinterpret_cast< const GLfloat* >(&modelView));
} else if (!_transform._view.isIdentity()) {
Transform::Mat4 modelView;
_transform._view.getInverseMatrix(modelView);
glLoadMatrixf(reinterpret_cast< const GLfloat* >(&modelView));
} else {
if (!_transform._view.isIdentity()) {
if (_transform._lastMode != GL_MODELVIEW) {
glMatrixMode(GL_MODELVIEW);
_transform._lastMode = GL_MODELVIEW;
}
Transform::Mat4 modelView;
_transform._view.getInverseMatrix(modelView);
glLoadMatrixf(reinterpret_cast< const GLfloat* >(&modelView));
} else {
// TODO: eventually do something about the matrix when neither view nor model is specified?
// glLoadIdentity();
}
glLoadIdentity();
}
(void) CHECK_GL_ERROR();
}
@ -167,6 +163,7 @@ void GLBackend::updateTransform() {
// Flags are clean
_transform._invalidView = _transform._invalidProj = _transform._invalidModel = false;
glMatrixMode(originalMatrixMode);
}

View file

@ -1,325 +1,305 @@
//
// Stage.cpp
// libraries/model/src/model
//
// Created by Sam Gateau on 2/24/2015.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Stage.h"
#include <glm/gtx/transform.hpp>
#include <math.h>
#include <qcompilerdetection.h>
#include "SkyFromAtmosphere_vert.h"
#include "SkyFromAtmosphere_frag.h"
using namespace model;
void EarthSunModel::updateAll() const {
updateWorldToSurface();
updateSurfaceToEye();
updateSun();
}
Mat4d EarthSunModel::evalWorldToGeoLocationMat(double longitude, double latitude, double absAltitude, double scale) {
// Longitude is along Z axis but - from east to west
Mat4d rotLon = glm::rotate(glm::radians(longitude), Vec3d(0.0, 0.0, 1.0));
// latitude is along X axis + from south to north
Mat4d rotLat = glm::rotate(-glm::radians(latitude), Vec3d(1.0, 0.0, 0.0));
// translation is moving to the earth surface + altitude at the radius along Y axis
Mat4d surfaceT = glm::translate(Vec3d(0.0, -absAltitude, 0.0));
// Mat4d worldScale = glm::scale(Vec3d(scale));
Mat4d worldToGeoLocMat = surfaceT * rotLat * rotLon;
return worldToGeoLocMat;
}
void EarthSunModel::updateWorldToSurface() const {
// Check if the final position is too close to the earth center ?
float absAltitude = _earthRadius + _altitude;
if (absAltitude < 0.01f) {
absAltitude = 0.01f;
}
// Final world to local Frame
_worldToSurfaceMat = evalWorldToGeoLocationMat(_longitude, _latitude, absAltitude, _scale);
// and the inverse
_surfaceToWorldMat = glm::inverse(_worldToSurfaceMat);
_surfacePos = Vec3d(_surfaceToWorldMat * Vec4d(0.0, 0.0, 0.0, 1.0));
}
void EarthSunModel::updateSurfaceToEye() const {
_surfaceToEyeMat = glm::inverse(_eyeToSurfaceMat);
_worldToEyeMat = _surfaceToEyeMat * _worldToSurfaceMat;
_eyeToWorldMat = _surfaceToWorldMat * _eyeToSurfaceMat;
_eyePos = Vec3d(_eyeToWorldMat * Vec4d(0.0, 0.0, 0.0, 1.0) );
_eyeDir = Vec3d(_eyeToWorldMat * Vec4d(0.0, 0.0, -1.0, 0.0) );
}
void EarthSunModel::updateSun() const {
// Longitude is along Y axis but - from east to west
Mat4d rotSunLon;
Mat4d rotSun = evalWorldToGeoLocationMat(_sunLongitude, _sunLatitude, _earthRadius, _scale);
rotSun = glm::inverse(rotSun);
_sunDir = Vec3d(rotSun * Vec4d(0.0, 1.0, 0.0, 0.0));
// sun direction is looking up toward Y axis at the specified sun lat, long
Vec3d lssd = Vec3d(_worldToSurfaceMat * Vec4d(_sunDir.x, _sunDir.y, _sunDir.z, 0.0));
// apply surface rotation offset
glm::dquat dSurfOrient(_surfaceOrientation);
lssd = glm::rotate(dSurfOrient, lssd);
_surfaceSunDir = glm::normalize(Vec3(lssd.x, lssd.y, lssd.z));
}
void EarthSunModel::setSurfaceOrientation(const Quat& orientation) {
_surfaceOrientation = orientation;
invalidate();
}
double moduloRange(double val, double minVal, double maxVal) {
double range = maxVal - minVal;
double rval = (val - minVal) / range;
rval = rval - floor(rval);
return rval * range + minVal;
}
const float MAX_LONGITUDE = 180.0f;
const float MAX_LATITUDE = 90.0f;
float validateLongitude(float lon) {
return moduloRange(lon, -MAX_LONGITUDE, MAX_LONGITUDE);
}
float validateLatitude(float lat) {
return moduloRange(lat, -MAX_LATITUDE, MAX_LATITUDE);
}
float validateAltitude(float altitude) {
const float MIN_ALTITUDE = -1000.0f;
const float MAX_ALTITUDE = 100000.0f;
return std::min(std::max(altitude, MIN_ALTITUDE), MAX_ALTITUDE);
}
void EarthSunModel::setLatitude(float lat) {
_latitude = validateLatitude(lat);
invalidate();
}
void EarthSunModel::setLongitude(float lon) {
_longitude = validateLongitude(lon);
invalidate();
}
void EarthSunModel::setAltitude(float altitude) {
_altitude = validateAltitude(altitude);
invalidate();
}
void EarthSunModel::setSunLatitude(float lat) {
_sunLatitude = validateLatitude(lat);
invalidate();
}
void EarthSunModel::setSunLongitude(float lon) {
_sunLongitude = validateLongitude(lon);
invalidate();
}
Atmosphere::Atmosphere() {
// only if created from nothing shall we create the Buffer to store the properties
Data data;
_dataBuffer = gpu::BufferView(new gpu::Buffer(sizeof(Data), (const gpu::Byte*) &data));
setScatteringWavelength(_scatteringWavelength);
setRayleighScattering(_rayleighScattering);
setInnerOuterRadiuses(getInnerRadius(), getOuterRadius());
}
void Atmosphere::setScatteringWavelength(Vec3 wavelength) {
_scatteringWavelength = wavelength;
Data& data = editData();
data._invWaveLength = Vec4(1.0f / powf(wavelength.x, 4.0f), 1.0f / powf(wavelength.y, 4.0f), 1.0f / powf(wavelength.z, 4.0f), 0.0f);
}
void Atmosphere::setRayleighScattering(float scattering) {
_rayleighScattering = scattering;
updateScattering();
}
void Atmosphere::setMieScattering(float scattering) {
_mieScattering = scattering;
updateScattering();
}
void Atmosphere::setSunBrightness(float brightness) {
_sunBrightness = brightness;
updateScattering();
}
void Atmosphere::updateScattering() {
Data& data = editData();
data._scatterings.x = getRayleighScattering() * getSunBrightness();
data._scatterings.y = getMieScattering() * getSunBrightness();
data._scatterings.z = getRayleighScattering() * 4.0f * glm::pi<float>();
data._scatterings.w = getMieScattering() * 4.0f * glm::pi<float>();
}
void Atmosphere::setInnerOuterRadiuses(float inner, float outer) {
Data& data = editData();
data._radiuses.x = inner;
data._radiuses.y = outer;
data._scales.x = 1.0f / (outer - inner);
data._scales.z = data._scales.x / data._scales.y;
}
const int NUM_DAYS_PER_YEAR = 365;
const float NUM_HOURS_PER_DAY = 24.0f;
const float NUM_HOURS_PER_HALF_DAY = NUM_HOURS_PER_DAY * 0.5f;
SunSkyStage::SunSkyStage() :
_sunLight(new Light()),
_skybox(new Skybox())
{
_sunLight->setType(Light::SUN);
setSunIntensity(1.0f);
setSunAmbientIntensity(0.5f);
setSunColor(Vec3(1.0f, 1.0f, 1.0f));
// Default origin location is a special place in the world...
setOriginLocation(122.407f, 37.777f, 0.03f);
// Noon
setDayTime(12.0f);
// Beginning of March
setYearTime(60.0f);
auto skyFromAtmosphereVertex = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(SkyFromAtmosphere_vert)));
auto skyFromAtmosphereFragment = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(SkyFromAtmosphere_frag)));
auto skyShader = gpu::ShaderPointer(gpu::Shader::createProgram(skyFromAtmosphereVertex, skyFromAtmosphereFragment));
auto skyState = gpu::StatePointer(new gpu::State());
// skyState->setStencilEnable(false);
// skyState->setBlendEnable(false);
_skyPipeline = gpu::PipelinePointer(gpu::Pipeline::create(skyShader, skyState));
_skybox.reset(new Skybox());
_skybox->setColor(Color(1.0f, 0.0f, 0.0f));
}
SunSkyStage::~SunSkyStage() {
}
void SunSkyStage::setDayTime(float hour) {
_dayTime = moduloRange(hour, 0.f, NUM_HOURS_PER_DAY);
invalidate();
}
void SunSkyStage::setYearTime(unsigned int day) {
_yearTime = day % NUM_DAYS_PER_YEAR;
invalidate();
}
void SunSkyStage::setOriginOrientation(const Quat& orientation) {
_earthSunModel.setSurfaceOrientation(orientation);
invalidate();
}
void SunSkyStage::setOriginLocation(float longitude, float latitude, float altitude) {
_earthSunModel.setLongitude(longitude);
_earthSunModel.setLatitude(latitude);
_earthSunModel.setAltitude(altitude);
invalidate();
}
void SunSkyStage::setSunModelEnable(bool isEnabled) {
_sunModelEnable = isEnabled;
invalidate();
}
void SunSkyStage::setSunColor(const Vec3& color) {
_sunLight->setColor(color);
}
void SunSkyStage::setSunIntensity(float intensity) {
_sunLight->setIntensity(intensity);
}
void SunSkyStage::setSunAmbientIntensity(float intensity) {
_sunLight->setAmbientIntensity(intensity);
}
void SunSkyStage::setSunDirection(const Vec3& direction) {
if (!isSunModelEnabled()) {
_sunLight->setDirection(direction);
}
}
// The sun declination calculation is taken from https://en.wikipedia.org/wiki/Position_of_the_Sun
double evalSunDeclinaison(double dayNumber) {
return -(23.0 + 44.0/60.0)*cos(glm::radians((360.0/365.0)*(dayNumber + 10.0)));
}
void SunSkyStage::updateGraphicsObject() const {
// Always update the sunLongitude based on the current dayTime and the current origin
// The day time is supposed to be local at the origin
float signedNormalizedDayTime = (_dayTime - NUM_HOURS_PER_HALF_DAY) / NUM_HOURS_PER_HALF_DAY;
float sunLongitude = _earthSunModel.getLongitude() + (MAX_LONGITUDE * signedNormalizedDayTime);
_earthSunModel.setSunLongitude(sunLongitude);
// And update the sunLatitude as the declination depending on the time of the year
_earthSunModel.setSunLatitude(evalSunDeclinaison(_yearTime));
if (isSunModelEnabled()) {
Vec3d sunLightDir = -_earthSunModel.getSurfaceSunDir();
_sunLight->setDirection(Vec3(sunLightDir.x, sunLightDir.y, sunLightDir.z));
double originAlt = _earthSunModel.getAltitude();
_sunLight->setPosition(Vec3(0.0f, originAlt, 0.0f));
}
// Background
switch (getBackgroundMode()) {
case NO_BACKGROUND: {
break;
}
case SKY_DOME: {
break;
}
case SKY_BOX: {
break;
}
case NUM_BACKGROUND_MODES:
Q_UNREACHABLE();
};
static int firstTime = 0;
if (firstTime == 0) {
firstTime++;
gpu::Shader::makeProgram(*(_skyPipeline->getProgram()));
}
}
void SunSkyStage::setBackgroundMode(BackgroundMode mode) {
_backgroundMode = mode;
invalidate();
}
void SunSkyStage::setSkybox(const SkyboxPointer& skybox) {
_skybox = skybox;
invalidate();
//
// Stage.cpp
// libraries/model/src/model
//
// Created by Sam Gateau on 2/24/2015.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Stage.h"
#include <glm/gtx/transform.hpp>
#include <math.h>
#include <qcompilerdetection.h>
using namespace model;
void EarthSunModel::updateAll() const {
updateWorldToSurface();
updateSurfaceToEye();
updateSun();
}
Mat4d EarthSunModel::evalWorldToGeoLocationMat(double longitude, double latitude, double absAltitude, double scale) {
// Longitude is along Z axis but - from east to west
Mat4d rotLon = glm::rotate(glm::radians(longitude), Vec3d(0.0, 0.0, 1.0));
// latitude is along X axis + from south to north
Mat4d rotLat = glm::rotate(-glm::radians(latitude), Vec3d(1.0, 0.0, 0.0));
// translation is moving to the earth surface + altitude at the radius along Y axis
Mat4d surfaceT = glm::translate(Vec3d(0.0, -absAltitude, 0.0));
// Mat4d worldScale = glm::scale(Vec3d(scale));
Mat4d worldToGeoLocMat = surfaceT * rotLat * rotLon;
return worldToGeoLocMat;
}
void EarthSunModel::updateWorldToSurface() const {
// Check if the final position is too close to the earth center ?
double absAltitude = _earthRadius + _altitude;
if ( absAltitude < 0.01) {
absAltitude = 0.01;
}
// Final world to local Frame
_worldToSurfaceMat = evalWorldToGeoLocationMat(_longitude, _latitude, absAltitude, _scale);
// and the inverse
_surfaceToWorldMat = glm::inverse(_worldToSurfaceMat);
_surfacePos = Vec3d(_surfaceToWorldMat * Vec4d(0.0, 0.0, 0.0, 1.0));
}
void EarthSunModel::updateSurfaceToEye() const {
_surfaceToEyeMat = glm::inverse(_eyeToSurfaceMat);
_worldToEyeMat = _surfaceToEyeMat * _worldToSurfaceMat;
_eyeToWorldMat = _surfaceToWorldMat * _eyeToSurfaceMat;
_eyePos = Vec3d(_eyeToWorldMat * Vec4d(0.0, 0.0, 0.0, 1.0) );
_eyeDir = Vec3d(_eyeToWorldMat * Vec4d(0.0, 0.0, -1.0, 0.0) );
}
void EarthSunModel::updateSun() const {
// Longitude is along Y axis but - from east to west
Mat4d rotSunLon;
Mat4d rotSun = evalWorldToGeoLocationMat(_sunLongitude, _sunLatitude, _earthRadius, _scale);
rotSun = glm::inverse(rotSun);
_sunDir = Vec3d(rotSun * Vec4d(0.0, 1.0, 0.0, 0.0));
// sun direction is looking up toward Y axis at the specified sun lat, long
Vec3d lssd = Vec3d(_worldToSurfaceMat * Vec4d(_sunDir.x, _sunDir.y, _sunDir.z, 0.0));
// apply surface rotation offset
glm::dquat dSurfOrient(_surfaceOrientation);
lssd = glm::rotate(dSurfOrient, lssd);
_surfaceSunDir = glm::normalize(Vec3(lssd.x, lssd.y, lssd.z));
}
void EarthSunModel::setSurfaceOrientation(const Quat& orientation) {
_surfaceOrientation = orientation;
invalidate();
}
double moduloRange(double val, double minVal, double maxVal) {
double range = maxVal - minVal;
double rval = (val - minVal) / range;
rval = rval - floor(rval);
return rval * range + minVal;
}
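
moduloRange wraps a value into the half-open range [minVal, maxVal): for example, moduloRange(190.0, -180.0, 180.0) yields -170.0 and moduloRange(-365.0, -180.0, 180.0) yields -5.0. That is how validateLongitude and validateLatitude below keep coordinates in range, and how setDayTime keeps the hour inside [0, 24).
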
const float MAX_LONGITUDE = 180.0f;
const float MAX_LATITUDE = 90.0f;
float validateLongitude(float lon) {
return moduloRange(lon, -MAX_LONGITUDE, MAX_LONGITUDE);
}
float validateLatitude(float lat) {
return moduloRange(lat, -MAX_LATITUDE, MAX_LATITUDE);
}
float validateAltitude(float altitude) {
const float MIN_ALTITUDE = -1000.0f;
const float MAX_ALTITUDE = 100000.0f;
return std::min(std::max(altitude, MIN_ALTITUDE), MAX_ALTITUDE);
}
void EarthSunModel::setLatitude(float lat) {
_latitude = validateLatitude(lat);
invalidate();
}
void EarthSunModel::setLongitude(float lon) {
_longitude = validateLongitude(lon);
invalidate();
}
void EarthSunModel::setAltitude(float altitude) {
_altitude = validateAltitude(altitude);
invalidate();
}
void EarthSunModel::setSunLatitude(float lat) {
_sunLatitude = validateLatitude(lat);
invalidate();
}
void EarthSunModel::setSunLongitude(float lon) {
_sunLongitude = validateLongitude(lon);
invalidate();
}
Atmosphere::Atmosphere() {
// only if created from nothing shall we create the Buffer to store the properties
Data data;
_dataBuffer = gpu::BufferView(new gpu::Buffer(sizeof(Data), (const gpu::Byte*) &data));
setScatteringWavelength(_scatteringWavelength);
setRayleighScattering(_rayleighScattering);
setInnerOuterRadiuses(getInnerRadius(), getOuterRadius());
}
void Atmosphere::setScatteringWavelength(Vec3 wavelength) {
_scatteringWavelength = wavelength;
Data& data = editData();
data._invWaveLength = Vec4(1.0f / powf(wavelength.x, 4.0f), 1.0f / powf(wavelength.y, 4.0f), 1.0f / powf(wavelength.z, 4.0f), 0.0f);
}
void Atmosphere::setRayleighScattering(float scattering) {
_rayleighScattering = scattering;
updateScattering();
}
void Atmosphere::setMieScattering(float scattering) {
_mieScattering = scattering;
updateScattering();
}
void Atmosphere::setSunBrightness(float brightness) {
_sunBrightness = brightness;
updateScattering();
}
void Atmosphere::updateScattering() {
Data& data = editData();
data._scatterings.x = getRayleighScattering() * getSunBrightness();
data._scatterings.y = getMieScattering() * getSunBrightness();
data._scatterings.z = getRayleighScattering() * 4.0f * glm::pi<float>();
data._scatterings.w = getMieScattering() * 4.0f * glm::pi<float>();
}
void Atmosphere::setInnerOuterRadiuses(float inner, float outer) {
Data& data = editData();
data._radiuses.x = inner;
data._radiuses.y = outer;
data._scales.x = 1.0f / (outer - inner);
data._scales.z = data._scales.x / data._scales.y;
}
const int NUM_DAYS_PER_YEAR = 365;
const float NUM_HOURS_PER_DAY = 24.0f;
const float NUM_HOURS_PER_HALF_DAY = NUM_HOURS_PER_DAY * 0.5f;
SunSkyStage::SunSkyStage() :
_sunLight(new Light()),
_skybox(new Skybox())
{
_sunLight->setType(Light::SUN);
setSunIntensity(1.0f);
setSunAmbientIntensity(0.5f);
setSunColor(Vec3(1.0f, 1.0f, 1.0f));
// Default origin location is a special place in the world...
setOriginLocation(122.407f, 37.777f, 0.03f);
// Noon
setDayTime(12.0f);
// Beginning of March
setYearTime(60.0f);
_skybox.reset(new Skybox());
_skybox->setColor(Color(1.0f, 0.0f, 0.0f));
}
SunSkyStage::~SunSkyStage() {
}
void SunSkyStage::setDayTime(float hour) {
_dayTime = moduloRange(hour, 0.f, NUM_HOURS_PER_DAY);
invalidate();
}
void SunSkyStage::setYearTime(unsigned int day) {
_yearTime = day % NUM_DAYS_PER_YEAR;
invalidate();
}
void SunSkyStage::setOriginOrientation(const Quat& orientation) {
_earthSunModel.setSurfaceOrientation(orientation);
invalidate();
}
void SunSkyStage::setOriginLocation(float longitude, float latitude, float altitude) {
_earthSunModel.setLongitude(longitude);
_earthSunModel.setLatitude(latitude);
_earthSunModel.setAltitude(altitude);
invalidate();
}
void SunSkyStage::setSunModelEnable(bool isEnabled) {
_sunModelEnable = isEnabled;
invalidate();
}
void SunSkyStage::setSunColor(const Vec3& color) {
_sunLight->setColor(color);
}
void SunSkyStage::setSunIntensity(float intensity) {
_sunLight->setIntensity(intensity);
}
void SunSkyStage::setSunAmbientIntensity(float intensity) {
_sunLight->setAmbientIntensity(intensity);
}
void SunSkyStage::setSunDirection(const Vec3& direction) {
if (!isSunModelEnabled()) {
_sunLight->setDirection(direction);
}
}
// The sun declination calculation is taken from https://en.wikipedia.org/wiki/Position_of_the_Sun
double evalSunDeclinaison(double dayNumber) {
return -(23.0 + 44.0/60.0)*cos(glm::radians((360.0/365.0)*(dayNumber + 10.0)));
}
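
evalSunDeclinaison implements the day-of-year approximation from the page linked above, declination = -(23 + 44/60)° * cos((360°/365) * (N + 10)), where N is the day of the year. With the constant as written (23°44′, about 23.73°), day 60, the default yearTime set in the constructor, gives roughly -8.5°, and day 172, near the June solstice, gives roughly +23.7°. The cited page states the coefficient as 23.44 decimal degrees (23°26′), so the 23 + 44/60 used here is slightly larger than that value.
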
void SunSkyStage::updateGraphicsObject() const {
// Always update the sunLongitude based on the current dayTime and the current origin
// The day time is supposed to be local at the origin
double signedNormalizedDayTime = (_dayTime - NUM_HOURS_PER_HALF_DAY) / NUM_HOURS_PER_HALF_DAY;
double sunLongitude = _earthSunModel.getLongitude() + (MAX_LONGITUDE * signedNormalizedDayTime);
_earthSunModel.setSunLongitude(sunLongitude);
// And update the sunLatitude as the declination depending on the time of the year
_earthSunModel.setSunLatitude(evalSunDeclinaison(_yearTime));
if (isSunModelEnabled()) {
Vec3d sunLightDir = -_earthSunModel.getSurfaceSunDir();
_sunLight->setDirection(Vec3(sunLightDir.x, sunLightDir.y, sunLightDir.z));
double originAlt = _earthSunModel.getAltitude();
_sunLight->setPosition(Vec3(0.0f, originAlt, 0.0f));
}
// Background
switch (getBackgroundMode()) {
case NO_BACKGROUND: {
break;
}
case SKY_DOME: {
break;
}
case SKY_BOX: {
break;
}
case NUM_BACKGROUND_MODES:
Q_UNREACHABLE();
};
}
void SunSkyStage::setBackgroundMode(BackgroundMode mode) {
_backgroundMode = mode;
invalidate();
}
void SunSkyStage::setSkybox(const SkyboxPointer& skybox) {
_skybox = skybox;
invalidate();
}

View file

@ -1,249 +1,247 @@
//
// Stage.h
// libraries/model/src/model
//
// Created by Sam Gateau on 2/24/2015.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_model_Stage_h
#define hifi_model_Stage_h
#include "gpu/Pipeline.h"
#include "Light.h"
#include "Skybox.h"
namespace model {
typedef glm::dvec3 Vec3d;
typedef glm::dvec4 Vec4d;
typedef glm::dmat4 Mat4d;
typedef glm::mat4 Mat4;
class EarthSunModel {
public:
void setScale(float scale);
float getScale() const { return _scale; }
void setLatitude(float lat);
float getLatitude() const { return _latitude; }
void setLongitude(float lon);
float getLongitude() const { return _longitude; }
void setAltitude(float altitude);
float getAltitude() const { return _altitude; }
void setSurfaceOrientation(const Quat& orientation);
const Quat& getSurfaceOrientation() const { valid(); return _surfaceOrientation; }
const Vec3d& getSurfacePos() const { valid(); return _surfacePos; }
const Mat4d& getSurfaceToWorldMat() const { valid(); return _surfaceToWorldMat; }
const Mat4d& getWoldToSurfaceMat() const { valid(); return _worldToSurfaceMat; }
const Mat4d& getEyeToSurfaceMat() const { valid(); return _eyeToSurfaceMat; }
const Mat4d& getSurfaceToEyeMat() const { valid(); return _surfaceToEyeMat; }
const Mat4d& getEyeToWorldMat() const { valid(); return _eyeToWorldMat; }
const Mat4d& getWorldToEyeMat() const { valid(); return _worldToEyeMat; }
//or set the surfaceToEye mat directly
void setEyeToSurfaceMat( const Mat4d& e2s);
const Vec3d& getEyePos() const { valid(); return _eyePos; }
const Vec3d& getEyeDir() const { valid(); return _eyeDir; }
void setSunLongitude(float lon);
float getSunLongitude() const { return _sunLongitude; }
void setSunLatitude(float lat);
float getSunLatitude() const { return _sunLatitude; }
const Vec3d& getWorldSunDir() const { valid(); return _sunDir; }
const Vec3d& getSurfaceSunDir() const { valid(); return _surfaceSunDir; }
EarthSunModel() { valid(); }
protected:
float _scale = 1000.0f; //Km
float _earthRadius = 6360.0;
Quat _surfaceOrientation;
float _longitude = 0.0f;
float _latitude = 0.0f;
float _altitude = 0.01f;
mutable Vec3d _surfacePos;
mutable Mat4d _worldToSurfaceMat;
mutable Mat4d _surfaceToWorldMat;
void updateWorldToSurface() const;
mutable Mat4d _surfaceToEyeMat;
mutable Mat4d _eyeToSurfaceMat;
mutable Vec3d _eyeDir;
mutable Vec3d _eyePos;
void updateSurfaceToEye() const;
mutable Mat4d _worldToEyeMat;
mutable Mat4d _eyeToWorldMat;
float _sunLongitude = 0.0f;
float _sunLatitude = 0.0f;
mutable Vec3d _sunDir;
mutable Vec3d _surfaceSunDir;
void updateSun() const;
mutable bool _invalid = true;
void invalidate() const { _invalid = true; }
void valid() const { if (_invalid) { updateAll(); _invalid = false; } }
void updateAll() const;
static Mat4d evalWorldToGeoLocationMat(double longitude, double latitude, double altitude, double scale);
};
class Atmosphere {
public:
Atmosphere();
Atmosphere(const Atmosphere& atmosphere);
Atmosphere& operator= (const Atmosphere& atmosphere);
virtual ~Atmosphere() {};
void setScatteringWavelength(Vec3 wavelength);
const Vec3& getScatteringWavelength() const { return _scatteringWavelength; }
void setRayleighScattering(float scattering);
float getRayleighScattering() const { return _rayleighScattering; }
void setMieScattering(float scattering);
float getMieScattering() const { return _mieScattering; }
void setSunBrightness(float brightness);
float getSunBrightness() const { return _sunBrightness; }
void setInnerOuterRadiuses(float inner, float outer);
float getInnerRadius() const { return getData()._radiuses.x; }
float getOuterRadius() const { return getData()._radiuses.y; }
// Data to access the attribute values of the atmosphere
class Data {
public:
Vec4 _invWaveLength = Vec4(0.0f);
Vec4 _radiuses = Vec4(6000.0f, 6025.0f, 0.0f, 0.0f);
Vec4 _scales = Vec4(0.0f, 0.25f, 0.0f, 0.0f);
Vec4 _scatterings = Vec4(0.0f);
Vec4 _control = Vec4(2.0f, -0.990f, -0.990f*-0.990f, 0.f);
Data() {}
};
const UniformBufferView& getDataBuffer() const { return _dataBuffer; }
protected:
UniformBufferView _dataBuffer;
Vec3 _scatteringWavelength = Vec3(0.650f, 0.570f, 0.475f);
float _rayleighScattering = 0.0025f;
float _mieScattering = 0.0010f;
float _sunBrightness = 20.0f;
const Data& getData() const { return _dataBuffer.get<Data>(); }
Data& editData() { return _dataBuffer.edit<Data>(); }
void updateScattering();
};
typedef std::shared_ptr< Atmosphere > AtmospherePointer;
// Sun sky stage generates the rendering primitives to display a scene realistically
// at the specified location and time around earth
class SunSkyStage {
public:
SunSkyStage();
~SunSkyStage();
// time of the day (local to the position) expressed in decimal hour in the range [0.0, 24.0]
void setDayTime(float hour);
float getDayTime() const { return _dayTime; }
// time of year expressed as a day number in the range [0, 365]
void setYearTime(unsigned int day);
unsigned int getYearTime() const { return _yearTime; }
// Origin orientation used to modify the cardinal axis alignment.
// The default is north along the +Z axis and west along the +X axis. This orientation gets added
// to the transform stack producing the sun light direction.
void setOriginOrientation(const Quat& orientation);
const Quat& getOriginOrientation() const { return _earthSunModel.getSurfaceOrientation(); }
// The location used to define the sun & sky is a longitude and latitude [rad] and an earth surface altitude [km]
void setOriginLocation(float longitude, float latitude, float surfaceAltitude);
float getOriginLatitude() const { return _earthSunModel.getLatitude(); }
float getOriginLongitude() const { return _earthSunModel.getLongitude(); }
float getOriginSurfaceAltitude() const { return _earthSunModel.getAltitude(); }
// Enable / disable the effect of the time and location on the sun direction and color
void setSunModelEnable(bool isEnabled);
bool isSunModelEnabled() const { return _sunModelEnable; }
// Sun properties
void setSunColor(const Vec3& color);
const Vec3& getSunColor() const { return getSunLight()->getColor(); }
void setSunIntensity(float intensity);
float getSunIntensity() const { return getSunLight()->getIntensity(); }
void setSunAmbientIntensity(float intensity);
float getSunAmbientIntensity() const { return getSunLight()->getAmbientIntensity(); }
// The sun direction is expressed in world space
void setSunDirection(const Vec3& direction);
const Vec3& getSunDirection() const { return getSunLight()->getDirection(); }
LightPointer getSunLight() const { valid(); return _sunLight; }
AtmospherePointer getAtmosphere() const { valid(); return _atmosphere; }
enum BackgroundMode {
NO_BACKGROUND = 0,
SKY_DOME,
SKY_BOX,
NUM_BACKGROUND_MODES,
};
void setBackgroundMode(BackgroundMode mode);
BackgroundMode getBackgroundMode() const { return _backgroundMode; }
// Skybox
void setSkybox(const SkyboxPointer& skybox);
const SkyboxPointer& getSkybox() const { valid(); return _skybox; }
protected:
BackgroundMode _backgroundMode = SKY_BOX;
LightPointer _sunLight;
AtmospherePointer _atmosphere;
mutable SkyboxPointer _skybox;
gpu::PipelinePointer _skyPipeline;
float _dayTime = 12.0f;
int _yearTime = 0;
mutable EarthSunModel _earthSunModel;
bool _sunModelEnable = true;
mutable bool _invalid = true;
void invalidate() const { _invalid = true; }
void valid() const { if (_invalid) { updateGraphicsObject(); _invalid = false; } }
void updateGraphicsObject() const;
};
typedef std::shared_ptr< SunSkyStage > SunSkyStagePointer;
};
#endif
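Note on the Atmosphere block above: the sky shaders later in this commit consume the per-channel inverse fourth power of the scattering wavelengths (the 1 / pow(wavelength, 4) uniforms). The following is a minimal, standalone sketch of that arithmetic using the default wavelengths declared above; the program is illustrative only and not part of the library.

#include <cmath>
#include <cstdio>

// Illustrative only: derives the 1/lambda^4 term from the default
// scattering wavelengths (0.650, 0.570, 0.475) declared in Atmosphere.
int main() {
    const float wavelengths[3] = { 0.650f, 0.570f, 0.475f }; // red, green, blue
    float invWavelength[3];
    for (int i = 0; i < 3; ++i) {
        invWavelength[i] = 1.0f / std::pow(wavelengths[i], 4.0f);
    }
    // Shorter (blue) wavelengths scatter more strongly, which is what tints the sky blue.
    std::printf("invWavelength = %f %f %f\n",
                invWavelength[0], invWavelength[1], invWavelength[2]);
    return 0;
}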
//
// Stage.h
// libraries/model/src/model
//
// Created by Sam Gateau on 2/24/2015.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_model_Stage_h
#define hifi_model_Stage_h
#include "gpu/Pipeline.h"
#include "Light.h"
#include "Skybox.h"
namespace model {
typedef glm::dvec3 Vec3d;
typedef glm::dvec4 Vec4d;
typedef glm::dmat4 Mat4d;
typedef glm::mat4 Mat4;
class EarthSunModel {
public:
void setScale(float scale);
float getScale() const { return _scale; }
void setLatitude(float lat);
float getLatitude() const { return _latitude; }
void setLongitude(float lon);
float getLongitude() const { return _longitude; }
void setAltitude(float altitude);
float getAltitude() const { return _altitude; }
void setSurfaceOrientation(const Quat& orientation);
const Quat& getSurfaceOrientation() const { valid(); return _surfaceOrientation; }
const Vec3d& getSurfacePos() const { valid(); return _surfacePos; }
const Mat4d& getSurfaceToWorldMat() const { valid(); return _surfaceToWorldMat; }
const Mat4d& getWoldToSurfaceMat() const { valid(); return _worldToSurfaceMat; }
const Mat4d& getEyeToSurfaceMat() const { valid(); return _eyeToSurfaceMat; }
const Mat4d& getSurfaceToEyeMat() const { valid(); return _surfaceToEyeMat; }
const Mat4d& getEyeToWorldMat() const { valid(); return _eyeToWorldMat; }
const Mat4d& getWorldToEyeMat() const { valid(); return _worldToEyeMat; }
// or set the surfaceToEye mat directly
void setEyeToSurfaceMat( const Mat4d& e2s);
const Vec3d& getEyePos() const { valid(); return _eyePos; }
const Vec3d& getEyeDir() const { valid(); return _eyeDir; }
void setSunLongitude(float lon);
float getSunLongitude() const { return _sunLongitude; }
void setSunLatitude(float lat);
float getSunLatitude() const { return _sunLatitude; }
const Vec3d& getWorldSunDir() const { valid(); return _sunDir; }
const Vec3d& getSurfaceSunDir() const { valid(); return _surfaceSunDir; }
EarthSunModel() { valid(); }
protected:
float _scale = 1000.0f; //Km
double _earthRadius = 6360.0;
Quat _surfaceOrientation;
float _longitude = 0.0f;
float _latitude = 0.0f;
float _altitude = 0.01f;
mutable Vec3d _surfacePos;
mutable Mat4d _worldToSurfaceMat;
mutable Mat4d _surfaceToWorldMat;
void updateWorldToSurface() const;
mutable Mat4d _surfaceToEyeMat;
mutable Mat4d _eyeToSurfaceMat;
mutable Vec3d _eyeDir;
mutable Vec3d _eyePos;
void updateSurfaceToEye() const;
mutable Mat4d _worldToEyeMat;
mutable Mat4d _eyeToWorldMat;
float _sunLongitude = 0.0f;
float _sunLatitude = 0.0f;
mutable Vec3d _sunDir;
mutable Vec3d _surfaceSunDir;
void updateSun() const;
mutable bool _invalid = true;
void invalidate() const { _invalid = true; }
void valid() const { if (_invalid) { updateAll(); _invalid = false; } }
void updateAll() const;
static Mat4d evalWorldToGeoLocationMat(double longitude, double latitude, double altitude, double scale);
};
class Atmosphere {
public:
Atmosphere();
Atmosphere(const Atmosphere& atmosphere);
Atmosphere& operator= (const Atmosphere& atmosphere);
virtual ~Atmosphere() {};
void setScatteringWavelength(Vec3 wavelength);
const Vec3& getScatteringWavelength() const { return _scatteringWavelength; }
void setRayleighScattering(float scattering);
float getRayleighScattering() const { return _rayleighScattering; }
void setMieScattering(float scattering);
float getMieScattering() const { return _mieScattering; }
void setSunBrightness(float brightness);
float getSunBrightness() const { return _sunBrightness; }
void setInnerOuterRadiuses(float inner, float outer);
float getInnerRadius() const { return getData()._radiuses.x; }
float getOuterRadius() const { return getData()._radiuses.y; }
// Data to access the attribute values of the atmosphere
class Data {
public:
Vec4 _invWaveLength = Vec4(0.0f);
Vec4 _radiuses = Vec4(6000.0f, 6025.0f, 0.0f, 0.0f);
Vec4 _scales = Vec4(0.0f, 0.25f, 0.0f, 0.0f);
Vec4 _scatterings = Vec4(0.0f);
Vec4 _control = Vec4(2.0f, -0.990f, -0.990f*-0.990f, 0.f);
Data() {}
};
const UniformBufferView& getDataBuffer() const { return _dataBuffer; }
protected:
UniformBufferView _dataBuffer;
Vec3 _scatteringWavelength = Vec3(0.650f, 0.570f, 0.475f);
float _rayleighScattering = 0.0025f;
float _mieScattering = 0.0010f;
float _sunBrightness = 20.0f;
const Data& getData() const { return _dataBuffer.get<Data>(); }
Data& editData() { return _dataBuffer.edit<Data>(); }
void updateScattering();
};
typedef std::shared_ptr< Atmosphere > AtmospherePointer;
// The sun/sky stage generates the rendering primitives needed to display a scene realistically
// at the specified location and time on Earth
class SunSkyStage {
public:
SunSkyStage();
~SunSkyStage();
// time of day (local to the position) expressed in decimal hours in the range [0.0, 24.0]
void setDayTime(float hour);
float getDayTime() const { return _dayTime; }
// time of year expressed as a day number in the range [0, 365]
void setYearTime(unsigned int day);
unsigned int getYearTime() const { return _yearTime; }
// Origin orientation used to modify the cardinal axis alignment.
// The default is north along the +Z axis and west along the +X axis. This orientation gets added
// to the transform stack producing the sun light direction.
void setOriginOrientation(const Quat& orientation);
const Quat& getOriginOrientation() const { return _earthSunModel.getSurfaceOrientation(); }
// The location used to define the sun & sky is a longitude and latitude [rad] and an earth surface altitude [km]
void setOriginLocation(float longitude, float latitude, float surfaceAltitude);
float getOriginLatitude() const { return _earthSunModel.getLatitude(); }
float getOriginLongitude() const { return _earthSunModel.getLongitude(); }
float getOriginSurfaceAltitude() const { return _earthSunModel.getAltitude(); }
// Enable / disable the effect of the time and location on the sun direction and color
void setSunModelEnable(bool isEnabled);
bool isSunModelEnabled() const { return _sunModelEnable; }
// Sun properties
void setSunColor(const Vec3& color);
const Vec3& getSunColor() const { return getSunLight()->getColor(); }
void setSunIntensity(float intensity);
float getSunIntensity() const { return getSunLight()->getIntensity(); }
void setSunAmbientIntensity(float intensity);
float getSunAmbientIntensity() const { return getSunLight()->getAmbientIntensity(); }
// The sun direction is expressed in world space
void setSunDirection(const Vec3& direction);
const Vec3& getSunDirection() const { return getSunLight()->getDirection(); }
LightPointer getSunLight() const { valid(); return _sunLight; }
AtmospherePointer getAtmosphere() const { valid(); return _atmosphere; }
enum BackgroundMode {
NO_BACKGROUND = 0,
SKY_DOME,
SKY_BOX,
NUM_BACKGROUND_MODES,
};
void setBackgroundMode(BackgroundMode mode);
BackgroundMode getBackgroundMode() const { return _backgroundMode; }
// Skybox
void setSkybox(const SkyboxPointer& skybox);
const SkyboxPointer& getSkybox() const { valid(); return _skybox; }
protected:
BackgroundMode _backgroundMode = SKY_BOX;
LightPointer _sunLight;
AtmospherePointer _atmosphere;
mutable SkyboxPointer _skybox;
float _dayTime = 12.0f;
int _yearTime = 0;
mutable EarthSunModel _earthSunModel;
bool _sunModelEnable = true;
mutable bool _invalid = true;
void invalidate() const { _invalid = true; }
void valid() const { if (_invalid) { updateGraphicsObject(); _invalid = false; } }
void updateGraphicsObject() const;
};
typedef std::shared_ptr< SunSkyStage > SunSkyStagePointer;
};
#endif
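A minimal usage sketch for the SunSkyStage interface declared above, assuming the model library's Stage.h is on the include path; the numeric values are illustrative only.

#include "Stage.h"

// Sketch: drive the sun model from a time of day and an origin location,
// then read back the resulting world-space sun direction.
void configureStage(model::SunSkyStage& stage) {
    stage.setOriginLocation(0.0f, 0.7853f, 0.010f); // longitude [rad], latitude [rad], altitude [km]
    stage.setDayTime(17.5f);                        // decimal hours in [0.0, 24.0]
    stage.setYearTime(172);                         // day of the year in [0, 365]
    stage.setSunModelEnable(true);                  // let time and location drive the sun
    const auto& sunDir = stage.getSunDirection();   // world-space direction, updated lazily
    (void)sunDir;
}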

View file

@ -305,7 +305,7 @@ void AddressManager::goToAddressFromObject(const QVariantMap& dataObject, const
<< returnedPath;
}
} else {
handlePath(overridePath, trigger);
handlePath(returnedPath, trigger);
}
} else {
// we didn't override the path or get one back - ask the DS for the viewpoint of its index path

View file

@ -72,6 +72,8 @@ public slots:
void goBack();
void goForward();
void goToUser(const QString& username);
void storeCurrentAddress();
void copyAddress();
@ -100,7 +102,6 @@ private slots:
void handleAPIResponse(QNetworkReply& requestReply);
void handleAPIError(QNetworkReply& errorReply);
void goToUser(const QString& username);
void goToAddressFromObject(const QVariantMap& addressMap, const QNetworkReply& reply);
private:
void setHost(const QString& host, LookupTrigger trigger);

View file

@ -384,10 +384,12 @@ bool OctreePacketData::appendValue(const glm::vec3& value) {
bool OctreePacketData::appendValue(const QVector<glm::vec3>& value) {
uint16_t qVecSize = value.size();
bool success = appendValue(qVecSize);
success = append((const unsigned char*)value.constData(), qVecSize * sizeof(glm::vec3));
if (success) {
_bytesOfValues += qVecSize * sizeof(glm::vec3);
_totalBytesOfValues += qVecSize * sizeof(glm::vec3);
success = append((const unsigned char*)value.constData(), qVecSize * sizeof(glm::vec3));
if (success) {
_bytesOfValues += qVecSize * sizeof(glm::vec3);
_totalBytesOfValues += qVecSize * sizeof(glm::vec3);
}
}
return success;
}

View file

@ -270,7 +270,9 @@ void MeshMassProperties::computeMassProperties(const VectorOfPoints& points, con
}
// create some variables to hold temporary results
#ifndef NDEBUG
uint32_t numPoints = points.size();
#endif
const btVector3 p0(0.0f, 0.0f, 0.0f);
btMatrix3x3 tetraInertia;
btMatrix3x3 doubleDebugInertia;
@ -281,9 +283,11 @@ void MeshMassProperties::computeMassProperties(const VectorOfPoints& points, con
uint32_t numTriangles = triangleIndices.size() / 3;
for (uint32_t i = 0; i < numTriangles; ++i) {
uint32_t t = 3 * i;
#ifndef NDEBUG
assert(triangleIndices[t] < numPoints);
assert(triangleIndices[t + 1] < numPoints);
assert(triangleIndices[t + 2] < numPoints);
#endif
// extract raw vertices
tetraPoints[0] = p0;

View file

@ -9,25 +9,25 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InterfaceConfig.h"
#include <QByteArray>
#include <QMutexLocker>
#include <QtDebug>
#include "GeometryCache.h"
#include <GeometryUtil.h>
#include <NumericalConstants.h>
#include <OctreePacketData.h>
#include <PacketHeaders.h>
#include <PathUtils.h>
#include <ProgramObject.h>
#include <SharedUtil.h>
#include "Application.h"
#include "Camera.h"
#include "world.h"
#include "InterfaceLogging.h"
#include "Environment.h"
#include "SkyFromSpace_vert.h"
#include "SkyFromSpace_frag.h"
#include "SkyFromAtmosphere_vert.h"
#include "SkyFromAtmosphere_frag.h"
uint qHash(const HifiSockAddr& sockAddr) {
if (sockAddr.getAddress().isNull()) {
return 0; // shouldn't happen, but if it does, zero is a perfectly valid hash
@ -42,20 +42,15 @@ Environment::Environment()
}
Environment::~Environment() {
if (_initialized) {
delete _skyFromAtmosphereProgram;
delete _skyFromSpaceProgram;
}
}
void Environment::init() {
if (_initialized) {
qCDebug(interfaceapp, "[ERROR] Environment is already initialized.");
return;
}
_skyFromAtmosphereProgram = createSkyProgram("Atmosphere", _skyFromAtmosphereUniformLocations);
_skyFromSpaceProgram = createSkyProgram("Space", _skyFromSpaceUniformLocations);
setupAtmosphereProgram(SkyFromSpace_vert, SkyFromSpace_frag, _skyFromSpaceProgram, _skyFromSpaceUniformLocations);
setupAtmosphereProgram(SkyFromAtmosphere_vert, SkyFromAtmosphere_frag, _skyFromAtmosphereProgram, _skyFromAtmosphereUniformLocations);
// start off with a default-constructed environment data
_data[HifiSockAddr()][0];
@ -63,22 +58,60 @@ void Environment::init() {
_initialized = true;
}
void Environment::setupAtmosphereProgram(const char* vertSource, const char* fragSource, gpu::PipelinePointer& pipeline, int* locations) {
auto VS = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(vertSource)));
auto PS = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(fragSource)));
gpu::ShaderPointer program = gpu::ShaderPointer(gpu::Shader::createProgram(VS, PS));
gpu::Shader::BindingSet slotBindings;
gpu::Shader::makeProgram(*program, slotBindings);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setCullMode(gpu::State::CULL_NONE);
state->setDepthTest(false);
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
pipeline = gpu::PipelinePointer(gpu::Pipeline::create(program, state));
locations[CAMERA_POS_LOCATION] = program->getUniforms().findLocation("v3CameraPos");
locations[LIGHT_POS_LOCATION] = program->getUniforms().findLocation("v3LightPos");
locations[INV_WAVELENGTH_LOCATION] = program->getUniforms().findLocation("v3InvWavelength");
locations[CAMERA_HEIGHT2_LOCATION] = program->getUniforms().findLocation("fCameraHeight2");
locations[OUTER_RADIUS_LOCATION] = program->getUniforms().findLocation("fOuterRadius");
locations[OUTER_RADIUS2_LOCATION] = program->getUniforms().findLocation("fOuterRadius2");
locations[INNER_RADIUS_LOCATION] = program->getUniforms().findLocation("fInnerRadius");
locations[KR_ESUN_LOCATION] = program->getUniforms().findLocation("fKrESun");
locations[KM_ESUN_LOCATION] = program->getUniforms().findLocation("fKmESun");
locations[KR_4PI_LOCATION] = program->getUniforms().findLocation("fKr4PI");
locations[KM_4PI_LOCATION] = program->getUniforms().findLocation("fKm4PI");
locations[SCALE_LOCATION] = program->getUniforms().findLocation("fScale");
locations[SCALE_DEPTH_LOCATION] = program->getUniforms().findLocation("fScaleDepth");
locations[SCALE_OVER_SCALE_DEPTH_LOCATION] = program->getUniforms().findLocation("fScaleOverScaleDepth");
locations[G_LOCATION] = program->getUniforms().findLocation("g");
locations[G2_LOCATION] = program->getUniforms().findLocation("g2");
}
void Environment::resetToDefault() {
_data.clear();
_data[HifiSockAddr()][0];
}
void Environment::renderAtmospheres(ViewFrustum& camera) {
void Environment::renderAtmospheres(gpu::Batch& batch, ViewFrustum& camera) {
// get the lock for the duration of the call
QMutexLocker locker(&_mutex);
if (_environmentIsOverridden) {
renderAtmosphere(camera, _overrideData);
renderAtmosphere(batch, camera, _overrideData);
} else {
foreach (const ServerData& serverData, _data) {
// TODO: do something about EnvironmentData
foreach (const EnvironmentData& environmentData, serverData) {
renderAtmosphere(camera, environmentData);
renderAtmosphere(batch, camera, environmentData);
}
}
}
@ -201,87 +234,51 @@ int Environment::parseData(const HifiSockAddr& senderAddress, const QByteArray&
return bytesRead;
}
ProgramObject* Environment::createSkyProgram(const char* from, int* locations) {
ProgramObject* program = new ProgramObject();
QByteArray prefix = QString(PathUtils::resourcesPath() + "/shaders/SkyFrom" + from).toUtf8();
program->addShaderFromSourceFile(QGLShader::Vertex, prefix + ".vert");
program->addShaderFromSourceFile(QGLShader::Fragment, prefix + ".frag");
program->link();
locations[CAMERA_POS_LOCATION] = program->uniformLocation("v3CameraPos");
locations[LIGHT_POS_LOCATION] = program->uniformLocation("v3LightPos");
locations[INV_WAVELENGTH_LOCATION] = program->uniformLocation("v3InvWavelength");
locations[CAMERA_HEIGHT2_LOCATION] = program->uniformLocation("fCameraHeight2");
locations[OUTER_RADIUS_LOCATION] = program->uniformLocation("fOuterRadius");
locations[OUTER_RADIUS2_LOCATION] = program->uniformLocation("fOuterRadius2");
locations[INNER_RADIUS_LOCATION] = program->uniformLocation("fInnerRadius");
locations[KR_ESUN_LOCATION] = program->uniformLocation("fKrESun");
locations[KM_ESUN_LOCATION] = program->uniformLocation("fKmESun");
locations[KR_4PI_LOCATION] = program->uniformLocation("fKr4PI");
locations[KM_4PI_LOCATION] = program->uniformLocation("fKm4PI");
locations[SCALE_LOCATION] = program->uniformLocation("fScale");
locations[SCALE_DEPTH_LOCATION] = program->uniformLocation("fScaleDepth");
locations[SCALE_OVER_SCALE_DEPTH_LOCATION] = program->uniformLocation("fScaleOverScaleDepth");
locations[G_LOCATION] = program->uniformLocation("g");
locations[G2_LOCATION] = program->uniformLocation("g2");
return program;
}
void Environment::renderAtmosphere(gpu::Batch& batch, ViewFrustum& camera, const EnvironmentData& data) {
void Environment::renderAtmosphere(ViewFrustum& camera, const EnvironmentData& data) {
glm::vec3 center = data.getAtmosphereCenter();
glPushMatrix();
glTranslatef(center.x, center.y, center.z);
Transform transform;
transform.setTranslation(center);
batch.setModelTransform(transform);
glm::vec3 relativeCameraPos = camera.getPosition() - center;
float height = glm::length(relativeCameraPos);
// use the appropriate shader depending on whether we're inside or outside
ProgramObject* program;
int* locations;
if (height < data.getAtmosphereOuterRadius()) {
program = _skyFromAtmosphereProgram;
batch.setPipeline(_skyFromAtmosphereProgram);
locations = _skyFromAtmosphereUniformLocations;
} else {
program = _skyFromSpaceProgram;
batch.setPipeline(_skyFromSpaceProgram);
locations = _skyFromSpaceUniformLocations;
}
// the constants here are from Sean O'Neil's GPU Gems entry
// (http://http.developer.nvidia.com/GPUGems2/gpugems2_chapter16.html), GameEngine.cpp
program->bind();
program->setUniform(locations[CAMERA_POS_LOCATION], relativeCameraPos);
batch._glUniform3f(locations[CAMERA_POS_LOCATION], relativeCameraPos.x, relativeCameraPos.y, relativeCameraPos.z);
glm::vec3 lightDirection = glm::normalize(data.getSunLocation());
program->setUniform(locations[LIGHT_POS_LOCATION], lightDirection);
program->setUniformValue(locations[INV_WAVELENGTH_LOCATION],
1 / powf(data.getScatteringWavelengths().r, 4.0f),
1 / powf(data.getScatteringWavelengths().g, 4.0f),
1 / powf(data.getScatteringWavelengths().b, 4.0f));
program->setUniformValue(locations[CAMERA_HEIGHT2_LOCATION], height * height);
program->setUniformValue(locations[OUTER_RADIUS_LOCATION], data.getAtmosphereOuterRadius());
program->setUniformValue(locations[OUTER_RADIUS2_LOCATION], data.getAtmosphereOuterRadius() * data.getAtmosphereOuterRadius());
program->setUniformValue(locations[INNER_RADIUS_LOCATION], data.getAtmosphereInnerRadius());
program->setUniformValue(locations[KR_ESUN_LOCATION], data.getRayleighScattering() * data.getSunBrightness());
program->setUniformValue(locations[KM_ESUN_LOCATION], data.getMieScattering() * data.getSunBrightness());
program->setUniformValue(locations[KR_4PI_LOCATION], data.getRayleighScattering() * 4.0f * PI);
program->setUniformValue(locations[KM_4PI_LOCATION], data.getMieScattering() * 4.0f * PI);
program->setUniformValue(locations[SCALE_LOCATION], 1.0f / (data.getAtmosphereOuterRadius() - data.getAtmosphereInnerRadius()));
program->setUniformValue(locations[SCALE_DEPTH_LOCATION], 0.25f);
program->setUniformValue(locations[SCALE_OVER_SCALE_DEPTH_LOCATION],
batch._glUniform3f(locations[LIGHT_POS_LOCATION], lightDirection.x, lightDirection.y, lightDirection.z);
batch._glUniform3f(locations[INV_WAVELENGTH_LOCATION],
1 / powf(data.getScatteringWavelengths().r, 4.0f),
1 / powf(data.getScatteringWavelengths().g, 4.0f),
1 / powf(data.getScatteringWavelengths().b, 4.0f));
batch._glUniform1f(locations[CAMERA_HEIGHT2_LOCATION], height * height);
batch._glUniform1f(locations[OUTER_RADIUS_LOCATION], data.getAtmosphereOuterRadius());
batch._glUniform1f(locations[OUTER_RADIUS2_LOCATION], data.getAtmosphereOuterRadius() * data.getAtmosphereOuterRadius());
batch._glUniform1f(locations[INNER_RADIUS_LOCATION], data.getAtmosphereInnerRadius());
batch._glUniform1f(locations[KR_ESUN_LOCATION], data.getRayleighScattering() * data.getSunBrightness());
batch._glUniform1f(locations[KM_ESUN_LOCATION], data.getMieScattering() * data.getSunBrightness());
batch._glUniform1f(locations[KR_4PI_LOCATION], data.getRayleighScattering() * 4.0f * PI);
batch._glUniform1f(locations[KM_4PI_LOCATION], data.getMieScattering() * 4.0f * PI);
batch._glUniform1f(locations[SCALE_LOCATION], 1.0f / (data.getAtmosphereOuterRadius() - data.getAtmosphereInnerRadius()));
batch._glUniform1f(locations[SCALE_DEPTH_LOCATION], 0.25f);
batch._glUniform1f(locations[SCALE_OVER_SCALE_DEPTH_LOCATION],
(1.0f / (data.getAtmosphereOuterRadius() - data.getAtmosphereInnerRadius())) / 0.25f);
program->setUniformValue(locations[G_LOCATION], -0.990f);
program->setUniformValue(locations[G2_LOCATION], -0.990f * -0.990f);
batch._glUniform1f(locations[G_LOCATION], -0.990f);
batch._glUniform1f(locations[G2_LOCATION], -0.990f * -0.990f);
glDepthMask(GL_FALSE);
glDisable(GL_DEPTH_TEST);
glDisable(GL_CULL_FACE);
glEnable(GL_BLEND);
DependencyManager::get<GeometryCache>()->renderSphere(1.0f, 100, 50, glm::vec4(1.0f, 1.0f, 1.0f, 1.0f)); //Draw a unit sphere
glDepthMask(GL_TRUE);
program->release();
glPopMatrix();
DependencyManager::get<GeometryCache>()->renderSphere(batch,1.0f, 100, 50, glm::vec4(1.0f, 0.0f, 0.0f, 0.5f)); //Draw a unit sphere
}
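The uniform values pushed through batch._glUniform1f above are all simple functions of the EnvironmentData parameters. The sketch below only restates that arithmetic in one place for reference; it is not a new API.

// Kr/Km are the Rayleigh/Mie scattering coefficients and ESun the sun brightness,
// matching the expressions used in Environment::renderAtmosphere above.
struct SkyUniforms {
    float krESun, kmESun, kr4PI, km4PI, scale, scaleDepth, scaleOverScaleDepth;
};

SkyUniforms deriveSkyUniforms(float kr, float km, float eSun, float innerRadius, float outerRadius) {
    const float PI = 3.14159265f;
    SkyUniforms u;
    u.krESun = kr * eSun;                           // fKrESun
    u.kmESun = km * eSun;                           // fKmESun
    u.kr4PI = kr * 4.0f * PI;                       // fKr4PI
    u.km4PI = km * 4.0f * PI;                       // fKm4PI
    u.scale = 1.0f / (outerRadius - innerRadius);   // fScale
    u.scaleDepth = 0.25f;                           // fScaleDepth
    u.scaleOverScaleDepth = u.scale / u.scaleDepth; // fScaleOverScaleDepth
    return u;
}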

View file

@ -16,8 +16,9 @@
#include <QMutex>
#include <HifiSockAddr.h>
#include <gpu/Batch.h>
#include "EnvironmentData.h"
#include <EnvironmentData.h>
class ViewFrustum;
class ProgramObject;
@ -29,7 +30,7 @@ public:
void init();
void resetToDefault();
void renderAtmospheres(ViewFrustum& camera);
void renderAtmospheres(gpu::Batch& batch, ViewFrustum& camera);
void override(const EnvironmentData& overrideData) { _overrideData = overrideData; _environmentIsOverridden = true; }
void endOverride() { _environmentIsOverridden = false; }
@ -44,14 +45,10 @@ private:
bool findCapsulePenetration(const glm::vec3& start,
const glm::vec3& end, float radius, glm::vec3& penetration); // NOTE: Deprecated
ProgramObject* createSkyProgram(const char* from, int* locations);
void renderAtmosphere(ViewFrustum& camera, const EnvironmentData& data);
void renderAtmosphere(gpu::Batch& batch, ViewFrustum& camera, const EnvironmentData& data);
bool _initialized;
ProgramObject* _skyFromAtmosphereProgram;
ProgramObject* _skyFromSpaceProgram;
enum {
CAMERA_POS_LOCATION,
LIGHT_POS_LOCATION,
@ -72,6 +69,11 @@ private:
LOCATION_COUNT
};
void setupAtmosphereProgram(const char* vertSource, const char* fragSource, gpu::PipelinePointer& pipelineProgram, int* locations);
gpu::PipelinePointer _skyFromAtmosphereProgram;
gpu::PipelinePointer _skyFromSpaceProgram;
int _skyFromAtmosphereUniformLocations[LOCATION_COUNT];
int _skyFromSpaceUniformLocations[LOCATION_COUNT];

View file

@ -15,9 +15,12 @@
#include "DeferredLightingEffect.h"
#include "ViewFrustum.h"
#include "RenderArgs.h"
#include "TextureCache.h"
#include <PerfStat.h>
#include "overlay3D_vert.h"
#include "overlay3D_frag.h"
using namespace render;
@ -50,7 +53,7 @@ RenderDeferredTask::RenderDeferredTask() : Task() {
_jobs.push_back(Job(RenderDeferred()));
_jobs.push_back(Job(ResolveDeferred()));
_jobs.push_back(Job(DrawTransparentDeferred()));
_jobs.push_back(Job(DrawPostLayered()));
_jobs.push_back(Job(DrawOverlay3D()));
_jobs.push_back(Job(ResetGLState()));
}
@ -225,10 +228,76 @@ template <> void render::jobRun(const DrawTransparentDeferred& job, const SceneC
renderItems(sceneContext, renderContext, renderedItems, renderContext->_maxDrawnTransparentItems);
// Before rendering the batch, make sure we're in sync with the GL state
args->_context->syncCache();
args->_context->render((*args->_batch));
args->_batch = nullptr;
// reset blend function to standard...
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
// glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
}
}
const gpu::PipelinePointer& DrawOverlay3D::getOpaquePipeline() const {
if (!_opaquePipeline) {
auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(overlay3D_vert)));
auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(overlay3D_frag)));
auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));
auto state = gpu::StatePointer(new gpu::State());
state->setDepthTest(true, true, gpu::LESS_EQUAL);
_opaquePipeline.reset(gpu::Pipeline::create(program, state));
}
return _opaquePipeline;
}
template <> void render::jobRun(const DrawOverlay3D& job, const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
PerformanceTimer perfTimer("DrawOverlay3D");
assert(renderContext->args);
assert(renderContext->args->_viewFrustum);
// gather the layered items (3D overlays)
auto& scene = sceneContext->_scene;
auto& items = scene->getMasterBucket().at(ItemFilter::Builder::opaqueShape().withLayered());
ItemIDsBounds inItems;
inItems.reserve(items.size());
for (auto id : items) {
auto& item = scene->getItem(id);
if (item.getKey().isVisible() && (item.getLayer() == 1)) {
inItems.emplace_back(id);
}
}
RenderArgs* args = renderContext->args;
gpu::Batch batch;
args->_batch = &batch;
args->_whiteTexture = DependencyManager::get<TextureCache>()->getWhiteTexture();
glm::mat4 projMat;
Transform viewMat;
args->_viewFrustum->evalProjectionMatrix(projMat);
args->_viewFrustum->evalViewTransform(viewMat);
if (args->_renderMode == RenderArgs::MIRROR_RENDER_MODE) {
viewMat.postScale(glm::vec3(-1.0f, 1.0f, 1.0f));
}
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewMat);
batch.setPipeline(job.getOpaquePipeline());
batch.setUniformTexture(0, args->_whiteTexture);
if (!inItems.empty()) {
batch.clearFramebuffer(gpu::Framebuffer::BUFFER_DEPTH, glm::vec4(), 1.f, 0);
renderItems(sceneContext, renderContext, inItems);
}
// Before rendering the batch, make sure we're in sync with the GL state
args->_context->syncCache();
args->_context->render((*args->_batch));
args->_batch = nullptr;
args->_whiteTexture.reset();
}

View file

@ -14,6 +14,8 @@
#include "render/DrawTask.h"
#include "gpu/Pipeline.h"
class PrepareDeferred {
public:
};
@ -50,6 +52,15 @@ namespace render {
template <> void jobRun(const DrawTransparentDeferred& job, const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext);
}
class DrawOverlay3D {
mutable gpu::PipelinePointer _opaquePipeline; // lazy evaluation, hence mutable
public:
const gpu::PipelinePointer& getOpaquePipeline() const;
};
namespace render {
template <> void jobRun(const DrawOverlay3D& job, const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext);
}
class RenderDeferredTask : public render::Task {
public:

View file

@ -53,12 +53,14 @@ uniform float g2;
varying vec3 position;
float scale(float fCos)
{
float x = 1.0 - fCos;
return fScaleDepth * exp(-0.00287 + x*(0.459 + x*(3.83 + x*(-6.80 + x*5.25))));
}
void main (void)
{
// Get the ray from the camera to the vertex, and its length (which is the far point of the ray passing through the atmosphere)
@ -102,7 +104,8 @@ void main (void)
float fCos = dot(v3LightPos, v3Direction) / length(v3Direction);
float fMiePhase = 1.5 * ((1.0 - g2) / (2.0 + g2)) * (1.0 + fCos*fCos) / pow(1.0 + g2 - 2.0*g*fCos, 1.5);
gl_FragColor.rgb = frontColor.rgb + fMiePhase * secondaryFrontColor.rgb;
gl_FragColor.a = gl_FragColor.b;
gl_FragColor.rgb = pow(gl_FragColor.rgb, vec3(1.0/2.2));
vec3 finalColor = frontColor.rgb + fMiePhase * secondaryFrontColor.rgb;
gl_FragColor.a = finalColor.b;
gl_FragColor.rgb = pow(finalColor.rgb, vec3(1.0/2.2));
}

View file

@ -33,6 +33,9 @@
// Copyright (c) 2004 Sean O'Neil
//
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
uniform vec3 v3CameraPos; // The camera's current position
uniform vec3 v3LightPos; // The direction vector to the light source
uniform vec3 v3InvWavelength; // 1 / pow(wavelength, 4) for the red, green, and blue channels
@ -52,17 +55,14 @@ const float fSamples = 2.0;
varying vec3 position;
float scale(float fCos)
{
float x = 1.0 - fCos;
return fScaleDepth * exp(-0.00287 + x*(0.459 + x*(3.83 + x*(-6.80 + x*5.25))));
}
void main(void)
{
// Get the ray from the camera to the vertex, and its length (which is the far point of the ray passing through the atmosphere)
position = gl_Vertex.xyz * fOuterRadius;
gl_Position = gl_ModelViewProjectionMatrix * vec4(position, 1.0);
// standard transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
vec4 v4pos = vec4(position, 1.0);
<$transformModelToClipPos(cam, obj, v4pos, gl_Position)$>
}

View file

@ -108,7 +108,8 @@ void main (void)
float fMiePhase = 1.5 * ((1.0 - g2) / (2.0 + g2)) * (1.0 + fCos*fCos) / pow(1.0 + g2 - 2.0*g*fCos, 1.5);
vec3 color = v3FrontColor * (v3InvWavelength * fKrESun);
vec3 secondaryColor = v3FrontColor * fKmESun;
gl_FragColor.rgb = color + fMiePhase * secondaryColor;
gl_FragColor.a = gl_FragColor.b;
gl_FragColor.rgb = pow(gl_FragColor.rgb, vec3(1.0/2.2));
vec3 finalColor = color + fMiePhase * secondaryColor;
gl_FragColor.a = finalColor.b;
gl_FragColor.rgb = pow(finalColor.rgb, vec3(1.0/2.2));
}

View file

@ -1,5 +1,6 @@
#version 120
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// For licensing information, see http://http.developer.nvidia.com/GPUGems/gpugems_app01.html:
//
@ -32,12 +33,20 @@
// Copyright (c) 2004 Sean O'Neil
//
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
uniform float fOuterRadius; // The outer (atmosphere) radius
varying vec3 position;
void main(void)
{
void main(void) {
position = gl_Vertex.xyz * fOuterRadius;
gl_Position = gl_ModelViewProjectionMatrix * vec4(position, 1.0);
// standard transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
vec4 v4pos = vec4(position, 1.0);
<$transformModelToClipPos(cam, obj, v4pos, gl_Position)$>
}

View file

@ -451,7 +451,6 @@ TextRenderer3D::TextRenderer3D(const char* family, float pointSize, int weight,
EffectType effect, int effectThickness, const QColor& color) :
_effectType(effect),
_effectThickness(effectThickness),
_pointSize(pointSize),
_color(toGlm(color)),
_font(loadFont3D(family)) {
if (!_font) {

View file

@ -65,8 +65,6 @@ private:
// the thickness of the effect
const int _effectThickness;
const float _pointSize;
// text color
const glm::vec4 _color;

View file

@ -0,0 +1,29 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// model.frag
// fragment shader
//
// Created by Sam Gateau on 6/16/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
uniform sampler2D diffuseMap;
varying vec2 varTexcoord;
varying vec3 varEyeNormal;
varying vec4 varColor;
void main(void) {
vec4 diffuse = texture2D(diffuseMap, varTexcoord.st);
if (diffuse.a < 0.5) {
discard;
}
gl_FragColor = vec4(varColor * diffuse);
}

View file

@ -0,0 +1,40 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// overlay3D.slv
//
// Created by Sam Gateau on 6/16/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
//attribute vec2 texcoord;
varying vec2 varTexcoord;
// interpolated eye position
varying vec4 varEyePosition;
// the interpolated normal
varying vec3 varEyeNormal;
varying vec4 varColor;
void main(void) {
varTexcoord = gl_MultiTexCoord0.xy;
// pass along the color
varColor = gl_Color;
// standard transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
<$transformModelToEyeAndClipPos(cam, obj, gl_Vertex, varEyePosition, gl_Position)$>
<$transformModelToEyeDir(cam, obj, gl_Normal, varEyeNormal.xyz)$>
}

View file

@ -2,8 +2,20 @@ set(TARGET_NAME shared)
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
# TODO: there isn't really a good reason to have Script linked here - let's get what is requiring it out (RegisteredMetaTypes.cpp)
setup_hifi_library(Gui Network Script Widgets)
setup_hifi_library(Gui Network OpenGL Script Widgets)
setup_hifi_opengl()
add_dependency_external_projects(glm)
find_package(GLM REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${GLM_INCLUDE_DIRS})
target_include_directories(${TARGET_NAME} PUBLIC ${GLM_INCLUDE_DIRS})
if (WIN32)
add_dependency_external_projects(boostconfig)
find_package(BoostConfig REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${BOOSTCONFIG_INCLUDE_DIRS})
add_dependency_external_projects(oglplus)
find_package(OGLPLUS REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${OGLPLUS_INCLUDE_DIRS})
endif()

View file

@ -118,11 +118,15 @@ public:
template <typename Function>
void withPush(Function f) {
#ifndef NDEBUG
size_t startingDepth = size();
#endif
push();
f();
pop();
assert(startingDepth = size());
#ifndef NDEBUG
assert(startingDepth == size());
#endif
}
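withPush wraps a callable between push() and pop() and, in debug builds, asserts that the depth is unchanged afterwards. A standalone usage sketch follows; the Stack type here is a stand-in, since the enclosing class is not shown in this hunk.

#include <cassert>
#include <cstddef>
#include <vector>

// Stand-in type: only push()/pop()/size() and the withPush() pattern are assumed.
struct Stack {
    std::vector<int> frames;
    void push() { frames.push_back(0); }
    void pop() { frames.pop_back(); }
    size_t size() const { return frames.size(); }

    template <typename Function>
    void withPush(Function f) {
#ifndef NDEBUG
        size_t startingDepth = size();
#endif
        push();
        f();
        pop();
#ifndef NDEBUG
        assert(startingDepth == size());
#endif
    }
};

int main() {
    Stack stack;
    stack.withPush([&] {
        // work against the temporary top-of-stack frame here
    });
    assert(stack.size() == 0); // the depth is restored once the lambda returns
    return 0;
}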
template <typename Function>

View file

@ -0,0 +1,325 @@
#ifdef Q_OS_WIN
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "OglplusHelpers.h"
using namespace oglplus;
using namespace oglplus::shapes;
static const char * SIMPLE_TEXTURED_VS = R"VS(#version 410 core
#pragma line __LINE__
uniform mat4 Projection = mat4(1);
uniform mat4 ModelView = mat4(1);
layout(location = 0) in vec3 Position;
layout(location = 1) in vec2 TexCoord;
out vec2 vTexCoord;
void main() {
gl_Position = Projection * ModelView * vec4(Position, 1);
vTexCoord = TexCoord;
}
)VS";
static const char * SIMPLE_TEXTURED_FS = R"FS(#version 410 core
#pragma line __LINE__
uniform sampler2D sampler;
uniform float Alpha = 1.0;
in vec2 vTexCoord;
out vec4 vFragColor;
void main() {
vec4 c = texture(sampler, vTexCoord);
c.a = min(Alpha, c.a);
vFragColor = c;
}
)FS";
ProgramPtr loadDefaultShader() {
ProgramPtr result;
compileProgram(result, SIMPLE_TEXTURED_VS, SIMPLE_TEXTURED_FS);
return result;
}
void compileProgram(ProgramPtr & result, const std::string& vs, const std::string& fs) {
using namespace oglplus;
try {
result = ProgramPtr(new Program());
// attach the shaders to the program
result->AttachShader(
VertexShader()
.Source(GLSLSource(vs))
.Compile()
);
result->AttachShader(
FragmentShader()
.Source(GLSLSource(fs))
.Compile()
);
result->Link();
} catch (ProgramBuildError & err) {
Q_UNUSED(err);
Q_ASSERT_X(false, "compileProgram", "Failed to build shader program");
qFatal((const char*)err.Message);
result.reset();
}
}
ShapeWrapperPtr loadPlane(ProgramPtr program, float aspect) {
using namespace oglplus;
Vec3f a(1, 0, 0);
Vec3f b(0, 1, 0);
if (aspect > 1) {
b[1] /= aspect;
} else {
a[0] *= aspect;
}
return ShapeWrapperPtr(
new shapes::ShapeWrapper({ "Position", "TexCoord" }, shapes::Plane(a, b), *program)
);
}
// Return a point's cartesian coordinates on a sphere from pitch and yaw
static glm::vec3 getPoint(float yaw, float pitch) {
return glm::vec3(glm::cos(-pitch) * (-glm::sin(yaw)),
glm::sin(-pitch),
glm::cos(-pitch) * (-glm::cos(yaw)));
}
class SphereSection : public DrawingInstructionWriter, public DrawMode {
public:
using IndexArray = std::vector<GLuint>;
using PosArray = std::vector<float>;
using TexArray = std::vector<float>;
/// The type of the index container returned by Indices()
// vertex positions
PosArray _pos_data;
// vertex tex coords
TexArray _tex_data;
IndexArray _idx_data;
unsigned int _prim_count{ 0 };
public:
SphereSection(
const float fov,
const float aspectRatio,
const int slices_,
const int stacks_) {
//UV mapping source: http://www.mvps.org/directx/articles/spheremap.htm
if (fov >= PI) {
qDebug() << "TexturedHemisphere::buildVBO(): FOV greater or equal than Pi will create issues";
}
int gridSize = std::max(slices_, stacks_);
int gridSizeLog2 = 1;
while (1 << gridSizeLog2 < gridSize) {
++gridSizeLog2;
}
gridSize = (1 << gridSizeLog2) + 1;
// Compute number of vertices needed
int vertices = gridSize * gridSize;
_pos_data.resize(vertices * 3);
_tex_data.resize(vertices * 2);
// Compute vertices positions and texture UV coordinate
for (int y = 0; y <= gridSize; ++y) {
for (int x = 0; x <= gridSize; ++x) {
}
}
for (int i = 0; i < gridSize; i++) {
float stacksRatio = (float)i / (float)(gridSize - 1); // First stack is 0.0f, last stack is 1.0f
// abs(theta) <= fov / 2.0f
float pitch = -fov * (stacksRatio - 0.5f);
for (int j = 0; j < gridSize; j++) {
float slicesRatio = (float)j / (float)(gridSize - 1); // First slice is 0.0f, last slice is 1.0f
// abs(phi) <= fov * aspectRatio / 2.0f
float yaw = -fov * aspectRatio * (slicesRatio - 0.5f);
int vertex = i * gridSize + j;
int posOffset = vertex * 3;
int texOffset = vertex * 2;
vec3 pos = getPoint(yaw, pitch);
_pos_data[posOffset] = pos.x;
_pos_data[posOffset + 1] = pos.y;
_pos_data[posOffset + 2] = pos.z;
_tex_data[texOffset] = slicesRatio;
_tex_data[texOffset + 1] = stacksRatio;
}
} // done with vertices
int rowLen = gridSize;
// gridsize now refers to the triangles, not the vertices, so reduce by one
// or die by fencepost error http://en.wikipedia.org/wiki/Off-by-one_error
--gridSize;
int quads = gridSize * gridSize;
for (int t = 0; t < quads; ++t) {
int x =
((t & 0x0001) >> 0) |
((t & 0x0004) >> 1) |
((t & 0x0010) >> 2) |
((t & 0x0040) >> 3) |
((t & 0x0100) >> 4) |
((t & 0x0400) >> 5) |
((t & 0x1000) >> 6) |
((t & 0x4000) >> 7);
int y =
((t & 0x0002) >> 1) |
((t & 0x0008) >> 2) |
((t & 0x0020) >> 3) |
((t & 0x0080) >> 4) |
((t & 0x0200) >> 5) |
((t & 0x0800) >> 6) |
((t & 0x2000) >> 7) |
((t & 0x8000) >> 8);
int i = x * (rowLen) + y;
_idx_data.push_back(i);
_idx_data.push_back(i + 1);
_idx_data.push_back(i + rowLen + 1);
_idx_data.push_back(i + rowLen + 1);
_idx_data.push_back(i + rowLen);
_idx_data.push_back(i);
}
_prim_count = quads * 2;
}
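The quad-index loop above de-interleaves the even and odd bits of the counter t into grid coordinates (x, y), i.e. it walks the grid in Morton (Z-order) rather than row-major order. A small standalone sketch of the same bit shuffle, for illustration only:

#include <cstdio>

// Prints the (x, y) produced by the even/odd bit de-interleave used above.
// t = 0,1,2,3 maps to (0,0),(1,0),(0,1),(1,1).
int main() {
    for (int t = 0; t < 8; ++t) {
        int x = ((t & 0x0001) >> 0) | ((t & 0x0004) >> 1) | ((t & 0x0010) >> 2) |
                ((t & 0x0040) >> 3) | ((t & 0x0100) >> 4) | ((t & 0x0400) >> 5) |
                ((t & 0x1000) >> 6) | ((t & 0x4000) >> 7);
        int y = ((t & 0x0002) >> 1) | ((t & 0x0008) >> 2) | ((t & 0x0020) >> 3) |
                ((t & 0x0080) >> 4) | ((t & 0x0200) >> 5) | ((t & 0x0800) >> 6) |
                ((t & 0x2000) >> 7) | ((t & 0x8000) >> 8);
        std::printf("t=%d -> (x=%d, y=%d)\n", t, x, y);
    }
    return 0;
}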
/// Returns the winding direction of faces
FaceOrientation FaceWinding(void) const {
return FaceOrientation::CCW;
}
typedef GLuint(SphereSection::*VertexAttribFunc)(std::vector<GLfloat>&) const;
/// Makes the vertex positions and returns the number of values per vertex
template <typename T>
GLuint Positions(std::vector<T>& dest) const {
dest.clear();
dest.insert(dest.begin(), _pos_data.begin(), _pos_data.end());
return 3;
}
/// Makes the vertex normals and returns the number of values per vertex
template <typename T>
GLuint Normals(std::vector<T>& dest) const {
dest.clear();
return 3;
}
/// Makes the vertex tangents and returns the number of values per vertex
template <typename T>
GLuint Tangents(std::vector<T>& dest) const {
dest.clear();
return 3;
}
/// Makes the vertex bi-tangents and returns the number of values per vertex
template <typename T>
GLuint Bitangents(std::vector<T>& dest) const {
dest.clear();
return 3;
}
/// Makes the texture coordinates returns the number of values per vertex
template <typename T>
GLuint TexCoordinates(std::vector<T>& dest) const {
dest.clear();
dest.insert(dest.begin(), _tex_data.begin(), _tex_data.end());
return 2;
}
typedef VertexAttribsInfo<
SphereSection,
std::tuple<
VertexPositionsTag,
VertexNormalsTag,
VertexTangentsTag,
VertexBitangentsTag,
VertexTexCoordinatesTag
>
> VertexAttribs;
Spheref MakeBoundingSphere(void) const {
GLfloat min_x = _pos_data[3], max_x = _pos_data[3];
GLfloat min_y = _pos_data[4], max_y = _pos_data[4];
GLfloat min_z = _pos_data[5], max_z = _pos_data[5];
for (std::size_t v = 0, vn = _pos_data.size() / 3; v != vn; ++v) {
GLfloat x = _pos_data[v * 3 + 0];
GLfloat y = _pos_data[v * 3 + 1];
GLfloat z = _pos_data[v * 3 + 2];
if (min_x > x) min_x = x;
if (min_y > y) min_y = y;
if (min_z > z) min_z = z;
if (max_x < x) max_x = x;
if (max_y < y) max_y = y;
if (max_z < z) max_z = z;
}
Vec3f c(
(min_x + max_x) * 0.5f,
(min_y + max_y) * 0.5f,
(min_z + max_z) * 0.5f
);
return Spheref(
c.x(), c.y(), c.z(),
Distance(c, Vec3f(min_x, min_y, min_z))
);
}
/// Queries the bounding sphere coordinates and dimensions
template <typename T>
void BoundingSphere(oglplus::Sphere<T>& bounding_sphere) const {
bounding_sphere = oglplus::Sphere<T>(MakeBoundingSphere());
}
/// Returns element indices that are used with the drawing instructions
const IndexArray & Indices(Default = Default()) const {
return _idx_data;
}
/// Returns the instructions for rendering of faces
DrawingInstructions Instructions(PrimitiveType primitive) const {
DrawingInstructions instr = this->MakeInstructions();
DrawOperation operation;
operation.method = DrawOperation::Method::DrawElements;
operation.mode = primitive;
operation.first = 0;
operation.count = _prim_count * 3;
operation.restart_index = DrawOperation::NoRestartIndex();
operation.phase = 0;
this->AddInstruction(instr, operation);
return std::move(instr);
}
/// Returns the instructions for rendering of faces
DrawingInstructions Instructions(Default = Default()) const {
return Instructions(PrimitiveType::Triangles);
}
};
ShapeWrapperPtr loadSphereSection(ProgramPtr program, float fov, float aspect, int slices, int stacks) {
using namespace oglplus;
return ShapeWrapperPtr(
new shapes::ShapeWrapper({ "Position", "TexCoord" }, SphereSection(fov, aspect, slices, stacks), *program)
);
}
#endif

View file

@ -0,0 +1,173 @@
//
// Created by Bradley Austin Davis on 2015/05/26
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
// FIXME support oglplus on all platforms
// For now it's a convenient helper for Windows
#include <QtGlobal>
#ifdef Q_OS_WIN
#include "GLMHelpers.h"
#define OGLPLUS_USE_GLCOREARB_H 0
#if defined(__APPLE__)
#define OGLPLUS_USE_GL3_H 1
#elif defined(WIN32)
#define OGLPLUS_USE_GLEW 1
#pragma warning(disable : 4068)
#elif defined(ANDROID)
#else
#define OGLPLUS_USE_GLCOREARB_H 1
#endif
#define OGLPLUS_USE_BOOST_CONFIG 1
#define OGLPLUS_NO_SITE_CONFIG 1
#define OGLPLUS_LOW_PROFILE 1
#include <oglplus/gl.hpp>
#include <oglplus/all.hpp>
#include <oglplus/interop/glm.hpp>
#include <oglplus/bound/texture.hpp>
#include <oglplus/bound/framebuffer.hpp>
#include <oglplus/bound/renderbuffer.hpp>
#include <oglplus/shapes/wrapper.hpp>
#include <oglplus/shapes/plane.hpp>
#include "NumericalConstants.h"
using FramebufferPtr = std::shared_ptr<oglplus::Framebuffer>;
using ShapeWrapperPtr = std::shared_ptr<oglplus::shapes::ShapeWrapper>;
using BufferPtr = std::shared_ptr<oglplus::Buffer>;
using VertexArrayPtr = std::shared_ptr<oglplus::VertexArray>;
using ProgramPtr = std::shared_ptr<oglplus::Program>;
using Mat4Uniform = oglplus::Uniform<mat4>;
ProgramPtr loadDefaultShader();
void compileProgram(ProgramPtr & result, const std::string& vs, const std::string& fs);
ShapeWrapperPtr loadPlane(ProgramPtr program, float aspect = 1.0f);
ShapeWrapperPtr loadSphereSection(ProgramPtr program, float fov = PI / 3.0f * 2.0f, float aspect = 16.0f / 9.0f, int slices = 32, int stacks = 32);
// A basic wrapper for constructing a framebuffer with a renderbuffer
// for the depth attachment and an undefined type for the color attachment.
// This allows us to reuse the basic framebuffer code for both the Mirror
// FBO as well as the Oculus swap textures we will use to render the scene.
// We don't really need depth at all for the mirror FBO, or even an
// FBO, but using one means we can just use a glBlitFramebuffer to get it onto
// the screen.
template <
typename C,
typename D
>
struct FramebufferWrapper {
uvec2 size;
oglplus::Framebuffer fbo;
C color;
D depth;
FramebufferWrapper() {}
virtual ~FramebufferWrapper() {
}
virtual void Init(const uvec2 & size) {
this->size = size;
initColor();
initDepth();
initDone();
}
template <typename F>
void Bound(F f) {
Bound(oglplus::Framebuffer::Target::Draw, f);
}
template <typename F>
void Bound(oglplus::Framebuffer::Target target , F f) {
fbo.Bind(target);
onBind(target);
f();
onUnbind(target);
oglplus::DefaultFramebuffer().Bind(target);
}
void Viewport() {
oglplus::Context::Viewport(size.x, size.y);
}
protected:
virtual void onBind(oglplus::Framebuffer::Target target) {}
virtual void onUnbind(oglplus::Framebuffer::Target target) {}
static GLenum toEnum(oglplus::Framebuffer::Target target) {
switch (target) {
case oglplus::Framebuffer::Target::Draw:
return GL_DRAW_FRAMEBUFFER;
case oglplus::Framebuffer::Target::Read:
return GL_READ_FRAMEBUFFER;
default:
Q_ASSERT(false);
return GL_FRAMEBUFFER;
}
}
virtual void initDepth() {}
virtual void initColor() {}
virtual void initDone() = 0;
};
struct BasicFramebufferWrapper : public FramebufferWrapper <oglplus::Texture, oglplus::Renderbuffer> {
protected:
virtual void initDepth() override {
using namespace oglplus;
Context::Bound(Renderbuffer::Target::Renderbuffer, depth)
.Storage(
PixelDataInternalFormat::DepthComponent,
size.x, size.y);
}
virtual void initColor() override {
using namespace oglplus;
Context::Bound(oglplus::Texture::Target::_2D, color)
.MinFilter(TextureMinFilter::Linear)
.MagFilter(TextureMagFilter::Linear)
.WrapS(TextureWrap::ClampToEdge)
.WrapT(TextureWrap::ClampToEdge)
.Image2D(
0, PixelDataInternalFormat::RGBA8,
size.x, size.y,
0, PixelDataFormat::RGB, PixelDataType::UnsignedByte, nullptr
);
}
virtual void initDone() override {
using namespace oglplus;
static const Framebuffer::Target target = Framebuffer::Target::Draw;
Bound(target, [&] {
fbo.AttachTexture(target, FramebufferAttachment::Color, color, 0);
fbo.AttachRenderbuffer(target, FramebufferAttachment::Depth, depth);
fbo.Complete(target);
});
}
};
using BasicFramebufferWrapperPtr = std::shared_ptr<BasicFramebufferWrapper>;
#endif
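A minimal usage sketch for the wrappers declared above, assuming a current OpenGL context and the Windows-only (Q_OS_WIN) build this header is guarded by; drawScene() is a placeholder, not an existing function.

#include "OglplusHelpers.h"

// Sketch only: allocate an offscreen color/depth target and render into it.
void renderOffscreen(const uvec2& size) {
    BasicFramebufferWrapper fbo;
    fbo.Init(size);        // builds the color texture and the depth renderbuffer
    fbo.Bound([&] {        // binds the draw framebuffer for the duration of the lambda
        fbo.Viewport();    // match the GL viewport to the FBO size
        // drawScene();    // placeholder for the actual rendering
    });
    // The default framebuffer is re-bound automatically when Bound() returns.
}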

View file

@ -55,7 +55,7 @@ const int16_t COLLISION_GROUP_COLLISIONLESS = 1 << 15;
const int16_t COLLISION_MASK_DEFAULT = ~ COLLISION_GROUP_COLLISIONLESS;
// STATIC also doesn't collide with other STATIC
const int16_t COLLISION_MASK_STATIC = ~ (COLLISION_GROUP_COLLISIONLESS | COLLISION_MASK_STATIC);
const int16_t COLLISION_MASK_STATIC = ~ (COLLISION_GROUP_COLLISIONLESS | COLLISION_GROUP_STATIC);
const int16_t COLLISION_MASK_KINEMATIC = COLLISION_MASK_DEFAULT;

View file

@ -13,6 +13,8 @@
#define hifi_RenderArgs_h
#include <functional>
#include <memory>
class AABox;
class OctreeRenderer;
@ -20,6 +22,7 @@ class ViewFrustum;
namespace gpu {
class Batch;
class Context;
class Texture;
}
class RenderDetails {
@ -109,6 +112,8 @@ public:
gpu::Batch* _batch = nullptr;
ShoudRenderFunctor _shouldRender;
std::shared_ptr<gpu::Texture> _whiteTexture;
RenderDetails _details;
float _alphaThreshold = 0.5f;

View file

@ -13,35 +13,61 @@
#include "SharedLogging.h"
#include "VariantMapToScriptValue.h"
QScriptValue variantToScriptValue(QVariant& qValue, QScriptEngine& scriptEngine) {
switch(qValue.type()) {
case QVariant::Bool:
return qValue.toBool();
break;
case QVariant::Int:
return qValue.toInt();
break;
case QVariant::Double:
return qValue.toDouble();
break;
case QVariant::String: {
return scriptEngine.newVariant(qValue);
break;
}
case QVariant::Map: {
QVariantMap childMap = qValue.toMap();
return variantMapToScriptValue(childMap, scriptEngine);
break;
}
case QVariant::List: {
QVariantList childList = qValue.toList();
return variantListToScriptValue(childList, scriptEngine);
break;
}
default:
qCDebug(shared) << "unhandled QScript type" << qValue.type();
break;
}
return QScriptValue();
}
QScriptValue variantMapToScriptValue(QVariantMap& variantMap, QScriptEngine& scriptEngine) {
QScriptValue scriptValue = scriptEngine.newObject();
for (QVariantMap::const_iterator iter = variantMap.begin(); iter != variantMap.end(); ++iter) {
QString key = iter.key();
QVariant qValue = iter.value();
switch(qValue.type()) {
case QVariant::Bool:
scriptValue.setProperty(key, qValue.toBool());
break;
case QVariant::Int:
scriptValue.setProperty(key, qValue.toInt());
break;
case QVariant::Double:
scriptValue.setProperty(key, qValue.toDouble());
break;
case QVariant::String: {
scriptValue.setProperty(key, scriptEngine.newVariant(qValue));
break;
}
case QVariant::Map: {
QVariantMap childMap = qValue.toMap();
scriptValue.setProperty(key, variantMapToScriptValue(childMap, scriptEngine));
break;
}
default:
qCDebug(shared) << "unhandled QScript type" << qValue.type();
}
scriptValue.setProperty(key, variantToScriptValue(qValue, scriptEngine));
}
return scriptValue;
}
QScriptValue variantListToScriptValue(QVariantList& variantList, QScriptEngine& scriptEngine) {
QScriptValue scriptValue = scriptEngine.newObject();
scriptValue.setProperty("length", variantList.size());
int i = 0;
foreach (QVariant v, variantList) {
scriptValue.setProperty(i++, variantToScriptValue(v, scriptEngine));
}
return scriptValue;

View file

@ -13,4 +13,6 @@
#include <QScriptValue>
#include <QScriptEngine>
QScriptValue variantToScriptValue(QVariant& qValue, QScriptEngine& scriptEngine);
QScriptValue variantMapToScriptValue(QVariantMap& variantMap, QScriptEngine& scriptEngine);
QScriptValue variantListToScriptValue(QVariantList& variantList, QScriptEngine& scriptEngine);
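A small usage sketch of the refactored helpers declared above, assuming a live QScriptEngine; it exercises the nested map/list case that the new variantToScriptValue dispatch handles.

#include <QScriptEngine>
#include <QVariant>
#include "VariantMapToScriptValue.h"

// Sketch: convert a QVariantMap containing a nested QVariantList into a QScriptValue.
QScriptValue buildExample(QScriptEngine& engine) {
    QVariantMap map;
    map["enabled"] = true;
    map["count"] = 3;
    QVariantList tags;
    tags << "red" << "green" << "blue";
    map["tags"] = tags;
    return variantMapToScriptValue(map, engine); // nested lists and maps recurse via variantToScriptValue
}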

View file

@ -135,8 +135,8 @@ void VrMenu::setRootMenu(QObject* rootMenu) {
void VrMenu::addMenu(QMenu* menu) {
Q_ASSERT(!MenuUserData::forObject(menu));
QObject * parent = menu->parent();
QObject * qmlParent;
QObject* parent = menu->parent();
QObject* qmlParent = nullptr;
if (dynamic_cast<QMenu*>(parent)) {
MenuUserData* userData = MenuUserData::forObject(parent);
qmlParent = findMenuObject(userData->uuid.toString());

View file

@ -322,6 +322,7 @@ VHACDUtilApp::VHACDUtilApp(int argc, char* argv[]) :
QString outputFileName = baseFileName + "-" + QString::number(count) + ".obj";
writeOBJ(outputFileName, fbx, outputCentimeters, count);
count++;
(void)meshPart; // quiet warning
}
}
}