Mirror of https://github.com/lubosz/overte.git (synced 2025-04-23 23:33:48 +02:00)

Commit abf83bcf9e: Merge branch 'master' of github.com:highfidelity/hifi into island

27 changed files with 985 additions and 1290 deletions
@@ -162,6 +162,7 @@ option(GET_GLM "Get GLM library automatically as external project" 1)
option(GET_GVERB "Get Gverb library automatically as external project" 1)
option(GET_SOXR "Get Soxr library automatically as external project" 1)
option(GET_TBB "Get Threading Building Blocks library automatically as external project" 1)
option(GET_LIBOVR "Get LibOVR library automatically as external project" 1)
option(USE_NSIGHT "Attempt to find the nSight libraries" 1)

if (WIN32)

cmake/externals/LibOVR/CMakeLists.txt (new file, 85 lines, vendored)
@@ -0,0 +1,85 @@
include(ExternalProject)
include(SelectLibraryConfigurations)

set(EXTERNAL_NAME LibOVR)

string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)

if (WIN32)

  ExternalProject_Add(
    ${EXTERNAL_NAME}
    URL http://static.oculus.com/sdk-downloads/ovr_sdk_win_0.5.0.1.zip
    URL_MD5 d3fc4c02db9be5ff08af4ef4c97b32f9
    CONFIGURE_COMMAND ""
    BUILD_COMMAND ""
    INSTALL_COMMAND ""
    LOG_DOWNLOAD 1
  )

  ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)

  # FIXME need to account for different architectures
  set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/LibOVR/Include CACHE TYPE INTERNAL)
  set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/LibOVR/Lib/Windows/Win32/Release/VS2013/LibOVR.lib CACHE TYPE INTERNAL)

elseif(APPLE)

  ExternalProject_Add(
    ${EXTERNAL_NAME}
    URL http://static.oculus.com/sdk-downloads/ovr_sdk_macos_0.5.0.1.tar.gz
    URL_MD5 0a0785a04fb285f64f62267388344ad6
    CONFIGURE_COMMAND ""
    BUILD_COMMAND ""
    INSTALL_COMMAND ""
    LOG_DOWNLOAD 1
  )

  ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
  # In theory we should use the Headers path inside the framework, as seen here
  # but unfortunately Oculus doesn't seem to have figured out automated testing
  # so they released a framework with missing headers.
  #set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/LibOVR/Lib/Mac/Release/LibOVR.framework/Headers/ CACHE TYPE INTERNAL)

  # Work around the broken framework by using a different path for the headers.
  set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/LibOVR/Include CACHE TYPE INTERNAL)
  set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/LibOVR/Lib/Mac/Release/LibOVR.framework/LibOVR CACHE TYPE INTERNAL)

elseif(NOT ANDROID)

  # http://static.oculus.com/sdk-downloads/ovr_sdk_linux_0.4.4.tar.xz
  # ec3bd8cff4a1461b4e21210e7feb0572
  ExternalProject_Add(
    ${EXTERNAL_NAME}
    PREFIX ${EXTERNAL_NAME}
    GIT_REPOSITORY https://github.com/jherico/OculusSDK.git
    GIT_TAG b9832379a401640c5f615ed75a60edaf09be64ef
    CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
    LOG_DOWNLOAD ON
  )

  ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
  ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)

  set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/libovr.a CACHE TYPE INTERNAL)
  set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG "" CACHE TYPE INTERNAL)

  find_package(Threads REQUIRED)
  find_package(X11 REQUIRED)

  # Check for XRandR (modern resolution switching and gamma control)
  if (NOT X11_Xrandr_FOUND)
    message(FATAL_ERROR "The RandR library and headers were not found")
  endif()

  set(${EXTERNAL_NAME_UPPER}_LIBRARY_EXTRAS rt udev ${CMAKE_THREAD_LIBS_INIT} ${X11_X11_LIB} ${X11_Xrandr_LIB})

  select_library_configurations(${EXTERNAL_NAME_UPPER})

  set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/LibOVR/Include ${SOURCE_DIR}/LibOVR/Src CACHE TYPE INTERNAL)
  set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${${EXTERNAL_NAME_UPPER}_LIBRARY} ${${EXTERNAL_NAME_UPPER}_LIBRARY_EXTRAS} CACHE TYPE INTERNAL)
endif()

@@ -18,48 +18,12 @@
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#

include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("libovr")

include(SelectLibraryConfigurations)

if (NOT ANDROID)

  find_path(LIBOVR_INCLUDE_DIRS OVR.h PATH_SUFFIXES Include HINTS ${LIBOVR_SEARCH_DIRS})
  find_path(LIBOVR_SRC_DIR Util_Render_Stereo.h PATH_SUFFIXES Src/Util HINTS ${LIBOVR_SEARCH_DIRS})

  if (APPLE)
    find_library(LIBOVR_LIBRARY_DEBUG NAMES ovr PATH_SUFFIXES Lib/Mac/Debug HINTS ${LIBOVR_SEARCH_DIRS})
    find_library(LIBOVR_LIBRARY_RELEASE NAMES ovr PATH_SUFFIXES Lib/Mac/Release HINTS ${LIBOVR_SEARCH_DIRS})
    find_library(ApplicationServices ApplicationServices)
    find_library(IOKit IOKit)
  elseif (UNIX)
    find_library(UDEV_LIBRARY_RELEASE udev /usr/lib/x86_64-linux-gnu/)
    find_library(XINERAMA_LIBRARY_RELEASE Xinerama /usr/lib/x86_64-linux-gnu/)

    if (CMAKE_CL_64)
      set(LINUX_ARCH_DIR "i386")
    else()
      set(LINUX_ARCH_DIR "x86_64")
    endif()

    find_library(LIBOVR_LIBRARY_DEBUG NAMES ovr PATH_SUFFIXES Lib/Linux/Debug/${LINUX_ARCH_DIR} HINTS ${LIBOVR_SEARCH_DIRS})
    find_library(LIBOVR_LIBRARY_RELEASE NAMES ovr PATH_SUFFIXES Lib/Linux/Release/${LINUX_ARCH_DIR} HINTS ${LIBOVR_SEARCH_DIRS})

    select_library_configurations(UDEV)
    select_library_configurations(XINERAMA)

  elseif (WIN32)
    if (MSVC10)
      find_library(LIBOVR_LIBRARY_DEBUG NAMES libovrd PATH_SUFFIXES Lib/Win32/VS2010 HINTS ${LIBOVR_SEARCH_DIRS})
      find_library(LIBOVR_LIBRARY_RELEASE NAMES libovr PATH_SUFFIXES Lib/Win32/VS2010 HINTS ${LIBOVR_SEARCH_DIRS})
    elseif (MSVC12)
      find_library(LIBOVR_LIBRARY_DEBUG NAMES libovrd PATH_SUFFIXES Lib/Win32/VS2013 HINTS ${LIBOVR_SEARCH_DIRS})
      find_library(LIBOVR_LIBRARY_RELEASE NAMES libovr PATH_SUFFIXES Lib/Win32/VS2013 HINTS ${LIBOVR_SEARCH_DIRS})
    endif ()
    find_package(ATL)
  endif ()

  include(FindPackageHandleStandardArgs)
  find_package_handle_standard_args(LIBOVR DEFAULT_MSG LIBOVR_INCLUDE_DIRS LIBOVR_LIBRARIES)

else (NOT ANDROID)
  set(_VRLIB_JNI_DIR "VRLib/jni")
  set(_VRLIB_LIBS_DIR "VRLib/obj/local/armeabi-v7a")

@@ -76,31 +40,4 @@ else (NOT ANDROID)
  find_library(TURBOJPEG_LIBRARY NAMES jpeg PATH_SUFFIXES 3rdParty/turbojpeg HINTS ${LIBOVR_SEARCH_DIRS})
endif (NOT ANDROID)

select_library_configurations(LIBOVR)
set(LIBOVR_LIBRARIES ${LIBOVR_LIBRARY})

list(APPEND LIBOVR_ARGS_LIST LIBOVR_INCLUDE_DIRS LIBOVR_SRC_DIR LIBOVR_LIBRARY)

if (APPLE)
  list(APPEND LIBOVR_LIBRARIES ${IOKit} ${ApplicationServices})
  list(APPEND LIBOVR_ARGS_LIST IOKit ApplicationServices)
elseif (ANDROID)
  list(APPEND LIBOVR_ANDROID_LIBRARIES "-lGLESv3" "-lEGL" "-landroid" "-lOpenMAXAL" "-llog" "-lz" "-lOpenSLES")
  list(APPEND LIBOVR_ARGS_LIST LIBOVR_ANDROID_LIBRARIES LIBOVR_VRLIB_DIR MINIZIP_DIR JNI_DIR TURBOJPEG_LIBRARY)
elseif (UNIX)
  list(APPEND LIBOVR_LIBRARIES "${UDEV_LIBRARY}" "${XINERAMA_LIBRARY}")
  list(APPEND LIBOVR_ARGS_LIST UDEV_LIBRARY XINERAMA_LIBRARY)
elseif (WIN32)
  list(APPEND LIBOVR_LIBRARIES ${ATL_LIBRARIES})
  list(APPEND LIBOVR_ARGS_LIST ATL_LIBRARIES)
endif ()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(LibOVR DEFAULT_MSG ${LIBOVR_ARGS_LIST})

if (ANDROID)
  list(APPEND LIBOVR_INCLUDE_DIRS ${LIBOVR_SRC_DIR} ${MINIZIP_DIR} ${JNI_DIR})
endif ()

mark_as_advanced(LIBOVR_INCLUDE_DIRS LIBOVR_LIBRARIES LIBOVR_SEARCH_DIRS)
@ -4,6 +4,7 @@
|
|||
//
|
||||
// Created by Brad Hefta-Gaub on 12/31/13.
|
||||
// Modified by Philip on 3/3/14
|
||||
// Modified by Thijs Wenker on 3/31/15
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// This is an example script that turns the hydra controllers and mouse into an entity gun.
|
||||
|
@ -66,7 +67,7 @@ var impactSound = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Guns/BulletIm
|
|||
var targetHitSound = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Space%20Invaders/hit.raw");
|
||||
var targetLaunchSound = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Space%20Invaders/shoot.raw");
|
||||
|
||||
var gunModel = "http://public.highfidelity.io/models/attachments/HaloGun.fst";
|
||||
var gunModel = "https://s3.amazonaws.com/hifi-public/cozza13/gun/m1911-handgun+1.fbx?v=4";
|
||||
|
||||
var audioOptions = {
|
||||
volume: 0.9
|
||||
|
@ -103,7 +104,7 @@ var reticle = Overlays.addOverlay("image", {
|
|||
y: screenSize.y / 2 - (BUTTON_SIZE / 2),
|
||||
width: BUTTON_SIZE,
|
||||
height: BUTTON_SIZE,
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/billiardsReticle.png",
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/gun/crosshairs.svg",
|
||||
alpha: 1
|
||||
});
|
||||
|
||||
|
@ -112,8 +113,8 @@ var offButton = Overlays.addOverlay("image", {
|
|||
y: screenSize.y - (BUTTON_SIZE + PADDING),
|
||||
width: BUTTON_SIZE,
|
||||
height: BUTTON_SIZE,
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/close.png",
|
||||
alpha: 1
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/gun/close.svg",
|
||||
alpha: 1
|
||||
});
|
||||
|
||||
startX += BUTTON_SIZE + PADDING;
|
||||
|
@ -122,7 +123,7 @@ var platformButton = Overlays.addOverlay("image", {
|
|||
y: screenSize.y - (BUTTON_SIZE + PADDING),
|
||||
width: BUTTON_SIZE,
|
||||
height: BUTTON_SIZE,
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/city.png",
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/gun/platform-targets.svg",
|
||||
alpha: 1
|
||||
});
|
||||
|
||||
|
@ -132,7 +133,7 @@ var gridButton = Overlays.addOverlay("image", {
|
|||
y: screenSize.y - (BUTTON_SIZE + PADDING),
|
||||
width: BUTTON_SIZE,
|
||||
height: BUTTON_SIZE,
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/blocks.png",
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/gun/floating-targets.svg",
|
||||
alpha: 1
|
||||
});
|
||||
|
||||
|
@ -168,7 +169,7 @@ function shootBullet(position, velocity, grenade) {
|
|||
{ type: "Sphere",
|
||||
position: position,
|
||||
dimensions: { x: bSize, y: bSize, z: bSize },
|
||||
color: { red: 255, green: 0, blue: 0 },
|
||||
color: { red: 0, green: 0, blue: 0 },
|
||||
velocity: bVelocity,
|
||||
lifetime: BULLET_LIFETIME,
|
||||
gravity: { x: 0, y: bGravity, z: 0 },
|
||||
|
@ -265,6 +266,7 @@ function makeGrid(type, scale, size) {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
function makePlatform(gravity, scale, size) {
|
||||
var separation = scale * 2;
|
||||
var pos = Vec3.sum(Camera.getPosition(), Vec3.multiply(10.0 * scale * separation, Quat.getFront(Camera.getOrientation())));
|
||||
|
@ -377,8 +379,8 @@ function takeFiringPose() {
|
|||
}
|
||||
}
|
||||
|
||||
MyAvatar.attach(gunModel, "RightHand", {x:0.02, y: 0.11, z: 0.04}, Quat.fromPitchYawRollDegrees(-0, -160, -79), 0.20);
|
||||
MyAvatar.attach(gunModel, "LeftHand", {x:-0.02, y: 0.11, z: 0.04}, Quat.fromPitchYawRollDegrees(0, 0, 79), 0.20);
|
||||
MyAvatar.attach(gunModel, "RightHand", {x:0.04, y: 0.22, z: 0.02}, Quat.fromPitchYawRollDegrees(-172, -85, 79), 0.40);
|
||||
MyAvatar.attach(gunModel, "LeftHand", {x:-0.04, y: 0.22, z: 0.02}, Quat.fromPitchYawRollDegrees(-172, 85, -79), 0.40);
|
||||
|
||||
// Give a bit of time to load before playing sound
|
||||
Script.setTimeout(playLoadSound, 2000);
|
||||
|
|
@@ -14,7 +14,6 @@ Script.load("selectAudioDevice.js");
Script.load("controllers/hydra/hydraMove.js");
Script.load("headMove.js");
Script.load("inspect.js");
Script.load("lobby.js");
Script.load("notifications.js");
Script.load("look.js");
Script.load("users.js");

@@ -16,8 +16,6 @@ HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
Script.include([
    "libraries/stringHelpers.js",
    "libraries/dataviewHelpers.js",
    "libraries/httpMultiPart.js",
    "libraries/modelUploader.js",
    "libraries/toolBars.js",
    "libraries/progressDialog.js",
@ -8,27 +8,82 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
var intensity = 1.0;
|
||||
var day = 0.0;
|
||||
var hour = 12.0;
|
||||
var longitude = 115.0;
|
||||
var latitude = 31.0;
|
||||
var stageOrientation = Quat.fromPitchYawRollDegrees(0.0, 180.0, 0.0);
|
||||
Script.include("../../utilities/tools/cookies.js");
|
||||
|
||||
Scene.setStageDayTime(hour);
|
||||
Scene.setStageOrientation(stageOrientation);
|
||||
Scene.setStageLocation(longitude, latitude, 0.0);
|
||||
/*
|
||||
function ticktack() {
|
||||
hour += 0.1;
|
||||
//Scene.setSunIntensity(Math.cos(time));
|
||||
if (hour > 24.0) {
|
||||
hour = 0.0;
|
||||
day++;
|
||||
Scene.setStageYearTime(day);
|
||||
var panel = new Panel(10, 400);
|
||||
|
||||
panel.newSlider("Origin Longitude", -180, 180,
|
||||
function(value) { Scene.setStageLocation(value, Scene.getStageLocationLatitude(), Scene.getStageLocationAltitude()); },
|
||||
function() { return Scene.getStageLocationLongitude(); },
|
||||
function(value) { return value.toFixed(0) + " deg"; }
|
||||
);
|
||||
|
||||
panel.newSlider("Origin Latitude", -90, 90,
|
||||
function(value) { Scene.setStageLocation(Scene.getStageLocationLongitude(), value, Scene.getStageLocationAltitude()); },
|
||||
function() { return Scene.getStageLocationLatitude(); },
|
||||
function(value) { return value.toFixed(0) + " deg"; }
|
||||
);
|
||||
|
||||
panel.newSlider("Origin Altitude", 0, 1000,
|
||||
function(value) { Scene.setStageLocation(Scene.getStageLocationLongitude(), Scene.getStageLocationLatitude(), value); },
|
||||
function() { return Scene.getStageLocationAltitude(); },
|
||||
function(value) { return (value).toFixed(0) + " km"; }
|
||||
);
|
||||
|
||||
panel.newSlider("Year Time", 0, 364,
|
||||
function(value) { Scene.setStageYearTime(value); },
|
||||
function() { return Scene.getStageYearTime(); },
|
||||
function(value) {
|
||||
var numDaysPerMonth = 365.0 / 12.0;
|
||||
var monthly = (value / numDaysPerMonth);
|
||||
var month = Math.floor(monthly);
|
||||
return (month + 1).toFixed(0) + "/" + Math.ceil(0.5 + (monthly - month)*Math.ceil(numDaysPerMonth)).toFixed(0); }
|
||||
);
|
||||
|
||||
panel.newSlider("Day Time", 0, 24,
|
||||
function(value) { Scene.setStageDayTime(value); },
|
||||
function() { return Scene.getStageDayTime(); },
|
||||
function(value) {
|
||||
var hour = Math.floor(value);
|
||||
return (hour).toFixed(0) + ":" + ((value - hour)*60.0).toFixed(0);
|
||||
}
|
||||
Scene.setStageDayTime(hour);
|
||||
}
|
||||
);
|
||||
|
||||
Script.setInterval(ticktack, 41);
|
||||
*/
|
||||
var tickTackPeriod = 50;
|
||||
var tickTackSpeed = 0.0;
|
||||
panel.newSlider("Tick tack time", -1.0, 1.0,
|
||||
function(value) { tickTackSpeed = value; },
|
||||
function() { return tickTackSpeed; },
|
||||
function(value) { return (value).toFixed(2); }
|
||||
);
|
||||
|
||||
function runStageTime() {
|
||||
if (tickTackSpeed != 0.0) {
|
||||
var hour = panel.get("Day Time");
|
||||
hour += tickTackSpeed;
|
||||
panel.set("Day Time", hour);
|
||||
|
||||
if (hour >= 24.0) {
|
||||
panel.set("Year Time", panel.get("Year Time") + 1);
|
||||
} else if (hour < 0.0) {
|
||||
panel.set("Year Time", panel.get("Year Time") - 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
Script.setInterval(runStageTime, tickTackPeriod);
|
||||
|
||||
panel.newSlider("Light Intensity", 0.0, 5,
|
||||
function(value) { Scene.setSunIntensity(value); },
|
||||
function() { return Scene.getSunIntensity(); },
|
||||
function(value) { return (value).toFixed(2); }
|
||||
);
|
||||
|
||||
Controller.mouseMoveEvent.connect(function panelMouseMoveEvent(event) { return panel.mouseMoveEvent(event); });
|
||||
Controller.mousePressEvent.connect( function panelMousePressEvent(event) { return panel.mousePressEvent(event); });
|
||||
Controller.mouseReleaseEvent.connect(function(event) { return panel.mouseReleaseEvent(event); });
|
||||
|
||||
function scriptEnding() {
|
||||
Menu.removeMenu("Developer > Scene");
|
||||
panel.destroy();
|
||||
}
|
||||
Script.scriptEnding.connect(scriptEnding);
|
||||
|
|
|
@ -312,7 +312,7 @@
|
|||
}
|
||||
|
||||
elTextText.value = properties.text;
|
||||
elTextLineHeight.value = properties.lineHeight;
|
||||
elTextLineHeight.value = properties.lineHeight.toFixed(4);
|
||||
elTextTextColorRed.value = properties.textColor.red;
|
||||
elTextTextColorGreen.value = properties.textColor.green;
|
||||
elTextTextColorBlue.value = properties.textColor.blue;
|
||||
|
@ -477,6 +477,28 @@
|
|||
ev.initEvent("change", true, true);
|
||||
document.activeElement.dispatchEvent(ev);
|
||||
}
|
||||
|
||||
// For input and textarea elements, select all of the text on focus
|
||||
// WebKit-based browsers, such as the one used with QWebView, have a quirk
|
||||
// where the mouseup event comes after the focus event, causing the
|
||||
// text to be deselected immediately after selecting all of the text.
|
||||
// To make this work we block the first mouseup event after the elements
|
||||
// received focus. If we block all mouseup events the user will not
|
||||
// be able to click within the selected text.
|
||||
var els = document.querySelectorAll("input, textarea");
|
||||
for (var i = 0; i < els.length; i++) {
|
||||
var clicked = false;
|
||||
els[i].onfocus = function() {
|
||||
this.select();
|
||||
clicked = false;
|
||||
};
|
||||
els[i].onmouseup = function(e) {
|
||||
if (!clicked) {
|
||||
e.preventDefault();
|
||||
clicked = true;
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
|
@ -723,7 +745,7 @@
|
|||
<div class="text-section property">
|
||||
<div class="label">Line Height</div>
|
||||
<div class="value">
|
||||
<input class="coord" type='number' id="property-text-line-height"></input>
|
||||
<input class="coord" type='number' id="property-text-line-height" min="0" step="0.005"></input>
|
||||
</div>
|
||||
</div>
|
||||
<div class="text-section property">
|
||||
|
|
|
@ -275,3 +275,29 @@ td {
|
|||
font-weight: bold;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
input[type="number"] {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
/* Spin Buttons modified - credit for original implementation goes to http://jsfiddle.net/Volker_E/WwfW9/ */
|
||||
input[type="number"]::-webkit-outer-spin-button,
|
||||
input[type="number"]::-webkit-inner-spin-button {
|
||||
-webkit-appearance: none;
|
||||
background: #FFF url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAkAAAAJCAYAAADgkQYQAAAAKUlEQVQYlWNgwAT/sYhhKPiPT+F/LJgEsHv37v+EMGkmkuImoh2NoQAANlcun/q4OoYAAAAASUVORK5CYII=) no-repeat center center;
|
||||
width: 0.9em;
|
||||
height: 4px;
|
||||
opacity: 0.5; /* shows Spin Buttons by default (Chrome >= 39) */
|
||||
top: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
border-top-left-radius: 3px;
|
||||
border-top-right-radius: 3px;
|
||||
border-bottom-right-radius: 3px;
|
||||
border-bottom-left-radius: 3px;
|
||||
}
|
||||
|
||||
input[type="number"]::-webkit-inner-spin-button:hover,
|
||||
input[type="number"]::-webkit-inner-spin-button:active{
|
||||
opacity: .8;
|
||||
}
|
||||
|
|
|
@ -1,693 +0,0 @@
|
|||
//
|
||||
// modelUploader.js
|
||||
// examples/libraries
|
||||
//
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
|
||||
modelUploader = (function () {
|
||||
var that = {},
|
||||
modelFile,
|
||||
modelName,
|
||||
modelURL,
|
||||
modelCallback,
|
||||
isProcessing,
|
||||
fstBuffer,
|
||||
fbxBuffer,
|
||||
//svoBuffer,
|
||||
mapping,
|
||||
geometry,
|
||||
API_URL = "https://metaverse.highfidelity.com/api/v1/models",
|
||||
MODEL_URL = "http://public.highfidelity.com/models/content",
|
||||
NAME_FIELD = "name",
|
||||
SCALE_FIELD = "scale",
|
||||
FILENAME_FIELD = "filename",
|
||||
TEXDIR_FIELD = "texdir",
|
||||
MAX_TEXTURE_SIZE = 1024;
|
||||
|
||||
function info(message) {
|
||||
if (progressDialog.isOpen()) {
|
||||
progressDialog.update(message);
|
||||
} else {
|
||||
progressDialog.open(message);
|
||||
}
|
||||
print(message);
|
||||
}
|
||||
|
||||
function error(message) {
|
||||
if (progressDialog.isOpen()) {
|
||||
progressDialog.close();
|
||||
}
|
||||
print(message);
|
||||
Window.alert(message);
|
||||
}
|
||||
|
||||
function randomChar(length) {
|
||||
var characters = "0123457689abcdefghijklmnopqrstuvwxyz",
|
||||
string = "",
|
||||
i;
|
||||
|
||||
for (i = 0; i < length; i += 1) {
|
||||
string += characters[Math.floor(Math.random() * 36)];
|
||||
}
|
||||
|
||||
return string;
|
||||
}
|
||||
|
||||
function resetDataObjects() {
|
||||
fstBuffer = null;
|
||||
fbxBuffer = null;
|
||||
//svoBuffer = null;
|
||||
mapping = {};
|
||||
geometry = {};
|
||||
geometry.textures = [];
|
||||
geometry.embedded = [];
|
||||
}
|
||||
|
||||
function readFile(filename) {
|
||||
var url = "file:///" + filename,
|
||||
req = new XMLHttpRequest();
|
||||
|
||||
req.open("GET", url, false);
|
||||
req.responseType = "arraybuffer";
|
||||
req.send();
|
||||
if (req.status !== 200) {
|
||||
error("Could not read file: " + filename + " : " + req.statusText);
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
filename: filename.fileName(),
|
||||
buffer: req.response
|
||||
};
|
||||
}
|
||||
|
||||
function readMapping(buffer) {
|
||||
var dv = new DataView(buffer.buffer),
|
||||
lines,
|
||||
line,
|
||||
tokens,
|
||||
i,
|
||||
name,
|
||||
value,
|
||||
remainder,
|
||||
existing;
|
||||
|
||||
mapping = {}; // { name : value | name : { value : [remainder] } }
|
||||
lines = dv.string(0, dv.byteLength).split(/\r\n|\r|\n/);
|
||||
for (i = 0; i < lines.length; i += 1) {
|
||||
line = lines[i].trim();
|
||||
if (line.length > 0 && line[0] !== "#") {
|
||||
tokens = line.split(/\s*=\s*/);
|
||||
if (tokens.length > 1) {
|
||||
name = tokens[0];
|
||||
value = tokens[1];
|
||||
if (tokens.length > 2) {
|
||||
remainder = tokens.slice(2, tokens.length).join(" = ");
|
||||
} else {
|
||||
remainder = null;
|
||||
}
|
||||
if (tokens.length === 2 && mapping[name] === undefined) {
|
||||
mapping[name] = value;
|
||||
} else {
|
||||
if (mapping[name] === undefined) {
|
||||
mapping[name] = {};
|
||||
|
||||
} else if (typeof mapping[name] !== "object") {
|
||||
existing = mapping[name];
|
||||
mapping[name] = { existing : null };
|
||||
}
|
||||
|
||||
if (mapping[name][value] === undefined) {
|
||||
mapping[name][value] = [];
|
||||
}
|
||||
mapping[name][value].push(remainder);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function writeMapping(buffer) {
|
||||
var name,
|
||||
value,
|
||||
remainder,
|
||||
i,
|
||||
string = "";
|
||||
|
||||
for (name in mapping) {
|
||||
if (mapping.hasOwnProperty(name)) {
|
||||
if (typeof mapping[name] === "object") {
|
||||
for (value in mapping[name]) {
|
||||
if (mapping[name].hasOwnProperty(value)) {
|
||||
remainder = mapping[name][value];
|
||||
if (remainder === null) {
|
||||
string += (name + " = " + value + "\n");
|
||||
} else {
|
||||
for (i = 0; i < remainder.length; i += 1) {
|
||||
string += (name + " = " + value + " = " + remainder[i] + "\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
string += (name + " = " + mapping[name] + "\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
buffer.buffer = string.toArrayBuffer();
|
||||
}
|
||||
|
||||
function readGeometry(fbxBuffer) {
|
||||
var textures,
|
||||
view,
|
||||
index,
|
||||
EOF,
|
||||
previousNodeFilename;
|
||||
|
||||
// Reference:
|
||||
// http://code.blender.org/index.php/2013/08/fbx-binary-file-format-specification/
|
||||
|
||||
textures = {};
|
||||
view = new DataView(fbxBuffer.buffer);
|
||||
EOF = false;
|
||||
|
||||
function parseBinaryFBX() {
|
||||
var endOffset,
|
||||
numProperties,
|
||||
propertyListLength,
|
||||
nameLength,
|
||||
name,
|
||||
filename;
|
||||
|
||||
endOffset = view.getUint32(index, true);
|
||||
numProperties = view.getUint32(index + 4, true);
|
||||
propertyListLength = view.getUint32(index + 8, true);
|
||||
nameLength = view.getUint8(index + 12);
|
||||
index += 13;
|
||||
|
||||
if (endOffset === 0) {
|
||||
return;
|
||||
}
|
||||
if (endOffset < index || endOffset > view.byteLength) {
|
||||
EOF = true;
|
||||
return;
|
||||
}
|
||||
|
||||
name = view.string(index, nameLength).toLowerCase();
|
||||
index += nameLength;
|
||||
|
||||
if (name === "content" && previousNodeFilename !== "") {
|
||||
// Blender 2.71 exporter "embeds" external textures as empty binary blobs, so ignore these
|
||||
if (propertyListLength > 5) {
|
||||
geometry.embedded.push(previousNodeFilename);
|
||||
}
|
||||
}
|
||||
|
||||
if (name === "relativefilename") {
|
||||
filename = view.string(index + 5, view.getUint32(index + 1, true)).fileName();
|
||||
if (!textures.hasOwnProperty(filename)) {
|
||||
textures[filename] = "";
|
||||
geometry.textures.push(filename);
|
||||
}
|
||||
previousNodeFilename = filename;
|
||||
} else {
|
||||
previousNodeFilename = "";
|
||||
}
|
||||
|
||||
index += (propertyListLength);
|
||||
|
||||
while (index < endOffset && !EOF) {
|
||||
parseBinaryFBX();
|
||||
}
|
||||
}
|
||||
|
||||
function readTextFBX() {
|
||||
var line,
|
||||
view,
|
||||
viewLength,
|
||||
charCode,
|
||||
charCodes,
|
||||
numCharCodes,
|
||||
filename,
|
||||
relativeFilename = "",
|
||||
MAX_CHAR_CODES = 250;
|
||||
|
||||
view = new Uint8Array(fbxBuffer.buffer);
|
||||
viewLength = view.byteLength;
|
||||
charCodes = [];
|
||||
numCharCodes = 0;
|
||||
|
||||
for (index = 0; index < viewLength; index += 1) {
|
||||
charCode = view[index];
|
||||
if (charCode !== 9 && charCode !== 32) {
|
||||
if (charCode === 10) { // EOL. Can ignore EOF.
|
||||
line = String.fromCharCode.apply(String, charCodes).toLowerCase();
|
||||
// For embedded textures, "Content:" line immediately follows "RelativeFilename:" line.
|
||||
if (line.slice(0, 8) === "content:" && relativeFilename !== "") {
|
||||
geometry.embedded.push(relativeFilename);
|
||||
}
|
||||
if (line.slice(0, 17) === "relativefilename:") {
|
||||
filename = line.slice(line.indexOf("\""), line.lastIndexOf("\"") - line.length).fileName();
|
||||
if (!textures.hasOwnProperty(filename)) {
|
||||
textures[filename] = "";
|
||||
geometry.textures.push(filename);
|
||||
}
|
||||
relativeFilename = filename;
|
||||
} else {
|
||||
relativeFilename = "";
|
||||
}
|
||||
charCodes = [];
|
||||
numCharCodes = 0;
|
||||
} else {
|
||||
if (numCharCodes < MAX_CHAR_CODES) { // Only interested in start of line
|
||||
charCodes.push(charCode);
|
||||
numCharCodes += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
readTextFBX();
|
||||
|
||||
|
||||
}
|
||||
|
||||
function readModel() {
|
||||
var fbxFilename,
|
||||
//svoFilename,
|
||||
fileType;
|
||||
|
||||
info("Reading model file");
|
||||
print("Model file: " + modelFile);
|
||||
|
||||
if (modelFile.toLowerCase().fileType() === "fst") {
|
||||
fstBuffer = readFile(modelFile);
|
||||
if (fstBuffer === null) {
|
||||
return false;
|
||||
}
|
||||
readMapping(fstBuffer);
|
||||
fileType = mapping[FILENAME_FIELD].toLowerCase().fileType();
|
||||
if (mapping.hasOwnProperty(FILENAME_FIELD)) {
|
||||
if (fileType === "fbx") {
|
||||
fbxFilename = modelFile.path() + "\\" + mapping[FILENAME_FIELD];
|
||||
//} else if (fileType === "svo") {
|
||||
// svoFilename = modelFile.path() + "\\" + mapping[FILENAME_FIELD];
|
||||
} else {
|
||||
error("Unrecognized model type in FST file!");
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
error("Model file name not found in FST file!");
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
fstBuffer = {
|
||||
filename: "Interface." + randomChar(6), // Simulate avatar model uploading behaviour
|
||||
buffer: null
|
||||
};
|
||||
|
||||
if (modelFile.toLowerCase().fileType() === "fbx") {
|
||||
fbxFilename = modelFile;
|
||||
mapping[FILENAME_FIELD] = modelFile.fileName();
|
||||
|
||||
//} else if (modelFile.toLowerCase().fileType() === "svo") {
|
||||
// svoFilename = modelFile;
|
||||
// mapping[FILENAME_FIELD] = modelFile.fileName();
|
||||
|
||||
} else {
|
||||
error("Unrecognized file type: " + modelFile);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (!isProcessing) { return false; }
|
||||
|
||||
if (fbxFilename) {
|
||||
fbxBuffer = readFile(fbxFilename);
|
||||
if (fbxBuffer === null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!isProcessing) { return false; }
|
||||
|
||||
readGeometry(fbxBuffer);
|
||||
}
|
||||
|
||||
//if (svoFilename) {
|
||||
// svoBuffer = readFile(svoFilename);
|
||||
// if (svoBuffer === null) {
|
||||
// return false;
|
||||
// }
|
||||
//}
|
||||
|
||||
// Add any missing basic mappings
|
||||
if (!mapping.hasOwnProperty(NAME_FIELD)) {
|
||||
mapping[NAME_FIELD] = modelFile.fileName().fileBase();
|
||||
}
|
||||
if (!mapping.hasOwnProperty(TEXDIR_FIELD)) {
|
||||
mapping[TEXDIR_FIELD] = ".";
|
||||
}
|
||||
if (!mapping.hasOwnProperty(SCALE_FIELD)) {
|
||||
mapping[SCALE_FIELD] = 1.0;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function setProperties() {
|
||||
var form = [],
|
||||
directory,
|
||||
displayAs,
|
||||
validateAs;
|
||||
|
||||
progressDialog.close();
|
||||
print("Setting model properties");
|
||||
|
||||
form.push({ label: "Name:", value: mapping[NAME_FIELD] });
|
||||
|
||||
directory = modelFile.path() + "/" + mapping[TEXDIR_FIELD];
|
||||
displayAs = new RegExp("^" + modelFile.path().regExpEscape() + "[\\\\\\\/](.*)");
|
||||
validateAs = new RegExp("^" + modelFile.path().regExpEscape() + "([\\\\\\\/].*)?");
|
||||
|
||||
form.push({
|
||||
label: "Texture directory:",
|
||||
directory: modelFile.path() + "/" + mapping[TEXDIR_FIELD],
|
||||
title: "Choose Texture Directory",
|
||||
displayAs: displayAs,
|
||||
validateAs: validateAs,
|
||||
errorMessage: "Texture directory must be subdirectory of the model directory."
|
||||
});
|
||||
|
||||
form.push({ button: "Cancel" });
|
||||
|
||||
if (!Window.form("Set Model Properties", form)) {
|
||||
print("User cancelled uploading model");
|
||||
return false;
|
||||
}
|
||||
|
||||
mapping[NAME_FIELD] = form[0].value;
|
||||
mapping[TEXDIR_FIELD] = form[1].directory.slice(modelFile.path().length + 1);
|
||||
if (mapping[TEXDIR_FIELD] === "") {
|
||||
mapping[TEXDIR_FIELD] = ".";
|
||||
}
|
||||
|
||||
writeMapping(fstBuffer);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function createHttpMessage(callback) {
|
||||
var multiparts = [],
|
||||
lodCount,
|
||||
lodFile,
|
||||
lodBuffer,
|
||||
textureBuffer,
|
||||
textureSourceFormat,
|
||||
textureTargetFormat,
|
||||
embeddedTextures,
|
||||
i;
|
||||
|
||||
info("Preparing to send model");
|
||||
|
||||
// Model name
|
||||
if (mapping.hasOwnProperty(NAME_FIELD)) {
|
||||
multiparts.push({
|
||||
name : "model_name",
|
||||
string : mapping[NAME_FIELD]
|
||||
});
|
||||
} else {
|
||||
error("Model name is missing");
|
||||
httpMultiPart.clear();
|
||||
return;
|
||||
}
|
||||
|
||||
// FST file
|
||||
if (fstBuffer) {
|
||||
multiparts.push({
|
||||
name : "fst",
|
||||
buffer: fstBuffer
|
||||
});
|
||||
}
|
||||
|
||||
// FBX file
|
||||
if (fbxBuffer) {
|
||||
multiparts.push({
|
||||
name : "fbx",
|
||||
buffer: fbxBuffer
|
||||
});
|
||||
}
|
||||
|
||||
// SVO file
|
||||
//if (svoBuffer) {
|
||||
// multiparts.push({
|
||||
// name : "svo",
|
||||
// buffer: svoBuffer
|
||||
// });
|
||||
//}
|
||||
|
||||
// LOD files
|
||||
lodCount = 0;
|
||||
for (lodFile in mapping.lod) {
|
||||
if (mapping.lod.hasOwnProperty(lodFile)) {
|
||||
lodBuffer = readFile(modelFile.path() + "\/" + lodFile);
|
||||
if (lodBuffer === null) {
|
||||
return;
|
||||
}
|
||||
multiparts.push({
|
||||
name: "lod" + lodCount,
|
||||
buffer: lodBuffer
|
||||
});
|
||||
lodCount += 1;
|
||||
}
|
||||
if (!isProcessing) { return; }
|
||||
}
|
||||
|
||||
// Textures
|
||||
embeddedTextures = "|" + geometry.embedded.join("|") + "|";
|
||||
for (i = 0; i < geometry.textures.length; i += 1) {
|
||||
if (embeddedTextures.indexOf("|" + geometry.textures[i].fileName() + "|") === -1) {
|
||||
textureBuffer = readFile(modelFile.path() + "\/"
|
||||
+ (mapping[TEXDIR_FIELD] !== "." ? mapping[TEXDIR_FIELD] + "\/" : "")
|
||||
+ geometry.textures[i]);
|
||||
if (textureBuffer === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
textureSourceFormat = geometry.textures[i].fileType().toLowerCase();
|
||||
textureTargetFormat = (textureSourceFormat === "jpg" ? "jpg" : "png");
|
||||
textureBuffer.buffer =
|
||||
textureBuffer.buffer.recodeImage(textureSourceFormat, textureTargetFormat, MAX_TEXTURE_SIZE);
|
||||
textureBuffer.filename = textureBuffer.filename.slice(0, -textureSourceFormat.length) + textureTargetFormat;
|
||||
|
||||
multiparts.push({
|
||||
name: "texture" + i,
|
||||
buffer: textureBuffer
|
||||
});
|
||||
}
|
||||
|
||||
if (!isProcessing) { return; }
|
||||
}
|
||||
|
||||
// Model category
|
||||
multiparts.push({
|
||||
name : "model_category",
|
||||
string : "content"
|
||||
});
|
||||
|
||||
// Create HTTP message
|
||||
httpMultiPart.clear();
|
||||
Script.setTimeout(function addMultipart() {
|
||||
var multipart = multiparts.shift();
|
||||
httpMultiPart.add(multipart);
|
||||
|
||||
if (!isProcessing) { return; }
|
||||
|
||||
if (multiparts.length > 0) {
|
||||
Script.setTimeout(addMultipart, 25);
|
||||
} else {
|
||||
callback();
|
||||
}
|
||||
}, 25);
|
||||
}
|
||||
|
||||
function sendToHighFidelity() {
|
||||
var req,
|
||||
uploadedChecks,
|
||||
HTTP_GET_TIMEOUT = 60, // 1 minute
|
||||
HTTP_SEND_TIMEOUT = 900, // 15 minutes
|
||||
UPLOADED_CHECKS = 30,
|
||||
CHECK_UPLOADED_TIMEOUT = 1, // 1 second
|
||||
handleCheckUploadedResponses,
|
||||
handleUploadModelResponses,
|
||||
handleRequestUploadResponses;
|
||||
|
||||
function uploadTimedOut() {
|
||||
error("Model upload failed: Internet request timed out!");
|
||||
}
|
||||
|
||||
function debugResponse() {
|
||||
print("req.errorCode = " + req.errorCode);
|
||||
print("req.readyState = " + req.readyState);
|
||||
print("req.status = " + req.status);
|
||||
print("req.statusText = " + req.statusText);
|
||||
print("req.responseType = " + req.responseType);
|
||||
print("req.responseText = " + req.responseText);
|
||||
print("req.response = " + req.response);
|
||||
print("req.getAllResponseHeaders() = " + req.getAllResponseHeaders());
|
||||
}
|
||||
|
||||
function checkUploaded() {
|
||||
if (!isProcessing) { return; }
|
||||
|
||||
info("Checking uploaded model");
|
||||
|
||||
req = new XMLHttpRequest();
|
||||
req.open("HEAD", modelURL, true);
|
||||
req.timeout = HTTP_GET_TIMEOUT * 1000;
|
||||
req.onreadystatechange = handleCheckUploadedResponses;
|
||||
req.ontimeout = uploadTimedOut;
|
||||
req.send();
|
||||
}
|
||||
|
||||
handleCheckUploadedResponses = function () {
|
||||
//debugResponse();
|
||||
if (req.readyState === req.DONE) {
|
||||
if (req.status === 200) {
|
||||
// Note: Unlike avatar models, for content models we don't need to refresh texture cache.
|
||||
print("Model uploaded: " + modelURL);
|
||||
progressDialog.close();
|
||||
if (Window.confirm("Your model has been uploaded as: " + modelURL + "\nDo you want to rez it?")) {
|
||||
modelCallback(modelURL);
|
||||
}
|
||||
} else if (req.status === 404) {
|
||||
if (uploadedChecks > 0) {
|
||||
uploadedChecks -= 1;
|
||||
Script.setTimeout(checkUploaded, CHECK_UPLOADED_TIMEOUT * 1000);
|
||||
} else {
|
||||
print("Error: " + req.status + " " + req.statusText);
|
||||
error("We could not verify that your model was successfully uploaded but it may have been at: "
|
||||
+ modelURL);
|
||||
}
|
||||
} else {
|
||||
print("Error: " + req.status + " " + req.statusText);
|
||||
error("There was a problem with your upload, please try again later.");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function uploadModel(method) {
|
||||
var url;
|
||||
|
||||
if (!isProcessing) { return; }
|
||||
|
||||
req = new XMLHttpRequest();
|
||||
if (method === "PUT") {
|
||||
url = API_URL + "\/" + modelName;
|
||||
req.open("PUT", url, true); //print("PUT " + url);
|
||||
} else {
|
||||
url = API_URL;
|
||||
req.open("POST", url, true); //print("POST " + url);
|
||||
}
|
||||
req.setRequestHeader("Content-Type", "multipart/form-data; boundary=\"" + httpMultiPart.boundary() + "\"");
|
||||
req.timeout = HTTP_SEND_TIMEOUT * 1000;
|
||||
req.onreadystatechange = handleUploadModelResponses;
|
||||
req.ontimeout = uploadTimedOut;
|
||||
req.send(httpMultiPart.response().buffer);
|
||||
}
|
||||
|
||||
handleUploadModelResponses = function () {
|
||||
//debugResponse();
|
||||
if (req.readyState === req.DONE) {
|
||||
if (req.status === 200) {
|
||||
uploadedChecks = UPLOADED_CHECKS;
|
||||
checkUploaded();
|
||||
} else {
|
||||
print("Error: " + req.status + " " + req.statusText);
|
||||
error("There was a problem with your upload, please try again later.");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function requestUpload() {
|
||||
var url;
|
||||
|
||||
if (!isProcessing) { return; }
|
||||
|
||||
url = API_URL + "\/" + modelName; // XMLHttpRequest automatically handles authorization of API requests.
|
||||
req = new XMLHttpRequest();
|
||||
req.open("GET", url, true); //print("GET " + url);
|
||||
req.responseType = "json";
|
||||
req.timeout = HTTP_GET_TIMEOUT * 1000;
|
||||
req.onreadystatechange = handleRequestUploadResponses;
|
||||
req.ontimeout = uploadTimedOut;
|
||||
req.send();
|
||||
}
|
||||
|
||||
handleRequestUploadResponses = function () {
|
||||
var response;
|
||||
|
||||
//debugResponse();
|
||||
if (req.readyState === req.DONE) {
|
||||
if (req.status === 200) {
|
||||
if (req.responseType === "json") {
|
||||
response = JSON.parse(req.responseText);
|
||||
if (response.status === "success") {
|
||||
if (response.exists === false) {
|
||||
uploadModel("POST");
|
||||
} else if (response.can_update === true) {
|
||||
uploadModel("PUT");
|
||||
} else {
|
||||
error("This model file already exists and is owned by someone else!");
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
print("Error: " + req.status + " " + req.statusText);
|
||||
}
|
||||
error("Model upload failed! Something went wrong at the data server.");
|
||||
}
|
||||
};
|
||||
|
||||
info("Sending model to High Fidelity");
|
||||
|
||||
requestUpload();
|
||||
}
|
||||
|
||||
that.upload = function (file, callback) {
|
||||
|
||||
modelFile = file;
|
||||
modelCallback = callback;
|
||||
|
||||
isProcessing = true;
|
||||
|
||||
progressDialog.onCancel = function () {
|
||||
print("User cancelled uploading model");
|
||||
isProcessing = false;
|
||||
};
|
||||
|
||||
resetDataObjects();
|
||||
|
||||
if (readModel()) {
|
||||
if (setProperties()) {
|
||||
modelName = mapping[NAME_FIELD];
|
||||
modelURL = MODEL_URL + "\/" + mapping[NAME_FIELD] + ".fst"; // All models are uploaded as an FST
|
||||
|
||||
createHttpMessage(sendToHighFidelity);
|
||||
}
|
||||
}
|
||||
|
||||
resetDataObjects();
|
||||
};
|
||||
|
||||
return that;
|
||||
}());
|
|
@ -145,98 +145,3 @@ test("Test timeout", function() {
|
|||
this.assertEquals(0, req.status, "status should be `0`");
|
||||
this.assertEquals(4, req.errorCode, "4 is the timeout error code for QNetworkReply::NetworkError");
|
||||
});
|
||||
|
||||
|
||||
var localFile = Window.browse("Find defaultScripts.js file ...", "", "defaultScripts.js (defaultScripts.js)");
|
||||
|
||||
if (localFile !== null) {
|
||||
|
||||
localFile = "file:///" + localFile;
|
||||
|
||||
test("Test GET local file synchronously", function () {
|
||||
var req = new XMLHttpRequest();
|
||||
|
||||
var statesVisited = [true, false, false, false, false]
|
||||
req.onreadystatechange = function () {
|
||||
statesVisited[req.readyState] = true;
|
||||
};
|
||||
|
||||
req.open("GET", localFile, false);
|
||||
req.send();
|
||||
|
||||
this.assertEquals(req.DONE, req.readyState, "readyState should be DONE");
|
||||
this.assertEquals(200, req.status, "status should be `200`");
|
||||
this.assertEquals("OK", req.statusText, "statusText should be `OK`");
|
||||
this.assertEquals(0, req.errorCode);
|
||||
this.assertNotEquals("", req.getAllResponseHeaders(), "headers should not be null");
|
||||
this.assertContains("High Fidelity", req.response.substring(0, 100), "expected text not found in response")
|
||||
|
||||
for (var i = 0; i <= req.DONE; i++) {
|
||||
this.assertEquals(true, statesVisited[i], i + " should be set");
|
||||
}
|
||||
});
|
||||
|
||||
test("Test GET nonexistent local file", function () {
|
||||
var nonexistentFile = localFile.replace(".js", "NoExist.js");
|
||||
|
||||
var req = new XMLHttpRequest();
|
||||
req.open("GET", nonexistentFile, false);
|
||||
req.send();
|
||||
|
||||
this.assertEquals(req.DONE, req.readyState, "readyState should be DONE");
|
||||
this.assertEquals(404, req.status, "status should be `404`");
|
||||
this.assertEquals("Not Found", req.statusText, "statusText should be `Not Found`");
|
||||
this.assertNotEquals(0, req.errorCode);
|
||||
});
|
||||
|
||||
test("Test GET local file already open", function () {
|
||||
// Can't open file exclusively in order to test.
|
||||
});
|
||||
|
||||
test("Test GET local file with data not implemented", function () {
|
||||
var req = new XMLHttpRequest();
|
||||
req.open("GET", localFile, true);
|
||||
req.send("data");
|
||||
|
||||
this.assertEquals(req.DONE, req.readyState, "readyState should be DONE");
|
||||
this.assertEquals(501, req.status, "status should be `501`");
|
||||
this.assertEquals("Not Implemented", req.statusText, "statusText should be `Not Implemented`");
|
||||
this.assertNotEquals(0, req.errorCode);
|
||||
});
|
||||
|
||||
test("Test GET local file asynchronously not implemented", function () {
|
||||
var req = new XMLHttpRequest();
|
||||
req.open("GET", localFile, true);
|
||||
req.send();
|
||||
|
||||
this.assertEquals(req.DONE, req.readyState, "readyState should be DONE");
|
||||
this.assertEquals(501, req.status, "status should be `501`");
|
||||
this.assertEquals("Not Implemented", req.statusText, "statusText should be `Not Implemented`");
|
||||
this.assertNotEquals(0, req.errorCode);
|
||||
});
|
||||
|
||||
test("Test POST local file not implemented", function () {
|
||||
var req = new XMLHttpRequest();
|
||||
req.open("POST", localFile, false);
|
||||
req.send();
|
||||
|
||||
this.assertEquals(req.DONE, req.readyState, "readyState should be DONE");
|
||||
this.assertEquals(501, req.status, "status should be `501`");
|
||||
this.assertEquals("Not Implemented", req.statusText, "statusText should be `Not Implemented`");
|
||||
this.assertNotEquals(0, req.errorCode);
|
||||
});
|
||||
|
||||
test("Test local file username and password not implemented", function () {
|
||||
var req = new XMLHttpRequest();
|
||||
req.open("GET", localFile, false, "username", "password");
|
||||
req.send();
|
||||
|
||||
this.assertEquals(req.DONE, req.readyState, "readyState should be DONE");
|
||||
this.assertEquals(501, req.status, "status should be `501`");
|
||||
this.assertEquals("Not Implemented", req.statusText, "statusText should be `Not Implemented`");
|
||||
this.assertNotEquals(0, req.errorCode);
|
||||
});
|
||||
|
||||
} else {
|
||||
print("Local file operation not tested");
|
||||
}
|
||||
|

examples/utilities/tools/cookies.js (new executable file, 278 lines)
@ -0,0 +1,278 @@
|
|||
//
|
||||
// cookies.js
|
||||
//
|
||||
// version 1.0
|
||||
//
|
||||
// Created by Sam Gateau, 4/1/2015
|
||||
// A simple UI panel that presents a list of properties and the proper widget to edit each one
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
// The Slider class
|
||||
Slider = function(x,y,width,thumbSize) {
|
||||
|
||||
this.thumb = Overlays.addOverlay("text", {
|
||||
backgroundColor: { red: 255, green: 255, blue: 255 },
|
||||
x: x,
|
||||
y: y,
|
||||
width: thumbSize,
|
||||
height: thumbSize,
|
||||
alpha: 1.0,
|
||||
backgroundAlpha: 1.0,
|
||||
visible: true
|
||||
});
|
||||
this.background = Overlays.addOverlay("text", {
|
||||
backgroundColor: { red: 125, green: 125, blue: 255 },
|
||||
x: x,
|
||||
y: y,
|
||||
width: width,
|
||||
height: thumbSize,
|
||||
alpha: 1.0,
|
||||
backgroundAlpha: 0.5,
|
||||
visible: true
|
||||
});
|
||||
|
||||
this.thumbSize = thumbSize;
|
||||
this.thumbHalfSize = 0.5 * thumbSize;
|
||||
|
||||
this.minThumbX = x + this.thumbHalfSize;
|
||||
this.maxThumbX = x + width - this.thumbHalfSize;
|
||||
this.thumbX = this.minThumbX;
|
||||
|
||||
this.minValue = 0.0;
|
||||
this.maxValue = 1.0;
|
||||
|
||||
this.clickOffsetX = 0;
|
||||
this.isMoving = false;
|
||||
|
||||
this.updateThumb = function() {
|
||||
thumbTruePos = this.thumbX - 0.5 * this.thumbSize;
|
||||
Overlays.editOverlay(this.thumb, { x: thumbTruePos } );
|
||||
};
|
||||
|
||||
this.onMouseMoveEvent = function(event) {
|
||||
if (this.isMoving) {
|
||||
newThumbX = event.x - this.clickOffsetX;
|
||||
if (newThumbX < this.minThumbX) {
|
||||
newThumbX = this.minThumbX;
|
||||
}
|
||||
if (newThumbX > this.maxThumbX) {
|
||||
newThumbX = this.maxThumbX;
|
||||
}
|
||||
this.thumbX = newThumbX;
|
||||
this.updateThumb();
|
||||
this.onValueChanged(this.getValue());
|
||||
}
|
||||
};
|
||||
|
||||
this.onMousePressEvent = function(event) {
|
||||
this.isMoving = true;
|
||||
var clickOffset = event.x - this.thumbX;
|
||||
if ((clickOffset > -this.thumbHalfSize) && (clickOffset < this.thumbHalfSize)) {
|
||||
this.clickOffsetX = clickOffset;
|
||||
} else {
|
||||
this.clickOffsetX = 0;
|
||||
this.thumbX = event.x;
|
||||
this.updateThumb();
|
||||
this.onValueChanged(this.getValue());
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
this.onMouseReleaseEvent = function(event) {
|
||||
this.isMoving = false;
|
||||
};
|
||||
|
||||
// Public members:
|
||||
|
||||
this.setNormalizedValue = function(value) {
|
||||
if (value < 0.0) {
|
||||
this.thumbX = this.minThumbX;
|
||||
} else if (value > 1.0) {
|
||||
this.thumbX = this.maxThumbX;
|
||||
} else {
|
||||
this.thumbX = value * (this.maxThumbX - this.minThumbX) + this.minThumbX;
|
||||
}
|
||||
this.updateThumb();
|
||||
};
|
||||
this.getNormalizedValue = function() {
|
||||
return (this.thumbX - this.minThumbX) / (this.maxThumbX - this.minThumbX);
|
||||
};
|
||||
|
||||
this.setValue = function(value) {
|
||||
var normValue = (value - this.minValue) / (this.maxValue - this.minValue);
|
||||
this.setNormalizedValue(normValue);
|
||||
};
|
||||
|
||||
this.getValue = function() {
|
||||
return this.getNormalizedValue() * (this.maxValue - this.minValue) + this.minValue;
|
||||
};
|
||||
|
||||
this.onValueChanged = function(value) {};
|
||||
|
||||
this.destroy = function() {
|
||||
Overlays.deleteOverlay(this.background);
|
||||
Overlays.deleteOverlay(this.thumb);
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
var textFontSize = 16;
|
||||
|
||||
function PanelItem(name, setter, getter, displayer, x, y, textWidth, valueWidth, height) {
|
||||
this.name = name;
|
||||
|
||||
|
||||
this.displayer = typeof displayer !== 'undefined' ? displayer : function(value) { return value.toFixed(2); };
|
||||
|
||||
var topMargin = (height - textFontSize);
|
||||
this.title = Overlays.addOverlay("text", {
|
||||
backgroundColor: { red: 255, green: 255, blue: 255 },
|
||||
x: x,
|
||||
y: y,
|
||||
width: textWidth,
|
||||
height: height,
|
||||
alpha: 1.0,
|
||||
backgroundAlpha: 0.5,
|
||||
visible: true,
|
||||
text: name,
|
||||
font: {size: textFontSize},
|
||||
topMargin: topMargin,
|
||||
});
|
||||
|
||||
this.value = Overlays.addOverlay("text", {
|
||||
backgroundColor: { red: 255, green: 255, blue: 255 },
|
||||
x: x + textWidth,
|
||||
y: y,
|
||||
width: valueWidth,
|
||||
height: height,
|
||||
alpha: 1.0,
|
||||
backgroundAlpha: 0.5,
|
||||
visible: true,
|
||||
text: this.displayer(getter()),
|
||||
font: {size: textFontSize},
|
||||
topMargin: topMargin
|
||||
|
||||
});
|
||||
this.getter = getter;
|
||||
|
||||
this.setter = function(value) {
|
||||
setter(value);
|
||||
Overlays.editOverlay(this.value, {text: this.displayer(getter())});
|
||||
if (this.widget) {
|
||||
this.widget.setValue(value);
|
||||
}
|
||||
};
|
||||
this.setterFromWidget = function(value) {
|
||||
setter(value);
|
||||
Overlays.editOverlay(this.value, {text: this.displayer(getter())});
|
||||
};
|
||||
|
||||
|
||||
this.widget = null;
|
||||
|
||||
this.destroy = function() {
|
||||
Overlays.deleteOverlay(this.title);
|
||||
Overlays.deleteOverlay(this.value);
|
||||
if (this.widget != null) {
|
||||
this.widget.destroy();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var textWidth = 180;
|
||||
var valueWidth = 100;
|
||||
var widgetWidth = 300;
|
||||
var rawHeight = 20;
|
||||
var rawYDelta = rawHeight * 1.5;
|
||||
|
||||
Panel = function(x, y) {
|
||||
|
||||
this.x = x;
|
||||
this.y = y;
|
||||
this.nextY = y;
|
||||
|
||||
this.widgetX = x + textWidth + valueWidth;
|
||||
|
||||
this.items = new Array();
|
||||
this.activeWidget = null;
|
||||
|
||||
this.mouseMoveEvent = function(event) {
|
||||
if (this.activeWidget) {
|
||||
this.activeWidget.onMouseMoveEvent(event);
|
||||
}
|
||||
};
|
||||
|
||||
// we also handle click detection in our mousePressEvent()
|
||||
this.mousePressEvent = function(event) {
|
||||
// Make sure we have released the previous widget
|
||||
if (this.activeWidget) {
|
||||
this.activeWidget.onMouseReleaseEvent(event);
|
||||
}
|
||||
this.activeWidget = null;
|
||||
|
||||
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
|
||||
|
||||
// If the user clicked any of the slider background then...
|
||||
for (var i in this.items) {
|
||||
var widget = this.items[i].widget;
|
||||
|
||||
if (clickedOverlay == widget.background) {
|
||||
this.activeWidget = widget;
|
||||
this.activeWidget.onMousePressEvent(event);
|
||||
// print("clicked... widget=" + i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
this.mouseReleaseEvent = function(event) {
|
||||
if (this.activeWidget) {
|
||||
this.activeWidget.onMouseReleaseEvent(event);
|
||||
}
|
||||
this.activeWidget = null;
|
||||
};
|
||||
|
||||
this.newSlider = function(name, minValue, maxValue, setValue, getValue, displayValue) {
|
||||
|
||||
var sliderItem = new PanelItem(name, setValue, getValue, displayValue, this.x, this.nextY, textWidth, valueWidth, rawHeight);
|
||||
|
||||
var slider = new Slider(this.widgetX, this.nextY, widgetWidth, rawHeight);
|
||||
slider.minValue = minValue;
|
||||
slider.maxValue = maxValue;
|
||||
slider.onValueChanged = function(value) { sliderItem.setterFromWidget(value); };
|
||||
|
||||
|
||||
sliderItem.widget = slider;
|
||||
sliderItem.setter(getValue());
|
||||
this.items[name] = sliderItem;
|
||||
this.nextY += rawYDelta;
|
||||
// print("created Item... slider=" + name);
|
||||
};
|
||||
|
||||
this.destroy = function() {
|
||||
for (var i in this.items) {
|
||||
this.items[i].destroy();
|
||||
}
|
||||
}
|
||||
|
||||
this.set = function(name, value) {
|
||||
var item = this.items[name];
|
||||
if (item != null) {
|
||||
return item.setter(value);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
this.get = function(name) {
|
||||
var item = this.items[name];
|
||||
if (item != null) {
|
||||
return item.getter();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
};
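
For orientation, here is a minimal usage sketch of the Panel/Slider API that this new cookies.js file provides, modeled on how sunLightExample.js (earlier in this diff) drives it. The "Brightness" slider and its backing variable are illustrative only and are not part of the commit:

Script.include("../../utilities/tools/cookies.js"); // relative path as used by sunLightExample.js

var panel = new Panel(10, 400); // top-left corner of the panel, in pixels

var brightness = 0.5; // hypothetical value driven by the slider below
panel.newSlider("Brightness", 0.0, 1.0,
    function(value) { brightness = value; },      // setter, called as the thumb moves
    function() { return brightness; },            // getter, used to initialize and refresh the readout
    function(value) { return value.toFixed(2); }  // formats the value shown next to the label
);

// Forward mouse events so sliders can be dragged, and clean up when the script ends.
Controller.mouseMoveEvent.connect(function(event) { return panel.mouseMoveEvent(event); });
Controller.mousePressEvent.connect(function(event) { return panel.mousePressEvent(event); });
Controller.mouseReleaseEvent.connect(function(event) { return panel.mouseReleaseEvent(event); });
Script.scriptEnding.connect(function() { panel.destroy(); });

Values can also be read and written programmatically with panel.get("Brightness") and panel.set("Brightness", value), which is how the tick-tack loop in sunLightExample.js advances its "Day Time" slider.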

@@ -2,7 +2,7 @@ set(TARGET_NAME interface)
project(${TARGET_NAME})

# set a default root dir for each of our optional externals if it was not passed
set(OPTIONAL_EXTERNALS "Faceshift" "LibOVR" "Sixense" "LeapMotion" "RtMidi" "SDL2" "RSSDK")
set(OPTIONAL_EXTERNALS "Faceshift" "Sixense" "LeapMotion" "RtMidi" "SDL2" "RSSDK")
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
  string(TOUPPER ${EXTERNAL} ${EXTERNAL}_UPPERCASE)
  if (NOT ${${EXTERNAL}_UPPERCASE}_ROOT_DIR)

@@ -110,6 +110,11 @@ add_dependency_external_projects(glm bullet)
find_package(GLM REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${GLM_INCLUDE_DIRS})

add_dependency_external_projects(LibOVR)
find_package(LibOVR REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVR_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${LIBOVR_LIBRARIES})

find_package(Bullet REQUIRED)
target_include_directories(${TARGET_NAME} SYSTEM PRIVATE ${BULLET_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${BULLET_LIBRARIES})

interface/external/libovr/readme.txt (vendored, 16 lines removed)

@@ -1,16 +0,0 @@

Instructions for adding the Oculus library (LibOVR) to Interface
Stephen Birarda, March 6, 2014

You can download the Oculus SDK from https://developer.oculusvr.com/ (account creation required). Interface has been tested with SDK version 0.4.1.

1. Copy the Oculus SDK folders from the LibOVR directory (Lib, Include, Src) into the interface/externals/libovr folder.
   This readme.txt should be there as well.

   You may optionally choose to copy the SDK folders to a location outside the repository (so you can re-use with different checkouts and different projects).
   If so, our CMake find module expects you to set the ENV variable 'HIFI_LIB_DIR' to a directory containing a subfolder 'oculus' that contains the three folders mentioned above.

   NOTE: For Windows users, you should copy libovr.lib and libovrd.lib from the \oculus\Lib\Win32\VS2010 directory to the \libovr\Lib\Win32\ directory.

2. Clear your build directory, run cmake and build, and you should be all set.
@@ -258,7 +258,8 @@ bool setupEssentials(int& argc, char** argv) {
    auto speechRecognizer = DependencyManager::set<SpeechRecognizer>();
#endif
    auto discoverabilityManager = DependencyManager::set<DiscoverabilityManager>();

    auto sceneScriptingInterface = DependencyManager::set<SceneScriptingInterface>();

    return true;
}

@@ -568,6 +569,9 @@ void Application::aboutToQuit() {
}

void Application::cleanupBeforeQuit() {

    _entities.clear(); // this will allow entity scripts to properly shutdown

    _datagramProcessor->shutdown(); // tell the datagram processor we're shutting down, so it can short circuit
    _entities.shutdown(); // tell the entities system we're shutting down, so it will stop running scripts
    ScriptEngine::stopAllScripts(this); // stop all currently running global scripts

@@ -775,10 +779,6 @@ void Application::paintGL() {
    }

    if (OculusManager::isConnected()) {
        // Clear the color buffer to ensure that there isn't any residual color
        // left over from when the Oculus Rift was not connected.
        glClear(GL_COLOR_BUFFER_BIT);

        // When in mirror mode, use camera rotation. Otherwise, use body rotation.
        if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
            OculusManager::display(_myCamera.getRotation(), _myCamera.getPosition(), _myCamera);

@@ -2771,7 +2771,7 @@ void Application::updateShadowMap() {

    const GLfloat WORLD_AMBIENT_COLOR[] = { 0.525f, 0.525f, 0.6f };
    const GLfloat WORLD_DIFFUSE_COLOR[] = { 0.6f, 0.525f, 0.525f };
    const GLfloat WORLD_SPECULAR_COLOR[] = { 0.94f, 0.94f, 0.737f, 1.0f };
    const GLfloat WORLD_SPECULAR_COLOR[] = { 0.08f, 0.08f, 0.08f, 1.0f };

    const glm::vec3 GLOBAL_LIGHT_COLOR = { 0.6f, 0.525f, 0.525f };

@@ -3596,6 +3596,8 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
    scriptEngine->registerFunction(hmdInterface, "getHUDLookAtPosition2D", HMDScriptingInterface::getHUDLookAtPosition2D, 0);
    scriptEngine->registerFunction(hmdInterface, "getHUDLookAtPosition3D", HMDScriptingInterface::getHUDLookAtPosition3D, 0);

    scriptEngine->registerGlobalObject("Scene", DependencyManager::get<SceneScriptingInterface>().data());

#ifdef HAVE_RTMIDI
    scriptEngine->registerGlobalObject("MIDI", &MIDIManager::getInstance());
#endif
|
@ -66,9 +66,13 @@ void GLCanvas::paintGL() {
|
|||
}
|
||||
|
||||
Application::getInstance()->paintGL();
|
||||
swapBuffers();
|
||||
|
||||
if (OculusManager::isConnected()) {
|
||||
|
||||
if (!OculusManager::isConnected()) {
|
||||
swapBuffers();
|
||||
} else {
|
||||
if (OculusManager::allowSwap()) {
|
||||
swapBuffers();
|
||||
}
|
||||
OculusManager::endFrameTiming();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,7 +18,8 @@
|
|||
#include <QDesktopWidget>
|
||||
#include <QGuiApplication>
|
||||
#include <QOpenGLFramebufferObject>
|
||||
#include <QScreen>
|
||||
#include <QScreen>
|
||||
#include <QOpenGLTimerQuery>
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
|
||||
|
@ -29,12 +30,28 @@
|
|||
#include <SharedUtil.h>
|
||||
#include <UserActivityLogger.h>
|
||||
|
||||
#include <OVR_CAPI_GL.h>
|
||||
|
||||
#include "Application.h"
|
||||
|
||||
#ifdef HAVE_LIBOVR
template <typename Function>
void for_each_eye(Function function) {
    for (ovrEyeType eye = ovrEyeType::ovrEye_Left;
        eye < ovrEyeType::ovrEye_Count;
        eye = static_cast<ovrEyeType>(eye + 1)) {
        function(eye);
    }
}

using namespace OVR;
template <typename Function>
void for_each_eye(const ovrHmd & hmd, Function function) {
    for (int i = 0; i < ovrEye_Count; ++i) {
        ovrEyeType eye = hmd->EyeRenderOrder[i];
        function(eye);
    }
}
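The two for_each_eye overloads above are small convenience helpers: the first always walks Left then Right, the second visits the eyes in the HMD's recommended render order. A minimal usage sketch, assuming LibOVR 0.4/0.5 headers and an already-created ovrHmd; the function name below is hypothetical and not part of this commit:

#include <OVR_CAPI.h>
#include <cstdio>

void logEyeOrder(const ovrHmd hmd) {
    // Render-order variant: visits eyes in the order the SDK recommends for this HMD.
    for_each_eye(hmd, [](ovrEyeType eye) {
        printf("render-order eye: %d\n", (int)eye);
    });
    // Plain variant: always ovrEye_Left, then ovrEye_Right.
    for_each_eye([](ovrEyeType eye) {
        printf("fixed-order eye: %d\n", (int)eye);
    });
}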
|
||||
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
ProgramObject OculusManager::_program;
|
||||
int OculusManager::_textureLocation;
|
||||
int OculusManager::_eyeToSourceUVScaleLocation;
|
||||
|
@ -46,24 +63,27 @@ int OculusManager::_colorAttributeLocation;
|
|||
int OculusManager::_texCoord0AttributeLocation;
|
||||
int OculusManager::_texCoord1AttributeLocation;
|
||||
int OculusManager::_texCoord2AttributeLocation;
|
||||
bool OculusManager::_isConnected = false;
|
||||
|
||||
ovrHmd OculusManager::_ovrHmd;
|
||||
ovrHmdDesc OculusManager::_ovrHmdDesc;
|
||||
ovrFovPort OculusManager::_eyeFov[ovrEye_Count];
|
||||
ovrEyeRenderDesc OculusManager::_eyeRenderDesc[ovrEye_Count];
|
||||
ovrSizei OculusManager::_renderTargetSize;
|
||||
ovrVector2f OculusManager::_UVScaleOffset[ovrEye_Count][2];
|
||||
GLuint OculusManager::_vertices[ovrEye_Count] = { 0, 0 };
|
||||
GLuint OculusManager::_indices[ovrEye_Count] = { 0, 0 };
|
||||
GLsizei OculusManager::_meshSize[ovrEye_Count] = { 0, 0 };
|
||||
ovrFrameTiming OculusManager::_hmdFrameTiming;
|
||||
ovrRecti OculusManager::_eyeRenderViewport[ovrEye_Count];
|
||||
bool OculusManager::_programInitialized = false;
|
||||
#endif
|
||||
|
||||
ovrTexture OculusManager::_eyeTextures[ovrEye_Count];
|
||||
bool OculusManager::_isConnected = false;
|
||||
ovrHmd OculusManager::_ovrHmd;
|
||||
ovrFovPort OculusManager::_eyeFov[ovrEye_Count];
|
||||
ovrVector3f OculusManager::_eyeOffset[ovrEye_Count];
|
||||
ovrEyeRenderDesc OculusManager::_eyeRenderDesc[ovrEye_Count];
|
||||
ovrSizei OculusManager::_renderTargetSize;
|
||||
glm::mat4 OculusManager::_eyeProjection[ovrEye_Count];
|
||||
unsigned int OculusManager::_frameIndex = 0;
|
||||
bool OculusManager::_frameTimingActive = false;
|
||||
bool OculusManager::_programInitialized = false;
|
||||
Camera* OculusManager::_camera = NULL;
|
||||
int OculusManager::_activeEyeIndex = -1;
|
||||
ovrEyeType OculusManager::_activeEye = ovrEye_Count;
|
||||
bool OculusManager::_hswDismissed = false;
|
||||
|
||||
float OculusManager::CALIBRATION_DELTA_MINIMUM_LENGTH = 0.02f;
|
||||
float OculusManager::CALIBRATION_DELTA_MINIMUM_ANGLE = 5.0f * RADIANS_PER_DEGREE;
|
||||
|
@ -76,68 +96,86 @@ glm::vec3 OculusManager::_calibrationPosition;
|
|||
glm::quat OculusManager::_calibrationOrientation;
|
||||
quint64 OculusManager::_calibrationStartTime;
|
||||
int OculusManager::_calibrationMessage = NULL;
|
||||
glm::vec3 OculusManager::_eyePositions[ovrEye_Count];
|
||||
// TODO expose this as a developer toggle
|
||||
bool OculusManager::_eyePerFrameMode = false;
|
||||
ovrEyeType OculusManager::_lastEyeRendered = ovrEye_Count;
|
||||
ovrSizei OculusManager::_recommendedTexSize = { 0, 0 };
|
||||
float OculusManager::_offscreenRenderScale = 1.0;
|
||||
|
||||
|
||||
void OculusManager::initSdk() {
|
||||
ovr_Initialize();
|
||||
_ovrHmd = ovrHmd_Create(0);
|
||||
if (!_ovrHmd) {
|
||||
_ovrHmd = ovrHmd_CreateDebug(ovrHmd_DK2);
|
||||
}
|
||||
}
|
||||
|
||||
void OculusManager::shutdownSdk() {
|
||||
ovrHmd_Destroy(_ovrHmd);
|
||||
ovr_Shutdown();
|
||||
}
|
||||
|
||||
void OculusManager::init() {
|
||||
#ifdef OVR_DIRECT_MODE
|
||||
initSdk();
|
||||
#endif
|
||||
|
||||
glm::vec3 OculusManager::_leftEyePosition = glm::vec3();
|
||||
glm::vec3 OculusManager::_rightEyePosition = glm::vec3();
|
||||
}
|
||||
|
||||
void OculusManager::connect() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
#ifndef OVR_DIRECT_MODE
|
||||
initSdk();
|
||||
#endif
|
||||
_calibrationState = UNCALIBRATED;
|
||||
qDebug() << "Oculus SDK" << OVR_VERSION_STRING;
|
||||
ovr_Initialize();
|
||||
|
||||
_ovrHmd = ovrHmd_Create(0);
|
||||
if (_ovrHmd) {
|
||||
if (!_isConnected) {
|
||||
UserActivityLogger::getInstance().connectedDevice("hmd", "oculus");
|
||||
}
|
||||
_isConnected = true;
|
||||
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
_eyeFov[0] = _ovrHmd->DefaultEyeFov[0];
|
||||
_eyeFov[1] = _ovrHmd->DefaultEyeFov[1];
|
||||
#else
|
||||
ovrHmd_GetDesc(_ovrHmd, &_ovrHmdDesc);
|
||||
_eyeFov[0] = _ovrHmdDesc.DefaultEyeFov[0];
|
||||
_eyeFov[1] = _ovrHmdDesc.DefaultEyeFov[1];
|
||||
#endif
|
||||
//Get texture size
|
||||
ovrSizei recommendedTex0Size = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left,
|
||||
_eyeFov[0], 1.0f);
|
||||
ovrSizei recommendedTex1Size = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Right,
|
||||
_eyeFov[1], 1.0f);
|
||||
_renderTargetSize.w = recommendedTex0Size.w + recommendedTex1Size.w;
|
||||
_renderTargetSize.h = recommendedTex0Size.h;
|
||||
if (_renderTargetSize.h < recommendedTex1Size.h) {
|
||||
_renderTargetSize.h = recommendedTex1Size.h;
|
||||
}
|
||||
|
||||
_eyeRenderDesc[0] = ovrHmd_GetRenderDesc(_ovrHmd, ovrEye_Left, _eyeFov[0]);
|
||||
_eyeRenderDesc[1] = ovrHmd_GetRenderDesc(_ovrHmd, ovrEye_Right, _eyeFov[1]);
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
_eyeFov[eye] = _ovrHmd->DefaultEyeFov[eye];
|
||||
});
|
||||
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence);
|
||||
#else
|
||||
ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence | ovrHmdCap_LatencyTest);
|
||||
#endif
|
||||
ovrGLConfig cfg;
|
||||
memset(&cfg, 0, sizeof(cfg));
|
||||
cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
|
||||
cfg.OGL.Header.BackBufferSize = _ovrHmd->Resolution;
|
||||
cfg.OGL.Header.Multisample = 1;
|
||||
|
||||
int distortionCaps = 0
|
||||
| ovrDistortionCap_Vignette
|
||||
| ovrDistortionCap_Overdrive
|
||||
| ovrDistortionCap_TimeWarp;
|
||||
|
||||
int configResult = ovrHmd_ConfigureRendering(_ovrHmd, &cfg.Config,
|
||||
distortionCaps, _eyeFov, _eyeRenderDesc);
|
||||
assert(configResult);
|
||||
|
||||
|
||||
_recommendedTexSize = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left, _eyeFov[ovrEye_Left], 1.0f);
|
||||
_renderTargetSize = { _recommendedTexSize.w * 2, _recommendedTexSize.h };
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
//Get texture size
|
||||
_eyeTextures[eye].Header.API = ovrRenderAPI_OpenGL;
|
||||
_eyeTextures[eye].Header.TextureSize = _renderTargetSize;
|
||||
_eyeTextures[eye].Header.RenderViewport.Pos = { 0, 0 };
|
||||
});
|
||||
_eyeTextures[ovrEye_Right].Header.RenderViewport.Pos.x = _recommendedTexSize.w;
|
||||
|
||||
ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
|
||||
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
ovrHmd_ConfigureTracking(_ovrHmd, ovrTrackingCap_Orientation | ovrTrackingCap_Position |
|
||||
ovrTrackingCap_MagYawCorrection,
|
||||
ovrTrackingCap_Orientation);
|
||||
#else
|
||||
ovrHmd_StartSensor(_ovrHmd, ovrSensorCap_Orientation | ovrSensorCap_YawCorrection |
|
||||
ovrSensorCap_Position,
|
||||
ovrSensorCap_Orientation);
|
||||
#endif
|
||||
|
||||
if (!_camera) {
|
||||
_camera = new Camera;
|
||||
configureCamera(*_camera, 0, 0); // no need to use screen dimensions; they're ignored
|
||||
}
|
||||
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
if (!_programInitialized) {
|
||||
// Shader program
|
||||
_programInitialized = true;
|
||||
|
@ -162,27 +200,27 @@ void OculusManager::connect() {
|
|||
|
||||
//Generate the distortion VBOs
|
||||
generateDistortionMesh();
|
||||
|
||||
#endif
|
||||
} else {
|
||||
_isConnected = false;
|
||||
|
||||
// we're definitely not in "VR mode" so tell the menu that
|
||||
Menu::getInstance()->getActionForOption(MenuOption::EnableVRMode)->setChecked(false);
|
||||
|
||||
ovrHmd_Destroy(_ovrHmd);
|
||||
ovr_Shutdown();
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
//Disconnects and deallocates the OR
|
||||
void OculusManager::disconnect() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
if (_isConnected) {
|
||||
_isConnected = false;
|
||||
ovrHmd_Destroy(_ovrHmd);
|
||||
ovr_Shutdown();
|
||||
// Prepare to potentially have to dismiss the HSW again
|
||||
// if the user re-enables VR
|
||||
_hswDismissed = false;
|
||||
#ifndef OVR_DIRECT_MODE
|
||||
shutdownSdk();
|
||||
#endif
|
||||
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
//Free the distortion mesh data
|
||||
for (int i = 0; i < ovrEye_Count; i++) {
|
||||
if (_vertices[i] != 0) {
|
||||
|
@ -194,11 +232,10 @@ void OculusManager::disconnect() {
|
|||
_indices[i] = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef HAVE_LIBOVR
|
||||
void OculusManager::positionCalibrationBillboard(Text3DOverlay* billboard) {
|
||||
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
glm::quat headOrientation = myAvatar->getHeadOrientation();
|
||||
|
@ -209,9 +246,7 @@ void OculusManager::positionCalibrationBillboard(Text3DOverlay* billboard) {
|
|||
+ headOrientation * glm::vec3(0.0f, 0.0f, -CALIBRATION_MESSAGE_DISTANCE));
|
||||
billboard->setRotation(headOrientation);
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_LIBOVR
|
||||
void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) {
|
||||
static QString instructionMessage = "Hold still to calibrate";
|
||||
static QString progressMessage;
|
||||
|
@ -303,26 +338,21 @@ void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) {
|
|||
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
void OculusManager::recalibrate() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
_calibrationState = UNCALIBRATED;
|
||||
#endif
|
||||
}
|
||||
|
||||
void OculusManager::abandonCalibration() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
_calibrationState = CALIBRATED;
|
||||
if (_calibrationMessage) {
|
||||
qDebug() << "Abandoned HMD calibration";
|
||||
Application::getInstance()->getOverlays().deleteOverlay(_calibrationMessage);
|
||||
_calibrationMessage = NULL;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
#ifdef HAVE_LIBOVR
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
void OculusManager::generateDistortionMesh() {
|
||||
|
||||
//Check if we already have the distortion mesh
|
||||
|
@ -331,29 +361,19 @@ void OculusManager::generateDistortionMesh() {
|
|||
return;
|
||||
}
|
||||
|
||||
//Viewport for the render target for each eye
|
||||
_eyeRenderViewport[0].Pos = Vector2i(0, 0);
|
||||
_eyeRenderViewport[0].Size = Sizei(_renderTargetSize.w / 2, _renderTargetSize.h);
|
||||
_eyeRenderViewport[1].Pos = Vector2i((_renderTargetSize.w + 1) / 2, 0);
|
||||
_eyeRenderViewport[1].Size = _eyeRenderViewport[0].Size;
|
||||
|
||||
for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
|
||||
// Allocate and generate distortion mesh vertices
|
||||
ovrDistortionMesh meshData;
|
||||
ovrHmd_CreateDistortionMesh(_ovrHmd, _eyeRenderDesc[eyeNum].Eye, _eyeRenderDesc[eyeNum].Fov, _ovrHmdDesc.DistortionCaps, &meshData);
|
||||
|
||||
ovrHmd_GetRenderScaleAndOffset(_eyeRenderDesc[eyeNum].Fov, _renderTargetSize, _eyeRenderViewport[eyeNum],
|
||||
_UVScaleOffset[eyeNum]);
|
||||
ovrHmd_CreateDistortionMesh(_ovrHmd, _eyeRenderDesc[eyeNum].Eye, _eyeRenderDesc[eyeNum].Fov, _ovrHmd->DistortionCaps, &meshData);
|
||||
|
||||
// Parse the vertex data and create a render ready vertex buffer
|
||||
DistortionVertex* pVBVerts = (DistortionVertex*)OVR_ALLOC(sizeof(DistortionVertex) * meshData.VertexCount);
|
||||
DistortionVertex* pVBVerts = new DistortionVertex[meshData.VertexCount];
|
||||
_meshSize[eyeNum] = meshData.IndexCount;
|
||||
|
||||
// Convert the oculus vertex data to the DistortionVertex format.
|
||||
DistortionVertex* v = pVBVerts;
|
||||
ovrDistortionVertex* ov = meshData.pVertexData;
|
||||
for (unsigned int vertNum = 0; vertNum < meshData.VertexCount; vertNum++) {
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
v->pos.x = ov->ScreenPosNDC.x;
|
||||
v->pos.y = ov->ScreenPosNDC.y;
|
||||
v->texR.x = ov->TanEyeAnglesR.x;
|
||||
|
@ -362,16 +382,6 @@ void OculusManager::generateDistortionMesh() {
|
|||
v->texG.y = ov->TanEyeAnglesG.y;
|
||||
v->texB.x = ov->TanEyeAnglesB.x;
|
||||
v->texB.y = ov->TanEyeAnglesB.y;
|
||||
#else
|
||||
v->pos.x = ov->Pos.x;
|
||||
v->pos.y = ov->Pos.y;
|
||||
v->texR.x = ov->TexR.x;
|
||||
v->texR.y = ov->TexR.y;
|
||||
v->texG.x = ov->TexG.x;
|
||||
v->texG.y = ov->TexG.y;
|
||||
v->texB.x = ov->TexB.x;
|
||||
v->texB.y = ov->TexB.y;
|
||||
#endif
|
||||
v->color.r = v->color.g = v->color.b = (GLubyte)(ov->VignetteFactor * 255.99f);
|
||||
v->color.a = (GLubyte)(ov->TimeWarpFactor * 255.99f);
|
||||
v++;
|
||||
|
@ -391,7 +401,7 @@ void OculusManager::generateDistortionMesh() {
|
|||
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
|
||||
|
||||
//Now that we have the VBOs we can get rid of the mesh data
|
||||
OVR_FREE(pVBVerts);
|
||||
delete [] pVBVerts;
|
||||
ovrHmd_DestroyDistortionMesh(&meshData);
|
||||
}
|
||||
|
||||
|
@ -399,46 +409,101 @@ void OculusManager::generateDistortionMesh() {
|
|||
#endif
|
||||
|
||||
bool OculusManager::isConnected() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
return _isConnected && Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode);
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
//Begins the frame timing for oculus prediction purposes
|
||||
void OculusManager::beginFrameTiming() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
|
||||
if (_frameTimingActive) {
|
||||
printf("WARNING: Called OculusManager::beginFrameTiming() twice in a row, need to call OculusManager::endFrameTiming().");
|
||||
}
|
||||
|
||||
_hmdFrameTiming = ovrHmd_BeginFrameTiming(_ovrHmd, _frameIndex);
|
||||
_frameTimingActive = true;
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
_hmdFrameTiming = ovrHmd_BeginFrameTiming(_ovrHmd, _frameIndex);
|
||||
#endif
|
||||
_frameTimingActive = true;
|
||||
}
|
||||
|
||||
bool OculusManager::allowSwap() {
|
||||
return false;
|
||||
}
|
||||
|
||||
//Ends frame timing
|
||||
void OculusManager::endFrameTiming() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
ovrHmd_EndFrameTiming(_ovrHmd);
|
||||
#endif
|
||||
_frameIndex++;
|
||||
_frameTimingActive = false;
|
||||
#endif
|
||||
}
|
||||
|
||||
//Sets the camera FoV and aspect ratio
|
||||
void OculusManager::configureCamera(Camera& camera, int screenWidth, int screenHeight) {
|
||||
#ifdef HAVE_LIBOVR
|
||||
camera.setAspectRatio(_renderTargetSize.w * 0.5f / _renderTargetSize.h);
|
||||
camera.setFieldOfView(atan(_eyeFov[0].UpTan) * DEGREES_PER_RADIAN * 2.0f);
|
||||
#endif
|
||||
}
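configureCamera() derives both camera settings from the SDK data: the aspect ratio is half the shared render-target width over its height, and the vertical field of view is 2 * atan(UpTan) converted to degrees. A standalone arithmetic check with illustrative numbers; the UpTan value and texture size below are assumptions, not values from this diff:

#include <cmath>
#include <cstdio>

int main() {
    const float upTan = 1.33f;                        // assumed, DK2-like default FOV port
    const float degreesPerRadian = 180.0f / 3.14159265f;
    const float verticalFov = atanf(upTan) * degreesPerRadian * 2.0f;   // ~106 degrees
    const float targetW = 2364.0f, targetH = 1464.0f; // assumed combined render target
    printf("fov = %.1f deg, aspect = %.2f\n", verticalFov, (targetW * 0.5f) / targetH);
    return 0;
}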
|
||||
|
||||
static bool timerActive = false;
|
||||
//Displays everything for the oculus, frame timing must be active
|
||||
void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera) {
|
||||
#ifdef HAVE_LIBOVR
|
||||
auto glCanvas = Application::getInstance()->getGLWidget();
|
||||
|
||||
#ifdef DEBUG
|
||||
// Ensure the frame counter always increments by exactly 1
|
||||
static int oldFrameIndex = -1;
|
||||
assert(oldFrameIndex == -1 || oldFrameIndex == _frameIndex - 1);
|
||||
oldFrameIndex = _frameIndex;
|
||||
#endif
|
||||
|
||||
// Every so often do some additional timing calculations and debug output
|
||||
bool debugFrame = 0 == _frameIndex % 400;
|
||||
|
||||
#if 0
|
||||
// Try to measure the amount of time taken to do the distortion
|
||||
// (does not seem to work on OSX with SDK based distortion)
|
||||
// FIXME can't use a static object here, because it will cause a crash when the
|
||||
// query attempts deconstruct after the GL context is gone.
|
||||
static QOpenGLTimerQuery timerQuery;
|
||||
if (!timerQuery.isCreated()) {
|
||||
timerQuery.create();
|
||||
}
|
||||
|
||||
if (timerActive && timerQuery.isResultAvailable()) {
|
||||
auto result = timerQuery.waitForResult();
|
||||
if (result) { qDebug() << "Distortion took " << result << "ns"; };
|
||||
timerActive = false;
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef OVR_DIRECT_MODE
|
||||
static bool attached = false;
|
||||
if (!attached) {
|
||||
attached = true;
|
||||
void * nativeWindowHandle = (void*)(size_t)glCanvas->effectiveWinId();
|
||||
if (nullptr != nativeWindowHandle) {
|
||||
ovrHmd_AttachToWindow(_ovrHmd, nativeWindowHandle, nullptr, nullptr);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifndef OVR_CLIENT_DISTORTION
|
||||
// FIXME: we need a better way of responding to the HSW. In particular
|
||||
// we need to ensure that it's only displayed once per session, rather than
|
||||
// every time the user toggles VR mode, and we need to hook it up to actual
|
||||
// keyboard input. OVR claim they are refactoring HSW
|
||||
// https://forums.oculus.com/viewtopic.php?f=20&t=21720#p258599
|
||||
static ovrHSWDisplayState hasWarningState;
|
||||
if (!_hswDismissed) {
|
||||
ovrHmd_GetHSWDisplayState(_ovrHmd, &hasWarningState);
|
||||
if (hasWarningState.Displayed) {
|
||||
ovrHmd_DismissHSWDisplay(_ovrHmd);
|
||||
} else {
|
||||
_hswDismissed = true;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
//beginFrameTiming must be called before display
|
||||
if (!_frameTimingActive) {
|
||||
printf("WARNING: Called OculusManager::display() without calling OculusManager::beginFrameTiming() first.");
|
||||
|
@ -459,7 +524,6 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
|
|||
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
||||
}
|
||||
|
||||
ovrPosef eyeRenderPose[ovrEye_Count];
|
||||
|
||||
glMatrixMode(GL_PROJECTION);
|
||||
glPushMatrix();
|
||||
|
@ -470,7 +534,6 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
|
|||
glm::quat orientation;
|
||||
glm::vec3 trackerPosition;
|
||||
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
|
||||
ovrVector3f ovrHeadPosition = ts.HeadPose.ThePose.Position;
|
||||
|
||||
|
@ -483,105 +546,153 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
|
|||
}
|
||||
|
||||
trackerPosition = bodyOrientation * trackerPosition;
|
||||
#endif
|
||||
|
||||
static ovrVector3f eyeOffsets[2] = { { 0, 0, 0 }, { 0, 0, 0 } };
|
||||
ovrPosef eyePoses[ovrEye_Count];
|
||||
ovrHmd_GetEyePoses(_ovrHmd, _frameIndex, eyeOffsets, eyePoses, nullptr);
|
||||
ovrHmd_BeginFrame(_ovrHmd, _frameIndex);
|
||||
static ovrPosef eyeRenderPose[ovrEye_Count];
|
||||
//Render each eye into an fbo
|
||||
for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++) {
|
||||
_activeEyeIndex = eyeIndex;
|
||||
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
ovrEyeType eye = _ovrHmd->EyeRenderOrder[eyeIndex];
|
||||
#else
|
||||
ovrEyeType eye = _ovrHmdDesc.EyeRenderOrder[eyeIndex];
|
||||
#endif
|
||||
for_each_eye(_ovrHmd, [&](ovrEyeType eye){
|
||||
// If we're in eye-per-frame mode, only render one eye
|
||||
// per call to display, and allow timewarp to correct for
|
||||
// the other eye. Poor man's perf improvement
|
||||
if (_eyePerFrameMode && eye == _lastEyeRendered) {
|
||||
return;
|
||||
}
|
||||
_lastEyeRendered = _activeEye = eye;
|
||||
eyeRenderPose[eye] = eyePoses[eye];
|
||||
// Set the camera rotation for this eye
|
||||
eyeRenderPose[eye] = ovrHmd_GetEyePose(_ovrHmd, eye);
|
||||
orientation.x = eyeRenderPose[eye].Orientation.x;
|
||||
orientation.y = eyeRenderPose[eye].Orientation.y;
|
||||
orientation.z = eyeRenderPose[eye].Orientation.z;
|
||||
orientation.w = eyeRenderPose[eye].Orientation.w;
|
||||
|
||||
|
||||
// Update the application camera with the latest HMD position
|
||||
whichCamera.setHmdPosition(trackerPosition);
|
||||
whichCamera.setHmdRotation(orientation);
|
||||
|
||||
|
||||
// Update our camera to what the application camera is doing
|
||||
_camera->setRotation(whichCamera.getRotation());
|
||||
_camera->setPosition(whichCamera.getPosition());
|
||||
|
||||
|
||||
// Store the latest left and right eye render locations for things that need to know
|
||||
glm::vec3 thisEyePosition = position + trackerPosition +
|
||||
(bodyOrientation * glm::quat(orientation.x, orientation.y, orientation.z, orientation.w) *
|
||||
glm::vec3(_eyeRenderDesc[eye].ViewAdjust.x, _eyeRenderDesc[eye].ViewAdjust.y, _eyeRenderDesc[eye].ViewAdjust.z));
|
||||
|
||||
RenderArgs::RenderSide renderSide = RenderArgs::STEREO_LEFT;
|
||||
if (eyeIndex == 0) {
|
||||
_leftEyePosition = thisEyePosition;
|
||||
} else {
|
||||
_rightEyePosition = thisEyePosition;
|
||||
renderSide = RenderArgs::STEREO_RIGHT;
|
||||
}
|
||||
glm::vec3(_eyeRenderDesc[eye].HmdToEyeViewOffset.x, _eyeRenderDesc[eye].HmdToEyeViewOffset.y, _eyeRenderDesc[eye].HmdToEyeViewOffset.z));
|
||||
|
||||
_eyePositions[eye] = thisEyePosition;
|
||||
_camera->update(1.0f / Application::getInstance()->getFps());
|
||||
|
||||
glMatrixMode(GL_PROJECTION);
|
||||
glLoadIdentity();
|
||||
const ovrFovPort& port = _eyeFov[_activeEyeIndex];
|
||||
const ovrFovPort& port = _eyeFov[_activeEye];
|
||||
float nearClip = whichCamera.getNearClip(), farClip = whichCamera.getFarClip();
|
||||
glFrustum(-nearClip * port.LeftTan, nearClip * port.RightTan, -nearClip * port.DownTan,
|
||||
nearClip * port.UpTan, nearClip, farClip);
|
||||
|
||||
glViewport(_eyeRenderViewport[eye].Pos.x, _eyeRenderViewport[eye].Pos.y,
|
||||
_eyeRenderViewport[eye].Size.w, _eyeRenderViewport[eye].Size.h);
|
||||
|
||||
|
||||
ovrRecti & vp = _eyeTextures[eye].Header.RenderViewport;
|
||||
vp.Size.h = _recommendedTexSize.h * _offscreenRenderScale;
|
||||
vp.Size.w = _recommendedTexSize.w * _offscreenRenderScale;
|
||||
|
||||
glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
|
||||
|
||||
glMatrixMode(GL_MODELVIEW);
|
||||
glLoadIdentity();
|
||||
|
||||
|
||||
// HACK: instead of passing the stereo eye offset directly in the matrix, pass it in the camera offset
|
||||
//glTranslatef(_eyeRenderDesc[eye].ViewAdjust.x, _eyeRenderDesc[eye].ViewAdjust.y, _eyeRenderDesc[eye].ViewAdjust.z);
|
||||
|
||||
_camera->setEyeOffsetPosition(glm::vec3(-_eyeRenderDesc[eye].ViewAdjust.x, -_eyeRenderDesc[eye].ViewAdjust.y, -_eyeRenderDesc[eye].ViewAdjust.z));
|
||||
|
||||
_camera->setEyeOffsetPosition(glm::vec3(-_eyeRenderDesc[eye].HmdToEyeViewOffset.x, -_eyeRenderDesc[eye].HmdToEyeViewOffset.y, -_eyeRenderDesc[eye].HmdToEyeViewOffset.z));
|
||||
Application::getInstance()->displaySide(*_camera, false, RenderArgs::MONO);
|
||||
|
||||
applicationOverlay.displayOverlayTextureOculus(*_camera);
|
||||
_activeEyeIndex = -1;
|
||||
}
|
||||
|
||||
//Wait till time-warp to reduce latency
|
||||
ovr_WaitTillTime(_hmdFrameTiming.TimewarpPointSeconds);
|
||||
});
|
||||
_activeEye = ovrEye_Count;
|
||||
|
||||
glPopMatrix();
|
||||
|
||||
//Full texture viewport for glow effect
|
||||
glViewport(0, 0, _renderTargetSize.w, _renderTargetSize.h);
|
||||
|
||||
QOpenGLFramebufferObject * finalFbo = nullptr;
|
||||
//Bind the output texture from the glow shader. If glow effect is disabled, we just grab the texture
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
|
||||
QOpenGLFramebufferObject* fbo = DependencyManager::get<GlowEffect>()->render(true);
|
||||
glBindTexture(GL_TEXTURE_2D, fbo->texture());
|
||||
//Full texture viewport for glow effect
|
||||
glViewport(0, 0, _renderTargetSize.w, _renderTargetSize.h);
|
||||
finalFbo = DependencyManager::get<GlowEffect>()->render(true);
|
||||
} else {
|
||||
DependencyManager::get<TextureCache>()->getPrimaryFramebufferObject()->release();
|
||||
glBindTexture(GL_TEXTURE_2D, DependencyManager::get<TextureCache>()->getPrimaryFramebufferObject()->texture());
|
||||
finalFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebufferObject();
|
||||
finalFbo->release();
|
||||
}
|
||||
|
||||
// restore our normal viewport
|
||||
auto glCanvas = Application::getInstance()->getGLWidget();
|
||||
glViewport(0, 0, glCanvas->getDeviceWidth(), glCanvas->getDeviceHeight());
|
||||
|
||||
glMatrixMode(GL_PROJECTION);
|
||||
glPopMatrix();
|
||||
|
||||
// restore our normal viewport
|
||||
glViewport(0, 0, glCanvas->getDeviceWidth(), glCanvas->getDeviceHeight());
|
||||
|
||||
#if 0
|
||||
if (debugFrame && !timerActive) {
|
||||
timerQuery.begin();
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
|
||||
//Wait till time-warp to reduce latency
|
||||
ovr_WaitTillTime(_hmdFrameTiming.TimewarpPointSeconds);
|
||||
|
||||
//Clear the color buffer to ensure that there isnt any residual color
|
||||
//Left over from when OR was not connected.
|
||||
glClear(GL_COLOR_BUFFER_BIT);
|
||||
|
||||
glBindTexture(GL_TEXTURE_2D, finalFbo->texture());
|
||||
|
||||
//Renders the distorted mesh onto the screen
|
||||
renderDistortionMesh(eyeRenderPose);
|
||||
|
||||
glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
glBindTexture(GL_TEXTURE_2D, 0);
|
||||
glCanvas->swapBuffers();
|
||||
|
||||
#else
|
||||
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
ovrGLTexture & glEyeTexture = reinterpret_cast<ovrGLTexture&>(_eyeTextures[eye]);
|
||||
glEyeTexture.OGL.TexId = finalFbo->texture();
|
||||
|
||||
});
|
||||
|
||||
ovrHmd_EndFrame(_ovrHmd, eyeRenderPose, _eyeTextures);
|
||||
|
||||
#endif
|
||||
|
||||
#if 0
|
||||
if (debugFrame && !timerActive) {
|
||||
timerQuery.end();
|
||||
timerActive = true;
|
||||
}
|
||||
#endif
|
||||
|
||||
// No DK2, no message.
|
||||
char latency2Text[128] = "";
|
||||
{
|
||||
float latencies[5] = {};
|
||||
if (debugFrame && ovrHmd_GetFloatArray(_ovrHmd, "DK2Latency", latencies, 5) == 5)
|
||||
{
|
||||
bool nonZero = false;
|
||||
for (int i = 0; i < 5; ++i)
|
||||
{
|
||||
nonZero |= (latencies[i] != 0.f);
|
||||
}
|
||||
|
||||
if (nonZero)
|
||||
{
|
||||
qDebug() << QString().sprintf("M2P Latency: Ren: %4.2fms TWrp: %4.2fms PostPresent: %4.2fms Err: %4.2fms %4.2fms",
|
||||
latencies[0], latencies[1], latencies[2], latencies[3], latencies[4]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
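The body of display() is long; condensed down to the calls that matter for the new SDK-distortion path, a frame now looks roughly like the outline below. Every call in the outline appears in the hunk above; only the per-eye rendering detail is elided, so treat it as a reading aid rather than working code.

// Condensed outline of the SDK-distortion frame (not a drop-in replacement for display()):
static ovrVector3f eyeOffsets[2] = { { 0, 0, 0 }, { 0, 0, 0 } };
ovrPosef eyePoses[ovrEye_Count];
ovrHmd_GetEyePoses(_ovrHmd, _frameIndex, eyeOffsets, eyePoses, nullptr);
ovrHmd_BeginFrame(_ovrHmd, _frameIndex);
for_each_eye(_ovrHmd, [&](ovrEyeType eye) {
    // set this eye's viewport and projection, then render the scene into the shared FBO
});
// ovrHmd_EndFrame applies distortion/timewarp and presents the frame itself,
// which is why GLCanvas::paintGL() now only swaps when OculusManager::allowSwap() allows it.
ovrHmd_EndFrame(_ovrHmd, eyePoses, _eyeTextures);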
|
||||
|
||||
#ifdef HAVE_LIBOVR
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) {
|
||||
|
||||
glLoadIdentity();
|
||||
|
@ -602,24 +713,25 @@ void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) {
|
|||
|
||||
//Render the distortion meshes for each eye
|
||||
for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
|
||||
|
||||
ovrHmd_GetRenderScaleAndOffset(_eyeRenderDesc[eyeNum].Fov, _renderTargetSize, _eyeTextures[eyeNum].Header.RenderViewport,
|
||||
_UVScaleOffset[eyeNum]);
|
||||
|
||||
GLfloat uvScale[2] = { _UVScaleOffset[eyeNum][0].x, _UVScaleOffset[eyeNum][0].y };
|
||||
_program.setUniformValueArray(_eyeToSourceUVScaleLocation, uvScale, 1, 2);
|
||||
GLfloat uvOffset[2] = { _UVScaleOffset[eyeNum][1].x, _UVScaleOffset[eyeNum][1].y };
|
||||
GLfloat uvOffset[2] = { _UVScaleOffset[eyeNum][1].x, 1.0f - _UVScaleOffset[eyeNum][1].y };
|
||||
_program.setUniformValueArray(_eyeToSourceUVOffsetLocation, uvOffset, 1, 2);
|
||||
|
||||
ovrMatrix4f timeWarpMatrices[2];
|
||||
Matrix4f transposeMatrices[2];
|
||||
glm::mat4 transposeMatrices[2];
|
||||
//Grabs the timewarp matrices to be used in the shader
|
||||
ovrHmd_GetEyeTimewarpMatrices(_ovrHmd, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
|
||||
transposeMatrices[0] = Matrix4f(timeWarpMatrices[0]);
|
||||
transposeMatrices[1] = Matrix4f(timeWarpMatrices[1]);
|
||||
|
||||
//Have to transpose the matrices before using them
|
||||
transposeMatrices[0].Transpose();
|
||||
transposeMatrices[1].Transpose();
|
||||
transposeMatrices[0] = glm::transpose(toGlm(timeWarpMatrices[0]));
|
||||
transposeMatrices[1] = glm::transpose(toGlm(timeWarpMatrices[1]));
|
||||
|
||||
glUniformMatrix4fv(_eyeRotationStartLocation, 1, GL_FALSE, (GLfloat *)transposeMatrices[0].M);
|
||||
glUniformMatrix4fv(_eyeRotationEndLocation, 1, GL_FALSE, (GLfloat *)transposeMatrices[1].M);
|
||||
glUniformMatrix4fv(_eyeRotationStartLocation, 1, GL_FALSE, (GLfloat *)&transposeMatrices[0][0][0]);
|
||||
glUniformMatrix4fv(_eyeRotationEndLocation, 1, GL_FALSE, (GLfloat *)&transposeMatrices[1][0][0]);
|
||||
|
||||
glBindBuffer(GL_ARRAY_BUFFER, _vertices[eyeNum]);
|
||||
|
||||
|
@ -649,86 +761,54 @@ void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) {
|
|||
|
||||
//Tries to reconnect to the sensors
|
||||
void OculusManager::reset() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
if (_isConnected) {
|
||||
ovrHmd_RecenterPose(_ovrHmd);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
//Gets the current predicted angles from the oculus sensors
|
||||
void OculusManager::getEulerAngles(float& yaw, float& pitch, float& roll) {
|
||||
#ifdef HAVE_LIBOVR
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
|
||||
#else
|
||||
ovrSensorState ss = ovrHmd_GetSensorState(_ovrHmd, _hmdFrameTiming.ScanoutMidpointSeconds);
|
||||
#endif
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
if (ts.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) {
|
||||
#else
|
||||
if (ss.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) {
|
||||
#endif
|
||||
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
ovrPosef headPose = ts.HeadPose.ThePose;
|
||||
#else
|
||||
ovrPosef headPose = ss.Predicted.Pose;
|
||||
#endif
|
||||
Quatf orientation = Quatf(headPose.Orientation);
|
||||
orientation.GetEulerAngles<Axis_Y, Axis_X, Axis_Z, Rotate_CCW, Handed_R>(&yaw, &pitch, &roll);
|
||||
glm::vec3 euler = glm::eulerAngles(toGlm(ts.HeadPose.ThePose.Orientation));
|
||||
yaw = euler.y;
|
||||
pitch = euler.x;
|
||||
roll = euler.z;
|
||||
} else {
|
||||
yaw = 0.0f;
|
||||
pitch = 0.0f;
|
||||
roll = 0.0f;
|
||||
}
|
||||
#else
|
||||
yaw = 0.0f;
|
||||
pitch = 0.0f;
|
||||
roll = 0.0f;
|
||||
#endif
|
||||
}
|
||||
|
||||
glm::vec3 OculusManager::getRelativePosition() {
|
||||
#if (defined(__APPLE__) || defined(_WIN32)) && HAVE_LIBOVR
|
||||
ovrTrackingState trackingState = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
|
||||
ovrVector3f headPosition = trackingState.HeadPose.ThePose.Position;
|
||||
|
||||
return glm::vec3(headPosition.x, headPosition.y, headPosition.z);
|
||||
#else
|
||||
// no positional tracking in Linux yet
|
||||
return glm::vec3(0.0f, 0.0f, 0.0f);
|
||||
#endif
|
||||
}
|
||||
|
||||
//Used to set the size of the glow framebuffers
|
||||
QSize OculusManager::getRenderTargetSize() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
QSize rv;
|
||||
rv.setWidth(_renderTargetSize.w);
|
||||
rv.setHeight(_renderTargetSize.h);
|
||||
return rv;
|
||||
#else
|
||||
return QSize(100, 100);
|
||||
#endif
|
||||
}
|
||||
|
||||
void OculusManager::overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
|
||||
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) {
|
||||
#ifdef HAVE_LIBOVR
|
||||
if (_activeEyeIndex != -1) {
|
||||
const ovrFovPort& port = _eyeFov[_activeEyeIndex];
|
||||
if (_activeEye != ovrEye_Count) {
|
||||
const ovrFovPort& port = _eyeFov[_activeEye];
|
||||
right = nearVal * port.RightTan;
|
||||
left = -nearVal * port.LeftTan;
|
||||
top = nearVal * port.UpTan;
|
||||
bottom = -nearVal * port.DownTan;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
int OculusManager::getHMDScreen() {
|
||||
int hmdScreenIndex = -1; // unknown
|
||||
#ifdef HAVE_LIBOVR
|
||||
// TODO: it might be smarter to handle multiple HMDs connected in this case. but for now,
|
||||
// we will simply assume the initialization code that set up _ovrHmd picked the best hmd
|
||||
|
||||
|
@ -777,7 +857,6 @@ int OculusManager::getHMDScreen() {
|
|||
screenNumber++;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
return hmdScreenIndex;
|
||||
}
|
||||
|
||||
|
|
|
@ -13,19 +13,39 @@
|
|||
#ifndef hifi_OculusManager_h
|
||||
#define hifi_OculusManager_h
|
||||
|
||||
#ifdef HAVE_LIBOVR
|
||||
#include <OVR.h>
|
||||
#endif
|
||||
#include <OVR_CAPI.h>
|
||||
|
||||
#include <ProgramObject.h>
|
||||
#include <glm/glm.hpp>
|
||||
#include <glm/gtc/matrix_transform.hpp>
|
||||
#include <glm/gtc/type_ptr.hpp>
|
||||
|
||||
class Camera;
|
||||
class PalmData;
|
||||
class Text3DOverlay;
|
||||
|
||||
// Uncomment this to enable client side distortion. NOT recommended since
// the Oculus SDK will ideally provide the best practices for distortion in
// terms of performance and quality, and by using it we will get updated
// best practices for free with new runtime releases.
#define OVR_CLIENT_DISTORTION 1

// On Win32 platforms, enabling Direct HMD requires that the SDK be
// initialized before the GL context is set up, but this breaks v-sync
// for any application that has a Direct mode enabled Rift connected
// but is not rendering to it. For the time being I'm setting this as
// a macro enabled mechanism which changes where the SDK is initialized.
// To enable Direct HMD mode, you can un-comment this, but with the
// caveat that it will break v-sync in NON-VR mode if you have an Oculus
// Rift connected and in Direct mode.
#define OVR_DIRECT_MODE 1
|
||||
|
||||
|
||||
/// Handles interaction with the Oculus Rift.
|
||||
class OculusManager {
|
||||
public:
|
||||
static void init();
|
||||
static void connect();
|
||||
static void disconnect();
|
||||
static bool isConnected();
|
||||
|
@ -33,6 +53,7 @@ public:
|
|||
static void abandonCalibration();
|
||||
static void beginFrameTiming();
|
||||
static void endFrameTiming();
|
||||
static bool allowSwap();
|
||||
static void configureCamera(Camera& camera, int screenWidth, int screenHeight);
|
||||
static void display(const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera);
|
||||
static void reset();
|
||||
|
@ -47,18 +68,17 @@ public:
|
|||
static void overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
|
||||
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane);
|
||||
|
||||
static glm::vec3 getLeftEyePosition() { return _leftEyePosition; }
|
||||
static glm::vec3 getRightEyePosition() { return _rightEyePosition; }
|
||||
static glm::vec3 getLeftEyePosition() { return _eyePositions[ovrEye_Left]; }
|
||||
static glm::vec3 getRightEyePosition() { return _eyePositions[ovrEye_Right]; }
|
||||
|
||||
static int getHMDScreen();
|
||||
|
||||
private:
|
||||
#ifdef HAVE_LIBOVR
|
||||
static void initSdk();
|
||||
static void shutdownSdk();
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
static void generateDistortionMesh();
|
||||
static void renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]);
|
||||
|
||||
static bool similarNames(const QString& nameA,const QString& nameB);
|
||||
|
||||
struct DistortionVertex {
|
||||
glm::vec2 pos;
|
||||
glm::vec2 texR;
|
||||
|
@ -85,25 +105,28 @@ private:
|
|||
static int _texCoord0AttributeLocation;
|
||||
static int _texCoord1AttributeLocation;
|
||||
static int _texCoord2AttributeLocation;
|
||||
|
||||
static bool _isConnected;
|
||||
|
||||
static ovrHmd _ovrHmd;
|
||||
static ovrHmdDesc _ovrHmdDesc;
|
||||
static ovrFovPort _eyeFov[ovrEye_Count];
|
||||
static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count];
|
||||
static ovrSizei _renderTargetSize;
|
||||
static ovrVector2f _UVScaleOffset[ovrEye_Count][2];
|
||||
static GLuint _vertices[ovrEye_Count];
|
||||
static GLuint _indices[ovrEye_Count];
|
||||
static GLsizei _meshSize[ovrEye_Count];
|
||||
static ovrFrameTiming _hmdFrameTiming;
|
||||
static ovrRecti _eyeRenderViewport[ovrEye_Count];
|
||||
static bool _programInitialized;
|
||||
#endif
|
||||
|
||||
static ovrTexture _eyeTextures[ovrEye_Count];
|
||||
static bool _isConnected;
|
||||
static glm::vec3 _eyePositions[ovrEye_Count];
|
||||
static ovrHmd _ovrHmd;
|
||||
static ovrFovPort _eyeFov[ovrEye_Count];
|
||||
static ovrVector3f _eyeOffset[ovrEye_Count];
|
||||
static glm::mat4 _eyeProjection[ovrEye_Count];
|
||||
static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count];
|
||||
static ovrSizei _renderTargetSize;
|
||||
static unsigned int _frameIndex;
|
||||
static bool _frameTimingActive;
|
||||
static bool _programInitialized;
|
||||
static Camera* _camera;
|
||||
static int _activeEyeIndex;
|
||||
static ovrEyeType _activeEye;
|
||||
static bool _hswDismissed;
|
||||
|
||||
static void calibrate(const glm::vec3 position, const glm::quat orientation);
|
||||
enum CalibrationState {
|
||||
|
@ -125,13 +148,65 @@ private:
|
|||
static glm::quat _calibrationOrientation;
|
||||
static quint64 _calibrationStartTime;
|
||||
static int _calibrationMessage;
|
||||
|
||||
#endif
|
||||
|
||||
static glm::vec3 _leftEyePosition;
|
||||
static glm::vec3 _rightEyePosition;
|
||||
|
||||
|
||||
// TODO drop this variable and use the existing 'Developer | Render | Scale Resolution' value
|
||||
static ovrSizei _recommendedTexSize;
|
||||
static float _offscreenRenderScale;
|
||||
static bool _eyePerFrameMode;
|
||||
static ovrEyeType _lastEyeRendered;
|
||||
};
|
||||
|
||||
|
||||
inline glm::mat4 toGlm(const ovrMatrix4f & om) {
    return glm::transpose(glm::make_mat4(&om.M[0][0]));
}

inline glm::mat4 toGlm(const ovrFovPort & fovport, float nearPlane = 0.01f, float farPlane = 10000.0f) {
    return toGlm(ovrMatrix4f_Projection(fovport, nearPlane, farPlane, true));
}

inline glm::vec3 toGlm(const ovrVector3f & ov) {
    return glm::make_vec3(&ov.x);
}

inline glm::vec2 toGlm(const ovrVector2f & ov) {
    return glm::make_vec2(&ov.x);
}

inline glm::uvec2 toGlm(const ovrSizei & ov) {
    return glm::uvec2(ov.w, ov.h);
}

inline glm::quat toGlm(const ovrQuatf & oq) {
    return glm::make_quat(&oq.x);
}

inline glm::mat4 toGlm(const ovrPosef & op) {
    glm::mat4 orientation = glm::mat4_cast(toGlm(op.Orientation));
    glm::mat4 translation = glm::translate(glm::mat4(), toGlm(op.Position));
    return translation * orientation;
}

inline ovrMatrix4f ovrFromGlm(const glm::mat4 & m) {
    ovrMatrix4f result;
    glm::mat4 transposed(glm::transpose(m));
    memcpy(result.M, &(transposed[0][0]), sizeof(float) * 16);
    return result;
}

inline ovrVector3f ovrFromGlm(const glm::vec3 & v) {
    return{ v.x, v.y, v.z };
}

inline ovrVector2f ovrFromGlm(const glm::vec2 & v) {
    return{ v.x, v.y };
}

inline ovrSizei ovrFromGlm(const glm::uvec2 & v) {
    return{ (int)v.x, (int)v.y };
}

inline ovrQuatf ovrFromGlm(const glm::quat & q) {
    return{ q.x, q.y, q.z, q.w };
}
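The toGlm()/ovrFromGlm() overloads above centralize the OVR-to-GLM conversions that the old code did field by field. A hedged usage sketch follows; the two helper names are hypothetical, introduced only for illustration, and assume the same LibOVR and GLM headers this file already includes:

inline glm::mat4 headPoseToWorld(const ovrPosef& headPose, const glm::mat4& bodyTransform) {
    // toGlm(ovrPosef) returns translation * rotation, so composing with the body
    // transform yields the head's world-space transform.
    return bodyTransform * toGlm(headPose);
}

inline ovrVector3f eyeOffsetToOvr(const glm::vec3& offset) {
    return ovrFromGlm(offset);   // straight component-wise copy
}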
|
||||
|
||||
#endif // hifi_OculusManager_h
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
|
||||
#include "AddressManager.h"
|
||||
#include "Application.h"
|
||||
#include "devices/OculusManager.h"
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
static BOOL CALLBACK enumWindowsCallback(HWND hWnd, LPARAM lParam) {
|
||||
|
@ -92,6 +93,10 @@ int main(int argc, const char* argv[]) {
|
|||
usecTimestampNowForceClockSkew(clockSkew);
|
||||
qDebug("clockSkewOption=%s clockSkew=%d", clockSkewOption, clockSkew);
|
||||
}
|
||||
// Oculus initialization MUST PRECEDE OpenGL context creation.
|
||||
// The nature of the Application constructor means this has to be either here,
|
||||
// or in the main window ctor, before GL startup.
|
||||
OculusManager::init();
|
||||
|
||||
int exitCode;
|
||||
{
|
||||
|
|
|
@ -24,7 +24,7 @@
|
|||
#include "LoginDialog.h"
|
||||
#include "UIUtil.h"
|
||||
|
||||
const QString CREATE_ACCOUNT_URL = NetworkingConstants::METAVERSE_SERVER_URL.toString() + "/create";
|
||||
const QString CREATE_ACCOUNT_URL = NetworkingConstants::METAVERSE_SERVER_URL.toString() + "/signup";
|
||||
const QString FORGOT_PASSWORD_URL = NetworkingConstants::METAVERSE_SERVER_URL.toString() + "/users/password/new";
|
||||
|
||||
LoginDialog::LoginDialog(QWidget* parent) :
|
||||
|
|
|
@ -107,6 +107,7 @@ void EntityTreeRenderer::init() {
|
|||
}
|
||||
|
||||
void EntityTreeRenderer::shutdown() {
|
||||
_entitiesScriptEngine->disconnect(); // disconnect all slots/signals from the script engine
|
||||
_shuttingDown = true;
|
||||
}
|
||||
|
||||
|
|
|
@ -92,8 +92,8 @@ void EarthSunModel::setSurfaceOrientation(const Quat& orientation) {
|
|||
double moduloRange(double val, double minVal, double maxVal) {
|
||||
double range = maxVal - minVal;
|
||||
double rval = (val - minVal) / range;
|
||||
double intval;
|
||||
return modf(rval, &intval) * range + minVal;
|
||||
rval = rval - floor(rval);
|
||||
return rval * range + minVal;
|
||||
}
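The hunk above swaps the modf()-based wrap for rval - floor(rval). The difference only shows up for inputs below minVal: modf() truncates toward zero, so a negative fractional part wrapped to the wrong side of the range, while floor() always leaves a fraction in [0, 1). A small self-contained check that mirrors the new code; the sample values are illustrative:

#include <cmath>
#include <cstdio>

// Mirrors the new floor()-based moduloRange above.
static double moduloRange(double val, double minVal, double maxVal) {
    double range = maxVal - minVal;
    double rval = (val - minVal) / range;
    rval = rval - floor(rval);
    return rval * range + minVal;
}

int main() {
    // -190 wrapped into [-180, 180) comes out as 170; the old modf() version
    // returned -190 because truncation keeps the negative fraction.
    printf("%f\n", moduloRange(-190.0, -180.0, 180.0));   // 170
    printf("%f\n", moduloRange(365.0, -180.0, 180.0));    // 5
    return 0;
}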
|
||||
|
||||
const float MAX_LONGITUDE = 180.0f;
|
||||
|
|
|
@ -20,20 +20,47 @@ void SceneScriptingInterface::setStageLocation(float longitude, float latitude,
|
|||
_skyStage->setOriginLocation(longitude, latitude, altitude);
|
||||
}
|
||||
|
||||
float SceneScriptingInterface::getStageLocationLongitude() const {
|
||||
return _skyStage->getOriginLongitude();
|
||||
}
|
||||
float SceneScriptingInterface::getStageLocationLatitude() const {
|
||||
return _skyStage->getOriginLatitude();
|
||||
}
|
||||
float SceneScriptingInterface::getStageLocationAltitude() const {
|
||||
return _skyStage->getOriginSurfaceAltitude();
|
||||
}
|
||||
|
||||
void SceneScriptingInterface::setStageDayTime(float hour) {
|
||||
_skyStage->setDayTime(hour);
|
||||
}
|
||||
|
||||
float SceneScriptingInterface::getStageDayTime() const {
|
||||
return _skyStage->getDayTime();
|
||||
}
|
||||
|
||||
void SceneScriptingInterface::setStageYearTime(int day) {
|
||||
_skyStage->setYearTime(day);
|
||||
}
|
||||
|
||||
int SceneScriptingInterface::getStageYearTime() const {
|
||||
return _skyStage->getYearTime();
|
||||
}
|
||||
|
||||
void SceneScriptingInterface::setSunColor(const glm::vec3& color) {
|
||||
_skyStage->setSunColor(color);
|
||||
}
|
||||
|
||||
const glm::vec3& SceneScriptingInterface::getSunColor() const {
|
||||
return _skyStage->getSunColor();
|
||||
}
|
||||
|
||||
void SceneScriptingInterface::setSunIntensity(float intensity) {
|
||||
_skyStage->setSunIntensity(intensity);
|
||||
}
|
||||
|
||||
float SceneScriptingInterface::getSunIntensity() const {
|
||||
return _skyStage->getSunIntensity();
|
||||
}
|
||||
|
||||
model::SunSkyStagePointer SceneScriptingInterface::getSkyStage() const {
|
||||
return _skyStage;
|
||||
|
|
|
@ -24,12 +24,21 @@ class SceneScriptingInterface : public QObject, public Dependency {

public:
    Q_INVOKABLE void setStageOrientation(const glm::quat& orientation);

    Q_INVOKABLE void setStageLocation(float longitude, float latitude, float altitude);
    Q_INVOKABLE float getStageLocationLongitude() const;
    Q_INVOKABLE float getStageLocationLatitude() const;
    Q_INVOKABLE float getStageLocationAltitude() const;

    Q_INVOKABLE void setStageDayTime(float hour);
    Q_INVOKABLE float getStageDayTime() const;
    Q_INVOKABLE void setStageYearTime(int day);
    Q_INVOKABLE int getStageYearTime() const;

    Q_INVOKABLE void setSunColor(const glm::vec3& color);
    Q_INVOKABLE const glm::vec3& getSunColor() const;
    Q_INVOKABLE void setSunIntensity(float intensity);
    Q_INVOKABLE float getSunIntensity() const;

    model::SunSkyStagePointer getSkyStage() const;
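These Q_INVOKABLE accessors are what the "Scene" global registered in Application.cpp and ScriptEngine.cpp exposes to scripts. A brief C++ usage sketch via the DependencyManager pattern used elsewhere in this diff; the numeric values are made up for illustration:

// Illustrative only; values are assumptions, not taken from the commit.
auto scene = DependencyManager::get<SceneScriptingInterface>();
scene->setStageLocation(-122.3f, 47.6f, 0.0f);   // longitude, latitude, altitude
scene->setStageDayTime(17.5f);                   // 17:30 stage time
scene->setStageYearTime(172);                    // ~June 21st
float sunIntensity = scene->getSunIntensity();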
|
||||
|
||||
|
|
|
@ -305,8 +305,6 @@ void ScriptEngine::init() {
|
|||
|
||||
_isInitialized = true;
|
||||
|
||||
auto sceneScriptingInterface = DependencyManager::set<SceneScriptingInterface>();
|
||||
|
||||
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
|
||||
entityScriptingInterface->init();
|
||||
|
||||
|
@ -350,7 +348,6 @@ void ScriptEngine::init() {
|
|||
registerGlobalObject("Vec3", &_vec3Library);
|
||||
registerGlobalObject("Uuid", &_uuidLibrary);
|
||||
registerGlobalObject("AnimationCache", DependencyManager::get<AnimationCache>().data());
|
||||
registerGlobalObject("Scene", DependencyManager::get<SceneScriptingInterface>().data());
|
||||
|
||||
// constants
|
||||
globalObject().setProperty("TREE_SCALE", newVariant(QVariant(TREE_SCALE)));
|
||||
|
|
|
@ -13,15 +13,14 @@
|
|||
//
|
||||
|
||||
#include <QEventLoop>
|
||||
#include <QFile>
|
||||
#include <qurlquery.h>
|
||||
|
||||
#include <AccountManager.h>
|
||||
#include <NetworkAccessManager.h>
|
||||
#include <NetworkingConstants.h>
|
||||
|
||||
#include <AccountManager.h>
|
||||
#include "XMLHttpRequestClass.h"
|
||||
#include "ScriptEngine.h"
|
||||
#include "XMLHttpRequestClass.h"
|
||||
|
||||
const QString METAVERSE_API_URL = NetworkingConstants::METAVERSE_SERVER_URL.toString() + "/api/";
|
||||
|
||||
|
@ -42,7 +41,6 @@ XMLHttpRequestClass::XMLHttpRequestClass(QScriptEngine* engine) :
|
|||
_onReadyStateChange(QScriptValue::NullValue),
|
||||
_readyState(XMLHttpRequestClass::UNSENT),
|
||||
_errorCode(QNetworkReply::NoError),
|
||||
_file(NULL),
|
||||
_timeout(0),
|
||||
_timer(this),
|
||||
_numRedirects(0) {
|
||||
|
@ -63,22 +61,6 @@ QScriptValue XMLHttpRequestClass::getStatus() const {
|
|||
if (_reply) {
|
||||
return QScriptValue(_reply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt());
|
||||
}
|
||||
if(_url.isLocalFile()) {
|
||||
switch (_errorCode) {
|
||||
case QNetworkReply::NoError:
|
||||
return QScriptValue(200);
|
||||
case QNetworkReply::ContentNotFoundError:
|
||||
return QScriptValue(404);
|
||||
case QNetworkReply::ContentConflictError:
|
||||
return QScriptValue(409);
|
||||
case QNetworkReply::TimeoutError:
|
||||
return QScriptValue(408);
|
||||
case QNetworkReply::ContentOperationNotPermittedError:
|
||||
return QScriptValue(501);
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
return QScriptValue(0);
|
||||
}
|
||||
|
||||
|
@ -86,22 +68,6 @@ QString XMLHttpRequestClass::getStatusText() const {
|
|||
if (_reply) {
|
||||
return _reply->attribute(QNetworkRequest::HttpReasonPhraseAttribute).toString();
|
||||
}
|
||||
if (_url.isLocalFile()) {
|
||||
switch (_errorCode) {
|
||||
case QNetworkReply::NoError:
|
||||
return "OK";
|
||||
case QNetworkReply::ContentNotFoundError:
|
||||
return "Not Found";
|
||||
case QNetworkReply::ContentConflictError:
|
||||
return "Conflict";
|
||||
case QNetworkReply::TimeoutError:
|
||||
return "Timeout";
|
||||
case QNetworkReply::ContentOperationNotPermittedError:
|
||||
return "Not Implemented";
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
|
@ -147,13 +113,6 @@ QScriptValue XMLHttpRequestClass::getAllResponseHeaders() const {
|
|||
}
|
||||
return QString(headers.data());
|
||||
}
|
||||
if (_url.isLocalFile()) {
|
||||
QString headers = QString("Content-Type: application/octet-stream\n");
|
||||
headers.append("Content-Length: ");
|
||||
headers.append(QString("%1").arg(_rawResponseData.length()));
|
||||
headers.append("\n");
|
||||
return headers;
|
||||
}
|
||||
return QScriptValue("");
|
||||
}
|
||||
|
||||
|
@ -161,14 +120,6 @@ QScriptValue XMLHttpRequestClass::getResponseHeader(const QString& name) const {
|
|||
if (_reply && _reply->hasRawHeader(name.toLatin1())) {
|
||||
return QScriptValue(QString(_reply->rawHeader(name.toLatin1())));
|
||||
}
|
||||
if (_url.isLocalFile()) {
|
||||
if (name.toLower() == "content-type") {
|
||||
return QString("application/octet-stream");
|
||||
}
|
||||
if (name.toLower() == "content-length") {
|
||||
return QString("%1").arg(_rawResponseData.length());
|
||||
}
|
||||
}
|
||||
return QScriptValue::NullValue;
|
||||
}
|
||||
|
||||
|
@ -188,47 +139,24 @@ void XMLHttpRequestClass::open(const QString& method, const QString& url, bool a
|
|||
_url.setUrl(url);
|
||||
_async = async;
|
||||
|
||||
if (_url.isLocalFile()) {
|
||||
if (_method.toUpper() == "GET" && !_async && username.isEmpty() && password.isEmpty()) {
|
||||
_file = new QFile(_url.toLocalFile());
|
||||
if (!_file->exists()) {
|
||||
qDebug() << "Can't find file " << _url.fileName();
|
||||
abortRequest();
|
||||
_errorCode = QNetworkReply::ContentNotFoundError;
|
||||
setReadyState(DONE);
|
||||
emit requestComplete();
|
||||
} else if (!_file->open(QIODevice::ReadOnly)) {
|
||||
qDebug() << "Can't open file " << _url.fileName();
|
||||
abortRequest();
|
||||
_errorCode = QNetworkReply::ContentConflictError;
|
||||
setReadyState(DONE);
|
||||
emit requestComplete();
|
||||
} else {
|
||||
setReadyState(OPENED);
|
||||
}
|
||||
} else {
|
||||
notImplemented();
|
||||
}
|
||||
} else {
|
||||
if (url.toLower().left(METAVERSE_API_URL.length()) == METAVERSE_API_URL) {
|
||||
AccountManager& accountManager = AccountManager::getInstance();
|
||||
if (url.toLower().left(METAVERSE_API_URL.length()) == METAVERSE_API_URL) {
|
||||
AccountManager& accountManager = AccountManager::getInstance();
|
||||
|
||||
if (accountManager.hasValidAccessToken()) {
|
||||
QUrlQuery urlQuery(_url.query());
|
||||
urlQuery.addQueryItem("access_token", accountManager.getAccountInfo().getAccessToken().token);
|
||||
_url.setQuery(urlQuery);
|
||||
}
|
||||
if (accountManager.hasValidAccessToken()) {
|
||||
QUrlQuery urlQuery(_url.query());
|
||||
urlQuery.addQueryItem("access_token", accountManager.getAccountInfo().getAccessToken().token);
|
||||
_url.setQuery(urlQuery);
|
||||
}
|
||||
|
||||
}
|
||||
if (!username.isEmpty()) {
|
||||
_url.setUserName(username);
|
||||
}
|
||||
if (!password.isEmpty()) {
|
||||
_url.setPassword(password);
|
||||
}
|
||||
_request.setUrl(_url);
|
||||
setReadyState(OPENED);
|
||||
}
|
||||
if (!username.isEmpty()) {
|
||||
_url.setUserName(username);
|
||||
}
|
||||
if (!password.isEmpty()) {
|
||||
_url.setPassword(password);
|
||||
}
|
||||
_request.setUrl(_url);
|
||||
setReadyState(OPENED);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -239,23 +167,18 @@ void XMLHttpRequestClass::send() {
|
|||
void XMLHttpRequestClass::send(const QScriptValue& data) {
|
||||
if (_readyState == OPENED && !_reply) {
|
||||
if (!data.isNull()) {
|
||||
if (_url.isLocalFile()) {
|
||||
notImplemented();
|
||||
return;
|
||||
_sendData = new QBuffer(this);
|
||||
if (data.isObject()) {
|
||||
QByteArray ba = qscriptvalue_cast<QByteArray>(data);
|
||||
_sendData->setData(ba);
|
||||
} else {
|
||||
_sendData = new QBuffer(this);
|
||||
if (data.isObject()) {
|
||||
QByteArray ba = qscriptvalue_cast<QByteArray>(data);
|
||||
_sendData->setData(ba);
|
||||
} else {
|
||||
_sendData->setData(data.toString().toUtf8());
|
||||
}
|
||||
_sendData->setData(data.toString().toUtf8());
|
||||
}
|
||||
}
|
||||
|
||||
doSend();
|
||||
|
||||
if (!_async && !_url.isLocalFile()) {
|
||||
if (!_async) {
|
||||
QEventLoop loop;
|
||||
connect(this, SIGNAL(requestComplete()), &loop, SLOT(quit()));
|
||||
loop.exec();
|
||||
|
@ -265,23 +188,13 @@ void XMLHttpRequestClass::send(const QScriptValue& data) {
|
|||
|
||||
void XMLHttpRequestClass::doSend() {
|
||||
|
||||
if (!_url.isLocalFile()) {
|
||||
_reply = NetworkAccessManager::getInstance().sendCustomRequest(_request, _method.toLatin1(), _sendData);
|
||||
connectToReply(_reply);
|
||||
}
|
||||
_reply = NetworkAccessManager::getInstance().sendCustomRequest(_request, _method.toLatin1(), _sendData);
|
||||
connectToReply(_reply);
|
||||
|
||||
if (_timeout > 0) {
|
||||
_timer.start(_timeout);
|
||||
connect(&_timer, SIGNAL(timeout()), this, SLOT(requestTimeout()));
|
||||
}
|
||||
|
||||
if (_url.isLocalFile()) {
|
||||
setReadyState(HEADERS_RECEIVED);
|
||||
setReadyState(LOADING);
|
||||
_rawResponseData = _file->readAll();
|
||||
_file->close();
|
||||
requestFinished();
|
||||
}
|
||||
}
|
||||
|
||||
void XMLHttpRequestClass::requestTimeout() {
|
||||
|
@ -300,16 +213,10 @@ void XMLHttpRequestClass::requestError(QNetworkReply::NetworkError code) {
|
|||
void XMLHttpRequestClass::requestFinished() {
|
||||
disconnect(&_timer, SIGNAL(timeout()), this, SLOT(requestTimeout()));
|
||||
|
||||
if (!_url.isLocalFile()) {
|
||||
_errorCode = _reply->error();
|
||||
} else {
|
||||
_errorCode = QNetworkReply::NoError;
|
||||
}
|
||||
_errorCode = _reply->error();
|
||||
|
||||
if (_errorCode == QNetworkReply::NoError) {
|
||||
if (!_url.isLocalFile()) {
|
||||
_rawResponseData.append(_reply->readAll());
|
||||
}
|
||||
_rawResponseData.append(_reply->readAll());
|
||||
|
||||
if (_responseType == "json") {
|
||||
_responseData = _engine->evaluate("(" + QString(_rawResponseData.data()) + ")");
|
||||
|
@ -338,19 +245,6 @@ void XMLHttpRequestClass::abortRequest() {
|
|||
_reply->deleteLater();
|
||||
_reply = NULL;
|
||||
}
|
||||
|
||||
if (_file != NULL) {
|
||||
_file->close();
|
||||
_file = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
void XMLHttpRequestClass::notImplemented() {
|
||||
abortRequest();
|
||||
//_errorCode = QNetworkReply::OperationNotImplementedError; TODO: Use this status code when update to Qt 5.3
|
||||
_errorCode = QNetworkReply::ContentOperationNotPermittedError;
|
||||
setReadyState(DONE);
|
||||
emit requestComplete();
|
||||
}
|
||||
|
||||
void XMLHttpRequestClass::connectToReply(QNetworkReply* reply) {
|
||||
|
|
|
@ -97,7 +97,6 @@ private:
|
|||
void connectToReply(QNetworkReply* reply);
|
||||
void disconnectFromReply(QNetworkReply* reply);
|
||||
void abortRequest();
|
||||
void notImplemented();
|
||||
|
||||
QScriptEngine* _engine;
|
||||
bool _async;
|
||||
|
@ -113,7 +112,6 @@ private:
|
|||
QScriptValue _onReadyStateChange;
|
||||
ReadyState _readyState;
|
||||
QNetworkReply::NetworkError _errorCode;
|
||||
QFile* _file;
|
||||
int _timeout;
|
||||
QTimer _timer;
|
||||
int _numRedirects;
|