Merge branch 'master' of https://github.com/worklist/hifi into 20367
Commit b98ef40489
48 changed files with 1440 additions and 1648 deletions
@ -3,4 +3,4 @@ Please read the [general build guide](BUILD.md) for information on dependencies

### Qt5 Dependencies

Should you choose not to install Qt5 via a package manager that handles dependencies for you, you may be missing some Qt5 dependencies. On Ubuntu, for example, the following additional packages are required:

    libasound2 libxmu-dev libxi-dev freeglut3-dev libasound2-dev libjack-dev
    libasound2 libxmu-dev libxi-dev freeglut3-dev libasound2-dev libjack-dev libxrandr-dev

@ -162,6 +162,7 @@ option(GET_GLM "Get GLM library automatically as external project" 1)
option(GET_GVERB "Get Gverb library automatically as external project" 1)
option(GET_SOXR "Get Soxr library automatically as external project" 1)
option(GET_TBB "Get Threading Building Blocks library automatically as external project" 1)
option(GET_LIBOVR "Get LibOVR library automatically as external project" 1)
option(USE_NSIGHT "Attempt to find the nSight libraries" 1)

if (WIN32)

cmake/externals/LibOVR/CMakeLists.txt (new vendored file, 85 lines)
@ -0,0 +1,85 @@
include(ExternalProject)
include(SelectLibraryConfigurations)

set(EXTERNAL_NAME LibOVR)

string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)

if (WIN32)

  ExternalProject_Add(
    ${EXTERNAL_NAME}
    URL http://static.oculus.com/sdk-downloads/ovr_sdk_win_0.5.0.1.zip
    URL_MD5 d3fc4c02db9be5ff08af4ef4c97b32f9
    CONFIGURE_COMMAND ""
    BUILD_COMMAND ""
    INSTALL_COMMAND ""
    LOG_DOWNLOAD 1
  )

  ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)

  # FIXME need to account for different architectures
  set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/LibOVR/Include CACHE TYPE INTERNAL)
  set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/LibOVR/Lib/Windows/Win32/Release/VS2013/LibOVR.lib CACHE TYPE INTERNAL)

elseif(APPLE)

  ExternalProject_Add(
    ${EXTERNAL_NAME}
    URL http://static.oculus.com/sdk-downloads/ovr_sdk_macos_0.5.0.1.tar.gz
    URL_MD5 0a0785a04fb285f64f62267388344ad6
    CONFIGURE_COMMAND ""
    BUILD_COMMAND ""
    INSTALL_COMMAND ""
    LOG_DOWNLOAD 1
  )

  ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
  # In theory we should use the Headers path inside the framework, as seen here
  # but unfortunately Oculus doesn't seem to have figured out automated testing
  # so they released a framework with missing headers.
  #set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/LibOVR/Lib/Mac/Release/LibOVR.framework/Headers/ CACHE TYPE INTERNAL)

  # Work around the broken framework by using a different path for the headers.
  set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/LibOVR/Include CACHE TYPE INTERNAL)
  set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/LibOVR/Lib/Mac/Release/LibOVR.framework/LibOVR CACHE TYPE INTERNAL)

elseif(NOT ANDROID)

  # http://static.oculus.com/sdk-downloads/ovr_sdk_linux_0.4.4.tar.xz
  # ec3bd8cff4a1461b4e21210e7feb0572
  ExternalProject_Add(
    ${EXTERNAL_NAME}
    PREFIX ${EXTERNAL_NAME}
    GIT_REPOSITORY https://github.com/jherico/OculusSDK.git
    GIT_TAG 0d6f0cf110ea7566fc6d64b8d4fe6bb881d9cff5
    CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
    LOG_DOWNLOAD ON
  )

  ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
  ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)

  set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/libovr.a CACHE TYPE INTERNAL)
  set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG "" CACHE TYPE INTERNAL)

  find_package(Threads REQUIRED)
  find_package(X11 REQUIRED)

  # Check for XRandR (modern resolution switching and gamma control)
  if (NOT X11_Xrandr_FOUND)
    message(FATAL_ERROR "The RandR library and headers were not found")
  endif()

  set(${EXTERNAL_NAME_UPPER}_LIBRARY_EXTRAS rt udev ${CMAKE_THREAD_LIBS_INIT} ${X11_X11_LIB} ${X11_Xrandr_LIB})

  select_library_configurations(${EXTERNAL_NAME_UPPER})

  set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/LibOVR/Include ${SOURCE_DIR}/LibOVR/Src CACHE TYPE INTERNAL)
  set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${${EXTERNAL_NAME_UPPER}_LIBRARY} ${${EXTERNAL_NAME_UPPER}_LIBRARY_EXTRAS} CACHE TYPE INTERNAL)
endif()

@ -13,8 +13,6 @@
set(_TBB_LIBRARY_DIR ${CMAKE_CURRENT_SOURCE_DIR}/lib)
file(GLOB_RECURSE _TBB_LIBRARIES "${_TBB_LIBRARY_DIR}/*.dylib")

message(${_TBB_LIBRARIES})

# raise an error if we found none
if (NOT _TBB_LIBRARIES)
  message(FATAL_ERROR "Did not find any TBB libraries")

@ -18,48 +18,12 @@
|
|||
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
#
|
||||
|
||||
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
|
||||
hifi_library_search_hints("libovr")
|
||||
|
||||
include(SelectLibraryConfigurations)
|
||||
|
||||
if (NOT ANDROID)
|
||||
|
||||
find_path(LIBOVR_INCLUDE_DIRS OVR.h PATH_SUFFIXES Include HINTS ${LIBOVR_SEARCH_DIRS})
|
||||
find_path(LIBOVR_SRC_DIR Util_Render_Stereo.h PATH_SUFFIXES Src/Util HINTS ${LIBOVR_SEARCH_DIRS})
|
||||
|
||||
if (APPLE)
|
||||
find_library(LIBOVR_LIBRARY_DEBUG NAMES ovr PATH_SUFFIXES Lib/Mac/Debug HINTS ${LIBOVR_SEARCH_DIRS})
|
||||
find_library(LIBOVR_LIBRARY_RELEASE NAMES ovr PATH_SUFFIXES Lib/Mac/Release HINTS ${LIBOVR_SEARCH_DIRS})
|
||||
find_library(ApplicationServices ApplicationServices)
|
||||
find_library(IOKit IOKit)
|
||||
elseif (UNIX)
|
||||
find_library(UDEV_LIBRARY_RELEASE udev /usr/lib/x86_64-linux-gnu/)
|
||||
find_library(XINERAMA_LIBRARY_RELEASE Xinerama /usr/lib/x86_64-linux-gnu/)
|
||||
|
||||
if (CMAKE_CL_64)
|
||||
set(LINUX_ARCH_DIR "i386")
|
||||
else()
|
||||
set(LINUX_ARCH_DIR "x86_64")
|
||||
endif()
|
||||
|
||||
find_library(LIBOVR_LIBRARY_DEBUG NAMES ovr PATH_SUFFIXES Lib/Linux/Debug/${LINUX_ARCH_DIR} HINTS ${LIBOVR_SEARCH_DIRS})
|
||||
find_library(LIBOVR_LIBRARY_RELEASE NAMES ovr PATH_SUFFIXES Lib/Linux/Release/${LINUX_ARCH_DIR} HINTS ${LIBOVR_SEARCH_DIRS})
|
||||
|
||||
select_library_configurations(UDEV)
|
||||
select_library_configurations(XINERAMA)
|
||||
|
||||
elseif (WIN32)
|
||||
if (MSVC10)
|
||||
find_library(LIBOVR_LIBRARY_DEBUG NAMES libovrd PATH_SUFFIXES Lib/Win32/VS2010 HINTS ${LIBOVR_SEARCH_DIRS})
|
||||
find_library(LIBOVR_LIBRARY_RELEASE NAMES libovr PATH_SUFFIXES Lib/Win32/VS2010 HINTS ${LIBOVR_SEARCH_DIRS})
|
||||
elseif (MSVC12)
|
||||
find_library(LIBOVR_LIBRARY_DEBUG NAMES libovrd PATH_SUFFIXES Lib/Win32/VS2013 HINTS ${LIBOVR_SEARCH_DIRS})
|
||||
find_library(LIBOVR_LIBRARY_RELEASE NAMES libovr PATH_SUFFIXES Lib/Win32/VS2013 HINTS ${LIBOVR_SEARCH_DIRS})
|
||||
endif ()
|
||||
find_package(ATL)
|
||||
endif ()
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(LIBOVR DEFAULT_MSG LIBOVR_INCLUDE_DIRS LIBOVR_LIBRARIES)
|
||||
|
||||
else (NOT ANDROID)
|
||||
set(_VRLIB_JNI_DIR "VRLib/jni")
|
||||
set(_VRLIB_LIBS_DIR "VRLib/obj/local/armeabi-v7a")
|
||||
|
@ -76,31 +40,4 @@ else (NOT ANDROID)
|
|||
find_library(TURBOJPEG_LIBRARY NAMES jpeg PATH_SUFFIXES 3rdParty/turbojpeg HINTS ${LIBOVR_SEARCH_DIRS})
|
||||
endif (NOT ANDROID)
|
||||
|
||||
select_library_configurations(LIBOVR)
|
||||
set(LIBOVR_LIBRARIES ${LIBOVR_LIBRARY})
|
||||
|
||||
list(APPEND LIBOVR_ARGS_LIST LIBOVR_INCLUDE_DIRS LIBOVR_SRC_DIR LIBOVR_LIBRARY)
|
||||
|
||||
if (APPLE)
|
||||
list(APPEND LIBOVR_LIBRARIES ${IOKit} ${ApplicationServices})
|
||||
list(APPEND LIBOVR_ARGS_LIST IOKit ApplicationServices)
|
||||
elseif (ANDROID)
|
||||
|
||||
list(APPEND LIBOVR_ANDROID_LIBRARIES "-lGLESv3" "-lEGL" "-landroid" "-lOpenMAXAL" "-llog" "-lz" "-lOpenSLES")
|
||||
list(APPEND LIBOVR_ARGS_LIST LIBOVR_ANDROID_LIBRARIES LIBOVR_VRLIB_DIR MINIZIP_DIR JNI_DIR TURBOJPEG_LIBRARY)
|
||||
elseif (UNIX)
|
||||
list(APPEND LIBOVR_LIBRARIES "${UDEV_LIBRARY}" "${XINERAMA_LIBRARY}")
|
||||
list(APPEND LIBOVR_ARGS_LIST UDEV_LIBRARY XINERAMA_LIBRARY)
|
||||
elseif (WIN32)
|
||||
list(APPEND LIBOVR_LIBRARIES ${ATL_LIBRARIES})
|
||||
list(APPEND LIBOVR_ARGS_LIST ATL_LIBRARIES)
|
||||
endif ()
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(LibOVR DEFAULT_MSG ${LIBOVR_ARGS_LIST})
|
||||
|
||||
if (ANDROID)
|
||||
list(APPEND LIBOVR_INCLUDE_DIRS ${LIBOVR_SRC_DIR} ${MINIZIP_DIR} ${JNI_DIR})
|
||||
endif ()
|
||||
|
||||
mark_as_advanced(LIBOVR_INCLUDE_DIRS LIBOVR_LIBRARIES LIBOVR_SEARCH_DIRS)
|
||||
|
|
|
@ -4,6 +4,7 @@
|
|||
//
|
||||
// Created by Brad Hefta-Gaub on 12/31/13.
|
||||
// Modified by Philip on 3/3/14
|
||||
// Modified by Thijs Wenker on 3/31/15
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// This is an example script that turns the hydra controllers and mouse into an entity gun.
|
||||
|
@ -66,7 +67,7 @@ var impactSound = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Guns/BulletIm
|
|||
var targetHitSound = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Space%20Invaders/hit.raw");
|
||||
var targetLaunchSound = SoundCache.getSound(HIFI_PUBLIC_BUCKET + "sounds/Space%20Invaders/shoot.raw");
|
||||
|
||||
var gunModel = "http://public.highfidelity.io/models/attachments/HaloGun.fst";
|
||||
var gunModel = "https://s3.amazonaws.com/hifi-public/cozza13/gun/m1911-handgun+1.fbx?v=4";
|
||||
|
||||
var audioOptions = {
|
||||
volume: 0.9
|
||||
|
@ -103,7 +104,7 @@ var reticle = Overlays.addOverlay("image", {
|
|||
y: screenSize.y / 2 - (BUTTON_SIZE / 2),
|
||||
width: BUTTON_SIZE,
|
||||
height: BUTTON_SIZE,
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/billiardsReticle.png",
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/gun/crosshairs.svg",
|
||||
alpha: 1
|
||||
});
|
||||
|
||||
|
@ -112,8 +113,8 @@ var offButton = Overlays.addOverlay("image", {
|
|||
y: screenSize.y - (BUTTON_SIZE + PADDING),
|
||||
width: BUTTON_SIZE,
|
||||
height: BUTTON_SIZE,
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/close.png",
|
||||
alpha: 1
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/gun/close.svg",
|
||||
alpha: 1
|
||||
});
|
||||
|
||||
startX += BUTTON_SIZE + PADDING;
|
||||
|
@ -122,7 +123,7 @@ var platformButton = Overlays.addOverlay("image", {
|
|||
y: screenSize.y - (BUTTON_SIZE + PADDING),
|
||||
width: BUTTON_SIZE,
|
||||
height: BUTTON_SIZE,
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/city.png",
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/gun/platform-targets.svg",
|
||||
alpha: 1
|
||||
});
|
||||
|
||||
|
@ -132,7 +133,7 @@ var gridButton = Overlays.addOverlay("image", {
|
|||
y: screenSize.y - (BUTTON_SIZE + PADDING),
|
||||
width: BUTTON_SIZE,
|
||||
height: BUTTON_SIZE,
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/blocks.png",
|
||||
imageURL: HIFI_PUBLIC_BUCKET + "images/gun/floating-targets.svg",
|
||||
alpha: 1
|
||||
});
|
||||
|
||||
|
@ -168,7 +169,7 @@ function shootBullet(position, velocity, grenade) {
|
|||
{ type: "Sphere",
|
||||
position: position,
|
||||
dimensions: { x: bSize, y: bSize, z: bSize },
|
||||
color: { red: 255, green: 0, blue: 0 },
|
||||
color: { red: 0, green: 0, blue: 0 },
|
||||
velocity: bVelocity,
|
||||
lifetime: BULLET_LIFETIME,
|
||||
gravity: { x: 0, y: bGravity, z: 0 },
|
||||
|
@ -265,6 +266,7 @@ function makeGrid(type, scale, size) {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
function makePlatform(gravity, scale, size) {
|
||||
var separation = scale * 2;
|
||||
var pos = Vec3.sum(Camera.getPosition(), Vec3.multiply(10.0 * scale * separation, Quat.getFront(Camera.getOrientation())));
|
||||
|
@ -377,8 +379,8 @@ function takeFiringPose() {
|
|||
}
|
||||
}
|
||||
|
||||
MyAvatar.attach(gunModel, "RightHand", {x:0.02, y: 0.11, z: 0.04}, Quat.fromPitchYawRollDegrees(-0, -160, -79), 0.20);
|
||||
MyAvatar.attach(gunModel, "LeftHand", {x:-0.02, y: 0.11, z: 0.04}, Quat.fromPitchYawRollDegrees(0, 0, 79), 0.20);
|
||||
MyAvatar.attach(gunModel, "RightHand", {x:0.04, y: 0.22, z: 0.02}, Quat.fromPitchYawRollDegrees(-172, -85, 79), 0.40);
|
||||
MyAvatar.attach(gunModel, "LeftHand", {x:-0.04, y: 0.22, z: 0.02}, Quat.fromPitchYawRollDegrees(-172, 85, -79), 0.40);
|
||||
|
||||
// Give a bit of time to load before playing sound
|
||||
Script.setTimeout(playLoadSound, 2000);
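
The two MyAvatar.attach(...) calls above leave the pistol model attached to both hands, and a cleanup step is not visible in this hunk. A minimal sketch of what such cleanup could look like, assuming the gunModel and reticle variables defined earlier in this script and the MyAvatar.detachOne() call that lobby.js uses later in this same commit:

    // Hypothetical cleanup sketch (not part of this commit's diff).
    function cleanupGun() {
        // detachOne removes a single matching attachment per call, so call it once per hand.
        MyAvatar.detachOne(gunModel);
        MyAvatar.detachOne(gunModel);
        Overlays.deleteOverlay(reticle); // likewise for the other button overlays
    }
    Script.scriptEnding.connect(cleanupGun);
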
|
||||
|
|
|
@ -16,8 +16,6 @@ HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
|
|||
Script.include([
|
||||
"libraries/stringHelpers.js",
|
||||
"libraries/dataviewHelpers.js",
|
||||
"libraries/httpMultiPart.js",
|
||||
"libraries/modelUploader.js",
|
||||
"libraries/toolBars.js",
|
||||
"libraries/progressDialog.js",
|
||||
|
||||
|
|
|
@ -8,27 +8,82 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
var intensity = 1.0;
|
||||
var day = 0.0;
|
||||
var hour = 12.0;
|
||||
var longitude = 115.0;
|
||||
var latitude = 31.0;
|
||||
var stageOrientation = Quat.fromPitchYawRollDegrees(0.0, 180.0, 0.0);
|
||||
Script.include("../../utilities/tools/cookies.js");
|
||||
|
||||
Scene.setStageDayTime(hour);
|
||||
Scene.setStageOrientation(stageOrientation);
|
||||
Scene.setStageLocation(longitude, latitude, 0.0);
|
||||
/*
|
||||
function ticktack() {
|
||||
hour += 0.1;
|
||||
//Scene.setSunIntensity(Math.cos(time));
|
||||
if (hour > 24.0) {
|
||||
hour = 0.0;
|
||||
day++;
|
||||
Scene.setStageYearTime(day);
|
||||
var panel = new Panel(10, 400);
|
||||
|
||||
panel.newSlider("Origin Longitude", -180, 180,
|
||||
function(value) { Scene.setStageLocation(value, Scene.getStageLocationLatitude(), Scene.getStageLocationAltitude()); },
|
||||
function() { return Scene.getStageLocationLongitude(); },
|
||||
function(value) { return value.toFixed(0) + " deg"; }
|
||||
);
|
||||
|
||||
panel.newSlider("Origin Latitude", -90, 90,
|
||||
function(value) { Scene.setStageLocation(Scene.getStageLocationLongitude(), value, Scene.getStageLocationAltitude()); },
|
||||
function() { return Scene.getStageLocationLatitude(); },
|
||||
function(value) { return value.toFixed(0) + " deg"; }
|
||||
);
|
||||
|
||||
panel.newSlider("Origin Altitude", 0, 1000,
|
||||
function(value) { Scene.setStageLocation(Scene.getStageLocationLongitude(), Scene.getStageLocationLatitude(), value); },
|
||||
function() { return Scene.getStageLocationAltitude(); },
|
||||
function(value) { return (value).toFixed(0) + " km"; }
|
||||
);
|
||||
|
||||
panel.newSlider("Year Time", 0, 364,
|
||||
function(value) { Scene.setStageYearTime(value); },
|
||||
function() { return Scene.getStageYearTime(); },
|
||||
function(value) {
|
||||
var numDaysPerMonth = 365.0 / 12.0;
|
||||
var monthly = (value / numDaysPerMonth);
|
||||
var month = Math.floor(monthly);
|
||||
return (month + 1).toFixed(0) + "/" + Math.ceil(0.5 + (monthly - month)*Math.ceil(numDaysPerMonth)).toFixed(0); }
|
||||
);
|
||||
|
||||
panel.newSlider("Day Time", 0, 24,
|
||||
function(value) { Scene.setStageDayTime(value); },
|
||||
function() { return Scene.getStageDayTime(); },
|
||||
function(value) {
|
||||
var hour = Math.floor(value);
|
||||
return (hour).toFixed(0) + ":" + ((value - hour)*60.0).toFixed(0);
|
||||
}
|
||||
Scene.setStageDayTime(hour);
|
||||
}
|
||||
);
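
As a quick worked example of the "Day Time" displayer above, the same arithmetic formats a fractional hour into an hour:minute label (a standalone sketch; minutes are not zero-padded, just as in the slider code):

    function formatDayTime(value) {
        var hour = Math.floor(value);
        return hour.toFixed(0) + ":" + ((value - hour) * 60.0).toFixed(0);
    }
    print(formatDayTime(13.75)); // "13:45"
    print(formatDayTime(6.1));   // "6:6"
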
|
||||
|
||||
Script.setInterval(ticktack, 41);
|
||||
*/
|
||||
var tickTackPeriod = 50;
|
||||
var tickTackSpeed = 0.0;
|
||||
panel.newSlider("Tick tack time", -1.0, 1.0,
|
||||
function(value) { tickTackSpeed = value; },
|
||||
function() { return tickTackSpeed; },
|
||||
function(value) { return (value).toFixed(2); }
|
||||
);
|
||||
|
||||
function runStageTime() {
|
||||
if (tickTackSpeed != 0.0) {
|
||||
var hour = panel.get("Day Time");
|
||||
hour += tickTackSpeed;
|
||||
panel.set("Day Time", hour);
|
||||
|
||||
if (hour >= 24.0) {
|
||||
panel.set("Year Time", panel.get("Year Time") + 1);
|
||||
} else if (hour < 0.0) {
|
||||
panel.set("Year Time", panel.get("Year Time") - 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
Script.setInterval(runStageTime, tickTackPeriod);
|
||||
|
||||
panel.newSlider("Light Intensity", 0.0, 5,
|
||||
function(value) { Scene.setSunIntensity(value); },
|
||||
function() { return Scene.getSunIntensity(); },
|
||||
function(value) { return (value).toFixed(2); }
|
||||
);
|
||||
|
||||
Controller.mouseMoveEvent.connect(function panelMouseMoveEvent(event) { return panel.mouseMoveEvent(event); });
|
||||
Controller.mousePressEvent.connect( function panelMousePressEvent(event) { return panel.mousePressEvent(event); });
|
||||
Controller.mouseReleaseEvent.connect(function(event) { return panel.mouseReleaseEvent(event); });
|
||||
|
||||
function scriptEnding() {
|
||||
Menu.removeMenu("Developer > Scene");
|
||||
panel.destroy();
|
||||
}
|
||||
Script.scriptEnding.connect(scriptEnding);
|
||||
|
|
|
@ -312,7 +312,7 @@
|
|||
}
|
||||
|
||||
elTextText.value = properties.text;
|
||||
elTextLineHeight.value = properties.lineHeight;
|
||||
elTextLineHeight.value = properties.lineHeight.toFixed(4);
|
||||
elTextTextColorRed.value = properties.textColor.red;
|
||||
elTextTextColorGreen.value = properties.textColor.green;
|
||||
elTextTextColorBlue.value = properties.textColor.blue;
|
||||
|
@ -477,6 +477,28 @@
|
|||
ev.initEvent("change", true, true);
|
||||
document.activeElement.dispatchEvent(ev);
|
||||
}
|
||||
|
||||
// For input and textarea elements, select all of the text on focus
|
||||
// WebKit-based browsers, such as is used with QWebView, have a quirk
|
||||
// where the mouseup event comes after the focus event, causing the
|
||||
// text to be deselected immediately after selecting all of the text.
|
||||
// To make this work we block the first mouseup event after the elements
|
||||
// received focus. If we block all mouseup events the user will not
|
||||
// be able to click within the selected text.
|
||||
var els = document.querySelectorAll("input, textarea");
|
||||
for (var i = 0; i < els.length; i++) {
|
||||
var clicked = false;
|
||||
els[i].onfocus = function() {
|
||||
this.select();
|
||||
clicked = false;
|
||||
};
|
||||
els[i].onmouseup = function(e) {
|
||||
if (!clicked) {
|
||||
e.preventDefault();
|
||||
clicked = true;
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
|
@ -723,7 +745,7 @@
|
|||
<div class="text-section property">
|
||||
<div class="label">Line Height</div>
|
||||
<div class="value">
|
||||
<input class="coord" type='number' id="property-text-line-height"></input>
|
||||
<input class="coord" type='number' id="property-text-line-height" min="0" step="0.005"></input>
|
||||
</div>
|
||||
</div>
|
||||
<div class="text-section property">
@ -275,3 +275,29 @@ td {
  font-weight: bold;
  font-style: italic;
}

input[type="number"] {
  position: relative;
}

/* Spin Buttons modified - credit for original implementation goes to http://jsfiddle.net/Volker_E/WwfW9/ */
input[type="number"]::-webkit-outer-spin-button,
input[type="number"]::-webkit-inner-spin-button {
  -webkit-appearance: none;
background: #FFF url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAkAAAAJCAYAAADgkQYQAAAAKUlEQVQYlWNgwAT/sYhhKPiPT+F/LJgEsHv37v+EMGkmkuImoh2NoQAANlcun/q4OoYAAAAASUVORK5CYII=) no-repeat center center;
  width: 0.9em;
  height: 4px;
  opacity: 0.5; /* shows Spin Buttons per default (Chrome >= 39) */
  top: 0;
  right: 0;
  bottom: 0;
  border-top-left-radius: 3px;
  border-top-right-radius: 3px;
  border-bottom-right-radius: 3px;
  border-bottom-left-radius: 3px;
}

input[type="number"]::-webkit-inner-spin-button:hover,
input[type="number"]::-webkit-inner-spin-button:active {
  opacity: .8;
}

examples/libraries/modelUploader.js (deleted file, 693 lines)
@ -1,693 +0,0 @@
|
|||
//
|
||||
// modelUploader.js
|
||||
// examples/libraries
|
||||
//
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
|
||||
modelUploader = (function () {
|
||||
var that = {},
|
||||
modelFile,
|
||||
modelName,
|
||||
modelURL,
|
||||
modelCallback,
|
||||
isProcessing,
|
||||
fstBuffer,
|
||||
fbxBuffer,
|
||||
//svoBuffer,
|
||||
mapping,
|
||||
geometry,
|
||||
API_URL = "https://metaverse.highfidelity.com/api/v1/models",
|
||||
MODEL_URL = "http://public.highfidelity.com/models/content",
|
||||
NAME_FIELD = "name",
|
||||
SCALE_FIELD = "scale",
|
||||
FILENAME_FIELD = "filename",
|
||||
TEXDIR_FIELD = "texdir",
|
||||
MAX_TEXTURE_SIZE = 1024;
|
||||
|
||||
function info(message) {
|
||||
if (progressDialog.isOpen()) {
|
||||
progressDialog.update(message);
|
||||
} else {
|
||||
progressDialog.open(message);
|
||||
}
|
||||
print(message);
|
||||
}
|
||||
|
||||
function error(message) {
|
||||
if (progressDialog.isOpen()) {
|
||||
progressDialog.close();
|
||||
}
|
||||
print(message);
|
||||
Window.alert(message);
|
||||
}
|
||||
|
||||
function randomChar(length) {
|
||||
var characters = "0123457689abcdefghijklmnopqrstuvwxyz",
|
||||
string = "",
|
||||
i;
|
||||
|
||||
for (i = 0; i < length; i += 1) {
|
||||
string += characters[Math.floor(Math.random() * 36)];
|
||||
}
|
||||
|
||||
return string;
|
||||
}
|
||||
|
||||
function resetDataObjects() {
|
||||
fstBuffer = null;
|
||||
fbxBuffer = null;
|
||||
//svoBuffer = null;
|
||||
mapping = {};
|
||||
geometry = {};
|
||||
geometry.textures = [];
|
||||
geometry.embedded = [];
|
||||
}
|
||||
|
||||
function readFile(filename) {
|
||||
var url = "file:///" + filename,
|
||||
req = new XMLHttpRequest();
|
||||
|
||||
req.open("GET", url, false);
|
||||
req.responseType = "arraybuffer";
|
||||
req.send();
|
||||
if (req.status !== 200) {
|
||||
error("Could not read file: " + filename + " : " + req.statusText);
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
filename: filename.fileName(),
|
||||
buffer: req.response
|
||||
};
|
||||
}
|
||||
|
||||
function readMapping(buffer) {
|
||||
var dv = new DataView(buffer.buffer),
|
||||
lines,
|
||||
line,
|
||||
tokens,
|
||||
i,
|
||||
name,
|
||||
value,
|
||||
remainder,
|
||||
existing;
|
||||
|
||||
mapping = {}; // { name : value | name : { value : [remainder] } }
|
||||
lines = dv.string(0, dv.byteLength).split(/\r\n|\r|\n/);
|
||||
for (i = 0; i < lines.length; i += 1) {
|
||||
line = lines[i].trim();
|
||||
if (line.length > 0 && line[0] !== "#") {
|
||||
tokens = line.split(/\s*=\s*/);
|
||||
if (tokens.length > 1) {
|
||||
name = tokens[0];
|
||||
value = tokens[1];
|
||||
if (tokens.length > 2) {
|
||||
remainder = tokens.slice(2, tokens.length).join(" = ");
|
||||
} else {
|
||||
remainder = null;
|
||||
}
|
||||
if (tokens.length === 2 && mapping[name] === undefined) {
|
||||
mapping[name] = value;
|
||||
} else {
|
||||
if (mapping[name] === undefined) {
|
||||
mapping[name] = {};
|
||||
|
||||
} else if (typeof mapping[name] !== "object") {
|
||||
existing = mapping[name];
|
||||
mapping[name] = { existing : null };
|
||||
}
|
||||
|
||||
if (mapping[name][value] === undefined) {
|
||||
mapping[name][value] = [];
|
||||
}
|
||||
mapping[name][value].push(remainder);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
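
readMapping() turns the simple name = value [= remainder] lines of an .fst file into a nested object. A small illustration with hypothetical field values (the parsing rules are exactly the ones in the function above):

    // Input lines of a hypothetical .fst file:
    //   name = myModel
    //   filename = myModel.fbx
    //   jointIndex = Hips = 0
    //   jointIndex = Head = 5
    //
    // After readMapping(), the module's mapping variable would hold roughly:
    var exampleMapping = {
        name: "myModel",
        filename: "myModel.fbx",
        jointIndex: {
            "Hips": ["0"],
            "Head": ["5"]
        }
    };
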
|
||||
|
||||
function writeMapping(buffer) {
|
||||
var name,
|
||||
value,
|
||||
remainder,
|
||||
i,
|
||||
string = "";
|
||||
|
||||
for (name in mapping) {
|
||||
if (mapping.hasOwnProperty(name)) {
|
||||
if (typeof mapping[name] === "object") {
|
||||
for (value in mapping[name]) {
|
||||
if (mapping[name].hasOwnProperty(value)) {
|
||||
remainder = mapping[name][value];
|
||||
if (remainder === null) {
|
||||
string += (name + " = " + value + "\n");
|
||||
} else {
|
||||
for (i = 0; i < remainder.length; i += 1) {
|
||||
string += (name + " = " + value + " = " + remainder[i] + "\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
string += (name + " = " + mapping[name] + "\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
buffer.buffer = string.toArrayBuffer();
|
||||
}
|
||||
|
||||
function readGeometry(fbxBuffer) {
|
||||
var textures,
|
||||
view,
|
||||
index,
|
||||
EOF,
|
||||
previousNodeFilename;
|
||||
|
||||
// Reference:
|
||||
// http://code.blender.org/index.php/2013/08/fbx-binary-file-format-specification/
|
||||
|
||||
textures = {};
|
||||
view = new DataView(fbxBuffer.buffer);
|
||||
EOF = false;
|
||||
|
||||
function parseBinaryFBX() {
|
||||
var endOffset,
|
||||
numProperties,
|
||||
propertyListLength,
|
||||
nameLength,
|
||||
name,
|
||||
filename;
|
||||
|
||||
endOffset = view.getUint32(index, true);
|
||||
numProperties = view.getUint32(index + 4, true);
|
||||
propertyListLength = view.getUint32(index + 8, true);
|
||||
nameLength = view.getUint8(index + 12);
|
||||
index += 13;
|
||||
|
||||
if (endOffset === 0) {
|
||||
return;
|
||||
}
|
||||
if (endOffset < index || endOffset > view.byteLength) {
|
||||
EOF = true;
|
||||
return;
|
||||
}
|
||||
|
||||
name = view.string(index, nameLength).toLowerCase();
|
||||
index += nameLength;
|
||||
|
||||
if (name === "content" && previousNodeFilename !== "") {
|
||||
// Blender 2.71 exporter "embeds" external textures as empty binary blobs so ignore these
|
||||
if (propertyListLength > 5) {
|
||||
geometry.embedded.push(previousNodeFilename);
|
||||
}
|
||||
}
|
||||
|
||||
if (name === "relativefilename") {
|
||||
filename = view.string(index + 5, view.getUint32(index + 1, true)).fileName();
|
||||
if (!textures.hasOwnProperty(filename)) {
|
||||
textures[filename] = "";
|
||||
geometry.textures.push(filename);
|
||||
}
|
||||
previousNodeFilename = filename;
|
||||
} else {
|
||||
previousNodeFilename = "";
|
||||
}
|
||||
|
||||
index += (propertyListLength);
|
||||
|
||||
while (index < endOffset && !EOF) {
|
||||
parseBinaryFBX();
|
||||
}
|
||||
}
|
||||
|
||||
function readTextFBX() {
|
||||
var line,
|
||||
view,
|
||||
viewLength,
|
||||
charCode,
|
||||
charCodes,
|
||||
numCharCodes,
|
||||
filename,
|
||||
relativeFilename = "",
|
||||
MAX_CHAR_CODES = 250;
|
||||
|
||||
view = new Uint8Array(fbxBuffer.buffer);
|
||||
viewLength = view.byteLength;
|
||||
charCodes = [];
|
||||
numCharCodes = 0;
|
||||
|
||||
for (index = 0; index < viewLength; index += 1) {
|
||||
charCode = view[index];
|
||||
if (charCode !== 9 && charCode !== 32) {
|
||||
if (charCode === 10) { // EOL. Can ignore EOF.
|
||||
line = String.fromCharCode.apply(String, charCodes).toLowerCase();
|
||||
// For embedded textures, "Content:" line immediately follows "RelativeFilename:" line.
|
||||
if (line.slice(0, 8) === "content:" && relativeFilename !== "") {
|
||||
geometry.embedded.push(relativeFilename);
|
||||
}
|
||||
if (line.slice(0, 17) === "relativefilename:") {
|
||||
filename = line.slice(line.indexOf("\""), line.lastIndexOf("\"") - line.length).fileName();
|
||||
if (!textures.hasOwnProperty(filename)) {
|
||||
textures[filename] = "";
|
||||
geometry.textures.push(filename);
|
||||
}
|
||||
relativeFilename = filename;
|
||||
} else {
|
||||
relativeFilename = "";
|
||||
}
|
||||
charCodes = [];
|
||||
numCharCodes = 0;
|
||||
} else {
|
||||
if (numCharCodes < MAX_CHAR_CODES) { // Only interested in start of line
|
||||
charCodes.push(charCode);
|
||||
numCharCodes += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
readTextFBX();
|
||||
|
||||
|
||||
}
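
For reference, parseBinaryFBX() above walks binary FBX node records, each of which starts with a 13-byte header followed by the node name. A compact restatement of that header read, using the same DataView offsets as the code above (a sketch, not an addition to the original script):

    function readFBXNodeHeader(view, index) {
        return {
            endOffset: view.getUint32(index, true),              // absolute offset where this node record ends
            numProperties: view.getUint32(index + 4, true),      // number of properties in the property list
            propertyListLength: view.getUint32(index + 8, true), // byte length of the property list
            nameLength: view.getUint8(index + 12),               // length of the node name
            nameOffset: index + 13                               // the name starts right after the header
        };
    }
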
|
||||
|
||||
function readModel() {
|
||||
var fbxFilename,
|
||||
//svoFilename,
|
||||
fileType;
|
||||
|
||||
info("Reading model file");
|
||||
print("Model file: " + modelFile);
|
||||
|
||||
if (modelFile.toLowerCase().fileType() === "fst") {
|
||||
fstBuffer = readFile(modelFile);
|
||||
if (fstBuffer === null) {
|
||||
return false;
|
||||
}
|
||||
readMapping(fstBuffer);
|
||||
fileType = mapping[FILENAME_FIELD].toLowerCase().fileType();
|
||||
if (mapping.hasOwnProperty(FILENAME_FIELD)) {
|
||||
if (fileType === "fbx") {
|
||||
fbxFilename = modelFile.path() + "\\" + mapping[FILENAME_FIELD];
|
||||
//} else if (fileType === "svo") {
|
||||
// svoFilename = modelFile.path() + "\\" + mapping[FILENAME_FIELD];
|
||||
} else {
|
||||
error("Unrecognized model type in FST file!");
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
error("Model file name not found in FST file!");
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
fstBuffer = {
|
||||
filename: "Interface." + randomChar(6), // Simulate avatar model uploading behaviour
|
||||
buffer: null
|
||||
};
|
||||
|
||||
if (modelFile.toLowerCase().fileType() === "fbx") {
|
||||
fbxFilename = modelFile;
|
||||
mapping[FILENAME_FIELD] = modelFile.fileName();
|
||||
|
||||
//} else if (modelFile.toLowerCase().fileType() === "svo") {
|
||||
// svoFilename = modelFile;
|
||||
// mapping[FILENAME_FIELD] = modelFile.fileName();
|
||||
|
||||
} else {
|
||||
error("Unrecognized file type: " + modelFile);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (!isProcessing) { return false; }
|
||||
|
||||
if (fbxFilename) {
|
||||
fbxBuffer = readFile(fbxFilename);
|
||||
if (fbxBuffer === null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!isProcessing) { return false; }
|
||||
|
||||
readGeometry(fbxBuffer);
|
||||
}
|
||||
|
||||
//if (svoFilename) {
|
||||
// svoBuffer = readFile(svoFilename);
|
||||
// if (svoBuffer === null) {
|
||||
// return false;
|
||||
// }
|
||||
//}
|
||||
|
||||
// Add any missing basic mappings
|
||||
if (!mapping.hasOwnProperty(NAME_FIELD)) {
|
||||
mapping[NAME_FIELD] = modelFile.fileName().fileBase();
|
||||
}
|
||||
if (!mapping.hasOwnProperty(TEXDIR_FIELD)) {
|
||||
mapping[TEXDIR_FIELD] = ".";
|
||||
}
|
||||
if (!mapping.hasOwnProperty(SCALE_FIELD)) {
|
||||
mapping[SCALE_FIELD] = 1.0;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function setProperties() {
|
||||
var form = [],
|
||||
directory,
|
||||
displayAs,
|
||||
validateAs;
|
||||
|
||||
progressDialog.close();
|
||||
print("Setting model properties");
|
||||
|
||||
form.push({ label: "Name:", value: mapping[NAME_FIELD] });
|
||||
|
||||
directory = modelFile.path() + "/" + mapping[TEXDIR_FIELD];
|
||||
displayAs = new RegExp("^" + modelFile.path().regExpEscape() + "[\\\\\\\/](.*)");
|
||||
validateAs = new RegExp("^" + modelFile.path().regExpEscape() + "([\\\\\\\/].*)?");
|
||||
|
||||
form.push({
|
||||
label: "Texture directory:",
|
||||
directory: modelFile.path() + "/" + mapping[TEXDIR_FIELD],
|
||||
title: "Choose Texture Directory",
|
||||
displayAs: displayAs,
|
||||
validateAs: validateAs,
|
||||
errorMessage: "Texture directory must be subdirectory of the model directory."
|
||||
});
|
||||
|
||||
form.push({ button: "Cancel" });
|
||||
|
||||
if (!Window.form("Set Model Properties", form)) {
|
||||
print("User cancelled uploading model");
|
||||
return false;
|
||||
}
|
||||
|
||||
mapping[NAME_FIELD] = form[0].value;
|
||||
mapping[TEXDIR_FIELD] = form[1].directory.slice(modelFile.path().length + 1);
|
||||
if (mapping[TEXDIR_FIELD] === "") {
|
||||
mapping[TEXDIR_FIELD] = ".";
|
||||
}
|
||||
|
||||
writeMapping(fstBuffer);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function createHttpMessage(callback) {
|
||||
var multiparts = [],
|
||||
lodCount,
|
||||
lodFile,
|
||||
lodBuffer,
|
||||
textureBuffer,
|
||||
textureSourceFormat,
|
||||
textureTargetFormat,
|
||||
embeddedTextures,
|
||||
i;
|
||||
|
||||
info("Preparing to send model");
|
||||
|
||||
// Model name
|
||||
if (mapping.hasOwnProperty(NAME_FIELD)) {
|
||||
multiparts.push({
|
||||
name : "model_name",
|
||||
string : mapping[NAME_FIELD]
|
||||
});
|
||||
} else {
|
||||
error("Model name is missing");
|
||||
httpMultiPart.clear();
|
||||
return;
|
||||
}
|
||||
|
||||
// FST file
|
||||
if (fstBuffer) {
|
||||
multiparts.push({
|
||||
name : "fst",
|
||||
buffer: fstBuffer
|
||||
});
|
||||
}
|
||||
|
||||
// FBX file
|
||||
if (fbxBuffer) {
|
||||
multiparts.push({
|
||||
name : "fbx",
|
||||
buffer: fbxBuffer
|
||||
});
|
||||
}
|
||||
|
||||
// SVO file
|
||||
//if (svoBuffer) {
|
||||
// multiparts.push({
|
||||
// name : "svo",
|
||||
// buffer: svoBuffer
|
||||
// });
|
||||
//}
|
||||
|
||||
// LOD files
|
||||
lodCount = 0;
|
||||
for (lodFile in mapping.lod) {
|
||||
if (mapping.lod.hasOwnProperty(lodFile)) {
|
||||
lodBuffer = readFile(modelFile.path() + "\/" + lodFile);
|
||||
if (lodBuffer === null) {
|
||||
return;
|
||||
}
|
||||
multiparts.push({
|
||||
name: "lod" + lodCount,
|
||||
buffer: lodBuffer
|
||||
});
|
||||
lodCount += 1;
|
||||
}
|
||||
if (!isProcessing) { return; }
|
||||
}
|
||||
|
||||
// Textures
|
||||
embeddedTextures = "|" + geometry.embedded.join("|") + "|";
|
||||
for (i = 0; i < geometry.textures.length; i += 1) {
|
||||
if (embeddedTextures.indexOf("|" + geometry.textures[i].fileName() + "|") === -1) {
|
||||
textureBuffer = readFile(modelFile.path() + "\/"
|
||||
+ (mapping[TEXDIR_FIELD] !== "." ? mapping[TEXDIR_FIELD] + "\/" : "")
|
||||
+ geometry.textures[i]);
|
||||
if (textureBuffer === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
textureSourceFormat = geometry.textures[i].fileType().toLowerCase();
|
||||
textureTargetFormat = (textureSourceFormat === "jpg" ? "jpg" : "png");
|
||||
textureBuffer.buffer =
|
||||
textureBuffer.buffer.recodeImage(textureSourceFormat, textureTargetFormat, MAX_TEXTURE_SIZE);
|
||||
textureBuffer.filename = textureBuffer.filename.slice(0, -textureSourceFormat.length) + textureTargetFormat;
|
||||
|
||||
multiparts.push({
|
||||
name: "texture" + i,
|
||||
buffer: textureBuffer
|
||||
});
|
||||
}
|
||||
|
||||
if (!isProcessing) { return; }
|
||||
}
|
||||
|
||||
// Model category
|
||||
multiparts.push({
|
||||
name : "model_category",
|
||||
string : "content"
|
||||
});
|
||||
|
||||
// Create HTTP message
|
||||
httpMultiPart.clear();
|
||||
Script.setTimeout(function addMultipart() {
|
||||
var multipart = multiparts.shift();
|
||||
httpMultiPart.add(multipart);
|
||||
|
||||
if (!isProcessing) { return; }
|
||||
|
||||
if (multiparts.length > 0) {
|
||||
Script.setTimeout(addMultipart, 25);
|
||||
} else {
|
||||
callback();
|
||||
}
|
||||
}, 25);
|
||||
}
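
createHttpMessage() hands the multiparts to httpMultiPart one at a time through chained Script.setTimeout() calls so the script engine stays responsive while a large upload is assembled. The same pattern in isolation (a sketch that assumes only the Script.setTimeout() API already used above):

    // Process a work queue in small timer-spaced steps instead of one long blocking loop.
    function processQueueIncrementally(queue, handleItem, onDone, delayMs) {
        Script.setTimeout(function step() {
            handleItem(queue.shift());
            if (queue.length > 0) {
                Script.setTimeout(step, delayMs);
            } else {
                onDone();
            }
        }, delayMs);
    }
    // e.g. processQueueIncrementally(multiparts, function(p) { httpMultiPart.add(p); }, callback, 25);
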
|
||||
|
||||
function sendToHighFidelity() {
|
||||
var req,
|
||||
uploadedChecks,
|
||||
HTTP_GET_TIMEOUT = 60, // 1 minute
|
||||
HTTP_SEND_TIMEOUT = 900, // 15 minutes
|
||||
UPLOADED_CHECKS = 30,
|
||||
CHECK_UPLOADED_TIMEOUT = 1, // 1 second
|
||||
handleCheckUploadedResponses,
|
||||
handleUploadModelResponses,
|
||||
handleRequestUploadResponses;
|
||||
|
||||
function uploadTimedOut() {
|
||||
error("Model upload failed: Internet request timed out!");
|
||||
}
|
||||
|
||||
function debugResponse() {
|
||||
print("req.errorCode = " + req.errorCode);
|
||||
print("req.readyState = " + req.readyState);
|
||||
print("req.status = " + req.status);
|
||||
print("req.statusText = " + req.statusText);
|
||||
print("req.responseType = " + req.responseType);
|
||||
print("req.responseText = " + req.responseText);
|
||||
print("req.response = " + req.response);
|
||||
print("req.getAllResponseHeaders() = " + req.getAllResponseHeaders());
|
||||
}
|
||||
|
||||
function checkUploaded() {
|
||||
if (!isProcessing) { return; }
|
||||
|
||||
info("Checking uploaded model");
|
||||
|
||||
req = new XMLHttpRequest();
|
||||
req.open("HEAD", modelURL, true);
|
||||
req.timeout = HTTP_GET_TIMEOUT * 1000;
|
||||
req.onreadystatechange = handleCheckUploadedResponses;
|
||||
req.ontimeout = uploadTimedOut;
|
||||
req.send();
|
||||
}
|
||||
|
||||
handleCheckUploadedResponses = function () {
|
||||
//debugResponse();
|
||||
if (req.readyState === req.DONE) {
|
||||
if (req.status === 200) {
|
||||
// Note: Unlike avatar models, for content models we don't need to refresh texture cache.
|
||||
print("Model uploaded: " + modelURL);
|
||||
progressDialog.close();
|
||||
if (Window.confirm("Your model has been uploaded as: " + modelURL + "\nDo you want to rez it?")) {
|
||||
modelCallback(modelURL);
|
||||
}
|
||||
} else if (req.status === 404) {
|
||||
if (uploadedChecks > 0) {
|
||||
uploadedChecks -= 1;
|
||||
Script.setTimeout(checkUploaded, CHECK_UPLOADED_TIMEOUT * 1000);
|
||||
} else {
|
||||
print("Error: " + req.status + " " + req.statusText);
|
||||
error("We could not verify that your model was successfully uploaded but it may have been at: "
|
||||
+ modelURL);
|
||||
}
|
||||
} else {
|
||||
print("Error: " + req.status + " " + req.statusText);
|
||||
error("There was a problem with your upload, please try again later.");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function uploadModel(method) {
|
||||
var url;
|
||||
|
||||
if (!isProcessing) { return; }
|
||||
|
||||
req = new XMLHttpRequest();
|
||||
if (method === "PUT") {
|
||||
url = API_URL + "\/" + modelName;
|
||||
req.open("PUT", url, true); //print("PUT " + url);
|
||||
} else {
|
||||
url = API_URL;
|
||||
req.open("POST", url, true); //print("POST " + url);
|
||||
}
|
||||
req.setRequestHeader("Content-Type", "multipart/form-data; boundary=\"" + httpMultiPart.boundary() + "\"");
|
||||
req.timeout = HTTP_SEND_TIMEOUT * 1000;
|
||||
req.onreadystatechange = handleUploadModelResponses;
|
||||
req.ontimeout = uploadTimedOut;
|
||||
req.send(httpMultiPart.response().buffer);
|
||||
}
|
||||
|
||||
handleUploadModelResponses = function () {
|
||||
//debugResponse();
|
||||
if (req.readyState === req.DONE) {
|
||||
if (req.status === 200) {
|
||||
uploadedChecks = UPLOADED_CHECKS;
|
||||
checkUploaded();
|
||||
} else {
|
||||
print("Error: " + req.status + " " + req.statusText);
|
||||
error("There was a problem with your upload, please try again later.");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function requestUpload() {
|
||||
var url;
|
||||
|
||||
if (!isProcessing) { return; }
|
||||
|
||||
url = API_URL + "\/" + modelName; // XMLHttpRequest automatically handles authorization of API requests.
|
||||
req = new XMLHttpRequest();
|
||||
req.open("GET", url, true); //print("GET " + url);
|
||||
req.responseType = "json";
|
||||
req.timeout = HTTP_GET_TIMEOUT * 1000;
|
||||
req.onreadystatechange = handleRequestUploadResponses;
|
||||
req.ontimeout = uploadTimedOut;
|
||||
req.send();
|
||||
}
|
||||
|
||||
handleRequestUploadResponses = function () {
|
||||
var response;
|
||||
|
||||
//debugResponse();
|
||||
if (req.readyState === req.DONE) {
|
||||
if (req.status === 200) {
|
||||
if (req.responseType === "json") {
|
||||
response = JSON.parse(req.responseText);
|
||||
if (response.status === "success") {
|
||||
if (response.exists === false) {
|
||||
uploadModel("POST");
|
||||
} else if (response.can_update === true) {
|
||||
uploadModel("PUT");
|
||||
} else {
|
||||
error("This model file already exists and is owned by someone else!");
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
print("Error: " + req.status + " " + req.statusText);
|
||||
}
|
||||
error("Model upload failed! Something went wrong at the data server.");
|
||||
}
|
||||
};
|
||||
|
||||
info("Sending model to High Fidelity");
|
||||
|
||||
requestUpload();
|
||||
}
|
||||
|
||||
that.upload = function (file, callback) {
|
||||
|
||||
modelFile = file;
|
||||
modelCallback = callback;
|
||||
|
||||
isProcessing = true;
|
||||
|
||||
progressDialog.onCancel = function () {
|
||||
print("User cancelled uploading model");
|
||||
isProcessing = false;
|
||||
};
|
||||
|
||||
resetDataObjects();
|
||||
|
||||
if (readModel()) {
|
||||
if (setProperties()) {
|
||||
modelName = mapping[NAME_FIELD];
|
||||
modelURL = MODEL_URL + "\/" + mapping[NAME_FIELD] + ".fst"; // All models are uploaded as an FST
|
||||
|
||||
createHttpMessage(sendToHighFidelity);
|
||||
}
|
||||
}
|
||||
|
||||
resetDataObjects();
|
||||
};
|
||||
|
||||
return that;
|
||||
}());
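
The module removed by this commit exposed a single entry point, modelUploader.upload(file, callback), with the callback receiving the final model URL once the upload has been verified. A usage sketch, assuming a caller that obtains a local path from Window.browse() the way the test script later in this diff does:

    // Hypothetical caller for the (now removed) modelUploader module.
    var modelPath = Window.browse("Select a model to upload", "", "Model files (*.fst *.fbx)");
    if (modelPath !== null) {
        modelUploader.upload(modelPath, function (modelURL) {
            print("Model is now available at: " + modelURL);
        });
    }
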
|
|
@ -45,8 +45,6 @@ var panelsCenterShift = Vec3.subtract(panelsCenter, orbCenter);
|
|||
|
||||
var ORB_SHIFT = { x: 0, y: -1.4, z: -0.8};
|
||||
|
||||
var HELMET_ATTACHMENT_URL = HIFI_PUBLIC_BUCKET + "models/attachments/IronManMaskOnly.fbx"
|
||||
|
||||
var LOBBY_PANEL_WALL_URL = HIFI_PUBLIC_BUCKET + "models/sets/Lobby/PanelWallForInterface.fbx";
|
||||
var LOBBY_BLANK_PANEL_TEXTURE_URL = HIFI_PUBLIC_BUCKET + "models/sets/Lobby/Texture.jpg";
|
||||
var LOBBY_SHELL_URL = HIFI_PUBLIC_BUCKET + "models/sets/Lobby/LobbyShellForInterface.fbx";
|
||||
|
@ -136,9 +134,6 @@ function drawLobby() {
|
|||
panelWall = Overlays.addOverlay("model", panelWallProps);
|
||||
orbShell = Overlays.addOverlay("model", orbShellProps);
|
||||
descriptionText = Overlays.addOverlay("text3d", descriptionTextProps);
|
||||
|
||||
// add an attachment on this avatar so other people see them in the lobby
|
||||
MyAvatar.attach(HELMET_ATTACHMENT_URL, "Neck", {x: 0, y: 0, z: 0}, Quat.fromPitchYawRollDegrees(0, 0, 0), 1.15);
|
||||
|
||||
if (droneSound.downloaded) {
|
||||
// start the drone sound
|
||||
|
@ -237,6 +232,8 @@ function playRandomMuzak() {
|
|||
}
|
||||
|
||||
function cleanupLobby() {
|
||||
toggleEnvironmentRendering(true);
|
||||
|
||||
// for each of the 21 placeholder textures, set them back to default so the cached model doesn't have changed textures
|
||||
var panelTexturesReset = {};
|
||||
panelTexturesReset["textures"] = {};
|
||||
|
@ -254,15 +251,18 @@ function cleanupLobby() {
|
|||
panelWall = false;
|
||||
orbShell = false;
|
||||
|
||||
currentDrone.stop();
|
||||
currentMuzakInjector.stop();
|
||||
if (currentDrone) {
|
||||
currentDrone.stop();
|
||||
currentDrone = null
|
||||
}
|
||||
|
||||
currentMuzakInjector = null;
|
||||
if (currentMuzakInjector) {
|
||||
currentMuzakInjector.stop();
|
||||
currentMuzakInjector = null;
|
||||
}
|
||||
|
||||
places = {};
|
||||
toggleEnvironmentRendering(true);
|
||||
|
||||
MyAvatar.detachOne(HELMET_ATTACHMENT_URL);
|
||||
}
|
||||
|
||||
function actionStartEvent(event) {
|
||||
|
@ -311,10 +311,8 @@ function maybeCleanupLobby() {
|
|||
}
|
||||
|
||||
function toggleEnvironmentRendering(shouldRender) {
|
||||
Menu.setIsOptionChecked("Voxels", shouldRender);
|
||||
Menu.setIsOptionChecked("Entities", shouldRender);
|
||||
Menu.setIsOptionChecked("Metavoxels", shouldRender);
|
||||
Menu.setIsOptionChecked("Avatars", shouldRender);
|
||||
Scene.shouldRenderAvatars = shouldRender;
|
||||
Scene.shouldRenderEntities = shouldRender;
|
||||
}
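
toggleEnvironmentRendering() now drives avatar and entity rendering through the Scene properties instead of menu checkboxes; the Developer > Render menu items added in developerMenuItems.js later in this diff stay in sync through change signals. A condensed sketch of that two-way binding, using only names that appear in the two scripts of this commit:

    // Scene -> menu: reflect the Scene state in the Developer > Render checkbox.
    Scene.shouldRenderAvatarsChanged.connect(function (shouldRenderAvatars) {
        Menu.setIsOptionChecked("Avatars", shouldRenderAvatars);
    });
    // Menu -> Scene: apply the checkbox back onto the Scene property.
    Menu.menuItemEvent.connect(function (menuItem) {
        if (menuItem == "Avatars") {
            Scene.shouldRenderAvatars = Menu.isOptionChecked("Avatars");
        }
    });
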
|
||||
|
||||
function handleLookAt(pickRay) {
|
||||
|
|
|
@ -145,98 +145,3 @@ test("Test timeout", function() {
|
|||
this.assertEquals(0, req.status, "status should be `0`");
|
||||
this.assertEquals(4, req.errorCode, "4 is the timeout error code for QNetworkReply::NetworkError");
|
||||
});
|
||||
|
||||
|
||||
var localFile = Window.browse("Find defaultScripts.js file ...", "", "defaultScripts.js (defaultScripts.js)");
|
||||
|
||||
if (localFile !== null) {
|
||||
|
||||
localFile = "file:///" + localFile;
|
||||
|
||||
test("Test GET local file synchronously", function () {
|
||||
var req = new XMLHttpRequest();
|
||||
|
||||
var statesVisited = [true, false, false, false, false]
|
||||
req.onreadystatechange = function () {
|
||||
statesVisited[req.readyState] = true;
|
||||
};
|
||||
|
||||
req.open("GET", localFile, false);
|
||||
req.send();
|
||||
|
||||
this.assertEquals(req.DONE, req.readyState, "readyState should be DONE");
|
||||
this.assertEquals(200, req.status, "status should be `200`");
|
||||
this.assertEquals("OK", req.statusText, "statusText should be `OK`");
|
||||
this.assertEquals(0, req.errorCode);
|
||||
this.assertNotEquals("", req.getAllResponseHeaders(), "headers should not be null");
|
||||
this.assertContains("High Fidelity", req.response.substring(0, 100), "expected text not found in response")
|
||||
|
||||
for (var i = 0; i <= req.DONE; i++) {
|
||||
this.assertEquals(true, statesVisited[i], i + " should be set");
|
||||
}
|
||||
});
|
||||
|
||||
test("Test GET nonexistent local file", function () {
|
||||
var nonexistentFile = localFile.replace(".js", "NoExist.js");
|
||||
|
||||
var req = new XMLHttpRequest();
|
||||
req.open("GET", nonexistentFile, false);
|
||||
req.send();
|
||||
|
||||
this.assertEquals(req.DONE, req.readyState, "readyState should be DONE");
|
||||
this.assertEquals(404, req.status, "status should be `404`");
|
||||
this.assertEquals("Not Found", req.statusText, "statusText should be `Not Found`");
|
||||
this.assertNotEquals(0, req.errorCode);
|
||||
});
|
||||
|
||||
test("Test GET local file already open", function () {
|
||||
// Can't open file exclusively in order to test.
|
||||
});
|
||||
|
||||
test("Test GET local file with data not implemented", function () {
|
||||
var req = new XMLHttpRequest();
|
||||
req.open("GET", localFile, true);
|
||||
req.send("data");
|
||||
|
||||
this.assertEquals(req.DONE, req.readyState, "readyState should be DONE");
|
||||
this.assertEquals(501, req.status, "status should be `501`");
|
||||
this.assertEquals("Not Implemented", req.statusText, "statusText should be `Not Implemented`");
|
||||
this.assertNotEquals(0, req.errorCode);
|
||||
});
|
||||
|
||||
test("Test GET local file asynchronously not implemented", function () {
|
||||
var req = new XMLHttpRequest();
|
||||
req.open("GET", localFile, true);
|
||||
req.send();
|
||||
|
||||
this.assertEquals(req.DONE, req.readyState, "readyState should be DONE");
|
||||
this.assertEquals(501, req.status, "status should be `501`");
|
||||
this.assertEquals("Not Implemented", req.statusText, "statusText should be `Not Implemented`");
|
||||
this.assertNotEquals(0, req.errorCode);
|
||||
});
|
||||
|
||||
test("Test POST local file not implemented", function () {
|
||||
var req = new XMLHttpRequest();
|
||||
req.open("POST", localFile, false);
|
||||
req.send();
|
||||
|
||||
this.assertEquals(req.DONE, req.readyState, "readyState should be DONE");
|
||||
this.assertEquals(501, req.status, "status should be `501`");
|
||||
this.assertEquals("Not Implemented", req.statusText, "statusText should be `Not Implemented`");
|
||||
this.assertNotEquals(0, req.errorCode);
|
||||
});
|
||||
|
||||
test("Test local file username and password not implemented", function () {
|
||||
var req = new XMLHttpRequest();
|
||||
req.open("GET", localFile, false, "username", "password");
|
||||
req.send();
|
||||
|
||||
this.assertEquals(req.DONE, req.readyState, "readyState should be DONE");
|
||||
this.assertEquals(501, req.status, "status should be `501`");
|
||||
this.assertEquals("Not Implemented", req.statusText, "statusText should be `Not Implemented`");
|
||||
this.assertNotEquals(0, req.errorCode);
|
||||
});
|
||||
|
||||
} else {
|
||||
print("Local file operation not tested");
|
||||
}

examples/utilities/tools/cookies.js (new executable file, 278 lines)

@ -0,0 +1,278 @@
|
|||
//
|
||||
// cookies.js
|
||||
//
|
||||
// version 1.0
|
||||
//
|
||||
// Created by Sam Gateau, 4/1/2015
|
||||
// A simple UI panel that presents a list of properties and the proper widget to edit each one
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
// The Slider class
|
||||
Slider = function(x,y,width,thumbSize) {
|
||||
|
||||
this.thumb = Overlays.addOverlay("text", {
|
||||
backgroundColor: { red: 255, green: 255, blue: 255 },
|
||||
x: x,
|
||||
y: y,
|
||||
width: thumbSize,
|
||||
height: thumbSize,
|
||||
alpha: 1.0,
|
||||
backgroundAlpha: 1.0,
|
||||
visible: true
|
||||
});
|
||||
this.background = Overlays.addOverlay("text", {
|
||||
backgroundColor: { red: 125, green: 125, blue: 255 },
|
||||
x: x,
|
||||
y: y,
|
||||
width: width,
|
||||
height: thumbSize,
|
||||
alpha: 1.0,
|
||||
backgroundAlpha: 0.5,
|
||||
visible: true
|
||||
});
|
||||
|
||||
this.thumbSize = thumbSize;
|
||||
this.thumbHalfSize = 0.5 * thumbSize;
|
||||
|
||||
this.minThumbX = x + this.thumbHalfSize;
|
||||
this.maxThumbX = x + width - this.thumbHalfSize;
|
||||
this.thumbX = this.minThumbX;
|
||||
|
||||
this.minValue = 0.0;
|
||||
this.maxValue = 1.0;
|
||||
|
||||
this.clickOffsetX = 0;
|
||||
this.isMoving = false;
|
||||
|
||||
this.updateThumb = function() {
|
||||
thumbTruePos = this.thumbX - 0.5 * this.thumbSize;
|
||||
Overlays.editOverlay(this.thumb, { x: thumbTruePos } );
|
||||
};
|
||||
|
||||
this.onMouseMoveEvent = function(event) {
|
||||
if (this.isMoving) {
|
||||
newThumbX = event.x - this.clickOffsetX;
|
||||
if (newThumbX < this.minThumbX) {
|
||||
newThumbX = this.minThumbX;
|
||||
}
|
||||
if (newThumbX > this.maxThumbX) {
|
||||
newThumbX = this.maxThumbX;
|
||||
}
|
||||
this.thumbX = newThumbX;
|
||||
this.updateThumb();
|
||||
this.onValueChanged(this.getValue());
|
||||
}
|
||||
};
|
||||
|
||||
this.onMousePressEvent = function(event) {
|
||||
this.isMoving = true;
|
||||
var clickOffset = event.x - this.thumbX;
|
||||
if ((clickOffset > -this.thumbHalfSize) && (clickOffset < this.thumbHalfSize)) {
|
||||
this.clickOffsetX = clickOffset;
|
||||
} else {
|
||||
this.clickOffsetX = 0;
|
||||
this.thumbX = event.x;
|
||||
this.updateThumb();
|
||||
this.onValueChanged(this.getValue());
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
this.onMouseReleaseEvent = function(event) {
|
||||
this.isMoving = false;
|
||||
};
|
||||
|
||||
// Public members:
|
||||
|
||||
this.setNormalizedValue = function(value) {
|
||||
if (value < 0.0) {
|
||||
this.thumbX = this.minThumbX;
|
||||
} else if (value > 1.0) {
|
||||
this.thumbX = this.maxThumbX;
|
||||
} else {
|
||||
this.thumbX = value * (this.maxThumbX - this.minThumbX) + this.minThumbX;
|
||||
}
|
||||
this.updateThumb();
|
||||
};
|
||||
this.getNormalizedValue = function() {
|
||||
return (this.thumbX - this.minThumbX) / (this.maxThumbX - this.minThumbX);
|
||||
};
|
||||
|
||||
this.setValue = function(value) {
|
||||
var normValue = (value - this.minValue) / (this.maxValue - this.minValue);
|
||||
this.setNormalizedValue(normValue);
|
||||
};
|
||||
|
||||
this.getValue = function() {
|
||||
return this.getNormalizedValue() * (this.maxValue - this.minValue) + this.minValue;
|
||||
};
|
||||
|
||||
this.onValueChanged = function(value) {};
|
||||
|
||||
this.destroy = function() {
|
||||
Overlays.deleteOverlay(this.background);
|
||||
Overlays.deleteOverlay(this.thumb);
|
||||
};
|
||||
}
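
A minimal usage sketch for the Slider widget defined above, assuming it runs inside Interface's script engine (which provides the Controller, Overlays and Script globals used here); when a Slider is used through the Panel class below, this wiring is done for you:

    var mySlider = new Slider(100, 100, 300, 20); // x, y, width, thumbSize
    mySlider.minValue = 0.0;
    mySlider.maxValue = 10.0;
    mySlider.setValue(5.0);
    mySlider.onValueChanged = function (value) { print("slider value: " + value.toFixed(2)); };

    // Forward mouse events so dragging the thumb works, mirroring Panel.mousePressEvent() below.
    Controller.mousePressEvent.connect(function (event) {
        if (Overlays.getOverlayAtPoint({ x: event.x, y: event.y }) == mySlider.background) {
            mySlider.onMousePressEvent(event);
        }
    });
    Controller.mouseMoveEvent.connect(function (event) { mySlider.onMouseMoveEvent(event); });
    Controller.mouseReleaseEvent.connect(function (event) { mySlider.onMouseReleaseEvent(event); });
    Script.scriptEnding.connect(function () { mySlider.destroy(); });
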
|
||||
|
||||
|
||||
var textFontSize = 16;
|
||||
|
||||
function PanelItem(name, setter, getter, displayer, x, y, textWidth, valueWidth, height) {
|
||||
this.name = name;
|
||||
|
||||
|
||||
this.displayer = typeof displayer !== 'undefined' ? displayer : function(value) { return value.toFixed(2); };
|
||||
|
||||
var topMargin = (height - textFontSize);
|
||||
this.title = Overlays.addOverlay("text", {
|
||||
backgroundColor: { red: 255, green: 255, blue: 255 },
|
||||
x: x,
|
||||
y: y,
|
||||
width: textWidth,
|
||||
height: height,
|
||||
alpha: 1.0,
|
||||
backgroundAlpha: 0.5,
|
||||
visible: true,
|
||||
text: name,
|
||||
font: {size: textFontSize},
|
||||
topMargin: topMargin,
|
||||
});
|
||||
|
||||
this.value = Overlays.addOverlay("text", {
|
||||
backgroundColor: { red: 255, green: 255, blue: 255 },
|
||||
x: x + textWidth,
|
||||
y: y,
|
||||
width: valueWidth,
|
||||
height: height,
|
||||
alpha: 1.0,
|
||||
backgroundAlpha: 0.5,
|
||||
visible: true,
|
||||
text: this.displayer(getter()),
|
||||
font: {size: textFontSize},
|
||||
topMargin: topMargin
|
||||
|
||||
});
|
||||
this.getter = getter;
|
||||
|
||||
this.setter = function(value) {
|
||||
setter(value);
|
||||
Overlays.editOverlay(this.value, {text: this.displayer(getter())});
|
||||
if (this.widget) {
|
||||
this.widget.setValue(value);
|
||||
}
|
||||
};
|
||||
this.setterFromWidget = function(value) {
|
||||
setter(value);
|
||||
Overlays.editOverlay(this.value, {text: this.displayer(getter())});
|
||||
};
|
||||
|
||||
|
||||
this.widget = null;
|
||||
|
||||
this.destroy = function() {
|
||||
Overlays.deleteOverlay(this.title);
|
||||
Overlays.deleteOverlay(this.value);
|
||||
if (this.widget != null) {
|
||||
this.widget.destroy();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var textWidth = 180;
|
||||
var valueWidth = 100;
|
||||
var widgetWidth = 300;
|
||||
var rawHeight = 20;
|
||||
var rawYDelta = rawHeight * 1.5;
|
||||
|
||||
Panel = function(x, y) {
|
||||
|
||||
this.x = x;
|
||||
this.y = y;
|
||||
this.nextY = y;
|
||||
|
||||
this.widgetX = x + textWidth + valueWidth;
|
||||
|
||||
this.items = new Array();
|
||||
this.activeWidget = null;
|
||||
|
||||
this.mouseMoveEvent = function(event) {
|
||||
if (this.activeWidget) {
|
||||
this.activeWidget.onMouseMoveEvent(event);
|
||||
}
|
||||
};
|
||||
|
||||
// we also handle click detection in our mousePressEvent()
|
||||
this.mousePressEvent = function(event) {
|
||||
// Make sure we released the previously active widget
|
||||
if (this.activeWidget) {
|
||||
this.activeWidget.onMouseReleaseEvent(event);
|
||||
}
|
||||
this.activeWidget = null;
|
||||
|
||||
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
|
||||
|
||||
// If the user clicked any of the slider background then...
|
||||
for (var i in this.items) {
|
||||
var widget = this.items[i].widget;
|
||||
|
||||
if (clickedOverlay == widget.background) {
|
||||
this.activeWidget = widget;
|
||||
this.activeWidget.onMousePressEvent(event);
|
||||
// print("clicked... widget=" + i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
this.mouseReleaseEvent = function(event) {
|
||||
if (this.activeWidget) {
|
||||
this.activeWidget.onMouseReleaseEvent(event);
|
||||
}
|
||||
this.activeWidget = null;
|
||||
};
|
||||
|
||||
this.newSlider = function(name, minValue, maxValue, setValue, getValue, displayValue) {
|
||||
|
||||
var sliderItem = new PanelItem(name, setValue, getValue, displayValue, this.x, this.nextY, textWidth, valueWidth, rawHeight);
|
||||
|
||||
var slider = new Slider(this.widgetX, this.nextY, widgetWidth, rawHeight);
|
||||
slider.minValue = minValue;
|
||||
slider.maxValue = maxValue;
|
||||
slider.onValueChanged = function(value) { sliderItem.setterFromWidget(value); };
|
||||
|
||||
|
||||
sliderItem.widget = slider;
|
||||
sliderItem.setter(getValue());
|
||||
this.items[name] = sliderItem;
|
||||
this.nextY += rawYDelta;
|
||||
// print("created Item... slider=" + name);
|
||||
};
|
||||
|
||||
this.destroy = function() {
|
||||
for (var i in this.items) {
|
||||
this.items[i].destroy();
|
||||
}
|
||||
}
|
||||
|
||||
this.set = function(name, value) {
|
||||
var item = this.items[name];
|
||||
if (item != null) {
|
||||
return item.setter(value);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
this.get = function(name) {
|
||||
var item = this.items[name];
|
||||
if (item != null) {
|
||||
return item.getter();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
|
|
@ -11,13 +11,21 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
var createdRenderMenu = false;
|
||||
|
||||
var ENTITIES_MENU = "Developer > Entities";
|
||||
var COLLISION_UPDATES_TO_SERVER = "Don't send collision updates to server";
|
||||
|
||||
var RENDER_MENU = "Developer > Render";
|
||||
var ENTITIES_ITEM = "Entities";
|
||||
var AVATARS_ITEM = "Avatars";
|
||||
|
||||
function setupMenus() {
|
||||
if (!Menu.menuExists("Developer")) {
|
||||
Menu.addMenu("Developer");
|
||||
}
|
||||
if (!Menu.menuExists("Developer > Entities")) {
|
||||
Menu.addMenu("Developer > Entities");
|
||||
if (!Menu.menuExists(ENTITIES_MENU)) {
|
||||
Menu.addMenu(ENTITIES_MENU);
|
||||
|
||||
// NOTE: these menu items aren't currently working. I've temporarily removed them. Will add them back once we
|
||||
// rewire these to work
|
||||
|
@ -31,27 +39,55 @@ function setupMenus() {
|
|||
Menu.addMenuItem({ menuName: "Developer > Entities", menuItemName: "Don't Do Precision Picking", isCheckable: true, isChecked: false });
|
||||
Menu.addMenuItem({ menuName: "Developer > Entities", menuItemName: "Disable Light Entities", isCheckable: true, isChecked: false });
|
||||
*/
|
||||
Menu.addMenuItem({ menuName: "Developer > Entities", menuItemName: "Don't send collision updates to server", isCheckable: true, isChecked: false });
|
||||
Menu.addMenuItem({ menuName: ENTITIES_MENU, menuItemName: COLLISION_UPDATES_TO_SERVER, isCheckable: true, isChecked: false });
|
||||
}
|
||||
|
||||
if (!Menu.menuExists(RENDER_MENU)) {
|
||||
Menu.addMenu(RENDER_MENU);
|
||||
createdRenderMenu = true;
|
||||
}
|
||||
|
||||
if (!Menu.menuItemExists(RENDER_MENU, ENTITIES_ITEM)) {
|
||||
Menu.addMenuItem({ menuName: RENDER_MENU, menuItemName: ENTITIES_ITEM, isCheckable: true, isChecked: Scene.shouldRenderEntities })
|
||||
}
|
||||
|
||||
if (!Menu.menuItemExists(RENDER_MENU, AVATARS_ITEM)) {
|
||||
Menu.addMenuItem({ menuName: RENDER_MENU, menuItemName: AVATARS_ITEM, isCheckable: true, isChecked: Scene.shouldRenderAvatars })
|
||||
}
|
||||
}
|
||||
|
||||
Menu.menuItemEvent.connect(function (menuItem) {
|
||||
print("menuItemEvent() in JS... menuItem=" + menuItem);
|
||||
|
||||
if (menuItem == "Don't send collision updates to server") {
|
||||
var dontSendUpdates = Menu.isOptionChecked("Don't send collision updates to server");
|
||||
if (menuItem == COLLISION_UPDATES_TO_SERVER) {
|
||||
var dontSendUpdates = Menu.isOptionChecked(COLLISION_UPDATES_TO_SERVER);
|
||||
print(" dontSendUpdates... checked=" + dontSendUpdates);
|
||||
Entities.setSendPhysicsUpdates(!dontSendUpdates);
|
||||
} else if (menuItem == ENTITIES_ITEM) {
|
||||
Scene.shouldRenderEntities = Menu.isOptionChecked(ENTITIES_ITEM);
|
||||
} else if (menuItem == AVATARS_ITEM) {
|
||||
Scene.shouldRenderAvatars = Menu.isOptionChecked(AVATARS_ITEM);
|
||||
}
|
||||
});
|
||||
|
||||
setupMenus();
|
||||
Scene.shouldRenderAvatarsChanged.connect(function(shouldRenderAvatars) {
|
||||
Menu.setIsOptionChecked(AVATARS_ITEM, shouldRenderAvatars)
|
||||
});
|
||||
|
||||
// register our scriptEnding callback
|
||||
Script.scriptEnding.connect(scriptEnding);
|
||||
Scene.shouldRenderEntitiesChanged.connect(function(shouldRenderEntities) {
|
||||
Menu.setIsOptionChecked(ENTITIES_ITEM, shouldRenderEntities)
|
||||
});
|
||||
|
||||
function scriptEnding() {
|
||||
Menu.removeMenu("Developer > Entities");
|
||||
Menu.removeMenu(ENTITIES_MENU);
|
||||
|
||||
if (createdRenderMenu) {
|
||||
Menu.removeMenu(RENDER_MENU);
|
||||
} else {
|
||||
Menu.removeMenuItem(RENDER_MENU, ENTITIES_ITEM);
|
||||
Menu.removeMenuItem(RENDER_MENU, AVATARS_ITEM);
|
||||
}
|
||||
}
|
||||
|
||||
setupMenus();
|
||||
Script.scriptEnding.connect(scriptEnding);
|
||||
|
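The script above talks to a C++ object registered with the script engine under the name "Scene" (see the registerScriptEngineWithApplicationServices hunk later in this diff); it reads and writes shouldRenderEntities / shouldRenderAvatars and listens for the matching *Changed signals. The actual SceneScriptingInterface declaration is not part of this diff, so what follows is only a hedged sketch of what such a Qt scripting interface typically looks like; the property and signal names mirror what the script uses, everything else is an assumption.

// Illustrative sketch only -- NOT the SceneScriptingInterface declaration from this
// commit. It just mirrors the names the JavaScript above depends on.
#include <QObject>

class SceneScriptingInterfaceSketch : public QObject {
    Q_OBJECT
    Q_PROPERTY(bool shouldRenderEntities READ shouldRenderEntities
               WRITE setShouldRenderEntities NOTIFY shouldRenderEntitiesChanged)
    Q_PROPERTY(bool shouldRenderAvatars READ shouldRenderAvatars
               WRITE setShouldRenderAvatars NOTIFY shouldRenderAvatarsChanged)
public:
    bool shouldRenderEntities() const { return _shouldRenderEntities; }
    bool shouldRenderAvatars() const { return _shouldRenderAvatars; }

public slots:
    void setShouldRenderEntities(bool value) {
        if (_shouldRenderEntities != value) {
            _shouldRenderEntities = value;
            emit shouldRenderEntitiesChanged(value);   // what Scene.shouldRenderEntitiesChanged.connect(...) receives
        }
    }
    void setShouldRenderAvatars(bool value) {
        if (_shouldRenderAvatars != value) {
            _shouldRenderAvatars = value;
            emit shouldRenderAvatarsChanged(value);    // what Scene.shouldRenderAvatarsChanged.connect(...) receives
        }
    }

signals:
    void shouldRenderEntitiesChanged(bool shouldRenderEntities);
    void shouldRenderAvatarsChanged(bool shouldRenderAvatars);

private:
    bool _shouldRenderEntities = true;
    bool _shouldRenderAvatars = true;
};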
|
|
@ -2,7 +2,7 @@ set(TARGET_NAME interface)
|
|||
project(${TARGET_NAME})
|
||||
|
||||
# set a default root dir for each of our optional externals if it was not passed
|
||||
set(OPTIONAL_EXTERNALS "Faceshift" "LibOVR" "Sixense" "LeapMotion" "RtMidi" "SDL2" "RSSDK")
|
||||
set(OPTIONAL_EXTERNALS "Faceshift" "Sixense" "LeapMotion" "RtMidi" "SDL2" "RSSDK")
|
||||
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
|
||||
string(TOUPPER ${EXTERNAL} ${EXTERNAL}_UPPERCASE)
|
||||
if (NOT ${${EXTERNAL}_UPPERCASE}_ROOT_DIR)
|
||||
|
@ -110,6 +110,11 @@ add_dependency_external_projects(glm bullet)
|
|||
find_package(GLM REQUIRED)
|
||||
target_include_directories(${TARGET_NAME} PRIVATE ${GLM_INCLUDE_DIRS})
|
||||
|
||||
add_dependency_external_projects(LibOVR)
|
||||
find_package(LibOVR REQUIRED)
|
||||
target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVR_INCLUDE_DIRS})
|
||||
target_link_libraries(${TARGET_NAME} ${LIBOVR_LIBRARIES})
|
||||
|
||||
find_package(Bullet REQUIRED)
|
||||
target_include_directories(${TARGET_NAME} SYSTEM PRIVATE ${BULLET_INCLUDE_DIRS})
|
||||
target_link_libraries(${TARGET_NAME} ${BULLET_LIBRARIES})
|
||||
|
|
interface/external/libovr/readme.txt (vendored, 16 lines deleted)
|
@ -1,16 +0,0 @@
|
|||
|
||||
Instructions for adding the Oculus library (LibOVR) to Interface
|
||||
Stephen Birarda, March 6, 2014
|
||||
|
||||
You can download the Oculus SDK from https://developer.oculusvr.com/ (account creation required). Interface has been tested with SDK version 0.4.1.
|
||||
|
||||
1. Copy the Oculus SDK folders from the LibOVR directory (Lib, Include, Src) into the interface/externals/libovr folder.
|
||||
This readme.txt should be there as well.
|
||||
|
||||
You may optionally choose to copy the SDK folders to a location outside the repository (so you can re-use with different checkouts and different projects).
|
||||
If so our CMake find module expects you to set the ENV variable 'HIFI_LIB_DIR' to a directory containing a subfolder 'oculus' that contains the three folders mentioned above.
|
||||
|
||||
NOTE: For Windows users, you should copy libovr.lib and libovrd.lib from the \oculus\Lib\Win32\VS2010 directory to the \libovr\Lib\Win32\ directory.
|
||||
|
||||
2. Clear your build directory, run cmake and build, and you should be all set.
|
||||
|
|
@ -164,6 +164,8 @@ const QString SKIP_FILENAME = QStandardPaths::writableLocation(QStandardPaths::D
|
|||
|
||||
const QString DEFAULT_SCRIPTS_JS_URL = "http://s3.amazonaws.com/hifi-public/scripts/defaultScripts.js";
|
||||
|
||||
bool renderCollisionHulls = false;
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
class MyNativeEventFilter : public QAbstractNativeEventFilter {
|
||||
public:
|
||||
|
@ -256,7 +258,8 @@ bool setupEssentials(int& argc, char** argv) {
|
|||
auto speechRecognizer = DependencyManager::set<SpeechRecognizer>();
|
||||
#endif
|
||||
auto discoverabilityManager = DependencyManager::set<DiscoverabilityManager>();
|
||||
|
||||
auto sceneScriptingInterface = DependencyManager::set<SceneScriptingInterface>();
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -566,6 +569,9 @@ void Application::aboutToQuit() {
|
|||
}
|
||||
|
||||
void Application::cleanupBeforeQuit() {
|
||||
|
||||
_entities.clear(); // this will allow entity scripts to properly shutdown
|
||||
|
||||
_datagramProcessor->shutdown(); // tell the datagram processor we're shutting down, so it can short circuit
|
||||
_entities.shutdown(); // tell the entities system we're shutting down, so it will stop running scripts
|
||||
ScriptEngine::stopAllScripts(this); // stop all currently running global scripts
|
||||
|
@ -773,10 +779,6 @@ void Application::paintGL() {
|
|||
}
|
||||
|
||||
if (OculusManager::isConnected()) {
|
||||
//Clear the color buffer to ensure that there isn't any residual color
//left over from when OR was not connected.
|
||||
glClear(GL_COLOR_BUFFER_BIT);
|
||||
|
||||
//When in mirror mode, use camera rotation. Otherwise, use body rotation
|
||||
if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
|
||||
OculusManager::display(_myCamera.getRotation(), _myCamera.getPosition(), _myCamera);
|
||||
|
@ -970,9 +972,6 @@ void Application::keyPressEvent(QKeyEvent* event) {
|
|||
|
||||
case Qt::Key_E:
|
||||
case Qt::Key_PageUp:
|
||||
if (!_myAvatar->getDriveKeys(UP)) {
|
||||
_myAvatar->jump();
|
||||
}
|
||||
_myAvatar->setDriveKeys(UP, 1.0f);
|
||||
break;
|
||||
|
||||
|
@ -1181,6 +1180,10 @@ void Application::keyPressEvent(QKeyEvent* event) {
|
|||
break;
|
||||
}
|
||||
|
||||
case Qt::Key_Comma: {
|
||||
renderCollisionHulls = !renderCollisionHulls;
|
||||
}
|
||||
|
||||
default:
|
||||
event->ignore();
|
||||
break;
|
||||
|
@ -2207,6 +2210,7 @@ void Application::update(float deltaTime) {
|
|||
|
||||
{
|
||||
PerformanceTimer perfTimer("physics");
|
||||
_myAvatar->relayDriveKeysToCharacterController();
|
||||
_physicsEngine.stepSimulation();
|
||||
}
|
||||
|
||||
|
@ -2259,7 +2263,7 @@ void Application::update(float deltaTime) {
|
|||
if (queryIsDue || viewIsDifferentEnough) {
|
||||
_lastQueriedTime = now;
|
||||
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::Entities)) {
|
||||
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderEntities()) {
|
||||
queryOctree(NodeType::EntityServer, PacketTypeEntityQuery, _entityServerJurisdictions);
|
||||
}
|
||||
_lastQueriedViewFrustum = _viewFrustum;
|
||||
|
@ -2765,7 +2769,7 @@ void Application::updateShadowMap() {
|
|||
|
||||
const GLfloat WORLD_AMBIENT_COLOR[] = { 0.525f, 0.525f, 0.6f };
|
||||
const GLfloat WORLD_DIFFUSE_COLOR[] = { 0.6f, 0.525f, 0.525f };
|
||||
const GLfloat WORLD_SPECULAR_COLOR[] = { 0.94f, 0.94f, 0.737f, 1.0f };
|
||||
const GLfloat WORLD_SPECULAR_COLOR[] = { 0.08f, 0.08f, 0.08f, 1.0f };
|
||||
|
||||
const glm::vec3 GLOBAL_LIGHT_COLOR = { 0.6f, 0.525f, 0.525f };
|
||||
|
||||
|
@ -2972,11 +2976,15 @@ void Application::displaySide(Camera& theCamera, bool selfAvatarOnly, RenderArgs
|
|||
DependencyManager::get<GeometryCache>()->renderSphere(originSphereRadius, 15, 15, glm::vec4(1.0f, 0.0f, 0.0f, 1.0f));
|
||||
|
||||
// render models...
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::Entities)) {
|
||||
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderEntities()) {
|
||||
PerformanceTimer perfTimer("entities");
|
||||
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
|
||||
"Application::displaySide() ... entities...");
|
||||
_entities.render(RenderArgs::DEFAULT_RENDER_MODE, renderSide);
|
||||
if (renderCollisionHulls) {
|
||||
_entities.render(RenderArgs::DEBUG_RENDER_MODE, renderSide);
|
||||
} else {
|
||||
_entities.render(RenderArgs::DEFAULT_RENDER_MODE, renderSide);
|
||||
}
|
||||
}
|
||||
|
||||
// render JS/scriptable overlays
|
||||
|
@ -2993,17 +3001,15 @@ void Application::displaySide(Camera& theCamera, bool selfAvatarOnly, RenderArgs
|
|||
DependencyManager::get<AmbientOcclusionEffect>()->render();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
bool mirrorMode = (theCamera.getMode() == CAMERA_MODE_MIRROR);
|
||||
|
||||
{
|
||||
PerformanceTimer perfTimer("avatars");
|
||||
DependencyManager::get<AvatarManager>()->renderAvatars(mirrorMode ? Avatar::MIRROR_RENDER_MODE : Avatar::NORMAL_RENDER_MODE,
|
||||
false, selfAvatarOnly);
|
||||
false, selfAvatarOnly);
|
||||
}
|
||||
|
||||
|
||||
{
|
||||
DependencyManager::get<DeferredLightingEffect>()->setAmbientLightMode(getRenderAmbientLight());
|
||||
auto skyStage = DependencyManager::get<SceneScriptingInterface>()->getSkyStage();
|
||||
|
@ -3586,6 +3592,8 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
|
|||
scriptEngine->registerFunction(hmdInterface, "getHUDLookAtPosition2D", HMDScriptingInterface::getHUDLookAtPosition2D, 0);
|
||||
scriptEngine->registerFunction(hmdInterface, "getHUDLookAtPosition3D", HMDScriptingInterface::getHUDLookAtPosition3D, 0);
|
||||
|
||||
scriptEngine->registerGlobalObject("Scene", DependencyManager::get<SceneScriptingInterface>().data());
|
||||
|
||||
#ifdef HAVE_RTMIDI
|
||||
scriptEngine->registerGlobalObject("MIDI", &MIDIManager::getInstance());
|
||||
#endif
|
||||
|
|
|
@ -66,9 +66,13 @@ void GLCanvas::paintGL() {
|
|||
}
|
||||
|
||||
Application::getInstance()->paintGL();
|
||||
swapBuffers();
|
||||
|
||||
if (OculusManager::isConnected()) {
|
||||
|
||||
if (!OculusManager::isConnected()) {
|
||||
swapBuffers();
|
||||
} else {
|
||||
if (OculusManager::allowSwap()) {
|
||||
swapBuffers();
|
||||
}
|
||||
OculusManager::endFrameTiming();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -268,8 +268,6 @@ Menu::Menu() {
|
|||
|
||||
QMenu* renderOptionsMenu = developerMenu->addMenu("Render");
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Atmosphere, Qt::SHIFT | Qt::Key_A, true);
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Avatars, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Entities, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::AmbientOcclusion);
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::DontFadeOnOctreeServerChanges);
|
||||
|
||||
|
|
|
@ -119,7 +119,6 @@ namespace MenuOption {
|
|||
const QString AudioSourceInject = "Generated Audio";
|
||||
const QString AudioSourcePinkNoise = "Pink Noise";
|
||||
const QString AudioSourceSine440 = "Sine 440hz";
|
||||
const QString Avatars = "Avatars";
|
||||
const QString BandwidthDetails = "Bandwidth Details";
|
||||
const QString BlueSpeechSphere = "Blue Sphere While Speaking";
|
||||
const QString BookmarkLocation = "Bookmark Location";
|
||||
|
@ -156,7 +155,6 @@ namespace MenuOption {
|
|||
const QString EnableCharacterController = "Enable avatar collisions";
|
||||
const QString EnableGlowEffect = "Enable Glow Effect (Warning: Poor Oculus Performance)";
|
||||
const QString EnableVRMode = "Enable VR Mode";
|
||||
const QString Entities = "Entities";
|
||||
const QString ExpandMyAvatarSimulateTiming = "Expand /myAvatar/simulation";
|
||||
const QString ExpandMyAvatarTiming = "Expand /myAvatar";
|
||||
const QString ExpandOtherAvatarTiming = "Expand /otherAvatar";
|
||||
|
|
|
@ -364,9 +364,7 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode, bool
|
|||
: GLOW_FROM_AVERAGE_LOUDNESS;
|
||||
|
||||
// render body
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::Avatars)) {
|
||||
renderBody(frustum, renderMode, postLighting, glowLevel);
|
||||
}
|
||||
renderBody(frustum, renderMode, postLighting, glowLevel);
|
||||
|
||||
if (!postLighting && renderMode != SHADOW_RENDER_MODE) {
|
||||
// add local lights
|
||||
|
|
|
@ -25,6 +25,7 @@
|
|||
#include "AvatarManager.h"
|
||||
#include "Menu.h"
|
||||
#include "MyAvatar.h"
|
||||
#include "SceneScriptingInterface.h"
|
||||
|
||||
// 70 times per second - target is 60hz, but this helps account for any small deviations
|
||||
// in the update loop
|
||||
|
@ -122,15 +123,17 @@ void AvatarManager::renderAvatars(Avatar::RenderMode renderMode, bool postLighti
|
|||
glm::vec3 cameraPosition = Application::getInstance()->getCamera()->getPosition();
|
||||
|
||||
if (!selfAvatarOnly) {
|
||||
foreach (const AvatarSharedPointer& avatarPointer, _avatarHash) {
|
||||
Avatar* avatar = static_cast<Avatar*>(avatarPointer.data());
|
||||
if (!avatar->isInitialized()) {
|
||||
continue;
|
||||
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
|
||||
foreach (const AvatarSharedPointer& avatarPointer, _avatarHash) {
|
||||
Avatar* avatar = static_cast<Avatar*>(avatarPointer.data());
|
||||
if (!avatar->isInitialized()) {
|
||||
continue;
|
||||
}
|
||||
avatar->render(cameraPosition, renderMode, postLighting);
|
||||
avatar->setDisplayingLookatVectors(renderLookAtVectors);
|
||||
}
|
||||
avatar->render(cameraPosition, renderMode, postLighting);
|
||||
avatar->setDisplayingLookatVectors(renderLookAtVectors);
|
||||
renderAvatarFades(cameraPosition, renderMode);
|
||||
}
|
||||
renderAvatarFades(cameraPosition, renderMode);
|
||||
} else {
|
||||
// just render myAvatar
|
||||
_myAvatar->render(cameraPosition, renderMode, postLighting);
|
||||
|
|
|
@ -1452,3 +1452,9 @@ void MyAvatar::clearDriveKeys() {
|
|||
_driveKeys[i] = 0.0f;
|
||||
}
|
||||
}
|
||||
|
||||
void MyAvatar::relayDriveKeysToCharacterController() {
|
||||
if (_driveKeys[UP] > 0.0f) {
|
||||
_characterController.jump();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -87,8 +87,9 @@ public:
|
|||
void clearDriveKeys();
|
||||
void setDriveKeys(int key, float val) { _driveKeys[key] = val; };
|
||||
bool getDriveKeys(int key) { return _driveKeys[key] != 0.0f; };
|
||||
void jump() { _characterController.jump(); }
|
||||
|
||||
void relayDriveKeysToCharacterController();
|
||||
|
||||
bool isMyAvatar() { return true; }
|
||||
|
||||
bool isLookingAtLeftEye();
|
||||
|
|
|
@ -18,7 +18,8 @@
|
|||
#include <QDesktopWidget>
|
||||
#include <QGuiApplication>
|
||||
#include <QOpenGLFramebufferObject>
|
||||
#include <QScreen>
|
||||
#include <QScreen>
|
||||
#include <QOpenGLTimerQuery>
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
|
||||
|
@ -29,12 +30,28 @@
|
|||
#include <SharedUtil.h>
|
||||
#include <UserActivityLogger.h>
|
||||
|
||||
#include <OVR_CAPI_GL.h>
|
||||
|
||||
#include "Application.h"
|
||||
|
||||
#ifdef HAVE_LIBOVR
|
||||
template <typename Function>
|
||||
void for_each_eye(Function function) {
|
||||
for (ovrEyeType eye = ovrEyeType::ovrEye_Left;
|
||||
eye < ovrEyeType::ovrEye_Count;
|
||||
eye = static_cast<ovrEyeType>(eye + 1)) {
|
||||
function(eye);
|
||||
}
|
||||
}
|
||||
|
||||
using namespace OVR;
|
||||
template <typename Function>
|
||||
void for_each_eye(const ovrHmd & hmd, Function function) {
|
||||
for (int i = 0; i < ovrEye_Count; ++i) {
|
||||
ovrEyeType eye = hmd->EyeRenderOrder[i];
|
||||
function(eye);
|
||||
}
|
||||
}
|
||||
|
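For reference, these two overloads are what the rest of this file uses to iterate eyes: the first walks them in enum order (left, then right), the second walks them in the HMD's preferred render order via EyeRenderOrder. A minimal usage sketch follows; renderOneEye is a hypothetical placeholder, not a function from this commit.

// Usage sketch for the for_each_eye helpers above. The hmd handle comes from
// ovrHmd_Create() as elsewhere in this file.
void renderOneEye(ovrEyeType eye);   // hypothetical stand-in for the per-eye rendering work

void configureAndRenderEyes(ovrHmd hmd, ovrFovPort (&eyeFov)[ovrEye_Count]) {
    for_each_eye([&](ovrEyeType eye) {
        eyeFov[eye] = hmd->DefaultEyeFov[eye];   // enum order: ovrEye_Left, then ovrEye_Right
    });
    for_each_eye(hmd, [&](ovrEyeType eye) {
        renderOneEye(eye);                       // HMD-preferred order from hmd->EyeRenderOrder
    });
}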
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
ProgramObject OculusManager::_program;
|
||||
int OculusManager::_textureLocation;
|
||||
int OculusManager::_eyeToSourceUVScaleLocation;
|
||||
|
@ -46,24 +63,27 @@ int OculusManager::_colorAttributeLocation;
|
|||
int OculusManager::_texCoord0AttributeLocation;
|
||||
int OculusManager::_texCoord1AttributeLocation;
|
||||
int OculusManager::_texCoord2AttributeLocation;
|
||||
bool OculusManager::_isConnected = false;
|
||||
|
||||
ovrHmd OculusManager::_ovrHmd;
|
||||
ovrHmdDesc OculusManager::_ovrHmdDesc;
|
||||
ovrFovPort OculusManager::_eyeFov[ovrEye_Count];
|
||||
ovrEyeRenderDesc OculusManager::_eyeRenderDesc[ovrEye_Count];
|
||||
ovrSizei OculusManager::_renderTargetSize;
|
||||
ovrVector2f OculusManager::_UVScaleOffset[ovrEye_Count][2];
|
||||
GLuint OculusManager::_vertices[ovrEye_Count] = { 0, 0 };
|
||||
GLuint OculusManager::_indices[ovrEye_Count] = { 0, 0 };
|
||||
GLsizei OculusManager::_meshSize[ovrEye_Count] = { 0, 0 };
|
||||
ovrFrameTiming OculusManager::_hmdFrameTiming;
|
||||
ovrRecti OculusManager::_eyeRenderViewport[ovrEye_Count];
|
||||
bool OculusManager::_programInitialized = false;
|
||||
#endif
|
||||
|
||||
ovrTexture OculusManager::_eyeTextures[ovrEye_Count];
|
||||
bool OculusManager::_isConnected = false;
|
||||
ovrHmd OculusManager::_ovrHmd;
|
||||
ovrFovPort OculusManager::_eyeFov[ovrEye_Count];
|
||||
ovrVector3f OculusManager::_eyeOffset[ovrEye_Count];
|
||||
ovrEyeRenderDesc OculusManager::_eyeRenderDesc[ovrEye_Count];
|
||||
ovrSizei OculusManager::_renderTargetSize;
|
||||
glm::mat4 OculusManager::_eyeProjection[ovrEye_Count];
|
||||
unsigned int OculusManager::_frameIndex = 0;
|
||||
bool OculusManager::_frameTimingActive = false;
|
||||
bool OculusManager::_programInitialized = false;
|
||||
Camera* OculusManager::_camera = NULL;
|
||||
int OculusManager::_activeEyeIndex = -1;
|
||||
ovrEyeType OculusManager::_activeEye = ovrEye_Count;
|
||||
bool OculusManager::_hswDismissed = false;
|
||||
|
||||
float OculusManager::CALIBRATION_DELTA_MINIMUM_LENGTH = 0.02f;
|
||||
float OculusManager::CALIBRATION_DELTA_MINIMUM_ANGLE = 5.0f * RADIANS_PER_DEGREE;
|
||||
|
@ -76,68 +96,86 @@ glm::vec3 OculusManager::_calibrationPosition;
|
|||
glm::quat OculusManager::_calibrationOrientation;
|
||||
quint64 OculusManager::_calibrationStartTime;
|
||||
int OculusManager::_calibrationMessage = NULL;
|
||||
glm::vec3 OculusManager::_eyePositions[ovrEye_Count];
|
||||
// TODO expose this as a developer toggle
|
||||
bool OculusManager::_eyePerFrameMode = false;
|
||||
ovrEyeType OculusManager::_lastEyeRendered = ovrEye_Count;
|
||||
ovrSizei OculusManager::_recommendedTexSize = { 0, 0 };
|
||||
float OculusManager::_offscreenRenderScale = 1.0;
|
||||
|
||||
|
||||
void OculusManager::initSdk() {
|
||||
ovr_Initialize();
|
||||
_ovrHmd = ovrHmd_Create(0);
|
||||
if (!_ovrHmd) {
|
||||
_ovrHmd = ovrHmd_CreateDebug(ovrHmd_DK2);
|
||||
}
|
||||
}
|
||||
|
||||
void OculusManager::shutdownSdk() {
|
||||
ovrHmd_Destroy(_ovrHmd);
|
||||
ovr_Shutdown();
|
||||
}
|
||||
|
||||
void OculusManager::init() {
|
||||
#ifdef OVR_DIRECT_MODE
|
||||
initSdk();
|
||||
#endif
|
||||
|
||||
glm::vec3 OculusManager::_leftEyePosition = glm::vec3();
|
||||
glm::vec3 OculusManager::_rightEyePosition = glm::vec3();
|
||||
}
|
||||
|
||||
void OculusManager::connect() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
#ifndef OVR_DIRECT_MODE
|
||||
initSdk();
|
||||
#endif
|
||||
_calibrationState = UNCALIBRATED;
|
||||
qDebug() << "Oculus SDK" << OVR_VERSION_STRING;
|
||||
ovr_Initialize();
|
||||
|
||||
_ovrHmd = ovrHmd_Create(0);
|
||||
if (_ovrHmd) {
|
||||
if (!_isConnected) {
|
||||
UserActivityLogger::getInstance().connectedDevice("hmd", "oculus");
|
||||
}
|
||||
_isConnected = true;
|
||||
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
_eyeFov[0] = _ovrHmd->DefaultEyeFov[0];
|
||||
_eyeFov[1] = _ovrHmd->DefaultEyeFov[1];
|
||||
#else
|
||||
ovrHmd_GetDesc(_ovrHmd, &_ovrHmdDesc);
|
||||
_eyeFov[0] = _ovrHmdDesc.DefaultEyeFov[0];
|
||||
_eyeFov[1] = _ovrHmdDesc.DefaultEyeFov[1];
|
||||
#endif
|
||||
//Get texture size
|
||||
ovrSizei recommendedTex0Size = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left,
|
||||
_eyeFov[0], 1.0f);
|
||||
ovrSizei recommendedTex1Size = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Right,
|
||||
_eyeFov[1], 1.0f);
|
||||
_renderTargetSize.w = recommendedTex0Size.w + recommendedTex1Size.w;
|
||||
_renderTargetSize.h = recommendedTex0Size.h;
|
||||
if (_renderTargetSize.h < recommendedTex1Size.h) {
|
||||
_renderTargetSize.h = recommendedTex1Size.h;
|
||||
}
|
||||
|
||||
_eyeRenderDesc[0] = ovrHmd_GetRenderDesc(_ovrHmd, ovrEye_Left, _eyeFov[0]);
|
||||
_eyeRenderDesc[1] = ovrHmd_GetRenderDesc(_ovrHmd, ovrEye_Right, _eyeFov[1]);
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
_eyeFov[eye] = _ovrHmd->DefaultEyeFov[eye];
|
||||
});
|
||||
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence);
|
||||
#else
|
||||
ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence | ovrHmdCap_LatencyTest);
|
||||
#endif
|
||||
ovrGLConfig cfg;
|
||||
memset(&cfg, 0, sizeof(cfg));
|
||||
cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
|
||||
cfg.OGL.Header.BackBufferSize = _ovrHmd->Resolution;
|
||||
cfg.OGL.Header.Multisample = 1;
|
||||
|
||||
int distortionCaps = 0
|
||||
| ovrDistortionCap_Vignette
|
||||
| ovrDistortionCap_Overdrive
|
||||
| ovrDistortionCap_TimeWarp;
|
||||
|
||||
int configResult = ovrHmd_ConfigureRendering(_ovrHmd, &cfg.Config,
|
||||
distortionCaps, _eyeFov, _eyeRenderDesc);
|
||||
assert(configResult);
|
||||
|
||||
|
||||
_recommendedTexSize = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left, _eyeFov[ovrEye_Left], 1.0f);
|
||||
_renderTargetSize = { _recommendedTexSize.w * 2, _recommendedTexSize.h };
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
//Get texture size
|
||||
_eyeTextures[eye].Header.API = ovrRenderAPI_OpenGL;
|
||||
_eyeTextures[eye].Header.TextureSize = _renderTargetSize;
|
||||
_eyeTextures[eye].Header.RenderViewport.Pos = { 0, 0 };
|
||||
});
|
||||
_eyeTextures[ovrEye_Right].Header.RenderViewport.Pos.x = _recommendedTexSize.w;
|
||||
|
||||
ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
|
||||
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
ovrHmd_ConfigureTracking(_ovrHmd, ovrTrackingCap_Orientation | ovrTrackingCap_Position |
|
||||
ovrTrackingCap_MagYawCorrection,
|
||||
ovrTrackingCap_Orientation);
|
||||
#else
|
||||
ovrHmd_StartSensor(_ovrHmd, ovrSensorCap_Orientation | ovrSensorCap_YawCorrection |
|
||||
ovrSensorCap_Position,
|
||||
ovrSensorCap_Orientation);
|
||||
#endif
|
||||
|
||||
if (!_camera) {
|
||||
_camera = new Camera;
|
||||
configureCamera(*_camera, 0, 0); // no need to use screen dimensions; they're ignored
|
||||
}
|
||||
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
if (!_programInitialized) {
|
||||
// Shader program
|
||||
_programInitialized = true;
|
||||
|
@ -162,27 +200,27 @@ void OculusManager::connect() {
|
|||
|
||||
//Generate the distortion VBOs
|
||||
generateDistortionMesh();
|
||||
|
||||
#endif
|
||||
} else {
|
||||
_isConnected = false;
|
||||
|
||||
// we're definitely not in "VR mode" so tell the menu that
|
||||
Menu::getInstance()->getActionForOption(MenuOption::EnableVRMode)->setChecked(false);
|
||||
|
||||
ovrHmd_Destroy(_ovrHmd);
|
||||
ovr_Shutdown();
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
//Disconnects and deallocates the OR
|
||||
void OculusManager::disconnect() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
if (_isConnected) {
|
||||
_isConnected = false;
|
||||
ovrHmd_Destroy(_ovrHmd);
|
||||
ovr_Shutdown();
|
||||
// Prepare to potentially have to dismiss the HSW again
|
||||
// if the user re-enables VR
|
||||
_hswDismissed = false;
|
||||
#ifndef OVR_DIRECT_MODE
|
||||
shutdownSdk();
|
||||
#endif
|
||||
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
//Free the distortion mesh data
|
||||
for (int i = 0; i < ovrEye_Count; i++) {
|
||||
if (_vertices[i] != 0) {
|
||||
|
@ -194,11 +232,10 @@ void OculusManager::disconnect() {
|
|||
_indices[i] = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef HAVE_LIBOVR
|
||||
void OculusManager::positionCalibrationBillboard(Text3DOverlay* billboard) {
|
||||
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
glm::quat headOrientation = myAvatar->getHeadOrientation();
|
||||
|
@ -209,9 +246,7 @@ void OculusManager::positionCalibrationBillboard(Text3DOverlay* billboard) {
|
|||
+ headOrientation * glm::vec3(0.0f, 0.0f, -CALIBRATION_MESSAGE_DISTANCE));
|
||||
billboard->setRotation(headOrientation);
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_LIBOVR
|
||||
void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) {
|
||||
static QString instructionMessage = "Hold still to calibrate";
|
||||
static QString progressMessage;
|
||||
|
@ -303,26 +338,21 @@ void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) {
|
|||
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
void OculusManager::recalibrate() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
_calibrationState = UNCALIBRATED;
|
||||
#endif
|
||||
}
|
||||
|
||||
void OculusManager::abandonCalibration() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
_calibrationState = CALIBRATED;
|
||||
if (_calibrationMessage) {
|
||||
qDebug() << "Abandoned HMD calibration";
|
||||
Application::getInstance()->getOverlays().deleteOverlay(_calibrationMessage);
|
||||
_calibrationMessage = NULL;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
#ifdef HAVE_LIBOVR
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
void OculusManager::generateDistortionMesh() {
|
||||
|
||||
//Check if we already have the distortion mesh
|
||||
|
@ -331,29 +361,19 @@ void OculusManager::generateDistortionMesh() {
|
|||
return;
|
||||
}
|
||||
|
||||
//Viewport for the render target for each eye
|
||||
_eyeRenderViewport[0].Pos = Vector2i(0, 0);
|
||||
_eyeRenderViewport[0].Size = Sizei(_renderTargetSize.w / 2, _renderTargetSize.h);
|
||||
_eyeRenderViewport[1].Pos = Vector2i((_renderTargetSize.w + 1) / 2, 0);
|
||||
_eyeRenderViewport[1].Size = _eyeRenderViewport[0].Size;
|
||||
|
||||
for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
|
||||
// Allocate and generate distortion mesh vertices
|
||||
ovrDistortionMesh meshData;
|
||||
ovrHmd_CreateDistortionMesh(_ovrHmd, _eyeRenderDesc[eyeNum].Eye, _eyeRenderDesc[eyeNum].Fov, _ovrHmdDesc.DistortionCaps, &meshData);
|
||||
|
||||
ovrHmd_GetRenderScaleAndOffset(_eyeRenderDesc[eyeNum].Fov, _renderTargetSize, _eyeRenderViewport[eyeNum],
|
||||
_UVScaleOffset[eyeNum]);
|
||||
ovrHmd_CreateDistortionMesh(_ovrHmd, _eyeRenderDesc[eyeNum].Eye, _eyeRenderDesc[eyeNum].Fov, _ovrHmd->DistortionCaps, &meshData);
|
||||
|
||||
// Parse the vertex data and create a render ready vertex buffer
|
||||
DistortionVertex* pVBVerts = (DistortionVertex*)OVR_ALLOC(sizeof(DistortionVertex) * meshData.VertexCount);
|
||||
DistortionVertex* pVBVerts = new DistortionVertex[meshData.VertexCount];
|
||||
_meshSize[eyeNum] = meshData.IndexCount;
|
||||
|
||||
// Convert the oculus vertex data to the DistortionVertex format.
|
||||
DistortionVertex* v = pVBVerts;
|
||||
ovrDistortionVertex* ov = meshData.pVertexData;
|
||||
for (unsigned int vertNum = 0; vertNum < meshData.VertexCount; vertNum++) {
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
v->pos.x = ov->ScreenPosNDC.x;
|
||||
v->pos.y = ov->ScreenPosNDC.y;
|
||||
v->texR.x = ov->TanEyeAnglesR.x;
|
||||
|
@ -362,16 +382,6 @@ void OculusManager::generateDistortionMesh() {
|
|||
v->texG.y = ov->TanEyeAnglesG.y;
|
||||
v->texB.x = ov->TanEyeAnglesB.x;
|
||||
v->texB.y = ov->TanEyeAnglesB.y;
|
||||
#else
|
||||
v->pos.x = ov->Pos.x;
|
||||
v->pos.y = ov->Pos.y;
|
||||
v->texR.x = ov->TexR.x;
|
||||
v->texR.y = ov->TexR.y;
|
||||
v->texG.x = ov->TexG.x;
|
||||
v->texG.y = ov->TexG.y;
|
||||
v->texB.x = ov->TexB.x;
|
||||
v->texB.y = ov->TexB.y;
|
||||
#endif
|
||||
v->color.r = v->color.g = v->color.b = (GLubyte)(ov->VignetteFactor * 255.99f);
|
||||
v->color.a = (GLubyte)(ov->TimeWarpFactor * 255.99f);
|
||||
v++;
|
||||
|
@ -391,7 +401,7 @@ void OculusManager::generateDistortionMesh() {
|
|||
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
|
||||
|
||||
//Now that we have the VBOs we can get rid of the mesh data
|
||||
OVR_FREE(pVBVerts);
|
||||
delete [] pVBVerts;
|
||||
ovrHmd_DestroyDistortionMesh(&meshData);
|
||||
}
|
||||
|
||||
|
@ -399,46 +409,101 @@ void OculusManager::generateDistortionMesh() {
|
|||
#endif
|
||||
|
||||
bool OculusManager::isConnected() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
return _isConnected && Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode);
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
//Begins the frame timing for oculus prediction purposes
|
||||
void OculusManager::beginFrameTiming() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
|
||||
if (_frameTimingActive) {
|
||||
printf("WARNING: Called OculusManager::beginFrameTiming() twice in a row, need to call OculusManager::endFrameTiming().");
|
||||
}
|
||||
|
||||
_hmdFrameTiming = ovrHmd_BeginFrameTiming(_ovrHmd, _frameIndex);
|
||||
_frameTimingActive = true;
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
_hmdFrameTiming = ovrHmd_BeginFrameTiming(_ovrHmd, _frameIndex);
|
||||
#endif
|
||||
_frameTimingActive = true;
|
||||
}
|
||||
|
||||
bool OculusManager::allowSwap() {
|
||||
return false;
|
||||
}
|
||||
|
||||
//Ends frame timing
|
||||
void OculusManager::endFrameTiming() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
ovrHmd_EndFrameTiming(_ovrHmd);
|
||||
#endif
|
||||
_frameIndex++;
|
||||
_frameTimingActive = false;
|
||||
#endif
|
||||
}
|
||||
|
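Taken together with the GLCanvas::paintGL() change earlier in this diff, the per-frame contract is: beginFrameTiming() before display(), endFrameTiming() afterwards, and the canvas only swaps buffers itself when allowSwap() permits it (with SDK-side distortion the present happens inside ovrHmd_EndFrame). A hedged sketch of that call order, pieced together from those hunks:

// Hedged sketch of the per-frame call order implied by this file and by the
// GLCanvas::paintGL() hunk earlier in the diff; renderOneHmdFrame itself is not
// a function from this commit.
void renderOneHmdFrame(const glm::quat& bodyOrientation, const glm::vec3& position, Camera& whichCamera) {
    auto glCanvas = Application::getInstance()->getGLWidget();     // same accessor used in display()
    OculusManager::beginFrameTiming();                             // must precede display()
    OculusManager::display(bodyOrientation, position, whichCamera);
    if (OculusManager::allowSwap()) {
        glCanvas->swapBuffers();                                   // client-distortion builds present from the app side
    }
    OculusManager::endFrameTiming();                               // advances the SDK frame index
}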
||||
//Sets the camera FoV and aspect ratio
|
||||
void OculusManager::configureCamera(Camera& camera, int screenWidth, int screenHeight) {
|
||||
#ifdef HAVE_LIBOVR
|
||||
camera.setAspectRatio(_renderTargetSize.w * 0.5f / _renderTargetSize.h);
|
||||
camera.setFieldOfView(atan(_eyeFov[0].UpTan) * DEGREES_PER_RADIAN * 2.0f);
|
||||
#endif
|
||||
}
|
||||
|
||||
static bool timerActive = false;
|
||||
//Displays everything for the oculus, frame timing must be active
|
||||
void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera) {
|
||||
#ifdef HAVE_LIBOVR
|
||||
auto glCanvas = Application::getInstance()->getGLWidget();
|
||||
|
||||
#ifdef DEBUG
|
||||
// Ensure the frame counter always increments by exactly 1
|
||||
static int oldFrameIndex = -1;
|
||||
assert(oldFrameIndex == -1 || oldFrameIndex == _frameIndex - 1);
|
||||
oldFrameIndex = _frameIndex;
|
||||
#endif
|
||||
|
||||
// Every so often do some additional timing calculations and debug output
|
||||
bool debugFrame = 0 == _frameIndex % 400;
|
||||
|
||||
#if 0
|
||||
// Try to measure the amount of time taken to do the distortion
|
||||
// (does not seem to work on OSX with SDK based distortion)
|
||||
// FIXME can't use a static object here, because it will cause a crash when the
|
||||
// query attempts deconstruct after the GL context is gone.
|
||||
static QOpenGLTimerQuery timerQuery;
|
||||
if (!timerQuery.isCreated()) {
|
||||
timerQuery.create();
|
||||
}
|
||||
|
||||
if (timerActive && timerQuery.isResultAvailable()) {
|
||||
auto result = timerQuery.waitForResult();
|
||||
if (result) { qDebug() << "Distortion took " << result << "ns"; };
|
||||
timerActive = false;
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef OVR_DIRECT_MODE
|
||||
static bool attached = false;
|
||||
if (!attached) {
|
||||
attached = true;
|
||||
void * nativeWindowHandle = (void*)(size_t)glCanvas->effectiveWinId();
|
||||
if (nullptr != nativeWindowHandle) {
|
||||
ovrHmd_AttachToWindow(_ovrHmd, nativeWindowHandle, nullptr, nullptr);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifndef OVR_CLIENT_DISTORTION
|
||||
// FIXME: we need a better way of responding to the HSW. In particular
|
||||
// we need to ensure that it's only displayed once per session, rather than
|
||||
// every time the user toggles VR mode, and we need to hook it up to actual
|
||||
// keyboard input. OVR claim they are refactoring HSW
|
||||
// https://forums.oculus.com/viewtopic.php?f=20&t=21720#p258599
|
||||
static ovrHSWDisplayState hasWarningState;
|
||||
if (!_hswDismissed) {
|
||||
ovrHmd_GetHSWDisplayState(_ovrHmd, &hasWarningState);
|
||||
if (hasWarningState.Displayed) {
|
||||
ovrHmd_DismissHSWDisplay(_ovrHmd);
|
||||
} else {
|
||||
_hswDismissed = true;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
//beginFrameTiming must be called before display
|
||||
if (!_frameTimingActive) {
|
||||
printf("WARNING: Called OculusManager::display() without calling OculusManager::beginFrameTiming() first.");
|
||||
|
@ -459,7 +524,6 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
|
|||
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
||||
}
|
||||
|
||||
ovrPosef eyeRenderPose[ovrEye_Count];
|
||||
|
||||
glMatrixMode(GL_PROJECTION);
|
||||
glPushMatrix();
|
||||
|
@ -470,7 +534,6 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
|
|||
glm::quat orientation;
|
||||
glm::vec3 trackerPosition;
|
||||
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
|
||||
ovrVector3f ovrHeadPosition = ts.HeadPose.ThePose.Position;
|
||||
|
||||
|
@ -483,105 +546,153 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
|
|||
}
|
||||
|
||||
trackerPosition = bodyOrientation * trackerPosition;
|
||||
#endif
|
||||
|
||||
static ovrVector3f eyeOffsets[2] = { { 0, 0, 0 }, { 0, 0, 0 } };
|
||||
ovrPosef eyePoses[ovrEye_Count];
|
||||
ovrHmd_GetEyePoses(_ovrHmd, _frameIndex, eyeOffsets, eyePoses, nullptr);
|
||||
ovrHmd_BeginFrame(_ovrHmd, _frameIndex);
|
||||
static ovrPosef eyeRenderPose[ovrEye_Count];
|
||||
//Render each eye into an fbo
|
||||
for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++) {
|
||||
_activeEyeIndex = eyeIndex;
|
||||
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
ovrEyeType eye = _ovrHmd->EyeRenderOrder[eyeIndex];
|
||||
#else
|
||||
ovrEyeType eye = _ovrHmdDesc.EyeRenderOrder[eyeIndex];
|
||||
#endif
|
||||
for_each_eye(_ovrHmd, [&](ovrEyeType eye){
|
||||
// If we're in eye-per-frame mode, only render one eye
|
||||
// per call to display, and allow timewarp to correct for
|
||||
// the other eye. Poor man's perf improvement
|
||||
if (_eyePerFrameMode && eye == _lastEyeRendered) {
|
||||
return;
|
||||
}
|
||||
_lastEyeRendered = _activeEye = eye;
|
||||
eyeRenderPose[eye] = eyePoses[eye];
|
||||
// Set the camera rotation for this eye
|
||||
eyeRenderPose[eye] = ovrHmd_GetEyePose(_ovrHmd, eye);
|
||||
orientation.x = eyeRenderPose[eye].Orientation.x;
|
||||
orientation.y = eyeRenderPose[eye].Orientation.y;
|
||||
orientation.z = eyeRenderPose[eye].Orientation.z;
|
||||
orientation.w = eyeRenderPose[eye].Orientation.w;
|
||||
|
||||
|
||||
// Update the application camera with the latest HMD position
|
||||
whichCamera.setHmdPosition(trackerPosition);
|
||||
whichCamera.setHmdRotation(orientation);
|
||||
|
||||
|
||||
// Update our camera to what the application camera is doing
|
||||
_camera->setRotation(whichCamera.getRotation());
|
||||
_camera->setPosition(whichCamera.getPosition());
|
||||
|
||||
|
||||
// Store the latest left and right eye render locations for things that need to know
|
||||
glm::vec3 thisEyePosition = position + trackerPosition +
|
||||
(bodyOrientation * glm::quat(orientation.x, orientation.y, orientation.z, orientation.w) *
|
||||
glm::vec3(_eyeRenderDesc[eye].ViewAdjust.x, _eyeRenderDesc[eye].ViewAdjust.y, _eyeRenderDesc[eye].ViewAdjust.z));
|
||||
|
||||
RenderArgs::RenderSide renderSide = RenderArgs::STEREO_LEFT;
|
||||
if (eyeIndex == 0) {
|
||||
_leftEyePosition = thisEyePosition;
|
||||
} else {
|
||||
_rightEyePosition = thisEyePosition;
|
||||
renderSide = RenderArgs::STEREO_RIGHT;
|
||||
}
|
||||
glm::vec3(_eyeRenderDesc[eye].HmdToEyeViewOffset.x, _eyeRenderDesc[eye].HmdToEyeViewOffset.y, _eyeRenderDesc[eye].HmdToEyeViewOffset.z));
|
||||
|
||||
_eyePositions[eye] = thisEyePosition;
|
||||
_camera->update(1.0f / Application::getInstance()->getFps());
|
||||
|
||||
glMatrixMode(GL_PROJECTION);
|
||||
glLoadIdentity();
|
||||
const ovrFovPort& port = _eyeFov[_activeEyeIndex];
|
||||
const ovrFovPort& port = _eyeFov[_activeEye];
|
||||
float nearClip = whichCamera.getNearClip(), farClip = whichCamera.getFarClip();
|
||||
glFrustum(-nearClip * port.LeftTan, nearClip * port.RightTan, -nearClip * port.DownTan,
|
||||
nearClip * port.UpTan, nearClip, farClip);
|
||||
|
||||
glViewport(_eyeRenderViewport[eye].Pos.x, _eyeRenderViewport[eye].Pos.y,
|
||||
_eyeRenderViewport[eye].Size.w, _eyeRenderViewport[eye].Size.h);
|
||||
|
||||
|
||||
ovrRecti & vp = _eyeTextures[eye].Header.RenderViewport;
|
||||
vp.Size.h = _recommendedTexSize.h * _offscreenRenderScale;
|
||||
vp.Size.w = _recommendedTexSize.w * _offscreenRenderScale;
|
||||
|
||||
glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
|
||||
|
||||
glMatrixMode(GL_MODELVIEW);
|
||||
glLoadIdentity();
|
||||
|
||||
|
||||
// HACK: instead of passing the stereo eye offset directly in the matrix, pass it in the camera offset
|
||||
//glTranslatef(_eyeRenderDesc[eye].ViewAdjust.x, _eyeRenderDesc[eye].ViewAdjust.y, _eyeRenderDesc[eye].ViewAdjust.z);
|
||||
|
||||
_camera->setEyeOffsetPosition(glm::vec3(-_eyeRenderDesc[eye].ViewAdjust.x, -_eyeRenderDesc[eye].ViewAdjust.y, -_eyeRenderDesc[eye].ViewAdjust.z));
|
||||
|
||||
_camera->setEyeOffsetPosition(glm::vec3(-_eyeRenderDesc[eye].HmdToEyeViewOffset.x, -_eyeRenderDesc[eye].HmdToEyeViewOffset.y, -_eyeRenderDesc[eye].HmdToEyeViewOffset.z));
|
||||
Application::getInstance()->displaySide(*_camera, false, RenderArgs::MONO);
|
||||
|
||||
applicationOverlay.displayOverlayTextureOculus(*_camera);
|
||||
_activeEyeIndex = -1;
|
||||
}
|
||||
|
||||
//Wait till time-warp to reduce latency
|
||||
ovr_WaitTillTime(_hmdFrameTiming.TimewarpPointSeconds);
|
||||
});
|
||||
_activeEye = ovrEye_Count;
|
||||
|
||||
glPopMatrix();
|
||||
|
||||
//Full texture viewport for glow effect
|
||||
glViewport(0, 0, _renderTargetSize.w, _renderTargetSize.h);
|
||||
|
||||
QOpenGLFramebufferObject * finalFbo = nullptr;
|
||||
//Bind the output texture from the glow shader. If glow effect is disabled, we just grab the texture
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
|
||||
QOpenGLFramebufferObject* fbo = DependencyManager::get<GlowEffect>()->render(true);
|
||||
glBindTexture(GL_TEXTURE_2D, fbo->texture());
|
||||
//Full texture viewport for glow effect
|
||||
glViewport(0, 0, _renderTargetSize.w, _renderTargetSize.h);
|
||||
finalFbo = DependencyManager::get<GlowEffect>()->render(true);
|
||||
} else {
|
||||
DependencyManager::get<TextureCache>()->getPrimaryFramebufferObject()->release();
|
||||
glBindTexture(GL_TEXTURE_2D, DependencyManager::get<TextureCache>()->getPrimaryFramebufferObject()->texture());
|
||||
finalFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebufferObject();
|
||||
finalFbo->release();
|
||||
}
|
||||
|
||||
// restore our normal viewport
|
||||
auto glCanvas = Application::getInstance()->getGLWidget();
|
||||
glViewport(0, 0, glCanvas->getDeviceWidth(), glCanvas->getDeviceHeight());
|
||||
|
||||
glMatrixMode(GL_PROJECTION);
|
||||
glPopMatrix();
|
||||
|
||||
// restore our normal viewport
|
||||
glViewport(0, 0, glCanvas->getDeviceWidth(), glCanvas->getDeviceHeight());
|
||||
|
||||
#if 0
|
||||
if (debugFrame && !timerActive) {
|
||||
timerQuery.begin();
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
|
||||
//Wait till time-warp to reduce latency
|
||||
ovr_WaitTillTime(_hmdFrameTiming.TimewarpPointSeconds);
|
||||
|
||||
//Clear the color buffer to ensure that there isn't any residual color
//left over from when OR was not connected.
|
||||
glClear(GL_COLOR_BUFFER_BIT);
|
||||
|
||||
glBindTexture(GL_TEXTURE_2D, finalFbo->texture());
|
||||
|
||||
//Renders the distorted mesh onto the screen
|
||||
renderDistortionMesh(eyeRenderPose);
|
||||
|
||||
glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
glBindTexture(GL_TEXTURE_2D, 0);
|
||||
glCanvas->swapBuffers();
|
||||
|
||||
#else
|
||||
|
||||
for_each_eye([&](ovrEyeType eye) {
|
||||
ovrGLTexture & glEyeTexture = reinterpret_cast<ovrGLTexture&>(_eyeTextures[eye]);
|
||||
glEyeTexture.OGL.TexId = finalFbo->texture();
|
||||
|
||||
});
|
||||
|
||||
ovrHmd_EndFrame(_ovrHmd, eyeRenderPose, _eyeTextures);
|
||||
|
||||
#endif
|
||||
|
||||
#if 0
|
||||
if (debugFrame && !timerActive) {
|
||||
timerQuery.end();
|
||||
timerActive = true;
|
||||
}
|
||||
#endif
|
||||
|
||||
// No DK2, no message.
|
||||
char latency2Text[128] = "";
|
||||
{
|
||||
float latencies[5] = {};
|
||||
if (debugFrame && ovrHmd_GetFloatArray(_ovrHmd, "DK2Latency", latencies, 5) == 5)
|
||||
{
|
||||
bool nonZero = false;
|
||||
for (int i = 0; i < 5; ++i)
|
||||
{
|
||||
nonZero |= (latencies[i] != 0.f);
|
||||
}
|
||||
|
||||
if (nonZero)
|
||||
{
|
||||
qDebug() << QString().sprintf("M2P Latency: Ren: %4.2fms TWrp: %4.2fms PostPresent: %4.2fms Err: %4.2fms %4.2fms",
|
||||
latencies[0], latencies[1], latencies[2], latencies[3], latencies[4]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#ifdef HAVE_LIBOVR
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) {
|
||||
|
||||
glLoadIdentity();
|
||||
|
@ -602,24 +713,25 @@ void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) {
|
|||
|
||||
//Render the distortion meshes for each eye
|
||||
for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
|
||||
|
||||
ovrHmd_GetRenderScaleAndOffset(_eyeRenderDesc[eyeNum].Fov, _renderTargetSize, _eyeTextures[eyeNum].Header.RenderViewport,
|
||||
_UVScaleOffset[eyeNum]);
|
||||
|
||||
GLfloat uvScale[2] = { _UVScaleOffset[eyeNum][0].x, _UVScaleOffset[eyeNum][0].y };
|
||||
_program.setUniformValueArray(_eyeToSourceUVScaleLocation, uvScale, 1, 2);
|
||||
GLfloat uvOffset[2] = { _UVScaleOffset[eyeNum][1].x, _UVScaleOffset[eyeNum][1].y };
|
||||
GLfloat uvOffset[2] = { _UVScaleOffset[eyeNum][1].x, 1.0f - _UVScaleOffset[eyeNum][1].y };
|
||||
_program.setUniformValueArray(_eyeToSourceUVOffsetLocation, uvOffset, 1, 2);
|
||||
|
||||
ovrMatrix4f timeWarpMatrices[2];
|
||||
Matrix4f transposeMatrices[2];
|
||||
glm::mat4 transposeMatrices[2];
|
||||
//Grabs the timewarp matrices to be used in the shader
|
||||
ovrHmd_GetEyeTimewarpMatrices(_ovrHmd, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
|
||||
transposeMatrices[0] = Matrix4f(timeWarpMatrices[0]);
|
||||
transposeMatrices[1] = Matrix4f(timeWarpMatrices[1]);
|
||||
|
||||
//Have to transpose the matrices before using them
|
||||
transposeMatrices[0].Transpose();
|
||||
transposeMatrices[1].Transpose();
|
||||
transposeMatrices[0] = glm::transpose(toGlm(timeWarpMatrices[0]));
|
||||
transposeMatrices[1] = glm::transpose(toGlm(timeWarpMatrices[1]));
|
||||
|
||||
glUniformMatrix4fv(_eyeRotationStartLocation, 1, GL_FALSE, (GLfloat *)transposeMatrices[0].M);
|
||||
glUniformMatrix4fv(_eyeRotationEndLocation, 1, GL_FALSE, (GLfloat *)transposeMatrices[1].M);
|
||||
glUniformMatrix4fv(_eyeRotationStartLocation, 1, GL_FALSE, (GLfloat *)&transposeMatrices[0][0][0]);
|
||||
glUniformMatrix4fv(_eyeRotationEndLocation, 1, GL_FALSE, (GLfloat *)&transposeMatrices[1][0][0]);
|
||||
|
||||
glBindBuffer(GL_ARRAY_BUFFER, _vertices[eyeNum]);
|
||||
|
||||
|
@ -649,86 +761,54 @@ void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) {
|
|||
|
||||
//Tries to reconnect to the sensors
|
||||
void OculusManager::reset() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
if (_isConnected) {
|
||||
ovrHmd_RecenterPose(_ovrHmd);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
//Gets the current predicted angles from the oculus sensors
|
||||
void OculusManager::getEulerAngles(float& yaw, float& pitch, float& roll) {
|
||||
#ifdef HAVE_LIBOVR
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
|
||||
#else
|
||||
ovrSensorState ss = ovrHmd_GetSensorState(_ovrHmd, _hmdFrameTiming.ScanoutMidpointSeconds);
|
||||
#endif
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
if (ts.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) {
|
||||
#else
|
||||
if (ss.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) {
|
||||
#endif
|
||||
|
||||
#if defined(__APPLE__) || defined(_WIN32)
|
||||
ovrPosef headPose = ts.HeadPose.ThePose;
|
||||
#else
|
||||
ovrPosef headPose = ss.Predicted.Pose;
|
||||
#endif
|
||||
Quatf orientation = Quatf(headPose.Orientation);
|
||||
orientation.GetEulerAngles<Axis_Y, Axis_X, Axis_Z, Rotate_CCW, Handed_R>(&yaw, &pitch, &roll);
|
||||
glm::vec3 euler = glm::eulerAngles(toGlm(ts.HeadPose.ThePose.Orientation));
|
||||
yaw = euler.y;
|
||||
pitch = euler.x;
|
||||
roll = euler.z;
|
||||
} else {
|
||||
yaw = 0.0f;
|
||||
pitch = 0.0f;
|
||||
roll = 0.0f;
|
||||
}
|
||||
#else
|
||||
yaw = 0.0f;
|
||||
pitch = 0.0f;
|
||||
roll = 0.0f;
|
||||
#endif
|
||||
}
|
||||
|
||||
glm::vec3 OculusManager::getRelativePosition() {
|
||||
#if (defined(__APPLE__) || defined(_WIN32)) && HAVE_LIBOVR
|
||||
ovrTrackingState trackingState = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
|
||||
ovrVector3f headPosition = trackingState.HeadPose.ThePose.Position;
|
||||
|
||||
return glm::vec3(headPosition.x, headPosition.y, headPosition.z);
|
||||
#else
|
||||
// no positional tracking in Linux yet
|
||||
return glm::vec3(0.0f, 0.0f, 0.0f);
|
||||
#endif
|
||||
}
|
||||
|
||||
//Used to set the size of the glow framebuffers
|
||||
QSize OculusManager::getRenderTargetSize() {
|
||||
#ifdef HAVE_LIBOVR
|
||||
QSize rv;
|
||||
rv.setWidth(_renderTargetSize.w);
|
||||
rv.setHeight(_renderTargetSize.h);
|
||||
return rv;
|
||||
#else
|
||||
return QSize(100, 100);
|
||||
#endif
|
||||
}
|
||||
|
||||
void OculusManager::overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
|
||||
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) {
|
||||
#ifdef HAVE_LIBOVR
|
||||
if (_activeEyeIndex != -1) {
|
||||
const ovrFovPort& port = _eyeFov[_activeEyeIndex];
|
||||
if (_activeEye != ovrEye_Count) {
|
||||
const ovrFovPort& port = _eyeFov[_activeEye];
|
||||
right = nearVal * port.RightTan;
|
||||
left = -nearVal * port.LeftTan;
|
||||
top = nearVal * port.UpTan;
|
||||
bottom = -nearVal * port.DownTan;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
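The tangent-to-plane mapping used in overrideOffAxisFrustum() above can also be packaged as a small helper that produces a glm projection matrix directly; glm::frustum takes the same (left, right, bottom, top, near, far) parameters as the glFrustum call in display(). This helper is an illustration only, not code from this commit.

// Illustration only: build an off-axis projection from an ovrFovPort, using the
// same left/right/up/down tangents as overrideOffAxisFrustum() above.
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <OVR_CAPI.h>

glm::mat4 projectionFromFovPort(const ovrFovPort& port, float nearClip, float farClip) {
    float left   = -nearClip * port.LeftTan;
    float right  =  nearClip * port.RightTan;
    float bottom = -nearClip * port.DownTan;
    float top    =  nearClip * port.UpTan;
    return glm::frustum(left, right, bottom, top, nearClip, farClip);
}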
||||
int OculusManager::getHMDScreen() {
|
||||
int hmdScreenIndex = -1; // unknown
|
||||
#ifdef HAVE_LIBOVR
|
||||
// TODO: it might be smarter to handle multiple HMDs connected in this case. but for now,
|
||||
// we will simply assume the initialization code that set up _ovrHmd picked the best hmd
|
||||
|
||||
|
@ -777,7 +857,6 @@ int OculusManager::getHMDScreen() {
|
|||
screenNumber++;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
return hmdScreenIndex;
|
||||
}
|
||||
|
||||
|
|
|
@ -13,19 +13,39 @@
|
|||
#ifndef hifi_OculusManager_h
|
||||
#define hifi_OculusManager_h
|
||||
|
||||
#ifdef HAVE_LIBOVR
|
||||
#include <OVR.h>
|
||||
#endif
|
||||
#include <OVR_CAPI.h>
|
||||
|
||||
#include <ProgramObject.h>
|
||||
#include <glm/glm.hpp>
|
||||
#include <glm/gtc/matrix_transform.hpp>
|
||||
#include <glm/gtc/type_ptr.hpp>
|
||||
|
||||
class Camera;
|
||||
class PalmData;
|
||||
class Text3DOverlay;
|
||||
|
||||
// Uncomment this to enable client side distortion. NOT recommended since
|
||||
// the Oculus SDK will ideally provide the best practices for distortion
|
||||
// in terms of performance and quality, and by using it we will get updated
|
||||
// best practices for free with new runtime releases.
|
||||
#define OVR_CLIENT_DISTORTION 1
|
||||
|
||||
|
||||
// On Win32 platforms, enabling Direct HMD requires that the SDK be
|
||||
// initialized before the GL context is set up, but this breaks v-sync
|
||||
// for any application that has a Direct-mode-enabled Rift connected
// but is not rendering to it. For the time being I'm setting this as
// a macro-enabled mechanism which changes where the SDK is initialized.
// To enable Direct HMD mode, you can un-comment this, but with the
// caveat that it will break v-sync in NON-VR mode if you have an Oculus
// Rift connected and in Direct mode.
|
||||
#define OVR_DIRECT_MODE 1
|
||||
|
||||
|
||||
/// Handles interaction with the Oculus Rift.
|
||||
class OculusManager {
|
||||
public:
|
||||
static void init();
|
||||
static void connect();
|
||||
static void disconnect();
|
||||
static bool isConnected();
|
||||
|
@ -33,6 +53,7 @@ public:
|
|||
static void abandonCalibration();
|
||||
static void beginFrameTiming();
|
||||
static void endFrameTiming();
|
||||
static bool allowSwap();
|
||||
static void configureCamera(Camera& camera, int screenWidth, int screenHeight);
|
||||
static void display(const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera);
|
||||
static void reset();
|
||||
|
@ -47,18 +68,17 @@ public:
|
|||
static void overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
|
||||
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane);
|
||||
|
||||
static glm::vec3 getLeftEyePosition() { return _leftEyePosition; }
|
||||
static glm::vec3 getRightEyePosition() { return _rightEyePosition; }
|
||||
static glm::vec3 getLeftEyePosition() { return _eyePositions[ovrEye_Left]; }
|
||||
static glm::vec3 getRightEyePosition() { return _eyePositions[ovrEye_Right]; }
|
||||
|
||||
static int getHMDScreen();
|
||||
|
||||
private:
|
||||
#ifdef HAVE_LIBOVR
|
||||
static void initSdk();
|
||||
static void shutdownSdk();
|
||||
#ifdef OVR_CLIENT_DISTORTION
|
||||
static void generateDistortionMesh();
|
||||
static void renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]);
|
||||
|
||||
static bool similarNames(const QString& nameA,const QString& nameB);
|
||||
|
||||
struct DistortionVertex {
|
||||
glm::vec2 pos;
|
||||
glm::vec2 texR;
|
||||
|
@ -85,25 +105,28 @@ private:
|
|||
static int _texCoord0AttributeLocation;
|
||||
static int _texCoord1AttributeLocation;
|
||||
static int _texCoord2AttributeLocation;
|
||||
|
||||
static bool _isConnected;
|
||||
|
||||
static ovrHmd _ovrHmd;
|
||||
static ovrHmdDesc _ovrHmdDesc;
|
||||
static ovrFovPort _eyeFov[ovrEye_Count];
|
||||
static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count];
|
||||
static ovrSizei _renderTargetSize;
|
||||
static ovrVector2f _UVScaleOffset[ovrEye_Count][2];
|
||||
static GLuint _vertices[ovrEye_Count];
|
||||
static GLuint _indices[ovrEye_Count];
|
||||
static GLsizei _meshSize[ovrEye_Count];
|
||||
static ovrFrameTiming _hmdFrameTiming;
|
||||
static ovrRecti _eyeRenderViewport[ovrEye_Count];
|
||||
static bool _programInitialized;
|
||||
#endif
|
||||
|
||||
static ovrTexture _eyeTextures[ovrEye_Count];
|
||||
static bool _isConnected;
|
||||
static glm::vec3 _eyePositions[ovrEye_Count];
|
||||
static ovrHmd _ovrHmd;
|
||||
static ovrFovPort _eyeFov[ovrEye_Count];
|
||||
static ovrVector3f _eyeOffset[ovrEye_Count];
|
||||
static glm::mat4 _eyeProjection[ovrEye_Count];
|
||||
static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count];
|
||||
static ovrSizei _renderTargetSize;
|
||||
static unsigned int _frameIndex;
|
||||
static bool _frameTimingActive;
|
||||
static bool _programInitialized;
|
||||
static Camera* _camera;
|
||||
static int _activeEyeIndex;
|
||||
static ovrEyeType _activeEye;
|
||||
static bool _hswDismissed;
|
||||
|
||||
static void calibrate(const glm::vec3 position, const glm::quat orientation);
|
||||
enum CalibrationState {
|
||||
|
@ -125,13 +148,65 @@ private:
|
|||
static glm::quat _calibrationOrientation;
|
||||
static quint64 _calibrationStartTime;
|
||||
static int _calibrationMessage;
|
||||
|
||||
#endif
|
||||
|
||||
static glm::vec3 _leftEyePosition;
|
||||
static glm::vec3 _rightEyePosition;
|
||||
|
||||
|
||||
// TODO drop this variable and use the existing 'Developer | Render | Scale Resolution' value
|
||||
static ovrSizei _recommendedTexSize;
|
||||
static float _offscreenRenderScale;
|
||||
static bool _eyePerFrameMode;
|
||||
static ovrEyeType _lastEyeRendered;
|
||||
};
|
||||
|
||||
|
||||
inline glm::mat4 toGlm(const ovrMatrix4f & om) {
|
||||
return glm::transpose(glm::make_mat4(&om.M[0][0]));
|
||||
}
|
||||
|
||||
inline glm::mat4 toGlm(const ovrFovPort & fovport, float nearPlane = 0.01f, float farPlane = 10000.0f) {
|
||||
return toGlm(ovrMatrix4f_Projection(fovport, nearPlane, farPlane, true));
|
||||
}
|
||||
|
||||
inline glm::vec3 toGlm(const ovrVector3f & ov) {
|
||||
return glm::make_vec3(&ov.x);
|
||||
}
|
||||
|
||||
inline glm::vec2 toGlm(const ovrVector2f & ov) {
|
||||
return glm::make_vec2(&ov.x);
|
||||
}
|
||||
|
||||
inline glm::uvec2 toGlm(const ovrSizei & ov) {
|
||||
return glm::uvec2(ov.w, ov.h);
|
||||
}
|
||||
|
||||
inline glm::quat toGlm(const ovrQuatf & oq) {
|
||||
return glm::make_quat(&oq.x);
|
||||
}
|
||||
|
||||
inline glm::mat4 toGlm(const ovrPosef & op) {
|
||||
glm::mat4 orientation = glm::mat4_cast(toGlm(op.Orientation));
|
||||
glm::mat4 translation = glm::translate(glm::mat4(), toGlm(op.Position));
|
||||
return translation * orientation;
|
||||
}
|
||||
|
||||
inline ovrMatrix4f ovrFromGlm(const glm::mat4 & m) {
|
||||
ovrMatrix4f result;
|
||||
glm::mat4 transposed(glm::transpose(m));
|
||||
memcpy(result.M, &(transposed[0][0]), sizeof(float) * 16);
|
||||
return result;
|
||||
}
|
||||
|
||||
inline ovrVector3f ovrFromGlm(const glm::vec3 & v) {
|
||||
return{ v.x, v.y, v.z };
|
||||
}
|
||||
|
||||
inline ovrVector2f ovrFromGlm(const glm::vec2 & v) {
|
||||
return{ v.x, v.y };
|
||||
}
|
||||
|
||||
inline ovrSizei ovrFromGlm(const glm::uvec2 & v) {
|
||||
return{ (int)v.x, (int)v.y };
|
||||
}
|
||||
|
||||
inline ovrQuatf ovrFromGlm(const glm::quat & q) {
|
||||
return{ q.x, q.y, q.z, q.w };
|
||||
}
|
||||
|
||||
#endif // hifi_OculusManager_h
|
||||
|
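These inline converters replace the member-by-member copies the old code used when moving data between LibOVR and glm. As a usage illustration (not part of this commit, and assuming this header is included so the helpers above are visible), the tracked head pose can be turned into a glm view matrix with one call to toGlm(ovrPosef):

// Usage sketch for the toGlm() helpers above; hmd is a device handle obtained via
// ovrHmd_Create(), as in OculusManager::connect().
#include <glm/glm.hpp>
#include <OVR_CAPI.h>

glm::mat4 headPoseToViewMatrix(ovrHmd hmd) {
    ovrTrackingState ts = ovrHmd_GetTrackingState(hmd, ovr_GetTimeInSeconds());
    glm::mat4 headPose = toGlm(ts.HeadPose.ThePose);   // translation * orientation, as defined above
    return glm::inverse(headPose);                     // view matrix = inverse of the head transform
}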
|
|
@@ -18,6 +18,7 @@
#include "AddressManager.h"
#include "Application.h"
#include "devices/OculusManager.h"

#ifdef Q_OS_WIN
static BOOL CALLBACK enumWindowsCallback(HWND hWnd, LPARAM lParam) {

@@ -92,6 +93,10 @@ int main(int argc, const char* argv[]) {
        usecTimestampNowForceClockSkew(clockSkew);
        qDebug("clockSkewOption=%s clockSkew=%d", clockSkewOption, clockSkew);
    }
    // Oculus initialization MUST PRECEDE OpenGL context creation.
    // The nature of the Application constructor means this has to be either here,
    // or in the main window ctor, before GL startup.
    OculusManager::init();

    int exitCode;
    {

@@ -14,6 +14,7 @@
#include "Application.h"
#include "Menu.h"
#include "OctreePacketProcessor.h"
#include "SceneScriptingInterface.h"

void OctreePacketProcessor::processPacket(const SharedNodePointer& sendingNode, const QByteArray& packet) {
    PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),

@@ -81,13 +82,13 @@ void OctreePacketProcessor::processPacket(const SharedNodePointer& sendingNode, const QByteArray& packet) {
    switch(voxelPacketType) {
        case PacketTypeEntityErase: {
            if (Menu::getInstance()->isOptionChecked(MenuOption::Entities)) {
                if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderEntities()) {
                    app->_entities.processEraseMessage(mutablePacket, sendingNode);
                }
        } break;

        case PacketTypeEntityData: {
            if (Menu::getInstance()->isOptionChecked(MenuOption::Entities)) {
                if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderEntities()) {
                    app->_entities.processDatagram(mutablePacket, sendingNode);
                }
        } break;

@@ -24,7 +24,7 @@
#include "LoginDialog.h"
#include "UIUtil.h"

const QString CREATE_ACCOUNT_URL = NetworkingConstants::METAVERSE_SERVER_URL.toString() + "/create";
const QString CREATE_ACCOUNT_URL = NetworkingConstants::METAVERSE_SERVER_URL.toString() + "/signup";
const QString FORGOT_PASSWORD_URL = NetworkingConstants::METAVERSE_SERVER_URL.toString() + "/users/password/new";

LoginDialog::LoginDialog(QWidget* parent) :

@@ -107,6 +107,7 @@ void EntityTreeRenderer::init() {
}

void EntityTreeRenderer::shutdown() {
    _entitiesScriptEngine->disconnect(); // disconnect all slots/signals from the script engine
    _shuttingDown = true;
}

@@ -393,7 +393,6 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& info) {
    // collision model's extents).

    glm::vec3 scale = _dimensions / renderGeometry.getUnscaledMeshExtents().size();

    // multiply each point by scale before handing the point-set off to the physics engine
    for (int i = 0; i < _points.size(); i++) {
        for (int j = 0; j < _points[i].size(); j++) {

@@ -819,8 +819,9 @@ void appendIndex(MeshData& data, QVector<int>& indices, int index) {
    }
}

ExtractedMesh extractMesh(const FBXNode& object) {
ExtractedMesh extractMesh(const FBXNode& object, unsigned int& meshIndex) {
    MeshData data;
    data.extracted.mesh.meshIndex = meshIndex++;
    QVector<int> materials;
    QVector<int> textures;
    foreach (const FBXNode& child, object.children) {

@@ -1261,6 +1262,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
    float unitScaleFactor = 1.0f;
    glm::vec3 ambientColor;
    QString hifiGlobalNodeID;
    unsigned int meshIndex = 0;
    foreach (const FBXNode& child, node.children) {

        if (child.name == "FBXHeaderExtension") {

@@ -1305,7 +1307,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
            foreach (const FBXNode& object, child.children) {
                if (object.name == "Geometry") {
                    if (object.properties.at(2) == "Mesh") {
                        meshes.insert(getID(object.properties), extractMesh(object));
                        meshes.insert(getID(object.properties), extractMesh(object, meshIndex));
                    } else { // object.properties.at(2) == "Shape"
                        ExtractedBlendshape extracted = { getID(object.properties), extractBlendshape(object) };
                        blendshapes.append(extracted);

@@ -1440,7 +1442,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
                    } else if (subobject.name == "Vertices") {
                        // it's a mesh as well as a model
                        mesh = &meshes[getID(object.properties)];
                        *mesh = extractMesh(object);
                        *mesh = extractMesh(object, meshIndex);

                    } else if (subobject.name == "Shape") {
                        ExtractedBlendshape blendshape = { subobject.properties.at(0).toString(),

@@ -1980,7 +1982,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
    // see if any materials have texture children
    bool materialsHaveTextures = checkMaterialsHaveTextures(materials, textureFilenames, childMap);

    for (QHash<QString, ExtractedMesh>::iterator it = meshes.begin(); it != meshes.end(); it++) {
        ExtractedMesh& extracted = it.value();

@@ -152,6 +152,8 @@
    bool hasSpecularTexture() const;
    bool hasEmissiveTexture() const;

    unsigned int meshIndex; // the order the meshes appeared in the object file
};

/// A single animation frame extracted from an FBX document.

@@ -343,6 +343,7 @@ FBXGeometry readOBJ(QIODevice* device, const QVariantHash& mapping) {
    }

    FBXMesh &mesh = geometry.meshes[0];
    mesh.meshIndex = 0;

    // if we got a hint about units, scale all the points
    if (scaleGuess != 1.0f) {

@@ -92,8 +92,8 @@ void EarthSunModel::setSurfaceOrientation(const Quat& orientation) {
double moduloRange(double val, double minVal, double maxVal) {
    double range = maxVal - minVal;
    double rval = (val - minVal) / range;
    double intval;
    return modf(rval, &intval) * range + minVal;
    rval = rval - floor(rval);
    return rval * range + minVal;
}

const float MAX_LONGITUDE = 180.0f;
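
// Worked example (illustrative, not part of this commit): the floor()-based wrap above also
// handles inputs below minVal, where the old modf() version kept a negative fractional part
// and returned an out-of-range value; e.g. -190 degrees of longitude should wrap to +170.
#include <cmath>
#include <cassert>
static void checkModuloRangeWrap() {
    double range = 180.0 - (-180.0);             // 360
    double rval = (-190.0 - (-180.0)) / range;   // -0.02777...
    rval = rval - std::floor(rval);              // 0.97222...
    assert(std::fabs(rval * range + (-180.0) - 170.0) < 1e-9);  // wraps to +170
}
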
@@ -214,7 +214,7 @@ btVector3 CharacterController::perpindicularComponent(const btVector3& direction
const btVector3 LOCAL_UP_AXIS(0.0f, 1.0f, 0.0f);
const float DEFAULT_GRAVITY = 5.0f;
const float TERMINAL_VELOCITY = 55.0f;
const float JUMP_SPEED = 5.0f;
const float JUMP_SPEED = 3.5f;

CharacterController::CharacterController(AvatarData* avatarData) {
    assert(avatarData);

@@ -229,8 +229,8 @@ CharacterController::CharacterController(AvatarData* avatarData) {
    _velocityTimeInterval = 0.0f;
    _verticalVelocity = 0.0f;
    _verticalOffset = 0.0f;
    _gravity = DEFAULT_GRAVITY; // slower than Earth's
    _maxFallSpeed = TERMINAL_VELOCITY; // Terminal velocity of a sky diver in m/s.
    _gravity = DEFAULT_GRAVITY;
    _maxFallSpeed = TERMINAL_VELOCITY;
    _jumpSpeed = JUMP_SPEED;
    _isOnGround = false;
    _isJumping = false;

@@ -529,6 +529,7 @@ void CharacterController::stepDown(btCollisionWorld* collisionWorld, btScalar dt
        _verticalVelocity = 0.0f;
        _verticalOffset = 0.0f;
        _isJumping = false;
        _isHovering = false;
        _isOnGround = true;
    } else if (!_isJumping) {
        // sweep again for floor within downStep threshold

@@ -555,6 +556,7 @@ void CharacterController::stepDown(btCollisionWorld* collisionWorld, btScalar dt
            _verticalVelocity = 0.0f;
            _verticalOffset = 0.0f;
            _isJumping = false;
            _isHovering = false;
            _isOnGround = true;
        } else {
            // nothing to step down on

@@ -583,6 +585,7 @@ void CharacterController::reset(btCollisionWorld* collisionWorld) {
    _verticalOffset = 0.0;
    _isOnGround = false;
    _isJumping = false;
    _isHovering = true;
    _walkDirection.setValue(0,0,0);
    _velocityTimeInterval = 0.0;

@@ -630,8 +633,13 @@ void CharacterController::playerStep(btCollisionWorld* collisionWorld, btScalar

    // Update fall velocity.
    if (_isHovering) {
        const btScalar HOVER_RELAXATION_TIMESCALE = 1.0f;
        _verticalVelocity *= (1.0f - dt / HOVER_RELAXATION_TIMESCALE);
        const btScalar MIN_HOVER_VERTICAL_VELOCITY = 0.1f;
        if (fabsf(_verticalVelocity) < MIN_HOVER_VERTICAL_VELOCITY) {
            _verticalVelocity = 0.0f;
        } else {
            const btScalar HOVER_RELAXATION_TIMESCALE = 0.8f;
            _verticalVelocity *= (1.0f - dt / HOVER_RELAXATION_TIMESCALE);
        }
    } else {
        _verticalVelocity -= _gravity * dt;
        if (_verticalVelocity > _jumpSpeed) {

@@ -696,7 +704,7 @@ void CharacterController::jump() {
    } else {
        quint64 now = usecTimestampNow();
        const quint64 JUMP_TO_HOVER_PERIOD = USECS_PER_SECOND;
        if (now - _jumpToHoverStart < JUMP_TO_HOVER_PERIOD) {
        if (now - _jumpToHoverStart > JUMP_TO_HOVER_PERIOD) {
            _isHovering = true;
        }
    }

@@ -781,6 +789,7 @@ void CharacterController::setEnabled(bool enabled) {
        // Setting the ADD bit here works for all cases so we don't even bother checking other bits.
        _pendingFlags |= PENDING_FLAG_ADD_TO_SIMULATION;
        _isHovering = true;
        _verticalVelocity = 0.0f;
    } else {
        if (_dynamicsWorld) {
            _pendingFlags |= PENDING_FLAG_REMOVE_FROM_SIMULATION;
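
// Sketch of the hover damping above (illustrative, not part of this commit): multiplying by
// (1 - dt / timescale) each step approximates exponential decay exp(-dt / timescale) when
// dt << timescale, and the small-velocity cutoff snaps the value to zero so it settles.
#include <cmath>
static float relaxTowardZero(float velocity, float dt, float timescale, float cutoff) {
    velocity *= (1.0f - dt / timescale);   // one damping step, as in playerStep()
    if (std::fabs(velocity) < cutoff) {
        velocity = 0.0f;                   // settle instead of decaying forever
    }
    return velocity;
}
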
@@ -82,7 +82,8 @@ Model::Model(QObject* parent) :
    _appliedBlendNumber(0),
    _calculatedMeshBoxesValid(false),
    _calculatedMeshTrianglesValid(false),
    _meshGroupsKnown(false) {
    _meshGroupsKnown(false),
    _renderCollisionHull(false) {

    // we may have been created in the network thread, but we live in the main thread
    if (_viewState) {

@@ -712,13 +713,13 @@ bool Model::renderCore(float alpha, RenderMode mode, RenderArgs* args) {
    {
        GLenum buffers[3];
        int bufferCount = 0;
        if (mode == DEFAULT_RENDER_MODE || mode == DIFFUSE_RENDER_MODE) {
        if (mode == DEFAULT_RENDER_MODE || mode == DIFFUSE_RENDER_MODE || mode == DEBUG_RENDER_MODE) {
            buffers[bufferCount++] = GL_COLOR_ATTACHMENT0;
        }
        if (mode == DEFAULT_RENDER_MODE || mode == NORMAL_RENDER_MODE) {
        if (mode == DEFAULT_RENDER_MODE || mode == NORMAL_RENDER_MODE || mode == DEBUG_RENDER_MODE) {
            buffers[bufferCount++] = GL_COLOR_ATTACHMENT1;
        }
        if (mode == DEFAULT_RENDER_MODE) {
        if (mode == DEFAULT_RENDER_MODE || mode == DEBUG_RENDER_MODE) {
            buffers[bufferCount++] = GL_COLOR_ATTACHMENT2;
        }
        GLBATCH(glDrawBuffers)(bufferCount, buffers);

@@ -777,7 +778,7 @@ bool Model::renderCore(float alpha, RenderMode mode, RenderArgs* args) {
        GLBATCH(glDrawBuffers)(bufferCount, buffers);
    }

    if (mode == DEFAULT_RENDER_MODE || mode == DIFFUSE_RENDER_MODE) {
    if (mode == DEFAULT_RENDER_MODE || mode == DIFFUSE_RENDER_MODE || mode == DEBUG_RENDER_MODE) {
        const float MOSTLY_TRANSPARENT_THRESHOLD = 0.0f;
        translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, false, false, false, args, true);
        translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, false, false, true, args, true);

@@ -1778,13 +1779,13 @@ void Model::endScene(RenderMode mode, RenderArgs* args) {
    {
        GLenum buffers[3];
        int bufferCount = 0;
        if (mode == DEFAULT_RENDER_MODE || mode == DIFFUSE_RENDER_MODE) {
        if (mode == DEFAULT_RENDER_MODE || mode == DIFFUSE_RENDER_MODE || mode == DEBUG_RENDER_MODE) {
            buffers[bufferCount++] = GL_COLOR_ATTACHMENT0;
        }
        if (mode == DEFAULT_RENDER_MODE || mode == NORMAL_RENDER_MODE) {
        if (mode == DEFAULT_RENDER_MODE || mode == NORMAL_RENDER_MODE || mode == DEBUG_RENDER_MODE) {
            buffers[bufferCount++] = GL_COLOR_ATTACHMENT1;
        }
        if (mode == DEFAULT_RENDER_MODE) {
        if (mode == DEFAULT_RENDER_MODE || mode == DEBUG_RENDER_MODE) {
            buffers[bufferCount++] = GL_COLOR_ATTACHMENT2;
        }
        GLBATCH(glDrawBuffers)(bufferCount, buffers);

@@ -1843,7 +1844,7 @@ void Model::endScene(RenderMode mode, RenderArgs* args) {
        GLBATCH(glDrawBuffers)(bufferCount, buffers);
    }

    if (mode == DEFAULT_RENDER_MODE || mode == DIFFUSE_RENDER_MODE) {
    if (mode == DEFAULT_RENDER_MODE || mode == DIFFUSE_RENDER_MODE || mode == DEBUG_RENDER_MODE) {
        const float MOSTLY_TRANSPARENT_THRESHOLD = 0.0f;
        translucentParts += renderMeshesForModelsInScene(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, false, false, false, args);
        translucentParts += renderMeshesForModelsInScene(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, false, false, true, args);

@@ -1919,6 +1920,23 @@ bool Model::renderInScene(float alpha, RenderArgs* args) {
    if (_meshStates.isEmpty()) {
        return false;
    }

    if (args->_renderMode == RenderArgs::DEBUG_RENDER_MODE && _renderCollisionHull == false) {
        // turning collision hull rendering on
        _renderCollisionHull = true;
        _nextGeometry = _collisionGeometry;
        _saveNonCollisionGeometry = _geometry;
        updateGeometry();
        simulate(0.0, true);
    } else if (args->_renderMode != RenderArgs::DEBUG_RENDER_MODE && _renderCollisionHull == true) {
        // turning collision hull rendering off
        _renderCollisionHull = false;
        _nextGeometry = _saveNonCollisionGeometry;
        _saveNonCollisionGeometry.clear();
        updateGeometry();
        simulate(0.0, true);
    }

    renderSetup(args);
    _modelsInScene.push_back(this);
    return true;

@@ -2402,8 +2420,9 @@ int Model::renderMeshes(gpu::Batch& batch, RenderMode mode, bool translucent, fl
}

int Model::renderMeshesFromList(QVector<int>& list, gpu::Batch& batch, RenderMode mode, bool translucent, float alphaThreshold, RenderArgs* args,
                                Locations* locations, SkinLocations* skinLocations, bool forceRenderMeshes) {
int Model::renderMeshesFromList(QVector<int>& list, gpu::Batch& batch, RenderMode mode, bool translucent,
                                float alphaThreshold, RenderArgs* args, Locations* locations, SkinLocations* skinLocations,
                                bool forceRenderMeshes) {
    PROFILE_RANGE(__FUNCTION__);

    auto textureCache = DependencyManager::get<TextureCache>();
@@ -91,7 +91,7 @@
    void reset();
    virtual void simulate(float deltaTime, bool fullUpdate = true);

    enum RenderMode { DEFAULT_RENDER_MODE, SHADOW_RENDER_MODE, DIFFUSE_RENDER_MODE, NORMAL_RENDER_MODE };
    enum RenderMode { DEFAULT_RENDER_MODE, SHADOW_RENDER_MODE, DIFFUSE_RENDER_MODE, NORMAL_RENDER_MODE, DEBUG_RENDER_MODE };

    bool render(float alpha = 1.0f, RenderMode mode = DEFAULT_RENDER_MODE, RenderArgs* args = NULL);

@@ -295,6 +295,7 @@ private:
    float _nextLODHysteresis;

    QSharedPointer<NetworkGeometry> _collisionGeometry;
    QSharedPointer<NetworkGeometry> _saveNonCollisionGeometry;

    float _pupilDilation;
    QVector<float> _blendshapeCoefficients;

@@ -479,6 +480,7 @@ private:

    static AbstractViewStateInterface* _viewState;

    bool _renderCollisionHull;
};

Q_DECLARE_METATYPE(QPointer<Model>)
@@ -20,21 +20,62 @@ void SceneScriptingInterface::setStageLocation(float longitude, float latitude,
    _skyStage->setOriginLocation(longitude, latitude, altitude);
}

float SceneScriptingInterface::getStageLocationLongitude() const {
    return _skyStage->getOriginLongitude();
}
float SceneScriptingInterface::getStageLocationLatitude() const {
    return _skyStage->getOriginLatitude();
}
float SceneScriptingInterface::getStageLocationAltitude() const {
    return _skyStage->getOriginSurfaceAltitude();
}

void SceneScriptingInterface::setStageDayTime(float hour) {
    _skyStage->setDayTime(hour);
}

float SceneScriptingInterface::getStageDayTime() const {
    return _skyStage->getDayTime();
}

void SceneScriptingInterface::setStageYearTime(int day) {
    _skyStage->setYearTime(day);
}

int SceneScriptingInterface::getStageYearTime() const {
    return _skyStage->getYearTime();
}

void SceneScriptingInterface::setSunColor(const glm::vec3& color) {
    _skyStage->setSunColor(color);
}

const glm::vec3& SceneScriptingInterface::getSunColor() const {
    return _skyStage->getSunColor();
}

void SceneScriptingInterface::setSunIntensity(float intensity) {
    _skyStage->setSunIntensity(intensity);
}

float SceneScriptingInterface::getSunIntensity() const {
    return _skyStage->getSunIntensity();
}

model::SunSkyStagePointer SceneScriptingInterface::getSkyStage() const {
    return _skyStage;
}

void SceneScriptingInterface::setShouldRenderAvatars(bool shouldRenderAvatars) {
    if (shouldRenderAvatars != _shouldRenderAvatars) {
        _shouldRenderAvatars = shouldRenderAvatars;
        emit shouldRenderAvatarsChanged(_shouldRenderAvatars);
    }
}

void SceneScriptingInterface::setShouldRenderEntities(bool shouldRenderEntities) {
    if (shouldRenderEntities != _shouldRenderEntities) {
        _shouldRenderEntities = shouldRenderEntities;
        emit shouldRenderEntitiesChanged(_shouldRenderEntities);
    }
}
@@ -21,23 +21,47 @@
class SceneScriptingInterface : public QObject, public Dependency {
    Q_OBJECT
    SINGLETON_DEPENDENCY

    Q_PROPERTY(bool shouldRenderAvatars READ shouldRenderAvatars WRITE setShouldRenderAvatars)
    Q_PROPERTY(bool shouldRenderEntities READ shouldRenderEntities WRITE setShouldRenderEntities)

public:
    Q_INVOKABLE void setStageOrientation(const glm::quat& orientation);

    Q_INVOKABLE void setStageLocation(float longitude, float latitude, float altitude);
    Q_INVOKABLE float getStageLocationLongitude() const;
    Q_INVOKABLE float getStageLocationLatitude() const;
    Q_INVOKABLE float getStageLocationAltitude() const;

    Q_INVOKABLE void setStageDayTime(float hour);
    Q_INVOKABLE float getStageDayTime() const;
    Q_INVOKABLE void setStageYearTime(int day);
    Q_INVOKABLE int getStageYearTime() const;

    Q_INVOKABLE void setSunColor(const glm::vec3& color);
    Q_INVOKABLE const glm::vec3& getSunColor() const;
    Q_INVOKABLE void setSunIntensity(float intensity);
    Q_INVOKABLE float getSunIntensity() const;

    model::SunSkyStagePointer getSkyStage() const;

    Q_INVOKABLE void setShouldRenderAvatars(bool shouldRenderAvatars);
    Q_INVOKABLE bool shouldRenderAvatars() const { return _shouldRenderAvatars; }

    Q_INVOKABLE void setShouldRenderEntities(bool shouldRenderEntities);
    Q_INVOKABLE bool shouldRenderEntities() const { return _shouldRenderEntities; }

signals:
    void shouldRenderAvatarsChanged(bool shouldRenderAvatars);
    void shouldRenderEntitiesChanged(bool shouldRenderEntities);
protected:
    SceneScriptingInterface() {};
    ~SceneScriptingInterface() {};

    model::SunSkyStagePointer _skyStage = model::SunSkyStagePointer(new model::SunSkyStage());

    bool _shouldRenderAvatars = true;
    bool _shouldRenderEntities = true;
};

#endif // hifi_SceneScriptingInterface_h
@@ -305,8 +305,6 @@ void ScriptEngine::init() {

    _isInitialized = true;

    auto sceneScriptingInterface = DependencyManager::set<SceneScriptingInterface>();

    auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
    entityScriptingInterface->init();

@@ -350,7 +348,6 @@ void ScriptEngine::init() {
    registerGlobalObject("Vec3", &_vec3Library);
    registerGlobalObject("Uuid", &_uuidLibrary);
    registerGlobalObject("AnimationCache", DependencyManager::get<AnimationCache>().data());
    registerGlobalObject("Scene", DependencyManager::get<SceneScriptingInterface>().data());

    // constants
    globalObject().setProperty("TREE_SCALE", newVariant(QVariant(TREE_SCALE)));
@@ -13,15 +13,14 @@
//

#include <QEventLoop>
#include <QFile>
#include <qurlquery.h>

#include <AccountManager.h>
#include <NetworkAccessManager.h>
#include <NetworkingConstants.h>

#include <AccountManager.h>
#include "XMLHttpRequestClass.h"
#include "ScriptEngine.h"
#include "XMLHttpRequestClass.h"

const QString METAVERSE_API_URL = NetworkingConstants::METAVERSE_SERVER_URL.toString() + "/api/";

@@ -42,7 +41,6 @@ XMLHttpRequestClass::XMLHttpRequestClass(QScriptEngine* engine) :
    _onReadyStateChange(QScriptValue::NullValue),
    _readyState(XMLHttpRequestClass::UNSENT),
    _errorCode(QNetworkReply::NoError),
    _file(NULL),
    _timeout(0),
    _timer(this),
    _numRedirects(0) {

@@ -63,22 +61,6 @@ QScriptValue XMLHttpRequestClass::getStatus() const {
    if (_reply) {
        return QScriptValue(_reply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt());
    }
    if(_url.isLocalFile()) {
        switch (_errorCode) {
            case QNetworkReply::NoError:
                return QScriptValue(200);
            case QNetworkReply::ContentNotFoundError:
                return QScriptValue(404);
            case QNetworkReply::ContentConflictError:
                return QScriptValue(409);
            case QNetworkReply::TimeoutError:
                return QScriptValue(408);
            case QNetworkReply::ContentOperationNotPermittedError:
                return QScriptValue(501);
            default:
                break;
        }
    }
    return QScriptValue(0);
}

@@ -86,22 +68,6 @@ QString XMLHttpRequestClass::getStatusText() const {
    if (_reply) {
        return _reply->attribute(QNetworkRequest::HttpReasonPhraseAttribute).toString();
    }
    if (_url.isLocalFile()) {
        switch (_errorCode) {
            case QNetworkReply::NoError:
                return "OK";
            case QNetworkReply::ContentNotFoundError:
                return "Not Found";
            case QNetworkReply::ContentConflictError:
                return "Conflict";
            case QNetworkReply::TimeoutError:
                return "Timeout";
            case QNetworkReply::ContentOperationNotPermittedError:
                return "Not Implemented";
            default:
                break;
        }
    }
    return "";
}

@@ -147,13 +113,6 @@ QScriptValue XMLHttpRequestClass::getAllResponseHeaders() const {
        }
        return QString(headers.data());
    }
    if (_url.isLocalFile()) {
        QString headers = QString("Content-Type: application/octet-stream\n");
        headers.append("Content-Length: ");
        headers.append(QString("%1").arg(_rawResponseData.length()));
        headers.append("\n");
        return headers;
    }
    return QScriptValue("");
}

@@ -161,14 +120,6 @@ QScriptValue XMLHttpRequestClass::getResponseHeader(const QString& name) const {
    if (_reply && _reply->hasRawHeader(name.toLatin1())) {
        return QScriptValue(QString(_reply->rawHeader(name.toLatin1())));
    }
    if (_url.isLocalFile()) {
        if (name.toLower() == "content-type") {
            return QString("application/octet-stream");
        }
        if (name.toLower() == "content-length") {
            return QString("%1").arg(_rawResponseData.length());
        }
    }
    return QScriptValue::NullValue;
}

@@ -188,47 +139,24 @@ void XMLHttpRequestClass::open(const QString& method, const QString& url, bool a
    _url.setUrl(url);
    _async = async;

    if (_url.isLocalFile()) {
        if (_method.toUpper() == "GET" && !_async && username.isEmpty() && password.isEmpty()) {
            _file = new QFile(_url.toLocalFile());
            if (!_file->exists()) {
                qDebug() << "Can't find file " << _url.fileName();
                abortRequest();
                _errorCode = QNetworkReply::ContentNotFoundError;
                setReadyState(DONE);
                emit requestComplete();
            } else if (!_file->open(QIODevice::ReadOnly)) {
                qDebug() << "Can't open file " << _url.fileName();
                abortRequest();
                _errorCode = QNetworkReply::ContentConflictError;
                setReadyState(DONE);
                emit requestComplete();
            } else {
                setReadyState(OPENED);
            }
        } else {
            notImplemented();
        }
    } else {
        if (url.toLower().left(METAVERSE_API_URL.length()) == METAVERSE_API_URL) {
            AccountManager& accountManager = AccountManager::getInstance();
    if (url.toLower().left(METAVERSE_API_URL.length()) == METAVERSE_API_URL) {
        AccountManager& accountManager = AccountManager::getInstance();

            if (accountManager.hasValidAccessToken()) {
                QUrlQuery urlQuery(_url.query());
                urlQuery.addQueryItem("access_token", accountManager.getAccountInfo().getAccessToken().token);
                _url.setQuery(urlQuery);
            }
        if (accountManager.hasValidAccessToken()) {
            QUrlQuery urlQuery(_url.query());
            urlQuery.addQueryItem("access_token", accountManager.getAccountInfo().getAccessToken().token);
            _url.setQuery(urlQuery);
        }

        }
        if (!username.isEmpty()) {
            _url.setUserName(username);
        }
        if (!password.isEmpty()) {
            _url.setPassword(password);
        }
        _request.setUrl(_url);
        setReadyState(OPENED);
    }
    if (!username.isEmpty()) {
        _url.setUserName(username);
    }
    if (!password.isEmpty()) {
        _url.setPassword(password);
    }
    _request.setUrl(_url);
    setReadyState(OPENED);
    }
}

@@ -239,23 +167,18 @@ void XMLHttpRequestClass::send() {
void XMLHttpRequestClass::send(const QScriptValue& data) {
    if (_readyState == OPENED && !_reply) {
        if (!data.isNull()) {
            if (_url.isLocalFile()) {
                notImplemented();
                return;
            _sendData = new QBuffer(this);
            if (data.isObject()) {
                QByteArray ba = qscriptvalue_cast<QByteArray>(data);
                _sendData->setData(ba);
            } else {
                _sendData = new QBuffer(this);
                if (data.isObject()) {
                    QByteArray ba = qscriptvalue_cast<QByteArray>(data);
                    _sendData->setData(ba);
                } else {
                    _sendData->setData(data.toString().toUtf8());
                }
                _sendData->setData(data.toString().toUtf8());
            }
        }

        doSend();

        if (!_async && !_url.isLocalFile()) {
        if (!_async) {
            QEventLoop loop;
            connect(this, SIGNAL(requestComplete()), &loop, SLOT(quit()));
            loop.exec();

@@ -265,23 +188,13 @@ void XMLHttpRequestClass::send(const QScriptValue& data) {

void XMLHttpRequestClass::doSend() {

    if (!_url.isLocalFile()) {
        _reply = NetworkAccessManager::getInstance().sendCustomRequest(_request, _method.toLatin1(), _sendData);
        connectToReply(_reply);
    }
    _reply = NetworkAccessManager::getInstance().sendCustomRequest(_request, _method.toLatin1(), _sendData);
    connectToReply(_reply);

    if (_timeout > 0) {
        _timer.start(_timeout);
        connect(&_timer, SIGNAL(timeout()), this, SLOT(requestTimeout()));
    }

    if (_url.isLocalFile()) {
        setReadyState(HEADERS_RECEIVED);
        setReadyState(LOADING);
        _rawResponseData = _file->readAll();
        _file->close();
        requestFinished();
    }
}

void XMLHttpRequestClass::requestTimeout() {

@@ -300,16 +213,10 @@ void XMLHttpRequestClass::requestError(QNetworkReply::NetworkError code) {
void XMLHttpRequestClass::requestFinished() {
    disconnect(&_timer, SIGNAL(timeout()), this, SLOT(requestTimeout()));

    if (!_url.isLocalFile()) {
        _errorCode = _reply->error();
    } else {
        _errorCode = QNetworkReply::NoError;
    }
    _errorCode = _reply->error();

    if (_errorCode == QNetworkReply::NoError) {
        if (!_url.isLocalFile()) {
            _rawResponseData.append(_reply->readAll());
        }
        _rawResponseData.append(_reply->readAll());

        if (_responseType == "json") {
            _responseData = _engine->evaluate("(" + QString(_rawResponseData.data()) + ")");

@@ -338,19 +245,6 @@ void XMLHttpRequestClass::abortRequest() {
        _reply->deleteLater();
        _reply = NULL;
    }

    if (_file != NULL) {
        _file->close();
        _file = NULL;
    }
}

void XMLHttpRequestClass::notImplemented() {
    abortRequest();
    //_errorCode = QNetworkReply::OperationNotImplementedError; TODO: Use this status code when update to Qt 5.3
    _errorCode = QNetworkReply::ContentOperationNotPermittedError;
    setReadyState(DONE);
    emit requestComplete();
}

void XMLHttpRequestClass::connectToReply(QNetworkReply* reply) {
@@ -97,7 +97,6 @@ private:
    void connectToReply(QNetworkReply* reply);
    void disconnectFromReply(QNetworkReply* reply);
    void abortRequest();
    void notImplemented();

    QScriptEngine* _engine;
    bool _async;

@@ -113,7 +112,6 @@ private:
    QScriptValue _onReadyStateChange;
    ReadyState _readyState;
    QNetworkReply::NetworkError _errorCode;
    QFile* _file;
    int _timeout;
    QTimer _timer;
    int _numRedirects;
@@ -17,7 +17,7 @@ class OctreeRenderer;

class RenderArgs {
public:
    enum RenderMode { DEFAULT_RENDER_MODE, SHADOW_RENDER_MODE, DIFFUSE_RENDER_MODE, NORMAL_RENDER_MODE };
    enum RenderMode { DEFAULT_RENDER_MODE, SHADOW_RENDER_MODE, DIFFUSE_RENDER_MODE, NORMAL_RENDER_MODE, DEBUG_RENDER_MODE };
    enum RenderSide { MONO, STEREO_LEFT, STEREO_RIGHT };

    OctreeRenderer* _renderer;
@@ -13,8 +13,18 @@
#include "VHACDUtil.h"

//Read all the meshes from provided FBX file
bool vhacd::VHACDUtil::loadFBX(const QString filename, vhacd::LoadFBXResults *results) {
// FBXReader jumbles the order of the meshes by reading them back out of a hashtable. This will put
// them back in the order in which they appeared in the file.
bool FBXGeometryLessThan(const FBXMesh& e1, const FBXMesh& e2) {
    return e1.meshIndex < e2.meshIndex;
}
void reSortFBXGeometryMeshes(FBXGeometry& geometry) {
    qSort(geometry.meshes.begin(), geometry.meshes.end(), FBXGeometryLessThan);
}

// Read all the meshes from provided FBX file
bool vhacd::VHACDUtil::loadFBX(const QString filename, FBXGeometry& result) {

    // open the fbx file
    QFile fbx(filename);

@@ -25,174 +35,71 @@ bool vhacd::VHACDUtil::loadFBX(const QString filename, vhacd::LoadFBXResults *re

    QByteArray fbxContents = fbx.readAll();

    FBXGeometry geometry;

    if (filename.toLower().endsWith(".obj")) {
        geometry = readOBJ(fbxContents, QVariantHash());
        result = readOBJ(fbxContents, QVariantHash());
    } else if (filename.toLower().endsWith(".fbx")) {
        geometry = readFBX(fbxContents, QVariantHash());
        result = readFBX(fbxContents, QVariantHash());
    } else {
        qDebug() << "unknown file extension";
        return false;
    }

    reSortFBXGeometryMeshes(result);

    std::cout << "-------------------\n";
    foreach (const FBXMesh& mesh, geometry.meshes) {
        foreach (const FBXMeshPart &meshPart, mesh.parts) {
            std::cout << meshPart.triangleIndices.size() << " ";
        }
    }
    std::cout << "\n";

    //results->meshCount = geometry.meshes.count();
    // qDebug() << "read in" << geometry.meshes.count() << "meshes";

    int count = 0;
    foreach(FBXMesh mesh, geometry.meshes) {
        //get vertices for each mesh
        // QVector<glm::vec3> vertices = mesh.vertices;

        QVector<glm::vec3> vertices;
        foreach (glm::vec3 vertex, mesh.vertices) {
            vertices.append(glm::vec3(mesh.modelTransform * glm::vec4(vertex, 1.0f)));
        }

        // get the triangle indices for each mesh
        QVector<int> triangles;
        foreach(FBXMeshPart meshPart, mesh.parts){
            QVector<int> indices = meshPart.triangleIndices;
            triangles += indices;

            unsigned int quadCount = meshPart.quadIndices.size() / 4;
            for (unsigned int i = 0; i < quadCount; i++) {
                unsigned int p0Index = meshPart.quadIndices[i * 4];
                unsigned int p1Index = meshPart.quadIndices[i * 4 + 1];
                unsigned int p2Index = meshPart.quadIndices[i * 4 + 2];
                unsigned int p3Index = meshPart.quadIndices[i * 4 + 3];
                // split each quad into two triangles
                triangles.append(p0Index);
                triangles.append(p1Index);
                triangles.append(p2Index);
                triangles.append(p0Index);
                triangles.append(p2Index);
                triangles.append(p3Index);
            }
        }

        // only read meshes with triangles
        if (triangles.count() <= 0){
            continue;
        }

        AABox aaBox;
        foreach (glm::vec3 p, vertices) {
            aaBox += p;
        }

        results->perMeshVertices.append(vertices);
        results->perMeshTriangleIndices.append(triangles);
        results->perMeshLargestDimension.append(aaBox.getLargestDimension());
        count++;
    }

    results->meshCount = count;
    return true;
}

void vhacd::VHACDUtil::combineMeshes(vhacd::LoadFBXResults *meshes, vhacd::LoadFBXResults *results) const {
    float largestDimension = 0;
    int indexStart = 0;

    QVector<glm::vec3> emptyVertices;
    QVector<int> emptyTriangles;
    results->perMeshVertices.append(emptyVertices);
    results->perMeshTriangleIndices.append(emptyTriangles);
    results->perMeshLargestDimension.append(largestDimension);
// void vhacd::VHACDUtil::fattenMeshes(vhacd::LoadFBXResults *meshes, vhacd::LoadFBXResults *results) const {

    for (int i = 0; i < meshes->meshCount; i++) {
        QVector<glm::vec3> vertices = meshes->perMeshVertices.at(i);
        QVector<int> triangles = meshes->perMeshTriangleIndices.at(i);
        const float largestDimension = meshes->perMeshLargestDimension.at(i);
//     for (int i = 0; i < meshes->meshCount; i++) {
//         QVector<glm::vec3> vertices = meshes->perMeshVertices.at(i);
//         QVector<int> triangles = meshes->perMeshTriangleIndices.at(i);
//         const float largestDimension = meshes->perMeshLargestDimension.at(i);

        for (int j = 0; j < triangles.size(); j++) {
            triangles[ j ] += indexStart;
        }
        indexStart += vertices.size();
//         results->perMeshVertices.append(vertices);
//         results->perMeshTriangleIndices.append(triangles);
//         results->perMeshLargestDimension.append(largestDimension);

        results->perMeshVertices[0] << vertices;
        results->perMeshTriangleIndices[0] << triangles;
        if (results->perMeshLargestDimension[0] < largestDimension) {
            results->perMeshLargestDimension[0] = largestDimension;
        }
    }
//         for (int j = 0; j < triangles.size(); j += 3) {
//             auto p0 = vertices[triangles[j]];
//             auto p1 = vertices[triangles[j+1]];
//             auto p2 = vertices[triangles[j+2]];

    results->meshCount = 1;
}
//             auto d0 = p1 - p0;
//             auto d1 = p2 - p0;

//             auto cp = glm::cross(d0, d1);
//             cp = -2.0f * glm::normalize(cp);

void vhacd::VHACDUtil::fattenMeshes(vhacd::LoadFBXResults *meshes, vhacd::LoadFBXResults *results) const {

    for (int i = 0; i < meshes->meshCount; i++) {
        QVector<glm::vec3> vertices = meshes->perMeshVertices.at(i);
        QVector<int> triangles = meshes->perMeshTriangleIndices.at(i);
        const float largestDimension = meshes->perMeshLargestDimension.at(i);

        results->perMeshVertices.append(vertices);
        results->perMeshTriangleIndices.append(triangles);
        results->perMeshLargestDimension.append(largestDimension);

        for (int j = 0; j < triangles.size(); j += 3) {
            auto p0 = vertices[triangles[j]];
            auto p1 = vertices[triangles[j+1]];
            auto p2 = vertices[triangles[j+2]];

            auto d0 = p1 - p0;
            auto d1 = p2 - p0;

            auto cp = glm::cross(d0, d1);
            cp = -2.0f * glm::normalize(cp);

            auto p3 = p0 + cp;
//             auto p3 = p0 + cp;

            auto n = results->perMeshVertices.size();
            results->perMeshVertices[i] << p3;
//             auto n = results->perMeshVertices.size();
//             results->perMeshVertices[i] << p3;

            results->perMeshTriangleIndices[i] << triangles[j] << n << triangles[j + 1];
            results->perMeshTriangleIndices[i] << triangles[j + 1] << n << triangles[j + 2];
            results->perMeshTriangleIndices[i] << triangles[j + 2] << n << triangles[j];
        }
//             results->perMeshTriangleIndices[i] << triangles[j] << n << triangles[j + 1];
//             results->perMeshTriangleIndices[i] << triangles[j + 1] << n << triangles[j + 2];
//             results->perMeshTriangleIndices[i] << triangles[j + 2] << n << triangles[j];
//         }

        results->meshCount++;
    }
}
//         results->meshCount++;
//     }
// }

bool vhacd::VHACDUtil::computeVHACD(vhacd::LoadFBXResults *inMeshes, VHACD::IVHACD::Parameters params,
                                    vhacd::ComputeResults *results,
                                    int startMeshIndex, int endMeshIndex, float minimumMeshSize,
                                    bool fattenFaces) const {

    vhacd::LoadFBXResults *meshes = new vhacd::LoadFBXResults;
    // combineMeshes(inMeshes, meshes);

    // vhacd::LoadFBXResults *meshes = new vhacd::LoadFBXResults;

    if (fattenFaces) {
        fattenMeshes(inMeshes, meshes);
    } else {
        meshes = inMeshes;
    }

bool vhacd::VHACDUtil::computeVHACD(FBXGeometry& geometry,
                                    VHACD::IVHACD::Parameters params,
                                    FBXGeometry& result,
                                    int startMeshIndex,
                                    int endMeshIndex, float minimumMeshSize,
                                    bool fattenFaces) {
    // count the mesh-parts
    QVector<FBXMeshPart> meshParts;
    int meshCount = 0;

    VHACD::IVHACD * interfaceVHACD = VHACD::CreateVHACD();
    int meshCount = meshes->meshCount;
    int count = 0;

    if (startMeshIndex < 0) {
        startMeshIndex = 0;

@@ -201,66 +108,127 @@ bool vhacd::VHACDUtil::computeVHACD(vhacd::LoadFBXResults *inMeshes, VHACD::IVHA
        endMeshIndex = meshCount;
    }

    for (int i = 0; i < meshCount; i++) {
        std::cout << meshes->perMeshTriangleIndices.at(i).size() << " ";
    }
    std::cout << "\n";

    std::cout << "Performing V-HACD computation on " << endMeshIndex - startMeshIndex << " meshes ..... " << std::endl;

    for (int i = startMeshIndex; i < endMeshIndex; i++){
        qDebug() << "--------------------";
        std::vector<glm::vec3> vertices = meshes->perMeshVertices.at(i).toStdVector();
        std::vector<int> triangles = meshes->perMeshTriangleIndices.at(i).toStdVector();
        int nPoints = (unsigned int)vertices.size();
        int nTriangles = (unsigned int)triangles.size() / 3;
        const float largestDimension = meshes->perMeshLargestDimension.at(i);
    result.meshExtents.reset();
    result.meshes.append(FBXMesh());
    FBXMesh &resultMesh = result.meshes.last();

        qDebug() << "Mesh " << i << " -- " << nPoints << " points, " << nTriangles << " triangles, "
                 << "size =" << largestDimension;
    int count = 0;
    foreach (const FBXMesh& mesh, geometry.meshes) {

        if (largestDimension < minimumMeshSize /* || largestDimension > 1000 */) {
            qDebug() << " Skipping...";
            continue;
        // each mesh has its own transform to move it to model-space
        std::vector<glm::vec3> vertices;
        foreach (glm::vec3 vertex, mesh.vertices) {
            vertices.push_back(glm::vec3(mesh.modelTransform * glm::vec4(vertex, 1.0f)));
        }
        // compute approximate convex decomposition
        bool res = interfaceVHACD->Compute(&vertices[0].x, 3, nPoints, &triangles[0], 3, nTriangles, params);
        if (!res){
            qDebug() << "V-HACD computation failed for Mesh : " << i;
            continue;
        }
        count++; //For counting number of successfull computations

        //Number of hulls for the mesh
        unsigned int nConvexHulls = interfaceVHACD->GetNConvexHulls();
        results->convexHullsCountList.append(nConvexHulls);
        foreach (const FBXMeshPart &meshPart, mesh.parts) {

        //get all the convex hulls for this mesh
        QVector<VHACD::IVHACD::ConvexHull> convexHulls;
        for (unsigned int j = 0; j < nConvexHulls; j++){
            VHACD::IVHACD::ConvexHull hull;
            interfaceVHACD->GetConvexHull(j, hull);

            double *m_points_copy = new double[hull.m_nPoints * 3];
            // std::copy(std::begin(hull.m_points), std::end(hull.m_points), std::begin(m_points_copy));
            for (unsigned int i=0; i<hull.m_nPoints * 3; i++) {
                m_points_copy[ i ] = hull.m_points[ i ];
            if (count < startMeshIndex || count >= endMeshIndex) {
                count ++;
                continue;
            }
            hull.m_points = m_points_copy;

            int *m_triangles_copy = new int[hull.m_nTriangles * 3];
            // std::copy(std::begin(hull.m_triangles), std::end(hull.m_triangles), std::begin(m_triangles_copy));
            for (unsigned int i=0; i<hull.m_nTriangles * 3; i++) {
                m_triangles_copy[ i ] = hull.m_triangles[ i ];
            qDebug() << "--------------------";

            std::vector<int> triangles = meshPart.triangleIndices.toStdVector();

            AABox aaBox;
            unsigned int triangleCount = meshPart.triangleIndices.size() / 3;
            for (unsigned int i = 0; i < triangleCount; i++) {
                glm::vec3 p0 = mesh.vertices[meshPart.triangleIndices[i * 3]];
                glm::vec3 p1 = mesh.vertices[meshPart.triangleIndices[i * 3 + 1]];
                glm::vec3 p2 = mesh.vertices[meshPart.triangleIndices[i * 3 + 2]];
                aaBox += p0;
                aaBox += p1;
                aaBox += p2;
            }
            hull.m_triangles = m_triangles_copy;
            convexHulls.append(hull);
        }
        results->convexHullList.append(convexHulls);
    } //end of for loop

    results->meshCount = count;
            // convert quads to triangles
            unsigned int quadCount = meshPart.quadIndices.size() / 4;
            for (unsigned int i = 0; i < quadCount; i++) {
                unsigned int p0Index = meshPart.quadIndices[i * 4];
                unsigned int p1Index = meshPart.quadIndices[i * 4 + 1];
                unsigned int p2Index = meshPart.quadIndices[i * 4 + 2];
                unsigned int p3Index = meshPart.quadIndices[i * 4 + 3];
                glm::vec3 p0 = mesh.vertices[p0Index];
                glm::vec3 p1 = mesh.vertices[p1Index + 1];
                glm::vec3 p2 = mesh.vertices[p2Index + 2];
                glm::vec3 p3 = mesh.vertices[p3Index + 3];
                aaBox += p0;
                aaBox += p1;
                aaBox += p2;
                aaBox += p3;
                // split each quad into two triangles
                triangles.push_back(p0Index);
                triangles.push_back(p1Index);
                triangles.push_back(p2Index);
                triangles.push_back(p0Index);
                triangles.push_back(p2Index);
                triangles.push_back(p3Index);
                triangleCount += 2;
            }

            // only process meshes with triangles
            if (triangles.size() <= 0) {
                qDebug() << " Skipping (no triangles)...";
                count++;
                continue;
            }

            int nPoints = vertices.size();
            const float largestDimension = aaBox.getLargestDimension();

            qDebug() << "Mesh " << count << " -- " << nPoints << " points, " << triangleCount << " triangles, "
                     << "size =" << largestDimension;

            if (largestDimension < minimumMeshSize /* || largestDimension > 1000 */) {
                qDebug() << " Skipping (too small)...";
                count++;
                continue;
            }

            // compute approximate convex decomposition
            bool res = interfaceVHACD->Compute(&vertices[0].x, 3, nPoints, &triangles[0], 3, triangleCount, params);
            if (!res){
                qDebug() << "V-HACD computation failed for Mesh : " << count;
                count++;
                continue;
            }

            // Number of hulls for this input meshPart
            unsigned int nConvexHulls = interfaceVHACD->GetNConvexHulls();

            // create an output meshPart for each convex hull
            for (unsigned int j = 0; j < nConvexHulls; j++) {
                VHACD::IVHACD::ConvexHull hull;
                interfaceVHACD->GetConvexHull(j, hull);

                resultMesh.parts.append(FBXMeshPart());
                FBXMeshPart &resultMeshPart = resultMesh.parts.last();

                int hullIndexStart = resultMesh.vertices.size();
                for (unsigned int i = 0; i < hull.m_nPoints; i++) {
                    float x = hull.m_points[i * 3];
                    float y = hull.m_points[i * 3 + 1];
                    float z = hull.m_points[i * 3 + 2];
                    resultMesh.vertices.append(glm::vec3(x, y, z));
                }

                for (unsigned int i = 0; i < hull.m_nTriangles; i++) {
                    int index0 = hull.m_triangles[i * 3] + hullIndexStart;
                    int index1 = hull.m_triangles[i * 3 + 1] + hullIndexStart;
                    int index2 = hull.m_triangles[i * 3 + 2] + hullIndexStart;
                    resultMeshPart.triangleIndices.append(index0);
                    resultMeshPart.triangleIndices.append(index1);
                    resultMeshPart.triangleIndices.append(index2);
                }
            }

            count++;
        }
    }

    //release memory
    interfaceVHACD->Clean();
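
// Sketch of the quad handling above (illustrative, not part of this commit): a quad
// (p0, p1, p2, p3) is fanned into the two triangles (p0, p1, p2) and (p0, p2, p3),
// which preserves winding order and lets V-HACD consume triangle indices only.
#include <vector>
static void appendQuadAsTriangles(std::vector<int>& triangles,
                                  int p0Index, int p1Index, int p2Index, int p3Index) {
    triangles.push_back(p0Index);
    triangles.push_back(p1Index);
    triangles.push_back(p2Index);
    triangles.push_back(p0Index);
    triangles.push_back(p2Index);
    triangles.push_back(p3Index);
}
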
@@ -272,7 +240,6 @@ bool vhacd::VHACDUtil::computeVHACD(vhacd::LoadFBXResults *inMeshes, VHACD::IVHA
    else{
        return false;
    }

}

vhacd::VHACDUtil:: ~VHACDUtil(){

@@ -280,8 +247,11 @@ vhacd::VHACDUtil:: ~VHACDUtil(){
}

//ProgressClaback implementation
void vhacd::ProgressCallback::Update(const double overallProgress, const double stageProgress, const double operationProgress,
                                     const char * const stage, const char * const operation){
void vhacd::ProgressCallback::Update(const double overallProgress,
                                     const double stageProgress,
                                     const double operationProgress,
                                     const char* const stage,
                                     const char* const operation) {
    int progress = (int)(overallProgress + 0.5);

    if (progress < 10){

@@ -293,9 +263,9 @@ void vhacd::ProgressCallback::Update(const double overallProgress, const double

    std::cout << progress << "%";

    if (progress >= 100){
        std::cout << std::endl;
    }
    if (progress >= 100){
        std::cout << std::endl;
    }
}

vhacd::ProgressCallback::ProgressCallback(void){}

@@ -23,28 +23,17 @@
#include <VHACD.h>

namespace vhacd {

    typedef struct {
        int meshCount;
        QVector<int> convexHullsCountList;
        QVector<QVector<VHACD::IVHACD::ConvexHull>> convexHullList;
    } ComputeResults;

    typedef struct {
        int meshCount;
        QVector<QVector<glm::vec3>> perMeshVertices;
        QVector<QVector<int>> perMeshTriangleIndices;
        QVector<float> perMeshLargestDimension;
    } LoadFBXResults;

    class VHACDUtil {
    public:
        bool loadFBX(const QString filename, vhacd::LoadFBXResults *results);
        void combineMeshes(vhacd::LoadFBXResults *meshes, vhacd::LoadFBXResults *results) const;
        void fattenMeshes(vhacd::LoadFBXResults *meshes, vhacd::LoadFBXResults *results) const;
        bool computeVHACD(vhacd::LoadFBXResults *meshes, VHACD::IVHACD::Parameters params,
                          vhacd::ComputeResults *results, int startMeshIndex, int endMeshIndex, float minimumMeshSize,
                          bool fattenFaces) const;
        bool loadFBX(const QString filename, FBXGeometry& result);
        // void combineMeshes(vhacd::LoadFBXResults *meshes, vhacd::LoadFBXResults *results) const;
        // void fattenMeshes(vhacd::LoadFBXResults *meshes, vhacd::LoadFBXResults *results) const;
        bool computeVHACD(FBXGeometry& geometry,
                          VHACD::IVHACD::Parameters params,
                          FBXGeometry& result,
                          int startMeshIndex, int endMeshIndex,
                          float minimumMeshSize,
                          bool fattenFaces);
        ~VHACDUtil();
    };

@@ -53,7 +42,7 @@ namespace vhacd {
        ProgressCallback(void);
        ~ProgressCallback();

        //Couldn't follow coding guideline here due to virtual function declared in IUserCallback
        // Couldn't follow coding guideline here due to virtual function declared in IUserCallback
        void Update(const double overallProgress, const double stageProgress, const double operationProgress,
                    const char * const stage, const char * const operation);
    };

@@ -32,7 +32,7 @@ QString formatFloat(double n) {
}

bool writeOBJ(QString outFileName, QVector<QVector<VHACD::IVHACD::ConvexHull>>& meshList, bool outputOneMesh) {
bool writeOBJ(QString outFileName, FBXGeometry& geometry) {
    QFile file(outFileName);
    if (!file.open(QIODevice::WriteOnly)) {
        qDebug() << "Unable to write to " << outFileName;

@@ -41,26 +41,25 @@ bool writeOBJ(QString outFileName, QVector<QVector<VHACD::IVHACD::ConvexHull>>&

    QTextStream out(&file);

    unsigned int pointStartOffset = 0;
    unsigned int nth = 0;
    foreach (const FBXMesh& mesh, geometry.meshes) {
        for (int i = 0; i < mesh.vertices.size(); i++) {
            out << "v ";
            out << formatFloat(mesh.vertices[i][0]) << " ";
            out << formatFloat(mesh.vertices[i][1]) << " ";
            out << formatFloat(mesh.vertices[i][2]) << "\n";
        }

    foreach (QVector<VHACD::IVHACD::ConvexHull> hulls, meshList) {
        unsigned int nth = 0;
        foreach (VHACD::IVHACD::ConvexHull hull, hulls) {
        foreach (const FBXMeshPart &meshPart, mesh.parts) {
            out << "g hull-" << nth++ << "\n";
            for (unsigned int i = 0; i < hull.m_nPoints; i++) {
                out << "v ";
                out << formatFloat(hull.m_points[i*3]) << " ";
                out << formatFloat(hull.m_points[i*3+1]) << " ";
                out << formatFloat(hull.m_points[i*3+2]) << "\n";
            }
            for (unsigned int i = 0; i < hull.m_nTriangles; i++) {
            int triangleCount = meshPart.triangleIndices.size() / 3;
            for (int i = 0; i < triangleCount; i++) {
                out << "f ";
                out << hull.m_triangles[i*3] + 1 + pointStartOffset << " ";
                out << hull.m_triangles[i*3+1] + 1 + pointStartOffset << " ";
                out << hull.m_triangles[i*3+2] + 1 + pointStartOffset << "\n";
                out << meshPart.triangleIndices[i*3] + 1 << " ";
                out << meshPart.triangleIndices[i*3+1] + 1 << " ";
                out << meshPart.triangleIndices[i*3+2] + 1 << "\n";
            }
            out << "\n";
            pointStartOffset += hull.m_nPoints;
        }
    }
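
// Example of the OBJ text emitted above for a single one-triangle hull (illustrative,
// not actual tool output).  OBJ "f" entries are 1-based, hence the "+ 1" on each index:
//
//     v 0.0 0.0 0.0
//     v 1.0 0.0 0.0
//     v 0.0 1.0 0.0
//     g hull-0
//     f 1 2 3
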
@@ -76,8 +75,6 @@ VHACDUtilApp::VHACDUtilApp(int argc, char* argv[]) :
    vector<int> triangles; // array of indexes
    vector<float> points; // array of coordinates
    vhacd::VHACDUtil vUtil;
    vhacd::LoadFBXResults fbx; //mesh data from loaded fbx file
    vhacd::ComputeResults results; // results after computing vhacd
    VHACD::IVHACD::Parameters params;
    vhacd::ProgressCallback pCallBack;

@@ -89,9 +86,6 @@ VHACDUtilApp::VHACDUtilApp(int argc, char* argv[]) :

    const QCommandLineOption helpOption = parser.addHelpOption();

    const QCommandLineOption outputOneMeshOption("1", "output hulls as single mesh");
    parser.addOption(outputOneMeshOption);

    const QCommandLineOption fattenFacesOption("f", "fatten faces");
    parser.addOption(fattenFacesOption);

@@ -107,18 +101,48 @@ VHACDUtilApp::VHACDUtilApp(int argc, char* argv[]) :
    const QCommandLineOption endMeshIndexOption("e", "end-mesh index", "0");
    parser.addOption(endMeshIndexOption);

    const QCommandLineOption minimumMeshSizeOption("m", "minimum mesh size to consider", "0");
    const QCommandLineOption minimumMeshSizeOption("m", "minimum mesh (diagonal) size to consider", "0");
    parser.addOption(minimumMeshSizeOption);

    const QCommandLineOption vHacdResolutionOption("resolution", "v-hacd resolution", "100000");
    const QCommandLineOption vHacdResolutionOption("resolution", "Maximum number of voxels generated during the "
                                                   "voxelization stage (range=10,000-16,000,000)", "100000");
    parser.addOption(vHacdResolutionOption);

    const QCommandLineOption vHacdDepthOption("depth", "v-hacd depth", "20");
    const QCommandLineOption vHacdDepthOption("depth", "Maximum number of clipping stages. During each split stage, parts "
                                              "with a concavity higher than the user defined threshold are clipped "
                                              "according the \"best\" clipping plane (range=1-32)", "20");
    parser.addOption(vHacdDepthOption);

    const QCommandLineOption vHacdDeltaOption("delta", "v-hacd delta", "0.05");
    const QCommandLineOption vHacdDeltaOption("delta", "Controls the bias toward maximaxing local concavity (range=0.0-1.0)", "0.05");
    parser.addOption(vHacdDeltaOption);

    const QCommandLineOption vHacdConcavityOption("concavity", "Maximum allowed concavity (range=0.0-1.0)", "0.0025");
    parser.addOption(vHacdConcavityOption);

    const QCommandLineOption vHacdPlanedownsamplingOption("planedownsampling", "Controls the granularity of the search for"
                                                          " the \"best\" clipping plane (range=1-16)", "4");
    parser.addOption(vHacdPlanedownsamplingOption);

    const QCommandLineOption vHacdConvexhulldownsamplingOption("convexhulldownsampling", "Controls the precision of the "
                                                               "convex-hull generation process during the clipping "
                                                               "plane selection stage (range=1-16)", "4");
    parser.addOption(vHacdConvexhulldownsamplingOption);

    // alpha
    // beta
    // gamma
    // delta
    // pca
    // mode

    const QCommandLineOption vHacdMaxVerticesPerCHOption("maxvertices", "Controls the maximum number of triangles per "
                                                         "convex-hull (range=4-1024)", "64");
    parser.addOption(vHacdMaxVerticesPerCHOption);

    // minVolumePerCH

    // convexhullApproximation

    if (!parser.parse(QCoreApplication::arguments())) {
        qCritical() << parser.errorText() << endl;

@@ -132,7 +156,6 @@ VHACDUtilApp::VHACDUtilApp(int argc, char* argv[]) :
    }

    bool fattenFaces = parser.isSet(fattenFacesOption);
    bool outputOneMesh = parser.isSet(outputOneMeshOption);

    QString inputFilename;
    if (parser.isSet(inputFilenameOption)) {

@@ -187,22 +210,42 @@ VHACDUtilApp::VHACDUtilApp(int argc, char* argv[]) :
        vHacdDelta = parser.value(vHacdDeltaOption).toFloat();
    }

    float vHacdConcavity = 0.0025;
    if (parser.isSet(vHacdConcavityOption)) {
        vHacdConcavity = parser.value(vHacdConcavityOption).toFloat();
    }

    int vHacdPlanedownsampling = 4;
    if (parser.isSet(vHacdPlanedownsamplingOption)) {
        vHacdPlanedownsampling = parser.value(vHacdPlanedownsamplingOption).toInt();
    }

    int vHacdConvexhulldownsampling = 4;
    if (parser.isSet(vHacdConvexhulldownsamplingOption)) {
        vHacdConvexhulldownsampling = parser.value(vHacdConvexhulldownsamplingOption).toInt();
    }

    int vHacdMaxVerticesPerCH = 64;
    if (parser.isSet(vHacdMaxVerticesPerCHOption)) {
        vHacdMaxVerticesPerCH = parser.value(vHacdMaxVerticesPerCHOption).toInt();
    }

    //set parameters for V-HACD
    params.m_callback = &pCallBack; //progress callback
    params.m_resolution = vHacdResolution; // 100000
    params.m_depth = vHacdDepth; // 20
    params.m_concavity = 0.001; // 0.001
    params.m_delta = vHacdDelta; // 0.05
    params.m_planeDownsampling = 4; // 4
    params.m_convexhullDownsampling = 4; // 4
    params.m_resolution = vHacdResolution;
    params.m_depth = vHacdDepth;
    params.m_concavity = vHacdConcavity;
    params.m_delta = vHacdDelta;
    params.m_planeDownsampling = vHacdPlanedownsampling;
    params.m_convexhullDownsampling = vHacdConvexhulldownsampling;
    params.m_alpha = 0.05; // 0.05 // controls the bias toward clipping along symmetry planes
    params.m_beta = 0.05; // 0.05
    params.m_gamma = 0.0005; // 0.0005
    params.m_pca = 0; // 0 enable/disable normalizing the mesh before applying the convex decomposition
    params.m_mode = 0; // 0: voxel-based (recommended), 1: tetrahedron-based
    params.m_maxNumVerticesPerCH = 64; // 64
    params.m_maxNumVerticesPerCH = vHacdMaxVerticesPerCH;
    params.m_minVolumePerCH = 0.0001; // 0.0001
    params.m_callback = 0; // 0
    params.m_logger = 0; // 0
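
// Illustrative only -- the tool itself does not clamp these values.  A caller could enforce
// the documented V-HACD ranges before filling in params, e.g.:
#include <algorithm>
static void clampVhacdOptions(int& resolution, int& depth, int& maxVerticesPerCH) {
    resolution       = std::min(std::max(resolution, 10000), 16000000); // voxelization budget
    depth            = std::min(std::max(depth, 1), 32);                // clipping stages
    maxVerticesPerCH = std::min(std::max(maxVerticesPerCH, 4), 1024);   // per-hull vertex cap
}
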
@@ -211,8 +254,9 @@ VHACDUtilApp::VHACDUtilApp(int argc, char* argv[]) :

    // load the mesh

    FBXGeometry fbx;
    auto begin = std::chrono::high_resolution_clock::now();
    if (!vUtil.loadFBX(inputFilename, &fbx)){
    if (!vUtil.loadFBX(inputFilename, fbx)){
        cout << "Error in opening FBX file....";
    }
    auto end = std::chrono::high_resolution_clock::now();

@@ -221,50 +265,37 @@ VHACDUtilApp::VHACDUtilApp(int argc, char* argv[]) :
    //perform vhacd computation
    begin = std::chrono::high_resolution_clock::now();

    if (!vUtil.computeVHACD(&fbx, params, &results, startMeshIndex, endMeshIndex, minimumMeshSize, fattenFaces)) {
    FBXGeometry result;
    if (!vUtil.computeVHACD(fbx, params, result, startMeshIndex, endMeshIndex, minimumMeshSize, fattenFaces)) {
        cout << "Compute Failed...";
    }
    end = std::chrono::high_resolution_clock::now();
    auto computeDuration = std::chrono::duration_cast<std::chrono::nanoseconds>(end - begin).count();

    int totalVertices = 0;
    for (int i = 0; i < fbx.meshCount; i++){
        totalVertices += fbx.perMeshVertices.at(i).count();
    }

    int totalTriangles = 0;
    for (int i = 0; i < fbx.meshCount; i++){
        totalTriangles += fbx.perMeshTriangleIndices.at(i).count();
    int totalMeshParts = 0;
    foreach (const FBXMesh& mesh, result.meshes) {
        totalVertices += mesh.vertices.size();
        foreach (const FBXMeshPart &meshPart, mesh.parts) {
            totalTriangles += meshPart.triangleIndices.size() / 3;
            // each quad was made into two triangles
            totalTriangles += 2 * meshPart.quadIndices.size() / 4;
            totalMeshParts++;
        }
    }

    int totalHulls = 0;
    QVector<int> hullCounts = results.convexHullsCountList;
    for (int i = 0; i < results.meshCount; i++){
        totalHulls += hullCounts.at(i);
    }
    int totalHulls = result.meshes[0].parts.size();
    cout << endl << "Summary of V-HACD Computation..................." << endl;
    cout << "File Path : " << inputFilename.toStdString() << endl;
    cout << "Number Of Meshes : " << fbx.meshCount << endl;
    cout << "Processed Meshes : " << results.meshCount << endl;
    cout << "Number Of Meshes : " << totalMeshParts << endl;
    cout << "Total vertices : " << totalVertices << endl;
    cout << "Total Triangles : " << totalTriangles << endl;
    cout << "Total Convex Hulls : " << totalHulls << endl;
    cout << "Total FBX load time: " << (double)loadDuration / 1000000000.00 << " seconds" << endl;
    cout << "V-HACD Compute time: " << (double)computeDuration / 1000000000.00 << " seconds" << endl;
    cout << endl << "Summary per convex hull ........................" << endl <<endl;
    for (int i = 0; i < results.meshCount; i++) {
        cout << "Mesh : " << i + 1 << endl;
        QVector<VHACD::IVHACD::ConvexHull> chList = results.convexHullList.at(i);
        cout << "\t" << "Number Of Hulls : " << chList.count() << endl;

        for (int j = 0; j < results.convexHullList.at(i).count(); j++){
            cout << "\tHUll : " << j + 1 << endl;
            cout << "\t\tNumber Of Points : " << chList.at(j).m_nPoints << endl;
            cout << "\t\tNumber Of Triangles : " << chList.at(j).m_nTriangles << endl;
        }
    }

    writeOBJ(outputFilename, results.convexHullList, outputOneMesh);
    writeOBJ(outputFilename, result);
}

VHACDUtilApp::~VHACDUtilApp() {