Mirror of https://github.com/lubosz/overte.git (synced 2025-04-23 20:54:25 +02:00)

Commit 5c1e085e7b: Merge branch 'PAL_v2' of https://github.com/highfidelity/hifi into PAL_v2

87 changed files with 617 additions and 326 deletions
@@ -30,6 +30,8 @@ project(hifi)
add_definitions(-DGLM_FORCE_RADIANS)
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -DDEBUG")

find_package( Threads )

if (WIN32)
add_definitions(-DNOMINMAX -D_CRT_SECURE_NO_WARNINGS)
@@ -62,6 +62,7 @@ Agent::Agent(ReceivedMessage& message) :

DependencyManager::set<ResourceCacheSharedItems>();
DependencyManager::set<SoundCache>();
DependencyManager::set<AudioScriptingInterface>();
DependencyManager::set<AudioInjectorManager>();
DependencyManager::set<recording::Deck>();
DependencyManager::set<recording::Recorder>();
@@ -24,9 +24,9 @@ macro(PACKAGE_LIBRARIES_FOR_DEPLOYMENT)
TARGET ${TARGET_NAME}
POST_BUILD
COMMAND ${CMAKE_COMMAND}
-DBUNDLE_EXECUTABLE=$<TARGET_FILE:${TARGET_NAME}>
-DBUNDLE_PLUGIN_DIR=$<TARGET_FILE_DIR:${TARGET_NAME}>/${PLUGIN_PATH}
-P ${CMAKE_CURRENT_BINARY_DIR}/FixupBundlePostBuild.cmake
-DBUNDLE_EXECUTABLE="$<TARGET_FILE:${TARGET_NAME}>"
-DBUNDLE_PLUGIN_DIR="$<TARGET_FILE_DIR:${TARGET_NAME}>/${PLUGIN_PATH}"
-P "${CMAKE_CURRENT_BINARY_DIR}/FixupBundlePostBuild.cmake"
)

find_program(WINDEPLOYQT_COMMAND windeployqt PATHS ${QT_DIR}/bin NO_DEFAULT_PATH)
@@ -39,27 +39,27 @@ macro(PACKAGE_LIBRARIES_FOR_DEPLOYMENT)
add_custom_command(
TARGET ${TARGET_NAME}
POST_BUILD
COMMAND CMD /C "SET PATH=%PATH%;${QT_DIR}/bin && ${WINDEPLOYQT_COMMAND} ${EXTRA_DEPLOY_OPTIONS} $<$<OR:$<CONFIG:Release>,$<CONFIG:MinSizeRel>,$<CONFIG:RelWithDebInfo>>:--release> $<TARGET_FILE:${TARGET_NAME}>"
COMMAND CMD /C "SET PATH=%PATH%;${QT_DIR}/bin && ${WINDEPLOYQT_COMMAND} ${EXTRA_DEPLOY_OPTIONS} $<$<OR:$<CONFIG:Release>,$<CONFIG:MinSizeRel>,$<CONFIG:RelWithDebInfo>>:--release> \"$<TARGET_FILE:${TARGET_NAME}>\""
)

set(QTAUDIO_PATH $<TARGET_FILE_DIR:${TARGET_NAME}>/audio)
set(QTAUDIO_WIN7_PATH $<TARGET_FILE_DIR:${TARGET_NAME}>/audioWin7/audio)
set(QTAUDIO_WIN8_PATH $<TARGET_FILE_DIR:${TARGET_NAME}>/audioWin8/audio)
set(QTAUDIO_PATH "$<TARGET_FILE_DIR:${TARGET_NAME}>/audio")
set(QTAUDIO_WIN7_PATH "$<TARGET_FILE_DIR:${TARGET_NAME}>/audioWin7/audio")
set(QTAUDIO_WIN8_PATH "$<TARGET_FILE_DIR:${TARGET_NAME}>/audioWin8/audio")

# copy qtaudio_wasapi.dll and qtaudio_windows.dll in the correct directories for runtime selection
add_custom_command(
TARGET ${TARGET_NAME}
POST_BUILD
COMMAND ${CMAKE_COMMAND} -E make_directory ${QTAUDIO_WIN7_PATH}
COMMAND ${CMAKE_COMMAND} -E make_directory ${QTAUDIO_WIN8_PATH}
COMMAND ${CMAKE_COMMAND} -E make_directory "${QTAUDIO_WIN7_PATH}"
COMMAND ${CMAKE_COMMAND} -E make_directory "${QTAUDIO_WIN8_PATH}"
# copy release DLLs
COMMAND if exist ${QTAUDIO_PATH}/qtaudio_windows.dll ( ${CMAKE_COMMAND} -E copy ${QTAUDIO_PATH}/qtaudio_windows.dll ${QTAUDIO_WIN7_PATH} )
COMMAND if exist ${QTAUDIO_PATH}/qtaudio_windows.dll ( ${CMAKE_COMMAND} -E copy ${WASAPI_DLL_PATH}/qtaudio_wasapi.dll ${QTAUDIO_WIN8_PATH} )
COMMAND if exist "${QTAUDIO_PATH}/qtaudio_windows.dll" ( ${CMAKE_COMMAND} -E copy "${QTAUDIO_PATH}/qtaudio_windows.dll" "${QTAUDIO_WIN7_PATH}" )
COMMAND if exist "${QTAUDIO_PATH}/qtaudio_windows.dll" ( ${CMAKE_COMMAND} -E copy "${WASAPI_DLL_PATH}/qtaudio_wasapi.dll" "${QTAUDIO_WIN8_PATH}" )
# copy debug DLLs
COMMAND if exist ${QTAUDIO_PATH}/qtaudio_windowsd.dll ( ${CMAKE_COMMAND} -E copy ${QTAUDIO_PATH}/qtaudio_windowsd.dll ${QTAUDIO_WIN7_PATH} )
COMMAND if exist ${QTAUDIO_PATH}/qtaudio_windowsd.dll ( ${CMAKE_COMMAND} -E copy ${WASAPI_DLL_PATH}/qtaudio_wasapid.dll ${QTAUDIO_WIN8_PATH} )
COMMAND if exist "${QTAUDIO_PATH}/qtaudio_windowsd.dll" ( ${CMAKE_COMMAND} -E copy "${QTAUDIO_PATH}/qtaudio_windowsd.dll" "${QTAUDIO_WIN7_PATH}" )
COMMAND if exist "${QTAUDIO_PATH}/qtaudio_windowsd.dll" ( ${CMAKE_COMMAND} -E copy "${WASAPI_DLL_PATH}/qtaudio_wasapid.dll" "${QTAUDIO_WIN8_PATH}" )
# remove directory
COMMAND ${CMAKE_COMMAND} -E remove_directory ${QTAUDIO_PATH}
COMMAND ${CMAKE_COMMAND} -E remove_directory "${QTAUDIO_PATH}"
)

endif ()
@ -9,6 +9,7 @@ macro(SETUP_HIFI_PLUGIN)
|
|||
set(${TARGET_NAME}_SHARED 1)
|
||||
setup_hifi_library(${ARGV})
|
||||
add_dependencies(interface ${TARGET_NAME})
|
||||
target_link_libraries(${TARGET_NAME} ${CMAKE_THREAD_LIBS_INIT})
|
||||
set_target_properties(${TARGET_NAME} PROPERTIES FOLDER "Plugins")
|
||||
|
||||
if (APPLE)
|
||||
|
|
|
@ -43,6 +43,7 @@ macro(SETUP_HIFI_PROJECT)
|
|||
foreach(QT_MODULE ${${TARGET_NAME}_DEPENDENCY_QT_MODULES})
|
||||
target_link_libraries(${TARGET_NAME} Qt5::${QT_MODULE})
|
||||
endforeach()
|
||||
target_link_libraries(${TARGET_NAME} ${CMAKE_THREAD_LIBS_INIT})
|
||||
|
||||
target_glm()
|
||||
|
||||
|
|
|
@ -108,6 +108,7 @@ macro(SETUP_HIFI_TESTCASE)
|
|||
foreach(QT_MODULE ${${TARGET_NAME}_DEPENDENCY_QT_MODULES})
|
||||
target_link_libraries(${TARGET_NAME} Qt5::${QT_MODULE})
|
||||
endforeach()
|
||||
target_link_libraries(${TARGET_NAME} ${CMAKE_THREAD_LIBS_INIT})
|
||||
|
||||
set_target_properties(${TARGET_NAME} PROPERTIES FOLDER "hidden/test-executables")
|
||||
|
||||
|
|
|
@ -14,7 +14,7 @@ macro(SYMLINK_OR_COPY_DIRECTORY_BESIDE_TARGET _SHOULD_SYMLINK _DIRECTORY _DESTIN
|
|||
# remove the current directory
|
||||
add_custom_command(
|
||||
TARGET ${TARGET_NAME} POST_BUILD
|
||||
COMMAND "${CMAKE_COMMAND}" -E remove_directory $<TARGET_FILE_DIR:${TARGET_NAME}>/${_DESTINATION}
|
||||
COMMAND "${CMAKE_COMMAND}" -E remove_directory "$<TARGET_FILE_DIR:${TARGET_NAME}>/${_DESTINATION}"
|
||||
)
|
||||
|
||||
if (${_SHOULD_SYMLINK})
|
||||
|
@ -48,8 +48,8 @@ macro(SYMLINK_OR_COPY_DIRECTORY_BESIDE_TARGET _SHOULD_SYMLINK _DIRECTORY _DESTIN
|
|||
# copy the directory
|
||||
add_custom_command(
|
||||
TARGET ${TARGET_NAME} POST_BUILD
|
||||
COMMAND ${CMAKE_COMMAND} -E copy_directory ${_DIRECTORY}
|
||||
$<TARGET_FILE_DIR:${TARGET_NAME}>/${_DESTINATION}
|
||||
COMMAND ${CMAKE_COMMAND} -E copy_directory "${_DIRECTORY}"
|
||||
"$<TARGET_FILE_DIR:${TARGET_NAME}>/${_DESTINATION}"
|
||||
)
|
||||
endif ()
|
||||
# glob everything in this directory - add a custom command to copy any files
|
||||
|
|
|
@ -288,7 +288,7 @@ if (APPLE)
|
|||
add_custom_command(TARGET ${TARGET_NAME} POST_BUILD
|
||||
COMMAND "${CMAKE_COMMAND}" -E copy_directory
|
||||
"${CMAKE_SOURCE_DIR}/scripts"
|
||||
$<TARGET_FILE_DIR:${TARGET_NAME}>/../Resources/scripts
|
||||
"$<TARGET_FILE_DIR:${TARGET_NAME}>/../Resources/scripts"
|
||||
)
|
||||
|
||||
# call the fixup_interface macro to add required bundling commands for installation
|
||||
|
@ -299,10 +299,10 @@ else (APPLE)
|
|||
add_custom_command(TARGET ${TARGET_NAME} POST_BUILD
|
||||
COMMAND "${CMAKE_COMMAND}" -E copy_directory
|
||||
"${PROJECT_SOURCE_DIR}/resources"
|
||||
$<TARGET_FILE_DIR:${TARGET_NAME}>/resources
|
||||
"$<TARGET_FILE_DIR:${TARGET_NAME}>/resources"
|
||||
COMMAND "${CMAKE_COMMAND}" -E copy_directory
|
||||
"${CMAKE_SOURCE_DIR}/scripts"
|
||||
$<TARGET_FILE_DIR:${TARGET_NAME}>/scripts
|
||||
"$<TARGET_FILE_DIR:${TARGET_NAME}>/scripts"
|
||||
)
|
||||
|
||||
# link target to external libraries
|
||||
|
@ -337,7 +337,7 @@ endif()
|
|||
add_bugsplat()
|
||||
|
||||
if (WIN32)
|
||||
set(EXTRA_DEPLOY_OPTIONS "--qmldir ${PROJECT_SOURCE_DIR}/resources/qml")
|
||||
set(EXTRA_DEPLOY_OPTIONS "--qmldir \"${PROJECT_SOURCE_DIR}/resources/qml\"")
|
||||
|
||||
set(TARGET_INSTALL_DIR ${INTERFACE_INSTALL_DIR})
|
||||
set(TARGET_INSTALL_COMPONENT ${CLIENT_COMPONENT})
|
||||
|
|
|
@@ -206,7 +206,7 @@ ScrollingWindow {
print("Error: model cannot be both static mesh and dynamic. This should never happen.");
} else if (url) {
var name = assetProxyModel.data(treeView.selection.currentIndex);
var addPosition = Vec3.sum(MyAvatar.position, Vec3.multiply(2, Quat.getFront(MyAvatar.orientation)));
var addPosition = Vec3.sum(MyAvatar.position, Vec3.multiply(2, Quat.getForward(MyAvatar.orientation)));
var gravity;
if (dynamic) {
// Create a vector <0, -10, 0>. { x: 0, y: -10, z: 0 } won't work because Qt is dumb and this is a
@ -42,8 +42,7 @@ Item {
|
|||
property bool selected: false
|
||||
property bool isAdmin: false
|
||||
property bool isPresent: true
|
||||
property string imageMaskColor: pal.color;
|
||||
property string profilePicBorderColor: (connectionStatus == "connection" ? hifi.colors.indigoAccent : (connectionStatus == "friend" ? hifi.colors.greenHighlight : imageMaskColor))
|
||||
property string profilePicBorderColor: (connectionStatus == "connection" ? hifi.colors.indigoAccent : (connectionStatus == "friend" ? hifi.colors.greenHighlight : "transparent"))
|
||||
|
||||
Item {
|
||||
id: avatarImage
|
||||
|
@ -61,25 +60,25 @@ Item {
|
|||
mipmap: true;
|
||||
// Anchors
|
||||
anchors.fill: parent
|
||||
layer.enabled: true
|
||||
layer.effect: OpacityMask {
|
||||
maskSource: Item {
|
||||
width: userImage.width;
|
||||
height: userImage.height;
|
||||
Rectangle {
|
||||
anchors.centerIn: parent;
|
||||
width: userImage.width; // This works because userImage is square
|
||||
height: width;
|
||||
radius: width;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
AnimatedImage {
|
||||
source: "../../icons/profilePicLoading.gif"
|
||||
anchors.fill: parent;
|
||||
visible: userImage.status != Image.Ready;
|
||||
}
|
||||
// Circular mask
|
||||
Rectangle {
|
||||
id: avatarImageMask;
|
||||
visible: avatarImage.visible;
|
||||
anchors.verticalCenter: avatarImage.verticalCenter;
|
||||
anchors.horizontalCenter: avatarImage.horizontalCenter;
|
||||
width: avatarImage.width * 2;
|
||||
height: avatarImage.height * 2;
|
||||
color: "transparent"
|
||||
radius: avatarImage.height;
|
||||
border.color: imageMaskColor;
|
||||
border.width: avatarImage.height/2;
|
||||
}
|
||||
StateImage {
|
||||
id: infoHoverImage;
|
||||
visible: false;
|
||||
|
|
|
@ -126,7 +126,6 @@ Rectangle {
|
|||
id: myCard;
|
||||
// Properties
|
||||
profileUrl: myData.profileUrl;
|
||||
imageMaskColor: pal.color;
|
||||
displayName: myData.displayName;
|
||||
userName: myData.userName;
|
||||
audioLevel: myData.audioLevel;
|
||||
|
@ -301,12 +300,11 @@ Rectangle {
|
|||
anchors.fill: parent;
|
||||
onClicked: {
|
||||
if (activeTab != "connectionsTab") {
|
||||
connectionsLoading.visible = false;
|
||||
connectionsLoading.visible = true;
|
||||
pal.sendToScript({method: 'refreshConnections'});
|
||||
}
|
||||
activeTab = "connectionsTab";
|
||||
connectionsLoading.visible = false;
|
||||
connectionsLoading.visible = true;
|
||||
connectionsRefreshProblemText.visible = false;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -587,7 +585,7 @@ Rectangle {
|
|||
property bool isCheckBox: styleData.role === "personalMute" || styleData.role === "ignore";
|
||||
property bool isButton: styleData.role === "mute" || styleData.role === "kick";
|
||||
property bool isAvgAudio: styleData.role === "avgAudioLevel";
|
||||
opacity: model && model.isPresent ? 1.0 : 0.4;
|
||||
opacity: !isButton ? (model && model.isPresent ? 1.0 : 0.4) : 1.0; // Admin actions shouldn't turn gray
|
||||
|
||||
// This NameCard refers to the cell that contains an avatar's
|
||||
// DisplayName and UserName
|
||||
|
@ -595,7 +593,6 @@ Rectangle {
|
|||
id: nameCard;
|
||||
// Properties
|
||||
profileUrl: (model && model.profileUrl) || "";
|
||||
imageMaskColor: rowColor(styleData.selected, styleData.row % 2);
|
||||
displayName: styleData.value;
|
||||
userName: model ? model.userName : "";
|
||||
connectionStatus: model ? model.connection : "";
|
||||
|
@ -916,7 +913,6 @@ Rectangle {
|
|||
// Properties
|
||||
visible: styleData.role === "userName";
|
||||
profileUrl: (model && model.profileUrl) || "";
|
||||
imageMaskColor: rowColor(styleData.selected, styleData.row % 2);
|
||||
displayName: "";
|
||||
userName: model ? model.userName : "";
|
||||
connectionStatus : model ? model.connection : "";
|
||||
|
|
|
@@ -2132,7 +2132,7 @@ void Application::paintGL() {
PerformanceTimer perfTimer("CameraUpdates");

auto myAvatar = getMyAvatar();
boomOffset = myAvatar->getScale() * myAvatar->getBoomLength() * -IDENTITY_FRONT;
boomOffset = myAvatar->getScale() * myAvatar->getBoomLength() * -IDENTITY_FORWARD;

if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON || _myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, myAvatar->getBoomLength() <= MyAvatar::ZOOM_MIN);
@@ -3955,7 +3955,7 @@ void Application::updateMyAvatarLookAtPosition() {
auto lookingAtHead = static_pointer_cast<Avatar>(lookingAt)->getHead();

const float MAXIMUM_FACE_ANGLE = 65.0f * RADIANS_PER_DEGREE;
glm::vec3 lookingAtFaceOrientation = lookingAtHead->getFinalOrientationInWorldFrame() * IDENTITY_FRONT;
glm::vec3 lookingAtFaceOrientation = lookingAtHead->getFinalOrientationInWorldFrame() * IDENTITY_FORWARD;
glm::vec3 fromLookingAtToMe = glm::normalize(myAvatar->getHead()->getEyePosition()
- lookingAtHead->getEyePosition());
float faceAngle = glm::angle(lookingAtFaceOrientation, fromLookingAtToMe);
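The code above decides whether the avatar being looked at is facing back at us by comparing its face direction with the direction toward our own eyes. A minimal standalone sketch of that test follows; the helper name isFacingMe and the -Z forward convention are assumptions for illustration, not taken from the commit.

```cpp
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/vector_angle.hpp>

static const glm::vec3 IDENTITY_FORWARD(0.0f, 0.0f, -1.0f); // assumed engine forward axis

// True when the other avatar's head is turned toward my eyes within maxFaceAngle radians.
// Both vectors handed to glm::angle are unit length, as that function requires.
bool isFacingMe(const glm::quat& theirHeadOrientation, const glm::vec3& theirEyePosition,
                const glm::vec3& myEyePosition, float maxFaceAngle) {
    glm::vec3 theirFaceDirection = theirHeadOrientation * IDENTITY_FORWARD;
    glm::vec3 towardMe = glm::normalize(myEyePosition - theirEyePosition);
    return glm::angle(theirFaceDirection, towardMe) < maxFaceAngle;
}
```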
@ -236,7 +236,6 @@ protected:
|
|||
|
||||
glm::vec3 getBodyRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }
|
||||
glm::vec3 getBodyUpDirection() const { return getOrientation() * IDENTITY_UP; }
|
||||
glm::vec3 getBodyFrontDirection() const { return getOrientation() * IDENTITY_FRONT; }
|
||||
glm::quat computeRotationFromBodyToWorldUp(float proportion = 1.0f) const;
|
||||
void measureMotionDerivatives(float deltaTime);
|
||||
|
||||
|
|
|
@ -56,9 +56,9 @@ void CauterizedModel::createVisibleRenderItemSet() {
|
|||
}
|
||||
|
||||
// We should not have any existing renderItems if we enter this section of code
|
||||
Q_ASSERT(_modelMeshRenderItemsSet.isEmpty());
|
||||
Q_ASSERT(_modelMeshRenderItems.isEmpty());
|
||||
|
||||
_modelMeshRenderItemsSet.clear();
|
||||
_modelMeshRenderItems.clear();
|
||||
|
||||
Transform transform;
|
||||
transform.setTranslation(_translation);
|
||||
|
@ -81,7 +81,7 @@ void CauterizedModel::createVisibleRenderItemSet() {
|
|||
int numParts = (int)mesh->getNumParts();
|
||||
for (int partIndex = 0; partIndex < numParts; partIndex++) {
|
||||
auto ptr = std::make_shared<CauterizedMeshPartPayload>(this, i, partIndex, shapeID, transform, offset);
|
||||
_modelMeshRenderItemsSet << std::static_pointer_cast<ModelMeshPartPayload>(ptr);
|
||||
_modelMeshRenderItems << std::static_pointer_cast<ModelMeshPartPayload>(ptr);
|
||||
shapeID++;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -268,7 +268,7 @@ void Head::applyEyelidOffset(glm::quat headOrientation) {
|
|||
return;
|
||||
}
|
||||
|
||||
glm::quat eyeRotation = rotationBetween(headOrientation * IDENTITY_FRONT, getLookAtPosition() - _eyePosition);
|
||||
glm::quat eyeRotation = rotationBetween(headOrientation * IDENTITY_FORWARD, getLookAtPosition() - _eyePosition);
|
||||
eyeRotation = eyeRotation * glm::angleAxis(safeEulerAngles(headOrientation).y, IDENTITY_UP); // Rotation w.r.t. head
|
||||
float eyePitch = safeEulerAngles(eyeRotation).x;
|
||||
|
||||
|
@ -375,7 +375,7 @@ glm::quat Head::getCameraOrientation() const {
|
|||
glm::quat Head::getEyeRotation(const glm::vec3& eyePosition) const {
|
||||
glm::quat orientation = getOrientation();
|
||||
glm::vec3 lookAtDelta = _lookAtPosition - eyePosition;
|
||||
return rotationBetween(orientation * IDENTITY_FRONT, lookAtDelta + glm::length(lookAtDelta) * _saccade) * orientation;
|
||||
return rotationBetween(orientation * IDENTITY_FORWARD, lookAtDelta + glm::length(lookAtDelta) * _saccade) * orientation;
|
||||
}
|
||||
|
||||
void Head::setFinalPitch(float finalPitch) {
|
||||
|
|
|
@ -58,14 +58,14 @@ public:
|
|||
const glm::vec3& getSaccade() const { return _saccade; }
|
||||
glm::vec3 getRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }
|
||||
glm::vec3 getUpDirection() const { return getOrientation() * IDENTITY_UP; }
|
||||
glm::vec3 getFrontDirection() const { return getOrientation() * IDENTITY_FRONT; }
|
||||
glm::vec3 getForwardDirection() const { return getOrientation() * IDENTITY_FORWARD; }
|
||||
|
||||
glm::quat getEyeRotation(const glm::vec3& eyePosition) const;
|
||||
|
||||
const glm::vec3& getRightEyePosition() const { return _rightEyePosition; }
|
||||
const glm::vec3& getLeftEyePosition() const { return _leftEyePosition; }
|
||||
glm::vec3 getRightEarPosition() const { return _rightEyePosition + (getRightDirection() * EYE_EAR_GAP) + (getFrontDirection() * -EYE_EAR_GAP); }
|
||||
glm::vec3 getLeftEarPosition() const { return _leftEyePosition + (getRightDirection() * -EYE_EAR_GAP) + (getFrontDirection() * -EYE_EAR_GAP); }
|
||||
glm::vec3 getRightEarPosition() const { return _rightEyePosition + (getRightDirection() * EYE_EAR_GAP) + (getForwardDirection() * -EYE_EAR_GAP); }
|
||||
glm::vec3 getLeftEarPosition() const { return _leftEyePosition + (getRightDirection() * -EYE_EAR_GAP) + (getForwardDirection() * -EYE_EAR_GAP); }
|
||||
glm::vec3 getMouthPosition() const { return _eyePosition - getUpDirection() * glm::length(_rightEyePosition - _leftEyePosition); }
|
||||
|
||||
bool getReturnToCenter() const { return _returnHeadToCenter; } // Do you want head to try to return to center (depends on interface detected)
|
||||
|
|
|
@ -1068,7 +1068,7 @@ void MyAvatar::updateLookAtTargetAvatar() {
|
|||
_lookAtTargetAvatar.reset();
|
||||
_targetAvatarPosition = glm::vec3(0.0f);
|
||||
|
||||
glm::vec3 lookForward = getHead()->getFinalOrientationInWorldFrame() * IDENTITY_FRONT;
|
||||
glm::vec3 lookForward = getHead()->getFinalOrientationInWorldFrame() * IDENTITY_FORWARD;
|
||||
glm::vec3 cameraPosition = qApp->getCamera()->getPosition();
|
||||
|
||||
float smallestAngleTo = glm::radians(DEFAULT_FIELD_OF_VIEW_DEGREES) / 2.0f;
|
||||
|
@ -1770,10 +1770,10 @@ void MyAvatar::updateActionMotor(float deltaTime) {
|
|||
}
|
||||
|
||||
// compute action input
|
||||
glm::vec3 front = (getDriveKey(TRANSLATE_Z)) * IDENTITY_FRONT;
|
||||
glm::vec3 forward = (getDriveKey(TRANSLATE_Z)) * IDENTITY_FORWARD;
|
||||
glm::vec3 right = (getDriveKey(TRANSLATE_X)) * IDENTITY_RIGHT;
|
||||
|
||||
glm::vec3 direction = front + right;
|
||||
glm::vec3 direction = forward + right;
|
||||
CharacterController::State state = _characterController.getState();
|
||||
if (state == CharacterController::State::Hover) {
|
||||
// we're flying --> support vertical motion
|
||||
|
@ -2053,7 +2053,7 @@ void MyAvatar::goToLocation(const glm::vec3& newPosition,
|
|||
|
||||
// move the user a couple units away
|
||||
const float DISTANCE_TO_USER = 2.0f;
|
||||
_goToPosition = newPosition - quatOrientation * IDENTITY_FRONT * DISTANCE_TO_USER;
|
||||
_goToPosition = newPosition - quatOrientation * IDENTITY_FORWARD * DISTANCE_TO_USER;
|
||||
}
|
||||
|
||||
_goToOrientation = quatOrientation;
|
||||
|
|
|
@ -46,6 +46,60 @@ Q_DECLARE_METATYPE(AudioListenerMode);
|
|||
|
||||
class MyAvatar : public Avatar {
|
||||
Q_OBJECT
|
||||
|
||||
/**jsdoc
|
||||
* Your avatar is your in-world representation of you. The MyAvatar API is used to manipulate the avatar.
|
||||
* For example, using the MyAvatar API you can customize the avatar's appearance, run custom avatar animations,
|
||||
* change the avatar's position within the domain, or manage the avatar's collisions with other objects.
|
||||
* NOTE: MyAvatar extends Avatar and AvatarData; see those namespaces for more properties/methods.
|
||||
*
|
||||
* @namespace MyAvatar
|
||||
* @augments Avatar
|
||||
* @property shouldRenderLocally {bool} Set it to true if you would like to see MyAvatar in your local interface,
|
||||
* and false if you would not like to see MyAvatar in your local interface.
|
||||
* @property motorVelocity {Vec3} Can be used to move the avatar with this velocity.
|
||||
* @property motorTimescale {float} Specifies how quickly the avatar should accelerate to meet the motorVelocity,
|
||||
* smaller values will result in higher acceleration.
|
||||
* @property motorReferenceFrame {string} Reference frame of the motorVelocity, must be one of the following: "avatar", "camera", "world"
|
||||
* @property collisionSoundURL {string} Specifies the sound to play when the avatar experiences a collision.
|
||||
* You can provide a mono or stereo 16-bit WAV file running at either 24 Khz or 48 Khz.
|
||||
* The latter is downsampled by the audio mixer, so all audio effectively plays back at a 24 Khz sample rate.
|
||||
* 48 Khz RAW files are also supported.
|
||||
* @property audioListenerMode {number} When hearing spatialized audio this determines where the listener is placed.
|
||||
* Should be one of the following values:
|
||||
* MyAvatar.audioListenerModeHead - the listener located at the avatar's head.
|
||||
* MyAvatar.audioListenerModeCamera - the listener is relative to the camera.
|
||||
* MyAvatar.audioListenerModeCustom - the listener is at a custom location specified by the MyAvatar.customListenPosition
|
||||
* and MyAvatar.customListenOrientation properties.
|
||||
* @property customListenPosition {Vec3} If MyAvatar.audioListenerMode == MyAvatar.audioListenerModeHead, then this determines the position
|
||||
* of audio spatialization listener.
|
||||
* @property customListenOrientation {Quat} If MyAvatar.audioListenerMode == MyAvatar.audioListenerModeHead, then this determines the orientation
|
||||
* of the audio spatialization listener.
|
||||
* @property audioListenerModeHead {number} READ-ONLY. When passed to MyAvatar.audioListenerMode, it will set the audio listener
|
||||
* around the avatar's head.
|
||||
* @property audioListenerModeCamera {number} READ-ONLY. When passed to MyAvatar.audioListenerMode, it will set the audio listener
|
||||
* around the camera.
|
||||
* @property audioListenerModeCustom {number} READ-ONLY. When passed to MyAvatar.audioListenerMode, it will set the audio listener
|
||||
* around the value specified by MyAvatar.customListenPosition and MyAvatar.customListenOrientation.
|
||||
* @property leftHandPosition {Vec3} READ-ONLY. The desired position of the left wrist in avatar space, determined by the hand controllers.
|
||||
* Note: only valid if hand controllers are in use.
|
||||
* @property rightHandPosition {Vec3} READ-ONLY. The desired position of the right wrist in avatar space, determined by the hand controllers.
|
||||
* Note: only valid if hand controllers are in use.
|
||||
* @property leftHandTipPosition {Vec3} READ-ONLY. A position 30 cm offset from MyAvatar.leftHandPosition
|
||||
* @property rightHandTipPosition {Vec3} READ-ONLY. A position 30 cm offset from MyAvatar.rightHandPosition
|
||||
* @property leftHandPose {Pose} READ-ONLY. Returns full pose (translation, orientation, velocity & angularVelocity) of the desired
|
||||
* wrist position, determined by the hand controllers.
|
||||
* @property rightHandPose {Pose} READ-ONLY. Returns full pose (translation, orientation, velocity & angularVelocity) of the desired
|
||||
* wrist position, determined by the hand controllers.
|
||||
* @property leftHandTipPose {Pose} READ-ONLY. Returns a pose offset 30 cm from MyAvatar.leftHandPose
|
||||
* @property rightHandTipPose {Pose} READ-ONLY. Returns a pose offset 30 cm from MyAvatar.rightHandPose
|
||||
* @property hmdLeanRecenterEnabled {bool} This can be used to disable the HMD lean recenter behavior. This behavior is what causes your avatar
|
||||
* to follow your HMD as you walk around the room, in room scale VR. Disabling this is useful if you desire to pin the avatar to a fixed location.
|
||||
* @property characterControllerEnabled {bool} This can be used to disable collisions between the avatar and the world.
|
||||
* @property useAdvancedMovementControls {bool} Stores the user preference only, does not change user mappings, this is done in the defaultScript
|
||||
* "scripts/system/controllers/toggleAdvancedMovementForHandControllers.js".
|
||||
*/
|
||||
|
||||
Q_PROPERTY(bool shouldRenderLocally READ getShouldRenderLocally WRITE setShouldRenderLocally)
|
||||
Q_PROPERTY(glm::vec3 motorVelocity READ getScriptedMotorVelocity WRITE setScriptedMotorVelocity)
|
||||
Q_PROPERTY(float motorTimescale READ getScriptedMotorTimescale WRITE setScriptedMotorTimescale)
|
||||
|
@ -106,7 +160,19 @@ public:
|
|||
void reset(bool andRecenter = false, bool andReload = true, bool andHead = true);
|
||||
|
||||
Q_INVOKABLE void resetSensorsAndBody();
|
||||
|
||||
/**jsdoc
|
||||
* Moves and orients the avatar, such that it is directly underneath the HMD, with toes pointed forward.
|
||||
* @function MyAvatar.centerBody
|
||||
*/
|
||||
Q_INVOKABLE void centerBody(); // thread-safe
|
||||
|
||||
|
||||
/**jsdoc
|
||||
* The internal inverse-kinematics system maintains a record of which joints are "locked". Sometimes it is useful to forget this history, to prevent
|
||||
* contorted joints.
|
||||
* @function MyAvatar.clearIKJointLimitHistory
|
||||
*/
|
||||
Q_INVOKABLE void clearIKJointLimitHistory(); // thread-safe
|
||||
|
||||
void update(float deltaTime);
|
||||
|
@ -137,23 +203,109 @@ public:
|
|||
|
||||
void setRealWorldFieldOfView(float realWorldFov) { _realWorldFieldOfView.set(realWorldFov); }
|
||||
|
||||
/**jsdoc
|
||||
* The default position in world coordinates of the point directly between the avatar's eyes
|
||||
* @function MyAvatar.getDefaultEyePosition
|
||||
* @example <caption>This example gets the default eye position and prints it to the debug log.</caption>
|
||||
* var defaultEyePosition = MyAvatar.getDefaultEyePosition();
|
||||
* print (JSON.stringify(defaultEyePosition));
|
||||
* @returns {Vec3} Position between the avatar's eyes.
|
||||
*/
|
||||
Q_INVOKABLE glm::vec3 getDefaultEyePosition() const;
|
||||
|
||||
float getRealWorldFieldOfView() { return _realWorldFieldOfView.get(); }
|
||||
|
||||
// Interrupt the current animation with a custom animation.
|
||||
/**jsdoc
|
||||
* The avatar animation system includes a set of default animations along with rules for how those animations are blended
|
||||
* together with procedural data (such as look at vectors, hand sensors etc.). overrideAnimation() is used to completely
|
||||
* override all motion from the default animation system (including inverse kinematics for hand and head controllers) and
|
||||
* play a specified animation. To end this animation and restore the default animations, use MyAvatar.restoreAnimation.
|
||||
* @function MyAvatar.overrideAnimation
|
||||
* @example <caption> Play a clapping animation on your avatar for three seconds. </caption>
|
||||
* // Clap your hands for 3 seconds then restore animation back to the avatar.
|
||||
* var ANIM_URL = "https://s3.amazonaws.com/hifi-public/animations/ClapAnimations/ClapHands_Standing.fbx";
|
||||
* MyAvatar.overrideAnimation(ANIM_URL, 30, true, 0, 53);
|
||||
* Script.setTimeout(function () {
|
||||
* MyAvatar.restoreAnimation();
|
||||
* }, 3000);
|
||||
* @param url {string} The URL to the animation file. Animation files need to be .FBX format, but only need to contain the avatar skeleton and animation data.
|
||||
* @param fps {number} The frames per second (FPS) rate for the animation playback. 30 FPS is normal speed.
|
||||
* @param loop {bool} Set to true if the animation should loop.
|
||||
* @param firstFrame {number} The frame the animation should start at.
|
||||
* @param lastFrame {number} The frame the animation should end at.
|
||||
*/
|
||||
Q_INVOKABLE void overrideAnimation(const QString& url, float fps, bool loop, float firstFrame, float lastFrame);
|
||||
|
||||
// Stop the animation that was started with overrideAnimation and go back to the standard animation.
|
||||
/**jsdoc
|
||||
* The avatar animation system includes a set of default animations along with rules for how those animations are blended together with
|
||||
* procedural data (such as look at vectors, hand sensors etc.). Playing your own custom animations will override the default animations.
|
||||
* restoreAnimation() is used to restore all motion from the default animation system including inverse kinematics for hand and head
|
||||
* controllers. If you aren't currently playing an override animation, this function will have no effect.
|
||||
* @function MyAvatar.restoreAnimation
|
||||
* @example <caption> Play a clapping animation on your avatar for three seconds. </caption>
|
||||
* // Clap your hands for 3 seconds then restore animation back to the avatar.
|
||||
* var ANIM_URL = "https://s3.amazonaws.com/hifi-public/animations/ClapAnimations/ClapHands_Standing.fbx";
|
||||
* MyAvatar.overrideAnimation(ANIM_URL, 30, true, 0, 53);
|
||||
* Script.setTimeout(function () {
|
||||
* MyAvatar.restoreAnimation();
|
||||
* }, 3000);
|
||||
*/
|
||||
Q_INVOKABLE void restoreAnimation();
|
||||
|
||||
// Returns a list of all clips that are available
|
||||
/**jsdoc
|
||||
* Each avatar has an avatar-animation.json file that defines which animations are used and how they are blended together with procedural data
|
||||
* (such as look at vectors, hand sensors etc.). Each animation specified in the avatar-animation.json file is known as an animation role.
|
||||
* Animation roles map to easily understandable actions that the avatar can perform, such as "idleStand", "idleTalk", or "walkFwd."
|
||||
* getAnimationRoles() is used get the list of animation roles defined in the avatar-animation.json.
|
||||
* @function MyAvatar.getAnimationRoles
|
||||
* @example <caption>This example prints the list of animation roles defined in the avatar's avatar-animation.json file to the debug log.</caption>
|
||||
* var roles = MyAvatar.getAnimationRoles();
|
||||
* print("Animation Roles:");
|
||||
* for (var i = 0; i < roles.length; i++) {
|
||||
* print(roles[i]);
|
||||
* }
|
||||
* @returns {string[]} Array of role strings
|
||||
*/
|
||||
Q_INVOKABLE QStringList getAnimationRoles();
|
||||
|
||||
// Replace an existing standard role animation with a custom one.
|
||||
/**jsdoc
|
||||
* Each avatar has an avatar-animation.json file that defines a set of animation roles. Animation roles map to easily understandable actions
|
||||
* that the avatar can perform, such as "idleStand", "idleTalk", or "walkFwd". To get the full list of roles, use getAnimationRoles().
|
||||
* For each role, the avatar-animation.json defines when the animation is used, the animation clip (.FBX) used, and how animations are blended
|
||||
* together with procedural data (such as look at vectors, hand sensors etc.).
|
||||
* overrideRoleAnimation() is used to change the animation clip (.FBX) associated with a specified animation role.
|
||||
* Note: Hand roles only affect the hand. Other 'main' roles, like 'idleStand', 'idleTalk', 'takeoffStand' are full body.
|
||||
* @function MyAvatar.overrideRoleAnimation
|
||||
* @example <caption>The default avatar-animation.json defines an "idleStand" animation role. This role specifies that when the avatar is not moving,
|
||||
* an animation clip of the avatar idling with hands hanging at its side will be used. It also specifies that when the avatar moves, the animation
|
||||
* will smoothly blend to the walking animation used by the "walkFwd" animation role.
|
||||
* In this example, the "idleStand" role animation clip has been replaced with a clapping animation clip. Now instead of standing with its arms
|
||||
* hanging at its sides when it is not moving, the avatar will stand and clap its hands. Note that just as it did before, as soon as the avatar
|
||||
* starts to move, the animation will smoothly blend into the walk animation used by the "walkFwd" animation role.</caption>
|
||||
* // An animation of the avatar clapping its hands while standing
|
||||
* var ANIM_URL = "https://s3.amazonaws.com/hifi-public/animations/ClapAnimations/ClapHands_Standing.fbx";
|
||||
* MyAvatar.overrideRoleAnimation("idleStand", ANIM_URL, 30, true, 0, 53);
|
||||
* // To restore the default animation, use MyAvatar.restoreRoleAnimation().
|
||||
* @param role {string} The animation role to override
|
||||
* @param url {string} The URL to the animation file. Animation files need to be .FBX format, but only need to contain the avatar skeleton and animation data.
|
||||
* @param fps {number} The frames per second (FPS) rate for the animation playback. 30 FPS is normal speed.
|
||||
* @param loop {bool} Set to true if the animation should loop
|
||||
* @param firstFrame {number} The frame the animation should start at
|
||||
* @param lastFrame {number} The frame the animation should end at
|
||||
*/
|
||||
Q_INVOKABLE void overrideRoleAnimation(const QString& role, const QString& url, float fps, bool loop, float firstFrame, float lastFrame);
|
||||
|
||||
// remove an animation role override and return to the standard animation.
|
||||
/**jsdoc
|
||||
* Each avatar has an avatar-animation.json file that defines a set of animation roles. Animation roles map to easily understandable actions that
|
||||
* the avatar can perform, such as "idleStand", "idleTalk", or "walkFwd". To get the full list of roles, use getAnimationRoles(). For each role,
|
||||
* the avatar-animation.json defines when the animation is used, the animation clip (.FBX) used, and how animations are blended together with
|
||||
* procedural data (such as look at vectors, hand sensors etc.). You can change the animation clip (.FBX) associated with a specified animation
|
||||
* role using overrideRoleAnimation().
|
||||
* restoreRoleAnimation() is used to restore a specified animation role's default animation clip. If you have not specified an override animation
|
||||
* for the specified role, this function will have no effect.
|
||||
* @function MyAvatar.restoreRoleAnimation
|
||||
* @param role {string} The animation role whose default clip should be restored
|
||||
*/
|
||||
Q_INVOKABLE void restoreRoleAnimation(const QString& role);
|
||||
|
||||
// Adds handler(animStateDictionaryIn) => animStateDictionaryOut, which will be invoked just before each animGraph state update.
|
||||
|
|
|
@@ -213,7 +213,7 @@ public slots:
* @function Overlays.findOverlays
* @param {Vec3} center the point to search from.
* @param {float} radius search radius
* @return {List of Overlays.OverlayID} list of overlays within the radius
* @return {Overlays.OverlayID[]} list of overlays within the radius
*/
QVector<QUuid> findOverlays(const glm::vec3& center, float radius) const;
@@ -558,15 +558,15 @@ static const std::vector<float> LATERAL_SPEEDS = { 0.2f, 0.65f }; // m/s

void Rig::computeMotionAnimationState(float deltaTime, const glm::vec3& worldPosition, const glm::vec3& worldVelocity, const glm::quat& worldRotation, CharacterControllerState ccState) {

glm::vec3 front = worldRotation * IDENTITY_FRONT;
glm::vec3 forward = worldRotation * IDENTITY_FORWARD;
glm::vec3 workingVelocity = worldVelocity;

{
glm::vec3 localVel = glm::inverse(worldRotation) * workingVelocity;

float forwardSpeed = glm::dot(localVel, IDENTITY_FRONT);
float forwardSpeed = glm::dot(localVel, IDENTITY_FORWARD);
float lateralSpeed = glm::dot(localVel, IDENTITY_RIGHT);
float turningSpeed = glm::orientedAngle(front, _lastFront, IDENTITY_UP) / deltaTime;
float turningSpeed = glm::orientedAngle(forward, _lastForward, IDENTITY_UP) / deltaTime;

// filter speeds using a simple moving average.
_averageForwardSpeed.updateAverage(forwardSpeed);
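The hunk renames the cached forward vector but leaves the decomposition itself unchanged: the world-space velocity is brought into the character's local frame and projected onto the forward and right axes, and the turn rate is the signed angle between successive forward vectors. A compact sketch of that math, with the identity axis values assumed rather than copied from the engine:

```cpp
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/vector_angle.hpp>

static const glm::vec3 IDENTITY_FORWARD(0.0f, 0.0f, -1.0f); // assumed axis conventions
static const glm::vec3 IDENTITY_RIGHT(1.0f, 0.0f, 0.0f);
static const glm::vec3 IDENTITY_UP(0.0f, 1.0f, 0.0f);

struct MotionSpeeds { float forward; float lateral; float turning; };

// worldRotation and lastForward are expected to be normalized, as glm::orientedAngle requires.
MotionSpeeds decomposeMotion(const glm::quat& worldRotation, const glm::vec3& worldVelocity,
                             const glm::vec3& lastForward, float deltaTime) {
    glm::vec3 localVelocity = glm::inverse(worldRotation) * worldVelocity;
    glm::vec3 forward = worldRotation * IDENTITY_FORWARD;
    MotionSpeeds speeds;
    speeds.forward = glm::dot(localVelocity, IDENTITY_FORWARD);  // m/s along the facing direction
    speeds.lateral = glm::dot(localVelocity, IDENTITY_RIGHT);    // m/s of side-step
    speeds.turning = glm::orientedAngle(forward, lastForward, IDENTITY_UP) / deltaTime; // rad/s about up
    return speeds;
}
```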
@ -852,7 +852,7 @@ void Rig::computeMotionAnimationState(float deltaTime, const glm::vec3& worldPos
|
|||
_lastEnableInverseKinematics = _enableInverseKinematics;
|
||||
}
|
||||
|
||||
_lastFront = front;
|
||||
_lastForward = forward;
|
||||
_lastPosition = worldPosition;
|
||||
_lastVelocity = workingVelocity;
|
||||
}
|
||||
|
|
|
@ -267,7 +267,7 @@ protected:
|
|||
int _rightElbowJointIndex { -1 };
|
||||
int _rightShoulderJointIndex { -1 };
|
||||
|
||||
glm::vec3 _lastFront;
|
||||
glm::vec3 _lastForward;
|
||||
glm::vec3 _lastPosition;
|
||||
glm::vec3 _lastVelocity;
|
||||
|
||||
|
|
|
@ -47,9 +47,6 @@ quint64 DEFAULT_FILTERED_LOG_EXPIRY = 2 * USECS_PER_SECOND;
|
|||
|
||||
using namespace std;
|
||||
|
||||
const glm::vec3 DEFAULT_LOCAL_AABOX_CORNER(-0.5f);
|
||||
const glm::vec3 DEFAULT_LOCAL_AABOX_SCALE(1.0f);
|
||||
|
||||
const QString AvatarData::FRAME_NAME = "com.highfidelity.recording.AvatarData";
|
||||
|
||||
static const int TRANSLATION_COMPRESSION_RADIX = 12;
|
||||
|
|
|
@@ -65,8 +65,8 @@ glm::quat HeadData::getOrientation() const {
void HeadData::setOrientation(const glm::quat& orientation) {
// rotate body about vertical axis
glm::quat bodyOrientation = _owningAvatar->getOrientation();
glm::vec3 newFront = glm::inverse(bodyOrientation) * (orientation * IDENTITY_FRONT);
bodyOrientation = bodyOrientation * glm::angleAxis(atan2f(-newFront.x, -newFront.z), glm::vec3(0.0f, 1.0f, 0.0f));
glm::vec3 newForward = glm::inverse(bodyOrientation) * (orientation * IDENTITY_FORWARD);
bodyOrientation = bodyOrientation * glm::angleAxis(atan2f(-newForward.x, -newForward.z), glm::vec3(0.0f, 1.0f, 0.0f));
_owningAvatar->setOrientation(bodyOrientation);

// the rest goes to the head
@@ -996,7 +996,7 @@ void EntityTreeRenderer::checkAndCallPreload(const EntityItemID& entityID, const
}
bool shouldLoad = entity->shouldPreloadScript() && _entitiesScriptEngine;
QString scriptUrl = entity->getScript();
if ((unloadFirst && shouldLoad) || scriptUrl.isEmpty()) {
if (shouldLoad && (unloadFirst || scriptUrl.isEmpty())) {
_entitiesScriptEngine->unloadEntityScript(entityID);
entity->scriptHasUnloaded();
}
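The rewritten condition changes behavior as well as shape: the unload path now runs only when the script engine exists and the entity actually wants its script preloaded, instead of also firing for any entity whose script URL happens to be empty. A minimal sketch of the new guard as a standalone predicate; the names mirror the snippet above, while the wrapper function itself is hypothetical.

```cpp
#include <QString>

// Unload an entity's current script only when a (re)load is actually pending:
// either the caller asked to unload first, or the script URL has been cleared.
bool shouldUnloadBeforePreload(bool shouldLoad, bool unloadFirst, const QString& scriptUrl) {
    return shouldLoad && (unloadFirst || scriptUrl.isEmpty());
}
```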
@ -371,109 +371,7 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
|
|||
_model->updateRenderItems();
|
||||
}
|
||||
|
||||
if (hasModel()) {
|
||||
// Prepare the current frame
|
||||
{
|
||||
if (!_model || _needsModelReload) {
|
||||
// TODO: this getModel() appears to be about 3% of model render time. We should optimize
|
||||
PerformanceTimer perfTimer("getModel");
|
||||
auto renderer = qSharedPointerCast<EntityTreeRenderer>(args->_renderer);
|
||||
getModel(renderer);
|
||||
|
||||
// Remap textures immediately after loading to avoid flicker
|
||||
remapTextures();
|
||||
}
|
||||
|
||||
if (_model) {
|
||||
if (hasRenderAnimation()) {
|
||||
if (!jointsMapped()) {
|
||||
QStringList modelJointNames = _model->getJointNames();
|
||||
mapJoints(modelJointNames);
|
||||
}
|
||||
}
|
||||
|
||||
_jointDataLock.withWriteLock([&] {
|
||||
getAnimationFrame();
|
||||
|
||||
// relay any inbound joint changes from scripts/animation/network to the model/rig
|
||||
for (int index = 0; index < _localJointRotations.size(); index++) {
|
||||
if (_localJointRotationsDirty[index]) {
|
||||
glm::quat rotation = _localJointRotations[index];
|
||||
_model->setJointRotation(index, true, rotation, 1.0f);
|
||||
_localJointRotationsDirty[index] = false;
|
||||
}
|
||||
}
|
||||
for (int index = 0; index < _localJointTranslations.size(); index++) {
|
||||
if (_localJointTranslationsDirty[index]) {
|
||||
glm::vec3 translation = _localJointTranslations[index];
|
||||
_model->setJointTranslation(index, true, translation, 1.0f);
|
||||
_localJointTranslationsDirty[index] = false;
|
||||
}
|
||||
}
|
||||
});
|
||||
updateModelBounds();
|
||||
}
|
||||
}
|
||||
|
||||
// Enqueue updates for the next frame
|
||||
if (_model) {
|
||||
|
||||
#ifdef WANT_EXTRA_RENDER_DEBUGGING
|
||||
// debugging...
|
||||
gpu::Batch& batch = *args->_batch;
|
||||
_model->renderDebugMeshBoxes(batch);
|
||||
#endif
|
||||
|
||||
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();
|
||||
|
||||
// FIXME: this seems like it could be optimized if we tracked our last known visible state in
|
||||
// the renderable item. As it stands now the model checks it's visible/invisible state
|
||||
// so most of the time we don't do anything in this function.
|
||||
_model->setVisibleInScene(getVisible(), scene);
|
||||
|
||||
// Remap textures for the next frame to avoid flicker
|
||||
remapTextures();
|
||||
|
||||
// update whether the model should be showing collision mesh (this may flag for fixupInScene)
|
||||
bool showingCollisionGeometry = (bool)(args->_debugFlags & (int)RenderArgs::RENDER_DEBUG_HULLS);
|
||||
if (showingCollisionGeometry != _showCollisionGeometry) {
|
||||
ShapeType type = getShapeType();
|
||||
_showCollisionGeometry = showingCollisionGeometry;
|
||||
if (_showCollisionGeometry && type != SHAPE_TYPE_STATIC_MESH && type != SHAPE_TYPE_NONE) {
|
||||
// NOTE: it is OK if _collisionMeshKey is nullptr
|
||||
model::MeshPointer mesh = collisionMeshCache.getMesh(_collisionMeshKey);
|
||||
// NOTE: the model will render the collisionGeometry if it has one
|
||||
_model->setCollisionMesh(mesh);
|
||||
} else {
|
||||
// release mesh
|
||||
if (_collisionMeshKey) {
|
||||
collisionMeshCache.releaseMesh(_collisionMeshKey);
|
||||
}
|
||||
// clear model's collision geometry
|
||||
model::MeshPointer mesh = nullptr;
|
||||
_model->setCollisionMesh(mesh);
|
||||
}
|
||||
}
|
||||
|
||||
if (_model->needsFixupInScene()) {
|
||||
render::PendingChanges pendingChanges;
|
||||
|
||||
_model->removeFromScene(scene, pendingChanges);
|
||||
|
||||
render::Item::Status::Getters statusGetters;
|
||||
makeEntityItemStatusGetters(getThisPointer(), statusGetters);
|
||||
_model->addToScene(scene, pendingChanges, statusGetters);
|
||||
|
||||
scene->enqueuePendingChanges(pendingChanges);
|
||||
}
|
||||
|
||||
auto& currentURL = getParsedModelURL();
|
||||
if (currentURL != _model->getURL()) {
|
||||
// Defer setting the url to the render thread
|
||||
getModel(_myRenderer);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (!hasModel() || (_model && _model->didVisualGeometryRequestFail())) {
|
||||
static glm::vec4 greenColor(0.0f, 1.0f, 0.0f, 1.0f);
|
||||
gpu::Batch& batch = *args->_batch;
|
||||
bool success;
|
||||
|
@ -482,6 +380,109 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
|
|||
batch.setModelTransform(shapeTransform); // we want to include the scale as well
|
||||
DependencyManager::get<GeometryCache>()->renderWireCubeInstance(batch, greenColor);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Prepare the current frame
|
||||
{
|
||||
if (!_model || _needsModelReload) {
|
||||
// TODO: this getModel() appears to be about 3% of model render time. We should optimize
|
||||
PerformanceTimer perfTimer("getModel");
|
||||
auto renderer = qSharedPointerCast<EntityTreeRenderer>(args->_renderer);
|
||||
getModel(renderer);
|
||||
|
||||
// Remap textures immediately after loading to avoid flicker
|
||||
remapTextures();
|
||||
}
|
||||
|
||||
if (_model) {
|
||||
if (hasRenderAnimation()) {
|
||||
if (!jointsMapped()) {
|
||||
QStringList modelJointNames = _model->getJointNames();
|
||||
mapJoints(modelJointNames);
|
||||
}
|
||||
}
|
||||
|
||||
_jointDataLock.withWriteLock([&] {
|
||||
getAnimationFrame();
|
||||
|
||||
// relay any inbound joint changes from scripts/animation/network to the model/rig
|
||||
for (int index = 0; index < _localJointRotations.size(); index++) {
|
||||
if (_localJointRotationsDirty[index]) {
|
||||
glm::quat rotation = _localJointRotations[index];
|
||||
_model->setJointRotation(index, true, rotation, 1.0f);
|
||||
_localJointRotationsDirty[index] = false;
|
||||
}
|
||||
}
|
||||
for (int index = 0; index < _localJointTranslations.size(); index++) {
|
||||
if (_localJointTranslationsDirty[index]) {
|
||||
glm::vec3 translation = _localJointTranslations[index];
|
||||
_model->setJointTranslation(index, true, translation, 1.0f);
|
||||
_localJointTranslationsDirty[index] = false;
|
||||
}
|
||||
}
|
||||
});
|
||||
updateModelBounds();
|
||||
}
|
||||
}
|
||||
|
||||
// Enqueue updates for the next frame
|
||||
if (_model) {
|
||||
|
||||
#ifdef WANT_EXTRA_RENDER_DEBUGGING
|
||||
// debugging...
|
||||
gpu::Batch& batch = *args->_batch;
|
||||
_model->renderDebugMeshBoxes(batch);
|
||||
#endif
|
||||
|
||||
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();
|
||||
|
||||
// FIXME: this seems like it could be optimized if we tracked our last known visible state in
|
||||
// the renderable item. As it stands now the model checks it's visible/invisible state
|
||||
// so most of the time we don't do anything in this function.
|
||||
_model->setVisibleInScene(getVisible(), scene);
|
||||
|
||||
// Remap textures for the next frame to avoid flicker
|
||||
remapTextures();
|
||||
|
||||
// update whether the model should be showing collision mesh (this may flag for fixupInScene)
|
||||
bool showingCollisionGeometry = (bool)(args->_debugFlags & (int)RenderArgs::RENDER_DEBUG_HULLS);
|
||||
if (showingCollisionGeometry != _showCollisionGeometry) {
|
||||
ShapeType type = getShapeType();
|
||||
_showCollisionGeometry = showingCollisionGeometry;
|
||||
if (_showCollisionGeometry && type != SHAPE_TYPE_STATIC_MESH && type != SHAPE_TYPE_NONE) {
|
||||
// NOTE: it is OK if _collisionMeshKey is nullptr
|
||||
model::MeshPointer mesh = collisionMeshCache.getMesh(_collisionMeshKey);
|
||||
// NOTE: the model will render the collisionGeometry if it has one
|
||||
_model->setCollisionMesh(mesh);
|
||||
} else {
|
||||
// release mesh
|
||||
if (_collisionMeshKey) {
|
||||
collisionMeshCache.releaseMesh(_collisionMeshKey);
|
||||
}
|
||||
// clear model's collision geometry
|
||||
model::MeshPointer mesh = nullptr;
|
||||
_model->setCollisionMesh(mesh);
|
||||
}
|
||||
}
|
||||
|
||||
if (_model->needsFixupInScene()) {
|
||||
render::PendingChanges pendingChanges;
|
||||
|
||||
_model->removeFromScene(scene, pendingChanges);
|
||||
|
||||
render::Item::Status::Getters statusGetters;
|
||||
makeEntityItemStatusGetters(getThisPointer(), statusGetters);
|
||||
_model->addToScene(scene, pendingChanges, statusGetters);
|
||||
|
||||
scene->enqueuePendingChanges(pendingChanges);
|
||||
}
|
||||
|
||||
auto& currentURL = getParsedModelURL();
|
||||
if (currentURL != _model->getURL()) {
|
||||
// Defer setting the url to the render thread
|
||||
getModel(_myRenderer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -587,6 +588,10 @@ EntityItemProperties RenderableModelEntityItem::getProperties(EntityPropertyFlag
|
|||
return properties;
|
||||
}
|
||||
|
||||
bool RenderableModelEntityItem::supportsDetailedRayIntersection() const {
|
||||
return _model && _model->isLoaded();
|
||||
}
|
||||
|
||||
bool RenderableModelEntityItem::findDetailedRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
||||
bool& keepSearching, OctreeElementPointer& element, float& distance, BoxFace& face,
|
||||
glm::vec3& surfaceNormal, void** intersectedObject, bool precisionPicking) const {
|
||||
|
@ -807,6 +812,13 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& shapeInfo) {
|
|||
auto& meshes = _model->getGeometry()->getMeshes();
|
||||
int32_t numMeshes = (int32_t)(meshes.size());
|
||||
|
||||
const int MAX_ALLOWED_MESH_COUNT = 500;
|
||||
if (numMeshes > MAX_ALLOWED_MESH_COUNT) {
|
||||
// too many will cause the deadlock timer to throw...
|
||||
shapeInfo.setParams(SHAPE_TYPE_BOX, 0.5f * dimensions);
|
||||
return;
|
||||
}
|
||||
|
||||
ShapeInfo::PointCollection& pointCollection = shapeInfo.getPointCollection();
|
||||
pointCollection.clear();
|
||||
if (type == SHAPE_TYPE_SIMPLE_COMPOUND) {
|
||||
|
|
|
@ -46,7 +46,7 @@ public:
|
|||
|
||||
void updateModelBounds();
|
||||
virtual void render(RenderArgs* args) override;
|
||||
virtual bool supportsDetailedRayIntersection() const override { return true; }
|
||||
virtual bool supportsDetailedRayIntersection() const override;
|
||||
virtual bool findDetailedRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
||||
bool& keepSearching, OctreeElementPointer& element, float& distance,
|
||||
BoxFace& face, glm::vec3& surfaceNormal,
|
||||
|
|
|
@@ -67,9 +67,9 @@ void FileCache::initialize() {

// load persisted files
foreach(QString filename, files) {
const Key key = filename.section('.', 0, 1).toStdString();
const Key key = filename.section('.', 0, 0).toStdString();
const std::string filepath = dir.filePath(filename).toStdString();
const size_t length = std::ifstream(filepath, std::ios::binary | std::ios::ate).tellg();
const size_t length = QFileInfo(filepath.c_str()).size();
addFile(Metadata(key, length), filepath);
}
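Two behavior changes are visible here: the cache key becomes everything before the first '.' of the persisted filename rather than the first two dot-separated fields, and the file length now comes from QFileInfo instead of seeking an ifstream to its end. A small illustrative sketch of the new extraction; the FileRecord shape is a simplified stand-in for the cache's Metadata type, assumed for the example.

```cpp
#include <QFileInfo>
#include <QString>
#include <string>

// Simplified stand-in for the cache's metadata (assumed shape).
struct FileRecord {
    std::string key;
    size_t length;
};

// "ab12cd.blob" -> key "ab12cd"; size read straight from filesystem metadata.
FileRecord describePersistedFile(const QString& filename, const QString& filepath) {
    FileRecord record;
    record.key = filename.section('.', 0, 0).toStdString();
    record.length = static_cast<size_t>(QFileInfo(filepath).size());
    return record;
}
```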
@ -142,6 +142,6 @@ int OctreeQuery::parseData(ReceivedMessage& message) {
|
|||
}
|
||||
|
||||
glm::vec3 OctreeQuery::calculateCameraDirection() const {
|
||||
glm::vec3 direction = glm::vec3(_cameraOrientation * glm::vec4(IDENTITY_FRONT, 0.0f));
|
||||
glm::vec3 direction = glm::vec3(_cameraOrientation * glm::vec4(IDENTITY_FORWARD, 0.0f));
|
||||
return direction;
|
||||
}
|
||||
|
|
|
@ -208,6 +208,7 @@ void PhysicalEntitySimulation::getObjectsToAddToPhysics(VectorOfMotionStates& re
|
|||
assert(!entity->getPhysicsInfo());
|
||||
if (entity->isDead()) {
|
||||
prepareEntityForDelete(entity);
|
||||
entityItr = _entitiesToAddToPhysics.erase(entityItr);
|
||||
} else if (!entity->shouldBePhysical()) {
|
||||
// this entity should no longer be on the internal _entitiesToAddToPhysics
|
||||
entityItr = _entitiesToAddToPhysics.erase(entityItr);
|
||||
|
|
|
@@ -27,9 +27,9 @@ void LightStage::Shadow::setKeylightFrustum(const ViewFrustum& viewFrustum, floa
const auto& direction = glm::normalize(_light->getDirection());
glm::quat orientation;
if (direction == IDENTITY_UP) {
orientation = glm::quat(glm::mat3(-IDENTITY_RIGHT, IDENTITY_FRONT, -IDENTITY_UP));
orientation = glm::quat(glm::mat3(-IDENTITY_RIGHT, IDENTITY_FORWARD, -IDENTITY_UP));
} else if (direction == -IDENTITY_UP) {
orientation = glm::quat(glm::mat3(IDENTITY_RIGHT, IDENTITY_FRONT, IDENTITY_UP));
orientation = glm::quat(glm::mat3(IDENTITY_RIGHT, IDENTITY_FORWARD, IDENTITY_UP));
} else {
auto side = glm::normalize(glm::cross(direction, IDENTITY_UP));
auto up = glm::normalize(glm::cross(side, direction));
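The keylight orientation is built from an orthonormal basis: special-cased when the light points straight up or down, otherwise derived from cross products against the world up vector. A sketch of the general branch under the assumption that the basis is packed into a mat3 as (side, up, -direction) before conversion; that packing order is an assumption, not something the hunk above shows.

```cpp
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

static const glm::vec3 IDENTITY_UP(0.0f, 1.0f, 0.0f); // assumed world up

// Build an orientation whose -Z axis looks along `direction`. Assumes `direction`
// is normalized and not parallel to IDENTITY_UP (the caller handles those cases).
glm::quat orientationAlong(const glm::vec3& direction) {
    glm::vec3 side = glm::normalize(glm::cross(direction, IDENTITY_UP));
    glm::vec3 up = glm::normalize(glm::cross(side, direction));
    return glm::quat_cast(glm::mat3(side, up, -direction));
}
```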
@ -176,11 +176,11 @@ void Model::setOffset(const glm::vec3& offset) {
|
|||
}
|
||||
|
||||
void Model::calculateTextureInfo() {
|
||||
if (!_hasCalculatedTextureInfo && isLoaded() && getGeometry()->areTexturesLoaded() && !_modelMeshRenderItems.isEmpty()) {
|
||||
if (!_hasCalculatedTextureInfo && isLoaded() && getGeometry()->areTexturesLoaded() && !_modelMeshRenderItemsMap.isEmpty()) {
|
||||
size_t textureSize = 0;
|
||||
int textureCount = 0;
|
||||
bool allTexturesLoaded = true;
|
||||
foreach(auto renderItem, _modelMeshRenderItemsSet) {
|
||||
foreach(auto renderItem, _modelMeshRenderItems) {
|
||||
auto meshPart = renderItem.get();
|
||||
textureSize += meshPart->getMaterialTextureSize();
|
||||
textureCount += meshPart->getMaterialTextureCount();
|
||||
|
@ -236,7 +236,7 @@ void Model::updateRenderItems() {
|
|||
uint32_t deleteGeometryCounter = self->_deleteGeometryCounter;
|
||||
|
||||
render::PendingChanges pendingChanges;
|
||||
foreach (auto itemID, self->_modelMeshRenderItems.keys()) {
|
||||
foreach (auto itemID, self->_modelMeshRenderItemsMap.keys()) {
|
||||
pendingChanges.updateItem<ModelMeshPartPayload>(itemID, [deleteGeometryCounter](ModelMeshPartPayload& data) {
|
||||
if (data._model && data._model->isLoaded()) {
|
||||
// Ensure the model geometry was not reset between frames
|
||||
|
@ -259,7 +259,7 @@ void Model::updateRenderItems() {
|
|||
Transform collisionMeshOffset;
|
||||
collisionMeshOffset.setIdentity();
|
||||
Transform modelTransform = self->getTransform();
|
||||
foreach (auto itemID, self->_collisionRenderItems.keys()) {
|
||||
foreach(auto itemID, self->_collisionRenderItemsMap.keys()) {
|
||||
pendingChanges.updateItem<MeshPartPayload>(itemID, [modelTransform, collisionMeshOffset](MeshPartPayload& data) {
|
||||
// update the model transform for this render item.
|
||||
data.updateTransform(modelTransform, collisionMeshOffset);
|
||||
|
@ -539,11 +539,11 @@ void Model::setVisibleInScene(bool newValue, std::shared_ptr<render::Scene> scen
|
|||
_isVisible = newValue;
|
||||
|
||||
render::PendingChanges pendingChanges;
|
||||
foreach (auto item, _modelMeshRenderItems.keys()) {
|
||||
pendingChanges.resetItem(item, _modelMeshRenderItems[item]);
|
||||
foreach (auto item, _modelMeshRenderItemsMap.keys()) {
|
||||
pendingChanges.resetItem(item, _modelMeshRenderItemsMap[item]);
|
||||
}
|
||||
foreach (auto item, _collisionRenderItems.keys()) {
|
||||
pendingChanges.resetItem(item, _collisionRenderItems[item]);
|
||||
foreach(auto item, _collisionRenderItemsMap.keys()) {
pendingChanges.resetItem(item, _collisionRenderItemsMap[item]);
}
scene->enqueuePendingChanges(pendingChanges);
}

@ -555,11 +555,11 @@ void Model::setLayeredInFront(bool layered, std::shared_ptr<render::Scene> scene
_isLayeredInFront = layered;
render::PendingChanges pendingChanges;
foreach(auto item, _modelMeshRenderItems.keys()) {
pendingChanges.resetItem(item, _modelMeshRenderItems[item]);
foreach(auto item, _modelMeshRenderItemsMap.keys()) {
pendingChanges.resetItem(item, _modelMeshRenderItemsMap[item]);
}
foreach(auto item, _collisionRenderItems.keys()) {
pendingChanges.resetItem(item, _collisionRenderItems[item]);
foreach(auto item, _collisionRenderItemsMap.keys()) {
pendingChanges.resetItem(item, _collisionRenderItemsMap[item]);
}
scene->enqueuePendingChanges(pendingChanges);
}

@ -576,39 +576,39 @@ bool Model::addToScene(std::shared_ptr<render::Scene> scene,
bool somethingAdded = false;
if (_collisionGeometry) {
if (_collisionRenderItems.empty()) {
foreach (auto renderItem, _collisionRenderItemsSet) {
foreach (auto renderItem, _collisionRenderItems) {
auto item = scene->allocateID();
auto renderPayload = std::make_shared<MeshPartPayload::Payload>(renderItem);
if (statusGetters.size()) {
if (_collisionRenderItems.empty() && statusGetters.size()) {
renderPayload->addStatusGetters(statusGetters);
}
pendingChanges.resetItem(item, renderPayload);
_collisionRenderItems.insert(item, renderPayload);
_collisionRenderItemsMap.insert(item, renderPayload);
}
somethingAdded = !_collisionRenderItems.empty();
}
} else {
if (_modelMeshRenderItems.empty()) {
if (_modelMeshRenderItemsMap.empty()) {
bool hasTransparent = false;
size_t verticesCount = 0;
foreach(auto renderItem, _modelMeshRenderItemsSet) {
foreach(auto renderItem, _modelMeshRenderItems) {
auto item = scene->allocateID();
auto renderPayload = std::make_shared<ModelMeshPartPayload::Payload>(renderItem);
if (statusGetters.size()) {
if (_modelMeshRenderItemsMap.empty() && statusGetters.size()) {
renderPayload->addStatusGetters(statusGetters);
}
pendingChanges.resetItem(item, renderPayload);
hasTransparent = hasTransparent || renderItem.get()->getShapeKey().isTranslucent();
verticesCount += renderItem.get()->getVerticesCount();
_modelMeshRenderItems.insert(item, renderPayload);
_modelMeshRenderItemsMap.insert(item, renderPayload);
_modelMeshRenderItemIDs.emplace_back(item);
}
somethingAdded = !_modelMeshRenderItems.empty();
somethingAdded = !_modelMeshRenderItemsMap.empty();
_renderInfoVertexCount = verticesCount;
_renderInfoDrawCalls = _modelMeshRenderItems.count();
_renderInfoDrawCalls = _modelMeshRenderItemsMap.count();
_renderInfoHasTransparent = hasTransparent;
}
}

@ -623,18 +623,18 @@ bool Model::addToScene(std::shared_ptr<render::Scene> scene,
}
void Model::removeFromScene(std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
foreach (auto item, _modelMeshRenderItems.keys()) {
foreach (auto item, _modelMeshRenderItemsMap.keys()) {
pendingChanges.removeItem(item);
}
_modelMeshRenderItemIDs.clear();
_modelMeshRenderItemsMap.clear();
_modelMeshRenderItems.clear();
_modelMeshRenderItemsSet.clear();
foreach (auto item, _collisionRenderItems.keys()) {
foreach(auto item, _collisionRenderItemsMap.keys()) {
pendingChanges.removeItem(item);
}
_collisionRenderItems.clear();
_collisionRenderItemsSet.clear();
_collisionRenderItems.clear();
_addedToScene = false;
_renderInfoVertexCount = 0;

@ -1052,8 +1052,8 @@ void Model::updateRig(float deltaTime, glm::mat4 parentTransform) {
}
void Model::computeMeshPartLocalBounds() {
for (auto& part : _modelMeshRenderItemsSet) {
assert(part->_meshIndex < _modelMeshRenderItemsSet.size());
for (auto& part : _modelMeshRenderItems) {
assert(part->_meshIndex < _modelMeshRenderItems.size());
const Model::MeshState& state = _meshStates.at(part->_meshIndex);
part->computeAdjustedLocalBound(state.clusterMatrices);
}

@ -1167,7 +1167,7 @@ AABox Model::getRenderableMeshBound() const {
} else {
// Build a bound using the last known bound from all the renderItems.
AABox totalBound;
for (auto& renderItem : _modelMeshRenderItemsSet) {
for (auto& renderItem : _modelMeshRenderItems) {
totalBound += renderItem->getBound();
}
return totalBound;

@ -1180,11 +1180,11 @@ const render::ItemIDs& Model::fetchRenderItemIDs() const {
void Model::createRenderItemSet() {
if (_collisionGeometry) {
if (_collisionRenderItemsSet.empty()) {
if (_collisionRenderItems.empty()) {
createCollisionRenderItemSet();
}
} else {
if (_modelMeshRenderItemsSet.empty()) {
if (_modelMeshRenderItems.empty()) {
createVisibleRenderItemSet();
}
}

@ -1201,9 +1201,9 @@ void Model::createVisibleRenderItemSet() {
}
// We should not have any existing renderItems if we enter this section of code
Q_ASSERT(_modelMeshRenderItemsSet.isEmpty());
Q_ASSERT(_modelMeshRenderItems.isEmpty());
_modelMeshRenderItemsSet.clear();
_modelMeshRenderItems.clear();
Transform transform;
transform.setTranslation(_translation);

@ -1225,7 +1225,7 @@ void Model::createVisibleRenderItemSet() {
// Create the render payloads
int numParts = (int)mesh->getNumParts();
for (int partIndex = 0; partIndex < numParts; partIndex++) {
_modelMeshRenderItemsSet << std::make_shared<ModelMeshPartPayload>(this, i, partIndex, shapeID, transform, offset);
_modelMeshRenderItems << std::make_shared<ModelMeshPartPayload>(this, i, partIndex, shapeID, transform, offset);
shapeID++;
}
}

@ -1241,7 +1241,7 @@ void Model::createCollisionRenderItemSet() {
const auto& meshes = _collisionGeometry->getMeshes();
// We should not have any existing renderItems if we enter this section of code
Q_ASSERT(_collisionRenderItemsSet.isEmpty());
Q_ASSERT(_collisionRenderItems.isEmpty());
Transform identity;
identity.setIdentity();

@ -1262,7 +1262,7 @@ void Model::createCollisionRenderItemSet() {
model::MaterialPointer& material = _collisionMaterials[partIndex % NUM_COLLISION_HULL_COLORS];
auto payload = std::make_shared<MeshPartPayload>(mesh, partIndex, material);
payload->updateTransform(identity, offset);
_collisionRenderItemsSet << payload;
_collisionRenderItems << payload;
}
}
}

@ -1283,28 +1283,28 @@ bool Model::initWhenReady(render::ScenePointer scene) {
bool addedPendingChanges = false;
if (_collisionGeometry) {
foreach (auto renderItem, _collisionRenderItemsSet) {
foreach (auto renderItem, _collisionRenderItems) {
auto item = scene->allocateID();
auto renderPayload = std::make_shared<MeshPartPayload::Payload>(renderItem);
_collisionRenderItems.insert(item, renderPayload);
_collisionRenderItemsMap.insert(item, renderPayload);
pendingChanges.resetItem(item, renderPayload);
}
addedPendingChanges = !_collisionRenderItems.empty();
} else {
bool hasTransparent = false;
size_t verticesCount = 0;
foreach (auto renderItem, _modelMeshRenderItemsSet) {
foreach (auto renderItem, _modelMeshRenderItems) {
auto item = scene->allocateID();
auto renderPayload = std::make_shared<ModelMeshPartPayload::Payload>(renderItem);
hasTransparent = hasTransparent || renderItem.get()->getShapeKey().isTranslucent();
verticesCount += renderItem.get()->getVerticesCount();
_modelMeshRenderItems.insert(item, renderPayload);
_modelMeshRenderItemsMap.insert(item, renderPayload);
pendingChanges.resetItem(item, renderPayload);
}
addedPendingChanges = !_modelMeshRenderItems.empty();
addedPendingChanges = !_modelMeshRenderItemsMap.empty();
_renderInfoVertexCount = verticesCount;
_renderInfoDrawCalls = _modelMeshRenderItems.count();
_renderInfoDrawCalls = _modelMeshRenderItemsMap.count();
_renderInfoHasTransparent = hasTransparent;
}
_addedToScene = addedPendingChanges;
@ -248,7 +248,7 @@ public:
const MeshState& getMeshState(int index) { return _meshStates.at(index); }
uint32_t getGeometryCounter() const { return _deleteGeometryCounter; }
const QMap<render::ItemID, render::PayloadPointer>& getRenderItems() const { return _modelMeshRenderItems; }
const QMap<render::ItemID, render::PayloadPointer>& getRenderItems() const { return _modelMeshRenderItemsMap; }
void renderDebugMeshBoxes(gpu::Batch& batch);

@ -373,11 +373,11 @@ protected:
static AbstractViewStateInterface* _viewState;
QSet<std::shared_ptr<MeshPartPayload>> _collisionRenderItemsSet;
QMap<render::ItemID, render::PayloadPointer> _collisionRenderItems;
QVector<std::shared_ptr<MeshPartPayload>> _collisionRenderItems;
QMap<render::ItemID, render::PayloadPointer> _collisionRenderItemsMap;
QSet<std::shared_ptr<ModelMeshPartPayload>> _modelMeshRenderItemsSet;
QMap<render::ItemID, render::PayloadPointer> _modelMeshRenderItems;
QVector<std::shared_ptr<ModelMeshPartPayload>> _modelMeshRenderItems;
QMap<render::ItemID, render::PayloadPointer> _modelMeshRenderItemsMap;
render::ItemIDs _modelMeshRenderItemIDs;
@ -54,7 +54,7 @@ glm::mat4 Mat4::inverse(const glm::mat4& m) const {
return glm::inverse(m);
}
glm::vec3 Mat4::getFront(const glm::mat4& m) const {
glm::vec3 Mat4::getForward(const glm::mat4& m) const {
return glm::vec3(-m[0][2], -m[1][2], -m[2][2]);
}

@ -37,7 +37,9 @@ public slots:
glm::mat4 inverse(const glm::mat4& m) const;
glm::vec3 getFront(const glm::mat4& m) const;
// redundant, calls getForward which better describes the returned vector as a direction
glm::vec3 getFront(const glm::mat4& m) const { return getForward(m); }
glm::vec3 getForward(const glm::mat4& m) const;
glm::vec3 getRight(const glm::mat4& m) const;
glm::vec3 getUp(const glm::mat4& m) const;
@ -68,7 +68,7 @@ glm::quat Quat::inverse(const glm::quat& q) {
return glm::inverse(q);
}
glm::vec3 Quat::getFront(const glm::quat& orientation) {
glm::vec3 Quat::getForward(const glm::quat& orientation) {
return orientation * Vectors::FRONT;
}

@ -45,7 +45,9 @@ public slots:
glm::quat fromPitchYawRollDegrees(float pitch, float yaw, float roll); // degrees
glm::quat fromPitchYawRollRadians(float pitch, float yaw, float roll); // radians
glm::quat inverse(const glm::quat& q);
glm::vec3 getFront(const glm::quat& orientation);
// redundant, calls getForward which better describes the returned vector as a direction
glm::vec3 getFront(const glm::quat& orientation) { return getForward(orientation); }
glm::vec3 getForward(const glm::quat& orientation);
glm::vec3 getRight(const glm::quat& orientation);
glm::vec3 getUp(const glm::quat& orientation);
glm::vec3 safeEulerAngles(const glm::quat& orientation); // degrees
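Note: the rename is script-facing. Quat.getFront and Mat4.getFront remain as thin wrappers, so existing scripts keep working, while new code should call getForward. A minimal sketch of a migrated call site; the 2 m offset and the box entity are arbitrary example values, not part of this commit:

// before: Vec3.sum(MyAvatar.position, Vec3.multiply(2, Quat.getFront(MyAvatar.orientation)))
// after: same result, clearer name; getForward returns the unit direction the avatar faces
var forward = Quat.getForward(MyAvatar.orientation);
var spawn = Vec3.sum(MyAvatar.position, Vec3.multiply(2, forward));
Entities.addEntity({ type: "Box", position: spawn, dimensions: { x: 0.2, y: 0.2, z: 0.2 } });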
@ -50,7 +50,7 @@ using glm::quat;
// this is where the coordinate system is represented
const glm::vec3 IDENTITY_RIGHT = glm::vec3( 1.0f, 0.0f, 0.0f);
const glm::vec3 IDENTITY_UP = glm::vec3( 0.0f, 1.0f, 0.0f);
const glm::vec3 IDENTITY_FRONT = glm::vec3( 0.0f, 0.0f,-1.0f);
const glm::vec3 IDENTITY_FORWARD = glm::vec3( 0.0f, 0.0f,-1.0f);
glm::quat safeMix(const glm::quat& q1, const glm::quat& q2, float alpha);

@ -31,7 +31,7 @@ void ViewFrustum::setOrientation(const glm::quat& orientationAsQuaternion) {
_orientation = orientationAsQuaternion;
_right = glm::vec3(orientationAsQuaternion * glm::vec4(IDENTITY_RIGHT, 0.0f));
_up = glm::vec3(orientationAsQuaternion * glm::vec4(IDENTITY_UP, 0.0f));
_direction = glm::vec3(orientationAsQuaternion * glm::vec4(IDENTITY_FRONT, 0.0f));
_direction = glm::vec3(orientationAsQuaternion * glm::vec4(IDENTITY_FORWARD, 0.0f));
_view = glm::translate(mat4(), _position) * glm::mat4_cast(_orientation);
}

@ -153,7 +153,7 @@ private:
glm::quat _orientation; // orientation in world-frame
// calculated from orientation
glm::vec3 _direction = IDENTITY_FRONT;
glm::vec3 _direction = IDENTITY_FORWARD;
glm::vec3 _up = IDENTITY_UP;
glm::vec3 _right = IDENTITY_RIGHT;
@ -20,6 +20,8 @@
#include <PerfStat.h>
#include <PathUtils.h>
#include <OVR_CAPI.h>
#include "OculusHelpers.h"
Q_DECLARE_LOGGING_CATEGORY(oculus)

@ -42,26 +44,33 @@ bool OculusControllerManager::activate() {
}
Q_ASSERT(_session);
// register with UserInputMapper
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
checkForConnectedDevices();
return true;
}
void OculusControllerManager::checkForConnectedDevices() {
if (_touch && _remote) {
return;
}
unsigned int controllerConnected = ovr_GetConnectedControllerTypes(_session);
if ((controllerConnected & ovrControllerType_Remote) == ovrControllerType_Remote) {
if (!_remote && (controllerConnected & ovrControllerType_Remote) == ovrControllerType_Remote) {
if (OVR_SUCCESS(ovr_GetInputState(_session, ovrControllerType_Remote, &_inputState))) {
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
_remote = std::make_shared<RemoteDevice>(*this);
userInputMapper->registerDevice(_remote);
}
}
if ((controllerConnected & ovrControllerType_Touch) != 0) {
if (!_touch && (controllerConnected & ovrControllerType_Touch) != 0) {
if (OVR_SUCCESS(ovr_GetInputState(_session, ovrControllerType_Touch, &_inputState))) {
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
_touch = std::make_shared<TouchDevice>(*this);
userInputMapper->registerDevice(_touch);
}
}
return true;
}
void OculusControllerManager::deactivate() {

@ -85,6 +94,8 @@ void OculusControllerManager::deactivate() {
void OculusControllerManager::pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {
PerformanceTimer perfTimer("OculusControllerManager::TouchDevice::update");
checkForConnectedDevices();
if (_touch) {
if (OVR_SUCCESS(ovr_GetInputState(_session, ovrControllerType_Touch, &_inputState))) {
_touch->update(deltaTime, inputCalibrationData);

@ -91,6 +91,8 @@ private:
friend class OculusControllerManager;
};
void checkForConnectedDevices();
ovrSession _session { nullptr };
ovrInputState _inputState {};
RemoteDevice::Pointer _remote;
@ -311,3 +311,6 @@ clamp = function(val, min, max){
return Math.max(min, Math.min(max, val))
}

easeIn = function(t) {
return Math.pow(t / 1, 5);
}
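easeIn is a quintic ease (t to the fifth power; the "/ 1" divisor is just a normalization placeholder), so the value stays near zero for small t and accelerates toward 1. A rough, hypothetical sketch of driving an animation parameter with it, assuming utils.js has been pulled in via Script.include:

// hypothetical usage sketch; easeIn and clamp come from utils.js
var t = 0;
Script.update.connect(function (dt) {
    t = clamp(t + dt, 0, 1);   // advance and clamp the tween parameter
    var eased = easeIn(t);     // ramps from 0 to 1, starting slowly
    // use `eased` to fade an overlay, scale an entity, etc.
});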
@ -4,7 +4,7 @@ var uuid = Entities.addEntity({
shape: "Icosahedron",
dimensions: Vec3.HALF,
script: Script.resolvePath('../../tutorials/entity_scripts/ambientSound.js'),
position: Vec3.sum(Vec3.multiply(5, Quat.getFront(MyAvatar.orientation)), MyAvatar.position),
position: Vec3.sum(Vec3.multiply(5, Quat.getForward(MyAvatar.orientation)), MyAvatar.position),
userData: JSON.stringify({
soundURL: WAVE,
maxVolume: 0.1,

@ -2,7 +2,7 @@
orientation = Quat.safeEulerAngles(orientation);
orientation.x = 0;
orientation = Quat.fromVec3Degrees(orientation);
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(orientation)));
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getForward(orientation)));
// Math.random ensures no caching of script
var SCRIPT_URL = Script.resolvePath("myEntityScript.js")

@ -2,7 +2,7 @@
orientation = Quat.safeEulerAngles(orientation);
orientation.x = 0;
orientation = Quat.fromVec3Degrees(orientation);
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(orientation)));
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getForward(orientation)));
// Math.random ensures no caching of script
var SCRIPT_URL = Script.resolvePath("batonSoundTestEntityScript.js")

@ -4,7 +4,7 @@ var DIV = NUM_ENTITIES / Math.PI / 2;
var PASS_SCRIPT_URL = Script.resolvePath('entityServerStampedeTest-entity.js');
var FAIL_SCRIPT_URL = Script.resolvePath('entityStampedeTest-entity-fail.js');
var origin = Vec3.sum(MyAvatar.position, Vec3.multiply(5, Quat.getFront(MyAvatar.orientation)));
var origin = Vec3.sum(MyAvatar.position, Vec3.multiply(5, Quat.getForward(MyAvatar.orientation)));
origin.y += HMD.eyeHeight;
var uuids = [];

@ -4,7 +4,7 @@ var DIV = NUM_ENTITIES / Math.PI / 2;
var PASS_SCRIPT_URL = Script.resolvePath('').replace('.js', '-entity.js');
var FAIL_SCRIPT_URL = Script.resolvePath('').replace('.js', '-entity-fail.js');
var origin = Vec3.sum(MyAvatar.position, Vec3.multiply(5, Quat.getFront(MyAvatar.orientation)));
var origin = Vec3.sum(MyAvatar.position, Vec3.multiply(5, Quat.getForward(MyAvatar.orientation)));
origin.y += HMD.eyeHeight;
var uuids = [];

@ -19,7 +19,7 @@ var WIDTH = MAX_DIM * NUM_SPHERES;
var entities = [];
var right = Quat.getRight(Camera.orientation);
// Starting position will be 30 meters in front of the camera
var position = Vec3.sum(Camera.position, Vec3.multiply(30, Quat.getFront(Camera.orientation)));
var position = Vec3.sum(Camera.position, Vec3.multiply(30, Quat.getForward(Camera.orientation)));
position = Vec3.sum(position, Vec3.multiply(-WIDTH/2, right));
for (var i = 0; i < NUM_SPHERES; ++i) {

@ -141,12 +141,12 @@ function testInverse() {
assert(mat4FuzzyEqual(IDENTITY, Mat4.multiply(test2, Mat4.inverse(test2))));
}
function testFront() {
function testForward() {
var test0 = IDENTITY;
assert(mat4FuzzyEqual({x: 0, y: 0, z: -1}, Mat4.getFront(test0)));
assert(mat4FuzzyEqual({x: 0, y: 0, z: -1}, Mat4.getForward(test0)));
var test1 = Mat4.createFromScaleRotAndTrans(ONE_HALF, ROT_Y_180, ONE_TWO_THREE);
assert(mat4FuzzyEqual({x: 0, y: 0, z: 1}, Mat4.getFront(test1)));
assert(mat4FuzzyEqual({x: 0, y: 0, z: 1}, Mat4.getForward(test1)));
}
function testMat4() {

@ -157,7 +157,7 @@ function testMat4() {
testTransformPoint();
testTransformVector();
testInverse();
testFront();
testForward();
print("MAT4 TEST complete! (" + (testCount - failureCount) + "/" + testCount + ") tests passed!");
}

@ -43,7 +43,7 @@ var HOW_FAR_UP = RANGE / 1.5; // higher (for uneven ground) above range/2 (for
var totalCreated = 0;
var offset = Vec3.sum(Vec3.multiply(HOW_FAR_UP, Vec3.UNIT_Y),
Vec3.multiply(HOW_FAR_IN_FRONT_OF_ME, Quat.getFront(Camera.orientation)));
Vec3.multiply(HOW_FAR_IN_FRONT_OF_ME, Quat.getForward(Camera.orientation)));
var center = Vec3.sum(MyAvatar.position, offset);
function randomVector(range) {

@ -20,9 +20,9 @@ orientation = Quat.safeEulerAngles(orientation);
orientation.x = 0;
orientation = Quat.fromVec3Degrees(orientation);
var centerUp = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(orientation)));
var centerUp = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getForward(orientation)));
centerUp.y += 0.5;
var centerDown = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(orientation)));
var centerDown = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getForward(orientation)));
centerDown.y -= 0.5;
var ENTITY_SHADER_URL = "https://s3-us-west-1.amazonaws.com/hifi-content/eric/shaders/uniformTest.fs";

@ -15,7 +15,7 @@ MyAvatar.orientation = Quat.fromPitchYawRollDegrees(0, 0, 0);
orientation = Quat.safeEulerAngles(MyAvatar.orientation);
orientation.x = 0;
orientation = Quat.fromVec3Degrees(orientation);
var tablePosition = Vec3.sum(MyAvatar.position, Quat.getFront(orientation));
var tablePosition = Vec3.sum(MyAvatar.position, Quat.getForward(orientation));
tablePosition.y += 0.5;

@ -12,7 +12,7 @@ var UPDATE_HZ = 60; // standard script update rate
var UPDATE_INTERVAL = 1000/UPDATE_HZ; // standard script update interval
var UPDATE_WORK_EFFORT = 0; // 1000 is light work, 1000000 ~= 30ms
var basePosition = Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()));
var basePosition = Vec3.sum(Camera.getPosition(), Quat.getForward(Camera.getOrientation()));
var timerBox = Entities.addEntity(
{ type: "Box",

@ -1,7 +1,7 @@
describe('Entity', function() {
var center = Vec3.sum(
MyAvatar.position,
Vec3.multiply(3, Quat.getFront(Camera.getOrientation()))
Vec3.multiply(3, Quat.getForward(Camera.getOrientation()))
);
var boxEntity;
var boxProps = {

@ -24,10 +24,10 @@ var boxZAxis, boxYAxis;
var prevThumbDown = false;
function init() {
boxPosition = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(Camera.getOrientation())));
var front = Quat.getFront(Camera.getOrientation());
boxZAxis = Vec3.normalize(Vec3.cross(front, Y_AXIS));
boxYAxis = Vec3.normalize(Vec3.cross(boxZAxis, front));
boxPosition = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getForward(Camera.getOrientation())));
var forward = Quat.getForward(Camera.getOrientation());
boxZAxis = Vec3.normalize(Vec3.cross(forward, Y_AXIS));
boxYAxis = Vec3.normalize(Vec3.cross(boxZAxis, forward));
boxEntity = Entities.addEntity({
type: "Box",
@ -8,12 +8,13 @@
var PhotoBooth = {};
PhotoBooth.init = function () {
var success = Clipboard.importEntities(PHOTOBOOTH_SETUP_JSON_URL);
var frontFactor = 10;
var frontUnitVec = Vec3.normalize(Quat.getFront(MyAvatar.orientation));
var frontOffset = Vec3.multiply(frontUnitVec,frontFactor);
var forwardFactor = 10;
var forwardUnitVector = Vec3.normalize(Quat.getForward(MyAvatar.orientation));
var forwardOffset = Vec3.multiply(forwardUnitVector,forwardFactor);
var rightFactor = 3;
// TODO: rightUnitVec is unused and spawnLocation declaration is incorrect
var rightUnitVec = Vec3.normalize(Quat.getRight(MyAvatar.orientation));
var spawnLocation = Vec3.sum(Vec3.sum(MyAvatar.position,frontOffset),rightFactor);
var spawnLocation = Vec3.sum(Vec3.sum(MyAvatar.position,forwardOffset),rightFactor);
if (success) {
this.pastedEntityIDs = Clipboard.pasteEntities(spawnLocation);
this.processPastedEntities();
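The new TODO flags two issues the commit does not fix: rightUnitVec is never used, and rightFactor is added to a position as a bare scalar. A hedged sketch of the presumably intended computation, not what the script currently does:

// presumably intended: scale the right axis the same way the forward axis is scaled
var rightOffset = Vec3.multiply(rightUnitVec, rightFactor);
var spawnLocation = Vec3.sum(Vec3.sum(MyAvatar.position, forwardOffset), rightOffset);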
104 scripts/system/audioMuteOverlay.js Normal file
@ -0,0 +1,104 @@
"use strict";
/* jslint vars: true, plusplus: true, forin: true*/
/* globals Tablet, Script, AvatarList, Users, Entities, MyAvatar, Camera, Overlays, Vec3, Quat, Controller, print, getControllerWorldLocation */
/* eslint indent: ["error", 4, { "outerIIFEBody": 0 }] */
//
// audioMuteOverlay.js
//
// client script that creates an overlay to provide mute feedback
//
// Created by Triplelexx on 17/03/09
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

(function() { // BEGIN LOCAL_SCOPE
var utilsPath = Script.resolvePath('../developer/libraries/utils.js');
Script.include(utilsPath);

var TWEEN_SPEED = 0.025;
var MIX_AMOUNT = 0.25;

var overlayPosition = Vec3.ZERO;
var tweenPosition = 0;
var startColor = {
red: 170,
green: 170,
blue: 170
};
var endColor = {
red: 255,
green: 0,
blue: 0
};
var overlayID;

Script.update.connect(update);
Script.scriptEnding.connect(cleanup);

function update(dt) {
if (!AudioDevice.getMuted()) {
if (hasOverlay()) {
deleteOverlay();
}
} else if (!hasOverlay()) {
createOverlay();
} else {
updateOverlay();
}
}

function getOffsetPosition() {
return Vec3.sum(Camera.position, Quat.getFront(Camera.orientation));
}

function createOverlay() {
overlayPosition = getOffsetPosition();
overlayID = Overlays.addOverlay("sphere", {
position: overlayPosition,
rotation: Camera.orientation,
alpha: 0.9,
dimensions: 0.1,
solid: true,
ignoreRayIntersection: true
});
}

function hasOverlay() {
return Overlays.getProperty(overlayID, "position") !== undefined;
}

function updateOverlay() {
// increase by TWEEN_SPEED until completion
if (tweenPosition < 1) {
tweenPosition += TWEEN_SPEED;
} else {
// after tween completion reset to zero and flip values to ping pong
tweenPosition = 0;
for (var component in startColor) {
var storedColor = startColor[component];
startColor[component] = endColor[component];
endColor[component] = storedColor;
}
}
// mix previous position with new and mix colors
overlayPosition = Vec3.mix(overlayPosition, getOffsetPosition(), MIX_AMOUNT);
Overlays.editOverlay(overlayID, {
color: colorMix(startColor, endColor, easeIn(tweenPosition)),
position: overlayPosition,
rotation: Camera.orientation
});
}

function deleteOverlay() {
Overlays.deleteOverlay(overlayID);
}

function cleanup() {
deleteOverlay();
AudioDevice.muteToggled.disconnect(onMuteToggled);
Script.update.disconnect(update);
}
}()); // END LOCAL_SCOPE
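Two small patterns carry the visual effect in this new script: Vec3.mix keeps the sphere trailing smoothly behind the camera (an exponential blend weighted by MIX_AMOUNT), and swapping startColor with endColor whenever the tween wraps produces the ping-pong pulse. A compact sketch of that ping-pong idea in isolation, with hypothetical variable names; colorMix and easeIn are the utils.js helpers used above:

var a = { red: 170, green: 170, blue: 170 };
var b = { red: 255, green: 0, blue: 0 };
var t = 0;
function nextColor(step) {
    t += step;
    if (t >= 1) {                      // wrap the tween and swap endpoints to ping-pong
        t = 0;
        var tmp = a; a = b; b = tmp;
    }
    return colorMix(a, b, easeIn(t));  // blend along the quintic ease
}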
@ -87,8 +87,8 @@ function moveCloserToCamera(positionAtHUD) {
// we don't actually want to render at the slerped look at... instead, we want to render
// slightly closer to the camera than that.
var MOVE_CLOSER_TO_CAMERA_BY = -0.25;
var cameraFront = Quat.getFront(Camera.orientation);
var closerToCamera = Vec3.multiply(cameraFront, MOVE_CLOSER_TO_CAMERA_BY); // slightly closer to camera
var cameraForward = Quat.getForward(Camera.orientation);
var closerToCamera = Vec3.multiply(cameraForward, MOVE_CLOSER_TO_CAMERA_BY); // slightly closer to camera
var slightlyCloserPosition = Vec3.sum(positionAtHUD, closerToCamera);
return slightlyCloserPosition;

@ -463,7 +463,7 @@ Grabber.prototype.moveEvent = function(event) {
var orientation = Camera.getOrientation();
var dragOffset = Vec3.multiply(drag.x, Quat.getRight(orientation));
dragOffset = Vec3.sum(dragOffset, Vec3.multiply(-drag.y, Quat.getUp(orientation)));
var axis = Vec3.cross(dragOffset, Quat.getFront(orientation));
var axis = Vec3.cross(dragOffset, Quat.getForward(orientation));
axis = Vec3.normalize(axis);
var ROTATE_STRENGTH = 0.4; // magic number tuned by hand
var angle = ROTATE_STRENGTH * Math.sqrt((drag.x * drag.x) + (drag.y * drag.y));

@ -487,7 +487,7 @@ Grabber.prototype.moveEvent = function(event) {
if (this.mode === "verticalCylinder") {
// for this mode we recompute the plane based on current Camera
var planeNormal = Quat.getFront(Camera.getOrientation());
var planeNormal = Quat.getForward(Camera.getOrientation());
planeNormal.y = 0;
planeNormal = Vec3.normalize(planeNormal);
var pointOnCylinder = Vec3.multiply(planeNormal, this.xzDistanceToGrab);

@ -1481,7 +1481,7 @@ function MyController(hand) {
var pickRay = {
origin: PICK_WITH_HAND_RAY ? worldHandPosition : Camera.position,
direction: PICK_WITH_HAND_RAY ? Quat.getUp(worldHandRotation) : Vec3.mix(Quat.getUp(worldHandRotation),
Quat.getFront(Camera.orientation),
Quat.getForward(Camera.orientation),
HAND_HEAD_MIX_RATIO),
length: PICK_MAX_DISTANCE
};

@ -174,7 +174,7 @@ function calculateRayUICollisionPoint(position, direction) {
// interect HUD plane, 1m in front of camera, using formula:
// scale = hudNormal dot (hudPoint - position) / hudNormal dot direction
// intersection = postion + scale*direction
var hudNormal = Quat.getFront(Camera.getOrientation());
var hudNormal = Quat.getForward(Camera.getOrientation());
var hudPoint = Vec3.sum(Camera.getPosition(), hudNormal); // must also scale if PLANAR_PERPENDICULAR_HUD_DISTANCE!=1
var denominator = Vec3.dot(hudNormal, direction);
if (denominator === 0) {
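The comment in that hunk is the standard ray/plane intersection: with plane normal n and a point p on the plane, a ray x(s) = position + s * direction crosses the plane at s = n dot (p - position) / (n dot direction). A hedged, standalone sketch of the same formula using the Vec3 helpers already used in the function; the names are illustrative, not taken from the script:

// minimal ray/plane intersection sketch
function intersectRayPlane(rayOrigin, rayDirection, planePoint, planeNormal) {
    var denominator = Vec3.dot(planeNormal, rayDirection);
    if (denominator === 0) {
        return null;  // ray is parallel to the plane, no intersection
    }
    var scale = Vec3.dot(planeNormal, Vec3.subtract(planePoint, rayOrigin)) / denominator;
    return Vec3.sum(rayOrigin, Vec3.multiply(scale, rayDirection));
}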
@ -816,7 +816,7 @@ function mouseClickEvent(event) {
if (0 < x && sizeOK) {
selectedEntityID = foundEntity;
orientation = MyAvatar.orientation;
intersection = rayPlaneIntersection(pickRay, P, Quat.getFront(orientation));
intersection = rayPlaneIntersection(pickRay, P, Quat.getForward(orientation));
if (!event.isShifted) {

@ -1338,12 +1338,12 @@ function handeMenuEvent(menuItem) {
}
function getPositionToCreateEntity() {
var HALF_TREE_SCALE = 16384;
var direction = Quat.getFront(MyAvatar.orientation);
var direction = Quat.getForward(MyAvatar.orientation);
var distance = 1;
var position = Vec3.sum(MyAvatar.position, Vec3.multiply(direction, distance));
if (Camera.mode === "entity" || Camera.mode === "independent") {
position = Vec3.sum(Camera.position, Vec3.multiply(Quat.getFront(Camera.orientation), distance))
position = Vec3.sum(Camera.position, Vec3.multiply(Quat.getForward(Camera.orientation), distance))
}
position.y += 0.5;
if (position.x > HALF_TREE_SCALE || position.y > HALF_TREE_SCALE || position.z > HALF_TREE_SCALE) {

@ -1355,13 +1355,13 @@ function getPositionToCreateEntity() {
function getPositionToImportEntity() {
var dimensions = Clipboard.getContentsDimensions();
var HALF_TREE_SCALE = 16384;
var direction = Quat.getFront(MyAvatar.orientation);
var direction = Quat.getForward(MyAvatar.orientation);
var longest = 1;
longest = Math.sqrt(Math.pow(dimensions.x, 2) + Math.pow(dimensions.z, 2));
var position = Vec3.sum(MyAvatar.position, Vec3.multiply(direction, longest));
if (Camera.mode === "entity" || Camera.mode === "independent") {
position = Vec3.sum(Camera.position, Vec3.multiply(Quat.getFront(Camera.orientation), longest))
position = Vec3.sum(Camera.position, Vec3.multiply(Quat.getForward(Camera.orientation), longest))
}
if (position.x > HALF_TREE_SCALE || position.y > HALF_TREE_SCALE || position.z > HALF_TREE_SCALE) {
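Both edit.js helpers follow the same recipe: pick the forward direction (avatar, or camera when the camera is detached), step out by a distance scaled to what is being created, then reject positions outside the +/- HALF_TREE_SCALE domain bounds. A condensed sketch of that recipe with the distance left as a parameter, offered only as an illustration of the pattern:

// condensed sketch of the spawn-in-front pattern used by the two helpers above
function spawnInFront(distance) {
    var useCamera = (Camera.mode === "entity" || Camera.mode === "independent");
    var origin = useCamera ? Camera.position : MyAvatar.position;
    var forward = useCamera ? Quat.getForward(Camera.orientation) : Quat.getForward(MyAvatar.orientation);
    return Vec3.sum(origin, Vec3.multiply(forward, distance));
}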
@ -78,9 +78,9 @@ function calcSpawnInfo(hand, height) {
rotation: lookAtRot
};
} else {
var front = Quat.getFront(headRot);
finalPosition = Vec3.sum(headPos, Vec3.multiply(0.6, front));
var orientation = Quat.lookAt({x: 0, y: 0, z: 0}, front, {x: 0, y: 1, z: 0});
var forward = Quat.getForward(headRot);
finalPosition = Vec3.sum(headPos, Vec3.multiply(0.6, forward));
var orientation = Quat.lookAt({x: 0, y: 0, z: 0}, forward, {x: 0, y: 1, z: 0});
return {
position: finalPosition,
rotation: Quat.multiply(orientation, {x: 0, y: 1, z: 0, w: 0})

@ -158,7 +158,7 @@ CameraManager = function() {
that.zoomDistance = INITIAL_ZOOM_DISTANCE;
that.targetZoomDistance = that.zoomDistance + 3.0;
var focalPoint = Vec3.sum(Camera.getPosition(),
Vec3.multiply(that.zoomDistance, Quat.getFront(Camera.getOrientation())));
Vec3.multiply(that.zoomDistance, Quat.getForward(Camera.getOrientation())));
// Determine the correct yaw and pitch to keep the camera in the same location
var dPos = Vec3.subtract(focalPoint, Camera.getPosition());

@ -435,7 +435,7 @@ CameraManager = function() {
});
var q = Quat.multiply(yRot, xRot);
var pos = Vec3.multiply(Quat.getFront(q), that.zoomDistance);
var pos = Vec3.multiply(Quat.getForward(q), that.zoomDistance);
Camera.setPosition(Vec3.sum(that.focalPoint, pos));
yRot = Quat.angleAxis(that.yaw - 180, {

@ -2517,7 +2517,7 @@ SelectionDisplay = (function() {
onBegin: function(event) {
pickRay = generalComputePickRay(event.x, event.y);
upDownPickNormal = Quat.getFront(lastCameraOrientation);
upDownPickNormal = Quat.getForward(lastCameraOrientation);
// Remove y component so the y-axis lies along the plane we picking on - this will
// give movements that follow the mouse.
upDownPickNormal.y = 0;

@ -36,7 +36,7 @@ SoundArray = function(audioOptions, autoUpdateAudioPosition) {
};
this.updateAudioPosition = function() {
var position = MyAvatar.position;
var forwardVector = Quat.getFront(MyAvatar.orientation);
var forwardVector = Quat.getForward(MyAvatar.orientation);
this.audioOptions.position = Vec3.sum(position, forwardVector);
};
};
@ -33,7 +33,7 @@ Script.setTimeout(function() {
}, STARTUP_DELAY);
function addNameTag() {
var nameTagPosition = Vec3.sum(MyAvatar.getHeadPosition(), Vec3.multiply(HEAD_OFFSET, Quat.getFront(MyAvatar.orientation)));
var nameTagPosition = Vec3.sum(MyAvatar.getHeadPosition(), Vec3.multiply(HEAD_OFFSET, Quat.getForward(MyAvatar.orientation)));
nameTagPosition.y += HEIGHT_ABOVE_HEAD;
var nameTagProperties = {
name: MyAvatar.displayName + ' Name Tag',

@ -49,7 +49,7 @@ function addNameTag() {
function updateNameTag() {
var nameTagProps = Entities.getEntityProperties(nameTagEntityID);
var nameTagPosition = Vec3.sum(MyAvatar.getHeadPosition(), Vec3.multiply(HEAD_OFFSET, Quat.getFront(MyAvatar.orientation)));
var nameTagPosition = Vec3.sum(MyAvatar.getHeadPosition(), Vec3.multiply(HEAD_OFFSET, Quat.getForward(MyAvatar.orientation)));
nameTagPosition.y += HEIGHT_ABOVE_HEAD;
Entities.editEntity(nameTagEntityID, {

@ -444,7 +444,7 @@ function populateNearbyUserList(selectData, oldAudioData) {
verticalHalfAngle = filter && (frustum.fieldOfView / 2),
horizontalHalfAngle = filter && (verticalHalfAngle * frustum.aspectRatio),
orientation = filter && Camera.orientation,
front = filter && Quat.getFront(orientation),
forward = filter && Quat.getForward(orientation),
verticalAngleNormal = filter && Quat.getRight(orientation),
horizontalAngleNormal = filter && Quat.getUp(orientation);
avatarsOfInterest = {};

@ -463,8 +463,8 @@ function populateNearbyUserList(selectData, oldAudioData) {
return;
}
var normal = id && filter && Vec3.normalize(Vec3.subtract(avatar.position, myPosition));
var horizontal = normal && angleBetweenVectorsInPlane(normal, front, horizontalAngleNormal);
var vertical = normal && angleBetweenVectorsInPlane(normal, front, verticalAngleNormal);
var horizontal = normal && angleBetweenVectorsInPlane(normal, forward, horizontalAngleNormal);
var vertical = normal && angleBetweenVectorsInPlane(normal, forward, verticalAngleNormal);
if (id && filter && ((Math.abs(horizontal) > horizontalHalfAngle) || (Math.abs(vertical) > verticalHalfAngle))) {
return;
}

@ -30,7 +30,7 @@
} else {
// default to friends if it can't be determined
myVisibility = "friends";
GlobalServices.findableBy = myVisibilty;
GlobalServices.findableBy = myVisibility;
}
var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");

@ -253,7 +253,7 @@ function addTerrainBlock() {
if (alreadyThere) {
// there is already a terrain block under MyAvatar.
// try in front of the avatar.
facingPosition = Vec3.sum(MyAvatar.position, Vec3.multiply(8.0, Quat.getFront(Camera.getOrientation())));
facingPosition = Vec3.sum(MyAvatar.position, Vec3.multiply(8.0, Quat.getForward(Camera.getOrientation())));
facingPosition = Vec3.sum(facingPosition, {
x: 8,
y: 8,
@ -53,7 +53,7 @@ var deleteButton = toolBar.addOverlay("image", {
});
function inFrontOfMe(distance) {
return Vec3.sum(Camera.getPosition(), Vec3.multiply(distance, Quat.getFront(Camera.getOrientation())));
return Vec3.sum(Camera.getPosition(), Vec3.multiply(distance, Quat.getForward(Camera.getOrientation())));
}
function onButtonClick() {

@ -44,8 +44,8 @@ var FIXED_LOCATION = false;
if (!FIXED_LOCATION) {
var flockPosition = Vec3.sum(MyAvatar.position,Vec3.sum(
Vec3.multiply(Quat.getFront(MyAvatar.orientation), DISTANCE_ABOVE_ME),
Vec3.multiply(Quat.getFront(MyAvatar.orientation), DISTANCE_IN_FRONT_OF_ME)));
Vec3.multiply(Quat.getForward(MyAvatar.orientation), DISTANCE_ABOVE_ME),
Vec3.multiply(Quat.getForward(MyAvatar.orientation), DISTANCE_IN_FRONT_OF_ME)));
} else {
var flockPosition = { x: 4999.6, y: 4986.5, z: 5003.5 };
}

@ -119,7 +119,7 @@ function updateButterflies(deltaTime) {
var HORIZ_SCALE = 0.50;
var VERT_SCALE = 0.50;
var newHeading = Math.random() * 360.0;
var newVelocity = Vec3.multiply(HORIZ_SCALE, Quat.getFront(Quat.fromPitchYawRollDegrees(0.0, newHeading, 0.0)));
var newVelocity = Vec3.multiply(HORIZ_SCALE, Quat.getForward(Quat.fromPitchYawRollDegrees(0.0, newHeading, 0.0)));
newVelocity.y = (Math.random() + 0.5) * VERT_SCALE;
Entities.editEntity(butterflies[i], { rotation: Quat.fromPitchYawRollDegrees(-80 + Math.random() * 20, newHeading, (Math.random() - 0.5) * 10),
velocity: newVelocity } );

@ -18,7 +18,7 @@ var orientation = MyAvatar.orientation;
orientation = Quat.safeEulerAngles(orientation);
orientation.x = 0;
orientation = Quat.fromVec3Degrees(orientation);
var center = Vec3.sum(MyAvatar.getHeadPosition(), Vec3.multiply(2, Quat.getFront(orientation)));
var center = Vec3.sum(MyAvatar.getHeadPosition(), Vec3.multiply(2, Quat.getForward(orientation)));
// An entity is described and created by specifying a map of properties
var cow = Entities.addEntity({

@ -127,8 +127,8 @@ function mousePressEvent(event) {
deleteDice();
} else if (clickedOverlay == diceButton) {
var HOW_HARD = 2.0;
var position = Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()));
var velocity = Vec3.multiply(HOW_HARD, Quat.getFront(Camera.getOrientation()));
var position = Vec3.sum(Camera.getPosition(), Quat.getForward(Camera.getOrientation()));
var velocity = Vec3.multiply(HOW_HARD, Quat.getForward(Camera.getOrientation()));
shootDice(position, velocity);
madeSound = false;
}

@ -16,7 +16,7 @@ var center = Vec3.sum(Vec3.sum(MyAvatar.position, {
x: 0,
y: 0.5,
z: 0
}), Vec3.multiply(0.5, Quat.getFront(Camera.getOrientation())));
}), Vec3.multiply(0.5, Quat.getForward(Camera.getOrientation())));
var flashlight = Entities.addEntity({
type: "Model",

@ -15,7 +15,7 @@ var orientation = MyAvatar.orientation;
orientation = Quat.safeEulerAngles(orientation);
orientation.x = 0;
orientation = Quat.fromVec3Degrees(orientation);
var center = Vec3.sum(MyAvatar.getHeadPosition(), Vec3.multiply(2, Quat.getFront(orientation)));
var center = Vec3.sum(MyAvatar.getHeadPosition(), Vec3.multiply(2, Quat.getForward(orientation)));
var CLUB_MODEL = "http://hifi-production.s3.amazonaws.com/tutorials/golfClub/putter_VR.fbx";
var CLUB_COLLISION_HULL = "http://hifi-production.s3.amazonaws.com/tutorials/golfClub/club_collision_hull.obj";

@ -14,7 +14,7 @@ var center = Vec3.sum(Vec3.sum(MyAvatar.position, {
x: 0,
y: 0.5,
z: 0
}), Vec3.multiply(1, Quat.getFront(Camera.getOrientation())));
}), Vec3.multiply(1, Quat.getForward(Camera.getOrientation())));
// this is just a model exported from blender with a texture named 'Picture' on one face. also made it emissive so it doesn't require lighting.
var MODEL_URL = "http://hifi-production.s3.amazonaws.com/tutorials/pictureFrame/finalFrame.fbx";

@ -14,7 +14,7 @@ var center = Vec3.sum(Vec3.sum(MyAvatar.position, {
x: 0,
y: 0.5,
z: 0
}), Vec3.multiply(0.5, Quat.getFront(Camera.getOrientation())));
}), Vec3.multiply(0.5, Quat.getForward(Camera.getOrientation())));

var pingPongGunProperties = {
@ -6,7 +6,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(1.5, Quat.getFront(Camera.getOrientation())));
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(1.5, Quat.getForward(Camera.getOrientation())));
var SCRIPT_URL = "http://hifi-production.s3.amazonaws.com/tutorials/entity_scripts/pistol.js";
var MODEL_URL = "http://hifi-production.s3.amazonaws.com/tutorials/pistol/gun.fbx";
var COLLISION_SOUND_URL = 'http://hifi-production.s3.amazonaws.com/tutorials/pistol/drop.wav'

@ -13,7 +13,7 @@ var center = Vec3.sum(Vec3.sum(MyAvatar.position, {
x: 0,
y: 0.5,
z: 0
}), Vec3.multiply(1, Quat.getFront(Camera.getOrientation())));
}), Vec3.multiply(1, Quat.getForward(Camera.getOrientation())));
function makeBell() {
var soundMakerProperties = {
@ -57,7 +57,7 @@
// Position yourself facing in the direction you were originally facing, but with a
// point on the ground *away* meters from *position* and in front of you.
var offset = Quat.getFront(MyAvatar.orientation);
var offset = Quat.getForward(MyAvatar.orientation);
offset.y = 0.0;
offset = Vec3.multiply(-away, Vec3.normalize(offset));
var newAvatarPosition = Vec3.sum(position, offset);

@ -72,7 +72,7 @@
}
function inFrontOfMe() {
return Vec3.sum(MyAvatar.position, Vec3.multiply(BALL_DROP_DISTANCE, Quat.getFront(MyAvatar.orientation)));
return Vec3.sum(MyAvatar.position, Vec3.multiply(BALL_DROP_DISTANCE, Quat.getForward(MyAvatar.orientation)));
}
function avatarHalfHeight() {

@ -94,9 +94,9 @@
},
shootBall: function(gunProperties) {
var forwardVec = Quat.getFront(Quat.multiply(gunProperties.rotation, Quat.fromPitchYawRollDegrees(0, 180, 0)));
forwardVec = Vec3.normalize(forwardVec);
forwardVec = Vec3.multiply(forwardVec, GUN_FORCE);
var forwardVector = Quat.getForward(Quat.multiply(gunProperties.rotation, Quat.fromPitchYawRollDegrees(0, 180, 0)));
forwardVector = Vec3.normalize(forwardVector);
forwardVector = Vec3.multiply(forwardVector, GUN_FORCE);
var properties = {
name: 'Tutorial Ping Pong Ball',

@ -111,7 +111,7 @@
rotation: gunProperties.rotation,
position: this.getGunTipPosition(gunProperties),
gravity: PING_PONG_GUN_GRAVITY,
velocity: forwardVec,
velocity: forwardVector,
lifetime: 10
};

@ -131,12 +131,12 @@
getGunTipPosition: function(properties) {
//the tip of the gun is going to be in a different place than the center, so we move in space relative to the model to find that position
var frontVector = Quat.getFront(properties.rotation);
var frontOffset = Vec3.multiply(frontVector, GUN_TIP_FWD_OFFSET);
var forwardVector = Quat.getForward(properties.rotation);
var forwardOffset = Vec3.multiply(forwardVector, GUN_TIP_FWD_OFFSET);
var upVector = Quat.getUp(properties.rotation);
var upOffset = Vec3.multiply(upVector, GUN_TIP_UP_OFFSET);
var gunTipPosition = Vec3.sum(properties.position, frontOffset);
var gunTipPosition = Vec3.sum(properties.position, forwardOffset);
gunTipPosition = Vec3.sum(gunTipPosition, upOffset);
return gunTipPosition;
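getGunTipPosition is a small frame-relative offset: turn the entity's rotation into forward and up axes, scale each by a tuned constant, and add both to the entity position. The same pattern works for any attachment point; a hedged sketch with illustrative offset values that are not from this script:

// hedged sketch: attachment point 0.2 forward and 0.05 up from an entity (values are examples only)
function attachmentPoint(properties) {
    var forwardOffset = Vec3.multiply(Quat.getForward(properties.rotation), 0.2);
    var upOffset = Vec3.multiply(Quat.getUp(properties.rotation), 0.05);
    return Vec3.sum(Vec3.sum(properties.position, forwardOffset), upOffset);
}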
@ -69,7 +69,7 @@
var gunProps = Entities.getEntityProperties(this.entityID, ['position', 'rotation']);
this.position = gunProps.position;
this.rotation = gunProps.rotation;
this.firingDirection = Quat.getFront(this.rotation);
this.firingDirection = Quat.getForward(this.rotation);
var upVec = Quat.getUp(this.rotation);
this.barrelPoint = Vec3.sum(this.position, Vec3.multiply(upVec, this.laserOffsets.y));
this.laserTip = Vec3.sum(this.barrelPoint, Vec3.multiply(this.firingDirection, this.laserLength));

@ -34,12 +34,12 @@
var SCRIPT_URL = Script.resolvePath("./entity_scripts/magneticBlock.js");
var frontVector = Quat.getFront(MyAvatar.orientation);
frontVector.y += VERTICAL_OFFSET;
var forwardVector = Quat.getForward(MyAvatar.orientation);
forwardVector.y += VERTICAL_OFFSET;
for (var x = 0; x < COLUMNS; x++) {
for (var y = 0; y < ROWS; y++) {
var frontOffset = {
var forwardOffset = {
x: 0,
y: SIZE * y + SIZE,
z: SIZE * x + SIZE

@ -61,7 +61,7 @@
cloneLimit: 9999
}
}),
position: Vec3.sum(MyAvatar.position, Vec3.sum(frontOffset, frontVector)),
position: Vec3.sum(MyAvatar.position, Vec3.sum(forwardOffset, forwardVector)),
color: newColor(),
script: SCRIPT_URL
});
@ -123,16 +123,16 @@ public:
void update(float deltaTime) {
if (moving()) {
glm::vec3 camFront = getOrientation() * Vectors::FRONT;
glm::vec3 camForward = getOrientation() * Vectors::FRONT;
glm::vec3 camRight = getOrientation() * Vectors::RIGHT;
glm::vec3 camUp = getOrientation() * Vectors::UP;
float moveSpeed = deltaTime * movementSpeed;
if (keys[FORWARD]) {
position += camFront * moveSpeed;
position += camForward * moveSpeed;
}
if (keys[BACK]) {
position -= camFront * moveSpeed;
position -= camForward * moveSpeed;
}
if (keys[LEFT]) {
position -= camRight * moveSpeed;

@ -7,5 +7,6 @@ set_target_properties(${TARGET_NAME} PROPERTIES FOLDER "Tests/manual-tests/")
# link in the shared libraries
link_hifi_libraries(render-utils gl gpu gpu-gl shared)
target_link_libraries(${TARGET_NAME} ${CMAKE_THREAD_LIBS_INIT})

package_libraries_for_deployment()
@ -15,13 +15,16 @@ exports.handlers = {
// directories to scan for jsdoc comments
var dirList = [
'../../interface/src',
'../../interface/src/avatar',
'../../interface/src/scripting',
'../../interface/src/ui/overlays',
'../../libraries/script-engine/src',
'../../libraries/networking/src',
'../../libraries/animation/src',
'../../libraries/avatars/src',
'../../libraries/controllers/src/controllers/',
'../../libraries/entities/src',
'../../libraries/shared/src'
'../../libraries/networking/src',
'../../libraries/shared/src',
'../../libraries/script-engine/src',
];
var exts = ['.h', '.cpp'];