Mirror of https://github.com/overte-org/overte.git
Commit d43d12dfa7: Merge branch 'master' into feature/arkit-blendshapes
46 changed files with 972 additions and 215 deletions

@@ -265,7 +265,7 @@ static const int AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND = 45;
 void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
     quint64 start = usecTimestampNow();

-    if (node->getType() == NodeType::Agent && node->getLinkedData() && node->getActiveSocket() && !node->isUpstream()) {
+    if ((node->getType() == NodeType::Agent || node->getType() == NodeType::EntityScriptServer) && node->getLinkedData() && node->getActiveSocket() && !node->isUpstream()) {
         broadcastAvatarDataToAgent(node);
     } else if (node->getType() == NodeType::DownstreamAvatarMixer) {
         broadcastAvatarDataToDownstreamMixer(node);

@@ -448,13 +448,6 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
             // or that somehow we haven't sent
             if (lastSeqToReceiver == lastSeqFromSender && lastSeqToReceiver != 0) {
                 ++numAvatarsHeldBack;
-
-                // BUGZ-781 verbose debugging:
-                auto usecLastTimeSent = destinationNodeData->getLastOtherAvatarEncodeTime(sourceAvatarNodeData->getNodeLocalID());
-                if (usecLastTimeSent != 0 && startIgnoreCalculation - usecLastTimeSent > 10 * USECS_PER_SECOND) {
-                    qCDebug(avatars) << "Not sent avatar" << *sourceAvatarNode << "to Node" << *destinationNode << "in > 10 s";
-                }
-
                 sendAvatar = false;
             } else if (lastSeqFromSender == 0) {
                 // We have have not yet received any data about this avatar. Ignore it for now

@@ -86,8 +86,6 @@ EntityScriptServer::EntityScriptServer(ReceivedMessage& message) : ThreadedAssig
         this, "handleOctreePacket");
     packetReceiver.registerListener(PacketType::SelectedAudioFormat, this, "handleSelectedAudioFormat");

-    auto avatarHashMap = DependencyManager::set<AvatarHashMap>();
-
     packetReceiver.registerListener(PacketType::ReloadEntityServerScript, this, "handleReloadEntityServerScriptPacket");
     packetReceiver.registerListener(PacketType::EntityScriptGetStatus, this, "handleEntityScriptGetStatusPacket");
     packetReceiver.registerListener(PacketType::EntityServerScriptLog, this, "handleEntityServerScriptLogPacket");

@@ -255,6 +253,7 @@ void EntityScriptServer::handleEntityScriptCallMethodPacket(QSharedPointer<Recei
 void EntityScriptServer::run() {
     DependencyManager::set<ScriptEngines>(ScriptEngine::ENTITY_SERVER_SCRIPT);
     DependencyManager::set<EntityScriptServerServices>();
+    DependencyManager::set<AvatarHashMap>();

     // make sure we request our script once the agent connects to the domain
     auto nodeList = DependencyManager::get<NodeList>();

@@ -448,6 +447,7 @@ void EntityScriptServer::resetEntitiesScriptEngine() {
     newEngine->globalObject().setProperty("WebSocketServer", webSocketServerConstructorValue);

     newEngine->registerGlobalObject("SoundCache", DependencyManager::get<SoundCacheScriptingInterface>().data());
+    newEngine->registerGlobalObject("AvatarList", DependencyManager::get<AvatarHashMap>().data());

     // connect this script engines printedMessage signal to the global ScriptEngines these various messages
     auto scriptEngines = DependencyManager::get<ScriptEngines>().data();

cmake/externals/wasapi/CMakeLists.txt (vendored, 4 changes)
@@ -6,8 +6,8 @@ if (WIN32)
   include(ExternalProject)
   ExternalProject_Add(
     ${EXTERNAL_NAME}
-    URL https://public.highfidelity.com/dependencies/qtaudio_wasapi11.zip
-    URL_MD5 d0eb8489455e7f79d59155535a2c8861
+    URL https://public.highfidelity.com/dependencies/qtaudio_wasapi13.zip
+    URL_MD5 aa56a45f19c18caee13d29a40d1d7d28
     CONFIGURE_COMMAND ""
     BUILD_COMMAND ""
     INSTALL_COMMAND ""

@@ -1,4 +1,4 @@
 Source: hifi-deps
-Version: 0.1
+Version: 0.3
 Description: Collected dependencies for High Fidelity applications
 Build-Depends: bullet3, draco, etc2comp, glm, nvtt, openexr (!android), openssl (windows), tbb (!android&!osx), zlib, webrtc (!android)

@@ -265,7 +265,7 @@ endif()
     if platform.system() == 'Windows':
        url = 'https://hifi-public.s3.amazonaws.com/dependencies/vcpkg/qt5-install-5.12.3-windows3.tar.gz'
     elif platform.system() == 'Darwin':
-        url = 'https://hifi-public.s3.amazonaws.com/dependencies/vcpkg/qt5-install-5.12.3-macos3.tar.gz'
+        url = 'https://hifi-public.s3.amazonaws.com/dependencies/vcpkg/qt5-install-5.12.3-macos.tar.gz?versionId=bLAgnoJ8IMKpqv8NFDcAu8hsyQy3Rwwz'
     elif platform.system() == 'Linux':
        if platform.linux_distribution()[1][:3] == '16.':
            url = 'https://hifi-public.s3.amazonaws.com/dependencies/vcpkg/qt5-install-5.12.3-ubuntu-16.04-with-symbols.tar.gz'

@@ -4736,32 +4736,6 @@
             },
             {
                 "children": [
-                    {
-                        "children": [
-                        ],
-                        "data": {
-                            "endFrame": 30,
-                            "loopFlag": true,
-                            "startFrame": 1,
-                            "timeScale": 1,
-                            "url": "qrc:///avatar/animations/side_step_short_left.fbx"
-                        },
-                        "id": "strafeLeftShortStep_c",
-                        "type": "clip"
-                    },
-                    {
-                        "children": [
-                        ],
-                        "data": {
-                            "endFrame": 20,
-                            "loopFlag": true,
-                            "startFrame": 1,
-                            "timeScale": 1,
-                            "url": "qrc:///avatar/animations/side_step_left.fbx"
-                        },
-                        "id": "strafeLeftStep_c",
-                        "type": "clip"
-                    },
                     {
                         "children": [
                         ],

@@ -4819,8 +4793,6 @@
                 "alpha": 0,
                 "alphaVar": "moveLateralAlpha",
                 "characteristicSpeeds": [
-                    0.1,
-                    0.5,
                     1,
                     2.55,
                     3.35,

@@ -4834,34 +4806,6 @@
             },
             {
                 "children": [
-                    {
-                        "children": [
-                        ],
-                        "data": {
-                            "endFrame": 30,
-                            "loopFlag": true,
-                            "mirrorFlag": true,
-                            "startFrame": 1,
-                            "timeScale": 1,
-                            "url": "qrc:///avatar/animations/side_step_short_left.fbx"
-                        },
-                        "id": "strafeRightShortStep_c",
-                        "type": "clip"
-                    },
-                    {
-                        "children": [
-                        ],
-                        "data": {
-                            "endFrame": 20,
-                            "loopFlag": true,
-                            "mirrorFlag": true,
-                            "startFrame": 1,
-                            "timeScale": 1,
-                            "url": "qrc:///avatar/animations/side_step_left.fbx"
-                        },
-                        "id": "strafeRightStep_c",
-                        "type": "clip"
-                    },
                     {
                         "children": [
                         ],

@@ -4923,8 +4867,6 @@
                 "alpha": 0,
                 "alphaVar": "moveLateralAlpha",
                 "characteristicSpeeds": [
-                    0.1,
-                    0.5,
                     1,
                     2.55,
                     3.4,

@@ -5617,7 +5559,7 @@
                 },
                 {
                     "state": "STRAFELEFT",
-                    "var": "isInputLeft"
+                    "var": "isMovingLeft"
                 },
                 {
                     "state": "turnRight",

@@ -5681,7 +5623,7 @@
                 },
                 {
                     "state": "STRAFERIGHT",
-                    "var": "isInputRight"
+                    "var": "isMovingRight"
                 },
                 {
                     "state": "turnRight",

@@ -78,6 +78,15 @@
             "to": "Actions.Yaw"
         },

+        { "from": { "makeAxis" : [
+                ["Keyboard.Left"],
+                ["Keyboard.Right"]
+            ]
+          },
+          "when": ["Application.CameraFirstPersonLookat", "!Keyboard.Shift"],
+          "to": "Actions.Yaw"
+        },
+
         { "from": { "makeAxis" : [
                 ["Keyboard.Left"],
                 ["Keyboard.Right"]

@@ -113,7 +122,16 @@
           "when": ["Application.CameraFirstPerson", "!Keyboard.Control"],
           "to": "Actions.Yaw"
         },

+        { "from": { "makeAxis" : [
+                ["Keyboard.A"],
+                ["Keyboard.D"]
+            ]
+          },
+          "when": ["Application.CameraFirstPersonLookat", "!Keyboard.Control"],
+          "to": "Actions.Yaw"
+        },
+
         { "from": { "makeAxis" : [
                 ["Keyboard.A"],
                 ["Keyboard.D"]

@@ -149,6 +167,15 @@
           "when": "Application.CameraFirstPerson",
           "to": "Actions.Yaw"
         },

+        { "from": { "makeAxis" : [
+                ["Keyboard.TouchpadLeft"],
+                ["Keyboard.TouchpadRight"]
+            ]
+          },
+          "when": "Application.CameraFirstPersonLookat",
+          "to": "Actions.Yaw"
+        },
+
         { "from": { "makeAxis" : [
                 ["Keyboard.TouchpadLeft"],

@@ -222,10 +249,12 @@
         { "from": "Keyboard.Left", "when": "Keyboard.Shift", "to": "Actions.LATERAL_LEFT" },
         { "from": "Keyboard.Right", "when": "Keyboard.Shift", "to": "Actions.LATERAL_RIGHT" },
         { "from": "Keyboard.Up", "when": "Application.CameraFirstPerson", "to": "Actions.LONGITUDINAL_FORWARD" },
+        { "from": "Keyboard.Up", "when": "Application.CameraFirstPersonLookat", "to": "Actions.LONGITUDINAL_FORWARD" },
         { "from": "Keyboard.Up", "when": "Application.CameraThirdPerson", "to": "Actions.LONGITUDINAL_FORWARD" },
         { "from": "Keyboard.Up", "when": "Application.CameraLookAt", "to": "Actions.LONGITUDINAL_FORWARD" },
         { "from": "Keyboard.Up", "when": "Application.CameraSelfie", "to": "Actions.LONGITUDINAL_BACKWARD" },
         { "from": "Keyboard.Down", "when": "Application.CameraFirstPerson", "to": "Actions.LONGITUDINAL_BACKWARD" },
+        { "from": "Keyboard.Down", "when": "Application.CameraFirstPersonLookat", "to": "Actions.LONGITUDINAL_BACKWARD" },
         { "from": "Keyboard.Down", "when": "Application.CameraThirdPerson", "to": "Actions.LONGITUDINAL_BACKWARD" },
         { "from": "Keyboard.Down", "when": "Application.CameraLookAt", "to": "Actions.LONGITUDINAL_BACKWARD" },
         { "from": "Keyboard.Down", "when": "Application.CameraSelfie", "to": "Actions.LONGITUDINAL_FORWARD" },

@@ -225,9 +225,9 @@ Flickable {
             SimplifiedControls.RadioButton {
                 id: firstPerson
                 text: "First Person View"
-                checked: Camera.mode === "first person"
+                checked: Camera.mode === "first person look at"
                 onClicked: {
-                    Camera.mode = "first person"
+                    Camera.mode = "first person look at"
                 }
             }

@@ -254,7 +254,7 @@ Flickable {
         target: Camera

         onModeUpdated: {
-            if (Camera.mode === "first person") {
+            if (Camera.mode === "first person look at") {
                 firstPerson.checked = true
             } else if (Camera.mode === "look at") {
                 thirdPerson.checked = true

@@ -429,7 +429,7 @@ Rectangle {
         SimplifiedControls.TextField {
             id: goToTextField
             readonly property string shortPlaceholderText: "Jump to..."
-            readonly property string longPlaceholderText: "Type the name of a location to quickly jump there..."
+            readonly property string longPlaceholderText: "Quickly jump to a location by typing '/LocationName'"
             anchors.centerIn: parent
             width: Math.min(parent.width, 445)
             height: 35

interface/resources/serverless/empty.json (new file, 10 additions)
@@ -0,0 +1,10 @@
+{
+    "DataVersion": 3,
+    "Paths": {
+        "/": "/0, 0, 0/0,0,0,0"
+    },
+    "Entities": [
+    ],
+    "Id": "{5807d519-eb7d-496d-b22a-0820811291c9}",
+    "Version": 120
+}

@@ -717,6 +717,7 @@ private:
     static const QString STATE_IN_HMD = "InHMD";
     static const QString STATE_CAMERA_FULL_SCREEN_MIRROR = "CameraFSM";
     static const QString STATE_CAMERA_FIRST_PERSON = "CameraFirstPerson";
+    static const QString STATE_CAMERA_FIRST_PERSON_LOOK_AT = "CameraFirstPersonLookat";
     static const QString STATE_CAMERA_THIRD_PERSON = "CameraThirdPerson";
     static const QString STATE_CAMERA_ENTITY = "CameraEntity";
     static const QString STATE_CAMERA_INDEPENDENT = "CameraIndependent";

@@ -933,7 +934,8 @@ bool setupEssentials(int& argc, char** argv, bool runningMarkerExisted) {
     DependencyManager::set<AudioInjectorManager>();
     DependencyManager::set<MessagesClient>();
     controller::StateController::setStateVariables({ { STATE_IN_HMD, STATE_CAMERA_FULL_SCREEN_MIRROR,
-        STATE_CAMERA_FIRST_PERSON, STATE_CAMERA_THIRD_PERSON, STATE_CAMERA_ENTITY, STATE_CAMERA_INDEPENDENT, STATE_CAMERA_LOOK_AT, STATE_CAMERA_SELFIE,
+        STATE_CAMERA_FIRST_PERSON, STATE_CAMERA_FIRST_PERSON_LOOK_AT, STATE_CAMERA_THIRD_PERSON,
+        STATE_CAMERA_ENTITY, STATE_CAMERA_INDEPENDENT, STATE_CAMERA_LOOK_AT, STATE_CAMERA_SELFIE,
         STATE_SNAP_TURN, STATE_ADVANCED_MOVEMENT_CONTROLS, STATE_GROUNDED, STATE_NAV_FOCUSED,
         STATE_PLATFORM_WINDOWS, STATE_PLATFORM_MAC, STATE_PLATFORM_ANDROID, STATE_LEFT_HAND_DOMINANT, STATE_RIGHT_HAND_DOMINANT, STATE_STRAFE_ENABLED } });
     DependencyManager::set<UserInputMapper>();

@@ -1880,6 +1882,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
     _applicationStateDevice->setInputVariant(STATE_CAMERA_FIRST_PERSON, []() -> float {
         return qApp->getCamera().getMode() == CAMERA_MODE_FIRST_PERSON ? 1 : 0;
     });
+    _applicationStateDevice->setInputVariant(STATE_CAMERA_FIRST_PERSON_LOOK_AT, []() -> float {
+        return qApp->getCamera().getMode() == CAMERA_MODE_FIRST_PERSON_LOOK_AT ? 1 : 0;
+    });
     _applicationStateDevice->setInputVariant(STATE_CAMERA_THIRD_PERSON, []() -> float {
         return qApp->getCamera().getMode() == CAMERA_MODE_THIRD_PERSON ? 1 : 0;
     });

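The lambdas above expose each camera mode to the controller layer as a boolean state input; the "CameraFirstPersonLookat" name registered here is what the keyboard/mouse mapping JSON earlier in this commit references as `Application.CameraFirstPersonLookat` in its "when" conditions. A minimal standalone sketch of that pattern (the `stateInputs` map and `CameraMode` enum below are illustrative stand-ins, not the engine's actual classes):

```cpp
#include <functional>
#include <iostream>
#include <map>
#include <string>

// Illustrative stand-in for the engine's camera mode enum.
enum CameraMode { CAMERA_MODE_FIRST_PERSON, CAMERA_MODE_FIRST_PERSON_LOOK_AT, CAMERA_MODE_THIRD_PERSON };

int main() {
    CameraMode currentMode = CAMERA_MODE_FIRST_PERSON_LOOK_AT;

    // Each named state maps to a lambda that reports 1.0f while the mode is active,
    // mirroring setInputVariant(STATE_CAMERA_FIRST_PERSON_LOOK_AT, ...) above.
    std::map<std::string, std::function<float()>> stateInputs;
    stateInputs["CameraFirstPerson"] = [&currentMode]() -> float {
        return currentMode == CAMERA_MODE_FIRST_PERSON ? 1.0f : 0.0f;
    };
    stateInputs["CameraFirstPersonLookat"] = [&currentMode]() -> float {
        return currentMode == CAMERA_MODE_FIRST_PERSON_LOOK_AT ? 1.0f : 0.0f;
    };

    // A mapping condition such as "when": "Application.CameraFirstPersonLookat"
    // amounts to evaluating the matching state input at mapping time.
    std::cout << "CameraFirstPersonLookat active: "
              << stateInputs["CameraFirstPersonLookat"]() << "\n";   // prints 1
    return 0;
}
```
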
@@ -1989,7 +1994,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
         settingsTimer->start();
     }, QThread::LowestPriority);

-    if (Menu::getInstance()->isOptionChecked(MenuOption::FirstPerson)) {
+    if (Menu::getInstance()->isOptionChecked(MenuOption::FirstPersonLookAt)) {
         getMyAvatar()->setBoomLength(MyAvatar::ZOOM_MIN); // So that camera doesn't auto-switch to third person.
     }

@@ -2938,13 +2943,29 @@ Application::~Application() {
     qInstallMessageHandler(LogHandler::verboseMessageHandler);

 #ifdef Q_OS_MAC
+    // 10/16/2019 - Disabling this call. This causes known crashes (A), and it is not
+    // fully understood whether it might cause other unknown crashes (B).
+    //
+    // (A) Although we try to shutdown the ScriptEngine threads in onAboutToQuit, there is
+    // currently no guarantee that they have stopped. Waiting on them to stop has so far appeared to
+    // never return on Mac, causing the application to hang on shutdown. Because ScriptEngines
+    // may still be running, they may end up receiving events that are triggered from this processEvents call,
+    // and then try to access resources that are no longer available at this point in time.
+    // If the ScriptEngine threads were fully destroyed before getting here, this would
+    // not be an issue.
+    //
+    // (B) It seems likely that a bunch of potential event handlers are dependent on Application
+    // and other common dependencies to be available and not destroyed or in the middle of being
+    // destroyed.
+
+
     // Clear the event queue before application is totally destructed.
     // This will drain the messasge queue of pending "deleteLaters" queued up
     // during shutdown of the script engines.
     // We do this here because there is a possiblty that [NSApplication terminate:]
     // will be called during processEvents which will invoke all static destructors.
     // We want to postpone this utill the last possible moment.
-    QCoreApplication::processEvents();
+    //QCoreApplication::processEvents();
 #endif
 }

@@ -3587,14 +3608,17 @@ void Application::updateCamera(RenderArgs& renderArgs, float deltaTime) {
     // Using the latter will cause the camera to wobble with idle animations,
     // or with changes from the face tracker
     CameraMode mode = _myCamera.getMode();
-    if (mode == CAMERA_MODE_FIRST_PERSON) {
+    if (mode == CAMERA_MODE_FIRST_PERSON || mode == CAMERA_MODE_FIRST_PERSON_LOOK_AT) {
         _thirdPersonHMDCameraBoomValid= false;
         if (isHMDMode()) {
             mat4 camMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
             _myCamera.setPosition(extractTranslation(camMat));
             _myCamera.setOrientation(glmExtractRotation(camMat));
+        } else if (mode == CAMERA_MODE_FIRST_PERSON) {
+            _myCamera.setPosition(myAvatar->getDefaultEyePosition());
+            _myCamera.setOrientation(myAvatar->getMyHead()->getHeadOrientation());
         } else {
-            _myCamera.setPosition(myAvatar->getLookAtPivotPoint());
+            _myCamera.setPosition(myAvatar->getCameraEyesPosition(deltaTime));
             _myCamera.setOrientation(myAvatar->getLookAtRotation());
         }
     } else if (mode == CAMERA_MODE_THIRD_PERSON || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE) {

@@ -4373,7 +4397,7 @@ void Application::keyPressEvent(QKeyEvent* event) {

         case Qt::Key_1: {
             Menu* menu = Menu::getInstance();
-            menu->triggerOption(MenuOption::FirstPerson);
+            menu->triggerOption(MenuOption::FirstPersonLookAt);
             break;
         }
         case Qt::Key_2: {

@@ -5421,7 +5445,7 @@ void Application::loadSettings() {
             isFirstPerson = menu->isOptionChecked(MenuOption::FirstPersonHMD);
         } else {
             // if HMD is not active, only use first person if the menu option is checked
-            isFirstPerson = menu->isOptionChecked(MenuOption::FirstPerson);
+            isFirstPerson = menu->isOptionChecked(MenuOption::FirstPersonLookAt);
         }
     }
 }

@@ -5436,9 +5460,9 @@ void Application::loadSettings() {

     // finish initializing the camera, based on everything we checked above. Third person camera will be used if no settings
     // dictated that we should be in first person
-    Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, isFirstPerson);
+    Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPersonLookAt, isFirstPerson);
     Menu::getInstance()->setIsOptionChecked(MenuOption::ThirdPerson, !isFirstPerson);
-    _myCamera.setMode((isFirstPerson) ? CAMERA_MODE_FIRST_PERSON : CAMERA_MODE_LOOK_AT);
+    _myCamera.setMode((isFirstPerson) ? CAMERA_MODE_FIRST_PERSON_LOOK_AT : CAMERA_MODE_LOOK_AT);
     cameraMenuChanged();

     auto inputs = pluginManager->getInputPlugins();

@@ -5602,7 +5626,7 @@ void Application::pauseUntilLoginDetermined() {
         menu->getMenu("Developer")->setVisible(false);
     }
     _previousCameraMode = _myCamera.getMode();
-    _myCamera.setMode(CAMERA_MODE_FIRST_PERSON);
+    _myCamera.setMode(CAMERA_MODE_FIRST_PERSON_LOOK_AT);
     cameraModeChanged();

     // disconnect domain handler.

@@ -5750,13 +5774,13 @@ void Application::pushPostUpdateLambda(void* key, const std::function<void()>& f
 // to everyone.
 // The principal result is to call updateLookAtTargetAvatar() and then setLookAtPosition().
 // Note that it is called BEFORE we update position or joints based on sensors, etc.
-void Application::updateMyAvatarLookAtPosition() {
+void Application::updateMyAvatarLookAtPosition(float deltaTime) {
     PerformanceTimer perfTimer("lookAt");
     bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
     PerformanceWarning warn(showWarnings, "Application::updateMyAvatarLookAtPosition()");

     auto myAvatar = getMyAvatar();
-    myAvatar->updateLookAtPosition(_myCamera);
+    myAvatar->updateEyesLookAtPosition(deltaTime);
 }

 void Application::updateThreads(float deltaTime) {

@@ -5790,11 +5814,11 @@ void Application::cycleCamera() {
     if (menu->isOptionChecked(MenuOption::FullscreenMirror)) {

         menu->setIsOptionChecked(MenuOption::FullscreenMirror, false);
-        menu->setIsOptionChecked(MenuOption::FirstPerson, true);
+        menu->setIsOptionChecked(MenuOption::FirstPersonLookAt, true);

-    } else if (menu->isOptionChecked(MenuOption::FirstPerson)) {
+    } else if (menu->isOptionChecked(MenuOption::FirstPersonLookAt)) {

-        menu->setIsOptionChecked(MenuOption::FirstPerson, false);
+        menu->setIsOptionChecked(MenuOption::FirstPersonLookAt, false);
         menu->setIsOptionChecked(MenuOption::LookAtCamera, true);

     } else if (menu->isOptionChecked(MenuOption::LookAtCamera)) {

@@ -5813,8 +5837,8 @@ void Application::cycleCamera() {

 void Application::cameraModeChanged() {
     switch (_myCamera.getMode()) {
-        case CAMERA_MODE_FIRST_PERSON:
-            Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, true);
+        case CAMERA_MODE_FIRST_PERSON_LOOK_AT:
+            Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPersonLookAt, true);
             break;
         case CAMERA_MODE_LOOK_AT:
             Menu::getInstance()->setIsOptionChecked(MenuOption::LookAtCamera, true);

@@ -5834,12 +5858,12 @@ void Application::changeViewAsNeeded(float boomLength) {
     // This is called when the boom length has changed
     bool boomLengthGreaterThanMinimum = (boomLength > MyAvatar::ZOOM_MIN);

-    if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON && boomLengthGreaterThanMinimum) {
-        Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, false);
+    if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON_LOOK_AT && boomLengthGreaterThanMinimum) {
+        Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPersonLookAt, false);
         Menu::getInstance()->setIsOptionChecked(MenuOption::LookAtCamera, true);
         cameraMenuChanged();
     } else if (_myCamera.getMode() == CAMERA_MODE_LOOK_AT && !boomLengthGreaterThanMinimum) {
-        Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, true);
+        Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPersonLookAt, true);
         Menu::getInstance()->setIsOptionChecked(MenuOption::LookAtCamera, false);
         cameraMenuChanged();
     }

@@ -5847,9 +5871,9 @@ void Application::changeViewAsNeeded(float boomLength) {

 void Application::cameraMenuChanged() {
     auto menu = Menu::getInstance();
-    if (menu->isOptionChecked(MenuOption::FirstPerson)) {
-        if (_myCamera.getMode() != CAMERA_MODE_FIRST_PERSON) {
-            _myCamera.setMode(CAMERA_MODE_FIRST_PERSON);
+    if (menu->isOptionChecked(MenuOption::FirstPersonLookAt)) {
+        if (_myCamera.getMode() != CAMERA_MODE_FIRST_PERSON_LOOK_AT) {
+            _myCamera.setMode(CAMERA_MODE_FIRST_PERSON_LOOK_AT);
             getMyAvatar()->setBoomLength(MyAvatar::ZOOM_MIN);
         }
     } else if (menu->isOptionChecked(MenuOption::LookAtCamera)) {

@@ -6509,7 +6533,7 @@ void Application::update(float deltaTime) {
         {
             PROFILE_RANGE(simulation, "MyAvatar");
             PerformanceTimer perfTimer("MyAvatar");
-            qApp->updateMyAvatarLookAtPosition();
+            qApp->updateMyAvatarLookAtPosition(deltaTime);
             avatarManager->updateMyAvatar(deltaTime);
         }
     }

@@ -8911,7 +8935,7 @@ void Application::setDisplayPlugin(DisplayPluginPointer newDisplayPlugin) {
     }

     if (isHmd && menu->isOptionChecked(MenuOption::FirstPersonHMD)) {
-        menu->setIsOptionChecked(MenuOption::FirstPerson, true);
+        menu->setIsOptionChecked(MenuOption::FirstPersonLookAt, true);
         cameraMenuChanged();
     }

@@ -288,7 +288,7 @@ public:

     virtual void pushPostUpdateLambda(void* key, const std::function<void()>& func) override;

-    void updateMyAvatarLookAtPosition();
+    void updateMyAvatarLookAtPosition(float deltaTime);

     float getGameLoopRate() const { return _gameLoopCounter.rate(); }

@@ -171,7 +171,7 @@ Menu::Menu() {

     // View > First Person
     auto firstPersonAction = cameraModeGroup->addAction(addCheckableActionToQMenuAndActionHash(
-        viewMenu, MenuOption::FirstPerson, 0,
+        viewMenu, MenuOption::FirstPersonLookAt, 0,
         true, qApp, SLOT(cameraMenuChanged())));

     firstPersonAction->setProperty(EXCLUSION_GROUP_KEY, QVariant::fromValue(cameraModeGroup));

@@ -111,7 +111,8 @@ namespace MenuOption {
     const QString ExpandSimulationTiming = "Expand /simulation";
     const QString ExpandPhysicsTiming = "Expand /physics";
     const QString ExpandUpdateTiming = "Expand /update";
-    const QString FirstPerson = "First Person";
+    const QString FirstPerson = "First Person Legacy";
+    const QString FirstPersonLookAt = "First Person";
     const QString FirstPersonHMD = "Enter First Person Mode in HMD";
     const QString FivePointCalibration = "5 Point Calibration";
     const QString FixGaze = "Fix Gaze (no saccade)";

@@ -892,6 +892,25 @@ void MyAvatar::simulate(float deltaTime, bool inView) {
         updateViewBoom();
     }

+    // Head's look at blending needs updating
+    // before we perform rig animations and IK.
+    {
+        PerformanceTimer perfTimer("lookat");
+
+        CameraMode mode = qApp->getCamera().getMode();
+        if (_scriptControlsHeadLookAt || mode == CAMERA_MODE_FIRST_PERSON_LOOK_AT || mode == CAMERA_MODE_FIRST_PERSON ||
+            mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE) {
+            if (!_pointAtActive || !_isPointTargetValid) {
+                updateHeadLookAt(deltaTime);
+            } else {
+                resetHeadLookAt();
+            }
+        } else if (_headLookAtActive) {
+            resetHeadLookAt();
+            _headLookAtActive = false;
+        }
+    }
+
     // update sensorToWorldMatrix for camera and hand controllers
     // before we perform rig animations and IK.
     updateSensorToWorldMatrix();

@@ -943,17 +962,6 @@ void MyAvatar::simulate(float deltaTime, bool inView) {
         head->setPosition(headPosition);
         head->setScale(getModelScale());
         head->simulate(deltaTime);
-        CameraMode mode = qApp->getCamera().getMode();
-        if (_scriptControlsHeadLookAt || mode == CAMERA_MODE_FIRST_PERSON || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE) {
-            if (!_pointAtActive || !_isPointTargetValid) {
-                updateHeadLookAt(deltaTime);
-            } else {
-                resetHeadLookAt();
-            }
-        } else if (_headLookAtActive){
-            resetHeadLookAt();
-            _headLookAtActive = false;
-        }
     }

     // Record avatars movements.

@@ -2082,7 +2090,7 @@ static float lookAtCostFunction(const glm::vec3& myForward, const glm::vec3& myP
     const float DISTANCE_FACTOR = 3.14f;
     const float MY_ANGLE_FACTOR = 1.0f;
     const float OTHER_ANGLE_FACTOR = 1.0f;
-    const float OTHER_IS_TALKING_TERM = otherIsTalking ? 1.0f : 0.0f;
+    const float OTHER_IS_TALKING_TERM = otherIsTalking ? -1.0f : 0.0f;
     const float LOOKING_AT_OTHER_ALREADY_TERM = lookingAtOtherAlready ? -0.2f : 0.0f;

     const float GREATEST_LOOKING_AT_DISTANCE = 10.0f; // meters

@@ -2108,9 +2116,12 @@ static float lookAtCostFunction(const glm::vec3& myForward, const glm::vec3& myP

 void MyAvatar::computeMyLookAtTarget(const AvatarHash& hash) {
     glm::vec3 myForward = _lookAtYaw * IDENTITY_FORWARD;
+    if (_skeletonModel->isLoaded()) {
+        myForward = getHeadJointFrontVector();
+    }
     glm::vec3 myPosition = getHead()->getEyePosition();
     CameraMode mode = qApp->getCamera().getMode();
-    if (mode == CAMERA_MODE_FIRST_PERSON) {
+    if (mode == CAMERA_MODE_FIRST_PERSON_LOOK_AT || mode == CAMERA_MODE_FIRST_PERSON) {
         myPosition = qApp->getCamera().getPosition();
     }

@@ -2120,7 +2131,7 @@ void MyAvatar::computeMyLookAtTarget(const AvatarHash& hash) {
     foreach (const AvatarSharedPointer& avatarData, hash) {
         std::shared_ptr<Avatar> avatar = std::static_pointer_cast<Avatar>(avatarData);
         if (!avatar->isMyAvatar() && avatar->isInitialized()) {
-            glm::vec3 otherForward = avatar->getHead()->getForwardDirection();
+            glm::vec3 otherForward = avatar->getHeadJointFrontVector();
             glm::vec3 otherPosition = avatar->getHead()->getEyePosition();
             const float TIME_WITHOUT_TALKING_THRESHOLD = 1.0f;
             bool otherIsTalking = avatar->getHead()->getTimeWithoutTalking() <= TIME_WITHOUT_TALKING_THRESHOLD;

@@ -2208,7 +2219,9 @@ void MyAvatar::updateLookAtTargetAvatar() {
     AvatarHash hash = DependencyManager::get<AvatarManager>()->getHashCopy();

     // determine what the best look at target for my avatar should be.
-    computeMyLookAtTarget(hash);
+    if (!_scriptControlsEyesLookAt) {
+        computeMyLookAtTarget(hash);
+    }

     // snap look at position for avatars that are looking at me.
     snapOtherAvatarLookAtTargetsToMe(hash);

@@ -2651,7 +2664,7 @@ void MyAvatar::updateMotors() {
     if (_characterController.getState() == CharacterController::State::Hover ||
             _characterController.computeCollisionMask() == BULLET_COLLISION_MASK_COLLISIONLESS) {
         CameraMode mode = qApp->getCamera().getMode();
-        if (!qApp->isHMDMode() && (mode == CAMERA_MODE_FIRST_PERSON || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE)) {
+        if (!qApp->isHMDMode() && (mode == CAMERA_MODE_FIRST_PERSON_LOOK_AT || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE)) {
             motorRotation = getLookAtRotation();
         } else {
             motorRotation = getMyHead()->getHeadOrientation();

@@ -3331,7 +3344,8 @@ bool MyAvatar::cameraInsideHead(const glm::vec3& cameraPosition) const {

 bool MyAvatar::shouldRenderHead(const RenderArgs* renderArgs) const {
     bool defaultMode = renderArgs->_renderMode == RenderArgs::DEFAULT_RENDER_MODE;
-    bool firstPerson = qApp->getCamera().getMode() == CAMERA_MODE_FIRST_PERSON;
+    bool firstPerson = qApp->getCamera().getMode() == CAMERA_MODE_FIRST_PERSON_LOOK_AT ||
+        qApp->getCamera().getMode() == CAMERA_MODE_FIRST_PERSON;
     bool overrideAnim = _skeletonModel ? _skeletonModel->getRig().isPlayingOverrideAnimation() : false;
     bool insideHead = cameraInsideHead(renderArgs->getViewFrustum().getPosition());
     return !defaultMode || (!firstPerson && !insideHead) || (overrideAnim && !insideHead);

@@ -3351,8 +3365,8 @@ void MyAvatar::updateOrientation(float deltaTime) {
     float targetSpeed = getDriveKey(YAW) * _yawSpeed;
     CameraMode mode = qApp->getCamera().getMode();
     bool computeLookAt = isReadyForPhysics() && !qApp->isHMDMode() &&
-        (mode == CAMERA_MODE_FIRST_PERSON || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE);
-    bool smoothCameraYaw = computeLookAt && mode != CAMERA_MODE_FIRST_PERSON;
+        (mode == CAMERA_MODE_FIRST_PERSON_LOOK_AT || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE);
+    bool smoothCameraYaw = computeLookAt && mode != CAMERA_MODE_FIRST_PERSON_LOOK_AT;
     if (smoothCameraYaw) {
         // For "Look At" and "Selfie" camera modes we also smooth the yaw rotation from right-click mouse movement.
         float speedFromDeltaYaw = deltaTime > FLT_EPSILON ? getDriveKey(DELTA_YAW) / deltaTime : 0.0f;

@@ -3476,11 +3490,11 @@ void MyAvatar::updateOrientation(float deltaTime) {
             if (isMovingFwdBwd) {
                 if (isMovingSideways) {
                     // Reorient avatar to face camera diagonal
-                    blend = mode == CAMERA_MODE_FIRST_PERSON ? 1.0f : DIAGONAL_TURN_BLEND;
+                    blend = mode == CAMERA_MODE_FIRST_PERSON_LOOK_AT ? 1.0f : DIAGONAL_TURN_BLEND;
                     float turnSign = getDriveKey(TRANSLATE_Z) < 0.0f ? -1.0f : 1.0f;
                     turnSign = getDriveKey(TRANSLATE_X) > 0.0f ? -turnSign : turnSign;
                     faceRotation = _lookAtYaw * glm::angleAxis(turnSign * 0.25f * PI, Vectors::UP);
-                } else if (mode == CAMERA_MODE_FIRST_PERSON) {
+                } else if (mode == CAMERA_MODE_FIRST_PERSON_LOOK_AT) {
                     blend = 1.0f;
                 }
             }

@@ -3551,11 +3565,11 @@ void MyAvatar::updateOrientation(float deltaTime) {
             glm::vec3 ajustedYawVector = cameraYawVector;
             float limitAngle = 0.0f;
             float triggerAngle = -glm::sin(glm::radians(TRIGGER_REORIENT_ANGLE));
-            if (mode == CAMERA_MODE_FIRST_PERSON) {
+            if (mode == CAMERA_MODE_FIRST_PERSON_LOOK_AT) {
                 limitAngle = glm::sin(glm::radians(90.0f - FIRST_PERSON_TRIGGER_REORIENT_ANGLE));
                 triggerAngle = limitAngle;
             }
-            float reorientAngle = mode == CAMERA_MODE_FIRST_PERSON ? FIRST_PERSON_REORIENT_ANGLE : DEFAULT_REORIENT_ANGLE;
+            float reorientAngle = mode == CAMERA_MODE_FIRST_PERSON_LOOK_AT ? FIRST_PERSON_REORIENT_ANGLE : DEFAULT_REORIENT_ANGLE;
             if (frontBackDot < limitAngle) {
                 if (frontBackDot < 0.0f) {
                     ajustedYawVector = (leftRightDot < 0.0f ? -avatarVectorRight : avatarVectorRight);

@@ -3591,7 +3605,7 @@ void MyAvatar::updateOrientation(float deltaTime) {
             }
             _headLookAtActive = true;
             const float FIRST_PERSON_RECENTER_SECONDS = 15.0f;
-            if (mode == CAMERA_MODE_FIRST_PERSON) {
+            if (mode == CAMERA_MODE_FIRST_PERSON_LOOK_AT) {
                 if (getDriveKey(YAW) + getDriveKey(STEP_YAW) + getDriveKey(DELTA_YAW) == 0.0f) {
                     if (_firstPersonSteadyHeadTimer < FIRST_PERSON_RECENTER_SECONDS) {
                         if (_firstPersonSteadyHeadTimer > 0.0f) {

@@ -3679,7 +3693,7 @@ glm::vec3 MyAvatar::scaleMotorSpeed(const glm::vec3 forward, const glm::vec3 rig
         // Desktop mode.
         direction = (zSpeed * forward) + (xSpeed * right);
         CameraMode mode = qApp->getCamera().getMode();
-        if ((mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_FIRST_PERSON || mode == CAMERA_MODE_SELFIE) &&
+        if ((mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_FIRST_PERSON_LOOK_AT || mode == CAMERA_MODE_SELFIE) &&
                 zSpeed != 0.0f && xSpeed != 0.0f && !isFlying()){
             direction = (zSpeed * forward);
         }

@@ -5344,7 +5358,7 @@ glm::quat MyAvatar::getOrientationForAudio() {
         case AudioListenerMode::FROM_HEAD: {
             // Using the camera's orientation instead, when the current mode is controlling the avatar's head.
             CameraMode mode = qApp->getCamera().getMode();
-            bool headFollowsCamera = mode == CAMERA_MODE_FIRST_PERSON || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE;
+            bool headFollowsCamera = mode == CAMERA_MODE_FIRST_PERSON_LOOK_AT || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE;
             result = headFollowsCamera ? qApp->getCamera().getOrientation() : getHead()->getFinalOrientationInWorldFrame();
             break;
         }

@@ -6517,7 +6531,7 @@ bool MyAvatar::getIsJointOverridden(int jointIndex) const {
     return _skeletonModel->getIsJointOverridden(jointIndex);
 }

-void MyAvatar::updateLookAtPosition(Camera& myCamera) {
+void MyAvatar::updateEyesLookAtPosition(float deltaTime) {

     updateLookAtTargetAvatar();

@@ -6547,6 +6561,13 @@ void MyAvatar::updateLookAtPosition(Camera& myCamera) {
             } else {
                 lookAtSpot = myHead->getEyePosition() + glm::normalize(leftVec) * 1000.0f;
             }
+        } else if (_scriptControlsEyesLookAt) {
+            if (_scriptEyesControlTimer < MAX_LOOK_AT_TIME_SCRIPT_CONTROL) {
+                _scriptEyesControlTimer += deltaTime;
+                lookAtSpot = _eyesLookAtTarget.get();
+            } else {
+                _scriptControlsEyesLookAt = false;
+            }
         } else {
             controller::Pose leftEyePose = getControllerPoseInAvatarFrame(controller::Action::LEFT_EYE);
             controller::Pose rightEyePose = getControllerPoseInAvatarFrame(controller::Action::RIGHT_EYE);

@@ -6609,13 +6630,14 @@ void MyAvatar::updateLookAtPosition(Camera& myCamera) {
                 if (headPose.isValid()) {
                     lookAtSpot = transformPoint(headPose.getMatrix(), glm::vec3(0.0f, 0.0f, TREE_SCALE));
                 } else {
-                    lookAtSpot = myHead->getEyePosition() +
-                        (getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
+                    lookAtSpot = _shouldTurnToFaceCamera ?
+                        myHead->getLookAtPosition() :
+                        myHead->getEyePosition() + getHeadJointFrontVector() * (float)TREE_SCALE;
                 }
             }
         }
     }
+    _eyesLookAtTarget.set(lookAtSpot);
     getHead()->setLookAtPosition(lookAtSpot);
 }

@@ -6698,12 +6720,83 @@ void MyAvatar::setHeadLookAt(const glm::vec3& lookAtTarget) {
     _lookAtScriptTarget = lookAtTarget;
 }

+void MyAvatar::setEyesLookAt(const glm::vec3& lookAtTarget) {
+    if (QThread::currentThread() != thread()) {
+        BLOCKING_INVOKE_METHOD(this, "setEyesLookAt",
+            Q_ARG(const glm::vec3&, lookAtTarget));
+        return;
+    }
+    _eyesLookAtTarget.set(lookAtTarget);
+    _scriptEyesControlTimer = 0.0f;
+    _scriptControlsEyesLookAt = true;
+}
+
 glm::vec3 MyAvatar::getLookAtPivotPoint() {
     glm::vec3 avatarUp = getWorldOrientation() * Vectors::UP;
     glm::vec3 yAxisEyePosition = getWorldPosition() + avatarUp * glm::dot(avatarUp, _skeletonModel->getDefaultEyeModelPosition());
     return yAxisEyePosition;
 }

+glm::vec3 MyAvatar::getCameraEyesPosition(float deltaTime) {
+    glm::vec3 defaultEyesPosition = getLookAtPivotPoint();
+    if (isFlying()) {
+        return defaultEyesPosition;
+    }
+    glm::vec3 avatarFrontVector = getWorldOrientation() * Vectors::FRONT;
+    glm::vec3 avatarUpVector = getWorldOrientation() * Vectors::UP;
+    // Compute the offset between the default and real eye positions.
+    glm::vec3 defaultEyesToEyesVector = getHead()->getEyePosition() - defaultEyesPosition;
+    float FRONT_OFFSET_IDLE_MULTIPLIER = 2.5f;
+    float FRONT_OFFSET_JUMP_MULTIPLIER = 1.5f;
+    float frontOffset = FRONT_OFFSET_IDLE_MULTIPLIER * glm::length(defaultEyesPosition - getDefaultEyePosition());
+
+    // Looking down will aproximate move the camera forward to meet the real eye position
+    float mixAlpha = glm::dot(_lookAtPitch * Vectors::FRONT, -avatarUpVector);
+    bool isLanding = false;
+    // When jumping the camera should follow the real eye on the Y coordenate
+    float upOffset = 0.0f;
+    if (isJumping() || _characterController.getState() == CharacterController::State::Takeoff) {
+        upOffset = glm::dot(defaultEyesToEyesVector, avatarUpVector);
+        frontOffset = glm::dot(defaultEyesToEyesVector, avatarFrontVector) * FRONT_OFFSET_JUMP_MULTIPLIER;
+        mixAlpha = 1.0f;
+        _landingAfterJumpTime = 0.0f;
+    } else {
+        // Limit the range effect from 45 to 0 degrees
+        // between the front camera and the down vectors
+        const float HEAD_OFFSET_RANGE_IN_DEGREES = 45.0f;
+        const float HEAD_OFFSET_RANGE_OUT_DEGREES = 0.0f;
+        float rangeIn = glm::cos(glm::radians(HEAD_OFFSET_RANGE_IN_DEGREES));
+        float rangeOut = glm::cos(glm::radians(HEAD_OFFSET_RANGE_OUT_DEGREES));
+        mixAlpha = mixAlpha < rangeIn ? 0.0f : (mixAlpha - rangeIn) / (rangeOut - rangeIn);
+        const float WAIT_TO_LAND_TIME = 1.0f;
+        if (_landingAfterJumpTime < WAIT_TO_LAND_TIME) {
+            _landingAfterJumpTime += deltaTime;
+            isLanding = true;
+        }
+    }
+    const float FPS = 60.0f;
+    float timeScale = deltaTime * FPS;
+    frontOffset = frontOffset < 0.0f ? 0.0f : mixAlpha * frontOffset;
+    glm::vec3 cameraOffset = upOffset * Vectors::UP + frontOffset * Vectors::FRONT;
+    const float JUMPING_TAU = 0.1f;
+    const float NO_JUMP_TAU = 0.3f;
+    const float LANDING_TAU = 0.05f;
+    float tau = NO_JUMP_TAU;
+    if (isJumping()) {
+        tau = JUMPING_TAU;
+    } else if (isLanding) {
+        tau = LANDING_TAU;
+    }
+    _cameraEyesOffset = _cameraEyesOffset + (cameraOffset - _cameraEyesOffset) * min(1.0f, tau * timeScale);
+    glm::vec3 estimatedCameraPosition = defaultEyesPosition + getWorldOrientation() * _cameraEyesOffset;
+    return estimatedCameraPosition;
+}
+
+bool MyAvatar::isJumping() {
+    return (_characterController.getState() == CharacterController::State::InAir ||
+        _characterController.getState() == CharacterController::State::Takeoff) && !isFlying();
+}
+
 bool MyAvatar::setPointAt(const glm::vec3& pointAtTarget) {
     if (QThread::currentThread() != thread()) {
         bool result = false;

@ -6730,4 +6823,3 @@ void MyAvatar::resetPointAt() {
|
||||||
POINT_BLEND_LINEAR_ALPHA_NAME, POINT_ALPHA_BLENDING);
|
POINT_BLEND_LINEAR_ALPHA_NAME, POINT_ALPHA_BLENDING);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1757,10 +1757,26 @@ public:
    /**jsdoc
     * Returns the current head look at target point in world coordinates.
     * @function MyAvatar.getHeadLookAt
     * @returns {Vec3} Default position between your avatar's eyes in world coordinates.
     * @returns {Vec3} The head's look at target in world coordinates.
     */
    Q_INVOKABLE glm::vec3 getHeadLookAt() { return _lookAtCameraTarget; }

    /**jsdoc
     * Forces the avatar's eyes to look at the specified location.
     * Once this method is called, API calls will have full control of the eyes for a limited time.
     * If this method is not called for two seconds, the engine will regain control of the eyes.
     * @function MyAvatar.setEyesLookAt
     * @param {Vec3} lookAtTarget - The target point in world coordinates.
     */
    Q_INVOKABLE void setEyesLookAt(const glm::vec3& lookAtTarget);

    /**jsdoc
     * Returns the current eyes look at target point in world coordinates.
     * @function MyAvatar.getEyesLookAt
     * @returns {Vec3} The eyes' look at target in world coordinates.
     */
    Q_INVOKABLE glm::vec3 getEyesLookAt() { return _eyesLookAtTarget.get(); }
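A minimal script-side sketch of the eyes look-at API documented above; the two-metre target point and the one-second refresh interval are illustrative only, not part of the change:

// Keep the avatar's eyes on a point two metres in front of it.
// setEyesLookAt() has to be refreshed at least every two seconds,
// otherwise the engine takes eye control back (see the jsdoc above).
var eyesTarget = Vec3.sum(MyAvatar.position,
    Vec3.multiply(2.0, Quat.getForward(MyAvatar.orientation)));
var refresh = Script.setInterval(function () {
    MyAvatar.setEyesLookAt(eyesTarget);
    print("Eyes target: " + JSON.stringify(MyAvatar.getEyesLookAt()));
}, 1000);
Script.scriptEnding.connect(function () {
    Script.clearInterval(refresh);
});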
    /**jsdoc
     * Aims the pointing directional blending towards the provided target point.
     * The "point" reaction should be triggered before using this method.

@ -1898,7 +1914,7 @@ public:
    bool getFlowActive() const;
    bool getNetworkGraphActive() const;

    void updateLookAtPosition(Camera& myCamera);
    void updateEyesLookAtPosition(float deltaTime);

    // sets the reaction enabled and triggered parameters of the passed in params
    // also clears internal reaction triggers

@ -1911,6 +1927,8 @@ public:

    bool getIsJointOverridden(int jointIndex) const;
    glm::vec3 getLookAtPivotPoint();
    glm::vec3 getCameraEyesPosition(float deltaTime);
    bool isJumping();

public slots:

@ -2647,6 +2665,9 @@ private:

    eyeContactTarget _eyeContactTarget;
    float _eyeContactTargetTimer { 0.0f };
    ThreadSafeValueCache<glm::vec3> _eyesLookAtTarget { glm::vec3() };
    bool _scriptControlsEyesLookAt{ false };
    float _scriptEyesControlTimer{ 0.0f };

    glm::vec3 _trackedHeadPosition;

@ -2956,6 +2977,9 @@ private:

    // used to prevent character from jumping after endSit is called.
    bool _endSitKeyPressComplete { false };

    glm::vec3 _cameraEyesOffset;
    float _landingAfterJumpTime { 0.0f };
};

QScriptValue audioListenModeToScriptValue(QScriptEngine* engine, const AudioListenerMode& audioListenerMode);
@ -55,12 +55,14 @@ static const QVariantMap DOCK_AREA {
/**jsdoc
 * The possible "relative position anchors" of an <code>InteractiveWindow</code>. Used when defining the `relativePosition` property of an `InteractiveWindow`.
 * @typedef {object} InteractiveWindow.RelativePositionAnchors
 * @property {InteractiveWindow.RelativePositionAnchor} NO_ANCHOR - Specifies that the position of the `InteractiveWindow` will not be relative to any part of the Interface window.
 * @property {InteractiveWindow.RelativePositionAnchor} TOP_LEFT - Specifies that the `relativePosition` of the `InteractiveWindow` will be offset from the top left of the Interface window.
 * @property {InteractiveWindow.RelativePositionAnchor} TOP_RIGHT - Specifies that the `relativePosition` of the `InteractiveWindow` will be offset from the top right of the Interface window.
 * @property {InteractiveWindow.RelativePositionAnchor} BOTTOM_RIGHT - Specifies that the `relativePosition` of the `InteractiveWindow` will be offset from the bottom right of the Interface window.
 * @property {InteractiveWindow.RelativePositionAnchor} BOTTOM_LEFT - Specifies that the `relativePosition` of the `InteractiveWindow` will be offset from the bottom left of the Interface window.
 */
static const QVariantMap RELATIVE_POSITION_ANCHOR {
    { "NO_ANCHOR", RelativePositionAnchor::NO_ANCHOR },
    { "TOP_LEFT", RelativePositionAnchor::TOP_LEFT },
    { "TOP_RIGHT", RelativePositionAnchor::TOP_RIGHT },
    { "BOTTOM_RIGHT", RelativePositionAnchor::BOTTOM_RIGHT },
@ -18,15 +18,23 @@
#include "DependencyManager.h"

/**jsdoc
 * The Keyboard API provides facilities to use 3D Physical keyboard.
 * The <code>Keyboard</code> API provides facilities to use an in-world, virtual keyboard. When enabled, this keyboard is
 * displayed instead of the 2D keyboard that raises at the bottom of the tablet or Web entities when a text input field has
 * focus and you're in HMD mode.
 *
 * @namespace Keyboard
 *
 * @hifi-interface
 * @hifi-client-entity
 * @hifi-avatar
 *
 * @property {bool} raised - <code>true</code> If the keyboard is visible <code>false</code> otherwise
 * @property {boolean} raised - <code>true</code> if the virtual keyboard is visible, <code>false</code> if it isn't.
 * @property {bool} password - <code>true</code> Will show * instead of characters in the text display <code>false</code> otherwise
 * @property {boolean} password - <code>true</code> if <code>"*"</code>s are displayed on the virtual keyboard's display
 *     instead of the characters typed, <code>false</code> if the actual characters are displayed.
 * @property {boolean} use3DKeyboard - <code>true</code> if user settings have "Use Virtual Keyboard" enabled,
 *     <code>false</code> if it's disabled. <em>Read-only.</em>
 * @property {boolean} preferMalletsOverLasers - <code>true</code> if user settings for the virtual keyboard have "Mallets"
 *     selected, <code>false</code> if "Lasers" is selected. <em>Read-only.</em>
 */

class KeyboardScriptingInterface : public QObject, public Dependency {

@ -39,14 +47,61 @@ class KeyboardScriptingInterface : public QObject, public Dependency {
public:
    KeyboardScriptingInterface() = default;
    ~KeyboardScriptingInterface() = default;

    /**jsdoc
     * Loads a JSON file that defines the virtual keyboard's layout. The default JSON file used is
     * {@link https://github.com/highfidelity/hifi/blob/master/interface/resources/config/keyboard.json|https://github.com/highfidelity/hifi/.../keyboard.json}.
     * @function Keyboard.loadKeyboardFile
     * @param {string} path - The keyboard JSON file.
     */
    Q_INVOKABLE void loadKeyboardFile(const QString& string);

    /**jsdoc
     * Enables the left mallet so that it is displayed when in HMD mode.
     * @function Keyboard.enableLeftMallet
     */
    Q_INVOKABLE void enableLeftMallet();

    /**jsdoc
     * Enables the right mallet so that it is displayed when in HMD mode.
     * @function Keyboard.enableRightMallet
     */
    Q_INVOKABLE void enableRightMallet();

    /**jsdoc
     * Disables the left mallet so that it is not displayed when in HMD mode.
     * @function Keyboard.disableLeftMallet
     */
    Q_INVOKABLE void disableLeftMallet();

    /**jsdoc
     * Disables the right mallet so that it is not displayed when in HMD mode.
     * @function Keyboard.disableRightMallet
     */
    Q_INVOKABLE void disableRightMallet();

    /**jsdoc
     * Configures the virtual keyboard to recognize a ray pointer as the left hand's laser.
     * @function Keyboard.setLeftHandLaser
     * @param {number} leftHandLaser - The ID of a ray pointer created by {@link Pointers.createPointer}.
     */
    Q_INVOKABLE void setLeftHandLaser(unsigned int leftHandLaser);

    /**jsdoc
     * Configures the virtual keyboard to recognize a ray pointer as the right hand's laser.
     * @function Keyboard.setRightHandLaser
     * @param {number} rightHandLaser - The ID of a ray pointer created by {@link Pointers.createPointer}.
     */
    Q_INVOKABLE void setRightHandLaser(unsigned int rightHandLaser);

    /**jsdoc
     * Checks whether an entity is part of the virtual keyboard.
     * @function Keyboard.containsID
     * @param {Uuid} entityID - The entity ID.
     * @returns {boolean} <code>true</code> if the entity is part of the virtual keyboard, <code>false</code> if it isn't.
     */
    Q_INVOKABLE bool containsID(const QUuid& overlayID) const;

private:
    bool getPreferMalletsOverLasers() const;
    bool isRaised() const;
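A hedged usage sketch for the Keyboard namespace documented above; the layout file name is made up, and the click-to-check wiring is just one way to exercise containsID():

// Hypothetical layout file shipped alongside the script.
Keyboard.loadKeyboardFile(Script.resolvePath("myKeyboardLayout.json"));
print("Prefer mallets: " + Keyboard.preferMalletsOverLasers);
Entities.mousePressOnEntity.connect(function (entityID, event) {
    if (Keyboard.containsID(entityID)) {
        print(entityID + " is part of the virtual keyboard.");
    }
});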
@ -41,7 +41,8 @@ void MenuScriptingInterface::removeMenu(const QString& menu) {

bool MenuScriptingInterface::menuExists(const QString& menu) {
    if (QThread::currentThread() == qApp->thread()) {
        return Menu::getInstance()->menuExists(menu);
        Menu* menuInstance = Menu::getInstance();
        return menuInstance && menuInstance->menuExists(menu);
    }
    bool result { false };
    BLOCKING_INVOKE_METHOD(Menu::getInstance(), "menuExists",

@ -84,7 +85,8 @@ void MenuScriptingInterface::removeMenuItem(const QString& menu, const QString&

bool MenuScriptingInterface::menuItemExists(const QString& menu, const QString& menuitem) {
    if (QThread::currentThread() == qApp->thread()) {
        return Menu::getInstance()->menuItemExists(menu, menuitem);
        Menu* menuInstance = Menu::getInstance();
        return menuInstance && menuInstance->menuItemExists(menu, menuitem);
    }
    bool result { false };
    BLOCKING_INVOKE_METHOD(Menu::getInstance(), "menuItemExists",

@ -96,7 +98,8 @@ bool MenuScriptingInterface::menuItemExists(const QString& menu, const QString&

bool MenuScriptingInterface::isOptionChecked(const QString& menuOption) {
    if (QThread::currentThread() == qApp->thread()) {
        return Menu::getInstance()->isOptionChecked(menuOption);
        Menu* menuInstance = Menu::getInstance();
        return menuInstance && menuInstance->isOptionChecked(menuOption);
    }
    bool result { false };
    BLOCKING_INVOKE_METHOD(Menu::getInstance(), "isOptionChecked",

@ -113,7 +116,8 @@ void MenuScriptingInterface::setIsOptionChecked(const QString& menuOption, bool

bool MenuScriptingInterface::isMenuEnabled(const QString& menuOption) {
    if (QThread::currentThread() == qApp->thread()) {
        return Menu::getInstance()->isOptionChecked(menuOption);
        Menu* menuInstance = Menu::getInstance();
        return menuInstance && menuInstance->isMenuEnabled(menuOption);
    }
    bool result { false };
    BLOCKING_INVOKE_METHOD(Menu::getInstance(), "isMenuEnabled",
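From a script these calls look as below; the guards above simply make them return false instead of dereferencing a null Menu instance during shutdown (the menu and item names here are illustrative):

// Illustrative menu and item names only.
if (!Menu.menuExists("Developer > My Tools")) {
    Menu.addMenu("Developer > My Tools");
}
print("Shadows checked: " + Menu.isOptionChecked("Shadows"));
print("Item exists: " + Menu.menuItemExists("Developer > My Tools", "Reload Assets"));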
@ -82,11 +82,18 @@ void RenderScriptingInterface::forceShadowsEnabled(bool enabled) {
    _shadowsEnabled = (enabled);
    _shadowsEnabledSetting.set(enabled);

    auto lightingModelConfig = qApp->getRenderEngine()->getConfiguration()->getConfig<MakeLightingModel>("RenderMainView.LightingModel");
    auto renderConfig = qApp->getRenderEngine()->getConfiguration();
    assert(renderConfig);
    auto lightingModelConfig = renderConfig->getConfig<MakeLightingModel>("RenderMainView.LightingModel");
    if (lightingModelConfig) {
        Menu::getInstance()->setIsOptionChecked(MenuOption::Shadows, enabled);
        lightingModelConfig->setShadow(enabled);
    }
    auto secondaryLightingModelConfig = renderConfig->getConfig<MakeLightingModel>("RenderSecondView.LightingModel");
    if (secondaryLightingModelConfig) {
        Menu::getInstance()->setIsOptionChecked(MenuOption::Shadows, enabled);
        secondaryLightingModelConfig->setShadow(enabled);
    }
    });
}
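Assuming this interface is exposed to scripts as the Render namespace with a shadowsEnabled property (that exposure is not shown in this diff), the practical effect of the change is that one script-side toggle now reaches both views:

// Assumption: Render.shadowsEnabled maps onto RenderScriptingInterface::setShadowsEnabled().
Render.shadowsEnabled = true;   // per the hunk above, now also applied to RenderSecondView.LightingModel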
@ -116,9 +116,10 @@ void InteractiveWindow::forwardKeyReleaseEvent(int key, int modifiers) {
}

void InteractiveWindow::onMainWindowGeometryChanged(QRect geometry) {
    // This handler is only connected `if (_isFullScreenWindow || _relativePositionAnchor != RelativePositionAnchor::NONE)`.
    if (_isFullScreenWindow) {
        repositionAndResizeFullScreenWindow();
    } else {
    } else if (_relativePositionAnchor != RelativePositionAnchor::NO_ANCHOR) {
        setPositionUsingRelativePositionAndAnchor(geometry);
    }
}

@ -326,7 +327,9 @@ InteractiveWindow::InteractiveWindow(const QString& sourceUrl, const QVariantMap
    connect(object, SIGNAL(presentationModeChanged()), this, SLOT(parentNativeWindowToMainWindow()), Qt::QueuedConnection);
#endif

    connect(qApp->getWindow(), &MainWindow::windowGeometryChanged, this, &InteractiveWindow::onMainWindowGeometryChanged, Qt::QueuedConnection);
    if (_isFullScreenWindow || _relativePositionAnchor != RelativePositionAnchor::NO_ANCHOR) {
        connect(qApp->getWindow(), &MainWindow::windowGeometryChanged, this, &InteractiveWindow::onMainWindowGeometryChanged, Qt::QueuedConnection);
    }

    QUrl sourceURL{ sourceUrl };
    // If the passed URL doesn't correspond to a known scheme, assume it's a local file path

@ -494,6 +497,9 @@ void InteractiveWindow::setPositionUsingRelativePositionAndAnchor(const QRect& m
        newPosition.x = mainWindowGeometry.x() + relativePosition.x;
        newPosition.y = mainWindowGeometry.y() + mainWindowGeometry.height() - relativePosition.y;
        break;
    case RelativePositionAnchor::NO_ANCHOR:
        // No-op.
        break;
    }

    // Make sure we include the dimensions of the docked widget!

@ -91,6 +91,7 @@ namespace InteractiveWindowEnums {
    Q_ENUM_NS(DockArea);

    enum RelativePositionAnchor {
        NO_ANCHOR,
        TOP_LEFT,
        TOP_RIGHT,
        BOTTOM_RIGHT,

@ -147,7 +148,7 @@ private:
    Q_INVOKABLE glm::vec2 getPosition() const;
    Q_INVOKABLE void setPosition(const glm::vec2& position);

    RelativePositionAnchor _relativePositionAnchor{ RelativePositionAnchor::TOP_LEFT };
    RelativePositionAnchor _relativePositionAnchor{ RelativePositionAnchor::NO_ANCHOR };
    Q_INVOKABLE RelativePositionAnchor getRelativePositionAnchor() const;
    Q_INVOKABLE void setRelativePositionAnchor(const RelativePositionAnchor& position);
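With NO_ANCHOR as the new default, anchoring is opt-in. A sketch of how a script might request an anchored window, assuming Desktop.createWindow() accepts the relativePosition/relativePositionAnchor properties wired up in this change and that the anchor values are reachable from scripts (both are assumptions; hello.qml is a placeholder):

// Sketch only: property names follow this diff; verify the anchor enum exposure in your build.
var win = Desktop.createWindow(Script.resolvePath("hello.qml"), {
    title: "Anchored window",
    size: { x: 320, y: 240 },
    relativePositionAnchor: 2,              // assumed to correspond to TOP_RIGHT in the enum above
    relativePosition: { x: 340, y: 20 }
});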
@ -854,7 +854,8 @@ void Avatar::render(RenderArgs* renderArgs) {
    float distanceToTarget = glm::length(toTarget);
    const float DISPLAYNAME_DISTANCE = 20.0f;
    updateDisplayNameAlpha(distanceToTarget < DISPLAYNAME_DISTANCE);
    if (!isMyAvatar() || renderArgs->_cameraMode != (int8_t)CAMERA_MODE_FIRST_PERSON) {
    if (!isMyAvatar() || !(renderArgs->_cameraMode == (int8_t)CAMERA_MODE_FIRST_PERSON_LOOK_AT
            || renderArgs->_cameraMode == (int8_t)CAMERA_MODE_FIRST_PERSON)) {
        auto& frustum = renderArgs->getViewFrustum();
        auto textPosition = getDisplayNamePosition();
        if (frustum.pointIntersectsFrustum(textPosition)) {
@ -113,10 +113,16 @@ void Head::simulate(float deltaTime) {
    // no blinking when brows are raised; blink less with increasing loudness
    const float BASE_BLINK_RATE = 15.0f / 60.0f;
    const float ROOT_LOUDNESS_TO_BLINK_INTERVAL = 0.25f;
    if (forceBlink || (_browAudioLift < EPSILON && shouldDo(glm::max(1.0f, sqrt(fabs(_averageLoudness - _longTermAverageLoudness)) *
    if (_forceBlinkToRetarget || forceBlink ||
        (_browAudioLift < EPSILON && shouldDo(glm::max(1.0f, sqrt(fabs(_averageLoudness - _longTermAverageLoudness)) *
        ROOT_LOUDNESS_TO_BLINK_INTERVAL) / BASE_BLINK_RATE, deltaTime))) {
        float randSpeedVariability = randFloat();
        float eyeBlinkVelocity = BLINK_SPEED + randSpeedVariability * BLINK_SPEED_VARIABILITY;
        if (_forceBlinkToRetarget) {
            // Slow down the blink by half if resetting the eye target
            eyeBlinkVelocity = 0.5f * eyeBlinkVelocity;
            _forceBlinkToRetarget = false;
        }
        _leftEyeBlinkVelocity = eyeBlinkVelocity;
        _rightEyeBlinkVelocity = eyeBlinkVelocity;
        if (randFloat() < 0.5f) {

@ -131,13 +137,12 @@ void Head::simulate(float deltaTime) {

    if (_leftEyeBlink == FULLY_CLOSED) {
        _leftEyeBlinkVelocity = -BLINK_SPEED;
        updateEyeLookAt();
    } else if (_leftEyeBlink == FULLY_OPEN) {
        _leftEyeBlinkVelocity = 0.0f;
    }
    if (_rightEyeBlink == FULLY_CLOSED) {
        _rightEyeBlinkVelocity = -BLINK_SPEED;

    } else if (_rightEyeBlink == FULLY_OPEN) {
        _rightEyeBlinkVelocity = 0.0f;
    }

@ -369,3 +374,24 @@ float Head::getFinalPitch() const {
float Head::getFinalRoll() const {
    return glm::clamp(_baseRoll + _deltaRoll, MIN_HEAD_ROLL, MAX_HEAD_ROLL);
}

void Head::setLookAtPosition(const glm::vec3& lookAtPosition) {
    if (_isEyeLookAtUpdated && _requestLookAtPosition != lookAtPosition) {
        _lookAtPositionChanged = usecTimestampNow();
        glm::vec3 oldAvatarLookAtVector = _requestLookAtPosition - _owningAvatar->getWorldPosition();
        glm::vec3 newAvatarLookAtVector = lookAtPosition - _owningAvatar->getWorldPosition();
        const float MIN_BLINK_ANGLE = 0.35f; // 20 degrees
        _forceBlinkToRetarget = angleBetween(oldAvatarLookAtVector, newAvatarLookAtVector) > MIN_BLINK_ANGLE;
        if (_forceBlinkToRetarget) {
            _isEyeLookAtUpdated = false;
        } else {
            _lookAtPosition = lookAtPosition;
        }
    }
    _requestLookAtPosition = lookAtPosition;
}

void Head::updateEyeLookAt() {
    _lookAtPosition = _requestLookAtPosition;
    _isEyeLookAtUpdated = true;
}

@ -79,6 +79,9 @@ public:

    float getTimeWithoutTalking() const { return _timeWithoutTalking; }

    virtual void setLookAtPosition(const glm::vec3& lookAtPosition) override;
    void updateEyeLookAt();

protected:
    // disallow copies of the Head, copy of owning Avatar is disallowed too
    Head(const Head&);

@ -123,6 +126,10 @@ protected:
    int _leftEyeLookAtID;
    int _rightEyeLookAtID;

    glm::vec3 _requestLookAtPosition;
    bool _forceBlinkToRetarget { false };
    bool _isEyeLookAtUpdated { false };

    // private methods
    void calculateMouthShapes(float timeRatio);
    void applyEyelidOffset(glm::quat headOrientation);
@ -3262,3 +3262,12 @@ void AvatarData::clearAvatarGrabData(const QUuid& grabID) {
    }
    });
}

glm::vec3 AvatarData::getHeadJointFrontVector() const {
    int headJointIndex = getJointIndex("Head");
    glm::quat headJointRotation = Quaternions::Y_180 * getAbsoluteJointRotationInObjectFrame(headJointIndex); // getAbsoluteJointRotationInRigFrame(headJointIndex, headJointRotation);
    headJointRotation = getWorldOrientation() * headJointRotation;
    float headYaw = safeEulerAngles(headJointRotation).y;
    glm::quat headYawRotation = glm::angleAxis(headYaw, Vectors::UP);
    return headYawRotation * IDENTITY_FORWARD;
}

@ -1508,6 +1508,7 @@ public:
    std::vector<AvatarSkeletonTrait::UnpackedJointData> getSkeletonData() const;
    void sendSkeletonData() const;
    QVector<JointData> getJointData() const;
    glm::vec3 getHeadJointFrontVector() const;

signals:
@ -65,7 +65,7 @@ public:
    void clearBlendshapeCoefficients();

    const glm::vec3& getLookAtPosition() const { return _lookAtPosition; }
    void setLookAtPosition(const glm::vec3& lookAtPosition) {
    virtual void setLookAtPosition(const glm::vec3& lookAtPosition) {
        if (_lookAtPosition != lookAtPosition) {
            _lookAtPositionChanged = usecTimestampNow();
        }
@ -64,6 +64,11 @@ void RecordingScriptingInterface::playClip(NetworkClipLoaderPointer clipLoader,
}

void RecordingScriptingInterface::loadRecording(const QString& url, QScriptValue callback) {
    if (QThread::currentThread() != thread()) {
        BLOCKING_INVOKE_METHOD(this, "loadRecording", Q_ARG(const QString&, url), Q_ARG(QScriptValue, callback));
        return;
    }

    auto clipLoader = DependencyManager::get<recording::ClipCache>()->getClipLoader(url);

    if (clipLoader->isLoaded()) {

@ -117,6 +122,11 @@ void RecordingScriptingInterface::startPlaying() {
}

void RecordingScriptingInterface::setPlayerVolume(float volume) {
    if (QThread::currentThread() != thread()) {
        BLOCKING_INVOKE_METHOD(this, "setPlayerVolume", Q_ARG(float, volume));
        return;
    }

    _player->setVolume(std::min(std::max(volume, 0.0f), 1.0f));
}

@ -137,6 +147,11 @@ void RecordingScriptingInterface::setPlayFromCurrentLocation(bool playFromCurren
}

void RecordingScriptingInterface::setPlayerLoop(bool loop) {
    if (QThread::currentThread() != thread()) {
        BLOCKING_INVOKE_METHOD(this, "setPlayerLoop", Q_ARG(bool, loop));
        return;
    }

    _player->loop(loop);
}

@ -195,6 +210,16 @@ void RecordingScriptingInterface::startRecording() {
}

void RecordingScriptingInterface::stopRecording() {
    if (!_recorder->isRecording()) {
        qCWarning(scriptengine) << "Recorder is not running";
        return;
    }

    if (QThread::currentThread() != thread()) {
        BLOCKING_INVOKE_METHOD(this, "stopRecording");
        return;
    }

    _recorder->stop();
    _lastClip = _recorder->getClip();
    _lastClip->seek(0);
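With these guards the player can be driven from any script thread; a brief sketch follows (the clip URL is a placeholder, and the callback signature of (success, url) is an assumption based on the surrounding implementation):

// Placeholder clip URL.
Recording.loadRecording("atp:/recordings/example.hfr", function (success, url) {
    if (!success) {
        print("Could not load " + url);
        return;
    }
    Recording.setPlayerVolume(0.5);
    Recording.setPlayerLoop(true);
    Recording.setPlayFromCurrentLocation(true);
    Recording.startPlaying();
});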
@ -25,8 +25,14 @@
 * <tr>
 *   <td><strong>First Person</strong></td>
 *   <td><code>"first person"</code></td>
 *   <td>The camera is positioned such that you have the same view as your avatar. The camera moves and rotates with your
 *   <td>Legacy first person camera mode. The camera is positioned such that you have the same view as your avatar.
 *   avatar.</td>
 *   The camera moves and rotates with your avatar.</td>
 * </tr>
 * <tr>
 *   <td><strong>First Person Look At</strong></td>
 *   <td><code>"first person look at"</code></td>
 *   <td>Default first person camera mode. The camera is positioned such that you have the same view as your avatar.
 *   The camera moves and rotates with your avatar's head.</td>
 * </tr>
 * <tr>
 *   <td><strong>Third Person</strong></td>

@ -73,6 +79,8 @@ CameraMode stringToMode(const QString& mode) {
        return CAMERA_MODE_THIRD_PERSON;
    } else if (mode == "first person") {
        return CAMERA_MODE_FIRST_PERSON;
    } else if (mode == "first person look at") {
        return CAMERA_MODE_FIRST_PERSON_LOOK_AT;
    } else if (mode == "mirror") {
        return CAMERA_MODE_MIRROR;
    } else if (mode == "independent") {

@ -92,6 +100,8 @@ QString modeToString(CameraMode mode) {
        return "third person";
    } else if (mode == CAMERA_MODE_FIRST_PERSON) {
        return "first person";
    } else if (mode == CAMERA_MODE_FIRST_PERSON_LOOK_AT) {
        return "first person look at";
    } else if (mode == CAMERA_MODE_MIRROR) {
        return "mirror";
    } else if (mode == CAMERA_MODE_INDEPENDENT) {

@ -19,6 +19,7 @@ enum CameraMode
{
    CAMERA_MODE_NULL = -1,
    CAMERA_MODE_THIRD_PERSON,
    CAMERA_MODE_FIRST_PERSON_LOOK_AT,
    CAMERA_MODE_FIRST_PERSON,
    CAMERA_MODE_MIRROR,
    CAMERA_MODE_INDEPENDENT,
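The scripts later in this change switch to the new mode with exactly this string; for example:

// Same pattern the updated helper scripts below use when entering HMD mode.
HMD.displayModeChanged.connect(function (isHMDMode) {
    if (isHMDMode) {
        Camera.setModeString("first person look at");
    }
});
print("Camera mode is now: " + Camera.mode);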
@ -0,0 +1,43 @@
//
//  PropItem.qml
//
//  Created by Sam Gateau on 3/2/2019
//  Copyright 2019 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//

import QtQuick 2.7

PropItem {
    Global { id: global }
    id: root

    // Scalar Prop
    // property bool integral: false
    // property var numDigits: 2

    Rectangle {
        id: valueLabel

        anchors.left: root.splitter.right
        anchors.right: root.right
        anchors.verticalCenter: root.verticalCenter
        // horizontalAlignment: global.valueTextAlign
        height: global.slimHeight

        function getColor() {
            var c = root.valueVarGetter();
            return Qt.rgba(c.red, c.green, c.blue, 1.0);
        }

        // background: Rectangle {
        color: { return getColor() }
        border.color: global.colorBorderLight
        border.width: global.valueBorderWidth
        radius: global.valueBorderRadius
        // }
    }
}
@ -13,12 +13,19 @@ import QtQuick.Layouts 1.3

import stylesUit 1.0
import controlsUit 1.0 as HifiControls
import "../lib/prop" as Prop

Rectangle {
    HifiConstants { id: hifi;}
    color: Qt.rgba(hifi.colors.baseGray.r, hifi.colors.baseGray.g, hifi.colors.baseGray.b, 0.8);
    id: root;

    property var theMaterial: {}
    property var theMaterialAttributes: {}
    property var hasMaterial: false

    property var isReadOnly: true

    function fromScript(message) {
        switch (message.method) {
        case "setObjectInfo":

@ -26,40 +33,213 @@ Rectangle {
            break;
        case "setMaterialJSON":
            materialJSONText.text = message.params.materialJSONText;
            theMaterial = JSON.parse(message.params.materialJSONText)
            theMaterialAttributes = theMaterial.materials
            hasMaterial = (theMaterial !== undefined)
            break;
        }
    }

    Rectangle {
    Column {
        id: entityIDContainer
        height: 52
        anchors.left: parent.left
        width: root.width
        anchors.right: parent.right
        color: Qt.rgba(root.color.r * 0.7, root.color.g * 0.7, root.color.b * 0.7, 0.8);
        TextEdit {
        Rectangle {
            id: entityIDInfo
            id: entityIDContainer
            text: "Type: Unknown\nID: None\nMesh Part: Unknown"
            height: 52
            font.pointSize: 9
            color: "#FFFFFF"
            readOnly: true
            selectByMouse: true
        }
    }

    Original.ScrollView {
        anchors.top: entityIDContainer.bottom
        height: root.height - entityIDContainer.height
        width: root.width
        clip: true
        Original.ScrollBar.horizontal.policy: Original.ScrollBar.AlwaysOff
        TextEdit {
            id: materialJSONText
            text: "Click an object to get material JSON"
            width: root.width
            font.pointSize: 10
            color: Qt.rgba(root.color.r * 0.7, root.color.g * 0.7, root.color.b * 0.7, 0.8);
            color: "#FFFFFF"
            TextEdit {
            readOnly: true
                id: entityIDInfo
            selectByMouse: true
                text: "Type: Unknown\nID: None\nMesh Part: Unknown"
            wrapMode: Text.WordWrap
                font.pointSize: 9
                color: "#FFFFFF"
                readOnly: true
                selectByMouse: true
            }
        }

        Prop.PropString {
            visible: hasMaterial && ("name" in theMaterialAttributes)
            label: "name"
            object: theMaterialAttributes
            property: "name"
            readOnly: isReadOnly
        }
        Prop.PropString {
            visible: hasMaterial && ("model" in theMaterialAttributes)
            label: "model"
            object: theMaterialAttributes
            property: "model"
            readOnly: isReadOnly
        }

        Prop.PropColor {
            visible: hasMaterial && ("albedo" in theMaterialAttributes)
            label: "albedo"
            object: theMaterialAttributes
            property: "albedo"
            readOnly: isReadOnly
        }
        Prop.PropString {
            visible: hasMaterial && ("albedoMap" in theMaterialAttributes)
            label: "albedoMap"
            object: theMaterialAttributes
            property: "albedoMap"
            readOnly: isReadOnly
        }

        Prop.PropScalar {
            visible: hasMaterial && ("opacity" in theMaterialAttributes)
            label: "opacity"
            object: theMaterialAttributes
            property: "opacity"
            readOnly: isReadOnly
        }
        Prop.PropString {
            visible: hasMaterial && ("opacityMap" in theMaterialAttributes)
            label: "opacityMap"
            object: theMaterialAttributes
            property: "opacityMap"
            readOnly: isReadOnly
        }
        Prop.PropString {
            visible: hasMaterial && ("opacityMapMode" in theMaterialAttributes)
            label: "opacityMapMode"
            object: theMaterialAttributes
            property: "opacityMapMode"
            readOnly: isReadOnly
        }
        /*Prop.PropEnum {
            visible: hasMaterial && ("opacityMapMode" in theMaterialAttributes)
            label: "opacityMapMode"
            object: theMaterialAttributes
            property: "opacityMapMode"
            readOnly: isReadOnly
            enums: ["None", "Mask", "Blend"]
        } */
        Prop.PropScalar {
            visible: hasMaterial && ("opacityCutoff" in theMaterialAttributes)
            label: "opacity Cutoff"
            object: theMaterialAttributes
            property: "opacityCutoff"
            readOnly: isReadOnly
        }

        Prop.PropString {
            visible: hasMaterial && ("occlusionMap" in theMaterialAttributes)
            label: "occlusionMap"
            object: theMaterialAttributes
            property: "occlusionMap"
            readOnly: isReadOnly
        }
        Prop.PropString {
            visible: hasMaterial && ("normalMap" in theMaterialAttributes)
            label: "normalMap"
            object: theMaterialAttributes
            property: "normalMap"
            readOnly: isReadOnly
        }
        Prop.PropString {
            visible: hasMaterial && ("bumpMap" in theMaterialAttributes)
            label: "normalMap from bumpMap"
            object: theMaterialAttributes
            property: "bumpMap"
            readOnly: isReadOnly
        }

        Prop.PropScalar {
            visible: hasMaterial && ("roughness" in theMaterialAttributes)
            label: "roughness"
            object: theMaterialAttributes
            property: "roughness"
            readOnly: isReadOnly
        }
        Prop.PropScalar {
            visible: hasMaterial && ("metallic" in theMaterialAttributes)
            label: "metallic"
            object: theMaterialAttributes
            property: "metallic"
            readOnly: isReadOnly
        }
        Prop.PropString {
            visible: hasMaterial && ("roughnessMap" in theMaterialAttributes)
            label: "roughnessMap"
            object: theMaterialAttributes
            property: "roughnessMap"
            readOnly: isReadOnly
        }
        Prop.PropString {
            visible: hasMaterial && ("glossMap" in theMaterialAttributes)
            label: "roughnessMap from glossMap"
            object: theMaterialAttributes
            property: "glossMap"
            readOnly: isReadOnly
        }
        Prop.PropString {
            visible: hasMaterial && ("metallicMap" in theMaterialAttributes)
            label: "metallicMap"
            object: theMaterialAttributes
            property: "metallicMap"
            readOnly: isReadOnly
        }
        Prop.PropString {
            visible: hasMaterial && ("specularMap" in theMaterialAttributes)
            label: "metallicMap from specularMap"
            object: theMaterialAttributes
            property: "specularMap"
            readOnly: isReadOnly
        }

        Prop.PropScalar {
            visible: hasMaterial && ("scattering" in theMaterialAttributes)
            label: "scattering"
            object: theMaterialAttributes
            property: "scattering"
            readOnly: isReadOnly
        }
        Prop.PropString {
            visible: hasMaterial && ("scatteringMap" in theMaterialAttributes)
            label: "scatteringMap"
            object: theMaterialAttributes
            property: "scatteringMap"
            readOnly: isReadOnly
        }

        Prop.PropColor {
            visible: hasMaterial && ("emissive" in theMaterialAttributes)
            label: "emissive"
            object: theMaterialAttributes
            property: "emissive"
            readOnly: isReadOnly
        }
        Prop.PropString {
            visible: hasMaterial && ("emissiveMap" in theMaterialAttributes)
            label: "emissiveMap"
            object: theMaterialAttributes
            property: "emissiveMap"
            readOnly: isReadOnly
        }

        Original.ScrollView {
            // anchors.top: entityIDContainer.bottom
            height: root.height - entityIDContainer.height
            width: root.width
            clip: true
            Original.ScrollBar.horizontal.policy: Original.ScrollBar.AlwaysOff
            TextEdit {
                id: materialJSONText
                text: "Click an object to get material JSON"
                width: root.width
                font.pointSize: 10
                color: "#FFFFFF"
                readOnly: true
                selectByMouse: true
                wrapMode: Text.WordWrap
            }
        }
    }
}
@ -377,7 +377,7 @@ function displayInitialLaunchWindow() {

    initialLaunchWindow.fromQml.connect(onMessageFromInitialLaunchWindow);

    Window.location = "file:///~/serverless/tutorial.json";
    Window.location = "file:///~/serverless/empty.json";
}

var SECOND_LAUNCH_QML_PATH = Script.resolvePath("simplifiedFTUE/SecondLaunchWindow.qml");

@ -405,7 +405,7 @@ function displaySecondLaunchWindow() {

    secondLaunchWindow.fromQml.connect(onMessageFromSecondLaunchWindow);

    Window.location = "file:///~/serverless/tutorial.json";
    Window.location = "file:///~/serverless/empty.json";
}

function closeInitialLaunchWindow() {

@ -663,7 +663,7 @@ function handleSecondLaunchWindowVisibleChanged(shouldBeVisible) {

function onDisplayModeChanged(isHMDMode) {
    if (isHMDMode) {
        Camera.setModeString("first person");
        Camera.setModeString("first person look at");
    }

    if (isHMDMode) {

@ -16,7 +16,7 @@
// Automatically enter first person mode when entering HMD mode
HMD.displayModeChanged.connect(function(isHMDMode) {
    if (isHMDMode) {
        Camera.setModeString("first person");
        Camera.setModeString("first person look at");
    }
});

@ -272,7 +272,7 @@
    currentProgress = 0.0;
    connectionToDomainFailed = false;
    previousCameraMode = Camera.mode;
    Camera.mode = "first person";
    Camera.mode = "first person look at";
    updateProgressBar(0.0);
    scaleInterstitialPage(MyAvatar.sensorToWorldScale);
    timer = Script.setTimeout(update, 2000);
@ -52,8 +52,8 @@ function calcSpawnInfo(hand, landscape) {

    var LEFT_HAND = Controller.Standard.LeftHand;
    var sensorToWorldScale = MyAvatar.sensorToWorldScale;
    var headPos = (HMD.active && Camera.mode === "first person") ? HMD.position : Camera.position;
    var headPos = (HMD.active && (Camera.mode === "first person" || Camera.mode === "first person look at")) ? HMD.position : Camera.position;
    var headRot = Quat.cancelOutRollAndPitch((HMD.active && Camera.mode === "first person") ?
    var headRot = Quat.cancelOutRollAndPitch((HMD.active && (Camera.mode === "first person" || Camera.mode === "first person look at")) ?
        HMD.orientation : Camera.orientation);

    var right = Quat.getRight(headRot);

@ -53,7 +53,7 @@
function handJointName(hand) {
    var jointName;
    if (hand === LEFT_HAND) {
        if (Camera.mode === "first person") {
        if (Camera.mode === "first person" || Camera.mode === "first person look at") {
            jointName = "_CONTROLLER_LEFTHAND";
        } else if (Camera.mode === "third person") {
            jointName = "_CAMERA_RELATIVE_CONTROLLER_LEFTHAND";

@ -61,7 +61,7 @@
            jointName = "LeftHand";
        }
    } else {
        if (Camera.mode === "first person") {
        if (Camera.mode === "first person" || Camera.mode === "first person look at") {
            jointName = "_CONTROLLER_RIGHTHAND";
        } else if (Camera.mode === "third person") {
            jointName = "_CAMERA_RELATIVE_CONTROLLER_RIGHTHAND";
@ -61,17 +61,21 @@ Item {
    Rectangle {
        width: 5
        height: 5
        color: "red"
        color: "blue"
        ColorAnimation on color { loops: Animation.Infinite; from: "red"; to: "yellow"; duration: 1000 }
        ColorAnimation on color { loops: Animation.Infinite; from: "blue"; to: "yellow"; duration: 1000 }
    }

    WebEngineView {
        id: root
        url: "https://google.com/"
        url: "https://www.webrtc-experiment.com/Pluginfree-Screen-Sharing/#19583796789766627"
        x: 6; y: 6;
        // url: "https://vimeo.com/108650530"
        width: parent.width * 0.8
        // url: "https://www.youtube.com/watch?v=7EWQOeQf32U&autoplay=1&loop=1"
        height: parent.height * 0.8
        // x: 6; y: 6;
        anchors.fill: parent
        // width: parent.width * 0.8
        // height: parent.height * 0.8
    }

}

@ -39,7 +39,7 @@ void MacQml::init() {
    _surface->load(url, callback);
    _surface->resize(_window->size());
    _surface->resume();
    _window->installEventFilter(_surface.get());
}

void MacQml::draw() {

@ -9,7 +9,7 @@ public:
    QmlPtr _surface;
    GLuint _fbo{ 0 };

    MacQml(const QWindow* window) : Parent(window) {}
    MacQml(QWindow* window) : Parent(window) {}
    void update() override;
    void init() override;
    void draw() override;

@ -24,7 +24,7 @@ public:
    std::array<std::array<QmlInfo, DIVISIONS_Y>, DIVISIONS_X> _surfaces;
    GLuint _fbo{ 0 };

    StressWeb(const QWindow* window) : Parent(window) {}
    StressWeb(QWindow* window) : Parent(window) {}
    static QString getSourceUrl(bool video);
    void buildSurface(QmlInfo& qmlInfo, bool video);
    void destroySurface(QmlInfo& qmlInfo);

@ -8,8 +8,8 @@
class TestCase {
public:
    using QmlPtr = QSharedPointer<hifi::qml::OffscreenSurface>;
    using Builder = std::function<TestCase*(const QWindow*)>;
    using Builder = std::function<TestCase*(QWindow*)>;
    TestCase(const QWindow* window) : _window(window) {}
    TestCase(QWindow* window) : _window(window) {}
    virtual void init();
    virtual void destroy();
    virtual void update();

@ -18,6 +18,6 @@ public:

protected:
    QOpenGLFunctions_4_1_Core _glf;
    const QWindow* _window;
    QWindow* _window;
    std::function<void(uint32_t, void*)> _discardLamdba;
};

@ -205,12 +205,22 @@ void TestWindow::resizeEvent(QResizeEvent* ev) {

int main(int argc, char** argv) {
#ifdef Q_OS_MAC
    auto format = getDefaultOpenGLSurfaceFormat();
    format.setVersion(4, 1);
    // Deal with some weirdness in the chromium context sharing on Mac.
    // The primary share context needs to be 3.2, so that Chromium will
    // succeed in its creation of its command stub contexts.
    format.setVersion(3, 2);
    // This appears to resolve the issues with corrupted fonts on OSX. No
    // idea why.
    qputenv("QT_ENABLE_GLYPH_CACHE_WORKAROUND", "true");
    // https://i.kym-cdn.com/entries/icons/original/000/008/342/ihave.jpg
    QSurfaceFormat::setDefaultFormat(format);
#endif

    QGuiApplication app(argc, argv);
    TestCase::Builder builder = [](const QWindow* window)->TestCase*{ return new MacQml(window); };
    TestCase::Builder builder = [](QWindow* window)->TestCase*{ return new MacQml(window); };
    TestWindow window(builder);
    return app.exec();
}
@ -1,8 +1,10 @@
# General
This document describes the process to build Qt 5.12.3.
-Note that there are three patches. The first (to qfloat16.h) is needed to compile QT 5.12.3 on Visual Studio 2017 due to a bug in Visual Studio (*bitset* will not compile. Note that there is a change in CMakeLists.txt to support this.
-The second patch is to OpenSL ES audio.
-The third is a patch to QScriptEngine to prevent crashes in QScriptEnginePrivate::reportAdditionalMemoryCost, during garbage collection. See https://bugreports.qt.io/browse/QTBUG-76176
+Note that there are several patches.
+* The first (to qfloat16.h) is needed to compile QT 5.12.3 on Visual Studio 2017 due to a bug in Visual Studio (*bitset* will not compile. Note that there is a change in CMakeLists.txt to support this.
+* The second patch is to OpenSL ES audio and allow audio echo cancelllation on Android.
+* The third is a patch to QScriptEngine to prevent crashes in QScriptEnginePrivate::reportAdditionalMemoryCost, during garbage collection. See https://bugreports.qt.io/browse/QTBUG-76176
+* The fourth is a patch which fixes video playback on WebEngineViews on mac. See https://bugreports.qt.io/browse/QTBUG-70967
## Requirements
### Windows
1. Visual Studio 2017

@ -222,6 +224,7 @@ git clone --recursive git://code.qt.io/qt/qt5.git -b 5.12.3 --single-branch
`cd qt5`
`git apply --ignore-space-change --ignore-whitespace patches/aec.patch`
`git apply --ignore-space-change --ignore-whitespace patches/qtscript-crash-fix.patch`
+`git apply --ignore-space-change --ignore-whitespace patches/mac-web-video.patch`
`cd ..`
#### Configuring
`mkdir qt5-install`

tools/qt-builder/patches/mac-web-video.patch (new file, 247 lines)

@ -0,0 +1,247 @@
Submodule qtwebengine contains modified content
diff --git a/qtwebengine/src/core/stream_video_node.cpp b/qtwebengine/src/core/stream_video_node.cpp
index 29922f86..baa39d3b 100644
--- a/qtwebengine/src/core/stream_video_node.cpp
+++ b/qtwebengine/src/core/stream_video_node.cpp
@@ -62,38 +62,45 @@ protected:
const char *vertexShader() const override {
// Keep in sync with cc::VertexShaderVideoTransform
static const char *shader =
- "attribute highp vec4 a_position;\n"
- "attribute mediump vec2 a_texCoord;\n"
- "uniform highp mat4 matrix;\n"
- "uniform highp mat4 texMatrix;\n"
- "varying mediump vec2 v_texCoord;\n"
- "void main() {\n"
- " gl_Position = matrix * a_position;\n"
- " v_texCoord = vec4(texMatrix * vec4(a_texCoord.x, 1.0 - a_texCoord.y, 0.0, 1.0)).xy;\n"
- "}";
+ R"SHADER(#version 150 core
+in vec4 a_position;
+in vec2 a_texCoord;
+uniform mat4 matrix;
+uniform mat4 texMatrix;
+out vec2 v_texCoord;
+void main() {
+ gl_Position = matrix * a_position;
+ v_texCoord = vec4(texMatrix * vec4(a_texCoord.x, 1.0 - a_texCoord.y, 0.0, 1.0)).xy;
+}
+ )SHADER";
return shader;
}

const char *fragmentShader() const override {
// Keep in sync with cc::FragmentShaderRGBATexAlpha
static const char *shaderExternal =
- "#extension GL_OES_EGL_image_external : require\n"
- "varying mediump vec2 v_texCoord;\n"
- "uniform samplerExternalOES s_texture;\n"
- "uniform lowp float alpha;\n"
- "void main() {\n"
- " lowp vec4 texColor = texture2D(s_texture, v_texCoord);\n"
- " gl_FragColor = texColor * alpha;\n"
- "}";
+ R"SHADER(#version 150 core
+#extension GL_OES_EGL_image_external : require
+in vec2 v_texCoord;
+uniform samplerExternalOES s_texture;
+uniform float alpha;
+out vec4 fragColor;
+void main() {
+ vec4 texColor = texture(s_texture, v_texCoord);
+ fragColor = texColor * alpha;
+}
+ )SHADER";
static const char *shader2DRect =
- "#extension GL_ARB_texture_rectangle : require\n"
- "varying mediump vec2 v_texCoord;\n"
- "uniform sampler2DRect s_texture;\n"
- "uniform lowp float alpha;\n"
- "void main() {\n"
- " lowp vec4 texColor = texture2DRect(s_texture, v_texCoord);\n"
- " gl_FragColor = texColor * alpha;\n"
- "}";
+ R"SHADER(#version 150 core
+in vec2 v_texCoord;
+uniform sampler2D s_texture;
+uniform float alpha;
+out vec4 fragColor;
+void main() {
+ vec4 texColor = texture(s_texture, v_texCoord);
+ fragColor = texColor * alpha;
+}
+ )SHADER";
if (m_target == ExternalTarget)
return shaderExternal;
else
diff --git a/qtwebengine/src/core/yuv_video_node.cpp b/qtwebengine/src/core/yuv_video_node.cpp
index 4a436d95..dc4b6ff9 100644
--- a/qtwebengine/src/core/yuv_video_node.cpp
+++ b/qtwebengine/src/core/yuv_video_node.cpp
@@ -59,39 +59,41 @@ public:
YUVVideoMaterialShader(const gfx::ColorSpace &colorSpace)
{
static const char *shaderHead =
- "varying mediump vec2 v_yaTexCoord;\n"
- "varying mediump vec2 v_uvTexCoord;\n"
- "uniform sampler2D y_texture;\n"
- "uniform sampler2D u_texture;\n"
- "uniform sampler2D v_texture;\n"
- "uniform mediump float alpha;\n"
- "uniform mediump vec4 ya_clamp_rect;\n"
- "uniform mediump vec4 uv_clamp_rect;\n";
- static const char *shader =
- "void main() {\n"
- " mediump vec2 ya_clamped =\n"
- " max(ya_clamp_rect.xy, min(ya_clamp_rect.zw, v_yaTexCoord));\n"
- " mediump float y_raw = texture2D(y_texture, ya_clamped).x;\n"
- " mediump vec2 uv_clamped =\n"
- " max(uv_clamp_rect.xy, min(uv_clamp_rect.zw, v_uvTexCoord));\n"
- " mediump float u_unsigned = texture2D(u_texture, uv_clamped).x;\n"
- " mediump float v_unsigned = texture2D(v_texture, uv_clamped).x;\n"
- " mediump vec3 yuv = vec3(y_raw, u_unsigned, v_unsigned);\n"
- " mediump vec3 rgb = DoColorConversion(yuv);\n"
- " gl_FragColor = vec4(rgb, 1.0) * alpha;\n"
- "}";
+ R"SHADER(#version 150 core
+in vec2 v_yaTexCoord;
+in vec2 v_uvTexCoord;
+uniform sampler2D y_texture;
+uniform sampler2D u_texture;
+uniform sampler2D v_texture;
+uniform float alpha;
+uniform vec4 ya_clamp_rect;
+uniform vec4 uv_clamp_rect;
+out vec4 fragColor;
+ )SHADER";
+
+ static const char *shader = R"SHADER(
+void main() {
+ vec2 ya_clamped =
+ max(ya_clamp_rect.xy, min(ya_clamp_rect.zw, v_yaTexCoord));
+ float y_raw = texture(y_texture, ya_clamped).x;
+ vec2 uv_clamped =
+ max(uv_clamp_rect.xy, min(uv_clamp_rect.zw, v_uvTexCoord));
+ float u_unsigned = texture(u_texture, uv_clamped).x;
+ float v_unsigned = texture(v_texture, uv_clamped).x;
+ vec3 yuv = vec3(y_raw, u_unsigned, v_unsigned);
+ vec3 rgb = DoColorConversion(yuv);
+ fragColor = vec4(rgb, 1.0) * alpha;
+}
+ )SHADER";
+
// Invalid or unspecified color spaces should be treated as REC709.
gfx::ColorSpace src = colorSpace.IsValid() ? colorSpace : gfx::ColorSpace::CreateREC709();
gfx::ColorSpace dst = gfx::ColorSpace::CreateSRGB();
std::unique_ptr<gfx::ColorTransform> transform =
gfx::ColorTransform::NewColorTransform(src, dst, gfx::ColorTransform::Intent::INTENT_PERCEPTUAL);

- QByteArray header(shaderHead);
- if (QOpenGLContext::currentContext()->isOpenGLES())
- header = QByteArray("precision mediump float;\n") + header;
-
m_csShader = QByteArray::fromStdString(transform->GetShaderSource());
- m_fragmentShader = header + m_csShader + QByteArray(shader);
+ m_fragmentShader = QByteArray(shaderHead) + m_csShader + QByteArray(shader);
}
void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial) override;

@@ -108,20 +110,22 @@ protected:
const char *vertexShader() const override {
// Keep in sync with logic in VertexShader in components/viz/service/display/shader.cc
const char *shader =
- "attribute highp vec4 a_position;\n"
- "attribute mediump vec2 a_texCoord;\n"
- "uniform highp mat4 matrix;\n"
- "varying mediump vec2 v_yaTexCoord;\n"
- "varying mediump vec2 v_uvTexCoord;\n"
- "uniform mediump vec2 yaTexScale;\n"
- "uniform mediump vec2 yaTexOffset;\n"
- "uniform mediump vec2 uvTexScale;\n"
- "uniform mediump vec2 uvTexOffset;\n"
- "void main() {\n"
- " gl_Position = matrix * a_position;\n"
- " v_yaTexCoord = a_texCoord * yaTexScale + yaTexOffset;\n"
- " v_uvTexCoord = a_texCoord * uvTexScale + uvTexOffset;\n"
- "}";
+ R"SHADER(#version 150 core
+in vec4 a_position;
+in vec2 a_texCoord;
+uniform mat4 matrix;
+out vec2 v_yaTexCoord;
+out vec2 v_uvTexCoord;
+uniform vec2 yaTexScale;
+uniform vec2 yaTexOffset;
+uniform vec2 uvTexScale;
+uniform vec2 uvTexOffset;
+void main() {
+ gl_Position = matrix * a_position;
+ v_yaTexCoord = a_texCoord * yaTexScale + yaTexOffset;
+ v_uvTexCoord = a_texCoord * uvTexScale + uvTexOffset;
+}
+ )SHADER";
return shader;
}

@@ -168,33 +172,35 @@ public:
YUVAVideoMaterialShader(const gfx::ColorSpace &colorSpace) : YUVVideoMaterialShader(colorSpace)
{
static const char *shaderHead =
- "varying mediump vec2 v_yaTexCoord;\n"
- "varying mediump vec2 v_uvTexCoord;\n"
- "uniform sampler2D y_texture;\n"
- "uniform sampler2D u_texture;\n"
- "uniform sampler2D v_texture;\n"
- "uniform sampler2D a_texture;\n"
- "uniform mediump float alpha;\n"
- "uniform mediump vec4 ya_clamp_rect;\n"
- "uniform mediump vec4 uv_clamp_rect;\n";
+ R"SHADER(#version 150 core
+in vec2 v_yaTexCoord;
+in vec2 v_uvTexCoord;
+uniform sampler2D y_texture;
+uniform sampler2D u_texture;
+uniform sampler2D v_texture;
+uniform sampler2D a_texture;
+uniform float alpha;
+uniform vec4 ya_clamp_rect;
+uniform vec4 uv_clamp_rect;
+out vec4 fragColor;
+ )SHADER";
static const char *shader =
- "void main() {\n"
- " mediump vec2 ya_clamped =\n"
- " max(ya_clamp_rect.xy, min(ya_clamp_rect.zw, v_yaTexCoord));\n"
- " mediump float y_raw = texture2D(y_texture, ya_clamped).x;\n"
- " mediump vec2 uv_clamped =\n"
- " max(uv_clamp_rect.xy, min(uv_clamp_rect.zw, v_uvTexCoord));\n"
- " mediump float u_unsigned = texture2D(u_texture, uv_clamped).x;\n"
- " mediump float v_unsigned = texture2D(v_texture, uv_clamped).x;\n"
- " mediump float a_raw = texture2D(a_texture, ya_clamped).x;\n"
- " mediump vec3 yuv = vec3(y_raw, u_unsigned, v_unsigned);\n"
- " mediump vec3 rgb = DoColorConversion(yuv);\n"
- " gl_FragColor = vec4(rgb, 1.0) * (alpha * a_raw);\n"
- "}";
- QByteArray header(shaderHead);
- if (QOpenGLContext::currentContext()->isOpenGLES())
- header = QByteArray("precision mediump float;\n") + header;
- m_fragmentShader = header + m_csShader + QByteArray(shader);
+ R"SHADER(
+void main() {
+ vec2 ya_clamped =
+ max(ya_clamp_rect.xy, min(ya_clamp_rect.zw, v_yaTexCoord));
+ float y_raw = texture(y_texture, ya_clamped).x;
+ vec2 uv_clamped =
+ max(uv_clamp_rect.xy, min(uv_clamp_rect.zw, v_uvTexCoord));
+ float u_unsigned = texture(u_texture, uv_clamped).x;
+ float v_unsigned = texture(v_texture, uv_clamped).x;
+ float a_raw = texture(a_texture, ya_clamped).x;
+ vec3 yuv = vec3(y_raw, u_unsigned, v_unsigned);
+ vec3 rgb = DoColorConversion(yuv);
+ fragColor = vec4(rgb, 1.0) * (alpha * a_raw);
+}
+ )SHADER";
+ m_fragmentShader = QByteArray(shaderHead) + m_csShader + QByteArray(shader);
}
void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial) override;
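For readers skimming the patch above: it rewrites Qt WebEngine's video shaders from GLSL ES style (attribute/varying, gl_FragColor, texture2D) to GLSL 1.50 core (in/out, a declared fragment output, texture), wrapped in C++ raw string literals, so they compile on the 3.2 core profile context configured for macOS. The snippet below is not part of the patch; it is a hedged illustration of that shader style being compiled with QOpenGLShaderProgram, assuming an OpenGL 3.2+ core context is current.

```cpp
// Illustration only (not from the patch): a GLSL 1.50 core shader pair in the
// same raw-string style, compiled with QOpenGLShaderProgram.
#include <QtGui/QOpenGLShaderProgram>

static const char* kVertexSrc = R"SHADER(#version 150 core
in vec4 a_position;
in vec2 a_texCoord;
uniform mat4 matrix;
out vec2 v_texCoord;
void main() {
    gl_Position = matrix * a_position;
    v_texCoord = a_texCoord;
}
)SHADER";

static const char* kFragmentSrc = R"SHADER(#version 150 core
in vec2 v_texCoord;
uniform sampler2D s_texture;
uniform float alpha;
out vec4 fragColor;                                        // replaces gl_FragColor
void main() {
    fragColor = texture(s_texture, v_texCoord) * alpha;    // texture() replaces texture2D()
}
)SHADER";

// Assumes an OpenGL 3.2+ core context is current on the calling thread.
bool buildVideoProgram(QOpenGLShaderProgram& program) {
    return program.addShaderFromSourceCode(QOpenGLShader::Vertex, kVertexSrc)
        && program.addShaderFromSourceCode(QOpenGLShader::Fragment, kFragmentSrc)
        && program.link();
}
```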