
Merge remote-tracking branch 'upstream/master' into gl_common

Brad Davis 2018-03-20 09:18:56 -07:00
commit 659da9b0e4
267 changed files with 9176 additions and 2634 deletions
.gitignore
BUILD_ANDROID.md
android
assignment-client/src
cmake/externals/nvtt
interface
libraries

.gitignore vendored
View file

@ -78,6 +78,8 @@ TAGS
node_modules
npm-debug.log
# ignore qmlc files generated from qml as cache
*.qmlc
# Android studio files
*___jb_old___
@ -88,4 +90,4 @@ android/app/src/main/assets
interface/compiledResources
# GPUCache
interface/resources/GPUCache/*
interface/resources/GPUCache/*

View file

@ -53,7 +53,7 @@ Enter the repository `android` directory
Execute a gradle pre-build setup. This step should only need to be done once
`gradle setupDepedencies`
`gradle setupDependencies`
# Building & Running

View file

@ -19,9 +19,9 @@
android:allowBackup="true"
android:screenOrientation="unspecified"
android:theme="@style/NoSystemUI"
android:icon="@mipmap/ic_launcher"
android:icon="@drawable/ic_launcher"
android:launchMode="singleTop"
android:roundIcon="@mipmap/ic_launcher_round">
android:roundIcon="@drawable/ic_launcher">
<activity android:name="io.highfidelity.hifiinterface.PermissionChecker">
<intent-filter>
<action android:name="android.intent.action.MAIN" />

View file

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="utf-8"?>
<!--suppress AndroidUnknownAttribute -->
<vector xmlns:api24="http://schemas.android.com/apk/res/android" xmlns:android="http://schemas.android.com/apk/res/android"
android:viewportWidth="192"
android:viewportHeight="192"
android:width="192dp"
android:height="192dp">
<path
android:pathData="M189.5 96.5A93.5 93.5 0 0 1 96 190 93.5 93.5 0 0 1 2.5 96.5 93.5 93.5 0 0 1 96 3 93.5 93.5 0 0 1 189.5 96.5Z"
android:fillColor="#333333" />
<path
android:pathData="M96.2 173.1c-10.3 0 -20.4 -2.1 -29.8 -6 -9.2 -3.8 -17.3 -9.4 -24.3 -16.4 -7 -7 -12.6 -15.2 -16.4 -24.3 -4.1 -9.6 -6.2 -19.6 -6.2 -30 0 -10.3 2.1 -20.4 6 -29.8 3.8 -9.2 9.4 -17.3 16.4 -24.3 7 -7 15.2 -12.6 24.3 -16.4 9.5 -4 19.5 -6 29.8 -6 10.3 0 20.4 2.1 29.8 6 9.2 3.8 17.3 9.4 24.3 16.4 7 7 12.6 15.2 16.4 24.3 4 9.5 6 19.5 6 29.8 0 10.3 -2.1 20.4 -6 29.8 -3.8 9.2 -9.4 17.3 -16.4 24.3 -7 7 -15.2 12.6 -24.3 16.4 -9.2 4.1 -19.3 6.2 -29.6 6.2zm0 -145.3c-37.8 0 -68.6 30.8 -68.6 68.6 0 37.8 30.8 68.6 68.6 68.6 37.8 0 68.6 -30.8 68.6 -68.6 0 -37.8 -30.8 -68.6 -68.6 -68.6z"
android:fillColor="#00b4f0" />
<path
android:pathData="M119.6 129l0 -53.8c3.4 -1.1 5.8 -4.3 5.8 -8 0 -4.6 -3.8 -8.4 -8.4 -8.4 -4.6 0 -8.4 3.8 -8.4 8.4 0 3.6 2.2 6.6 5.4 7.9l0 25L79 83.8 79 64c3.4 -1.1 5.8 -4.3 5.8 -8 0 -4.6 -3.8 -8.4 -8.4 -8.4 -4.6 0 -8.4 3.8 -8.4 8.4 0 3.6 2.2 6.6 5.4 7.9l0 54.1c-3.1 1.2 -5.4 4.3 -5.4 7.9 0 4.6 3.8 8.4 8.4 8.4 4.6 0 8.4 -3.8 8.4 -8.4 0 -3.7 -2.4 -6.9 -5.8 -8l0 -27.3 35 16.3 0 22.2c-3.1 1.2 -5.4 4.3 -5.4 7.9 0 4.6 3.8 8.4 8.4 8.4 4.6 0 8.4 -3.8 8.4 -8.4 0 -3.8 -2.4 -6.9 -5.8 -8z"
android:fillColor="#00b4f0" />
</vector>

Eleven binary image files removed (4.2–9.7 KiB each); previews not shown.

View file

@ -137,6 +137,13 @@ def packages = [
checksum: '20768f298f53b195e71b414b0ae240c4',
sharedLibFolder: 'lib/release',
includeLibs: ['libtbb.so', 'libtbbmalloc.so'],
],
hifiAC: [
file: 'libplugins_libhifiCodec.zip',
versionId: 'mzKhsRCgVmloqq5bvE.0IwYK1NjGQc_G',
checksum: '9412a8e12c88a4096c1fc843bb9fe52d',
sharedLibFolder: '',
includeLibs: ['libplugins_libhifiCodec.so']
]
]
@ -353,6 +360,7 @@ task verifyGvr(type: Verify) { def p = packages['gvr']; src new File(baseFolder,
task verifyOpenSSL(type: Verify) { def p = packages['openssl']; src new File(baseFolder, p['file']); checksum p['checksum'] }
task verifyPolyvox(type: Verify) { def p = packages['polyvox']; src new File(baseFolder, p['file']); checksum p['checksum'] }
task verifyTBB(type: Verify) { def p = packages['tbb']; src new File(baseFolder, p['file']); checksum p['checksum'] }
task verifyHifiAC(type: Verify) { def p = packages['hifiAC']; src new File(baseFolder, p['file']); checksum p['checksum'] }
task verifyDependencyDownloads(dependsOn: downloadDependencies) { }
verifyDependencyDownloads.dependsOn verifyQt
@ -362,6 +370,7 @@ verifyDependencyDownloads.dependsOn verifyGvr
verifyDependencyDownloads.dependsOn verifyOpenSSL
verifyDependencyDownloads.dependsOn verifyPolyvox
verifyDependencyDownloads.dependsOn verifyTBB
verifyDependencyDownloads.dependsOn verifyHifiAC
task extractDependencies(dependsOn: verifyDependencyDownloads) {
doLast {

View file

@ -42,7 +42,7 @@ AvatarMixer::AvatarMixer(ReceivedMessage& message) :
ThreadedAssignment(message)
{
// make sure we hear about node kills so we can tell the other nodes
connect(DependencyManager::get<NodeList>().data(), &NodeList::nodeKilled, this, &AvatarMixer::nodeKilled);
connect(DependencyManager::get<NodeList>().data(), &NodeList::nodeKilled, this, &AvatarMixer::handleAvatarKilled);
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
packetReceiver.registerListener(PacketType::AvatarData, this, "queueIncomingPacket");
@ -423,14 +423,15 @@ void AvatarMixer::throttle(std::chrono::microseconds duration, int frame) {
}
}
void AvatarMixer::nodeKilled(SharedNodePointer killedNode) {
if (killedNode->getType() == NodeType::Agent
&& killedNode->getLinkedData()) {
void AvatarMixer::handleAvatarKilled(SharedNodePointer avatarNode) {
if (avatarNode->getType() == NodeType::Agent
&& avatarNode->getLinkedData()) {
auto nodeList = DependencyManager::get<NodeList>();
{ // decrement sessionDisplayNames table and possibly remove
QMutexLocker nodeDataLocker(&killedNode->getLinkedData()->getMutex());
AvatarMixerClientData* nodeData = dynamic_cast<AvatarMixerClientData*>(killedNode->getLinkedData());
QMutexLocker nodeDataLocker(&avatarNode->getLinkedData()->getMutex());
AvatarMixerClientData* nodeData = dynamic_cast<AvatarMixerClientData*>(avatarNode->getLinkedData());
const QString& baseDisplayName = nodeData->getBaseDisplayName();
// No sense guarding against very rare case of a node with no entry, as this will work without the guard and do one less lookup in the common case.
if (--_sessionDisplayNames[baseDisplayName].second <= 0) {
@ -447,12 +448,12 @@ void AvatarMixer::nodeKilled(SharedNodePointer killedNode) {
// we relay avatar kill packets to agents that are not upstream
// and downstream avatar mixers, if the node that was just killed was being replicated
return (node->getType() == NodeType::Agent && !node->isUpstream()) ||
(killedNode->isReplicated() && shouldReplicateTo(*killedNode, *node));
(avatarNode->isReplicated() && shouldReplicateTo(*avatarNode, *node));
}, [&](const SharedNodePointer& node) {
if (node->getType() == NodeType::Agent) {
if (!killPacket) {
killPacket = NLPacket::create(PacketType::KillAvatar, NUM_BYTES_RFC4122_UUID + sizeof(KillAvatarReason));
killPacket->write(killedNode->getUUID().toRfc4122());
killPacket->write(avatarNode->getUUID().toRfc4122());
killPacket->writePrimitive(KillAvatarReason::AvatarDisconnected);
}
@ -462,7 +463,7 @@ void AvatarMixer::nodeKilled(SharedNodePointer killedNode) {
if (!replicatedKillPacket) {
replicatedKillPacket = NLPacket::create(PacketType::ReplicatedKillAvatar,
NUM_BYTES_RFC4122_UUID + sizeof(KillAvatarReason));
replicatedKillPacket->write(killedNode->getUUID().toRfc4122());
replicatedKillPacket->write(avatarNode->getUUID().toRfc4122());
replicatedKillPacket->writePrimitive(KillAvatarReason::AvatarDisconnected);
}
@ -479,7 +480,7 @@ void AvatarMixer::nodeKilled(SharedNodePointer killedNode) {
return false;
}
if (node->getUUID() == killedNode->getUUID()) {
if (node->getUUID() == avatarNode->getUUID()) {
return false;
}
@ -489,7 +490,7 @@ void AvatarMixer::nodeKilled(SharedNodePointer killedNode) {
QMetaObject::invokeMethod(node->getLinkedData(),
"cleanupKilledNode",
Qt::AutoConnection,
Q_ARG(const QUuid&, QUuid(killedNode->getUUID())));
Q_ARG(const QUuid&, QUuid(avatarNode->getUUID())));
}
);
}
@ -605,7 +606,9 @@ void AvatarMixer::handleAvatarIdentityPacket(QSharedPointer<ReceivedMessage> mes
void AvatarMixer::handleKillAvatarPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer node) {
auto start = usecTimestampNow();
DependencyManager::get<NodeList>()->processKillNode(*message);
handleAvatarKilled(node);
node->setLinkedData(nullptr);
auto end = usecTimestampNow();
_handleKillAvatarPacketElapsedTime += (end - start);

View file

@ -39,7 +39,7 @@ public slots:
/// runs the avatar mixer
void run() override;
void nodeKilled(SharedNodePointer killedNode);
void handleAvatarKilled(SharedNodePointer killedNode);
void sendStatsPacket() override;

View file

@ -442,12 +442,16 @@ bool EntityTreeSendThread::traverseTreeAndBuildNextPacketPayload(EncodeBitstream
PrioritizedEntity queuedItem = _sendQueue.top();
EntityItemPointer entity = queuedItem.getEntity();
if (entity) {
// Only send entities that match the jsonFilters, but keep track of everything we've tried to send so we don't try to send it again
const QUuid& entityID = entity->getID();
// Only send entities that match the jsonFilters, but keep track of everything we've tried to send so we don't try to send it again;
// also send if we previously matched since this represents change to a matched item.
bool entityMatchesFilters = entity->matchesJSONFilters(jsonFilters);
if (entityMatchesFilters || entityNodeData->isEntityFlaggedAsExtra(entity->getID())) {
bool entityPreviouslyMatchedFilter = entityNodeData->sentFilteredEntity(entityID);
if (entityMatchesFilters || entityNodeData->isEntityFlaggedAsExtra(entityID) || entityPreviouslyMatchedFilter) {
if (!jsonFilters.isEmpty() && entityMatchesFilters) {
// Record explicitly filtered-in entity so that extra entities can be flagged.
entityNodeData->insertSentFilteredEntity(entity->getID());
entityNodeData->insertSentFilteredEntity(entityID);
}
OctreeElement::AppendState appendEntityState = entity->appendEntityData(&_packetData, params, _extraEncodeData);
@ -458,6 +462,10 @@ bool EntityTreeSendThread::traverseTreeAndBuildNextPacketPayload(EncodeBitstream
params.stopReason = EncodeBitstreamParams::DIDNT_FIT;
break;
}
if (entityPreviouslyMatchedFilter && !entityMatchesFilters) {
entityNodeData->removeSentFilteredEntity(entityID);
}
++_numEntities;
}
if (queuedItem.shouldForceRemove()) {

View file

@ -29,8 +29,8 @@ else ()
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://hifi-public.s3.amazonaws.com/dependencies/nvidia-texture-tools-2.1.0.hifi.zip
URL_MD5 5794b950f8b265a9a41b2839b3bf7ebb
URL http://hifi-public.s3.amazonaws.com/dependencies/nvidia-texture-tools-2.1.0.hifi-83462e4.zip
URL_MD5 602776e08515b54bfa1b8dc455003f0f
CONFIGURE_COMMAND CMAKE_ARGS ${ANDROID_CMAKE_ARGS} -DNVTT_SHARED=1 -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DCMAKE_POSITION_INDEPENDENT_CODE=ON
LOG_DOWNLOAD 1
LOG_CONFIGURE 1

View file

@ -23,12 +23,22 @@ set(RESOURCES_QRC ${CMAKE_CURRENT_BINARY_DIR}/resources.qrc)
set(RESOURCES_RCC ${CMAKE_CURRENT_SOURCE_DIR}/compiledResources/resources.rcc)
generate_qrc(OUTPUT ${RESOURCES_QRC} PATH ${CMAKE_CURRENT_SOURCE_DIR}/resources CUSTOM_PATHS ${CUSTOM_INTERFACE_QRC_PATHS} GLOBS *)
add_custom_command(
OUTPUT ${RESOURCES_RCC}
DEPENDS ${RESOURCES_QRC} ${GENERATE_QRC_DEPENDS}
COMMAND "${QT_DIR}/bin/rcc"
ARGS ${RESOURCES_QRC} -binary -o ${RESOURCES_RCC}
)
if (ANDROID)
# on Android, don't compress the rcc binary
add_custom_command(
OUTPUT ${RESOURCES_RCC}
DEPENDS ${RESOURCES_QRC} ${GENERATE_QRC_DEPENDS}
COMMAND "${QT_DIR}/bin/rcc"
ARGS ${RESOURCES_QRC} -no-compress -binary -o ${RESOURCES_RCC}
)
else ()
add_custom_command(
OUTPUT ${RESOURCES_RCC}
DEPENDS ${RESOURCES_QRC} ${GENERATE_QRC_DEPENDS}
COMMAND "${QT_DIR}/bin/rcc"
ARGS ${RESOURCES_QRC} -binary -o ${RESOURCES_RCC}
)
endif()
list(APPEND GENERATE_QRC_DEPENDS ${RESOURCES_RCC})
add_custom_target(resources ALL DEPENDS ${GENERATE_QRC_DEPENDS})

View file

@ -5,8 +5,8 @@
{ "from": "TouchscreenVirtualPad.LX", "when": "!Application.CameraIndependent", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Actions.TranslateX" },
{ "from": "TouchscreenVirtualPad.RX", "when": "!Application.CameraIndependent", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Actions.Yaw" },
{ "from": "TouchscreenVirtualPad.RX", "when": "!Application.CameraIndependent", "filters": [ {"type": "deadZone", "min": 0.05} , "invert" ], "to": "Actions.Yaw" },
{ "from": "TouchscreenVirtualPad.RY", "when": "!Application.CameraIndependent", "to": "Actions.Pitch" }
{ "from": "TouchscreenVirtualPad.RY", "when": "!Application.CameraIndependent", "filters": [ {"type": "deadZone", "min": 0.05}, "invert" ], "to": "Actions.Pitch" }
]
}
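Aside (not part of this commit): the deadZone-plus-invert filtering added to the RX and RY routes above can also be expressed from a script with the Controller mapping API. A minimal sketch, assuming the virtual pad is exposed to scripts as Controller.Hardware.TouchscreenVirtualPad:

// Hypothetical runtime equivalent of the JSON routes above.
var MAPPING_NAME = "example.touchscreenLook";
var mapping = Controller.newMapping(MAPPING_NAME);
mapping.from(Controller.Hardware.TouchscreenVirtualPad.RY)
    .deadZone(0.05)   // ignore tiny pad movements, as in the JSON filter
    .invert()         // match the "invert" filter added in this change
    .to(Controller.Actions.Pitch);
Controller.enableMapping(MAPPING_NAME);
// Controller.disableMapping(MAPPING_NAME); // when done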

View file

@ -76,33 +76,33 @@ Item {
HifiStyles.RalewayRegular {
id: notice
text: "YOUR LOCATION"
font.pixelSize: (hifi.fonts.pixelSize * 2.15)*(android.dimen.atLeast1440p?1:.75);
font.pixelSize: (hifi.fonts.pixelSize * 2.15) * (android.dimen.atLeast1440p ? 1 : .75);
color: "#2CD7FF"
anchors {
bottom: addressBackground.top
bottomMargin: android.dimen.atLeast1440p?45:34
bottomMargin: android.dimen.atLeast1440p ? 45 : 34
left: addressBackground.left
leftMargin: android.dimen.atLeast1440p?60:45
leftMargin: android.dimen.atLeast1440p ? 60 : 45
}
}
property int inputAreaHeight: android.dimen.atLeast1440p?210:156
property int inputAreaHeight: android.dimen.atLeast1440p ? 210 : 156
property int inputAreaStep: (height - inputAreaHeight) / 2
ToolbarButton {
id: homeButton
y: android.dimen.atLeast1440p?280:210
y: android.dimen.atLeast1440p ? 280 : 210
imageURL: "../../icons/home.svg"
onClicked: {
addressBarDialog.loadHome();
bar.shown = false;
}
anchors {
leftMargin: android.dimen.atLeast1440p?75:56
leftMargin: android.dimen.atLeast1440p ? 75 : 56
left: parent.left
}
size: android.dimen.atLeast1440p?150:150//112
size: android.dimen.atLeast1440p ? 150 : 150//112
}
ToolbarButton {
@ -111,10 +111,10 @@ Item {
onClicked: addressBarDialog.loadBack();
anchors {
left: homeButton.right
leftMargin: android.dimen.atLeast1440p?70:52
leftMargin: android.dimen.atLeast1440p ? 70 : 52
verticalCenter: homeButton.verticalCenter
}
size: android.dimen.atLeast1440p?150:150
size: android.dimen.atLeast1440p ? 150 : 150
}
ToolbarButton {
id: forwardArrow;
@ -122,10 +122,10 @@ Item {
onClicked: addressBarDialog.loadForward();
anchors {
left: backArrow.right
leftMargin: android.dimen.atLeast1440p?60:45
leftMargin: android.dimen.atLeast1440p ? 60 : 45
verticalCenter: homeButton.verticalCenter
}
size: android.dimen.atLeast1440p?150:150
size: android.dimen.atLeast1440p ? 150 : 150
}
HifiStyles.FiraSansRegular {
@ -140,25 +140,22 @@ Item {
Rectangle {
id: addressBackground
x: android.dimen.atLeast1440p?780:585
y: android.dimen.atLeast1440p?280:235 // tweaking by hand
width: android.dimen.atLeast1440p?1270:952
height: android.dimen.atLeast1440p?150:112
x: android.dimen.atLeast1440p ? 780 : 585
y: android.dimen.atLeast1440p ? 280 : 235 // tweaking by hand
width: android.dimen.atLeast1440p ? 1270 : 952
height: android.dimen.atLeast1440p ? 150 : 112
color: "#FFFFFF"
}
TextInput {
id: addressLine
focus: true
x: android.dimen.atLeast1440p?870:652
y: android.dimen.atLeast1440p?300:245 // tweaking by hand
width: android.dimen.atLeast1440p?1200:900
height: android.dimen.atLeast1440p?120:90
x: android.dimen.atLeast1440p ? 870 : 652
y: android.dimen.atLeast1440p ? 300 : 245 // tweaking by hand
width: android.dimen.atLeast1440p ? 1200 : 900
height: android.dimen.atLeast1440p ? 120 : 90
inputMethodHints: Qt.ImhNoPredictiveText
//helperText: "Hint is here"
anchors {
//verticalCenter: addressBackground.verticalCenter
}
font.pixelSize: hifi.fonts.pixelSize * 3.75
onTextChanged: {
//filterChoicesByText();
@ -228,4 +225,4 @@ Item {
}
}
}
}

View file

@ -38,7 +38,8 @@ ModalWindow {
keyboardOverride: true // Disable ModalWindow's keyboard.
function tryDestroy() {
root.destroy()
Controller.setVPadHidden(false);
root.destroy();
}
LoginDialog {
@ -52,8 +53,9 @@ ModalWindow {
Component.onCompleted: {
this.anchors.centerIn = undefined;
this.y=150;
this.x=(parent.width - this.width)/2;
this.y = 150;
this.x = (parent.width - this.width) / 2;
Controller.setVPadHidden(true);
}
Keys.onPressed: {

View file

@ -14,9 +14,9 @@ import Qt.labs.settings 1.0
import "./hifi/audio" as HifiAudio
Hifi.AvatarInputs {
Item {
id: root;
objectName: "AvatarInputs"
objectName: "AvatarInputsBar"
property int modality: Qt.NonModal
width: audio.width;
height: audio.height;
@ -26,7 +26,7 @@ Hifi.AvatarInputs {
HifiAudio.MicBar {
id: audio;
visible: root.showAudioTools;
visible: AvatarInputs.showAudioTools;
standalone: true;
dragTarget: parent;
}

View file

@ -224,7 +224,7 @@ Item {
onClicked: {
Qt.inputMethod.hide();
root.destroy();
root.tryDestroy();
}
}
}

View file

@ -42,8 +42,8 @@ Original.CheckBox {
style: CheckBoxStyle {
indicator: Rectangle {
id: box
width: boxSize
height: boxSize
implicitWidth: boxSize
implicitHeight: boxSize
radius: boxRadius
border.width: 1
border.color: pressed || hovered
@ -101,8 +101,8 @@ Original.CheckBox {
}
label: Label {
text: control.text
color: control.color
text: checkBox.text
color: checkBox.color
x: 2
wrapMode: checkBox.wrap ? Text.Wrap : Text.NoWrap
elide: checkBox.wrap ? Text.ElideNone : Text.ElideRight

View file

@ -26,13 +26,13 @@ ColumnLayout {
property string methodName: "";
property string actionText: "";
spacing: 4*3
spacing: 4 * 3
signal sendToParentQml(var message);
Image {
id: itemImage
Layout.preferredWidth: 250*3
Layout.preferredHeight: 140*3
Layout.preferredWidth: 250 * 3
Layout.preferredHeight: 140 * 3
source: thumbnailUrl
asynchronous: true
fillMode: Image.PreserveAspectFit
@ -81,7 +81,7 @@ ColumnLayout {
HifiControlsUit.ImageButton {
width: 140*3
height: 35*3
text: type=="extra"? actionText: "CHOOSE"
text: type=="extra" ? actionText: "CHOOSE"
source: "../../../../icons/button.svg"
hoverSource: "../../../../icons/button-a.svg"
fontSize: 18*3
@ -102,8 +102,8 @@ ColumnLayout {
Image {
id: tickImage
width: 35*3
height: 35*3
width: 35 * 3
height: 35 * 3
source: "../../../icons/tick.svg"
anchors {
horizontalCenter: itemName.horizontalCenter
@ -114,4 +114,4 @@ ColumnLayout {
Component.onCompleted:{
sendToParentQml.connect(sendToScript);
}
}
}

View file

@ -22,26 +22,26 @@ Item {
Item {
id: dimen
readonly property bool atLeast1440p: Screen.width >= 2560 && Screen.height >= 1440
readonly property real windowLessWidth: atLeast1440p?378:284
readonly property real windowLessHeight: atLeast1440p?192:144
readonly property real windowLessWidth: atLeast1440p ? 378 : 284
readonly property real windowLessHeight: atLeast1440p ? 192 : 144
readonly property real windowZ: 100
readonly property real headerHeight: atLeast1440p?276:207
readonly property real headerHeight: atLeast1440p ? 276 : 207
readonly property real headerIconPosX: atLeast1440p?90:67
readonly property real headerIconPosY: atLeast1440p?108:81
readonly property real headerIconWidth: atLeast1440p?111:83
readonly property real headerIconHeight: atLeast1440p?111:83
readonly property real headerIconTitleDistance: atLeast1440p?151:113
readonly property real headerIconPosX: atLeast1440p ? 90 : 67
readonly property real headerIconPosY: atLeast1440p ? 108 : 81
readonly property real headerIconWidth: atLeast1440p ? 111 : 83
readonly property real headerIconHeight: atLeast1440p ? 111 : 83
readonly property real headerIconTitleDistance: atLeast1440p ? 151 : 113
readonly property real headerHideWidth: atLeast1440p?150:112
readonly property real headerHideHeight: atLeast1440p?150:112
readonly property real headerHideRightMargin: atLeast1440p?110:82
readonly property real headerHideTopMargin: atLeast1440p?90:67
readonly property real headerHideIconWidth: atLeast1440p?70:52
readonly property real headerHideIconHeight: atLeast1440p?45:33
readonly property real headerHideTextTopMargin: atLeast1440p?36:27
readonly property real headerHideWidth: atLeast1440p ? 150 : 112
readonly property real headerHideHeight: atLeast1440p ? 150 : 112
readonly property real headerHideRightMargin: atLeast1440p ? 110 : 82
readonly property real headerHideTopMargin: atLeast1440p ? 90 : 67
readonly property real headerHideIconWidth: atLeast1440p ? 70 : 52
readonly property real headerHideIconHeight: atLeast1440p ? 45 : 33
readonly property real headerHideTextTopMargin: atLeast1440p ? 36 : 27
readonly property real botomHudWidth: 366
readonly property real botomHudHeight: 180

View file

@ -20,6 +20,7 @@ Overlay {
font.family: "Helvetica"
font.pixelSize: 18
lineHeight: 18
clip: true
}
}

View file

@ -16,10 +16,11 @@ import "../js/Utils.js" as Utils
Item {
id: frame
objectName: "Frame"
HifiConstants { id: hifi }
default property var decoration
property string qmlFile: "N/A"
property bool gradientsSupported: desktop.gradientsSupported
readonly property int frameMarginLeft: frame.decoration ? frame.decoration.frameMarginLeft : 0
@ -44,7 +45,7 @@ Item {
id: debugZ
visible: DebugQML
color: "red"
text: (window ? "Z: " + window.z : "")
text: (window ? "Z: " + window.z : "") + " " + qmlFile
y: window ? window.height + 4 : 0
}

View file

@ -2007,20 +2007,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
connect(_window, SIGNAL(windowMinimizedChanged(bool)), this, SLOT(windowMinimizedChanged(bool)));
qCDebug(interfaceapp, "Startup time: %4.2f seconds.", (double)startupTimer.elapsed() / 1000.0);
{
PROFILE_RANGE(render, "Process Default Skybox");
auto textureCache = DependencyManager::get<TextureCache>();
QFileSelector fileSelector;
fileSelector.setExtraSelectors(FileUtils::getFileSelectors());
auto skyboxUrl = fileSelector.select(PathUtils::resourcesPath() + "images/Default-Sky-9-cubemap.ktx");
_defaultSkyboxTexture = gpu::Texture::unserialize(skyboxUrl.toStdString());
_defaultSkyboxAmbientTexture = _defaultSkyboxTexture;
_defaultSkybox->setCubemap(_defaultSkyboxTexture);
}
EntityTreeRenderer::setEntitiesShouldFadeFunction([this]() {
SharedNodePointer entityServerNode = DependencyManager::get<NodeList>()->soloNodeOfType(NodeType::EntityServer);
return entityServerNode && !isPhysicsEnabled();
@ -2470,7 +2456,6 @@ void Application::initializeGL() {
DeadlockWatchdogThread::withPause([&] {
// Set up the render engine
render::CullFunctor cullFunctor = LODManager::shouldRender;
static const QString RENDER_FORWARD = "HIFI_RENDER_FORWARD";
_renderEngine->addJob<UpdateSceneTask>("UpdateScene");
#ifndef Q_OS_ANDROID
_renderEngine->addJob<SecondaryCameraRenderTask>("SecondaryCameraJob", cullFunctor, !DISABLE_DEFERRED);
@ -2732,10 +2717,12 @@ void Application::onDesktopRootContextCreated(QQmlContext* surfaceContext) {
void Application::onDesktopRootItemCreated(QQuickItem* rootItem) {
Stats::show();
AvatarInputs::show();
auto surfaceContext = DependencyManager::get<OffscreenUi>()->getSurfaceContext();
surfaceContext->setContextProperty("Stats", Stats::getInstance());
surfaceContext->setContextProperty("AvatarInputs", AvatarInputs::getInstance());
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto qml = PathUtils::qmlUrl("AvatarInputsBar.qml");
offscreenUi->show(qml, "AvatarInputsBar");
}
void Application::updateCamera(RenderArgs& renderArgs, float deltaTime) {
@ -5444,7 +5431,7 @@ void Application::update(float deltaTime) {
editRenderArgs([this, deltaTime](AppRenderArgs& appRenderArgs) {
PerformanceTimer perfTimer("editRenderArgs");
appRenderArgs._headPose= getHMDSensorPose();
appRenderArgs._headPose = getHMDSensorPose();
auto myAvatar = getMyAvatar();
@ -5464,10 +5451,10 @@ void Application::update(float deltaTime) {
{
QMutexLocker viewLocker(&_viewMutex);
// adjust near clip plane to account for sensor scaling.
auto adjustedProjection = glm::perspective(_viewFrustum.getFieldOfView(),
_viewFrustum.getAspectRatio(),
DEFAULT_NEAR_CLIP * sensorToWorldScale,
_viewFrustum.getFarClip());
auto adjustedProjection = glm::perspective(glm::radians(_fieldOfView.get()),
getActiveDisplayPlugin()->getRecommendedAspectRatio(),
DEFAULT_NEAR_CLIP * sensorToWorldScale,
DEFAULT_FAR_CLIP);
_viewFrustum.setProjection(adjustedProjection);
_viewFrustum.calculate();
}
@ -5549,6 +5536,7 @@ void Application::update(float deltaTime) {
{
QMutexLocker viewLocker(&_viewMutex);
_myCamera.loadViewFrustum(_displayViewFrustum);
appRenderArgs._view = glm::inverse(_displayViewFrustum.getView());
}
{

View file

@ -272,10 +272,6 @@ public:
void shareSnapshot(const QString& filename, const QUrl& href = QUrl(""));
graphics::SkyboxPointer getDefaultSkybox() const { return _defaultSkybox; }
gpu::TexturePointer getDefaultSkyboxTexture() const { return _defaultSkyboxTexture; }
gpu::TexturePointer getDefaultSkyboxAmbientTexture() const { return _defaultSkyboxAmbientTexture; }
OverlayID getTabletScreenID() const;
OverlayID getTabletHomeButtonID() const;
QUuid getTabletFrameID() const; // may be an entity or an overlay
@ -623,6 +619,7 @@ private:
struct AppRenderArgs {
render::Args _renderArgs;
glm::mat4 _eyeToWorld;
glm::mat4 _view;
glm::mat4 _eyeOffsets[2];
glm::mat4 _eyeProjections[2];
glm::mat4 _headPose;
@ -678,10 +675,6 @@ private:
ConnectionMonitor _connectionMonitor;
graphics::SkyboxPointer _defaultSkybox { new ProceduralSkybox() } ;
gpu::TexturePointer _defaultSkyboxTexture;
gpu::TexturePointer _defaultSkyboxAmbientTexture;
QTimer _addAssetToWorldResizeTimer;
QHash<QUuid, int> _addAssetToWorldResizeList;

View file

@ -90,10 +90,10 @@ void Application::paintGL() {
{
PROFILE_RANGE(render, "/gpuContextReset");
_gpuContext->beginFrame(HMDSensorPose);
_gpuContext->beginFrame(_appRenderArgs._view, HMDSensorPose);
// Reset the gpu::Context Stages
// Back to the default framebuffer;
gpu::doInBatch(_gpuContext, [&](gpu::Batch& batch) {
gpu::doInBatch("Application_render::gpuContextReset", _gpuContext, [&](gpu::Batch& batch) {
batch.resetStages();
});
}
@ -216,7 +216,7 @@ void Application::runRenderFrame(RenderArgs* renderArgs) {
// Make sure the WorldBox is in the scene
// For the record, this one RenderItem is the first one we created and added to the scene.
// We could meoee that code elsewhere but you know...
// We could move that code elsewhere but you know...
if (!render::Item::isValidID(WorldBoxRenderData::_item)) {
auto worldBoxRenderData = std::make_shared<WorldBoxRenderData>();
auto worldBoxRenderPayload = std::make_shared<WorldBoxRenderData::Payload>(worldBoxRenderData);

View file

@ -45,6 +45,9 @@
#include "LocationBookmarks.h"
#include "DeferredLightingEffect.h"
#include "AmbientOcclusionEffect.h"
#include "RenderShadowTask.h"
#if defined(Q_OS_MAC) || defined(Q_OS_WIN)
#include "SpeechRecognizer.h"
#endif
@ -361,18 +364,6 @@ Menu::Menu() {
// Developer menu ----------------------------------
MenuWrapper* developerMenu = addMenu("Developer", "Developer");
// Developer > Graphics
MenuWrapper* graphicsOptionsMenu = developerMenu->addMenu("Render");
action = addCheckableActionToQMenuAndActionHash(graphicsOptionsMenu, MenuOption::Shadows, 0, true);
connect(action, &QAction::triggered, [action] {
DependencyManager::get<DeferredLightingEffect>()->setShadowMapEnabled(action->isChecked());
});
action = addCheckableActionToQMenuAndActionHash(graphicsOptionsMenu, MenuOption::AmbientOcclusion, 0, false);
connect(action, &QAction::triggered, [action] {
DependencyManager::get<DeferredLightingEffect>()->setAmbientOcclusionEnabled(action->isChecked());
});
// Developer > UI >>>
MenuWrapper* uiOptionsMenu = developerMenu->addMenu("UI");
action = addCheckableActionToQMenuAndActionHash(uiOptionsMenu, MenuOption::DesktopTabletToToolbar, 0,
@ -389,6 +380,36 @@ Menu::Menu() {
// Developer > Render >>>
MenuWrapper* renderOptionsMenu = developerMenu->addMenu("Render");
action = addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Shadows, 0, true);
connect(action, &QAction::triggered, [action] {
auto renderConfig = qApp->getRenderEngine()->getConfiguration();
if (renderConfig) {
auto mainViewShadowTaskConfig = renderConfig->getConfig<RenderShadowTask>("RenderMainView.RenderShadowTask");
if (mainViewShadowTaskConfig) {
if (action->isChecked()) {
mainViewShadowTaskConfig->setPreset("Enabled");
} else {
mainViewShadowTaskConfig->setPreset("None");
}
}
}
});
action = addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::AmbientOcclusion, 0, false);
connect(action, &QAction::triggered, [action] {
auto renderConfig = qApp->getRenderEngine()->getConfiguration();
if (renderConfig) {
auto mainViewAmbientOcclusionConfig = renderConfig->getConfig<AmbientOcclusionEffect>("RenderMainView.AmbientOcclusion");
if (mainViewAmbientOcclusionConfig) {
if (action->isChecked()) {
mainViewAmbientOcclusionConfig->setPreset("Enabled");
} else {
mainViewAmbientOcclusionConfig->setPreset("None");
}
}
}
});
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::WorldAxes);
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::DefaultSkybox, 0, true);

View file

@ -205,7 +205,7 @@ namespace MenuOption {
const QString DesktopTabletToToolbar = "Desktop Tablet Becomes Toolbar";
const QString HMDTabletToToolbar = "HMD Tablet Becomes Toolbar";
const QString Shadows = "Shadows";
const QString AmbientOcclusion = "AmbientOcclusion";
const QString AmbientOcclusion = "Ambient Occlusion";
}
#endif // hifi_Menu_h

View file

@ -107,7 +107,7 @@ public:
args->_displayMode = RenderArgs::MONO;
args->_renderMode = RenderArgs::RenderMode::SECONDARY_CAMERA_RENDER_MODE;
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
gpu::doInBatch("SecondaryCameraJob::run", args->_context, [&](gpu::Batch& batch) {
batch.disableContextStereo();
batch.disableContextViewCorrection();
});
@ -196,7 +196,7 @@ public:
args->_displayMode = cachedArgs->_displayMode;
args->_renderMode = cachedArgs->_renderMode;
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
gpu::doInBatch("EndSecondaryCameraFrame::run", args->_context, [&](gpu::Batch& batch) {
batch.restoreContextStereo();
batch.restoreContextViewCorrection();
});

View file

@ -15,6 +15,24 @@
#include <EntityItem.h>
#include <ObjectActionTractor.h>
/**jsdoc
* The <code>"far-grab"</code> {@link Entities.ActionType|ActionType} moves and rotates an entity to a target position and
* orientation, optionally relative to another entity. Collisions between the entity and the user's avatar are disabled during
* the far-grab.
* It has arguments in addition to the common {@link Entities.ActionArguments|ActionArguments}.
*
* @typedef {object} Entities.ActionArguments-FarGrab
* @property {Vec3} targetPosition=0,0,0 - The target position.
* @property {Quat} targetRotation=0,0,0,1 - The target rotation.
* @property {Uuid} otherID=null - If an entity ID, the <code>targetPosition</code> and <code>targetRotation</code> are
* relative to this entity's position and rotation.
* @property {number} linearTimeScale=3.4e+38 - Controls how long it takes for the entity's position to catch up with the
* target position. The value is the time for the action to catch up to 1/e = 0.368 of the target value, where the action
* is applied using an exponential decay.
* @property {number} angularTimeScale=3.4e+38 - Controls how long it takes for the entity's orientation to catch up with the
* target orientation. The value is the time for the action to catch up to 1/e = 0.368 of the target value, where the
* action is applied using an exponential decay.
*/
class AvatarActionFarGrab : public ObjectActionTractor {
public:
AvatarActionFarGrab(const QUuid& id, EntityItemPointer ownerEntity);
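For context, a usage sketch of the "far-grab" arguments documented above, as they would be passed from a script (not part of this commit; targetEntityID and the target values are placeholders):

// Pull an entity toward a point in front of the avatar with a "far-grab" action.
var targetEntityID = "{00000000-0000-0000-0000-000000000000}"; // placeholder
var actionID = Entities.addAction("far-grab", targetEntityID, {
    targetPosition: Vec3.sum(MyAvatar.position, { x: 0, y: 1, z: -2 }),
    targetRotation: { x: 0, y: 0, z: 0, w: 1 },
    linearTimeScale: 0.1,   // seconds to reach ~63% of the target position
    angularTimeScale: 0.1
});
// Release it later:
// Entities.deleteAction(targetEntityID, actionID);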

View file

@ -416,6 +416,26 @@ bool AvatarActionHold::updateArguments(QVariantMap arguments) {
return true;
}
/**jsdoc
* The <code>"hold"</code> {@link Entities.ActionType|ActionType} positions and rotates an entity relative to an avatar's hand.
* Collisions between the entity and the user's avatar are disabled during the hold.
* It has arguments in addition to the common {@link Entities.ActionArguments|ActionArguments}.
*
* @typedef {object} Entities.ActionArguments-Hold
* @property {Uuid} holderID=MyAvatar.sessionUUID - The ID of the avatar holding the entity.
* @property {Vec3} relativePosition=0,0,0 - The target position relative to the avatar's hand.
* @property {Vec3} relativeRotation=0,0,0,1 - The target rotation relative to the avatar's hand.
* @property {number} timeScale=3.4e+38 - Controls how long it takes for the entity's position and rotation to catch up with
* the target. The value is the time for the action to catch up to 1/e = 0.368 of the target value, where the action is
* applied using an exponential decay.
* @property {string} hand=right - The hand holding the entity: <code>"left"</code> or <code>"right"</code>.
* @property {boolean} kinematic=false - If <code>true</code>, the entity is made kinematic during the action; the entity won't
* lag behind the hand but constraint actions such as <code>"hinge"</code> won't act properly.
* @property {boolean} kinematicSetVelocity=false - If <code>true</code> and <code>kinematic</code> is <code>true</code>, the
* entity's <code>velocity</code> property will be set during the action, e.g., so that other scripts may use the value.
* @property {boolean} ignoreIK=false - If <code>true</code>, the entity follows the HMD controller rather than the avatar's
* hand.
*/
QVariantMap AvatarActionHold::getArguments() {
QVariantMap arguments = ObjectDynamic::getArguments();
withReadLock([&]{
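Similarly, a sketch of the "hold" arguments documented above (not part of this commit; heldEntityID is assumed to be an entity the script already knows about):

// Attach an entity to the avatar's right hand with a "hold" action.
var holdActionID = Entities.addAction("hold", heldEntityID, {
    hand: "right",
    relativePosition: { x: 0, y: 0, z: -0.05 },
    relativeRotation: { x: 0, y: 0, z: 0, w: 1 },
    timeScale: 0.1,
    kinematic: false
});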

View file

@ -1115,7 +1115,6 @@ void MyAvatar::setEnableDebugDrawIKChains(bool isEnabled) {
void MyAvatar::setEnableMeshVisible(bool isEnabled) {
_skeletonModel->setVisibleInScene(isEnabled, qApp->getMain3DScene(), render::ItemKey::TAG_BITS_NONE, true);
_skeletonModel->setCanCastShadow(isEnabled, qApp->getMain3DScene(), render::ItemKey::TAG_BITS_NONE, true);
}
void MyAvatar::setEnableInverseKinematics(bool isEnabled) {
@ -1468,7 +1467,6 @@ void MyAvatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
int skeletonModelChangeCount = _skeletonModelChangeCount;
Avatar::setSkeletonModelURL(skeletonModelURL);
_skeletonModel->setVisibleInScene(true, qApp->getMain3DScene(), render::ItemKey::TAG_BITS_NONE, true);
_skeletonModel->setCanCastShadow(true, qApp->getMain3DScene(), render::ItemKey::TAG_BITS_NONE, true);
_headBoneSet.clear();
_cauterizationNeedsUpdate = true;
@ -2043,8 +2041,8 @@ void MyAvatar::preDisplaySide(RenderArgs* renderArgs) {
_attachmentModels[i]->setVisibleInScene(shouldDrawHead, qApp->getMain3DScene(),
render::ItemKey::TAG_BITS_NONE, true);
_attachmentModels[i]->setCanCastShadow(shouldDrawHead, qApp->getMain3DScene(),
render::ItemKey::TAG_BITS_NONE, true);
_attachmentModels[i]->setCanCastShadow(shouldDrawHead, qApp->getMain3DScene(),
render::ItemKey::TAG_BITS_NONE, true);
}
}
}

View file

@ -64,7 +64,7 @@ void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
}
// Execute the batch into our framebuffer
doInBatch(renderArgs->_context, [&](gpu::Batch& batch) {
doInBatch("ApplicationOverlay::render", renderArgs->_context, [&](gpu::Batch& batch) {
PROFILE_RANGE_BATCH(batch, "ApplicationOverlayRender");
renderArgs->_batch = &batch;
batch.enableStereo(false);

View file

@ -16,19 +16,19 @@
#include "Application.h"
#include "Menu.h"
HIFI_QML_DEF(AvatarInputs)
static AvatarInputs* INSTANCE{ nullptr };
Setting::Handle<bool> showAudioToolsSetting { QStringList { "AvatarInputs", "showAudioTools" }, false };
AvatarInputs* AvatarInputs::getInstance() {
Q_ASSERT(INSTANCE);
if (!INSTANCE) {
INSTANCE = new AvatarInputs();
Q_ASSERT(INSTANCE);
}
return INSTANCE;
}
AvatarInputs::AvatarInputs(QQuickItem* parent) : QQuickItem(parent) {
INSTANCE = this;
AvatarInputs::AvatarInputs(QObject* parent) : QObject(parent) {
_showAudioTools = showAudioToolsSetting.get();
}

View file

@ -19,7 +19,7 @@ public: \
private: \
type _##name{ initialValue };
class AvatarInputs : public QQuickItem {
class AvatarInputs : public QObject {
Q_OBJECT
HIFI_QML_DECL
@ -32,7 +32,7 @@ class AvatarInputs : public QQuickItem {
public:
static AvatarInputs* getInstance();
Q_INVOKABLE float loudnessToAudioLevel(float loudness);
AvatarInputs(QQuickItem* parent = nullptr);
AvatarInputs(QObject* parent = nullptr);
void update();
bool showAudioTools() const { return _showAudioTools; }

View file

@ -24,10 +24,6 @@
#include "SnapshotAnimated.h"
#include "UserActivityLogger.h"
#include "AmbientOcclusionEffect.h"
#include "AntialiasingEffect.h"
#include "RenderShadowTask.h"
void setupPreferences() {
auto preferences = DependencyManager::get<Preferences>();
auto nodeList = DependencyManager::get<NodeList>();
@ -295,30 +291,6 @@ void setupPreferences() {
}
#endif
{
static const QString RENDER("Graphics");
auto renderConfig = qApp->getRenderEngine()->getConfiguration();
if (renderConfig) {
auto mainViewAmbientOcclusionConfig = renderConfig->getConfig<AmbientOcclusionEffect>("RenderMainView.AmbientOcclusion");
if (mainViewAmbientOcclusionConfig) {
auto getter = [mainViewAmbientOcclusionConfig]()->QString { return mainViewAmbientOcclusionConfig->getPreset(); };
auto setter = [mainViewAmbientOcclusionConfig](QString preset) { mainViewAmbientOcclusionConfig->setPreset(preset); };
auto preference = new ComboBoxPreference(RENDER, "Ambient occlusion", getter, setter);
preference->setItems(mainViewAmbientOcclusionConfig->getPresetList());
preferences->addPreference(preference);
}
auto mainViewShadowConfig = renderConfig->getConfig<RenderShadowTask>("RenderMainView.RenderShadowTask");
if (mainViewShadowConfig) {
auto getter = [mainViewShadowConfig]()->QString { return mainViewShadowConfig->getPreset(); };
auto setter = [mainViewShadowConfig](QString preset) { mainViewShadowConfig->setPreset(preset); };
auto preference = new ComboBoxPreference(RENDER, "Shadows", getter, setter);
preference->setItems(mainViewShadowConfig->getPresetList());
preferences->addPreference(preference);
}
}
}
{
static const QString NETWORKING("Networking");

View file

@ -37,7 +37,8 @@ Base3DOverlay::Base3DOverlay(const Base3DOverlay* base3DOverlay) :
_ignoreRayIntersection(base3DOverlay->_ignoreRayIntersection),
_drawInFront(base3DOverlay->_drawInFront),
_drawHUDLayer(base3DOverlay->_drawHUDLayer),
_isGrabbable(base3DOverlay->_isGrabbable)
_isGrabbable(base3DOverlay->_isGrabbable),
_isVisibleInSecondaryCamera(base3DOverlay->_isVisibleInSecondaryCamera)
{
setTransform(base3DOverlay->getTransform());
}
@ -142,6 +143,13 @@ void Base3DOverlay::setProperties(const QVariantMap& originalProperties) {
setIsGrabbable(isGrabbable.toBool());
}
auto isVisibleInSecondaryCamera = properties["isVisibleInSecondaryCamera"];
if (isVisibleInSecondaryCamera.isValid()) {
bool value = isVisibleInSecondaryCamera.toBool();
setIsVisibleInSecondaryCamera(value);
needRenderItemUpdate = true;
}
if (properties["position"].isValid()) {
setLocalPosition(vec3FromVariant(properties["position"]));
needRenderItemUpdate = true;
@ -221,6 +229,8 @@ void Base3DOverlay::setProperties(const QVariantMap& originalProperties) {
* @property {boolean} drawInFront=false - If <code>true</code>, the overlay is rendered in front of other overlays that don't
* have <code>drawInFront</code> set to <code>true</code>, and in front of entities.
* @property {boolean} grabbable=false - Signal to grabbing scripts whether or not this overlay can be grabbed.
* @property {boolean} isVisibleInSecondaryCamera=false - If <code>true</code>, the overlay is rendered in secondary
* camera views.
* @property {Uuid} parentID=null - The avatar, entity, or overlay that the overlay is parented to.
* @property {number} parentJointIndex=65535 - Integer value specifying the skeleton joint that the overlay is attached to if
* <code>parentID</code> is an avatar skeleton. A value of <code>65535</code> means "no joint".
@ -259,6 +269,9 @@ QVariant Base3DOverlay::getProperty(const QString& property) {
if (property == "grabbable") {
return _isGrabbable;
}
if (property == "isVisibleInSecondaryCamera") {
return _isVisibleInSecondaryCamera;
}
if (property == "parentID") {
return getParentID();
}
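A short scripting sketch of the new isVisibleInSecondaryCamera property added above (not part of this commit; the position, dimensions, and color are arbitrary):

// Create an overlay that is also rendered by secondary camera views (e.g. the spectator camera).
var sphere = Overlays.addOverlay("sphere", {
    position: Vec3.sum(MyAvatar.position, { x: 0, y: 1, z: 0 }),
    dimensions: { x: 0.3, y: 0.3, z: 0.3 },
    color: { red: 0, green: 180, blue: 240 },
    isVisibleInSecondaryCamera: true
});
print(Overlays.getProperty(sphere, "isVisibleInSecondaryCamera")); // true
Overlays.editOverlay(sphere, { isVisibleInSecondaryCamera: false });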

View file

@ -48,6 +48,7 @@ public:
bool getDrawInFront() const { return _drawInFront; }
bool getDrawHUDLayer() const { return _drawHUDLayer; }
bool getIsGrabbable() const { return _isGrabbable; }
virtual bool getIsVisibleInSecondaryCamera() const override { return _isVisibleInSecondaryCamera; }
void setIsSolid(bool isSolid) { _isSolid = isSolid; }
void setIsDashedLine(bool isDashedLine) { _isDashedLine = isDashedLine; }
@ -55,6 +56,7 @@ public:
virtual void setDrawInFront(bool value) { _drawInFront = value; }
virtual void setDrawHUDLayer(bool value) { _drawHUDLayer = value; }
void setIsGrabbable(bool value) { _isGrabbable = value; }
virtual void setIsVisibleInSecondaryCamera(bool value) { _isVisibleInSecondaryCamera = value; }
virtual AABox getBounds() const override = 0;
@ -92,6 +94,7 @@ protected:
bool _drawInFront;
bool _drawHUDLayer;
bool _isGrabbable { false };
bool _isVisibleInSecondaryCamera { false };
mutable bool _renderVariableDirty { true };
QString _name;

View file

@ -89,8 +89,11 @@ void ModelOverlay::update(float deltatime) {
}
if (_visibleDirty) {
_visibleDirty = false;
// don't show overlays in mirrors
_model->setVisibleInScene(getVisible(), scene, render::ItemKey::TAG_BITS_0, false);
// don't show overlays in mirrors or spectator-cam unless _isVisibleInSecondaryCamera is true
_model->setVisibleInScene(getVisible(), scene,
render::ItemKey::TAG_BITS_0 |
(_isVisibleInSecondaryCamera ? render::ItemKey::TAG_BITS_1 : render::ItemKey::TAG_BITS_NONE),
false);
}
if (_drawInFrontDirty) {
_drawInFrontDirty = false;

View file

@ -36,6 +36,11 @@ public:
void clearSubRenderItemIDs();
void setSubRenderItemIDs(const render::ItemIDs& ids);
virtual void setIsVisibleInSecondaryCamera(bool value) override {
Base3DOverlay::setIsVisibleInSecondaryCamera(value);
_visibleDirty = true;
}
void setProperties(const QVariantMap& properties) override;
QVariant getProperty(const QString& property) override;
virtual bool findRayIntersection(const glm::vec3& origin, const glm::vec3& direction, float& distance,

View file

@ -56,6 +56,8 @@ public:
bool isLoaded() { return _isLoaded; }
bool getVisible() const { return _visible; }
virtual bool isTransparent() { return getAlphaPulse() != 0.0f || getAlpha() != 1.0f; };
virtual bool getIsVisibleInSecondaryCamera() const { return false; }
xColor getColor();
float getAlpha();

View file

@ -44,6 +44,8 @@ void OverlayPropertyResultFromScriptValue(const QScriptValue& object, OverlayPro
const OverlayID UNKNOWN_OVERLAY_ID = OverlayID();
/**jsdoc
* The result of a {@link PickRay} search using {@link Overlays.findRayIntersection|findRayIntersection} or
* {@link Overlays.findRayIntersectionVector|findRayIntersectionVector}.
* @typedef {object} Overlays.RayToOverlayIntersectionResult
* @property {boolean} intersects - <code>true</code> if the {@link PickRay} intersected with a 3D overlay, otherwise
* <code>false</code>.
@ -75,7 +77,8 @@ void RayToOverlayIntersectionResultFromScriptValue(const QScriptValue& object, R
* yourself and that aren't persisted to the domain. They are used for UI.
* @namespace Overlays
* @property {Uuid} keyboardFocusOverlay - Get or set the {@link Overlays.OverlayType|web3d} overlay that has keyboard focus.
* If no overlay is set, get returns <code>null</code>; set to <code>null</code> to clear keyboard focus.
* If no overlay has keyboard focus, get returns <code>null</code>; set to <code>null</code> or {@link Uuid|Uuid.NULL} to
* clear keyboard focus.
*/
class Overlays : public QObject {
@ -116,7 +119,7 @@ public slots:
* @function Overlays.addOverlay
* @param {Overlays.OverlayType} type - The type of the overlay to add.
* @param {Overlays.OverlayProperties} properties - The properties of the overlay to add.
* @returns {Uuid} The ID of the newly created overlay.
* @returns {Uuid} The ID of the newly created overlay if successful, otherwise {@link Uuid|Uuid.NULL}.
* @example <caption>Add a cube overlay in front of your avatar.</caption>
* var overlay = Overlays.addOverlay("cube", {
* position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0, z: -3 })),
@ -131,7 +134,7 @@ public slots:
* Create a clone of an existing overlay.
* @function Overlays.cloneOverlay
* @param {Uuid} overlayID - The ID of the overlay to clone.
* @returns {Uuid} The ID of the new overlay.
* @returns {Uuid} The ID of the new overlay if successful, otherwise {@link Uuid|Uuid.NULL}.
* @example <caption>Add an overlay in front of your avatar, clone it, and move the clone to be above the
* original.</caption>
* var position = Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0, z: -3 }));
@ -322,10 +325,8 @@ public slots:
* @function Overlays.findRayIntersection
* @param {PickRay} pickRay - The PickRay to use for finding overlays.
* @param {boolean} [precisionPicking=false] - <em>Unused</em>; exists to match Entity API.
* @param {Array.<Uuid>} [overlayIDsToInclude=[]] - Whitelist for intersection test. If empty then the result isn't limited
* to overlays in the list.
* @param {Array.<Uuid>} [overlayIDsToExclude=[]] - Blacklist for intersection test. If empty then the result doesn't
* exclude overlays in the list.
* @param {Array.<Uuid>} [overlayIDsToInclude=[]] - If not empty then the search is restricted to these overlays.
* @param {Array.<Uuid>} [overlayIDsToExclude=[]] - Overlays to ignore during the search.
* @param {boolean} [visibleOnly=false] - <em>Unused</em>; exists to match Entity API.
* @param {boolean} [collidableOnly=false] - <em>Unused</em>; exists to match Entity API.
* @returns {Overlays.RayToOverlayIntersectionResult} The closest 3D overlay intersected by <code>pickRay</code>, taking
@ -531,7 +532,7 @@ public slots:
* Set the Web3D overlay that has keyboard focus.
* @function Overlays.setKeyboardFocusOverlay
* @param {Uuid} overlayID - The ID of the {@link Overlays.OverlayType|web3d} overlay to set keyboard focus to. Use
* {@link Uuid|Uuid.NULL} or <code>null</code> to unset keyboard focus from an overlay.
* <code>null</code> or {@link Uuid|Uuid.NULL} to unset keyboard focus from an overlay.
*/
void setKeyboardFocusOverlay(const OverlayID& id);
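A usage sketch of the include/exclude parameters as re-documented above (not part of this commit; panelOverlayID and debugOverlayID are assumed to exist in the calling script):

// Ray-pick against overlays, restricting the search to one overlay and ignoring another.
Controller.mousePressEvent.connect(function (event) {
    var pickRay = Camera.computePickRay(event.x, event.y);
    var result = Overlays.findRayIntersection(pickRay, false, [panelOverlayID], [debugOverlayID]);
    if (result.intersects) {
        print("Hit overlay " + result.overlayID + " at distance " + result.distance);
    }
});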

View file

@ -49,7 +49,11 @@ namespace render {
builder.withInvisible();
}
builder.withTagBits(render::ItemKey::TAG_BITS_0); // Only draw overlays in main view
// always visible in primary view. if isVisibleInSecondaryCamera, also draw in secondary view
uint32_t viewTaskBits = render::ItemKey::TAG_BITS_0 |
(overlay->getIsVisibleInSecondaryCamera() ? render::ItemKey::TAG_BITS_1 : render::ItemKey::TAG_BITS_NONE);
builder.withTagBits(viewTaskBits);
return builder.build();
}

View file

@ -67,6 +67,32 @@ Shape3DOverlay* Shape3DOverlay::createClone() const {
}
/**jsdoc
* <p>A <code>shape</code> {@link Overlays.OverlayType|OverlayType} may display as one of the following geometrical shapes:</p>
* <table>
* <thead>
* <tr><th>Value</th><th>Dimensions</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>"Circle"</code></td><td>2D</td><td>A circle oriented in 3D.</td></td></tr>
* <tr><td><code>"Cone"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Cube"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Cylinder"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Dodecahedron"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Hexagon"</code></td><td>3D</td><td>A hexagonal prism.</td></tr>
* <tr><td><code>"Icosahedron"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Line"</code></td><td>1D</td><td>A line oriented in 3D.</td></tr>
* <tr><td><code>"Octagon"</code></td><td>3D</td><td>An octagonal prism.</td></tr>
* <tr><td><code>"Octahedron"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Quad"</code></td><td>2D</td><td>A square oriented in 3D.</tr>
* <tr><td><code>"Sphere"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Tetrahedron"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Torus"</code></td><td>3D</td><td><em>Not implemented.</em></td></tr>
* <tr><td><code>"Triangle"</code></td><td>3D</td><td>A triangular prism.</td></tr>
* </tbody>
* </table>
* @typedef {string} Overlays.Shape
*/
static const std::array<QString, GeometryCache::Shape::NUM_SHAPES> shapeStrings { {
"Line",
"Triangle",
@ -80,7 +106,7 @@ static const std::array<QString, GeometryCache::Shape::NUM_SHAPES> shapeStrings
"Octahedron",
"Dodecahedron",
"Icosahedron",
"Torus",
"Torus", // Not implemented yet.
"Cone",
"Cylinder"
} };
@ -145,7 +171,7 @@ void Shape3DOverlay::setProperties(const QVariantMap& properties) {
*
* @property {Vec3} dimensions - The dimensions of the overlay. Synonyms: <code>scale</code>, <code>size</code>.
*
* @property {Shape} shape=Hexagon - The geometrical shape of the overlay.
* @property {Overlays.Shape} shape=Hexagon - The geometrical shape of the overlay.
*/
QVariant Shape3DOverlay::getProperty(const QString& property) {
if (property == "shape") {
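For reference, a sketch creating a "shape" overlay with one of the Overlays.Shape values listed above (not part of this commit; the position, dimensions, and color are arbitrary):

var cylinder = Overlays.addOverlay("shape", {
    shape: "Cylinder",
    position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0, z: -2 })),
    dimensions: { x: 0.4, y: 1.0, z: 0.4 },
    color: { red: 0, green: 180, blue: 240 },
    solid: true
});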

View file

@ -114,7 +114,7 @@ void Text3DOverlay::render(RenderArgs* args) {
glm::vec3 topLeft(-halfDimensions.x, -halfDimensions.y, SLIGHTLY_BEHIND);
glm::vec3 bottomRight(halfDimensions.x, halfDimensions.y, SLIGHTLY_BEHIND);
DependencyManager::get<GeometryCache>()->bindSimpleProgram(batch, false, quadColor.a < 1.0f, false, false, false, false);
DependencyManager::get<GeometryCache>()->bindSimpleProgram(batch, false, quadColor.a < 1.0f, false, false, false);
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, quadColor, _geometryId);
// Same font properties as textSize()

View file

@ -40,8 +40,10 @@ QUrl const TextOverlay::URL(QString("hifi/overlays/TextOverlay.qml"));
*
* @property {number} margin=0 - Sets the <code>leftMargin</code> and <code>topMargin</code> values, in pixels.
* <em>Write-only.</em>
* @property {number} leftMargin=0 - The left margin's size, in pixels. <em>Write-only.</em>
* @property {number} topMargin=0 - The top margin's size, in pixels. <em>Write-only.</em>
* @property {number} leftMargin=0 - The left margin's size, in pixels. This value is also used for the right margin.
* <em>Write-only.</em>
* @property {number} topMargin=0 - The top margin's size, in pixels. This value is also used for the bottom margin.
* <em>Write-only.</em>
* @property {string} text="" - The text to display. Text does not automatically wrap; use <code>\n</code> for a line break. Text
* is clipped to the <code>bounds</code>. <em>Write-only.</em>
* @property {number} font.size=18 - The size of the text, in pixels. <em>Write-only.</em>
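A sketch of a 2D "text" overlay using the margin behaviour documented above (not part of this commit; the pixel values are arbitrary):

var label = Overlays.addOverlay("text", {
    x: 50, y: 50,
    width: 250, height: 60,
    leftMargin: 8,   // also used for the right margin
    topMargin: 8,    // also used for the bottom margin
    text: "Status:\nOK",
    font: { size: 18 }
});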

View file

@ -78,6 +78,20 @@ int AnimSkeleton::getParentIndex(int jointIndex) const {
return _joints[jointIndex].parentIndex;
}
std::vector<int> AnimSkeleton::getChildrenOfJoint(int jointIndex) const {
// Children and grandchildren, etc.
std::vector<int> result;
if (jointIndex != -1) {
for (int i = jointIndex + 1; i < (int)_joints.size(); i++) {
if (_joints[i].parentIndex == jointIndex
|| (std::find(result.begin(), result.end(), _joints[i].parentIndex) != result.end())) {
result.push_back(i);
}
}
}
return result;
}
const QString& AnimSkeleton::getJointName(int jointIndex) const {
return _joints[jointIndex].name;
}

View file

@ -43,6 +43,7 @@ public:
const AnimPose& getPostRotationPose(int jointIndex) const;
int getParentIndex(int jointIndex) const;
std::vector<int> getChildrenOfJoint(int jointIndex) const;
AnimPose getAbsolutePose(int jointIndex, const AnimPoseVec& relativePoses) const;

View file

@ -66,16 +66,12 @@ bool ElbowConstraint::apply(glm::quat& rotation) const {
bool twistWasClamped = (twistAngle != clampedTwistAngle);
// update rotation
const float MIN_SWING_REAL_PART = 0.99999f;
if (twistWasClamped || fabsf(swingRotation.w) < MIN_SWING_REAL_PART) {
if (twistWasClamped) {
twistRotation = glm::angleAxis(clampedTwistAngle, _axis);
}
// we discard all swing and only keep twist
rotation = twistRotation * _referenceRotation;
return true;
if (twistWasClamped) {
twistRotation = glm::angleAxis(clampedTwistAngle, _axis);
}
return false;
// we discard all swing and only keep twist
rotation = twistRotation * _referenceRotation;
return true;
}
glm::quat ElbowConstraint::computeCenterRotation() const {

View file

@ -199,6 +199,8 @@ void Rig::destroyAnimGraph() {
_internalPoseSet._overridePoses.clear();
_internalPoseSet._overrideFlags.clear();
_numOverrides = 0;
_leftEyeJointChildren.clear();
_rightEyeJointChildren.clear();
}
void Rig::initJointStates(const FBXGeometry& geometry, const glm::mat4& modelOffset) {
@ -225,12 +227,17 @@ void Rig::initJointStates(const FBXGeometry& geometry, const glm::mat4& modelOff
buildAbsoluteRigPoses(_animSkeleton->getRelativeDefaultPoses(), _absoluteDefaultPoses);
_rootJointIndex = geometry.rootJointIndex;
_leftEyeJointIndex = geometry.leftEyeJointIndex;
_rightEyeJointIndex = geometry.rightEyeJointIndex;
_leftHandJointIndex = geometry.leftHandJointIndex;
_leftElbowJointIndex = _leftHandJointIndex >= 0 ? geometry.joints.at(_leftHandJointIndex).parentIndex : -1;
_leftShoulderJointIndex = _leftElbowJointIndex >= 0 ? geometry.joints.at(_leftElbowJointIndex).parentIndex : -1;
_rightHandJointIndex = geometry.rightHandJointIndex;
_rightElbowJointIndex = _rightHandJointIndex >= 0 ? geometry.joints.at(_rightHandJointIndex).parentIndex : -1;
_rightShoulderJointIndex = _rightElbowJointIndex >= 0 ? geometry.joints.at(_rightElbowJointIndex).parentIndex : -1;
_leftEyeJointChildren = _animSkeleton->getChildrenOfJoint(geometry.leftEyeJointIndex);
_rightEyeJointChildren = _animSkeleton->getChildrenOfJoint(geometry.rightEyeJointIndex);
}
void Rig::reset(const FBXGeometry& geometry) {
@ -253,6 +260,8 @@ void Rig::reset(const FBXGeometry& geometry) {
buildAbsoluteRigPoses(_animSkeleton->getRelativeDefaultPoses(), _absoluteDefaultPoses);
_rootJointIndex = geometry.rootJointIndex;
_leftEyeJointIndex = geometry.leftEyeJointIndex;
_rightEyeJointIndex = geometry.rightEyeJointIndex;
_leftHandJointIndex = geometry.leftHandJointIndex;
_leftElbowJointIndex = _leftHandJointIndex >= 0 ? geometry.joints.at(_leftHandJointIndex).parentIndex : -1;
_leftShoulderJointIndex = _leftElbowJointIndex >= 0 ? geometry.joints.at(_leftElbowJointIndex).parentIndex : -1;
@ -260,6 +269,9 @@ void Rig::reset(const FBXGeometry& geometry) {
_rightElbowJointIndex = _rightHandJointIndex >= 0 ? geometry.joints.at(_rightHandJointIndex).parentIndex : -1;
_rightShoulderJointIndex = _rightElbowJointIndex >= 0 ? geometry.joints.at(_rightElbowJointIndex).parentIndex : -1;
_leftEyeJointChildren = _animSkeleton->getChildrenOfJoint(geometry.leftEyeJointIndex);
_rightEyeJointChildren = _animSkeleton->getChildrenOfJoint(geometry.rightEyeJointIndex);
if (!_animGraphURL.isEmpty()) {
_animNode.reset();
initAnimGraph(_animGraphURL);
@ -1430,6 +1442,15 @@ void Rig::updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm
// directly set absolutePose rotation
_internalPoseSet._absolutePoses[index].rot() = deltaQuat * headQuat;
// Update eye joint's children.
auto children = index == _leftEyeJointIndex ? _leftEyeJointChildren : _rightEyeJointChildren;
for (int i = 0; i < (int)children.size(); i++) {
int jointIndex = children[i];
int parentIndex = _animSkeleton->getParentIndex(jointIndex);
_internalPoseSet._absolutePoses[jointIndex] =
_internalPoseSet._absolutePoses[parentIndex] * _internalPoseSet._relativePoses[jointIndex];
}
}
}

View file

@ -267,6 +267,11 @@ protected:
int _rootJointIndex { -1 };
int _leftEyeJointIndex { -1 };
int _rightEyeJointIndex { -1 };
std::vector<int> _leftEyeJointChildren;
std::vector<int> _rightEyeJointChildren;
int _leftHandJointIndex { -1 };
int _leftElbowJointIndex { -1 };
int _leftShoulderJointIndex { -1 };

View file

@ -43,14 +43,19 @@ void AudioClient::checkPeakValues() {
// prepare the windows environment
CoInitialize(NULL);
std::unique_lock<std::mutex> lock(_deviceMutex, std::defer_lock);
// if disabled, clean up active clients
if (!_enablePeakValues) {
activeClients.clear();
if (lock.try_lock()) {
// deferred, if timer callbacks overlap
activeClients.clear();
}
return;
}
// lock the devices so the _inputDevices list is static
std::unique_lock<std::mutex> lock(_deviceMutex);
lock.lock();
HRESULT result;
// initialize the payload

File diff suppressed because it is too large.

View file

@ -33,6 +33,10 @@ SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent) :
{
// SkeletonModels, and by extention Avatars, use Dual Quaternion skinning.
_useDualQuaternionSkinning = true;
// Avatars all cast shadow
_canCastShadow = true;
assert(_owningAvatar);
}

View file

@ -33,65 +33,8 @@
#include "FBXBaker.h"
#ifdef _WIN32
#pragma warning( push )
#pragma warning( disable : 4267 )
#endif
#include <draco/mesh/triangle_soup_mesh_builder.h>
#include <draco/compression/encode.h>
#ifdef _WIN32
#pragma warning( pop )
#endif
FBXBaker::FBXBaker(const QUrl& fbxURL, TextureBakerThreadGetter textureThreadGetter,
const QString& bakedOutputDir, const QString& originalOutputDir) :
_fbxURL(fbxURL),
_bakedOutputDir(bakedOutputDir),
_originalOutputDir(originalOutputDir),
_textureThreadGetter(textureThreadGetter)
{
}
FBXBaker::~FBXBaker() {
if (_tempDir.exists()) {
if (!_tempDir.remove(_originalFBXFilePath)) {
qCWarning(model_baking) << "Failed to remove temporary copy of fbx file:" << _originalFBXFilePath;
}
if (!_tempDir.rmdir(".")) {
qCWarning(model_baking) << "Failed to remove temporary directory:" << _tempDir;
}
}
}
void FBXBaker::abort() {
Baker::abort();
// tell our underlying TextureBaker instances to abort
// the FBXBaker will wait until all are aborted before emitting its own abort signal
for (auto& textureBaker : _bakingTextures) {
textureBaker->abort();
}
}
void FBXBaker::bake() {
qDebug() << "FBXBaker" << _fbxURL << "bake starting";
auto tempDir = PathUtils::generateTemporaryDir();
if (tempDir.isEmpty()) {
handleError("Failed to create a temporary directory.");
return;
}
_tempDir = tempDir;
_originalFBXFilePath = _tempDir.filePath(_fbxURL.fileName());
qDebug() << "Made temporary dir " << _tempDir;
qDebug() << "Origin file path: " << _originalFBXFilePath;
void FBXBaker::bake() {
qDebug() << "FBXBaker" << _modelURL << "bake starting";
// setup the output folder for the results of this bake
setupOutputFolder();
@ -152,7 +95,7 @@ void FBXBaker::setupOutputFolder() {
}
// attempt to make the output folder
if (!QDir().mkpath(_originalOutputDir)) {
handleError("Failed to create FBX output folder " + _bakedOutputDir);
handleError("Failed to create FBX output folder " + _originalOutputDir);
return;
}
}
@ -160,25 +103,25 @@ void FBXBaker::setupOutputFolder() {
void FBXBaker::loadSourceFBX() {
// check if the FBX is local or first needs to be downloaded
if (_fbxURL.isLocalFile()) {
if (_modelURL.isLocalFile()) {
// load up the local file
QFile localFBX { _fbxURL.toLocalFile() };
QFile localFBX { _modelURL.toLocalFile() };
qDebug() << "Local file url: " << _fbxURL << _fbxURL.toString() << _fbxURL.toLocalFile() << ", copying to: " << _originalFBXFilePath;
qDebug() << "Local file url: " << _modelURL << _modelURL.toString() << _modelURL.toLocalFile() << ", copying to: " << _originalModelFilePath;
if (!localFBX.exists()) {
//QMessageBox::warning(this, "Could not find " + _fbxURL.toString(), "");
handleError("Could not find " + _fbxURL.toString());
handleError("Could not find " + _modelURL.toString());
return;
}
// make a copy in the output folder
if (!_originalOutputDir.isEmpty()) {
qDebug() << "Copying to: " << _originalOutputDir << "/" << _fbxURL.fileName();
localFBX.copy(_originalOutputDir + "/" + _fbxURL.fileName());
qDebug() << "Copying to: " << _originalOutputDir << "/" << _modelURL.fileName();
localFBX.copy(_originalOutputDir + "/" + _modelURL.fileName());
}
localFBX.copy(_originalFBXFilePath);
localFBX.copy(_originalModelFilePath);
// emit our signal to start the import of the FBX source copy
emit sourceCopyReadyToLoad();
@ -193,9 +136,9 @@ void FBXBaker::loadSourceFBX() {
networkRequest.setAttribute(QNetworkRequest::CacheLoadControlAttribute, QNetworkRequest::AlwaysNetwork);
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
networkRequest.setUrl(_fbxURL);
networkRequest.setUrl(_modelURL);
qCDebug(model_baking) << "Downloading" << _fbxURL;
qCDebug(model_baking) << "Downloading" << _modelURL;
auto networkReply = networkAccessManager.get(networkRequest);
connect(networkReply, &QNetworkReply::finished, this, &FBXBaker::handleFBXNetworkReply);
@ -206,20 +149,20 @@ void FBXBaker::handleFBXNetworkReply() {
auto requestReply = qobject_cast<QNetworkReply*>(sender());
if (requestReply->error() == QNetworkReply::NoError) {
qCDebug(model_baking) << "Downloaded" << _fbxURL;
qCDebug(model_baking) << "Downloaded" << _modelURL;
// grab the contents of the reply and make a copy in the output folder
QFile copyOfOriginal(_originalFBXFilePath);
QFile copyOfOriginal(_originalModelFilePath);
qDebug(model_baking) << "Writing copy of original FBX to" << _originalFBXFilePath << copyOfOriginal.fileName();
qDebug(model_baking) << "Writing copy of original FBX to" << _originalModelFilePath << copyOfOriginal.fileName();
if (!copyOfOriginal.open(QIODevice::WriteOnly)) {
// add an error to the error list for this FBX stating that a duplicate of the original FBX could not be made
handleError("Could not create copy of " + _fbxURL.toString() + " (Failed to open " + _originalFBXFilePath + ")");
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to open " + _originalModelFilePath + ")");
return;
}
if (copyOfOriginal.write(requestReply->readAll()) == -1) {
handleError("Could not create copy of " + _fbxURL.toString() + " (Failed to write)");
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to write)");
return;
}
@ -227,98 +170,32 @@ void FBXBaker::handleFBXNetworkReply() {
copyOfOriginal.close();
if (!_originalOutputDir.isEmpty()) {
copyOfOriginal.copy(_originalOutputDir + "/" + _fbxURL.fileName());
copyOfOriginal.copy(_originalOutputDir + "/" + _modelURL.fileName());
}
// emit our signal to start the import of the FBX source copy
emit sourceCopyReadyToLoad();
} else {
// add an error to our list stating that the FBX could not be downloaded
handleError("Failed to download " + _fbxURL.toString());
handleError("Failed to download " + _modelURL.toString());
}
}
void FBXBaker::importScene() {
qDebug() << "file path: " << _originalFBXFilePath.toLocal8Bit().data() << QDir(_originalFBXFilePath).exists();
qDebug() << "file path: " << _originalModelFilePath.toLocal8Bit().data() << QDir(_originalModelFilePath).exists();
QFile fbxFile(_originalFBXFilePath);
QFile fbxFile(_originalModelFilePath);
if (!fbxFile.open(QIODevice::ReadOnly)) {
handleError("Error opening " + _originalFBXFilePath + " for reading");
handleError("Error opening " + _originalModelFilePath + " for reading");
return;
}
FBXReader reader;
qCDebug(model_baking) << "Parsing" << _fbxURL;
qCDebug(model_baking) << "Parsing" << _modelURL;
_rootNode = reader._rootNode = reader.parseFBX(&fbxFile);
_geometry = reader.extractFBXGeometry({}, _fbxURL.toString());
_textureContent = reader._textureContent;
}
QString texturePathRelativeToFBX(QUrl fbxURL, QUrl textureURL) {
auto fbxPath = fbxURL.toString(QUrl::RemoveFilename | QUrl::RemoveQuery | QUrl::RemoveFragment);
auto texturePath = textureURL.toString(QUrl::RemoveFilename | QUrl::RemoveQuery | QUrl::RemoveFragment);
if (texturePath.startsWith(fbxPath)) {
// texture path is a child of the FBX path, return the texture path without the fbx path
return texturePath.mid(fbxPath.length());
} else {
// the texture path was not a child of the FBX path, return the empty string
return "";
}
}
QString FBXBaker::createBakedTextureFileName(const QFileInfo& textureFileInfo) {
// first make sure we have a unique base name for this texture
// in case another texture referenced by this model has the same base name
auto& nameMatches = _textureNameMatchCount[textureFileInfo.baseName()];
QString bakedTextureFileName { textureFileInfo.completeBaseName() };
if (nameMatches > 0) {
// there are already nameMatches textures with this name
// append - and that number to our baked texture file name so that it is unique
bakedTextureFileName += "-" + QString::number(nameMatches);
}
bakedTextureFileName += BAKED_TEXTURE_EXT;
// increment the number of name matches
++nameMatches;
return bakedTextureFileName;
}
QUrl FBXBaker::getTextureURL(const QFileInfo& textureFileInfo, QString relativeFileName, bool isEmbedded) {
QUrl urlToTexture;
auto apparentRelativePath = QFileInfo(relativeFileName.replace("\\", "/"));
if (isEmbedded) {
urlToTexture = _fbxURL.toString() + "/" + apparentRelativePath.filePath();
} else {
if (textureFileInfo.exists() && textureFileInfo.isFile()) {
// set the texture URL to the local texture that we have confirmed exists
urlToTexture = QUrl::fromLocalFile(textureFileInfo.absoluteFilePath());
} else {
// external texture that we'll need to download or find
// this is a relative file path which will require different handling
// depending on the location of the original FBX
if (_fbxURL.isLocalFile() && apparentRelativePath.exists() && apparentRelativePath.isFile()) {
// the absolute path we ran into for the texture in the FBX exists on this machine
// so use that file
urlToTexture = QUrl::fromLocalFile(apparentRelativePath.absoluteFilePath());
} else {
// we didn't find the texture on this machine at the absolute path
// so assume that it is right beside the FBX to match the behaviour of interface
urlToTexture = _fbxURL.resolved(apparentRelativePath.fileName());
}
}
}
return urlToTexture;
_geometry = reader.extractFBXGeometry({}, _modelURL.toString());
_textureContentMap = reader._textureContent;
}
void FBXBaker::rewriteAndBakeSceneModels() {
@ -344,176 +221,25 @@ void FBXBaker::rewriteAndBakeSceneModels() {
// TODO Pull this out of _geometry instead so we don't have to reprocess it
auto extractedMesh = FBXReader::extractMesh(objectChild, meshIndex, false);
auto& mesh = extractedMesh.mesh;
if (mesh.wasCompressed) {
handleError("Cannot re-bake a file that contains compressed mesh");
return;
}
Q_ASSERT(mesh.normals.size() == 0 || mesh.normals.size() == mesh.vertices.size());
Q_ASSERT(mesh.colors.size() == 0 || mesh.colors.size() == mesh.vertices.size());
Q_ASSERT(mesh.texCoords.size() == 0 || mesh.texCoords.size() == mesh.vertices.size());
int64_t numTriangles { 0 };
for (auto& part : mesh.parts) {
if ((part.quadTrianglesIndices.size() % 3) != 0 || (part.triangleIndices.size() % 3) != 0) {
handleWarning("Found a mesh part with invalid index data, skipping");
// Callback to get MaterialID
GetMaterialIDCallback materialIDcallback = [&extractedMesh](int partIndex) {
return extractedMesh.partMaterialTextures[partIndex].first;
};
// Compress mesh information and store in dracoMeshNode
FBXNode dracoMeshNode;
bool success = compressMesh(extractedMesh.mesh, hasDeformers, dracoMeshNode, materialIDcallback);
// if the bake failed: return if there were errors, continue if there were only warnings
if (!success) {
if (hasErrors()) {
return;
} else if (hasWarnings()) {
continue;
}
numTriangles += part.quadTrianglesIndices.size() / 3;
numTriangles += part.triangleIndices.size() / 3;
}
if (numTriangles == 0) {
continue;
}
draco::TriangleSoupMeshBuilder meshBuilder;
meshBuilder.Start(numTriangles);
bool hasNormals { mesh.normals.size() > 0 };
bool hasColors { mesh.colors.size() > 0 };
bool hasTexCoords { mesh.texCoords.size() > 0 };
bool hasTexCoords1 { mesh.texCoords1.size() > 0 };
bool hasPerFaceMaterials { mesh.parts.size() > 1
|| extractedMesh.partMaterialTextures[0].first != 0 };
bool needsOriginalIndices { hasDeformers };
int normalsAttributeID { -1 };
int colorsAttributeID { -1 };
int texCoordsAttributeID { -1 };
int texCoords1AttributeID { -1 };
int faceMaterialAttributeID { -1 };
int originalIndexAttributeID { -1 };
const int positionAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::POSITION,
3, draco::DT_FLOAT32);
if (needsOriginalIndices) {
originalIndexAttributeID = meshBuilder.AddAttribute(
(draco::GeometryAttribute::Type)DRACO_ATTRIBUTE_ORIGINAL_INDEX,
1, draco::DT_INT32);
}
if (hasNormals) {
normalsAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::NORMAL,
3, draco::DT_FLOAT32);
}
if (hasColors) {
colorsAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::COLOR,
3, draco::DT_FLOAT32);
}
if (hasTexCoords) {
texCoordsAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::TEX_COORD,
2, draco::DT_FLOAT32);
}
if (hasTexCoords1) {
texCoords1AttributeID = meshBuilder.AddAttribute(
(draco::GeometryAttribute::Type)DRACO_ATTRIBUTE_TEX_COORD_1,
2, draco::DT_FLOAT32);
}
if (hasPerFaceMaterials) {
faceMaterialAttributeID = meshBuilder.AddAttribute(
(draco::GeometryAttribute::Type)DRACO_ATTRIBUTE_MATERIAL_ID,
1, draco::DT_UINT16);
}
auto partIndex = 0;
draco::FaceIndex face;
for (auto& part : mesh.parts) {
const auto& matTex = extractedMesh.partMaterialTextures[partIndex];
uint16_t materialID = matTex.first;
auto addFace = [&](QVector<int>& indices, int index, draco::FaceIndex face) {
int32_t idx0 = indices[index];
int32_t idx1 = indices[index + 1];
int32_t idx2 = indices[index + 2];
if (hasPerFaceMaterials) {
meshBuilder.SetPerFaceAttributeValueForFace(faceMaterialAttributeID, face, &materialID);
}
meshBuilder.SetAttributeValuesForFace(positionAttributeID, face,
&mesh.vertices[idx0], &mesh.vertices[idx1],
&mesh.vertices[idx2]);
if (needsOriginalIndices) {
meshBuilder.SetAttributeValuesForFace(originalIndexAttributeID, face,
&mesh.originalIndices[idx0],
&mesh.originalIndices[idx1],
&mesh.originalIndices[idx2]);
}
if (hasNormals) {
meshBuilder.SetAttributeValuesForFace(normalsAttributeID, face,
&mesh.normals[idx0], &mesh.normals[idx1],
&mesh.normals[idx2]);
}
if (hasColors) {
meshBuilder.SetAttributeValuesForFace(colorsAttributeID, face,
&mesh.colors[idx0], &mesh.colors[idx1],
&mesh.colors[idx2]);
}
if (hasTexCoords) {
meshBuilder.SetAttributeValuesForFace(texCoordsAttributeID, face,
&mesh.texCoords[idx0], &mesh.texCoords[idx1],
&mesh.texCoords[idx2]);
}
if (hasTexCoords1) {
meshBuilder.SetAttributeValuesForFace(texCoords1AttributeID, face,
&mesh.texCoords1[idx0], &mesh.texCoords1[idx1],
&mesh.texCoords1[idx2]);
}
};
for (int i = 0; (i + 2) < part.quadTrianglesIndices.size(); i += 3) {
addFace(part.quadTrianglesIndices, i, face++);
}
for (int i = 0; (i + 2) < part.triangleIndices.size(); i += 3) {
addFace(part.triangleIndices, i, face++);
}
partIndex++;
}
auto dracoMesh = meshBuilder.Finalize();
if (!dracoMesh) {
handleWarning("Failed to finalize the baking of a draco Geometry node");
continue;
}
// we need to modify unique attribute IDs for custom attributes
// so the attributes are easily retrievable on the other side
if (hasPerFaceMaterials) {
dracoMesh->attribute(faceMaterialAttributeID)->set_unique_id(DRACO_ATTRIBUTE_MATERIAL_ID);
}
if (hasTexCoords1) {
dracoMesh->attribute(texCoords1AttributeID)->set_unique_id(DRACO_ATTRIBUTE_TEX_COORD_1);
}
if (needsOriginalIndices) {
dracoMesh->attribute(originalIndexAttributeID)->set_unique_id(DRACO_ATTRIBUTE_ORIGINAL_INDEX);
}
draco::Encoder encoder;
encoder.SetAttributeQuantization(draco::GeometryAttribute::POSITION, 14);
encoder.SetAttributeQuantization(draco::GeometryAttribute::TEX_COORD, 12);
encoder.SetAttributeQuantization(draco::GeometryAttribute::NORMAL, 10);
encoder.SetSpeedOptions(0, 5);
draco::EncoderBuffer buffer;
encoder.EncodeMeshToBuffer(*dracoMesh, &buffer);
FBXNode dracoMeshNode;
dracoMeshNode.name = "DracoMesh";
auto value = QVariant::fromValue(QByteArray(buffer.data(), (int) buffer.size()));
dracoMeshNode.properties.append(value);
objectChild.children.push_back(dracoMeshNode);
static const std::vector<QString> nodeNamesToDelete {
@ -590,69 +316,25 @@ void FBXBaker::rewriteAndBakeSceneTextures() {
for (FBXNode& textureChild : object->children) {
if (textureChild.name == "RelativeFilename") {
QString fbxTextureFileName { textureChild.properties.at(0).toString() };
// grab the ID for this texture so we can figure out the
// texture type from the loaded materials
auto textureID { object->properties[0].toString() };
auto textureType = textureTypes[textureID];
// use QFileInfo to easily split up the existing texture filename into its components
QString fbxTextureFileName { textureChild.properties.at(0).toByteArray() };
QFileInfo textureFileInfo { fbxTextureFileName.replace("\\", "/") };
// Compress the texture information and return the new filename to be added into the FBX scene
auto bakedTextureFile = compressTexture(fbxTextureFileName, textureType);
if (hasErrors()) {
return;
}
if (textureFileInfo.suffix() == BAKED_TEXTURE_EXT.mid(1)) {
// re-baking an FBX that already references baked textures is a fail
// so we add an error and return from here
handleError("Cannot re-bake a file that references compressed textures");
return;
}
if (!image::getSupportedFormats().contains(textureFileInfo.suffix())) {
// this is a texture format we don't bake, skip it
handleWarning(fbxTextureFileName + " is not a bakeable texture format");
continue;
}
// make sure this texture points to something and isn't one we've already re-mapped
if (!textureFileInfo.filePath().isEmpty()) {
// check if this was an embedded texture we have already have in-memory content for
auto textureContent = _textureContent.value(fbxTextureFileName.toLocal8Bit());
// figure out the URL to this texture, embedded or external
auto urlToTexture = getTextureURL(textureFileInfo, fbxTextureFileName,
!textureContent.isNull());
QString bakedTextureFileName;
if (_remappedTexturePaths.contains(urlToTexture)) {
bakedTextureFileName = _remappedTexturePaths[urlToTexture];
} else {
// construct the new baked texture file name and file path
// ensuring that the baked texture will have a unique name
// even if there was another texture with the same name at a different path
bakedTextureFileName = createBakedTextureFileName(textureFileInfo);
_remappedTexturePaths[urlToTexture] = bakedTextureFileName;
}
qCDebug(model_baking).noquote() << "Re-mapping" << fbxTextureFileName
<< "to" << bakedTextureFileName;
QString bakedTextureFilePath {
_bakedOutputDir + "/" + bakedTextureFileName
};
// write the new filename into the FBX scene
textureChild.properties[0] = bakedTextureFileName.toLocal8Bit();
if (!_bakingTextures.contains(urlToTexture)) {
_outputFiles.push_back(bakedTextureFilePath);
// grab the ID for this texture so we can figure out the
// texture type from the loaded materials
QString textureID { object->properties[0].toByteArray() };
auto textureType = textureTypes[textureID];
// bake this texture asynchronously
bakeTexture(urlToTexture, textureType, _bakedOutputDir, bakedTextureFileName, textureContent);
// If no errors or warnings have occurred during texture compression add the filename to the FBX scene
if (!bakedTextureFile.isNull()) {
textureChild.properties[0] = bakedTextureFile;
} else {
// if the bake failed: return if there were errors, continue if there were only warnings
if (hasErrors()) {
return;
} else if (hasWarnings()) {
continue;
}
}
}
@ -671,172 +353,26 @@ void FBXBaker::rewriteAndBakeSceneTextures() {
}
}
void FBXBaker::bakeTexture(const QUrl& textureURL, image::TextureUsage::Type textureType,
const QDir& outputDir, const QString& bakedFilename, const QByteArray& textureContent) {
// start a bake for this texture and add it to our list to keep track of
QSharedPointer<TextureBaker> bakingTexture {
new TextureBaker(textureURL, textureType, outputDir, bakedFilename, textureContent),
&TextureBaker::deleteLater
};
// make sure we hear when the baking texture is done or aborted
connect(bakingTexture.data(), &Baker::finished, this, &FBXBaker::handleBakedTexture);
connect(bakingTexture.data(), &TextureBaker::aborted, this, &FBXBaker::handleAbortedTexture);
// keep a shared pointer to the baking texture
_bakingTextures.insert(textureURL, bakingTexture);
// start baking the texture on one of our available worker threads
bakingTexture->moveToThread(_textureThreadGetter());
QMetaObject::invokeMethod(bakingTexture.data(), "bake");
}
void FBXBaker::handleBakedTexture() {
TextureBaker* bakedTexture = qobject_cast<TextureBaker*>(sender());
// make sure we haven't already run into errors, and that this is a valid texture
if (bakedTexture) {
if (!shouldStop()) {
if (!bakedTexture->hasErrors()) {
if (!_originalOutputDir.isEmpty()) {
// we've been asked to make copies of the originals, so we need to make copies of this if it is a linked texture
// use the path to the texture being baked to determine if this was an embedded or a linked texture
// it is embedded if the texture being baked was inside a folder with the name of the FBX
// since that is the fake URL we provide when baking external textures
if (!_fbxURL.isParentOf(bakedTexture->getTextureURL())) {
// for linked textures we want to save a copy of original texture beside the original FBX
qCDebug(model_baking) << "Saving original texture for" << bakedTexture->getTextureURL();
// check if we have a relative path to use for the texture
auto relativeTexturePath = texturePathRelativeToFBX(_fbxURL, bakedTexture->getTextureURL());
QFile originalTextureFile {
_originalOutputDir + "/" + relativeTexturePath + bakedTexture->getTextureURL().fileName()
};
if (relativeTexturePath.length() > 0) {
// make the folders needed by the relative path
}
if (originalTextureFile.open(QIODevice::WriteOnly) && originalTextureFile.write(bakedTexture->getOriginalTexture()) != -1) {
qCDebug(model_baking) << "Saved original texture file" << originalTextureFile.fileName()
<< "for" << _fbxURL;
} else {
handleError("Could not save original external texture " + originalTextureFile.fileName()
+ " for " + _fbxURL.toString());
return;
}
}
}
// now that this texture has been baked and handled, we can remove that TextureBaker from our hash
_bakingTextures.remove(bakedTexture->getTextureURL());
checkIfTexturesFinished();
} else {
// there was an error baking this texture - add it to our list of errors
_errorList.append(bakedTexture->getErrors());
// we don't emit finished yet so that the other textures can finish baking first
_pendingErrorEmission = true;
// now that this texture has been baked, even though it failed, we can remove that TextureBaker from our list
_bakingTextures.remove(bakedTexture->getTextureURL());
// abort any other ongoing texture bakes since we know we'll end up failing
for (auto& bakingTexture : _bakingTextures) {
bakingTexture->abort();
}
checkIfTexturesFinished();
}
} else {
// we have errors to attend to, so we don't do extra processing for this texture
// but we do need to remove that TextureBaker from our list
// and then check if we're done with all textures
_bakingTextures.remove(bakedTexture->getTextureURL());
checkIfTexturesFinished();
}
}
}
void FBXBaker::handleAbortedTexture() {
// grab the texture bake that was aborted and remove it from our hash since we don't need to track it anymore
TextureBaker* bakedTexture = qobject_cast<TextureBaker*>(sender());
if (bakedTexture) {
_bakingTextures.remove(bakedTexture->getTextureURL());
}
// since a texture we were baking aborted, our status is also aborted
_shouldAbort.store(true);
// abort any other ongoing texture bakes since we know we'll end up failing
for (auto& bakingTexture : _bakingTextures) {
bakingTexture->abort();
}
checkIfTexturesFinished();
}
void FBXBaker::exportScene() {
// save the relative path to this FBX inside our passed output folder
auto fileName = _fbxURL.fileName();
auto fileName = _modelURL.fileName();
auto baseName = fileName.left(fileName.lastIndexOf('.'));
auto bakedFilename = baseName + BAKED_FBX_EXTENSION;
_bakedFBXFilePath = _bakedOutputDir + "/" + bakedFilename;
_bakedModelFilePath = _bakedOutputDir + "/" + bakedFilename;
auto fbxData = FBXWriter::encodeFBX(_rootNode);
QFile bakedFile(_bakedFBXFilePath);
QFile bakedFile(_bakedModelFilePath);
if (!bakedFile.open(QIODevice::WriteOnly)) {
handleError("Error opening " + _bakedFBXFilePath + " for writing");
handleError("Error opening " + _bakedModelFilePath + " for writing");
return;
}
bakedFile.write(fbxData);
_outputFiles.push_back(_bakedFBXFilePath);
_outputFiles.push_back(_bakedModelFilePath);
qCDebug(model_baking) << "Exported" << _fbxURL << "with re-written paths to" << _bakedFBXFilePath;
}
void FBXBaker::checkIfTexturesFinished() {
// check if we're done everything we need to do for this FBX
// and emit our finished signal if we're done
if (_bakingTextures.isEmpty()) {
if (shouldStop()) {
// if we're checking for completion but we have errors
// that means one or more of our texture baking operations failed
if (_pendingErrorEmission) {
setIsFinished(true);
}
return;
} else {
qCDebug(model_baking) << "Finished baking, emitting finished" << _fbxURL;
setIsFinished(true);
}
}
}
void FBXBaker::setWasAborted(bool wasAborted) {
if (wasAborted != _wasAborted.load()) {
Baker::setWasAborted(wasAborted);
if (wasAborted) {
qCDebug(model_baking) << "Aborted baking" << _fbxURL;
}
}
qCDebug(model_baking) << "Exported" << _modelURL << "with re-written paths to" << _bakedModelFilePath;
}

View file

@ -19,7 +19,7 @@
#include "Baker.h"
#include "TextureBaker.h"
#include "ModelBaker.h"
#include "ModelBakingLoggingCategory.h"
#include <gpu/Texture.h>
@ -30,21 +30,13 @@ static const QString BAKED_FBX_EXTENSION = ".baked.fbx";
using TextureBakerThreadGetter = std::function<QThread*()>;
class FBXBaker : public Baker {
class FBXBaker : public ModelBaker {
Q_OBJECT
public:
FBXBaker(const QUrl& fbxURL, TextureBakerThreadGetter textureThreadGetter,
const QString& bakedOutputDir, const QString& originalOutputDir = "");
~FBXBaker() override;
QUrl getFBXUrl() const { return _fbxURL; }
QString getBakedFBXFilePath() const { return _bakedFBXFilePath; }
virtual void setWasAborted(bool wasAborted) override;
using ModelBaker::ModelBaker;
public slots:
virtual void bake() override;
virtual void abort() override;
signals:
void sourceCopyReadyToLoad();
@ -52,8 +44,6 @@ signals:
private slots:
void bakeSourceCopy();
void handleFBXNetworkReply();
void handleBakedTexture();
void handleAbortedTexture();
private:
void setupOutputFolder();
@ -64,38 +54,12 @@ private:
void rewriteAndBakeSceneModels();
void rewriteAndBakeSceneTextures();
void exportScene();
void removeEmbeddedMediaFolder();
void checkIfTexturesFinished();
QString createBakedTextureFileName(const QFileInfo& textureFileInfo);
QUrl getTextureURL(const QFileInfo& textureFileInfo, QString relativeFileName, bool isEmbedded = false);
void bakeTexture(const QUrl& textureURL, image::TextureUsage::Type textureType, const QDir& outputDir,
const QString& bakedFilename, const QByteArray& textureContent = QByteArray());
QUrl _fbxURL;
FBXNode _rootNode;
FBXGeometry* _geometry;
QHash<QByteArray, QByteArray> _textureContent;
QString _bakedFBXFilePath;
QString _bakedOutputDir;
// If set, the original FBX and textures will also be copied here
QString _originalOutputDir;
QDir _tempDir;
QString _originalFBXFilePath;
QMultiHash<QUrl, QSharedPointer<TextureBaker>> _bakingTextures;
QHash<QString, int> _textureNameMatchCount;
QHash<QUrl, QString> _remappedTexturePaths;
TextureBakerThreadGetter _textureThreadGetter;
bool _pendingErrorEmission { false };
};
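After this refactor FBXBaker keeps only the FBX-specific steps and takes its constructor straight from the base class via using ModelBaker::ModelBaker;. A generic sketch of the inheriting-constructors idiom, with illustrative names rather than the project's classes:

#include <QtCore/QString>
#include <QtCore/QUrl>

class BaseBaker {
public:
    BaseBaker(const QUrl& url, const QString& bakedOutputDir)
        : _url(url), _bakedOutputDir(bakedOutputDir) {}
protected:
    QUrl _url;
    QString _bakedOutputDir;
};

class FormatSpecificBaker : public BaseBaker {
public:
    using BaseBaker::BaseBaker; // reuse the base constructor instead of re-declaring it
    void bake();                // only the format-specific work lives in the subclass
};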

View file

@ -0,0 +1,521 @@
//
// ModelBaker.cpp
// libraries/baking/src
//
// Created by Utkarsh Gautam on 9/29/17.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "ModelBaker.h"
#include <PathUtils.h>
#include <FBXReader.h>
#include <FBXWriter.h>
#ifdef _WIN32
#pragma warning( push )
#pragma warning( disable : 4267 )
#endif
#include <draco/mesh/triangle_soup_mesh_builder.h>
#include <draco/compression/encode.h>
#ifdef _WIN32
#pragma warning( pop )
#endif
ModelBaker::ModelBaker(const QUrl& inputModelURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory) :
_modelURL(inputModelURL),
_bakedOutputDir(bakedOutputDirectory),
_originalOutputDir(originalOutputDirectory),
_textureThreadGetter(inputTextureThreadGetter)
{
auto tempDir = PathUtils::generateTemporaryDir();
if (tempDir.isEmpty()) {
handleError("Failed to create a temporary directory.");
return;
}
_modelTempDir = tempDir;
_originalModelFilePath = _modelTempDir.filePath(_modelURL.fileName());
qDebug() << "Made temporary dir " << _modelTempDir;
qDebug() << "Origin file path: " << _originalModelFilePath;
}
ModelBaker::~ModelBaker() {
if (_modelTempDir.exists()) {
if (!_modelTempDir.remove(_originalModelFilePath)) {
qCWarning(model_baking) << "Failed to remove temporary copy of fbx file:" << _originalModelFilePath;
}
if (!_modelTempDir.rmdir(".")) {
qCWarning(model_baking) << "Failed to remove temporary directory:" << _modelTempDir;
}
}
}
void ModelBaker::abort() {
Baker::abort();
// tell our underlying TextureBaker instances to abort
// the ModelBaker will wait until all are aborted before emitting its own abort signal
for (auto& textureBaker : _bakingTextures) {
textureBaker->abort();
}
}
bool ModelBaker::compressMesh(FBXMesh& mesh, bool hasDeformers, FBXNode& dracoMeshNode, GetMaterialIDCallback materialIDCallback) {
if (mesh.wasCompressed) {
handleError("Cannot re-bake a file that contains compressed mesh");
return false;
}
Q_ASSERT(mesh.normals.size() == 0 || mesh.normals.size() == mesh.vertices.size());
Q_ASSERT(mesh.colors.size() == 0 || mesh.colors.size() == mesh.vertices.size());
Q_ASSERT(mesh.texCoords.size() == 0 || mesh.texCoords.size() == mesh.vertices.size());
int64_t numTriangles{ 0 };
for (auto& part : mesh.parts) {
if ((part.quadTrianglesIndices.size() % 3) != 0 || (part.triangleIndices.size() % 3) != 0) {
handleWarning("Found a mesh part with invalid index data, skipping");
continue;
}
numTriangles += part.quadTrianglesIndices.size() / 3;
numTriangles += part.triangleIndices.size() / 3;
}
if (numTriangles == 0) {
return false;
}
draco::TriangleSoupMeshBuilder meshBuilder;
meshBuilder.Start(numTriangles);
bool hasNormals{ mesh.normals.size() > 0 };
bool hasColors{ mesh.colors.size() > 0 };
bool hasTexCoords{ mesh.texCoords.size() > 0 };
bool hasTexCoords1{ mesh.texCoords1.size() > 0 };
bool hasPerFaceMaterials = (materialIDCallback) ? (mesh.parts.size() > 1 || materialIDCallback(0) != 0 ) : true;
bool needsOriginalIndices{ hasDeformers };
int normalsAttributeID { -1 };
int colorsAttributeID { -1 };
int texCoordsAttributeID { -1 };
int texCoords1AttributeID { -1 };
int faceMaterialAttributeID { -1 };
int originalIndexAttributeID { -1 };
const int positionAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::POSITION,
3, draco::DT_FLOAT32);
if (needsOriginalIndices) {
originalIndexAttributeID = meshBuilder.AddAttribute(
(draco::GeometryAttribute::Type)DRACO_ATTRIBUTE_ORIGINAL_INDEX,
1, draco::DT_INT32);
}
if (hasNormals) {
normalsAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::NORMAL,
3, draco::DT_FLOAT32);
}
if (hasColors) {
colorsAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::COLOR,
3, draco::DT_FLOAT32);
}
if (hasTexCoords) {
texCoordsAttributeID = meshBuilder.AddAttribute(draco::GeometryAttribute::TEX_COORD,
2, draco::DT_FLOAT32);
}
if (hasTexCoords1) {
texCoords1AttributeID = meshBuilder.AddAttribute(
(draco::GeometryAttribute::Type)DRACO_ATTRIBUTE_TEX_COORD_1,
2, draco::DT_FLOAT32);
}
if (hasPerFaceMaterials) {
faceMaterialAttributeID = meshBuilder.AddAttribute(
(draco::GeometryAttribute::Type)DRACO_ATTRIBUTE_MATERIAL_ID,
1, draco::DT_UINT16);
}
auto partIndex = 0;
draco::FaceIndex face;
uint16_t materialID;
for (auto& part : mesh.parts) {
materialID = (materialIDCallback) ? materialIDCallback(partIndex) : partIndex;
auto addFace = [&](QVector<int>& indices, int index, draco::FaceIndex face) {
int32_t idx0 = indices[index];
int32_t idx1 = indices[index + 1];
int32_t idx2 = indices[index + 2];
if (hasPerFaceMaterials) {
meshBuilder.SetPerFaceAttributeValueForFace(faceMaterialAttributeID, face, &materialID);
}
meshBuilder.SetAttributeValuesForFace(positionAttributeID, face,
&mesh.vertices[idx0], &mesh.vertices[idx1],
&mesh.vertices[idx2]);
if (needsOriginalIndices) {
meshBuilder.SetAttributeValuesForFace(originalIndexAttributeID, face,
&mesh.originalIndices[idx0],
&mesh.originalIndices[idx1],
&mesh.originalIndices[idx2]);
}
if (hasNormals) {
meshBuilder.SetAttributeValuesForFace(normalsAttributeID, face,
&mesh.normals[idx0], &mesh.normals[idx1],
&mesh.normals[idx2]);
}
if (hasColors) {
meshBuilder.SetAttributeValuesForFace(colorsAttributeID, face,
&mesh.colors[idx0], &mesh.colors[idx1],
&mesh.colors[idx2]);
}
if (hasTexCoords) {
meshBuilder.SetAttributeValuesForFace(texCoordsAttributeID, face,
&mesh.texCoords[idx0], &mesh.texCoords[idx1],
&mesh.texCoords[idx2]);
}
if (hasTexCoords1) {
meshBuilder.SetAttributeValuesForFace(texCoords1AttributeID, face,
&mesh.texCoords1[idx0], &mesh.texCoords1[idx1],
&mesh.texCoords1[idx2]);
}
};
for (int i = 0; (i + 2) < part.quadTrianglesIndices.size(); i += 3) {
addFace(part.quadTrianglesIndices, i, face++);
}
for (int i = 0; (i + 2) < part.triangleIndices.size(); i += 3) {
addFace(part.triangleIndices, i, face++);
}
partIndex++;
}
auto dracoMesh = meshBuilder.Finalize();
if (!dracoMesh) {
handleWarning("Failed to finalize the baking of a draco Geometry node");
return false;
}
// we need to modify unique attribute IDs for custom attributes
// so the attributes are easily retrievable on the other side
if (hasPerFaceMaterials) {
dracoMesh->attribute(faceMaterialAttributeID)->set_unique_id(DRACO_ATTRIBUTE_MATERIAL_ID);
}
if (hasTexCoords1) {
dracoMesh->attribute(texCoords1AttributeID)->set_unique_id(DRACO_ATTRIBUTE_TEX_COORD_1);
}
if (needsOriginalIndices) {
dracoMesh->attribute(originalIndexAttributeID)->set_unique_id(DRACO_ATTRIBUTE_ORIGINAL_INDEX);
}
draco::Encoder encoder;
encoder.SetAttributeQuantization(draco::GeometryAttribute::POSITION, 14);
encoder.SetAttributeQuantization(draco::GeometryAttribute::TEX_COORD, 12);
encoder.SetAttributeQuantization(draco::GeometryAttribute::NORMAL, 10);
encoder.SetSpeedOptions(0, 5);
draco::EncoderBuffer buffer;
encoder.EncodeMeshToBuffer(*dracoMesh, &buffer);
FBXNode dracoNode;
dracoNode.name = "DracoMesh";
auto value = QVariant::fromValue(QByteArray(buffer.data(), (int)buffer.size()));
dracoNode.properties.append(value);
dracoMeshNode = dracoNode;
// Mesh compression successful return true
return true;
}
QString ModelBaker::compressTexture(QString modelTextureFileName, image::TextureUsage::Type textureType) {
QFileInfo modelTextureFileInfo{ modelTextureFileName.replace("\\", "/") };
if (modelTextureFileInfo.suffix() == BAKED_TEXTURE_EXT.mid(1)) {
// re-baking a model that already references baked textures
// this is an error - return from here
handleError("Cannot re-bake a file that already references compressed textures");
return QString::null;
}
if (!image::getSupportedFormats().contains(modelTextureFileInfo.suffix())) {
// this is a texture format we don't bake, skip it
handleWarning(modelTextureFileName + " is not a bakeable texture format");
return QString::null;
}
// make sure this texture points to something and isn't one we've already re-mapped
QString textureChild { QString::null };
if (!modelTextureFileInfo.filePath().isEmpty()) {
// check if this was an embedded texture that we already have in-memory content for
QByteArray textureContent;
// figure out the URL to this texture, embedded or external
if (!modelTextureFileInfo.filePath().isEmpty()) {
textureContent = _textureContentMap.value(modelTextureFileName.toLocal8Bit());
}
auto urlToTexture = getTextureURL(modelTextureFileInfo, modelTextureFileName, !textureContent.isNull());
QString bakedTextureFileName;
if (_remappedTexturePaths.contains(urlToTexture)) {
bakedTextureFileName = _remappedTexturePaths[urlToTexture];
} else {
// construct the new baked texture file name and file path
// ensuring that the baked texture will have a unique name
// even if there was another texture with the same name at a different path
bakedTextureFileName = createBakedTextureFileName(modelTextureFileInfo);
_remappedTexturePaths[urlToTexture] = bakedTextureFileName;
}
qCDebug(model_baking).noquote() << "Re-mapping" << modelTextureFileName
<< "to" << bakedTextureFileName;
QString bakedTextureFilePath{
_bakedOutputDir + "/" + bakedTextureFileName
};
textureChild = bakedTextureFileName;
if (!_bakingTextures.contains(urlToTexture)) {
_outputFiles.push_back(bakedTextureFilePath);
// bake this texture asynchronously
bakeTexture(urlToTexture, textureType, _bakedOutputDir, bakedTextureFileName, textureContent);
}
}
return textureChild;
}
void ModelBaker::bakeTexture(const QUrl& textureURL, image::TextureUsage::Type textureType,
const QDir& outputDir, const QString& bakedFilename, const QByteArray& textureContent) {
// start a bake for this texture and add it to our list to keep track of
QSharedPointer<TextureBaker> bakingTexture{
new TextureBaker(textureURL, textureType, outputDir, bakedFilename, textureContent),
&TextureBaker::deleteLater
};
// make sure we hear when the baking texture is done or aborted
connect(bakingTexture.data(), &Baker::finished, this, &ModelBaker::handleBakedTexture);
connect(bakingTexture.data(), &TextureBaker::aborted, this, &ModelBaker::handleAbortedTexture);
// keep a shared pointer to the baking texture
_bakingTextures.insert(textureURL, bakingTexture);
// start baking the texture on one of our available worker threads
bakingTexture->moveToThread(_textureThreadGetter());
QMetaObject::invokeMethod(bakingTexture.data(), "bake");
}
void ModelBaker::handleBakedTexture() {
TextureBaker* bakedTexture = qobject_cast<TextureBaker*>(sender());
qDebug() << "Handling baked texture" << bakedTexture->getTextureURL();
// make sure we haven't already run into errors, and that this is a valid texture
if (bakedTexture) {
if (!shouldStop()) {
if (!bakedTexture->hasErrors()) {
if (!_originalOutputDir.isEmpty()) {
// we've been asked to make copies of the originals, so we need to make copies of this if it is a linked texture
// use the path to the texture being baked to determine if this was an embedded or a linked texture
// it is embedded if the texture being baked was inside a folder with the name of the model
// since that is the fake URL we provide when baking external textures
if (!_modelURL.isParentOf(bakedTexture->getTextureURL())) {
// for linked textures we want to save a copy of original texture beside the original model
qCDebug(model_baking) << "Saving original texture for" << bakedTexture->getTextureURL();
// check if we have a relative path to use for the texture
auto relativeTexturePath = texturePathRelativeToModel(_modelURL, bakedTexture->getTextureURL());
QFile originalTextureFile{
_originalOutputDir + "/" + relativeTexturePath + bakedTexture->getTextureURL().fileName()
};
if (relativeTexturePath.length() > 0) {
// make the folders needed by the relative path
}
if (originalTextureFile.open(QIODevice::WriteOnly) && originalTextureFile.write(bakedTexture->getOriginalTexture()) != -1) {
qCDebug(model_baking) << "Saved original texture file" << originalTextureFile.fileName()
<< "for" << _modelURL;
} else {
handleError("Could not save original external texture " + originalTextureFile.fileName()
+ " for " + _modelURL.toString());
return;
}
}
}
// now that this texture has been baked and handled, we can remove that TextureBaker from our hash
_bakingTextures.remove(bakedTexture->getTextureURL());
checkIfTexturesFinished();
} else {
// there was an error baking this texture - add it to our list of errors
_errorList.append(bakedTexture->getErrors());
// we don't emit finished yet so that the other textures can finish baking first
_pendingErrorEmission = true;
// now that this texture has been baked, even though it failed, we can remove that TextureBaker from our list
_bakingTextures.remove(bakedTexture->getTextureURL());
// abort any other ongoing texture bakes since we know we'll end up failing
for (auto& bakingTexture : _bakingTextures) {
bakingTexture->abort();
}
checkIfTexturesFinished();
}
} else {
// we have errors to attend to, so we don't do extra processing for this texture
// but we do need to remove that TextureBaker from our list
// and then check if we're done with all textures
_bakingTextures.remove(bakedTexture->getTextureURL());
checkIfTexturesFinished();
}
}
}
void ModelBaker::handleAbortedTexture() {
// grab the texture bake that was aborted and remove it from our hash since we don't need to track it anymore
TextureBaker* bakedTexture = qobject_cast<TextureBaker*>(sender());
qDebug() << "Texture aborted: " << bakedTexture->getTextureURL();
if (bakedTexture) {
_bakingTextures.remove(bakedTexture->getTextureURL());
}
// since a texture we were baking aborted, our status is also aborted
_shouldAbort.store(true);
// abort any other ongoing texture bakes since we know we'll end up failing
for (auto& bakingTexture : _bakingTextures) {
bakingTexture->abort();
}
checkIfTexturesFinished();
}
QUrl ModelBaker::getTextureURL(const QFileInfo& textureFileInfo, QString relativeFileName, bool isEmbedded) {
QUrl urlToTexture;
// use QFileInfo to easily split up the existing texture filename into its components
auto apparentRelativePath = QFileInfo(relativeFileName.replace("\\", "/"));
if (isEmbedded) {
urlToTexture = _modelURL.toString() + "/" + apparentRelativePath.filePath();
} else {
if (textureFileInfo.exists() && textureFileInfo.isFile()) {
// set the texture URL to the local texture that we have confirmed exists
urlToTexture = QUrl::fromLocalFile(textureFileInfo.absoluteFilePath());
} else {
// external texture that we'll need to download or find
// this is a relative file path which will require different handling
// depending on the location of the original model
if (_modelURL.isLocalFile() && apparentRelativePath.exists() && apparentRelativePath.isFile()) {
// the absolute path we ran into for the texture in the model exists on this machine
// so use that file
urlToTexture = QUrl::fromLocalFile(apparentRelativePath.absoluteFilePath());
} else {
// we didn't find the texture on this machine at the absolute path
// so assume that it is right beside the model to match the behaviour of interface
urlToTexture = _modelURL.resolved(apparentRelativePath.fileName());
}
}
}
return urlToTexture;
}
QString ModelBaker::texturePathRelativeToModel(QUrl modelURL, QUrl textureURL) {
auto modelPath = modelURL.toString(QUrl::RemoveFilename | QUrl::RemoveQuery | QUrl::RemoveFragment);
auto texturePath = textureURL.toString(QUrl::RemoveFilename | QUrl::RemoveQuery | QUrl::RemoveFragment);
if (texturePath.startsWith(modelPath)) {
// texture path is a child of the model path, return the texture path without the model path
return texturePath.mid(modelPath.length());
} else {
// the texture path was not a child of the model path, return the empty string
return "";
}
}
void ModelBaker::checkIfTexturesFinished() {
// check if we're done everything we need to do for this model
// and emit our finished signal if we're done
if (_bakingTextures.isEmpty()) {
if (shouldStop()) {
// if we're checking for completion but we have errors
// that means one or more of our texture baking operations failed
if (_pendingErrorEmission) {
setIsFinished(true);
}
return;
} else {
qCDebug(model_baking) << "Finished baking, emitting finished" << _modelURL;
setIsFinished(true);
}
}
}
QString ModelBaker::createBakedTextureFileName(const QFileInfo& textureFileInfo) {
// first make sure we have a unique base name for this texture
// in case another texture referenced by this model has the same base name
auto& nameMatches = _textureNameMatchCount[textureFileInfo.baseName()];
QString bakedTextureFileName{ textureFileInfo.completeBaseName() };
if (nameMatches > 0) {
// there are already nameMatches textures with this name
// append - and that number to our baked texture file name so that it is unique
bakedTextureFileName += "-" + QString::number(nameMatches);
}
bakedTextureFileName += BAKED_TEXTURE_EXT;
// increment the number of name matches
++nameMatches;
return bakedTextureFileName;
}
void ModelBaker::setWasAborted(bool wasAborted) {
if (wasAborted != _wasAborted.load()) {
Baker::setWasAborted(wasAborted);
if (wasAborted) {
qCDebug(model_baking) << "Aborted baking" << _modelURL;
}
}
}
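compressMesh() above feeds Draco's TriangleSoupMeshBuilder and then encodes with fixed quantization and speed settings. Isolated for reference, the encoding step looks roughly like this (a sketch that assumes a draco::Mesh has already been built; the bit counts match the ones used above):

#include <draco/compression/encode.h>
#include <draco/mesh/mesh.h>

// Encode an already-built draco::Mesh with the same settings ModelBaker::compressMesh() uses.
draco::EncoderBuffer encodeBakedMesh(const draco::Mesh& mesh) {
    draco::Encoder encoder;
    encoder.SetAttributeQuantization(draco::GeometryAttribute::POSITION, 14); // 14 bits per position component
    encoder.SetAttributeQuantization(draco::GeometryAttribute::TEX_COORD, 12);
    encoder.SetAttributeQuantization(draco::GeometryAttribute::NORMAL, 10);
    encoder.SetSpeedOptions(0, 5); // 0 = slowest, best-compressing encode; 5 = mid-speed decode
    draco::EncoderBuffer buffer;
    encoder.EncodeMeshToBuffer(mesh, &buffer); // encoding status ignored in this sketch
    return buffer;
}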

View file

@ -0,0 +1,79 @@
//
// ModelBaker.h
// libraries/baking/src
//
// Created by Utkarsh Gautam on 9/29/17.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_ModelBaker_h
#define hifi_ModelBaker_h
#include <QtCore/QFutureSynchronizer>
#include <QtCore/QDir>
#include <QtCore/QUrl>
#include <QtNetwork/QNetworkReply>
#include "Baker.h"
#include "TextureBaker.h"
#include "ModelBakingLoggingCategory.h"
#include <gpu/Texture.h>
#include <FBX.h>
using TextureBakerThreadGetter = std::function<QThread*()>;
using GetMaterialIDCallback = std::function <int(int)>;
class ModelBaker : public Baker {
Q_OBJECT
public:
ModelBaker(const QUrl& inputModelURL, TextureBakerThreadGetter inputTextureThreadGetter,
const QString& bakedOutputDirectory, const QString& originalOutputDirectory = "");
virtual ~ModelBaker();
bool compressMesh(FBXMesh& mesh, bool hasDeformers, FBXNode& dracoMeshNode, GetMaterialIDCallback materialIDCallback = nullptr);
QString compressTexture(QString textureFileName, image::TextureUsage::Type = image::TextureUsage::Type::DEFAULT_TEXTURE);
virtual void setWasAborted(bool wasAborted) override;
QUrl getModelURL() const { return _modelURL; }
QString getBakedModelFilePath() const { return _bakedModelFilePath; }
public slots:
virtual void abort() override;
protected:
void checkIfTexturesFinished();
QHash<QByteArray, QByteArray> _textureContentMap;
QUrl _modelURL;
QString _bakedOutputDir;
QString _originalOutputDir;
QString _bakedModelFilePath;
QDir _modelTempDir;
QString _originalModelFilePath;
private slots:
void handleBakedTexture();
void handleAbortedTexture();
private:
QString createBakedTextureFileName(const QFileInfo & textureFileInfo);
QUrl getTextureURL(const QFileInfo& textureFileInfo, QString relativeFileName, bool isEmbedded = false);
void bakeTexture(const QUrl & textureURL, image::TextureUsage::Type textureType, const QDir & outputDir,
const QString & bakedFilename, const QByteArray & textureContent);
QString texturePathRelativeToModel(QUrl modelURL, QUrl textureURL);
TextureBakerThreadGetter _textureThreadGetter;
QMultiHash<QUrl, QSharedPointer<TextureBaker>> _bakingTextures;
QHash<QString, int> _textureNameMatchCount;
QHash<QUrl, QString> _remappedTexturePaths;
bool _pendingErrorEmission{ false };
};
#endif // hifi_ModelBaker_h
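For context, a caller of one of these bakers (the oven tool, for instance) would construct it, listen for the Baker::finished signal, and kick bake() off on a worker thread, mirroring how ModelBaker drives its TextureBakers. A rough usage sketch under those assumptions, not code taken from the project:

// Hypothetical driver code; modelURL, textureThreadGetter, bakedOutputDir,
// originalOutputDir and bakerThread are assumed to be set up by the caller.
auto baker = QSharedPointer<FBXBaker>::create(modelURL, textureThreadGetter, bakedOutputDir, originalOutputDir);
QObject::connect(baker.data(), &Baker::finished, [baker]() {
    if (baker->hasErrors()) {
        qWarning() << "Bake failed:" << baker->getErrors();
    } else {
        qDebug() << "Baked model written to" << baker->getBakedModelFilePath();
    }
});
baker->moveToThread(bakerThread);                // run the bake off the main thread
QMetaObject::invokeMethod(baker.data(), "bake"); // queued call executes on bakerThread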

View file

@ -0,0 +1,404 @@
//
// OBJBaker.cpp
// libraries/baking/src
//
// Created by Utkarsh Gautam on 9/29/17.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <PathUtils.h>
#include <NetworkAccessManager.h>
#include "OBJBaker.h"
#include "OBJReader.h"
#include "FBXWriter.h"
const double UNIT_SCALE_FACTOR = 100.0;
const QByteArray PROPERTIES70_NODE_NAME = "Properties70";
const QByteArray P_NODE_NAME = "P";
const QByteArray C_NODE_NAME = "C";
const QByteArray FBX_HEADER_EXTENSION = "FBXHeaderExtension";
const QByteArray GLOBAL_SETTINGS_NODE_NAME = "GlobalSettings";
const QByteArray OBJECTS_NODE_NAME = "Objects";
const QByteArray GEOMETRY_NODE_NAME = "Geometry";
const QByteArray MODEL_NODE_NAME = "Model";
const QByteArray MATERIAL_NODE_NAME = "Material";
const QByteArray TEXTURE_NODE_NAME = "Texture";
const QByteArray TEXTURENAME_NODE_NAME = "TextureName";
const QByteArray RELATIVEFILENAME_NODE_NAME = "RelativeFilename";
const QByteArray CONNECTIONS_NODE_NAME = "Connections";
const QByteArray CONNECTIONS_NODE_PROPERTY = "OO";
const QByteArray CONNECTIONS_NODE_PROPERTY_1 = "OP";
const QByteArray MESH = "Mesh";
void OBJBaker::bake() {
qDebug() << "OBJBaker" << _modelURL << "bake starting";
// trigger bakeOBJ once OBJ is loaded
connect(this, &OBJBaker::OBJLoaded, this, &OBJBaker::bakeOBJ);
// make a local copy of the OBJ
loadOBJ();
}
void OBJBaker::loadOBJ() {
if (!QDir().mkpath(_bakedOutputDir)) {
handleError("Failed to create baked OBJ output folder " + _bakedOutputDir);
return;
}
if (!QDir().mkpath(_originalOutputDir)) {
handleError("Failed to create original OBJ output folder " + _originalOutputDir);
return;
}
// check if the OBJ is local or it needs to be downloaded
if (_modelURL.isLocalFile()) {
// loading the local OBJ
QFile localOBJ { _modelURL.toLocalFile() };
qDebug() << "Local file url: " << _modelURL << _modelURL.toString() << _modelURL.toLocalFile() << ", copying to: " << _originalModelFilePath;
if (!localOBJ.exists()) {
handleError("Could not find " + _modelURL.toString());
return;
}
// make a copy in the output folder
if (!_originalOutputDir.isEmpty()) {
qDebug() << "Copying to: " << _originalOutputDir << "/" << _modelURL.fileName();
localOBJ.copy(_originalOutputDir + "/" + _modelURL.fileName());
}
localOBJ.copy(_originalModelFilePath);
// local OBJ is loaded emit signal to trigger its baking
emit OBJLoaded();
} else {
// OBJ is remote, start download
auto& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkRequest networkRequest;
// setup the request to follow re-directs and always hit the network
networkRequest.setAttribute(QNetworkRequest::FollowRedirectsAttribute, true);
networkRequest.setAttribute(QNetworkRequest::CacheLoadControlAttribute, QNetworkRequest::AlwaysNetwork);
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
networkRequest.setUrl(_modelURL);
qCDebug(model_baking) << "Downloading" << _modelURL;
auto networkReply = networkAccessManager.get(networkRequest);
connect(networkReply, &QNetworkReply::finished, this, &OBJBaker::handleOBJNetworkReply);
}
}
void OBJBaker::handleOBJNetworkReply() {
auto requestReply = qobject_cast<QNetworkReply*>(sender());
if (requestReply->error() == QNetworkReply::NoError) {
qCDebug(model_baking) << "Downloaded" << _modelURL;
// grab the contents of the reply and make a copy in the output folder
QFile copyOfOriginal(_originalModelFilePath);
qDebug(model_baking) << "Writing copy of original obj to" << _originalModelFilePath << copyOfOriginal.fileName();
if (!copyOfOriginal.open(QIODevice::WriteOnly)) {
// add an error to the error list for this obj stating that a duplicate of the original obj could not be made
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to open " + _originalModelFilePath + ")");
return;
}
if (copyOfOriginal.write(requestReply->readAll()) == -1) {
handleError("Could not create copy of " + _modelURL.toString() + " (Failed to write)");
return;
}
// close that file now that we are done writing to it
copyOfOriginal.close();
if (!_originalOutputDir.isEmpty()) {
copyOfOriginal.copy(_originalOutputDir + "/" + _modelURL.fileName());
}
// remote OBJ is loaded emit signal to trigger its baking
emit OBJLoaded();
} else {
// add an error to our list stating that the OBJ could not be downloaded
handleError("Failed to download " + _modelURL.toString());
}
}
void OBJBaker::bakeOBJ() {
// Read the OBJ file
QFile objFile(_originalModelFilePath);
if (!objFile.open(QIODevice::ReadOnly)) {
handleError("Error opening " + _originalModelFilePath + " for reading");
return;
}
QByteArray objData = objFile.readAll();
bool combineParts = true; // set true so that OBJReader reads material info from material library
OBJReader reader;
auto geometry = reader.readOBJ(objData, QVariantHash(), combineParts, _modelURL);
// Write OBJ Data as FBX tree nodes
FBXNode rootNode;
createFBXNodeTree(rootNode, *geometry);
// Serialize the resultant FBX tree
auto encodedFBX = FBXWriter::encodeFBX(rootNode);
// Export as baked FBX
auto fileName = _modelURL.fileName();
auto baseName = fileName.left(fileName.lastIndexOf('.'));
auto bakedFilename = baseName + ".baked.fbx";
_bakedModelFilePath = _bakedOutputDir + "/" + bakedFilename;
QFile bakedFile;
bakedFile.setFileName(_bakedModelFilePath);
if (!bakedFile.open(QIODevice::WriteOnly)) {
handleError("Error opening " + _bakedModelFilePath + " for writing");
return;
}
bakedFile.write(encodedFBX);
// Export successful
_outputFiles.push_back(_bakedModelFilePath);
qCDebug(model_baking) << "Exported" << _modelURL << "to" << _bakedModelFilePath;
checkIfTexturesFinished();
}
void OBJBaker::createFBXNodeTree(FBXNode& rootNode, FBXGeometry& geometry) {
// Generating FBX Header Node
FBXNode headerNode;
headerNode.name = FBX_HEADER_EXTENSION;
// Generating global settings node
// Required for Unit Scale Factor
FBXNode globalSettingsNode;
globalSettingsNode.name = GLOBAL_SETTINGS_NODE_NAME;
// Setting the tree hierarchy: GlobalSettings -> Properties70 -> P -> Properties
FBXNode properties70Node;
properties70Node.name = PROPERTIES70_NODE_NAME;
FBXNode pNode;
{
pNode.name = P_NODE_NAME;
pNode.properties.append({
"UnitScaleFactor", "double", "Number", "",
UNIT_SCALE_FACTOR
});
}
properties70Node.children = { pNode };
globalSettingsNode.children = { properties70Node };
// Generating Object node
_objectNode.name = OBJECTS_NODE_NAME;
// Generating Object node's child - Geometry node
FBXNode geometryNode;
geometryNode.name = GEOMETRY_NODE_NAME;
{
_geometryID = nextNodeID();
geometryNode.properties = {
_geometryID,
GEOMETRY_NODE_NAME,
MESH
};
}
// Compress the mesh information and store in dracoNode
bool hasDeformers = false; // No concept of deformers for an OBJ
FBXNode dracoNode;
compressMesh(geometry.meshes[0], hasDeformers, dracoNode);
geometryNode.children.append(dracoNode);
// Generating Object node's child - Model node
FBXNode modelNode;
modelNode.name = MODEL_NODE_NAME;
{
_modelID = nextNodeID();
modelNode.properties = { _modelID, MODEL_NODE_NAME, MESH };
}
_objectNode.children = { geometryNode, modelNode };
// Generating Objects node's child - Material node
auto& meshParts = geometry.meshes[0].parts;
for (auto& meshPart : meshParts) {
FBXNode materialNode;
materialNode.name = MATERIAL_NODE_NAME;
if (geometry.materials.size() == 1) {
// case when no material information is provided, OBJReader considers it as a single default material
for (auto& materialID : geometry.materials.keys()) {
setMaterialNodeProperties(materialNode, materialID, geometry);
}
} else {
setMaterialNodeProperties(materialNode, meshPart.materialID, geometry);
}
_objectNode.children.append(materialNode);
}
// Generating Texture Node
// iterate through mesh parts and process the associated textures
auto size = meshParts.size();
for (int i = 0; i < size; i++) {
QString material = meshParts[i].materialID;
FBXMaterial currentMaterial = geometry.materials[material];
if (!currentMaterial.albedoTexture.filename.isEmpty() || !currentMaterial.specularTexture.filename.isEmpty()) {
_textureID = nextNodeID();
_mapTextureMaterial.emplace_back(_textureID, i);
FBXNode textureNode;
{
textureNode.name = TEXTURE_NODE_NAME;
textureNode.properties = { _textureID };
}
// Texture node child - TextureName node
FBXNode textureNameNode;
{
textureNameNode.name = TEXTURENAME_NODE_NAME;
QByteArray propertyString = (!currentMaterial.albedoTexture.filename.isEmpty()) ? "Kd" : "Ka";
textureNameNode.properties = { propertyString };
}
// Texture node child - Relative Filename node
FBXNode relativeFilenameNode;
{
relativeFilenameNode.name = RELATIVEFILENAME_NODE_NAME;
}
QByteArray textureFileName = (!currentMaterial.albedoTexture.filename.isEmpty()) ? currentMaterial.albedoTexture.filename : currentMaterial.specularTexture.filename;
auto textureType = (!currentMaterial.albedoTexture.filename.isEmpty()) ? image::TextureUsage::Type::ALBEDO_TEXTURE : image::TextureUsage::Type::SPECULAR_TEXTURE;
// Compress the texture using ModelBaker::compressTexture() and store compressed file's name in the node
auto textureFile = compressTexture(textureFileName, textureType);
if (textureFile.isNull()) {
// Baking failed return
handleError("Failed to compress texture: " + textureFileName);
return;
}
relativeFilenameNode.properties = { textureFile };
textureNode.children = { textureNameNode, relativeFilenameNode };
_objectNode.children.append(textureNode);
}
}
// Generating Connections node
FBXNode connectionsNode;
connectionsNode.name = CONNECTIONS_NODE_NAME;
// connect Geometry to Model
FBXNode cNode;
cNode.name = C_NODE_NAME;
cNode.properties = { CONNECTIONS_NODE_PROPERTY, _geometryID, _modelID };
connectionsNode.children = { cNode };
// connect all materials to model
for (auto& materialID : _materialIDs) {
FBXNode cNode;
cNode.name = C_NODE_NAME;
cNode.properties = { CONNECTIONS_NODE_PROPERTY, materialID, _modelID };
connectionsNode.children.append(cNode);
}
// Connect textures to materials
for (const auto& texMat : _mapTextureMaterial) {
FBXNode cAmbientNode;
cAmbientNode.name = C_NODE_NAME;
cAmbientNode.properties = {
CONNECTIONS_NODE_PROPERTY_1,
texMat.first,
_materialIDs[texMat.second],
"AmbientFactor"
};
connectionsNode.children.append(cAmbientNode);
FBXNode cDiffuseNode;
cDiffuseNode.name = C_NODE_NAME;
cDiffuseNode.properties = {
CONNECTIONS_NODE_PROPERTY_1,
texMat.first,
_materialIDs[texMat.second],
"DiffuseColor"
};
connectionsNode.children.append(cDiffuseNode);
}
// Make all generated nodes children of rootNode
rootNode.children = { globalSettingsNode, _objectNode, connectionsNode };
}
// Set properties for material nodes
void OBJBaker::setMaterialNodeProperties(FBXNode& materialNode, QString material, FBXGeometry& geometry) {
auto materialID = nextNodeID();
_materialIDs.push_back(materialID);
materialNode.properties = { materialID, material, MESH };
FBXMaterial currentMaterial = geometry.materials[material];
// Setting the hierarchy: Material -> Properties70 -> P -> Properties
FBXNode properties70Node;
properties70Node.name = PROPERTIES70_NODE_NAME;
// Set diffuseColor
FBXNode pNodeDiffuseColor;
{
pNodeDiffuseColor.name = P_NODE_NAME;
pNodeDiffuseColor.properties.append({
"DiffuseColor", "Color", "", "A",
currentMaterial.diffuseColor[0], currentMaterial.diffuseColor[1], currentMaterial.diffuseColor[2]
});
}
properties70Node.children.append(pNodeDiffuseColor);
// Set specularColor
FBXNode pNodeSpecularColor;
{
pNodeSpecularColor.name = P_NODE_NAME;
pNodeSpecularColor.properties.append({
"SpecularColor", "Color", "", "A",
currentMaterial.specularColor[0], currentMaterial.specularColor[1], currentMaterial.specularColor[2]
});
}
properties70Node.children.append(pNodeSpecularColor);
// Set Shininess
FBXNode pNodeShininess;
{
pNodeShininess.name = P_NODE_NAME;
pNodeShininess.properties.append({
"Shininess", "Number", "", "A",
currentMaterial.shininess
});
}
properties70Node.children.append(pNodeShininess);
// Set Opacity
FBXNode pNodeOpacity;
{
pNodeOpacity.name = P_NODE_NAME;
pNodeOpacity.properties.append({
"Opacity", "Number", "", "A",
currentMaterial.opacity
});
}
properties70Node.children.append(pNodeOpacity);
materialNode.children.append(properties70Node);
}

View file

@ -0,0 +1,54 @@
//
// OBJBaker.h
// libraries/baking/src
//
// Created by Utkarsh Gautam on 9/29/17.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_OBJBaker_h
#define hifi_OBJBaker_h
#include "Baker.h"
#include "TextureBaker.h"
#include "ModelBaker.h"
#include "ModelBakingLoggingCategory.h"
using TextureBakerThreadGetter = std::function<QThread*()>;
using NodeID = qlonglong;
class OBJBaker : public ModelBaker {
Q_OBJECT
public:
using ModelBaker::ModelBaker;
public slots:
virtual void bake() override;
signals:
void OBJLoaded();
private slots:
void bakeOBJ();
void handleOBJNetworkReply();
private:
void loadOBJ();
void createFBXNodeTree(FBXNode& rootNode, FBXGeometry& geometry);
void setMaterialNodeProperties(FBXNode& materialNode, QString material, FBXGeometry& geometry);
NodeID nextNodeID() { return _nodeID++; }
NodeID _nodeID { 0 };
NodeID _geometryID;
NodeID _modelID;
std::vector<NodeID> _materialIDs;
NodeID _textureID;
std::vector<std::pair<NodeID, int>> _mapTextureMaterial;
FBXNode _objectNode;
};
#endif // hifi_OBJBaker_h

View file

@ -28,7 +28,7 @@ void Basic2DWindowOpenGLDisplayPlugin::customizeContext() {
auto iconPath = PathUtils::resourcesPath() + "images/analog_stick.png";
auto image = QImage(iconPath);
qreal dpi = getFullscreenTarget()->physicalDotsPerInch();
_virtualPadPixelSize = dpi * 512 / 534; // 534 dpi for Pixel XL and Mate 9 Pro
_virtualPadPixelSize = dpi * VirtualPad::Manager::PIXEL_SIZE / VirtualPad::Manager::DPI;
if (image.format() != QImage::Format_ARGB32) {
image = image.convertToFormat(QImage::Format_ARGB32);

View file

@ -90,7 +90,7 @@ public:
glm::vec2 getReticleMaximumPosition() const;
glm::mat4 getReticleTransform(const glm::mat4& eyePose = glm::mat4(), const glm::vec3& headPosition = glm::vec3()) const;
glm::mat4 getPoint2DTransform(const glm::vec2& point = glm::vec2(), float sizeX = 512.0f, float sizeY = 512.0f) const;
glm::mat4 getPoint2DTransform(const glm::vec2& point, float sizeX , float sizeY) const;
ReticleInterface* getReticleInterface() { return _reticleInterface; }

View file

@ -361,7 +361,7 @@ void OpenGLDisplayPlugin::customizeContext() {
auto presentThread = DependencyManager::get<PresentThread>();
Q_ASSERT(thread() == presentThread->thread());
getGLBackend()->setCameraCorrection(mat4());
getGLBackend()->setCameraCorrection(mat4(), mat4(), true);
for (auto& cursorValue : _cursorsData) {
auto& cursorData = cursorValue.second;
@ -692,6 +692,9 @@ void OpenGLDisplayPlugin::present() {
incrementPresentCount();
if (_currentFrame) {
auto correction = getViewCorrection();
getGLBackend()->setCameraCorrection(correction, _prevRenderView);
_prevRenderView = correction * _currentFrame->view;
{
withPresentThreadLock([&] {
_renderRate.increment();

View file

@ -118,6 +118,7 @@ protected:
void renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer texture, glm::ivec4 viewport, const glm::ivec4 scissor, gpu::FramebufferPointer fbo);
void renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer texture, glm::ivec4 viewport, const glm::ivec4 scissor);
virtual void updateFrameData();
virtual glm::mat4 getViewCorrection() { return glm::mat4(); }
void withOtherThreadContext(std::function<void()> f) const;
@ -137,6 +138,7 @@ protected:
gpu::FramePointer _currentFrame;
gpu::Frame* _lastFrame { nullptr };
mat4 _prevRenderView;
gpu::FramebufferPointer _compositeFramebuffer;
gpu::PipelinePointer _hudPipeline;
gpu::PipelinePointer _mirrorHUDPipeline;

View file

@ -7,6 +7,8 @@
//
#include "DebugHmdDisplayPlugin.h"
#include <ui-plugins/PluginContainer.h>
#include <QtCore/QProcessEnvironment>
#include <ViewFrustum.h>
@ -41,7 +43,15 @@ bool DebugHmdDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
}
bool DebugHmdDisplayPlugin::internalActivate() {
_isAutoRotateEnabled = _container->getBoolSetting("autoRotate", true);
_container->addMenuItem(PluginType::DISPLAY_PLUGIN, MENU_PATH(), tr("Auto Rotate"),
[this](bool clicked) {
_isAutoRotateEnabled = clicked;
_container->setBoolSetting("autoRotate", _isAutoRotateEnabled);
}, true, _isAutoRotateEnabled);
_ipd = 0.0327499993f * 2.0f;
// Would be nice to know why the left and right projection matrices are slightly asymmetrical
_eyeProjections[0][0] = vec4{ 0.759056330, 0.000000000, 0.000000000, 0.000000000 };
_eyeProjections[0][1] = vec4{ 0.000000000, 0.682773232, 0.000000000, 0.000000000 };
_eyeProjections[0][2] = vec4{ -0.0580431037, -0.00619550655, -1.00000489, -1.00000000 };
@ -50,10 +60,15 @@ bool DebugHmdDisplayPlugin::internalActivate() {
_eyeProjections[1][1] = vec4{ 0.000000000, 0.678060353, 0.000000000, 0.000000000 };
_eyeProjections[1][2] = vec4{ 0.0578232110, -0.00669418881, -1.00000489, -1.000000000 };
_eyeProjections[1][3] = vec4{ 0.000000000, 0.000000000, -0.0800003856, 0.000000000 };
_eyeInverseProjections[0] = glm::inverse(_eyeProjections[0]);
_eyeInverseProjections[1] = glm::inverse(_eyeProjections[1]);
// No need to do so here as this will be done in Parent::internalActivate
//_eyeInverseProjections[0] = glm::inverse(_eyeProjections[0]);
//_eyeInverseProjections[1] = glm::inverse(_eyeProjections[1]);
_eyeOffsets[0][3] = vec4{ -0.0327499993, 0.0, 0.0149999997, 1.0 };
_eyeOffsets[1][3] = vec4{ 0.0327499993, 0.0, 0.0149999997, 1.0 };
_eyeInverseProjections[0] = glm::inverse(_eyeProjections[0]);
_eyeInverseProjections[1] = glm::inverse(_eyeProjections[1]);
_eyeOffsets[0][3] = vec4{ -0.0327499993, 0.0, -0.0149999997, 1.0 };
_eyeOffsets[1][3] = vec4{ 0.0327499993, 0.0, -0.0149999997, 1.0 };
_renderTargetSize = { 3024, 1680 };
_cullingProjection = _eyeProjections[0];
// This must come after the initialization, so that the values calculated
@ -63,10 +78,13 @@ bool DebugHmdDisplayPlugin::internalActivate() {
}
void DebugHmdDisplayPlugin::updatePresentPose() {
float yaw = sinf(secTimestampNow()) * 0.25f;
float pitch = cosf(secTimestampNow()) * 0.25f;
// Simulates head pose latency correction
_currentPresentFrameInfo.presentPose =
glm::mat4_cast(glm::angleAxis(yaw, Vectors::UP)) *
glm::mat4_cast(glm::angleAxis(pitch, Vectors::RIGHT));
Parent::updatePresentPose();
if (_isAutoRotateEnabled) {
float yaw = sinf(secTimestampNow()) * 0.25f;
float pitch = cosf(secTimestampNow()) * 0.25f;
// Simulates head pose latency correction
_currentPresentFrameInfo.presentPose =
glm::mat4_cast(glm::angleAxis(yaw, Vectors::UP)) *
glm::mat4_cast(glm::angleAxis(pitch, Vectors::RIGHT));
}
}

View file

@ -28,5 +28,7 @@ protected:
bool isHmdMounted() const override { return true; }
bool internalActivate() override;
private:
static const QString NAME;
bool _isAutoRotateEnabled{ true };
};

View file

@ -58,6 +58,18 @@ QRect HmdDisplayPlugin::getRecommendedHUDRect() const {
return CompositorHelper::VIRTUAL_SCREEN_RECOMMENDED_OVERLAY_RECT;
}
glm::mat4 HmdDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
return _eyeOffsets[eye];
}
glm::mat4 HmdDisplayPlugin::getEyeProjection(Eye eye, const glm::mat4& baseProjection) const {
return _eyeProjections[eye];
}
glm::mat4 HmdDisplayPlugin::getCullingProjection(const glm::mat4& baseProjection) const {
return _cullingProjection;
}
#define DISABLE_PREVIEW_MENU_ITEM_DELAY_MS 500
bool HmdDisplayPlugin::internalActivate() {
@ -324,12 +336,14 @@ void HmdDisplayPlugin::updateFrameData() {
}
updatePresentPose();
}
glm::mat4 HmdDisplayPlugin::getViewCorrection() {
if (_currentFrame) {
auto batchPose = _currentFrame->pose;
auto currentPose = _currentPresentFrameInfo.presentPose;
auto correction = glm::inverse(batchPose) * currentPose;
getGLBackend()->setCameraCorrection(correction);
return glm::inverse(_currentPresentFrameInfo.presentPose) * batchPose;
} else {
return glm::mat4();
}
}

View file

@ -26,9 +26,9 @@ public:
~HmdDisplayPlugin();
bool isHmd() const override final { return true; }
float getIPD() const override final { return _ipd; }
glm::mat4 getEyeToHeadTransform(Eye eye) const override final { return _eyeOffsets[eye]; }
glm::mat4 getEyeProjection(Eye eye, const glm::mat4& baseProjection) const override { return _eyeProjections[eye]; }
glm::mat4 getCullingProjection(const glm::mat4& baseProjection) const override { return _cullingProjection; }
glm::mat4 getEyeToHeadTransform(Eye eye) const override final;
glm::mat4 getEyeProjection(Eye eye, const glm::mat4& baseProjection) const override;
glm::mat4 getCullingProjection(const glm::mat4& baseProjection) const override;
glm::uvec2 getRecommendedUiSize() const override final;
glm::uvec2 getRecommendedRenderSize() const override final { return _renderTargetSize; }
bool isDisplayVisible() const override { return isHmdMounted(); }
@ -59,6 +59,7 @@ protected:
void customizeContext() override;
void uncustomizeContext() override;
void updateFrameData() override;
glm::mat4 getViewCorrection() override;
std::array<mat4, 2> _eyeOffsets;
std::array<mat4, 2> _eyeProjections;

View file

@ -101,3 +101,4 @@ void StereoDisplayPlugin::internalDeactivate() {
float StereoDisplayPlugin::getRecommendedAspectRatio() const {
return aspect(Parent::getRecommendedRenderSize());
}

View file

@ -26,7 +26,7 @@ public:
// the IPD at the Application level, the way we now allow with HMDs.
// If that becomes an issue then we'll need to break up the functionality similar
// to the HMD plugins.
// virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override;
//virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override;
protected:
virtual bool internalActivate() override;

View file

@ -111,7 +111,7 @@ void TextEntityRenderer::doRender(RenderArgs* args) {
if (!_geometryID) {
_geometryID = geometryCache->allocateID();
}
geometryCache->bindSimpleProgram(batch, false, transparent, false, false, false, false);
geometryCache->bindSimpleProgram(batch, false, transparent, false, false, false);
geometryCache->renderQuad(batch, minCorner, maxCorner, backgroundColor, _geometryID);
float scale = _lineHeight / _textRenderer->getFontSize();

View file

@ -46,6 +46,19 @@ static int YOUTUBE_MAX_FPS = 30;
static QTouchDevice _touchDevice;
WebEntityRenderer::ContentType WebEntityRenderer::getContentType(const QString& urlString) {
if (urlString.isEmpty()) {
return ContentType::NoContent;
}
const QUrl url(urlString);
if (url.scheme() == "http" || url.scheme() == "https" ||
urlString.toLower().endsWith(".htm") || urlString.toLower().endsWith(".html")) {
return ContentType::HtmlContent;
}
return ContentType::QmlContent;
}
WebEntityRenderer::WebEntityRenderer(const EntityItemPointer& entity) : Parent(entity) {
static std::once_flag once;
std::call_once(once, [&]{
@ -123,13 +136,45 @@ void WebEntityRenderer::onTimeout() {
}
void WebEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& scene, Transaction& transaction, const TypedEntityPointer& entity) {
withWriteLock([&] {
// This work must be done on the main thread
if (!hasWebSurface()) {
// If we couldn't create a new web surface, exit
if (!buildWebSurface(entity)) {
return;
// If the content type has changed, or the old content type was QML, we need to
// destroy the existing surface (because surfaces don't support changing the root
// object, so subsequent loads of content just overlap the existing content)
bool urlChanged = false;
{
auto newSourceUrl = entity->getSourceUrl();
auto newContentType = getContentType(newSourceUrl);
auto currentContentType = ContentType::NoContent;
withReadLock([&] {
urlChanged = _lastSourceUrl != newSourceUrl;
currentContentType = _contentType;
});
if (urlChanged) {
if (newContentType != ContentType::HtmlContent || currentContentType != ContentType::HtmlContent) {
destroyWebSurface();
}
withWriteLock([&] {
_lastSourceUrl = newSourceUrl;
_contentType = newContentType;
});
}
}
withWriteLock([&] {
if (_contentType == ContentType::NoContent) {
return;
}
// This work must be done on the main thread
// If we couldn't create a new web surface, exit
if (!hasWebSurface() && !buildWebSurface(entity)) {
return;
}
if (urlChanged) {
_webSurface->getRootItem()->setProperty("url", _lastSourceUrl);
}
if (_contextPosition != entity->getWorldPosition()) {
@ -138,11 +183,6 @@ void WebEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& scene
_webSurface->getSurfaceContext()->setContextProperty("globalPosition", vec3toVariant(_contextPosition));
}
if (_lastSourceUrl != entity->getSourceUrl()) {
_lastSourceUrl = entity->getSourceUrl();
loadSourceURL();
}
_lastDPI = entity->getDPI();
_lastLocked = entity->getLocked();
@ -232,9 +272,6 @@ bool WebEntityRenderer::buildWebSurface(const TypedEntityPointer& entity) {
// Let us interact with the keyboard
surfaceContext->setContextProperty("tabletInterface", DependencyManager::get<TabletScriptingInterface>().data());
});
_fadeStartTime = usecTimestampNow();
loadSourceURL();
_webSurface->resume();
// forward web events to EntityScriptingInterface
auto entities = DependencyManager::get<EntityScriptingInterface>();
@ -243,6 +280,29 @@ bool WebEntityRenderer::buildWebSurface(const TypedEntityPointer& entity) {
emit entities->webEventReceived(entityItemID, message);
});
if (_contentType == ContentType::HtmlContent) {
// We special case YouTube URLs since we know they are videos that we should play with at least 30 FPS.
// FIXME this doesn't handle redirects or shortened URLs, consider using a signaling method from the
// web entity
if (QUrl(_lastSourceUrl).host().endsWith("youtube.com", Qt::CaseInsensitive)) {
_webSurface->setMaxFps(YOUTUBE_MAX_FPS);
} else {
_webSurface->setMaxFps(DEFAULT_MAX_FPS);
}
_webSurface->load("controls/WebEntityView.qml", [this](QQmlContext* context, QObject* item) {
item->setProperty("url", _lastSourceUrl);
});
} else if (_contentType == ContentType::QmlContent) {
_webSurface->load(_lastSourceUrl, [this](QQmlContext* context, QObject* item) {
if (item && item->objectName() == "tabletRoot") {
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
tabletScriptingInterface->setQmlTabletRoot("com.highfidelity.interface.tablet.system", _webSurface.data());
}
});
}
_fadeStartTime = usecTimestampNow();
_webSurface->resume();
return true;
}
@ -289,32 +349,6 @@ glm::vec2 WebEntityRenderer::getWindowSize(const TypedEntityPointer& entity) con
return dims;
}
void WebEntityRenderer::loadSourceURL() {
const QUrl sourceUrl(_lastSourceUrl);
if (sourceUrl.scheme() == "http" || sourceUrl.scheme() == "https" ||
_lastSourceUrl.toLower().endsWith(".htm") || _lastSourceUrl.toLower().endsWith(".html")) {
_contentType = htmlContent;
// We special case YouTube URLs since we know they are videos that we should play with at least 30 FPS.
if (sourceUrl.host().endsWith("youtube.com", Qt::CaseInsensitive)) {
_webSurface->setMaxFps(YOUTUBE_MAX_FPS);
} else {
_webSurface->setMaxFps(DEFAULT_MAX_FPS);
}
_webSurface->load("controls/WebEntityView.qml", [this](QQmlContext* context, QObject* item) {
item->setProperty("url", _lastSourceUrl);
});
} else {
_contentType = qmlContent;
_webSurface->load(_lastSourceUrl);
if (_webSurface->getRootItem() && _webSurface->getRootItem()->objectName() == "tabletRoot") {
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
tabletScriptingInterface->setQmlTabletRoot("com.highfidelity.interface.tablet.system", _webSurface.data());
}
}
}
void WebEntityRenderer::hoverEnterEntity(const PointerEvent& event) {
if (!_lastLocked && _webSurface) {
PointerEvent webEvent = event;

View file

@ -47,15 +47,19 @@ private:
bool buildWebSurface(const TypedEntityPointer& entity);
void destroyWebSurface();
bool hasWebSurface();
void loadSourceURL();
glm::vec2 getWindowSize(const TypedEntityPointer& entity) const;
int _geometryId{ 0 };
enum contentType {
htmlContent,
qmlContent
enum class ContentType {
NoContent,
HtmlContent,
QmlContent
};
contentType _contentType;
static ContentType getContentType(const QString& urlString);
ContentType _contentType{ ContentType::NoContent };
QSharedPointer<OffscreenQmlSurface> _webSurface;
glm::vec3 _contextPosition;
gpu::TexturePointer _texture;

View file

@ -433,7 +433,7 @@ void ZoneEntityRenderer::setAmbientURL(const QString& ambientUrl) {
_ambientTexture = textureCache->getTexture(_ambientTextureURL, image::TextureUsage::CUBE_TEXTURE);
// keep whatever is assigned on the ambient map/sphere until texture is loaded
}
}
}
void ZoneEntityRenderer::updateAmbientMap() {

View file

@ -30,7 +30,7 @@ void main(void) {
varTexcoord = inTexCoord0.st;
// pass along the diffuse color
varColor = colorToLinearRGBA(inColor);
varColor = color_sRGBAToLinear(inColor);
// standard transform

View file

@ -31,7 +31,7 @@ void main(void) {
varTexcoord = inTexCoord0.st;
// pass along the diffuse color
varColor = colorToLinearRGBA(inColor);
varColor = color_sRGBAToLinear(inColor);
// standard transform

View file

@ -27,6 +27,14 @@ class OctreePacketData;
class EntityTreeElementExtraEncodeData;
class ReadBitstreamToTreeParams;
/**jsdoc
* Ambient light is defined by the following properties.
* @typedef {object} Entities.AmbientLight
* @property {number} ambientIntensity=0.5 - The intensity of the light.
* @property {string} ambientURL="" - A cube map image that defines the color of the light coming from each direction. If
* <code>""</code> then the entity's {@link Entities.Skybox|Skybox} <code>url</code> property value is used, unless that also is <code>""</code> in which
* case the entity's <code>ambientLightMode</code> property is set to <code>"inherit"</code>.
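* @example <caption>Turn on ambient light in a Zone entity. This is an illustrative sketch: the Zone's
* <code>ambientLightMode</code> and <code>ambientLight</code> property names are assumed from the Zone entity documentation.</caption>
* var zone = Entities.addEntity({
* type: "Zone",
* position: MyAvatar.position,
* dimensions: { x: 20, y: 20, z: 20 },
* ambientLightMode: "enabled", // Assumed mode value; "inherit" is described above.
* ambientLight: {
* ambientIntensity: 0.5,
* ambientURL: "" // Empty, so the zone's Skybox url is used, as described above.
* },
* lifetime: 300 // Delete after 5 minutes.
* });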
*/
class AmbientLightPropertyGroup : public PropertyGroup {
public:
// EntityItemProperty related helpers

View file

@ -44,6 +44,19 @@ bool operator!=(const AnimationPropertyGroup& a, const AnimationPropertyGroup& b
}
/**jsdoc
* The AnimationProperties are used to configure an animation.
* @typedef Entities.AnimationProperties
* @property {string} url="" - The URL of the FBX file that has the animation.
* @property {number} fps=30 - The speed in frames/s that the animation is played at.
* @property {number} firstFrame=0 - The first frame to play in the animation.
* @property {number} lastFrame=100000 - The last frame to play in the animation.
* @property {number} currentFrame=0 - The current frame being played in the animation.
* @property {boolean} running=false - If <code>true</code> then the animation should play.
* @property {boolean} loop=true - If <code>true</code> then the animation should be continuously repeated in a loop.
* @property {boolean} hold=false - If <code>true</code> then the rotations and translations of the last frame played should be
* maintained when the animation stops playing.
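* @example <caption>Play an animation on a Model entity. Illustrative sketch: the model and animation URLs are placeholders.</caption>
* var entity = Entities.addEntity({
* type: "Model",
* position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0, z: -3 })),
* rotation: MyAvatar.orientation,
* modelURL: "https://example.com/model.fbx", // Placeholder URL.
* animation: {
* url: "https://example.com/model-animation.fbx", // Placeholder URL.
* fps: 30,
* running: true,
* loop: true
* },
* lifetime: 300 // Delete after 5 minutes.
* });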
*/
void AnimationPropertyGroup::copyToScriptValue(const EntityPropertyFlags& desiredProperties, QScriptValue& properties, QScriptEngine* engine, bool skipDefaults, EntityItemProperties& defaultEntityProperties) const {
COPY_GROUP_PROPERTY_TO_QSCRIPTVALUE(PROP_ANIMATION_URL, Animation, animation, URL, url);
COPY_GROUP_PROPERTY_TO_QSCRIPTVALUE(PROP_ANIMATION_ALLOW_TRANSLATION, Animation, animation, AllowTranslation, allowTranslation);

View file

@ -94,6 +94,49 @@ variables. These argument variables are used by the code which is run when bull
#include "EntityDynamicInterface.h"
/**jsdoc
* <p>An entity action may be one of the following types:</p>
* <table>
* <thead>
* <tr><th>Value</th><th>Type</th><th>Description</th><th>Arguments</th></tr>
* </thead>
* <tbody>
* <tr><td><code>"far-grab"</code></td><td>Avatar action</td>
* <td>Moves and rotates an entity to a target position and orientation, optionally relative to another entity. Collisions
* between the entity and the user's avatar are disabled during the far-grab.</td>
* <td>{@link Entities.ActionArguments-FarGrab}</td></tr>
* <tr><td><code>"hold"</code></td><td>Avatar action</td>
* <td>Positions and rotates an entity relative to an avatar's hand. Collisions between the entity and the user's avatar
* are disabled during the hold.</td>
* <td>{@link Entities.ActionArguments-Hold}</td></tr>
* <tr><td><code>"offset"</code></td><td>Object action</td>
* <td>Moves an entity so that it is a set distance away from a target point.</td>
* <td>{@link Entities.ActionArguments-Offset}</td></tr>
* <tr><td><code>"tractor"</code></td><td>Object action</td>
* <td>Moves and rotates an entity to a target position and orientation, optionally relative to another entity.</td>
* <td>{@link Entities.ActionArguments-Tractor}</td></tr>
* <tr><td><code>"travel-oriented"</code></td><td>Object action</td>
* <td>Orients an entity to align with its direction of travel.</td>
* <td>{@link Entities.ActionArguments-TravelOriented}</td></tr>
* <tr><td><code>"hinge"</code></td><td>Object constraint</td>
* <td>Lets an entity pivot about an axis or connects two entities with a hinge joint.</td>
* <td>{@link Entities.ActionArguments-Hinge}</td></tr>
* <tr><td><code>"slider"</code></td><td>Object constraint</td>
* <td>Lets an entity slide and rotate along an axis, or connects two entities that slide and rotate along a shared
* axis.</td>
* <td>{@link Entities.ActionArguments-Slider|ActionArguments-Slider}</td></tr>
* <tr><td><code>"cone-twist"</code></td><td>Object constraint</td>
* <td>Connects two entities with a joint that can move through a cone and can twist.</td>
* <td>{@link Entities.ActionArguments-ConeTwist}</td></tr>
* <tr><td><code>"ball-socket"</code></td><td>Object constraint</td>
* <td>Connects two entities with a ball and socket joint.</td>
* <td>{@link Entities.ActionArguments-BallSocket}</td></tr>
* <tr><td><code>"spring"</code></td><td colspan="3">Synonym for <code>"tractor"</code>. <em>Legacy value.</em></td></tr>
* </tbody>
* </table>
* @typedef {string} Entities.ActionType
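* @example <caption>Tractor an entity towards a point. Illustrative sketch: assumes an existing <code>entityID</code>,
* the <code>Entities.addAction(type, entityID, arguments)</code> form, and argument names per
* {@link Entities.ActionArguments-Tractor}.</caption>
* var actionID = Entities.addAction("tractor", entityID, {
* targetPosition: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 1, z: -2 })),
* linearTimeScale: 0.2
* });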
*/
// Note: The "none" action type is not listed because it's an internal "uninitialized" value and not useful for scripts.
EntityDynamicType EntityDynamicInterface::dynamicTypeFromString(QString dynamicTypeString) {
QString normalizedDynamicTypeString = dynamicTypeString.toLower().remove('-').remove('_');
if (normalizedDynamicTypeString == "none") {

View file

@ -440,6 +440,707 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
return changedProperties;
}
/**jsdoc
* Different entity types have different properties: some common to all entities (listed below) and some specific to each
* {@link Entities.EntityType|EntityType} (linked to below). The properties are accessed as an object of property names and
* values.
*
* @typedef {object} Entities.EntityProperties
* @property {Uuid} id - The ID of the entity. <em>Read-only.</em>
* @property {string} name="" - A name for the entity. Need not be unique.
* @property {Entities.EntityType} type - The entity type. You cannot change the type of an entity after it's created. (Though
* its value may switch among <code>"Box"</code>, <code>"Shape"</code>, and <code>"Sphere"</code> depending on changes to
* the <code>shape</code> property set for entities of these types.) <em>Read-only.</em>
* @property {boolean} clientOnly=false - If <code>true</code> then the entity is an avatar entity, otherwise it is a server
* entity. <em>Read-only.</em>
* @property {Uuid} owningAvatarID=Uuid.NULL - The session ID of the owning avatar if <code>clientOnly</code> is
* <code>true</code>, otherwise {@link Uuid|Uuid.NULL}. <em>Read-only.</em>
*
* @property {string} created - The UTC date and time that the entity was created, in ISO 8601 format as
* <code>yyyy-MM-ddTHH:mm:ssZ</code>. <em>Read-only.</em>
* @property {number} age - The age of the entity in seconds since it was created. <em>Read-only.</em>
* @property {string} ageAsText - The age of the entity since it was created, formatted as <code>h hours m minutes s
* seconds</code>.
* @property {number} lifetime=-1 - How long an entity lives for, in seconds, before being automatically deleted. A value of
* <code>-1</code> means that the entity lives for ever.
* @property {number} lastEdited - When the entity was last edited, expressed as the number of microseconds since
* 1970-01-01T00:00:00 UTC. <em>Read-only.</em>
* @property {Uuid} lastEditedBy - The session ID of the avatar or agent that most recently created or edited the entity.
* <em>Read-only.</em>
*
* @property {boolean} locked=false - Whether or not the entity can be edited or deleted. If <code>true</code> then the
* entity's properties other than <code>locked</code> cannot be changed, and the entity cannot be deleted.
* @property {boolean} visible=true - Whether or not the entity is rendered. If <code>true</code> then the entity is rendered.
* @property {boolean} canCastShadows=true - Whether or not the entity casts shadows. Currently applicable only to
* {@link Entities.EntityType|Model} and {@link Entities.EntityType|Shape} entities. Shadows are cast if inside a
* {@link Entities.EntityType|Zone} entity with <code>castShadows</code> enabled in its {@link Entities.EntityProperties-Zone|keyLight} property.
*
* @property {Vec3} position=0,0,0 - The position of the entity.
* @property {Quat} rotation=0,0,0,1 - The orientation of the entity with respect to world coordinates.
* @property {Vec3} registrationPoint=0.5,0.5,0.5 - The point in the entity that is set to the entity's position and is rotated
* about, {@link Vec3|Vec3.ZERO} &ndash; {@link Vec3|Vec3.ONE}. A value of {@link Vec3|Vec3.ZERO} is the entity's
* minimum x, y, z corner; a value of {@link Vec3|Vec3.ONE} is the entity's maximum x, y, z corner.
*
* @property {Vec3} naturalPosition=0,0,0 - The center of the entity's unscaled mesh model if it has one, otherwise
* {@link Vec3|Vec3.ZERO}. <em>Read-only.</em>
* @property {Vec3} naturalDimensions - The dimensions of the entity's unscaled mesh model if it has one, otherwise
* {@link Vec3|Vec3.ONE}. <em>Read-only.</em>
*
* @property {Vec3} velocity=0,0,0 - The linear velocity of the entity in m/s with respect to world coordinates.
* @property {number} damping=0.39347 - How much to slow down the linear velocity of an entity over time, <code>0.0</code>
* &ndash; <code>1.0</code>. A higher damping value slows down the entity more quickly. The default value is for an
* exponential decay timescale of 2.0s, where it takes 2.0s for the movement to slow to <code>1/e = 0.368</code> of its
* initial value.
* @property {Vec3} angularVelocity=0,0,0 - The angular velocity of the entity in rad/s with respect to its axes, about its
* registration point.
* @property {number} angularDamping=0.39347 - How much to slow down the angular velocity of an entity over time,
* <code>0.0</code> &ndash; <code>1.0</code>. A higher damping value slows down the entity more quickly. The default value
* is for an exponential decay timescale of 2.0s, where it takes 2.0s for the movement to slow to <code>1/e = 0.368</code>
* of its initial value.
*
* @property {Vec3} gravity=0,0,0 - The acceleration due to gravity in m/s<sup>2</sup> that the entity should move with, in
* world coordinates. Set to <code>{ x: 0, y: -9.8, z: 0 }</code> to simulate Earth's gravity. Gravity is applied to an
* entity's motion only if its <code>dynamic</code> property is <code>true</code>. If changing an entity's
* <code>gravity</code> from {@link Vec3|Vec3.ZERO}, you need to give it a small <code>velocity</code> in order to kick off
* physics simulation.
* The <code>gravity</code> value is applied in addition to the <code>acceleration</code> value.
* @property {Vec3} acceleration=0,0,0 - A general acceleration in m/s<sup>2</sup> that the entity should move with, in world
* coordinates. The acceleration is applied to an entity's motion only if its <code>dynamic</code> property is
* <code>true</code>. If changing an entity's <code>acceleration</code> from {@link Vec3|Vec3.ZERO}, you need to give it a
* small <code>velocity</code> in order to kick off physics simulation.
* The <code>acceleration</code> value is applied in addition to the <code>gravity</code> value.
* @property {number} restitution=0.5 - The "bounciness" of an entity when it collides, <code>0.0</code> &ndash;
* <code>0.99</code>. The higher the value, the more bouncy.
* @property {number} friction=0.5 - How much to slow down an entity when it's moving against another, <code>0.0</code> &ndash;
* <code>10.0</code>. The higher the value, the more quickly it slows down. Examples: <code>0.1</code> for ice,
* <code>0.9</code> for sandpaper.
* @property {number} density=1000 - The density of the entity in kg/m<sup>3</sup>, <code>100</code> for balsa wood &ndash;
* <code>10000</code> for silver. The density is used in conjunction with the entity's bounding box volume to work out its
* mass in the application of physics.
*
* @property {boolean} collisionless=false - Whether or not the entity should collide with items per its
* <code>collisionMask</code> property. If <code>true</code> then the entity does not collide.
* @property {boolean} ignoreForCollisions=false - Synonym for <code>collisionless</code>.
* @property {Entities.CollisionMask} collisionMask=31 - What types of items the entity should collide with.
* @property {string} collidesWith="static,dynamic,kinematic,myAvatar,otherAvatar," - Synonym for <code>collisionMask</code>,
* in text format.
* @property {string} collisionSoundURL="" - The sound to play when the entity experiences a collision. Valid file formats are
* as per the {@link SoundCache} object.
* @property {boolean} dynamic=false - Whether or not the entity should be affected by collisions. If <code>true</code> then
* the entity's movement is affected by collisions.
* @property {boolean} collisionsWillMove=false - Synonym for <code>dynamic</code>.
*
* @property {string} href="" - A "hifi://" metaverse address that a user is taken to when they click on the entity.
* @property {string} description="" - A description of the <code>href</code> property value.
*
* @property {string} userData="" - Used to store extra data about the entity in JSON format. WARNING: Other apps such as the
* Create app can also use this property, so make sure you handle data stored by other apps &mdash; edit only your bit and
* leave the rest of the data intact. You can use <code>JSON.parse()</code> to parse the string into a JavaScript object
* which you can manipulate the properties of, and use <code>JSON.stringify()</code> to convert the object into a string to
* put in the property.
*
* @property {string} script="" - The URL of the client entity script, if any, that is attached to the entity.
* @property {number} scriptTimestamp=0 - Intended to be used to indicate when the client entity script was loaded. Should be
* an integer number of milliseconds since midnight GMT on January 1, 1970 (e.g., as supplied by <code>Date.now()</code>).
* If you update the property's value, the <code>script</code> is re-downloaded and reloaded. This is how the "reload"
* button beside the "script URL" field in properties tab of the Create app works.
* @property {string} serverScripts="" - The URL of the server entity script, if any, that is attached to the entity.
*
* @property {Uuid} parentID=Uuid.NULL - The ID of the entity or avatar that this entity is parented to. {@link Uuid|Uuid.NULL}
* if the entity is not parented.
* @property {number} parentJointIndex=65535 - The joint of the entity or avatar that this entity is parented to. Use
* <code>65535</code> or <code>-1</code> to parent to the entity or avatar's position and orientation rather than a joint.
* @property {Vec3} localPosition=0,0,0 - The position of the entity relative to its parent if the entity is parented,
* otherwise the same value as <code>position</code>. If the entity is parented to an avatar and is <code>clientOnly</code>
* so that it scales with the avatar, this value remains the original local position value while the avatar scale changes.
* @property {Quat} localRotation=0,0,0,1 - The rotation of the entity relative to its parent if the entity is parented,
* otherwise the same value as <code>rotation</code>.
* @property {Vec3} localVelocity=0,0,0 - The velocity of the entity relative to its parent if the entity is parented,
* otherwise the same value as <code>velocity</code>.
* @property {Vec3} localAngularVelocity=0,0,0 - The angular velocity of the entity relative to its parent if the entity is
* parented, otherwise the same value as <code>angularVelocity</code>.
* @property {Vec3} localDimensions - The dimensions of the entity. If the entity is parented to an avatar and is
* <code>clientOnly</code> so that it scales with the avatar, this value remains the original dimensions value while the
* avatar scale changes.
*
* @property {Entities.BoundingBox} boundingBox - The axis-aligned bounding box that tightly encloses the entity.
* <em>Read-only.</em>
* @property {AACube} queryAACube - The axis-aligned cube that determines where the entity lives in the entity server's octree.
* The cube may be considerably larger than the entity in some situations, e.g., when the entity is grabbed by an avatar:
* the position of the entity is determined through avatar mixer updates and so the AA cube is expanded in order to reduce
* unnecessary entity server updates. Scripts should not change this property's value.
*
* @property {string} actionData="" - Base-64 encoded compressed dump of the actions associated with the entity. This property
* is typically not used in scripts directly; rather, functions that manipulate an entity's actions update it.
* The size of this property increases with the number of actions. Because this property value has to fit within a High
* Fidelity datagram packet there is a limit to the number of actions that an entity can have, and edits which would result
* in overflow are rejected.
* <em>Read-only.</em>
* @property {Entities.RenderInfo} renderInfo - Information on the cost of rendering the entity. Currently information is only
* provided for <code>Model</code> entities. <em>Read-only.</em>
*
* @property {string} itemName="" - Certifiable name of the Marketplace item.
* @property {string} itemDescription="" - Certifiable description of the Marketplace item.
* @property {string} itemCategories="" - Certifiable category of the Marketplace item.
* @property {string} itemArtist="" - Certifiable artist that created the Marketplace item.
* @property {string} itemLicense="" - Certifiable license URL for the Marketplace item.
* @property {number} limitedRun=4294967295 - Certifiable maximum integer number of editions (copies) of the Marketplace item
* allowed to be sold.
* @property {number} editionNumber=0 - Certifiable integer edition (copy) number of the Marketplace item. Each copy sold in
* the Marketplace is numbered sequentially, starting at 1.
* @property {number} entityInstanceNumber=0 - Certifiable integer instance number for identical entities in a Marketplace
* item. A Marketplace item may have identical parts. If so, then each is numbered sequentially with an instance number.
* @property {string} marketplaceID="" - Certifiable UUID for the Marketplace item, as used in the URL of the item's download
* and its Marketplace Web page.
* @property {string} certificateID="" - Hash of the entity's static certificate JSON, signed by the artist's private key.
* @property {number} staticCertificateVersion=0 - The version of the method used to generate the <code>certificateID</code>.
*
* @see The different entity types have additional properties as follows:
* @see {@link Entities.EntityProperties-Box|EntityProperties-Box}
* @see {@link Entities.EntityProperties-Light|EntityProperties-Light}
* @see {@link Entities.EntityProperties-Line|EntityProperties-Line}
* @see {@link Entities.EntityProperties-Material|EntityProperties-Material}
* @see {@link Entities.EntityProperties-Model|EntityProperties-Model}
* @see {@link Entities.EntityProperties-ParticleEffect|EntityProperties-ParticleEffect}
* @see {@link Entities.EntityProperties-PolyLine|EntityProperties-PolyLine}
* @see {@link Entities.EntityProperties-PolyVox|EntityProperties-PolyVox}
* @see {@link Entities.EntityProperties-Shape|EntityProperties-Shape}
* @see {@link Entities.EntityProperties-Sphere|EntityProperties-Sphere}
* @see {@link Entities.EntityProperties-Text|EntityProperties-Text}
* @see {@link Entities.EntityProperties-Web|EntityProperties-Web}
* @see {@link Entities.EntityProperties-Zone|EntityProperties-Zone}
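* @example <caption>Read and update some common properties. Illustrative sketch: assumes an existing <code>entityID</code>
* and the <code>Entities.getEntityProperties</code> and <code>Entities.editEntity</code> functions.</caption>
* var properties = Entities.getEntityProperties(entityID, ["name", "type", "position"]);
* print("Entity " + properties.name + " (" + properties.type + ") is at " + JSON.stringify(properties.position));
* Entities.editEntity(entityID, {
* name: "My entity",
* lifetime: 300 // Delete after 5 minutes.
* });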
*/
/**jsdoc
* The <code>"Box"</code> {@link Entities.EntityType|EntityType} is the same as the <code>"Shape"</code>
* {@link Entities.EntityType|EntityType} except that its <code>shape</code> value is always set to <code>"Cube"</code>
* when the entity is created. If its <code>shape</code> property value is subsequently changed then the entity's
* <code>type</code> will be reported as <code>"Sphere"</code> if the <code>shape</code> is set to <code>"Sphere"</code>,
* otherwise it will be reported as <code>"Shape"</code>.
* @typedef {object} Entities.EntityProperties-Box
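* @example <caption>Create a box. Illustrative sketch following the other entity examples in this section.</caption>
* var entity = Entities.addEntity({
* type: "Box",
* position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0, z: -5 })),
* dimensions: { x: 0.5, y: 0.5, z: 0.5 },
* color: { red: 0, green: 128, blue: 255 },
* lifetime: 300 // Delete after 5 minutes.
* });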
*/
/**jsdoc
* The <code>"Light"</code> {@link Entities.EntityType|EntityType} adds local lighting effects.
* It has properties in addition to the common {@link Entities.EntityProperties|EntityProperties}.
* @typedef {object} Entities.EntityProperties-Light
* @property {Vec3} dimensions=0.1,0.1,0.1 - The dimensions of the entity. Entity surfaces outside these dimensions are not lit
* by the light.
* @property {Color} color=255,255,255 - The color of the light emitted.
* @property {number} intensity=1 - The brightness of the light.
* @property {number} falloffRadius=0.1 - The distance from the light's center at which intensity is reduced by 25%.
* @property {boolean} isSpotlight=false - If <code>true</code> then the light is directional, emitting along the entity's
* local negative z-axis; otherwise the light is a point light which emanates in all directions.
* @property {number} exponent=0 - Affects the softness of the spotlight beam: the higher the value the softer the beam.
* @property {number} cutoff=1.57 - Affects the size of the spotlight beam: the higher the value the larger the beam.
* @example <caption>Create a spotlight pointing at the ground.</caption>
* Entities.addEntity({
* type: "Light",
* position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0.5, z: -4 })),
* rotation: Quat.fromPitchYawRollDegrees(-75, 0, 0),
* dimensions: { x: 5, y: 5, z: 5 },
* intensity: 100,
* falloffRadius: 0.3,
* isSpotlight: true,
* exponent: 20,
* cutoff: 30,
* lifetime: 300 // Delete after 5 minutes.
* });
*/
/**jsdoc
* The <code>"Line"</code> {@link Entities.EntityType|EntityType} draws thin, straight lines between a sequence of two or more
* points.
* It has properties in addition to the common {@link Entities.EntityProperties|EntityProperties}.
* @typedef {object} Entities.EntityProperties-Line
* @property {Vec3} dimensions=0.1,0.1,0.1 - The dimensions of the entity. Must be sufficient to contain all the
* <code>linePoints</code>.
* @property {Vec3[]} linePoints=[]] - The sequence of points to draw lines between. The values are relative to the entity's
* position. A maximum of 70 points can be specified. The property's value is set only if all the <code>linePoints</code>
* lie within the entity's <code>dimensions</code>.
* @property {number} lineWidth=2 - <em>Currently not used.</em>
* @property {Color} color=255,255,255 - The color of the line.
* @example <caption>Draw lines in a "V".</caption>
* var entity = Entities.addEntity({
* type: "Line",
* position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0.75, z: -5 })),
* rotation: MyAvatar.orientation,
* dimensions: { x: 2, y: 2, z: 1 },
* linePoints: [
* { x: -1, y: 1, z: 0 },
* { x: 0, y: -1, z: 0 },
* { x: 1, y: 1, z: 0 },
* ],
* color: { red: 255, green: 0, blue: 0 },
* lifetime: 300 // Delete after 5 minutes.
* });
*/
/**jsdoc
* The <code>"Material"</code> {@link Entities.EntityType|EntityType} modifies the existing materials on
* {@link Entities.EntityType|Model} entities, {@link Entities.EntityType|Shape} entities (albedo only),
* {@link Overlays.OverlayType|model overlays}, and avatars.
* It has properties in addition to the common {@link Entities.EntityProperties|EntityProperties}.<br />
* To apply a material to an entity or overlay, set the material entity's <code>parentID</code> property to the entity or
* overlay's ID.
* To apply a material to an avatar, set the material entity's <code>parentID</code> property to the avatar's session UUID.
* To apply a material to your avatar such that it persists across domains and log-ins, create the material as an avatar entity
* by setting the <code>clientOnly</code> parameter in {@link Entities.addEntity} to <code>true</code>.
* Material entities render as non-scalable spheres if they don't have their parent set.
* @typedef {object} Entities.EntityProperties-Material
* @property {string} materialURL="" - URL to a {@link MaterialResource}. If you append <code>?name</code> to the URL, the
* material with that name in the {@link MaterialResource} will be applied to the entity. <br />
* Alternatively, set the property value to <code>"userData"</code> to use the {@link Entities.EntityProperties|userData}
* entity property to live edit the material resource values.
* @property {number} priority=0 - The priority for applying the material to its parent. Only the highest priority material is
* applied, with materials of the same priority randomly assigned. Materials that come with the model have a priority of
* <code>0</code>.
* @property {string|number} parentMaterialName="0" - Selects the submesh or submeshes within the parent to apply the material
* to. If in the format <code>"mat::string"</code>, all submeshes with material name <code>"string"</code> are replaced.
* Otherwise the property value is parsed as an unsigned integer, specifying the mesh index to modify. Invalid values are
* parsed to <code>0</code>.
* @property {string} materialMappingMode="uv" - How the material is mapped to the entity. Either <code>"uv"</code> or
* <code>"projected"</code>. <em>Currently, only <code>"uv"</code> is supported.
* @property {Vec2} materialMappingPos=0,0 - Offset position in UV-space of the top left of the material, range
* <code>{ x: 0, y: 0 }</code> &ndash; <code>{ x: 1, y: 1 }</code>.
* @property {Vec2} materialMappingScale=1,1 - How much to scale the material within the parent's UV-space.
* @property {number} materialMappingRot=0 - How much to rotate the material within the parent's UV-space, in degrees.
* @example <caption>Color a sphere using a Material entity.</caption>
* var entityID = Entities.addEntity({
* type: "Sphere",
* position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0, z: -5 })),
* dimensions: { x: 1, y: 1, z: 1 },
* color: { red: 128, green: 128, blue: 128 },
* lifetime: 300 // Delete after 5 minutes.
* });
*
* var materialID = Entities.addEntity({
* type: "Material",
* parentID: entityID,
* materialURL: "userData",
* priority: 1,
* userData: JSON.stringify({
* materials: {
* // Can only set albedo on a Shape entity.
* // Value overrides entity's "color" property.
* albedo: [1.0, 0, 0]
* }
* }),
* });
*/
/**jsdoc
* The <code>"Model"</code> {@link Entities.EntityType|EntityType} displays an FBX or OBJ model.
* It has properties in addition to the common {@link Entities.EntityProperties|EntityProperties}.
* @typedef {object} Entities.EntityProperties-Model
* @property {Vec3} dimensions=0.1,0.1,0.1 - The dimensions of the entity. When adding an entity, if no <code>dimensions</code>
* value is specified then the model is automatically sized to its
* <code>{@link Entities.EntityProperties|naturalDimensions}</code>.
* @property {Color} color=255,255,255 - <em>Currently not used.</em>
* @property {string} modelURL="" - The URL of the FBX or OBJ model. Baked FBX models' URLs end in ".baked.fbx".<br />
* Note: If the name ends with <code>"default-image-model.fbx"</code> then the entity is considered to be an "Image"
* entity, in which case the <code>textures</code> property should be set per the example.
* @property {string} textures="" - A JSON string of texture name, URL pairs used when rendering the model in place of the
* model's original textures. Use a texture name from the <code>originalTextures</code> property to override that texture.
* Only the texture names and URLs to be overridden need be specified; original textures are used where there are no
* overrides. You can use <code>JSON.stringify()</code> to convert a JavaScript object of name, URL pairs into a JSON
* string.
* @property {string} originalTextures="{}" - A JSON string of texture name, URL pairs used in the model. The property value is
* filled in after the entity has finished rezzing (i.e., textures have loaded). You can use <code>JSON.parse()</code> to
* parse the JSON string into a JavaScript object of name, URL pairs. <em>Read-only.</em>
*
* @property {ShapeType} shapeType="none" - The shape of the collision hull used if collisions are enabled.
* @property {string} compoundShapeURL="" - The OBJ file to use for the compound shape if <code>shapeType</code> is
* <code>"compound"</code>.
*
* @property {Entities.AnimationProperties} animation - An animation to play on the model.
*
* @property {Quat[]} jointRotations=[]] - Joint rotations applied to the model; <code>[]</code> if none are applied or the
* model hasn't loaded. The array indexes are per {@link Entities.getJointIndex|getJointIndex}. Rotations are relative to
* each joint's parent.<br />
* Joint rotations can be set by {@link Entities.setLocalJointRotation|setLocalJointRotation} and similar functions, or by
* setting the value of this property. If you set a joint rotation using this property you also need to set the
* corresponding <code>jointRotationsSet</code> value to <code>true</code>.
* @property {boolean[]} jointRotationsSet=[]] - <code>true</code> values for joints that have had rotations applied,
* <code>false</code> otherwise; <code>[]</code> if none are applied or the model hasn't loaded. The array indexes are per
* {@link Entities.getJointIndex|getJointIndex}.
* @property {Vec3[]} jointTranslations=[]] - Joint translations applied to the model; <code>[]</code> if none are applied or
* the model hasn't loaded. The array indexes are per {@link Entities.getJointIndex|getJointIndex}. Translations are relative
* to each joint's parent.<br />
* Joint translations can be set by {@link Entities.setLocalJointTranslation|setLocalJointTranslation} and similar
* functions, or by setting the value of this property. If you set a joint translation using this property you also need to
* set the corresponding <code>jointTranslationsSet</code> value to <code>true</code>.
* @property {boolean[]} jointTranslationsSet=[]] - <code>true</code> values for joints that have had translations applied,
* <code>false</code> otherwise; <code>[]</code> if none are applied or the model hasn't loaded. The array indexes are per
* {@link Entities.getJointIndex|getJointIndex}.
* @property {boolean} relayParentJoints=false - If <code>true</code> and the entity is parented to an avatar, then the
* avatar's joint rotations are applied to the entity's joints.
*
* @example <caption>Rez a Vive tracker puck.</caption>
* var entity = Entities.addEntity({
* type: "Model",
* position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0.75, z: -2 })),
* rotation: MyAvatar.orientation,
* modelURL: "http://content.highfidelity.com/seefo/production/puck-attach/vive_tracker_puck.obj",
* dimensions: { x: 0.0945, y: 0.0921, z: 0.0423 },
* lifetime: 300 // Delete after 5 minutes.
* });
* @example <caption>Create an "Image" entity like you can in the Create app.</caption>
* var IMAGE_MODEL = "https://hifi-content.s3.amazonaws.com/DomainContent/production/default-image-model.fbx";
* var DEFAULT_IMAGE = "https://hifi-content.s3.amazonaws.com/DomainContent/production/no-image.jpg";
* var entity = Entities.addEntity({
* type: "Model",
* position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0.5, z: -3 })),
* rotation: MyAvatar.orientation,
* dimensions: {
* x: 0.5385,
* y: 0.2819,
* z: 0.0092
* },
* shapeType: "box",
* collisionless: true,
* modelURL: IMAGE_MODEL,
* textures: JSON.stringify({ "tex.picture": DEFAULT_IMAGE }),
* lifetime: 300 // Delete after 5 minutes
* });
*/
/**jsdoc
* The <code>"ParticleEffect"</code> {@link Entities.EntityType|EntityType} displays a particle system that can be used to
* simulate things such as fire, smoke, snow, magic spells, etc. The particles emanate from an ellipsoid or part thereof.
* It has properties in addition to the common {@link Entities.EntityProperties|EntityProperties}.
* @typedef {object} Entities.EntityProperties-ParticleEffect
* @property {boolean} isEmitting=true - If <code>true</code> then particles are emitted.
* @property {number} maxParticles=1000 - The maximum number of particles to render at one time. Older particles are deleted if
* necessary when new ones are created.
* @property {number} lifespan=3s - How long, in seconds, each particle lives.
* @property {number} emitRate=15 - The number of particles per second to emit.
* @property {number} emitSpeed=5 - The speed, in m/s, that each particle is emitted at.
* @property {number} speedSpread=1 - The spread in speeds at which particles are emitted. If <code>emitSpeed == 5</code>
* and <code>speedSpread == 1</code>, particles will be emitted with speeds in the range 4m/s &ndash; 6m/s.
* @property {vec3} emitAcceleration=0,-9.8,0 - The acceleration that is applied to each particle during its lifetime. The
* default is Earth's gravity value.
* @property {vec3} accelerationSpread=0,0,0 - The spread in accelerations that each particle is given. If
* <code>emitAcceleration == {x: 0, y: -9.8, z: 0}</code> and <code>accelerationSpread ==
* {x: 0, y: 1, z: 0}</code>, each particle will have an acceleration in the range, <code>{x: 0, y: -10.8, z: 0}</code>
* &ndash; <code>{x: 0, y: -8.8, z: 0}</code>.
* @property {Vec3} dimensions - The dimensions of the particle effect, i.e., a bounding box containing all the particles
* during their lifetimes, assuming that <code>emitterShouldTrail</code> is <code>false</code>. <em>Read-only.</em>
* @property {boolean} emitterShouldTrail=false - If <code>true</code> then particles are "left behind" as the emitter moves,
* otherwise they stay with the entity's dimensions.
*
* @property {Quat} emitOrientation=-0.707,0,0,0.707 - The orientation of particle emission relative to the entity's axes. By
* default, particles emit along the entity's local z-axis, and <code>azimuthStart</code> and <code>azimuthFinish</code>
* are relative to the entity's local x-axis. The default value is a rotation of -90 degrees about the local x-axis, i.e.,
* the particles emit vertically.
* @property {vec3} emitDimensions=0,0,0 - The dimensions of the ellipsoid from which particles are emitted.
* @property {number} emitRadiusStart=1 - The starting radius within the ellipsoid at which particles start being emitted;
* range <code>0.0</code> &ndash; <code>1.0</code> for the ellipsoid center to the ellipsoid surface, respectively.
* Particles are emitted from the portion of the ellipsoid that lies between <code>emitRadiusStart</code> and the
* ellipsoid's surface.
* @property {number} polarStart=0 - The angle in radians from the entity's local z-axis at which particles start being emitted
* within the ellipsoid; range <code>0</code> &ndash; <code>Math.PI</code>. Particles are emitted from the portion of the
* ellipsoid that lies between <code>polarStart</code> and <code>polarFinish</code>.
* @property {number} polarFinish=0 - The angle in radians from the entity's local z-axis at which particles stop being emitted
* within the ellipsoid; range <code>0</code> &ndash; <code>Math.PI</code>. Particles are emitted from the portion of the
* ellipsoid that lies between <code>polarStart</code> and <code>polarFinish</code>.
* @property {number} azimuthStart=-Math.PI - The angle in radians from the entity's local x-axis about the entity's local
* z-axis at which particles start being emitted; range <code>-Math.PI</code> &ndash; <code>Math.PI</code>. Particles are
* emitted from the portion of the ellipsoid that lies between <code>azimuthStart</code> and <code>azimuthFinish</code>.
* @property {number} azimuthFinish=Math.PI - The angle in radians from the entity's local x-axis about the entity's local
* z-axis at which particles stop being emitted; range <code>-Math.PI</code> &ndash; <code>Math.PI</code>. Particles are
* emitted from the portion of the ellipsoid that lies between <code>azimuthStart</code> and <code>azimuthFinish</code>.
*
* @property {string} textures="" - The URL of a JPG or PNG image file to display for each particle. If you want transparency,
* use PNG format.
* @property {number} particleRadius=0.025 - The radius of each particle at the middle of its life.
* @property {number} radiusStart=0.025 - The radius of each particle at the start of its life. If not explicitly set, the
* <code>particleRadius</code> value is used.
* @property {number} radiusFinish=0.025 - The radius of each particle at the end of its life. If not explicitly set, the
* <code>particleRadius</code> value is used.
* @property {number} radiusSpread=0 - <em>Currently not used.</em>
* @property {Color} color=255,255,255 - The color of each particle at the middle of its life.
* @property {Color} colorStart=255,255,255 - The color of each particle at the start of its life. If not explicitly set, the
* <code>color</code> value is used.
* @property {Color} colorFinish=255,255,255 - The color of each particle at the end of its life. If not explicitly set, the
* <code>color</code> value is used.
* @property {Color} colorSpread=0,0,0 - <em>Currently not used.</em>
* @property {number} alpha=1 - The alpha of each particle at the middle of its life.
* @property {number} alphaStart=1 - The alpha of each particle at the start of its life. If not explicitly set, the
* <code>alpha</code> value is used.
* @property {number} alphaFinish=1 - The alpha of each particle at the end of its life. If not explicitly set, the
* <code>alpha</code> value is used.
* @property {number} alphaSpread=0 - <em>Currently not used.</em>
*
* @property {ShapeType} shapeType="none" - <em>Currently not used.</em> <em>Read-only.</em>
*
* @example <caption>Create a ball of green smoke.</caption>
* particles = Entities.addEntity({
* type: "ParticleEffect",
* position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0.5, z: -4 })),
* lifespan: 5,
* emitRate: 10,
* emitSpeed: 0.02,
* speedSpread: 0.01,
* emitAcceleration: { x: 0, y: 0.02, z: 0 },
* polarFinish: Math.PI,
* textures: "https://content.highfidelity.com/DomainContent/production/Particles/wispy-smoke.png",
* particleRadius: 0.1,
* color: { red: 0, green: 255, blue: 0 },
* alphaFinish: 0,
* lifetime: 300 // Delete after 5 minutes.
* });
*/
/**jsdoc
* The <code>"PolyLine"</code> {@link Entities.EntityType|EntityType} draws textured, straight lines between a sequence of
* points.
* It has properties in addition to the common {@link Entities.EntityProperties|EntityProperties}.
* @typedef {object} Entities.EntityProperties-PolyLine
* @property {Vec3} dimensions=1,1,1 - The dimensions of the entity, i.e., the size of the bounding box that contains the
* lines drawn.
 * @property {Vec3[]} linePoints=[] - The sequence of points to draw lines between. The values are relative to the entity's
 * position. A maximum of 70 points can be specified. Must be specified in order for the entity to render.
 * @property {Vec3[]} normals=[] - The normal vectors for the line's surface at the <code>linePoints</code>. The values are
 * relative to the entity's orientation. Must be specified in order for the entity to render.
 * @property {number[]} strokeWidths=[] - The widths, in m, of the line at the <code>linePoints</code>. Must be specified in
 * order for the entity to render.
 * @property {number} lineWidth=2 - <em>Currently not used.</em>
 * @property {Vec3[]} strokeColors=[] - <em>Currently not used.</em>
* @property {Color} color=255,255,255 - The base color of the line, which is multiplied with the color of the texture for
* rendering.
* @property {string} textures="" - The URL of a JPG or PNG texture to use for the lines. If you want transparency, use PNG
* format.
* @property {boolean} isUVModeStretch=true - If <code>true</code>, the texture is stretched to fill the whole line, otherwise
* the texture repeats along the line.
* @example <caption>Draw a textured "V".</caption>
* var entity = Entities.addEntity({
* type: "PolyLine",
* position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0.75, z: -5 })),
* rotation: MyAvatar.orientation,
* linePoints: [
* { x: -1, y: 0.5, z: 0 },
* { x: 0, y: 0, z: 0 },
* { x: 1, y: 0.5, z: 0 }
* ],
* normals: [
* { x: 0, y: 0, z: 1 },
* { x: 0, y: 0, z: 1 },
* { x: 0, y: 0, z: 1 }
* ],
* strokeWidths: [ 0.1, 0.1, 0.1 ],
* color: { red: 255, green: 0, blue: 0 }, // Use just the red channel from the image.
* textures: "http://hifi-production.s3.amazonaws.com/DomainContent/Toybox/flowArts/trails.png",
* isUVModeStretch: true,
* lifetime: 300 // Delete after 5 minutes.
* });
*/
/**jsdoc
* The <code>"PolyVox"</code> {@link Entities.EntityType|EntityType} displays a set of textured voxels.
* It has properties in addition to the common {@link Entities.EntityProperties|EntityProperties}.
* If you have two or more neighboring PolyVox entities of the same size abutting each other, you can display them as joined by
* configuring their <code>voxelSurfaceStyle</code> and neighbor ID properties.<br />
 * PolyVox entities use a library from <a href="http://www.volumesoffun.com/">Volumes of Fun</a>. Their
* <a href="http://www.volumesoffun.com/polyvox-documentation/">library documentation</a> may be useful to read.
* @typedef {object} Entities.EntityProperties-PolyVox
* @property {Vec3} dimensions=0.1,0.1,0.1 - The dimensions of the entity.
* @property {Vec3} voxelVolumeSize=32,32,32 - Integer number of voxels along each axis of the entity, in the range
* <code>1,1,1</code> to <code>128,128,128</code>. The dimensions of each voxel is
 * <code>dimensions / voxelVolumeSize</code>.
* @property {string} voxelData="ABAAEAAQAAAAHgAAEAB42u3BAQ0AAADCoPdPbQ8HFAAAAPBuEAAAAQ==" - Base-64 encoded compressed dump of
* the PolyVox data. This property is typically not used in scripts directly; rather, functions that manipulate a PolyVox
* entity update it.<br />
* The size of this property increases with the size and complexity of the PolyVox entity, with the size depending on how
* the particular entity's voxels compress. Because this property value has to fit within a High Fidelity datagram packet
* there is a limit to the size and complexity of a PolyVox entity, and edits which would result in an overflow are
* rejected.
* @property {Entities.PolyVoxSurfaceStyle} voxelSurfaceStyle=2 - The style of rendering the voxels' surface and how
* neighboring PolyVox entities are joined.
* @property {string} xTextureURL="" - URL of the texture to map to surfaces perpendicular to the entity's local x-axis. JPG or
* PNG format. If no texture is specified the surfaces display white.
* @property {string} yTextureURL="" - URL of the texture to map to surfaces perpendicular to the entity's local y-axis. JPG or
* PNG format. If no texture is specified the surfaces display white.
* @property {string} zTextureURL="" - URL of the texture to map to surfaces perpendicular to the entity's local z-axis. JPG or
* PNG format. If no texture is specified the surfaces display white.
* @property {Uuid} xNNeighborID=Uuid.NULL - ID of the neighboring PolyVox entity in the entity's -ve local x-axis direction,
* if you want them joined. Set to {@link Uuid|Uuid.NULL} if there is none or you don't want to join them.
* @property {Uuid} yNNeighborID=Uuid.NULL - ID of the neighboring PolyVox entity in the entity's -ve local y-axis direction,
* if you want them joined. Set to {@link Uuid|Uuid.NULL} if there is none or you don't want to join them.
* @property {Uuid} zNNeighborID=Uuid.NULL - ID of the neighboring PolyVox entity in the entity's -ve local z-axis direction,
* if you want them joined. Set to {@link Uuid|Uuid.NULL} if there is none or you don't want to join them.
* @property {Uuid} xPNeighborID=Uuid.NULL - ID of the neighboring PolyVox entity in the entity's +ve local x-axis direction,
* if you want them joined. Set to {@link Uuid|Uuid.NULL} if there is none or you don't want to join them.
* @property {Uuid} yPNeighborID=Uuid.NULL - ID of the neighboring PolyVox entity in the entity's +ve local y-axis direction,
* if you want them joined. Set to {@link Uuid|Uuid.NULL} if there is none or you don't want to join them.
* @property {Uuid} zPNeighborID=Uuid.NULL - ID of the neighboring PolyVox entity in the entity's +ve local z-axis direction,
* if you want them joined. Set to {@link Uuid|Uuid.NULL} if there is none or you don't want to join them.
* @example <caption>Create a textured PolyVox sphere.</caption>
* var position = Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0.5, z: -8 }));
* var texture = "http://public.highfidelity.com/cozza13/tuscany/Concrete2.jpg";
* var polyVox = Entities.addEntity({
* type: "PolyVox",
* position: position,
* dimensions: { x: 2, y: 2, z: 2 },
* xTextureURL: texture,
* yTextureURL: texture,
* zTextureURL: texture,
* lifetime: 300 // Delete after 5 minutes.
* });
* Entities.setVoxelSphere(polyVox, position, 0.8, 255);
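 * @example <caption>Join two PolyVox cubes side by side. A minimal sketch: the positions, sizes, and use of
 *     <code>Entities.setAllVoxels</code> to fill the volumes are illustrative only.</caption>
 * var properties = {
 *     type: "PolyVox",
 *     dimensions: { x: 2, y: 2, z: 2 },
 *     voxelSurfaceStyle: 2, // Edged cubic, which joins neighboring PolyVox entities cleanly.
 *     lifetime: 300 // Delete after 5 minutes.
 * };
 * var position = Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: -1, y: 0.5, z: -8 }));
 * properties.position = position;
 * var polyVoxA = Entities.addEntity(properties);
 * properties.position = Vec3.sum(position, { x: 2, y: 0, z: 0 }); // Exactly abutting along the x-axis.
 * var polyVoxB = Entities.addEntity(properties);
 * Entities.setAllVoxels(polyVoxA, 255); // Fill both volumes so the join is visible.
 * Entities.setAllVoxels(polyVoxB, 255);
 * Entities.editEntity(polyVoxA, { xPNeighborID: polyVoxB });
 * Entities.editEntity(polyVoxB, { xNNeighborID: polyVoxA });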
*/
/**jsdoc
* The <code>"Shape"</code> {@link Entities.EntityType|EntityType} displays an entity of a specified <code>shape</code>.
* It has properties in addition to the common {@link Entities.EntityProperties|EntityProperties}.
* @typedef {object} Entities.EntityProperties-Shape
* @property {Entities.Shape} shape="Sphere" - The shape of the entity.
* @property {Vec3} dimensions=0.1,0.1,0.1 - The dimensions of the entity.
* @property {Color} color=255,255,255 - The color of the entity.
* @example <caption>Create a cylinder.</caption>
* var shape = Entities.addEntity({
* type: "Shape",
* shape: "Cylinder",
* position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0, z: -5 })),
* dimensions: { x: 0.4, y: 0.6, z: 0.4 },
* lifetime: 300 // Delete after 5 minutes.
* });
*/
/**jsdoc
* The <code>"Sphere"</code> {@link Entities.EntityType|EntityType} is the same as the <code>"Shape"</code>
* {@link Entities.EntityType|EntityType} except that its <code>shape</code> value is always set to <code>"Sphere"</code>
* when the entity is created. If its <code>shape</code> property value is subsequently changed then the entity's
* <code>type</code> will be reported as <code>"Box"</code> if the <code>shape</code> is set to <code>"Cube"</code>,
* otherwise it will be reported as <code>"Shape"</code>.
* @typedef {object} Entities.EntityProperties-Sphere
*/
/**jsdoc
* The <code>"Text"</code> {@link Entities.EntityType|EntityType} displays a 2D rectangle of text in the domain.
* It has properties in addition to the common {@link Entities.EntityProperties|EntityProperties}.
* @typedef {object} Entities.EntityProperties-Text
* @property {Vec3} dimensions=0.1,0.1,0.01 - The dimensions of the entity.
* @property {string} text="" - The text to display on the face of the entity. Text wraps if necessary to fit. New lines can be
* created using <code>\n</code>. Overflowing lines are not displayed.
* @property {number} lineHeight=0.1 - The height of each line of text (thus determining the font size).
* @property {Color} textColor=255,255,255 - The color of the text.
* @property {Color} backgroundColor=0,0,0 - The color of the background rectangle.
* @property {boolean} faceCamera=false - If <code>true</code>, the entity is oriented to face each user's camera (i.e., it
* differs for each user present).
* @example <caption>Create a text entity.</caption>
* var text = Entities.addEntity({
* type: "Text",
* position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0, z: -5 })),
* dimensions: { x: 0.6, y: 0.3, z: 0.01 },
* lineHeight: 0.12,
* text: "Hello\nthere!",
* faceCamera: true,
* lifetime: 300 // Delete after 5 minutes.
* });
*/
/**jsdoc
* The <code>"Web"</code> {@link Entities.EntityType|EntityType} displays a browsable Web page. Each user views their own copy
* of the Web page: if one user navigates to another page on the entity, other users do not see the change; if a video is being
* played, users don't see it in sync.
* The entity has properties in addition to the common {@link Entities.EntityProperties|EntityProperties}.
* @typedef {object} Entities.EntityProperties-Web
* @property {Vec3} dimensions=0.1,0.1,0.01 - The dimensions of the entity.
* @property {string} sourceUrl="" - The URL of the Web page to display. This value does not change as you or others navigate
* on the Web entity.
 * @property {number} dpi=30 - The resolution to display the page at, in dots per inch. If you convert this to dots per meter
 *     (multiply by 1 / 0.0254 = 39.3701) and then multiply <code>dimensions.x</code> and <code>dimensions.y</code> by that
 *     value, you get the resolution in pixels.
* @example <caption>Create a Web entity displaying at 1920 x 1080 resolution.</caption>
* var METERS_TO_INCHES = 39.3701;
* var entity = Entities.addEntity({
* type: "Web",
* sourceUrl: "https://highfidelity.com/",
* position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0.75, z: -4 })),
* rotation: MyAvatar.orientation,
* dimensions: {
* x: 3,
* y: 3 * 1080 / 1920,
* z: 0.01
* },
* dpi: 1920 / (3 * METERS_TO_INCHES),
* lifetime: 300 // Delete after 5 minutes.
* });
*/
/**jsdoc
* The <code>"Zone"</code> {@link Entities.EntityType|EntityType} is a volume of lighting effects and avatar permissions.
* Avatar interaction events such as {@link Entities.enterEntity} are also often used with a Zone entity.
* It has properties in addition to the common {@link Entities.EntityProperties|EntityProperties}.
* @typedef {object} Entities.EntityProperties-Zone
* @property {Vec3} dimensions=0.1,0.1,0.1 - The size of the volume in which the zone's lighting effects and avatar permissions
* have effect.
*
* @property {ShapeType} shapeType="box" - The shape of the volume in which the zone's lighting effects and avatar
* permissions have effect. Reverts to the default value if set to <code>"none"</code>, or set to <code>"compound"</code>
* and <code>compoundShapeURL</code> is <code>""</code>.
* @property {string} compoundShapeURL="" - The OBJ file to use for the compound shape if <code>shapeType</code> is
* <code>"compound"</code>.
*
* @property {string} keyLightMode="inherit" - Configures the key light in the zone. Possible values:<br />
* <code>"inherit"</code>: The key light from any enclosing zone continues into this zone.<br />
* <code>"disabled"</code>: The key light from any enclosing zone and the key light of this zone are disabled in this
* zone.<br />
* <code>"enabled"</code>: The key light properties of this zone are enabled, overriding the key light of from any
* enclosing zone.
* @property {Entities.KeyLight} keyLight - The key light properties of the zone.
*
* @property {string} ambientLightMode="inherit" - Configures the ambient light in the zone. Possible values:<br />
* <code>"inherit"</code>: The ambient light from any enclosing zone continues into this zone.<br />
* <code>"disabled"</code>: The ambient light from any enclosing zone and the ambient light of this zone are disabled in
* this zone.<br />
* <code>"enabled"</code>: The ambient light properties of this zone are enabled, overriding the ambient light from any
* enclosing zone.
* @property {Entities.AmbientLight} ambientLight - The ambient light properties of the zone.
*
* @property {string} skyboxMode="inherit" - Configures the skybox displayed in the zone. Possible values:<br />
* <code>"inherit"</code>: The skybox from any enclosing zone is dislayed in this zone.<br />
* <code>"disabled"</code>: The skybox from any enclosing zone and the skybox of this zone are disabled in this zone.<br />
* <code>"enabled"</code>: The skybox properties of this zone are enabled, overriding the skybox from any enclosing zone.
* @property {Entities.Skybox} skybox - The skybox properties of the zone.
*
* @property {string} hazeMode="inherit" - Configures the haze in the zone. Possible values:<br />
* <code>"inherit"</code>: The haze from any enclosing zone continues into this zone.<br />
* <code>"disabled"</code>: The haze from any enclosing zone and the haze of this zone are disabled in this zone.<br />
* <code>"enabled"</code>: The haze properties of this zone are enabled, overriding the haze from any enclosing zone.
* @property {Entities.Haze} haze - The haze properties of the zone.
*
* @property {boolean} flyingAllowed=true - If <code>true</code> then visitors can fly in the zone; otherwise they cannot.
* @property {boolean} ghostingAllowed=true - If <code>true</code> then visitors with avatar collisions turned off will not
* collide with content in the zone; otherwise visitors will always collide with content in the zone.
* @property {string} filterURL="" - The URL of a JavaScript file that filters changes to properties of entities within the
* zone. It is periodically executed for each entity in the zone. It can, for example, be used to not allow changes to
* certain properties.<br />
* <pre>
* function filter(properties) {
* // Test and edit properties object values,
* // e.g., properties.modelURL, as required.
* return properties;
* }
* </pre>
*
* @example <caption>Create a zone that casts a red key light along the x-axis.</caption>
* var zone = Entities.addEntity({
* type: "Zone",
* position: MyAvatar.position,
* dimensions: { x: 100, y: 100, z: 100 },
* keyLightMode: "enabled",
* keyLight: {
* "color": { "red": 255, "green": 0, "blue": 0 },
* "direction": { "x": 1, "y": 0, "z": 0 }
* },
* lifetime: 300 // Delete after 5 minutes.
* });
*/
QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool skipDefaults, bool allowUnknownCreateTime, bool strictSemantics) const {
// If strictSemantics is true and skipDefaults is false, then all and only those properties are copied for which the property flag
// is included in _desiredProperties, or is one of the specially enumerated ALWAYS properties below.
@ -491,7 +1192,7 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_ANGULAR_VELOCITY, angularVelocity);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_ANGULAR_DAMPING, angularDamping);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_VISIBLE, visible);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_CAN_CAST_SHADOW, canCastShadow);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_CAN_CAST_SHADOW, canCastShadow); // Relevant to Shape and Model entities only.
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_COLLISIONLESS, collisionless);
COPY_PROXY_PROPERTY_TO_QSCRIPTVALUE_GETTER(PROP_COLLISIONLESS, collisionless, ignoreForCollisions, getCollisionless()); // legacy support
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_COLLISION_MASK, collisionMask);
@ -500,7 +1201,7 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROXY_PROPERTY_TO_QSCRIPTVALUE_GETTER(PROP_DYNAMIC, dynamic, collisionsWillMove, getDynamic()); // legacy support
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_HREF, href);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_DESCRIPTION, description);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_FACE_CAMERA, faceCamera);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_FACE_CAMERA, faceCamera); // Text only.
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_ACTION_DATA, actionData);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_LOCKED, locked);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_USER_DATA, userData);
@ -521,7 +1222,7 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_NAME, name);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_COLLISION_SOUND_URL, collisionSoundURL);
// Boxes, Spheres, Light, Line, Model(??), Particle, PolyLine
// Light, Line, Model, ParticleEffect, PolyLine, Shape
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_COLOR, color);
// Particles only
@ -569,12 +1270,15 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
if (_type == EntityTypes::Model || _type == EntityTypes::Zone || _type == EntityTypes::ParticleEffect) {
COPY_PROPERTY_TO_QSCRIPTVALUE_GETTER(PROP_SHAPE_TYPE, shapeType, getShapeTypeAsString());
}
// FIXME: Shouldn't provide a shapeType property for Box and Sphere entities.
if (_type == EntityTypes::Box) {
COPY_PROPERTY_TO_QSCRIPTVALUE_GETTER(PROP_SHAPE_TYPE, shapeType, QString("Box"));
}
if (_type == EntityTypes::Sphere) {
COPY_PROPERTY_TO_QSCRIPTVALUE_GETTER(PROP_SHAPE_TYPE, shapeType, QString("Sphere"));
}
if (_type == EntityTypes::Box || _type == EntityTypes::Sphere || _type == EntityTypes::Shape) {
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_SHAPE, shape);
}
@ -653,11 +1357,11 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
if (_type == EntityTypes::Line || _type == EntityTypes::PolyLine) {
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_LINE_WIDTH, lineWidth);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_LINE_POINTS, linePoints);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_NORMALS, normals);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_STROKE_COLORS, strokeColors);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_STROKE_WIDTHS, strokeWidths);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_TEXTURES, textures);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_IS_UV_MODE_STRETCH, isUVModeStretch);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_NORMALS, normals); // Polyline only.
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_STROKE_COLORS, strokeColors); // Polyline only.
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_STROKE_WIDTHS, strokeWidths); // Polyline only.
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_TEXTURES, textures); // Polyline only.
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_IS_UV_MODE_STRETCH, isUVModeStretch); // Polyline only.
}
// Materials
@ -671,6 +1375,14 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_MATERIAL_MAPPING_ROT, materialMappingRot);
}
/**jsdoc
* The axis-aligned bounding box of an entity.
* @typedef Entities.BoundingBox
* @property {Vec3} brn - The bottom right near (minimum axes values) corner of the AA box.
* @property {Vec3} tfl - The top far left (maximum axes values) corner of the AA box.
* @property {Vec3} center - The center of the AA box.
* @property {Vec3} dimensions - The dimensions of the AA box.
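 * @example <caption>Report an entity's bounding box. A minimal sketch: <code>entityID</code> is assumed to be the ID of an
 *     existing entity.</caption>
 * var boundingBox = Entities.getEntityProperties(entityID).boundingBox;
 * print("Bounding box center: " + JSON.stringify(boundingBox.center));
 * print("Bounding box dimensions: " + JSON.stringify(boundingBox.dimensions));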
*/
if (!skipDefaults && !strictSemantics) {
AABox aaBox = getAABox();
QScriptValue boundingBox = engine->newObject();
@ -692,6 +1404,7 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_PARENT_ID, parentID);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_PARENT_JOINT_INDEX, parentJointIndex);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_QUERY_AA_CUBE, queryAACube);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_LOCAL_POSITION, localPosition);
@ -700,13 +1413,23 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_LOCAL_ANGULAR_VELOCITY, localAngularVelocity);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_LOCAL_DIMENSIONS, localDimensions);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_CLIENT_ONLY, clientOnly);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_OWNING_AVATAR_ID, owningAvatarID);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_CLIENT_ONLY, clientOnly); // Gettable but not settable
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_OWNING_AVATAR_ID, owningAvatarID); // Gettable but not settable
// Rendering info
if (!skipDefaults && !strictSemantics) {
QScriptValue renderInfo = engine->newObject();
/**jsdoc
* Information on how an entity is rendered. Properties are only filled in for <code>Model</code> entities; other
* entity types have an empty object, <code>{}</code>.
* @typedef {object} Entities.RenderInfo
* @property {number} verticesCount - The number of vertices in the entity.
* @property {number} texturesCount - The number of textures in the entity.
* @property {number} textureSize - The total size of the textures in the entity, in bytes.
* @property {boolean} hasTransparent - Is <code>true</code> if any of the textures has transparency.
* @property {number} drawCalls - The number of draw calls required to render the entity.
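 * @example <caption>Report the render info of a Model entity. A minimal sketch: <code>entityID</code> is assumed to be the
 *     ID of a Model entity that has finished loading.</caption>
 * var renderInfo = Entities.getEntityProperties(entityID).renderInfo;
 * print("Vertices: " + renderInfo.verticesCount);
 * print("Draw calls: " + renderInfo.drawCalls);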
*/
// currently only supported by models
if (_type == EntityTypes::Model) {
renderInfo.setProperty("verticesCount", (int)getRenderInfoVertexCount()); // FIXME - theoretically the number of vertex could be > max int
@ -719,6 +1442,7 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROPERTY_TO_QSCRIPTVALUE_GETTER_NO_SKIP(renderInfo, renderInfo); // Gettable but not settable
}
// FIXME: These properties should already have been set above.
properties.setProperty("clientOnly", convertScriptValue(engine, getClientOnly()));
properties.setProperty("owningAvatarID", convertScriptValue(engine, getOwningAvatarID()));

View file

@ -208,7 +208,7 @@ public:
DEFINE_PROPERTY(PROP_NORMALS, Normals, normals, QVector<glm::vec3>, ENTITY_ITEM_DEFAULT_EMPTY_VEC3_QVEC);
DEFINE_PROPERTY(PROP_STROKE_COLORS, StrokeColors, strokeColors, QVector<glm::vec3>, ENTITY_ITEM_DEFAULT_EMPTY_VEC3_QVEC);
DEFINE_PROPERTY(PROP_STROKE_WIDTHS, StrokeWidths, strokeWidths, QVector<float>, QVector<float>());
DEFINE_PROPERTY(PROP_IS_UV_MODE_STRETCH, IsUVModeStretch, isUVModeStretch, bool, true);
DEFINE_PROPERTY(PROP_IS_UV_MODE_STRETCH, IsUVModeStretch, isUVModeStretch, bool, true);
DEFINE_PROPERTY_REF(PROP_X_TEXTURE_URL, XTextureURL, xTextureURL, QString, "");
DEFINE_PROPERTY_REF(PROP_Y_TEXTURE_URL, YTextureURL, yTextureURL, QString, "");
DEFINE_PROPERTY_REF(PROP_Z_TEXTURE_URL, ZTextureURL, zTextureURL, QString, "");

View file

@ -33,7 +33,7 @@ public:
// these can only be called from the OctreeSendThread for the given Node
void insertSentFilteredEntity(const QUuid& entityID) { _sentFilteredEntities.insert(entityID); }
void removeSentFilteredEntity(const QUuid& entityID) { _sentFilteredEntities.remove(entityID); }
bool sentFilteredEntity(const QUuid& entityID) { return _sentFilteredEntities.contains(entityID); }
bool sentFilteredEntity(const QUuid& entityID) const { return _sentFilteredEntities.contains(entityID); }
QSet<QUuid> getSentFilteredEntities() { return _sentFilteredEntities; }
// the following flagged extra entity methods can only be called from the OctreeSendThread for the given Node

View file

@ -538,7 +538,7 @@ QUuid EntityScriptingInterface::editEntity(QUuid id, const EntityItemProperties&
NestableType nestableType = nestable->getNestableType();
if (nestableType == NestableType::Overlay || nestableType == NestableType::Avatar) {
qCWarning(entities) << "attempted edit on non-entity: " << id << nestable->getName();
return QUuid(); // null UUID to indicate failure
return QUuid(); // null script value to indicate failure
}
}
}
@ -1017,6 +1017,25 @@ QScriptValue RayToEntityIntersectionResultToScriptValue(QScriptEngine* engine, c
QString faceName = "";
// handle BoxFace
/**jsdoc
* <p>A <code>BoxFace</code> specifies the face of an axis-aligned (AA) box.
* <table>
* <thead>
* <tr><th>Value</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>"MIN_X_FACE"</code></td><td>The minimum x-axis face.</td></tr>
* <tr><td><code>"MAX_X_FACE"</code></td><td>The maximum x-axis face.</td></tr>
* <tr><td><code>"MIN_Y_FACE"</code></td><td>The minimum y-axis face.</td></tr>
* <tr><td><code>"MAX_Y_FACE"</code></td><td>The maximum y-axis face.</td></tr>
* <tr><td><code>"MIN_Z_FACE"</code></td><td>The minimum z-axis face.</td></tr>
* <tr><td><code>"MAX_Z_FACE"</code></td><td>The maximum z-axis face.</td></tr>
* <tr><td><code>"UNKNOWN_FACE"</code></td><td>Unknown value.</td></tr>
* </tbody>
* </table>
* @typedef {string} BoxFace
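 * @example <caption>Report the face of the entity at the center of the view. A minimal sketch: assumes an entity is in
 *     front of the camera.</caption>
 * var pickRay = Camera.computePickRay(Window.innerWidth / 2, Window.innerHeight / 2);
 * var intersection = Entities.findRayIntersection(pickRay, true);
 * if (intersection.intersects) {
 *     print("Face hit: " + intersection.face); // E.g., "MIN_Z_FACE".
 * }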
*/
// FIXME: Move enum to string function to BoxBase.cpp.
switch (value.face) {
case MIN_X_FACE:
faceName = "MIN_X_FACE";

File diff suppressed because it is too large

View file

@ -35,6 +35,56 @@ typedef EntityItemPointer (*EntityTypeFactory)(const EntityItemID& entityID, con
class EntityTypes {
public:
/**jsdoc
* <p>An entity may be one of the following types:</p>
* <table>
* <thead>
* <tr><th>Value</th><th>Description</th><th>Properties</th></tr>
* </thead>
* <tbody>
* <tr><td><code>"Box"</code></td><td>A rectangular prism. This is a synonym of <code>"Shape"</code> for the case
* where the entity's <code>shape</code> property value is <code>"Cube"</code>.<br />
* If an entity is created with its <code>type</code>
* set to <code>"Box"</code> it will always be created with a <code>shape</code> property value of
* <code>"Cube"</code>. If an entity of type <code>Shape</code> or <code>Sphere</code> has its <code>shape</code> set
* to <code>"Cube"</code> then its <code>type</code> will be reported as <code>"Box"</code>.
* <td>{@link Entities.EntityProperties-Box|EntityProperties-Box}</td></tr>
* <tr><td><code>"Light"</code></td><td>A local lighting effect.</td>
* <td>{@link Entities.EntityProperties-Light|EntityProperties-Light}</td></tr>
* <tr><td><code>"Line"</code></td><td>A sequence of one or more simple straight lines.</td>
* <td>{@link Entities.EntityProperties-Line|EntityProperties-Line}</td></tr>
* <tr><td><code>"Material"</code></td><td>Modifies the existing materials on Model entities, Shape entities (albedo
* only), {@link Overlays.OverlayType|model overlays}, and avatars.</td>
* <td>{@link Entities.EntityProperties-Material|EntityProperties-Material}</td></tr>
* <tr><td><code>"Model"</code></td><td>A mesh model from an FBX or OBJ file.</td>
* <td>{@link Entities.EntityProperties-Model|EntityProperties-Model}</td></tr>
* <tr><td><code>"ParticleEffect"</code></td><td>A particle system that can be used to simulate things such as fire,
* smoke, snow, magic spells, etc.</td>
* <td>{@link Entities.EntityProperties-ParticleEffect|EntityProperties-ParticleEffect}</td></tr>
* <tr><td><code>"PolyLine"</code></td><td>A sequence of one or more textured straight lines.</td>
* <td>{@link Entities.EntityProperties-PolyLine|EntityProperties-PolyLine}</td></tr>
* <tr><td><code>"PolyVox"</code></td><td>A set of textured voxels.</td>
* <td>{@link Entities.EntityProperties-PolyVox|EntityProperties-PolyVox}</td></tr>
* <tr><td><code>"Shape"</code></td><td>A basic entity such as a cube.
* See also, the <code>"Box"</code> and <code>"Sphere"</code> entity types.</td>
* <td>{@link Entities.EntityProperties-Shape|EntityProperties-Shape}</td></tr>
* <tr><td><code>"Sphere"</code></td><td>A sphere. This is a synonym of <code>"Shape"</code> for the case
* where the entity's <code>shape</code> property value is <code>"Sphere"</code>.<br />
* If an entity is created with its <code>type</code>
* set to <code>"Sphere"</code> it will always be created with a <code>shape</code> property value of
* <code>"Sphere"</code>. If an entity of type <code>Box</code> or <code>Shape</code> has its <code>shape</code> set
* to <code>"Sphere"</code> then its <code>type</code> will be reported as <code>"Sphere"</code>.
* <td>{@link Entities.EntityProperties-Sphere|EntityProperties-Sphere}</td></tr>
* <tr><td><code>"Text"</code></td><td>A pane of text oriented in space.</td>
* <td>{@link Entities.EntityProperties-Text|EntityProperties-Text}</td></tr>
* <tr><td><code>"Web"</code></td><td>A browsable Web page.</td>
* <td>{@link Entities.EntityProperties-Web|EntityProperties-Web}</td></tr>
* <tr><td><code>"Zone"</code></td><td>A volume of lighting effects and avatar permissions.</td>
* <td>{@link Entities.EntityProperties-Zone|EntityProperties-Zone}</td></tr>
* </tbody>
* </table>
* @typedef {string} Entities.EntityType
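 * @example <caption>Report an entity's type. A minimal sketch: <code>entityID</code> is assumed to be the ID of an
 *     existing entity.</caption>
 * var type = Entities.getEntityProperties(entityID, "type").type;
 * print("Entity type: " + type);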
*/
typedef enum EntityType_t {
Unknown,
Model,

View file

@ -40,6 +40,35 @@ static const float INITIAL_HAZE_BACKGROUND_BLEND{ 0.0f };
static const float INITIAL_KEY_LIGHT_RANGE{ 1000.0f };
static const float INITIAL_KEY_LIGHT_ALTITUDE{ 200.0f };
// FIXME: Document hazeAttenuationKeyLight, hazeKeyLightRange, and hazeKeyLightAltitude once they're working and are provided
// in the Create app's UI.
/**jsdoc
* Haze is defined by the following properties.
* @typedef {object} Entities.Haze
*
* @property {number} hazeRange=1000 - The horizontal distance at which visibility is reduced to 95%; i.e., 95% of each pixel's
* color is haze.
* @property {Color} hazeColor=128,154,179 - The color of the haze when looking away from the key light.
* @property {boolean} hazeEnableGlare=false - If <code>true</code> then the haze is colored with glare from the key light;
* <code>hazeGlareColor</code> and <code>hazeGlareAngle</code> are used.
 * @property {Color} hazeGlareColor=255,229,179 - The color of the haze when looking towards the key light.
* @property {number} hazeGlareAngle=20 - The angle in degrees across the circle around the key light that the glare color and
* haze color are blended 50/50.
*
 * @property {boolean} hazeAltitudeEffect=false - If <code>true</code> then haze decreases with altitude as defined by the
 *     entity's local coordinate system; <code>hazeBaseRef</code> and <code>hazeCeiling</code> are used.
* @property {number} hazeBaseRef=0 - The y-axis value in the entity's local coordinate system at which the haze density starts
* reducing with altitude.
* @property {number} hazeCeiling=200 - The y-axis value in the entity's local coordinate system at which the haze density has
* reduced to 5%.
*
* @property {number} hazeBackgroundBlend=0 - The proportion of the skybox image to show through the haze: <code>0.0</code>
* displays no skybox image; <code>1.0</code> displays no haze.
*
* @property {boolean} hazeAttenuateKeyLight=false - <em>Currently not supported.</em>
* @property {number} hazeKeyLightRange=1000 - <em>Currently not supported.</em>
* @property {number} hazeKeyLightAltitude=200 - <em>Currently not supported.</em>
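 * @example <caption>Add haze to a zone. A minimal sketch: the zone size and haze values are illustrative only.</caption>
 * var zone = Entities.addEntity({
 *     type: "Zone",
 *     position: MyAvatar.position,
 *     dimensions: { x: 1000, y: 200, z: 1000 },
 *     hazeMode: "enabled",
 *     haze: {
 *         hazeRange: 200,
 *         hazeColor: { red: 128, green: 154, blue: 179 }
 *     },
 *     lifetime: 300 // Delete after 5 minutes.
 * });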
*/
class HazePropertyGroup : public PropertyGroup {
public:
// EntityItemProperty related helpers

View file

@ -27,6 +27,16 @@ class OctreePacketData;
class EntityTreeElementExtraEncodeData;
class ReadBitstreamToTreeParams;
/**jsdoc
* A key light is defined by the following properties.
* @typedef {object} Entities.KeyLight
* @property {Color} color=255,255,255 - The color of the light.
* @property {number} intensity=1 - The intensity of the light.
* @property {Vec3} direction=0,-1,0 - The direction the light is shining.
* @property {boolean} castShadows=false - If <code>true</code> then shadows are cast. Shadows are cast by avatars, plus
* {@link Entities.EntityType|Model} and {@link Entities.EntityType|Shape} entities that have their
* <code>{@link Entities.EntityProperties|canCastShadows}</code> property set to <code>true</code>.
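 * @example <caption>Enable shadows from a zone's key light. A minimal sketch: the zone size and light values are
 *     illustrative only.</caption>
 * var zone = Entities.addEntity({
 *     type: "Zone",
 *     position: MyAvatar.position,
 *     dimensions: { x: 1000, y: 200, z: 1000 },
 *     keyLightMode: "enabled",
 *     keyLight: {
 *         color: { red: 255, green: 255, blue: 255 },
 *         direction: { x: 0, y: -1, z: 0 },
 *         castShadows: true
 *     },
 *     lifetime: 300 // Delete after 5 minutes.
 * });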
*/
class KeyLightPropertyGroup : public PropertyGroup {
public:
// EntityItemProperty related helpers

View file

@ -59,6 +59,25 @@ class PolyVoxEntityItem : public EntityItem {
virtual int getOnCount() const { return 0; }
/**jsdoc
* <p>A <code>PolyVoxSurfaceStyle</code> may be one of the following:</p>
* <table>
* <thead>
* <tr><th>Value</th><th>Type</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td><code>0</code></td><td>Marching cubes.</td><td>Chamfered edges. Open volume.
* Joins neighboring PolyVox entities reasonably well.</td></tr>
* <tr><td><code>1</code></td><td>Cubic.</td><td>Square edges. Open volume.
* Joins neighboring PolyVox entities cleanly.</td></tr>
* <tr><td><code>2</code></td><td>Edged cubic.</td><td>Square edges. Enclosed volume.
* Joins neighboring PolyVox entities cleanly.</td></tr>
* <tr><td><code>3</code></td><td>Edged marching cubes.</td><td>Chamfered edges. Enclosed volume.
* Doesn't join neighboring PolyVox entities.</td></tr>
* </tbody>
* </table>
* @typedef {number} Entities.PolyVoxSurfaceStyle
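 * @example <caption>Change an existing PolyVox entity to the cubic surface style. A minimal sketch:
 *     <code>polyVox</code> is assumed to be the ID of a PolyVox entity.</caption>
 * Entities.editEntity(polyVox, { voxelSurfaceStyle: 1 }); // 1 = cubic.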
*/
enum PolyVoxSurfaceStyle {
SURFACE_MARCHING_CUBES,
SURFACE_CUBIC,

View file

@ -20,6 +20,33 @@
#include "ShapeEntityItem.h"
namespace entity {
/**jsdoc
* <p>A <code>Shape</code>, <code>Box</code>, or <code>Sphere</code> {@link Entities.EntityType|EntityType} may display as
* one of the following geometrical shapes:</p>
* <table>
* <thead>
* <tr><th>Value</th><th>Dimensions</th><th>Notes</th></tr>
* </thead>
* <tbody>
* <tr><td><code>"Circle"</code></td><td>2D</td><td>A circle oriented in 3D.</td></tr>
* <tr><td><code>"Cube"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Cone"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Cylinder"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Dodecahedron"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Hexagon"</code></td><td>3D</td><td>A hexagonal prism.</td></tr>
* <tr><td><code>"Icosahedron"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Octagon"</code></td><td>3D</td><td>An octagonal prism.</td></tr>
* <tr><td><code>"Octahedron"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Quad"</code></td><td>2D</td><td>A square oriented in 3D.</td></tr>
* <tr><td><code>"Sphere"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Tetrahedron"</code></td><td>3D</td><td></td></tr>
* <tr><td><code>"Torus"</code></td><td>3D</td><td><em>Not implemented.</em></td></tr>
* <tr><td><code>"Triangle"</code></td><td>3D</td><td>A triangular prism.</td></tr>
* </tbody>
* </table>
* @typedef {string} Entities.Shape
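 * @example <caption>Change an existing Shape entity into an icosahedron. A minimal sketch: <code>shape</code> is assumed
 *     to be the ID of a Shape entity.</caption>
 * Entities.editEntity(shape, { shape: "Icosahedron" });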
*/
static const std::array<QString, Shape::NUM_SHAPES> shapeStrings { {
"Triangle",
"Quad",
@ -32,7 +59,7 @@ namespace entity {
"Octahedron",
"Dodecahedron",
"Icosahedron",
"Torus",
"Torus", // Not implemented yet.
"Cone",
"Cylinder"
} };

View file

@ -105,7 +105,7 @@ public:
protected:
float _alpha { 1 };
float _alpha { 1 }; // FIXME: This property is not used.
rgbColor _color;
entity::Shape _shape { entity::Shape::Sphere };

View file

@ -27,6 +27,13 @@ class OctreePacketData;
class EntityTreeElementExtraEncodeData;
class ReadBitstreamToTreeParams;
/**jsdoc
* A skybox is defined by the following properties.
* @typedef {object} Entities.Skybox
* @property {Color} color=0,0,0 - Sets the color of the sky if <code>url</code> is <code>""</code>, otherwise modifies the
* color of the cube map image.
* @property {string} url="" - A cube map image that is used to render the sky.
*/
class SkyboxPropertyGroup : public PropertyGroup {
public:
// EntityItemProperty related helpers

View file

@ -161,23 +161,19 @@ void FBXWriter::encodeFBXProperty(QDataStream& out, const QVariant& prop) {
case QMetaType::QString:
{
auto bytes = prop.toString().toUtf8();
out << 'S';
out << bytes.length();
out << bytes;
out.device()->write("S", 1);
out << (int32_t)bytes.size();
out.writeRawData(bytes, bytes.size());
break;
}
case QMetaType::QByteArray:
{
auto bytes = prop.toByteArray();
out.device()->write("S", 1);
out << (int32_t)bytes.size();
out.writeRawData(bytes, bytes.size());
break;
}
{
auto bytes = prop.toByteArray();
out.device()->write("S", 1);
out << (int32_t)bytes.size();
out.writeRawData(bytes, bytes.size());
break;
}
default:
{
if (prop.canConvert<QVector<float>>()) {

Some files were not shown because too many files have changed in this diff