Merge branch 'master' of https://github.com/highfidelity/hifi into QmlMarketplace

Roxanne Skelly 2019-01-28 15:17:08 -08:00
commit a4d105f24f
27 changed files with 1015 additions and 168 deletions


@@ -1,3 +1,3 @@
Source: nvtt
Version: 8c7e6b40ee5095f227b75880fabd89c99d6f34c0
Version: 330c4d56274a0f602a5c70596e2eb670a4ed56c2
Description: Texture processing tools with support for Direct3D 10 and 11 formats.


@@ -10,8 +10,8 @@ include(vcpkg_common_functions)
vcpkg_from_github(
OUT_SOURCE_PATH SOURCE_PATH
REPO highfidelity/nvidia-texture-tools
REF 8c7e6b40ee5095f227b75880fabd89c99d6f34c0
SHA512 f107d19dbbd6651ef2126b1422a5db8db291bf70311ac4fb1dbacb5ceaa8752fee38becbd32964f57596f0b84e1223bb2c3ff9d9c4fdc65c3e77a47836657cef
REF 330c4d56274a0f602a5c70596e2eb670a4ed56c2
SHA512 4c0bc2f369120d696cc27710b6d33086b27eef55f537ec66b9a5c8b1839bc2426c0413670b0f65be52c5d353468f0126dfe024be1f0690611d4d7e33ac530127
HEAD_REF master
)


@@ -1480,9 +1480,6 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
}
}
extracted.mesh.createMeshTangents(generateTangents);
extracted.mesh.createBlendShapeTangents(generateTangents);
// find the clusters with which the mesh is associated
QVector<QString> clusterIDs;
foreach (const QString& childID, _connectionChildMap.values(it.key())) {


@@ -67,108 +67,6 @@ bool HFMMaterial::needTangentSpace() const {
return !normalTexture.isNull();
}
static void _createBlendShapeTangents(HFMMesh& mesh, bool generateFromTexCoords, HFMBlendshape& blendShape);
void HFMMesh::createBlendShapeTangents(bool generateTangents) {
for (auto& blendShape : blendshapes) {
_createBlendShapeTangents(*this, generateTangents, blendShape);
}
}
using IndexAccessor = std::function<glm::vec3*(const HFMMesh&, int, int, glm::vec3*, glm::vec3&)>;
static void setTangents(const HFMMesh& mesh, const IndexAccessor& vertexAccessor, int firstIndex, int secondIndex,
const QVector<glm::vec3>& vertices, const QVector<glm::vec3>& normals, QVector<glm::vec3>& tangents) {
glm::vec3 vertex[2];
glm::vec3 normal;
glm::vec3* tangent = vertexAccessor(mesh, firstIndex, secondIndex, vertex, normal);
if (tangent) {
glm::vec3 bitangent = glm::cross(normal, vertex[1] - vertex[0]);
if (glm::length(bitangent) < EPSILON) {
return;
}
glm::vec2 texCoordDelta = mesh.texCoords.at(secondIndex) - mesh.texCoords.at(firstIndex);
glm::vec3 normalizedNormal = glm::normalize(normal);
*tangent += glm::cross(glm::angleAxis(-atan2f(-texCoordDelta.t, texCoordDelta.s), normalizedNormal) *
glm::normalize(bitangent), normalizedNormal);
}
}
static void createTangents(const HFMMesh& mesh, bool generateFromTexCoords,
const QVector<glm::vec3>& vertices, const QVector<glm::vec3>& normals, QVector<glm::vec3>& tangents,
IndexAccessor accessor) {
// if we have a normal map (and texture coordinates), we must compute tangents
if (generateFromTexCoords && !mesh.texCoords.isEmpty()) {
tangents.resize(vertices.size());
foreach(const HFMMeshPart& part, mesh.parts) {
for (int i = 0; i < part.quadIndices.size(); i += 4) {
setTangents(mesh, accessor, part.quadIndices.at(i), part.quadIndices.at(i + 1), vertices, normals, tangents);
setTangents(mesh, accessor, part.quadIndices.at(i + 1), part.quadIndices.at(i + 2), vertices, normals, tangents);
setTangents(mesh, accessor, part.quadIndices.at(i + 2), part.quadIndices.at(i + 3), vertices, normals, tangents);
setTangents(mesh, accessor, part.quadIndices.at(i + 3), part.quadIndices.at(i), vertices, normals, tangents);
}
// <= size - 3 in order to prevent overflowing triangleIndices when (i % 3) != 0
// This is most likely evidence of a further problem in extractMesh()
for (int i = 0; i <= part.triangleIndices.size() - 3; i += 3) {
setTangents(mesh, accessor, part.triangleIndices.at(i), part.triangleIndices.at(i + 1), vertices, normals, tangents);
setTangents(mesh, accessor, part.triangleIndices.at(i + 1), part.triangleIndices.at(i + 2), vertices, normals, tangents);
setTangents(mesh, accessor, part.triangleIndices.at(i + 2), part.triangleIndices.at(i), vertices, normals, tangents);
}
if ((part.triangleIndices.size() % 3) != 0) {
qCDebug(modelformat) << "Error in extractHFMModel part.triangleIndices.size() is not divisible by three ";
}
}
}
}
void HFMMesh::createMeshTangents(bool generateFromTexCoords) {
HFMMesh& mesh = *this;
// This is the only workaround I've found to trick the compiler into understanding that mesh.tangents isn't
// const in the lambda function.
auto& tangents = mesh.tangents;
createTangents(mesh, generateFromTexCoords, mesh.vertices, mesh.normals, mesh.tangents,
[&](const HFMMesh& mesh, int firstIndex, int secondIndex, glm::vec3* outVertices, glm::vec3& outNormal) {
outVertices[0] = mesh.vertices[firstIndex];
outVertices[1] = mesh.vertices[secondIndex];
outNormal = mesh.normals[firstIndex];
return &(tangents[firstIndex]);
});
}
static void _createBlendShapeTangents(HFMMesh& mesh, bool generateFromTexCoords, HFMBlendshape& blendShape) {
// Create lookup to get index in blend shape from vertex index in mesh
std::vector<int> reverseIndices;
reverseIndices.resize(mesh.vertices.size());
std::iota(reverseIndices.begin(), reverseIndices.end(), 0);
for (int indexInBlendShape = 0; indexInBlendShape < blendShape.indices.size(); ++indexInBlendShape) {
auto indexInMesh = blendShape.indices[indexInBlendShape];
reverseIndices[indexInMesh] = indexInBlendShape;
}
createTangents(mesh, generateFromTexCoords, blendShape.vertices, blendShape.normals, blendShape.tangents,
[&](const HFMMesh& mesh, int firstIndex, int secondIndex, glm::vec3* outVertices, glm::vec3& outNormal) {
const auto index1 = reverseIndices[firstIndex];
const auto index2 = reverseIndices[secondIndex];
if (index1 < blendShape.vertices.size()) {
outVertices[0] = blendShape.vertices[index1];
if (index2 < blendShape.vertices.size()) {
outVertices[1] = blendShape.vertices[index2];
} else {
// Index isn't in the blend shape so return vertex from mesh
outVertices[1] = mesh.vertices[secondIndex];
}
outNormal = blendShape.normals[index1];
return &blendShape.tangents[index1];
} else {
// Index isn't in blend shape so return nullptr
return (glm::vec3*)nullptr;
}
});
}
QStringList HFMModel::getJointNames() const {
QStringList names;
foreach (const HFMJoint& joint, joints) {


@@ -239,9 +239,6 @@ public:
graphics::MeshPointer _mesh;
bool wasCompressed { false };
void createMeshTangents(bool generateFromTexCoords);
void createBlendShapeTangents(bool generateTangents);
};
/**jsdoc


@@ -1,6 +1,4 @@
set(TARGET_NAME model-baker)
setup_hifi_library()
link_hifi_libraries(shared task gpu graphics)
include_hifi_library_headers(hfm)
link_hifi_libraries(shared task gpu graphics hfm)


@@ -15,26 +15,65 @@
#include "BakerTypes.h"
#include "BuildGraphicsMeshTask.h"
#include "CalculateMeshNormalsTask.h"
#include "CalculateMeshTangentsTask.h"
#include "CalculateBlendshapeNormalsTask.h"
#include "CalculateBlendshapeTangentsTask.h"
namespace baker {
class GetModelPartsTask {
public:
using Input = hfm::Model::Pointer;
using Output = VaryingSet3<std::vector<hfm::Mesh>, hifi::URL, MeshIndicesToModelNames>;
using Output = VaryingSet5<std::vector<hfm::Mesh>, hifi::URL, baker::MeshIndicesToModelNames, baker::BlendshapesPerMesh, QHash<QString, hfm::Material>>;
using JobModel = Job::ModelIO<GetModelPartsTask, Input, Output>;
void run(const BakeContextPointer& context, const Input& input, Output& output) {
auto& hfmModelIn = input;
const auto& hfmModelIn = input;
output.edit0() = hfmModelIn->meshes.toStdVector();
output.edit1() = hfmModelIn->originalURL;
output.edit2() = hfmModelIn->meshIndicesToModelNames;
auto& blendshapesPerMesh = output.edit3();
blendshapesPerMesh.reserve(hfmModelIn->meshes.size());
for (int i = 0; i < hfmModelIn->meshes.size(); i++) {
blendshapesPerMesh.push_back(hfmModelIn->meshes[i].blendshapes.toStdVector());
}
output.edit4() = hfmModelIn->materials;
}
};
class BuildBlendshapesTask {
public:
using Input = VaryingSet3<BlendshapesPerMesh, std::vector<NormalsPerBlendshape>, std::vector<TangentsPerBlendshape>>;
using Output = BlendshapesPerMesh;
using JobModel = Job::ModelIO<BuildBlendshapesTask, Input, Output>;
void run(const BakeContextPointer& context, const Input& input, Output& output) {
const auto& blendshapesPerMeshIn = input.get0();
const auto& normalsPerBlendshapePerMesh = input.get1();
const auto& tangentsPerBlendshapePerMesh = input.get2();
auto& blendshapesPerMeshOut = output;
blendshapesPerMeshOut = blendshapesPerMeshIn;
for (int i = 0; i < (int)blendshapesPerMeshOut.size(); i++) {
const auto& normalsPerBlendshape = normalsPerBlendshapePerMesh[i];
const auto& tangentsPerBlendshape = tangentsPerBlendshapePerMesh[i];
auto& blendshapesOut = blendshapesPerMeshOut[i];
for (int j = 0; j < (int)blendshapesOut.size(); j++) {
const auto& normals = normalsPerBlendshape[j];
const auto& tangents = tangentsPerBlendshape[j];
auto& blendshape = blendshapesOut[j];
blendshape.normals = QVector<glm::vec3>::fromStdVector(normals);
blendshape.tangents = QVector<glm::vec3>::fromStdVector(tangents);
}
}
}
};
class BuildMeshesTask {
public:
using Input = VaryingSet4<std::vector<hfm::Mesh>, std::vector<graphics::MeshPointer>, TangentsPerMesh, BlendshapesPerMesh>;
using Input = VaryingSet5<std::vector<hfm::Mesh>, std::vector<graphics::MeshPointer>, NormalsPerMesh, TangentsPerMesh, BlendshapesPerMesh>;
using Output = std::vector<hfm::Mesh>;
using JobModel = Job::ModelIO<BuildMeshesTask, Input, Output>;
@@ -42,13 +81,15 @@ namespace baker {
auto& meshesIn = input.get0();
int numMeshes = (int)meshesIn.size();
auto& graphicsMeshesIn = input.get1();
auto& tangentsPerMeshIn = input.get2();
auto& blendshapesPerMeshIn = input.get3();
auto& normalsPerMeshIn = input.get2();
auto& tangentsPerMeshIn = input.get3();
auto& blendshapesPerMeshIn = input.get4();
auto meshesOut = meshesIn;
for (int i = 0; i < numMeshes; i++) {
auto& meshOut = meshesOut[i];
meshOut._mesh = graphicsMeshesIn[i];
meshOut.normals = QVector<glm::vec3>::fromStdVector(normalsPerMeshIn[i]);
meshOut.tangents = QVector<glm::vec3>::fromStdVector(tangentsPerMeshIn[i]);
meshOut.blendshapes = QVector<hfm::Blendshape>::fromStdVector(blendshapesPerMeshIn[i]);
}
@@ -80,17 +121,27 @@ namespace baker {
const auto meshesIn = modelPartsIn.getN<GetModelPartsTask::Output>(0);
const auto url = modelPartsIn.getN<GetModelPartsTask::Output>(1);
const auto meshIndicesToModelNames = modelPartsIn.getN<GetModelPartsTask::Output>(2);
const auto blendshapesPerMeshIn = modelPartsIn.getN<GetModelPartsTask::Output>(3);
const auto materials = modelPartsIn.getN<GetModelPartsTask::Output>(4);
// Calculate normals and tangents for meshes and blendshapes if they do not exist
// Note: Normals are never calculated here for OBJ models. OBJ files optionally define normals on a per-face basis, so for consistency normals are calculated beforehand in OBJSerializer.
const auto normalsPerMesh = model.addJob<CalculateMeshNormalsTask>("CalculateMeshNormals", meshesIn);
const auto calculateMeshTangentsInputs = CalculateMeshTangentsTask::Input(normalsPerMesh, meshesIn, materials).asVarying();
const auto tangentsPerMesh = model.addJob<CalculateMeshTangentsTask>("CalculateMeshTangents", calculateMeshTangentsInputs);
const auto calculateBlendshapeNormalsInputs = CalculateBlendshapeNormalsTask::Input(blendshapesPerMeshIn, meshesIn).asVarying();
const auto normalsPerBlendshapePerMesh = model.addJob<CalculateBlendshapeNormalsTask>("CalculateBlendshapeNormals", calculateBlendshapeNormalsInputs);
const auto calculateBlendshapeTangentsInputs = CalculateBlendshapeTangentsTask::Input(normalsPerBlendshapePerMesh, blendshapesPerMeshIn, meshesIn, materials).asVarying();
const auto tangentsPerBlendshapePerMesh = model.addJob<CalculateBlendshapeTangentsTask>("CalculateBlendshapeTangents", calculateBlendshapeTangentsInputs);
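// At this point the pipeline holds, per mesh: normals and tangents, plus per-blendshape normals and tangents,
// each calculated only where the source model lacked them (tangents additionally only when a material needs tangent space).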
// Build the graphics::MeshPointer for each hfm::Mesh
const auto buildGraphicsMeshInputs = BuildGraphicsMeshTask::Input(meshesIn, url, meshIndicesToModelNames).asVarying();
const auto buildGraphicsMeshOutputs = model.addJob<BuildGraphicsMeshTask>("BuildGraphicsMesh", buildGraphicsMeshInputs);
const auto graphicsMeshes = buildGraphicsMeshOutputs.getN<BuildGraphicsMeshTask::Output>(0);
// TODO: Move tangent/blendshape validation/calculation to an earlier step
const auto tangentsPerMesh = buildGraphicsMeshOutputs.getN<BuildGraphicsMeshTask::Output>(1);
const auto blendshapesPerMesh = buildGraphicsMeshOutputs.getN<BuildGraphicsMeshTask::Output>(2);
const auto buildGraphicsMeshInputs = BuildGraphicsMeshTask::Input(meshesIn, url, meshIndicesToModelNames, normalsPerMesh, tangentsPerMesh).asVarying();
const auto graphicsMeshes = model.addJob<BuildGraphicsMeshTask>("BuildGraphicsMesh", buildGraphicsMeshInputs);
// Combine the outputs into a new hfm::Model
const auto buildMeshesInputs = BuildMeshesTask::Input(meshesIn, graphicsMeshes, tangentsPerMesh, blendshapesPerMesh).asVarying();
const auto buildBlendshapesInputs = BuildBlendshapesTask::Input(blendshapesPerMeshIn, normalsPerBlendshapePerMesh, tangentsPerBlendshapePerMesh).asVarying();
const auto blendshapesPerMeshOut = model.addJob<BuildBlendshapesTask>("BuildBlendshapes", buildBlendshapesInputs);
const auto buildMeshesInputs = BuildMeshesTask::Input(meshesIn, graphicsMeshes, normalsPerMesh, tangentsPerMesh, blendshapesPerMeshOut).asVarying();
const auto meshesOut = model.addJob<BuildMeshesTask>("BuildMeshes", buildMeshesInputs);
const auto buildModelInputs = BuildModelTask::Input(hfmModelIn, meshesOut).asVarying();
hfmModelOut = model.addJob<BuildModelTask>("BuildModel", buildModelInputs);


@@ -15,10 +15,25 @@
#include <hfm/HFM.h>
namespace baker {
using MeshIndices = std::vector<int>;
using IndicesPerMesh = std::vector<std::vector<int>>;
using VerticesPerMesh = std::vector<std::vector<glm::vec3>>;
using MeshNormals = std::vector<glm::vec3>;
using NormalsPerMesh = std::vector<std::vector<glm::vec3>>;
using MeshTangents = std::vector<glm::vec3>;
using TangentsPerMesh = std::vector<std::vector<glm::vec3>>;
using Blendshapes = std::vector<hfm::Blendshape>;
using BlendshapesPerMesh = std::vector<std::vector<hfm::Blendshape>>;
using BlendshapeVertices = std::vector<glm::vec3>;
using BlendshapeNormals = std::vector<glm::vec3>;
using BlendshapeIndices = std::vector<int>;
using VerticesPerBlendshape = std::vector<std::vector<glm::vec3>>;
using NormalsPerBlendshape = std::vector<std::vector<glm::vec3>>;
using IndicesPerBlendshape = std::vector<std::vector<int>>;
using BlendshapeTangents = std::vector<glm::vec3>;
using TangentsPerBlendshape = std::vector<std::vector<glm::vec3>>;
using MeshIndicesToModelNames = QHash<int, QString>;
};


@@ -26,9 +26,18 @@ glm::vec3 normalizeDirForPacking(const glm::vec3& dir) {
return dir;
}
void buildGraphicsMesh(const hfm::Mesh& hfmMesh, graphics::MeshPointer& graphicsMeshPointer, baker::MeshTangents& meshTangents, baker::Blendshapes& blendshapes) {
void buildGraphicsMesh(const hfm::Mesh& hfmMesh, graphics::MeshPointer& graphicsMeshPointer, const baker::MeshNormals& meshNormals, const baker::MeshTangents& meshTangentsIn) {
auto graphicsMesh = std::make_shared<graphics::Mesh>();
// Fill tangents with a dummy value to force tangents to be present if there are normals
baker::MeshTangents meshTangents;
if (!meshTangentsIn.empty()) {
meshTangents = meshTangentsIn;
} else {
meshTangents.reserve(meshNormals.size());
std::fill_n(std::back_inserter(meshTangents), meshNormals.size(), Vectors::UNIT_X);
}
unsigned int totalSourceIndices = 0;
foreach(const HFMMeshPart& part, hfmMesh.parts) {
totalSourceIndices += (part.quadTrianglesIndices.size() + part.triangleIndices.size());
@@ -48,23 +57,6 @@ void buildGraphicsMesh(const hfm::Mesh& hfmMesh, graphics::MeshPointer& graphics
int numVerts = hfmMesh.vertices.size();
if (!hfmMesh.normals.empty() && hfmMesh.tangents.empty()) {
// Fill with a dummy value to force tangents to be present if there are normals
meshTangents.reserve(hfmMesh.normals.size());
std::fill_n(std::back_inserter(meshTangents), hfmMesh.normals.size(), Vectors::UNIT_X);
} else {
meshTangents = hfmMesh.tangents.toStdVector();
}
// Same thing with blend shapes
blendshapes = hfmMesh.blendshapes.toStdVector();
for (auto& blendShape : blendshapes) {
if (!blendShape.normals.empty() && blendShape.tangents.empty()) {
// Fill with a dummy value to force tangents to be present if there are normals
blendShape.tangents.reserve(blendShape.normals.size());
std::fill_n(std::back_inserter(blendShape.tangents), blendShape.normals.size(), Vectors::UNIT_X);
}
}
// evaluate all attribute elements and data sizes
// Position is a vec3
@@ -73,12 +65,12 @@ void buildGraphicsMesh(const hfm::Mesh& hfmMesh, graphics::MeshPointer& graphics
// Normal and tangent are always there together packed in normalized xyz32bits word (times 2)
const auto normalElement = HFM_NORMAL_ELEMENT;
const int normalsSize = hfmMesh.normals.size() * normalElement.getSize();
const int normalsSize = (int)meshNormals.size() * normalElement.getSize();
const int tangentsSize = (int)meshTangents.size() * normalElement.getSize();
// If there are normals then there should be tangents
assert(normalsSize <= tangentsSize);
if (tangentsSize > normalsSize) {
HIFI_FCDEBUG_ID(model_baker(), repeatMessageID, "BuildGraphicsMeshTask -- Unexpected tangents in file");
HIFI_FCDEBUG_ID(model_baker(), repeatMessageID, "BuildGraphicsMeshTask -- Unexpected tangents in mesh");
}
const auto normalsAndTangentsSize = normalsSize + tangentsSize;
@@ -124,11 +116,11 @@ void buildGraphicsMesh(const hfm::Mesh& hfmMesh, graphics::MeshPointer& graphics
if (normalsSize > 0) {
std::vector<NormalType> normalsAndTangents;
normalsAndTangents.reserve(hfmMesh.normals.size() + (int)meshTangents.size());
auto normalIt = hfmMesh.normals.constBegin();
normalsAndTangents.reserve(meshNormals.size() + (int)meshTangents.size());
auto normalIt = meshNormals.cbegin();
auto tangentIt = meshTangents.cbegin();
for (;
normalIt != hfmMesh.normals.constEnd();
normalIt != meshNormals.cend();
++normalIt, ++tangentIt) {
#if HFM_PACK_NORMALS
const auto normal = normalizeDirForPacking(*normalIt);
@@ -212,11 +204,6 @@ void buildGraphicsMesh(const hfm::Mesh& hfmMesh, graphics::MeshPointer& graphics
auto vertexFormat = std::make_shared<gpu::Stream::Format>();
auto vertexBufferStream = std::make_shared<gpu::BufferStream>();
// Decision time:
// if blendshapes then keep position and normals/tangents as separated channel buffers from interleaved attributes
// else everything is interleaved in one buffer
// Default case is no blend shapes
gpu::BufferPointer attribBuffer;
int totalAttribBufferSize = totalVertsSize;
gpu::uint8 posChannel = 0;
@@ -244,7 +231,7 @@ void buildGraphicsMesh(const hfm::Mesh& hfmMesh, graphics::MeshPointer& graphics
}
}
// Pack normal and Tangent with the rest of attributes if no blend shapes
// Pack normal and Tangent with the rest of attributes
if (colorsSize) {
vertexFormat->setAttribute(gpu::Stream::COLOR, attribChannel, colorElement, bufOffset);
bufOffset += colorElement.getSize();
@@ -384,22 +371,21 @@ void buildGraphicsMesh(const hfm::Mesh& hfmMesh, graphics::MeshPointer& graphics
}
void BuildGraphicsMeshTask::run(const baker::BakeContextPointer& context, const Input& input, Output& output) {
auto& meshes = input.get0();
auto& url = input.get1();
auto& meshIndicesToModelNames = input.get2();
const auto& meshes = input.get0();
const auto& url = input.get1();
const auto& meshIndicesToModelNames = input.get2();
const auto& normalsPerMesh = input.get3();
const auto& tangentsPerMesh = input.get4();
auto& graphicsMeshes = output;
auto& graphicsMeshes = output.edit0();
auto& tangentsPerMesh = output.edit1();
auto& blendshapesPerMesh = output.edit2();
int n = (int)meshes.size();
for (int i = 0; i < n; i++) {
graphicsMeshes.emplace_back();
auto& graphicsMesh = graphicsMeshes[i];
tangentsPerMesh.emplace_back();
blendshapesPerMesh.emplace_back();
// Try to create the graphics::Mesh
buildGraphicsMesh(meshes[i], graphicsMesh, tangentsPerMesh[i], blendshapesPerMesh[i]);
buildGraphicsMesh(meshes[i], graphicsMesh, normalsPerMesh[i], tangentsPerMesh[i]);
// Choose a name for the mesh
if (graphicsMesh) {


@@ -20,8 +20,8 @@
class BuildGraphicsMeshTask {
public:
using Input = baker::VaryingSet3<std::vector<hfm::Mesh>, hifi::URL, baker::MeshIndicesToModelNames>;
using Output = baker::VaryingSet3<std::vector<graphics::MeshPointer>, std::vector<baker::MeshTangents>, std::vector<baker::Blendshapes>>;
using Input = baker::VaryingSet5<std::vector<hfm::Mesh>, hifi::URL, baker::MeshIndicesToModelNames, baker::NormalsPerMesh, baker::TangentsPerMesh>;
using Output = std::vector<graphics::MeshPointer>;
using JobModel = baker::Job::ModelIO<BuildGraphicsMeshTask, Input, Output>;
void run(const baker::BakeContextPointer& context, const Input& input, Output& output);


@@ -0,0 +1,70 @@
//
// CalculateBlendshapeNormalsTask.h
// model-baker/src/model-baker
//
// Created by Sabrina Shanman on 2019/01/07.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "CalculateBlendshapeNormalsTask.h"
#include "ModelMath.h"
void CalculateBlendshapeNormalsTask::run(const baker::BakeContextPointer& context, const Input& input, Output& output) {
const auto& blendshapesPerMesh = input.get0();
const auto& meshes = input.get1();
auto& normalsPerBlendshapePerMeshOut = output;
normalsPerBlendshapePerMeshOut.reserve(blendshapesPerMesh.size());
for (size_t i = 0; i < blendshapesPerMesh.size(); i++) {
const auto& mesh = meshes[i];
const auto& blendshapes = blendshapesPerMesh[i];
normalsPerBlendshapePerMeshOut.emplace_back();
auto& normalsPerBlendshapeOut = normalsPerBlendshapePerMeshOut[normalsPerBlendshapePerMeshOut.size()-1];
normalsPerBlendshapeOut.reserve(blendshapes.size());
for (size_t j = 0; j < blendshapes.size(); j++) {
const auto& blendshape = blendshapes[j];
const auto& normalsIn = blendshape.normals;
// Check if normals are already defined. Otherwise, calculate them from existing blendshape vertices.
if (!normalsIn.empty()) {
normalsPerBlendshapeOut.push_back(normalsIn.toStdVector());
} else {
// Create lookup to get index in blendshape from vertex index in mesh
std::vector<int> reverseIndices;
reverseIndices.resize(mesh.vertices.size());
std::iota(reverseIndices.begin(), reverseIndices.end(), 0);
for (int indexInBlendShape = 0; indexInBlendShape < blendshape.indices.size(); ++indexInBlendShape) {
auto indexInMesh = blendshape.indices[indexInBlendShape];
reverseIndices[indexInMesh] = indexInBlendShape;
}
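// e.g. if blendshape.indices == [5, 9], then reverseIndices[5] == 0 and reverseIndices[9] == 1,
// while every other entry keeps its identity value from std::iota above.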
normalsPerBlendshapeOut.emplace_back();
auto& normals = normalsPerBlendshapeOut[normalsPerBlendshapeOut.size()-1];
normals.resize(mesh.vertices.size());
baker::calculateNormals(mesh,
[&reverseIndices, &blendshape, &normals](int normalIndex) /* NormalAccessor */ {
const auto lookupIndex = reverseIndices[normalIndex];
if (lookupIndex < blendshape.vertices.size()) {
return &normals[lookupIndex];
} else {
// Index isn't in the blendshape. Request that the normal not be calculated.
return (glm::vec3*)nullptr;
}
},
[&mesh, &reverseIndices, &blendshape](int vertexIndex, glm::vec3& outVertex) /* VertexSetter */ {
const auto lookupIndex = reverseIndices[vertexIndex];
if (lookupIndex < blendshape.vertices.size()) {
outVertex = blendshape.vertices[lookupIndex];
} else {
// Index isn't in the blendshape, so return vertex from mesh
outVertex = mesh.vertices[lookupIndex];
}
});
}
}
}
}


@@ -0,0 +1,28 @@
//
// CalculateBlendshapeNormalsTask.h
// model-baker/src/model-baker
//
// Created by Sabrina Shanman on 2019/01/07.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_CalculateBlendshapeNormalsTask_h
#define hifi_CalculateBlendshapeNormalsTask_h
#include "Engine.h"
#include "BakerTypes.h"
// Calculate blendshape normals if not already present in the blendshape
class CalculateBlendshapeNormalsTask {
public:
using Input = baker::VaryingSet2<baker::BlendshapesPerMesh, std::vector<hfm::Mesh>>;
using Output = std::vector<baker::NormalsPerBlendshape>;
using JobModel = baker::Job::ModelIO<CalculateBlendshapeNormalsTask, Input, Output>;
void run(const baker::BakeContextPointer& context, const Input& input, Output& output);
};
#endif // hifi_CalculateBlendshapeNormalsTask_h


@@ -0,0 +1,95 @@
//
// CalculateBlendshapeTangentsTask.cpp
// model-baker/src/model-baker
//
// Created by Sabrina Shanman on 2019/01/08.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "CalculateBlendshapeTangentsTask.h"
#include <set>
#include "ModelMath.h"
void CalculateBlendshapeTangentsTask::run(const baker::BakeContextPointer& context, const Input& input, Output& output) {
const auto& normalsPerBlendshapePerMesh = input.get0();
const auto& blendshapesPerMesh = input.get1();
const auto& meshes = input.get2();
const auto& materials = input.get3();
auto& tangentsPerBlendshapePerMeshOut = output;
tangentsPerBlendshapePerMeshOut.reserve(normalsPerBlendshapePerMesh.size());
for (size_t i = 0; i < blendshapesPerMesh.size(); i++) {
const auto& normalsPerBlendshape = normalsPerBlendshapePerMesh[i];
const auto& blendshapes = blendshapesPerMesh[i];
const auto& mesh = meshes[i];
tangentsPerBlendshapePerMeshOut.emplace_back();
auto& tangentsPerBlendshapeOut = tangentsPerBlendshapePerMeshOut[tangentsPerBlendshapePerMeshOut.size()-1];
// Check if we actually need to calculate the tangents, or just append empty arrays
bool needTangents = false;
for (const auto& meshPart : mesh.parts) {
auto materialIt = materials.find(meshPart.materialID);
if (materialIt != materials.end() && (*materialIt).needTangentSpace()) {
needTangents = true;
break;
}
}
for (size_t j = 0; j < blendshapes.size(); j++) {
const auto& blendshape = blendshapes[j];
const auto& tangentsIn = blendshape.tangents;
const auto& normals = normalsPerBlendshape[j];
tangentsPerBlendshapeOut.emplace_back();
auto& tangentsOut = tangentsPerBlendshapeOut[tangentsPerBlendshapeOut.size()-1];
// Check if we already have tangents
if (!tangentsIn.empty()) {
tangentsOut = tangentsIn.toStdVector();
continue;
}
// Check if we can and should calculate tangents (we need normals to calculate the tangents)
if (normals.empty() || !needTangents) {
continue;
}
tangentsOut.resize(normals.size());
// Create lookup to get index in blend shape from vertex index in mesh
std::vector<int> reverseIndices;
reverseIndices.resize(mesh.vertices.size());
std::iota(reverseIndices.begin(), reverseIndices.end(), 0);
for (int indexInBlendShape = 0; indexInBlendShape < blendshape.indices.size(); ++indexInBlendShape) {
auto indexInMesh = blendshape.indices[indexInBlendShape];
reverseIndices[indexInMesh] = indexInBlendShape;
}
baker::calculateTangents(mesh,
[&mesh, &blendshape, &normals, &tangentsOut, &reverseIndices](int firstIndex, int secondIndex, glm::vec3* outVertices, glm::vec2* outTexCoords, glm::vec3& outNormal) {
const auto index1 = reverseIndices[firstIndex];
const auto index2 = reverseIndices[secondIndex];
if (index1 < blendshape.vertices.size()) {
outVertices[0] = blendshape.vertices[index1];
outTexCoords[0] = mesh.texCoords[firstIndex];
outTexCoords[1] = mesh.texCoords[secondIndex];
if (index2 < blendshape.vertices.size()) {
outVertices[1] = blendshape.vertices[index2];
} else {
// Index isn't in the blend shape so return vertex from mesh
outVertices[1] = mesh.vertices[secondIndex];
}
outNormal = normals[index1];
return &tangentsOut[index1];
} else {
// Index isn't in blend shape so return nullptr
return (glm::vec3*)nullptr;
}
});
}
}
}


@@ -0,0 +1,28 @@
//
// CalculateBlendshapeTangentsTask.h
// model-baker/src/model-baker
//
// Created by Sabrina Shanman on 2019/01/07.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_CalculateBlendshapeTangentsTask_h
#define hifi_CalculateBlendshapeTangentsTask_h
#include "Engine.h"
#include "BakerTypes.h"
// Calculate blendshape tangents if not already present in the blendshape
class CalculateBlendshapeTangentsTask {
public:
using Input = baker::VaryingSet4<std::vector<baker::NormalsPerBlendshape>, baker::BlendshapesPerMesh, std::vector<hfm::Mesh>, QHash<QString, hfm::Material>>;
using Output = std::vector<baker::TangentsPerBlendshape>;
using JobModel = baker::Job::ModelIO<CalculateBlendshapeTangentsTask, Input, Output>;
void run(const baker::BakeContextPointer& context, const Input& input, Output& output);
};
#endif // hifi_CalculateBlendshapeTangentsTask_h


@@ -0,0 +1,40 @@
//
// CalculateMeshNormalsTask.cpp
// model-baker/src/model-baker
//
// Created by Sabrina Shanman on 2019/01/22.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "CalculateMeshNormalsTask.h"
#include "ModelMath.h"
void CalculateMeshNormalsTask::run(const baker::BakeContextPointer& context, const Input& input, Output& output) {
const auto& meshes = input;
auto& normalsPerMeshOut = output;
normalsPerMeshOut.reserve(meshes.size());
for (int i = 0; i < (int)meshes.size(); i++) {
const auto& mesh = meshes[i];
normalsPerMeshOut.emplace_back();
auto& normalsOut = normalsPerMeshOut[normalsPerMeshOut.size()-1];
// Only calculate normals if this mesh doesn't already have them
if (!mesh.normals.empty()) {
normalsOut = mesh.normals.toStdVector();
} else {
normalsOut.resize(mesh.vertices.size());
baker::calculateNormals(mesh,
[&normalsOut](int normalIndex) /* NormalAccessor */ {
return &normalsOut[normalIndex];
},
[&mesh](int vertexIndex, glm::vec3& outVertex) /* VertexSetter */ {
outVertex = mesh.vertices[vertexIndex];
}
);
}
}
}


@@ -0,0 +1,30 @@
//
// CalculateMeshNormalsTask.h
// model-baker/src/model-baker
//
// Created by Sabrina Shanman on 2019/01/07.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_CalculateMeshNormalsTask_h
#define hifi_CalculateMeshNormalsTask_h
#include <hfm/HFM.h>
#include "Engine.h"
#include "BakerTypes.h"
// Calculate mesh normals if not already present in the mesh
class CalculateMeshNormalsTask {
public:
using Input = std::vector<hfm::Mesh>;
using Output = baker::NormalsPerMesh;
using JobModel = baker::Job::ModelIO<CalculateMeshNormalsTask, Input, Output>;
void run(const baker::BakeContextPointer& context, const Input& input, Output& output);
};
#endif // hifi_CalculateMeshNormalsTask_h


@@ -0,0 +1,65 @@
//
// CalculateMeshTangentsTask.cpp
// model-baker/src/model-baker
//
// Created by Sabrina Shanman on 2019/01/22.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "CalculateMeshTangentsTask.h"
#include "ModelMath.h"
void CalculateMeshTangentsTask::run(const baker::BakeContextPointer& context, const Input& input, Output& output) {
const auto& normalsPerMesh = input.get0();
const std::vector<hfm::Mesh>& meshes = input.get1();
const auto& materials = input.get2();
auto& tangentsPerMeshOut = output;
tangentsPerMeshOut.reserve(meshes.size());
for (int i = 0; i < (int)meshes.size(); i++) {
const auto& mesh = meshes[i];
const auto& tangentsIn = mesh.tangents;
const auto& normals = normalsPerMesh[i];
tangentsPerMeshOut.emplace_back();
auto& tangentsOut = tangentsPerMeshOut[tangentsPerMeshOut.size()-1];
// Check if we already have tangents and therefore do not need to do any calculation
if (!tangentsIn.empty()) {
tangentsOut = tangentsIn.toStdVector();
continue;
}
// Check if we have normals, and if not then tangents can't be calculated
if (normals.empty()) {
continue;
}
// Check if we actually need to calculate the tangents
bool needTangents = false;
for (const auto& meshPart : mesh.parts) {
auto materialIt = materials.find(meshPart.materialID);
if (materialIt != materials.end() && (*materialIt).needTangentSpace()) {
needTangents = true;
break;
}
}
if (!needTangents) {
continue;
}
tangentsOut.resize(normals.size());
baker::calculateTangents(mesh,
[&mesh, &normals, &tangentsOut](int firstIndex, int secondIndex, glm::vec3* outVertices, glm::vec2* outTexCoords, glm::vec3& outNormal) {
outVertices[0] = mesh.vertices[firstIndex];
outVertices[1] = mesh.vertices[secondIndex];
outNormal = normals[firstIndex];
outTexCoords[0] = mesh.texCoords[firstIndex];
outTexCoords[1] = mesh.texCoords[secondIndex];
return &(tangentsOut[firstIndex]);
});
}
}


@@ -0,0 +1,32 @@
//
// CalculateMeshTangentsTask.h
// model-baker/src/model-baker
//
// Created by Sabrina Shanman on 2019/01/07.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_CalculateMeshTangentsTask_h
#define hifi_CalculateMeshTangentsTask_h
#include <hfm/HFM.h>
#include "Engine.h"
#include "BakerTypes.h"
// Calculate mesh tangents if not already present in the mesh
class CalculateMeshTangentsTask {
public:
using NormalsPerMesh = std::vector<std::vector<glm::vec3>>;
using Input = baker::VaryingSet3<baker::NormalsPerMesh, std::vector<hfm::Mesh>, QHash<QString, hfm::Material>>;
using Output = baker::TangentsPerMesh;
using JobModel = baker::Job::ModelIO<CalculateMeshTangentsTask, Input, Output>;
void run(const baker::BakeContextPointer& context, const Input& input, Output& output);
};
#endif // hifi_CalculateMeshTangentsTask_h


@@ -0,0 +1,121 @@
//
// ModelMath.cpp
// model-baker/src/model-baker
//
// Created by Sabrina Shanman on 2019/01/18.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "ModelMath.h"
#include <LogHandler.h>
#include "ModelBakerLogging.h"
namespace baker {
template<class T>
const T& checkedAt(const QVector<T>& vector, int i) {
if (i < 0 || i >= vector.size()) {
throw std::out_of_range("baker::checked_at (ModelMath.cpp): index " + std::to_string(i) + " is out of range");
}
return vector[i];
}
template<class T>
const T& checkedAt(const std::vector<T>& vector, int i) {
if (i < 0 || i >= vector.size()) {
throw std::out_of_range("baker::checked_at (ModelMath.cpp): index " + std::to_string(i) + " is out of range");
}
return vector[i];
}
template<class T>
T& checkedAt(std::vector<T>& vector, int i) {
if (i < 0 || i >= vector.size()) {
throw std::out_of_range("baker::checked_at (ModelMath.cpp): index " + std::to_string(i) + " is out of range");
}
return vector[i];
}
void setTangent(const HFMMesh& mesh, const IndexAccessor& vertexAccessor, int firstIndex, int secondIndex) {
glm::vec3 vertex[2];
glm::vec2 texCoords[2];
glm::vec3 normal;
glm::vec3* tangent = vertexAccessor(firstIndex, secondIndex, vertex, texCoords, normal);
if (tangent) {
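// Accumulate this edge's contribution: build a bitangent from the normal and the edge vector,
// rotate it around the normal by the edge's angle in UV space, then cross it with the normalized
// normal to get a tangent direction. Degenerate edges (near-zero bitangent) are skipped below.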
glm::vec3 bitangent = glm::cross(normal, vertex[1] - vertex[0]);
if (glm::length(bitangent) < EPSILON) {
return;
}
glm::vec2 texCoordDelta = texCoords[1] - texCoords[0];
glm::vec3 normalizedNormal = glm::normalize(normal);
*tangent += glm::cross(glm::angleAxis(-atan2f(-texCoordDelta.t, texCoordDelta.s), normalizedNormal) *
glm::normalize(bitangent), normalizedNormal);
}
}
void calculateNormals(const hfm::Mesh& mesh, NormalAccessor normalAccessor, VertexSetter vertexSetter) {
static int repeatMessageID = LogHandler::getInstance().newRepeatedMessageID();
for (const HFMMeshPart& part : mesh.parts) {
for (int i = 0; i < part.quadIndices.size(); i += 4) {
glm::vec3* n0 = normalAccessor(part.quadIndices[i]);
glm::vec3* n1 = normalAccessor(part.quadIndices[i + 1]);
glm::vec3* n2 = normalAccessor(part.quadIndices[i + 2]);
glm::vec3* n3 = normalAccessor(part.quadIndices[i + 3]);
if (!n0 || !n1 || !n2 || !n3) {
// Quad is not in the mesh (can occur with blendshape meshes, which are a subset of the hfm Mesh vertices)
continue;
}
glm::vec3 vertices[3]; // Assume all vertices in this quad are in the same plane, so only the first three are needed to calculate the normal
vertexSetter(part.quadIndices[i], vertices[0]);
vertexSetter(part.quadIndices[i + 1], vertices[1]);
vertexSetter(part.quadIndices[i + 2], vertices[2]);
*n0 = *n1 = *n2 = *n3 = glm::cross(vertices[1] - vertices[0], vertices[2] - vertices[0]);
}
// <= size - 3 in order to prevent overflowing triangleIndices when (i % 3) != 0
// This is most likely evidence of a further problem in extractMesh()
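// e.g. with 8 triangle indices the loop runs only for i = 0 and i = 3; the trailing partial
// triangle (indices 6 and 7) is skipped and reported by the size() % 3 check below.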
for (int i = 0; i <= part.triangleIndices.size() - 3; i += 3) {
glm::vec3* n0 = normalAccessor(part.triangleIndices[i]);
glm::vec3* n1 = normalAccessor(part.triangleIndices[i + 1]);
glm::vec3* n2 = normalAccessor(part.triangleIndices[i + 2]);
if (!n0 || !n1 || !n2) {
// Tri is not in the mesh (can occur with blendshape meshes, which are a subset of the hfm Mesh vertices)
continue;
}
glm::vec3 vertices[3];
vertexSetter(part.triangleIndices[i], vertices[0]);
vertexSetter(part.triangleIndices[i + 1], vertices[1]);
vertexSetter(part.triangleIndices[i + 2], vertices[2]);
*n0 = *n1 = *n2 = glm::cross(vertices[1] - vertices[0], vertices[2] - vertices[0]);
}
if ((part.triangleIndices.size() % 3) != 0) {
HIFI_FCDEBUG_ID(model_baker(), repeatMessageID, "Error in baker::calculateNormals: part.triangleIndices.size() is not divisible by three");
}
}
}
void calculateTangents(const hfm::Mesh& mesh, IndexAccessor accessor) {
static int repeatMessageID = LogHandler::getInstance().newRepeatedMessageID();
for (const HFMMeshPart& part : mesh.parts) {
for (int i = 0; i < part.quadIndices.size(); i += 4) {
setTangent(mesh, accessor, part.quadIndices.at(i), part.quadIndices.at(i + 1));
setTangent(mesh, accessor, part.quadIndices.at(i + 1), part.quadIndices.at(i + 2));
setTangent(mesh, accessor, part.quadIndices.at(i + 2), part.quadIndices.at(i + 3));
setTangent(mesh, accessor, part.quadIndices.at(i + 3), part.quadIndices.at(i));
}
// <= size - 3 in order to prevent overflowing triangleIndices when (i % 3) != 0
// This is most likely evidence of a further problem in extractMesh()
for (int i = 0; i <= part.triangleIndices.size() - 3; i += 3) {
setTangent(mesh, accessor, part.triangleIndices.at(i), part.triangleIndices.at(i + 1));
setTangent(mesh, accessor, part.triangleIndices.at(i + 1), part.triangleIndices.at(i + 2));
setTangent(mesh, accessor, part.triangleIndices.at(i + 2), part.triangleIndices.at(i));
}
if ((part.triangleIndices.size() % 3) != 0) {
HIFI_FCDEBUG_ID(model_baker(), repeatMessageID, "Error in baker::calculateTangents: part.triangleIndices.size() is not divisible by three");
}
}
}
}


@@ -0,0 +1,34 @@
//
// ModelMath.h
// model-baker/src/model-baker
//
// Created by Sabrina Shanman on 2019/01/07.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <hfm/HFM.h>
#include "BakerTypes.h"
namespace baker {
// Returns a reference to the normal at the specified index, or nullptr if it cannot be accessed
using NormalAccessor = std::function<glm::vec3*(int index)>;
// Assigns a vertex to outVertex given the lookup index
using VertexSetter = std::function<void(int index, glm::vec3& outVertex)>;
void calculateNormals(const hfm::Mesh& mesh, NormalAccessor normalAccessor, VertexSetter vertexAccessor);
// firstIndex, secondIndex: the vertex indices to be used for calculation
// outVertices: should be assigned a 2 element array containing the vertices at firstIndex and secondIndex
// outTexCoords: same as outVertices but for texture coordinates
// outNormal: reference to the normal of this triangle
//
// Return value: pointer to the tangent you want to be calculated
using IndexAccessor = std::function<glm::vec3*(int firstIndex, int secondIndex, glm::vec3* outVertices, glm::vec2* outTexCoords, glm::vec3& outNormal)>;
void calculateTangents(const hfm::Mesh& mesh, IndexAccessor accessor);
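// Minimal usage sketch (assuming per-mesh vertices/normals/texCoords arrays and a pre-sized tangents buffer):
//   calculateTangents(mesh,
//       [&](int firstIndex, int secondIndex, glm::vec3* outVertices, glm::vec2* outTexCoords, glm::vec3& outNormal) {
//           outVertices[0] = vertices[firstIndex]; outVertices[1] = vertices[secondIndex];
//           outTexCoords[0] = texCoords[firstIndex]; outTexCoords[1] = texCoords[secondIndex];
//           outNormal = normals[firstIndex];
//           return &tangents[firstIndex];
//       });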
};


@@ -370,6 +370,9 @@
"priority": {
"tooltip": "The priority of the material, where a larger number means higher priority. Original materials = 0."
},
"materialMappingMode": {
"tooltip": "How the material is mapped to the entity. If set to \"UV space\", then the material will be applied with the target entity's UV coordinates. If set to \"3D Projected\", then the 3D transform of the material entity will be used."
},
"materialMappingPos": {
"tooltip": "The offset position of the bottom left of the material within the parent's UV space."
},
@@ -379,6 +382,9 @@
"materialMappingRot": {
"tooltip": "How much to rotate the material within the parent's UV-space, in degrees."
},
"materialRepeat": {
"tooltip": "If enabled, the material will repeat, otherwise it will clamp."
},
"followCamera": {
"tooltip": "If enabled, the grid is always visible even as the camera moves to another position."
},


@@ -0,0 +1,73 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
version="1.1"
id="Layer_1"
x="0px"
y="0px"
viewBox="0 0 512 512"
style="enable-background:new 0 0 512 512;"
xml:space="preserve"
inkscape:version="0.91 r13725"
sodipodi:docname="icon-zone.svg"><metadata
id="metadata4211"><rdf:RDF><cc:Work
rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" /></cc:Work></rdf:RDF></metadata><defs
id="defs4209" /><sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="2560"
inkscape:window-height="1377"
id="namedview4207"
showgrid="false"
inkscape:zoom="2"
inkscape:cx="228.01796"
inkscape:cy="376.88605"
inkscape:window-x="-8"
inkscape:window-y="-8"
inkscape:window-maximized="1"
inkscape:current-layer="g4191" /><style
type="text/css"
id="style4189">
.st0{fill:none;stroke:#000000;stroke-width:18;stroke-miterlimit:10;}
.st1{fill:none;stroke:#000000;stroke-width:18;stroke-linecap:round;stroke-miterlimit:10;}
.st2{fill:none;stroke:#000000;stroke-width:19;stroke-linecap:round;stroke-miterlimit:10;}
</style><g
id="g4191"><path
d="M 380.7,139.8 C 378,133.2 371.5,129 364.4,129 l -72.4,0 0,34 29.9,0 -162.9,163.1 0,-36.1 -36,0 0,79 0.006,0.024 c 0.1,2 0.5,4.1 1.3,5.9 C 127.00564,381.52397 134.7,386 141.8,386 l 83.2,0 0,-35 -40.8,0 161.8,-161.3 0,25.3 36.125,0.125 -0.19692,-62.65758 C 382.08839,148.14645 382.4,144 380.7,139.8 Z"
id="path4193"
style="fill:#333333;fill-opacity:1"
inkscape:connector-curvature="0"
sodipodi:nodetypes="cscccccccccscccccccc" /><path
d="M338.4,437.6c0-12,9.7-21.7,21.7-21.7l0,0c12,0,21.7,9.7,21.7,21.7l0,0c0,12-9.7,21.7-21.7,21.7l0,0 C348.1,459.3,338.4,449.6,338.4,437.6z M266.9,437.6c0-12,9.7-21.7,21.7-21.7l0,0c12,0,21.7,9.7,21.7,21.7l0,0 c0,12-9.7,21.7-21.7,21.7l0,0C276.6,459.3,266.9,449.6,266.9,437.6z M195.4,437.6c0-12,9.7-21.7,21.7-21.7l0,0 c12,0,21.7,9.7,21.7,21.7l0,0c0,12-9.7,21.7-21.7,21.7l0,0C205.1,459.3,195.4,449.6,195.4,437.6z M123.9,437.6 c0-12,9.7-21.7,21.7-21.7l0,0c12,0,21.7,9.7,21.7,21.7l0,0c0,12-9.7,21.7-21.7,21.7l0,0C133.6,459.3,123.9,449.6,123.9,437.6z"
id="path4195"
style="fill:#333333;fill-opacity:1" /><path
d="M74.1,459.3c-5.7,0-11.3-2.3-15.4-6.4c-4-4-6.4-9.6-6.4-15.3c0-5.7,2.3-11.3,6.4-15.3c4-4,9.6-6.4,15.4-6.4 c5.7,0,11.3,2.3,15.3,6.4c4,4,6.4,9.6,6.4,15.3c0,5.7-2.3,11.3-6.4,15.3C85.4,457,79.8,459.3,74.1,459.3z"
id="path4197"
style="fill:#333333;fill-opacity:1" /><path
d="M52.4,366.1c0-12,9.7-21.7,21.7-21.7l0,0c12,0,21.7,9.7,21.7,21.7l0,0c0,12-9.7,21.7-21.7,21.7l0,0 C62.1,387.8,52.4,378,52.4,366.1z M52.4,294.6c0-12,9.7-21.7,21.7-21.7l0,0c12,0,21.7,9.7,21.7,21.7l0,0c0,12-9.7,21.7-21.7,21.7 l0,0C62.1,316.3,52.4,306.5,52.4,294.6z M52.4,223.1c0-12,9.7-21.7,21.7-21.7l0,0c12,0,21.7,9.7,21.7,21.7l0,0 c0,12-9.7,21.7-21.7,21.7l0,0C62.1,244.8,52.4,235,52.4,223.1z M52.4,151.5c0-12,9.7-21.7,21.7-21.7l0,0c12,0,21.7,9.7,21.7,21.7 l0,0c0,12-9.7,21.7-21.7,21.7l0,0C62.1,173.2,52.4,163.5,52.4,151.5z"
id="path4199"
style="fill:#333333;fill-opacity:1" /><path
d="M338.4,80c0-12,9.7-21.7,21.7-21.7l0,0c12,0,21.7,9.7,21.7,21.7l0,0c0,12-9.7,21.7-21.7,21.7l0,0 C348.1,101.7,338.4,92,338.4,80z M266.9,80c0-12,9.7-21.7,21.7-21.7l0,0c12,0,21.7,9.7,21.7,21.7l0,0c0,12-9.7,21.7-21.7,21.7l0,0 C276.6,101.7,266.9,92,266.9,80z M195.4,80c0-12,9.7-21.7,21.7-21.7l0,0c12,0,21.7,9.7,21.7,21.7l0,0c0,12-9.7,21.7-21.7,21.7l0,0 C205.1,101.7,195.4,92,195.4,80z M123.9,80c0-12,9.7-21.7,21.7-21.7l0,0c12,0,21.7,9.7,21.7,21.7l0,0c0,12-9.7,21.7-21.7,21.7l0,0 C133.6,101.7,123.9,92,123.9,80z"
id="path4201"
style="fill:#333333;fill-opacity:1" /><path
d="M431.6,101.7c-5.7,0-11.3-2.3-15.3-6.4c-4-4-6.4-9.6-6.4-15.3c0-5.7,2.3-11.3,6.4-15.3c4-4,9.6-6.4,15.3-6.4 s11.3,2.3,15.3,6.4c4,4,6.4,9.6,6.4,15.3c0,5.7-2.3,11.3-6.4,15.3C442.9,99.4,437.4,101.7,431.6,101.7z"
id="path4203"
style="fill:#333333;fill-opacity:1" /><path
d="M409.9,366.1c0-12,9.7-21.7,21.7-21.7l0,0c12,0,21.7,9.7,21.7,21.7l0,0c0,12-9.7,21.7-21.7,21.7l0,0 C419.7,387.8,409.9,378.1,409.9,366.1z M409.9,294.6c0-12,9.7-21.7,21.7-21.7l0,0c12,0,21.7,9.7,21.7,21.7l0,0 c0,12-9.7,21.7-21.7,21.7l0,0C419.7,316.3,409.9,306.5,409.9,294.6z M409.9,223.1c0-12,9.7-21.7,21.7-21.7l0,0 c12,0,21.7,9.7,21.7,21.7l0,0c0,12-9.7,21.7-21.7,21.7l0,0C419.7,244.7,409.9,235,409.9,223.1z M409.9,151.5 c0-12,9.7-21.7,21.7-21.7l0,0c12,0,21.7,9.7,21.7,21.7l0,0c0,12-9.7,21.7-21.7,21.7l0,0C419.7,173.2,409.9,163.5,409.9,151.5z"
id="path4205"
style="fill:#333333;fill-opacity:1" /></g></svg>



@@ -0,0 +1,13 @@
{
"materialVersion": 1,
"materials": {
"albedo": [
0.0,
0.0,
7.0
],
"unlit": true,
"opacity": 0.4,
"albedoMap": "GridPattern.png"
}
}

Binary file not shown.



@@ -82,13 +82,18 @@ var selectionManager = SelectionManager;
var PARTICLE_SYSTEM_URL = Script.resolvePath("assets/images/icon-particles.svg");
var POINT_LIGHT_URL = Script.resolvePath("assets/images/icon-point-light.svg");
var SPOT_LIGHT_URL = Script.resolvePath("assets/images/icon-spot-light.svg");
var ZONE_URL = Script.resolvePath("assets/images/icon-zone.svg");
var entityIconOverlayManager = new EntityIconOverlayManager(['Light', 'ParticleEffect'], function(entityID) {
var entityIconOverlayManager = new EntityIconOverlayManager(['Light', 'ParticleEffect', 'Zone'], function(entityID) {
var properties = Entities.getEntityProperties(entityID, ['type', 'isSpotlight']);
if (properties.type === 'Light') {
return {
url: properties.isSpotlight ? SPOT_LIGHT_URL : POINT_LIGHT_URL,
};
} else if (properties.type === 'Zone') {
return {
url: ZONE_URL,
};
} else {
return {
url: PARTICLE_SYSTEM_URL,
@@ -106,11 +111,15 @@ var gridTool = new GridTool({
});
gridTool.setVisible(false);
var EntityShapeVisualizer = Script.require('./modules/entityShapeVisualizer.js');
var entityShapeVisualizer = new EntityShapeVisualizer(["Zone"]);
var entityListTool = new EntityListTool(shouldUseEditTabletApp);
selectionManager.addEventListener(function () {
selectionDisplay.updateHandles();
entityIconOverlayManager.updatePositions();
entityShapeVisualizer.setEntities(selectionManager.selections);
});
var DEGREES_TO_RADIANS = Math.PI / 180.0;
@@ -836,7 +845,7 @@ var toolBar = (function () {
dialogWindow.fromQml.connect(fromQml);
}
};
};
}
addButton("newModelButton", createNewEntityDialogButtonCallback("Model"));
@@ -1492,6 +1501,7 @@ Script.scriptEnding.connect(function () {
cleanupModelMenus();
tooltip.cleanup();
selectionDisplay.cleanup();
entityShapeVisualizer.cleanup();
Entities.setLightsArePickable(originalLightsArePickable);
Overlays.deleteOverlay(importingSVOImageOverlay);


@@ -3798,6 +3798,11 @@ function loaded() {
if (FILTERED_NODE_NAMES.includes(keyUpEvent.target.nodeName)) {
return;
}
if (elUserDataEditor.contains(keyUpEvent.target) || elMaterialDataEditor.contains(keyUpEvent.target)) {
return;
}
let {code, key, keyCode, altKey, ctrlKey, metaKey, shiftKey} = keyUpEvent;
let controlKey = window.navigator.platform.startsWith("Mac") ? metaKey : ctrlKey;


@@ -0,0 +1,255 @@
"use strict";
// entityShapeVisualizer.js
//
// Created by Thijs Wenker on 1/11/19
//
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var SHAPETYPE_TO_SHAPE = {
"box": "Cube",
"ellipsoid": "Sphere",
"cylinder-y": "Cylinder",
};
var REQUESTED_ENTITY_SHAPE_PROPERTIES = [
'type', 'shapeType', 'compoundShapeURL', 'localDimensions'
];
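// Maps an entity's shape-related properties to the properties of the local visualization entity,
// e.g. a Zone with shapeType "box" is visualized as a "Cube" Shape entity with the same localDimensions.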
function getEntityShapePropertiesForType(properties) {
switch (properties.type) {
case "Zone":
if (SHAPETYPE_TO_SHAPE[properties.shapeType]) {
return {
type: "Shape",
shape: SHAPETYPE_TO_SHAPE[properties.shapeType],
localDimensions: properties.localDimensions
};
} else if (properties.shapeType === "compound") {
return {
type: "Model",
modelURL: properties.compoundShapeURL,
localDimensions: properties.localDimensions
};
} else if (properties.shapeType === "sphere") {
var sphereDiameter = Math.max(properties.localDimensions.x, properties.localDimensions.y,
properties.localDimensions.z);
return {
type: "Sphere",
modelURL: properties.compoundShapeURL,
localDimensions: {x: sphereDiameter, y: sphereDiameter, z: sphereDiameter}
};
}
break;
}
// Default properties
return {
type: "Shape",
shape: "Cube",
localDimensions: properties.localDimensions
};
}
function deepEqual(a, b) {
if (a === b) {
return true;
}
if (typeof(a) !== "object" || typeof(b) !== "object") {
return false;
}
if (Object.keys(a).length !== Object.keys(b).length) {
return false;
}
for (var property in a) {
if (!a.hasOwnProperty(property)) {
continue;
}
if (!b.hasOwnProperty(property)) {
return false;
}
if (!deepEqual(a[property], b[property])) {
return false;
}
}
return true;
}
/**
* Returns an array of property names which are different in comparison.
* @param propertiesA
* @param propertiesB
* @returns {Array} - array of different property names
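* @example
* // compareEntityProperties({a: 1, b: 2}, {a: 1, b: 3}) returns ["b"]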
*/
function compareEntityProperties(propertiesA, propertiesB) {
var differentProperties = [],
property;
for (property in propertiesA) {
if (!propertiesA.hasOwnProperty(property)) {
continue;
}
if (!propertiesB.hasOwnProperty(property) || !deepEqual(propertiesA[property], propertiesB[property])) {
differentProperties.push(property);
}
}
for (property in propertiesB) {
if (!propertiesB.hasOwnProperty(property)) {
continue;
}
if (!propertiesA.hasOwnProperty(property)) {
differentProperties.push(property);
}
}
return differentProperties;
}
function deepCopy(v) {
return JSON.parse(JSON.stringify(v));
}
function EntityShape(entityID) {
this.entityID = entityID;
var propertiesForType = getEntityShapePropertiesForType(Entities.getEntityProperties(entityID, REQUESTED_ENTITY_SHAPE_PROPERTIES));
this.previousPropertiesForType = propertiesForType;
this.initialize(propertiesForType);
}
EntityShape.prototype = {
initialize: function(properties) {
// Create new instance of JS object:
var overlayProperties = deepCopy(properties);
overlayProperties.localPosition = Vec3.ZERO;
overlayProperties.localRotation = Quat.IDENTITY;
overlayProperties.canCastShadows = false;
overlayProperties.parentID = this.entityID;
overlayProperties.collisionless = true;
this.entity = Entities.addEntity(overlayProperties, "local");
var PROJECTED_MATERIALS = false;
this.materialEntity = Entities.addEntity({
type: "Material",
localPosition: Vec3.ZERO,
localRotation: Quat.IDENTITY,
localDimensions: properties.localDimensions,
parentID: this.entity,
priority: 1,
materialMappingMode: PROJECTED_MATERIALS ? "projected" : "uv",
materialURL: Script.resolvePath("../assets/images/materials/GridPattern.json"),
}, "local");
},
update: function() {
var propertiesForType = getEntityShapePropertiesForType(Entities.getEntityProperties(this.entityID, REQUESTED_ENTITY_SHAPE_PROPERTIES));
var difference = compareEntityProperties(propertiesForType, this.previousPropertiesForType);
if (deepEqual(difference, ['localDimensions'])) {
this.previousPropertiesForType = propertiesForType;
Entities.editEntity(this.entity, {
localDimensions: propertiesForType.localDimensions,
});
} else if (difference.length > 0) {
this.previousPropertiesForType = propertiesForType;
this.clear();
this.initialize(propertiesForType);
}
},
clear: function() {
Entities.deleteEntity(this.materialEntity);
Entities.deleteEntity(this.entity);
}
};
function EntityShapeVisualizer(visualizedTypes) {
this.acceptedEntities = [];
this.ignoredEntities = [];
this.entityShapes = {};
this.visualizedTypes = visualizedTypes;
}
EntityShapeVisualizer.prototype = {
addEntity: function(entityID) {
if (this.entityShapes[entityID]) {
return;
}
this.entityShapes[entityID] = new EntityShape(entityID);
},
updateEntity: function(entityID) {
if (!this.entityShapes[entityID]) {
return;
}
this.entityShapes[entityID].update();
},
removeEntity: function(entityID) {
if (!this.entityShapes[entityID]) {
return;
}
this.entityShapes[entityID].clear();
delete this.entityShapes[entityID];
},
cleanup: function() {
Object.keys(this.entityShapes).forEach(function(entityID) {
this.entityShapes[entityID].clear();
}, this);
this.entityShapes = {};
},
setEntities: function(entities) {
var qualifiedEntities = entities.filter(function(entityID) {
if (this.acceptedEntities.indexOf(entityID) !== -1) {
return true;
}
if (this.ignoredEntities.indexOf(entityID) !== -1) {
return false;
}
if (this.visualizedTypes.indexOf(Entities.getEntityProperties(entityID, "type").type) !== -1) {
this.acceptedEntities.push(entityID);
return true;
}
this.ignoredEntities.push(entityID);
return false;
}, this);
var newEntries = [];
var updateEntries = [];
var currentEntries = Object.keys(this.entityShapes);
qualifiedEntities.forEach(function(entityID) {
if (currentEntries.indexOf(entityID) !== -1) {
updateEntries.push(entityID);
} else {
newEntries.push(entityID);
}
});
var deleteEntries = currentEntries.filter(function(entityID) {
return updateEntries.indexOf(entityID) === -1;
});
deleteEntries.forEach(function(entityID) {
this.removeEntity(entityID);
}, this);
updateEntries.forEach(function(entityID) {
this.updateEntity(entityID);
}, this);
newEntries.forEach(function(entityID) {
this.addEntity(entityID);
}, this);
}
};
module.exports = EntityShapeVisualizer;