Mirror of https://github.com/lubosz/overte.git (synced 2025-04-24 03:53:52 +02:00)

Commit 58cff374d7: "merged master"
14 changed files with 1123 additions and 1010 deletions

@@ -10,7 +10,14 @@ endif()
 include("${CMAKE_CURRENT_SOURCE_DIR}/cmake/macros/TargetPython.cmake")
 target_python()
 
-if (HIFI_ANDROID )
+# set our OS X deployment target
+# (needs to be set before first project() call and before prebuild.py)
+# Will affect VCPKG dependencies
+if (APPLE)
+  set(ENV{MACOSX_DEPLOYMENT_TARGET} 10.9)
+endif()
+
+if (HIFI_ANDROID)
   execute_process(
     COMMAND ${HIFI_PYTHON_EXEC} ${CMAKE_CURRENT_SOURCE_DIR}/prebuild.py --android ${HIFI_ANDROID_APP} --build-root ${CMAKE_BINARY_DIR}
     WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}

@@ -88,12 +88,10 @@ if (APPLE)
   exec_program(sw_vers ARGS -productVersion OUTPUT_VARIABLE OSX_VERSION)
   string(REGEX MATCH "^[0-9]+\\.[0-9]+" OSX_VERSION ${OSX_VERSION})
   message(STATUS "Detected OS X version = ${OSX_VERSION}")
+  message(STATUS "OS X deployment target = ${CMAKE_OSX_DEPLOYMENT_TARGET}")
 
   set(OSX_SDK "${OSX_VERSION}" CACHE STRING "OS X SDK version to look for inside Xcode bundle or at OSX_SDK_PATH")
 
-  # set our OS X deployment target
-  set(CMAKE_OSX_DEPLOYMENT_TARGET 10.9)
-
   # find the SDK path for the desired SDK
   find_path(
     _OSX_DESIRED_SDK_PATH

@@ -24,6 +24,20 @@ Rectangle {
     property var pushingToTalk: AudioScriptingInterface.pushingToTalk;
     readonly property var userSpeakingLevel: 0.4;
     property bool gated: false;
+
+    Timer {
+        // used to hold the muted warning.
+        id: mutedTimer
+
+        interval: 2000;
+        running: false;
+        repeat: false;
+        property bool isRunning: false;
+        onTriggered: {
+            isRunning = false;
+        }
+    }
+
     Component.onCompleted: {
         AudioScriptingInterface.noiseGateOpened.connect(function() { gated = false; });
         AudioScriptingInterface.noiseGateClosed.connect(function() { gated = true; });

@@ -54,7 +68,17 @@ Rectangle {
     opacity: 0.7;
 
     onLevelChanged: {
-        var rectOpacity = (muted && (level >= userSpeakingLevel)) ? 1.0 : 0.7;
+        var mutedAndSpeaking = (muted && (level >= userSpeakingLevel));
+        if (!mutedTimer.isRunning && !pushToTalk) {
+            if (mutedAndSpeaking) {
+                mutedTimer.start();
+                mutedTimer.isRunning = true;
+                statusText.text = "MUTED";
+            } else {
+                statusText.text = "";
+            }
+        }
+        var rectOpacity = mutedAndSpeaking ? 1.0 : 0.7;
         if (pushToTalk && !pushingToTalk) {
             rectOpacity = (mouseArea.containsMouse) ? 1.0 : 0.7;
         } else if (mouseArea.containsMouse && rectOpacity != 1.0) {

@@ -63,6 +87,10 @@ Rectangle {
         micBar.opacity = rectOpacity;
     }
 
+    onPushToTalkChanged: {
+        statusText.text = pushToTalk ? HMD.active ? "PTT" : "PTT-(T)" : "";
+    }
+
     color: "#00000000";
     border {
         width: mouseArea.containsMouse || mouseArea.containsPress ? 2 : 0;

@@ -190,7 +218,6 @@ Rectangle {
         color: pushToTalk ? (pushingToTalk ? colors.unmutedColor : colors.mutedColor) : (level >= userSpeakingLevel && muted) ? colors.mutedColor : colors.unmutedColor;
         font.bold: true
-
         text: pushToTalk ? (HMD.active ? "PTT" : "PTT-(T)") : (muted ? "MUTED" : "MUTE");
         size: 12;
     }
 }

File diff suppressed because it is too large

@@ -2023,9 +2023,10 @@ void EntityItem::setCollisionMask(uint16_t value) {
 
 void EntityItem::setDynamic(bool value) {
     if (getDynamic() != value) {
+        auto shapeType = getShapeType();
         withWriteLock([&] {
             // dynamic and STATIC_MESH are incompatible so we check for that case
-            if (value && getShapeType() == SHAPE_TYPE_STATIC_MESH) {
+            if (value && shapeType == SHAPE_TYPE_STATIC_MESH) {
                 if (_dynamic) {
                     _dynamic = false;
                     _flags |= Simulation::DIRTY_MOTION_TYPE;

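Note on the setDynamic() change: the shape type is now read once, before the write lock is taken, so the locking getShapeType() accessor is never invoked from inside withWriteLock(). A self-contained sketch of that ordering with simplified, hypothetical types (not the project's actual classes):

#include <shared_mutex>

class EntitySketch {
public:
    void setDynamic(bool value) {
        if (getDynamic() != value) {
            // Read the derived value before taking the write lock: getShapeType()
            // acquires the same (non-recursive) lock for reading, so calling it
            // inside withWriteLock() could deadlock.
            int shapeType = getShapeType();
            withWriteLock([&] {
                _dynamic = value && shapeType != STATIC_MESH;
            });
        }
    }

    bool getDynamic() const {
        std::shared_lock<std::shared_mutex> lock(_lock);
        return _dynamic;
    }

    int getShapeType() const {
        std::shared_lock<std::shared_mutex> lock(_lock);
        return _shapeType;
    }

private:
    template <typename F>
    void withWriteLock(F&& criticalSection) {
        std::unique_lock<std::shared_mutex> lock(_lock);
        criticalSection();
    }

    static constexpr int STATIC_MESH = 1;
    mutable std::shared_mutex _lock;
    bool _dynamic { false };
    int _shapeType { 0 };
};
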
@@ -874,6 +874,7 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::VariantHash&
         joint.isSkeletonJoint = false;
         hfmModel.joints.push_back(joint);
     }
+    hfmModel.shapeVertices.resize(hfmModel.joints.size());
 
 
     // Build skeleton

@@ -1243,6 +1244,13 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const hifi::VariantHash&
             }
         }
 
+        for (int clusterIndex = 0; clusterIndex < mesh.clusters.size() - 1; clusterIndex++) {
+            ShapeVertices& points = hfmModel.shapeVertices.at(clusterIndex);
+            for (glm::vec3 vertex : mesh.vertices) {
+                points.push_back(vertex);
+            }
+        }
+
         mesh.meshExtents.reset();
         foreach(const glm::vec3& vertex, mesh.vertices) {
             mesh.meshExtents.addPoint(vertex);

@@ -187,38 +187,43 @@ void Midi::MidiSetup() {
 
     MIDIINCAPS incaps;
     for (unsigned int i = 0; i < midiInGetNumDevs(); i++) {
-        midiInGetDevCaps(i, &incaps, sizeof(MIDIINCAPS));
-
-        bool found = false;
-        for (int j = 0; j < midiInExclude.size(); j++) {
-            if (midiInExclude[j].toStdString().compare(incaps.szPname) == 0) {
-                found = true;
-                break;
-            }
-        }
-        if (!found) { // EXCLUDE AN INPUT BY NAME
-            HMIDIIN tmphin;
-            midiInOpen(&tmphin, i, (DWORD_PTR)MidiInProc, NULL, CALLBACK_FUNCTION);
-            midiInStart(tmphin);
-            midihin.push_back(tmphin);
-        }
+        if (MMSYSERR_NOERROR == midiInGetDevCaps(i, &incaps, sizeof(MIDIINCAPS))) {
+
+            bool found = false;
+            for (int j = 0; j < midiInExclude.size(); j++) {
+                if (midiInExclude[j].toStdString().compare(incaps.szPname) == 0) {
+                    found = true;
+                    break;
+                }
+            }
+            if (!found) { // EXCLUDE AN INPUT BY NAME
+                HMIDIIN tmphin;
+                if (MMSYSERR_NOERROR == midiInOpen(&tmphin, i, (DWORD_PTR)MidiInProc, NULL, CALLBACK_FUNCTION)) {
+                    if (MMSYSERR_NOERROR == midiInStart(tmphin)) {
+                        midihin.push_back(tmphin);
+                    }
+                }
+            }
+        }
     }
 
     MIDIOUTCAPS outcaps;
     for (unsigned int i = 0; i < midiOutGetNumDevs(); i++) {
-        midiOutGetDevCaps(i, &outcaps, sizeof(MIDIINCAPS));
-
-        bool found = false;
-        for (int j = 0; j < midiOutExclude.size(); j++) {
-            if (midiOutExclude[j].toStdString().compare(outcaps.szPname) == 0) {
-                found = true;
-                break;
-            }
-        }
-        if (!found) { // EXCLUDE AN OUTPUT BY NAME
-            HMIDIOUT tmphout;
-            midiOutOpen(&tmphout, i, (DWORD_PTR)MidiOutProc, NULL, CALLBACK_FUNCTION);
-            midihout.push_back(tmphout);
-        }
+        if (MMSYSERR_NOERROR == midiOutGetDevCaps(i, &outcaps, sizeof(MIDIOUTCAPS))) {
+
+            bool found = false;
+            for (int j = 0; j < midiOutExclude.size(); j++) {
+                if (midiOutExclude[j].toStdString().compare(outcaps.szPname) == 0) {
+                    found = true;
+                    break;
+                }
+            }
+            if (!found) { // EXCLUDE AN OUTPUT BY NAME
+                HMIDIOUT tmphout;
+                if (MMSYSERR_NOERROR == midiOutOpen(&tmphout, i, (DWORD_PTR)MidiOutProc, NULL, CALLBACK_FUNCTION)) {
+                    midihout.push_back(tmphout);
+                }
+            }
+        }
     }
 

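The reworked MidiSetup() only keeps a device handle when each WinMM call reports MMSYSERR_NOERROR, and it fixes the sizeof(MIDIINCAPS) copy-paste in the output-device query. A condensed, self-contained sketch of that open-and-verify pattern; the callback name and the close-on-failure are illustrative additions, not part of the patch:

#include <windows.h>
#include <mmsystem.h>
#include <vector>
// link against winmm.lib

// Hypothetical callback; the real project uses Midi::MidiInProc.
static void CALLBACK midiInProc(HMIDIIN, UINT, DWORD_PTR, DWORD_PTR, DWORD_PTR) {}

std::vector<HMIDIIN> openAllMidiInputs() {
    std::vector<HMIDIIN> handles;
    MIDIINCAPS caps;
    for (UINT i = 0; i < midiInGetNumDevs(); i++) {
        // Skip devices whose capabilities can't even be queried.
        if (midiInGetDevCaps(i, &caps, sizeof(MIDIINCAPS)) != MMSYSERR_NOERROR) {
            continue;
        }
        HMIDIIN handle = nullptr;
        // Only keep the handle when both open and start succeed; otherwise release it.
        if (midiInOpen(&handle, i, (DWORD_PTR)midiInProc, 0, CALLBACK_FUNCTION) == MMSYSERR_NOERROR) {
            if (midiInStart(handle) == MMSYSERR_NOERROR) {
                handles.push_back(handle);
            } else {
                midiInClose(handle);
            }
        }
    }
    return handles;
}
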
@@ -269,6 +269,7 @@ enum class EntityVersion : PacketVersion {
     CertificateTypeProperty,
     DisableWebMedia,
     ParticleShapeType,
+    ParticleShapeTypeDeadlockFix,
 
     // Add new versions above here
     NUM_PACKET_TYPE,

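ParticleShapeTypeDeadlockFix is appended just above the sentinel, so existing values keep their wire encoding and the newest real entry becomes the current protocol version. A hypothetical mirror of that append-only versioning pattern and how a reader might gate on it (only the two ParticleShapeType values come from the patch; everything else is illustrative):

#include <cstdint>

enum class EntityVersionSketch : uint8_t {
    ParticleShapeType = 1,
    ParticleShapeTypeDeadlockFix,

    // Add new versions above here
    NUM_PACKET_TYPE,
    LAST_PACKET_TYPE = NUM_PACKET_TYPE - 1
};

// A decoder can gate a format change on the version the sender advertised.
inline bool senderSupports(EntityVersionSketch senderVersion, EntityVersionSketch feature) {
    return senderVersion >= feature;
}
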
@@ -31,6 +31,10 @@ bool RenderEventHandler::event(QEvent* e) {
             onRender();
             return true;
 
+        case OffscreenEvent::RenderSync:
+            onRenderSync();
+            return true;
+
         case OffscreenEvent::Initialize:
             onInitalize();
             return true;

@@ -106,6 +110,14 @@ void RenderEventHandler::resize() {
 }
 
 void RenderEventHandler::onRender() {
+    qmlRender(false);
+}
+
+void RenderEventHandler::onRenderSync() {
+    qmlRender(true);
+}
+
+void RenderEventHandler::qmlRender(bool sceneGraphSync) {
     if (_shared->isQuit()) {
         return;
     }

@@ -117,7 +129,8 @@ void RenderEventHandler::onRender() {
     PROFILE_RANGE(render_qml_gl, __FUNCTION__);
 
     gl::globalLock();
-    if (!_shared->preRender()) {
+    if (!_shared->preRender(sceneGraphSync)) {
         gl::globalRelease();
         return;
     }
+

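Taken together, the RenderEventHandler hunks split rendering into Render and RenderSync events that both funnel into qmlRender(bool sceneGraphSync); only the sync variant is allowed to touch the QML scene graph and release the waiting main thread. A self-contained sketch of that dispatch, assuming the surrounding Qt types (the class and bodies are simplified stand-ins, not the project's code):

#include <QtCore/QEvent>
#include <QtCore/QObject>

class OffscreenEventSketch : public QEvent {
public:
    enum Type { Initialize = QEvent::User + 1, Render, RenderSync, Quit };
    explicit OffscreenEventSketch(Type type) : QEvent(static_cast<QEvent::Type>(type)) {}
};

class RenderHandlerSketch : public QObject {
protected:
    bool event(QEvent* e) override {
        switch (static_cast<int>(e->type())) {
            case OffscreenEventSketch::Render:
                qmlRender(false);      // ordinary frame, no scene graph sync
                return true;
            case OffscreenEventSketch::RenderSync:
                qmlRender(true);       // frame that also syncs the QML scene graph
                return true;
            default:
                return QObject::event(e);
        }
    }

private:
    void qmlRender(bool sceneGraphSync) {
        // In the real handler, preRender(sceneGraphSync) bails out early (and
        // wakes the waiting main thread) when paused or when no sync is pending.
        (void)sceneGraphSync;
    }
};
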
@@ -25,6 +25,7 @@ public:
     enum Type {
         Initialize = QEvent::User + 1,
         Render,
+        RenderSync,
         Quit
     };
 

@@ -45,6 +46,8 @@ private:
     void onInitalize();
     void resize();
     void onRender();
+    void onRenderSync();
+    void qmlRender(bool sceneGraphSync);
     void onQuit();
 
     SharedObject* const _shared;

@@ -59,4 +62,4 @@ private:
 
 }}} // namespace hifi::qml::impl
 
-#endif
+#endif

@@ -344,17 +344,17 @@ void SharedObject::setSize(const QSize& size) {
 #endif
 }
 
-bool SharedObject::preRender() {
+bool SharedObject::preRender(bool sceneGraphSync) {
 #ifndef DISABLE_QML
     QMutexLocker lock(&_mutex);
     if (_paused) {
-        if (_syncRequested) {
+        if (sceneGraphSync) {
             wake();
         }
         return false;
     }
 
-    if (_syncRequested) {
+    if (sceneGraphSync) {
         bool syncResult = true;
         if (!nsightActive()) {
             PROFILE_RANGE(render_qml_gl, "sync")

@@ -364,7 +364,6 @@ bool SharedObject::preRender() {
         if (!syncResult) {
             return false;
         }
-        _syncRequested = false;
     }
 #endif
 

@@ -475,9 +474,10 @@ void SharedObject::onRender() {
         lock.unlock();
         _renderControl->polishItems();
         lock.relock();
-        QCoreApplication::postEvent(_renderObject, new OffscreenEvent(OffscreenEvent::Render));
+        QCoreApplication::postEvent(_renderObject, new OffscreenEvent(OffscreenEvent::RenderSync));
+        // sync and render request, main and render threads must be synchronized
         wait();
         _syncRequested = false;
     } else {
         QCoreApplication::postEvent(_renderObject, new OffscreenEvent(OffscreenEvent::Render));
     }

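On the main-thread side, a pending sync now posts RenderSync instead of Render and then blocks in wait() until the render thread has finished the scene-graph sync and woken it; preRender(true) on the render thread calls wake() even when paused so the main thread is never left parked. A stripped-down sketch of that handshake with QWaitCondition, using illustrative names rather than the actual SharedObject members:

#include <QtCore/QMutex>
#include <QtCore/QWaitCondition>

class SyncPointSketch {
public:
    // Main thread: ask for a synced frame and block until the render thread is done.
    void requestSyncAndWait() {
        QMutexLocker lock(&_mutex);
        _syncRequested = true;
        // postEvent(RenderSync) to the render thread would go here in the real code.
        _condition.wait(&_mutex);
        _syncRequested = false;
    }

    // Render thread: perform the scene graph sync, then release the main thread.
    void renderThreadSync() {
        QMutexLocker lock(&_mutex);
        if (_syncRequested) {
            // QQuickRenderControl::sync() would run here while the main thread is parked.
            _condition.wakeOne();
        }
    }

private:
    QMutex _mutex;
    QWaitCondition _condition;
    bool _syncRequested { false };
};
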
@@ -71,7 +71,7 @@ public:
 private:
     bool event(QEvent* e) override;
 
-    bool preRender();
+    bool preRender(bool sceneGraphSync);
     void shutdownRendering(OffscreenGLCanvas& canvas, const QSize& size);
     // Called by the render event handler, from the render thread
     void initializeRenderControl(QOpenGLContext* context);

@@ -1747,9 +1747,9 @@ void Blender::run() {
     if (_model && _model->isLoaded()) {
         DETAILED_PROFILE_RANGE_EX(simulation_animation, __FUNCTION__, 0xFFFF0000, 0, { { "url", _model->getURL().toString() } });
         int offset = 0;
-        auto meshes = _model->getHFMModel().meshes;
+        const auto& meshes = _model->getHFMModel().meshes;
         int meshIndex = 0;
-        foreach(const HFMMesh& mesh, meshes) {
+        for(const HFMMesh& mesh : meshes) {
             auto modelMeshBlendshapeOffsets = _model->_blendshapeOffsets.find(meshIndex++);
             if (mesh.blendshapes.isEmpty() || modelMeshBlendshapeOffsets == _model->_blendshapeOffsets.end()) {
                 // Not blendshaped or not initialized

@@ -1780,33 +1780,30 @@ void Blender::run() {
                 float normalCoefficient = vertexCoefficient * NORMAL_COEFFICIENT_SCALE;
                 const HFMBlendshape& blendshape = mesh.blendshapes.at(i);
 
-                tbb::parallel_for(tbb::blocked_range<int>(0, blendshape.indices.size()), [&](const tbb::blocked_range<int>& range) {
-                    for (auto j = range.begin(); j < range.end(); j++) {
-                        int index = blendshape.indices.at(j);
-
-                        auto& currentBlendshapeOffset = unpackedBlendshapeOffsets[index];
-                        currentBlendshapeOffset.positionOffset += blendshape.vertices.at(j) * vertexCoefficient;
-
-                        currentBlendshapeOffset.normalOffset += blendshape.normals.at(j) * normalCoefficient;
-                        if (j < blendshape.tangents.size()) {
-                            currentBlendshapeOffset.tangentOffset += blendshape.tangents.at(j) * normalCoefficient;
-                        }
-                    }
-                });
+                for (int j = 0; j < blendshape.indices.size(); ++j) {
+                    int index = blendshape.indices.at(j);
+
+                    auto& currentBlendshapeOffset = unpackedBlendshapeOffsets[index];
+                    currentBlendshapeOffset.positionOffset += blendshape.vertices.at(j) * vertexCoefficient;
+
+                    currentBlendshapeOffset.normalOffset += blendshape.normals.at(j) * normalCoefficient;
+                    if (j < blendshape.tangents.size()) {
+                        currentBlendshapeOffset.tangentOffset += blendshape.tangents.at(j) * normalCoefficient;
+                    }
+                }
             }
 
             // Blendshape offsets are generrated, now let's pack it on its way to gpu
-            tbb::parallel_for(tbb::blocked_range<int>(0, (int) unpackedBlendshapeOffsets.size()), [&](const tbb::blocked_range<int>& range) {
-                auto unpacked = unpackedBlendshapeOffsets.data() + range.begin();
-                auto packed = meshBlendshapeOffsets + range.begin();
-                for (auto j = range.begin(); j < range.end(); j++) {
-                    // FIXME it feels like we could be more effectively using SIMD here
-                    packBlendshapeOffsetTo_Pos_F32_3xSN10_Nor_3xSN10_Tan_3xSN10((*packed).packedPosNorTan, (*unpacked));
-
-                    unpacked++;
-                    packed++;
-                }
-            });
+            // FIXME it feels like we could be more effectively using SIMD here
+            {
+                auto unpacked = unpackedBlendshapeOffsets.data();
+                auto packed = meshBlendshapeOffsets;
+                for (int j = 0; j < (int)unpackedBlendshapeOffsets.size(); ++j) {
+                    packBlendshapeOffsetTo_Pos_F32_3xSN10_Nor_3xSN10_Tan_3xSN10((*packed).packedPosNorTan, (*unpacked));
+
+                    ++unpacked;
+                    ++packed;
+                }
+            }
         }
         // post the result to the ModelBlender, which will dispatch to the model if still alive

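The merge drops the tbb::parallel_for variants in favour of plain loops that first accumulate per-vertex blendshape deltas into an unpacked buffer and then pack them for the GPU. A simplified, self-contained sketch of the accumulate step with stand-in types; the real code uses HFMBlendshape and packBlendshapeOffsetTo_Pos_F32_3xSN10_Nor_3xSN10_Tan_3xSN10() for the packed pos/normal/tangent format:

#include <glm/glm.hpp>
#include <vector>

// Stand-ins for the engine types used by Blender::run().
struct BlendshapeSketch {
    std::vector<int> indices;          // sparse vertex indices this shape affects
    std::vector<glm::vec3> vertices;   // position deltas, parallel to indices
    std::vector<glm::vec3> normals;    // normal deltas, parallel to indices
};

struct UnpackedOffsetSketch {
    glm::vec3 positionOffset { 0.0f };
    glm::vec3 normalOffset { 0.0f };
};

void accumulateBlendshape(const BlendshapeSketch& blendshape, float coefficient,
                          std::vector<UnpackedOffsetSketch>& unpacked) {
    // Each blendshape stores sparse deltas; scale them by the animation
    // coefficient and accumulate into the per-vertex offset buffer.
    for (size_t j = 0; j < blendshape.indices.size(); ++j) {
        int index = blendshape.indices[j];
        unpacked[index].positionOffset += blendshape.vertices[j] * coefficient;
        unpacked[index].normalOffset += blendshape.normals[j] * coefficient;
    }
}
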
@@ -13,6 +13,7 @@
 #include <unordered_set>
 #include <string>
 #include <vector>
+#include <stdexcept>
 
 #include <QtCore/QtGlobal>
 