Merge branch 'master' of https://github.com/highfidelity/hifi into red

commit 9c5670fc0f
18 changed files with 111 additions and 189 deletions

@@ -98,3 +98,7 @@ If you need to debug Interface, you can run interface from within Visual Studio
* In the Solution Explorer, right click interface and click Set as StartUp Project
* Set the "Working Directory" for the Interface debugging sessions to the Debug output directory so that your application can load resources. Do this: right click interface and click Properties, choose Debugging from Configuration Properties, set Working Directory to .\Debug
* Now you can run and debug interface through Visual Studio

For better performance when running debug builds, set the environment variable ```_NO_DEBUG_HEAP``` to ```1```

http://preshing.com/20110717/the-windows-heap-is-slow-when-launched-from-the-debugger/

@@ -982,7 +982,7 @@
</div>

<div class="property">
<div class="label">Href</div>
<div class="label">Href - Hifi://address</div>
<div class="value">
<input id="property-hyperlink-href" class="url">
</div>

@@ -1515,7 +1515,6 @@ SelectionDisplay = (function() {
}

that.updateRotationHandles();
that.highlightSelectable();

var rotation, dimensions, position, registrationPoint;

@@ -108,7 +108,7 @@ Window {
}
}

WebEngineView {
WebView {
id: webview
url: "http://highfidelity.com"
anchors.top: buttons.bottom

@@ -124,6 +124,12 @@ Window {
onIconChanged: {
console.log("New icon: " + icon)
}

profile: WebEngineProfile {
id: webviewProfile
storageName: "qmlUserBrowser"
}

}
} // item

@@ -62,6 +62,13 @@ Window {

sections.push(sectionBuilder.createObject(prefControls, { name: category }));
}

if (sections.length) {
sections[0].expanded = true;
if (sections.length === 1) {
sections[0].collapsable = false
}
}
}

Flickable {

@@ -7,6 +7,7 @@ import "."

Preference {
id: root
property bool collapsable: true
property bool expanded: false
property string name: "Header"
property real spacing: 8

@@ -42,7 +43,10 @@ Preference {

VrControls.FontAwesome {
id: toggle
width: root.collapsable ? height : 0
anchors { left: parent.left; top: parent.top; margins: root.spacing }
visible: root.collapsable
enabled: root.collapsable
rotation: root.expanded ? 0 : -90
text: "\uf078"
Behavior on rotation { PropertyAnimation {} }

@@ -1,112 +0,0 @@
//
//  AudioEditBuffer.h
//  hifi
//
//  Created by Craig Hansen-Sturm on 8/29/14.
//  Copyright 2014 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_AudioEditBuffer_h
#define hifi_AudioEditBuffer_h

template< typename T >
class AudioEditBuffer : public AudioFrameBuffer<T> {

public:

    AudioEditBuffer();
    AudioEditBuffer(const uint32_t channelCount, const uint32_t frameCount);
    ~AudioEditBuffer();

    bool getZeroCrossing(uint32_t start, bool direction, float32_t epsilon, uint32_t& zero);

    void linearFade(uint32_t start, uint32_t stop, bool increasing);
    void exponentialFade(uint32_t start, uint32_t stop, bool increasing);
};

template< typename T >
AudioEditBuffer<T>::AudioEditBuffer() :
    AudioFrameBuffer<T>() {
}

template< typename T >
AudioEditBuffer<T>::AudioEditBuffer(const uint32_t channelCount, const uint32_t frameCount) :
    AudioFrameBuffer<T>(channelCount, frameCount) {
}

template< typename T >
AudioEditBuffer<T>::~AudioEditBuffer() {
}

template< typename T >
inline bool AudioEditBuffer<T>::getZeroCrossing(uint32_t start, bool direction, float32_t epsilon, uint32_t& zero) {

    zero = this->_frameCount;

    if (direction) { // scan from the left
        if (start < this->_frameCount) {
            for (uint32_t i = start; i < this->_frameCount; ++i) {
                for (uint32_t j = 0; j < this->_channelCount; ++j) {
                    if (this->_frameBuffer[j][i] >= -epsilon && this->_frameBuffer[j][i] <= epsilon) {
                        zero = i;
                        return true;
                    }
                }
            }
        }
    } else { // scan from the right
        if (start != 0 && start < this->_frameCount) {
            for (uint32_t i = start; i != 0; --i) {
                for (uint32_t j = 0; j < this->_channelCount; ++j) {
                    if (this->_frameBuffer[j][i] >= -epsilon && this->_frameBuffer[j][i] <= epsilon) {
                        zero = i;
                        return true;
                    }
                }
            }
        }
    }

    return false;
}

template< typename T >
inline void AudioEditBuffer<T>::linearFade(uint32_t start, uint32_t stop, bool increasing) {

    if (start >= stop || start > this->_frameCount || stop > this->_frameCount) {
        return;
    }

    uint32_t count = stop - start;
    float32_t delta;
    float32_t gain;

    if (increasing) { // 0.0 to 1.0f in delta increments
        delta = 1.0f / (float32_t)count;
        gain = 0.0f;
    } else { // 1.0f to 0.0f in delta increments
        delta = -1.0f / (float32_t)count;
        gain = 1.0f;
    }

    for (uint32_t i = start; i < stop; ++i) {
        for (uint32_t j = 0; j < this->_channelCount; ++j) {
            this->_frameBuffer[j][i] *= gain;
        }
        gain += delta;
    }
}

template< typename T >
inline void AudioEditBuffer<T>::exponentialFade(uint32_t start, uint32_t stop, bool increasing) {
    // TBD
}

typedef AudioEditBuffer< float32_t > AudioEditBufferFloat32;
typedef AudioEditBuffer< int32_t > AudioEditBufferSInt32;

#endif // hifi_AudioEditBuffer_h

@@ -295,7 +295,14 @@ int64_t AudioInjector::injectNextFrame() {

if (audioMixer) {
// send off this audio packet
nodeList->sendUnreliablePacket(*_currentPacket, *audioMixer);
auto bytesWritten = nodeList->sendUnreliablePacket(*_currentPacket, *audioMixer);
if (bytesWritten < 0) {
auto currentTime = _frameTimer->nsecsElapsed() / 1000;
qDebug() << this << "error sending audio injector packet. NF:"
<< _nextFrame << "CT:" << currentTime
<< "CF:" << currentTime / AudioConstants::NETWORK_FRAME_USECS;
}

_outgoingSequenceNumber++;
}

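For reference, a minimal standalone sketch of the timing arithmetic behind the new error log, where NF is _nextFrame, CT is the elapsed time in microseconds, and CF is the current network frame index; the 10 ms frame length used here is an assumed value, not taken from AudioConstants:

// timing_sketch.cpp - hypothetical, self-contained illustration only
#include <cstdint>
#include <cstdio>

int main() {
    const int64_t NETWORK_FRAME_USECS = 10000;   // assumed frame length (10 ms)
    int64_t nsecs = 1234567890;                  // e.g. QElapsedTimer::nsecsElapsed()
    int64_t currentTime = nsecs / 1000;          // nanoseconds -> microseconds (the "CT" value)
    int64_t currentFrame = currentTime / NETWORK_FRAME_USECS;  // the "CF" value
    std::printf("CT=%lld usec, CF=%lld\n", (long long)currentTime, (long long)currentFrame);
    return 0;
}
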
@@ -26,7 +26,6 @@
#include "AudioRingBuffer.h"
#include "AudioFormat.h"
#include "AudioBuffer.h"
#include "AudioEditBuffer.h"
#include "AudioLogging.h"
#include "Sound.h"

@@ -69,7 +68,6 @@ void Sound::downloadFinished(const QByteArray& data) {

interpretAsWav(rawAudioByteArray, outputAudioByteArray);
downSample(outputAudioByteArray);
trimFrames();
} else if (fileName.endsWith(RAW_EXTENSION)) {
// check if this was a stereo raw file
// since it's raw the only way for us to know that is if the file was called .stereo.raw

@@ -80,7 +78,6 @@ void Sound::downloadFinished(const QByteArray& data) {

// Process as RAW file
downSample(rawAudioByteArray);
trimFrames();
} else {
qCDebug(audio) << "Unknown sound file type";
}

@@ -98,11 +95,23 @@ void Sound::downSample(const QByteArray& rawAudioByteArray) {

int numSourceSamples = rawAudioByteArray.size() / sizeof(AudioConstants::AudioSample);

int numDestinationBytes = rawAudioByteArray.size() / sizeof(AudioConstants::AudioSample);
if (_isStereo && numSourceSamples % 2 != 0) {
numDestinationBytes += sizeof(AudioConstants::AudioSample);
if (_isStereo && numSourceSamples % 2 != 0){
// in the unlikely case that we have stereo audio but we seem to be missing a sample
// (the sample for one channel is missing in a set of interleaved samples)
// then drop the odd sample
--numSourceSamples;
}

int numDestinationSamples = numSourceSamples / 2.0f;

if (_isStereo && numDestinationSamples % 2 != 0) {
// if this is stereo we need to make sure we produce stereo output
// which means we should have an even number of output samples
numDestinationSamples += 1;
}

int numDestinationBytes = numDestinationSamples * sizeof(AudioConstants::AudioSample);

_byteArray.resize(numDestinationBytes);

int16_t* sourceSamples = (int16_t*) rawAudioByteArray.data();

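A minimal sketch of the corrected buffer-sizing arithmetic above, pulled out into a free function for illustration (the function name and the 16-bit sample type are assumptions, not part of the commit): odd stereo inputs drop the unpaired sample, the 2:1 downsample halves the count, and stereo outputs are padded back to an even sample count before converting to bytes.

// downsample_sizing_sketch.cpp - hypothetical illustration of the new sizing logic
#include <cstdint>
#include <cstdio>

int destinationBytes(int sourceBytes, bool isStereo) {
    int numSourceSamples = sourceBytes / (int)sizeof(int16_t);
    if (isStereo && numSourceSamples % 2 != 0) {
        --numSourceSamples;                           // drop the odd, unpaired interleaved sample
    }
    int numDestinationSamples = numSourceSamples / 2; // 2:1 downsample
    if (isStereo && numDestinationSamples % 2 != 0) {
        numDestinationSamples += 1;                   // keep the output interleaved in L/R pairs
    }
    return numDestinationSamples * (int)sizeof(int16_t);
}

int main() {
    std::printf("%d\n", destinationBytes(10, true));  // 5 source samples -> 2 -> 4 bytes
    std::printf("%d\n", destinationBytes(12, true));  // 6 source samples -> 3 -> padded to 4 -> 8 bytes
    return 0;
}
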
@@ -129,26 +138,6 @@ void Sound::downSample(const QByteArray& rawAudioByteArray) {
}
}

void Sound::trimFrames() {

const uint32_t inputFrameCount = _byteArray.size() / sizeof(int16_t);
const uint32_t trimCount = 1024; // number of leading and trailing frames to trim

if (inputFrameCount <= (2 * trimCount)) {
return;
}

int16_t* inputFrameData = (int16_t*)_byteArray.data();

AudioEditBufferFloat32 editBuffer(1, inputFrameCount);
editBuffer.copyFrames(1, inputFrameCount, inputFrameData, false /*copy in*/);

editBuffer.linearFade(0, trimCount, true);
editBuffer.linearFade(inputFrameCount - trimCount, inputFrameCount, false);

editBuffer.copyFrames(1, inputFrameCount, inputFrameData, true /*copy out*/);
}

//
// Format description from https://ccrma.stanford.edu/courses/422/projects/WaveFormat/
//

@@ -38,7 +38,6 @@ private:
bool _isStereo;
bool _isReady;

void trimFrames();
void downSample(const QByteArray& rawAudioByteArray);
void interpretAsWav(const QByteArray& inputAudioByteArray, QByteArray& outputAudioByteArray);

@@ -763,7 +763,7 @@ void EntityItem::adjustEditPacketForClockSkew(QByteArray& buffer, int clockSkew)
// lastEdited
quint64 lastEditedInLocalTime;
memcpy(&lastEditedInLocalTime, dataAt, sizeof(lastEditedInLocalTime));
quint64 lastEditedInServerTime = lastEditedInLocalTime + clockSkew;
quint64 lastEditedInServerTime = lastEditedInLocalTime > 0 ? lastEditedInLocalTime + clockSkew : 0;
memcpy(dataAt, &lastEditedInServerTime, sizeof(lastEditedInServerTime));
#ifdef WANT_DEBUG
qCDebug(entities, "EntityItem::adjustEditPacketForClockSkew()...");

@@ -812,6 +812,14 @@ void EntityItem::setMass(float mass) {
}
}

void EntityItem::setHref(QString value) {
auto href = value.toLower();
if (! (value.toLower().startsWith("hifi://")) ) {
return;
}
_href = value;
}

void EntityItem::simulate(const quint64& now) {
if (_lastSimulated == 0) {
_lastSimulated = now;

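The guard added above only accepts hifi:// links; a minimal sketch of that behavior in isolation (the helper name is hypothetical, and Qt's QString is assumed to be available):

// href_guard_sketch.cpp - hypothetical illustration of the case-insensitive hifi:// check
#include <QString>
#include <QDebug>

static QString applyHrefGuard(const QString& currentHref, const QString& value) {
    if (!value.toLower().startsWith("hifi://")) {
        return currentHref;     // reject anything that is not a hifi:// address
    }
    return value;               // accept, keeping the caller's original casing
}

int main() {
    QString href;
    href = applyHrefGuard(href, "http://example.com"); // rejected, href stays empty
    href = applyHrefGuard(href, "HIFI://welcome");      // accepted (check is case-insensitive)
    qDebug() << href;                                    // "HIFI://welcome"
    return 0;
}
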
@@ -172,7 +172,7 @@ public:

// Hyperlink related getters and setters
QString getHref() const { return _href; }
void setHref(QString value) { _href = value; }
void setHref(QString value);

QString getDescription() const { return _description; }
void setDescription(QString value) { _description = value; }

@@ -321,6 +321,11 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
QScriptValue properties = engine->newObject();
EntityItemProperties defaultEntityProperties;

if (_created == UNKNOWN_CREATED_TIME) {
// No entity properties can have been set so return without setting any default, zero property values.
return properties;
}

if (_idSet) {
COPY_PROPERTY_TO_QSCRIPTVALUE_GETTER_ALWAYS(id, _id.toString());
}

@@ -28,6 +28,7 @@
#include "render/DrawSceneOctree.h"
#include "AmbientOcclusionEffect.h"
#include "AntialiasingEffect.h"
#include "ToneMappingEffect.h"

#include "RenderDeferredTask.h"

@@ -68,20 +69,6 @@ void RenderDeferred::run(const SceneContextPointer& sceneContext, const RenderCo
DependencyManager::get<DeferredLightingEffect>()->render(renderContext);
}

void ToneMappingDeferred::configure(const Config& config) {
if (config.exposure >= 0.0f) {
_toneMappingEffect.setExposure(config.exposure);
}

if (config.curve >= 0) {
_toneMappingEffect.setToneCurve((ToneMappingEffect::ToneCurve)config.curve);
}
}

void ToneMappingDeferred::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
_toneMappingEffect.render(renderContext->args);
}

RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) {
cullFunctor = cullFunctor ? cullFunctor : [](const RenderArgs*, const AABox&){ return true; };

@@ -16,8 +16,6 @@

#include "render/DrawTask.h"

#include "ToneMappingEffect.h"

class SetupDeferred {
public:
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);

@@ -40,32 +38,6 @@ public:
using JobModel = render::Job::Model<RenderDeferred>;
};


class ToneMappingConfig : public render::Job::Config {
Q_OBJECT
Q_PROPERTY(bool enabled MEMBER enabled)
Q_PROPERTY(float exposure MEMBER exposure NOTIFY dirty);
Q_PROPERTY(int curve MEMBER curve NOTIFY dirty);
public:
ToneMappingConfig() : render::Job::Config(true) {}

float exposure{ 0.0f };
int curve{ 3 };
signals:
void dirty();
};

class ToneMappingDeferred {
public:
using Config = ToneMappingConfig;
using JobModel = render::Job::Model<ToneMappingDeferred, Config>;

void configure(const Config& config);
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);

ToneMappingEffect _toneMappingEffect;
};

class DrawConfig : public render::Job::Config {
Q_OBJECT
Q_PROPERTY(int numDrawn READ getNumDrawn)

@@ -145,4 +145,19 @@ void ToneMappingEffect::render(RenderArgs* args) {
batch.setResourceTexture(ToneMappingEffect_LightingMapSlot, lightingBuffer);
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
}
}


void ToneMappingDeferred::configure(const Config& config) {
if (config.exposure >= 0.0f) {
_toneMappingEffect.setExposure(config.exposure);
}

if (config.curve >= 0) {
_toneMappingEffect.setToneCurve((ToneMappingEffect::ToneCurve)config.curve);
}
}

void ToneMappingDeferred::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext) {
_toneMappingEffect.render(renderContext->args);
}

@@ -17,6 +17,8 @@

#include <gpu/Resource.h>
#include <gpu/Pipeline.h>
#include <render/DrawTask.h>


class RenderArgs;

@@ -50,7 +52,7 @@ private:
float _exposure = 0.0f;
float _twoPowExposure = 1.0f;
glm::vec2 spareA;
int _toneCurve = Filmic;
int _toneCurve = Gamma22;
glm::vec3 spareB;

Parameters() {}

@@ -61,4 +63,33 @@ private:
void init();
};

class ToneMappingConfig : public render::Job::Config {
Q_OBJECT
Q_PROPERTY(bool enabled MEMBER enabled)
Q_PROPERTY(float exposure MEMBER exposure WRITE setExposure);
Q_PROPERTY(int curve MEMBER curve WRITE setCurve);
public:
ToneMappingConfig() : render::Job::Config(true) {}

void setExposure(float newExposure) { exposure = std::max(0.0f, newExposure); emit dirty(); }
void setCurve(int newCurve) { curve = std::max((int)ToneMappingEffect::None, std::min((int)ToneMappingEffect::Filmic, newCurve)); emit dirty(); }


float exposure{ 0.0f };
int curve{ ToneMappingEffect::Gamma22 };
signals:
void dirty();
};

class ToneMappingDeferred {
public:
using Config = ToneMappingConfig;
using JobModel = render::Job::Model<ToneMappingDeferred, Config>;

void configure(const Config& config);
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);

ToneMappingEffect _toneMappingEffect;
};

#endif // hifi_ToneMappingEffect_h

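The relocated config now clamps incoming values in its setters before emitting dirty(); a minimal sketch of that clamping, using assumed tone-curve enum values (the real ordering lives in ToneMappingEffect):

// tonemapping_clamp_sketch.cpp - hypothetical illustration of the setter clamping
#include <algorithm>
#include <cstdio>

enum ToneCurve { None = 0, Gamma22 = 1, Reinhard = 2, Filmic = 3 };  // assumed values

float clampExposure(float newExposure) {
    return std::max(0.0f, newExposure);                           // negative exposures floor at 0
}

int clampCurve(int newCurve) {
    return std::max((int)None, std::min((int)Filmic, newCurve));  // hold within [None, Filmic]
}

int main() {
    std::printf("%.1f %.1f\n", clampExposure(-2.5f), clampExposure(1.5f));  // 0.0 1.5
    std::printf("%d %d\n", clampCurve(-7), clampCurve(42));                 // 0 3
    return 0;
}
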
@@ -20,6 +20,7 @@ DISTFILES += \
../../interface/resources/qml/controls/*.qml \
../../interface/resources/qml/dialogs/*.qml \
../../interface/resources/qml/dialogs/fileDialog/*.qml \
../../interface/resources/qml/dialogs/preferences/*.qml \
../../interface/resources/qml/desktop/*.qml \
../../interface/resources/qml/menus/*.qml \
../../interface/resources/qml/styles/*.qml \