Merge branch 'master' into 21055

Conflicts:
	interface/resources/qml/AddressBarDialog.qml
This commit is contained in:
David Rowe 2016-10-07 11:27:35 +13:00
commit e2e762f9e7
27 changed files with 284 additions and 175 deletions

View file

@ -48,8 +48,7 @@ static const int RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES = 10;
Agent::Agent(ReceivedMessage& message) :
ThreadedAssignment(message),
_entityEditSender(),
_receivedAudioStream(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO,
RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES) {
_receivedAudioStream(RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES) {
DependencyManager::get<EntityScriptingInterface>()->setPacketSender(&_entityEditSender);
ResourceManager::init();

View file

@ -6,8 +6,8 @@ if (WIN32)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://hifi-public.s3.amazonaws.com/dependencies/qtaudio_wasapi2.zip
URL_MD5 272b27bd6c211c45c0c23d4701b63b5e
URL http://hifi-public.s3.amazonaws.com/dependencies/qtaudio_wasapi3.zip
URL_MD5 1a2433f80a788a54c70f505ff4f43ac1
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""

View file

@ -59,11 +59,13 @@ Window {
}
addressLine.text = targetString;
toggleOrGo(true);
clearAddressLineTimer.start();
}
property var allStories: [];
property int cardWidth: 200;
property int cardHeight: 152;
property string metaverseBase: addressBarDialog.metaverseServerUrl + "/api/v1/";
property bool isCursorVisible: false // Override default cursor visibility.
AddressBarDialog {
id: addressBarDialog
@ -73,11 +75,15 @@ Window {
implicitWidth: backgroundImage.width
implicitHeight: backgroundImage.height + (keyboardRaised ? 200 : 0)
// The buttons have their button state changed on hover, so we have to manually fix them up here
onBackEnabledChanged: backArrow.buttonState = addressBarDialog.backEnabled ? 1 : 0;
onForwardEnabledChanged: forwardArrow.buttonState = addressBarDialog.forwardEnabled ? 1 : 0;
onReceivedHifiSchemeURL: resetAfterTeleport();
// Update location after using back and forward buttons.
onMetaverseServerUrlChanged: updateLocationTextTimer.start();
ListModel { id: suggestions }
ListView {
@ -189,7 +195,7 @@ Window {
color: "gray";
clip: true;
anchors.fill: addressLine;
visible: !addressLine.activeFocus;
visible: addressLine.text.length === 0
}
TextInput {
id: addressLine
@ -205,8 +211,47 @@ Window {
bottomMargin: parent.inputAreaStep
}
font.pixelSize: hifi.fonts.pixelSize * root.scale * 0.75
onTextChanged: filterChoicesByText()
onActiveFocusChanged: updateLocationText(focus)
cursorVisible: false
onTextChanged: {
filterChoicesByText();
updateLocationText(text.length > 0);
if (!isCursorVisible && text.length > 0) {
isCursorVisible = true;
cursorVisible = true;
}
}
onActiveFocusChanged: {
cursorVisible = isCursorVisible;
}
MouseArea {
// If user clicks in address bar show cursor to indicate ability to enter address.
anchors.fill: parent
onClicked: {
isCursorVisible = true;
parent.cursorVisible = true;
}
}
}
}
Timer {
// Delay updating location text a bit to avoid flicker of content and so that connection status is valid.
// Single-shot: started from onMetaverseServerUrlChanged; fires once to refresh the notice text.
id: updateLocationTextTimer
running: false
interval: 500 // ms
repeat: false
// false => show current location / connection status rather than the "enter an address" prompt.
onTriggered: updateLocationText(false);
}
Timer {
// Delay clearing address line so as to avoid flicker of "not connected" being displayed after entering an address.
// Single-shot: started when the dialog is dismissed or an address is submitted (Escape/Back/Enter handlers).
id: clearAddressLineTimer
running: false
interval: 100 // ms
repeat: false
onTriggered: {
// Reset the input field and hide the cursor until the user interacts again.
addressLine.text = "";
isCursorVisible = false;
}
}
@ -393,9 +438,8 @@ Window {
});
}
function updateLocationText(focus) {
addressLine.text = "";
if (focus) {
function updateLocationText(enteringAddress) {
if (enteringAddress) {
notice.text = "Go to a place, @user, path or network address";
notice.color = "gray";
} else {
@ -407,9 +451,9 @@ Window {
}
onVisibleChanged: {
focus = false;
updateLocationText(false);
if (visible) {
addressLine.forceActiveFocus();
fillDestinations();
}
}
@ -426,11 +470,13 @@ Window {
case Qt.Key_Escape:
case Qt.Key_Back:
root.shown = false
clearAddressLineTimer.start();
event.accepted = true
break
case Qt.Key_Enter:
case Qt.Key_Return:
toggleOrGo()
clearAddressLineTimer.start();
event.accepted = true
break
}

View file

@ -250,8 +250,6 @@ int AudioScope::addSilenceToScope(QByteArray* byteArray, int frameOffset, int si
}
const int STEREO_FACTOR = 2;
void AudioScope::addStereoSilenceToScope(int silentSamplesPerChannel) {
if (!_isEnabled || _isPaused) {
return;
@ -265,10 +263,10 @@ void AudioScope::addStereoSamplesToScope(const QByteArray& samples) {
return;
}
const int16_t* samplesData = reinterpret_cast<const int16_t*>(samples.data());
int samplesPerChannel = samples.size() / sizeof(int16_t) / STEREO_FACTOR;
int samplesPerChannel = samples.size() / sizeof(int16_t) / AudioConstants::STEREO;
addBufferToScope(_scopeOutputLeft, _scopeOutputOffset, samplesData, samplesPerChannel, 0, STEREO_FACTOR);
_scopeOutputOffset = addBufferToScope(_scopeOutputRight, _scopeOutputOffset, samplesData, samplesPerChannel, 1, STEREO_FACTOR);
addBufferToScope(_scopeOutputLeft, _scopeOutputOffset, samplesData, samplesPerChannel, 0, AudioConstants::STEREO);
_scopeOutputOffset = addBufferToScope(_scopeOutputRight, _scopeOutputOffset, samplesData, samplesPerChannel, 1, AudioConstants::STEREO);
_scopeLastFrame = samples.right(AudioConstants::NETWORK_FRAME_BYTES_STEREO);
}
@ -282,9 +280,9 @@ void AudioScope::addLastFrameRepeatedWithFadeToScope(int samplesPerChannel) {
int samplesToWriteThisIteration = std::min(samplesRemaining, (int) AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
float fade = calculateRepeatedFrameFadeFactor(indexOfRepeat);
addBufferToScope(_scopeOutputLeft, _scopeOutputOffset, lastFrameData,
samplesToWriteThisIteration, 0, STEREO_FACTOR, fade);
samplesToWriteThisIteration, 0, AudioConstants::STEREO, fade);
_scopeOutputOffset = addBufferToScope(_scopeOutputRight, _scopeOutputOffset,
lastFrameData, samplesToWriteThisIteration, 1, STEREO_FACTOR, fade);
lastFrameData, samplesToWriteThisIteration, 1, AudioConstants::STEREO, fade);
samplesRemaining -= samplesToWriteThisIteration;
indexOfRepeat++;

View file

@ -29,6 +29,7 @@
#include "InterfaceLogging.h"
#include "UserActivityLogger.h"
#include "MainWindow.h"
#include <QtCore/QProcess>
#ifdef HAS_BUGSPLAT
#include <BuildInfo.h>
@ -121,6 +122,29 @@ int main(int argc, const char* argv[]) {
}
}
QCommandLineParser parser;
QCommandLineOption runServerOption("runServer", "Whether to run the server");
QCommandLineOption serverContentPathOption("serverContentPath", "Where to find server content", "serverContentPath");
parser.addOption(runServerOption);
parser.addOption(serverContentPathOption);
parser.parse(arguments);
if (parser.isSet(runServerOption)) {
QString applicationDirPath = QFileInfo(arguments[0]).path();
QString serverPath = applicationDirPath + "/server-console/server-console.exe";
qDebug() << "Application dir path is: " << applicationDirPath;
qDebug() << "Server path is: " << serverPath;
QStringList args;
if (parser.isSet(serverContentPathOption)) {
QString serverContentPath = QFileInfo(arguments[0]).path() + "/" + parser.value(serverContentPathOption);
args << "--" << "--contentPath" << serverContentPath;
}
qDebug() << QFileInfo(arguments[0]).path();
qDebug() << QProcess::startDetached(serverPath, args);
// Sleep a short amount of time to give the server a chance to start
usleep(2000000);
}
QElapsedTimer startupTime;
startupTime.start();

View file

@ -38,6 +38,7 @@ AddressBarDialog::AddressBarDialog(QQuickItem* parent) : OffscreenQmlDialog(pare
});
_backEnabled = !(DependencyManager::get<AddressManager>()->getBackStack().isEmpty());
_forwardEnabled = !(DependencyManager::get<AddressManager>()->getForwardStack().isEmpty());
connect(addressManager.data(), &AddressManager::hostChanged, this, &AddressBarDialog::metaverseServerUrlChanged);
connect(DependencyManager::get<DialogsManager>().data(), &DialogsManager::setUseFeed, this, &AddressBarDialog::setUseFeed);
connect(qApp, &Application::receivedHifiSchemeURL, this, &AddressBarDialog::receivedHifiSchemeURL);
}

View file

@ -37,7 +37,7 @@ signals:
void forwardEnabledChanged();
void useFeedChanged();
void receivedHifiSchemeURL(const QString& url);
void metaverseServerUrlChanged(); // While it is a constant, qml will complain about not seeing a change signal.
void metaverseServerUrlChanged();
protected:
void displayAddressOfflineMessage();

View file

@ -115,7 +115,7 @@ AudioClient::AudioClient() :
_loopbackAudioOutput(NULL),
_loopbackOutputDevice(NULL),
_inputRingBuffer(0),
_receivedAudioStream(0, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES),
_receivedAudioStream(RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES),
_isStereoInput(false),
_outputStarveDetectionStartTimeMsec(0),
_outputStarveDetectionCount(0),
@ -1152,9 +1152,9 @@ bool AudioClient::outputLocalInjector(bool isStereo, AudioInjector* injector) {
}
void AudioClient::outputFormatChanged() {
int outputFormatChannelCountTimesSampleRate = _outputFormat.channelCount() * _outputFormat.sampleRate();
_outputFrameSize = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * outputFormatChannelCountTimesSampleRate / _desiredOutputFormat.sampleRate();
_receivedAudioStream.outputFormatChanged(outputFormatChannelCountTimesSampleRate);
_outputFrameSize = (AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * _outputFormat.channelCount() * _outputFormat.sampleRate()) /
_desiredOutputFormat.sampleRate();
_receivedAudioStream.outputFormatChanged(_outputFormat.sampleRate(), _outputFormat.channelCount());
}
bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceInfo) {

View file

@ -46,10 +46,11 @@ static const int STATS_FOR_STATS_PACKET_WINDOW_SECONDS = 30;
// _currentJitterBufferFrames is updated with the time-weighted avg and the running time-weighted avg is reset.
static const quint64 FRAMES_AVAILABLE_STAT_WINDOW_USECS = 10 * USECS_PER_SECOND;
InboundAudioStream::InboundAudioStream(int numFrameSamples, int numFramesCapacity, int numStaticJitterFrames) :
_ringBuffer(numFrameSamples, numFramesCapacity),
_dynamicJitterBufferEnabled(numStaticJitterFrames == -1),
_staticJitterBufferFrames(std::max(numStaticJitterFrames, DEFAULT_STATIC_JITTER_FRAMES)),
InboundAudioStream::InboundAudioStream(int numChannels, int numFrames, int numBlocks, int numStaticJitterBlocks) :
_ringBuffer(numChannels * numFrames, numBlocks),
_numChannels(numChannels),
_dynamicJitterBufferEnabled(numStaticJitterBlocks == -1),
_staticJitterBufferFrames(std::max(numStaticJitterBlocks, DEFAULT_STATIC_JITTER_FRAMES)),
_desiredJitterBufferFrames(_dynamicJitterBufferEnabled ? 1 : _staticJitterBufferFrames),
_incomingSequenceNumberStats(STATS_FOR_STATS_PACKET_WINDOW_SECONDS),
_starveHistory(STARVE_HISTORY_CAPACITY),
@ -121,11 +122,11 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
packetReceivedUpdateTimingStats();
int networkSamples;
int networkFrames;
// parse the info after the seq number and before the audio data (the stream properties)
int prePropertyPosition = message.getPosition();
int propertyBytes = parseStreamProperties(message.getType(), message.readWithoutCopy(message.getBytesLeftToRead()), networkSamples);
int propertyBytes = parseStreamProperties(message.getType(), message.readWithoutCopy(message.getBytesLeftToRead()), networkFrames);
message.seek(prePropertyPosition + propertyBytes);
// handle this packet based on its arrival status.
@ -135,7 +136,7 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
// NOTE: we assume that each dropped packet contains the same number of samples
// as the packet we just received.
int packetsDropped = arrivalInfo._seqDiffFromExpected;
writeSamplesForDroppedPackets(packetsDropped * networkSamples);
writeFramesForDroppedPackets(packetsDropped * networkFrames);
// fall through to OnTime case
}
@ -143,7 +144,7 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
// Packet is on time; parse its data to the ringbuffer
if (message.getType() == PacketType::SilentAudioFrame) {
// FIXME - Some codecs need to know about these silent frames... and can produce better output
writeDroppableSilentSamples(networkSamples);
writeDroppableSilentFrames(networkFrames);
} else {
// note: PCM and no codec are identical
bool selectedPCM = _selectedCodecName == "pcm" || _selectedCodecName == "";
@ -153,7 +154,7 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
parseAudioData(message.getType(), afterProperties);
} else {
qDebug() << "Codec mismatch: expected" << _selectedCodecName << "got" << codecInPacket << "writing silence";
writeDroppableSilentSamples(networkSamples);
writeDroppableSilentFrames(networkFrames);
// inform others of the mismatch
auto sendingNode = DependencyManager::get<NodeList>()->nodeWithUUID(message.getSourceID());
emit mismatchedAudioCodec(sendingNode, _selectedCodecName, codecInPacket);
@ -218,12 +219,13 @@ int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packet
return _ringBuffer.writeData(decodedBuffer.data(), actualSize);
}
int InboundAudioStream::writeDroppableSilentSamples(int silentSamples) {
int InboundAudioStream::writeDroppableSilentFrames(int silentFrames) {
if (_decoder) {
_decoder->trackLostFrames(silentSamples);
_decoder->trackLostFrames(silentFrames);
}
// calculate how many silent frames we should drop.
int silentSamples = silentFrames * _numChannels;
int samplesPerFrame = _ringBuffer.getNumFrameSamples();
int desiredJitterBufferFramesPlusPadding = _desiredJitterBufferFrames + DESIRED_JITTER_BUFFER_FRAMES_PADDING;
int numSilentFramesToDrop = 0;
@ -414,14 +416,14 @@ void InboundAudioStream::packetReceivedUpdateTimingStats() {
_lastPacketReceivedTime = now;
}
int InboundAudioStream::writeSamplesForDroppedPackets(int networkSamples) {
return writeLastFrameRepeatedWithFade(networkSamples);
int InboundAudioStream::writeFramesForDroppedPackets(int networkFrames) {
return writeLastFrameRepeatedWithFade(networkFrames);
}
int InboundAudioStream::writeLastFrameRepeatedWithFade(int samples) {
int InboundAudioStream::writeLastFrameRepeatedWithFade(int frames) {
AudioRingBuffer::ConstIterator frameToRepeat = _ringBuffer.lastFrameWritten();
int frameSize = _ringBuffer.getNumFrameSamples();
int samplesToWrite = samples;
int samplesToWrite = frames * _numChannels;
int indexOfRepeat = 0;
do {
int samplesToWriteThisIteration = std::min(samplesToWrite, frameSize);
@ -434,7 +436,7 @@ int InboundAudioStream::writeLastFrameRepeatedWithFade(int samples) {
indexOfRepeat++;
} while (samplesToWrite > 0);
return samples;
return frames;
}
AudioStreamStats InboundAudioStream::getAudioStreamStats() const {

View file

@ -47,7 +47,7 @@ public:
static const bool REPETITION_WITH_FADE;
InboundAudioStream() = delete;
InboundAudioStream(int numFrameSamples, int numFramesCapacity, int numStaticJitterFrames = -1);
InboundAudioStream(int numChannels, int numFrames, int numBlocks, int numStaticJitterBlocks);
~InboundAudioStream();
void reset();
@ -115,7 +115,7 @@ public slots:
private:
void packetReceivedUpdateTimingStats();
int writeSamplesForDroppedPackets(int networkSamples);
int writeFramesForDroppedPackets(int networkFrames);
void popSamplesNoCheck(int samples);
void framesAvailableChanged();
@ -134,16 +134,17 @@ protected:
/// default implementation assumes packet contains raw audio samples after stream properties
virtual int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties);
/// writes silent samples to the buffer that may be dropped to reduce latency caused by the buffer
virtual int writeDroppableSilentSamples(int silentSamples);
/// writes silent frames to the buffer that may be dropped to reduce latency caused by the buffer
virtual int writeDroppableSilentFrames(int silentFrames);
/// writes the last written frame repeatedly, gradually fading to silence.
/// used for writing samples for dropped packets.
virtual int writeLastFrameRepeatedWithFade(int samples);
virtual int writeLastFrameRepeatedWithFade(int frames);
protected:
AudioRingBuffer _ringBuffer;
int _numChannels;
bool _lastPopSucceeded { false };
AudioRingBuffer::ConstIterator _lastPopOutput;

View file

@ -11,5 +11,8 @@
#include "MixedAudioStream.h"
MixedAudioStream::MixedAudioStream(int numFrameSamples, int numFramesCapacity, int numStaticJitterFrames) :
InboundAudioStream(numFrameSamples, numFramesCapacity, numStaticJitterFrames) {}
#include "AudioConstants.h"
MixedAudioStream::MixedAudioStream(int numFramesCapacity, int numStaticJitterFrames) :
InboundAudioStream(AudioConstants::STEREO, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL,
numFramesCapacity, numStaticJitterFrames) {}

View file

@ -16,7 +16,7 @@
class MixedAudioStream : public InboundAudioStream {
public:
MixedAudioStream(int numFrameSamples, int numFramesCapacity, int numStaticJitterFrames = -1);
MixedAudioStream(int numFramesCapacity, int numStaticJitterFrames = -1);
float getNextOutputFrameLoudness() const { return _ringBuffer.getNextOutputFrameLoudness(); }
};

View file

@ -12,33 +12,30 @@
#include "MixedProcessedAudioStream.h"
#include "AudioLogging.h"
static const int STEREO_FACTOR = 2;
MixedProcessedAudioStream::MixedProcessedAudioStream(int numFramesCapacity, int numStaticJitterFrames)
: InboundAudioStream(AudioConstants::STEREO, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL,
numFramesCapacity, numStaticJitterFrames) {}
MixedProcessedAudioStream::MixedProcessedAudioStream(int numFrameSamples, int numFramesCapacity, int numStaticJitterFrames)
: InboundAudioStream(numFrameSamples, numFramesCapacity, numStaticJitterFrames) {}
void MixedProcessedAudioStream::outputFormatChanged(int outputFormatChannelCountTimesSampleRate) {
_outputFormatChannelsTimesSampleRate = outputFormatChannelCountTimesSampleRate;
int deviceOutputFrameSize = networkToDeviceSamples(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO);
_ringBuffer.resizeForFrameSize(deviceOutputFrameSize);
void MixedProcessedAudioStream::outputFormatChanged(int sampleRate, int channelCount) {
_outputSampleRate = sampleRate;
_outputChannelCount = channelCount;
int deviceOutputFrameFrames = networkToDeviceFrames(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO / AudioConstants::STEREO);
int deviceOutputFrameSamples = deviceOutputFrameFrames * AudioConstants::STEREO;
_ringBuffer.resizeForFrameSize(deviceOutputFrameSamples);
}
int MixedProcessedAudioStream::writeDroppableSilentSamples(int silentSamples) {
int deviceSilentSamplesWritten = InboundAudioStream::writeDroppableSilentSamples(networkToDeviceSamples(silentSamples));
emit addedSilence(deviceToNetworkSamples(deviceSilentSamplesWritten) / STEREO_FACTOR);
return deviceSilentSamplesWritten;
int MixedProcessedAudioStream::writeDroppableSilentFrames(int silentFrames) {
int deviceSilentFrames = networkToDeviceFrames(silentFrames);
int deviceSilentFramesWritten = InboundAudioStream::writeDroppableSilentFrames(deviceSilentFrames);
emit addedSilence(deviceToNetworkFrames(deviceSilentFramesWritten));
return deviceSilentFramesWritten;
}
int MixedProcessedAudioStream::writeLastFrameRepeatedWithFade(int samples) {
int deviceSamplesWritten = InboundAudioStream::writeLastFrameRepeatedWithFade(networkToDeviceSamples(samples));
emit addedLastFrameRepeatedWithFade(deviceToNetworkSamples(deviceSamplesWritten) / STEREO_FACTOR);
return deviceSamplesWritten;
int MixedProcessedAudioStream::writeLastFrameRepeatedWithFade(int frames) {
int deviceFrames = networkToDeviceFrames(frames);
int deviceFramesWritten = InboundAudioStream::writeLastFrameRepeatedWithFade(deviceFrames);
emit addedLastFrameRepeatedWithFade(deviceToNetworkFrames(deviceFramesWritten));
return deviceFramesWritten;
}
int MixedProcessedAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties) {
@ -56,16 +53,16 @@ int MixedProcessedAudioStream::parseAudioData(PacketType type, const QByteArray&
_ringBuffer.writeData(outputBuffer.data(), outputBuffer.size());
qCDebug(audiostream, "Wrote %d samples to buffer (%d available)", outputBuffer.size() / (int)sizeof(int16_t), getSamplesAvailable());
return packetAfterStreamProperties.size();
}
int MixedProcessedAudioStream::networkToDeviceSamples(int networkSamples) {
return (quint64)networkSamples * (quint64)_outputFormatChannelsTimesSampleRate / (quint64)(STEREO_FACTOR
* AudioConstants::SAMPLE_RATE);
int MixedProcessedAudioStream::networkToDeviceFrames(int networkFrames) {
return ((quint64)networkFrames * _outputChannelCount * _outputSampleRate) /
(quint64)(AudioConstants::STEREO * AudioConstants::SAMPLE_RATE);
}
int MixedProcessedAudioStream::deviceToNetworkSamples(int deviceSamples) {
return (quint64)deviceSamples * (quint64)(STEREO_FACTOR * AudioConstants::SAMPLE_RATE)
/ (quint64)_outputFormatChannelsTimesSampleRate;
int MixedProcessedAudioStream::deviceToNetworkFrames(int deviceFrames) {
return (quint64)deviceFrames * (quint64)(AudioConstants::STEREO * AudioConstants::SAMPLE_RATE) /
(_outputSampleRate * _outputChannelCount);
}

View file

@ -19,7 +19,7 @@ class AudioClient;
class MixedProcessedAudioStream : public InboundAudioStream {
Q_OBJECT
public:
MixedProcessedAudioStream(int numFrameSamples, int numFramesCapacity, int numStaticJitterFrames = -1);
MixedProcessedAudioStream(int numFramesCapacity, int numStaticJitterFrames = -1);
signals:
@ -30,19 +30,20 @@ signals:
void processSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer);
public:
void outputFormatChanged(int outputFormatChannelCountTimesSampleRate);
void outputFormatChanged(int sampleRate, int channelCount);
protected:
int writeDroppableSilentSamples(int silentSamples) override;
int writeLastFrameRepeatedWithFade(int samples) override;
int writeDroppableSilentFrames(int silentFrames) override;
int writeLastFrameRepeatedWithFade(int frames) override;
int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties) override;
private:
int networkToDeviceSamples(int networkSamples);
int deviceToNetworkSamples(int deviceSamples);
int networkToDeviceFrames(int networkFrames);
int deviceToNetworkFrames(int deviceFrames);
private:
int _outputFormatChannelsTimesSampleRate;
quint64 _outputSampleRate;
quint64 _outputChannelCount;
};
#endif // hifi_MixedProcessedAudioStream_h

View file

@ -22,10 +22,10 @@
#include <UUID.h>
PositionalAudioStream::PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, int numStaticJitterFrames) :
InboundAudioStream(isStereo
? AudioConstants::NETWORK_FRAME_SAMPLES_STEREO
: AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL,
AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY, numStaticJitterFrames),
InboundAudioStream(isStereo ? AudioConstants::STEREO : AudioConstants::MONO,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL,
AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY,
numStaticJitterFrames),
_type(type),
_position(0.0f, 0.0f, 0.0f),
_orientation(0.0f, 0.0f, 0.0f, 0.0f),

View file

@ -395,9 +395,14 @@ QUuid EntityScriptingInterface::editEntity(QUuid id, const EntityItemProperties&
}
});
if (!updatedEntity) {
return QUuid();
}
// FIXME: We need to figure out a better way to handle this. Allowing these edits to go through potentially
// breaks avatar energy and entities that are parented.
//
// To handle cases where a script needs to edit an entity with a _known_ entity id but doesn't exist
// in the local entity tree, we need to allow those edits to go through to the server.
// if (!updatedEntity) {
// return QUuid();
// }
_entityTree->withReadLock([&] {
EntityItemPointer entity = _entityTree->findEntityByEntityItemID(entityID);

View file

@ -90,7 +90,11 @@ bool copyShapeToMesh(const btTransform& transform, const btConvexShape* shape,
avgVertex = transform * (avgVertex * (1.0f / (float)numHullVertices));
for (int i = 0; i < numHullVertices; ++i) {
btVector3 norm = (transform * hullVertices[i] - avgVertex).normalize();
btVector3 norm = transform * hullVertices[i] - avgVertex;
btScalar normLength = norm.length();
if (normLength > FLT_EPSILON) {
norm /= normLength;
}
memcpy(tempVertices + 3 * i, norm.m_floats, SIZE_OF_VEC3);
}
gpu::BufferView::Size numBytes = sizeof(float) * (3 * numHullVertices);

View file

@ -31,13 +31,28 @@ void AssetScriptingInterface::uploadData(QString data, QScriptValue callback) {
QObject::connect(upload, &AssetUpload::finished, this, [this, callback](AssetUpload* upload, const QString& hash) mutable {
if (callback.isFunction()) {
QString url = "atp:" + hash;
QScriptValueList args { url };
QScriptValueList args { url, hash };
callback.call(_engine->currentContext()->thisObject(), args);
}
upload->deleteLater();
});
upload->start();
}
void AssetScriptingInterface::setMapping(QString path, QString hash, QScriptValue callback) {
auto setMappingRequest = DependencyManager::get<AssetClient>()->createSetMappingRequest(path, hash);
QObject::connect(setMappingRequest, &SetMappingRequest::finished, this, [this, callback](SetMappingRequest* request) mutable {
if (callback.isFunction()) {
QScriptValueList args { };
callback.call(_engine->currentContext()->thisObject(), args);
}
request->deleteLater();
});
setMappingRequest->start();
}
void AssetScriptingInterface::downloadData(QString urlString, QScriptValue callback) {
const QString ATP_SCHEME { "atp:" };

View file

@ -26,6 +26,7 @@ public:
Q_INVOKABLE void uploadData(QString data, QScriptValue callback);
Q_INVOKABLE void downloadData(QString url, QScriptValue downloadComplete);
Q_INVOKABLE void setMapping(QString path, QString hash, QScriptValue callback);
protected:
QSet<AssetRequest*> _pendingRequests;

View file

@ -25,6 +25,7 @@
#include <QtNetwork/QNetworkRequest>
#include <QtNetwork/QNetworkReply>
#include <QtScript/QScriptContextInfo>
#include <QtScript/QScriptValue>
#include <QtScript/QScriptValueIterator>
@ -106,6 +107,25 @@ void inputControllerFromScriptValue(const QScriptValue &object, controller::Inpu
out = qobject_cast<controller::InputController*>(object.toQObject());
}
// The purpose of the following two functions is to embed entity ids into entity script filenames
// so that they show up in stacktraces
//
// Extract the url portion of a url that has been encoded with encodeEntityIdIntoEntityUrl(...)
QString extractUrlFromEntityUrl(const QString& url) {
auto parts = url.split(' ', QString::SkipEmptyParts);
if (parts.length() > 0) {
return parts[0];
} else {
return "";
}
}
// Encode an entity id into an entity url
// Example: http://www.example.com/some/path.js [EntityID:{9fdd355f-d226-4887-9484-44432d29520e}]
QString encodeEntityIdIntoEntityUrl(const QString& url, const QString& entityID) {
return url + " [EntityID:" + entityID + "]";
}
static bool hasCorrectSyntax(const QScriptProgram& program) {
const auto syntaxCheck = QScriptEngine::checkSyntax(program.sourceCode());
if (syntaxCheck.state() != QScriptSyntaxCheckResult::Valid) {
@ -1091,14 +1111,19 @@ QUrl ScriptEngine::resolvePath(const QString& include) const {
return expandScriptUrl(url);
}
QScriptContextInfo contextInfo { currentContext()->parentContext() };
// we apparently weren't a fully qualified url, so, let's assume we're relative
// to the original URL of our script
QUrl parentURL;
if (_parentURL.isEmpty()) {
parentURL = QUrl(_fileNameString);
} else {
parentURL = QUrl(_parentURL);
QUrl parentURL = extractUrlFromEntityUrl(contextInfo.fileName());
if (parentURL.isEmpty()) {
if (_parentURL.isEmpty()) {
parentURL = QUrl(_fileNameString);
} else {
parentURL = QUrl(_parentURL);
}
}
// if the parent URL's scheme is empty, then this is probably a local file...
if (parentURL.scheme().isEmpty()) {
parentURL = QUrl::fromLocalFile(_fileNameString);
@ -1323,7 +1348,7 @@ void ScriptEngine::entityScriptContentAvailable(const EntityItemID& entityID, co
auto scriptCache = DependencyManager::get<ScriptCache>();
bool isFileUrl = isURL && scriptOrURL.startsWith("file://");
auto fileName = QString("(EntityID:%1, %2)").arg(entityID.toString(), isURL ? scriptOrURL : "EmbededEntityScript");
auto fileName = isURL ? encodeEntityIdIntoEntityUrl(scriptOrURL, entityID.toString()) : "EmbeddedEntityScript";
QScriptProgram program(contents, fileName);
if (!hasCorrectSyntax(program)) {

View file

@ -137,9 +137,8 @@ QString LogHandler::printMessage(LogMsgType type, const QMessageLogContext& cont
dateFormatPtr = &DATE_STRING_FORMAT_WITH_MILLISECONDS;
}
QString prefixString = QString("[%1]").arg(QDateTime::currentDateTime().toString(*dateFormatPtr));
prefixString.append(QString(" [%1]").arg(stringForLogType(type)));
QString prefixString = QString("[%1] [%2] [%3]").arg(QDateTime::currentDateTime().toString(*dateFormatPtr),
stringForLogType(type), context.category);
if (_shouldOutputProcessID) {
prefixString.append(QString(" [%1]").arg(QCoreApplication::instance()->applicationPid()));

View file

@ -1002,6 +1002,9 @@ function MyController(hand) {
this.secondaryPress = function(value) {
_this.rawSecondaryValue = value;
if (value > 0) {
_this.release();
}
};
this.updateSmoothedTrigger = function() {
@ -1849,7 +1852,7 @@ function MyController(hand) {
z: 0
};
var DROP_ANGLE = Math.PI / 6;
var DROP_ANGLE = Math.PI / 3;
var HYSTERESIS_FACTOR = 1.1;
var ROTATION_ENTER_THRESHOLD = Math.cos(DROP_ANGLE);
var ROTATION_EXIT_THRESHOLD = Math.cos(DROP_ANGLE * HYSTERESIS_FACTOR);

View file

@ -20,7 +20,7 @@
#include <CollisionRenderMeshCache.h>
#include <ShapeInfo.h> // for MAX_HULL_POINTS
#include "MeshUtil.cpp"
#include "MeshUtil.h"
QTEST_MAIN(CollisionRenderMeshCacheTests)

View file

@ -1,45 +0,0 @@
//
// MeshUtil.cpp
// tests/physics/src
//
// Created by Andrew Meadows 2016.07.14
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "MeshUtil.h"
#include<unordered_map>
// returns false if any edge has only one adjacent triangle
bool MeshUtil::isClosedManifold(const uint32_t* meshIndices, uint32_t numIndices) {
using EdgeList = std::unordered_map<MeshUtil::TriangleEdge, uint32_t>;
EdgeList edges;
// count the triangles for each edge
const uint32_t TRIANGLE_STRIDE = 3;
for (uint32_t i = 0; i < numIndices; i += TRIANGLE_STRIDE) {
MeshUtil::TriangleEdge edge;
// the triangles indices are stored in sequential order
for (uint32_t j = 0; j < 3; ++j) {
edge.setIndices(meshIndices[i + j], meshIndices[i + ((j + 1) % 3)]);
EdgeList::iterator edgeEntry = edges.find(edge);
if (edgeEntry == edges.end()) {
edges.insert(std::pair<MeshUtil::TriangleEdge, uint32_t>(edge, 1));
} else {
edgeEntry->second += 1;
}
}
}
// scan for outside edge
for (auto& edgeEntry : edges) {
if (edgeEntry.second == 1) {
return false;
}
}
return true;
}

View file

@ -42,8 +42,6 @@ private:
uint32_t _indexB { (uint32_t)(-1) };
};
bool isClosedManifold(const uint32_t* meshIndices, uint32_t numIndices);
} // MeshUtil namespace
namespace std {
@ -55,7 +53,39 @@ namespace std {
return hash<uint32_t>()((ab * (ab + 1)) / 2 + edge.getIndexB());
}
};
} // std namespace
namespace MeshUtil {

// Returns false if any edge has only one adjacent triangle (an open
// boundary), true when the triangle list forms a closed manifold.
// meshIndices holds consecutive index triples; numIndices is the total
// index count — assumed to be a multiple of 3 (TODO confirm precondition).
bool isClosedManifold(const uint32_t* meshIndices, uint32_t numIndices) {
using EdgeList = std::unordered_map<TriangleEdge, uint32_t>;
EdgeList edges;
// count the triangles for each edge
const uint32_t TRIANGLE_STRIDE = 3;
for (uint32_t i = 0; i < numIndices; i += TRIANGLE_STRIDE) {
TriangleEdge edge;
// the triangles indices are stored in sequential order
for (uint32_t j = 0; j < 3; ++j) {
edge.setIndices(meshIndices[i + j], meshIndices[i + ((j + 1) % 3)]);
EdgeList::iterator edgeEntry = edges.find(edge);
if (edgeEntry == edges.end()) {
// first triangle seen on this edge
edges.insert(std::pair<TriangleEdge, uint32_t>(edge, 1));
} else {
edgeEntry->second += 1;
}
}
}
// scan for outside edge
for (auto& edgeEntry : edges) {
if (edgeEntry.second == 1) {
// a single-triangle edge means the surface is open here
return false;
}
}
return true;
}
} // MeshUtil namespace
#endif // hifi_MeshUtil_h

View file

@ -42,7 +42,7 @@ void ShapeInfoTests::testHashFunctions() {
int testCount = 0;
int numCollisions = 0;
btClock timer;
for (int x = 1; x < numSteps && testCount < maxTests; ++x) {
float radiusX = (float)x * deltaLength;
@ -52,7 +52,7 @@ void ShapeInfoTests::testHashFunctions() {
DoubleHashKey key = info.getHash();
uint32_t* hashPtr = hashes.find(key.getHash());
if (hashPtr && *hashPtr == key.getHash2()) {
std::cout << testCount << " hash collision radiusX = " << radiusX
std::cout << testCount << " hash collision radiusX = " << radiusX
<< " h1 = 0x" << std::hex << key.getHash()
<< " h2 = 0x" << std::hex << key.getHash2()
<< std::endl;
@ -88,7 +88,7 @@ void ShapeInfoTests::testHashFunctions() {
key = info.getHash();
hashPtr = hashes.find(key.getHash());
if (hashPtr && *hashPtr == key.getHash2()) {
std::cout << testCount << " hash collision radiusX = " << radiusX << " radiusY = " << radiusY
std::cout << testCount << " hash collision radiusX = " << radiusX << " radiusY = " << radiusY
<< " h1 = 0x" << std::hex << key.getHash()
<< " h2 = 0x" << std::hex << key.getHash2()
<< std::endl;
@ -113,8 +113,8 @@ void ShapeInfoTests::testHashFunctions() {
DoubleHashKey key = info.getHash();
hashPtr = hashes.find(key.getHash());
if (hashPtr && *hashPtr == key.getHash2()) {
std::cout << testCount << " hash collision radiusX = " << radiusX
<< " radiusY = " << radiusY << " radiusZ = " << radiusZ
std::cout << testCount << " hash collision radiusX = " << radiusX
<< " radiusY = " << radiusY << " radiusZ = " << radiusZ
<< " h1 = 0x" << std::hex << key.getHash()
<< " h2 = 0x" << std::hex << key.getHash2()
<< std::endl;
@ -148,9 +148,9 @@ void ShapeInfoTests::testBoxShape() {
info.setBox(halfExtents);
DoubleHashKey key = info.getHash();
btCollisionShape* shape = ShapeFactory::createShapeFromInfo(info);
const btCollisionShape* shape = ShapeFactory::createShapeFromInfo(info);
QCOMPARE(shape != nullptr, true);
ShapeInfo otherInfo = info;
DoubleHashKey otherKey = otherInfo.getHash();
QCOMPARE(key.getHash(), otherKey.getHash());
@ -165,7 +165,7 @@ void ShapeInfoTests::testSphereShape() {
info.setSphere(radius);
DoubleHashKey key = info.getHash();
btCollisionShape* shape = ShapeFactory::createShapeFromInfo(info);
const btCollisionShape* shape = ShapeFactory::createShapeFromInfo(info);
QCOMPARE(shape != nullptr, true);
ShapeInfo otherInfo = info;

View file

@ -27,14 +27,14 @@ void ShapeManagerTests::testShapeAccounting() {
QCOMPARE(numReferences, 0);
// create one shape and verify we get a valid pointer
btCollisionShape* shape = shapeManager.getShape(info);
const btCollisionShape* shape = shapeManager.getShape(info);
QCOMPARE(shape != nullptr, true);
// verify number of shapes
QCOMPARE(shapeManager.getNumShapes(), 1);
// reference the shape again and verify that we get the same pointer
btCollisionShape* otherShape = shapeManager.getShape(info);
const btCollisionShape* otherShape = shapeManager.getShape(info);
QCOMPARE(otherShape, shape);
// verify number of references
@ -84,7 +84,7 @@ void ShapeManagerTests::testShapeAccounting() {
void ShapeManagerTests::addManyShapes() {
ShapeManager shapeManager;
QVector<btCollisionShape*> shapes;
QVector<const btCollisionShape*> shapes;
int numSizes = 100;
float startSize = 1.0f;
@ -96,7 +96,7 @@ void ShapeManagerTests::addManyShapes() {
float s = startSize + (float)i * deltaSize;
glm::vec3 scale(s, 1.23f + s, s - 0.573f);
info.setBox(0.5f * scale);
btCollisionShape* shape = shapeManager.getShape(info);
const btCollisionShape* shape = shapeManager.getShape(info);
shapes.push_back(shape);
QCOMPARE(shape != nullptr, true);
@ -114,14 +114,14 @@ void ShapeManagerTests::addManyShapes() {
// release each shape by pointer
for (int i = 0; i < numShapes; ++i) {
btCollisionShape* shape = shapes[i];
const btCollisionShape* shape = shapes[i];
bool success = shapeManager.releaseShape(shape);
QCOMPARE(success, true);
}
// verify zero references
for (int i = 0; i < numShapes; ++i) {
btCollisionShape* shape = shapes[i];
const btCollisionShape* shape = shapes[i];
int numReferences = shapeManager.getNumReferences(shape);
QCOMPARE(numReferences, 0);
}
@ -133,10 +133,10 @@ void ShapeManagerTests::addBoxShape() {
info.setBox(halfExtents);
ShapeManager shapeManager;
btCollisionShape* shape = shapeManager.getShape(info);
const btCollisionShape* shape = shapeManager.getShape(info);
ShapeInfo otherInfo = info;
btCollisionShape* otherShape = shapeManager.getShape(otherInfo);
const btCollisionShape* otherShape = shapeManager.getShape(otherInfo);
QCOMPARE(shape, otherShape);
}
@ -146,10 +146,10 @@ void ShapeManagerTests::addSphereShape() {
info.setSphere(radius);
ShapeManager shapeManager;
btCollisionShape* shape = shapeManager.getShape(info);
const btCollisionShape* shape = shapeManager.getShape(info);
ShapeInfo otherInfo = info;
btCollisionShape* otherShape = shapeManager.getShape(otherInfo);
const btCollisionShape* otherShape = shapeManager.getShape(otherInfo);
QCOMPARE(shape, otherShape);
}
@ -161,10 +161,10 @@ void ShapeManagerTests::addCylinderShape() {
info.setCylinder(radius, height);
ShapeManager shapeManager;
btCollisionShape* shape = shapeManager.getShape(info);
const btCollisionShape* shape = shapeManager.getShape(info);
ShapeInfo otherInfo = info;
btCollisionShape* otherShape = shapeManager.getShape(otherInfo);
const btCollisionShape* otherShape = shapeManager.getShape(otherInfo);
QCOMPARE(shape, otherShape);
*/
}
@ -177,10 +177,10 @@ void ShapeManagerTests::addCapsuleShape() {
info.setCapsule(radius, height);
ShapeManager shapeManager;
btCollisionShape* shape = shapeManager.getShape(info);
const btCollisionShape* shape = shapeManager.getShape(info);
ShapeInfo otherInfo = info;
btCollisionShape* otherShape = shapeManager.getShape(otherInfo);
const btCollisionShape* otherShape = shapeManager.getShape(otherInfo);
QCOMPARE(shape, otherShape);
*/
}
@ -219,14 +219,14 @@ void ShapeManagerTests::addCompoundShape() {
// create the shape
ShapeManager shapeManager;
btCollisionShape* shape = shapeManager.getShape(info);
const btCollisionShape* shape = shapeManager.getShape(info);
QVERIFY(shape != nullptr);
// verify the shape is correct type
QCOMPARE(shape->getShapeType(), (int)COMPOUND_SHAPE_PROXYTYPE);
// verify the shape has correct number of children
btCompoundShape* compoundShape = static_cast<btCompoundShape*>(shape);
const btCompoundShape* compoundShape = static_cast<const btCompoundShape*>(shape);
QCOMPARE(compoundShape->getNumChildShapes(), numHulls);
// verify manager has only one shape