diff --git a/BUILD.md b/BUILD.md index be30d35f21..d6e1603f37 100644 --- a/BUILD.md +++ b/BUILD.md @@ -96,7 +96,9 @@ Currently building on Windows has been tested using the following compilers: #####Windows SDK 7.1 -Whichever version of Visual Studio you use, first install [Microsoft Windows SDK for Windows 7 and .NET Framework 4](http://www.microsoft.com/en-us/download/details.aspx?id=8279). +Whichever version of Visual Studio you use, you will need [Microsoft Windows SDK for Windows 7 and .NET Framework 4](http://www.microsoft.com/en-us/download/details.aspx?id=8279). + +NOTE: If using Visual Studio C++ 2010 Express, you need to follow a specific install order. See below before installing the Windows SDK. ######Windows 8.1 You may have already downloaded the Windows 8 SDK (e.g. if you have previously installed Visual Studio 2013). If so, change CMAKE_PREFIX_PATH in %HIFI_DIR%\CMakeLists.txt to point to the Windows 8 SDK binaries. The default path is `C:\Program Files (x86)\Windows Kits\8.1\Lib\winv6.3\um\x86` @@ -109,6 +111,14 @@ The following patches/service packs are also required: * [VS2010 SP1](http://www.microsoft.com/en-us/download/details.aspx?id=23691) * [VS2010 SP1 Compiler Update](http://www.microsoft.com/en-us/download/details.aspx?id=4422) +IMPORTANT: Use the following install order: +1. Visual Studio C++ 2010 Express +2. Windows SDK 7.1 +3. VS2010 SP1 +4. VS2010 SP1 Compiler Update + +If you get an error while installing the VS2010 SP1 Compiler Update saying that you don't have the Windows SDK installed, then uninstall all of the above and start again in the correct order. + Some of the build instructions will ask you to start a Visual Studio Command Prompt. You should have a shortcut in your Start menu called "Open Visual Studio Command Prompt (2010)" which will do so. 
#####Visual Studio 2013 diff --git a/assignment-client/src/Agent.cpp b/assignment-client/src/Agent.cpp index 8754492827..0594d997bd 100644 --- a/assignment-client/src/Agent.cpp +++ b/assignment-client/src/Agent.cpp @@ -33,12 +33,17 @@ #include "Agent.h" +static const int RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES = 10; + Agent::Agent(const QByteArray& packet) : ThreadedAssignment(packet), _voxelEditSender(), _particleEditSender(), _entityEditSender(), - _receivedAudioStream(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO, 1, false, 1, 0, false), + _receivedAudioStream(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, + InboundAudioStream::Settings(0, false, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, false, + DEFAULT_WINDOW_STARVE_THRESHOLD, DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES, + DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION, false)), _avatarHashMap() { // be the parent of the script engine so it gets moved when we do @@ -148,7 +153,7 @@ void Agent::readPendingDatagrams() { _voxelViewer.processDatagram(mutablePacket, sourceNode); } - } else if (datagramPacketType == PacketTypeMixedAudio) { + } else if (datagramPacketType == PacketTypeMixedAudio || datagramPacketType == PacketTypeSilentAudioFrame) { _receivedAudioStream.parseData(receivedPacket); diff --git a/assignment-client/src/audio/AudioMixer.cpp b/assignment-client/src/audio/AudioMixer.cpp index 5f4c3827f2..51ef47b67d 100644 --- a/assignment-client/src/audio/AudioMixer.cpp +++ b/assignment-client/src/audio/AudioMixer.cpp @@ -69,12 +69,12 @@ void attachNewNodeDataToNode(Node *newNode) { } } -bool AudioMixer::_useDynamicJitterBuffers = false; -int AudioMixer::_staticDesiredJitterBufferFrames = 0; -int AudioMixer::_maxFramesOverDesired = 0; +InboundAudioStream::Settings AudioMixer::_streamSettings; bool AudioMixer::_printStreamStats = false; +bool AudioMixer::_enableFilter = false; + AudioMixer::AudioMixer(const QByteArray& packet) : ThreadedAssignment(packet), _trailingSleepRatio(1.0f), @@ -85,7 +85,12 @@ AudioMixer::AudioMixer(const QByteArray& packet) : _sumMixes(0), _sourceUnattenuatedZone(NULL), _listenerUnattenuatedZone(NULL), - _lastSendAudioStreamStatsTime(usecTimestampNow()) + _lastPerSecondCallbackTime(usecTimestampNow()), + _sendAudioStreamStats(false), + _datagramsReadPerCallStats(0, READ_DATAGRAMS_STATS_WINDOW_SECONDS), + _timeSpentPerCallStats(0, READ_DATAGRAMS_STATS_WINDOW_SECONDS), + _timeSpentPerHashMatchCallStats(0, READ_DATAGRAMS_STATS_WINDOW_SECONDS), + _readPendingCallsPerSecondStats(1, READ_DATAGRAMS_STATS_WINDOW_SECONDS) { } @@ -99,15 +104,44 @@ const float ATTENUATION_BEGINS_AT_DISTANCE = 1.0f; const float ATTENUATION_AMOUNT_PER_DOUBLING_IN_DISTANCE = 0.18f; const float ATTENUATION_EPSILON_DISTANCE = 0.1f; -void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream* streamToAdd, +int AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream* streamToAdd, AvatarAudioStream* listeningNodeStream) { + // If repetition with fade is enabled: + // If streamToAdd could not provide a frame (it was starved), then we'll mix its previously-mixed frame + // This is preferable to not mixing it at all since that's equivalent to inserting silence. + // Basically, we'll repeat that last frame until it has a frame to mix. Depending on how many times + // we've repeated that frame in a row, we'll gradually fade that repeated frame into silence. + // This improves the perceived quality of the audio slightly. 
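The comment above describes the repetition-with-fade strategy: when a stream is starved, its previously mixed frame is repeated, and each repetition is attenuated a little more until it fades to silence. The helper that computes that attenuation, `calculateRepeatedFrameFadeFactor()`, is not part of this hunk; the sketch below is only a hypothetical illustration of such a fade curve (a linear ramp from full volume to silence over an assumed number of repeats), with made-up constants, not the mixer's actual implementation.

```cpp
#include <algorithm>

// Hypothetical sketch only: the real calculateRepeatedFrameFadeFactor() lives elsewhere in the
// audio library and may use different constants or a non-linear curve.
float calculateRepeatedFrameFadeFactorSketch(int indexOfRepeat) {
    const int REPEATS_AT_FULL_VOLUME = 1;   // assumed: first repeat is mixed at full volume
    const int REPEATS_UNTIL_SILENCE = 10;   // assumed: fully faded out after this many repeats

    if (indexOfRepeat < REPEATS_AT_FULL_VOLUME) {
        return 1.0f;
    }
    // linear ramp from 1.0 down to 0.0 as the same frame keeps being repeated
    float progress = float(indexOfRepeat - REPEATS_AT_FULL_VOLUME)
        / float(REPEATS_UNTIL_SILENCE - REPEATS_AT_FULL_VOLUME);
    return std::max(0.0f, 1.0f - progress);
}
```

Whatever the exact curve, a returned factor of 0.0f means the repeated frame would contribute nothing to the mix, which is why the code below returns early and skips the stream in that case.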
+ + float repeatedFrameFadeFactor = 1.0f; + + if (!streamToAdd->lastPopSucceeded()) { + if (_streamSettings._repetitionWithFade && !streamToAdd->getLastPopOutput().isNull()) { + // reptition with fade is enabled, and we do have a valid previous frame to repeat. + // calculate its fade factor, which depends on how many times it's already been repeated. + repeatedFrameFadeFactor = calculateRepeatedFrameFadeFactor(streamToAdd->getConsecutiveNotMixedCount() - 1); + if (repeatedFrameFadeFactor == 0.0f) { + return 0; + } + } else { + return 0; + } + } + + // at this point, we know streamToAdd's last pop output is valid + + // if the frame we're about to mix is silent, bail + if (streamToAdd->getLastPopOutputLoudness() == 0.0f) { + return 0; + } + float bearingRelativeAngleToSource = 0.0f; float attenuationCoefficient = 1.0f; int numSamplesDelay = 0; float weakChannelAmplitudeRatio = 1.0f; bool shouldAttenuate = (streamToAdd != listeningNodeStream); - + if (shouldAttenuate) { // if the two stream pointers do not match then these are different streams @@ -122,7 +156,7 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream* if (streamToAdd->getLastPopOutputTrailingLoudness() / distanceBetween <= _minAudibilityThreshold) { // according to mixer performance we have decided this does not get to be mixed in // bail out - return; + return 0; } ++_sumMixes; @@ -222,12 +256,13 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream* int delayedChannelIndex = 0; const int SINGLE_STEREO_OFFSET = 2; + float attenuationAndFade = attenuationCoefficient * repeatedFrameFadeFactor; for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s += 4) { // setup the int16_t variables for the two sample sets - correctStreamSample[0] = streamPopOutput[s / 2] * attenuationCoefficient; - correctStreamSample[1] = streamPopOutput[(s / 2) + 1] * attenuationCoefficient; + correctStreamSample[0] = streamPopOutput[s / 2] * attenuationAndFade; + correctStreamSample[1] = streamPopOutput[(s / 2) + 1] * attenuationAndFade; delayedChannelIndex = s + (numSamplesDelay * 2) + delayedChannelOffset; @@ -243,7 +278,7 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream* if (numSamplesDelay > 0) { // if there was a sample delay for this stream, we need to pull samples prior to the popped output // to stick at the beginning - float attenuationAndWeakChannelRatio = attenuationCoefficient * weakChannelAmplitudeRatio; + float attenuationAndWeakChannelRatioAndFade = attenuationCoefficient * weakChannelAmplitudeRatio * repeatedFrameFadeFactor; AudioRingBuffer::ConstIterator delayStreamPopOutput = streamPopOutput - numSamplesDelay; // TODO: delayStreamPopOutput may be inside the last frame written if the ringbuffer is completely full @@ -251,7 +286,7 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream* for (int i = 0; i < numSamplesDelay; i++) { int parentIndex = i * 2; - _clientSamples[parentIndex + delayedChannelOffset] += *delayStreamPopOutput * attenuationAndWeakChannelRatio; + _clientSamples[parentIndex + delayedChannelOffset] += *delayStreamPopOutput * attenuationAndWeakChannelRatioAndFade; ++delayStreamPopOutput; } } @@ -262,41 +297,82 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream* attenuationCoefficient = 1.0f; } + float attenuationAndFade = attenuationCoefficient * repeatedFrameFadeFactor; + for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s++) { - _clientSamples[s] = 
glm::clamp(_clientSamples[s] + (int)(streamPopOutput[s / stereoDivider] * attenuationCoefficient), + _clientSamples[s] = glm::clamp(_clientSamples[s] + (int)(streamPopOutput[s / stereoDivider] * attenuationAndFade), MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE); } } + + if (_enableFilter && shouldAttenuate) { + + glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream->getPosition(); + if (relativePosition.z < 0) { // if the source is behind us + AudioFilterHSF1s& penumbraFilter = streamToAdd->getFilter(); + + // calculate penumbra angle + float headPenumbraAngle = glm::angle(glm::vec3(0.0f, 0.0f, -1.0f), + glm::normalize(relativePosition)); + + if (relativePosition.x < 0) { + headPenumbraAngle *= -1.0f; // [-pi/2,+pi/2] + } + + const float SQUARE_ROOT_OF_TWO_OVER_TWO = 0.71f; // half power + const float ONE_OVER_TWO_PI = 1.0f / TWO_PI; + const float FILTER_CUTOFF_FREQUENCY_HZ = 4000.0f; + + // calculate the updated gain, frequency and slope. this will be tuned over time. + const float penumbraFilterGainL = (-1.0f * ONE_OVER_TWO_PI * headPenumbraAngle) + SQUARE_ROOT_OF_TWO_OVER_TWO; + const float penumbraFilterGainR = (+1.0f * ONE_OVER_TWO_PI * headPenumbraAngle) + SQUARE_ROOT_OF_TWO_OVER_TWO; + const float penumbraFilterFrequency = FILTER_CUTOFF_FREQUENCY_HZ; // constant frequency + const float penumbraFilterSlope = SQUARE_ROOT_OF_TWO_OVER_TWO; // constant slope + + qDebug() << "penumbra gainL=" + << penumbraFilterGainL + << "penumbra gainR=" + << penumbraFilterGainR + << "penumbraAngle=" + << headPenumbraAngle; + + // set the gain on both filter channels + penumbraFilter.setParameters(0, 0, SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainL, penumbraFilterSlope); + penumbraFilter.setParameters(0, 1, SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainR, penumbraFilterSlope); + + penumbraFilter.render(_clientSamples, _clientSamples, NETWORK_BUFFER_LENGTH_SAMPLES_STEREO / 2); + } + } + + return 1; } -void AudioMixer::prepareMixForListeningNode(Node* node) { +int AudioMixer::prepareMixForListeningNode(Node* node) { AvatarAudioStream* nodeAudioStream = ((AudioMixerClientData*) node->getLinkedData())->getAvatarAudioStream(); - + // zero out the client mix for this node memset(_clientSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_STEREO); // loop through all other nodes that have sufficient audio to mix + int streamsMixed = 0; foreach (const SharedNodePointer& otherNode, NodeList::getInstance()->getNodeHash()) { if (otherNode->getLinkedData()) { - AudioMixerClientData* otherNodeClientData = (AudioMixerClientData*) otherNode->getLinkedData(); // enumerate the ARBs attached to the otherNode and add all that should be added to mix const QHash& otherNodeAudioStreams = otherNodeClientData->getAudioStreams(); QHash::ConstIterator i; - for (i = otherNodeAudioStreams.begin(); i != otherNodeAudioStreams.constEnd(); i++) { + for (i = otherNodeAudioStreams.constBegin(); i != otherNodeAudioStreams.constEnd(); i++) { PositionalAudioStream* otherNodeStream = i.value(); - - if ((*otherNode != *node || otherNodeStream->shouldLoopbackForNode()) - && otherNodeStream->lastPopSucceeded() - && otherNodeStream->getLastPopOutputTrailingLoudness() > 0.0f) { - - addStreamToMixForListeningNodeWithStream(otherNodeStream, nodeAudioStream); + + if (*otherNode != *node || otherNodeStream->shouldLoopbackForNode()) { + streamsMixed += addStreamToMixForListeningNodeWithStream(otherNodeStream, nodeAudioStream); } } } } + return streamsMixed; } void AudioMixer::readPendingDatagram(const QByteArray& 
receivedPacket, const HifiSockAddr& senderSockAddr) { @@ -332,7 +408,7 @@ void AudioMixer::readPendingDatagram(const QByteArray& receivedPacket, const Hif void AudioMixer::sendStatsPacket() { static QJsonObject statsObject; - statsObject["useDynamicJitterBuffers"] = _useDynamicJitterBuffers; + statsObject["useDynamicJitterBuffers"] = _streamSettings._dynamicJitterBuffers; statsObject["trailing_sleep_percentage"] = _trailingSleepRatio * 100.0f; statsObject["performance_throttling_ratio"] = _performanceThrottlingRatio; @@ -358,9 +434,42 @@ void AudioMixer::sendStatsPacket() { int sizeOfStats = 0; int TOO_BIG_FOR_MTU = 1200; // some extra space for JSONification + QString property = "readPendingDatagram_calls_stats"; + QString value = getReadPendingDatagramsCallsPerSecondsStatsString(); + statsObject2[qPrintable(property)] = value; + somethingToSend = true; + sizeOfStats += property.size() + value.size(); + + property = "readPendingDatagram_packets_per_call_stats"; + value = getReadPendingDatagramsPacketsPerCallStatsString(); + statsObject2[qPrintable(property)] = value; + somethingToSend = true; + sizeOfStats += property.size() + value.size(); + + property = "readPendingDatagram_packets_time_per_call_stats"; + value = getReadPendingDatagramsTimeStatsString(); + statsObject2[qPrintable(property)] = value; + somethingToSend = true; + sizeOfStats += property.size() + value.size(); + + property = "readPendingDatagram_hashmatch_time_per_call_stats"; + value = getReadPendingDatagramsHashMatchTimeStatsString(); + statsObject2[qPrintable(property)] = value; + somethingToSend = true; + sizeOfStats += property.size() + value.size(); + NodeList* nodeList = NodeList::getInstance(); int clientNumber = 0; foreach (const SharedNodePointer& node, nodeList->getNodeHash()) { + + // if we're too large, send the packet + if (sizeOfStats > TOO_BIG_FOR_MTU) { + nodeList->sendStatsToDomainServer(statsObject2); + sizeOfStats = 0; + statsObject2 = QJsonObject(); // clear it + somethingToSend = false; + } + clientNumber++; AudioMixerClientData* clientData = static_cast(node->getLinkedData()); if (clientData) { @@ -370,14 +479,6 @@ void AudioMixer::sendStatsPacket() { somethingToSend = true; sizeOfStats += property.size() + value.size(); } - - // if we're too large, send the packet - if (sizeOfStats > TOO_BIG_FOR_MTU) { - nodeList->sendStatsToDomainServer(statsObject2); - sizeOfStats = 0; - statsObject2 = QJsonObject(); // clear it - somethingToSend = false; - } } if (somethingToSend) { @@ -448,41 +549,81 @@ void AudioMixer::run() { if (settingsObject.contains(AUDIO_GROUP_KEY)) { QJsonObject audioGroupObject = settingsObject[AUDIO_GROUP_KEY].toObject(); - + // check the payload to see if we have asked for dynamicJitterBuffer support const QString DYNAMIC_JITTER_BUFFER_JSON_KEY = "A-dynamic-jitter-buffer"; - bool shouldUseDynamicJitterBuffers = audioGroupObject[DYNAMIC_JITTER_BUFFER_JSON_KEY].toBool(); - if (shouldUseDynamicJitterBuffers) { + _streamSettings._dynamicJitterBuffers = audioGroupObject[DYNAMIC_JITTER_BUFFER_JSON_KEY].toBool(); + if (_streamSettings._dynamicJitterBuffers) { qDebug() << "Enable dynamic jitter buffers."; - _useDynamicJitterBuffers = true; } else { qDebug() << "Dynamic jitter buffers disabled."; - _useDynamicJitterBuffers = false; } - + bool ok; - - const QString DESIRED_JITTER_BUFFER_FRAMES_KEY = "B-desired-jitter-buffer-frames"; - _staticDesiredJitterBufferFrames = audioGroupObject[DESIRED_JITTER_BUFFER_FRAMES_KEY].toString().toInt(&ok); + const QString DESIRED_JITTER_BUFFER_FRAMES_KEY = 
"B-static-desired-jitter-buffer-frames"; + _streamSettings._staticDesiredJitterBufferFrames = audioGroupObject[DESIRED_JITTER_BUFFER_FRAMES_KEY].toString().toInt(&ok); if (!ok) { - _staticDesiredJitterBufferFrames = DEFAULT_DESIRED_JITTER_BUFFER_FRAMES; + _streamSettings._staticDesiredJitterBufferFrames = DEFAULT_STATIC_DESIRED_JITTER_BUFFER_FRAMES; } - qDebug() << "Static desired jitter buffer frames:" << _staticDesiredJitterBufferFrames; + qDebug() << "Static desired jitter buffer frames:" << _streamSettings._staticDesiredJitterBufferFrames; const QString MAX_FRAMES_OVER_DESIRED_JSON_KEY = "C-max-frames-over-desired"; - _maxFramesOverDesired = audioGroupObject[MAX_FRAMES_OVER_DESIRED_JSON_KEY].toString().toInt(&ok); + _streamSettings._maxFramesOverDesired = audioGroupObject[MAX_FRAMES_OVER_DESIRED_JSON_KEY].toString().toInt(&ok); if (!ok) { - _maxFramesOverDesired = DEFAULT_MAX_FRAMES_OVER_DESIRED; + _streamSettings._maxFramesOverDesired = DEFAULT_MAX_FRAMES_OVER_DESIRED; + } + qDebug() << "Max frames over desired:" << _streamSettings._maxFramesOverDesired; + + const QString USE_STDEV_FOR_DESIRED_CALC_JSON_KEY = "D-use-stdev-for-desired-calc"; + _streamSettings._useStDevForJitterCalc = audioGroupObject[USE_STDEV_FOR_DESIRED_CALC_JSON_KEY].toBool(); + if (_streamSettings._useStDevForJitterCalc) { + qDebug() << "Using Philip's stdev method for jitter calc if dynamic jitter buffers enabled"; + } else { + qDebug() << "Using Fred's max-gap method for jitter calc if dynamic jitter buffers enabled"; } - qDebug() << "Max frames over desired:" << _maxFramesOverDesired; - const QString PRINT_STREAM_STATS_JSON_KEY = "H-print-stream-stats"; + const QString WINDOW_STARVE_THRESHOLD_JSON_KEY = "E-window-starve-threshold"; + _streamSettings._windowStarveThreshold = audioGroupObject[WINDOW_STARVE_THRESHOLD_JSON_KEY].toString().toInt(&ok); + if (!ok) { + _streamSettings._windowStarveThreshold = DEFAULT_WINDOW_STARVE_THRESHOLD; + } + qDebug() << "Window A starve threshold:" << _streamSettings._windowStarveThreshold; + + const QString WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES_JSON_KEY = "F-window-seconds-for-desired-calc-on-too-many-starves"; + _streamSettings._windowSecondsForDesiredCalcOnTooManyStarves = audioGroupObject[WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES_JSON_KEY].toString().toInt(&ok); + if (!ok) { + _streamSettings._windowSecondsForDesiredCalcOnTooManyStarves = DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES; + } + qDebug() << "Window A length:" << _streamSettings._windowSecondsForDesiredCalcOnTooManyStarves << "seconds"; + + const QString WINDOW_SECONDS_FOR_DESIRED_REDUCTION_JSON_KEY = "G-window-seconds-for-desired-reduction"; + _streamSettings._windowSecondsForDesiredReduction = audioGroupObject[WINDOW_SECONDS_FOR_DESIRED_REDUCTION_JSON_KEY].toString().toInt(&ok); + if (!ok) { + _streamSettings._windowSecondsForDesiredReduction = DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION; + } + qDebug() << "Window B length:" << _streamSettings._windowSecondsForDesiredReduction << "seconds"; + + const QString REPETITION_WITH_FADE_JSON_KEY = "H-repetition-with-fade"; + _streamSettings._repetitionWithFade = audioGroupObject[REPETITION_WITH_FADE_JSON_KEY].toBool(); + if (_streamSettings._repetitionWithFade) { + qDebug() << "Repetition with fade enabled"; + } else { + qDebug() << "Repetition with fade disabled"; + } + + const QString PRINT_STREAM_STATS_JSON_KEY = "I-print-stream-stats"; _printStreamStats = audioGroupObject[PRINT_STREAM_STATS_JSON_KEY].toBool(); if 
(_printStreamStats) { qDebug() << "Stream stats will be printed to stdout"; } - const QString UNATTENUATED_ZONE_KEY = "D-unattenuated-zone"; + const QString FILTER_KEY = "J-enable-filter"; + _enableFilter = audioGroupObject[FILTER_KEY].toBool(); + if (_enableFilter) { + qDebug() << "Filter enabled"; + } + + const QString UNATTENUATED_ZONE_KEY = "Z-unattenuated-zone"; QString unattenuatedZoneString = audioGroupObject[UNATTENUATED_ZONE_KEY].toString(); if (!unattenuatedZoneString.isEmpty()) { @@ -510,9 +651,8 @@ void AudioMixer::run() { int nextFrame = 0; QElapsedTimer timer; timer.start(); - - char* clientMixBuffer = new char[NETWORK_BUFFER_LENGTH_BYTES_STEREO + sizeof(quint16) - + numBytesForPacketHeaderGivenPacketType(PacketTypeMixedAudio)]; + + char clientMixBuffer[MAX_PACKET_SIZE]; int usecToSleep = BUFFER_SEND_INTERVAL_USECS; @@ -571,15 +711,13 @@ void AudioMixer::run() { if (!hasRatioChanged) { ++framesSinceCutoffEvent; } - - bool sendAudioStreamStats = false; - quint64 now = usecTimestampNow(); - if (now - _lastSendAudioStreamStatsTime > TOO_LONG_SINCE_LAST_SEND_AUDIO_STREAM_STATS) { - _lastSendAudioStreamStatsTime = now; - sendAudioStreamStats = true; - } - bool streamStatsPrinted = false; + quint64 now = usecTimestampNow(); + if (now - _lastPerSecondCallbackTime > USECS_PER_SECOND) { + perSecondActions(); + _lastPerSecondCallbackTime = now; + } + foreach (const SharedNodePointer& node, nodeList->getNodeHash()) { if (node->getLinkedData()) { AudioMixerClientData* nodeData = (AudioMixerClientData*)node->getLinkedData(); @@ -592,43 +730,52 @@ void AudioMixer::run() { if (node->getType() == NodeType::Agent && node->getActiveSocket() && nodeData->getAvatarAudioStream()) { - prepareMixForListeningNode(node.data()); + int streamsMixed = prepareMixForListeningNode(node.data()); - // pack header - int numBytesPacketHeader = populatePacketHeader(clientMixBuffer, PacketTypeMixedAudio); - char* dataAt = clientMixBuffer + numBytesPacketHeader; + char* dataAt; + if (streamsMixed > 0) { + // pack header + int numBytesPacketHeader = populatePacketHeader(clientMixBuffer, PacketTypeMixedAudio); + dataAt = clientMixBuffer + numBytesPacketHeader; - // pack sequence number - quint16 sequence = nodeData->getOutgoingSequenceNumber(); - memcpy(dataAt, &sequence, sizeof(quint16)); - dataAt += sizeof(quint16); + // pack sequence number + quint16 sequence = nodeData->getOutgoingSequenceNumber(); + memcpy(dataAt, &sequence, sizeof(quint16)); + dataAt += sizeof(quint16); - // pack mixed audio samples - memcpy(dataAt, _clientSamples, NETWORK_BUFFER_LENGTH_BYTES_STEREO); - dataAt += NETWORK_BUFFER_LENGTH_BYTES_STEREO; + // pack mixed audio samples + memcpy(dataAt, _clientSamples, NETWORK_BUFFER_LENGTH_BYTES_STEREO); + dataAt += NETWORK_BUFFER_LENGTH_BYTES_STEREO; + } else { + // pack header + int numBytesPacketHeader = populatePacketHeader(clientMixBuffer, PacketTypeSilentAudioFrame); + dataAt = clientMixBuffer + numBytesPacketHeader; + + // pack sequence number + quint16 sequence = nodeData->getOutgoingSequenceNumber(); + memcpy(dataAt, &sequence, sizeof(quint16)); + dataAt += sizeof(quint16); + + // pack number of silent audio samples + quint16 numSilentSamples = NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; + memcpy(dataAt, &numSilentSamples, sizeof(quint16)); + dataAt += sizeof(quint16); + } // send mixed audio packet nodeList->writeDatagram(clientMixBuffer, dataAt - clientMixBuffer, node); nodeData->incrementOutgoingMixedAudioSequenceNumber(); // send an audio stream stats packet if it's time - if 
(sendAudioStreamStats) { + if (_sendAudioStreamStats) { nodeData->sendAudioStreamStatsPackets(node); - - if (_printStreamStats) { - printf("\nStats for agent %s:\n", node->getUUID().toString().toLatin1().data()); - nodeData->printUpstreamDownstreamStats(); - streamStatsPrinted = true; - } + _sendAudioStreamStats = false; } ++_sumListeners; } } } - if (streamStatsPrinted) { - printf("\n----------------------------------------------------------------\n"); - } ++_numStatFrames; @@ -644,6 +791,90 @@ void AudioMixer::run() { usleep(usecToSleep); } } - - delete[] clientMixBuffer; +} + +void AudioMixer::perSecondActions() { + _sendAudioStreamStats = true; + + int callsLastSecond = _datagramsReadPerCallStats.getCurrentIntervalSamples(); + _readPendingCallsPerSecondStats.update(callsLastSecond); + + if (_printStreamStats) { + + printf("\n================================================================================\n\n"); + + printf(" readPendingDatagram() calls per second | avg: %.2f, avg_30s: %.2f, last_second: %d\n", + _readPendingCallsPerSecondStats.getAverage(), + _readPendingCallsPerSecondStats.getWindowAverage(), + callsLastSecond); + + printf(" Datagrams read per call | avg: %.2f, avg_30s: %.2f, last_second: %.2f\n", + _datagramsReadPerCallStats.getAverage(), + _datagramsReadPerCallStats.getWindowAverage(), + _datagramsReadPerCallStats.getCurrentIntervalAverage()); + + printf(" Usecs spent per readPendingDatagram() call | avg: %.2f, avg_30s: %.2f, last_second: %.2f\n", + _timeSpentPerCallStats.getAverage(), + _timeSpentPerCallStats.getWindowAverage(), + _timeSpentPerCallStats.getCurrentIntervalAverage()); + + printf(" Usecs spent per packetVersionAndHashMatch() call | avg: %.2f, avg_30s: %.2f, last_second: %.2f\n", + _timeSpentPerHashMatchCallStats.getAverage(), + _timeSpentPerHashMatchCallStats.getWindowAverage(), + _timeSpentPerHashMatchCallStats.getCurrentIntervalAverage()); + + double WINDOW_LENGTH_USECS = READ_DATAGRAMS_STATS_WINDOW_SECONDS * USECS_PER_SECOND; + + printf(" %% time spent in readPendingDatagram() calls | avg_30s: %.6f%%, last_second: %.6f%%\n", + _timeSpentPerCallStats.getWindowSum() / WINDOW_LENGTH_USECS * 100.0, + _timeSpentPerCallStats.getCurrentIntervalSum() / USECS_PER_SECOND * 100.0); + + printf("%% time spent in packetVersionAndHashMatch() calls: | avg_30s: %.6f%%, last_second: %.6f%%\n", + _timeSpentPerHashMatchCallStats.getWindowSum() / WINDOW_LENGTH_USECS * 100.0, + _timeSpentPerHashMatchCallStats.getCurrentIntervalSum() / USECS_PER_SECOND * 100.0); + + foreach(const SharedNodePointer& node, NodeList::getInstance()->getNodeHash()) { + if (node->getLinkedData()) { + AudioMixerClientData* nodeData = (AudioMixerClientData*)node->getLinkedData(); + + if (node->getType() == NodeType::Agent && node->getActiveSocket()) { + printf("\nStats for agent %s --------------------------------\n", + node->getUUID().toString().toLatin1().data()); + nodeData->printUpstreamDownstreamStats(); + } + } + } + } + + _datagramsReadPerCallStats.currentIntervalComplete(); + _timeSpentPerCallStats.currentIntervalComplete(); + _timeSpentPerHashMatchCallStats.currentIntervalComplete(); +} + +QString AudioMixer::getReadPendingDatagramsCallsPerSecondsStatsString() const { + QString result = "calls_per_sec_avg_30s: " + QString::number(_readPendingCallsPerSecondStats.getWindowAverage(), 'f', 2) + + " calls_last_sec: " + QString::number(_readPendingCallsPerSecondStats.getLastCompleteIntervalStats().getSum() + 0.5, 'f', 0); + return result; +} + +QString 
AudioMixer::getReadPendingDatagramsPacketsPerCallStatsString() const { + QString result = "pkts_per_call_avg_30s: " + QString::number(_datagramsReadPerCallStats.getWindowAverage(), 'f', 2) + + " pkts_per_call_avg_1s: " + QString::number(_datagramsReadPerCallStats.getLastCompleteIntervalStats().getAverage(), 'f', 2); + return result; +} + +QString AudioMixer::getReadPendingDatagramsTimeStatsString() const { + QString result = "usecs_per_call_avg_30s: " + QString::number(_timeSpentPerCallStats.getWindowAverage(), 'f', 2) + + " usecs_per_call_avg_1s: " + QString::number(_timeSpentPerCallStats.getLastCompleteIntervalStats().getAverage(), 'f', 2) + + " prct_time_in_call_30s: " + QString::number(_timeSpentPerCallStats.getWindowSum() / (READ_DATAGRAMS_STATS_WINDOW_SECONDS*USECS_PER_SECOND) * 100.0, 'f', 6) + "%" + + " prct_time_in_call_1s: " + QString::number(_timeSpentPerCallStats.getLastCompleteIntervalStats().getSum() / USECS_PER_SECOND * 100.0, 'f', 6) + "%"; + return result; +} + +QString AudioMixer::getReadPendingDatagramsHashMatchTimeStatsString() const { + QString result = "usecs_per_hashmatch_avg_30s: " + QString::number(_timeSpentPerHashMatchCallStats.getWindowAverage(), 'f', 2) + + " usecs_per_hashmatch_avg_1s: " + QString::number(_timeSpentPerHashMatchCallStats.getLastCompleteIntervalStats().getAverage(), 'f', 2) + + " prct_time_in_hashmatch_30s: " + QString::number(_timeSpentPerHashMatchCallStats.getWindowSum() / (READ_DATAGRAMS_STATS_WINDOW_SECONDS*USECS_PER_SECOND) * 100.0, 'f', 6) + "%" + + " prct_time_in_hashmatch_1s: " + QString::number(_timeSpentPerHashMatchCallStats.getLastCompleteIntervalStats().getSum() / USECS_PER_SECOND * 100.0, 'f', 6) + "%"; + return result; } diff --git a/assignment-client/src/audio/AudioMixer.h b/assignment-client/src/audio/AudioMixer.h index 2a4b93149c..83ce6195cc 100644 --- a/assignment-client/src/audio/AudioMixer.h +++ b/assignment-client/src/audio/AudioMixer.h @@ -21,7 +21,8 @@ class AvatarAudioStream; const int SAMPLE_PHASE_DELAY_AT_90 = 20; -const quint64 TOO_LONG_SINCE_LAST_SEND_AUDIO_STREAM_STATS = 1 * USECS_PER_SECOND; +const int READ_DATAGRAMS_STATS_WINDOW_SECONDS = 30; + /// Handles assignments of type AudioMixer - mixing streams of audio and re-distributing to various clients. 
class AudioMixer : public ThreadedAssignment { @@ -38,21 +39,26 @@ public slots: void sendStatsPacket(); - static bool getUseDynamicJitterBuffers() { return _useDynamicJitterBuffers; } - static int getStaticDesiredJitterBufferFrames() { return _staticDesiredJitterBufferFrames; } - static int getMaxFramesOverDesired() { return _maxFramesOverDesired; } - + static const InboundAudioStream::Settings& getStreamSettings() { return _streamSettings; } + private: /// adds one stream to the mix for a listening node - void addStreamToMixForListeningNodeWithStream(PositionalAudioStream* streamToAdd, + int addStreamToMixForListeningNodeWithStream(PositionalAudioStream* streamToAdd, AvatarAudioStream* listeningNodeStream); /// prepares and sends a mix to one Node - void prepareMixForListeningNode(Node* node); + int prepareMixForListeningNode(Node* node); // client samples capacity is larger than what will be sent to optimize mixing // we are MMX adding 4 samples at a time so we need client samples to have an extra 4 int16_t _clientSamples[NETWORK_BUFFER_LENGTH_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)]; + + void perSecondActions(); + + QString getReadPendingDatagramsCallsPerSecondsStatsString() const; + QString getReadPendingDatagramsPacketsPerCallStatsString() const; + QString getReadPendingDatagramsTimeStatsString() const; + QString getReadPendingDatagramsHashMatchTimeStatsString() const; float _trailingSleepRatio; float _minAudibilityThreshold; @@ -63,13 +69,21 @@ private: AABox* _sourceUnattenuatedZone; AABox* _listenerUnattenuatedZone; - static bool _useDynamicJitterBuffers; - static int _staticDesiredJitterBufferFrames; - static int _maxFramesOverDesired; + static InboundAudioStream::Settings _streamSettings; static bool _printStreamStats; + static bool _enableFilter; + + quint64 _lastPerSecondCallbackTime; - quint64 _lastSendAudioStreamStatsTime; + bool _sendAudioStreamStats; + + // stats + MovingMinMaxAvg _datagramsReadPerCallStats; // update with # of datagrams read for each readPendingDatagrams call + MovingMinMaxAvg _timeSpentPerCallStats; // update with usecs spent inside each readPendingDatagrams call + MovingMinMaxAvg _timeSpentPerHashMatchCallStats; // update with usecs spent inside each packetVersionAndHashMatch call + + MovingMinMaxAvg _readPendingCallsPerSecondStats; // update with # of readPendingDatagrams calls in the last second }; #endif // hifi_AudioMixer_h diff --git a/assignment-client/src/audio/AudioMixerClientData.cpp b/assignment-client/src/audio/AudioMixerClientData.cpp index 9a8a85c3d1..68ab7d74e1 100644 --- a/assignment-client/src/audio/AudioMixerClientData.cpp +++ b/assignment-client/src/audio/AudioMixerClientData.cpp @@ -74,9 +74,7 @@ int AudioMixerClientData::parseData(const QByteArray& packet) { quint8 channelFlag = *(reinterpret_cast(channelFlagAt)); bool isStereo = channelFlag == 1; - _audioStreams.insert(nullUUID, - matchingStream = new AvatarAudioStream(isStereo, AudioMixer::getUseDynamicJitterBuffers(), - AudioMixer::getStaticDesiredJitterBufferFrames(), AudioMixer::getMaxFramesOverDesired())); + _audioStreams.insert(nullUUID, matchingStream = new AvatarAudioStream(isStereo, AudioMixer::getStreamSettings())); } else { matchingStream = _audioStreams.value(nullUUID); } @@ -88,9 +86,8 @@ int AudioMixerClientData::parseData(const QByteArray& packet) { QUuid streamIdentifier = QUuid::fromRfc4122(packet.mid(bytesBeforeStreamIdentifier, NUM_BYTES_RFC4122_UUID)); if (!_audioStreams.contains(streamIdentifier)) { - _audioStreams.insert(streamIdentifier, - 
matchingStream = new InjectedAudioStream(streamIdentifier, AudioMixer::getUseDynamicJitterBuffers(), - AudioMixer::getStaticDesiredJitterBufferFrames(), AudioMixer::getMaxFramesOverDesired())); + // we don't have this injected stream yet, so add it + _audioStreams.insert(streamIdentifier, matchingStream = new InjectedAudioStream(streamIdentifier, AudioMixer::getStreamSettings())); } else { matchingStream = _audioStreams.value(streamIdentifier); } @@ -105,18 +102,15 @@ void AudioMixerClientData::checkBuffersBeforeFrameSend(AABox* checkSourceZone, A QHash::ConstIterator i; for (i = _audioStreams.constBegin(); i != _audioStreams.constEnd(); i++) { PositionalAudioStream* stream = i.value(); + if (stream->popFrames(1, true) > 0) { - // this is a ring buffer that is ready to go - - // calculate the trailing avg loudness for the next frame - // that would be mixed in - stream->updateLastPopOutputTrailingLoudness(); - - if (checkSourceZone && checkSourceZone->contains(stream->getPosition())) { - stream->setListenerUnattenuatedZone(listenerZone); - } else { - stream->setListenerUnattenuatedZone(NULL); - } + stream->updateLastPopOutputLoudnessAndTrailingLoudness(); + } + + if (checkSourceZone && checkSourceZone->contains(stream->getPosition())) { + stream->setListenerUnattenuatedZone(listenerZone); + } else { + stream->setListenerUnattenuatedZone(NULL); } } } @@ -185,7 +179,9 @@ void AudioMixerClientData::sendAudioStreamStatsPackets(const SharedNodePointer& // pack the calculated number of stream stats for (int i = 0; i < numStreamStatsToPack; i++) { - AudioStreamStats streamStats = audioStreamsIterator.value()->updateSeqHistoryAndGetAudioStreamStats(); + PositionalAudioStream* stream = audioStreamsIterator.value(); + stream->perSecondCallbackForUpdatingStats(); + AudioStreamStats streamStats = stream->getAudioStreamStats(); memcpy(dataAt, &streamStats, sizeof(AudioStreamStats)); dataAt += sizeof(AudioStreamStats); diff --git a/assignment-client/src/audio/AvatarAudioStream.cpp b/assignment-client/src/audio/AvatarAudioStream.cpp index fcb78d7a6c..c7534d0551 100644 --- a/assignment-client/src/audio/AvatarAudioStream.cpp +++ b/assignment-client/src/audio/AvatarAudioStream.cpp @@ -13,8 +13,8 @@ #include "AvatarAudioStream.h" -AvatarAudioStream::AvatarAudioStream(bool isStereo, bool dynamicJitterBuffer, int staticDesiredJitterBufferFrames, int maxFramesOverDesired) : - PositionalAudioStream(PositionalAudioStream::Microphone, isStereo, dynamicJitterBuffer, staticDesiredJitterBufferFrames, maxFramesOverDesired) +AvatarAudioStream::AvatarAudioStream(bool isStereo, const InboundAudioStream::Settings& settings) : + PositionalAudioStream(PositionalAudioStream::Microphone, isStereo, settings) { } @@ -38,26 +38,9 @@ int AvatarAudioStream::parseStreamProperties(PacketType type, const QByteArray& // read the positional data readBytes += parsePositionalData(packetAfterSeqNum.mid(readBytes)); - if (type == PacketTypeSilentAudioFrame) { - int16_t numSilentSamples; - memcpy(&numSilentSamples, packetAfterSeqNum.data() + readBytes, sizeof(int16_t)); - readBytes += sizeof(int16_t); - - numAudioSamples = numSilentSamples; - } else { - int numAudioBytes = packetAfterSeqNum.size() - readBytes; - numAudioSamples = numAudioBytes / sizeof(int16_t); - } - return readBytes; -} - -int AvatarAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples) { - int readBytes = 0; - if (type == PacketTypeSilentAudioFrame) { - writeDroppableSilentSamples(numAudioSamples); - } else { - // 
there is audio data to read - readBytes += _ringBuffer.writeData(packetAfterStreamProperties.data(), numAudioSamples * sizeof(int16_t)); - } + // calculate how many samples are in this packet + int numAudioBytes = packetAfterSeqNum.size() - readBytes; + numAudioSamples = numAudioBytes / sizeof(int16_t); + return readBytes; } diff --git a/assignment-client/src/audio/AvatarAudioStream.h b/assignment-client/src/audio/AvatarAudioStream.h index ebad4585e0..cc2ff1aca7 100644 --- a/assignment-client/src/audio/AvatarAudioStream.h +++ b/assignment-client/src/audio/AvatarAudioStream.h @@ -18,7 +18,7 @@ class AvatarAudioStream : public PositionalAudioStream { public: - AvatarAudioStream(bool isStereo, bool dynamicJitterBuffer, int staticDesiredJitterBufferFrames, int maxFramesOverDesired); + AvatarAudioStream(bool isStereo, const InboundAudioStream::Settings& settings); private: // disallow copying of AvatarAudioStream objects @@ -26,7 +26,6 @@ private: AvatarAudioStream& operator= (const AvatarAudioStream&); int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples); - int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples); }; #endif // hifi_AvatarAudioStream_h diff --git a/domain-server/resources/web/settings/describe.json b/domain-server/resources/web/settings/describe.json index 788a3ad551..fee7ff21fc 100644 --- a/domain-server/resources/web/settings/describe.json +++ b/domain-server/resources/web/settings/describe.json @@ -9,8 +9,8 @@ "help": "Dynamically buffer client audio based on perceived jitter in packet receipt timing", "default": false }, - "B-desired-jitter-buffer-frames": { - "label": "Desired Jitter Buffer Frames", + "B-static-desired-jitter-buffer-frames": { + "label": "Static Desired Jitter Buffer Frames", "help": "If dynamic jitter buffers is disabled, this determines the target number of frames maintained by the AudioMixer's jitter buffers", "placeholder": "1", "default": "1" @@ -21,18 +21,54 @@ "placeholder": "10", "default": "10" }, - "H-print-stream-stats": { + "D-use-stdev-for-desired-calc": { + "type": "checkbox", + "label": "Use Stdev for Desired Jitter Frames Calc:", + "help": "If checked, Philip's method (stdev of timegaps) is used to calculate desired jitter frames. Otherwise, Fred's method (max timegap) is used", + "default": false + }, + "E-window-starve-threshold": { + "label": "Window Starve Threshold", + "help": "If this many starves occur in an N-second window (N is the number in the next field), then the desired jitter frames will be re-evaluated using Window A.", + "placeholder": "3", + "default": "3" + }, + "F-window-seconds-for-desired-calc-on-too-many-starves": { + "label": "Timegaps Window (A) Seconds:", + "help": "Window A contains a history of timegaps. Its max timegap is used to re-evaluate the desired jitter frames when too many starves occur within it.", + "placeholder": "50", + "default": "50" + }, + "G-window-seconds-for-desired-reduction": { + "label": "Timegaps Window (B) Seconds:", + "help": "Window B contains a history of timegaps. 
Its max timegap is used as a ceiling for the desired jitter frames value.", + "placeholder": "10", + "default": "10" + }, + "H-repetition-with-fade": { + "type": "checkbox", + "label": "Repetition with Fade:", + "help": "If enabled, dropped frames and mixing during starves will repeat the last frame, eventually fading to silence", + "default": false + }, + "I-print-stream-stats": { "type": "checkbox", "label": "Print Stream Stats:", "help": "If enabled, audio upstream and downstream stats of each agent will be printed each second to stdout", "default": false }, - "D-unattenuated-zone": { + "Z-unattenuated-zone": { "label": "Unattenuated Zone", "help": "Boxes for source and listener (corner x, corner y, corner z, size x, size y, size z, corner x, corner y, corner z, size x, size y, size z)", "placeholder": "no zone", "default": "" + }, + "J-enable-filter": { + "type": "checkbox", + "label": "Enable Positional Filter", + "help": "If enabled, positional audio stream uses lowpass filter", + "default": false } } } -} \ No newline at end of file +} diff --git a/examples/PlayRecordingOnAC.js b/examples/PlayRecordingOnAC.js new file mode 100644 index 0000000000..a68e60a6fa --- /dev/null +++ b/examples/PlayRecordingOnAC.js @@ -0,0 +1,47 @@ +// +// PlayRecordingOnAC.js +// examples +// +// Created by Clément Brisset on 8/24/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. +// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + + +var filename = "http://your.recording.url"; +var playFromCurrentLocation = true; + +Avatar.faceModelURL = "http://public.highfidelity.io/models/heads/EvilPhilip_v7.fst"; +Avatar.skeletonModelURL = "http://public.highfidelity.io/models/skeletons/Philip_Carl_Body_A-Pose.fst"; + +// Set position here if playFromCurrentLocation is true +Avatar.position = { x:1, y: 1, z: 1 }; + +Agent.isAvatar = true; + +Avatar.loadRecording(filename); + +count = 300; // This is necessary to wait for the audio mixer to connect +function update(event) { + if (count > 0) { + count--; + return; + } + if (count == 0) { + Avatar.startPlaying(playFromCurrentLocation); + Avatar.play(); + Vec3.print("Playing from ", Avatar.position); + + count--; + } + + if (Avatar.isPlaying()) { + Avatar.play(); + } else { + Script.update.disconnect(update); + } +} + +Script.update.connect(update); diff --git a/examples/Recorder.js b/examples/Recorder.js new file mode 100644 index 0000000000..cf4b422926 --- /dev/null +++ b/examples/Recorder.js @@ -0,0 +1,205 @@ +// +// Recorder.js +// examples +// +// Created by Clément Brisset on 8/20/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. 
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +Script.include("toolBars.js"); + +var recordingFile = "recording.rec"; + +var windowDimensions = Controller.getViewportDimensions(); +var TOOL_ICON_URL = "http://s3-us-west-1.amazonaws.com/highfidelity-public/images/tools/"; +var ALPHA_ON = 1.0; +var ALPHA_OFF = 0.7; +var COLOR_ON = { red: 128, green: 0, blue: 0 }; +var COLOR_OFF = { red: 128, green: 128, blue: 128 }; +Tool.IMAGE_WIDTH *= 0.7; +Tool.IMAGE_HEIGHT *= 0.7; + +var toolBar = null; +var recordIcon; +var playIcon; +var saveIcon; +var loadIcon; +setupToolBar(); + +var timer = null; +setupTimer(); + +function setupToolBar() { + if (toolBar != null) { + print("Multiple calls to Recorder.js:setupToolBar()"); + return; + } + + toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL); + toolBar.setBack(COLOR_OFF, ALPHA_OFF); + + recordIcon = toolBar.addTool({ + imageURL: TOOL_ICON_URL + "record.svg", + width: Tool.IMAGE_WIDTH, + height: Tool.IMAGE_HEIGHT, + alpha: ALPHA_ON, + visible: true + }, false); + + playIcon = toolBar.addTool({ + imageURL: TOOL_ICON_URL + "play.svg", + width: Tool.IMAGE_WIDTH, + height: Tool.IMAGE_HEIGHT, + alpha: ALPHA_ON, + visible: true + }, false, false); + + saveIcon = toolBar.addTool({ + imageURL: TOOL_ICON_URL + "save.svg", + width: Tool.IMAGE_WIDTH, + height: Tool.IMAGE_HEIGHT, + alpha: ALPHA_ON, + visible: true + }, false, false); + + loadIcon = toolBar.addTool({ + imageURL: TOOL_ICON_URL + "load.svg", + width: Tool.IMAGE_WIDTH, + height: Tool.IMAGE_HEIGHT, + alpha: ALPHA_ON, + visible: true + }, false, false); +} + +function setupTimer() { + timer = Overlays.addOverlay("text", { + font: { size: 20 }, + text: (0.00).toFixed(3), + backgroundColor: COLOR_OFF, + x: 0, y: 0, + width: 100, + height: 100, + alpha: 1.0, + visible: true + }); +} + +function updateTimer() { + var text = ""; + if (MyAvatar.isRecording()) { + text = formatTime(MyAvatar.recorderElapsed()) + } else { + text = formatTime(MyAvatar.playerElapsed()) + " / " + + formatTime(MyAvatar.playerLength()); + } + + Overlays.editOverlay(timer, { + text: text + }) +} + +function formatTime(time) { + var MIN_PER_HOUR = 60; + var SEC_PER_MIN = 60; + var MSEC_PER_SEC = 1000; + + var hours = Math.floor(time / (MSEC_PER_SEC * SEC_PER_MIN * MIN_PER_HOUR)); + time -= hours * (MSEC_PER_SEC * SEC_PER_MIN * MIN_PER_HOUR); + + var minutes = Math.floor(time / (MSEC_PER_SEC * SEC_PER_MIN)); + time -= minutes * (MSEC_PER_SEC * SEC_PER_MIN); + + var seconds = Math.floor(time / MSEC_PER_SEC); + seconds = time / MSEC_PER_SEC; + + var text = ""; + text += (hours > 0) ? hours + ":" : + ""; + text += (minutes > 0) ? ((minutes < 10 && text != "") ? "0" : "") + minutes + ":" : + ""; + text += ((seconds < 10 && text != "") ? 
"0" : "") + seconds.toFixed(3); + return text; +} + +function moveUI() { + var relative = { x: 30, y: 90 }; + toolBar.move(relative.x, + windowDimensions.y - relative.y); + Overlays.editOverlay(timer, { + x: relative.x - 10, + y: windowDimensions.y - relative.y - 35, + width: 0, + height: 0 + }); +} + +function mousePressEvent(event) { + clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y }); + + print("Status: isPlaying=" + MyAvatar.isPlaying() + ", isRecording=" + MyAvatar.isRecording()); + + if (recordIcon === toolBar.clicked(clickedOverlay) && !MyAvatar.isPlaying()) { + if (!MyAvatar.isRecording()) { + MyAvatar.startRecording(); + toolBar.setBack(COLOR_ON, ALPHA_ON); + } else { + MyAvatar.stopRecording(); + MyAvatar.loadLastRecording(); + toolBar.setBack(COLOR_OFF, ALPHA_OFF); + } + } else if (playIcon === toolBar.clicked(clickedOverlay) && !MyAvatar.isRecording()) { + if (MyAvatar.isPlaying()) { + MyAvatar.stopPlaying(); + } else { + MyAvatar.setPlayFromCurrentLocation(true); + MyAvatar.setPlayerLoop(true); + MyAvatar.startPlaying(true); + } + } else if (saveIcon === toolBar.clicked(clickedOverlay)) { + if (!MyAvatar.isRecording()) { + recordingFile = Window.save("Save recording to file", ".", "*.rec"); + MyAvatar.saveRecording(recordingFile); + } + } else if (loadIcon === toolBar.clicked(clickedOverlay)) { + if (!MyAvatar.isRecording()) { + recordingFile = Window.browse("Load recorcding from file", ".", "*.rec"); + MyAvatar.loadRecording(recordingFile); + } + } else { + + } +} + +function update() { + var newDimensions = Controller.getViewportDimensions(); + if (windowDimensions.x != newDimensions.x || + windowDimensions.y != newDimensions.y) { + windowDimensions = newDimensions; + moveUI(); + } + + updateTimer(); +} + +function scriptEnding() { + if (MyAvatar.isRecording()) { + MyAvatar.stopRecording(); + } + if (MyAvatar.isPlaying()) { + MyAvatar.stopPlaying(); + } + toolBar.cleanup(); + Overlays.deleteOverlay(timer); +} + +Controller.mousePressEvent.connect(mousePressEvent); +Script.update.connect(update); +Script.scriptEnding.connect(scriptEnding); + +// Should be called last to put everything into position +moveUI(); + + diff --git a/examples/Test.js b/examples/Test.js index 36dee7bd90..612c56d10b 100644 --- a/examples/Test.js +++ b/examples/Test.js @@ -59,6 +59,13 @@ UnitTest.prototype.assertEquals = function(expected, actual, message) { } }; +UnitTest.prototype.assertContains = function (expected, actual, message) { + this.numAssertions++; + if (actual.indexOf(expected) == -1) { + throw new AssertionException(expected, actual, message); + } +}; + UnitTest.prototype.assertHasProperty = function(property, actual, message) { this.numAssertions++; if (actual[property] === undefined) { diff --git a/examples/editModels.js b/examples/editModels.js index 2f134d70bc..a63233809b 100644 --- a/examples/editModels.js +++ b/examples/editModels.js @@ -9,12 +9,12 @@ // // If using the hydras : // grab grab models with the triggers, you can then move the models around or scale them with both hands. -// You can switch mode using the bumpers so that you can move models roud more easily. +// You can switch mode using the bumpers so that you can move models around more easily. // // If using the mouse : // - left click lets you move the model in the plane facing you. -// If pressing shift, it will move on the horizontale plane it's in. -// - right click lets you rotate the model. z and x give you access to more axix of rotation while shift allows for finer control. 
+// If pressing shift, it will move on the horizontal plane it's in. +// - right click lets you rotate the model. z and x give access to more axes of rotation while shift provides finer control. // - left + right click lets you scale the model. // - you can press r while holding the model to reset its rotation // @@ -39,36 +39,1326 @@ var MAX_ANGULAR_SIZE = 45; var LEFT = 0; var RIGHT = 1; - var SPAWN_DISTANCE = 1; -var radiusDefault = 0.10; +var DEFAULT_RADIUS = 0.10; var modelURLs = [ - "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/Feisar_Ship.FBX", - "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/birarda/birarda_head.fbx", - "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/pug.fbx", - "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/newInvader16x16-large-purple.svo", - "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/minotaur/mino_full.fbx", - "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/Combat_tank_V01.FBX", - "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/orc.fbx", - "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/slimer.fbx", - ]; - -var toolBar; + "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/Feisar_Ship.FBX", + "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/birarda/birarda_head.fbx", + "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/pug.fbx", + "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/newInvader16x16-large-purple.svo", + "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/minotaur/mino_full.fbx", + "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/Combat_tank_V01.FBX", + "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/orc.fbx", + "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/slimer.fbx" + ]; var jointList = MyAvatar.getJointNames(); var mode = 0; +var isActive = false; + + +if (typeof String.prototype.fileName !== "function") { + String.prototype.fileName = function () { + return this.replace(/^(.*[\/\\])*/, ""); + }; +} + +if (typeof String.prototype.fileBase !== "function") { + String.prototype.fileBase = function () { + var filename = this.fileName(); + return filename.slice(0, filename.indexOf(".")); + }; +} + +if (typeof String.prototype.fileType !== "function") { + String.prototype.fileType = function () { + return this.slice(this.lastIndexOf(".") + 1); + }; +} + +if (typeof String.prototype.path !== "function") { + String.prototype.path = function () { + return this.replace(/[\\\/][^\\\/]*$/, ""); + }; +} + +if (typeof String.prototype.regExpEscape !== "function") { + String.prototype.regExpEscape = function () { + return this.replace(/([$\^.+*?|\\\/{}()\[\]])/g, '\\$1'); + }; +} + +if (typeof String.prototype.toArrayBuffer !== "function") { + String.prototype.toArrayBuffer = function () { + var length, + buffer, + view, + charCode, + charCodes, + i; + + charCodes = []; + + length = this.length; + for (i = 0; i < length; i += 1) { + charCode = this.charCodeAt(i); + if (charCode <= 255) { + charCodes.push(charCode); + } else { + charCodes.push(charCode / 256); + charCodes.push(charCode % 256); + } + } + + length = charCodes.length; + buffer = new ArrayBuffer(length); + view = new Uint8Array(buffer); + for (i = 0; i < length; i += 1) { + view[i] = charCodes[i]; + } + + return buffer; + }; +} + +if (typeof DataView.prototype.indexOf !== "function") { + DataView.prototype.indexOf = function (searchString, position) { + var searchLength = 
searchString.length, + byteArrayLength = this.byteLength, + maxSearchIndex = byteArrayLength - searchLength, + searchCharCodes = [], + found, + i, + j; + + searchCharCodes[searchLength] = 0; + for (j = 0; j < searchLength; j += 1) { + searchCharCodes[j] = searchString.charCodeAt(j); + } + + i = position; + found = false; + while (i < maxSearchIndex && !found) { + j = 0; + while (j < searchLength && this.getUint8(i + j) === searchCharCodes[j]) { + j += 1; + } + found = (j === searchLength); + i += 1; + } + + return found ? i - 1 : -1; + }; +} + +if (typeof DataView.prototype.string !== "function") { + DataView.prototype.string = function (start, length) { + var charCodes = [], + end, + i; + + if (start === undefined) { + start = 0; + } + if (length === undefined) { + length = this.length; + } + + end = start + length; + for (i = start; i < end; i += 1) { + charCodes.push(this.getUint8(i)); + } + + return String.fromCharCode.apply(String, charCodes); + }; +} + +var progressDialog = (function () { + var that = {}, + progressBackground, + progressMessage, + cancelButton, + displayed = false, + backgroundWidth = 300, + backgroundHeight = 100, + messageHeight = 32, + cancelWidth = 70, + cancelHeight = 32, + textColor = { red: 255, green: 255, blue: 255 }, + textBackground = { red: 52, green: 52, blue: 52 }, + backgroundUrl = toolIconUrl + "progress-background.svg", + windowDimensions; + + progressBackground = Overlays.addOverlay("image", { + width: backgroundWidth, + height: backgroundHeight, + imageURL: backgroundUrl, + alpha: 0.9, + visible: false + }); + + progressMessage = Overlays.addOverlay("text", { + width: backgroundWidth - 40, + height: messageHeight, + text: "", + textColor: textColor, + backgroundColor: textBackground, + alpha: 0.9, + visible: false + }); + + cancelButton = Overlays.addOverlay("text", { + width: cancelWidth, + height: cancelHeight, + text: "Cancel", + textColor: textColor, + backgroundColor: textBackground, + alpha: 0.9, + visible: false + }); + + function move() { + var progressX, + progressY; + + if (displayed) { + + if (windowDimensions.x === Window.innerWidth && windowDimensions.y === Window.innerHeight) { + return; + } + windowDimensions.x = Window.innerWidth; + windowDimensions.y = Window.innerHeight; + + progressX = (windowDimensions.x - backgroundWidth) / 2; // Center. + progressY = windowDimensions.y / 2 - backgroundHeight; // A little up from center. 
+ + Overlays.editOverlay(progressBackground, { x: progressX, y: progressY }); + Overlays.editOverlay(progressMessage, { x: progressX + 20, y: progressY + 15 }); + Overlays.editOverlay(cancelButton, { + x: progressX + backgroundWidth - cancelWidth - 20, + y: progressY + backgroundHeight - cancelHeight - 15 + }); + } + } + that.move = move; + + that.onCancel = undefined; + + function open(message) { + if (!displayed) { + windowDimensions = { x: 0, y : 0 }; + displayed = true; + move(); + Overlays.editOverlay(progressBackground, { visible: true }); + Overlays.editOverlay(progressMessage, { visible: true, text: message }); + Overlays.editOverlay(cancelButton, { visible: true }); + } else { + throw new Error("open() called on progressDialog when already open"); + } + } + that.open = open; + + function isOpen() { + return displayed; + } + that.isOpen = isOpen; + + function update(message) { + if (displayed) { + Overlays.editOverlay(progressMessage, { text: message }); + } else { + throw new Error("update() called on progressDialog when not open"); + } + } + that.update = update; + + function close() { + if (displayed) { + Overlays.editOverlay(cancelButton, { visible: false }); + Overlays.editOverlay(progressMessage, { visible: false }); + Overlays.editOverlay(progressBackground, { visible: false }); + displayed = false; + } else { + throw new Error("close() called on progressDialog when not open"); + } + } + that.close = close; + + function mousePressEvent(event) { + if (Overlays.getOverlayAtPoint({ x: event.x, y: event.y }) === cancelButton) { + if (typeof this.onCancel === "function") { + close(); + this.onCancel(); + } + return true; + } + return false; + } + that.mousePressEvent = mousePressEvent; + + function cleanup() { + Overlays.deleteOverlay(cancelButton); + Overlays.deleteOverlay(progressMessage); + Overlays.deleteOverlay(progressBackground); + } + that.cleanup = cleanup; + + return that; +}()); + +var httpMultiPart = (function () { + var that = {}, + parts, + byteLength, + boundaryString, + crlf; + + function clear() { + boundaryString = "--boundary_" + String(Uuid.generate()).slice(1, 36) + "="; + parts = []; + byteLength = 0; + crlf = ""; + } + that.clear = clear; + + function boundary() { + return boundaryString.slice(2); + } + that.boundary = boundary; + + function length() { + return byteLength; + } + that.length = length; + + function add(object) { + // - name, string + // - name, buffer + var buffer, + string, + stringBuffer, + compressedBuffer; + + if (object.name === undefined) { + + throw new Error("Item to add to HttpMultiPart must have a name"); + + } else if (object.string !== undefined) { + //--= + //Content-Disposition: form-data; name="model_name" + // + // + + string = crlf + boundaryString + "\r\n" + + "Content-Disposition: form-data; name=\"" + object.name + "\"\r\n" + + "\r\n" + + object.string; + buffer = string.toArrayBuffer(); + + } else if (object.buffer !== undefined) { + //--= + //Content-Disposition: form-data; name="fbx"; filename="" + //Content-Type: application/octet-stream + // + // + + string = crlf + boundaryString + "\r\n" + + "Content-Disposition: form-data; name=\"" + object.name + + "\"; filename=\"" + object.buffer.filename + "\"\r\n" + + "Content-Type: application/octet-stream\r\n" + + "\r\n"; + stringBuffer = string.toArrayBuffer(); + + compressedBuffer = object.buffer.buffer.compress(); + buffer = new Uint8Array(stringBuffer.byteLength + compressedBuffer.byteLength); + buffer.set(new Uint8Array(stringBuffer)); + buffer.set(new 
Uint8Array(compressedBuffer), stringBuffer.byteLength); + + } else { + + throw new Error("Item to add to HttpMultiPart not recognized"); + } + + byteLength += buffer.byteLength; + parts.push(buffer); + + crlf = "\r\n"; + + return true; + } + that.add = add; + + function response() { + var buffer, + index, + str, + i; + + str = crlf + boundaryString + "--\r\n"; + buffer = str.toArrayBuffer(); + byteLength += buffer.byteLength; + parts.push(buffer); + + buffer = new Uint8Array(byteLength); + index = 0; + for (i = 0; i < parts.length; i += 1) { + buffer.set(new Uint8Array(parts[i]), index); + index += parts[i].byteLength; + } + + return buffer; + } + that.response = response; + + clear(); + + return that; +}()); + +var modelUploader = (function () { + var that = {}, + modelFile, + modelName, + modelURL, + modelCallback, + isProcessing, + fstBuffer, + fbxBuffer, + //svoBuffer, + mapping, + geometry, + API_URL = "https://data.highfidelity.io/api/v1/models", + MODEL_URL = "http://public.highfidelity.io/models/content", + NAME_FIELD = "name", + SCALE_FIELD = "scale", + FILENAME_FIELD = "filename", + TEXDIR_FIELD = "texdir", + MAX_TEXTURE_SIZE = 1024; + + function info(message) { + if (progressDialog.isOpen()) { + progressDialog.update(message); + } else { + progressDialog.open(message); + } + print(message); + } + + function error(message) { + if (progressDialog.isOpen()) { + progressDialog.close(); + } + print(message); + Window.alert(message); + } + + function randomChar(length) { + var characters = "0123457689abcdefghijklmnopqrstuvwxyz", + string = "", + i; + + for (i = 0; i < length; i += 1) { + string += characters[Math.floor(Math.random() * 36)]; + } + + return string; + } + + function resetDataObjects() { + fstBuffer = null; + fbxBuffer = null; + //svoBuffer = null; + mapping = {}; + geometry = {}; + geometry.textures = []; + geometry.embedded = []; + } + + function readFile(filename) { + var url = "file:///" + filename, + req = new XMLHttpRequest(); + + req.open("GET", url, false); + req.responseType = "arraybuffer"; + req.send(); + if (req.status !== 200) { + error("Could not read file: " + filename + " : " + req.statusText); + return null; + } + + return { + filename: filename.fileName(), + buffer: req.response + }; + } + + function readMapping(buffer) { + var dv = new DataView(buffer.buffer), + lines, + line, + tokens, + i, + name, + value, + remainder, + existing; + + mapping = {}; // { name : value | name : { value : [remainder] } } + lines = dv.string(0, dv.byteLength).split(/\r\n|\r|\n/); + for (i = 0; i < lines.length; i += 1) { + line = lines[i].trim(); + if (line.length > 0 && line[0] !== "#") { + tokens = line.split(/\s*=\s*/); + if (tokens.length > 1) { + name = tokens[0]; + value = tokens[1]; + if (tokens.length > 2) { + remainder = tokens.slice(2, tokens.length).join(" = "); + } else { + remainder = null; + } + if (tokens.length === 2 && mapping[name] === undefined) { + mapping[name] = value; + } else { + if (mapping[name] === undefined) { + mapping[name] = {}; + + } else if (typeof mapping[name] !== "object") { + existing = mapping[name]; + mapping[name] = { existing : null }; + } + + if (mapping[name][value] === undefined) { + mapping[name][value] = []; + } + mapping[name][value].push(remainder); + } + } + } + } + } + + function writeMapping(buffer) { + var name, + value, + remainder, + i, + string = ""; + + for (name in mapping) { + if (mapping.hasOwnProperty(name)) { + if (typeof mapping[name] === "object") { + for (value in mapping[name]) { + if 
(mapping[name].hasOwnProperty(value)) { + remainder = mapping[name][value]; + if (remainder === null) { + string += (name + " = " + value + "\n"); + } else { + for (i = 0; i < remainder.length; i += 1) { + string += (name + " = " + value + " = " + remainder[i] + "\n"); + } + } + } + } + } else { + string += (name + " = " + mapping[name] + "\n"); + } + } + } + + buffer.buffer = string.toArrayBuffer(); + } + + function readGeometry(fbxBuffer) { + var textures, + view, + index, + EOF, + previousNodeFilename; + + // Reference: + // http://code.blender.org/index.php/2013/08/fbx-binary-file-format-specification/ + + textures = {}; + view = new DataView(fbxBuffer.buffer); + EOF = false; + + function parseBinaryFBX() { + var endOffset, + numProperties, + propertyListLength, + nameLength, + name, + filename; + + endOffset = view.getUint32(index, true); + numProperties = view.getUint32(index + 4, true); + propertyListLength = view.getUint32(index + 8, true); + nameLength = view.getUint8(index + 12); + index += 13; + + if (endOffset === 0) { + return; + } + if (endOffset < index || endOffset > view.byteLength) { + EOF = true; + return; + } + + name = view.string(index, nameLength).toLowerCase(); + index += nameLength; + + if (name === "content" && previousNodeFilename !== "") { + // Blender 2.71 exporter "embeds" external textures as empty binary blobs so ignore these + if (propertyListLength > 5) { + geometry.embedded.push(previousNodeFilename); + } + } + + if (name === "relativefilename") { + filename = view.string(index + 5, view.getUint32(index + 1, true)).fileName(); + if (!textures.hasOwnProperty(filename)) { + textures[filename] = ""; + geometry.textures.push(filename); + } + previousNodeFilename = filename; + } else { + previousNodeFilename = ""; + } + + index += (propertyListLength); + + while (index < endOffset && !EOF) { + parseBinaryFBX(); + } + } + + function readTextFBX() { + var line, + view, + viewLength, + charCode, + charCodes, + numCharCodes, + filename, + relativeFilename = "", + MAX_CHAR_CODES = 250; + + view = new Uint8Array(fbxBuffer.buffer); + viewLength = view.byteLength; + charCodes = []; + numCharCodes = 0; + + for (index = 0; index < viewLength; index += 1) { + charCode = view[index]; + if (charCode !== 9 && charCode !== 32) { + if (charCode === 10) { // EOL. Can ignore EOF. + line = String.fromCharCode.apply(String, charCodes).toLowerCase(); + // For embedded textures, "Content:" line immediately follows "RelativeFilename:" line. 
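// Layout sketch of the binary-FBX node records that parseBinaryFBX() above walks
// (following the Blender write-up referenced above; offsets are within one record):
//
//   bytes  0-3    endOffset           uint32 LE, absolute offset of the record's end
//   bytes  4-7    numProperties       uint32 LE
//   bytes  8-11   propertyListLength  uint32 LE, bytes of property data to skip
//   byte   12     nameLength          uint8
//   bytes  13-    name                nameLength ASCII bytes, e.g. "RelativeFilename"
//   ...           property data       propertyListLength bytes
//   ...           nested child records, repeated until endOffset is reached
//
// The file opens with a short header (the "Kaydara FBX Binary" magic string plus a
// version field) totalling 27 bytes, so the first record starts at byte 27 - hence the
// index = 27 seed used below.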
+ if (line.slice(0, 8) === "content:" && relativeFilename !== "") { + geometry.embedded.push(relativeFilename); + } + if (line.slice(0, 17) === "relativefilename:") { + filename = line.slice(line.indexOf("\""), line.lastIndexOf("\"") - line.length).fileName(); + if (!textures.hasOwnProperty(filename)) { + textures[filename] = ""; + geometry.textures.push(filename); + } + relativeFilename = filename; + } else { + relativeFilename = ""; + } + charCodes = []; + numCharCodes = 0; + } else { + if (numCharCodes < MAX_CHAR_CODES) { // Only interested in start of line + charCodes.push(charCode); + numCharCodes += 1; + } + } + } + } + } + + if (view.string(0, 18) === "Kaydara FBX Binary") { + previousNodeFilename = ""; + + index = 27; + while (index < view.byteLength - 39 && !EOF) { + parseBinaryFBX(); + } + + } else { + + readTextFBX(); + + } + } + + function readModel() { + var fbxFilename, + //svoFilename, + fileType; + + info("Reading model file"); + print("Model file: " + modelFile); + + if (modelFile.toLowerCase().fileType() === "fst") { + fstBuffer = readFile(modelFile); + if (fstBuffer === null) { + return false; + } + readMapping(fstBuffer); + fileType = mapping[FILENAME_FIELD].toLowerCase().fileType(); + if (mapping.hasOwnProperty(FILENAME_FIELD)) { + if (fileType === "fbx") { + fbxFilename = modelFile.path() + "\\" + mapping[FILENAME_FIELD]; + //} else if (fileType === "svo") { + // svoFilename = modelFile.path() + "\\" + mapping[FILENAME_FIELD]; + } else { + error("Unrecognized model type in FST file!"); + return false; + } + } else { + error("Model file name not found in FST file!"); + return false; + } + } else { + fstBuffer = { + filename: "Interface." + randomChar(6), // Simulate avatar model uploading behaviour + buffer: null + }; + + if (modelFile.toLowerCase().fileType() === "fbx") { + fbxFilename = modelFile; + mapping[FILENAME_FIELD] = modelFile.fileName(); + + //} else if (modelFile.toLowerCase().fileType() === "svo") { + // svoFilename = modelFile; + // mapping[FILENAME_FIELD] = modelFile.fileName(); + + } else { + error("Unrecognized file type: " + modelFile); + return false; + } + } + + if (!isProcessing) { return false; } + + if (fbxFilename) { + fbxBuffer = readFile(fbxFilename); + if (fbxBuffer === null) { + return false; + } + + if (!isProcessing) { return false; } + + readGeometry(fbxBuffer); + } + + //if (svoFilename) { + // svoBuffer = readFile(svoFilename); + // if (svoBuffer === null) { + // return false; + // } + //} + + // Add any missing basic mappings + if (!mapping.hasOwnProperty(NAME_FIELD)) { + mapping[NAME_FIELD] = modelFile.fileName().fileBase(); + } + if (!mapping.hasOwnProperty(TEXDIR_FIELD)) { + mapping[TEXDIR_FIELD] = "."; + } + if (!mapping.hasOwnProperty(SCALE_FIELD)) { + mapping[SCALE_FIELD] = 1.0; + } + + return true; + } + + function setProperties() { + var form = [], + directory, + displayAs, + validateAs; + + progressDialog.close(); + print("Setting model properties"); + + form.push({ label: "Name:", value: mapping[NAME_FIELD] }); + + directory = modelFile.path() + "/" + mapping[TEXDIR_FIELD]; + displayAs = new RegExp("^" + modelFile.path().regExpEscape() + "[\\\\\\\/](.*)"); + validateAs = new RegExp("^" + modelFile.path().regExpEscape() + "([\\\\\\\/].*)?"); + + form.push({ + label: "Texture directory:", + directory: modelFile.path() + "/" + mapping[TEXDIR_FIELD], + title: "Choose Texture Directory", + displayAs: displayAs, + validateAs: validateAs, + errorMessage: "Texture directory must be subdirectory of the model directory." 
+ }); + + form.push({ button: "Cancel" }); + + if (!Window.form("Set Model Properties", form)) { + print("User cancelled uploading model"); + return false; + } + + mapping[NAME_FIELD] = form[0].value; + mapping[TEXDIR_FIELD] = form[1].directory.slice(modelFile.path().length + 1); + if (mapping[TEXDIR_FIELD] === "") { + mapping[TEXDIR_FIELD] = "."; + } + + writeMapping(fstBuffer); + + return true; + } + + function createHttpMessage(callback) { + var multiparts = [], + lodCount, + lodFile, + lodBuffer, + textureBuffer, + textureSourceFormat, + textureTargetFormat, + embeddedTextures, + i; + + info("Preparing to send model"); + + // Model name + if (mapping.hasOwnProperty(NAME_FIELD)) { + multiparts.push({ + name : "model_name", + string : mapping[NAME_FIELD] + }); + } else { + error("Model name is missing"); + httpMultiPart.clear(); + return; + } + + // FST file + if (fstBuffer) { + multiparts.push({ + name : "fst", + buffer: fstBuffer + }); + } + + // FBX file + if (fbxBuffer) { + multiparts.push({ + name : "fbx", + buffer: fbxBuffer + }); + } + + // SVO file + //if (svoBuffer) { + // multiparts.push({ + // name : "svo", + // buffer: svoBuffer + // }); + //} + + // LOD files + lodCount = 0; + for (lodFile in mapping.lod) { + if (mapping.lod.hasOwnProperty(lodFile)) { + lodBuffer = readFile(modelFile.path() + "\/" + lodFile); + if (lodBuffer === null) { + return; + } + multiparts.push({ + name: "lod" + lodCount, + buffer: lodBuffer + }); + lodCount += 1; + } + if (!isProcessing) { return; } + } + + // Textures + embeddedTextures = "|" + geometry.embedded.join("|") + "|"; + for (i = 0; i < geometry.textures.length; i += 1) { + if (embeddedTextures.indexOf("|" + geometry.textures[i].fileName() + "|") === -1) { + textureBuffer = readFile(modelFile.path() + "\/" + + (mapping[TEXDIR_FIELD] !== "." ? mapping[TEXDIR_FIELD] + "\/" : "") + + geometry.textures[i]); + if (textureBuffer === null) { + return; + } + + textureSourceFormat = geometry.textures[i].fileType().toLowerCase(); + textureTargetFormat = (textureSourceFormat === "jpg" ? 
"jpg" : "png"); + textureBuffer.buffer = + textureBuffer.buffer.recodeImage(textureSourceFormat, textureTargetFormat, MAX_TEXTURE_SIZE); + textureBuffer.filename = textureBuffer.filename.slice(0, -textureSourceFormat.length) + textureTargetFormat; + + multiparts.push({ + name: "texture" + i, + buffer: textureBuffer + }); + } + + if (!isProcessing) { return; } + } + + // Model category + multiparts.push({ + name : "model_category", + string : "content" + }); + + // Create HTTP message + httpMultiPart.clear(); + Script.setTimeout(function addMultipart() { + var multipart = multiparts.shift(); + httpMultiPart.add(multipart); + + if (!isProcessing) { return; } + + if (multiparts.length > 0) { + Script.setTimeout(addMultipart, 25); + } else { + callback(); + } + }, 25); + } + + function sendToHighFidelity() { + var req, + uploadedChecks, + HTTP_GET_TIMEOUT = 60, // 1 minute + HTTP_SEND_TIMEOUT = 900, // 15 minutes + UPLOADED_CHECKS = 30, + CHECK_UPLOADED_TIMEOUT = 1, // 1 second + handleCheckUploadedResponses, + handleUploadModelResponses, + handleRequestUploadResponses; + + function uploadTimedOut() { + error("Model upload failed: Internet request timed out!"); + } + + function debugResponse() { + print("req.errorCode = " + req.errorCode); + print("req.readyState = " + req.readyState); + print("req.status = " + req.status); + print("req.statusText = " + req.statusText); + print("req.responseType = " + req.responseType); + print("req.responseText = " + req.responseText); + print("req.response = " + req.response); + print("req.getAllResponseHeaders() = " + req.getAllResponseHeaders()); + } + + function checkUploaded() { + if (!isProcessing) { return; } + + info("Checking uploaded model"); + + req = new XMLHttpRequest(); + req.open("HEAD", modelURL, true); + req.timeout = HTTP_GET_TIMEOUT * 1000; + req.onreadystatechange = handleCheckUploadedResponses; + req.ontimeout = uploadTimedOut; + req.send(); + } + + handleCheckUploadedResponses = function () { + //debugResponse(); + if (req.readyState === req.DONE) { + if (req.status === 200) { + // Note: Unlike avatar models, for content models we don't need to refresh texture cache. 
+ print("Model uploaded: " + modelURL); + progressDialog.close(); + if (Window.confirm("Your model has been uploaded as: " + modelURL + "\nDo you want to rez it?")) { + modelCallback(modelURL); + } + } else if (req.status === 404) { + if (uploadedChecks > 0) { + uploadedChecks -= 1; + Script.setTimeout(checkUploaded, CHECK_UPLOADED_TIMEOUT * 1000); + } else { + print("Error: " + req.status + " " + req.statusText); + error("We could not verify that your model was successfully uploaded but it may have been at: " + + modelURL); + } + } else { + print("Error: " + req.status + " " + req.statusText); + error("There was a problem with your upload, please try again later."); + } + } + }; + + function uploadModel(method) { + var url; + + if (!isProcessing) { return; } + + req = new XMLHttpRequest(); + if (method === "PUT") { + url = API_URL + "\/" + modelName; + req.open("PUT", url, true); //print("PUT " + url); + } else { + url = API_URL; + req.open("POST", url, true); //print("POST " + url); + } + req.setRequestHeader("Content-Type", "multipart/form-data; boundary=\"" + httpMultiPart.boundary() + "\""); + req.timeout = HTTP_SEND_TIMEOUT * 1000; + req.onreadystatechange = handleUploadModelResponses; + req.ontimeout = uploadTimedOut; + req.send(httpMultiPart.response().buffer); + } + + handleUploadModelResponses = function () { + //debugResponse(); + if (req.readyState === req.DONE) { + if (req.status === 200) { + uploadedChecks = UPLOADED_CHECKS; + checkUploaded(); + } else { + print("Error: " + req.status + " " + req.statusText); + error("There was a problem with your upload, please try again later."); + } + } + }; + + function requestUpload() { + var url; + + if (!isProcessing) { return; } + + url = API_URL + "\/" + modelName; // XMLHttpRequest automatically handles authorization of API requests. + req = new XMLHttpRequest(); + req.open("GET", url, true); //print("GET " + url); + req.responseType = "json"; + req.timeout = HTTP_GET_TIMEOUT * 1000; + req.onreadystatechange = handleRequestUploadResponses; + req.ontimeout = uploadTimedOut; + req.send(); + } + + handleRequestUploadResponses = function () { + var response; + + //debugResponse(); + if (req.readyState === req.DONE) { + if (req.status === 200) { + if (req.responseType === "json") { + response = JSON.parse(req.responseText); + if (response.status === "success") { + if (response.exists === false) { + uploadModel("POST"); + } else if (response.can_update === true) { + uploadModel("PUT"); + } else { + error("This model file already exists and is owned by someone else!"); + } + return; + } + } + } else { + print("Error: " + req.status + " " + req.statusText); + } + error("Model upload failed! 
Something went wrong at the data server."); + } + }; + + info("Sending model to High Fidelity"); + + requestUpload(); + } + + that.upload = function (file, callback) { + + modelFile = file; + modelCallback = callback; + + isProcessing = true; + + progressDialog.onCancel = function () { + print("User cancelled uploading model"); + isProcessing = false; + }; + + resetDataObjects(); + + if (readModel()) { + if (setProperties()) { + modelName = mapping[NAME_FIELD]; + modelURL = MODEL_URL + "\/" + mapping[NAME_FIELD] + ".fst"; // All models are uploaded as an FST + + createHttpMessage(sendToHighFidelity); + } + } + + resetDataObjects(); + }; + + return that; +}()); + +var toolBar = (function () { + var that = {}, + toolBar, + activeButton, + newModelButton, + newCubeButton, + newSphereButton, + browseModelsButton, + loadURLMenuItem, + loadFileMenuItem, + menuItemWidth = 90, + menuItemOffset, + menuItemHeight, + menuItemMargin = 5, + menuTextColor = { red: 255, green: 255, blue: 255 }, + menuBackgoundColor = { red: 18, green: 66, blue: 66 }; + + function initialize() { + toolBar = new ToolBar(0, 0, ToolBar.VERTICAL); + + activeButton = toolBar.addTool({ + imageURL: toolIconUrl + "models-tool.svg", + subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT }, + width: toolWidth, + height: toolHeight, + alpha: 0.9, + visible: true + }, true, false); + + newModelButton = toolBar.addTool({ + imageURL: toolIconUrl + "add-model-tool.svg", + subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT }, + width: toolWidth, + height: toolHeight, + alpha: 0.9, + visible: true + }, true, false); + + browseModelsButton = toolBar.addTool({ + imageURL: toolIconUrl + "list-icon.png", + width: toolWidth, + height: toolHeight, + alpha: 0.7, + visible: true + }); + + menuItemOffset = toolBar.height / 3 + 2; + menuItemHeight = Tool.IMAGE_HEIGHT / 2 - 2; + + loadURLMenuItem = Overlays.addOverlay("text", { + x: newModelButton.x - menuItemWidth, + y: newModelButton.y + menuItemOffset, + width: menuItemWidth, + height: menuItemHeight, + backgroundColor: menuBackgoundColor, + topMargin: menuItemMargin, + text: "Model URL", + alpha: 0.9, + visible: false + }); + + loadFileMenuItem = Overlays.addOverlay("text", { + x: newModelButton.x - menuItemWidth, + y: newModelButton.y + menuItemOffset + menuItemHeight, + width: menuItemWidth, + height: menuItemHeight, + backgroundColor: menuBackgoundColor, + topMargin: menuItemMargin, + text: "Model File", + alpha: 0.9, + visible: false + }); + + newCubeButton = toolBar.addTool({ + imageURL: toolIconUrl + "add-cube.svg", + subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT }, + width: toolWidth, + height: toolHeight, + alpha: 0.9, + visible: true + }); + + newSphereButton = toolBar.addTool({ + imageURL: toolIconUrl + "add-sphere.svg", + subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT }, + width: toolWidth, + height: toolHeight, + alpha: 0.9, + visible: true + }); + + } + + function toggleNewModelButton(active) { + if (active === undefined) { + active = !toolBar.toolSelected(newModelButton); + } + toolBar.selectTool(newModelButton, active); + + Overlays.editOverlay(loadURLMenuItem, { visible: active }); + Overlays.editOverlay(loadFileMenuItem, { visible: active }); + } + + function addModel(url) { + var position; + + position = Vec3.sum(MyAvatar.position, Vec3.multiply(Quat.getFront(MyAvatar.orientation), SPAWN_DISTANCE)); + + if (position.x 
> 0 && position.y > 0 && position.z > 0) { + Entities.addEntity({ + type: "Model", + position: position, + radius: DEFAULT_RADIUS, + modelURL: url + }); + print("Model added: " + url); + } else { + print("Can't add model: Model would be out of bounds."); + } + } + + that.move = function () { + var newViewPort, + toolsX, + toolsY; + + newViewPort = Controller.getViewportDimensions(); + + if (toolBar === undefined) { + initialize(); + + } else if (windowDimensions.x === newViewPort.x && + windowDimensions.y === newViewPort.y) { + return; + } + + windowDimensions = newViewPort; + toolsX = windowDimensions.x - 8 - toolBar.width; + toolsY = (windowDimensions.y - toolBar.height) / 2; + + toolBar.move(toolsX, toolsY); + + Overlays.editOverlay(loadURLMenuItem, { x: toolsX - menuItemWidth, y: toolsY + menuItemOffset }); + Overlays.editOverlay(loadFileMenuItem, { x: toolsX - menuItemWidth, y: toolsY + menuItemOffset + menuItemHeight }); + }; + + that.mousePressEvent = function (event) { + var clickedOverlay, + url, + file; + + clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y }); + + if (activeButton === toolBar.clicked(clickedOverlay)) { + isActive = !isActive; + return true; + } + + if (newModelButton === toolBar.clicked(clickedOverlay)) { + toggleNewModelButton(); + return true; + } + + if (clickedOverlay === loadURLMenuItem) { + toggleNewModelButton(false); + url = Window.prompt("Model URL", modelURLs[Math.floor(Math.random() * modelURLs.length)]); + if (url !== null && url !== "") { + addModel(url); + } + return true; + } + + if (clickedOverlay === loadFileMenuItem) { + toggleNewModelButton(false); + + // TODO BUG: this is bug, if the user has never uploaded a model, this will throw an JS exception + file = Window.browse("Select your model file ...", + Settings.getValue("LastModelUploadLocation").path(), + "Model files (*.fst *.fbx)"); + //"Model files (*.fst *.fbx *.svo)"); + if (file !== null) { + Settings.setValue("LastModelUploadLocation", file); + modelUploader.upload(file, addModel); + } + return true; + } + + if (browseModelsButton === toolBar.clicked(clickedOverlay)) { + toggleNewModelButton(false); + url = Window.s3Browse(".*(fbx|FBX)"); + if (url !== null && url !== "") { + addModel(url); + } + return true; + } + + if (newCubeButton === toolBar.clicked(clickedOverlay)) { + var position = Vec3.sum(MyAvatar.position, Vec3.multiply(Quat.getFront(MyAvatar.orientation), SPAWN_DISTANCE)); + + if (position.x > 0 && position.y > 0 && position.z > 0) { + Entities.addEntity({ + type: "Box", + position: position, + radius: DEFAULT_RADIUS, + color: { red: 255, green: 0, blue: 0 } + }); + } else { + print("Can't create box: Box would be out of bounds."); + } + return true; + } + + if (newSphereButton === toolBar.clicked(clickedOverlay)) { + var position = Vec3.sum(MyAvatar.position, Vec3.multiply(Quat.getFront(MyAvatar.orientation), SPAWN_DISTANCE)); + + if (position.x > 0 && position.y > 0 && position.z > 0) { + Entities.addEntity({ + type: "Sphere", + position: position, + radius: DEFAULT_RADIUS, + color: { red: 255, green: 0, blue: 0 } + }); + } else { + print("Can't create box: Box would be out of bounds."); + } + return true; + } + + + return false; + }; + + that.cleanup = function () { + toolBar.cleanup(); + Overlays.deleteOverlay(loadURLMenuItem); + Overlays.deleteOverlay(loadFileMenuItem); + }; + + return that; +}()); + var exportMenu = null; -var ExportMenu = function(opts) { +var ExportMenu = function (opts) { var self = this; var windowDimensions = 
Controller.getViewportDimensions(); var pos = { x: windowDimensions.x / 2, y: windowDimensions.y - 100 }; - this._onClose = opts.onClose || function() {}; + this._onClose = opts.onClose || function () { }; this._position = { x: 0.0, y: 0.0, z: 0.0 }; this._scale = 1.0; @@ -82,7 +1372,7 @@ var ExportMenu = function(opts) { var margin = 4; var height = 30; var outerHeight = height + (2 * margin); - var buttonColor = { red: 128, green: 128, blue: 128}; + var buttonColor = { red: 128, green: 128, blue: 128 }; var SCALE_MINUS = scaleWidth * 40.0 / 100.0; var SCALE_PLUS = scaleWidth * 63.0 / 100.0; @@ -91,7 +1381,7 @@ var ExportMenu = function(opts) { var offset = fullWidth / 2; pos.x -= offset; - var background= Overlays.addOverlay("text", { + var background = Overlays.addOverlay("text", { x: pos.x, y: pos.y, opacity: 1, @@ -125,7 +1415,7 @@ var ExportMenu = function(opts) { y: pos.y + margin, width: scaleWidth, height: height, - subImage: { x: 0, y: 3, width: 144, height: height}, + subImage: { x: 0, y: 3, width: 144, height: height }, imageURL: toolIconUrl + "voxel-size-selector.svg", alpha: 0.9, }); @@ -157,16 +1447,16 @@ var ExportMenu = function(opts) { }); var voxelPreview = Overlays.addOverlay("cube", { - position: { x: 0, y: 0, z: 0}, + position: { x: 0, y: 0, z: 0 }, size: this._scale, - color: { red: 255, green: 255, blue: 0}, + color: { red: 255, green: 255, blue: 0 }, alpha: 1, solid: false, visible: true, lineWidth: 4 }); - this.parsePosition = function(str) { + this.parsePosition = function (str) { var parts = str.split(','); if (parts.length == 3) { var x = parseFloat(parts[0]); @@ -179,7 +1469,7 @@ var ExportMenu = function(opts) { return null; }; - this.showPositionPrompt = function() { + this.showPositionPrompt = function () { var positionStr = self._position.x + ", " + self._position.y + ", " + self._position.z; while (1) { positionStr = Window.prompt("Position to export form:", positionStr); @@ -195,17 +1485,17 @@ var ExportMenu = function(opts) { } }; - this.setScale = function(scale) { + this.setScale = function (scale) { self._scale = Math.min(maxScale, Math.max(minScale, scale)); Overlays.editOverlay(scaleView, { text: self._scale }); Overlays.editOverlay(voxelPreview, { size: self._scale }); } - this.decreaseScale = function() { + this.decreaseScale = function () { self.setScale(self._scale /= 2); } - this.increaseScale = function() { + this.increaseScale = function () { self.setScale(self._scale *= 2); } @@ -225,11 +1515,11 @@ var ExportMenu = function(opts) { self.close(); }; - this.getPosition = function() { + this.getPosition = function () { return self._position; }; - this.setPosition = function(x, y, z) { + this.setPosition = function (x, y, z) { self._position = { x: x, y: y, z: z }; var positionStr = x + ", " + y + ", " + z; Overlays.editOverlay(locationButton, { text: positionStr }); @@ -237,8 +1527,8 @@ var ExportMenu = function(opts) { }; - this.mouseReleaseEvent = function(event) { - var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y}); + this.mouseReleaseEvent = function (event) { + var clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y }); if (clickedOverlay == locationButton) { self.showPositionPrompt(); @@ -257,12 +1547,12 @@ var ExportMenu = function(opts) { } }; - this.close = function() { + this.close = function () { this.cleanup(); this._onClose(); }; - this.cleanup = function() { + this.cleanup = function () { Overlays.deleteOverlay(background); Overlays.deleteOverlay(titleText); 
Overlays.deleteOverlay(locationButton); @@ -277,7 +1567,7 @@ var ExportMenu = function(opts) { Controller.mouseReleaseEvent.connect(this.mouseReleaseEvent); }; -var ModelImporter = function(opts) { +var ModelImporter = function (opts) { var self = this; var height = 30; @@ -294,13 +1584,13 @@ var ModelImporter = function(opts) { }); var importScale = 1; var importBoundaries = Overlays.addOverlay("cube", { - position: { x: 0, y: 0, z: 0 }, - size: 1, - color: { red: 128, blue: 128, green: 128 }, - lineWidth: 4, - solid: false, - visible: false - }); + position: { x: 0, y: 0, z: 0 }, + size: 1, + color: { red: 128, blue: 128, green: 128 }, + lineWidth: 4, + solid: false, + visible: false + }); var pos = { x: windowDimensions.x / 2 - (fullWidth / 2), y: windowDimensions.y - 100 }; @@ -337,7 +1627,7 @@ var ModelImporter = function(opts) { }); this._importing = false; - this.setImportVisible = function(visible) { + this.setImportVisible = function (visible) { Overlays.editOverlay(importBoundaries, { visible: visible }); Overlays.editOverlay(localModels, { visible: visible }); Overlays.editOverlay(cancelButton, { visible: visible }); @@ -346,17 +1636,17 @@ var ModelImporter = function(opts) { }; var importPosition = { x: 0, y: 0, z: 0 }; - this.moveImport = function(position) { + this.moveImport = function (position) { importPosition = position; Overlays.editOverlay(localModels, { - position: { x: importPosition.x, y: importPosition.y, z: importPosition.z } - }); + position: { x: importPosition.x, y: importPosition.y, z: importPosition.z } + }); Overlays.editOverlay(importBoundaries, { - position: { x: importPosition.x, y: importPosition.y, z: importPosition.z } - }); + position: { x: importPosition.x, y: importPosition.y, z: importPosition.z } + }); } - this.mouseMoveEvent = function(event) { + this.mouseMoveEvent = function (event) { if (self._importing) { var pickRay = Camera.computePickRay(event.x, event.y); var intersection = Voxels.findRayIntersection(pickRay); @@ -392,8 +1682,8 @@ var ModelImporter = function(opts) { } } - this.mouseReleaseEvent = function(event) { - var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y}); + this.mouseReleaseEvent = function (event) { + var clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y }); if (clickedOverlay == cancelButton) { self._importing = false; @@ -403,7 +1693,7 @@ var ModelImporter = function(opts) { // Would prefer to use {4} for the coords, but it would only capture the last digit. var fileRegex = /__(.+)__(\d+(?:\.\d+)?)_(\d+(?:\.\d+)?)_(\d+(?:\.\d+)?)_(\d+(?:\.\d+)?)__/; - this.doImport = function() { + this.doImport = function () { if (!self._importing) { var filename = Window.browse("Select models to import", "", "*.svo") if (filename) { @@ -445,7 +1735,7 @@ var ModelImporter = function(opts) { } } - this.paste = function() { + this.paste = function () { if (self._importing) { // self._importing = false; // self.setImportVisible(false); @@ -453,7 +1743,7 @@ var ModelImporter = function(opts) { } } - this.cleanup = function() { + this.cleanup = function () { Overlays.deleteOverlay(localModels); Overlays.deleteOverlay(importBoundaries); Overlays.deleteOverlay(cancelButton); @@ -467,9 +1757,10 @@ var ModelImporter = function(opts) { var modelImporter = new ModelImporter(); + function isLocked(properties) { // special case to lock the ground plane model in hq. 
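// Aside on the fileRegex used by ModelImporter.doImport() above (hypothetical filename,
// for illustration only): a repeated capture group such as (?:(\d+)_){4} keeps only its
// final iteration in JavaScript, which is why the four coordinates are written out as
// four explicit groups.
//
//   var m = "__myScene__1.5_2_3_0.25__".match(fileRegex);
//   // m[1] === "myScene"; m[2], m[3], m[4], m[5] === "1.5", "2", "3", "0.25"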
- if (location.hostname == "hq.highfidelity.io" && + if (location.hostname == "hq.highfidelity.io" && properties.modelURL == "https://s3-us-west-1.amazonaws.com/highfidelity-public/ozan/Terrain_Reduce_forAlpha.fbx") { return true; } @@ -483,74 +1774,73 @@ function controller(wichSide) { this.tip = 2 * wichSide + 1; this.trigger = wichSide; this.bumper = 6 * wichSide + 5; - + this.oldPalmPosition = Controller.getSpatialControlPosition(this.palm); this.palmPosition = Controller.getSpatialControlPosition(this.palm); - + this.oldTipPosition = Controller.getSpatialControlPosition(this.tip); this.tipPosition = Controller.getSpatialControlPosition(this.tip); - + this.oldUp = Controller.getSpatialControlNormal(this.palm); this.up = this.oldUp; - + this.oldFront = Vec3.normalize(Vec3.subtract(this.tipPosition, this.palmPosition)); this.front = this.oldFront; - + this.oldRight = Vec3.cross(this.front, this.up); this.right = this.oldRight; - + this.oldRotation = Quat.multiply(MyAvatar.orientation, Controller.getSpatialControlRawRotation(this.palm)); this.rotation = this.oldRotation; - + this.triggerValue = Controller.getTriggerValue(this.trigger); this.bumperValue = Controller.isButtonPressed(this.bumper); - + this.pressed = false; // is trigger pressed this.pressing = false; // is trigger being pressed (is pressed now but wasn't previously) - + this.grabbing = false; this.entityID = { isKnownID: false }; this.modelURL = ""; this.oldModelRotation; this.oldModelPosition; this.oldModelRadius; - + this.positionAtGrab; this.rotationAtGrab; this.modelPositionAtGrab; this.rotationAtGrab; - this.jointsIntersectingFromStart = []; - + this.laser = Overlays.addOverlay("line3d", { - position: { x: 0, y: 0, z: 0 }, - end: { x: 0, y: 0, z: 0 }, - color: LASER_COLOR, - alpha: 1, - visible: false, - lineWidth: LASER_WIDTH, - anchor: "MyAvatar" - }); - + position: { x: 0, y: 0, z: 0 }, + end: { x: 0, y: 0, z: 0 }, + color: LASER_COLOR, + alpha: 1, + visible: false, + lineWidth: LASER_WIDTH, + anchor: "MyAvatar" + }); + this.guideScale = 0.02; this.ball = Overlays.addOverlay("sphere", { - position: { x: 0, y: 0, z: 0 }, - size: this.guideScale, - solid: true, - color: { red: 0, green: 255, blue: 0 }, - alpha: 1, - visible: false, - anchor: "MyAvatar" - }); + position: { x: 0, y: 0, z: 0 }, + size: this.guideScale, + solid: true, + color: { red: 0, green: 255, blue: 0 }, + alpha: 1, + visible: false, + anchor: "MyAvatar" + }); this.leftRight = Overlays.addOverlay("line3d", { - position: { x: 0, y: 0, z: 0 }, - end: { x: 0, y: 0, z: 0 }, - color: { red: 0, green: 0, blue: 255 }, - alpha: 1, - visible: false, - lineWidth: LASER_WIDTH, - anchor: "MyAvatar" - }); + position: { x: 0, y: 0, z: 0 }, + end: { x: 0, y: 0, z: 0 }, + color: { red: 0, green: 0, blue: 255 }, + alpha: 1, + visible: false, + lineWidth: LASER_WIDTH, + anchor: "MyAvatar" + }); this.topDown = Overlays.addOverlay("line3d", { position: { x: 0, y: 0, z: 0 }, end: { x: 0, y: 0, z: 0 }, @@ -568,20 +1858,18 @@ function controller(wichSide) { print("Model locked " + entityID.id); } else { print("Grabbing " + entityID.id); - this.grabbing = true; this.entityID = entityID; this.modelURL = properties.modelURL; - + this.oldModelPosition = properties.position; this.oldModelRotation = properties.rotation; this.oldModelRadius = properties.radius; - + this.positionAtGrab = this.palmPosition; this.rotationAtGrab = this.rotation; this.modelPositionAtGrab = properties.position; this.rotationAtGrab = properties.rotation; - this.jointsIntersectingFromStart = []; for (var 
i = 0; i < jointList.length; i++) { var distance = Vec3.distance(MyAvatar.getJointPosition(jointList[i]), this.oldModelPosition); @@ -592,11 +1880,11 @@ function controller(wichSide) { this.showLaser(false); } } - + this.release = function () { if (this.grabbing) { jointList = MyAvatar.getJointNames(); - + var closestJointIndex = -1; var closestJointDistance = 10; for (var i = 0; i < jointList.length; i++) { @@ -606,14 +1894,14 @@ function controller(wichSide) { closestJointIndex = i; } } - + if (closestJointIndex != -1) { print("closestJoint: " + jointList[closestJointIndex]); print("closestJointDistance (attach max distance): " + closestJointDistance + " (" + this.oldModelRadius + ")"); } - + if (closestJointDistance < this.oldModelRadius) { - + if (this.jointsIntersectingFromStart.indexOf(closestJointIndex) != -1 || (leftController.grabbing && rightController.grabbing && leftController.entityID.id == rightController.entityID.id)) { @@ -622,26 +1910,25 @@ function controller(wichSide) { print("Attaching to " + jointList[closestJointIndex]); var jointPosition = MyAvatar.getJointPosition(jointList[closestJointIndex]); var jointRotation = MyAvatar.getJointCombinedRotation(jointList[closestJointIndex]); - + var attachmentOffset = Vec3.subtract(this.oldModelPosition, jointPosition); attachmentOffset = Vec3.multiplyQbyV(Quat.inverse(jointRotation), attachmentOffset); var attachmentRotation = Quat.multiply(Quat.inverse(jointRotation), this.oldModelRotation); - + MyAvatar.attach(this.modelURL, jointList[closestJointIndex], attachmentOffset, attachmentRotation, 2.0 * this.oldModelRadius, true, false); - Entities.deleteEntity(this.entityID); } } } - + this.grabbing = false; this.entityID.isKnownID = false; this.jointsIntersectingFromStart = []; this.showLaser(true); } - + this.checkTrigger = function () { if (this.triggerValue > 0.9) { if (this.pressed) { @@ -661,8 +1948,8 @@ function controller(wichSide) { if (isLocked(properties)) { return { valid: false }; } - - + + // P P - Model // /| A - Palm // / | d B - unit vector toward tip @@ -673,58 +1960,58 @@ function controller(wichSide) { // |X-A| = (P-A).B // X == A + ((P-A).B)B // d = |P-X| - + var A = this.palmPosition; var B = this.front; var P = properties.position; - + var x = Vec3.dot(Vec3.subtract(P, A), B); var y = Vec3.dot(Vec3.subtract(P, A), this.up); var z = Vec3.dot(Vec3.subtract(P, A), this.right); var X = Vec3.sum(A, Vec3.multiply(B, x)); var d = Vec3.length(Vec3.subtract(P, X)); - + var angularSize = 2 * Math.atan(properties.radius / Vec3.distance(Camera.getPosition(), properties.position)) * 180 / 3.14; if (0 < x && angularSize > MIN_ANGULAR_SIZE) { if (angularSize > MAX_ANGULAR_SIZE) { print("Angular size too big: " + 2 * Math.atan(properties.radius / Vec3.distance(Camera.getPosition(), properties.position)) * 180 / 3.14); return { valid: false }; } - + return { valid: true, x: x, y: y, z: z }; } return { valid: false }; } - + this.glowedIntersectingModel = { isKnownID: false }; this.moveLaser = function () { // the overlays here are anchored to the avatar, which means they are specified in the avatar's local frame - + var inverseRotation = Quat.inverse(MyAvatar.orientation); var startPosition = Vec3.multiplyQbyV(inverseRotation, Vec3.subtract(this.palmPosition, MyAvatar.position)); var direction = Vec3.multiplyQbyV(inverseRotation, Vec3.subtract(this.tipPosition, this.palmPosition)); var distance = Vec3.length(direction); direction = Vec3.multiply(direction, LASER_LENGTH_FACTOR / distance); var endPosition = 
Vec3.sum(startPosition, direction); - + Overlays.editOverlay(this.laser, { - position: startPosition, - end: endPosition - }); - - + position: startPosition, + end: endPosition + }); + + Overlays.editOverlay(this.ball, { - position: endPosition - }); + position: endPosition + }); Overlays.editOverlay(this.leftRight, { - position: Vec3.sum(endPosition, Vec3.multiply(this.right, 2 * this.guideScale)), - end: Vec3.sum(endPosition, Vec3.multiply(this.right, -2 * this.guideScale)) - }); - Overlays.editOverlay(this.topDown, {position: Vec3.sum(endPosition, Vec3.multiply(this.up, 2 * this.guideScale)), - end: Vec3.sum(endPosition, Vec3.multiply(this.up, -2 * this.guideScale)) - }); + position: Vec3.sum(endPosition, Vec3.multiply(this.right, 2 * this.guideScale)), + end: Vec3.sum(endPosition, Vec3.multiply(this.right, -2 * this.guideScale)) + }); + Overlays.editOverlay(this.topDown, { position: Vec3.sum(endPosition, Vec3.multiply(this.up, 2 * this.guideScale)), + end: Vec3.sum(endPosition, Vec3.multiply(this.up, -2 * this.guideScale)) + }); this.showLaser(!this.grabbing || mode == 0); - + if (this.glowedIntersectingModel.isKnownID) { Entities.editEntity(this.glowedIntersectingModel, { glowLevel: 0.0 }); this.glowedIntersectingModel.isKnownID = false; @@ -741,14 +2028,13 @@ function controller(wichSide) { } } } - - this.showLaser = function(show) { + + this.showLaser = function (show) { Overlays.editOverlay(this.laser, { visible: show }); Overlays.editOverlay(this.ball, { visible: show }); Overlays.editOverlay(this.leftRight, { visible: show }); Overlays.editOverlay(this.topDown, { visible: show }); } - this.moveEntity = function () { if (this.grabbing) { if (!this.entityID.isKnownID) { @@ -761,7 +2047,7 @@ function controller(wichSide) { } var newPosition; var newRotation; - + switch (mode) { case 0: newPosition = Vec3.sum(this.palmPosition, @@ -770,8 +2056,8 @@ function controller(wichSide) { Vec3.multiply(this.up, this.y)); newPosition = Vec3.sum(newPosition, Vec3.multiply(this.right, this.z)); - - + + newRotation = Quat.multiply(this.rotation, Quat.inverse(this.oldRotation)); newRotation = Quat.multiply(newRotation, @@ -780,11 +2066,11 @@ function controller(wichSide) { case 1: var forward = Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0, z: -1 }); var d = Vec3.dot(forward, MyAvatar.position); - + var factor1 = Vec3.dot(forward, this.positionAtGrab) - d; var factor2 = Vec3.dot(forward, this.modelPositionAtGrab) - d; var vector = Vec3.subtract(this.palmPosition, this.positionAtGrab); - + if (factor2 < 0) { factor2 = 0; } @@ -792,26 +2078,24 @@ function controller(wichSide) { factor1 = 1; factor2 = 1; } - + newPosition = Vec3.sum(this.modelPositionAtGrab, Vec3.multiply(vector, factor2 / factor1)); - + newRotation = Quat.multiply(this.rotation, Quat.inverse(this.rotationAtGrab)); newRotation = Quat.multiply(newRotation, this.rotationAtGrab); break; } - Entities.editEntity(this.entityID, { position: newPosition, rotation: newRotation }); - this.oldModelRotation = newRotation; this.oldModelPosition = newPosition; - + var indicesToRemove = []; for (var i = 0; i < this.jointsIntersectingFromStart.length; ++i) { var distance = Vec3.distance(MyAvatar.getJointPosition(this.jointsIntersectingFromStart[i]), this.oldModelPosition); @@ -825,27 +2109,27 @@ function controller(wichSide) { } } } - + this.update = function () { this.oldPalmPosition = this.palmPosition; this.oldTipPosition = this.tipPosition; this.palmPosition = Controller.getSpatialControlPosition(this.palm); this.tipPosition = 
Controller.getSpatialControlPosition(this.tip); - + this.oldUp = this.up; this.up = Vec3.normalize(Controller.getSpatialControlNormal(this.palm)); - + this.oldFront = this.front; this.front = Vec3.normalize(Vec3.subtract(this.tipPosition, this.palmPosition)); - + this.oldRight = this.right; this.right = Vec3.normalize(Vec3.cross(this.front, this.up)); - + this.oldRotation = this.rotation; this.rotation = Quat.multiply(MyAvatar.orientation, Controller.getSpatialControlRawRotation(this.palm)); - + this.triggerValue = Controller.getTriggerValue(this.trigger); - + var bumperValue = Controller.isButtonPressed(this.bumper); if (bumperValue && !this.bumperValue) { if (mode == 0) { @@ -859,49 +2143,49 @@ function controller(wichSide) { } } this.bumperValue = bumperValue; - - + + this.checkTrigger(); - + this.moveLaser(); - + if (!this.pressed && this.grabbing) { // release if trigger not pressed anymore. this.release(); } - + if (this.pressing) { // Checking for attachments intersecting var attachments = MyAvatar.getAttachmentData(); var attachmentIndex = -1; var attachmentX = LASER_LENGTH_FACTOR; - + var newModel; var newProperties; - + for (var i = 0; i < attachments.length; ++i) { var position = Vec3.sum(MyAvatar.getJointPosition(attachments[i].jointName), Vec3.multiplyQbyV(MyAvatar.getJointCombinedRotation(attachments[i].jointName), attachments[i].translation)); var scale = attachments[i].scale; - + var A = this.palmPosition; var B = this.front; var P = position; - + var x = Vec3.dot(Vec3.subtract(P, A), B); var X = Vec3.sum(A, Vec3.multiply(B, x)); var d = Vec3.length(Vec3.subtract(P, X)); - + if (d < scale / 2.0 && 0 < x && x < attachmentX) { attachmentIndex = i; attachmentX = d; } } - + if (attachmentIndex != -1) { print("Detaching: " + attachments[attachmentIndex].modelURL); MyAvatar.detachOne(attachments[attachmentIndex].modelURL, attachments[attachmentIndex].jointName); - + newProperties = { type: "Model", position: Vec3.sum(MyAvatar.getJointPosition(attachments[attachmentIndex].jointName), @@ -928,7 +2212,6 @@ function controller(wichSide) { return; } newModel = foundIntersection.entityID; - if (!newModel.isKnownID) { var identify = Entities.identifyEntity(newModel); if (!identify.isKnownID) { @@ -939,10 +2222,7 @@ function controller(wichSide) { } newProperties = Entities.getEntityProperties(newModel); } - - print("foundEntity.modelURL=" + newProperties.modelURL); - if (isLocked(newProperties)) { print("Model locked " + newProperties.id); } else { @@ -950,9 +2230,9 @@ function controller(wichSide) { if (!check.valid) { return; } - + this.grab(newModel, newProperties); - + this.x = check.x; this.y = check.y; this.z = check.z; @@ -977,32 +2257,32 @@ function moveEntities() { var newPosition = leftController.oldModelPosition; var rotation = leftController.oldModelRotation; var ratio = 1; - - + + switch (mode) { case 0: var oldLeftPoint = Vec3.sum(leftController.oldPalmPosition, Vec3.multiply(leftController.oldFront, leftController.x)); var oldRightPoint = Vec3.sum(rightController.oldPalmPosition, Vec3.multiply(rightController.oldFront, rightController.x)); - + var oldMiddle = Vec3.multiply(Vec3.sum(oldLeftPoint, oldRightPoint), 0.5); var oldLength = Vec3.length(Vec3.subtract(oldLeftPoint, oldRightPoint)); - - + + var leftPoint = Vec3.sum(leftController.palmPosition, Vec3.multiply(leftController.front, leftController.x)); var rightPoint = Vec3.sum(rightController.palmPosition, Vec3.multiply(rightController.front, rightController.x)); - + var middle = Vec3.multiply(Vec3.sum(leftPoint, 
rightPoint), 0.5); var length = Vec3.length(Vec3.subtract(leftPoint, rightPoint)); - - + + ratio = length / oldLength; newPosition = Vec3.sum(middle, Vec3.multiply(Vec3.subtract(leftController.oldModelPosition, oldMiddle), ratio)); - break; + break; case 1: var u = Vec3.normalize(Vec3.subtract(rightController.oldPalmPosition, leftController.oldPalmPosition)); var v = Vec3.normalize(Vec3.subtract(rightController.palmPosition, leftController.palmPosition)); - + var cos_theta = Vec3.dot(u, v); if (cos_theta > 1) { cos_theta = 1; @@ -1010,41 +2290,37 @@ function moveEntities() { var angle = Math.acos(cos_theta) / Math.PI * 180; if (angle < 0.1) { return; - + } var w = Vec3.normalize(Vec3.cross(u, v)); - + rotation = Quat.multiply(Quat.angleAxis(angle, w), leftController.oldModelRotation); - - + + leftController.positionAtGrab = leftController.palmPosition; leftController.rotationAtGrab = leftController.rotation; leftController.modelPositionAtGrab = leftController.oldModelPosition; leftController.rotationAtGrab = rotation; - rightController.positionAtGrab = rightController.palmPosition; rightController.rotationAtGrab = rightController.rotation; rightController.modelPositionAtGrab = rightController.oldModelPosition; rightController.rotationAtGrab = rotation; break; } - Entities.editEntity(leftController.entityID, { position: newPosition, rotation: rotation, radius: leftController.oldModelRadius * ratio }); - leftController.oldModelPosition = newPosition; leftController.oldModelRotation = rotation; leftController.oldModelRadius *= ratio; - + rightController.oldModelPosition = newPosition; rightController.oldModelRotation = rotation; rightController.oldModelRadius *= ratio; return; } - leftController.moveEntity(); rightController.moveEntity(); } @@ -1060,95 +2336,28 @@ function checkController(deltaTime) { // So that we hide the lasers bellow and keep updating the overlays position numberOfButtons = 0; } - + // this is expected for hydras - if (numberOfButtons==12 && numberOfTriggers == 2 && controllersPerTrigger == 2) { + if (numberOfButtons == 12 && numberOfTriggers == 2 && controllersPerTrigger == 2) { if (!hydraConnected) { hydraConnected = true; } - + leftController.update(); rightController.update(); moveEntities(); } else { if (hydraConnected) { hydraConnected = false; - + leftController.showLaser(false); rightController.showLaser(false); } } - - moveOverlays(); + toolBar.move(); + progressDialog.move(); } -var isActive = false; -var active; -var newModel; -var browser; -var newBox; -var newSphere; -function initToolBar() { - toolBar = new ToolBar(0, 0, ToolBar.VERTICAL); - // New Model - active = toolBar.addTool({ - imageURL: toolIconUrl + "models-tool.svg", - subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT }, - width: toolWidth, height: toolHeight, - visible: true, - alpha: 0.9 - }, true, false); - newModel = toolBar.addTool({ - imageURL: toolIconUrl + "add-model-tool.svg", - subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT }, - width: toolWidth, height: toolHeight, - visible: true, - alpha: 0.9 - }); - browser = toolBar.addTool({ - imageURL: toolIconUrl + "list-icon.png", - width: toolWidth, height: toolHeight, - visible: true, - alpha: 0.7 - }); - newBox = toolBar.addTool({ - imageURL: toolIconUrl + "models-tool.svg", - subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT }, - width: toolWidth, height: toolHeight, - visible: true, - alpha: 0.9 - }); - - newSphere = 
toolBar.addTool({ - imageURL: toolIconUrl + "models-tool.svg", - subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT }, - width: toolWidth, height: toolHeight, - visible: true, - alpha: 0.9 - }); -} - -function moveOverlays() { - var newViewPort = Controller.getViewportDimensions(); - - if (typeof(toolBar) === 'undefined') { - initToolBar(); - - } else if (windowDimensions.x == newViewPort.x && - windowDimensions.y == newViewPort.y) { - return; - } - - - windowDimensions = newViewPort; - var toolsX = windowDimensions.x - 8 - toolBar.width; - var toolsY = (windowDimensions.y - toolBar.height) / 2; - - toolBar.move(toolsX, toolsY); -} - - - var entitySelected = false; var selectedEntityID; var selectedEntityProperties; @@ -1162,7 +2371,7 @@ var SCALE_FACTOR = 200.0; function rayPlaneIntersection(pickRay, point, normal) { var d = -Vec3.dot(point, normal); var t = -(Vec3.dot(pickRay.origin, normal) + d) / Vec3.dot(pickRay.direction, normal); - + return Vec3.sum(pickRay.origin, Vec3.multiply(pickRay.direction, t)); } @@ -1170,22 +2379,22 @@ function Tooltip() { this.x = 285; this.y = 115; this.width = 500; - this.height = 145 ; + this.height = 145; this.margin = 5; this.decimals = 3; - + this.textOverlay = Overlays.addOverlay("text", { - x: this.x, - y: this.y, - width: this.width, - height: this.height, - margin: this.margin, - text: "", - color: { red: 128, green: 128, blue: 128 }, - alpha: 0.2, - visible: false - }); - this.show = function(doShow) { + x: this.x, + y: this.y, + width: this.width, + height: this.height, + margin: this.margin, + text: "", + color: { red: 128, green: 128, blue: 128 }, + alpha: 0.2, + visible: false + }); + this.show = function (doShow) { Overlays.editOverlay(this.textOverlay, { visible: doShow }); } this.updateText = function(properties) { @@ -1221,8 +2430,8 @@ function Tooltip() { Overlays.editOverlay(this.textOverlay, { text: text }); } - - this.cleanup = function() { + + this.cleanup = function () { Overlays.deleteOverlay(this.textOverlay); } } @@ -1232,82 +2441,14 @@ function mousePressEvent(event) { if (event.isAlt) { return; } - + mouseLastPosition = { x: event.x, y: event.y }; entitySelected = false; - var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y}); - - if (active == toolBar.clicked(clickedOverlay)) { - isActive = !isActive; - return; - } + var clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y }); - if (newModel == toolBar.clicked(clickedOverlay)) { - var url = Window.prompt("Model URL", modelURLs[Math.floor(Math.random() * modelURLs.length)]); - if (url == null || url == "") { - return; - } - - var position = Vec3.sum(MyAvatar.position, Vec3.multiply(Quat.getFront(MyAvatar.orientation), SPAWN_DISTANCE)); - - if (position.x > 0 && position.y > 0 && position.z > 0) { - Entities.addEntity({ - type: "Model", - position: position, - radius: radiusDefault, - modelURL: url - }); - } else { - print("Can't create model: Model would be out of bounds."); - } - - } else if (newBox == toolBar.clicked(clickedOverlay)) { - var position = Vec3.sum(MyAvatar.position, Vec3.multiply(Quat.getFront(MyAvatar.orientation), SPAWN_DISTANCE)); - - if (position.x > 0 && position.y > 0 && position.z > 0) { - Entities.addEntity({ - type: "Box", - position: position, - radius: radiusDefault, - color: { red: 255, green: 0, blue: 0 } - }); - } else { - print("Can't create box: Box would be out of bounds."); - } - - } else if (newSphere == toolBar.clicked(clickedOverlay)) { - var position = 
Vec3.sum(MyAvatar.position, Vec3.multiply(Quat.getFront(MyAvatar.orientation), SPAWN_DISTANCE)); - - if (position.x > 0 && position.y > 0 && position.z > 0) { - Entities.addEntity({ - type: "Sphere", - position: position, - radius: radiusDefault, - color: { red: 255, green: 0, blue: 0 } - }); - } else { - print("Can't create box: Box would be out of bounds."); - } - - } else if (browser == toolBar.clicked(clickedOverlay)) { - var url = Window.s3Browse(".*(fbx|FBX)"); - if (url == null || url == "") { - return; - } - - var position = Vec3.sum(MyAvatar.position, Vec3.multiply(Quat.getFront(MyAvatar.orientation), SPAWN_DISTANCE)); - - if (position.x > 0 && position.y > 0 && position.z > 0) { - Entities.addEntity({ - type: "Model", - position: position, - radius: radiusDefault, - modelURL: url - }); - } else { - print("Can't create model: Model would be out of bounds."); - } - + if (toolBar.mousePressEvent(event) || progressDialog.mousePressEvent(event)) { + // Event handled; do nothing. + return; } else { // If we aren't active and didn't click on an overlay: quit if (!isActive) { @@ -1333,7 +2474,6 @@ function mousePressEvent(event) { } var properties = Entities.getEntityProperties(foundEntity); - if (isLocked(properties)) { print("Model locked " + properties.id); } else { @@ -1348,22 +2488,21 @@ function mousePressEvent(event) { // |X-A| = (P-A).B // X == A + ((P-A).B)B // d = |P-X| - + var A = pickRay.origin; var B = Vec3.normalize(pickRay.direction); var P = properties.position; - + var x = Vec3.dot(Vec3.subtract(P, A), B); var X = Vec3.sum(A, Vec3.multiply(B, x)); var d = Vec3.length(Vec3.subtract(P, X)); - + var angularSize = 2 * Math.atan(properties.radius / Vec3.distance(Camera.getPosition(), properties.position)) * 180 / 3.14; if (0 < x && angularSize > MIN_ANGULAR_SIZE) { if (angularSize < MAX_ANGULAR_SIZE) { entitySelected = true; selectedEntityID = foundEntity; selectedEntityProperties = properties; - orientation = MyAvatar.orientation; intersection = rayPlaneIntersection(pickRay, P, Quat.getFront(orientation)); } else { @@ -1372,19 +2511,18 @@ function mousePressEvent(event) { } } } - if (entitySelected) { selectedEntityProperties.oldRadius = selectedEntityProperties.radius; selectedEntityProperties.oldPosition = { - x: selectedEntityProperties.position.x, - y: selectedEntityProperties.position.y, - z: selectedEntityProperties.position.z, + x: selectedEntityProperties.position.x, + y: selectedEntityProperties.position.y, + z: selectedEntityProperties.position.z, }; selectedEntityProperties.oldRotation = { - x: selectedEntityProperties.rotation.x, - y: selectedEntityProperties.rotation.y, - z: selectedEntityProperties.rotation.z, - w: selectedEntityProperties.rotation.w, + x: selectedEntityProperties.rotation.x, + y: selectedEntityProperties.rotation.y, + z: selectedEntityProperties.rotation.z, + w: selectedEntityProperties.rotation.w, }; selectedEntityProperties.glowLevel = 0.0; @@ -1398,13 +2536,12 @@ var glowedEntityID = { id: -1, isKnownID: false }; var oldModifier = 0; var modifier = 0; var wasShifted = false; -function mouseMoveEvent(event) { +function mouseMoveEvent(event) { if (event.isAlt || !isActive) { return; } - + var pickRay = Camera.computePickRay(event.x, event.y); - if (!entitySelected) { var entityIntersection = Entities.findRayIntersection(pickRay); if (entityIntersection.accurate) { @@ -1422,7 +2559,7 @@ function mouseMoveEvent(event) { } return; } - + if (event.isLeftButton) { if (event.isRightButton) { modifier = 1; // Scale @@ -1439,28 +2576,28 @@ function 
mouseMoveEvent(event) { selectedEntityProperties.oldRadius = selectedEntityProperties.radius; selectedEntityProperties.oldPosition = { - x: selectedEntityProperties.position.x, - y: selectedEntityProperties.position.y, - z: selectedEntityProperties.position.z, + x: selectedEntityProperties.position.x, + y: selectedEntityProperties.position.y, + z: selectedEntityProperties.position.z, }; selectedEntityProperties.oldRotation = { - x: selectedEntityProperties.rotation.x, - y: selectedEntityProperties.rotation.y, - z: selectedEntityProperties.rotation.z, - w: selectedEntityProperties.rotation.w, + x: selectedEntityProperties.rotation.x, + y: selectedEntityProperties.rotation.y, + z: selectedEntityProperties.rotation.z, + w: selectedEntityProperties.rotation.w, }; orientation = MyAvatar.orientation; intersection = rayPlaneIntersection(pickRay, selectedEntityProperties.oldPosition, Quat.getFront(orientation)); - + mouseLastPosition = { x: event.x, y: event.y }; wasShifted = event.isShifted; oldModifier = modifier; return; } - - + + switch (modifier) { case 0: return; @@ -1468,13 +2605,12 @@ function mouseMoveEvent(event) { // Let's Scale selectedEntityProperties.radius = (selectedEntityProperties.oldRadius * (1.0 + (mouseLastPosition.y - event.y) / SCALE_FACTOR)); - if (selectedEntityProperties.radius < 0.01) { print("Scale too small ... bailling."); return; } break; - + case 2: // Let's translate var newIntersection = rayPlaneIntersection(pickRay, @@ -1487,7 +2623,6 @@ function mouseMoveEvent(event) { vector = Vec3.sum(Vec3.multiply(Quat.getRight(orientation), i), Vec3.multiply(Quat.getFront(orientation), j)); } - selectedEntityProperties.position = Vec3.sum(selectedEntityProperties.oldPosition, vector); break; case 3: @@ -1501,10 +2636,10 @@ function mouseMoveEvent(event) { mouseLastPosition.y = event.y; somethingChanged = false; } - - + + var pixelPerDegrees = windowDimensions.y / (1 * 360); // the entire height of the window allow you to make 2 full rotations - + //compute delta in pixel var cameraForward = Quat.getFront(Camera.getOrientation()); var rotationAxis = (!zIsPressed && xIsPressed) ? { x: 1, y: 0, z: 0 } : @@ -1516,12 +2651,12 @@ function mouseMoveEvent(event) { .x, y: mouseLastPosition.y - event.y, z: 0 }; var transformedMouseDelta = Vec3.multiplyQbyV(Camera.getOrientation(), mouseDelta); var delta = Math.floor(Vec3.dot(transformedMouseDelta, Vec3.normalize(orthogonalAxis)) / pixelPerDegrees); - + var STEP = 15; if (!event.isShifted) { delta = Math.round(delta / STEP) * STEP; } - + var rotation = Quat.fromVec3Degrees({ x: (!zIsPressed && xIsPressed) ? delta : 0, // x is pressed y: (!zIsPressed && !xIsPressed) ? delta : 0, // neither is pressed @@ -1545,7 +2680,6 @@ function mouseReleaseEvent(event) { if (event.isAlt || !isActive) { return; } - if (entitySelected) { tooltip.show(false); } @@ -1564,11 +2698,11 @@ function setupModelMenus() { print("setupModelMenus()"); // adj our menuitems Menu.addMenuItem({ menuName: "Edit", menuItemName: "Models", isSeparator: true, beforeItem: "Physics" }); - Menu.addMenuItem({ menuName: "Edit", menuItemName: "Edit Properties...", + Menu.addMenuItem({ menuName: "Edit", menuItemName: "Edit Properties...", shortcutKeyEvent: { text: "`" }, afterItem: "Models" }); - if (!Menu.menuItemExists("Edit","Delete")) { + if (!Menu.menuItemExists("Edit", "Delete")) { print("no delete... 
adding ours"); - Menu.addMenuItem({ menuName: "Edit", menuItemName: "Delete", + Menu.addMenuItem({ menuName: "Edit", menuItemName: "Delete", shortcutKeyEvent: { text: "backspace" }, afterItem: "Models" }); modelMenuAddedDelete = true; } else { @@ -1600,6 +2734,7 @@ function cleanupModelMenus() { function scriptEnding() { leftController.cleanup(); rightController.cleanup(); + progressDialog.cleanup(); toolBar.cleanup(); cleanupModelMenus(); tooltip.cleanup(); @@ -1618,7 +2753,7 @@ Controller.mouseReleaseEvent.connect(mouseReleaseEvent); setupModelMenus(); -function handeMenuEvent(menuItem){ +function handeMenuEvent(menuItem) { print("menuItemEvent() in JS... menuItem=" + menuItem); if (menuItem == "Delete") { if (leftController.grabbing) { @@ -1698,49 +2833,51 @@ function handeMenuEvent(menuItem){ array.push({ label: "Green:", value: properties.color.green }); array.push({ label: "Blue:", value: properties.color.blue }); } + array.push({ button: "Cancel" }); var propertyName = Window.form("Edit Properties", array); + if (Window.form("Edit Properties", array)) { + var index = 0; + if (properties.type == "Model") { + properties.modelURL = array[index++].value; + properties.animationURL = array[index++].value; + properties.animationIsPlaying = array[index++].value; + properties.animationFPS = array[index++].value; + properties.animationFrameIndex = array[index++].value; + } + properties.position.x = array[index++].value; + properties.position.y = array[index++].value; + properties.position.z = array[index++].value; + angles.x = array[index++].value; + angles.y = array[index++].value; + angles.z = array[index++].value; + properties.rotation = Quat.fromVec3Degrees(angles); + properties.radius = array[index++].value / 2; + + properties.velocity.x = array[index++].value; + properties.velocity.y = array[index++].value; + properties.velocity.z = array[index++].value; + properties.damping = array[index++].value; + properties.gravity.x = array[index++].value; + properties.gravity.y = array[index++].value; + properties.gravity.z = array[index++].value; + properties.lifetime = array[index++].value; // give ourselves that many more seconds + + if (properties.type == "Box") { + properties.color.red = array[index++].value; + properties.color.green = array[index++].value; + properties.color.blue = array[index++].value; + } + Entities.editEntity(editModelID, properties); + } modelSelected = false; - - var index = 0; - if (properties.type == "Model") { - properties.modelURL = array[index++].value; - properties.animationURL = array[index++].value; - properties.animationIsPlaying = array[index++].value; - properties.animationFPS = array[index++].value; - properties.animationFrameIndex = array[index++].value; - } - properties.position.x = array[index++].value; - properties.position.y = array[index++].value; - properties.position.z = array[index++].value; - angles.x = array[index++].value; - angles.y = array[index++].value; - angles.z = array[index++].value; - properties.rotation = Quat.fromVec3Degrees(angles); - properties.radius = array[index++].value / 2; - - properties.velocity.x = array[index++].value; - properties.velocity.y = array[index++].value; - properties.velocity.z = array[index++].value; - properties.damping = array[index++].value; - properties.gravity.x = array[index++].value; - properties.gravity.y = array[index++].value; - properties.gravity.z = array[index++].value; - properties.lifetime = array[index++].value; // give ourselves that many more seconds - - if (properties.type == "Box") { - 
properties.color.red = array[index++].value; - properties.color.green = array[index++].value; - properties.color.blue = array[index++].value; - } - Entities.editEntity(editModelID, properties); } } else if (menuItem == "Paste Models") { modelImporter.paste(); } else if (menuItem == "Export Models") { if (!exportMenu) { exportMenu = new ExportMenu({ - onClose: function() { + onClose: function () { exportMenu = null; } }); @@ -1758,7 +2895,7 @@ Menu.menuItemEvent.connect(handeMenuEvent); var zIsPressed = false; var xIsPressed = false; var somethingChanged = false; -Controller.keyPressEvent.connect(function(event) { +Controller.keyPressEvent.connect(function (event) { if ((event.text == "z" || event.text == "Z") && !zIsPressed) { zIsPressed = true; somethingChanged = true; @@ -1767,7 +2904,7 @@ Controller.keyPressEvent.connect(function(event) { xIsPressed = true; somethingChanged = true; } - + // resets model orientation when holding with mouse if (event.text == "r" && entitySelected) { selectedEntityProperties.rotation = Quat.fromVec3Degrees({ x: 0, y: 0, z: 0 }); @@ -1777,7 +2914,7 @@ Controller.keyPressEvent.connect(function(event) { } }); -Controller.keyReleaseEvent.connect(function(event) { +Controller.keyReleaseEvent.connect(function (event) { if (event.text == "z" || event.text == "Z") { zIsPressed = false; somethingChanged = true; diff --git a/examples/editVoxels.js b/examples/editVoxels.js index 1ed3dcc0c3..a85c04dd02 100644 --- a/examples/editVoxels.js +++ b/examples/editVoxels.js @@ -51,9 +51,6 @@ var lastVoxelScale = 0; var dragStart = { x: 0, y: 0 }; var wheelPixelsMoved = 0; -var mouseX = 0; -var mouseY = 0; - // Create a table of the different colors you can choose var colors = new Array(); colors[0] = { red: 120, green: 181, blue: 126 }; @@ -1041,8 +1038,6 @@ function mousePressEvent(event) { // TODO: does any of this stuff need to execute if we're panning or orbiting? 
trackMouseEvent(event); // used by preview support - mouseX = event.x; - mouseY = event.y; var pickRay = Camera.computePickRay(event.x, event.y); var intersection = Voxels.findRayIntersection(pickRay); audioOptions.position = Vec3.sum(pickRay.origin, pickRay.direction); @@ -1296,40 +1291,30 @@ function mouseMoveEvent(event) { } if (isAdding) { - // Watch the drag direction to tell which way to 'extrude' this voxel + var pickRay = Camera.computePickRay(event.x, event.y); + var distance = Vec3.length(Vec3.subtract(pickRay.origin, lastVoxelPosition)); + var mouseSpot = Vec3.sum(Vec3.multiply(pickRay.direction, distance), pickRay.origin); + var delta = Vec3.subtract(mouseSpot, lastVoxelPosition); + if (!isExtruding) { - var pickRay = Camera.computePickRay(event.x, event.y); - var lastVoxelDistance = { x: pickRay.origin.x - lastVoxelPosition.x, - y: pickRay.origin.y - lastVoxelPosition.y, - z: pickRay.origin.z - lastVoxelPosition.z }; - var distance = Vec3.length(lastVoxelDistance); - var mouseSpot = { x: pickRay.direction.x * distance, y: pickRay.direction.y * distance, z: pickRay.direction.z * distance }; - mouseSpot.x += pickRay.origin.x; - mouseSpot.y += pickRay.origin.y; - mouseSpot.z += pickRay.origin.z; - var dx = mouseSpot.x - lastVoxelPosition.x; - var dy = mouseSpot.y - lastVoxelPosition.y; - var dz = mouseSpot.z - lastVoxelPosition.z; + // Use the drag direction to tell which way to 'extrude' this voxel extrudeScale = lastVoxelScale; extrudeDirection = { x: 0, y: 0, z: 0 }; isExtruding = true; - if (dx > lastVoxelScale) extrudeDirection.x = extrudeScale; - else if (dx < -lastVoxelScale) extrudeDirection.x = -extrudeScale; - else if (dy > lastVoxelScale) extrudeDirection.y = extrudeScale; - else if (dy < -lastVoxelScale) extrudeDirection.y = -extrudeScale; - else if (dz > lastVoxelScale) extrudeDirection.z = extrudeScale; - else if (dz < -lastVoxelScale) extrudeDirection.z = -extrudeScale; + if (delta.x > lastVoxelScale) extrudeDirection.x = 1; + else if (delta.x < -lastVoxelScale) extrudeDirection.x = -1; + else if (delta.y > lastVoxelScale) extrudeDirection.y = 1; + else if (delta.y < -lastVoxelScale) extrudeDirection.y = -1; + else if (delta.z > lastVoxelScale) extrudeDirection.z = 1; + else if (delta.z < -lastVoxelScale) extrudeDirection.z = -1; else isExtruding = false; } else { - // We have got an extrusion direction, now look for mouse move beyond threshold to add new voxel - var dx = event.x - mouseX; - var dy = event.y - mouseY; - if (Math.sqrt(dx*dx + dy*dy) > PIXELS_PER_EXTRUDE_VOXEL) { - lastVoxelPosition = Vec3.sum(lastVoxelPosition, extrudeDirection); - Voxels.setVoxel(lastVoxelPosition.x, lastVoxelPosition.y, lastVoxelPosition.z, - extrudeScale, lastVoxelColor.red, lastVoxelColor.green, lastVoxelColor.blue); - mouseX = event.x; - mouseY = event.y; + // Extrude if mouse has moved by a voxel in the extrude direction + var distanceInDirection = Vec3.dot(delta, extrudeDirection); + if (distanceInDirection > extrudeScale) { + lastVoxelPosition = Vec3.sum(lastVoxelPosition, Vec3.multiply(extrudeDirection, extrudeScale)); + Voxels.setVoxel(lastVoxelPosition.x, lastVoxelPosition.y, lastVoxelPosition.z, extrudeScale, + lastVoxelColor.red, lastVoxelColor.green, lastVoxelColor.blue); } } } diff --git a/examples/frisbee.js b/examples/frisbee.js new file mode 100644 index 0000000000..e893a29309 --- /dev/null +++ b/examples/frisbee.js @@ -0,0 +1,443 @@ +// +// frisbee.js +// examples +// +// Created by Thijs Wenker on 7/5/14. +// Copyright 2014 High Fidelity, Inc. 
+// +// Requirements: Razer Hydras +// +// A fun game of throwing frisbees to each other. Hold the trigger on either hydra to create or catch a frisbee. +// +// Tip: use this together with the squeezeHands.js script to make it look nicer. +// +// Distributed under the Apache License, Version 2.0. +// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// +Script.include("toolBars.js"); + +const LEFT_PALM = 0; +const LEFT_TIP = 1; +const LEFT_BUTTON_FWD = 5; +const LEFT_BUTTON_3 = 3; + +const RIGHT_PALM = 2; +const RIGHT_TIP = 3; +const RIGHT_BUTTON_FWD = 11; +const RIGHT_BUTTON_3 = 9; + +const FRISBEE_RADIUS = 0.08; +const GRAVITY_STRENGTH = 0.5; + +const CATCH_RADIUS = 0.5; +const MIN_SIMULATION_SPEED = 0.15; +const THROWN_VELOCITY_SCALING = 1.5; + +const SOUNDS_ENABLED = true; +const FRISBEE_BUTTON_URL = "http://test.thoys.nl/hifi/images/frisbee/frisbee_button_by_Judas.svg"; +const FRISBEE_MODEL_SCALE = 275; +const FRISBEE_MENU = "Toys>Frisbee"; +const FRISBEE_DESIGN_MENU = "Toys>Frisbee>Design"; +const FRISBEE_ENABLED_SETTING = "Frisbee>Enabled"; +const FRISBEE_CREATENEW_SETTING = "Frisbee>CreateNew"; +const FRISBEE_DESIGN_SETTING = "Frisbee>Design"; +const FRISBEE_FORCE_MOUSE_CONTROLS_SETTING = "Frisbee>ForceMouseControls"; + +// Add your own designs in FRISBEE_DESIGNS; be sure to put "frisbee" in the model URL if you want others to be able to catch it without having a copy of your frisbee script. +const FRISBEE_DESIGNS = [ + {"name":"Interface", "model":"http://test.thoys.nl/hifi/models/frisbee/frisbee.fbx"}, + {"name":"Pizza", "model":"http://test.thoys.nl/hifi/models/frisbee/pizza.fbx"}, + {"name":"Swirl", "model":"http://test.thoys.nl/hifi/models/frisbee/swirl.fbx"}, + {"name":"Mayan", "model":"http://test.thoys.nl/hifi/models/frisbee/mayan.fbx"}, + ]; +const FRISBEE_MENU_DESIGN_POSTFIX = " Design"; +const FRISBEE_DESIGN_RANDOM = "Random"; + +const SPIN_MULTIPLIER = 1000; +const FRISBEE_LIFETIME = 300; // 5 minutes + +var windowDimensions = Controller.getViewportDimensions(); +var toolHeight = 50; +var toolWidth = 50; +var frisbeeToggle; +var toolBar; +var frisbeeEnabled = true; +var newfrisbeeEnabled = false; +var forceMouseControls = false; +var hydrasConnected = false; +var selectedDesign = FRISBEE_DESIGN_RANDOM; + +function loadSettings() { + frisbeeEnabled = Settings.getValue(FRISBEE_ENABLED_SETTING, "true") == "true"; + newfrisbeeEnabled = Settings.getValue(FRISBEE_CREATENEW_SETTING, "false") == "true"; + forceMouseControls = Settings.getValue(FRISBEE_FORCE_MOUSE_CONTROLS_SETTING, "false") == "true"; + selectedDesign = Settings.getValue(FRISBEE_DESIGN_SETTING, "Random"); +} + +function saveSettings() { + Settings.setValue(FRISBEE_ENABLED_SETTING, frisbeeEnabled ? "true" : "false"); + Settings.setValue(FRISBEE_CREATENEW_SETTING, newfrisbeeEnabled ? "true" : "false"); + Settings.setValue(FRISBEE_FORCE_MOUSE_CONTROLS_SETTING, forceMouseControls ? "true" : "false"); + Settings.setValue(FRISBEE_DESIGN_SETTING, selectedDesign); +} + +function moveOverlays() { + var newViewPort = Controller.getViewportDimensions(); + if (typeof(toolBar) === 'undefined') { + initToolBar(); + } else if (windowDimensions.x == newViewPort.x && + windowDimensions.y == newViewPort.y) { + return; + } + + windowDimensions = newViewPort; + var toolsX = windowDimensions.x - 8 - toolBar.width; + var toolsY = (windowDimensions.y - toolBar.height) / 2 + 80; + toolBar.move(toolsX, toolsY); +} + +function frisbeeURL() { + return selectedDesign == FRISBEE_DESIGN_RANDOM ?
FRISBEE_DESIGNS[Math.floor(Math.random() * FRISBEE_DESIGNS.length)].model : getFrisbee(selectedDesign).model; +} + +//This function checks if the modelURL is inside of our Designs or contains "frisbee" in it. +function validFrisbeeURL(frisbeeURL) { + for (var frisbee in FRISBEE_DESIGNS) { + if (FRISBEE_DESIGNS[frisbee].model == frisbeeURL) { + return true; + } + } + return frisbeeURL.toLowerCase().indexOf("frisbee") !== -1; +} + +function getFrisbee(frisbeeName) { + for (var frisbee in FRISBEE_DESIGNS) { + if (FRISBEE_DESIGNS[frisbee].name == frisbeeName) { + return FRISBEE_DESIGNS[frisbee]; + } + } + return undefined; +} + +function Hand(name, palm, tip, forwardButton, button3, trigger) { + this.name = name; + this.palm = palm; + this.tip = tip; + this.forwardButton = forwardButton; + this.button3 = button3; + this.trigger = trigger; + this.holdingFrisbee = false; + this.particle = false; + this.palmPosition = function() { return Controller.getSpatialControlPosition(this.palm); } + this.grabButtonPressed = function() { + return ( + Controller.isButtonPressed(this.forwardButton) || + Controller.isButtonPressed(this.button3) || + Controller.getTriggerValue(this.trigger) > 0.5 + ) + }; + this.holdPosition = function() { return this.palm == LEFT_PALM ? MyAvatar.getLeftPalmPosition() : MyAvatar.getRightPalmPosition(); }; + this.holdRotation = function() { + var q = Controller.getSpatialControlRawRotation(this.palm); + q = Quat.multiply(MyAvatar.orientation, q); + return {x: q.x, y: q.y, z: q.z, w: q.w}; + }; + this.tipVelocity = function() { return Controller.getSpatialControlVelocity(this.tip); }; +} + +function MouseControl(button) { + this.button = button; +} + +var leftHand = new Hand("LEFT", LEFT_PALM, LEFT_TIP, LEFT_BUTTON_FWD, LEFT_BUTTON_3, 0); +var rightHand = new Hand("RIGHT", RIGHT_PALM, RIGHT_TIP, RIGHT_BUTTON_FWD, RIGHT_BUTTON_3, 1); + +var leftMouseControl = new MouseControl("LEFT"); +var middleMouseControl = new MouseControl("MIDDLE"); +var rightMouseControl = new MouseControl("RIGHT"); +var mouseControls = [leftMouseControl, middleMouseControl, rightMouseControl]; +var currentMouseControl = false; + +var newSound = new Sound("https://dl.dropboxusercontent.com/u/1864924/hifi-sounds/throw.raw"); +var catchSound = new Sound("https://dl.dropboxusercontent.com/u/1864924/hifi-sounds/catch.raw"); +var throwSound = new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/Switches%20and%20sliders/slider%20-%20whoosh1.raw"); + +var simulatedFrisbees = []; + +var wantDebugging = false; +function debugPrint(message) { + if (wantDebugging) { + print(message); + } +} + +function playSound(sound, position) { + if (!SOUNDS_ENABLED) { + return; + } + var options = new AudioInjectionOptions(); + options.position = position; + options.volume = 1.0; + Audio.playSound(sound, options); +} + +function cleanupFrisbees() { + simulatedFrisbees = []; + var particles = Particles.findParticles(MyAvatar.position, 1000); + for (particle in particles) { + Particles.deleteParticle(particles[particle]); + } +} + +function checkControllerSide(hand) { + // If I don't currently have a frisbee in my hand, then try to catch closest one + if (!hand.holdingFrisbee && hand.grabButtonPressed()) { + var closestParticle = Particles.findClosestParticle(hand.palmPosition(), CATCH_RADIUS); + var modelUrl = Particles.getParticleProperties(closestParticle).modelURL; + if (closestParticle.isKnownID && validFrisbeeURL(Particles.getParticleProperties(closestParticle).modelURL)) { + 
Particles.editParticle(closestParticle, {modelScale: 1, inHand: true, position: hand.holdPosition(), shouldDie: true}); + Particles.deleteParticle(closestParticle); + debugPrint(hand.message + " HAND- CAUGHT SOMETHING!!"); + + var properties = { + position: hand.holdPosition(), + velocity: { x: 0, y: 0, z: 0}, + gravity: { x: 0, y: 0, z: 0}, + inHand: true, + radius: FRISBEE_RADIUS, + damping: 0.999, + modelURL: modelUrl, + modelScale: FRISBEE_MODEL_SCALE, + modelRotation: hand.holdRotation(), + lifetime: FRISBEE_LIFETIME + }; + + newParticle = Particles.addParticle(properties); + + hand.holdingFrisbee = true; + hand.particle = newParticle; + + playSound(catchSound, hand.holdPosition()); + + return; // exit early + } + } + + // If '3' is pressed, and not holding a frisbee, make a new one + if (hand.grabButtonPressed() && !hand.holdingFrisbee && newfrisbeeEnabled) { + var properties = { + position: hand.holdPosition(), + velocity: { x: 0, y: 0, z: 0}, + gravity: { x: 0, y: 0, z: 0}, + inHand: true, + radius: FRISBEE_RADIUS, + damping: 0.999, + modelURL: frisbeeURL(), + modelScale: FRISBEE_MODEL_SCALE, + modelRotation: hand.holdRotation(), + lifetime: FRISBEE_LIFETIME + }; + + newParticle = Particles.addParticle(properties); + hand.holdingFrisbee = true; + hand.particle = newParticle; + + // Play a new frisbee sound + playSound(newSound, hand.holdPosition()); + + return; // exit early + } + + if (hand.holdingFrisbee) { + // If holding the frisbee keep it in the palm + if (hand.grabButtonPressed()) { + debugPrint(">>>>> " + hand.name + "-FRISBEE IN HAND, grabbing, hold and move"); + var properties = { + position: hand.holdPosition(), + modelRotation: hand.holdRotation() + }; + Particles.editParticle(hand.particle, properties); + } else { + debugPrint(">>>>> " + hand.name + "-FRISBEE IN HAND, not grabbing, THROW!!!"); + // If frisbee just released, add velocity to it! + + var properties = { + velocity: Vec3.multiply(hand.tipVelocity(), THROWN_VELOCITY_SCALING), + inHand: false, + lifetime: FRISBEE_LIFETIME, + gravity: { x: 0, y: -GRAVITY_STRENGTH, z: 0}, + modelRotation: hand.holdRotation() + }; + + Particles.editParticle(hand.particle, properties); + + simulatedFrisbees.push(hand.particle); + + hand.holdingFrisbee = false; + hand.particle = false; + + playSound(throwSound, hand.holdPosition()); + } + } +} + +function initToolBar() { + toolBar = new ToolBar(0, 0, ToolBar.VERTICAL); + frisbeeToggle = toolBar.addTool({ + imageURL: FRISBEE_BUTTON_URL, + subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT }, + width: toolWidth, + height: toolHeight, + visible: true, + alpha: 0.9 + }, true); + enableNewFrisbee(newfrisbeeEnabled); +} + +function hydraCheck() { + var numberOfButtons = Controller.getNumberOfButtons(); + var numberOfTriggers = Controller.getNumberOfTriggers(); + var numberOfSpatialControls = Controller.getNumberOfSpatialControls(); + var controllersPerTrigger = numberOfSpatialControls / numberOfTriggers; + hydrasConnected = (numberOfButtons == 12 && numberOfTriggers == 2 && controllersPerTrigger == 2); + return hydrasConnected; +} + +function checkController(deltaTime) { + moveOverlays(); + if (!frisbeeEnabled) { + return; + } + // this is expected for hydras + if (hydraCheck()) { + checkControllerSide(leftHand); + checkControllerSide(rightHand); + } + if (!hydrasConnected || forceMouseControls) { + //TODO: add mouse cursor control code here. 
+ } +} + +function controlFrisbees(deltaTime) { + var killSimulations = []; + for (frisbee in simulatedFrisbees) { + var properties = Particles.getParticleProperties(simulatedFrisbees[frisbee]); + //get the horizon length from the velocity origin in order to get speed + var speed = Vec3.length({x:properties.velocity.x, y:0, z:properties.velocity.z}); + if (speed < MIN_SIMULATION_SPEED) { + //kill the frisbee simulation when speed is low + killSimulations.push(frisbee); + continue; + } + Particles.editParticle(simulatedFrisbees[frisbee], {modelRotation: Quat.multiply(properties.modelRotation, Quat.fromPitchYawRollDegrees(0, speed * deltaTime * SPIN_MULTIPLIER, 0))}); + + } + for (var i = killSimulations.length - 1; i >= 0; i--) { + simulatedFrisbees.splice(killSimulations[i], 1); + } +} + +//catches interfering calls of hydra-cursors +function withinBounds(coords) { + return coords.x >= 0 && coords.x < windowDimensions.x && coords.y >= 0 && coords.y < windowDimensions.y; +} + +function mouseMoveEvent(event) { + //TODO: mouse controls //print(withinBounds(event)); //print("move"+event.x); +} + +function mousePressEvent(event) { + print(event.x); + var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y}); + if (frisbeeToggle == toolBar.clicked(clickedOverlay)) { + newfrisbeeEnabled = !newfrisbeeEnabled; + saveSettings(); + enableNewFrisbee(newfrisbeeEnabled); + } +} + +function enableNewFrisbee(enable) { + if (toolBar.numberOfTools() > 0) { + toolBar.tools[0].select(enable); + } +} + +function mouseReleaseEvent(event) { + //TODO: mouse controls //print(JSON.stringify(event)); +} + +function setupMenus() { + Menu.addMenu(FRISBEE_MENU); + Menu.addMenuItem({ + menuName: FRISBEE_MENU, + menuItemName: "Frisbee Enabled", + isCheckable: true, + isChecked: frisbeeEnabled + }); + Menu.addMenuItem({ + menuName: FRISBEE_MENU, + menuItemName: "Cleanup Frisbees" + }); + Menu.addMenuItem({ + menuName: FRISBEE_MENU, + menuItemName: "Force Mouse Controls", + isCheckable: true, + isChecked: forceMouseControls + }); + Menu.addMenu(FRISBEE_DESIGN_MENU); + Menu.addMenuItem({ + menuName: FRISBEE_DESIGN_MENU, + menuItemName: FRISBEE_DESIGN_RANDOM + FRISBEE_MENU_DESIGN_POSTFIX, + isCheckable: true, + isChecked: selectedDesign == FRISBEE_DESIGN_RANDOM + }); + for (frisbee in FRISBEE_DESIGNS) { + Menu.addMenuItem({ + menuName: FRISBEE_DESIGN_MENU, + menuItemName: FRISBEE_DESIGNS[frisbee].name + FRISBEE_MENU_DESIGN_POSTFIX, + isCheckable: true, + isChecked: selectedDesign == FRISBEE_DESIGNS[frisbee].name + }); + } +} + +//startup calls: +loadSettings(); +setupMenus(); +function scriptEnding() { + toolBar.cleanup(); + Menu.removeMenu(FRISBEE_MENU); +} + +function menuItemEvent(menuItem) { + if (menuItem == "Cleanup Frisbees") { + cleanupFrisbees(); + return; + } else if (menuItem == "Frisbee Enabled") { + frisbeeEnabled = Menu.isOptionChecked(menuItem); + saveSettings(); + return; + } else if (menuItem == "Force Mouse Controls") { + forceMouseControls = Menu.isOptionChecked(menuItem); + saveSettings(); + return; + } + if (menuItem.indexOf(FRISBEE_MENU_DESIGN_POSTFIX, menuItem.length - FRISBEE_MENU_DESIGN_POSTFIX.length) !== -1) { + var item_name = menuItem.substring(0, menuItem.length - FRISBEE_MENU_DESIGN_POSTFIX.length); + if (item_name == FRISBEE_DESIGN_RANDOM || getFrisbee(item_name) != undefined) { + Menu.setIsOptionChecked(selectedDesign + FRISBEE_MENU_DESIGN_POSTFIX, false); + selectedDesign = item_name; + saveSettings(); + Menu.setIsOptionChecked(selectedDesign + FRISBEE_MENU_DESIGN_POSTFIX, 
true); + } + } +} + +// register the call back so it fires before each data send +Controller.mouseMoveEvent.connect(mouseMoveEvent); +Controller.mousePressEvent.connect(mousePressEvent); +Controller.mouseReleaseEvent.connect(mouseReleaseEvent); +Menu.menuItemEvent.connect(menuItemEvent); +Script.scriptEnding.connect(scriptEnding); +Script.update.connect(checkController); +Script.update.connect(controlFrisbees); \ No newline at end of file diff --git a/examples/hydraMove.js b/examples/hydraMove.js index 675a885b6d..853c18ebce 100644 --- a/examples/hydraMove.js +++ b/examples/hydraMove.js @@ -21,10 +21,10 @@ var position = { x: MyAvatar.position.x, y: MyAvatar.position.y, z: MyAvatar.pos var joysticksCaptured = false; var THRUST_CONTROLLER = 0; var VIEW_CONTROLLER = 1; -var INITIAL_THRUST_MULTPLIER = 1.0; +var INITIAL_THRUST_MULTIPLIER = 1.0; var THRUST_INCREASE_RATE = 1.05; var MAX_THRUST_MULTIPLIER = 75.0; -var thrustMultiplier = INITIAL_THRUST_MULTPLIER; +var thrustMultiplier = INITIAL_THRUST_MULTIPLIER; var grabDelta = { x: 0, y: 0, z: 0}; var grabStartPosition = { x: 0, y: 0, z: 0}; var grabDeltaVelocity = { x: 0, y: 0, z: 0}; @@ -34,6 +34,8 @@ var grabbingWithRightHand = false; var wasGrabbingWithRightHand = false; var grabbingWithLeftHand = false; var wasGrabbingWithLeftHand = false; +var movingWithHead = false; +var headStartPosition, headStartDeltaPitch, headStartFinalPitch, headStartRoll, headStartYaw; var EPSILON = 0.000001; var velocity = { x: 0, y: 0, z: 0}; var THRUST_MAG_UP = 100.0; @@ -241,6 +243,47 @@ function handleGrabBehavior(deltaTime) { wasGrabbingWithLeftHand = grabbingWithLeftHand; } +var HEAD_MOVE_DEAD_ZONE = 0.0; +var HEAD_STRAFE_DEAD_ZONE = 0.0; +var HEAD_ROTATE_DEAD_ZONE = 0.0; +var HEAD_THRUST_FWD_SCALE = 12000.0; +var HEAD_THRUST_STRAFE_SCALE = 1000.0; +var HEAD_YAW_RATE = 2.0; +var HEAD_PITCH_RATE = 1.0; +var HEAD_ROLL_THRUST_SCALE = 75.0; +var HEAD_PITCH_LIFT_THRUST = 3.0; + +function moveWithHead(deltaTime) { + if (movingWithHead) { + var deltaYaw = MyAvatar.getHeadFinalYaw() - headStartYaw; + var deltaPitch = MyAvatar.getHeadDeltaPitch() - headStartDeltaPitch; + + var bodyLocalCurrentHeadVector = Vec3.subtract(MyAvatar.getHeadPosition(), MyAvatar.position); + bodyLocalCurrentHeadVector = Vec3.multiplyQbyV(Quat.angleAxis(-deltaYaw, {x:0, y: 1, z:0}), bodyLocalCurrentHeadVector); + var headDelta = Vec3.subtract(bodyLocalCurrentHeadVector, headStartPosition); + headDelta = Vec3.multiplyQbyV(Quat.inverse(Camera.getOrientation()), headDelta); + headDelta.y = 0.0; // Don't respond to any of the vertical component of head motion + + // Thrust based on leaning forward and side-to-side + if (Math.abs(headDelta.z) > HEAD_MOVE_DEAD_ZONE) { + MyAvatar.addThrust(Vec3.multiply(Quat.getFront(Camera.getOrientation()), -headDelta.z * HEAD_THRUST_FWD_SCALE * deltaTime)); + } + if (Math.abs(headDelta.x) > HEAD_STRAFE_DEAD_ZONE) { + MyAvatar.addThrust(Vec3.multiply(Quat.getRight(Camera.getOrientation()), headDelta.x * HEAD_THRUST_STRAFE_SCALE * deltaTime)); + } + if (Math.abs(deltaYaw) > HEAD_ROTATE_DEAD_ZONE) { + var orientation = Quat.multiply(Quat.angleAxis(deltaYaw * HEAD_YAW_RATE * deltaTime, {x:0, y: 1, z:0}), MyAvatar.orientation); + MyAvatar.orientation = orientation; + } + // Thrust Up/Down based on head pitch + MyAvatar.addThrust(Vec3.multiply({ x:0, y:1, z:0 }, (MyAvatar.getHeadFinalPitch() - headStartFinalPitch) * HEAD_PITCH_LIFT_THRUST * deltaTime)); + // For head trackers, adjust pitch by head pitch + MyAvatar.headPitch += deltaPitch * HEAD_PITCH_RATE * deltaTime; 
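// Illustrative numbers for the head-move scales above (a rough sketch, not part of the script): at roughly 60 fps
// (deltaTime ≈ 0.0167 s), leaning the head about 2 cm toward the camera (headDelta.z ≈ -0.02) adds on the order of
// 0.02 * HEAD_THRUST_FWD_SCALE * 0.0167 ≈ 4 units of forward thrust per frame, and holding a 10 degree yaw offset
// turns the avatar by about 10 * HEAD_YAW_RATE * 0.0167 ≈ 0.33 degrees per frame, i.e. roughly 20 degrees per second.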
+ // Thrust strafe based on roll angle + MyAvatar.addThrust(Vec3.multiply(Quat.getRight(Camera.getOrientation()), -(MyAvatar.getHeadFinalRoll() - headStartRoll) * HEAD_ROLL_THRUST_SCALE * deltaTime)); + } +} + // Update for joysticks and move button function flyWithHydra(deltaTime) { var thrustJoystickPosition = Controller.getJoystickPosition(THRUST_CONTROLLER); @@ -262,7 +305,7 @@ function flyWithHydra(deltaTime) { thrustJoystickPosition.x * thrustMultiplier * deltaTime); MyAvatar.addThrust(thrustRight); } else { - thrustMultiplier = INITIAL_THRUST_MULTPLIER; + thrustMultiplier = INITIAL_THRUST_MULTIPLIER; } // View Controller @@ -280,6 +323,7 @@ function flyWithHydra(deltaTime) { MyAvatar.headPitch = newPitch; } handleGrabBehavior(deltaTime); + moveWithHead(deltaTime); displayDebug(); } @@ -296,3 +340,19 @@ function scriptEnding() { } Script.scriptEnding.connect(scriptEnding); +Controller.keyPressEvent.connect(function(event) { + if (event.text == "SPACE" && !movingWithHead) { + movingWithHead = true; + headStartPosition = Vec3.subtract(MyAvatar.getHeadPosition(), MyAvatar.position); + headStartDeltaPitch = MyAvatar.getHeadDeltaPitch(); + headStartFinalPitch = MyAvatar.getHeadFinalPitch(); + headStartRoll = MyAvatar.getHeadFinalRoll(); + headStartYaw = MyAvatar.getHeadFinalYaw(); + } +}); +Controller.keyReleaseEvent.connect(function(event) { + if (event.text == "SPACE") { + movingWithHead = false; + } +}); + diff --git a/examples/playSoundOrbit.js b/examples/playSoundOrbit.js new file mode 100644 index 0000000000..da7c746e8e --- /dev/null +++ b/examples/playSoundOrbit.js @@ -0,0 +1,42 @@ +// +// playSoundOrbit.js +// examples +// +// Created by Craig Hansen-Sturm on 05/27/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. +// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +var soundClip = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Voxels/voxel create 3.raw"); + +var currentTime = 1.570079; // pi/2 +var deltaTime = 0.05; +var distance = 1; +var debug = 0; + +function playSound() { + var options = new AudioInjectionOptions(); + currentTime += deltaTime; + + var s = distance * Math.sin(currentTime); + var c = distance * Math.cos(currentTime); + + var soundOffset = { x:s, y:0, z:c }; + + if (debug) { + print("t=" + currentTime + " offset=" + soundOffset.x + "," + soundOffset.y + "," + soundOffset.z); + } + + var avatarPosition = MyAvatar.position; + var soundPosition = Vec3.sum(avatarPosition, soundOffset); + + options.position = soundPosition; + options.volume = 1.0; + Audio.playSound(soundClip, options); +} + +Script.setInterval(playSound, 250); + + diff --git a/examples/sit.js b/examples/sit.js index 49123f254a..c3abe9a908 100644 --- a/examples/sit.js +++ b/examples/sit.js @@ -269,8 +269,7 @@ function update(deltaTime){ } var locationChanged = false; - if (location.hostname != oldHost) { - print("Changed domain"); + if (location.hostname != oldHost || !location.isConnected) { for (model in models) { removeIndicators(models[model]); } diff --git a/examples/speechControl.js b/examples/speechControl.js new file mode 100644 index 0000000000..e2fb9699b7 --- /dev/null +++ b/examples/speechControl.js @@ -0,0 +1,201 @@ +// +// speechControl.js +// examples +// +// Created by Ryan Huffman on 07/31/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0.
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +var ACCELERATION = 80; +var STEP_DURATION = 1.0; // Duration of a step command in seconds +var TURN_DEGREES = 90; +var SLIGHT_TURN_DEGREES = 45; +var TURN_AROUND_DEGREES = 180; +var TURN_RATE = 90; // Turn rate in degrees per second + + +/*****************************************************************************/ +/** COMMANDS *****************************************************************/ +var CMD_MOVE_FORWARD = "Move forward"; +var CMD_MOVE_BACKWARD = "Move backward"; +var CMD_MOVE_UP = "Move up"; +var CMD_MOVE_DOWN = "Move down"; +var CMD_MOVE_LEFT = "Move left"; +var CMD_MOVE_RIGHT = "Move right"; + +var CMD_STEP_FORWARD = "Step forward"; +var CMD_STEP_BACKWARD = "Step backward"; +var CMD_STEP_LEFT = "Step left"; +var CMD_STEP_RIGHT = "Step right"; +var CMD_STEP_UP = "Step up"; +var CMD_STEP_DOWN = "Step down"; + +var CMD_TURN_LEFT = "Turn left"; +var CMD_TURN_SLIGHT_LEFT = "Turn slight left"; +var CMD_TURN_RIGHT = "Turn right"; +var CMD_TURN_SLIGHT_RIGHT = "Turn slight right"; +var CMD_TURN_AROUND = "Turn around"; + +var CMD_STOP = "Stop"; + +var CMD_SHOW_COMMANDS = "Show commands"; + +var MOVE_COMMANDS = [ + CMD_MOVE_FORWARD, + CMD_MOVE_BACKWARD, + CMD_MOVE_UP, + CMD_MOVE_DOWN, + CMD_MOVE_LEFT, + CMD_MOVE_RIGHT, +]; + +var STEP_COMMANDS = [ + CMD_STEP_FORWARD, + CMD_STEP_BACKWARD, + CMD_STEP_UP, + CMD_STEP_DOWN, + CMD_STEP_LEFT, + CMD_STEP_RIGHT, +]; + +var TURN_COMMANDS = [ + CMD_TURN_LEFT, + CMD_TURN_SLIGHT_LEFT, + CMD_TURN_RIGHT, + CMD_TURN_SLIGHT_RIGHT, + CMD_TURN_AROUND, +]; + +var OTHER_COMMANDS = [ + CMD_STOP, + CMD_SHOW_COMMANDS, +]; + +var ALL_COMMANDS = [] + .concat(MOVE_COMMANDS) + .concat(STEP_COMMANDS) + .concat(TURN_COMMANDS) + .concat(OTHER_COMMANDS); + +/** END OF COMMANDS **********************************************************/ +/*****************************************************************************/ + + +var currentCommandFunc = null; + +function handleCommandRecognized(command) { + if (MOVE_COMMANDS.indexOf(command) > -1 || STEP_COMMANDS.indexOf(command) > -1) { + // If this is a STEP_* command, we will want to countdown the duration + // of time to move. MOVE_* commands don't stop. + var timeRemaining = MOVE_COMMANDS.indexOf(command) > -1 ? 
0 : STEP_DURATION; + var accel = { x: 0, y: 0, z: 0 }; + + if (command == CMD_MOVE_FORWARD || command == CMD_STEP_FORWARD) { + accel = { x: 0, y: 0, z: 1 }; + } else if (command == CMD_MOVE_BACKWARD || command == CMD_STEP_BACKWARD) { + accel = { x: 0, y: 0, z: -1 }; + } else if (command === CMD_MOVE_UP || command == CMD_STEP_UP) { + accel = { x: 0, y: 1, z: 0 }; + } else if (command == CMD_MOVE_DOWN || command == CMD_STEP_DOWN) { + accel = { x: 0, y: -1, z: 0 }; + } else if (command == CMD_MOVE_LEFT || command == CMD_STEP_LEFT) { + accel = { x: -1, y: 0, z: 0 }; + } else if (command == CMD_MOVE_RIGHT || command == CMD_STEP_RIGHT) { + accel = { x: 1, y: 0, z: 0 }; + } + + currentCommandFunc = function(dt) { + if (timeRemaining > 0 && dt >= timeRemaining) { + dt = timeRemaining; + } + + var headOrientation = MyAvatar.headOrientation; + var front = Quat.getFront(headOrientation); + var right = Quat.getRight(headOrientation); + var up = Quat.getUp(headOrientation); + + var thrust = Vec3.multiply(front, accel.z * ACCELERATION); + thrust = Vec3.sum(thrust, Vec3.multiply(right, accel.x * ACCELERATION)); + thrust = Vec3.sum(thrust, Vec3.multiply(up, accel.y * ACCELERATION)); + MyAvatar.addThrust(thrust); + + if (timeRemaining > 0) { + timeRemaining -= dt; + return timeRemaining > 0; + } + + return true; + }; + } else if (TURN_COMMANDS.indexOf(command) > -1) { + var degreesRemaining; + var sign; + if (command == CMD_TURN_LEFT) { + sign = 1; + degreesRemaining = TURN_DEGREES; + } else if (command == CMD_TURN_RIGHT) { + sign = -1; + degreesRemaining = TURN_DEGREES; + } else if (command == CMD_TURN_SLIGHT_LEFT) { + sign = 1; + degreesRemaining = SLIGHT_TURN_DEGREES; + } else if (command == CMD_TURN_SLIGHT_RIGHT) { + sign = -1; + degreesRemaining = SLIGHT_TURN_DEGREES; + } else if (command == CMD_TURN_AROUND) { + sign = 1; + degreesRemaining = TURN_AROUND_DEGREES; + } + currentCommandFunc = function(dt) { + // Determine how much to turn by + var turnAmount = TURN_RATE * dt; + if (turnAmount > degreesRemaining) { + turnAmount = degreesRemaining; + } + + // Apply turn + var orientation = MyAvatar.orientation; + var deltaOrientation = Quat.fromPitchYawRollDegrees(0, sign * turnAmount, 0); + MyAvatar.orientation = Quat.multiply(orientation, deltaOrientation); + + degreesRemaining -= turnAmount; + return turnAmount > 0; + } + } else if (command == CMD_STOP) { + currentCommandFunc = null; + } else if (command == CMD_SHOW_COMMANDS) { + var msg = ""; + for (var i = 0; i < ALL_COMMANDS.length; i++) { + msg += ALL_COMMANDS[i] + "\n"; + } + Window.alert(msg); + } +} + +function update(dt) { + if (currentCommandFunc) { + if (currentCommandFunc(dt) === false) { + currentCommandFunc = null; + } + } +} + +function setup() { + for (var i = 0; i < ALL_COMMANDS.length; i++) { + SpeechRecognizer.addCommand(ALL_COMMANDS[i]); + } +} + +function scriptEnding() { + for (var i = 0; i < ALL_COMMANDS.length; i++) { + SpeechRecognizer.removeCommand(ALL_COMMANDS[i]); + } +} + +Script.scriptEnding.connect(scriptEnding); +Script.update.connect(update); +SpeechRecognizer.commandRecognized.connect(handleCommandRecognized); + +setup(); diff --git a/examples/testXMLHttpRequest.js b/examples/testXMLHttpRequest.js index 421eb458e4..79d2842464 100644 --- a/examples/testXMLHttpRequest.js +++ b/examples/testXMLHttpRequest.js @@ -145,3 +145,98 @@ test("Test timeout", function() { this.assertEquals(0, req.status, "status should be `0`"); this.assertEquals(4, req.errorCode, "4 is the timeout error code for QNetworkReply::NetworkError"); }); + 
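// For reference, a minimal sketch of the synchronous local-file GET that the tests below exercise (the path here is
// illustrative only; the tests obtain a real one via Window.browse):
//     var req = new XMLHttpRequest();
//     req.open("GET", "file:///path/to/defaultScripts.js", false);  // must be synchronous; async local-file requests return 501
//     req.send();
//     if (req.status === 200) {
//         print(req.response.substring(0, 100));
//     }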
+ +var localFile = Window.browse("Find defaultScripts.js file ...", "", "defaultScripts.js (defaultScripts.js)"); + +if (localFile !== null) { + + localFile = "file:///" + localFile; + + test("Test GET local file synchronously", function () { + var req = new XMLHttpRequest(); + + var statesVisited = [true, false, false, false, false] + req.onreadystatechange = function () { + statesVisited[req.readyState] = true; + }; + + req.open("GET", localFile, false); + req.send(); + + this.assertEquals(req.DONE, req.readyState, "readyState should be DONE"); + this.assertEquals(200, req.status, "status should be `200`"); + this.assertEquals("OK", req.statusText, "statusText should be `OK`"); + this.assertEquals(0, req.errorCode); + this.assertNotEquals("", req.getAllResponseHeaders(), "headers should not be null"); + this.assertContains("High Fidelity", req.response.substring(0, 100), "expected text not found in response") + + for (var i = 0; i <= req.DONE; i++) { + this.assertEquals(true, statesVisited[i], i + " should be set"); + } + }); + + test("Test GET nonexistent local file", function () { + var nonexistentFile = localFile.replace(".js", "NoExist.js"); + + var req = new XMLHttpRequest(); + req.open("GET", nonexistentFile, false); + req.send(); + + this.assertEquals(req.DONE, req.readyState, "readyState should be DONE"); + this.assertEquals(404, req.status, "status should be `404`"); + this.assertEquals("Not Found", req.statusText, "statusText should be `Not Found`"); + this.assertNotEquals(0, req.errorCode); + }); + + test("Test GET local file already open", function () { + // Can't open file exclusively in order to test. + }); + + test("Test GET local file with data not implemented", function () { + var req = new XMLHttpRequest(); + req.open("GET", localFile, true); + req.send("data"); + + this.assertEquals(req.DONE, req.readyState, "readyState should be DONE"); + this.assertEquals(501, req.status, "status should be `501`"); + this.assertEquals("Not Implemented", req.statusText, "statusText should be `Not Implemented`"); + this.assertNotEquals(0, req.errorCode); + }); + + test("Test GET local file asynchronously not implemented", function () { + var req = new XMLHttpRequest(); + req.open("GET", localFile, true); + req.send(); + + this.assertEquals(req.DONE, req.readyState, "readyState should be DONE"); + this.assertEquals(501, req.status, "status should be `501`"); + this.assertEquals("Not Implemented", req.statusText, "statusText should be `Not Implemented`"); + this.assertNotEquals(0, req.errorCode); + }); + + test("Test POST local file not implemented", function () { + var req = new XMLHttpRequest(); + req.open("POST", localFile, false); + req.send(); + + this.assertEquals(req.DONE, req.readyState, "readyState should be DONE"); + this.assertEquals(501, req.status, "status should be `501`"); + this.assertEquals("Not Implemented", req.statusText, "statusText should be `Not Implemented`"); + this.assertNotEquals(0, req.errorCode); + }); + + test("Test local file username and password not implemented", function () { + var req = new XMLHttpRequest(); + req.open("GET", localFile, false, "username", "password"); + req.send(); + + this.assertEquals(req.DONE, req.readyState, "readyState should be DONE"); + this.assertEquals(501, req.status, "status should be `501`"); + this.assertEquals("Not Implemented", req.statusText, "statusText should be `Not Implemented`"); + this.assertNotEquals(0, req.errorCode); + }); + +} else { + print("Local file operation not tested"); +} diff --git 
a/examples/toolBars.js b/examples/toolBars.js index 1a464b4e4f..064ae372fd 100644 --- a/examples/toolBars.js +++ b/examples/toolBars.js @@ -132,20 +132,34 @@ ToolBar = function(x, y, direction) { this.y = y; this.width = 0; this.height = 0; - + this.back = this.back = Overlays.addOverlay("text", { + backgroundColor: { red: 255, green: 255, blue: 255 }, + x: this.x, + y: this.y, + width: this.width, + height: this.height, + alpha: 1.0, + visible: false + }); this.addTool = function(properties, selectable, selected) { if (direction == ToolBar.HORIZONTAL) { properties.x = this.x + this.width; properties.y = this.y; this.width += properties.width + ToolBar.SPACING; - this.height += Math.max(properties.height, this.height); + this.height = Math.max(properties.height, this.height); } else { properties.x = this.x; properties.y = this.y + this.height; this.width = Math.max(properties.width, this.width); this.height += properties.height + ToolBar.SPACING; } + if (this.back != null) { + Overlays.editOverlay(this.back, { + width: this.width + 2 * ToolBar.SPACING, + height: this.height + 2 * ToolBar.SPACING + }); + } this.tools[this.tools.length] = new Tool(properties, selectable, selected); return ((this.tools.length) - 1); @@ -159,18 +173,48 @@ ToolBar = function(x, y, direction) { for(var tool in this.tools) { this.tools[tool].move(this.tools[tool].x() + dx, this.tools[tool].y() + dy); } + if (this.back != null) { + Overlays.editOverlay(this.back, { + x: x - ToolBar.SPACING, + y: y - ToolBar.SPACING + }); + } } - this.setAlpha = function(alpha) { - for(var tool in this.tools) { + this.setAlpha = function(alpha, tool) { + if(typeof(tool) === 'undefined') { + for(var tool in this.tools) { + this.tools[tool].setAlpha(alpha); + } + if (this.back != null) { + Overlays.editOverlay(this.back, { alpha: alpha}); + } + } else { this.tools[tool].setAlpha(alpha); } } + + this.setBack = function(color, alpha) { + if (color == null) { + Overlays.editOverlay(this.back, { + visible: false + }); + } else { + Overlays.editOverlay(this.back, { + visible: true, + backgroundColor: color, + alpha: alpha + }) + } + } this.show = function(doShow) { for(var tool in this.tools) { this.tools[tool].show(doShow); } + if (this.back != null) { + Overlays.editOverlay(this.back, { visible: doShow}); + } } this.clicked = function(clickedOverlay) { @@ -186,12 +230,25 @@ ToolBar = function(x, y, direction) { return this.tools.length; } + this.selectTool = function (tool, select) { + this.tools[tool].select(select); + } + + this.toolSelected = function (tool) { + return this.tools[tool].selected(); + } + this.cleanup = function() { for(var tool in this.tools) { this.tools[tool].cleanup(); delete this.tools[tool]; } + if (this.back != null) { + Overlays.deleteOverlay(this.back); + this.back = null; + } + this.tools = []; this.x = x; this.y = y; diff --git a/interface/CMakeLists.txt b/interface/CMakeLists.txt index be8ffb1e9d..5653286104 100644 --- a/interface/CMakeLists.txt +++ b/interface/CMakeLists.txt @@ -46,6 +46,15 @@ foreach(SUBDIR avatar devices renderer ui starfield location scripting voxels pa set(INTERFACE_SRCS ${INTERFACE_SRCS} "${SUBDIR_SRCS}") endforeach(SUBDIR) +# Add SpeechRecognizer if on OS X, otherwise remove +if (APPLE) + file(GLOB INTERFACE_OBJCPP_SRCS "src/SpeechRecognizer.mm") + set(INTERFACE_SRCS ${INTERFACE_SRCS} ${INTERFACE_OBJCPP_SRCS}) +else () + get_filename_component(SPEECHRECOGNIZER_H "src/SpeechRecognizer.h" ABSOLUTE) + list(REMOVE_ITEM INTERFACE_SRCS ${SPEECHRECOGNIZER_H}) +endif () + find_package(Qt5 
COMPONENTS Gui Multimedia Network OpenGL Script Svg WebKitWidgets) # grab the ui files in resources/ui @@ -165,8 +174,9 @@ if (APPLE) find_library(CoreFoundation CoreFoundation) find_library(GLUT GLUT) find_library(OpenGL OpenGL) + find_library(AppKit AppKit) - target_link_libraries(${TARGET_NAME} ${CoreAudio} ${CoreFoundation} ${GLUT} ${OpenGL}) + target_link_libraries(${TARGET_NAME} ${CoreAudio} ${CoreFoundation} ${GLUT} ${OpenGL} ${AppKit}) # install command for OS X bundle INSTALL(TARGETS ${TARGET_NAME} diff --git a/interface/external/libovr/readme.txt b/interface/external/libovr/readme.txt index f68818d1ee..f9db808d88 100644 --- a/interface/external/libovr/readme.txt +++ b/interface/external/libovr/readme.txt @@ -2,15 +2,15 @@ Instructions for adding the Oculus library (LibOVR) to Interface Stephen Birarda, March 6, 2014 -You can download the Oculus SDK from https://developer.oculusvr.com/ (account creation required). Interface has been tested with SDK version 0.3.2. +You can download the Oculus SDK from https://developer.oculusvr.com/ (account creation required). Interface has been tested with SDK version 0.4.1. -1. Copy the Oculus SDK folders from the LibOVR directory (Lib, Include, Src) into the interface/externals/oculus folder. +1. Copy the Oculus SDK folders from the LibOVR directory (Lib, Include, Src) into the interface/externals/libovr folder. This readme.txt should be there as well. You may optionally choose to copy the SDK folders to a location outside the repository (so you can re-use with different checkouts and different projects). If so our CMake find module expects you to set the ENV variable 'HIFI_LIB_DIR' to a directory containing a subfolder 'oculus' that contains the three folders mentioned above. - NOTE: For Windows users, you should copy libovr.lib and libovrd.lib from the \oculus\Lib\Win32\VS2010 directory to the \oculus\Lib\Win32\ directory. + NOTE: For Windows users, you should copy libovr.lib and libovrd.lib from the \oculus\Lib\Win32\VS2010 directory to the \libovr\Lib\Win32\ directory. 2. Clear your build directory, run cmake and build, and you should be all set. diff --git a/interface/resources/shaders/metavoxel_heightfield_base.frag b/interface/resources/shaders/metavoxel_heightfield_base.frag new file mode 100644 index 0000000000..9b64a59e6f --- /dev/null +++ b/interface/resources/shaders/metavoxel_heightfield_base.frag @@ -0,0 +1,20 @@ +#version 120 + +// +// metavoxel_heightfield_base.frag +// fragment shader +// +// Created by Andrzej Kapolka on 8/20/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. +// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +// the diffuse texture +uniform sampler2D diffuseMap; + +void main(void) { + // compute the base color based on OpenGL lighting model + gl_FragColor = gl_Color * texture2D(diffuseMap, gl_TexCoord[0].st); +} diff --git a/interface/resources/shaders/metavoxel_heightfield_base.vert b/interface/resources/shaders/metavoxel_heightfield_base.vert new file mode 100644 index 0000000000..3e4b081d6f --- /dev/null +++ b/interface/resources/shaders/metavoxel_heightfield_base.vert @@ -0,0 +1,33 @@ +#version 120 + +// +// metavoxel_heightfield_base.vert +// vertex shader +// +// Created by Andrzej Kapolka on 8/20/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. 
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +// the height texture +uniform sampler2D heightMap; + +// the distance between height points in texture space +uniform float heightScale; + +// the scale between height and color textures +uniform float colorScale; + +void main(void) { + // add the height to the position + float height = texture2D(heightMap, gl_MultiTexCoord0.st).r; + gl_Position = gl_ModelViewProjectionMatrix * (gl_Vertex + vec4(0.0, height, 0.0, 0.0)); + + // the zero height should be invisible + gl_FrontColor = vec4(1.0, 1.0, 1.0, step(height, 0.0)); + + // pass along the scaled/offset texture coordinates + gl_TexCoord[0] = (gl_MultiTexCoord0 - vec4(heightScale, heightScale, 0.0, 0.0)) * colorScale; +} diff --git a/interface/resources/shaders/metavoxel_heightfield_light.frag b/interface/resources/shaders/metavoxel_heightfield_light.frag new file mode 100644 index 0000000000..ce3f23e142 --- /dev/null +++ b/interface/resources/shaders/metavoxel_heightfield_light.frag @@ -0,0 +1,21 @@ +#version 120 + +// +// metavoxel_heightfield_light.frag +// fragment shader +// +// Created by Andrzej Kapolka on 8/20/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. +// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +// the interpolated normal +varying vec4 normal; + +void main(void) { + // compute the base color based on OpenGL lighting model + gl_FragColor = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient + + gl_FrontLightProduct[0].diffuse * max(0.0, dot(normalize(normal), gl_LightSource[0].position))); +} diff --git a/interface/resources/shaders/metavoxel_heightfield_light.vert b/interface/resources/shaders/metavoxel_heightfield_light.vert new file mode 100644 index 0000000000..228d575b81 --- /dev/null +++ b/interface/resources/shaders/metavoxel_heightfield_light.vert @@ -0,0 +1,45 @@ +#version 120 + +// +// metavoxel_heighfield_light.vert +// vertex shader +// +// Created by Andrzej Kapolka on 8/20/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. 
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +// the height texture +uniform sampler2D heightMap; + +// the distance between height points in texture space +uniform float heightScale; + +// the interpolated position +varying vec4 position; + +// the interpolated normal +varying vec4 normal; + +void main(void) { + // transform and store the normal for interpolation + vec2 heightCoord = gl_MultiTexCoord0.st; + float deltaX = texture2D(heightMap, heightCoord - vec2(heightScale, 0.0)).r - + texture2D(heightMap, heightCoord + vec2(heightScale, 0.0)).r; + float deltaZ = texture2D(heightMap, heightCoord - vec2(0.0, heightScale)).r - + texture2D(heightMap, heightCoord + vec2(0.0, heightScale)).r; + normal = normalize(gl_ModelViewMatrix * vec4(deltaX, heightScale, deltaZ, 0.0)); + + // add the height to the position + float height = texture2D(heightMap, heightCoord).r; + position = gl_ModelViewMatrix * (gl_Vertex + vec4(0.0, height, 0.0, 0.0)); + gl_Position = gl_ProjectionMatrix * position; + + // the zero height should be invisible + gl_FrontColor = vec4(1.0, 1.0, 1.0, step(height, 0.0)); + + // and the shadow texture coordinates + gl_TexCoord[1] = vec4(dot(gl_EyePlaneS[0], position), dot(gl_EyePlaneT[0], position), dot(gl_EyePlaneR[0], position), 1.0); +} diff --git a/interface/resources/shaders/metavoxel_heightfield_light_cascaded_shadow_map.frag b/interface/resources/shaders/metavoxel_heightfield_light_cascaded_shadow_map.frag new file mode 100644 index 0000000000..73382eb83c --- /dev/null +++ b/interface/resources/shaders/metavoxel_heightfield_light_cascaded_shadow_map.frag @@ -0,0 +1,44 @@ +#version 120 + +// +// metavoxel_heightfield_light_cascaded_shadow_map.frag +// fragment shader +// +// Created by Andrzej Kapolka on 8/20/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. 
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +// the shadow texture +uniform sampler2DShadow shadowMap; + +// the distances to the cascade sections +uniform vec3 shadowDistances; + +// the inverse of the size of the shadow map +const float shadowScale = 1.0 / 2048.0; + +// the interpolated position +varying vec4 position; + +// the interpolated normal +varying vec4 normal; + +void main(void) { + // compute the index of the cascade to use and the corresponding texture coordinates + int shadowIndex = int(dot(step(vec3(position.z), shadowDistances), vec3(1.0, 1.0, 1.0))); + vec3 shadowTexCoord = vec3(dot(gl_EyePlaneS[shadowIndex], position), dot(gl_EyePlaneT[shadowIndex], position), + dot(gl_EyePlaneR[shadowIndex], position)); + + // compute the base color based on OpenGL lighting model + float diffuse = dot(normalize(normal), gl_LightSource[0].position); + float facingLight = step(0.0, diffuse) * 0.25 * + (shadow2D(shadowMap, shadowTexCoord + vec3(-shadowScale, -shadowScale, 0.0)).r + + shadow2D(shadowMap, shadowTexCoord + vec3(-shadowScale, shadowScale, 0.0)).r + + shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, -shadowScale, 0.0)).r + + shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, shadowScale, 0.0)).r); + gl_FragColor = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient + + gl_FrontLightProduct[0].diffuse * (diffuse * facingLight)); +} diff --git a/interface/resources/shaders/metavoxel_heightfield_light_shadow_map.frag b/interface/resources/shaders/metavoxel_heightfield_light_shadow_map.frag new file mode 100644 index 0000000000..4f2df8958b --- /dev/null +++ b/interface/resources/shaders/metavoxel_heightfield_light_shadow_map.frag @@ -0,0 +1,33 @@ +#version 120 + +// +// metavoxel_heightfield_light_shadow_map.frag +// fragment shader +// +// Created by Andrzej Kapolka on 8/20/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. +// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +// the shadow texture +uniform sampler2DShadow shadowMap; + +// the inverse of the size of the shadow map +const float shadowScale = 1.0 / 2048.0; + +// the interpolated normal +varying vec4 normal; + +void main(void) { + // compute the base color based on OpenGL lighting model + float diffuse = dot(normalize(normal), gl_LightSource[0].position); + float facingLight = step(0.0, diffuse) * 0.25 * + (shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, -shadowScale, 0.0)).r + + shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, shadowScale, 0.0)).r + + shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, -shadowScale, 0.0)).r + + shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, shadowScale, 0.0)).r); + gl_FragColor = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient + + gl_FrontLightProduct[0].diffuse * (diffuse * facingLight)); +} diff --git a/interface/resources/shaders/metavoxel_heightfield_splat.frag b/interface/resources/shaders/metavoxel_heightfield_splat.frag new file mode 100644 index 0000000000..bb6b0d6536 --- /dev/null +++ b/interface/resources/shaders/metavoxel_heightfield_splat.frag @@ -0,0 +1,29 @@ +#version 120 + +// +// metavoxel_heightfield_splat.frag +// fragment shader +// +// Created by Andrzej Kapolka on 8/20/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. 
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +// the number of splats per pass +const int SPLAT_COUNT = 4; + +// the splat textures +uniform sampler2D diffuseMaps[SPLAT_COUNT]; + +// alpha values for the four splat textures +varying vec4 alphaValues; + +void main(void) { + // blend the splat textures + gl_FragColor = gl_Color * (texture2D(diffuseMaps[0], gl_TexCoord[0].st) * alphaValues.x + + texture2D(diffuseMaps[1], gl_TexCoord[1].st) * alphaValues.y + + texture2D(diffuseMaps[2], gl_TexCoord[2].st) * alphaValues.z + + texture2D(diffuseMaps[3], gl_TexCoord[3].st) * alphaValues.w); +} diff --git a/interface/resources/shaders/metavoxel_heightfield_splat.vert b/interface/resources/shaders/metavoxel_heightfield_splat.vert new file mode 100644 index 0000000000..926bcdd6c3 --- /dev/null +++ b/interface/resources/shaders/metavoxel_heightfield_splat.vert @@ -0,0 +1,64 @@ +#version 120 + +// +// metavoxel_heighfield_splat.vert +// vertex shader +// +// Created by Andrzej Kapolka on 8/20/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. +// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +// the height texture +uniform sampler2D heightMap; + +// the texture that contains the texture indices +uniform sampler2D textureMap; + +// the distance between height points in texture space +uniform float heightScale; + +// the scale between height and texture textures +uniform float textureScale; + +// the splat texture offset +uniform vec2 splatTextureOffset; + +// the splat textures scales on the S axis +uniform vec4 splatTextureScalesS; + +// the splat texture scales on the T axis +uniform vec4 splatTextureScalesT; + +// the lower bounds of the values corresponding to the splat textures +uniform vec4 textureValueMinima; + +// the upper bounds of the values corresponding to the splat textures +uniform vec4 textureValueMaxima; + +// alpha values for the four splat textures +varying vec4 alphaValues; + +void main(void) { + // add the height to the position + float height = texture2D(heightMap, gl_MultiTexCoord0.st).r; + vec4 modelSpacePosition = gl_Vertex + vec4(0.0, height, 0.0, 0.0); + gl_Position = gl_ModelViewProjectionMatrix * modelSpacePosition; + + // the zero height should be invisible + gl_FrontColor = vec4(1.0, 1.0, 1.0, 1.0 - step(height, 0.0)); + + // pass along the scaled/offset texture coordinates + vec4 textureSpacePosition = vec4(modelSpacePosition.xz, 0.0, 1.0) + vec4(splatTextureOffset, 0.0, 0.0); + gl_TexCoord[0] = textureSpacePosition * vec4(splatTextureScalesS[0], splatTextureScalesT[0], 0.0, 1.0); + gl_TexCoord[1] = textureSpacePosition * vec4(splatTextureScalesS[1], splatTextureScalesT[1], 0.0, 1.0); + gl_TexCoord[2] = textureSpacePosition * vec4(splatTextureScalesS[2], splatTextureScalesT[2], 0.0, 1.0); + gl_TexCoord[3] = textureSpacePosition * vec4(splatTextureScalesS[3], splatTextureScalesT[3], 0.0, 1.0); + + // compute the alpha values for each texture + float value = texture2D(textureMap, (gl_MultiTexCoord0.st - vec2(heightScale, heightScale)) * textureScale).r; + vec4 valueVector = vec4(value, value, value, value); + alphaValues = step(textureValueMinima, valueVector) * step(valueVector, textureValueMaxima); +} diff --git a/interface/src/Application.cpp b/interface/src/Application.cpp index 6bac9593ae..4c7f871a91 100644 --- a/interface/src/Application.cpp +++ b/interface/src/Application.cpp @@ -246,7 +246,8 @@ Application::Application(int& 
argc, char** argv, QElapsedTimer &startup_time) : connect(&domainHandler, SIGNAL(connectedToDomain(const QString&)), SLOT(updateWindowTitle())); connect(&domainHandler, SIGNAL(disconnectedFromDomain()), SLOT(updateWindowTitle())); connect(&domainHandler, &DomainHandler::settingsReceived, this, &Application::domainSettingsReceived); - + connect(&domainHandler, &DomainHandler::hostnameChanged, Menu::getInstance(), &Menu::clearLoginDialogDisplayedFlag); + // hookup VoxelEditSender to PaymentManager so we can pay for octree edits const PaymentManager& paymentManager = PaymentManager::getInstance(); connect(&_voxelEditSender, &VoxelEditPacketSender::octreePaymentRequired, @@ -894,7 +895,7 @@ void Application::keyPressEvent(QKeyEvent* event) { } break; - case Qt::Key_Space: + case Qt::Key_Apostrophe: resetSensors(); break; @@ -1053,7 +1054,6 @@ void Application::keyPressEvent(QKeyEvent* event) { Menu::getInstance()->triggerOption(MenuOption::FrustumRenderMode); } break; - break; case Qt::Key_Percent: Menu::getInstance()->triggerOption(MenuOption::Stats); break; @@ -1772,14 +1772,7 @@ void Application::init() { _lastTimeUpdated.start(); Menu::getInstance()->loadSettings(); - if (Menu::getInstance()->getAudioJitterBufferFrames() != 0) { - _audio.setDynamicJitterBuffers(false); - _audio.setStaticDesiredJitterBufferFrames(Menu::getInstance()->getAudioJitterBufferFrames()); - } else { - _audio.setDynamicJitterBuffers(true); - } - - _audio.setMaxFramesOverDesired(Menu::getInstance()->getMaxFramesOverDesired()); + _audio.setReceivedAudioStreamSettings(Menu::getInstance()->getReceivedAudioStreamSettings()); qDebug("Loaded settings"); @@ -3747,6 +3740,10 @@ ScriptEngine* Application::loadScript(const QString& scriptName, bool loadScript scriptEngine->registerGlobalObject("Camera", cameraScriptable); connect(scriptEngine, SIGNAL(finished(const QString&)), cameraScriptable, SLOT(deleteLater())); +#ifdef Q_OS_MAC + scriptEngine->registerGlobalObject("SpeechRecognizer", Menu::getInstance()->getSpeechRecognizer()); +#endif + ClipboardScriptingInterface* clipboardScriptable = new ClipboardScriptingInterface(); scriptEngine->registerGlobalObject("Clipboard", clipboardScriptable); connect(scriptEngine, SIGNAL(finished(const QString&)), clipboardScriptable, SLOT(deleteLater())); @@ -3829,6 +3826,10 @@ void Application::stopAllScripts(bool restart) { it.value()->stop(); qDebug() << "stopping script..." << it.key(); } + // HACK: ATM scripts cannot set/get their animation priorities, so we clear priorities + // whenever a script stops in case it happened to have been setting joint rotations. + // TODO: expose animation priorities and provide a layered animation control system. + _myAvatar->clearJointAnimationPriorities(); } void Application::stopScript(const QString &scriptName) { @@ -3836,6 +3837,10 @@ void Application::stopScript(const QString &scriptName) { if (_scriptEnginesHash.contains(scriptURLString)) { _scriptEnginesHash.value(scriptURLString)->stop(); qDebug() << "stopping script..." << scriptName; + // HACK: ATM scripts cannot set/get their animation priorities, so we clear priorities + // whenever a script stops in case it happened to have been setting joint rotations. + // TODO: expose animation priorities and provide a layered animation control system. 
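+        // Hypothetical sketch (illustrative only) of the layered control the TODO above asks for:
+        // each joint would remember the priority that last wrote it, and a new rotation would only
+        // be accepted from an equal or higher priority, e.g.
+        //     if (priority >= _jointPriorities[jointIndex]) {
+        //         _jointRotations[jointIndex] = rotation;
+        //         _jointPriorities[jointIndex] = priority;
+        //     }
+        // (_jointPriorities and _jointRotations are made-up names.) clearJointAnimationPriorities()
+        // presumably resets those stored priorities so a stopped script cannot keep later,
+        // lower-priority animations locked out.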
+ _myAvatar->clearJointAnimationPriorities(); } } diff --git a/interface/src/Audio.cpp b/interface/src/Audio.cpp index 0484860c65..8a788df831 100644 --- a/interface/src/Audio.cpp +++ b/interface/src/Audio.cpp @@ -72,7 +72,7 @@ Audio::Audio(QObject* parent) : _proceduralAudioOutput(NULL), _proceduralOutputDevice(NULL), _inputRingBuffer(0), - _receivedAudioStream(0, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, true, 0, 0, true), + _receivedAudioStream(0, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, InboundAudioStream::Settings()), _isStereoInput(false), _averagedLatency(0.0), _lastInputLoudness(0), @@ -82,7 +82,6 @@ Audio::Audio(QObject* parent) : _noiseGateSampleCounter(0), _noiseGateOpen(false), _noiseGateEnabled(true), - _peqEnabled(false), _toneInjectionEnabled(false), _noiseGateFramesToClose(0), _totalInputAudioSamples(0), @@ -102,9 +101,11 @@ Audio::Audio(QObject* parent) : _scopeOutputOffset(0), _framesPerScope(DEFAULT_FRAMES_PER_SCOPE), _samplesPerScope(NETWORK_SAMPLES_PER_FRAME * _framesPerScope), + _peqEnabled(false), _scopeInput(0), _scopeOutputLeft(0), _scopeOutputRight(0), + _scopeLastFrame(), _statsEnabled(false), _statsShowInjectedStreams(false), _outgoingAvatarAudioSequenceNumber(0), @@ -113,14 +114,17 @@ Audio::Audio(QObject* parent) : _audioOutputMsecsUnplayedStats(1, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS), _lastSentAudioPacket(0), _packetSentTimeGaps(1, APPROXIMATELY_30_SECONDS_OF_AUDIO_PACKETS), - _audioOutputIODevice(*this) + _audioOutputIODevice(_receivedAudioStream) { // clear the array of locally injected samples memset(_localProceduralSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL); // Create the noise sample array _noiseSampleFrames = new float[NUMBER_OF_NOISE_SAMPLE_FRAMES]; - connect(&_receivedAudioStream, &MixedProcessedAudioStream::processSamples, this, &Audio::processReceivedAudioStreamSamples, Qt::DirectConnection); + connect(&_receivedAudioStream, &MixedProcessedAudioStream::addedSilence, this, &Audio::addStereoSilenceToScope, Qt::DirectConnection); + connect(&_receivedAudioStream, &MixedProcessedAudioStream::addedLastFrameRepeatedWithFade, this, &Audio::addLastFrameRepeatedWithFadeToScope, Qt::DirectConnection); + connect(&_receivedAudioStream, &MixedProcessedAudioStream::addedStereoSamples, this, &Audio::addStereoSamplesToScope, Qt::DirectConnection); + connect(&_receivedAudioStream, &MixedProcessedAudioStream::processSamples, this, &Audio::processReceivedSamples, Qt::DirectConnection); } void Audio::init(QGLWidget *parent) { @@ -460,9 +464,12 @@ void Audio::handleAudioInput() { static char audioDataPacket[MAX_PACKET_SIZE]; static int numBytesPacketHeader = numBytesForPacketHeaderGivenPacketType(PacketTypeMicrophoneAudioNoEcho); - static int leadingBytes = numBytesPacketHeader + sizeof(quint16) + sizeof(glm::vec3) + sizeof(glm::quat) + sizeof(quint8); - static int16_t* networkAudioSamples = (int16_t*) (audioDataPacket + leadingBytes); + // NOTE: we assume PacketTypeMicrophoneAudioWithEcho has same size headers as + // PacketTypeMicrophoneAudioNoEcho. If not, then networkAudioSamples will be pointing to the wrong place for writing + // audio samples with echo. 
+ static int leadingBytes = numBytesPacketHeader + sizeof(quint16) + sizeof(glm::vec3) + sizeof(glm::quat) + sizeof(quint8); + static int16_t* networkAudioSamples = (int16_t*)(audioDataPacket + leadingBytes); float inputToNetworkInputRatio = calculateDeviceToNetworkInputRatio(_numInputCallbackBytes); @@ -475,7 +482,7 @@ void Audio::handleAudioInput() { int16_t* ioBuffer = (int16_t*)inputByteArray.data(); - _peq.render( ioBuffer, ioBuffer, inputByteArray.size() / sizeof(int16_t) ); + _peq.render(ioBuffer, ioBuffer, inputByteArray.size() / sizeof(int16_t)); } if (Menu::getInstance()->isOptionChecked(MenuOption::EchoLocalAudio) && !_muted && _audioOutput) { @@ -668,32 +675,27 @@ void Audio::handleAudioInput() { if (!_isStereoInput && _scopeEnabled && !_scopeEnabledPause) { unsigned int numMonoAudioChannels = 1; unsigned int monoAudioChannel = 0; - addBufferToScope(_scopeInput, _scopeInputOffset, networkAudioSamples, monoAudioChannel, numMonoAudioChannels); - _scopeInputOffset += NETWORK_SAMPLES_PER_FRAME; - _scopeInputOffset %= _samplesPerScope; + _scopeInputOffset = addBufferToScope(_scopeInput, _scopeInputOffset, networkAudioSamples, NETWORK_SAMPLES_PER_FRAME, monoAudioChannel, numMonoAudioChannels); } NodeList* nodeList = NodeList::getInstance(); SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer); + + if (_recorder && _recorder.data()->isRecording()) { + _recorder.data()->record(reinterpret_cast(networkAudioSamples), numNetworkBytes); + } + if (audioMixer && audioMixer->getActiveSocket()) { MyAvatar* interfaceAvatar = Application::getInstance()->getAvatar(); glm::vec3 headPosition = interfaceAvatar->getHead()->getPosition(); glm::quat headOrientation = interfaceAvatar->getHead()->getFinalOrientationInWorldFrame(); quint8 isStereo = _isStereoInput ? 
1 : 0; - - int numAudioBytes = 0; - + PacketType packetType; if (_lastInputLoudness == 0) { packetType = PacketTypeSilentAudioFrame; - - // we need to indicate how many silent samples this is to the audio mixer - networkAudioSamples[0] = numNetworkSamples; - numAudioBytes = sizeof(int16_t); } else { - numAudioBytes = numNetworkBytes; - if (Menu::getInstance()->isOptionChecked(MenuOption::EchoServerAudio)) { packetType = PacketTypeMicrophoneAudioWithEcho; } else { @@ -702,21 +704,31 @@ void Audio::handleAudioInput() { } char* currentPacketPtr = audioDataPacket + populatePacketHeader(audioDataPacket, packetType); - + // pack sequence number memcpy(currentPacketPtr, &_outgoingAvatarAudioSequenceNumber, sizeof(quint16)); currentPacketPtr += sizeof(quint16); - // set the mono/stereo byte - *currentPacketPtr++ = isStereo; + if (packetType == PacketTypeSilentAudioFrame) { + // pack num silent samples + quint16 numSilentSamples = numNetworkSamples; + memcpy(currentPacketPtr, &numSilentSamples, sizeof(quint16)); + currentPacketPtr += sizeof(quint16); + } else { + // set the mono/stereo byte + *currentPacketPtr++ = isStereo; - // memcpy the three float positions - memcpy(currentPacketPtr, &headPosition, sizeof(headPosition)); - currentPacketPtr += (sizeof(headPosition)); + // memcpy the three float positions + memcpy(currentPacketPtr, &headPosition, sizeof(headPosition)); + currentPacketPtr += (sizeof(headPosition)); + + // memcpy our orientation + memcpy(currentPacketPtr, &headOrientation, sizeof(headOrientation)); + currentPacketPtr += sizeof(headOrientation); - // memcpy our orientation - memcpy(currentPacketPtr, &headOrientation, sizeof(headOrientation)); - currentPacketPtr += sizeof(headOrientation); + // audio samples have already been packed (written to networkAudioSamples) + currentPacketPtr += numNetworkBytes; + } // first time this is 0 if (_lastSentAudioPacket == 0) { @@ -728,18 +740,58 @@ void Audio::handleAudioInput() { _lastSentAudioPacket = now; } - - nodeList->writeDatagram(audioDataPacket, numAudioBytes + leadingBytes, audioMixer); + + int packetBytes = currentPacketPtr - audioDataPacket; + nodeList->writeDatagram(audioDataPacket, packetBytes, audioMixer); _outgoingAvatarAudioSequenceNumber++; Application::getInstance()->getBandwidthMeter()->outputStream(BandwidthMeter::AUDIO) - .updateValue(numAudioBytes + leadingBytes); + .updateValue(packetBytes); } delete[] inputAudioSamples; } } -void Audio::processReceivedAudioStreamSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer) { +const int STEREO_FACTOR = 2; + +void Audio::addStereoSilenceToScope(int silentSamplesPerChannel) { + if (!_scopeEnabled || _scopeEnabledPause) { + return; + } + addSilenceToScope(_scopeOutputLeft, _scopeOutputOffset, silentSamplesPerChannel); + _scopeOutputOffset = addSilenceToScope(_scopeOutputRight, _scopeOutputOffset, silentSamplesPerChannel); +} + +void Audio::addStereoSamplesToScope(const QByteArray& samples) { + if (!_scopeEnabled || _scopeEnabledPause) { + return; + } + const int16_t* samplesData = reinterpret_cast(samples.data()); + int samplesPerChannel = samples.size() / sizeof(int16_t) / STEREO_FACTOR; + + addBufferToScope(_scopeOutputLeft, _scopeOutputOffset, samplesData, samplesPerChannel, 0, STEREO_FACTOR); + _scopeOutputOffset = addBufferToScope(_scopeOutputRight, _scopeOutputOffset, samplesData, samplesPerChannel, 1, STEREO_FACTOR); + + _scopeLastFrame = samples.right(NETWORK_BUFFER_LENGTH_BYTES_STEREO); +} + +void Audio::addLastFrameRepeatedWithFadeToScope(int samplesPerChannel) { 
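+    // The loop below re-plots the most recently scoped stereo frame (_scopeLastFrame) into the output
+    // scope, one network frame at a time, scaling each repetition by
+    // calculateRepeatedFrameFadeFactor(indexOfRepeat), presumably so repeated frames are drawn
+    // progressively quieter rather than at their original level.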
+ const int16_t* lastFrameData = reinterpret_cast(_scopeLastFrame.data()); + + int samplesRemaining = samplesPerChannel; + int indexOfRepeat = 0; + do { + int samplesToWriteThisIteration = std::min(samplesRemaining, (int)NETWORK_SAMPLES_PER_FRAME); + float fade = calculateRepeatedFrameFadeFactor(indexOfRepeat); + addBufferToScope(_scopeOutputLeft, _scopeOutputOffset, lastFrameData, samplesToWriteThisIteration, 0, STEREO_FACTOR, fade); + _scopeOutputOffset = addBufferToScope(_scopeOutputRight, _scopeOutputOffset, lastFrameData, samplesToWriteThisIteration, 1, STEREO_FACTOR, fade); + + samplesRemaining -= samplesToWriteThisIteration; + indexOfRepeat++; + } while (samplesRemaining > 0); +} + +void Audio::processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer) { const int numNetworkOutputSamples = inputBuffer.size() / sizeof(int16_t); const int numDeviceOutputSamples = numNetworkOutputSamples * (_outputFormat.sampleRate() * _outputFormat.channelCount()) @@ -784,30 +836,6 @@ void Audio::processReceivedAudioStreamSamples(const QByteArray& inputBuffer, QBy numNetworkOutputSamples, numDeviceOutputSamples, _desiredOutputFormat, _outputFormat); - - - if (_scopeEnabled && !_scopeEnabledPause) { - unsigned int numAudioChannels = _desiredOutputFormat.channelCount(); - const int16_t* samples = receivedSamples; - for (int numSamples = numNetworkOutputSamples / numAudioChannels; numSamples > 0; numSamples -= NETWORK_SAMPLES_PER_FRAME) { - - unsigned int audioChannel = 0; - addBufferToScope( - _scopeOutputLeft, - _scopeOutputOffset, - samples, audioChannel, numAudioChannels); - - audioChannel = 1; - addBufferToScope( - _scopeOutputRight, - _scopeOutputOffset, - samples, audioChannel, numAudioChannels); - - _scopeOutputOffset += NETWORK_SAMPLES_PER_FRAME; - _scopeOutputOffset %= _samplesPerScope; - samples += NETWORK_SAMPLES_PER_FRAME * numAudioChannels; - } - } } void Audio::addReceivedAudioToStream(const QByteArray& audioByteArray) { @@ -820,9 +848,6 @@ void Audio::addReceivedAudioToStream(const QByteArray& audioByteArray) { Application::getInstance()->getBandwidthMeter()->inputStream(BandwidthMeter::AUDIO).updateValue(audioByteArray.size()); } - - - void Audio::parseAudioStreamStatsPacket(const QByteArray& packet) { int numBytesPacketHeader = numBytesForPacketHeader(packet); @@ -855,12 +880,13 @@ void Audio::parseAudioStreamStatsPacket(const QByteArray& packet) { void Audio::sendDownstreamAudioStatsPacket() { - // since this function is called every second, we'll sample some of our stats here - + // since this function is called every second, we'll sample for some of our stats here _inputRingBufferMsecsAvailableStats.update(getInputRingBufferMsecsAvailable()); - _audioOutputMsecsUnplayedStats.update(getAudioOutputMsecsUnplayed()); + // also, call _receivedAudioStream's per-second callback + _receivedAudioStream.perSecondCallbackForUpdatingStats(); + char packet[MAX_PACKET_SIZE]; // pack header @@ -878,7 +904,7 @@ void Audio::sendDownstreamAudioStatsPacket() { dataAt += sizeof(quint16); // pack downstream audio stream stats - AudioStreamStats stats = _receivedAudioStream.updateSeqHistoryAndGetAudioStreamStats(); + AudioStreamStats stats = _receivedAudioStream.getAudioStreamStats(); memcpy(dataAt, &stats, sizeof(AudioStreamStats)); dataAt += sizeof(AudioStreamStats); @@ -911,7 +937,7 @@ void Audio::addSpatialAudioToBuffer(unsigned int sampleTime, const QByteArray& s unsigned int delayCount = delay * _desiredOutputFormat.channelCount(); unsigned int silentCount = (remaining < 
delayCount) ? remaining : delayCount; if (silentCount) { - _spatialAudioRingBuffer.addSilentFrame(silentCount); + _spatialAudioRingBuffer.addSilentSamples(silentCount); } // Recalculate the number of remaining samples @@ -1215,8 +1241,6 @@ void Audio::selectAudioFilterSmiley() { void Audio::toggleScope() { _scopeEnabled = !_scopeEnabled; if (_scopeEnabled) { - _scopeInputOffset = 0; - _scopeOutputOffset = 0; allocateScope(); } else { freeScope(); @@ -1254,6 +1278,8 @@ void Audio::selectAudioScopeFiftyFrames() { } void Audio::allocateScope() { + _scopeInputOffset = 0; + _scopeOutputOffset = 0; int num = _samplesPerScope * sizeof(int16_t); _scopeInput = new QByteArray(num, 0); _scopeOutputLeft = new QByteArray(num, 0); @@ -1285,12 +1311,18 @@ void Audio::freeScope() { } } -void Audio::addBufferToScope( - QByteArray* byteArray, unsigned int frameOffset, const int16_t* source, unsigned int sourceChannel, unsigned int sourceNumberOfChannels) { - +int Audio::addBufferToScope(QByteArray* byteArray, int frameOffset, const int16_t* source, int sourceSamplesPerChannel, + unsigned int sourceChannel, unsigned int sourceNumberOfChannels, float fade) { + if (!_scopeEnabled || _scopeEnabledPause) { + return 0; + } + // Constant multiplier to map sample value to vertical size of scope float multiplier = (float)MULTIPLIER_SCOPE_HEIGHT / logf(2.0f); + // Used to scale each sample. (logf(sample) + fadeOffset) is same as logf(sample * fade). + float fadeOffset = logf(fade); + // Temporary variable receives sample value float sample; @@ -1301,17 +1333,41 @@ void Audio::addBufferToScope( // Short int pointer to mapped samples in byte array int16_t* destination = (int16_t*) byteArray->data(); - for (unsigned int i = 0; i < NETWORK_SAMPLES_PER_FRAME; i++) { + for (int i = 0; i < sourceSamplesPerChannel; i++) { sample = (float)source[i * sourceNumberOfChannels + sourceChannel]; - if (sample > 0) { - value = (int16_t)(multiplier * logf(sample)); - } else if (sample < 0) { - value = (int16_t)(-multiplier * logf(-sample)); + if (sample > 1) { + value = (int16_t)(multiplier * (logf(sample) + fadeOffset)); + } else if (sample < -1) { + value = (int16_t)(-multiplier * (logf(-sample) + fadeOffset)); } else { value = 0; } - destination[i + frameOffset] = value; + destination[frameOffset] = value; + frameOffset = (frameOffset == _samplesPerScope - 1) ? 
0 : frameOffset + 1; } + return frameOffset; +} + +int Audio::addSilenceToScope(QByteArray* byteArray, int frameOffset, int silentSamples) { + + QMutexLocker lock(&_guard); + // Short int pointer to mapped samples in byte array + int16_t* destination = (int16_t*)byteArray->data(); + + if (silentSamples >= _samplesPerScope) { + memset(destination, 0, byteArray->size()); + return frameOffset; + } + + int samplesToBufferEnd = _samplesPerScope - frameOffset; + if (silentSamples > samplesToBufferEnd) { + memset(destination + frameOffset, 0, samplesToBufferEnd * sizeof(int16_t)); + memset(destination, 0, (silentSamples - samplesToBufferEnd) * sizeof(int16_t)); + } else { + memset(destination + frameOffset, 0, silentSamples * sizeof(int16_t)); + } + + return (frameOffset + silentSamples) % _samplesPerScope; } void Audio::renderStats(const float* color, int width, int height) { @@ -1512,17 +1568,17 @@ void Audio::renderScope(int width, int height) { return; static const float backgroundColor[4] = { 0.4f, 0.4f, 0.4f, 0.6f }; - static const float gridColor[4] = { 0.3f, 0.3f, 0.3f, 0.6f }; + static const float gridColor[4] = { 0.7f, 0.7f, 0.7f, 1.0f }; static const float inputColor[4] = { 0.3f, 1.0f, 0.3f, 1.0f }; static const float outputLeftColor[4] = { 1.0f, 0.3f, 0.3f, 1.0f }; static const float outputRightColor[4] = { 0.3f, 0.3f, 1.0f, 1.0f }; static const int gridRows = 2; int gridCols = _framesPerScope; - int x = (width - SCOPE_WIDTH) / 2; - int y = (height - SCOPE_HEIGHT) / 2; - int w = SCOPE_WIDTH; - int h = SCOPE_HEIGHT; + int x = (width - (int)SCOPE_WIDTH) / 2; + int y = (height - (int)SCOPE_HEIGHT) / 2; + int w = (int)SCOPE_WIDTH; + int h = (int)SCOPE_HEIGHT; renderBackground(backgroundColor, x, y, w, h); renderGrid(gridColor, x, y, w, h, gridRows, gridCols); @@ -1712,7 +1768,7 @@ bool Audio::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo) // setup our general output device for audio-mixer audio _audioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this); _audioOutput->setBufferSize(AUDIO_OUTPUT_BUFFER_SIZE_FRAMES * _outputFrameSize * sizeof(int16_t)); - qDebug() << "Ring Buffer capacity in frames: " << _audioOutput->bufferSize() / sizeof(int16_t) / (float)_outputFrameSize; + qDebug() << "Output Buffer capacity in frames: " << _audioOutput->bufferSize() / sizeof(int16_t) / (float)_outputFrameSize; _audioOutputIODevice.start(); _audioOutput->start(&_audioOutputIODevice); @@ -1787,13 +1843,11 @@ float Audio::getInputRingBufferMsecsAvailable() const { } qint64 Audio::AudioOutputIODevice::readData(char * data, qint64 maxSize) { - MixedProcessedAudioStream& receivedAUdioStream = _parent._receivedAudioStream; - int samplesRequested = maxSize / sizeof(int16_t); int samplesPopped; int bytesWritten; - if ((samplesPopped = receivedAUdioStream.popSamples(samplesRequested, false)) > 0) { - AudioRingBuffer::ConstIterator lastPopOutput = receivedAUdioStream.getLastPopOutput(); + if ((samplesPopped = _receivedAudioStream.popSamples(samplesRequested, false)) > 0) { + AudioRingBuffer::ConstIterator lastPopOutput = _receivedAudioStream.getLastPopOutput(); + lastPopOutput.readSamples((int16_t*)data, samplesPopped); bytesWritten = samplesPopped * sizeof(int16_t); } else { diff --git a/interface/src/Audio.h b/interface/src/Audio.h index 4fb54218af..b5069f3b3a 100644 --- a/interface/src/Audio.h +++ b/interface/src/Audio.h @@ -17,9 +17,11 @@ #include "InterfaceConfig.h" #include "AudioStreamStats.h" +#include "Recorder.h" #include "RingBufferHistory.h" #include "MovingMinMaxAvg.h"
#include "AudioFilter.h" +#include "AudioFilterBank.h" #include #include @@ -49,14 +51,14 @@ public: class AudioOutputIODevice : public QIODevice { public: - AudioOutputIODevice(Audio& parent) : _parent(parent) {}; + AudioOutputIODevice(MixedProcessedAudioStream& receivedAudioStream) : _receivedAudioStream(receivedAudioStream) {}; void start() { open(QIODevice::ReadOnly); } void stop() { close(); } qint64 readData(char * data, qint64 maxSize); qint64 writeData(const char * data, qint64 maxSize) { return 0; } private: - Audio& _parent; + MixedProcessedAudioStream& _receivedAudioStream; }; @@ -72,10 +74,7 @@ public: virtual void startCollisionSound(float magnitude, float frequency, float noise, float duration, bool flashScreen); virtual void startDrumSound(float volume, float frequency, float duration, float decay); - void setDynamicJitterBuffers(bool dynamicJitterBuffers) { _receivedAudioStream.setDynamicJitterBuffers(dynamicJitterBuffers); } - void setStaticDesiredJitterBufferFrames(int staticDesiredJitterBufferFrames) { _receivedAudioStream.setStaticDesiredJitterBufferFrames(staticDesiredJitterBufferFrames); } - - void setMaxFramesOverDesired(int maxFramesOverDesired) { _receivedAudioStream.setMaxFramesOverDesired(maxFramesOverDesired); } + void setReceivedAudioStreamSettings(const InboundAudioStream::Settings& settings) { _receivedAudioStream.setSettings(settings); } int getDesiredJitterBufferFrames() const { return _receivedAudioStream.getDesiredJitterBufferFrames(); } @@ -102,6 +101,8 @@ public: float getAudioOutputMsecsUnplayed() const; float getAudioOutputAverageMsecsUnplayed() const { return (float)_audioOutputMsecsUnplayedStats.getWindowAverage(); } + + void setRecorder(RecorderPointer recorder) { _recorder = recorder; } public slots: void start(); @@ -109,7 +110,6 @@ public slots: void addReceivedAudioToStream(const QByteArray& audioByteArray); void parseAudioStreamStatsPacket(const QByteArray& packet); void addSpatialAudioToBuffer(unsigned int sampleTime, const QByteArray& spatialAudio, unsigned int numSamples); - void processReceivedAudioStreamSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer); void handleAudioInput(); void reset(); void resetStats(); @@ -126,6 +126,10 @@ public slots: void selectAudioScopeFiveFrames(); void selectAudioScopeTwentyFrames(); void selectAudioScopeFiftyFrames(); + void addStereoSilenceToScope(int silentSamplesPerChannel); + void addLastFrameRepeatedWithFadeToScope(int samplesPerChannel); + void addStereoSamplesToScope(const QByteArray& samples); + void processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer); void toggleAudioFilter(); void selectAudioFilterFlat(); void selectAudioFilterTrebleCut(); @@ -252,8 +256,9 @@ private: void reallocateScope(int frames); // Audio scope methods for data acquisition - void addBufferToScope( - QByteArray* byteArray, unsigned int frameOffset, const int16_t* source, unsigned int sourceChannel, unsigned int sourceNumberOfChannels); + int addBufferToScope(QByteArray* byteArray, int frameOffset, const int16_t* source, int sourceSamples, + unsigned int sourceChannel, unsigned int sourceNumberOfChannels, float fade = 1.0f); + int addSilenceToScope(QByteArray* byteArray, int frameOffset, int silentSamples); // Audio scope methods for rendering void renderBackground(const float* color, int x, int y, int width, int height); @@ -278,13 +283,14 @@ private: int _samplesPerScope; // Multi-band parametric EQ - bool _peqEnabled; - AudioFilterPEQ3 _peq; + bool _peqEnabled; + AudioFilterPEQ3m 
_peq; QMutex _guard; QByteArray* _scopeInput; QByteArray* _scopeOutputLeft; QByteArray* _scopeOutputRight; + QByteArray _scopeLastFrame; #ifdef _WIN32 static const unsigned int STATS_WIDTH = 1500; #else @@ -308,6 +314,8 @@ private: MovingMinMaxAvg _packetSentTimeGaps; AudioOutputIODevice _audioOutputIODevice; + + WeakRecorderPointer _recorder; }; diff --git a/interface/src/DatagramProcessor.cpp b/interface/src/DatagramProcessor.cpp index e36e161abb..62f5b6453a 100644 --- a/interface/src/DatagramProcessor.cpp +++ b/interface/src/DatagramProcessor.cpp @@ -48,6 +48,7 @@ void DatagramProcessor::processDatagrams() { // only process this packet if we have a match on the packet version switch (packetTypeForPacket(incomingPacket)) { case PacketTypeMixedAudio: + case PacketTypeSilentAudioFrame: QMetaObject::invokeMethod(&application->_audio, "addReceivedAudioToStream", Qt::QueuedConnection, Q_ARG(QByteArray, incomingPacket)); break; diff --git a/interface/src/Menu.cpp b/interface/src/Menu.cpp index c21b533695..0dcfd60051 100644 --- a/interface/src/Menu.cpp +++ b/interface/src/Menu.cpp @@ -82,8 +82,7 @@ const int CONSOLE_HEIGHT = 200; Menu::Menu() : _actionHash(), - _audioJitterBufferFrames(0), - _maxFramesOverDesired(0), + _receivedAudioStreamSettings(), _bandwidthDialog(NULL), _fieldOfView(DEFAULT_FIELD_OF_VIEW_DEGREES), _realWorldFieldOfView(DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES), @@ -94,6 +93,9 @@ Menu::Menu() : _octreeStatsDialog(NULL), _lodToolsDialog(NULL), _userLocationsDialog(NULL), +#ifdef Q_OS_MAC + _speechRecognizer(), +#endif _maxVoxels(DEFAULT_MAX_VOXELS_PER_SYSTEM), _voxelSizeScale(DEFAULT_OCTREE_SIZE_SCALE), _oculusUIAngularSize(DEFAULT_OCULUS_UI_ANGULAR_SIZE), @@ -112,6 +114,7 @@ Menu::Menu() : _loginAction(NULL), _preferencesDialog(NULL), _loginDialog(NULL), + _hasLoginDialogDisplayed(false), _snapshotsLocation(), _scriptsLocation(), _walletPrivateKey() @@ -221,19 +224,16 @@ Menu::Menu() : addActionToQMenuAndActionHash(editMenu, MenuOption::Attachments, 0, this, SLOT(editAttachments())); addActionToQMenuAndActionHash(editMenu, MenuOption::Animations, 0, this, SLOT(editAnimations())); - addDisabledActionAndSeparator(editMenu, "Physics"); - QObject* avatar = appInstance->getAvatar(); - addCheckableActionToQMenuAndActionHash(editMenu, MenuOption::ObeyEnvironmentalGravity, Qt::SHIFT | Qt::Key_G, false, - avatar, SLOT(updateMotionBehaviorsFromMenu())); - addCheckableActionToQMenuAndActionHash(editMenu, MenuOption::StandOnNearbyFloors, 0, true, - avatar, SLOT(updateMotionBehaviorsFromMenu())); - - addAvatarCollisionSubMenu(editMenu); - QMenu* toolsMenu = addMenu("Tools"); addActionToQMenuAndActionHash(toolsMenu, MenuOption::MetavoxelEditor, 0, this, SLOT(showMetavoxelEditor())); addActionToQMenuAndActionHash(toolsMenu, MenuOption::ScriptEditor, Qt::ALT | Qt::Key_S, this, SLOT(showScriptEditor())); +#ifdef Q_OS_MAC + QAction* speechRecognizerAction = addCheckableActionToQMenuAndActionHash(toolsMenu, MenuOption::ControlWithSpeech, + Qt::CTRL | Qt::SHIFT | Qt::Key_C, _speechRecognizer.getEnabled(), &_speechRecognizer, SLOT(setEnabled(bool))); + connect(&_speechRecognizer, SIGNAL(enabledUpdated(bool)), speechRecognizerAction, SLOT(setChecked(bool))); +#endif + #ifdef HAVE_QXMPP _chatAction = addActionToQMenuAndActionHash(toolsMenu, MenuOption::Chat, @@ -257,6 +257,45 @@ Menu::Menu() : this, SLOT(toggleConsole())); + QMenu* avatarMenu = addMenu("Avatar"); + + QMenu* avatarSizeMenu = avatarMenu->addMenu("Size"); + addActionToQMenuAndActionHash(avatarSizeMenu, + 
MenuOption::IncreaseAvatarSize, + Qt::Key_Plus, + appInstance->getAvatar(), + SLOT(increaseSize())); + addActionToQMenuAndActionHash(avatarSizeMenu, + MenuOption::DecreaseAvatarSize, + Qt::Key_Minus, + appInstance->getAvatar(), + SLOT(decreaseSize())); + addActionToQMenuAndActionHash(avatarSizeMenu, + MenuOption::ResetAvatarSize, + Qt::Key_Equal, + appInstance->getAvatar(), + SLOT(resetSize())); + + QObject* avatar = appInstance->getAvatar(); + addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ChatCircling, 0, false); + addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::GlowWhenSpeaking, 0, true); + addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::BlueSpeechSphere, 0, true); + addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ObeyEnvironmentalGravity, Qt::SHIFT | Qt::Key_G, false, + avatar, SLOT(updateMotionBehaviorsFromMenu())); + addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::StandOnNearbyFloors, 0, true, + avatar, SLOT(updateMotionBehaviorsFromMenu())); + + QMenu* collisionsMenu = avatarMenu->addMenu("Collide With..."); + addCheckableActionToQMenuAndActionHash(collisionsMenu, MenuOption::CollideAsRagdoll); + addCheckableActionToQMenuAndActionHash(collisionsMenu, MenuOption::CollideWithAvatars, + 0, true, avatar, SLOT(updateCollisionGroups())); + addCheckableActionToQMenuAndActionHash(collisionsMenu, MenuOption::CollideWithVoxels, + 0, false, avatar, SLOT(updateCollisionGroups())); + addCheckableActionToQMenuAndActionHash(collisionsMenu, MenuOption::CollideWithParticles, + 0, true, avatar, SLOT(updateCollisionGroups())); + addCheckableActionToQMenuAndActionHash(collisionsMenu, MenuOption::CollideWithEnvironment, + 0, false, avatar, SLOT(updateCollisionGroups())); + QMenu* viewMenu = addMenu("View"); #ifdef Q_OS_MAC @@ -304,25 +343,6 @@ Menu::Menu() : Qt::CTRL | Qt::SHIFT | Qt::Key_3, false, &nodeBounds, SLOT(setShowParticleNodes(bool))); - - QMenu* avatarSizeMenu = viewMenu->addMenu("Avatar Size"); - - addActionToQMenuAndActionHash(avatarSizeMenu, - MenuOption::IncreaseAvatarSize, - Qt::Key_Plus, - appInstance->getAvatar(), - SLOT(increaseSize())); - addActionToQMenuAndActionHash(avatarSizeMenu, - MenuOption::DecreaseAvatarSize, - Qt::Key_Minus, - appInstance->getAvatar(), - SLOT(decreaseSize())); - addActionToQMenuAndActionHash(avatarSizeMenu, - MenuOption::ResetAvatarSize, - Qt::Key_Equal, - appInstance->getAvatar(), - SLOT(resetSize())); - addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::OffAxisProjection, 0, false); addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::TurnWithHead, 0, false); addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::MoveWithLean, 0, false); @@ -338,111 +358,97 @@ Menu::Menu() : QMenu* developerMenu = addMenu("Developer"); - QMenu* renderOptionsMenu = developerMenu->addMenu("Rendering Options"); - - addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Stars, Qt::Key_Asterisk, true); + QMenu* renderOptionsMenu = developerMenu->addMenu("Render"); addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Atmosphere, Qt::SHIFT | Qt::Key_A, true); + addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Avatars, 0, true); + addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Metavoxels, 0, true); + addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Models, 0, true); + addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Particles, 0, true); + + QMenu* shadowMenu = 
renderOptionsMenu->addMenu("Shadows"); + QActionGroup* shadowGroup = new QActionGroup(shadowMenu); + shadowGroup->addAction(addCheckableActionToQMenuAndActionHash(shadowMenu, "None", 0, true)); + shadowGroup->addAction(addCheckableActionToQMenuAndActionHash(shadowMenu, MenuOption::SimpleShadows, 0, false)); + shadowGroup->addAction(addCheckableActionToQMenuAndActionHash(shadowMenu, MenuOption::CascadedShadows, 0, false)); + shadowGroup->addAction(addCheckableActionToQMenuAndActionHash(shadowMenu, MenuOption::AvatarsReceiveShadows, 0, true)); + + addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Stars, Qt::Key_Asterisk, true); + addCheckableActionToQMenuAndActionHash(renderOptionsMenu, + MenuOption::Voxels, + Qt::SHIFT | Qt::Key_V, + true, + appInstance, + SLOT(setRenderVoxels(bool))); addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::EnableGlowEffect, 0, true); addActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::GlowMode, 0, appInstance->getGlowEffect(), SLOT(cycleRenderMode())); - - QMenu* shadowMenu = renderOptionsMenu->addMenu("Shadows"); - QActionGroup* shadowGroup = new QActionGroup(shadowMenu); - shadowGroup->addAction(addCheckableActionToQMenuAndActionHash(shadowMenu, "None", 0, true)); - shadowGroup->addAction(addCheckableActionToQMenuAndActionHash(shadowMenu, MenuOption::SimpleShadows, 0, false)); - shadowGroup->addAction(addCheckableActionToQMenuAndActionHash(shadowMenu, MenuOption::CascadedShadows, 0, false)); - - addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Metavoxels, 0, true); - addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::BuckyBalls, 0, false); - addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::StringHair, 0, false); - addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Particles, 0, true); addActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::LodTools, Qt::SHIFT | Qt::Key_L, this, SLOT(lodTools())); - QMenu* voxelOptionsMenu = developerMenu->addMenu("Voxel Options"); - - addCheckableActionToQMenuAndActionHash(voxelOptionsMenu, - MenuOption::Voxels, - Qt::SHIFT | Qt::Key_V, - true, - appInstance, - SLOT(setRenderVoxels(bool))); - - addCheckableActionToQMenuAndActionHash(voxelOptionsMenu, MenuOption::VoxelTextures); - addCheckableActionToQMenuAndActionHash(voxelOptionsMenu, MenuOption::AmbientOcclusion); - addCheckableActionToQMenuAndActionHash(voxelOptionsMenu, MenuOption::DontFadeOnVoxelServerChanges); - addCheckableActionToQMenuAndActionHash(voxelOptionsMenu, MenuOption::DisableAutoAdjustLOD); - - QMenu* modelOptionsMenu = developerMenu->addMenu("Model Options"); - addCheckableActionToQMenuAndActionHash(modelOptionsMenu, MenuOption::Models, 0, true); - addCheckableActionToQMenuAndActionHash(modelOptionsMenu, MenuOption::DisplayModelBounds, 0, false); - addCheckableActionToQMenuAndActionHash(modelOptionsMenu, MenuOption::DisplayModelElementProxy, 0, false); - addCheckableActionToQMenuAndActionHash(modelOptionsMenu, MenuOption::DisplayModelElementChildProxies, 0, false); - - QMenu* avatarOptionsMenu = developerMenu->addMenu("Avatar Options"); - - addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::Avatars, 0, true); - addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::AvatarsReceiveShadows, 0, true); - addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::RenderSkeletonCollisionShapes); - addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, 
MenuOption::RenderHeadCollisionShapes); - addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::RenderBoundingCollisionShapes); - addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::CollideAsRagdoll); - - addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::LookAtVectors, 0, false); + QMenu* avatarDebugMenu = developerMenu->addMenu("Avatar"); #ifdef HAVE_FACESHIFT - addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, + addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::Faceshift, 0, true, appInstance->getFaceshift(), SLOT(setTCPEnabled(bool))); #endif - #ifdef HAVE_FACEPLUS - addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::Faceplus, 0, true, - appInstance->getFaceplus(), SLOT(updateEnabled())); + addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::Faceplus, 0, true, + appInstance->getFaceplus(), SLOT(updateEnabled())); #endif - #ifdef HAVE_VISAGE - addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::Visage, 0, false, - appInstance->getVisage(), SLOT(updateEnabled())); + addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::Visage, 0, false, + appInstance->getVisage(), SLOT(updateEnabled())); #endif - addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::GlowWhenSpeaking, 0, true); - addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::ChatCircling, 0, false); - addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::FocusIndicators, 0, false); - - QMenu* sixenseOptionsMenu = developerMenu->addMenu("Sixense Options"); - addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::SixenseMouseInput, 0, true); - addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::SixenseLasers, 0, true); - - QMenu* handOptionsMenu = developerMenu->addMenu("Hand Options"); - - addCheckableActionToQMenuAndActionHash(handOptionsMenu, + addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderSkeletonCollisionShapes); + addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderHeadCollisionShapes); + addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderBoundingCollisionShapes); + addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderLookAtVectors, 0, false); + addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderFocusIndicator, 0, false); + + QMenu* modelDebugMenu = developerMenu->addMenu("Models"); + addCheckableActionToQMenuAndActionHash(modelDebugMenu, MenuOption::DisplayModelBounds, 0, false); + addCheckableActionToQMenuAndActionHash(modelDebugMenu, MenuOption::DisplayModelElementProxy, 0, false); + addCheckableActionToQMenuAndActionHash(modelDebugMenu, MenuOption::DisplayModelElementChildProxies, 0, false); + + QMenu* voxelOptionsMenu = developerMenu->addMenu("Voxels"); + addCheckableActionToQMenuAndActionHash(voxelOptionsMenu, MenuOption::VoxelTextures); + addCheckableActionToQMenuAndActionHash(voxelOptionsMenu, MenuOption::AmbientOcclusion); + addCheckableActionToQMenuAndActionHash(voxelOptionsMenu, MenuOption::DontFadeOnVoxelServerChanges); + addCheckableActionToQMenuAndActionHash(voxelOptionsMenu, MenuOption::DisableAutoAdjustLOD); + + QMenu* handOptionsMenu = developerMenu->addMenu("Hands"); + addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlignForearmsWithWrists, 0, false); + addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlternateIK, 0, false); + 
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHands, 0, true); + addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHandTargets, 0, false); + addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::ShowIKConstraints, 0, false); + + QMenu* sixenseOptionsMenu = handOptionsMenu->addMenu("Sixense"); + addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::FilterSixense, 0, true, appInstance->getSixenseManager(), SLOT(setFilter(bool))); - addCheckableActionToQMenuAndActionHash(handOptionsMenu, + addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::LowVelocityFilter, 0, true, appInstance, SLOT(setLowVelocityFilter(bool))); + addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::SixenseMouseInput, 0, true); + addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::SixenseLasers, 0, false); - addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHands, 0, true); - addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHandTargets, 0, false); - addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::HandsCollideWithSelf, 0, false); - addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::ShowIKConstraints, 0, false); - addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlignForearmsWithWrists, 0, true); - addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlternateIK, 0, false); - - addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::DisableNackPackets, 0, false); - addCheckableActionToQMenuAndActionHash(developerMenu, + QMenu* networkMenu = developerMenu->addMenu("Network"); + addCheckableActionToQMenuAndActionHash(networkMenu, MenuOption::DisableNackPackets, 0, false); + addCheckableActionToQMenuAndActionHash(networkMenu, MenuOption::DisableActivityLogger, 0, false, @@ -451,9 +457,7 @@ Menu::Menu() : addActionToQMenuAndActionHash(developerMenu, MenuOption::WalletPrivateKey, 0, this, SLOT(changePrivateKey())); - addDisabledActionAndSeparator(developerMenu, "Testing"); - - QMenu* timingMenu = developerMenu->addMenu("Timing and Statistics Tools"); + QMenu* timingMenu = developerMenu->addMenu("Timing and Stats"); QMenu* perfTimerMenu = timingMenu->addMenu("Performance Timer"); addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::DisplayTimingDetails, 0, true); addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandUpdateTiming, 0, false); @@ -465,8 +469,10 @@ Menu::Menu() : addCheckableActionToQMenuAndActionHash(timingMenu, MenuOption::TestPing, 0, true); addCheckableActionToQMenuAndActionHash(timingMenu, MenuOption::FrameTimer); addActionToQMenuAndActionHash(timingMenu, MenuOption::RunTimingTests, 0, this, SLOT(runTests())); + addCheckableActionToQMenuAndActionHash(timingMenu, MenuOption::PipelineWarnings); + addCheckableActionToQMenuAndActionHash(timingMenu, MenuOption::SuppressShortTimings); - QMenu* frustumMenu = developerMenu->addMenu("View Frustum Debugging Tools"); + QMenu* frustumMenu = developerMenu->addMenu("View Frustum"); addCheckableActionToQMenuAndActionHash(frustumMenu, MenuOption::DisplayFrustum, Qt::SHIFT | Qt::Key_F); addActionToQMenuAndActionHash(frustumMenu, MenuOption::FrustumRenderMode, @@ -476,11 +482,7 @@ Menu::Menu() : updateFrustumRenderModeAction(); - QMenu* renderDebugMenu = developerMenu->addMenu("Render Debugging Tools"); - addCheckableActionToQMenuAndActionHash(renderDebugMenu, 
MenuOption::PipelineWarnings); - addCheckableActionToQMenuAndActionHash(renderDebugMenu, MenuOption::SuppressShortTimings); - - QMenu* audioDebugMenu = developerMenu->addMenu("Audio Debugging Tools"); + QMenu* audioDebugMenu = developerMenu->addMenu("Audio"); addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioNoiseReduction, 0, true, @@ -493,7 +495,7 @@ Menu::Menu() : appInstance->getAudio(), SLOT(toggleAudioFilter())); - QMenu* audioFilterMenu = audioDebugMenu->addMenu("Audio Filter Options"); + QMenu* audioFilterMenu = audioDebugMenu->addMenu("Audio Filter"); addDisabledActionAndSeparator(audioFilterMenu, "Filter Response"); { QAction *flat = addCheckableActionToQMenuAndActionHash(audioFilterMenu, MenuOption::AudioFilterFlat, @@ -557,7 +559,7 @@ Menu::Menu() : appInstance->getAudio(), SLOT(toggleScopePause())); - QMenu* audioScopeMenu = audioDebugMenu->addMenu("Audio Scope Options"); + QMenu* audioScopeMenu = audioDebugMenu->addMenu("Audio Scope"); addDisabledActionAndSeparator(audioScopeMenu, "Display Frames"); { QAction *fiveFrames = addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopeFiveFrames, @@ -648,13 +650,17 @@ Menu::Menu() : appInstance->getAudio(), SLOT(toggleStatsShowInjectedStreams())); + connect(appInstance->getAudio(), SIGNAL(muteToggled()), this, SLOT(audioMuteToggled())); + + QMenu* experimentalOptionsMenu = developerMenu->addMenu("Experimental"); + addCheckableActionToQMenuAndActionHash(experimentalOptionsMenu, MenuOption::BuckyBalls, 0, false); + addCheckableActionToQMenuAndActionHash(experimentalOptionsMenu, MenuOption::StringHair, 0, false); + addActionToQMenuAndActionHash(developerMenu, MenuOption::PasteToVoxel, Qt::CTRL | Qt::SHIFT | Qt::Key_V, this, SLOT(pasteToVoxel())); - connect(appInstance->getAudio(), SIGNAL(muteToggled()), this, SLOT(audioMuteToggled())); - #ifndef Q_OS_MAC QMenu* helpMenu = addMenu("Help"); QAction* helpAction = helpMenu->addAction(MenuOption::AboutApp); @@ -674,8 +680,15 @@ void Menu::loadSettings(QSettings* settings) { lockedSettings = true; } - _audioJitterBufferFrames = loadSetting(settings, "audioJitterBufferFrames", 0); - _maxFramesOverDesired = loadSetting(settings, "maxFramesOverDesired", DEFAULT_MAX_FRAMES_OVER_DESIRED); + _receivedAudioStreamSettings._dynamicJitterBuffers = settings->value("dynamicJitterBuffers", DEFAULT_DYNAMIC_JITTER_BUFFERS).toBool(); + _receivedAudioStreamSettings._maxFramesOverDesired = settings->value("maxFramesOverDesired", DEFAULT_MAX_FRAMES_OVER_DESIRED).toInt(); + _receivedAudioStreamSettings._staticDesiredJitterBufferFrames = settings->value("staticDesiredJitterBufferFrames", DEFAULT_STATIC_DESIRED_JITTER_BUFFER_FRAMES).toInt(); + _receivedAudioStreamSettings._useStDevForJitterCalc = settings->value("useStDevForJitterCalc", DEFAULT_USE_STDEV_FOR_JITTER_CALC).toBool(); + _receivedAudioStreamSettings._windowStarveThreshold = settings->value("windowStarveThreshold", DEFAULT_WINDOW_STARVE_THRESHOLD).toInt(); + _receivedAudioStreamSettings._windowSecondsForDesiredCalcOnTooManyStarves = settings->value("windowSecondsForDesiredCalcOnTooManyStarves", DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES).toInt(); + _receivedAudioStreamSettings._windowSecondsForDesiredReduction = settings->value("windowSecondsForDesiredReduction", DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION).toInt(); + _receivedAudioStreamSettings._repetitionWithFade = settings->value("repetitionWithFade", DEFAULT_REPETITION_WITH_FADE).toBool(); + _fieldOfView = loadSetting(settings, 
"fieldOfView", DEFAULT_FIELD_OF_VIEW_DEGREES); _realWorldFieldOfView = loadSetting(settings, "realWorldFieldOfView", DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES); _faceshiftEyeDeflection = loadSetting(settings, "faceshiftEyeDeflection", DEFAULT_FACESHIFT_EYE_DEFLECTION); @@ -692,6 +705,10 @@ void Menu::loadSettings(QSettings* settings) { QStandardPaths::writableLocation(QStandardPaths::DesktopLocation)).toString(); setScriptsLocation(settings->value("scriptsLocation", QString()).toString()); +#ifdef Q_OS_MAC + _speechRecognizer.setEnabled(settings->value("speechRecognitionEnabled", false).toBool()); +#endif + settings->beginGroup("View Frustum Offset Camera"); // in case settings is corrupt or missing loadSetting() will check for NaN _viewFrustumOffset.yaw = loadSetting(settings, "viewFrustumOffsetYaw", 0.0f); @@ -725,8 +742,15 @@ void Menu::saveSettings(QSettings* settings) { lockedSettings = true; } - settings->setValue("audioJitterBufferFrames", _audioJitterBufferFrames); - settings->setValue("maxFramesOverDesired", _maxFramesOverDesired); + settings->setValue("dynamicJitterBuffers", _receivedAudioStreamSettings._dynamicJitterBuffers); + settings->setValue("maxFramesOverDesired", _receivedAudioStreamSettings._maxFramesOverDesired); + settings->setValue("staticDesiredJitterBufferFrames", _receivedAudioStreamSettings._staticDesiredJitterBufferFrames); + settings->setValue("useStDevForJitterCalc", _receivedAudioStreamSettings._useStDevForJitterCalc); + settings->setValue("windowStarveThreshold", _receivedAudioStreamSettings._windowStarveThreshold); + settings->setValue("windowSecondsForDesiredCalcOnTooManyStarves", _receivedAudioStreamSettings._windowSecondsForDesiredCalcOnTooManyStarves); + settings->setValue("windowSecondsForDesiredReduction", _receivedAudioStreamSettings._windowSecondsForDesiredReduction); + settings->setValue("repetitionWithFade", _receivedAudioStreamSettings._repetitionWithFade); + settings->setValue("fieldOfView", _fieldOfView); settings->setValue("faceshiftEyeDeflection", _faceshiftEyeDeflection); settings->setValue("maxVoxels", _maxVoxels); @@ -739,6 +763,9 @@ void Menu::saveSettings(QSettings* settings) { settings->setValue("boundaryLevelAdjust", _boundaryLevelAdjust); settings->setValue("snapshotsLocation", _snapshotsLocation); settings->setValue("scriptsLocation", _scriptsLocation); +#ifdef Q_OS_MAC + settings->setValue("speechRecognitionEnabled", _speechRecognizer.getEnabled()); +#endif settings->beginGroup("View Frustum Offset Camera"); settings->setValue("viewFrustumOffsetYaw", _viewFrustumOffset.yaw); settings->setValue("viewFrustumOffsetPitch", _viewFrustumOffset.pitch); @@ -1026,12 +1053,24 @@ void sendFakeEnterEvent() { const float DIALOG_RATIO_OF_WINDOW = 0.30f; +void Menu::clearLoginDialogDisplayedFlag() { + // Needed for domains that don't require login. 
+ _hasLoginDialogDisplayed = false; +} + void Menu::loginForCurrentDomain() { - if (!_loginDialog) { + if (!_loginDialog && !_hasLoginDialogDisplayed) { _loginDialog = new LoginDialog(Application::getInstance()->getWindow()); _loginDialog->show(); _loginDialog->resizeAndPosition(false); } + + _hasLoginDialogDisplayed = true; +} + +void Menu::showLoginForCurrentDomain() { + _hasLoginDialogDisplayed = false; + loginForCurrentDomain(); } void Menu::editPreferences() { @@ -1378,7 +1417,7 @@ void Menu::toggleLoginMenuItem() { // change the menu item to login _loginAction->setText("Login"); - connect(_loginAction, &QAction::triggered, this, &Menu::loginForCurrentDomain); + connect(_loginAction, &QAction::triggered, this, &Menu::showLoginForCurrentDomain); } } @@ -1457,7 +1496,9 @@ void Menu::toggleConsole() { void Menu::audioMuteToggled() { QAction *muteAction = _actionHash.value(MenuOption::MuteAudio); - muteAction->setChecked(Application::getInstance()->getAudio()->getMuted()); + if (muteAction) { + muteAction->setChecked(Application::getInstance()->getAudio()->getMuted()); + } } void Menu::bandwidthDetailsClosed() { @@ -1629,45 +1670,31 @@ void Menu::runTests() { void Menu::updateFrustumRenderModeAction() { QAction* frustumRenderModeAction = _actionHash.value(MenuOption::FrustumRenderMode); - switch (_frustumDrawMode) { - default: - case FRUSTUM_DRAW_MODE_ALL: - frustumRenderModeAction->setText("Render Mode - All"); - break; - case FRUSTUM_DRAW_MODE_VECTORS: - frustumRenderModeAction->setText("Render Mode - Vectors"); - break; - case FRUSTUM_DRAW_MODE_PLANES: - frustumRenderModeAction->setText("Render Mode - Planes"); - break; - case FRUSTUM_DRAW_MODE_NEAR_PLANE: - frustumRenderModeAction->setText("Render Mode - Near"); - break; - case FRUSTUM_DRAW_MODE_FAR_PLANE: - frustumRenderModeAction->setText("Render Mode - Far"); - break; - case FRUSTUM_DRAW_MODE_KEYHOLE: - frustumRenderModeAction->setText("Render Mode - Keyhole"); - break; + if (frustumRenderModeAction) { + switch (_frustumDrawMode) { + default: + case FRUSTUM_DRAW_MODE_ALL: + frustumRenderModeAction->setText("Render Mode - All"); + break; + case FRUSTUM_DRAW_MODE_VECTORS: + frustumRenderModeAction->setText("Render Mode - Vectors"); + break; + case FRUSTUM_DRAW_MODE_PLANES: + frustumRenderModeAction->setText("Render Mode - Planes"); + break; + case FRUSTUM_DRAW_MODE_NEAR_PLANE: + frustumRenderModeAction->setText("Render Mode - Near"); + break; + case FRUSTUM_DRAW_MODE_FAR_PLANE: + frustumRenderModeAction->setText("Render Mode - Far"); + break; + case FRUSTUM_DRAW_MODE_KEYHOLE: + frustumRenderModeAction->setText("Render Mode - Keyhole"); + break; + } } } -void Menu::addAvatarCollisionSubMenu(QMenu* overMenu) { - // add avatar collisions subMenu to overMenu - QMenu* subMenu = overMenu->addMenu("Collision Options"); - - Application* appInstance = Application::getInstance(); - QObject* avatar = appInstance->getAvatar(); - addCheckableActionToQMenuAndActionHash(subMenu, MenuOption::CollideWithEnvironment, - 0, false, avatar, SLOT(updateCollisionGroups())); - addCheckableActionToQMenuAndActionHash(subMenu, MenuOption::CollideWithAvatars, - 0, true, avatar, SLOT(updateCollisionGroups())); - addCheckableActionToQMenuAndActionHash(subMenu, MenuOption::CollideWithVoxels, - 0, false, avatar, SLOT(updateCollisionGroups())); - addCheckableActionToQMenuAndActionHash(subMenu, MenuOption::CollideWithParticles, - 0, true, avatar, SLOT(updateCollisionGroups())); -} - QAction* Menu::getActionFromName(const QString& menuName, QMenu* menu) { QList 
menuActions; if (menu) { @@ -1887,10 +1914,9 @@ void Menu::removeMenuItem(const QString& menu, const QString& menuitem) { }; bool Menu::menuItemExists(const QString& menu, const QString& menuitem) { - QMenu* menuObj = getMenu(menu); QAction* menuItemAction = _actionHash.value(menuitem); - if (menuObj && menuItemAction) { - return true; + if (menuItemAction) { + return (getMenu(menu) != NULL); } return false; }; diff --git a/interface/src/Menu.h b/interface/src/Menu.h index 7ef744e62e..1d57da2891 100644 --- a/interface/src/Menu.h +++ b/interface/src/Menu.h @@ -23,6 +23,10 @@ #include #include +#ifdef Q_OS_MAC +#include "SpeechRecognizer.h" +#endif + #include "location/LocationManager.h" #include "ui/PreferencesDialog.h" #include "ui/ChatWindow.h" @@ -85,10 +89,8 @@ public: void triggerOption(const QString& menuOption); QAction* getActionForOption(const QString& menuOption); - float getAudioJitterBufferFrames() const { return _audioJitterBufferFrames; } - void setAudioJitterBufferFrames(float audioJitterBufferSamples) { _audioJitterBufferFrames = audioJitterBufferSamples; } - int getMaxFramesOverDesired() const { return _maxFramesOverDesired; } - void setMaxFramesOverDesired(int maxFramesOverDesired) { _maxFramesOverDesired = maxFramesOverDesired; } + const InboundAudioStream::Settings& getReceivedAudioStreamSettings() const { return _receivedAudioStreamSettings; } + void setReceivedAudioStreamSettings(const InboundAudioStream::Settings& receivedAudioStreamSettings) { _receivedAudioStreamSettings = receivedAudioStreamSettings; } float getFieldOfView() const { return _fieldOfView; } void setFieldOfView(float fieldOfView) { _fieldOfView = fieldOfView; } float getRealWorldFieldOfView() const { return _realWorldFieldOfView; } @@ -137,6 +139,10 @@ public: void setBoundaryLevelAdjust(int boundaryLevelAdjust); int getBoundaryLevelAdjust() const { return _boundaryLevelAdjust; } +#ifdef Q_OS_MAC + SpeechRecognizer* getSpeechRecognizer() { return &_speechRecognizer; } +#endif + // User Tweakable PPS from Voxel Server int getMaxVoxelPacketsPerSecond() const { return _maxVoxelPacketsPerSecond; } void setMaxVoxelPacketsPerSecond(int maxVoxelPacketsPerSecond) { _maxVoxelPacketsPerSecond = maxVoxelPacketsPerSecond; } @@ -169,7 +175,9 @@ signals: public slots: + void clearLoginDialogDisplayedFlag(); void loginForCurrentDomain(); + void showLoginForCurrentDomain(); void bandwidthDetails(); void octreeStatsDetails(); void lodTools(); @@ -246,8 +254,6 @@ private: void updateFrustumRenderModeAction(); - void addAvatarCollisionSubMenu(QMenu* overMenu); - QAction* getActionFromName(const QString& menuName, QMenu* menu); QMenu* getSubMenuFromName(const QString& menuName, QMenu* menu); QMenu* getMenuParent(const QString& menuName, QString& finalMenuPart); @@ -259,8 +265,7 @@ private: QHash _actionHash; - int _audioJitterBufferFrames; /// number of extra samples to wait before starting audio playback - int _maxFramesOverDesired; + InboundAudioStream::Settings _receivedAudioStreamSettings; BandwidthDialog* _bandwidthDialog; float _fieldOfView; /// in Degrees, doesn't apply to HMD like Oculus float _realWorldFieldOfView; // The actual FOV set by the user's monitor size and view distance @@ -274,6 +279,9 @@ private: OctreeStatsDialog* _octreeStatsDialog; LodToolsDialog* _lodToolsDialog; UserLocationsDialog* _userLocationsDialog; +#ifdef Q_OS_MAC + SpeechRecognizer _speechRecognizer; +#endif int _maxVoxels; float _voxelSizeScale; float _oculusUIAngularSize; @@ -296,6 +304,7 @@ private: QPointer _attachmentsDialog; 
QPointer _animationsDialog; QPointer _loginDialog; + bool _hasLoginDialogDisplayed; QAction* _chatAction; QString _snapshotsLocation; QString _scriptsLocation; @@ -342,25 +351,27 @@ namespace MenuOption { const QString AvatarsReceiveShadows = "Avatars Receive Shadows"; const QString Bandwidth = "Bandwidth Display"; const QString BandwidthDetails = "Bandwidth Details"; + const QString BlueSpeechSphere = "Blue Sphere While Speaking"; const QString BuckyBalls = "Bucky Balls"; const QString CascadedShadows = "Cascaded"; const QString Chat = "Chat..."; const QString ChatCircling = "Chat Circling"; - const QString CollideAsRagdoll = "Collide As Ragdoll"; - const QString CollideWithAvatars = "Collide With Avatars"; + const QString CollideAsRagdoll = "Collide With Self (Ragdoll)"; + const QString CollideWithAvatars = "Collide With Other Avatars"; const QString CollideWithEnvironment = "Collide With World Boundaries"; const QString CollideWithParticles = "Collide With Particles"; const QString CollideWithVoxels = "Collide With Voxels"; const QString Collisions = "Collisions"; const QString Console = "Console..."; + const QString ControlWithSpeech = "Control With Speech"; const QString DecreaseAvatarSize = "Decrease Avatar Size"; const QString DecreaseVoxelSize = "Decrease Voxel Size"; const QString DisableActivityLogger = "Disable Activity Logger"; const QString DisableAutoAdjustLOD = "Disable Automatically Adjusting LOD"; const QString DisableNackPackets = "Disable NACK Packets"; const QString DisplayFrustum = "Display Frustum"; - const QString DisplayHands = "Display Hands"; - const QString DisplayHandTargets = "Display Hand Targets"; + const QString DisplayHands = "Show Hand Info"; + const QString DisplayHandTargets = "Show Hand Targets"; const QString DisplayModelBounds = "Display Model Bounds"; const QString DisplayModelElementChildProxies = "Display Model Element Children"; const QString DisplayModelElementProxy = "Display Model Element Bounds"; @@ -380,7 +391,6 @@ namespace MenuOption { const QString Faceshift = "Faceshift"; const QString FilterSixense = "Smooth Sixense Movement"; const QString FirstPerson = "First Person"; - const QString FocusIndicators = "Focus Indicators"; const QString FrameTimer = "Show Timer"; const QString FrustumRenderMode = "Render Mode"; const QString Fullscreen = "Fullscreen"; @@ -391,7 +401,6 @@ namespace MenuOption { const QString GoToDomain = "Go To Domain..."; const QString GoTo = "Go To..."; const QString GoToLocation = "Go To Location..."; - const QString HandsCollideWithSelf = "Collide With Self"; const QString HeadMouse = "Head Mouse"; const QString IncreaseAvatarSize = "Increase Avatar Size"; const QString IncreaseVoxelSize = "Increase Voxel Size"; @@ -401,7 +410,6 @@ namespace MenuOption { const QString Login = "Login"; const QString Log = "Log"; const QString Logout = "Logout"; - const QString LookAtVectors = "Look-at Vectors"; const QString LowVelocityFilter = "Low Velocity Filter"; const QString MetavoxelEditor = "Metavoxel Editor..."; const QString Metavoxels = "Metavoxels"; @@ -421,13 +429,15 @@ namespace MenuOption { const QString Pair = "Pair"; const QString Particles = "Particles"; const QString PasteToVoxel = "Paste to Voxel..."; - const QString PipelineWarnings = "Show Render Pipeline Warnings"; + const QString PipelineWarnings = "Log Render Pipeline Warnings"; const QString Preferences = "Preferences..."; const QString Quit = "Quit"; const QString ReloadAllScripts = "Reload All Scripts"; - const QString RenderBoundingCollisionShapes = 
"Bounding Collision Shapes"; - const QString RenderHeadCollisionShapes = "Head Collision Shapes"; - const QString RenderSkeletonCollisionShapes = "Skeleton Collision Shapes"; + const QString RenderBoundingCollisionShapes = "Show Bounding Collision Shapes"; + const QString RenderFocusIndicator = "Show Eye Focus"; + const QString RenderHeadCollisionShapes = "Show Head Collision Shapes"; + const QString RenderLookAtVectors = "Show Look-at Vectors"; + const QString RenderSkeletonCollisionShapes = "Show Skeleton Collision Shapes"; const QString ResetAvatarSize = "Reset Avatar Size"; const QString RunningScripts = "Running Scripts"; const QString RunTimingTests = "Run Timing Tests"; @@ -459,7 +469,7 @@ namespace MenuOption { const QString VoxelMode = "Cycle Voxel Mode"; const QString Voxels = "Voxels"; const QString VoxelTextures = "Voxel Textures"; - const QString WalletPrivateKey = "Wallet Private Key"; + const QString WalletPrivateKey = "Wallet Private Key..."; } void sendFakeEnterEvent(); diff --git a/interface/src/MetavoxelSystem.cpp b/interface/src/MetavoxelSystem.cpp index 433c8af23e..1d97dc94fc 100644 --- a/interface/src/MetavoxelSystem.cpp +++ b/interface/src/MetavoxelSystem.cpp @@ -118,7 +118,7 @@ void MetavoxelSystem::render() { viewFrustum->getNearBottomLeft(), viewFrustum->getNearBottomRight()); RenderVisitor renderVisitor(getLOD()); - guideToAugmented(renderVisitor); + guideToAugmented(renderVisitor, true); } class RayHeightfieldIntersectionVisitor : public RayIntersectionVisitor { @@ -449,22 +449,29 @@ void MetavoxelSystem::renderHeightfieldCursor(const glm::vec3& position, float r glDepthFunc(GL_LESS); } -void MetavoxelSystem::deleteTextures(int heightID, int colorID) { +void MetavoxelSystem::deleteTextures(int heightID, int colorID, int textureID) { glDeleteTextures(1, (GLuint*)&heightID); glDeleteTextures(1, (GLuint*)&colorID); + glDeleteTextures(1, (GLuint*)&textureID); } MetavoxelClient* MetavoxelSystem::createClient(const SharedNodePointer& node) { return new MetavoxelSystemClient(node, _updater); } -void MetavoxelSystem::guideToAugmented(MetavoxelVisitor& visitor) { +void MetavoxelSystem::guideToAugmented(MetavoxelVisitor& visitor, bool render) { foreach (const SharedNodePointer& node, NodeList::getInstance()->getNodeHash()) { if (node->getType() == NodeType::MetavoxelServer) { QMutexLocker locker(&node->getMutex()); MetavoxelSystemClient* client = static_cast(node->getLinkedData()); if (client) { - client->getAugmentedData().guide(visitor); + MetavoxelData data = client->getAugmentedData(); + data.guide(visitor); + if (render) { + // save the rendered augmented data so that its cached texture references, etc., don't + // get collected when we replace it with more recent versions + client->setRenderedAugmentedData(data); + } } } } @@ -601,15 +608,19 @@ const int HeightfieldBuffer::SHARED_EDGE = 1; const int HeightfieldBuffer::HEIGHT_EXTENSION = 2 * HeightfieldBuffer::HEIGHT_BORDER + HeightfieldBuffer::SHARED_EDGE; HeightfieldBuffer::HeightfieldBuffer(const glm::vec3& translation, float scale, - const QByteArray& height, const QByteArray& color) : + const QByteArray& height, const QByteArray& color, const QByteArray& texture, + const QVector& textures) : _translation(translation), _scale(scale), _heightBounds(translation, translation + glm::vec3(scale, scale, scale)), _colorBounds(_heightBounds), _height(height), _color(color), + _texture(texture), + _textures(textures), _heightTextureID(0), _colorTextureID(0), + _textureTextureID(0), _heightSize(glm::sqrt(height.size())), 
_heightIncrement(scale / (_heightSize - HEIGHT_EXTENSION)), _colorSize(glm::sqrt(color.size() / HeightfieldData::COLOR_BYTES)), @@ -628,10 +639,11 @@ HeightfieldBuffer::~HeightfieldBuffer() { // the textures have to be deleted on the main thread (for its opengl context) if (QThread::currentThread() != Application::getInstance()->thread()) { QMetaObject::invokeMethod(Application::getInstance()->getMetavoxels(), "deleteTextures", - Q_ARG(int, _heightTextureID), Q_ARG(int, _colorTextureID)); + Q_ARG(int, _heightTextureID), Q_ARG(int, _colorTextureID), Q_ARG(int, _textureTextureID)); } else { glDeleteTextures(1, &_heightTextureID); glDeleteTextures(1, &_colorTextureID); + glDeleteTextures(1, &_textureTextureID); } } @@ -667,13 +679,17 @@ public: glm::vec3 vertex; }; +const int SPLAT_COUNT = 4; +const GLint SPLAT_TEXTURE_UNITS[] = { 3, 4, 5, 6 }; + void HeightfieldBuffer::render(bool cursor) { // initialize textures, etc. on first render if (_heightTextureID == 0) { glGenTextures(1, &_heightTextureID); glBindTexture(GL_TEXTURE_2D, _heightTextureID); glPixelStorei(GL_UNPACK_ALIGNMENT, 1); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, _heightSize, _heightSize, 0, @@ -692,6 +708,27 @@ void HeightfieldBuffer::render(bool cursor) { int colorSize = glm::sqrt(_color.size() / HeightfieldData::COLOR_BYTES); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, colorSize, colorSize, 0, GL_RGB, GL_UNSIGNED_BYTE, _color.constData()); } + + if (!_texture.isEmpty()) { + glGenTextures(1, &_textureTextureID); + glBindTexture(GL_TEXTURE_2D, _textureTextureID); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + int textureSize = glm::sqrt(_texture.size()); + glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, textureSize, textureSize, 0, + GL_LUMINANCE, GL_UNSIGNED_BYTE, _texture.constData()); + + _networkTextures.resize(_textures.size()); + for (int i = 0; i < _textures.size(); i++) { + const SharedObjectPointer texture = _textures.at(i); + if (texture) { + _networkTextures[i] = Application::getInstance()->getTextureCache()->getTexture( + static_cast(texture.data())->getURL(), SPLAT_TEXTURE); + } + } + } } // create the buffer objects lazily int innerSize = _heightSize - 2 * HeightfieldBuffer::HEIGHT_BORDER; @@ -759,7 +796,115 @@ void HeightfieldBuffer::render(bool cursor) { glBindTexture(GL_TEXTURE_2D, _heightTextureID); - if (!cursor) { + if (cursor) { + glDrawRangeElements(GL_TRIANGLES, 0, vertexCount - 1, indexCount, GL_UNSIGNED_INT, 0); + + } else if (!_textures.isEmpty()) { + DefaultMetavoxelRendererImplementation::getBaseHeightfieldProgram().bind(); + DefaultMetavoxelRendererImplementation::getBaseHeightfieldProgram().setUniformValue( + DefaultMetavoxelRendererImplementation::getBaseHeightScaleLocation(), 1.0f / _heightSize); + DefaultMetavoxelRendererImplementation::getBaseHeightfieldProgram().setUniformValue( + DefaultMetavoxelRendererImplementation::getBaseColorScaleLocation(), (float)_heightSize / innerSize); + glActiveTexture(GL_TEXTURE1); + 
glBindTexture(GL_TEXTURE_2D, _colorTextureID); + + glDrawRangeElements(GL_TRIANGLES, 0, vertexCount - 1, indexCount, GL_UNSIGNED_INT, 0); + + glDepthFunc(GL_LEQUAL); + glDepthMask(false); + glEnable(GL_BLEND); + glDisable(GL_ALPHA_TEST); + glEnable(GL_POLYGON_OFFSET_FILL); + glPolygonOffset(-1.0f, -1.0f); + + DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().bind(); + DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue( + DefaultMetavoxelRendererImplementation::getSplatHeightScaleLocation(), 1.0f / _heightSize); + DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue( + DefaultMetavoxelRendererImplementation::getSplatTextureScaleLocation(), (float)_heightSize / innerSize); + DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue( + DefaultMetavoxelRendererImplementation::getSplatTextureOffsetLocation(), + _translation.x / _scale, _translation.z / _scale); + + glBindTexture(GL_TEXTURE_2D, _textureTextureID); + + const int TEXTURES_PER_SPLAT = 4; + for (int i = 0; i < _textures.size(); i += TEXTURES_PER_SPLAT) { + QVector4D scalesS, scalesT; + + for (int j = 0; j < SPLAT_COUNT; j++) { + glActiveTexture(GL_TEXTURE0 + SPLAT_TEXTURE_UNITS[j]); + int index = i + j; + if (index < _networkTextures.size()) { + const NetworkTexturePointer& texture = _networkTextures.at(index); + if (texture) { + HeightfieldTexture* heightfieldTexture = static_cast(_textures.at(index).data()); + scalesS[j] = _scale / heightfieldTexture->getScaleS(); + scalesT[j] = _scale / heightfieldTexture->getScaleT(); + glBindTexture(GL_TEXTURE_2D, texture->getID()); + } else { + glBindTexture(GL_TEXTURE_2D, 0); + } + } else { + glBindTexture(GL_TEXTURE_2D, 0); + } + } + const float QUARTER_STEP = 0.25f * EIGHT_BIT_MAXIMUM_RECIPROCAL; + DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue( + DefaultMetavoxelRendererImplementation::getSplatTextureScalesSLocation(), scalesS); + DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue( + DefaultMetavoxelRendererImplementation::getSplatTextureScalesTLocation(), scalesT); + DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue( + DefaultMetavoxelRendererImplementation::getSplatTextureValueMinimaLocation(), + (i + 1) * EIGHT_BIT_MAXIMUM_RECIPROCAL - QUARTER_STEP, (i + 2) * EIGHT_BIT_MAXIMUM_RECIPROCAL - QUARTER_STEP, + (i + 3) * EIGHT_BIT_MAXIMUM_RECIPROCAL - QUARTER_STEP, (i + 4) * EIGHT_BIT_MAXIMUM_RECIPROCAL - QUARTER_STEP); + DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue( + DefaultMetavoxelRendererImplementation::getSplatTextureValueMaximaLocation(), + (i + 1) * EIGHT_BIT_MAXIMUM_RECIPROCAL + QUARTER_STEP, (i + 2) * EIGHT_BIT_MAXIMUM_RECIPROCAL + QUARTER_STEP, + (i + 3) * EIGHT_BIT_MAXIMUM_RECIPROCAL + QUARTER_STEP, (i + 4) * EIGHT_BIT_MAXIMUM_RECIPROCAL + QUARTER_STEP); + glDrawRangeElements(GL_TRIANGLES, 0, vertexCount - 1, indexCount, GL_UNSIGNED_INT, 0); + } + + glEnable(GL_ALPHA_TEST); + glBlendFunc(GL_DST_COLOR, GL_ZERO); + + for (int i = 0; i < SPLAT_COUNT; i++) { + glActiveTexture(GL_TEXTURE0 + SPLAT_TEXTURE_UNITS[i]); + glBindTexture(GL_TEXTURE_2D, 0); + } + + glActiveTexture(GL_TEXTURE1); + glBindTexture(GL_TEXTURE_2D, 0); + + if (Menu::getInstance()->isOptionChecked(MenuOption::SimpleShadows)) { + DefaultMetavoxelRendererImplementation::getShadowLightHeightfieldProgram().bind(); + 
DefaultMetavoxelRendererImplementation::getShadowLightHeightfieldProgram().setUniformValue( + DefaultMetavoxelRendererImplementation::getShadowLightHeightScaleLocation(), 1.0f / _heightSize); + + } else if (Menu::getInstance()->isOptionChecked(MenuOption::CascadedShadows)) { + DefaultMetavoxelRendererImplementation::getCascadedShadowLightHeightfieldProgram().bind(); + DefaultMetavoxelRendererImplementation::getCascadedShadowLightHeightfieldProgram().setUniformValue( + DefaultMetavoxelRendererImplementation::getCascadedShadowLightHeightScaleLocation(), 1.0f / _heightSize); + + } else { + DefaultMetavoxelRendererImplementation::getLightHeightfieldProgram().bind(); + DefaultMetavoxelRendererImplementation::getLightHeightfieldProgram().setUniformValue( + DefaultMetavoxelRendererImplementation::getBaseHeightScaleLocation(), 1.0f / _heightSize); + } + + glDrawRangeElements(GL_TRIANGLES, 0, vertexCount - 1, indexCount, GL_UNSIGNED_INT, 0); + + DefaultMetavoxelRendererImplementation::getHeightfieldProgram().bind(); + + glDisable(GL_POLYGON_OFFSET_FILL); + glDisable(GL_BLEND); + glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE); + glDepthFunc(GL_LESS); + glDepthMask(true); + + glActiveTexture(GL_TEXTURE0); + + } else { int heightScaleLocation = DefaultMetavoxelRendererImplementation::getHeightScaleLocation(); int colorScaleLocation = DefaultMetavoxelRendererImplementation::getColorScaleLocation(); ProgramObject* program = &DefaultMetavoxelRendererImplementation::getHeightfieldProgram(); @@ -777,11 +922,9 @@ void HeightfieldBuffer::render(bool cursor) { program->setUniformValue(colorScaleLocation, (float)_heightSize / innerSize); glActiveTexture(GL_TEXTURE1); glBindTexture(GL_TEXTURE_2D, _colorTextureID); - } - - glDrawRangeElements(GL_TRIANGLES, 0, vertexCount - 1, indexCount, GL_UNSIGNED_INT, 0); - - if (!cursor) { + + glDrawRangeElements(GL_TRIANGLES, 0, vertexCount - 1, indexCount, GL_UNSIGNED_INT, 0); + glBindTexture(GL_TEXTURE_2D, 0); glActiveTexture(GL_TEXTURE0); } @@ -898,6 +1041,74 @@ void DefaultMetavoxelRendererImplementation::init() { _shadowDistancesLocation = _cascadedShadowMapHeightfieldProgram.uniformLocation("shadowDistances"); _cascadedShadowMapHeightfieldProgram.release(); + _baseHeightfieldProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() + + "shaders/metavoxel_heightfield_base.vert"); + _baseHeightfieldProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() + + "shaders/metavoxel_heightfield_base.frag"); + _baseHeightfieldProgram.link(); + + _baseHeightfieldProgram.bind(); + _baseHeightfieldProgram.setUniformValue("heightMap", 0); + _baseHeightfieldProgram.setUniformValue("diffuseMap", 1); + _baseHeightScaleLocation = _heightfieldProgram.uniformLocation("heightScale"); + _baseColorScaleLocation = _heightfieldProgram.uniformLocation("colorScale"); + _baseHeightfieldProgram.release(); + + _splatHeightfieldProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() + + "shaders/metavoxel_heightfield_splat.vert"); + _splatHeightfieldProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() + + "shaders/metavoxel_heightfield_splat.frag"); + _splatHeightfieldProgram.link(); + + _splatHeightfieldProgram.bind(); + _splatHeightfieldProgram.setUniformValue("heightMap", 0); + _splatHeightfieldProgram.setUniformValue("textureMap", 1); + _splatHeightfieldProgram.setUniformValueArray("diffuseMaps", SPLAT_TEXTURE_UNITS, SPLAT_COUNT); + _splatHeightScaleLocation 
= _splatHeightfieldProgram.uniformLocation("heightScale"); + _splatTextureScaleLocation = _splatHeightfieldProgram.uniformLocation("textureScale"); + _splatTextureOffsetLocation = _splatHeightfieldProgram.uniformLocation("splatTextureOffset"); + _splatTextureScalesSLocation = _splatHeightfieldProgram.uniformLocation("splatTextureScalesS"); + _splatTextureScalesTLocation = _splatHeightfieldProgram.uniformLocation("splatTextureScalesT"); + _splatTextureValueMinimaLocation = _splatHeightfieldProgram.uniformLocation("textureValueMinima"); + _splatTextureValueMaximaLocation = _splatHeightfieldProgram.uniformLocation("textureValueMaxima"); + _splatHeightfieldProgram.release(); + + _lightHeightfieldProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() + + "shaders/metavoxel_heightfield_light.vert"); + _lightHeightfieldProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() + + "shaders/metavoxel_heightfield_light.frag"); + _lightHeightfieldProgram.link(); + + _lightHeightfieldProgram.bind(); + _lightHeightfieldProgram.setUniformValue("heightMap", 0); + _lightHeightScaleLocation = _lightHeightfieldProgram.uniformLocation("heightScale"); + _lightHeightfieldProgram.release(); + + _shadowLightHeightfieldProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() + + "shaders/metavoxel_heightfield_light.vert"); + _shadowLightHeightfieldProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() + + "shaders/metavoxel_heightfield_light_shadow_map.frag"); + _shadowLightHeightfieldProgram.link(); + + _shadowLightHeightfieldProgram.bind(); + _shadowLightHeightfieldProgram.setUniformValue("heightMap", 0); + _shadowLightHeightfieldProgram.setUniformValue("shadowMap", 2); + _shadowLightHeightScaleLocation = _shadowLightHeightfieldProgram.uniformLocation("heightScale"); + _shadowLightHeightfieldProgram.release(); + + _cascadedShadowLightHeightfieldProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() + + "shaders/metavoxel_heightfield_light.vert"); + _cascadedShadowLightHeightfieldProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() + + "shaders/metavoxel_heightfield_light_cascaded_shadow_map.frag"); + _cascadedShadowLightHeightfieldProgram.link(); + + _cascadedShadowLightHeightfieldProgram.bind(); + _cascadedShadowLightHeightfieldProgram.setUniformValue("heightMap", 0); + _cascadedShadowLightHeightfieldProgram.setUniformValue("shadowMap", 2); + _cascadedShadowLightHeightScaleLocation = _cascadedShadowLightHeightfieldProgram.uniformLocation("heightScale"); + _shadowLightDistancesLocation = _cascadedShadowLightHeightfieldProgram.uniformLocation("shadowDistances"); + _cascadedShadowLightHeightfieldProgram.release(); + _heightfieldCursorProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() + "shaders/metavoxel_heightfield_cursor.vert"); _heightfieldCursorProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() + @@ -1011,7 +1222,7 @@ int HeightfieldFetchVisitor::visit(MetavoxelInfo& info) { if (!info.isLeaf && info.size > _buffer->getScale()) { return DEFAULT_ORDER; } - HeightfieldDataPointer height = info.inputValues.at(0).getInlineValue(); + HeightfieldHeightDataPointer height = info.inputValues.at(0).getInlineValue(); if (!height) { return STOP_RECURSION; } @@ -1065,11 +1276,11 @@ int HeightfieldFetchVisitor::visit(MetavoxelInfo& info) { int colorSize = _buffer->getColorSize(); if (colorSize == 0) { - return 
STOP_RECURSION; + continue; } - HeightfieldDataPointer color = info.inputValues.at(1).getInlineValue(); + HeightfieldColorDataPointer color = info.inputValues.at(1).getInlineValue(); if (!color) { - return STOP_RECURSION; + continue; } const Box& colorBounds = _buffer->getColorBounds(); overlap = colorBounds.getIntersection(overlap); @@ -1138,6 +1349,7 @@ private: HeightfieldRegionVisitor::HeightfieldRegionVisitor(const MetavoxelLOD& lod) : MetavoxelVisitor(QVector() << AttributeRegistry::getInstance()->getHeightfieldAttribute() << AttributeRegistry::getInstance()->getHeightfieldColorAttribute() << + AttributeRegistry::getInstance()->getHeightfieldTextureAttribute() << Application::getInstance()->getMetavoxels()->getHeightfieldBufferAttribute(), QVector() << Application::getInstance()->getMetavoxels()->getHeightfieldBufferAttribute(), lod), regionBounds(glm::vec3(FLT_MAX, FLT_MAX, FLT_MAX), glm::vec3(-FLT_MAX, -FLT_MAX, -FLT_MAX)), @@ -1149,14 +1361,14 @@ int HeightfieldRegionVisitor::visit(MetavoxelInfo& info) { return DEFAULT_ORDER; } HeightfieldBuffer* buffer = NULL; - HeightfieldDataPointer height = info.inputValues.at(0).getInlineValue(); + HeightfieldHeightDataPointer height = info.inputValues.at(0).getInlineValue(); if (height) { const QByteArray& heightContents = height->getContents(); int size = glm::sqrt(heightContents.size()); int extendedSize = size + HeightfieldBuffer::HEIGHT_EXTENSION; int heightContentsSize = extendedSize * extendedSize; - HeightfieldDataPointer color = info.inputValues.at(1).getInlineValue(); + HeightfieldColorDataPointer color = info.inputValues.at(1).getInlineValue(); int colorContentsSize = 0; if (color) { const QByteArray& colorContents = color->getContents(); @@ -1165,33 +1377,44 @@ int HeightfieldRegionVisitor::visit(MetavoxelInfo& info) { colorContentsSize = extendedColorSize * extendedColorSize * HeightfieldData::COLOR_BYTES; } + HeightfieldTextureDataPointer texture = info.inputValues.at(2).getInlineValue(); + QByteArray textureContents; + QVector textures; + if (texture) { + textureContents = texture->getContents(); + textures = texture->getTextures(); + } + const HeightfieldBuffer* existingBuffer = static_cast( - info.inputValues.at(2).getInlineValue().data()); + info.inputValues.at(3).getInlineValue().data()); Box bounds = info.getBounds(); if (existingBuffer && existingBuffer->getHeight().size() == heightContentsSize && existingBuffer->getColor().size() == colorContentsSize) { // we already have a buffer of the correct resolution addRegion(bounds, existingBuffer->getHeightBounds()); - return STOP_RECURSION; + buffer = new HeightfieldBuffer(info.minimum, info.size, existingBuffer->getHeight(), + existingBuffer->getColor(), textureContents, textures); + + } else { + // we must create a new buffer and update its borders + buffer = new HeightfieldBuffer(info.minimum, info.size, QByteArray(heightContentsSize, 0), + QByteArray(colorContentsSize, 0), textureContents, textures); + const Box& heightBounds = buffer->getHeightBounds(); + addRegion(bounds, heightBounds); + + _intersections.clear(); + _intersections.append(Box(heightBounds.minimum, + glm::vec3(bounds.maximum.x, heightBounds.maximum.y, bounds.minimum.z))); + _intersections.append(Box(glm::vec3(bounds.maximum.x, heightBounds.minimum.y, heightBounds.minimum.z), + glm::vec3(heightBounds.maximum.x, heightBounds.maximum.y, bounds.maximum.z))); + _intersections.append(Box(glm::vec3(bounds.minimum.x, heightBounds.minimum.y, bounds.maximum.z), + heightBounds.maximum)); + 
_intersections.append(Box(glm::vec3(heightBounds.minimum.x, heightBounds.minimum.y, bounds.minimum.z), + glm::vec3(bounds.minimum.x, heightBounds.maximum.y, heightBounds.maximum.z))); + + _fetchVisitor.init(buffer); + _data->guide(_fetchVisitor); } - // we must create a new buffer and update its borders - buffer = new HeightfieldBuffer(info.minimum, info.size, QByteArray(heightContentsSize, 0), - QByteArray(colorContentsSize, 0)); - const Box& heightBounds = buffer->getHeightBounds(); - addRegion(bounds, heightBounds); - - _intersections.clear(); - _intersections.append(Box(heightBounds.minimum, - glm::vec3(bounds.maximum.x, heightBounds.maximum.y, bounds.minimum.z))); - _intersections.append(Box(glm::vec3(bounds.maximum.x, heightBounds.minimum.y, heightBounds.minimum.z), - glm::vec3(heightBounds.maximum.x, heightBounds.maximum.y, bounds.maximum.z))); - _intersections.append(Box(glm::vec3(bounds.minimum.x, heightBounds.minimum.y, bounds.maximum.z), - heightBounds.maximum)); - _intersections.append(Box(glm::vec3(heightBounds.minimum.x, heightBounds.minimum.y, bounds.minimum.z), - glm::vec3(bounds.minimum.x, heightBounds.maximum.y, heightBounds.maximum.z))); - - _fetchVisitor.init(buffer); - _data->guide(_fetchVisitor); } info.outputValues[0] = AttributeValue(_outputs.at(0), encodeInline(BufferDataPointer(buffer))); return STOP_RECURSION; @@ -1249,7 +1472,7 @@ int HeightfieldUpdateVisitor::visit(MetavoxelInfo& info) { return STOP_RECURSION; } HeightfieldBuffer* newBuffer = new HeightfieldBuffer(info.minimum, info.size, - buffer->getHeight(), buffer->getColor()); + buffer->getHeight(), buffer->getColor(), buffer->getTexture(), buffer->getTextures()); _fetchVisitor.init(newBuffer); _data->guide(_fetchVisitor); info.outputValues[0] = AttributeValue(_outputs.at(0), encodeInline(BufferDataPointer(newBuffer))); @@ -1437,6 +1660,9 @@ void DefaultMetavoxelRendererImplementation::render(MetavoxelData& data, Metavox ProgramObject* program = &_heightfieldProgram; if (Menu::getInstance()->getShadowsEnabled()) { if (Menu::getInstance()->isOptionChecked(MenuOption::CascadedShadows)) { + _cascadedShadowLightHeightfieldProgram.bind(); + _cascadedShadowLightHeightfieldProgram.setUniform(_shadowLightDistancesLocation, + Application::getInstance()->getShadowDistances()); program = &_cascadedShadowMapHeightfieldProgram; program->bind(); program->setUniform(_shadowDistancesLocation, Application::getInstance()->getShadowDistances()); @@ -1482,6 +1708,24 @@ ProgramObject DefaultMetavoxelRendererImplementation::_cascadedShadowMapHeightfi int DefaultMetavoxelRendererImplementation::_cascadedShadowMapHeightScaleLocation; int DefaultMetavoxelRendererImplementation::_cascadedShadowMapColorScaleLocation; int DefaultMetavoxelRendererImplementation::_shadowDistancesLocation; +ProgramObject DefaultMetavoxelRendererImplementation::_baseHeightfieldProgram; +int DefaultMetavoxelRendererImplementation::_baseHeightScaleLocation; +int DefaultMetavoxelRendererImplementation::_baseColorScaleLocation; +ProgramObject DefaultMetavoxelRendererImplementation::_splatHeightfieldProgram; +int DefaultMetavoxelRendererImplementation::_splatHeightScaleLocation; +int DefaultMetavoxelRendererImplementation::_splatTextureScaleLocation; +int DefaultMetavoxelRendererImplementation::_splatTextureOffsetLocation; +int DefaultMetavoxelRendererImplementation::_splatTextureScalesSLocation; +int DefaultMetavoxelRendererImplementation::_splatTextureScalesTLocation; +int DefaultMetavoxelRendererImplementation::_splatTextureValueMinimaLocation; +int 
DefaultMetavoxelRendererImplementation::_splatTextureValueMaximaLocation; +ProgramObject DefaultMetavoxelRendererImplementation::_lightHeightfieldProgram; +int DefaultMetavoxelRendererImplementation::_lightHeightScaleLocation; +ProgramObject DefaultMetavoxelRendererImplementation::_shadowLightHeightfieldProgram; +int DefaultMetavoxelRendererImplementation::_shadowLightHeightScaleLocation; +ProgramObject DefaultMetavoxelRendererImplementation::_cascadedShadowLightHeightfieldProgram; +int DefaultMetavoxelRendererImplementation::_cascadedShadowLightHeightScaleLocation; +int DefaultMetavoxelRendererImplementation::_shadowLightDistancesLocation; ProgramObject DefaultMetavoxelRendererImplementation::_heightfieldCursorProgram; static void enableClipPlane(GLenum plane, float x, float y, float z, float w) { diff --git a/interface/src/MetavoxelSystem.h b/interface/src/MetavoxelSystem.h index 38d67bcaed..b1ddcf0bff 100644 --- a/interface/src/MetavoxelSystem.h +++ b/interface/src/MetavoxelSystem.h @@ -49,7 +49,7 @@ public: Q_INVOKABLE float getHeightfieldHeight(const glm::vec3& location); - Q_INVOKABLE void deleteTextures(int heightID, int colorID); + Q_INVOKABLE void deleteTextures(int heightID, int colorID, int textureID); protected: @@ -57,7 +57,7 @@ protected: private: - void guideToAugmented(MetavoxelVisitor& visitor); + void guideToAugmented(MetavoxelVisitor& visitor, bool render = false); AttributePointer _pointBufferAttribute; AttributePointer _heightfieldBufferAttribute; @@ -92,6 +92,8 @@ public: /// Returns a copy of the augmented data. This function is thread-safe. MetavoxelData getAugmentedData(); + void setRenderedAugmentedData(const MetavoxelData& data) { _renderedAugmentedData = data; } + virtual int parseData(const QByteArray& packet); protected: @@ -102,6 +104,7 @@ protected: private: MetavoxelData _augmentedData; + MetavoxelData _renderedAugmentedData; QReadWriteLock _augmentedDataLock; }; @@ -139,7 +142,9 @@ public: static const int SHARED_EDGE; static const int HEIGHT_EXTENSION; - HeightfieldBuffer(const glm::vec3& translation, float scale, const QByteArray& height, const QByteArray& color); + HeightfieldBuffer(const glm::vec3& translation, float scale, const QByteArray& height, + const QByteArray& color, const QByteArray& texture = QByteArray(), + const QVector& textures = QVector()); ~HeightfieldBuffer(); const glm::vec3& getTranslation() const { return _translation; } @@ -154,6 +159,11 @@ public: QByteArray& getColor() { return _color; } const QByteArray& getColor() const { return _color; } + QByteArray& getTexture() { return _texture; } + const QByteArray& getTexture() const { return _texture; } + + const QVector& getTextures() const { return _textures; } + QByteArray getUnextendedHeight() const; QByteArray getUnextendedColor() const; @@ -173,13 +183,17 @@ private: Box _colorBounds; QByteArray _height; QByteArray _color; + QByteArray _texture; + QVector _textures; GLuint _heightTextureID; GLuint _colorTextureID; + GLuint _textureTextureID; + QVector _networkTextures; int _heightSize; float _heightIncrement; int _colorSize; float _colorIncrement; - + typedef QPair BufferPair; static QHash _bufferPairs; }; @@ -231,6 +245,28 @@ public: static int getCascadedShadowMapHeightScaleLocation() { return _cascadedShadowMapHeightScaleLocation; } static int getCascadedShadowMapColorScaleLocation() { return _cascadedShadowMapColorScaleLocation; } + static ProgramObject& getBaseHeightfieldProgram() { return _baseHeightfieldProgram; } + static int getBaseHeightScaleLocation() { return 
_baseHeightScaleLocation; } + static int getBaseColorScaleLocation() { return _baseColorScaleLocation; } + + static ProgramObject& getSplatHeightfieldProgram() { return _splatHeightfieldProgram; } + static int getSplatHeightScaleLocation() { return _splatHeightScaleLocation; } + static int getSplatTextureScaleLocation() { return _splatTextureScaleLocation; } + static int getSplatTextureOffsetLocation() { return _splatTextureOffsetLocation; } + static int getSplatTextureScalesSLocation() { return _splatTextureScalesSLocation; } + static int getSplatTextureScalesTLocation() { return _splatTextureScalesTLocation; } + static int getSplatTextureValueMinimaLocation() { return _splatTextureValueMinimaLocation; } + static int getSplatTextureValueMaximaLocation() { return _splatTextureValueMaximaLocation; } + + static ProgramObject& getLightHeightfieldProgram() { return _lightHeightfieldProgram; } + static int getLightHeightScaleLocation() { return _lightHeightScaleLocation; } + + static ProgramObject& getShadowLightHeightfieldProgram() { return _shadowLightHeightfieldProgram; } + static int getShadowLightHeightScaleLocation() { return _shadowLightHeightScaleLocation; } + + static ProgramObject& getCascadedShadowLightHeightfieldProgram() { return _cascadedShadowLightHeightfieldProgram; } + static int getCascadedShadowLightHeightScaleLocation() { return _cascadedShadowLightHeightScaleLocation; } + static ProgramObject& getHeightfieldCursorProgram() { return _heightfieldCursorProgram; } Q_INVOKABLE DefaultMetavoxelRendererImplementation(); @@ -257,6 +293,29 @@ private: static int _cascadedShadowMapColorScaleLocation; static int _shadowDistancesLocation; + static ProgramObject _baseHeightfieldProgram; + static int _baseHeightScaleLocation; + static int _baseColorScaleLocation; + + static ProgramObject _splatHeightfieldProgram; + static int _splatHeightScaleLocation; + static int _splatTextureScaleLocation; + static int _splatTextureOffsetLocation; + static int _splatTextureScalesSLocation; + static int _splatTextureScalesTLocation; + static int _splatTextureValueMinimaLocation; + static int _splatTextureValueMaximaLocation; + + static ProgramObject _lightHeightfieldProgram; + static int _lightHeightScaleLocation; + + static ProgramObject _shadowLightHeightfieldProgram; + static int _shadowLightHeightScaleLocation; + + static ProgramObject _cascadedShadowLightHeightfieldProgram; + static int _cascadedShadowLightHeightScaleLocation; + static int _shadowLightDistancesLocation; + static ProgramObject _heightfieldCursorProgram; }; diff --git a/interface/src/SpeechRecognizer.h b/interface/src/SpeechRecognizer.h new file mode 100644 index 0000000000..edd4abe1d6 --- /dev/null +++ b/interface/src/SpeechRecognizer.h @@ -0,0 +1,47 @@ +// +// SpeechRecognizer.h +// interface/src +// +// Created by Ryan Huffman on 07/31/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. 
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +#ifndef hifi_SpeechRecognizer_h +#define hifi_SpeechRecognizer_h + +#include +#include +#include + +class SpeechRecognizer : public QObject { + Q_OBJECT +public: + SpeechRecognizer(); + ~SpeechRecognizer(); + + void handleCommandRecognized(const char* command); + bool getEnabled() const { return _enabled; } + +public slots: + void setEnabled(bool enabled); + void addCommand(const QString& command); + void removeCommand(const QString& command); + +signals: + void commandRecognized(const QString& command); + void enabledUpdated(bool enabled); + +protected: + void reloadCommands(); + +private: + bool _enabled; + QSet _commands; + void* _speechRecognizerDelegate; + void* _speechRecognizer; +}; + +#endif // hifi_SpeechRecognizer_h diff --git a/interface/src/SpeechRecognizer.mm b/interface/src/SpeechRecognizer.mm new file mode 100644 index 0000000000..038bcce3e4 --- /dev/null +++ b/interface/src/SpeechRecognizer.mm @@ -0,0 +1,109 @@ +// +// SpeechRecognizer.mm +// interface/src +// +// Created by Ryan Huffman on 07/31/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. +// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +#include +#ifdef Q_OS_MAC + +#import +#import +#import + +#include + +#include "SpeechRecognizer.h" + +@interface SpeechRecognizerDelegate : NSObject { + SpeechRecognizer* _listener; +} + +- (void)setListener:(SpeechRecognizer*)listener; +- (void)speechRecognizer:(NSSpeechRecognizer*)sender didRecognizeCommand:(id)command; + +@end + +@implementation SpeechRecognizerDelegate + +- (void)setListener:(SpeechRecognizer*)listener { + _listener = listener; +} + +- (void)speechRecognizer:(NSSpeechRecognizer*)sender didRecognizeCommand:(id)command { + _listener->handleCommandRecognized(((NSString*)command).UTF8String); +} + +@end + +SpeechRecognizer::SpeechRecognizer() : + QObject(), + _enabled(false), + _commands(), + _speechRecognizerDelegate([[SpeechRecognizerDelegate alloc] init]), + _speechRecognizer(NULL) { + + [(id)_speechRecognizerDelegate setListener:this]; +} + +SpeechRecognizer::~SpeechRecognizer() { + if (_speechRecognizer) { + [(id)_speechRecognizer dealloc]; + } + if (_speechRecognizerDelegate) { + [(id)_speechRecognizerDelegate dealloc]; + } +} + +void SpeechRecognizer::handleCommandRecognized(const char* command) { + emit commandRecognized(QString(command)); +} + +void SpeechRecognizer::setEnabled(bool enabled) { + if (enabled == _enabled) { + return; + } + + _enabled = enabled; + if (_enabled) { + _speechRecognizer = [[NSSpeechRecognizer alloc] init]; + + reloadCommands(); + + [(id)_speechRecognizer setDelegate:(id)_speechRecognizerDelegate]; + [(id)_speechRecognizer startListening]; + } else { + [(id)_speechRecognizer stopListening]; + [(id)_speechRecognizer dealloc]; + _speechRecognizer = NULL; + } + + emit enabledUpdated(_enabled); +} + +void SpeechRecognizer::reloadCommands() { + if (_speechRecognizer) { + NSMutableArray* cmds = [NSMutableArray array]; + for (QSet::const_iterator iter = _commands.constBegin(); iter != _commands.constEnd(); iter++) { + [cmds addObject:[NSString stringWithUTF8String:(*iter).toLocal8Bit().data()]]; + } + [(id)_speechRecognizer setCommands:cmds]; + } +} + +void SpeechRecognizer::addCommand(const QString& command) { + _commands.insert(command); + reloadCommands(); +} + +void SpeechRecognizer::removeCommand(const QString& command) { + 
_commands.remove(command); + reloadCommands(); +} + +#endif // Q_OS_MAC diff --git a/interface/src/avatar/Avatar.cpp b/interface/src/avatar/Avatar.cpp index af1917b34c..de902e4a99 100644 --- a/interface/src/avatar/Avatar.cpp +++ b/interface/src/avatar/Avatar.cpp @@ -29,6 +29,7 @@ #include "Menu.h" #include "ModelReferential.h" #include "Physics.h" +#include "Recorder.h" #include "world.h" #include "devices/OculusManager.h" #include "renderer/TextureCache.h" @@ -301,7 +302,7 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode) { return; } - glm::vec3 toTarget = cameraPosition - Application::getInstance()->getAvatar()->getPosition(); + glm::vec3 toTarget = cameraPosition - getPosition(); float distanceToTarget = glm::length(toTarget); { @@ -348,7 +349,7 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode) { } // If this is the avatar being looked at, render a little ball above their head - if (_isLookAtTarget && Menu::getInstance()->isOptionChecked(MenuOption::FocusIndicators)) { + if (_isLookAtTarget && Menu::getInstance()->isOptionChecked(MenuOption::RenderFocusIndicator)) { const float LOOK_AT_INDICATOR_RADIUS = 0.03f; const float LOOK_AT_INDICATOR_OFFSET = 0.22f; const float LOOK_AT_INDICATOR_COLOR[] = { 0.8f, 0.0f, 0.0f, 0.75f }; @@ -367,10 +368,12 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode) { // quick check before falling into the code below: // (a 10 degree breadth of an almost 2 meter avatar kicks in at about 12m) const float MIN_VOICE_SPHERE_DISTANCE = 12.0f; - if (distanceToTarget > MIN_VOICE_SPHERE_DISTANCE) { + if (Menu::getInstance()->isOptionChecked(MenuOption::BlueSpeechSphere) + && distanceToTarget > MIN_VOICE_SPHERE_DISTANCE) { + // render voice intensity sphere for avatars that are farther away const float MAX_SPHERE_ANGLE = 10.0f * RADIANS_PER_DEGREE; - const float MIN_SPHERE_ANGLE = 1.0f * RADIANS_PER_DEGREE; + const float MIN_SPHERE_ANGLE = 0.5f * RADIANS_PER_DEGREE; const float MIN_SPHERE_SIZE = 0.01f; const float SPHERE_LOUDNESS_SCALING = 0.0005f; const float SPHERE_COLOR[] = { 0.5f, 0.8f, 0.8f }; @@ -391,7 +394,7 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode) { } } - const float DISPLAYNAME_DISTANCE = 10.0f; + const float DISPLAYNAME_DISTANCE = 20.0f; setShowDisplayName(renderMode == NORMAL_RENDER_MODE && distanceToTarget < DISPLAYNAME_DISTANCE); if (renderMode != NORMAL_RENDER_MODE || (isMyAvatar() && Application::getInstance()->getCamera()->getMode() == CAMERA_MODE_FIRST_PERSON)) { @@ -725,6 +728,17 @@ bool Avatar::findCollisions(const QVector& shapes, CollisionList& return collided; } +QVector Avatar::getJointRotations() const { + if (QThread::currentThread() != thread()) { + return AvatarData::getJointRotations(); + } + QVector jointRotations(_skeletonModel.getJointStateCount()); + for (int i = 0; i < _skeletonModel.getJointStateCount(); ++i) { + _skeletonModel.getJointState(i, jointRotations[i]); + } + return jointRotations; +} + glm::quat Avatar::getJointRotation(int index) const { if (QThread::currentThread() != thread()) { return AvatarData::getJointRotation(index); diff --git a/interface/src/avatar/Avatar.h b/interface/src/avatar/Avatar.h index 555a0f6d32..46780e50ea 100755 --- a/interface/src/avatar/Avatar.h +++ b/interface/src/avatar/Avatar.h @@ -23,6 +23,7 @@ #include "Hand.h" #include "Head.h" #include "InterfaceConfig.h" +#include "Recorder.h" #include "SkeletonModel.h" #include "world.h" @@ -90,7 +91,7 @@ public: const QVector& 
getAttachmentModels() const { return _attachmentModels; } glm::vec3 getChestPosition() const; float getScale() const { return _scale; } - const glm::vec3& getVelocity() const { return _velocity; } + Q_INVOKABLE const glm::vec3& getVelocity() const { return _velocity; } const Head* getHead() const { return static_cast(_headData); } Head* getHead() { return static_cast(_headData); } Hand* getHand() { return static_cast(_handData); } @@ -121,6 +122,7 @@ public: virtual bool isMyAvatar() { return false; } + virtual QVector getJointRotations() const; virtual glm::quat getJointRotation(int index) const; virtual int getJointIndex(const QString& name) const; virtual QStringList getJointNames() const; @@ -150,9 +152,9 @@ public: Q_INVOKABLE glm::quat getJointCombinedRotation(int index) const; Q_INVOKABLE glm::quat getJointCombinedRotation(const QString& name) const; - glm::vec3 getAcceleration() const { return _acceleration; } - glm::vec3 getAngularVelocity() const { return _angularVelocity; } - glm::vec3 getAngularAcceleration() const { return _angularAcceleration; } + Q_INVOKABLE glm::vec3 getAcceleration() const { return _acceleration; } + Q_INVOKABLE glm::vec3 getAngularVelocity() const { return _angularVelocity; } + Q_INVOKABLE glm::vec3 getAngularAcceleration() const { return _angularAcceleration; } /// Scales a world space position vector relative to the avatar position and scale @@ -220,8 +222,6 @@ private: void renderBillboard(); float getBillboardSize() const; - - }; #endif // hifi_Avatar_h diff --git a/interface/src/avatar/AvatarManager.cpp b/interface/src/avatar/AvatarManager.cpp index 5cc8812b40..0a9cbfe762 100644 --- a/interface/src/avatar/AvatarManager.cpp +++ b/interface/src/avatar/AvatarManager.cpp @@ -82,7 +82,7 @@ void AvatarManager::updateOtherAvatars(float deltaTime) { void AvatarManager::renderAvatars(Avatar::RenderMode renderMode, bool selfAvatarOnly) { PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "Application::renderAvatars()"); - bool renderLookAtVectors = Menu::getInstance()->isOptionChecked(MenuOption::LookAtVectors); + bool renderLookAtVectors = Menu::getInstance()->isOptionChecked(MenuOption::RenderLookAtVectors); glm::vec3 cameraPosition = Application::getInstance()->getCamera()->getPosition(); diff --git a/interface/src/avatar/FaceModel.cpp b/interface/src/avatar/FaceModel.cpp index 203dbf2283..521a4ddc57 100644 --- a/interface/src/avatar/FaceModel.cpp +++ b/interface/src/avatar/FaceModel.cpp @@ -54,7 +54,7 @@ void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const FBX state.setRotationInConstrainedFrame(glm::angleAxis(- RADIANS_PER_DEGREE * _owningHead->getFinalRoll(), glm::normalize(inverse * axes[2])) * glm::angleAxis(RADIANS_PER_DEGREE * _owningHead->getFinalYaw(), glm::normalize(inverse * axes[1])) * glm::angleAxis(- RADIANS_PER_DEGREE * _owningHead->getFinalPitch(), glm::normalize(inverse * axes[0])) - * joint.rotation); + * joint.rotation, DEFAULT_PRIORITY); } void FaceModel::maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) { @@ -69,7 +69,7 @@ void FaceModel::maybeUpdateEyeRotation(const JointState& parentState, const FBXJ glm::quat between = rotationBetween(front, lookAt); const float MAX_ANGLE = 30.0f * RADIANS_PER_DEGREE; state.setRotationInConstrainedFrame(glm::angleAxis(glm::clamp(glm::angle(between), -MAX_ANGLE, MAX_ANGLE), glm::axis(between)) * - joint.rotation); + joint.rotation, DEFAULT_PRIORITY); } void FaceModel::updateJointState(int 
index) { diff --git a/interface/src/avatar/Head.cpp b/interface/src/avatar/Head.cpp index b0333b1acf..b226b8ed31 100644 --- a/interface/src/avatar/Head.cpp +++ b/interface/src/avatar/Head.cpp @@ -64,13 +64,18 @@ void Head::reset() { void Head::simulate(float deltaTime, bool isMine, bool billboard) { // Update audio trailing average for rendering facial animations if (isMine) { - FaceTracker* faceTracker = Application::getInstance()->getActiveFaceTracker(); - if ((_isFaceshiftConnected = faceTracker)) { - _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients(); - _isFaceshiftConnected = true; - } else if (Application::getInstance()->getDDE()->isActive()) { - faceTracker = Application::getInstance()->getDDE(); - _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients(); + MyAvatar* myAvatar = static_cast(_owningAvatar); + + // Only use face trackers when not playing back a recording. + if (!myAvatar->isPlaying()) { + FaceTracker* faceTracker = Application::getInstance()->getActiveFaceTracker(); + if ((_isFaceshiftConnected = faceTracker)) { + _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients(); + _isFaceshiftConnected = true; + } else if (Application::getInstance()->getDDE()->isActive()) { + faceTracker = Application::getInstance()->getDDE(); + _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients(); + } } } @@ -217,6 +222,18 @@ glm::vec3 Head::getScalePivot() const { return _faceModel.isActive() ? _faceModel.getTranslation() : _position; } +void Head::setFinalPitch(float finalPitch) { + _deltaPitch = glm::clamp(finalPitch, MIN_HEAD_PITCH, MAX_HEAD_PITCH) - _basePitch; +} + +void Head::setFinalYaw(float finalYaw) { + _deltaYaw = glm::clamp(finalYaw, MIN_HEAD_YAW, MAX_HEAD_YAW) - _baseYaw; +} + +void Head::setFinalRoll(float finalRoll) { + _deltaRoll = glm::clamp(finalRoll, MIN_HEAD_ROLL, MAX_HEAD_ROLL) - _baseRoll; +} + float Head::getFinalYaw() const { return glm::clamp(_baseYaw + _deltaYaw, MIN_HEAD_YAW, MAX_HEAD_YAW); } diff --git a/interface/src/avatar/Head.h b/interface/src/avatar/Head.h index 1de5ea7dd1..1cdfdaf5a3 100644 --- a/interface/src/avatar/Head.h +++ b/interface/src/avatar/Head.h @@ -48,8 +48,6 @@ public: void setAverageLoudness(float averageLoudness) { _averageLoudness = averageLoudness; } void setReturnToCenter (bool returnHeadToCenter) { _returnHeadToCenter = returnHeadToCenter; } void setRenderLookatVectors(bool onOff) { _renderLookatVectors = onOff; } - void setLeanSideways(float leanSideways) { _leanSideways = leanSideways; } - void setLeanForward(float leanForward) { _leanForward = leanForward; } /// \return orientationBase+Delta glm::quat getFinalOrientationInLocalFrame() const; @@ -57,7 +55,6 @@ public: /// \return orientationBody * (orientationBase+Delta) glm::quat getFinalOrientationInWorldFrame() const; - /// \return orientationBody * orientationBasePitch glm::quat getCameraOrientation () const; @@ -71,8 +68,6 @@ public: glm::vec3 getRightDirection() const { return getOrientation() * IDENTITY_RIGHT; } glm::vec3 getUpDirection() const { return getOrientation() * IDENTITY_UP; } glm::vec3 getFrontDirection() const { return getOrientation() * IDENTITY_FRONT; } - float getLeanSideways() const { return _leanSideways; } - float getLeanForward() const { return _leanForward; } float getFinalLeanSideways() const { return _leanSideways + _deltaLeanSideways; } float getFinalLeanForward() const { return _leanForward + _deltaLeanForward; } @@ -100,6 +95,9 @@ public: void setDeltaRoll(float roll) { _deltaRoll = roll; } float 
getDeltaRoll() const { return _deltaRoll; } + virtual void setFinalYaw(float finalYaw); + virtual void setFinalPitch(float finalPitch); + virtual void setFinalRoll(float finalRoll); virtual float getFinalPitch() const; virtual float getFinalYaw() const; virtual float getFinalRoll() const; diff --git a/interface/src/avatar/MyAvatar.cpp b/interface/src/avatar/MyAvatar.cpp index d6e53259b0..692278788b 100644 --- a/interface/src/avatar/MyAvatar.cpp +++ b/interface/src/avatar/MyAvatar.cpp @@ -35,6 +35,7 @@ #include "ModelReferential.h" #include "MyAvatar.h" #include "Physics.h" +#include "Recorder.h" #include "devices/Faceshift.h" #include "devices/OculusManager.h" #include "ui/TextRenderer.h" @@ -80,6 +81,7 @@ MyAvatar::MyAvatar() : _billboardValid(false), _physicsSimulation() { + ShapeCollider::initDispatchTable(); for (int i = 0; i < MAX_DRIVE_KEYS; i++) { _driveKeys[i] = 0.0f; } @@ -136,6 +138,12 @@ void MyAvatar::update(float deltaTime) { void MyAvatar::simulate(float deltaTime) { PerformanceTimer perfTimer("simulate"); + + // Play back recording + if (_player && _player->isPlaying()) { + _player->play(); + } + if (_scale != _targetScale) { float scale = (1.0f - SMOOTHING_RATIO) * _scale + SMOOTHING_RATIO * _targetScale; setScale(scale); @@ -148,7 +156,7 @@ void MyAvatar::simulate(float deltaTime) { updateOrientation(deltaTime); updatePosition(deltaTime); } - + { PerformanceTimer perfTimer("hand"); // update avatar skeleton and simulate hand and head @@ -206,12 +214,21 @@ void MyAvatar::simulate(float deltaTime) { { PerformanceTimer perfTimer("ragdoll"); - if (Menu::getInstance()->isOptionChecked(MenuOption::CollideAsRagdoll)) { + Ragdoll* ragdoll = _skeletonModel.getRagdoll(); + if (ragdoll && Menu::getInstance()->isOptionChecked(MenuOption::CollideAsRagdoll)) { const float minError = 0.00001f; const float maxIterations = 3; const quint64 maxUsec = 4000; _physicsSimulation.setTranslation(_position); _physicsSimulation.stepForward(deltaTime, minError, maxIterations, maxUsec); + + // harvest any displacement of the Ragdoll that is a result of collisions + glm::vec3 ragdollDisplacement = ragdoll->getAndClearAccumulatedMovement(); + const float MAX_RAGDOLL_DISPLACEMENT_2 = 1.0f; + float length2 = glm::length2(ragdollDisplacement); + if (length2 > EPSILON && length2 < MAX_RAGDOLL_DISPLACEMENT_2) { + setPosition(getPosition() + ragdollDisplacement); + } } else { _skeletonModel.moveShapesTowardJoints(1.0f); } @@ -243,6 +260,11 @@ void MyAvatar::simulate(float deltaTime) { } } + // Record avatars movements. 
+ if (_recorder && _recorder->isRecording()) { + _recorder->record(); + } + // consider updating our billboard maybeUpdateBillboard(); } @@ -250,7 +272,11 @@ void MyAvatar::simulate(float deltaTime) { // Update avatar head rotation with sensor data void MyAvatar::updateFromTrackers(float deltaTime) { glm::vec3 estimatedPosition, estimatedRotation; - + + if (isPlaying() && !OculusManager::isConnected()) { + return; + } + if (Application::getInstance()->getPrioVR()->hasHeadRotation()) { estimatedRotation = glm::degrees(safeEulerAngles(Application::getInstance()->getPrioVR()->getHeadRotation())); estimatedRotation.x *= -1.0f; @@ -293,7 +319,7 @@ void MyAvatar::updateFromTrackers(float deltaTime) { Head* head = getHead(); - if (OculusManager::isConnected()) { + if (OculusManager::isConnected() || isPlaying()) { head->setDeltaPitch(estimatedRotation.x); head->setDeltaYaw(estimatedRotation.y); } else { @@ -310,7 +336,6 @@ void MyAvatar::updateFromTrackers(float deltaTime) { head->setLeanForward(eulers.x); return; } - // Update torso lean distance based on accelerometer data const float TORSO_LENGTH = 0.5f; glm::vec3 relativePosition = estimatedPosition - glm::vec3(0.0f, -TORSO_LENGTH, 0.0f); @@ -481,6 +506,89 @@ bool MyAvatar::setJointReferential(const QUuid& id, int jointIndex) { } } +bool MyAvatar::isRecording() { + if (!_recorder) { + return false; + } + if (QThread::currentThread() != thread()) { + bool result; + QMetaObject::invokeMethod(this, "isRecording", Qt::BlockingQueuedConnection, + Q_RETURN_ARG(bool, result)); + return result; + } + return _recorder && _recorder->isRecording(); +} + +qint64 MyAvatar::recorderElapsed() { + if (!_recorder) { + return 0; + } + if (QThread::currentThread() != thread()) { + qint64 result; + QMetaObject::invokeMethod(this, "recorderElapsed", Qt::BlockingQueuedConnection, + Q_RETURN_ARG(qint64, result)); + return result; + } + return _recorder->elapsed(); +} + +void MyAvatar::startRecording() { + if (QThread::currentThread() != thread()) { + QMetaObject::invokeMethod(this, "startRecording", Qt::BlockingQueuedConnection); + return; + } + if (!_recorder) { + _recorder = RecorderPointer(new Recorder(this)); + } + Application::getInstance()->getAudio()->setRecorder(_recorder); + _recorder->startRecording(); + +} + +void MyAvatar::stopRecording() { + if (!_recorder) { + return; + } + if (QThread::currentThread() != thread()) { + QMetaObject::invokeMethod(this, "stopRecording", Qt::BlockingQueuedConnection); + return; + } + if (_recorder) { + _recorder->stopRecording(); + } +} + +void MyAvatar::saveRecording(QString filename) { + if (!_recorder) { + qDebug() << "There is no recording to save"; + return; + } + if (QThread::currentThread() != thread()) { + QMetaObject::invokeMethod(this, "saveRecording", Qt::BlockingQueuedConnection, + Q_ARG(QString, filename)); + return; + } + if (_recorder) { + _recorder->saveToFile(filename); + } +} + +void MyAvatar::loadLastRecording() { + if (QThread::currentThread() != thread()) { + QMetaObject::invokeMethod(this, "loadLastRecording", Qt::BlockingQueuedConnection); + return; + } + if (!_recorder) { + qDebug() << "There is no recording to load"; + return; + } + if (!_player) { + _player = PlayerPointer(new Player(this)); + } + + _player->loadRecording(_recorder->getRecording()); +} + void MyAvatar::setLocalGravity(glm::vec3 gravity) { _motionBehaviors |= AVATAR_MOTION_OBEY_LOCAL_GRAVITY; // Environmental and Local gravities are incompatible. 
Since Local is being set here @@ -861,19 +969,39 @@ glm::vec3 MyAvatar::getUprightHeadPosition() const { return _position + getWorldAlignedOrientation() * glm::vec3(0.0f, getPelvisToHeadLength(), 0.0f); } -const float JOINT_PRIORITY = 2.0f; +const float SCRIPT_PRIORITY = DEFAULT_PRIORITY + 1.0f; +const float RECORDER_PRIORITY = SCRIPT_PRIORITY + 1.0f; + +void MyAvatar::setJointRotations(QVector jointRotations) { + int numStates = glm::min(_skeletonModel.getJointStateCount(), jointRotations.size()); + for (int i = 0; i < numStates; ++i) { + // HACK: ATM only Recorder calls setJointRotations() so we hardcode its priority here + _skeletonModel.setJointState(i, true, jointRotations[i], RECORDER_PRIORITY); + } +} void MyAvatar::setJointData(int index, const glm::quat& rotation) { - Avatar::setJointData(index, rotation); if (QThread::currentThread() == thread()) { - _skeletonModel.setJointState(index, true, rotation, JOINT_PRIORITY); + // HACK: ATM only JS scripts call setJointData() on MyAvatar so we hardcode the priority + _skeletonModel.setJointState(index, true, rotation, SCRIPT_PRIORITY); } } void MyAvatar::clearJointData(int index) { - Avatar::clearJointData(index); if (QThread::currentThread() == thread()) { - _skeletonModel.setJointState(index, false, glm::quat(), JOINT_PRIORITY); + // HACK: ATM only JS scripts call clearJointData() on MyAvatar so we hardcode the priority + _skeletonModel.setJointState(index, false, glm::quat(), 0.0f); + } +} + +void MyAvatar::clearJointsData() { + clearJointAnimationPriorities(); +} + +void MyAvatar::clearJointAnimationPriorities() { + int numStates = _skeletonModel.getJointStateCount(); + for (int i = 0; i < numStates; ++i) { + _skeletonModel.clearJointAnimationPriority(i); } } @@ -1768,12 +1896,8 @@ void MyAvatar::resetSize() { } void MyAvatar::goToLocationFromResponse(const QJsonObject& jsonObject) { - if (jsonObject["status"].toString() == "success") { - QJsonObject locationObject = jsonObject["data"].toObject()["address"].toObject(); - goToLocationFromAddress(locationObject); - } else { - QMessageBox::warning(Application::getInstance()->getWindow(), "", "That user or location could not be found."); - } + QJsonObject locationObject = jsonObject["data"].toObject()["address"].toObject(); + goToLocationFromAddress(locationObject); } void MyAvatar::goToLocationFromAddress(const QJsonObject& locationObject) { diff --git a/interface/src/avatar/MyAvatar.h b/interface/src/avatar/MyAvatar.h index 1e54e2f5b0..f1a12c3304 100644 --- a/interface/src/avatar/MyAvatar.h +++ b/interface/src/avatar/MyAvatar.h @@ -106,18 +106,30 @@ public: virtual int parseDataAtOffset(const QByteArray& packet, int offset); static void sendKillAvatar(); - + + Q_INVOKABLE glm::vec3 getHeadPosition() const { return getHead()->getPosition(); } + Q_INVOKABLE float getHeadFinalYaw() const { return getHead()->getFinalYaw(); } + Q_INVOKABLE float getHeadFinalRoll() const { return getHead()->getFinalRoll(); } + Q_INVOKABLE float getHeadFinalPitch() const { return getHead()->getFinalPitch(); } + Q_INVOKABLE float getHeadDeltaPitch() const { return getHead()->getDeltaPitch(); } + + Q_INVOKABLE glm::vec3 getEyePosition() const { return getHead()->getEyePosition(); } + Q_INVOKABLE glm::vec3 getTargetAvatarPosition() const { return _targetAvatarPosition; } AvatarData* getLookAtTargetAvatar() const { return _lookAtTargetAvatar.data(); } void updateLookAtTargetAvatar(); void clearLookAtTargetAvatar(); + virtual void setJointRotations(QVector jointRotations); virtual void setJointData(int index, 
const glm::quat& rotation); virtual void clearJointData(int index); + virtual void clearJointsData(); virtual void setFaceModelURL(const QUrl& faceModelURL); virtual void setSkeletonModelURL(const QUrl& skeletonModelURL); virtual void setAttachmentData(const QVector& attachmentData); + void clearJointAnimationPriorities(); + virtual void attach(const QString& modelURL, const QString& jointName = QString(), const glm::vec3& translation = glm::vec3(), const glm::quat& rotation = glm::quat(), float scale = 1.0f, bool allowDuplicates = false, bool useSaved = true); @@ -132,6 +144,10 @@ public: /// Renders a laser pointer for UI picking void renderLaserPointers(); glm::vec3 getLaserPointerTipPosition(const PalmData* palm); + + const RecorderPointer getRecorder() const { return _recorder; } + const PlayerPointer getPlayer() const { return _player; } + public slots: void goHome(); void increaseSize(); @@ -155,6 +171,13 @@ public slots: bool setModelReferential(const QUuid& id); bool setJointReferential(const QUuid& id, int jointIndex); + bool isRecording(); + qint64 recorderElapsed(); + void startRecording(); + void stopRecording(); + void saveRecording(QString filename); + void loadLastRecording(); + signals: void transformChanged(); @@ -192,6 +215,8 @@ private: QList _animationHandles; PhysicsSimulation _physicsSimulation; + RecorderPointer _recorder; + // private methods float computeDistanceToFloor(const glm::vec3& startPoint); void updateOrientation(float deltaTime); diff --git a/interface/src/avatar/SkeletonModel.cpp b/interface/src/avatar/SkeletonModel.cpp index 4e954af46b..9dd299c4df 100644 --- a/interface/src/avatar/SkeletonModel.cpp +++ b/interface/src/avatar/SkeletonModel.cpp @@ -14,14 +14,11 @@ #include #include -#include -#include #include "Application.h" #include "Avatar.h" #include "Hand.h" #include "Menu.h" -#include "MuscleConstraint.h" #include "SkeletonModel.h" #include "SkeletonRagdoll.h" @@ -54,7 +51,8 @@ void SkeletonModel::setJointStates(QVector states) { } } -const float PALM_PRIORITY = 3.0f; +const float PALM_PRIORITY = DEFAULT_PRIORITY; +const float LEAN_PRIORITY = DEFAULT_PRIORITY; void SkeletonModel::simulate(float deltaTime, bool fullUpdate) { setTranslation(_owningAvatar->getPosition()); @@ -65,9 +63,15 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) { Model::simulate(deltaTime, fullUpdate); - if (!(isActive() && _owningAvatar->isMyAvatar())) { + if (!isActive() || !_owningAvatar->isMyAvatar()) { return; // only simulate for own avatar } + + MyAvatar* myAvatar = static_cast(_owningAvatar); + if (myAvatar->isPlaying()) { + // Don't take inputs if playing back a recording. 
+ return; + } const FBXGeometry& geometry = _geometry->getFBXGeometry(); PrioVR* prioVR = Application::getInstance()->getPrioVR(); @@ -227,7 +231,7 @@ void SkeletonModel::applyPalmData(int jointIndex, PalmData& palm) { JointState& parentState = _jointStates[parentJointIndex]; parentState.setRotationInBindFrame(palmRotation, PALM_PRIORITY); // lock hand to forearm by slamming its rotation (in parent-frame) to identity - _jointStates[jointIndex].setRotationInConstrainedFrame(glm::quat()); + _jointStates[jointIndex].setRotationInConstrainedFrame(glm::quat(), PALM_PRIORITY); } else { inverseKinematics(jointIndex, palmPosition, palmRotation, PALM_PRIORITY); } @@ -240,7 +244,7 @@ void SkeletonModel::updateJointState(int index) { const JointState& parentState = _jointStates.at(joint.parentIndex); const FBXGeometry& geometry = _geometry->getFBXGeometry(); if (index == geometry.leanJointIndex) { - maybeUpdateLeanRotation(parentState, joint, state); + maybeUpdateLeanRotation(parentState, state); } else if (index == geometry.neckJointIndex) { maybeUpdateNeckRotation(parentState, joint, state); @@ -257,17 +261,18 @@ void SkeletonModel::updateJointState(int index) { } } -void SkeletonModel::maybeUpdateLeanRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) { +void SkeletonModel::maybeUpdateLeanRotation(const JointState& parentState, JointState& state) { if (!_owningAvatar->isMyAvatar() || Application::getInstance()->getPrioVR()->isActive()) { return; } // get the rotation axes in joint space and use them to adjust the rotation - glm::mat3 axes = glm::mat3_cast(glm::quat()); - glm::mat3 inverse = glm::mat3(glm::inverse(parentState.getTransform() * glm::translate(state.getDefaultTranslationInConstrainedFrame()) * - joint.preTransform * glm::mat4_cast(joint.preRotation * joint.rotation))); - state.setRotationInConstrainedFrame(glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanSideways(), - glm::normalize(inverse * axes[2])) * glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanForward(), - glm::normalize(inverse * axes[0])) * joint.rotation); + glm::vec3 xAxis(1.0f, 0.0f, 0.0f); + glm::vec3 zAxis(0.0f, 0.0f, 1.0f); + glm::quat inverse = glm::inverse(parentState.getRotation() * state.getDefaultRotationInParentFrame()); + state.setRotationInConstrainedFrame( + glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanSideways(), inverse * zAxis) + * glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanForward(), inverse * xAxis) + * state.getFBXJoint().rotation, LEAN_PRIORITY); } void SkeletonModel::maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) { @@ -573,6 +578,7 @@ SkeletonRagdoll* SkeletonModel::buildRagdoll() { if (!_ragdoll) { _ragdoll = new SkeletonRagdoll(this); if (_enableShapes) { + clearShapes(); buildShapes(); } } @@ -597,6 +603,7 @@ void SkeletonModel::buildShapes() { if (!_ragdoll) { _ragdoll = new SkeletonRagdoll(this); } + _ragdoll->setRootIndex(geometry.rootJointIndex); _ragdoll->initPoints(); QVector& points = _ragdoll->getPoints(); @@ -604,44 +611,47 @@ void SkeletonModel::buildShapes() { float uniformScale = extractUniformScale(_scale); const int numStates = _jointStates.size(); + float totalMass = 0.0f; for (int i = 0; i < numStates; i++) { JointState& state = _jointStates[i]; const FBXJoint& joint = state.getFBXJoint(); float radius = uniformScale * joint.boneRadius; float halfHeight = 0.5f * uniformScale * 
joint.distanceToParent; Shape::Type type = joint.shapeType; - if (i == 0 || (type == Shape::CAPSULE_SHAPE && halfHeight < EPSILON)) { + int parentIndex = joint.parentIndex; + if (parentIndex == -1 || radius < EPSILON) { + type = UNKNOWN_SHAPE; + } else if (type == CAPSULE_SHAPE && halfHeight < EPSILON) { // this shape is forced to be a sphere - type = Shape::SPHERE_SHAPE; - } - if (radius < EPSILON) { - type = Shape::UNKNOWN_SHAPE; + type = SPHERE_SHAPE; } Shape* shape = NULL; - int parentIndex = joint.parentIndex; - if (type == Shape::SPHERE_SHAPE) { + if (type == SPHERE_SHAPE) { shape = new VerletSphereShape(radius, &(points[i])); shape->setEntity(this); - points[i].setMass(massScale * glm::max(MIN_JOINT_MASS, DENSITY_OF_WATER * shape->getVolume())); - } else if (type == Shape::CAPSULE_SHAPE) { + float mass = massScale * glm::max(MIN_JOINT_MASS, DENSITY_OF_WATER * shape->getVolume()); + points[i].setMass(mass); + totalMass += mass; + } else if (type == CAPSULE_SHAPE) { assert(parentIndex != -1); shape = new VerletCapsuleShape(radius, &(points[parentIndex]), &(points[i])); shape->setEntity(this); - points[i].setMass(massScale * glm::max(MIN_JOINT_MASS, DENSITY_OF_WATER * shape->getVolume())); + float mass = massScale * glm::max(MIN_JOINT_MASS, DENSITY_OF_WATER * shape->getVolume()); + points[i].setMass(mass); + totalMass += mass; } - if (parentIndex != -1) { + if (shape && parentIndex != -1) { // always disable collisions between joint and its parent - if (shape) { - disableCollisions(i, parentIndex); - } - } else { - // give the base joint a very large mass since it doesn't actually move - // in the local-frame simulation (it defines the origin) - points[i].setMass(VERY_BIG_MASS); - } + disableCollisions(i, parentIndex); + } _shapes.push_back(shape); } + // set the mass of the root + if (numStates > 0) { + points[_ragdoll->getRootIndex()].setMass(totalMass); + } + // This method moves the shapes to their default positions in Model frame. 
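The reworked buildShapes() derives each joint's mass from its collision shape's volume (with a minimum floor) and, instead of giving the root a huge sentinel mass, assigns the accumulated total to the root point. A rough standalone sketch of that bookkeeping follows; the density and minimum-mass values and the simplified volume formulas are illustrative assumptions, not the engine's exact constants:

```cpp
// Sketch of the per-joint mass accumulation added to buildShapes().
#include <algorithm>
#include <iostream>
#include <vector>

struct ToyShape { float radius; float halfHeight; bool isCapsule; };

float shapeVolume(const ToyShape& s) {
    const float PI = 3.14159265f;
    float sphere = (4.0f / 3.0f) * PI * s.radius * s.radius * s.radius;
    if (!s.isCapsule) {
        return sphere;
    }
    // capsule = cylinder plus two hemispherical caps
    return PI * s.radius * s.radius * (2.0f * s.halfHeight) + sphere;
}

int main() {
    const float DENSITY_OF_WATER = 1000.0f; // kg/m^3, assumed value of the engine constant
    const float MIN_JOINT_MASS = 1.0f;      // assumed floor so tiny joints still behave sanely

    std::vector<ToyShape> shapes = {
        {0.10f, 0.00f, false},  // head-ish sphere
        {0.05f, 0.15f, true},   // forearm-ish capsule
        {0.06f, 0.20f, true},   // upper-arm-ish capsule
    };

    float totalMass = 0.0f;
    for (const ToyShape& s : shapes) {
        float mass = std::max(MIN_JOINT_MASS, DENSITY_OF_WATER * shapeVolume(s));
        totalMass += mass;
    }
    // the root point carries the sum rather than a very large sentinel mass
    float rootMass = totalMass;
    std::cout << "root mass: " << rootMass << " kg\n";
    return 0;
}
```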
computeBoundingShape(geometry); @@ -721,7 +731,7 @@ void SkeletonModel::computeBoundingShape(const FBXGeometry& geometry) { shapeExtents.reset(); glm::vec3 localPosition = shape->getTranslation(); int type = shape->getType(); - if (type == Shape::CAPSULE_SHAPE) { + if (type == CAPSULE_SHAPE) { // add the two furthest surface points of the capsule CapsuleShape* capsule = static_cast(shape); glm::vec3 axis; @@ -733,7 +743,7 @@ void SkeletonModel::computeBoundingShape(const FBXGeometry& geometry) { shapeExtents.addPoint(localPosition + axis); shapeExtents.addPoint(localPosition - axis); totalExtents.addExtents(shapeExtents); - } else if (type == Shape::SPHERE_SHAPE) { + } else if (type == SPHERE_SHAPE) { float radius = shape->getBoundingRadius(); glm::vec3 axis = glm::vec3(radius); shapeExtents.addPoint(localPosition + axis); @@ -837,13 +847,13 @@ void SkeletonModel::renderJointCollisionShapes(float alpha) { glPushMatrix(); // shapes are stored in simulation-frame but we want position to be model-relative - if (shape->getType() == Shape::SPHERE_SHAPE) { + if (shape->getType() == SPHERE_SHAPE) { glm::vec3 position = shape->getTranslation() - simulationTranslation; glTranslatef(position.x, position.y, position.z); // draw a grey sphere at shape position glColor4f(0.75f, 0.75f, 0.75f, alpha); glutSolidSphere(shape->getBoundingRadius(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS); - } else if (shape->getType() == Shape::CAPSULE_SHAPE) { + } else if (shape->getType() == CAPSULE_SHAPE) { CapsuleShape* capsule = static_cast(shape); // draw a blue sphere at the capsule endpoint diff --git a/interface/src/avatar/SkeletonModel.h b/interface/src/avatar/SkeletonModel.h index b0d6ed7325..9bd8df745a 100644 --- a/interface/src/avatar/SkeletonModel.h +++ b/interface/src/avatar/SkeletonModel.h @@ -127,7 +127,7 @@ protected: /// Updates the state of the joint at the specified index. 
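For context on the bounding-shape hunk above: each sphere (and each capsule endpoint) contributes its furthest surface points to a running extents box, and the union of those points defines the bounding shape. A simplified sketch of that accumulation using glm; the Extents struct and the hard-coded sphere list are stand-ins for the engine's types, not its actual data:

```cpp
// Simplified sketch of the extent accumulation performed by computeBoundingShape().
#include <glm/glm.hpp>
#include <cfloat>
#include <iostream>
#include <utility>
#include <vector>

struct Extents {
    glm::vec3 minimum{ FLT_MAX};
    glm::vec3 maximum{-FLT_MAX};
    void addPoint(const glm::vec3& p) {
        minimum = glm::min(minimum, p);
        maximum = glm::max(maximum, p);
    }
};

int main() {
    // pretend shapes: (center, radius) pairs for spheres; capsules would add both endpoints
    std::vector<std::pair<glm::vec3, float>> spheres = {
        {{0.0f, 1.6f, 0.0f}, 0.12f},   // head
        {{0.0f, 1.0f, 0.0f}, 0.20f},   // torso
        {{0.0f, 0.4f, 0.0f}, 0.10f},   // pelvis-ish
    };
    Extents total;
    for (const auto& [center, radius] : spheres) {
        glm::vec3 axis(radius);
        total.addPoint(center + axis);
        total.addPoint(center - axis);
    }
    glm::vec3 size = total.maximum - total.minimum;
    std::cout << "bounding box height: " << size.y << "\n";
    return 0;
}
```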
virtual void updateJointState(int index); - void maybeUpdateLeanRotation(const JointState& parentState, const FBXJoint& joint, JointState& state); + void maybeUpdateLeanRotation(const JointState& parentState, JointState& state); void maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state); void maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state); diff --git a/interface/src/avatar/SkeletonRagdoll.cpp b/interface/src/avatar/SkeletonRagdoll.cpp index 503f38f00f..7c0e056826 100644 --- a/interface/src/avatar/SkeletonRagdoll.cpp +++ b/interface/src/avatar/SkeletonRagdoll.cpp @@ -36,8 +36,9 @@ void SkeletonRagdoll::stepForward(float deltaTime) { void SkeletonRagdoll::slamPointPositions() { QVector& jointStates = _model->getJointStates(); - int numStates = jointStates.size(); - for (int i = 0; i < numStates; ++i) { + const int numPoints = _points.size(); + assert(numPoints == jointStates.size()); + for (int i = _rootIndex; i < numPoints; ++i) { _points[i].initPosition(jointStates.at(i).getPosition()); } } @@ -49,8 +50,7 @@ void SkeletonRagdoll::initPoints() { initTransform(); // one point for each joint - QVector& jointStates = _model->getJointStates(); - int numStates = jointStates.size(); + int numStates = _model->getJointStates().size(); _points.fill(VerletPoint(), numStates); slamPointPositions(); } @@ -67,13 +67,10 @@ void SkeletonRagdoll::buildConstraints() { float minBone = FLT_MAX; float maxBone = -FLT_MAX; QMultiMap families; - for (int i = 0; i < numPoints; ++i) { + for (int i = _rootIndex; i < numPoints; ++i) { const JointState& state = jointStates.at(i); int parentIndex = state.getParentIndex(); - if (parentIndex == -1) { - FixedConstraint* anchor = new FixedConstraint(&_translationInSimulationFrame, &(_points[i])); - _fixedConstraints.push_back(anchor); - } else { + if (parentIndex != -1) { DistanceConstraint* bone = new DistanceConstraint(&(_points[i]), &(_points[parentIndex])); bone->setDistance(state.getDistanceToParent()); _boneConstraints.push_back(bone); @@ -108,7 +105,7 @@ void SkeletonRagdoll::buildConstraints() { float MAX_STRENGTH = 0.6f; float MIN_STRENGTH = 0.05f; // each joint gets a MuscleConstraint to its parent - for (int i = 1; i < numPoints; ++i) { + for (int i = _rootIndex + 1; i < numPoints; ++i) { const JointState& state = jointStates.at(i); int p = state.getParentIndex(); if (p == -1) { diff --git a/interface/src/avatar/SkeletonRagdoll.h b/interface/src/avatar/SkeletonRagdoll.h index f9f99395ac..ae9bec9116 100644 --- a/interface/src/avatar/SkeletonRagdoll.h +++ b/interface/src/avatar/SkeletonRagdoll.h @@ -33,7 +33,9 @@ public: virtual void initPoints(); virtual void buildConstraints(); +protected: void updateMuscles(); + private: Model* _model; QVector _muscleConstraints; diff --git a/interface/src/devices/CaraFaceTracker.cpp b/interface/src/devices/CaraFaceTracker.cpp index 27cf3b175b..9f056fab9b 100644 --- a/interface/src/devices/CaraFaceTracker.cpp +++ b/interface/src/devices/CaraFaceTracker.cpp @@ -389,7 +389,6 @@ void CaraFaceTracker::decodePacket(const QByteArray& buffer) { if (theta > EPSILON) { float rMag = glm::length(glm::vec3(r.x, r.y, r.z)); const float AVERAGE_CARA_FRAME_TIME = 0.04f; - const float ANGULAR_VELOCITY_MIN = 1.2f; const float YAW_STANDARD_DEV_DEG = 2.5f; _headAngularVelocity = theta / AVERAGE_CARA_FRAME_TIME * glm::vec3(r.x, r.y, r.z) / rMag; diff --git a/interface/src/devices/DdeFaceTracker.cpp b/interface/src/devices/DdeFaceTracker.cpp index 
ae5beb8c85..aab3e1deb4 100644 --- a/interface/src/devices/DdeFaceTracker.cpp +++ b/interface/src/devices/DdeFaceTracker.cpp @@ -194,12 +194,12 @@ float updateAndGetCoefficient(float * coefficient, float currentValue, bool scal coefficient[AVG] = LONG_TERM_AVERAGE * coefficient[AVG] + (1.f - LONG_TERM_AVERAGE) * currentValue; if (coefficient[MAX] > coefficient[MIN]) { if (scaleToRange) { - return glm::clamp((currentValue - coefficient[AVG]) / (coefficient[MAX] - coefficient[MIN]), 0.f, 1.f); + return glm::clamp((currentValue - coefficient[AVG]) / (coefficient[MAX] - coefficient[MIN]), 0.0f, 1.0f); } else { - return glm::clamp(currentValue - coefficient[AVG], 0.f, 1.f); + return glm::clamp(currentValue - coefficient[AVG], 0.0f, 1.0f); } } else { - return 0.f; + return 0.0f; } } @@ -242,13 +242,11 @@ void DdeFaceTracker::decodePacket(const QByteArray& buffer) { // Set blendshapes float EYE_MAGNIFIER = 4.0f; - - float rightEye = (updateAndGetCoefficient(_rightEye, packet.expressions[0])) * EYE_MAGNIFIER; + float rightEye = glm::clamp((updateAndGetCoefficient(_rightEye, packet.expressions[0])) * EYE_MAGNIFIER, 0.0f, 1.0f); _blendshapeCoefficients[_rightBlinkIndex] = rightEye; - float leftEye = (updateAndGetCoefficient(_leftEye, packet.expressions[1])) * EYE_MAGNIFIER; + float leftEye = glm::clamp((updateAndGetCoefficient(_leftEye, packet.expressions[1])) * EYE_MAGNIFIER, 0.0f, 1.0f); _blendshapeCoefficients[_leftBlinkIndex] = leftEye; - // Right eye = packet.expressions[0]; float leftBrow = 1.0f - rescaleCoef(packet.expressions[14]); if (leftBrow < 0.5f) { @@ -270,9 +268,9 @@ void DdeFaceTracker::decodePacket(const QByteArray& buffer) { float JAW_OPEN_MAGNIFIER = 1.4f; _blendshapeCoefficients[_jawOpenIndex] = rescaleCoef(packet.expressions[21]) * JAW_OPEN_MAGNIFIER; - - _blendshapeCoefficients[_mouthSmileLeftIndex] = rescaleCoef(packet.expressions[24]); - _blendshapeCoefficients[_mouthSmileRightIndex] = rescaleCoef(packet.expressions[23]); + float SMILE_MULTIPLIER = 2.0f; + _blendshapeCoefficients[_mouthSmileLeftIndex] = glm::clamp(packet.expressions[24] * SMILE_MULTIPLIER, 0.0f, 1.0f); + _blendshapeCoefficients[_mouthSmileRightIndex] = glm::clamp(packet.expressions[23] * SMILE_MULTIPLIER, 0.0f, 1.0f); } else { diff --git a/interface/src/devices/Faceshift.cpp b/interface/src/devices/Faceshift.cpp index b5cba8348c..345e635045 100644 --- a/interface/src/devices/Faceshift.cpp +++ b/interface/src/devices/Faceshift.cpp @@ -26,11 +26,17 @@ using namespace fs; using namespace std; const quint16 FACESHIFT_PORT = 33433; +float STARTING_FACESHIFT_FRAME_TIME = 0.033f; Faceshift::Faceshift() : _tcpEnabled(true), _tcpRetryCount(0), _lastTrackingStateReceived(0), + _averageFrameTime(STARTING_FACESHIFT_FRAME_TIME), + _headAngularVelocity(0), + _headLinearVelocity(0), + _lastHeadTranslation(0), + _filteredHeadTranslation(0), _eyeGazeLeftPitch(0.0f), _eyeGazeLeftYaw(0.0f), _eyeGazeRightPitch(0.0f), @@ -209,23 +215,41 @@ void Faceshift::receive(const QByteArray& buffer) { float theta = 2 * acos(r.w); if (theta > EPSILON) { float rMag = glm::length(glm::vec3(r.x, r.y, r.z)); - float AVERAGE_FACESHIFT_FRAME_TIME = 0.033f; - _headAngularVelocity = theta / AVERAGE_FACESHIFT_FRAME_TIME * glm::vec3(r.x, r.y, r.z) / rMag; + _headAngularVelocity = theta / _averageFrameTime * glm::vec3(r.x, r.y, r.z) / rMag; } else { _headAngularVelocity = glm::vec3(0,0,0); } - _headRotation = newRotation; + const float ANGULAR_VELOCITY_FILTER_STRENGTH = 0.3f; + _headRotation = safeMix(_headRotation, newRotation, 
glm::clamp(glm::length(_headAngularVelocity) * + ANGULAR_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f)); const float TRANSLATION_SCALE = 0.02f; - _headTranslation = glm::vec3(data.m_headTranslation.x, data.m_headTranslation.y, - -data.m_headTranslation.z) * TRANSLATION_SCALE; + glm::vec3 newHeadTranslation = glm::vec3(data.m_headTranslation.x, data.m_headTranslation.y, + -data.m_headTranslation.z) * TRANSLATION_SCALE; + + _headLinearVelocity = (newHeadTranslation - _lastHeadTranslation) / _averageFrameTime; + + const float LINEAR_VELOCITY_FILTER_STRENGTH = 0.3f; + float velocityFilter = glm::clamp(1.0f - glm::length(_headLinearVelocity) * + LINEAR_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f); + _filteredHeadTranslation = velocityFilter * _filteredHeadTranslation + (1.0f - velocityFilter) * newHeadTranslation; + + _lastHeadTranslation = newHeadTranslation; + _headTranslation = _filteredHeadTranslation; + _eyeGazeLeftPitch = -data.m_eyeGazeLeftPitch; _eyeGazeLeftYaw = data.m_eyeGazeLeftYaw; _eyeGazeRightPitch = -data.m_eyeGazeRightPitch; _eyeGazeRightYaw = data.m_eyeGazeRightYaw; _blendshapeCoefficients = QVector::fromStdVector(data.m_coeffs); - _lastTrackingStateReceived = usecTimestampNow(); + const float FRAME_AVERAGING_FACTOR = 0.99f; + quint64 usecsNow = usecTimestampNow(); + if (_lastTrackingStateReceived != 0) { + _averageFrameTime = FRAME_AVERAGING_FACTOR * _averageFrameTime + + (1.0f - FRAME_AVERAGING_FACTOR) * (float)(usecsNow - _lastTrackingStateReceived) / 1000000.0f; + } + _lastTrackingStateReceived = usecsNow; } break; } diff --git a/interface/src/devices/Faceshift.h b/interface/src/devices/Faceshift.h index 25abd8c0eb..e7d87827eb 100644 --- a/interface/src/devices/Faceshift.h +++ b/interface/src/devices/Faceshift.h @@ -98,8 +98,12 @@ private: int _tcpRetryCount; bool _tracking; quint64 _lastTrackingStateReceived; + float _averageFrameTime; glm::vec3 _headAngularVelocity; + glm::vec3 _headLinearVelocity; + glm::vec3 _lastHeadTranslation; + glm::vec3 _filteredHeadTranslation; // degrees float _eyeGazeLeftPitch; diff --git a/interface/src/location/LocationManager.cpp b/interface/src/location/LocationManager.cpp index cbf7d3dfd0..551e08a8bc 100644 --- a/interface/src/location/LocationManager.cpp +++ b/interface/src/location/LocationManager.cpp @@ -85,9 +85,13 @@ void LocationManager::goTo(QString destination) { if (!goToDestination(destination)) { destination = QString(QUrl::toPercentEncoding(destination)); UserActivityLogger::getInstance().wentTo(OTHER_DESTINATION_TYPE, destination); + JSONCallbackParameters callbackParams; callbackParams.jsonCallbackReceiver = this; callbackParams.jsonCallbackMethod = "goToAddressFromResponse"; + callbackParams.errorCallbackReceiver = this; + callbackParams.errorCallbackMethod = "handleAddressLookupError"; + AccountManager::getInstance().authenticatedRequest(GET_ADDRESSES.arg(destination), QNetworkAccessManager::GetOperation, callbackParams); @@ -96,21 +100,17 @@ void LocationManager::goTo(QString destination) { void LocationManager::goToAddressFromResponse(const QJsonObject& responseData) { QJsonValue status = responseData["status"]; - qDebug() << responseData; - if (!status.isUndefined() && status.toString() == "success") { - const QJsonObject& data = responseData["data"].toObject(); - const QJsonValue& userObject = data["user"]; - const QJsonValue& placeObject = data["place"]; - - if (!placeObject.isUndefined() && !userObject.isUndefined()) { - emit multipleDestinationsFound(userObject.toObject(), placeObject.toObject()); - } else if 
(placeObject.isUndefined()) { - Application::getInstance()->getAvatar()->goToLocationFromAddress(userObject.toObject()["address"].toObject()); - } else { - Application::getInstance()->getAvatar()->goToLocationFromAddress(placeObject.toObject()["address"].toObject()); - } + + const QJsonObject& data = responseData["data"].toObject(); + const QJsonValue& userObject = data["user"]; + const QJsonValue& placeObject = data["place"]; + + if (!placeObject.isUndefined() && !userObject.isUndefined()) { + emit multipleDestinationsFound(userObject.toObject(), placeObject.toObject()); + } else if (placeObject.isUndefined()) { + Application::getInstance()->getAvatar()->goToLocationFromAddress(userObject.toObject()["address"].toObject()); } else { - QMessageBox::warning(Application::getInstance()->getWindow(), "", "That user or location could not be found."); + Application::getInstance()->getAvatar()->goToLocationFromAddress(placeObject.toObject()["address"].toObject()); } } @@ -118,6 +118,8 @@ void LocationManager::goToUser(QString userName) { JSONCallbackParameters callbackParams; callbackParams.jsonCallbackReceiver = Application::getInstance()->getAvatar(); callbackParams.jsonCallbackMethod = "goToLocationFromResponse"; + callbackParams.errorCallbackReceiver = this; + callbackParams.errorCallbackMethod = "handleAddressLookupError"; userName = QString(QUrl::toPercentEncoding(userName)); AccountManager::getInstance().authenticatedRequest(GET_USER_ADDRESS.arg(userName), @@ -129,6 +131,8 @@ void LocationManager::goToPlace(QString placeName) { JSONCallbackParameters callbackParams; callbackParams.jsonCallbackReceiver = Application::getInstance()->getAvatar(); callbackParams.jsonCallbackMethod = "goToLocationFromResponse"; + callbackParams.errorCallbackReceiver = this; + callbackParams.errorCallbackMethod = "handleAddressLookupError"; placeName = QString(QUrl::toPercentEncoding(placeName)); AccountManager::getInstance().authenticatedRequest(GET_PLACE_ADDRESS.arg(placeName), @@ -212,6 +216,19 @@ bool LocationManager::goToDestination(QString destination) { return false; } +void LocationManager::handleAddressLookupError(QNetworkReply::NetworkError networkError, + const QString& errorString) { + QString messageBoxString; + + if (networkError == QNetworkReply::ContentNotFoundError) { + messageBoxString = "That address could not be found."; + } else { + messageBoxString = errorString; + } + + QMessageBox::warning(Application::getInstance()->getWindow(), "", messageBoxString); +} + void LocationManager::replaceLastOccurrence(const QChar search, const QChar replace, QString& string) { int lastIndex; lastIndex = string.lastIndexOf(search); diff --git a/interface/src/location/LocationManager.h b/interface/src/location/LocationManager.h index b781f3f54e..30b4447ded 100644 --- a/interface/src/location/LocationManager.h +++ b/interface/src/location/LocationManager.h @@ -37,6 +37,9 @@ public: void goToPlace(QString placeName); void goToOrientation(QString orientation); bool goToDestination(QString destination); + +public slots: + void handleAddressLookupError(QNetworkReply::NetworkError networkError, const QString& errorString); private: void replaceLastOccurrence(const QChar search, const QChar replace, QString& string); diff --git a/interface/src/renderer/GeometryCache.cpp b/interface/src/renderer/GeometryCache.cpp index b5bd63ab87..3cfc5efd5f 100644 --- a/interface/src/renderer/GeometryCache.cpp +++ b/interface/src/renderer/GeometryCache.cpp @@ -593,17 +593,20 @@ void NetworkGeometry::setGeometry(const FBXGeometry& 
geometry) { NetworkMeshPart networkPart; if (!part.diffuseTexture.filename.isEmpty()) { networkPart.diffuseTexture = Application::getInstance()->getTextureCache()->getTexture( - _textureBase.resolved(QUrl(part.diffuseTexture.filename)), false, mesh.isEye, part.diffuseTexture.content); + _textureBase.resolved(QUrl(part.diffuseTexture.filename)), DEFAULT_TEXTURE, + mesh.isEye, part.diffuseTexture.content); networkPart.diffuseTexture->setLoadPriorities(_loadPriorities); } if (!part.normalTexture.filename.isEmpty()) { networkPart.normalTexture = Application::getInstance()->getTextureCache()->getTexture( - _textureBase.resolved(QUrl(part.normalTexture.filename)), true, false, part.normalTexture.content); + _textureBase.resolved(QUrl(part.normalTexture.filename)), NORMAL_TEXTURE, + false, part.normalTexture.content); networkPart.normalTexture->setLoadPriorities(_loadPriorities); } if (!part.specularTexture.filename.isEmpty()) { networkPart.specularTexture = Application::getInstance()->getTextureCache()->getTexture( - _textureBase.resolved(QUrl(part.specularTexture.filename)), true, false, part.specularTexture.content); + _textureBase.resolved(QUrl(part.specularTexture.filename)), SPECULAR_TEXTURE, + false, part.specularTexture.content); networkPart.specularTexture->setLoadPriorities(_loadPriorities); } networkMesh.parts.append(networkPart); diff --git a/interface/src/renderer/JointState.cpp b/interface/src/renderer/JointState.cpp index 316dfeb9ca..96561758da 100644 --- a/interface/src/renderer/JointState.cpp +++ b/interface/src/renderer/JointState.cpp @@ -73,7 +73,7 @@ void JointState::setFBXJoint(const FBXJoint* joint) { } } -void JointState::updateConstraint() { +void JointState::buildConstraint() { if (_constraint) { delete _constraint; _constraint = NULL; @@ -145,7 +145,7 @@ glm::quat JointState::getVisibleRotationInParentFrame() const { void JointState::restoreRotation(float fraction, float priority) { assert(_fbxJoint != NULL); if (priority == _animationPriority || _animationPriority == 0.0f) { - setRotationInConstrainedFrame(safeMix(_rotationInConstrainedFrame, _fbxJoint->rotation, fraction)); + setRotationInConstrainedFrameInternal(safeMix(_rotationInConstrainedFrame, _fbxJoint->rotation, fraction)); _animationPriority = 0.0f; } } @@ -158,7 +158,7 @@ void JointState::setRotationInBindFrame(const glm::quat& rotation, float priorit if (constrain && _constraint) { _constraint->softClamp(targetRotation, _rotationInConstrainedFrame, 0.5f); } - setRotationInConstrainedFrame(targetRotation); + setRotationInConstrainedFrameInternal(targetRotation); _animationPriority = priority; } } @@ -173,10 +173,6 @@ void JointState::clearTransformTranslation() { _visibleTransform[3][2] = 0.0f; } -void JointState::setRotation(const glm::quat& rotation, bool constrain, float priority) { - applyRotationDelta(rotation * glm::inverse(getRotation()), true, priority); -} - void JointState::applyRotationDelta(const glm::quat& delta, bool constrain, float priority) { // NOTE: delta is in model-frame assert(_fbxJoint != NULL); @@ -193,7 +189,7 @@ void JointState::applyRotationDelta(const glm::quat& delta, bool constrain, floa _rotation = delta * getRotation(); return; } - setRotationInConstrainedFrame(targetRotation); + setRotationInConstrainedFrameInternal(targetRotation); } /// Applies delta rotation to joint but mixes a little bit of the default pose as well. 
@@ -212,7 +208,7 @@ void JointState::mixRotationDelta(const glm::quat& delta, float mixFactor, float if (_constraint) { _constraint->softClamp(targetRotation, _rotationInConstrainedFrame, 0.5f); } - setRotationInConstrainedFrame(targetRotation); + setRotationInConstrainedFrameInternal(targetRotation); } void JointState::mixVisibleRotationDelta(const glm::quat& delta, float mixFactor) { @@ -236,7 +232,17 @@ glm::quat JointState::computeVisibleParentRotation() const { return _visibleRotation * glm::inverse(_fbxJoint->preRotation * _visibleRotationInConstrainedFrame * _fbxJoint->postRotation); } -void JointState::setRotationInConstrainedFrame(const glm::quat& targetRotation) { +void JointState::setRotationInConstrainedFrame(glm::quat targetRotation, float priority, bool constrain) { + if (priority >= _animationPriority || _animationPriority == 0.0f) { + if (constrain && _constraint) { + _constraint->softClamp(targetRotation, _rotationInConstrainedFrame, 0.5f); + } + setRotationInConstrainedFrameInternal(targetRotation); + _animationPriority = priority; + } +} + +void JointState::setRotationInConstrainedFrameInternal(const glm::quat& targetRotation) { glm::quat parentRotation = computeParentRotation(); _rotationInConstrainedFrame = targetRotation; _transformChanged = true; @@ -258,6 +264,11 @@ const bool JointState::rotationIsDefault(const glm::quat& rotation, float tolera glm::abs(rotation.w - defaultRotation.w) < tolerance; } +glm::quat JointState::getDefaultRotationInParentFrame() const { + // NOTE: the result is constant and could be cached in the FBXJoint + return _fbxJoint->preRotation * _fbxJoint->rotation * _fbxJoint->postRotation; +} + const glm::vec3& JointState::getDefaultTranslationInConstrainedFrame() const { assert(_fbxJoint != NULL); return _fbxJoint->translation; @@ -267,4 +278,4 @@ void JointState::slaveVisibleTransform() { _visibleTransform = _transform; _visibleRotation = getRotation(); _visibleRotationInConstrainedFrame = _rotationInConstrainedFrame; -} \ No newline at end of file +} diff --git a/interface/src/renderer/JointState.h b/interface/src/renderer/JointState.h index 21961ba48c..b502083463 100644 --- a/interface/src/renderer/JointState.h +++ b/interface/src/renderer/JointState.h @@ -19,6 +19,8 @@ #include #include +const float DEFAULT_PRIORITY = 3.0f; + class AngularConstraint; class JointState { @@ -30,7 +32,7 @@ public: void setFBXJoint(const FBXJoint* joint); const FBXJoint& getFBXJoint() const { return *_fbxJoint; } - void updateConstraint(); + void buildConstraint(); void copyState(const JointState& state); void initTransform(const glm::mat4& parentTransform); @@ -60,9 +62,6 @@ public: int getParentIndex() const { return _fbxJoint->parentIndex; } - /// \param rotation rotation of joint in model-frame - void setRotation(const glm::quat& rotation, bool constrain, float priority); - /// \param delta is in the model-frame void applyRotationDelta(const glm::quat& delta, bool constrain = true, float priority = 1.0f); @@ -84,13 +83,14 @@ public: /// NOTE: the JointState's model-frame transform/rotation are NOT updated! 
void setRotationInBindFrame(const glm::quat& rotation, float priority, bool constrain = false); - void setRotationInConstrainedFrame(const glm::quat& targetRotation); + void setRotationInConstrainedFrame(glm::quat targetRotation, float priority, bool constrain = false); void setVisibleRotationInConstrainedFrame(const glm::quat& targetRotation); const glm::quat& getRotationInConstrainedFrame() const { return _rotationInConstrainedFrame; } const glm::quat& getVisibleRotationInConstrainedFrame() const { return _visibleRotationInConstrainedFrame; } const bool rotationIsDefault(const glm::quat& rotation, float tolerance = EPSILON) const; + glm::quat getDefaultRotationInParentFrame() const; const glm::vec3& getDefaultTranslationInConstrainedFrame() const; @@ -106,6 +106,7 @@ public: glm::quat computeVisibleParentRotation() const; private: + void setRotationInConstrainedFrameInternal(const glm::quat& targetRotation); /// debug helper function void loadBindRotation(); diff --git a/interface/src/renderer/Model.cpp b/interface/src/renderer/Model.cpp index b50379b131..d49eefa4bd 100644 --- a/interface/src/renderer/Model.cpp +++ b/interface/src/renderer/Model.cpp @@ -438,7 +438,7 @@ void Model::reset() { } const FBXGeometry& geometry = _geometry->getFBXGeometry(); for (int i = 0; i < _jointStates.size(); i++) { - _jointStates[i].setRotationInConstrainedFrame(geometry.joints.at(i).rotation); + _jointStates[i].setRotationInConstrainedFrame(geometry.joints.at(i).rotation, 0.0f); } } @@ -547,7 +547,7 @@ void Model::setJointStates(QVector states) { if (distance > radius) { radius = distance; } - _jointStates[i].updateConstraint(); + _jointStates[i].buildConstraint(); } for (int i = 0; i < _jointStates.size(); i++) { _jointStates[i].slaveVisibleTransform(); @@ -692,16 +692,26 @@ bool Model::getVisibleJointState(int index, glm::quat& rotation) const { return !state.rotationIsDefault(rotation); } +void Model::clearJointState(int index) { + if (index != -1 && index < _jointStates.size()) { + JointState& state = _jointStates[index]; + state.setRotationInConstrainedFrame(glm::quat(), 0.0f); + } +} + +void Model::clearJointAnimationPriority(int index) { + if (index != -1 && index < _jointStates.size()) { + _jointStates[index]._animationPriority = 0.0f; + } +} + void Model::setJointState(int index, bool valid, const glm::quat& rotation, float priority) { if (index != -1 && index < _jointStates.size()) { JointState& state = _jointStates[index]; - if (priority >= state._animationPriority) { - if (valid) { - state.setRotationInConstrainedFrame(rotation); - state._animationPriority = priority; - } else { - state.restoreRotation(1.0f, priority); - } + if (valid) { + state.setRotationInConstrainedFrame(rotation, priority); + } else { + state.restoreRotation(1.0f, priority); } } } @@ -1184,7 +1194,7 @@ void Model::inverseKinematics(int endIndex, glm::vec3 targetPosition, const glm: } // Apply the rotation, but use mixRotationDelta() which blends a bit of the default pose - // at in the process. This provides stability to the IK solution for most models. + // in the process. This provides stability to the IK solution for most models. 
glm::quat oldNextRotation = nextState.getRotation(); float mixFactor = 0.03f; nextState.mixRotationDelta(deltaRotation, mixFactor, priority); @@ -1729,10 +1739,7 @@ void AnimationHandle::applyFrame(float frameIndex) { int mapping = _jointMappings.at(i); if (mapping != -1) { JointState& state = _model->_jointStates[mapping]; - if (_priority >= state._animationPriority) { - state.setRotationInConstrainedFrame(safeMix(floorFrame.rotations.at(i), ceilFrame.rotations.at(i), frameFraction)); - state._animationPriority = _priority; - } + state.setRotationInConstrainedFrame(safeMix(floorFrame.rotations.at(i), ceilFrame.rotations.at(i), frameFraction), _priority); } } } diff --git a/interface/src/renderer/Model.h b/interface/src/renderer/Model.h index da72d43133..66baaac90d 100644 --- a/interface/src/renderer/Model.h +++ b/interface/src/renderer/Model.h @@ -118,6 +118,12 @@ public: /// \return whether or not the joint state is "valid" (that is, non-default) bool getVisibleJointState(int index, glm::quat& rotation) const; + /// Clear the joint states + void clearJointState(int index); + + /// Clear the joint animation priority + void clearJointAnimationPriority(int index); + /// Sets the joint state at the specified index. void setJointState(int index, bool valid, const glm::quat& rotation = glm::quat(), float priority = 1.0f); diff --git a/interface/src/renderer/TextureCache.cpp b/interface/src/renderer/TextureCache.cpp index 01c3dc1cc1..d960525817 100644 --- a/interface/src/renderer/TextureCache.cpp +++ b/interface/src/renderer/TextureCache.cpp @@ -145,6 +145,8 @@ GLuint TextureCache::getPermutationNormalTextureID() { } const unsigned char OPAQUE_WHITE[] = { 0xFF, 0xFF, 0xFF, 0xFF }; +const unsigned char TRANSPARENT_WHITE[] = { 0xFF, 0xFF, 0xFF, 0x0 }; +const unsigned char OPAQUE_BLACK[] = { 0x0, 0x0, 0x0, 0xFF }; const unsigned char OPAQUE_BLUE[] = { 0x80, 0x80, 0xFF, 0xFF }; static void loadSingleColorTexture(const unsigned char* color) { @@ -175,19 +177,18 @@ GLuint TextureCache::getBlueTextureID() { /// Extra data for creating textures. 
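The corrected comment above describes the stabilization trick used by the IK solver: after applying each delta, the joint is blended a few percent of the way back toward its default pose so the solution cannot drift. A small self-contained sketch of that blend (the example rotations are assumptions; glm::slerp stands in for the engine's safeMix):

```cpp
// Sketch of blending a small fraction of the default pose back in after an IK delta.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <cmath>
#include <iostream>

int main() {
    const float MIX_FACTOR = 0.03f; // same small factor Model::inverseKinematics uses

    glm::quat defaultRotation = glm::angleAxis(glm::radians(10.0f), glm::vec3(1.0f, 0.0f, 0.0f));
    glm::quat currentRotation = glm::angleAxis(glm::radians(75.0f), glm::vec3(1.0f, 0.0f, 0.0f));
    glm::quat delta           = glm::angleAxis(glm::radians(5.0f),  glm::vec3(1.0f, 0.0f, 0.0f));

    // apply the IK delta, then pull a few percent of the way back toward the default pose
    glm::quat target = delta * currentRotation;
    glm::quat stabilized = glm::slerp(target, defaultRotation, MIX_FACTOR);

    // report the resulting rotation angle (same 2*acos(w) form the face trackers use)
    float angleDegrees = glm::degrees(2.0f * std::acos(glm::clamp(stabilized.w, -1.0f, 1.0f)));
    std::cout << "stabilized rotation: " << angleDegrees << " degrees\n";
    return 0;
}
```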
class TextureExtra { public: - bool normalMap; + TextureType type; const QByteArray& content; }; -QSharedPointer TextureCache::getTexture(const QUrl& url, bool normalMap, - bool dilatable, const QByteArray& content) { +NetworkTexturePointer TextureCache::getTexture(const QUrl& url, TextureType type, bool dilatable, const QByteArray& content) { if (!dilatable) { - TextureExtra extra = { normalMap, content }; + TextureExtra extra = { type, content }; return ResourceCache::getResource(url, QUrl(), false, &extra).staticCast(); } - QSharedPointer texture = _dilatableNetworkTextures.value(url); + NetworkTexturePointer texture = _dilatableNetworkTextures.value(url); if (texture.isNull()) { - texture = QSharedPointer(new DilatableNetworkTexture(url, content), &Resource::allReferencesCleared); + texture = NetworkTexturePointer(new DilatableNetworkTexture(url, content), &Resource::allReferencesCleared); texture->setSelf(texture); texture->setCache(this); _dilatableNetworkTextures.insert(url, texture); @@ -293,7 +294,7 @@ bool TextureCache::eventFilter(QObject* watched, QEvent* event) { QSharedPointer TextureCache::createResource(const QUrl& url, const QSharedPointer& fallback, bool delayLoad, const void* extra) { const TextureExtra* textureExtra = static_cast(extra); - return QSharedPointer(new NetworkTexture(url, textureExtra->normalMap, textureExtra->content), + return QSharedPointer(new NetworkTexture(url, textureExtra->type, textureExtra->content), &Resource::allReferencesCleared); } @@ -317,17 +318,34 @@ Texture::~Texture() { glDeleteTextures(1, &_id); } -NetworkTexture::NetworkTexture(const QUrl& url, bool normalMap, const QByteArray& content) : +NetworkTexture::NetworkTexture(const QUrl& url, TextureType type, const QByteArray& content) : Resource(url, !content.isEmpty()), + _type(type), _translucent(false) { if (!url.isValid()) { _loaded = true; } - // default to white/blue + // default to white/blue/black glBindTexture(GL_TEXTURE_2D, getID()); - loadSingleColorTexture(normalMap ? 
OPAQUE_BLUE : OPAQUE_WHITE); + switch (type) { + case NORMAL_TEXTURE: + loadSingleColorTexture(OPAQUE_BLUE); + break; + + case SPECULAR_TEXTURE: + loadSingleColorTexture(OPAQUE_BLACK); + break; + + case SPLAT_TEXTURE: + loadSingleColorTexture(TRANSPARENT_WHITE); + break; + + default: + loadSingleColorTexture(OPAQUE_WHITE); + break; + } glBindTexture(GL_TEXTURE_2D, 0); // if we have content, load it after we have our self pointer @@ -382,12 +400,28 @@ void ImageReader::run() { qDebug() << "Image greater than maximum size:" << _url << image.width() << image.height(); image = image.scaled(MAXIMUM_SIZE, MAXIMUM_SIZE, Qt::KeepAspectRatio); } + int imageArea = image.width() * image.height(); + const int EIGHT_BIT_MAXIMUM = 255; if (!image.hasAlphaChannel()) { if (image.format() != QImage::Format_RGB888) { image = image.convertToFormat(QImage::Format_RGB888); } - QMetaObject::invokeMethod(texture.data(), "setImage", Q_ARG(const QImage&, image), Q_ARG(bool, false)); + int redTotal = 0, greenTotal = 0, blueTotal = 0; + for (int y = 0; y < image.height(); y++) { + for (int x = 0; x < image.width(); x++) { + QRgb rgb = image.pixel(x, y); + redTotal += qRed(rgb); + greenTotal += qGreen(rgb); + blueTotal += qBlue(rgb); + } + } + QColor averageColor(EIGHT_BIT_MAXIMUM, EIGHT_BIT_MAXIMUM, EIGHT_BIT_MAXIMUM); + if (imageArea > 0) { + averageColor.setRgb(redTotal / imageArea, greenTotal / imageArea, blueTotal / imageArea); + } + QMetaObject::invokeMethod(texture.data(), "setImage", Q_ARG(const QImage&, image), Q_ARG(bool, false), + Q_ARG(const QColor&, averageColor)); return; } if (image.format() != QImage::Format_ARGB32) { @@ -397,11 +431,15 @@ void ImageReader::run() { // check for translucency/false transparency int opaquePixels = 0; int translucentPixels = 0; - const int EIGHT_BIT_MAXIMUM = 255; - const int RGB_BITS = 24; + int redTotal = 0, greenTotal = 0, blueTotal = 0, alphaTotal = 0; for (int y = 0; y < image.height(); y++) { for (int x = 0; x < image.width(); x++) { - int alpha = image.pixel(x, y) >> RGB_BITS; + QRgb rgb = image.pixel(x, y); + redTotal += qRed(rgb); + greenTotal += qGreen(rgb); + blueTotal += qBlue(rgb); + int alpha = qAlpha(rgb); + alphaTotal += alpha; if (alpha == EIGHT_BIT_MAXIMUM) { opaquePixels++; } else if (alpha != 0) { @@ -409,13 +447,13 @@ void ImageReader::run() { } } } - int imageArea = image.width() * image.height(); if (opaquePixels == imageArea) { qDebug() << "Image with alpha channel is completely opaque:" << _url; image = image.convertToFormat(QImage::Format_RGB888); } QMetaObject::invokeMethod(texture.data(), "setImage", Q_ARG(const QImage&, image), - Q_ARG(bool, translucentPixels >= imageArea / 2)); + Q_ARG(bool, translucentPixels >= imageArea / 2), Q_ARG(const QColor&, QColor(redTotal / imageArea, + greenTotal / imageArea, blueTotal / imageArea, alphaTotal / imageArea))); } void NetworkTexture::downloadFinished(QNetworkReply* reply) { @@ -427,8 +465,9 @@ void NetworkTexture::loadContent(const QByteArray& content) { QThreadPool::globalInstance()->start(new ImageReader(_self, NULL, _url, content)); } -void NetworkTexture::setImage(const QImage& image, bool translucent) { +void NetworkTexture::setImage(const QImage& image, bool translucent, const QColor& averageColor) { _translucent = translucent; + _averageColor = averageColor; finishedLoading(true); imageLoaded(image); @@ -440,7 +479,13 @@ void NetworkTexture::setImage(const QImage& image, bool translucent) { glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, image.width(), image.height(), 0, GL_RGB, GL_UNSIGNED_BYTE, 
image.constBits()); } - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + if (_type == SPLAT_TEXTURE) { + // generate mipmaps for splat textures + glGenerateMipmap(GL_TEXTURE_2D); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR); + } else { + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + } glBindTexture(GL_TEXTURE_2D, 0); } @@ -449,7 +494,7 @@ void NetworkTexture::imageLoaded(const QImage& image) { } DilatableNetworkTexture::DilatableNetworkTexture(const QUrl& url, const QByteArray& content) : - NetworkTexture(url, false, content), + NetworkTexture(url, DEFAULT_TEXTURE, content), _innerRadius(0), _outerRadius(0) { diff --git a/interface/src/renderer/TextureCache.h b/interface/src/renderer/TextureCache.h index 248a451e3a..e1d69677f6 100644 --- a/interface/src/renderer/TextureCache.h +++ b/interface/src/renderer/TextureCache.h @@ -23,6 +23,10 @@ class QOpenGLFramebufferObject; class NetworkTexture; +typedef QSharedPointer NetworkTexturePointer; + +enum TextureType { DEFAULT_TEXTURE, NORMAL_TEXTURE, SPECULAR_TEXTURE, SPLAT_TEXTURE }; + /// Stores cached textures, including render-to-texture targets. class TextureCache : public ResourceCache { Q_OBJECT @@ -47,7 +51,7 @@ public: GLuint getBlueTextureID(); /// Loads a texture from the specified URL. - QSharedPointer getTexture(const QUrl& url, bool normalMap = false, bool dilatable = false, + NetworkTexturePointer getTexture(const QUrl& url, TextureType type = DEFAULT_TEXTURE, bool dilatable = false, const QByteArray& content = QByteArray()); /// Returns a pointer to the primary framebuffer object. This render target includes a depth component, and is @@ -121,24 +125,29 @@ class NetworkTexture : public Resource, public Texture { public: - NetworkTexture(const QUrl& url, bool normalMap, const QByteArray& content); + NetworkTexture(const QUrl& url, TextureType type, const QByteArray& content); /// Checks whether it "looks like" this texture is translucent /// (majority of pixels neither fully opaque or fully transparent). bool isTranslucent() const { return _translucent; } + /// Returns the lazily-computed average texture color. + const QColor& getAverageColor() const { return _averageColor; } + protected: virtual void downloadFinished(QNetworkReply* reply); Q_INVOKABLE void loadContent(const QByteArray& content); - Q_INVOKABLE void setImage(const QImage& image, bool translucent); + Q_INVOKABLE void setImage(const QImage& image, bool translucent, const QColor& averageColor); virtual void imageLoaded(const QImage& image); private: + TextureType _type; bool _translucent; + QColor _averageColor; }; /// Caches derived, dilated textures. 
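Two behaviors added to the texture cache are worth illustrating: the per-type placeholder color a NetworkTexture shows before its image arrives, and the average color ImageReader now computes while scanning pixels. Below is a standalone sketch of both, using a plain RGBA pixel vector in place of QImage; the placeholder values mirror OPAQUE_WHITE, OPAQUE_BLUE, OPAQUE_BLACK, and TRANSPARENT_WHITE above:

```cpp
// Sketch of the per-type placeholder color and the average-color scan.
#include <array>
#include <cstdint>
#include <iostream>
#include <vector>

enum TextureType { DEFAULT_TEXTURE, NORMAL_TEXTURE, SPECULAR_TEXTURE, SPLAT_TEXTURE };

// RGBA placeholder chosen the same way NetworkTexture's constructor does
std::array<uint8_t, 4> placeholderColor(TextureType type) {
    switch (type) {
        case NORMAL_TEXTURE:   return {0x80, 0x80, 0xFF, 0xFF}; // "flat" normal = opaque blue
        case SPECULAR_TEXTURE: return {0x00, 0x00, 0x00, 0xFF}; // no specularity = opaque black
        case SPLAT_TEXTURE:    return {0xFF, 0xFF, 0xFF, 0x00}; // transparent white
        default:               return {0xFF, 0xFF, 0xFF, 0xFF}; // opaque white
    }
}

// Average color over an RGBA8 pixel buffer, the same idea ImageReader::run() uses
std::array<int, 4> averageColor(const std::vector<std::array<uint8_t, 4>>& pixels) {
    long totals[4] = {0, 0, 0, 0};
    for (const auto& p : pixels) {
        for (int c = 0; c < 4; ++c) {
            totals[c] += p[c];
        }
    }
    int area = static_cast<int>(pixels.size());
    std::array<int, 4> average = {255, 255, 255, 255}; // fall back to white for empty images
    if (area > 0) {
        for (int c = 0; c < 4; ++c) {
            average[c] = static_cast<int>(totals[c] / area);
        }
    }
    return average;
}

int main() {
    std::vector<std::array<uint8_t, 4>> pixels = {
        {200, 100, 50, 255}, {100, 100, 50, 255}, {150, 100, 50, 128},
    };
    auto avg = averageColor(pixels);
    auto placeholder = placeholderColor(SPECULAR_TEXTURE);
    std::cout << "average r: " << avg[0] << ", placeholder r: " << int(placeholder[0]) << "\n";
    return 0;
}
```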
diff --git a/interface/src/scripting/LocationScriptingInterface.cpp b/interface/src/scripting/LocationScriptingInterface.cpp index 44ff94aa1f..9e68778942 100644 --- a/interface/src/scripting/LocationScriptingInterface.cpp +++ b/interface/src/scripting/LocationScriptingInterface.cpp @@ -20,6 +20,10 @@ LocationScriptingInterface* LocationScriptingInterface::getInstance() { return &sharedInstance; } +bool LocationScriptingInterface::isConnected() { + return NodeList::getInstance()->getDomainHandler().isConnected(); +} + QString LocationScriptingInterface::getHref() { return getProtocol() + "//" + getHostname() + getPathname(); } diff --git a/interface/src/scripting/LocationScriptingInterface.h b/interface/src/scripting/LocationScriptingInterface.h index 36b6d97561..20f63bceed 100644 --- a/interface/src/scripting/LocationScriptingInterface.h +++ b/interface/src/scripting/LocationScriptingInterface.h @@ -22,6 +22,7 @@ class LocationScriptingInterface : public QObject { Q_OBJECT + Q_PROPERTY(bool isConnected READ isConnected) Q_PROPERTY(QString href READ getHref) Q_PROPERTY(QString protocol READ getProtocol) Q_PROPERTY(QString hostname READ getHostname) @@ -30,6 +31,7 @@ class LocationScriptingInterface : public QObject { public: static LocationScriptingInterface* getInstance(); + bool isConnected(); QString getHref(); QString getProtocol() { return CUSTOM_URL_SCHEME; }; QString getPathname(); diff --git a/interface/src/scripting/WindowScriptingInterface.cpp b/interface/src/scripting/WindowScriptingInterface.cpp index ea0eeb0dd9..7a85fc7117 100644 --- a/interface/src/scripting/WindowScriptingInterface.cpp +++ b/interface/src/scripting/WindowScriptingInterface.cpp @@ -100,14 +100,52 @@ QScriptValue WindowScriptingInterface::showConfirm(const QString& message) { return QScriptValue(response == QMessageBox::Yes); } +void WindowScriptingInterface::chooseDirectory() { + QPushButton* button = reinterpret_cast(sender()); + + QString title = button->property("title").toString(); + QString path = button->property("path").toString(); + QRegExp displayAs = button->property("displayAs").toRegExp(); + QRegExp validateAs = button->property("validateAs").toRegExp(); + QString errorMessage = button->property("errorMessage").toString(); + + QString directory = QFileDialog::getExistingDirectory(button, title, path); + if (directory.isEmpty()) { + return; + } + + if (!validateAs.exactMatch(directory)) { + QMessageBox::warning(NULL, "Invalid Directory", errorMessage); + return; + } + + button->setProperty("path", directory); + + displayAs.indexIn(directory); + QString buttonText = displayAs.cap(1) != "" ? displayAs.cap(1) : "."; + button->setText(buttonText); +} + +QString WindowScriptingInterface::jsRegExp2QtRegExp(QString string) { + // Converts string representation of RegExp from JavaScript format to Qt format. + return string.mid(1, string.length() - 2) // No enclosing slashes. + .replace("\\/", "/"); // No escaping of forward slash. 
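jsRegExp2QtRegExp() above only has to do two things: drop the enclosing slashes of a JavaScript /pattern/ literal and turn each escaped "\/" back into "/". A standalone sketch of the same conversion, with std::string standing in for QString:

```cpp
// Sketch of converting a JS regexp literal into a bare pattern string.
#include <iostream>
#include <string>

std::string jsRegExpToPattern(const std::string& literal) {
    // strip the leading and trailing '/'
    std::string pattern = literal.substr(1, literal.length() - 2);
    // replace every escaped forward slash with a plain one
    std::string::size_type pos = 0;
    while ((pos = pattern.find("\\/", pos)) != std::string::npos) {
        pattern.replace(pos, 2, "/");
        pos += 1;
    }
    return pattern;
}

int main() {
    std::cout << jsRegExpToPattern("/^\\/home\\/(.*)$/") << "\n"; // prints: ^/home/(.*)$
    return 0;
}
```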
+} + /// Display a form layout with an edit box /// \param const QString& title title to display -/// \param const QScriptValue form to display (array containing labels and values) -/// \return QScriptValue result form (unchanged is dialog canceled) +/// \param const QScriptValue form to display as an array of objects: +/// - label, value +/// - label, directory, title, display regexp, validate regexp, error message +/// - button ("Cancel") +/// \return QScriptValue `true` if 'OK' was clicked, `false` otherwise QScriptValue WindowScriptingInterface::showForm(const QString& title, QScriptValue form) { + if (form.isArray() && form.property("length").toInt32() > 0) { QDialog* editDialog = new QDialog(Application::getInstance()->getWindow()); editDialog->setWindowTitle(title); + + bool cancelButton = false; QVBoxLayout* layout = new QVBoxLayout(); editDialog->setLayout(layout); @@ -127,44 +165,104 @@ QScriptValue WindowScriptingInterface::showForm(const QString& title, QScriptVal area->setWidget(container); QVector edits; + QVector directories; for (int i = 0; i < form.property("length").toInt32(); ++i) { QScriptValue item = form.property(i); - edits.push_back(new QLineEdit(item.property("value").toString())); - formLayout->addRow(item.property("label").toString(), edits.back()); + + if (item.property("button").toString() != "") { + cancelButton = cancelButton || item.property("button").toString().toLower() == "cancel"; + + } else if (item.property("directory").toString() != "") { + QString path = item.property("directory").toString(); + QString title = item.property("title").toString(); + if (title == "") { + title = "Choose Directory"; + } + QString displayAsString = item.property("displayAs").toString(); + QRegExp displayAs = QRegExp(displayAsString != "" ? jsRegExp2QtRegExp(displayAsString) : "^(.*)$"); + QString validateAsString = item.property("validateAs").toString(); + QRegExp validateAs = QRegExp(validateAsString != "" ? jsRegExp2QtRegExp(validateAsString) : ".*"); + QString errorMessage = item.property("errorMessage").toString(); + if (errorMessage == "") { + errorMessage = "Invalid directory"; + } + + QPushButton* directory = new QPushButton(displayAs.cap(1)); + directory->setProperty("title", title); + directory->setProperty("path", path); + directory->setProperty("displayAs", displayAs); + directory->setProperty("validateAs", validateAs); + directory->setProperty("errorMessage", errorMessage); + displayAs.indexIn(path); + directory->setText(displayAs.cap(1) != "" ? displayAs.cap(1) : "."); + + directory->setMinimumWidth(200); + directories.push_back(directory); + + formLayout->addRow(new QLabel(item.property("label").toString()), directory); + connect(directory, SIGNAL(clicked(bool)), SLOT(chooseDirectory())); + + } else { + QLineEdit* edit = new QLineEdit(item.property("value").toString()); + edit->setMinimumWidth(200); + edits.push_back(edit); + formLayout->addRow(new QLabel(item.property("label").toString()), edit); + } } - QDialogButtonBox* buttons = new QDialogButtonBox(QDialogButtonBox::Ok); + + QDialogButtonBox* buttons = new QDialogButtonBox( + QDialogButtonBox::Ok + | (cancelButton ? 
QDialogButtonBox::Cancel : QDialogButtonBox::NoButton) + ); connect(buttons, SIGNAL(accepted()), editDialog, SLOT(accept())); + connect(buttons, SIGNAL(rejected()), editDialog, SLOT(reject())); layout->addWidget(buttons); - if (editDialog->exec() == QDialog::Accepted) { + int result = editDialog->exec(); + if (result == QDialog::Accepted) { + int e = -1; + int d = -1; for (int i = 0; i < form.property("length").toInt32(); ++i) { QScriptValue item = form.property(i); QScriptValue value = item.property("value"); - bool ok = true; - if (value.isNumber()) { - value = edits.at(i)->text().toDouble(&ok); - } else if (value.isString()) { - value = edits.at(i)->text(); - } else if (value.isBool()) { - if (edits.at(i)->text() == "true") { - value = true; - } else if (edits.at(i)->text() == "false") { - value = false; - } else { - ok = false; - } - } - if (ok) { - item.setProperty("value", value); + + if (item.property("button").toString() != "") { + // Nothing to do + } else if (item.property("directory").toString() != "") { + d += 1; + value = directories.at(d)->property("path").toString(); + item.setProperty("directory", value); form.setProperty(i, item); + } else { + e += 1; + bool ok = true; + if (value.isNumber()) { + value = edits.at(e)->text().toDouble(&ok); + } else if (value.isString()) { + value = edits.at(e)->text(); + } else if (value.isBool()) { + if (edits.at(e)->text() == "true") { + value = true; + } else if (edits.at(e)->text() == "false") { + value = false; + } else { + ok = false; + } + } + if (ok) { + item.setProperty("value", value); + form.setProperty(i, item); + } } } } delete editDialog; + + return (result == QDialog::Accepted); } - return form; + return false; } /// Display a prompt with a text box @@ -197,7 +295,6 @@ QScriptValue WindowScriptingInterface::showBrowse(const QString& title, const QS // filename if the directory is valid. 
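The directory rows added to showForm() lean on two regular expressions: "validateAs" must exactly match the chosen path, and the first capture group of "displayAs" becomes the button label, falling back to ".". A small sketch of that pattern with std::regex standing in for QRegExp; the two example patterns are hypothetical, not ones the scripts actually pass:

```cpp
// Sketch of the validate/display regexp handling for directory rows.
#include <iostream>
#include <regex>
#include <string>

bool validateDirectory(const std::string& path, const std::regex& validateAs) {
    return std::regex_match(path, validateAs); // analogous to QRegExp::exactMatch()
}

std::string displayLabel(const std::string& path, const std::regex& displayAs) {
    std::smatch match;
    if (std::regex_search(path, match, displayAs) && match.size() > 1 && match[1].length() > 0) {
        return match[1].str();
    }
    return "."; // same fallback as the dialog code
}

int main() {
    std::regex validateAs(".*models.*");  // hypothetical "must contain models" rule
    std::regex displayAs("([^/]+)/?$");   // hypothetical "show the last path component"

    std::string path = "/home/user/hifi/models";
    std::cout << validateDirectory(path, validateAs) << " "
              << displayLabel(path, displayAs) << "\n"; // prints: 1 models
    return 0;
}
```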
QString path = ""; QFileInfo fileInfo = QFileInfo(directory); - qDebug() << "File: " << directory << fileInfo.isFile(); if (fileInfo.isDir()) { fileInfo.setFile(directory, "__HIFI_INVALID_FILE__"); path = fileInfo.filePath(); @@ -205,7 +302,6 @@ QScriptValue WindowScriptingInterface::showBrowse(const QString& title, const QS QFileDialog fileDialog(Application::getInstance()->getWindow(), title, path, nameFilter); fileDialog.setAcceptMode(acceptMode); - qDebug() << "Opening!"; QUrl fileUrl(directory); if (acceptMode == QFileDialog::AcceptSave) { fileDialog.setFileMode(QFileDialog::Directory); diff --git a/interface/src/scripting/WindowScriptingInterface.h b/interface/src/scripting/WindowScriptingInterface.h index b04c927427..ec7e1b224e 100644 --- a/interface/src/scripting/WindowScriptingInterface.h +++ b/interface/src/scripting/WindowScriptingInterface.h @@ -42,9 +42,12 @@ private slots: QScriptValue showBrowse(const QString& title, const QString& directory, const QString& nameFilter, QFileDialog::AcceptMode acceptMode = QFileDialog::AcceptOpen); QScriptValue showS3Browse(const QString& nameFilter); + void chooseDirectory(); private: WindowScriptingInterface(); + + QString jsRegExp2QtRegExp(QString string); }; #endif // hifi_WindowScriptingInterface_h diff --git a/interface/src/ui/MetavoxelEditor.cpp b/interface/src/ui/MetavoxelEditor.cpp index b7057532fb..ce09e3657d 100644 --- a/interface/src/ui/MetavoxelEditor.cpp +++ b/interface/src/ui/MetavoxelEditor.cpp @@ -120,6 +120,7 @@ MetavoxelEditor::MetavoxelEditor() : addTool(new EraseHeightfieldTool(this)); addTool(new HeightfieldHeightBrushTool(this)); addTool(new HeightfieldColorBrushTool(this)); + addTool(new HeightfieldTextureBrushTool(this)); updateAttributes(); @@ -956,14 +957,29 @@ void ImportHeightfieldTool::apply() { HeightfieldBuffer* buffer = static_cast(bufferData.data()); MetavoxelData data; data.setSize(scale); - HeightfieldDataPointer heightPointer(new HeightfieldData(buffer->getUnextendedHeight())); + + QByteArray height = buffer->getUnextendedHeight(); + HeightfieldHeightDataPointer heightPointer(new HeightfieldHeightData(height)); data.setRoot(AttributeRegistry::getInstance()->getHeightfieldAttribute(), new MetavoxelNode(AttributeValue( AttributeRegistry::getInstance()->getHeightfieldAttribute(), encodeInline(heightPointer)))); - if (!buffer->getColor().isEmpty()) { - HeightfieldDataPointer colorPointer(new HeightfieldData(buffer->getUnextendedColor())); - data.setRoot(AttributeRegistry::getInstance()->getHeightfieldColorAttribute(), new MetavoxelNode(AttributeValue( - AttributeRegistry::getInstance()->getHeightfieldColorAttribute(), encodeInline(colorPointer)))); + + QByteArray color; + if (buffer->getColor().isEmpty()) { + const int WHITE_VALUE = 0xFF; + color = QByteArray(height.size() * HeightfieldData::COLOR_BYTES, WHITE_VALUE); + } else { + color = buffer->getUnextendedColor(); } + HeightfieldColorDataPointer colorPointer(new HeightfieldColorData(color)); + data.setRoot(AttributeRegistry::getInstance()->getHeightfieldColorAttribute(), new MetavoxelNode(AttributeValue( + AttributeRegistry::getInstance()->getHeightfieldColorAttribute(), encodeInline(colorPointer)))); + + int size = glm::sqrt(height.size()) + HeightfieldBuffer::SHARED_EDGE; + QByteArray texture(size * size, 0); + HeightfieldTextureDataPointer texturePointer(new HeightfieldTextureData(texture)); + data.setRoot(AttributeRegistry::getInstance()->getHeightfieldTextureAttribute(), new MetavoxelNode(AttributeValue( + 
AttributeRegistry::getInstance()->getHeightfieldTextureAttribute(), encodeInline(texturePointer)))); + MetavoxelEditMessage message = { QVariant::fromValue(SetDataEdit( _translation->getValue() + buffer->getTranslation() * scale, data)) }; Application::getInstance()->getMetavoxels()->applyEdit(message, true); @@ -1100,6 +1116,10 @@ HeightfieldBrushTool::HeightfieldBrushTool(MetavoxelEditor* editor, const QStrin _radius->setValue(1.0); } +bool HeightfieldBrushTool::appliesTo(const AttributePointer& attribute) const { + return attribute->inherits("HeightfieldAttribute"); +} + void HeightfieldBrushTool::render() { if (Application::getInstance()->isMouseHidden()) { return; @@ -1153,5 +1173,29 @@ HeightfieldColorBrushTool::HeightfieldColorBrushTool(MetavoxelEditor* editor) : } QVariant HeightfieldColorBrushTool::createEdit(bool alternate) { - return QVariant::fromValue(PaintHeightfieldColorEdit(_position, _radius->value(), _color->getColor())); + return QVariant::fromValue(PaintHeightfieldColorEdit(_position, _radius->value(), + alternate ? QColor() : _color->getColor())); +} + +HeightfieldTextureBrushTool::HeightfieldTextureBrushTool(MetavoxelEditor* editor) : + HeightfieldBrushTool(editor, "Texture Brush") { + + _form->addRow(_textureEditor = new SharedObjectEditor(&HeightfieldTexture::staticMetaObject, false)); + connect(_textureEditor, &SharedObjectEditor::objectChanged, this, &HeightfieldTextureBrushTool::updateTexture); +} + +QVariant HeightfieldTextureBrushTool::createEdit(bool alternate) { + if (alternate) { + return QVariant::fromValue(PaintHeightfieldTextureEdit(_position, _radius->value(), SharedObjectPointer(), QColor())); + } else { + SharedObjectPointer texture = _textureEditor->getObject(); + _textureEditor->detachObject(); + return QVariant::fromValue(PaintHeightfieldTextureEdit(_position, _radius->value(), texture, + _texture ? _texture->getAverageColor() : QColor())); + } +} + +void HeightfieldTextureBrushTool::updateTexture() { + HeightfieldTexture* texture = static_cast(_textureEditor->getObject().data()); + _texture = Application::getInstance()->getTextureCache()->getTexture(texture->getURL()); } diff --git a/interface/src/ui/MetavoxelEditor.h b/interface/src/ui/MetavoxelEditor.h index 87d95a6927..7e37b819d7 100644 --- a/interface/src/ui/MetavoxelEditor.h +++ b/interface/src/ui/MetavoxelEditor.h @@ -28,6 +28,7 @@ class QScrollArea; class QSpinBox; class MetavoxelTool; +class SharedObjectEditor; class Vec3Editor; /// Allows editing metavoxels. @@ -311,6 +312,8 @@ public: HeightfieldBrushTool(MetavoxelEditor* editor, const QString& name); + virtual bool appliesTo(const AttributePointer& attribute) const; + virtual void render(); virtual bool eventFilter(QObject* watched, QEvent* event); @@ -359,4 +362,26 @@ private: QColorEditor* _color; }; +/// Allows texturing parts of the heightfield. 
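The import tool above now always writes a color map and a texture-index map alongside the height map. Here is a standalone sketch of the buffer sizing involved; COLOR_BYTES = 3 and SHARED_EDGE = 1 are assumptions, since their definitions sit outside this diff:

```cpp
// Sketch of the default color and texture buffer sizing in the heightfield import tool.
#include <cmath>
#include <iostream>
#include <vector>

int main() {
    const int COLOR_BYTES = 3;   // assumed RGB bytes per heightfield color texel
    const int SHARED_EDGE = 1;   // assumed extra row/column shared with the neighbor block

    std::vector<unsigned char> height(64 * 64, 0);  // 64x64 height samples

    // default color: opaque white, one RGB triple per height sample
    std::vector<unsigned char> color(height.size() * COLOR_BYTES, 0xFF);

    // texture indices: square grid one sample wider than the height grid, all zero
    int size = static_cast<int>(std::sqrt(static_cast<float>(height.size()))) + SHARED_EDGE;
    std::vector<unsigned char> texture(size * size, 0);

    std::cout << "height bytes: " << height.size()
              << ", color bytes: " << color.size()
              << ", texture bytes: " << texture.size() << "\n"; // 4096, 12288, 4225
    return 0;
}
```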
+class HeightfieldTextureBrushTool : public HeightfieldBrushTool { + Q_OBJECT + +public: + + HeightfieldTextureBrushTool(MetavoxelEditor* editor); + +protected: + + virtual QVariant createEdit(bool alternate); + +private slots: + + void updateTexture(); + +private: + + SharedObjectEditor* _textureEditor; + QSharedPointer _texture; +}; + #endif // hifi_MetavoxelEditor_h diff --git a/interface/src/ui/PreferencesDialog.cpp b/interface/src/ui/PreferencesDialog.cpp index 4ebd5f4c1a..c585b6ba0c 100644 --- a/interface/src/ui/PreferencesDialog.cpp +++ b/interface/src/ui/PreferencesDialog.cpp @@ -149,9 +149,16 @@ void PreferencesDialog::loadPreferences() { ui.faceshiftEyeDeflectionSider->setValue(menuInstance->getFaceshiftEyeDeflection() * ui.faceshiftEyeDeflectionSider->maximum()); - ui.audioJitterSpin->setValue(menuInstance->getAudioJitterBufferFrames()); + const InboundAudioStream::Settings& streamSettings = menuInstance->getReceivedAudioStreamSettings(); - ui.maxFramesOverDesiredSpin->setValue(menuInstance->getMaxFramesOverDesired()); + ui.dynamicJitterBuffersCheckBox->setChecked(streamSettings._dynamicJitterBuffers); + ui.staticDesiredJitterBufferFramesSpin->setValue(streamSettings._staticDesiredJitterBufferFrames); + ui.maxFramesOverDesiredSpin->setValue(streamSettings._maxFramesOverDesired); + ui.useStdevForJitterCalcCheckBox->setChecked(streamSettings._useStDevForJitterCalc); + ui.windowStarveThresholdSpin->setValue(streamSettings._windowStarveThreshold); + ui.windowSecondsForDesiredCalcOnTooManyStarvesSpin->setValue(streamSettings._windowSecondsForDesiredCalcOnTooManyStarves); + ui.windowSecondsForDesiredReductionSpin->setValue(streamSettings._windowSecondsForDesiredReduction); + ui.repetitionWithFadeCheckBox->setChecked(streamSettings._repetitionWithFade); ui.realWorldFieldOfViewSpin->setValue(menuInstance->getRealWorldFieldOfView()); @@ -241,16 +248,18 @@ void PreferencesDialog::savePreferences() { Menu::getInstance()->setInvertSixenseButtons(ui.invertSixenseButtonsCheckBox->isChecked()); - Menu::getInstance()->setAudioJitterBufferFrames(ui.audioJitterSpin->value()); - if (Menu::getInstance()->getAudioJitterBufferFrames() != 0) { - Application::getInstance()->getAudio()->setDynamicJitterBuffers(false); - Application::getInstance()->getAudio()->setStaticDesiredJitterBufferFrames(Menu::getInstance()->getAudioJitterBufferFrames()); - } else { - Application::getInstance()->getAudio()->setDynamicJitterBuffers(true); - } + InboundAudioStream::Settings streamSettings; + streamSettings._dynamicJitterBuffers = ui.dynamicJitterBuffersCheckBox->isChecked(); + streamSettings._staticDesiredJitterBufferFrames = ui.staticDesiredJitterBufferFramesSpin->value(); + streamSettings._maxFramesOverDesired = ui.maxFramesOverDesiredSpin->value(); + streamSettings._useStDevForJitterCalc = ui.useStdevForJitterCalcCheckBox->isChecked(); + streamSettings._windowStarveThreshold = ui.windowStarveThresholdSpin->value(); + streamSettings._windowSecondsForDesiredCalcOnTooManyStarves = ui.windowSecondsForDesiredCalcOnTooManyStarvesSpin->value(); + streamSettings._windowSecondsForDesiredReduction = ui.windowSecondsForDesiredReductionSpin->value(); + streamSettings._repetitionWithFade = ui.repetitionWithFadeCheckBox->isChecked(); - Menu::getInstance()->setMaxFramesOverDesired(ui.maxFramesOverDesiredSpin->value()); - Application::getInstance()->getAudio()->setMaxFramesOverDesired(Menu::getInstance()->getMaxFramesOverDesired()); + Menu::getInstance()->setReceivedAudioStreamSettings(streamSettings); + 
Application::getInstance()->getAudio()->setReceivedAudioStreamSettings(streamSettings); Application::getInstance()->resizeGL(Application::getInstance()->getGLWidget()->width(), Application::getInstance()->getGLWidget()->height()); diff --git a/interface/src/ui/Stats.cpp b/interface/src/ui/Stats.cpp index c4e7d6ff30..58a93fa0ae 100644 --- a/interface/src/ui/Stats.cpp +++ b/interface/src/ui/Stats.cpp @@ -282,19 +282,10 @@ void Stats::display( pingVoxel = totalPingVoxel/voxelServerCount; } - - Audio* audio = Application::getInstance()->getAudio(); - lines = _expanded ? 4 : 3; drawBackground(backgroundColor, horizontalOffset, 0, _pingStatsWidth, lines * STATS_PELS_PER_LINE + 10); horizontalOffset += 5; - char audioJitter[30]; - sprintf(audioJitter, - "Buffer msecs %.1f", - audio->getDesiredJitterBufferFrames() * BUFFER_SEND_INTERVAL_USECS / (float)USECS_PER_MSEC); - drawText(30, glWidget->height() - 22, scale, rotation, font, audioJitter, color); - char audioPing[30]; sprintf(audioPing, "Audio ping: %d", pingAudio); @@ -698,27 +689,6 @@ void Stats::display( drawText(horizontalOffset, verticalOffset, 0.10f, 0.f, 2.f, reflectionsStatus, color); } - - // draw local light stats - QVector localLights = Application::getInstance()->getAvatarManager().getLocalLights(); - verticalOffset = 400; - horizontalOffset = 20; - - char buffer[128]; - for (int i = 0; i < localLights.size(); i++) { - glm::vec3 lightDirection = localLights.at(i).direction; - snprintf(buffer, sizeof(buffer), "Light %d direction (%.2f, %.2f, %.2f)", i, lightDirection.x, lightDirection.y, lightDirection.z); - drawText(horizontalOffset, verticalOffset, scale, rotation, font, buffer, color); - - verticalOffset += STATS_PELS_PER_LINE; - - glm::vec3 lightColor = localLights.at(i).color; - snprintf(buffer, sizeof(buffer), "Light %d color (%.2f, %.2f, %.2f)", i, lightColor.x, lightColor.y, lightColor.z); - drawText(horizontalOffset, verticalOffset, scale, rotation, font, buffer, color); - - verticalOffset += STATS_PELS_PER_LINE; - } - } diff --git a/interface/ui/preferencesDialog.ui b/interface/ui/preferencesDialog.ui index 566c24e4e3..e35c66af5a 100644 --- a/interface/ui/preferencesDialog.ui +++ b/interface/ui/preferencesDialog.ui @@ -1464,6 +1464,97 @@ padding: 10px;margin-top:10px + + + + + + 0 + + + 10 + + + 0 + + + 10 + + + + + + Arial + + + + color: rgb(51, 51, 51) + + + Enable Dynamic Jitter Buffers + + + 15 + + + dynamicJitterBuffersCheckBox + + + + + + + + Arial + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + + 0 + 0 + + + + + 32 + 0 + + + + + 0 + 0 + + + + + + + + 32 + 32 + + + + + + + + + @@ -1489,13 +1580,13 @@ padding: 10px;margin-top:10px color: rgb(51, 51, 51) - Audio Jitter Buffer Frames (0 for automatic) + Static Jitter Buffer Frames 15 - audioJitterSpin + staticDesiredJitterBufferFramesSpin @@ -1518,7 +1609,7 @@ padding: 10px;margin-top:10px - + 0 @@ -1555,6 +1646,7 @@ padding: 10px;margin-top:10px + @@ -1591,7 +1683,7 @@ padding: 10px;margin-top:10px - + Arial @@ -1646,7 +1738,467 @@ padding: 10px;margin-top:10px - + + + + + + + 0 + + + 10 + + + 0 + + + 10 + + + + + + Arial + + + + color: rgb(51, 51, 51) + + + Use Stdev for Dynamic Jitter Calc + + + 15 + + + useStdevForJitterCalcCheckBox + + + + + + + + Arial + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + + 0 + 0 + + + + + 32 + 0 + + + + + 0 + 0 + + + + + + + + 32 + 32 + + + + + + + + + + + + + + 0 + + + 10 + + + 0 + + + 10 + + + + + + Arial + + + + color: rgb(51, 51, 51) + + + Window A Starve Threshold + + + 15 + + + windowStarveThresholdSpin + + + + + + 
+ + Arial + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + + 0 + 0 + + + + + 95 + 36 + + + + + 70 + 16777215 + + + + + Arial + + + + 0 + + + 10000 + + + 1 + + + + + + + + + + + + 0 + + + 10 + + + 0 + + + 10 + + + + + + Arial + + + + color: rgb(51, 51, 51) + + + Window A (raise desired on N starves) Seconds + + + 15 + + + windowSecondsForDesiredCalcOnTooManyStarvesSpin + + + + + + + + Arial + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + + 0 + 0 + + + + + 95 + 36 + + + + + 70 + 16777215 + + + + + Arial + + + + 0 + + + 10000 + + + 1 + + + + + + + + + + + + 0 + + + 10 + + + 0 + + + 10 + + + + + + Arial + + + + color: rgb(51, 51, 51) + + + Window B (desired ceiling) Seconds + + + 15 + + + windowSecondsForDesiredReductionSpin + + + + + + + + Arial + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + + 0 + 0 + + + + + 95 + 36 + + + + + 70 + 16777215 + + + + + Arial + + + + 0 + + + 10000 + + + 1 + + + + + + + + + + + + 0 + + + 10 + + + 0 + + + 10 + + + + + + Arial + + + + color: rgb(51, 51, 51) + + + Repetition with Fade + + + 15 + + + repetitionWithFadeCheckBox + + + + + + + + Arial + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + + 0 + 0 + + + + + 32 + 0 + + + + + 0 + 0 + + + + + + + + 32 + 32 + + + + + + diff --git a/libraries/audio/src/AudioFilter.cpp b/libraries/audio/src/AudioFilter.cpp deleted file mode 100644 index 28e7716578..0000000000 --- a/libraries/audio/src/AudioFilter.cpp +++ /dev/null @@ -1,26 +0,0 @@ -// -// AudioFilter.cpp -// hifi -// -// Created by Craig Hansen-Sturm on 8/10/14. -// Copyright 2014 High Fidelity, Inc. -// -// Distributed under the Apache License, Version 2.0. -// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html -// - -#include -#include -#include -#include "AudioRingBuffer.h" -#include "AudioFilter.h" - -template<> -AudioFilterPEQ3::FilterParameter AudioFilterPEQ3::_profiles[ AudioFilterPEQ3::_profileCount ][ AudioFilterPEQ3::_filterCount ] = { - - // Freq Gain Q Freq Gain Q Freq Gain Q - { { 300.0f, 1.0f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 1.0f, 1.0f } }, // flat response (default) - { { 300.0f, 1.0f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 0.1f, 1.0f } }, // treble cut - { { 300.0f, 0.1f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 1.0f, 1.0f } }, // bass cut - { { 300.0f, 1.5f, 0.71f }, { 1000.0f, 0.5f, 1.0f }, { 4000.0f, 1.50f, 0.71f } } // smiley curve -}; diff --git a/libraries/audio/src/AudioFilter.h b/libraries/audio/src/AudioFilter.h index 0f3ec06f64..c2de3860db 100644 --- a/libraries/audio/src/AudioFilter.h +++ b/libraries/audio/src/AudioFilter.h @@ -12,7 +12,7 @@ #ifndef hifi_AudioFilter_h #define hifi_AudioFilter_h -//////////////////////////////////////////////////////////////////////////////////////////// +// // Implements a standard biquad filter in "Direct Form 1" // Reference http://www.musicdsp.org/files/Audio-EQ-Cookbook.txt // @@ -51,15 +51,15 @@ public: // // public interface // - void setParameters( const float a0, const float a1, const float a2, const float b1, const float b2 ) { + void setParameters(const float a0, const float a1, const float a2, const float b1, const float b2) { _a0 = a0; _a1 = a1; _a2 = a2; _b1 = b1; _b2 = b2; } - void getParameters( float& a0, float& a1, float& a2, float& b1, float& b2 ) { + void getParameters(float& a0, float& a1, float& a2, float& b1, float& b2) { a0 = _a0; a1 = _a1; a2 = _a2; b1 = _b1; b2 = _b2; } - void render( const float* in, float* out, const int frames) { + void render(const float* in, float* out, const int frames) { float 
x; float y; @@ -90,209 +90,223 @@ public: } }; -//////////////////////////////////////////////////////////////////////////////////////////// -// Implements a single-band parametric EQ using a biquad "peaking EQ" configuration -// -// gain > 1.0 boosts the center frequency -// gain < 1.0 cuts the center frequency -// -class AudioParametricEQ { +// +// Implements common base class interface for all Audio Filter Objects +// +template< class T > +class AudioFilter { + +protected: + // - // private data + // data // AudioBiquad _kernel; float _sampleRate; float _frequency; float _gain; float _slope; - + + // // helpers + // void updateKernel() { - - /* - a0 = 1 + alpha*A - a1 = -2*cos(w0) - a2 = 1 - alpha*A - b1 = -2*cos(w0) - b2 = 1 - alpha/A - */ - - const float a = _gain; - const float omega = TWO_PI * _frequency / _sampleRate; - const float alpha = 0.5f * sinf(omega) / _slope; - const float gamma = 1.0f / ( 1.0f + (alpha/a) ); - - const float a0 = 1.0f + (alpha*a); - const float a1 = -2.0f * cosf(omega); - const float a2 = 1.0f - (alpha*a); - const float b1 = a1; - const float b2 = 1.0f - (alpha/a); - - _kernel.setParameters( a0*gamma,a1*gamma,a2*gamma,b1*gamma,b2*gamma ); + static_cast(this)->updateKernel(); } - + public: // // ctor/dtor // - AudioParametricEQ() { - + AudioFilter() { setParameters(0.,0.,0.,0.); - updateKernel(); } - - ~AudioParametricEQ() { + + ~AudioFilter() { } - + // // public interface // - void setParameters( const float sampleRate, const float frequency, const float gain, const float slope ) { - - _sampleRate = std::max(sampleRate,1.0f); - _frequency = std::max(frequency,2.0f); - _gain = std::max(gain,0.0f); - _slope = std::max(slope,0.00001f); - + void setParameters(const float sampleRate, const float frequency, const float gain, const float slope) { + + _sampleRate = std::max(sampleRate, 1.0f); + _frequency = std::max(frequency, 2.0f); + _gain = std::max(gain, 0.0f); + _slope = std::max(slope, 0.00001f); + updateKernel(); } - - void getParameters( float& sampleRate, float& frequency, float& gain, float& slope ) { + + void getParameters(float& sampleRate, float& frequency, float& gain, float& slope) { sampleRate = _sampleRate; frequency = _frequency; gain = _gain; slope = _slope; } - - void render(const float* in, float* out, const int frames ) { + + void render(const float* in, float* out, const int frames) { _kernel.render(in,out,frames); } - + void reset() { _kernel.reset(); } }; -//////////////////////////////////////////////////////////////////////////////////////////// -// Helper/convenience class that implements a bank of EQ objects // -template< typename T, const int N> -class AudioFilterBank { - - // - // types - // - struct FilterParameter { - float _p1; - float _p2; - float _p3; - }; - - // - // private static data - // - static const int _filterCount = N; - static const int _profileCount = 4; - - static FilterParameter _profiles[_profileCount][_filterCount]; - - // - // private data - // - T _filters[ _filterCount ]; - float* _buffer; - float _sampleRate; - uint16_t _frameCount; - +// Implements a low-shelf filter using a biquad +// +class AudioFilterLSF : public AudioFilter< AudioFilterLSF > +{ public: - + // - // ctor/dtor + // helpers // - AudioFilterBank() - : _buffer(NULL) - , _sampleRate(0.) 
- , _frameCount(0) { - } - - ~AudioFilterBank() { - finalize(); - } - - // - // public interface - // - void initialize( const float sampleRate, const int frameCount ) { - finalize(); + void updateKernel() { - _buffer = (float*)malloc( frameCount * sizeof(float) ); - if(!_buffer) { - return; - } + const float a = _gain; + const float aAdd1 = a + 1.0f; + const float aSub1 = a - 1.0f; + const float omega = TWO_PI * _frequency / _sampleRate; + const float aAdd1TimesCosOmega = aAdd1 * cosf(omega); + const float aSub1TimesCosOmega = aSub1 * cosf(omega); + const float alpha = 0.5f * sinf(omega) / _slope; + const float zeta = 2.0f * sqrtf(a) * alpha; + /* + b0 = A*( (A+1) - (A-1)*cos(w0) + 2*sqrt(A)*alpha ) + b1 = 2*A*( (A-1) - (A+1)*cos(w0) ) + b2 = A*( (A+1) - (A-1)*cos(w0) - 2*sqrt(A)*alpha ) + a0 = (A+1) + (A-1)*cos(w0) + 2*sqrt(A)*alpha + a1 = -2*( (A-1) + (A+1)*cos(w0) ) + a2 = (A+1) + (A-1)*cos(w0) - 2*sqrt(A)*alpha + */ + const float b0 = +1.0f * (aAdd1 - aSub1TimesCosOmega + zeta) * a; + const float b1 = +2.0f * (aSub1 - aAdd1TimesCosOmega + ZERO) * a; + const float b2 = +1.0f * (aAdd1 - aSub1TimesCosOmega - zeta) * a; + const float a0 = +1.0f * (aAdd1 + aSub1TimesCosOmega + zeta); + const float a1 = -2.0f * (aSub1 + aAdd1TimesCosOmega + ZERO); + const float a2 = +1.0f * (aAdd1 + aSub1TimesCosOmega - zeta); - _sampleRate = sampleRate; - _frameCount = frameCount; - - reset(); - loadProfile(0); // load default profile "flat response" into the bank (see AudioFilter.cpp) + const float normA0 = 1.0f / a0; + + _kernel.setParameters(b0 * normA0, b1 * normA0 , b2 * normA0, a1 * normA0, a2 * normA0); } - - void finalize() { - if (_buffer ) { - free (_buffer); - _buffer = NULL; - } - } - - void loadProfile( int profileIndex ) { - if (profileIndex >= 0 && profileIndex < _profileCount) { - - for (int i = 0; i < _filterCount; ++i) { - FilterParameter p = _profiles[profileIndex][i]; - - _filters[i].setParameters(_sampleRate,p._p1,p._p2,p._p3); - } - } - } - - void render( const float* in, float* out, const int frameCount ) { - for (int i = 0; i < _filterCount; ++i) { - _filters[i].render( in, out, frameCount ); - } - } - - void render( const int16_t* in, int16_t* out, const int frameCount ) { - if (!_buffer || ( frameCount > _frameCount )) - return; - - const int scale = (2 << ((8*sizeof(int16_t))-1)); - - // convert int16_t to float32 (normalized to -1. ... 1.) - for (int i = 0; i < frameCount; ++i) { - _buffer[i] = ((float)(*in++)) / scale; - } - // for this filter, we share input/output buffers at each stage, but our design does not mandate this - render( _buffer, _buffer, frameCount ); - - // convert float32 to int16_t - for (int i = 0; i < frameCount; ++i) { - *out++ = (int16_t)(_buffer[i] * scale); - } - } - - void reset() { - for (int i = 0; i < _filterCount; ++i ) { - _filters[i].reset(); - } - } - }; -//////////////////////////////////////////////////////////////////////////////////////////// -// Specializations of AudioFilterBank // -typedef AudioFilterBank< AudioParametricEQ, 1> AudioFilterPEQ1; // bank with one band of PEQ -typedef AudioFilterBank< AudioParametricEQ, 2> AudioFilterPEQ2; // bank with two bands of PEQ -typedef AudioFilterBank< AudioParametricEQ, 3> AudioFilterPEQ3; // bank with three bands of PEQ -// etc.... 
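// A minimal standalone sketch of the low-shelf coefficient math above, shown only to make the
// quoted cookbook formulas easier to follow. The struct and function names here are illustrative
// (they do not exist in the codebase); the formulas and the final a0 normalization mirror what
// AudioFilterLSF::updateKernel() passes to AudioBiquad::setParameters().
#include <cmath>

struct BiquadCoefficients { float b0, b1, b2, a1, a2; };

BiquadCoefficients computeLowShelfCoefficients(float sampleRate, float frequency, float gain, float slope) {
    const float a     = gain;                                  // "A" in the cookbook formulas
    const float omega = 6.2831853f * frequency / sampleRate;   // TWO_PI * f / fs
    const float cosW  = std::cos(omega);
    const float alpha = 0.5f * std::sin(omega) / slope;
    const float zeta  = 2.0f * std::sqrt(a) * alpha;

    const float b0 =         ((a + 1.0f) - (a - 1.0f) * cosW + zeta) * a;
    const float b1 =  2.0f * ((a - 1.0f) - (a + 1.0f) * cosW)        * a;
    const float b2 =         ((a + 1.0f) - (a - 1.0f) * cosW - zeta) * a;
    const float a0 =          (a + 1.0f) + (a - 1.0f) * cosW + zeta;
    const float a1 = -2.0f * ((a - 1.0f) + (a + 1.0f) * cosW);
    const float a2 =          (a + 1.0f) + (a - 1.0f) * cosW - zeta;

    // normalize so a0 == 1 before the coefficients are handed to the biquad
    const float normA0 = 1.0f / a0;
    BiquadCoefficients c;
    c.b0 = b0 * normA0; c.b1 = b1 * normA0; c.b2 = b2 * normA0;
    c.a1 = a1 * normA0; c.a2 = a2 * normA0;
    return c;
}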
+// Implements a hi-shelf filter using a biquad +// +class AudioFilterHSF : public AudioFilter< AudioFilterHSF > +{ +public: + + // + // helpers + // + void updateKernel() { + + const float a = _gain; + const float aAdd1 = a + 1.0f; + const float aSub1 = a - 1.0f; + const float omega = TWO_PI * _frequency / _sampleRate; + const float aAdd1TimesCosOmega = aAdd1 * cosf(omega); + const float aSub1TimesCosOmega = aSub1 * cosf(omega); + const float alpha = 0.5f * sinf(omega) / _slope; + const float zeta = 2.0f * sqrtf(a) * alpha; + /* + b0 = A*( (A+1) + (A-1)*cos(w0) + 2*sqrt(A)*alpha ) + b1 = -2*A*( (A-1) + (A+1)*cos(w0) ) + b2 = A*( (A+1) + (A-1)*cos(w0) - 2*sqrt(A)*alpha ) + a0 = (A+1) - (A-1)*cos(w0) + 2*sqrt(A)*alpha + a1 = 2*( (A-1) - (A+1)*cos(w0) ) + a2 = (A+1) - (A-1)*cos(w0) - 2*sqrt(A)*alpha + */ + const float b0 = +1.0f * (aAdd1 + aSub1TimesCosOmega + zeta) * a; + const float b1 = -2.0f * (aSub1 + aAdd1TimesCosOmega + ZERO) * a; + const float b2 = +1.0f * (aAdd1 + aSub1TimesCosOmega - zeta) * a; + const float a0 = +1.0f * (aAdd1 - aSub1TimesCosOmega + zeta); + const float a1 = +2.0f * (aSub1 - aAdd1TimesCosOmega + ZERO); + const float a2 = +1.0f * (aAdd1 - aSub1TimesCosOmega - zeta); + + const float normA0 = 1.0f / a0; + + _kernel.setParameters(b0 * normA0, b1 * normA0 , b2 * normA0, a1 * normA0, a2 * normA0); + } +}; +// +// Implements a all-pass filter using a biquad +// +class AudioFilterALL : public AudioFilter< AudioFilterALL > +{ +public: + + // + // helpers + // + void updateKernel() { + + const float omega = TWO_PI * _frequency / _sampleRate; + const float cosOmega = cosf(omega); + const float alpha = 0.5f * sinf(omega) / _slope; + /* + b0 = 1 - alpha + b1 = -2*cos(w0) + b2 = 1 + alpha + a0 = 1 + alpha + a1 = -2*cos(w0) + a2 = 1 - alpha + */ + const float b0 = +1.0f - alpha; + const float b1 = -2.0f * cosOmega; + const float b2 = +1.0f + alpha; + const float a0 = +1.0f + alpha; + const float a1 = -2.0f * cosOmega; + const float a2 = +1.0f - alpha; + + const float normA0 = 1.0f / a0; + + _kernel.setParameters(b0 * normA0, b1 * normA0 , b2 * normA0, a1 * normA0, a2 * normA0); + } +}; + +// +// Implements a single-band parametric EQ using a biquad "peaking EQ" configuration +// +class AudioFilterPEQ : public AudioFilter< AudioFilterPEQ > +{ +public: + + // + // helpers + // + void updateKernel() { + + const float a = _gain; + const float omega = TWO_PI * _frequency / _sampleRate; + const float cosOmega = cosf(omega); + const float alpha = 0.5f * sinf(omega) / _slope; + const float alphaMulA = alpha * a; + const float alphaDivA = alpha / a; + /* + b0 = 1 + alpha*A + b1 = -2*cos(w0) + b2 = 1 - alpha*A + a0 = 1 + alpha/A + a1 = -2*cos(w0) + a2 = 1 - alpha/A + */ + const float b0 = +1.0f + alphaMulA; + const float b1 = -2.0f * cosOmega; + const float b2 = +1.0f - alphaMulA; + const float a0 = +1.0f + alphaDivA; + const float a1 = -2.0f * cosOmega; + const float a2 = +1.0f - alphaDivA; + + const float normA0 = 1.0f / a0; + + _kernel.setParameters(b0 * normA0, b1 * normA0 , b2 * normA0, a1 * normA0, a2 * normA0); + } +}; #endif // hifi_AudioFilter_h diff --git a/libraries/audio/src/AudioFilterBank.cpp b/libraries/audio/src/AudioFilterBank.cpp new file mode 100644 index 0000000000..a7b969540a --- /dev/null +++ b/libraries/audio/src/AudioFilterBank.cpp @@ -0,0 +1,48 @@ +// +// AudioFilterBank.cpp +// hifi +// +// Created by Craig Hansen-Sturm on 8/10/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. 
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +#include +#include +#include "AudioRingBuffer.h" +#include "AudioFilter.h" +#include "AudioFilterBank.h" + +template<> +AudioFilterLSF1s::FilterParameter +AudioFilterLSF1s::_profiles[ AudioFilterLSF1s::_profileCount ][ AudioFilterLSF1s::_filterCount ] = { + // Freq Gain Slope + { { 1000.0f, 1.0f, 1.0f } } // flat response (default) +}; + +template<> +AudioFilterHSF1s::FilterParameter +AudioFilterHSF1s::_profiles[ AudioFilterHSF1s::_profileCount ][ AudioFilterHSF1s::_filterCount ] = { + // Freq Gain Slope + { { 1000.0f, 1.0f, 1.0f } } // flat response (default) +}; + +template<> +AudioFilterPEQ1s::FilterParameter +AudioFilterPEQ1s::_profiles[ AudioFilterPEQ1s::_profileCount ][ AudioFilterPEQ1s::_filterCount ] = { + // Freq Gain Q + { { 1000.0f, 1.0f, 1.0f } } // flat response (default) +}; + +template<> +AudioFilterPEQ3m::FilterParameter +AudioFilterPEQ3m::_profiles[ AudioFilterPEQ3m::_profileCount ][ AudioFilterPEQ3m::_filterCount ] = { + + // Freq Gain Q Freq Gain Q Freq Gain Q + { { 300.0f, 1.0f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 1.0f, 1.0f } }, // flat response (default) + { { 300.0f, 1.0f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 0.1f, 1.0f } }, // treble cut + { { 300.0f, 0.1f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 1.0f, 1.0f } }, // bass cut + { { 300.0f, 1.5f, 0.71f }, { 1000.0f, 0.5f, 1.0f }, { 4000.0f, 1.50f, 0.71f } } // smiley curve +}; diff --git a/libraries/audio/src/AudioFilterBank.h b/libraries/audio/src/AudioFilterBank.h new file mode 100644 index 0000000000..c523736a57 --- /dev/null +++ b/libraries/audio/src/AudioFilterBank.h @@ -0,0 +1,170 @@ +// +// AudioFilterBank.h +// hifi +// +// Created by Craig Hansen-Sturm on 8/23/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. +// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +#ifndef hifi_AudioFilterBank_h +#define hifi_AudioFilterBank_h + +// +// Helper/convenience class that implements a bank of Filter objects +// +template< typename T, const int N, const int C > +class AudioFilterBank { + + // + // types + // + struct FilterParameter { + float _p1; + float _p2; + float _p3; + }; + + // + // private static data + // + static const int _filterCount = N; + static const int _channelCount = C; + static const int _profileCount = 4; + + static FilterParameter _profiles[ _profileCount ][ _filterCount ]; + + // + // private data + // + T _filters[ _filterCount ][ _channelCount ]; + float* _buffer[ _channelCount ]; + float _sampleRate; + uint16_t _frameCount; + +public: + + // + // ctor/dtor + // + AudioFilterBank() + : _sampleRate(0.) 
+ , _frameCount(0) { + for (int i = 0; i < _channelCount; ++i) { + _buffer[ i ] = NULL; + } + } + + ~AudioFilterBank() { + finalize(); + } + + // + // public interface + // + void initialize(const float sampleRate, const int frameCount) { + finalize(); + + for (int i = 0; i < _channelCount; ++i) { + _buffer[i] = (float*)malloc(frameCount * sizeof(float)); + } + + _sampleRate = sampleRate; + _frameCount = frameCount; + + reset(); + loadProfile(0); // load default profile "flat response" into the bank (see AudioFilterBank.cpp) + } + + void finalize() { + for (int i = 0; i < _channelCount; ++i) { + if (_buffer[i]) { + free (_buffer[i]); + _buffer[i] = NULL; + } + } + } + + void loadProfile(int profileIndex) { + if (profileIndex >= 0 && profileIndex < _profileCount) { + + for (int i = 0; i < _filterCount; ++i) { + FilterParameter p = _profiles[profileIndex][i]; + + for (int j = 0; j < _channelCount; ++j) { + _filters[i][j].setParameters(_sampleRate,p._p1,p._p2,p._p3); + } + } + } + } + + void setParameters(int filterStage, int filterChannel, const float sampleRate, const float frequency, const float gain, + const float slope) { + if (filterStage >= 0 && filterStage < _filterCount && filterChannel >= 0 && filterChannel < _channelCount) { + _filters[filterStage][filterChannel].setParameters(sampleRate,frequency,gain,slope); + } + } + + void getParameters(int filterStage, int filterChannel, float& sampleRate, float& frequency, float& gain, float& slope) { + if (filterStage >= 0 && filterStage < _filterCount && filterChannel >= 0 && filterChannel < _channelCount) { + _filters[filterStage][filterChannel].getParameters(sampleRate,frequency,gain,slope); + } + } + + void render(const int16_t* in, int16_t* out, const int frameCount) { + if (!_buffer || (frameCount > _frameCount)) + return; + + const int scale = (2 << ((8 * sizeof(int16_t)) - 1)); + + // de-interleave and convert int16_t to float32 (normalized to -1. ... 1.) 
+ for (int i = 0; i < frameCount; ++i) { + for (int j = 0; j < _channelCount; ++j) { + _buffer[j][i] = ((float)(*in++)) / scale; + } + } + + // now step through each filter + for (int i = 0; i < _channelCount; ++i) { + for (int j = 0; j < _filterCount; ++j) { + _filters[j][i].render( &_buffer[i][0], &_buffer[i][0], frameCount ); + } + } + + // convert float32 to int16_t and interleave + for (int i = 0; i < frameCount; ++i) { + for (int j = 0; j < _channelCount; ++j) { + *out++ = (int16_t)(_buffer[j][i] * scale); + } + } + } + + void reset() { + for (int i = 0; i < _filterCount; ++i) { + for (int j = 0; j < _channelCount; ++j) { + _filters[i][j].reset(); + } + } + } + +}; + +// +// Specializations of AudioFilterBank +// +typedef AudioFilterBank< AudioFilterLSF, 1, 1> AudioFilterLSF1m; // mono bank with one band of LSF +typedef AudioFilterBank< AudioFilterLSF, 1, 2> AudioFilterLSF1s; // stereo bank with one band of LSF +typedef AudioFilterBank< AudioFilterHSF, 1, 1> AudioFilterHSF1m; // mono bank with one band of HSF +typedef AudioFilterBank< AudioFilterHSF, 1, 2> AudioFilterHSF1s; // stereo bank with one band of HSF +typedef AudioFilterBank< AudioFilterPEQ, 1, 1> AudioFilterPEQ1m; // mono bank with one band of PEQ +typedef AudioFilterBank< AudioFilterPEQ, 2, 1> AudioFilterPEQ2m; // mono bank with two bands of PEQ +typedef AudioFilterBank< AudioFilterPEQ, 3, 1> AudioFilterPEQ3m; // mono bank with three bands of PEQ +typedef AudioFilterBank< AudioFilterPEQ, 1, 2> AudioFilterPEQ1s; // stereo bank with one band of PEQ +typedef AudioFilterBank< AudioFilterPEQ, 2, 2> AudioFilterPEQ2s; // stereo bank with two bands of PEQ +typedef AudioFilterBank< AudioFilterPEQ, 3, 2> AudioFilterPEQ3s; // stereo bank with three bands of PEQ +// etc.... + + +#endif // hifi_AudioFilter_h diff --git a/libraries/audio/src/AudioInjector.cpp b/libraries/audio/src/AudioInjector.cpp index e5c1230832..88251808a9 100644 --- a/libraries/audio/src/AudioInjector.cpp +++ b/libraries/audio/src/AudioInjector.cpp @@ -27,7 +27,6 @@ AudioInjector::AudioInjector(QObject* parent) : _options(), _shouldStop(false) { - } AudioInjector::AudioInjector(Sound* sound, const AudioInjectorOptions& injectorOptions) : @@ -35,7 +34,10 @@ AudioInjector::AudioInjector(Sound* sound, const AudioInjectorOptions& injectorO _options(injectorOptions), _shouldStop(false) { - +} + +void AudioInjector::setOptions(AudioInjectorOptions& options) { + _options = options; } const uchar MAX_INJECTOR_VOLUME = 0xFF; @@ -55,8 +57,6 @@ void AudioInjector::injectAudio() { } - NodeList* nodeList = NodeList::getInstance(); - // setup the packet for injected audio QByteArray injectAudioPacket = byteArrayWithPopulatedHeader(PacketTypeInjectAudio); QDataStream packetStream(&injectAudioPacket, QIODevice::Append); @@ -73,9 +73,11 @@ void AudioInjector::injectAudio() { packetStream << loopbackFlag; // pack the position for injected audio + int positionOptionOffset = injectAudioPacket.size(); packetStream.writeRawData(reinterpret_cast(&_options.getPosition()), sizeof(_options.getPosition())); // pack our orientation for injected audio + int orientationOptionOffset = injectAudioPacket.size(); packetStream.writeRawData(reinterpret_cast(&_options.getOrientation()), sizeof(_options.getOrientation())); // pack zero for radius @@ -101,6 +103,12 @@ void AudioInjector::injectAudio() { int bytesToCopy = std::min(NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL, soundByteArray.size() - currentSendPosition); + memcpy(injectAudioPacket.data() + positionOptionOffset, + &_options.getPosition(), + 
sizeof(_options.getPosition())); + memcpy(injectAudioPacket.data() + orientationOptionOffset, + &_options.getOrientation(), + sizeof(_options.getOrientation())); // resize the QByteArray to the right size injectAudioPacket.resize(numPreAudioDataBytes + bytesToCopy); @@ -112,6 +120,7 @@ void AudioInjector::injectAudio() { memcpy(injectAudioPacket.data() + numPreAudioDataBytes, soundByteArray.data() + currentSendPosition, bytesToCopy); // grab our audio mixer from the NodeList, if it exists + NodeList* nodeList = NodeList::getInstance(); SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer); // send off this audio packet diff --git a/libraries/audio/src/AudioInjector.h b/libraries/audio/src/AudioInjector.h index 08fe544255..966a4dd1cf 100644 --- a/libraries/audio/src/AudioInjector.h +++ b/libraries/audio/src/AudioInjector.h @@ -29,6 +29,7 @@ public: public slots: void injectAudio(); void stop() { _shouldStop = true; } + void setOptions(AudioInjectorOptions& options); signals: void finished(); private: diff --git a/libraries/audio/src/AudioInjectorOptions.cpp b/libraries/audio/src/AudioInjectorOptions.cpp index 49f1571c98..01aa43a0cd 100644 --- a/libraries/audio/src/AudioInjectorOptions.cpp +++ b/libraries/audio/src/AudioInjectorOptions.cpp @@ -19,7 +19,6 @@ AudioInjectorOptions::AudioInjectorOptions(QObject* parent) : _orientation(glm::vec3(0.0f, 0.0f, 0.0f)), _loopbackAudioInterface(NULL) { - } AudioInjectorOptions::AudioInjectorOptions(const AudioInjectorOptions& other) { @@ -29,3 +28,11 @@ AudioInjectorOptions::AudioInjectorOptions(const AudioInjectorOptions& other) { _orientation = other._orientation; _loopbackAudioInterface = other._loopbackAudioInterface; } + +void AudioInjectorOptions::operator=(const AudioInjectorOptions& other) { + _position = other._position; + _volume = other._volume; + _loop = other._loop; + _orientation = other._orientation; + _loopbackAudioInterface = other._loopbackAudioInterface; +} \ No newline at end of file diff --git a/libraries/audio/src/AudioInjectorOptions.h b/libraries/audio/src/AudioInjectorOptions.h index b90deb93f1..64936e4bc9 100644 --- a/libraries/audio/src/AudioInjectorOptions.h +++ b/libraries/audio/src/AudioInjectorOptions.h @@ -30,6 +30,7 @@ class AudioInjectorOptions : public QObject { public: AudioInjectorOptions(QObject* parent = 0); AudioInjectorOptions(const AudioInjectorOptions& other); + void operator=(const AudioInjectorOptions& other); const glm::vec3& getPosition() const { return _position; } void setPosition(const glm::vec3& position) { _position = position; } @@ -37,8 +38,8 @@ public: float getVolume() const { return _volume; } void setVolume(float volume) { _volume = volume; } - float getLoop() const { return _loop; } - void setLoop(float loop) { _loop = loop; } + bool getLoop() const { return _loop; } + void setLoop(bool loop) { _loop = loop; } const glm::quat& getOrientation() const { return _orientation; } void setOrientation(const glm::quat& orientation) { _orientation = orientation; } diff --git a/libraries/audio/src/AudioRingBuffer.cpp b/libraries/audio/src/AudioRingBuffer.cpp index cae663758d..89882bdb66 100644 --- a/libraries/audio/src/AudioRingBuffer.cpp +++ b/libraries/audio/src/AudioRingBuffer.cpp @@ -20,18 +20,16 @@ #include "AudioRingBuffer.h" AudioRingBuffer::AudioRingBuffer(int numFrameSamples, bool randomAccessMode, int numFramesCapacity) : - _frameCapacity(numFramesCapacity), - _sampleCapacity(numFrameSamples * numFramesCapacity), - _isFull(false), - _numFrameSamples(numFrameSamples), - 
_randomAccessMode(randomAccessMode), - _overflowCount(0) +_frameCapacity(numFramesCapacity), +_sampleCapacity(numFrameSamples * numFramesCapacity), +_bufferLength(numFrameSamples * (numFramesCapacity + 1)), +_numFrameSamples(numFrameSamples), +_randomAccessMode(randomAccessMode), +_overflowCount(0) { if (numFrameSamples) { - _buffer = new int16_t[_sampleCapacity]; - if (_randomAccessMode) { - memset(_buffer, 0, _sampleCapacity * sizeof(int16_t)); - } + _buffer = new int16_t[_bufferLength]; + memset(_buffer, 0, _bufferLength * sizeof(int16_t)); _nextOutput = _buffer; _endOfLastWrite = _buffer; } else { @@ -53,28 +51,29 @@ void AudioRingBuffer::reset() { void AudioRingBuffer::resizeForFrameSize(int numFrameSamples) { delete[] _buffer; _sampleCapacity = numFrameSamples * _frameCapacity; + _bufferLength = numFrameSamples * (_frameCapacity + 1); _numFrameSamples = numFrameSamples; - _buffer = new int16_t[_sampleCapacity]; + _buffer = new int16_t[_bufferLength]; + memset(_buffer, 0, _bufferLength * sizeof(int16_t)); if (_randomAccessMode) { - memset(_buffer, 0, _sampleCapacity * sizeof(int16_t)); + memset(_buffer, 0, _bufferLength * sizeof(int16_t)); } reset(); } void AudioRingBuffer::clear() { - _isFull = false; _endOfLastWrite = _buffer; _nextOutput = _buffer; } int AudioRingBuffer::readSamples(int16_t* destination, int maxSamples) { - return readData((char*) destination, maxSamples * sizeof(int16_t)); + return readData((char*)destination, maxSamples * sizeof(int16_t)) / sizeof(int16_t); } int AudioRingBuffer::readData(char *data, int maxSize) { // only copy up to the number of samples we have available - int numReadSamples = std::min((int) (maxSize / sizeof(int16_t)), samplesAvailable()); + int numReadSamples = std::min((int)(maxSize / sizeof(int16_t)), samplesAvailable()); // If we're in random access mode, then we consider our number of available read samples slightly // differently. Namely, if anything has been written, we say we have as many samples as they ask for @@ -83,16 +82,16 @@ int AudioRingBuffer::readData(char *data, int maxSize) { numReadSamples = _endOfLastWrite ? 
(maxSize / sizeof(int16_t)) : 0; } - if (_nextOutput + numReadSamples > _buffer + _sampleCapacity) { + if (_nextOutput + numReadSamples > _buffer + _bufferLength) { // we're going to need to do two reads to get this data, it wraps around the edge // read to the end of the buffer - int numSamplesToEnd = (_buffer + _sampleCapacity) - _nextOutput; + int numSamplesToEnd = (_buffer + _bufferLength) - _nextOutput; memcpy(data, _nextOutput, numSamplesToEnd * sizeof(int16_t)); if (_randomAccessMode) { memset(_nextOutput, 0, numSamplesToEnd * sizeof(int16_t)); // clear it } - + // read the rest from the beginning of the buffer memcpy(data + (numSamplesToEnd * sizeof(int16_t)), _buffer, (numReadSamples - numSamplesToEnd) * sizeof(int16_t)); if (_randomAccessMode) { @@ -108,22 +107,19 @@ int AudioRingBuffer::readData(char *data, int maxSize) { // push the position of _nextOutput by the number of samples read _nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, numReadSamples); - if (numReadSamples > 0) { - _isFull = false; - } return numReadSamples * sizeof(int16_t); } -int AudioRingBuffer::writeSamples(const int16_t* source, int maxSamples) { - return writeData((const char*) source, maxSamples * sizeof(int16_t)); +int AudioRingBuffer::writeSamples(const int16_t* source, int maxSamples) { + return writeData((const char*)source, maxSamples * sizeof(int16_t)) / sizeof(int16_t); } int AudioRingBuffer::writeData(const char* data, int maxSize) { // make sure we have enough bytes left for this to be the right amount of audio // otherwise we should not copy that data, and leave the buffer pointers where they are int samplesToCopy = std::min((int)(maxSize / sizeof(int16_t)), _sampleCapacity); - + int samplesRoomFor = _sampleCapacity - samplesAvailable(); if (samplesToCopy > samplesRoomFor) { // there's not enough room for this write. erase old data to make room for this new data @@ -132,19 +128,16 @@ int AudioRingBuffer::writeData(const char* data, int maxSize) { _overflowCount++; qDebug() << "Overflowed ring buffer! 
Overwriting old data"; } - - if (_endOfLastWrite + samplesToCopy <= _buffer + _sampleCapacity) { + + if (_endOfLastWrite + samplesToCopy <= _buffer + _bufferLength) { memcpy(_endOfLastWrite, data, samplesToCopy * sizeof(int16_t)); } else { - int numSamplesToEnd = (_buffer + _sampleCapacity) - _endOfLastWrite; + int numSamplesToEnd = (_buffer + _bufferLength) - _endOfLastWrite; memcpy(_endOfLastWrite, data, numSamplesToEnd * sizeof(int16_t)); memcpy(_buffer, data + (numSamplesToEnd * sizeof(int16_t)), (samplesToCopy - numSamplesToEnd) * sizeof(int16_t)); } _endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, samplesToCopy); - if (samplesToCopy > 0 && _endOfLastWrite == _nextOutput) { - _isFull = true; - } return samplesToCopy * sizeof(int16_t); } @@ -158,61 +151,52 @@ const int16_t& AudioRingBuffer::operator[] (const int index) const { } void AudioRingBuffer::shiftReadPosition(unsigned int numSamples) { - if (numSamples > 0) { - _nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, numSamples); - _isFull = false; - } + _nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, numSamples); } int AudioRingBuffer::samplesAvailable() const { if (!_endOfLastWrite) { return 0; } - if (_isFull) { - return _sampleCapacity; - } int sampleDifference = _endOfLastWrite - _nextOutput; if (sampleDifference < 0) { - sampleDifference += _sampleCapacity; + sampleDifference += _bufferLength; } return sampleDifference; } -int AudioRingBuffer::addSilentFrame(int numSilentSamples) { +int AudioRingBuffer::addSilentSamples(int silentSamples) { int samplesRoomFor = _sampleCapacity - samplesAvailable(); - if (numSilentSamples > samplesRoomFor) { + if (silentSamples > samplesRoomFor) { // there's not enough room for this write. write as many silent samples as we have room for - numSilentSamples = samplesRoomFor; + silentSamples = samplesRoomFor; qDebug() << "Dropping some silent samples to prevent ring buffer overflow"; } // memset zeroes into the buffer, accomodate a wrap around the end // push the _endOfLastWrite to the correct spot - if (_endOfLastWrite + numSilentSamples <= _buffer + _sampleCapacity) { - memset(_endOfLastWrite, 0, numSilentSamples * sizeof(int16_t)); + if (_endOfLastWrite + silentSamples <= _buffer + _bufferLength) { + memset(_endOfLastWrite, 0, silentSamples * sizeof(int16_t)); } else { - int numSamplesToEnd = (_buffer + _sampleCapacity) - _endOfLastWrite; + int numSamplesToEnd = (_buffer + _bufferLength) - _endOfLastWrite; memset(_endOfLastWrite, 0, numSamplesToEnd * sizeof(int16_t)); - memset(_buffer, 0, (numSilentSamples - numSamplesToEnd) * sizeof(int16_t)); - } - _endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, numSilentSamples); - if (numSilentSamples > 0 && _nextOutput == _endOfLastWrite) { - _isFull = true; + memset(_buffer, 0, (silentSamples - numSamplesToEnd) * sizeof(int16_t)); } + _endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, silentSamples); - return numSilentSamples * sizeof(int16_t); + return silentSamples; } int16_t* AudioRingBuffer::shiftedPositionAccomodatingWrap(int16_t* position, int numSamplesShift) const { - if (numSamplesShift > 0 && position + numSamplesShift >= _buffer + _sampleCapacity) { + if (numSamplesShift > 0 && position + numSamplesShift >= _buffer + _bufferLength) { // this shift will wrap the position around to the beginning of the ring - return position + numSamplesShift - _sampleCapacity; + return position + numSamplesShift - _bufferLength; } else if (numSamplesShift < 0 && position + numSamplesShift < 
_buffer) { // this shift will go around to the end of the ring - return position + numSamplesShift + _sampleCapacity; + return position + numSamplesShift + _bufferLength; } else { return position + numSamplesShift; } @@ -221,7 +205,7 @@ int16_t* AudioRingBuffer::shiftedPositionAccomodatingWrap(int16_t* position, int float AudioRingBuffer::getFrameLoudness(const int16_t* frameStart) const { float loudness = 0.0f; const int16_t* sampleAt = frameStart; - const int16_t* _bufferLastAt = _buffer + _sampleCapacity - 1; + const int16_t* _bufferLastAt = _buffer + _bufferLength - 1; for (int i = 0; i < _numFrameSamples; ++i) { loudness += fabsf(*sampleAt); @@ -229,11 +213,14 @@ float AudioRingBuffer::getFrameLoudness(const int16_t* frameStart) const { } loudness /= _numFrameSamples; loudness /= MAX_SAMPLE_VALUE; - + return loudness; } float AudioRingBuffer::getFrameLoudness(ConstIterator frameStart) const { + if (frameStart.isNull()) { + return 0.0f; + } return getFrameLoudness(&(*frameStart)); } @@ -241,3 +228,44 @@ float AudioRingBuffer::getNextOutputFrameLoudness() const { return getFrameLoudness(_nextOutput); } +int AudioRingBuffer::writeSamples(ConstIterator source, int maxSamples) { + int samplesToCopy = std::min(maxSamples, _sampleCapacity); + int samplesRoomFor = _sampleCapacity - samplesAvailable(); + if (samplesToCopy > samplesRoomFor) { + // there's not enough room for this write. erase old data to make room for this new data + int samplesToDelete = samplesToCopy - samplesRoomFor; + _nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, samplesToDelete); + _overflowCount++; + qDebug() << "Overflowed ring buffer! Overwriting old data"; + } + + int16_t* bufferLast = _buffer + _bufferLength - 1; + for (int i = 0; i < samplesToCopy; i++) { + *_endOfLastWrite = *source; + _endOfLastWrite = (_endOfLastWrite == bufferLast) ? _buffer : _endOfLastWrite + 1; + ++source; + } + + return samplesToCopy; +} + +int AudioRingBuffer::writeSamplesWithFade(ConstIterator source, int maxSamples, float fade) { + int samplesToCopy = std::min(maxSamples, _sampleCapacity); + int samplesRoomFor = _sampleCapacity - samplesAvailable(); + if (samplesToCopy > samplesRoomFor) { + // there's not enough room for this write. erase old data to make room for this new data + int samplesToDelete = samplesToCopy - samplesRoomFor; + _nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, samplesToDelete); + _overflowCount++; + qDebug() << "Overflowed ring buffer! Overwriting old data"; + } + + int16_t* bufferLast = _buffer + _bufferLength - 1; + for (int i = 0; i < samplesToCopy; i++) { + *_endOfLastWrite = (int16_t)((float)(*source) * fade); + _endOfLastWrite = (_endOfLastWrite == bufferLast) ? 
_buffer : _endOfLastWrite + 1; + ++source; + } + + return samplesToCopy; +} diff --git a/libraries/audio/src/AudioRingBuffer.h b/libraries/audio/src/AudioRingBuffer.h index b4b30b1f56..f033ffa80f 100644 --- a/libraries/audio/src/AudioRingBuffer.h +++ b/libraries/audio/src/AudioRingBuffer.h @@ -28,7 +28,7 @@ const int NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL = 512; const int NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL = NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL / sizeof(int16_t); const unsigned int BUFFER_SEND_INTERVAL_USECS = floorf((NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL - / (float) SAMPLE_RATE) * USECS_PER_SECOND); + / (float)SAMPLE_RATE) * USECS_PER_SECOND); const int MAX_SAMPLE_VALUE = std::numeric_limits::max(); const int MIN_SAMPLE_VALUE = std::numeric_limits::min(); @@ -42,33 +42,33 @@ public: void reset(); void resizeForFrameSize(int numFrameSamples); - + void clear(); int getSampleCapacity() const { return _sampleCapacity; } int getFrameCapacity() const { return _frameCapacity; } - + int readSamples(int16_t* destination, int maxSamples); int writeSamples(const int16_t* source, int maxSamples); - + int readData(char* data, int maxSize); int writeData(const char* data, int maxSize); - + int16_t& operator[](const int index); const int16_t& operator[] (const int index) const; - + void shiftReadPosition(unsigned int numSamples); float getNextOutputFrameLoudness() const; - + int samplesAvailable() const; int framesAvailable() const { return samplesAvailable() / _numFrameSamples; } int getNumFrameSamples() const { return _numFrameSamples; } - + int getOverflowCount() const { return _overflowCount; } /// how many times has the ring buffer has overwritten old data - - int addSilentFrame(int numSilentSamples); + + int addSilentSamples(int samples); private: float getFrameLoudness(const int16_t* frameStart) const; @@ -77,12 +77,12 @@ protected: // disallow copying of AudioRingBuffer objects AudioRingBuffer(const AudioRingBuffer&); AudioRingBuffer& operator= (const AudioRingBuffer&); - + int16_t* shiftedPositionAccomodatingWrap(int16_t* position, int numSamplesShift) const; int _frameCapacity; int _sampleCapacity; - bool _isFull; + int _bufferLength; // actual length of _buffer: will be one frame larger than _sampleCapacity int _numFrameSamples; int16_t* _nextOutput; int16_t* _endOfLastWrite; @@ -95,23 +95,25 @@ public: class ConstIterator { //public std::iterator < std::forward_iterator_tag, int16_t > { public: ConstIterator() - : _capacity(0), + : _bufferLength(0), _bufferFirst(NULL), _bufferLast(NULL), _at(NULL) {} ConstIterator(int16_t* bufferFirst, int capacity, int16_t* at) - : _capacity(capacity), + : _bufferLength(capacity), _bufferFirst(bufferFirst), _bufferLast(bufferFirst + capacity - 1), _at(at) {} + bool isNull() const { return _at == NULL; } + bool operator==(const ConstIterator& rhs) { return _at == rhs._at; } bool operator!=(const ConstIterator& rhs) { return _at != rhs._at; } const int16_t& operator*() { return *_at; } ConstIterator& operator=(const ConstIterator& rhs) { - _capacity = rhs._capacity; + _bufferLength = rhs._bufferLength; _bufferFirst = rhs._bufferFirst; _bufferLast = rhs._bufferLast; _at = rhs._at; @@ -145,40 +147,54 @@ public: } ConstIterator operator+(int i) { - return ConstIterator(_bufferFirst, _capacity, atShiftedBy(i)); + return ConstIterator(_bufferFirst, _bufferLength, atShiftedBy(i)); } ConstIterator operator-(int i) { - return ConstIterator(_bufferFirst, _capacity, atShiftedBy(-i)); + return ConstIterator(_bufferFirst, _bufferLength, atShiftedBy(-i)); } 
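        // Usage sketch (illustrative values only, not part of the patch): the iterator wraps
        // modulo the buffer length, so stepping past the last slot lands back at the first one.
        //
        //     int16_t samples[4] = { 10, 20, 30, 40 };
        //     AudioRingBuffer::ConstIterator it(samples, 4, samples + 3);
        //     *it;        // 40
        //     *(it + 1);  // 10  (atShiftedBy() wrapped around to the start)
        //     *(it - 1);  // 30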
void readSamples(int16_t* dest, int numSamples) { + int16_t* at = _at; for (int i = 0; i < numSamples; i++) { - *dest = *(*this); + *dest = *at; ++dest; - ++(*this); + at = (at == _bufferLast) ? _bufferFirst : at + 1; } } - + + void readSamplesWithFade(int16_t* dest, int numSamples, float fade) { + int16_t* at = _at; + for (int i = 0; i < numSamples; i++) { + *dest = (float)*at * fade; + ++dest; + at = (at == _bufferLast) ? _bufferFirst : at + 1; + } + } + private: int16_t* atShiftedBy(int i) { - i = (_at - _bufferFirst + i) % _capacity; + i = (_at - _bufferFirst + i) % _bufferLength; if (i < 0) { - i += _capacity; + i += _bufferLength; } return _bufferFirst + i; } private: - int _capacity; + int _bufferLength; int16_t* _bufferFirst; int16_t* _bufferLast; int16_t* _at; }; - ConstIterator nextOutput() const { return ConstIterator(_buffer, _sampleCapacity, _nextOutput); } + ConstIterator nextOutput() const { return ConstIterator(_buffer, _bufferLength, _nextOutput); } + ConstIterator lastFrameWritten() const { return ConstIterator(_buffer, _bufferLength, _endOfLastWrite) - _numFrameSamples; } float getFrameLoudness(ConstIterator frameStart) const; + + int writeSamples(ConstIterator source, int maxSamples); + int writeSamplesWithFade(ConstIterator source, int maxSamples, float fade); }; #endif // hifi_AudioRingBuffer_h diff --git a/libraries/audio/src/InboundAudioStream.cpp b/libraries/audio/src/InboundAudioStream.cpp index ba8d9481b5..e12dbb42a9 100644 --- a/libraries/audio/src/InboundAudioStream.cpp +++ b/libraries/audio/src/InboundAudioStream.cpp @@ -14,30 +14,37 @@ #include "InboundAudioStream.h" #include "PacketHeaders.h" -InboundAudioStream::InboundAudioStream(int numFrameSamples, int numFramesCapacity, - bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, bool useStDevForJitterCalc) : +const int STARVE_HISTORY_CAPACITY = 50; + +InboundAudioStream::InboundAudioStream(int numFrameSamples, int numFramesCapacity, const Settings& settings) : _ringBuffer(numFrameSamples, false, numFramesCapacity), _lastPopSucceeded(false), _lastPopOutput(), - _dynamicJitterBuffers(dynamicJitterBuffers), - _staticDesiredJitterBufferFrames(staticDesiredJitterBufferFrames), - _useStDevForJitterCalc(useStDevForJitterCalc), - _calculatedJitterBufferFramesUsingMaxGap(0), - _calculatedJitterBufferFramesUsingStDev(0), - _desiredJitterBufferFrames(dynamicJitterBuffers ? 1 : staticDesiredJitterBufferFrames), - _maxFramesOverDesired(maxFramesOverDesired), + _dynamicJitterBuffers(settings._dynamicJitterBuffers), + _staticDesiredJitterBufferFrames(settings._staticDesiredJitterBufferFrames), + _useStDevForJitterCalc(settings._useStDevForJitterCalc), + _desiredJitterBufferFrames(settings._dynamicJitterBuffers ? 
1 : settings._staticDesiredJitterBufferFrames), + _maxFramesOverDesired(settings._maxFramesOverDesired), _isStarved(true), _hasStarted(false), _consecutiveNotMixedCount(0), _starveCount(0), _silentFramesDropped(0), _oldFramesDropped(0), - _incomingSequenceNumberStats(INCOMING_SEQ_STATS_HISTORY_LENGTH_SECONDS), - _lastFrameReceivedTime(0), - _interframeTimeGapStatsForJitterCalc(TIME_GAPS_FOR_JITTER_CALC_INTERVAL_SAMPLES, TIME_GAPS_FOR_JITTER_CALC_WINDOW_INTERVALS), - _interframeTimeGapStatsForStatsPacket(TIME_GAPS_FOR_STATS_PACKET_INTERVAL_SAMPLES, TIME_GAPS_FOR_STATS_PACKET_WINDOW_INTERVALS), + _incomingSequenceNumberStats(STATS_FOR_STATS_PACKET_WINDOW_SECONDS), + _lastPacketReceivedTime(0), + _timeGapStatsForDesiredCalcOnTooManyStarves(0, settings._windowSecondsForDesiredCalcOnTooManyStarves), + _calculatedJitterBufferFramesUsingMaxGap(0), + _stdevStatsForDesiredCalcOnTooManyStarves(), + _calculatedJitterBufferFramesUsingStDev(0), + _timeGapStatsForDesiredReduction(0, settings._windowSecondsForDesiredReduction), + _starveHistoryWindowSeconds(settings._windowSecondsForDesiredCalcOnTooManyStarves), + _starveHistory(STARVE_HISTORY_CAPACITY), + _starveThreshold(settings._windowStarveThreshold), _framesAvailableStat(), - _currentJitterBufferFrames(0) + _currentJitterBufferFrames(0), + _timeGapStatsForStatsPacket(0, STATS_FOR_STATS_PACKET_WINDOW_SECONDS), + _repetitionWithFade(settings._repetitionWithFade) { } @@ -59,11 +66,14 @@ void InboundAudioStream::resetStats() { _silentFramesDropped = 0; _oldFramesDropped = 0; _incomingSequenceNumberStats.reset(); - _lastFrameReceivedTime = 0; - _interframeTimeGapStatsForJitterCalc.reset(); - _interframeTimeGapStatsForStatsPacket.reset(); + _lastPacketReceivedTime = 0; + _timeGapStatsForDesiredCalcOnTooManyStarves.reset(); + _stdevStatsForDesiredCalcOnTooManyStarves = StDev(); + _timeGapStatsForDesiredReduction.reset(); + _starveHistory.clear(); _framesAvailableStat.reset(); _currentJitterBufferFrames = 0; + _timeGapStatsForStatsPacket.reset(); } void InboundAudioStream::clearBuffer() { @@ -72,8 +82,11 @@ void InboundAudioStream::clearBuffer() { _currentJitterBufferFrames = 0; } -int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples) { - return _ringBuffer.writeData(packetAfterStreamProperties.data(), numAudioSamples * sizeof(int16_t)); +void InboundAudioStream::perSecondCallbackForUpdatingStats() { + _incomingSequenceNumberStats.pushStatsToHistory(); + _timeGapStatsForDesiredCalcOnTooManyStarves.currentIntervalComplete(); + _timeGapStatsForDesiredReduction.currentIntervalComplete(); + _timeGapStatsForStatsPacket.currentIntervalComplete(); } int InboundAudioStream::parseData(const QByteArray& packet) { @@ -83,36 +96,51 @@ int InboundAudioStream::parseData(const QByteArray& packet) { // parse header int numBytesHeader = numBytesForPacketHeader(packet); - const char* sequenceAt = packet.constData() + numBytesHeader; + const char* dataAt = packet.constData() + numBytesHeader; int readBytes = numBytesHeader; // parse sequence number and track it - quint16 sequence = *(reinterpret_cast(sequenceAt)); + quint16 sequence = *(reinterpret_cast(dataAt)); + dataAt += sizeof(quint16); readBytes += sizeof(quint16); SequenceNumberStats::ArrivalInfo arrivalInfo = _incomingSequenceNumberStats.sequenceNumberReceived(sequence, senderUUID); - frameReceivedUpdateTimingStats(); + packetReceivedUpdateTimingStats(); - // TODO: handle generalized silent packet here????? 
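    // Wire-format sketch (descriptive only): a PacketTypeSilentAudioFrame carries, after the
    // standard packet header and the quint16 sequence number, a single quint16 count of silent
    // network samples and no audio payload. That count is expanded into droppable silent samples
    // below instead of being parsed as audio data.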
+ int networkSamples; - // parse the info after the seq number and before the audio data.(the stream properties) - int numAudioSamples; - readBytes += parseStreamProperties(packetType, packet.mid(readBytes), numAudioSamples); + if (packetType == PacketTypeSilentAudioFrame) { + quint16 numSilentSamples = *(reinterpret_cast(dataAt)); + readBytes += sizeof(quint16); + networkSamples = (int)numSilentSamples; + } else { + // parse the info after the seq number and before the audio data (the stream properties) + readBytes += parseStreamProperties(packetType, packet.mid(readBytes), networkSamples); + } // handle this packet based on its arrival status. - // For now, late packets are ignored. It may be good in the future to insert the late audio frame - // into the ring buffer to fill in the missing frame if it hasn't been mixed yet. switch (arrivalInfo._status) { case SequenceNumberStats::Early: { + // Packet is early; write droppable silent samples for each of the skipped packets. + // NOTE: we assume that each dropped packet contains the same number of samples + // as the packet we just received. int packetsDropped = arrivalInfo._seqDiffFromExpected; - writeSamplesForDroppedPackets(packetsDropped * numAudioSamples); + writeSamplesForDroppedPackets(packetsDropped * networkSamples); + // fall through to OnTime case } case SequenceNumberStats::OnTime: { - readBytes += parseAudioData(packetType, packet.mid(readBytes), numAudioSamples); + // Packet is on time; parse its data to the ringbuffer + if (packetType == PacketTypeSilentAudioFrame) { + writeDroppableSilentSamples(networkSamples); + } else { + readBytes += parseAudioData(packetType, packet.mid(readBytes), networkSamples); + } break; } default: { + // For now, late packets are ignored. It may be good in the future to insert the late audio packet data + // into the ring buffer to fill in the missing frame if it hasn't been mixed yet. break; } } @@ -139,6 +167,43 @@ int InboundAudioStream::parseData(const QByteArray& packet) { return readBytes; } +int InboundAudioStream::parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) { + // mixed audio packets do not have any info between the seq num and the audio data. + numAudioSamples = packetAfterSeqNum.size() / sizeof(int16_t); + return 0; +} + +int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples) { + return _ringBuffer.writeData(packetAfterStreamProperties.data(), numAudioSamples * sizeof(int16_t)); +} + +int InboundAudioStream::writeDroppableSilentSamples(int silentSamples) { + // calculate how many silent frames we should drop. 
+ int samplesPerFrame = _ringBuffer.getNumFrameSamples(); + int desiredJitterBufferFramesPlusPadding = _desiredJitterBufferFrames + DESIRED_JITTER_BUFFER_FRAMES_PADDING; + int numSilentFramesToDrop = 0; + + if (silentSamples >= samplesPerFrame && _currentJitterBufferFrames > desiredJitterBufferFramesPlusPadding) { + + // our avg jitter buffer size exceeds its desired value, so ignore some silent + // frames to get that size as close to desired as possible + int numSilentFramesToDropDesired = _currentJitterBufferFrames - desiredJitterBufferFramesPlusPadding; + int numSilentFramesReceived = silentSamples / samplesPerFrame; + numSilentFramesToDrop = std::min(numSilentFramesToDropDesired, numSilentFramesReceived); + + // dont reset _currentJitterBufferFrames here; we want to be able to drop further silent frames + // without waiting for _framesAvailableStat to fill up to 10s of samples. + _currentJitterBufferFrames -= numSilentFramesToDrop; + _silentFramesDropped += numSilentFramesToDrop; + + _framesAvailableStat.reset(); + } + + int ret = _ringBuffer.addSilentSamples(silentSamples - numSilentFramesToDrop * samplesPerFrame); + + return ret; +} + int InboundAudioStream::popSamples(int maxSamples, bool allOrNothing, bool starveIfNoSamplesPopped) { int samplesPopped = 0; int samplesAvailable = _ringBuffer.samplesAvailable(); @@ -216,12 +281,61 @@ void InboundAudioStream::framesAvailableChanged() { } void InboundAudioStream::setToStarved() { - _isStarved = true; _consecutiveNotMixedCount = 0; _starveCount++; // if we have more than the desired frames when setToStarved() is called, then we'll immediately // be considered refilled. in that case, there's no need to set _isStarved to true. _isStarved = (_ringBuffer.framesAvailable() < _desiredJitterBufferFrames); + + // record the time of this starve in the starve history + quint64 now = usecTimestampNow(); + _starveHistory.insert(now); + + if (_dynamicJitterBuffers) { + // dynamic jitter buffers are enabled. check if this starve put us over the window + // starve threshold + quint64 windowEnd = now - _starveHistoryWindowSeconds * USECS_PER_SECOND; + RingBufferHistory::Iterator starvesIterator = _starveHistory.begin(); + RingBufferHistory::Iterator end = _starveHistory.end(); + int starvesInWindow = 1; + do { + ++starvesIterator; + if (*starvesIterator < windowEnd) { + break; + } + starvesInWindow++; + } while (starvesIterator != end); + + // this starve put us over the starve threshold. update _desiredJitterBufferFrames to + // value determined by window A. + if (starvesInWindow >= _starveThreshold) { + int calculatedJitterBufferFrames; + if (_useStDevForJitterCalc) { + calculatedJitterBufferFrames = _calculatedJitterBufferFramesUsingStDev; + } else { + // we don't know when the next packet will arrive, so it's possible the gap between the last packet and the + // next packet will exceed the max time gap in the window. If the time since the last packet has already exceeded + // the window max gap, then we should use that value to calculate desired frames. 
+ int framesSinceLastPacket = ceilf((float)(now - _lastPacketReceivedTime) / (float)BUFFER_SEND_INTERVAL_USECS); + calculatedJitterBufferFrames = std::max(_calculatedJitterBufferFramesUsingMaxGap, framesSinceLastPacket); + } + // make sure _desiredJitterBufferFrames does not become lower here + if (calculatedJitterBufferFrames >= _desiredJitterBufferFrames) { + _desiredJitterBufferFrames = calculatedJitterBufferFrames; + } + } + } +} + +void InboundAudioStream::setSettings(const Settings& settings) { + setMaxFramesOverDesired(settings._maxFramesOverDesired); + setDynamicJitterBuffers(settings._dynamicJitterBuffers); + setStaticDesiredJitterBufferFrames(settings._staticDesiredJitterBufferFrames); + setUseStDevForJitterCalc(settings._useStDevForJitterCalc); + setWindowStarveThreshold(settings._windowStarveThreshold); + setWindowSecondsForDesiredCalcOnTooManyStarves(settings._windowSecondsForDesiredCalcOnTooManyStarves); + setWindowSecondsForDesiredReduction(settings._windowSecondsForDesiredReduction); + setRepetitionWithFade(settings._repetitionWithFade); } void InboundAudioStream::setDynamicJitterBuffers(bool dynamicJitterBuffers) { @@ -229,6 +343,7 @@ void InboundAudioStream::setDynamicJitterBuffers(bool dynamicJitterBuffers) { _desiredJitterBufferFrames = _staticDesiredJitterBufferFrames; } else { if (!_dynamicJitterBuffers) { + // if we're enabling dynamic jitter buffer frames, start desired frames at 1 _desiredJitterBufferFrames = 1; } } @@ -242,90 +357,102 @@ void InboundAudioStream::setStaticDesiredJitterBufferFrames(int staticDesiredJit } } +void InboundAudioStream::setWindowSecondsForDesiredCalcOnTooManyStarves(int windowSecondsForDesiredCalcOnTooManyStarves) { + _timeGapStatsForDesiredCalcOnTooManyStarves.setWindowIntervals(windowSecondsForDesiredCalcOnTooManyStarves); + _starveHistoryWindowSeconds = windowSecondsForDesiredCalcOnTooManyStarves; +} + +void InboundAudioStream::setWindowSecondsForDesiredReduction(int windowSecondsForDesiredReduction) { + _timeGapStatsForDesiredReduction.setWindowIntervals(windowSecondsForDesiredReduction); +} + + int InboundAudioStream::clampDesiredJitterBufferFramesValue(int desired) const { const int MIN_FRAMES_DESIRED = 0; const int MAX_FRAMES_DESIRED = _ringBuffer.getFrameCapacity(); return glm::clamp(desired, MIN_FRAMES_DESIRED, MAX_FRAMES_DESIRED); } -void InboundAudioStream::frameReceivedUpdateTimingStats() { - +void InboundAudioStream::packetReceivedUpdateTimingStats() { + // update our timegap stats and desired jitter buffer frames if necessary // discard the first few packets we receive since they usually have gaps that aren't represensative of normal jitter const int NUM_INITIAL_PACKETS_DISCARD = 3; quint64 now = usecTimestampNow(); if (_incomingSequenceNumberStats.getReceived() > NUM_INITIAL_PACKETS_DISCARD) { - quint64 gap = now - _lastFrameReceivedTime; - _interframeTimeGapStatsForStatsPacket.update(gap); + quint64 gap = now - _lastPacketReceivedTime; + _timeGapStatsForStatsPacket.update(gap); - const float USECS_PER_FRAME = NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL * USECS_PER_SECOND / (float)SAMPLE_RATE; + // update all stats used for desired frames calculations under dynamic jitter buffer mode + _timeGapStatsForDesiredCalcOnTooManyStarves.update(gap); + _stdevStatsForDesiredCalcOnTooManyStarves.addValue(gap); + _timeGapStatsForDesiredReduction.update(gap); - // update stats for Freddy's method of jitter calc - _interframeTimeGapStatsForJitterCalc.update(gap); - if (_interframeTimeGapStatsForJitterCalc.getNewStatsAvailableFlag()) { - 
_calculatedJitterBufferFramesUsingMaxGap = ceilf((float)_interframeTimeGapStatsForJitterCalc.getWindowMax() / USECS_PER_FRAME); - _interframeTimeGapStatsForJitterCalc.clearNewStatsAvailableFlag(); - - if (_dynamicJitterBuffers && !_useStDevForJitterCalc) { - _desiredJitterBufferFrames = clampDesiredJitterBufferFramesValue(_calculatedJitterBufferFramesUsingMaxGap); - } + if (_timeGapStatsForDesiredCalcOnTooManyStarves.getNewStatsAvailableFlag()) { + _calculatedJitterBufferFramesUsingMaxGap = ceilf((float)_timeGapStatsForDesiredCalcOnTooManyStarves.getWindowMax() + / (float)BUFFER_SEND_INTERVAL_USECS); + _timeGapStatsForDesiredCalcOnTooManyStarves.clearNewStatsAvailableFlag(); } - // update stats for Philip's method of jitter calc - _stdev.addValue(gap); const int STANDARD_DEVIATION_SAMPLE_COUNT = 500; - if (_stdev.getSamples() > STANDARD_DEVIATION_SAMPLE_COUNT) { + if (_stdevStatsForDesiredCalcOnTooManyStarves.getSamples() > STANDARD_DEVIATION_SAMPLE_COUNT) { const float NUM_STANDARD_DEVIATIONS = 3.0f; - _calculatedJitterBufferFramesUsingStDev = (int)ceilf(NUM_STANDARD_DEVIATIONS * _stdev.getStDev() / USECS_PER_FRAME); - _stdev.reset(); + _calculatedJitterBufferFramesUsingStDev = ceilf(NUM_STANDARD_DEVIATIONS * _stdevStatsForDesiredCalcOnTooManyStarves.getStDev() + / (float)BUFFER_SEND_INTERVAL_USECS); + _stdevStatsForDesiredCalcOnTooManyStarves.reset(); + } - if (_dynamicJitterBuffers && _useStDevForJitterCalc) { - _desiredJitterBufferFrames = clampDesiredJitterBufferFramesValue(_calculatedJitterBufferFramesUsingStDev); + if (_dynamicJitterBuffers) { + // if the max gap in window B (_timeGapStatsForDesiredReduction) corresponds to a smaller number of frames than _desiredJitterBufferFrames, + // then reduce _desiredJitterBufferFrames to that number of frames. + if (_timeGapStatsForDesiredReduction.getNewStatsAvailableFlag() && _timeGapStatsForDesiredReduction.isWindowFilled()) { + int calculatedJitterBufferFrames = ceilf((float)_timeGapStatsForDesiredReduction.getWindowMax() / (float)BUFFER_SEND_INTERVAL_USECS); + if (calculatedJitterBufferFrames < _desiredJitterBufferFrames) { + _desiredJitterBufferFrames = calculatedJitterBufferFrames; + } + _timeGapStatsForDesiredReduction.clearNewStatsAvailableFlag(); } } } - _lastFrameReceivedTime = now; + + _lastPacketReceivedTime = now; } -int InboundAudioStream::writeDroppableSilentSamples(int numSilentSamples) { - - // calculate how many silent frames we should drop. - int samplesPerFrame = _ringBuffer.getNumFrameSamples(); - int desiredJitterBufferFramesPlusPadding = _desiredJitterBufferFrames + DESIRED_JITTER_BUFFER_FRAMES_PADDING; - int numSilentFramesToDrop = 0; - - if (numSilentSamples >= samplesPerFrame && _currentJitterBufferFrames > desiredJitterBufferFramesPlusPadding) { - - // our avg jitter buffer size exceeds its desired value, so ignore some silent - // frames to get that size as close to desired as possible - int numSilentFramesToDropDesired = _currentJitterBufferFrames - desiredJitterBufferFramesPlusPadding; - int numSilentFramesReceived = numSilentSamples / samplesPerFrame; - numSilentFramesToDrop = std::min(numSilentFramesToDropDesired, numSilentFramesReceived); - - // dont reset _currentJitterBufferFrames here; we want to be able to drop further silent frames - // without waiting for _framesAvailableStat to fill up to 10s of samples. 
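    // Worked example of the silent-frame drop arithmetic (hypothetical values): with
    // _desiredJitterBufferFrames = 3 and DESIRED_JITTER_BUFFER_FRAMES_PADDING = 1, a
    // _currentJitterBufferFrames of 7 and a packet carrying 4 silent frames drops
    // min(7 - 4, 4) = 3 of those frames, pulling the buffer back toward its target
    // without discarding any audible samples.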
- _currentJitterBufferFrames -= numSilentFramesToDrop; - _silentFramesDropped += numSilentFramesToDrop; - - _framesAvailableStat.reset(); +int InboundAudioStream::writeSamplesForDroppedPackets(int networkSamples) { + if (_repetitionWithFade) { + return writeLastFrameRepeatedWithFade(networkSamples); } - - return _ringBuffer.addSilentFrame(numSilentSamples - numSilentFramesToDrop * samplesPerFrame); + return writeDroppableSilentSamples(networkSamples); } -int InboundAudioStream::writeSamplesForDroppedPackets(int numSamples) { - return writeDroppableSilentSamples(numSamples); +int InboundAudioStream::writeLastFrameRepeatedWithFade(int samples) { + AudioRingBuffer::ConstIterator frameToRepeat = _ringBuffer.lastFrameWritten(); + int frameSize = _ringBuffer.getNumFrameSamples(); + int samplesToWrite = samples; + int indexOfRepeat = 0; + do { + int samplesToWriteThisIteration = std::min(samplesToWrite, frameSize); + float fade = calculateRepeatedFrameFadeFactor(indexOfRepeat); + if (fade == 1.0f) { + samplesToWrite -= _ringBuffer.writeSamples(frameToRepeat, samplesToWriteThisIteration); + } else { + samplesToWrite -= _ringBuffer.writeSamplesWithFade(frameToRepeat, samplesToWriteThisIteration, fade); + } + indexOfRepeat++; + } while (samplesToWrite > 0); + + return samples; } AudioStreamStats InboundAudioStream::getAudioStreamStats() const { AudioStreamStats streamStats; - streamStats._timeGapMin = _interframeTimeGapStatsForStatsPacket.getMin(); - streamStats._timeGapMax = _interframeTimeGapStatsForStatsPacket.getMax(); - streamStats._timeGapAverage = _interframeTimeGapStatsForStatsPacket.getAverage(); - streamStats._timeGapWindowMin = _interframeTimeGapStatsForStatsPacket.getWindowMin(); - streamStats._timeGapWindowMax = _interframeTimeGapStatsForStatsPacket.getWindowMax(); - streamStats._timeGapWindowAverage = _interframeTimeGapStatsForStatsPacket.getWindowAverage(); + streamStats._timeGapMin = _timeGapStatsForStatsPacket.getMin(); + streamStats._timeGapMax = _timeGapStatsForStatsPacket.getMax(); + streamStats._timeGapAverage = _timeGapStatsForStatsPacket.getAverage(); + streamStats._timeGapWindowMin = _timeGapStatsForStatsPacket.getWindowMin(); + streamStats._timeGapWindowMax = _timeGapStatsForStatsPacket.getWindowMax(); + streamStats._timeGapWindowAverage = _timeGapStatsForStatsPacket.getWindowAverage(); streamStats._framesAvailable = _ringBuffer.framesAvailable(); streamStats._framesAvailableAverage = _framesAvailableStat.getAverage(); @@ -341,7 +468,24 @@ AudioStreamStats InboundAudioStream::getAudioStreamStats() const { return streamStats; } -AudioStreamStats InboundAudioStream::updateSeqHistoryAndGetAudioStreamStats() { - _incomingSequenceNumberStats.pushStatsToHistory(); - return getAudioStreamStats(); +float calculateRepeatedFrameFadeFactor(int indexOfRepeat) { + // fade factor scheme is from this paper: + // http://inst.eecs.berkeley.edu/~ee290t/sp04/lectures/packet_loss_recov_paper11.pdf + + const float INITIAL_MSECS_NO_FADE = 20.0f; + const float MSECS_FADE_TO_ZERO = 320.0f; + + const float INITIAL_FRAMES_NO_FADE = INITIAL_MSECS_NO_FADE * (float)USECS_PER_MSEC / (float)BUFFER_SEND_INTERVAL_USECS; + const float FRAMES_FADE_TO_ZERO = MSECS_FADE_TO_ZERO * (float)USECS_PER_MSEC / (float)BUFFER_SEND_INTERVAL_USECS; + + const float SAMPLE_RANGE = std::numeric_limits::max(); + + if (indexOfRepeat <= INITIAL_FRAMES_NO_FADE) { + return 1.0f; + } else if (indexOfRepeat <= INITIAL_FRAMES_NO_FADE + FRAMES_FADE_TO_ZERO) { + return pow(SAMPLE_RANGE, -(indexOfRepeat - INITIAL_FRAMES_NO_FADE) / 
FRAMES_FADE_TO_ZERO); + + //return 1.0f - ((indexOfRepeat - INITIAL_FRAMES_NO_FADE) / FRAMES_FADE_TO_ZERO); + } + return 0.0f; } diff --git a/libraries/audio/src/InboundAudioStream.h b/libraries/audio/src/InboundAudioStream.h index b65d5c5de0..ca9591a746 100644 --- a/libraries/audio/src/InboundAudioStream.h +++ b/libraries/audio/src/InboundAudioStream.h @@ -22,43 +22,84 @@ #include "TimeWeightedAvg.h" // This adds some number of frames to the desired jitter buffer frames target we use when we're dropping frames. -// The larger this value is, the less aggressive we are about reducing the jitter buffer length. -// Setting this to 0 will try to get the jitter buffer to be exactly _desiredJitterBufferFrames long when dropping frames, +// The larger this value is, the less frames we drop when attempting to reduce the jitter buffer length. +// Setting this to 0 will try to get the jitter buffer to be exactly _desiredJitterBufferFrames when dropping frames, // which could lead to a starve soon after. const int DESIRED_JITTER_BUFFER_FRAMES_PADDING = 1; -// the time gaps stats for _desiredJitterBufferFrames calculation -// will recalculate the max for the past 5000 samples every 500 samples -const int TIME_GAPS_FOR_JITTER_CALC_INTERVAL_SAMPLES = 500; -const int TIME_GAPS_FOR_JITTER_CALC_WINDOW_INTERVALS = 10; - -// the time gap stats for constructing AudioStreamStats will -// recalculate min/max/avg every ~1 second for the past ~30 seconds of time gap data -const int TIME_GAPS_FOR_STATS_PACKET_INTERVAL_SAMPLES = USECS_PER_SECOND / BUFFER_SEND_INTERVAL_USECS; -const int TIME_GAPS_FOR_STATS_PACKET_WINDOW_INTERVALS = 30; +// this controls the length of the window for stats used in the stats packet (not the stats used in +// _desiredJitterBufferFrames calculation) +const int STATS_FOR_STATS_PACKET_WINDOW_SECONDS = 30; // this controls the window size of the time-weighted avg of frames available. Every time the window fills up, // _currentJitterBufferFrames is updated with the time-weighted avg and the running time-weighted avg is reset. 
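// Worked example of that time-weighted average (hypothetical values, using the new 10-second
// window below): if the buffer held 6 frames for 8 seconds of the window and 2 frames for the
// other 2 seconds, _currentJitterBufferFrames is updated to roughly (6*8 + 2*2) / 10 ~= 5.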
-const int FRAMES_AVAILABLE_STAT_WINDOW_USECS = 2 * USECS_PER_SECOND; - -// the internal history buffer of the incoming seq stats will cover 30s to calculate -// packet loss % over last 30s -const int INCOMING_SEQ_STATS_HISTORY_LENGTH_SECONDS = 30; - -const int INBOUND_RING_BUFFER_FRAME_CAPACITY = 100; +const int FRAMES_AVAILABLE_STAT_WINDOW_USECS = 10 * USECS_PER_SECOND; +// default values for members of the Settings struct const int DEFAULT_MAX_FRAMES_OVER_DESIRED = 10; -const int DEFAULT_DESIRED_JITTER_BUFFER_FRAMES = 1; +const bool DEFAULT_DYNAMIC_JITTER_BUFFERS = true; +const int DEFAULT_STATIC_DESIRED_JITTER_BUFFER_FRAMES = 1; +const bool DEFAULT_USE_STDEV_FOR_JITTER_CALC = false; +const int DEFAULT_WINDOW_STARVE_THRESHOLD = 3; +const int DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES = 50; +const int DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION = 10; +const bool DEFAULT_REPETITION_WITH_FADE = true; class InboundAudioStream : public NodeData { Q_OBJECT public: - InboundAudioStream(int numFrameSamples, int numFramesCapacity, - bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, - bool useStDevForJitterCalc = false); + class Settings { + public: + Settings() + : _maxFramesOverDesired(DEFAULT_MAX_FRAMES_OVER_DESIRED), + _dynamicJitterBuffers(DEFAULT_DYNAMIC_JITTER_BUFFERS), + _staticDesiredJitterBufferFrames(DEFAULT_STATIC_DESIRED_JITTER_BUFFER_FRAMES), + _useStDevForJitterCalc(DEFAULT_USE_STDEV_FOR_JITTER_CALC), + _windowStarveThreshold(DEFAULT_WINDOW_STARVE_THRESHOLD), + _windowSecondsForDesiredCalcOnTooManyStarves(DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES), + _windowSecondsForDesiredReduction(DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION), + _repetitionWithFade(DEFAULT_REPETITION_WITH_FADE) + {} + + Settings(int maxFramesOverDesired, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, + bool useStDevForJitterCalc, int windowStarveThreshold, int windowSecondsForDesiredCalcOnTooManyStarves, + int _windowSecondsForDesiredReduction, bool repetitionWithFade) + : _maxFramesOverDesired(maxFramesOverDesired), + _dynamicJitterBuffers(dynamicJitterBuffers), + _staticDesiredJitterBufferFrames(staticDesiredJitterBufferFrames), + _useStDevForJitterCalc(useStDevForJitterCalc), + _windowStarveThreshold(windowStarveThreshold), + _windowSecondsForDesiredCalcOnTooManyStarves(windowSecondsForDesiredCalcOnTooManyStarves), + _windowSecondsForDesiredReduction(windowSecondsForDesiredCalcOnTooManyStarves), + _repetitionWithFade(repetitionWithFade) + {} + + // max number of frames over desired in the ringbuffer. + int _maxFramesOverDesired; + + // if false, _desiredJitterBufferFrames will always be _staticDesiredJitterBufferFrames. Otherwise, + // either fred or philip's method will be used to calculate _desiredJitterBufferFrames based on packet timegaps. + bool _dynamicJitterBuffers; + + // settings for static jitter buffer mode + int _staticDesiredJitterBufferFrames; + + // settings for dynamic jitter buffer mode + bool _useStDevForJitterCalc; // if true, philip's method is used. otherwise, fred's method is used. + int _windowStarveThreshold; + int _windowSecondsForDesiredCalcOnTooManyStarves; + int _windowSecondsForDesiredReduction; + + // if true, the prev frame will be repeated (fading to silence) for dropped frames. + // otherwise, silence will be inserted. 
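        // (When enabled, the repeated frame is held at full volume for the first ~20 ms of
        // repetition and then faded to silence over the following ~320 ms; see
        // calculateRepeatedFrameFadeFactor() in InboundAudioStream.cpp.)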
+ bool _repetitionWithFade; + }; + +public: + InboundAudioStream(int numFrameSamples, int numFramesCapacity, const Settings& settings); void reset(); - void resetStats(); + virtual void resetStats(); void clearBuffer(); virtual int parseData(const QByteArray& packet); @@ -72,14 +113,18 @@ public: void setToStarved(); - - void setDynamicJitterBuffers(bool dynamicJitterBuffers); - void setStaticDesiredJitterBufferFrames(int staticDesiredJitterBufferFrames); - /// this function should be called once per second to ensure the seq num stats history spans ~30 seconds - AudioStreamStats updateSeqHistoryAndGetAudioStreamStats(); + void setSettings(const Settings& settings); void setMaxFramesOverDesired(int maxFramesOverDesired) { _maxFramesOverDesired = maxFramesOverDesired; } + void setDynamicJitterBuffers(bool setDynamicJitterBuffers); + void setStaticDesiredJitterBufferFrames(int staticDesiredJitterBufferFrames); + void setUseStDevForJitterCalc(bool useStDevForJitterCalc) { _useStDevForJitterCalc = useStDevForJitterCalc; } + void setWindowStarveThreshold(int windowStarveThreshold) { _starveThreshold = windowStarveThreshold; } + void setWindowSecondsForDesiredCalcOnTooManyStarves(int windowSecondsForDesiredCalcOnTooManyStarves); + void setWindowSecondsForDesiredReduction(int windowSecondsForDesiredReduction); + void setRepetitionWithFade(bool repetitionWithFade) { _repetitionWithFade = repetitionWithFade; } + virtual AudioStreamStats getAudioStreamStats() const; @@ -110,11 +155,17 @@ public: int getPacketsReceived() const { return _incomingSequenceNumberStats.getReceived(); } +public slots: + /// This function should be called every second for all the stats to function properly. If dynamic jitter buffers + /// is enabled, those stats are used to calculate _desiredJitterBufferFrames. + /// If the stats are not used and dynamic jitter buffers is disabled, it's not necessary to call this function. + void perSecondCallbackForUpdatingStats(); + private: - void frameReceivedUpdateTimingStats(); + void packetReceivedUpdateTimingStats(); int clampDesiredJitterBufferFramesValue(int desired) const; - int writeSamplesForDroppedPackets(int numSamples); + int writeSamplesForDroppedPackets(int networkSamples); void popSamplesNoCheck(int samples); void framesAvailableChanged(); @@ -126,13 +177,19 @@ protected: /// parses the info between the seq num and the audio data in the network packet and calculates /// how many audio samples this packet contains (used when filling in samples for dropped packets). - virtual int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) = 0; + /// default implementation assumes no stream properties and raw audio samples after stream propertiess + virtual int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& networkSamples); /// parses the audio data in the network packet. /// default implementation assumes packet contains raw audio samples after stream properties - virtual int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples); + virtual int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int networkSamples); - int writeDroppableSilentSamples(int numSilentSamples); + /// writes silent samples to the buffer that may be dropped to reduce latency caused by the buffer + virtual int writeDroppableSilentSamples(int silentSamples); + + /// writes the last written frame repeatedly, gradually fading to silence. 
+ /// used for writing samples for dropped packets. + virtual int writeLastFrameRepeatedWithFade(int samples); protected: @@ -147,8 +204,6 @@ protected: // if jitter buffer is dynamic, this determines what method of calculating _desiredJitterBufferFrames // if true, Philip's timegap std dev calculation is used. Otherwise, Freddy's max timegap calculation is used bool _useStDevForJitterCalc; - int _calculatedJitterBufferFramesUsingMaxGap; - int _calculatedJitterBufferFramesUsingStDev; int _desiredJitterBufferFrames; @@ -168,16 +223,28 @@ protected: SequenceNumberStats _incomingSequenceNumberStats; - quint64 _lastFrameReceivedTime; - MovingMinMaxAvg _interframeTimeGapStatsForJitterCalc; - StDev _stdev; - MovingMinMaxAvg _interframeTimeGapStatsForStatsPacket; - + quint64 _lastPacketReceivedTime; + MovingMinMaxAvg _timeGapStatsForDesiredCalcOnTooManyStarves; // for Freddy's method + int _calculatedJitterBufferFramesUsingMaxGap; + StDev _stdevStatsForDesiredCalcOnTooManyStarves; // for Philip's method + int _calculatedJitterBufferFramesUsingStDev; // the most recent desired frames calculated by Philip's method + MovingMinMaxAvg _timeGapStatsForDesiredReduction; + + int _starveHistoryWindowSeconds; + RingBufferHistory _starveHistory; + int _starveThreshold; + TimeWeightedAvg _framesAvailableStat; - // this value is based on the time-weighted avg from _framesAvailableStat. it is only used for + // this value is periodically updated with the time-weighted avg from _framesAvailableStat. it is only used for // dropping silent frames right now. int _currentJitterBufferFrames; + + MovingMinMaxAvg _timeGapStatsForStatsPacket; + + bool _repetitionWithFade; }; +float calculateRepeatedFrameFadeFactor(int indexOfRepeat); + #endif // hifi_InboundAudioStream_h diff --git a/libraries/audio/src/InjectedAudioStream.cpp b/libraries/audio/src/InjectedAudioStream.cpp index 5c1c2ed269..9a757b774e 100644 --- a/libraries/audio/src/InjectedAudioStream.cpp +++ b/libraries/audio/src/InjectedAudioStream.cpp @@ -19,8 +19,8 @@ #include "InjectedAudioStream.h" -InjectedAudioStream::InjectedAudioStream(const QUuid& streamIdentifier, bool dynamicJitterBuffer, int staticDesiredJitterBufferFrames, int maxFramesOverDesired) : - PositionalAudioStream(PositionalAudioStream::Injector, false, dynamicJitterBuffer, staticDesiredJitterBufferFrames, maxFramesOverDesired), +InjectedAudioStream::InjectedAudioStream(const QUuid& streamIdentifier, const InboundAudioStream::Settings& settings) : + PositionalAudioStream(PositionalAudioStream::Injector, false, settings), _streamIdentifier(streamIdentifier), _radius(0.0f), _attenuationRatio(0) diff --git a/libraries/audio/src/InjectedAudioStream.h b/libraries/audio/src/InjectedAudioStream.h index d8d9a54c6e..f3840b1029 100644 --- a/libraries/audio/src/InjectedAudioStream.h +++ b/libraries/audio/src/InjectedAudioStream.h @@ -18,7 +18,7 @@ class InjectedAudioStream : public PositionalAudioStream { public: - InjectedAudioStream(const QUuid& streamIdentifier, bool dynamicJitterBuffer, int staticDesiredJitterBufferFrames, int maxFramesOverDesired); + InjectedAudioStream(const QUuid& streamIdentifier, const InboundAudioStream::Settings& settings); float getRadius() const { return _radius; } float getAttenuationRatio() const { return _attenuationRatio; } diff --git a/libraries/audio/src/MixedAudioStream.cpp b/libraries/audio/src/MixedAudioStream.cpp index 38c4ae641d..85bf71747a 100644 --- a/libraries/audio/src/MixedAudioStream.cpp +++ b/libraries/audio/src/MixedAudioStream.cpp @@ -11,13 +11,7 @@ 
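// Usage sketch (not part of the diff): the stream classes above now take a single
// InboundAudioStream::Settings value instead of a long list of jitter-buffer arguments.
// The frame capacity of 20 below is an arbitrary illustrative value.
InboundAudioStream::Settings exampleSettings;   // starts from the DEFAULT_* values
exampleSettings._dynamicJitterBuffers = true;   // recompute desired frames from packet timegaps
exampleSettings._windowStarveThreshold = 3;     // starves per window before desired frames grow
exampleSettings._repetitionWithFade = true;     // repeat-and-fade instead of inserting silence
MixedAudioStream exampleStream(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO, 20, exampleSettings);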
#include "MixedAudioStream.h" -MixedAudioStream::MixedAudioStream(int numFrameSamples, int numFramesCapacity, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, bool useStDevForJitterCalc) - : InboundAudioStream(numFrameSamples, numFramesCapacity, dynamicJitterBuffers, staticDesiredJitterBufferFrames, maxFramesOverDesired, useStDevForJitterCalc) +MixedAudioStream::MixedAudioStream(int numFrameSamples, int numFramesCapacity, const InboundAudioStream::Settings& settings) + : InboundAudioStream(numFrameSamples, numFramesCapacity, settings) { } - -int MixedAudioStream::parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) { - // mixed audio packets do not have any info between the seq num and the audio data. - numAudioSamples = packetAfterSeqNum.size() / sizeof(int16_t); - return 0; -} diff --git a/libraries/audio/src/MixedAudioStream.h b/libraries/audio/src/MixedAudioStream.h index d19f19af07..edb26c486f 100644 --- a/libraries/audio/src/MixedAudioStream.h +++ b/libraries/audio/src/MixedAudioStream.h @@ -17,12 +17,9 @@ class MixedAudioStream : public InboundAudioStream { public: - MixedAudioStream(int numFrameSamples, int numFramesCapacity, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, bool useStDevForJitterCalc); + MixedAudioStream(int numFrameSamples, int numFramesCapacity, const InboundAudioStream::Settings& settings); float getNextOutputFrameLoudness() const { return _ringBuffer.getNextOutputFrameLoudness(); } - -protected: - int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples); }; #endif // hifi_MixedAudioStream_h diff --git a/libraries/audio/src/MixedProcessedAudioStream.cpp b/libraries/audio/src/MixedProcessedAudioStream.cpp index 49990dcd22..844adf36b3 100644 --- a/libraries/audio/src/MixedProcessedAudioStream.cpp +++ b/libraries/audio/src/MixedProcessedAudioStream.cpp @@ -11,35 +11,53 @@ #include "MixedProcessedAudioStream.h" -MixedProcessedAudioStream ::MixedProcessedAudioStream (int numFrameSamples, int numFramesCapacity, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, bool useStDevForJitterCalc) - : InboundAudioStream(numFrameSamples, numFramesCapacity, dynamicJitterBuffers, staticDesiredJitterBufferFrames, maxFramesOverDesired, useStDevForJitterCalc) +static const int STEREO_FACTOR = 2; + +MixedProcessedAudioStream::MixedProcessedAudioStream(int numFrameSamples, int numFramesCapacity, const InboundAudioStream::Settings& settings) + : InboundAudioStream(numFrameSamples, numFramesCapacity, settings) { } void MixedProcessedAudioStream::outputFormatChanged(int outputFormatChannelCountTimesSampleRate) { _outputFormatChannelsTimesSampleRate = outputFormatChannelCountTimesSampleRate; - int deviceOutputFrameSize = NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL * _outputFormatChannelsTimesSampleRate / SAMPLE_RATE; + int deviceOutputFrameSize = networkToDeviceSamples(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO); _ringBuffer.resizeForFrameSize(deviceOutputFrameSize); } -int MixedProcessedAudioStream::parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) { - // mixed audio packets do not have any info between the seq num and the audio data. 
- int numNetworkSamples = packetAfterSeqNum.size() / sizeof(int16_t); +int MixedProcessedAudioStream::writeDroppableSilentSamples(int silentSamples) { + + int deviceSilentSamplesWritten = InboundAudioStream::writeDroppableSilentSamples(networkToDeviceSamples(silentSamples)); + + emit addedSilence(deviceToNetworkSamples(deviceSilentSamplesWritten) / STEREO_FACTOR); - // since numAudioSamples is used to know how many samples to add for each dropped packet before this one, - // we want to set it to the number of device audio samples since this stream contains device audio samples, not network samples. - const int STEREO_DIVIDER = 2; - numAudioSamples = numNetworkSamples * _outputFormatChannelsTimesSampleRate / (STEREO_DIVIDER * SAMPLE_RATE); - - return 0; + return deviceSilentSamplesWritten; } -int MixedProcessedAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples) { +int MixedProcessedAudioStream::writeLastFrameRepeatedWithFade(int samples) { + + int deviceSamplesWritten = InboundAudioStream::writeLastFrameRepeatedWithFade(networkToDeviceSamples(samples)); + + emit addedLastFrameRepeatedWithFade(deviceToNetworkSamples(deviceSamplesWritten) / STEREO_FACTOR); + + return deviceSamplesWritten; +} + +int MixedProcessedAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int networkSamples) { + + emit addedStereoSamples(packetAfterStreamProperties); QByteArray outputBuffer; emit processSamples(packetAfterStreamProperties, outputBuffer); - - _ringBuffer.writeData(outputBuffer.data(), outputBuffer.size()); + _ringBuffer.writeData(outputBuffer.data(), outputBuffer.size()); + return packetAfterStreamProperties.size(); } + +int MixedProcessedAudioStream::networkToDeviceSamples(int networkSamples) { + return (quint64)networkSamples * (quint64)_outputFormatChannelsTimesSampleRate / (quint64)(STEREO_FACTOR * SAMPLE_RATE); +} + +int MixedProcessedAudioStream::deviceToNetworkSamples(int deviceSamples) { + return (quint64)deviceSamples * (quint64)(STEREO_FACTOR * SAMPLE_RATE) / (quint64)_outputFormatChannelsTimesSampleRate; +} diff --git a/libraries/audio/src/MixedProcessedAudioStream.h b/libraries/audio/src/MixedProcessedAudioStream.h index 5a5b73115d..fd1f93a6a1 100644 --- a/libraries/audio/src/MixedProcessedAudioStream.h +++ b/libraries/audio/src/MixedProcessedAudioStream.h @@ -14,21 +14,32 @@ #include "InboundAudioStream.h" +class Audio; + class MixedProcessedAudioStream : public InboundAudioStream { Q_OBJECT public: - MixedProcessedAudioStream (int numFrameSamples, int numFramesCapacity, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, bool useStDevForJitterCalc); + MixedProcessedAudioStream(int numFrameSamples, int numFramesCapacity, const InboundAudioStream::Settings& settings); signals: + void addedSilence(int silentSamplesPerChannel); + void addedLastFrameRepeatedWithFade(int samplesPerChannel); + void addedStereoSamples(const QByteArray& samples); + void processSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer); public: void outputFormatChanged(int outputFormatChannelCountTimesSampleRate); protected: - int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples); - int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples); + int writeDroppableSilentSamples(int silentSamples); + int writeLastFrameRepeatedWithFade(int samples); + int parseAudioData(PacketType type, 
const QByteArray& packetAfterStreamProperties, int networkSamples); + +private: + int networkToDeviceSamples(int networkSamples); + int deviceToNetworkSamples(int deviceSamples); private: int _outputFormatChannelsTimesSampleRate; diff --git a/libraries/audio/src/PositionalAudioStream.cpp b/libraries/audio/src/PositionalAudioStream.cpp index 7b407ba62c..c8d0f66c4d 100644 --- a/libraries/audio/src/PositionalAudioStream.cpp +++ b/libraries/audio/src/PositionalAudioStream.cpp @@ -21,32 +21,41 @@ #include #include -PositionalAudioStream::PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, bool dynamicJitterBuffers, - int staticDesiredJitterBufferFrames, int maxFramesOverDesired) : +PositionalAudioStream::PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, const InboundAudioStream::Settings& settings) : InboundAudioStream(isStereo ? NETWORK_BUFFER_LENGTH_SAMPLES_STEREO : NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL, - AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY, dynamicJitterBuffers, staticDesiredJitterBufferFrames, maxFramesOverDesired), + AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY, settings), _type(type), _position(0.0f, 0.0f, 0.0f), _orientation(0.0f, 0.0f, 0.0f, 0.0f), _shouldLoopbackForNode(false), _isStereo(isStereo), _lastPopOutputTrailingLoudness(0.0f), + _lastPopOutputLoudness(0.0f), _listenerUnattenuatedZone(NULL) { + // constant defined in AudioMixer.h. However, we don't want to include this here + // we will soon find a better common home for these audio-related constants + const int SAMPLE_PHASE_DELAY_AT_90 = 20; + _filter.initialize(SAMPLE_RATE, (NETWORK_BUFFER_LENGTH_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)) / 2); } -void PositionalAudioStream::updateLastPopOutputTrailingLoudness() { - float lastPopLoudness = _ringBuffer.getFrameLoudness(_lastPopOutput); +void PositionalAudioStream::resetStats() { + _lastPopOutputTrailingLoudness = 0.0f; + _lastPopOutputLoudness = 0.0f; +} + +void PositionalAudioStream::updateLastPopOutputLoudnessAndTrailingLoudness() { + _lastPopOutputLoudness = _ringBuffer.getFrameLoudness(_lastPopOutput); const int TRAILING_AVERAGE_FRAMES = 100; const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES; const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO; const float LOUDNESS_EPSILON = 0.000001f; - if (lastPopLoudness >= _lastPopOutputTrailingLoudness) { - _lastPopOutputTrailingLoudness = lastPopLoudness; + if (_lastPopOutputLoudness >= _lastPopOutputTrailingLoudness) { + _lastPopOutputTrailingLoudness = _lastPopOutputLoudness; } else { - _lastPopOutputTrailingLoudness = (_lastPopOutputTrailingLoudness * PREVIOUS_FRAMES_RATIO) + (CURRENT_FRAME_RATIO * lastPopLoudness); + _lastPopOutputTrailingLoudness = (_lastPopOutputTrailingLoudness * PREVIOUS_FRAMES_RATIO) + (CURRENT_FRAME_RATIO * _lastPopOutputLoudness); if (_lastPopOutputTrailingLoudness < LOUDNESS_EPSILON) { _lastPopOutputTrailingLoudness = 0; diff --git a/libraries/audio/src/PositionalAudioStream.h b/libraries/audio/src/PositionalAudioStream.h index f99dc3a464..a7f59aebde 100644 --- a/libraries/audio/src/PositionalAudioStream.h +++ b/libraries/audio/src/PositionalAudioStream.h @@ -16,6 +16,8 @@ #include #include "InboundAudioStream.h" +#include "AudioFilter.h" +#include "AudioFilterBank.h" const int AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY = 100; @@ -27,13 +29,15 @@ public: Injector }; - PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, - int 
maxFramesOverDesired); + PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, const InboundAudioStream::Settings& settings); + virtual void resetStats(); + virtual AudioStreamStats getAudioStreamStats() const; - void updateLastPopOutputTrailingLoudness(); + void updateLastPopOutputLoudnessAndTrailingLoudness(); float getLastPopOutputTrailingLoudness() const { return _lastPopOutputTrailingLoudness; } + float getLastPopOutputLoudness() const { return _lastPopOutputLoudness; } bool shouldLoopbackForNode() const { return _shouldLoopbackForNode; } bool isStereo() const { return _isStereo; } @@ -44,6 +48,8 @@ public: void setListenerUnattenuatedZone(AABox* listenerUnattenuatedZone) { _listenerUnattenuatedZone = listenerUnattenuatedZone; } + AudioFilterHSF1s& getFilter() { return _filter; } + protected: // disallow copying of PositionalAudioStream objects PositionalAudioStream(const PositionalAudioStream&); @@ -60,7 +66,10 @@ protected: bool _isStereo; float _lastPopOutputTrailingLoudness; + float _lastPopOutputLoudness; AABox* _listenerUnattenuatedZone; + + AudioFilterHSF1s _filter; }; #endif // hifi_PositionalAudioStream_h diff --git a/libraries/audio/src/Sound.cpp b/libraries/audio/src/Sound.cpp index 03c9f6b8ee..f52f5c04dd 100644 --- a/libraries/audio/src/Sound.cpp +++ b/libraries/audio/src/Sound.cpp @@ -82,6 +82,17 @@ Sound::Sound(const QUrl& sampleURL, QObject* parent) : connect(soundDownload, SIGNAL(error(QNetworkReply::NetworkError)), this, SLOT(replyError(QNetworkReply::NetworkError))); } +Sound::Sound(const QByteArray byteArray, QObject* parent) : + QObject(parent), + _byteArray(byteArray), + _hasDownloaded(true) +{ +} + +void Sound::append(const QByteArray byteArray) { + _byteArray.append(byteArray); +} + void Sound::replyFinished() { QNetworkReply* reply = reinterpret_cast(sender()); diff --git a/libraries/audio/src/Sound.h b/libraries/audio/src/Sound.h index c473cdff83..7dae3679f1 100644 --- a/libraries/audio/src/Sound.h +++ b/libraries/audio/src/Sound.h @@ -22,6 +22,8 @@ class Sound : public QObject { public: Sound(const QUrl& sampleURL, QObject* parent = NULL); Sound(float volume, float frequency, float duration, float decay, QObject* parent = NULL); + Sound(const QByteArray byteArray, QObject* parent = NULL); + void append(const QByteArray byteArray); bool hasDownloaded() const { return _hasDownloaded; } diff --git a/libraries/avatars/src/AvatarData.cpp b/libraries/avatars/src/AvatarData.cpp index 039ccae4e9..17c5d6c259 100644 --- a/libraries/avatars/src/AvatarData.cpp +++ b/libraries/avatars/src/AvatarData.cpp @@ -135,9 +135,9 @@ QByteArray AvatarData::toByteArray() { // lazily allocate memory for HeadData in case we're not an Avatar instance if (!_headData) { _headData = new HeadData(this); - if (_forceFaceshiftConnected) { - _headData->_isFaceshiftConnected = true; - } + } + if (_forceFaceshiftConnected) { + _headData->_isFaceshiftConnected = true; } QByteArray avatarDataByteArray; @@ -153,7 +153,7 @@ QByteArray AvatarData::toByteArray() { destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _bodyYaw); destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _bodyPitch); destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _bodyRoll); - + // Body scale destinationBuffer += packFloatRatioToTwoByte(destinationBuffer, _targetScale); @@ -585,6 +585,101 @@ bool AvatarData::hasReferential() { return _referential != NULL; } +bool AvatarData::isPlaying() { + if (!_player) { + return false; + } + if (QThread::currentThread() != thread()) { 
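        // Not on the avatar's thread: re-invoke this method there and block until the
        // return value comes back, so script callers always see a consistent answer.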
+ bool result; + QMetaObject::invokeMethod(this, "isPlaying", Qt::BlockingQueuedConnection, + Q_RETURN_ARG(bool, result)); + return result; + } + return _player && _player->isPlaying(); +} + +qint64 AvatarData::playerElapsed() { + if (!_player) { + return 0; + } + if (QThread::currentThread() != thread()) { + qint64 result; + QMetaObject::invokeMethod(this, "playerElapsed", Qt::BlockingQueuedConnection, + Q_RETURN_ARG(qint64, result)); + return result; + } + return _player->elapsed(); +} + +qint64 AvatarData::playerLength() { + if (!_player) { + return 0; + } + if (QThread::currentThread() != thread()) { + qint64 result; + QMetaObject::invokeMethod(this, "playerLength", Qt::BlockingQueuedConnection, + Q_RETURN_ARG(qint64, result)); + return result; + } + return _player->getRecording()->getLength(); +} + +void AvatarData::loadRecording(QString filename) { + if (QThread::currentThread() != thread()) { + QMetaObject::invokeMethod(this, "loadRecording", Qt::BlockingQueuedConnection, + Q_ARG(QString, filename)); + return; + } + if (!_player) { + _player = PlayerPointer(new Player(this)); + } + + _player->loadFromFile(filename); +} + +void AvatarData::startPlaying() { + if (QThread::currentThread() != thread()) { + QMetaObject::invokeMethod(this, "startPlaying", Qt::BlockingQueuedConnection); + return; + } + if (!_player) { + _player = PlayerPointer(new Player(this)); + } + _player->startPlaying(); +} + +void AvatarData::setPlayFromCurrentLocation(bool playFromCurrentLocation) { + _player->setPlayFromCurrentLocation(playFromCurrentLocation); +} + +void AvatarData::setPlayerLoop(bool loop) { + _player->setLoop(loop); +} + +void AvatarData::play() { + if (isPlaying()) { + if (QThread::currentThread() != thread()) { + QMetaObject::invokeMethod(this, "play", Qt::BlockingQueuedConnection); + return; + } + + _player->play(); + } +} + +void AvatarData::stopPlaying() { + if (!_player) { + return; + } + if (QThread::currentThread() != thread()) { + QMetaObject::invokeMethod(this, "stopPlaying", Qt::BlockingQueuedConnection); + return; + } + if (_player) { + _player->stopPlaying(); + } +} + void AvatarData::changeReferential(Referential *ref) { delete _referential; _referential = ref; @@ -683,6 +778,44 @@ glm::quat AvatarData::getJointRotation(const QString& name) const { return getJointRotation(getJointIndex(name)); } +QVector AvatarData::getJointRotations() const { + if (QThread::currentThread() != thread()) { + QVector result; + QMetaObject::invokeMethod(const_cast(this), + "getJointRotations", Qt::BlockingQueuedConnection, + Q_RETURN_ARG(QVector, result)); + return result; + } + QVector jointRotations(_jointData.size()); + for (int i = 0; i < _jointData.size(); ++i) { + jointRotations[i] = _jointData[i].rotation; + } + return jointRotations; +} + +void AvatarData::setJointRotations(QVector jointRotations) { + if (QThread::currentThread() != thread()) { + QVector result; + QMetaObject::invokeMethod(const_cast(this), + "setJointRotations", Qt::BlockingQueuedConnection, + Q_ARG(QVector, jointRotations)); + } + if (_jointData.size() < jointRotations.size()) { + _jointData.resize(jointRotations.size()); + } + for (int i = 0; i < jointRotations.size(); ++i) { + if (i < _jointData.size()) { + setJointData(i, jointRotations[i]); + } + } +} + +void AvatarData::clearJointsData() { + for (int i = 0; i < _jointData.size(); ++i) { + clearJointData(i); + } +} + bool AvatarData::hasIdentityChangedAfterParsing(const QByteArray &packet) { QDataStream packetStream(packet); 
packetStream.skipRawData(numBytesForPacketHeader(packet)); diff --git a/libraries/avatars/src/AvatarData.h b/libraries/avatars/src/AvatarData.h index a4bb0d48bb..432b68a776 100755 --- a/libraries/avatars/src/AvatarData.h +++ b/libraries/avatars/src/AvatarData.h @@ -49,6 +49,7 @@ typedef unsigned long long quint64; #include +#include "Recorder.h" #include "Referential.h" #include "HeadData.h" #include "HandData.h" @@ -210,7 +211,12 @@ public: Q_INVOKABLE void clearJointData(const QString& name); Q_INVOKABLE bool isJointDataValid(const QString& name) const; Q_INVOKABLE glm::quat getJointRotation(const QString& name) const; - + + Q_INVOKABLE virtual QVector getJointRotations() const; + Q_INVOKABLE virtual void setJointRotations(QVector jointRotations); + + Q_INVOKABLE virtual void clearJointsData(); + /// Returns the index of the joint with the specified name, or -1 if not found/unknown. Q_INVOKABLE virtual int getJointIndex(const QString& name) const { return _jointIndices.value(name) - 1; } @@ -293,6 +299,16 @@ public slots: void setSessionUUID(const QUuid& sessionUUID) { _sessionUUID = sessionUUID; } bool hasReferential(); + bool isPlaying(); + qint64 playerElapsed(); + qint64 playerLength(); + void loadRecording(QString filename); + void startPlaying(); + void setPlayFromCurrentLocation(bool playFromCurrentLocation); + void setPlayerLoop(bool loop); + void play(); + void stopPlaying(); + protected: QUuid _sessionUUID; glm::vec3 _position; @@ -346,6 +362,8 @@ protected: QWeakPointer _owningAvatarMixer; QElapsedTimer _lastUpdateTimer; + PlayerPointer _player; + /// Loads the joint indices, names from the FST file (if any) virtual void updateJointMappings(); void changeReferential(Referential* ref); diff --git a/libraries/avatars/src/HeadData.h b/libraries/avatars/src/HeadData.h index 782386c649..7e27365387 100644 --- a/libraries/avatars/src/HeadData.h +++ b/libraries/avatars/src/HeadData.h @@ -41,6 +41,10 @@ public: void setBasePitch(float pitch) { _basePitch = glm::clamp(pitch, MIN_HEAD_PITCH, MAX_HEAD_PITCH); } float getBaseRoll() const { return _baseRoll; } void setBaseRoll(float roll) { _baseRoll = glm::clamp(roll, MIN_HEAD_ROLL, MAX_HEAD_ROLL); } + + virtual void setFinalYaw(float finalYaw) { _baseYaw = finalYaw; } + virtual void setFinalPitch(float finalPitch) { _basePitch = finalPitch; } + virtual void setFinalRoll(float finalRoll) { _baseRoll = finalRoll; } virtual float getFinalYaw() const { return _baseYaw; } virtual float getFinalPitch() const { return _basePitch; } virtual float getFinalRoll() const { return _baseRoll; } @@ -56,6 +60,7 @@ public: void setBlendshape(QString name, float val); const QVector& getBlendshapeCoefficients() const { return _blendshapeCoefficients; } + void setBlendshapeCoefficients(const QVector& blendshapeCoefficients) { _blendshapeCoefficients = blendshapeCoefficients; } float getPupilDilation() const { return _pupilDilation; } void setPupilDilation(float pupilDilation) { _pupilDilation = pupilDilation; } @@ -68,6 +73,15 @@ public: const glm::vec3& getLookAtPosition() const { return _lookAtPosition; } void setLookAtPosition(const glm::vec3& lookAtPosition) { _lookAtPosition = lookAtPosition; } + + float getLeanSideways() const { return _leanSideways; } + float getLeanForward() const { return _leanForward; } + virtual float getFinalLeanSideways() const { return _leanSideways; } + virtual float getFinalLeanForward() const { return _leanForward; } + + void setLeanSideways(float leanSideways) { _leanSideways = leanSideways; } + void setLeanForward(float 
leanForward) { _leanForward = leanForward; } + friend class AvatarData; protected: diff --git a/libraries/avatars/src/Recorder.cpp b/libraries/avatars/src/Recorder.cpp new file mode 100644 index 0000000000..69cde6560e --- /dev/null +++ b/libraries/avatars/src/Recorder.cpp @@ -0,0 +1,610 @@ +// +// Recorder.cpp +// +// +// Created by Clement on 8/7/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. +// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +#include + +#include +#include +#include + +#include "AvatarData.h" +#include "Recorder.h" + +void RecordingFrame::setBlendshapeCoefficients(QVector blendshapeCoefficients) { + _blendshapeCoefficients = blendshapeCoefficients; +} + +void RecordingFrame::setJointRotations(QVector jointRotations) { + _jointRotations = jointRotations; +} + +void RecordingFrame::setTranslation(glm::vec3 translation) { + _translation = translation; +} + +void RecordingFrame::setRotation(glm::quat rotation) { + _rotation = rotation; +} + +void RecordingFrame::setScale(float scale) { + _scale = scale; +} + +void RecordingFrame::setHeadRotation(glm::quat headRotation) { + _headRotation = headRotation; +} + +void RecordingFrame::setLeanSideways(float leanSideways) { + _leanSideways = leanSideways; +} + +void RecordingFrame::setLeanForward(float leanForward) { + _leanForward = leanForward; +} + +Recording::Recording() : _audio(NULL) { +} + +Recording::~Recording() { + delete _audio; +} + +void Recording::addFrame(int timestamp, RecordingFrame &frame) { + _timestamps << timestamp; + _frames << frame; +} + +void Recording::addAudioPacket(QByteArray byteArray) { + if (!_audio) { + _audio = new Sound(byteArray); + } + _audio->append(byteArray); +} + +void Recording::clear() { + _timestamps.clear(); + _frames.clear(); + delete _audio; + _audio = NULL; +} + +Recorder::Recorder(AvatarData* avatar) : + _recording(new Recording()), + _avatar(avatar) +{ + _timer.invalidate(); +} + +bool Recorder::isRecording() const { + return _timer.isValid(); +} + +qint64 Recorder::elapsed() const { + if (isRecording()) { + return _timer.elapsed(); + } else { + return 0; + } +} + +void Recorder::startRecording() { + qDebug() << "Recorder::startRecording()"; + _recording->clear(); + _timer.start(); + + RecordingFrame frame; + frame.setBlendshapeCoefficients(_avatar->getHeadData()->getBlendshapeCoefficients()); + frame.setJointRotations(_avatar->getJointRotations()); + frame.setTranslation(_avatar->getPosition()); + frame.setRotation(_avatar->getOrientation()); + frame.setScale(_avatar->getTargetScale()); + + const HeadData* head = _avatar->getHeadData(); + glm::quat rotation = glm::quat(glm::radians(glm::vec3(head->getFinalPitch(), + head->getFinalYaw(), + head->getFinalRoll()))); + frame.setHeadRotation(rotation); + frame.setLeanForward(_avatar->getHeadData()->getLeanForward()); + frame.setLeanSideways(_avatar->getHeadData()->getLeanSideways()); + + _recording->addFrame(0, frame); +} + +void Recorder::stopRecording() { + qDebug() << "Recorder::stopRecording()"; + _timer.invalidate(); + + qDebug().nospace() << "Recorded " << _recording->getFrameNumber() << " during " << _recording->getLength() << " msec (" << _recording->getFrameNumber() / (_recording->getLength() / 1000.0f) << " fps)"; +} + +void Recorder::saveToFile(QString file) { + if (_recording->isEmpty()) { + qDebug() << "Cannot save recording to file, recording is empty."; + } + + writeRecordingToFile(_recording, file); +} + +void 
Recorder::record() { + if (isRecording()) { + const RecordingFrame& referenceFrame = _recording->getFrame(0); + RecordingFrame frame; + frame.setBlendshapeCoefficients(_avatar->getHeadData()->getBlendshapeCoefficients()); + frame.setJointRotations(_avatar->getJointRotations()); + frame.setTranslation(_avatar->getPosition() - referenceFrame.getTranslation()); + frame.setRotation(glm::inverse(referenceFrame.getRotation()) * _avatar->getOrientation()); + frame.setScale(_avatar->getTargetScale() / referenceFrame.getScale()); + + + const HeadData* head = _avatar->getHeadData(); + glm::quat rotation = glm::quat(glm::radians(glm::vec3(head->getFinalPitch(), + head->getFinalYaw(), + head->getFinalRoll()))); + frame.setHeadRotation(rotation); + frame.setLeanForward(_avatar->getHeadData()->getLeanForward()); + frame.setLeanSideways(_avatar->getHeadData()->getLeanSideways()); + + _recording->addFrame(_timer.elapsed(), frame); + } +} + +void Recorder::record(char* samples, int size) { + QByteArray byteArray(samples, size); + _recording->addAudioPacket(byteArray); +} + + +Player::Player(AvatarData* avatar) : + _recording(new Recording()), + _avatar(avatar), + _audioThread(NULL), + _startingScale(1.0f), + _playFromCurrentPosition(true), + _loop(false) +{ + _timer.invalidate(); + _options.setLoop(false); + _options.setVolume(1.0f); +} + +bool Player::isPlaying() const { + return _timer.isValid(); +} + +qint64 Player::elapsed() const { + if (isPlaying()) { + return _timer.elapsed(); + } else { + return 0; + } +} + +glm::quat Player::getHeadRotation() { + if (!computeCurrentFrame()) { + qWarning() << "Incorrect use of Player::getHeadRotation()"; + return glm::quat(); + } + + if (_currentFrame == 0) { + return _recording->getFrame(_currentFrame).getHeadRotation(); + } + return _recording->getFrame(0).getHeadRotation() * + _recording->getFrame(_currentFrame).getHeadRotation(); +} + +float Player::getLeanSideways() { + if (!computeCurrentFrame()) { + qWarning() << "Incorrect use of Player::getLeanSideways()"; + return 0.0f; + } + + return _recording->getFrame(_currentFrame).getLeanSideways(); +} + +float Player::getLeanForward() { + if (!computeCurrentFrame()) { + qWarning() << "Incorrect use of Player::getLeanForward()"; + return 0.0f; + } + + return _recording->getFrame(_currentFrame).getLeanForward(); +} + +void Player::startPlaying() { + if (_recording && _recording->getFrameNumber() > 0) { + qDebug() << "Recorder::startPlaying()"; + _currentFrame = 0; + + // Setup audio thread + _audioThread = new QThread(); + _options.setPosition(_avatar->getPosition()); + _options.setOrientation(_avatar->getOrientation()); + _injector.reset(new AudioInjector(_recording->getAudio(), _options), &QObject::deleteLater); + _injector->moveToThread(_audioThread); + _audioThread->start(); + QMetaObject::invokeMethod(_injector.data(), "injectAudio", Qt::QueuedConnection); + + // Fake faceshift connection + _avatar->setForceFaceshiftConnected(true); + + if (_playFromCurrentPosition) { + _startingPosition = _avatar->getPosition(); + _startingRotation = _avatar->getOrientation(); + _startingScale = _avatar->getTargetScale(); + } else { + _startingPosition = _recording->getFrame(0).getTranslation(); + _startingRotation = _recording->getFrame(0).getRotation(); + _startingScale = _recording->getFrame(0).getScale(); + } + + _timer.start(); + } +} + +void Player::stopPlaying() { + if (!isPlaying()) { + return; + } + + _timer.invalidate(); + + _avatar->clearJointsData(); + + // Cleanup audio thread + _injector->stop(); + 
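    // Tear-down is chained through signals: once the injector finishes it deletes itself,
    // its destruction quits the audio thread, and the finished thread deletes itself, so
    // nothing is freed while audio is still being injected.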
QObject::connect(_injector.data(), &AudioInjector::finished, + _injector.data(), &AudioInjector::deleteLater); + QObject::connect(_injector.data(), &AudioInjector::destroyed, + _audioThread, &QThread::quit); + QObject::connect(_audioThread, &QThread::finished, + _audioThread, &QThread::deleteLater); + _injector.clear(); + _audioThread = NULL; + + // Turn off fake faceshift connection + _avatar->setForceFaceshiftConnected(false); + + qDebug() << "Recorder::stopPlaying()"; +} + +void Player::loadFromFile(QString file) { + if (_recording) { + _recording->clear(); + } else { + _recording = RecordingPointer(new Recording()); + } + readRecordingFromFile(_recording, file); +} + +void Player::loadRecording(RecordingPointer recording) { + _recording = recording; +} + +void Player::play() { + computeCurrentFrame(); + if (_currentFrame < 0 || (_currentFrame >= _recording->getFrameNumber() - 1)) { + // If it's the end of the recording, stop playing + stopPlaying(); + + if (_loop) { + startPlaying(); + } + return; + } + + if (_currentFrame == 0) { + // Don't play frame 0 + // only meant to store absolute values + return; + } + + _avatar->setPosition(_startingPosition + + glm::inverse(_recording->getFrame(0).getRotation()) * _startingRotation * + _recording->getFrame(_currentFrame).getTranslation()); + _avatar->setOrientation(_startingRotation * + _recording->getFrame(_currentFrame).getRotation()); + _avatar->setTargetScale(_startingScale * + _recording->getFrame(_currentFrame).getScale()); + _avatar->setJointRotations(_recording->getFrame(_currentFrame).getJointRotations()); + + HeadData* head = const_cast(_avatar->getHeadData()); + if (head) { + head->setBlendshapeCoefficients(_recording->getFrame(_currentFrame).getBlendshapeCoefficients()); + head->setLeanSideways(_recording->getFrame(_currentFrame).getLeanSideways()); + head->setLeanForward(_recording->getFrame(_currentFrame).getLeanForward()); + glm::vec3 eulers = glm::degrees(safeEulerAngles(_recording->getFrame(_currentFrame).getHeadRotation())); + head->setFinalPitch(eulers.x); + head->setFinalYaw(eulers.y); + head->setFinalRoll(eulers.z); + } + + _options.setPosition(_avatar->getPosition()); + _options.setOrientation(_avatar->getOrientation()); + _injector->setOptions(_options); +} + +void Player::setPlayFromCurrentLocation(bool playFromCurrentLocation) { + _playFromCurrentPosition = playFromCurrentLocation; +} + +void Player::setLoop(bool loop) { + _loop = loop; +} + +bool Player::computeCurrentFrame() { + if (!isPlaying()) { + _currentFrame = -1; + return false; + } + if (_currentFrame < 0) { + _currentFrame = 0; + } + + while (_currentFrame < _recording->getFrameNumber() - 1 && + _recording->getFrameTimestamp(_currentFrame) < _timer.elapsed()) { + ++_currentFrame; + } + + return true; +} + +void writeRecordingToFile(RecordingPointer recording, QString filename) { + if (!recording || recording->getFrameNumber() < 1) { + qDebug() << "Can't save empty recording"; + return; + } + + qDebug() << "Writing recording to " << filename << "."; + QElapsedTimer timer; + QFile file(filename); + if (!file.open(QIODevice::WriteOnly)){ + return; + } + timer.start(); + + + QDataStream fileStream(&file); + + fileStream << recording->_timestamps; + + RecordingFrame& baseFrame = recording->_frames[0]; + int totalLength = 0; + + // Blendshape coefficients + fileStream << baseFrame._blendshapeCoefficients; + totalLength += baseFrame._blendshapeCoefficients.size(); + + // Joint Rotations + int jointRotationSize = baseFrame._jointRotations.size(); + fileStream << 
jointRotationSize; + for (int i = 0; i < jointRotationSize; ++i) { + fileStream << baseFrame._jointRotations[i].x << baseFrame._jointRotations[i].y << baseFrame._jointRotations[i].z << baseFrame._jointRotations[i].w; + } + totalLength += jointRotationSize; + + // Translation + fileStream << baseFrame._translation.x << baseFrame._translation.y << baseFrame._translation.z; + totalLength += 1; + + // Rotation + fileStream << baseFrame._rotation.x << baseFrame._rotation.y << baseFrame._rotation.z << baseFrame._rotation.w; + totalLength += 1; + + // Scale + fileStream << baseFrame._scale; + totalLength += 1; + + // Head Rotation + fileStream << baseFrame._headRotation.x << baseFrame._headRotation.y << baseFrame._headRotation.z << baseFrame._headRotation.w; + totalLength += 1; + + // Lean Sideways + fileStream << baseFrame._leanSideways; + totalLength += 1; + + // Lean Forward + fileStream << baseFrame._leanForward; + totalLength += 1; + + for (int i = 1; i < recording->_timestamps.size(); ++i) { + QBitArray mask(totalLength); + int maskIndex = 0; + QByteArray buffer; + QDataStream stream(&buffer, QIODevice::WriteOnly); + RecordingFrame& previousFrame = recording->_frames[i - 1]; + RecordingFrame& frame = recording->_frames[i]; + + // Blendshape coefficients + for (int i = 0; i < frame._blendshapeCoefficients.size(); ++i) { + if (frame._blendshapeCoefficients[i] != previousFrame._blendshapeCoefficients[i]) { + stream << frame._blendshapeCoefficients[i]; + mask.setBit(maskIndex); + } + maskIndex++; + } + + // Joint Rotations + for (int i = 0; i < frame._jointRotations.size(); ++i) { + if (frame._jointRotations[i] != previousFrame._jointRotations[i]) { + stream << frame._jointRotations[i].x << frame._jointRotations[i].y << frame._jointRotations[i].z << frame._jointRotations[i].w; + mask.setBit(maskIndex); + } + maskIndex++; + } + + // Translation + if (frame._translation != previousFrame._translation) { + stream << frame._translation.x << frame._translation.y << frame._translation.z; + mask.setBit(maskIndex); + } + maskIndex++; + + // Rotation + if (frame._rotation != previousFrame._rotation) { + stream << frame._rotation.x << frame._rotation.y << frame._rotation.z << frame._rotation.w; + mask.setBit(maskIndex); + } + maskIndex++; + + // Scale + if (frame._scale != previousFrame._scale) { + stream << frame._scale; + mask.setBit(maskIndex); + } + maskIndex++; + + // Head Rotation + if (frame._headRotation != previousFrame._headRotation) { + stream << frame._headRotation.x << frame._headRotation.y << frame._headRotation.z << frame._headRotation.w; + mask.setBit(maskIndex); + } + maskIndex++; + + // Lean Sideways + if (frame._leanSideways != previousFrame._leanSideways) { + stream << frame._leanSideways; + mask.setBit(maskIndex); + } + maskIndex++; + + // Lean Forward + if (frame._leanForward != previousFrame._leanForward) { + stream << frame._leanForward; + mask.setBit(maskIndex); + } + maskIndex++; + + fileStream << mask; + fileStream << buffer; + } + + fileStream << recording->_audio->getByteArray(); + + qDebug() << "Wrote " << file.size() << " bytes in " << timer.elapsed() << " ms."; +} + +RecordingPointer readRecordingFromFile(RecordingPointer recording, QString filename) { + qDebug() << "Reading recording from " << filename << "."; + if (!recording) { + recording.reset(new Recording()); + } + + QElapsedTimer timer; + QFile file(filename); + if (!file.open(QIODevice::ReadOnly)){ + return recording; + } + timer.start(); + QDataStream fileStream(&file); + + fileStream >> recording->_timestamps; + 
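    // File layout (mirrors writeRecordingToFile above): the timestamp list, then every field
    // of the base frame, then for each later frame a QBitArray change mask followed by a
    // QByteArray holding only the fields whose bit is set, and finally the recording's audio bytes.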
RecordingFrame baseFrame; + + // Blendshape coefficients + fileStream >> baseFrame._blendshapeCoefficients; + + // Joint Rotations + int jointRotationSize; + fileStream >> jointRotationSize; + baseFrame._jointRotations.resize(jointRotationSize); + for (int i = 0; i < jointRotationSize; ++i) { + fileStream >> baseFrame._jointRotations[i].x >> baseFrame._jointRotations[i].y >> baseFrame._jointRotations[i].z >> baseFrame._jointRotations[i].w; + } + + fileStream >> baseFrame._translation.x >> baseFrame._translation.y >> baseFrame._translation.z; + fileStream >> baseFrame._rotation.x >> baseFrame._rotation.y >> baseFrame._rotation.z >> baseFrame._rotation.w; + fileStream >> baseFrame._scale; + fileStream >> baseFrame._headRotation.x >> baseFrame._headRotation.y >> baseFrame._headRotation.z >> baseFrame._headRotation.w; + fileStream >> baseFrame._leanSideways; + fileStream >> baseFrame._leanForward; + + recording->_frames << baseFrame; + + for (int i = 1; i < recording->_timestamps.size(); ++i) { + QBitArray mask; + QByteArray buffer; + QDataStream stream(&buffer, QIODevice::ReadOnly); + RecordingFrame frame; + RecordingFrame& previousFrame = recording->_frames.last(); + + fileStream >> mask; + fileStream >> buffer; + int maskIndex = 0; + + // Blendshape Coefficients + frame._blendshapeCoefficients.resize(baseFrame._blendshapeCoefficients.size()); + for (int i = 0; i < baseFrame._blendshapeCoefficients.size(); ++i) { + if (mask[maskIndex++]) { + stream >> frame._blendshapeCoefficients[i]; + } else { + frame._blendshapeCoefficients[i] = previousFrame._blendshapeCoefficients[i]; + } + } + + // Joint Rotations + frame._jointRotations.resize(baseFrame._jointRotations.size()); + for (int i = 0; i < baseFrame._jointRotations.size(); ++i) { + if (mask[maskIndex++]) { + stream >> frame._jointRotations[i].x >> frame._jointRotations[i].y >> frame._jointRotations[i].z >> frame._jointRotations[i].w; + } else { + frame._jointRotations[i] = previousFrame._jointRotations[i]; + } + } + + if (mask[maskIndex++]) { + stream >> frame._translation.x >> frame._translation.y >> frame._translation.z; + } else { + frame._translation = previousFrame._translation; + } + + if (mask[maskIndex++]) { + stream >> frame._rotation.x >> frame._rotation.y >> frame._rotation.z >> frame._rotation.w; + } else { + frame._rotation = previousFrame._rotation; + } + + if (mask[maskIndex++]) { + stream >> frame._scale; + } else { + frame._scale = previousFrame._scale; + } + + if (mask[maskIndex++]) { + stream >> frame._headRotation.x >> frame._headRotation.y >> frame._headRotation.z >> frame._headRotation.w; + } else { + frame._headRotation = previousFrame._headRotation; + } + + if (mask[maskIndex++]) { + stream >> frame._leanSideways; + } else { + frame._leanSideways = previousFrame._leanSideways; + } + + if (mask[maskIndex++]) { + stream >> frame._leanForward; + } else { + frame._leanForward = previousFrame._leanForward; + } + + recording->_frames << frame; + } + + QByteArray audioArray; + fileStream >> audioArray; + recording->addAudioPacket(audioArray); + + + qDebug() << "Read " << file.size() << " bytes in " << timer.elapsed() << " ms."; + return recording; +} + + diff --git a/libraries/avatars/src/Recorder.h b/libraries/avatars/src/Recorder.h new file mode 100644 index 0000000000..1f41672749 --- /dev/null +++ b/libraries/avatars/src/Recorder.h @@ -0,0 +1,183 @@ +// +// Recorder.h +// +// +// Created by Clement on 8/7/14. +// Copyright 2014 High Fidelity, Inc. +// +// Distributed under the Apache License, Version 2.0. 
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html +// + +#ifndef hifi_Recorder_h +#define hifi_Recorder_h + +#include +#include +#include +#include +#include +#include + +#include +#include + +#include +#include +#include + +class AvatarData; +class Recorder; +class Recording; +class Player; + +typedef QSharedPointer RecordingPointer; +typedef QSharedPointer RecorderPointer; +typedef QWeakPointer WeakRecorderPointer; +typedef QSharedPointer PlayerPointer; +typedef QWeakPointer WeakPlayerPointer; + +/// Stores the different values associated to one recording frame +class RecordingFrame { +public: + QVector getBlendshapeCoefficients() const { return _blendshapeCoefficients; } + QVector getJointRotations() const { return _jointRotations; } + glm::vec3 getTranslation() const { return _translation; } + glm::quat getRotation() const { return _rotation; } + float getScale() const { return _scale; } + glm::quat getHeadRotation() const { return _headRotation; } + float getLeanSideways() const { return _leanSideways; } + float getLeanForward() const { return _leanForward; } + +protected: + void setBlendshapeCoefficients(QVector blendshapeCoefficients); + void setJointRotations(QVector jointRotations); + void setTranslation(glm::vec3 translation); + void setRotation(glm::quat rotation); + void setScale(float scale); + void setHeadRotation(glm::quat headRotation); + void setLeanSideways(float leanSideways); + void setLeanForward(float leanForward); + +private: + QVector _blendshapeCoefficients; + QVector _jointRotations; + glm::vec3 _translation; + glm::quat _rotation; + float _scale; + glm::quat _headRotation; + float _leanSideways; + float _leanForward; + + friend class Recorder; + friend void writeRecordingToFile(RecordingPointer recording, QString file); + friend RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file); +}; + +/// Stores a recording +class Recording { +public: + Recording(); + ~Recording(); + + bool isEmpty() const { return _timestamps.isEmpty(); } + int getLength() const { return _timestamps.last(); } // in ms + + int getFrameNumber() const { return _frames.size(); } + qint32 getFrameTimestamp(int i) const { return _timestamps[i]; } + const RecordingFrame& getFrame(int i) const { return _frames[i]; } + Sound* getAudio() const { return _audio; } + +protected: + void addFrame(int timestamp, RecordingFrame& frame); + void addAudioPacket(QByteArray byteArray); + void clear(); + +private: + QVector _timestamps; + QVector _frames; + + bool _stereo; + Sound* _audio; + + friend class Recorder; + friend class Player; + friend void writeRecordingToFile(RecordingPointer recording, QString file); + friend RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file); +}; + +/// Records a recording +class Recorder { +public: + Recorder(AvatarData* avatar); + + bool isRecording() const; + qint64 elapsed() const; + + RecordingPointer getRecording() const { return _recording; } + +public slots: + void startRecording(); + void stopRecording(); + void saveToFile(QString file); + void record(); + void record(char* samples, int size); + +private: + QElapsedTimer _timer; + RecordingPointer _recording; + + AvatarData* _avatar; +}; + +/// Plays back a recording +class Player { +public: + Player(AvatarData* avatar); + + bool isPlaying() const; + qint64 elapsed() const; + + RecordingPointer getRecording() const { return _recording; } + + // Those should only be called if isPlaying() returns true + glm::quat 
getHeadRotation(); + float getLeanSideways(); + float getLeanForward(); + + +public slots: + void startPlaying(); + void stopPlaying(); + void loadFromFile(QString file); + void loadRecording(RecordingPointer recording); + void play(); + + void setPlayFromCurrentLocation(bool playFromCurrentLocation); + void setLoop(bool loop); + +private: + bool computeCurrentFrame(); + + QElapsedTimer _timer; + RecordingPointer _recording; + int _currentFrame; + + QSharedPointer _injector; + AudioInjectorOptions _options; + + AvatarData* _avatar; + QThread* _audioThread; + + glm::vec3 _startingPosition; + glm::quat _startingRotation; + float _startingScale; + + bool _playFromCurrentPosition; + bool _loop; +}; + +void writeRecordingToFile(RecordingPointer recording, QString file); +RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file); + +#endif // hifi_Recorder_h \ No newline at end of file diff --git a/libraries/fbx/src/FBXReader.cpp b/libraries/fbx/src/FBXReader.cpp index 1a152dc217..d8b52fb794 100644 --- a/libraries/fbx/src/FBXReader.cpp +++ b/libraries/fbx/src/FBXReader.cpp @@ -1503,7 +1503,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping) joint.inverseBindRotation = joint.inverseDefaultRotation; joint.name = model.name; joint.shapePosition = glm::vec3(0.f); - joint.shapeType = Shape::UNKNOWN_SHAPE; + joint.shapeType = UNKNOWN_SHAPE; geometry.joints.append(joint); geometry.jointIndices.insert(model.name, geometry.joints.size()); @@ -1848,10 +1848,10 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping) if (collideLikeCapsule) { joint.shapeRotation = rotationBetween(defaultCapsuleAxis, jointShapeInfo.boneBegin); joint.shapePosition = 0.5f * jointShapeInfo.boneBegin; - joint.shapeType = Shape::CAPSULE_SHAPE; + joint.shapeType = CAPSULE_SHAPE; } else { // collide the joint like a sphere - joint.shapeType = Shape::SPHERE_SHAPE; + joint.shapeType = SPHERE_SHAPE; if (jointShapeInfo.numVertices > 0) { jointShapeInfo.averageVertex /= (float)jointShapeInfo.numVertices; joint.shapePosition = jointShapeInfo.averageVertex; @@ -1872,7 +1872,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping) // The shape is further from both joint endpoints than the endpoints are from each other // which probably means the model has a bad transform somewhere. We disable this shape // by setting its type to UNKNOWN_SHAPE. 
- joint.shapeType = Shape::UNKNOWN_SHAPE; + joint.shapeType = UNKNOWN_SHAPE; } } } diff --git a/libraries/metavoxels/src/AttributeRegistry.cpp b/libraries/metavoxels/src/AttributeRegistry.cpp index 1e30aee576..425bf8ff4a 100644 --- a/libraries/metavoxels/src/AttributeRegistry.cpp +++ b/libraries/metavoxels/src/AttributeRegistry.cpp @@ -23,8 +23,10 @@ REGISTER_META_OBJECT(QRgbAttribute) REGISTER_META_OBJECT(PackedNormalAttribute) REGISTER_META_OBJECT(SpannerQRgbAttribute) REGISTER_META_OBJECT(SpannerPackedNormalAttribute) +REGISTER_META_OBJECT(HeightfieldTexture) REGISTER_META_OBJECT(HeightfieldAttribute) REGISTER_META_OBJECT(HeightfieldColorAttribute) +REGISTER_META_OBJECT(HeightfieldTextureAttribute) REGISTER_META_OBJECT(SharedObjectAttribute) REGISTER_META_OBJECT(SharedObjectSetAttribute) REGISTER_META_OBJECT(SpannerSetAttribute) @@ -49,7 +51,8 @@ AttributeRegistry::AttributeRegistry() : _spannerNormalAttribute(registerAttribute(new SpannerPackedNormalAttribute("spannerNormal"))), _spannerMaskAttribute(registerAttribute(new FloatAttribute("spannerMask"))), _heightfieldAttribute(registerAttribute(new HeightfieldAttribute("heightfield"))), - _heightfieldColorAttribute(registerAttribute(new HeightfieldColorAttribute("heightfieldColor"))) { + _heightfieldColorAttribute(registerAttribute(new HeightfieldColorAttribute("heightfieldColor"))), + _heightfieldTextureAttribute(registerAttribute(new HeightfieldTextureAttribute("heightfieldTexture"))) { // our baseline LOD threshold is for voxels; spanners and heightfields are a different story const float SPANNER_LOD_THRESHOLD_MULTIPLIER = 8.0f; @@ -58,6 +61,7 @@ AttributeRegistry::AttributeRegistry() : const float HEIGHTFIELD_LOD_THRESHOLD_MULTIPLIER = 32.0f; _heightfieldAttribute->setLODThresholdMultiplier(HEIGHTFIELD_LOD_THRESHOLD_MULTIPLIER); _heightfieldColorAttribute->setLODThresholdMultiplier(HEIGHTFIELD_LOD_THRESHOLD_MULTIPLIER); + _heightfieldTextureAttribute->setLODThresholdMultiplier(HEIGHTFIELD_LOD_THRESHOLD_MULTIPLIER); } static QScriptValue qDebugFunction(QScriptContext* context, QScriptEngine* engine) { @@ -204,6 +208,16 @@ Attribute::Attribute(const QString& name) : Attribute::~Attribute() { } +void Attribute::readSubdivided(MetavoxelStreamState& state, void*& value, + const MetavoxelStreamState& ancestorState, void* ancestorValue, bool isLeaf) const { + read(state.base.stream, value, isLeaf); +} + +void Attribute::writeSubdivided(MetavoxelStreamState& state, void* value, + const MetavoxelStreamState& ancestorState, void* ancestorValue, bool isLeaf) const { + write(state.base.stream, value, isLeaf); +} + MetavoxelNode* Attribute::createMetavoxelNode(const AttributeValue& value, const MetavoxelNode* original) const { return new MetavoxelNode(value); } @@ -260,9 +274,7 @@ MetavoxelNode* Attribute::expandMetavoxelRoot(const MetavoxelNode& root) { MetavoxelNode* newGrandchild = new MetavoxelNode(attribute); newChild->setChild((index + j) % MetavoxelNode::CHILD_COUNT, newGrandchild); } - newChild->mergeChildren(attribute); } - newParent->mergeChildren(attribute); return newParent; } @@ -489,20 +501,19 @@ HeightfieldData::HeightfieldData(const QByteArray& contents) : _contents(contents) { } -HeightfieldData::HeightfieldData(Bitstream& in, int bytes, bool color) { - read(in, bytes, color); +HeightfieldData::~HeightfieldData() { } enum HeightfieldImage { NULL_HEIGHTFIELD_IMAGE, NORMAL_HEIGHTFIELD_IMAGE, DEFLATED_HEIGHTFIELD_IMAGE }; -static QByteArray encodeHeightfieldImage(const QImage& image) { +static QByteArray 
encodeHeightfieldImage(const QImage& image, bool lossless = false) { if (image.isNull()) { return QByteArray(1, NULL_HEIGHTFIELD_IMAGE); } QBuffer buffer; buffer.open(QIODevice::WriteOnly); const int JPEG_ENCODE_THRESHOLD = 16; - if (image.width() >= JPEG_ENCODE_THRESHOLD && image.height() >= JPEG_ENCODE_THRESHOLD) { + if (image.width() >= JPEG_ENCODE_THRESHOLD && image.height() >= JPEG_ENCODE_THRESHOLD && !lossless) { qint32 offsetX = image.offset().x(), offsetY = image.offset().y(); buffer.write((char*)&offsetX, sizeof(qint32)); buffer.write((char*)&offsetY, sizeof(qint32)); @@ -536,65 +547,93 @@ const QImage decodeHeightfieldImage(const QByteArray& data) { } } -HeightfieldData::HeightfieldData(Bitstream& in, int bytes, const HeightfieldDataPointer& reference, bool color) { +HeightfieldHeightData::HeightfieldHeightData(const QByteArray& contents) : + HeightfieldData(contents) { +} + +HeightfieldHeightData::HeightfieldHeightData(Bitstream& in, int bytes) { + read(in, bytes); +} + +HeightfieldHeightData::HeightfieldHeightData(Bitstream& in, int bytes, const HeightfieldHeightDataPointer& reference) { if (!reference) { - read(in, bytes, color); + read(in, bytes); return; } - QMutexLocker locker(&reference->_encodedDeltaMutex); - reference->_encodedDelta = in.readAligned(bytes); - reference->_deltaData = this; - _contents = reference->_contents; - QImage image = decodeHeightfieldImage(reference->_encodedDelta); + QMutexLocker locker(&reference->getEncodedDeltaMutex()); + reference->setEncodedDelta(in.readAligned(bytes)); + reference->setDeltaData(HeightfieldDataPointer(this)); + _contents = reference->getContents(); + QImage image = decodeHeightfieldImage(reference->getEncodedDelta()); if (image.isNull()) { return; } QPoint offset = image.offset(); image = image.convertToFormat(QImage::Format_RGB888); if (offset.x() == 0) { - set(image, color); + set(image); return; } int minX = offset.x() - 1; int minY = offset.y() - 1; - if (color) { - int size = glm::sqrt(_contents.size() / (float)COLOR_BYTES); - char* dest = _contents.data() + (minY * size + minX) * COLOR_BYTES; - int destStride = size * COLOR_BYTES; - int srcStride = image.width() * COLOR_BYTES; - for (int y = 0; y < image.height(); y++) { - memcpy(dest, image.constScanLine(y), srcStride); - dest += destStride; + int size = glm::sqrt((float)_contents.size()); + char* lineDest = _contents.data() + minY * size + minX; + for (int y = 0; y < image.height(); y++) { + const uchar* src = image.constScanLine(y); + for (char* dest = lineDest, *end = dest + image.width(); dest != end; dest++, src += COLOR_BYTES) { + *dest = *src; } - } else { - int size = glm::sqrt((float)_contents.size()); - char* lineDest = _contents.data() + minY * size + minX; - for (int y = 0; y < image.height(); y++) { - const uchar* src = image.constScanLine(y); - for (char* dest = lineDest, *end = dest + image.width(); dest != end; dest++, src += COLOR_BYTES) { - *dest = *src; - } - lineDest += size; + lineDest += size; + } +} + +HeightfieldHeightData::HeightfieldHeightData(Bitstream& in, int bytes, const HeightfieldHeightDataPointer& ancestor, + const glm::vec3& minimum, float size) { + QMutexLocker locker(&_encodedSubdivisionsMutex); + int index = (int)glm::round(glm::log(size) / glm::log(0.5f)) - 1; + if (_encodedSubdivisions.size() <= index) { + _encodedSubdivisions.resize(index + 1); + } + EncodedSubdivision& subdivision = _encodedSubdivisions[index]; + subdivision.data = in.readAligned(bytes); + subdivision.ancestor = ancestor; + QImage image = 
decodeHeightfieldImage(subdivision.data); + if (image.isNull()) { + return; + } + image = image.convertToFormat(QImage::Format_RGB888); + int destSize = image.width(); + const uchar* src = image.constBits(); + const QByteArray& ancestorContents = ancestor->getContents(); + + int ancestorSize = glm::sqrt((float)ancestorContents.size()); + float ancestorY = minimum.z * ancestorSize; + float ancestorIncrement = size * ancestorSize / destSize; + + _contents = QByteArray(destSize * destSize, 0); + char* dest = _contents.data(); + + for (int y = 0; y < destSize; y++, ancestorY += ancestorIncrement) { + const uchar* lineRef = (const uchar*)ancestorContents.constData() + (int)ancestorY * ancestorSize; + float ancestorX = minimum.x * ancestorSize; + for (char* end = dest + destSize; dest != end; src += COLOR_BYTES, ancestorX += ancestorIncrement) { + const uchar* ref = lineRef + (int)ancestorX; + *dest++ = *ref++ + *src; } } } -void HeightfieldData::write(Bitstream& out, bool color) { +void HeightfieldHeightData::write(Bitstream& out) { QMutexLocker locker(&_encodedMutex); if (_encoded.isEmpty()) { QImage image; - if (color) { - int size = glm::sqrt(_contents.size() / (float)COLOR_BYTES); - image = QImage((uchar*)_contents.data(), size, size, QImage::Format_RGB888); - } else { - int size = glm::sqrt((float)_contents.size()); - image = QImage(size, size, QImage::Format_RGB888); - uchar* dest = image.bits(); - for (const char* src = _contents.constData(), *end = src + _contents.size(); src != end; src++) { - *dest++ = *src; - *dest++ = *src; - *dest++ = *src; - } + int size = glm::sqrt((float)_contents.size()); + image = QImage(size, size, QImage::Format_RGB888); + uchar* dest = image.bits(); + for (const char* src = _contents.constData(), *end = src + _contents.size(); src != end; src++) { + *dest++ = *src; + *dest++ = *src; + *dest++ = *src; } _encoded = encodeHeightfieldImage(image); } @@ -602,114 +641,427 @@ void HeightfieldData::write(Bitstream& out, bool color) { out.writeAligned(_encoded); } -void HeightfieldData::writeDelta(Bitstream& out, const HeightfieldDataPointer& reference, bool color) { +void HeightfieldHeightData::writeDelta(Bitstream& out, const HeightfieldHeightDataPointer& reference) { if (!reference || reference->getContents().size() != _contents.size()) { - write(out, color); + write(out); return; } - QMutexLocker locker(&reference->_encodedDeltaMutex); - if (reference->_encodedDelta.isEmpty() || reference->_deltaData != this) { + QMutexLocker locker(&reference->getEncodedDeltaMutex()); + if (reference->getEncodedDelta().isEmpty() || reference->getDeltaData() != this) { QImage image; - int minX, minY; - if (color) { - int size = glm::sqrt(_contents.size() / (float)COLOR_BYTES); - minX = size; - minY = size; - int maxX = -1, maxY = -1; - const char* src = _contents.constData(); - const char* ref = reference->_contents.constData(); - for (int y = 0; y < size; y++) { - bool difference = false; - for (int x = 0; x < size; x++, src += COLOR_BYTES, ref += COLOR_BYTES) { - if (src[0] != ref[0] || src[1] != ref[1] || src[2] != ref[2]) { - minX = qMin(minX, x); - maxX = qMax(maxX, x); - difference = true; - } - } - if (difference) { - minY = qMin(minY, y); - maxY = qMax(maxY, y); + int size = glm::sqrt((float)_contents.size()); + int minX = size, minY = size; + int maxX = -1, maxY = -1; + const char* src = _contents.constData(); + const char* ref = reference->getContents().constData(); + for (int y = 0; y < size; y++) { + bool difference = false; + for (int x = 0; x < size; x++) { + if 
(*src++ != *ref++) { + minX = qMin(minX, x); + maxX = qMax(maxX, x); + difference = true; } } - if (maxX >= minX) { - int width = maxX - minX + 1; - int height = maxY - minY + 1; - image = QImage(width, height, QImage::Format_RGB888); - src = _contents.constData() + (minY * size + minX) * COLOR_BYTES; - int srcStride = size * COLOR_BYTES; - int destStride = width * COLOR_BYTES; - for (int y = 0; y < height; y++) { - memcpy(image.scanLine(y), src, destStride); - src += srcStride; - } + if (difference) { + minY = qMin(minY, y); + maxY = qMax(maxY, y); } - } else { - int size = glm::sqrt((float)_contents.size()); - minX = size; - minY = size; - int maxX = -1, maxY = -1; - const char* src = _contents.constData(); - const char* ref = reference->_contents.constData(); - for (int y = 0; y < size; y++) { - bool difference = false; - for (int x = 0; x < size; x++) { - if (*src++ != *ref++) { - minX = qMin(minX, x); - maxX = qMax(maxX, x); - difference = true; - } - } - if (difference) { - minY = qMin(minY, y); - maxY = qMax(maxY, y); - } - } - if (maxX >= minX) { - int width = qMax(maxX - minX + 1, 0); - int height = qMax(maxY - minY + 1, 0); - image = QImage(width, height, QImage::Format_RGB888); - const uchar* lineSrc = (const uchar*)_contents.constData() + minY * size + minX; - for (int y = 0; y < height; y++) { - uchar* dest = image.scanLine(y); - for (const uchar* src = lineSrc, *end = src + width; src != end; src++) { - *dest++ = *src; - *dest++ = *src; - *dest++ = *src; - } - lineSrc += size; + } + if (maxX >= minX) { + int width = qMax(maxX - minX + 1, 0); + int height = qMax(maxY - minY + 1, 0); + image = QImage(width, height, QImage::Format_RGB888); + const uchar* lineSrc = (const uchar*)_contents.constData() + minY * size + minX; + for (int y = 0; y < height; y++) { + uchar* dest = image.scanLine(y); + for (const uchar* src = lineSrc, *end = src + width; src != end; src++) { + *dest++ = *src; + *dest++ = *src; + *dest++ = *src; } + lineSrc += size; } } image.setOffset(QPoint(minX + 1, minY + 1)); - reference->_encodedDelta = encodeHeightfieldImage(image); - reference->_deltaData = this; + reference->setEncodedDelta(encodeHeightfieldImage(image)); + reference->setDeltaData(HeightfieldDataPointer(this)); } - out << reference->_encodedDelta.size(); - out.writeAligned(reference->_encodedDelta); + out << reference->getEncodedDelta().size(); + out.writeAligned(reference->getEncodedDelta()); } -void HeightfieldData::read(Bitstream& in, int bytes, bool color) { - set(decodeHeightfieldImage(_encoded = in.readAligned(bytes)).convertToFormat(QImage::Format_RGB888), color); -} - -void HeightfieldData::set(const QImage& image, bool color) { - if (color) { - _contents.resize(image.width() * image.height() * COLOR_BYTES); - memcpy(_contents.data(), image.constBits(), _contents.size()); +void HeightfieldHeightData::writeSubdivided(Bitstream& out, const HeightfieldHeightDataPointer& ancestor, + const glm::vec3& minimum, float size) { + QMutexLocker locker(&_encodedSubdivisionsMutex); + int index = (int)glm::round(glm::log(size) / glm::log(0.5f)) - 1; + if (_encodedSubdivisions.size() <= index) { + _encodedSubdivisions.resize(index + 1); + } + EncodedSubdivision& subdivision = _encodedSubdivisions[index]; + if (subdivision.data.isEmpty() || subdivision.ancestor != ancestor) { + QImage image; + const QByteArray& ancestorContents = ancestor->getContents(); + const uchar* src = (const uchar*)_contents.constData(); - } else { - _contents.resize(image.width() * image.height()); - char* dest = _contents.data(); 
- for (const uchar* src = image.constBits(), *end = src + _contents.size() * COLOR_BYTES; - src != end; src += COLOR_BYTES) { - *dest++ = *src; + int destSize = glm::sqrt((float)_contents.size()); + image = QImage(destSize, destSize, QImage::Format_RGB888); + uchar* dest = image.bits(); + + int ancestorSize = glm::sqrt((float)ancestorContents.size()); + float ancestorY = minimum.z * ancestorSize; + float ancestorIncrement = size * ancestorSize / destSize; + + for (int y = 0; y < destSize; y++, ancestorY += ancestorIncrement) { + const uchar* lineRef = (const uchar*)ancestorContents.constData() + (int)ancestorY * ancestorSize; + float ancestorX = minimum.x * ancestorSize; + for (const uchar* end = src + destSize; src != end; ancestorX += ancestorIncrement) { + const uchar* ref = lineRef + (int)ancestorX; + uchar difference = *src++ - *ref; + *dest++ = difference; + *dest++ = difference; + *dest++ = difference; + } + } + subdivision.data = encodeHeightfieldImage(image, true); + subdivision.ancestor = ancestor; + } + out << subdivision.data.size(); + out.writeAligned(subdivision.data); +} + +void HeightfieldHeightData::read(Bitstream& in, int bytes) { + set(decodeHeightfieldImage(_encoded = in.readAligned(bytes)).convertToFormat(QImage::Format_RGB888)); +} + +void HeightfieldHeightData::set(const QImage& image) { + _contents.resize(image.width() * image.height()); + char* dest = _contents.data(); + for (const uchar* src = image.constBits(), *end = src + _contents.size() * COLOR_BYTES; + src != end; src += COLOR_BYTES) { + *dest++ = *src; + } +} + +HeightfieldColorData::HeightfieldColorData(const QByteArray& contents) : + HeightfieldData(contents) { +} + +HeightfieldColorData::HeightfieldColorData(Bitstream& in, int bytes) { + read(in, bytes); +} + +HeightfieldColorData::HeightfieldColorData(Bitstream& in, int bytes, const HeightfieldColorDataPointer& reference) { + if (!reference) { + read(in, bytes); + return; + } + QMutexLocker locker(&reference->getEncodedDeltaMutex()); + reference->setEncodedDelta(in.readAligned(bytes)); + reference->setDeltaData(HeightfieldDataPointer(this)); + _contents = reference->getContents(); + QImage image = decodeHeightfieldImage(reference->getEncodedDelta()); + if (image.isNull()) { + return; + } + QPoint offset = image.offset(); + image = image.convertToFormat(QImage::Format_RGB888); + if (offset.x() == 0) { + set(image); + return; + } + int minX = offset.x() - 1; + int minY = offset.y() - 1; + int size = glm::sqrt(_contents.size() / (float)COLOR_BYTES); + char* dest = _contents.data() + (minY * size + minX) * COLOR_BYTES; + int destStride = size * COLOR_BYTES; + int srcStride = image.width() * COLOR_BYTES; + for (int y = 0; y < image.height(); y++) { + memcpy(dest, image.constScanLine(y), srcStride); + dest += destStride; + } +} + +HeightfieldColorData::HeightfieldColorData(Bitstream& in, int bytes, const HeightfieldColorDataPointer& ancestor, + const glm::vec3& minimum, float size) { + QMutexLocker locker(&_encodedSubdivisionsMutex); + int index = (int)glm::round(glm::log(size) / glm::log(0.5f)) - 1; + if (_encodedSubdivisions.size() <= index) { + _encodedSubdivisions.resize(index + 1); + } + EncodedSubdivision& subdivision = _encodedSubdivisions[index]; + subdivision.data = in.readAligned(bytes); + subdivision.ancestor = ancestor; + QImage image = decodeHeightfieldImage(subdivision.data); + if (image.isNull()) { + return; + } + image = image.convertToFormat(QImage::Format_RGB888); + int destSize = image.width(); + const uchar* src = image.constBits(); + 
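[Editor's aside, not part of the patch] Both the height and color variants key their subdivision cache by how far below the ancestor the node sits: a block half the ancestor's size maps to slot 0, a quarter to slot 1, and so on, which is what the round(log(size) / log(0.5)) - 1 expression above computes. A self-contained restatement of that mapping, assuming only the standard <cmath>:

    #include <cmath>

    // Maps a node's size relative to its ancestor (0.5, 0.25, 0.125, ...) to a subdivision cache slot (0, 1, 2, ...).
    int subdivisionSlot(float relativeSize) {
        return static_cast<int>(std::lround(std::log(relativeSize) / std::log(0.5f))) - 1;
    }
    // subdivisionSlot(0.5f) == 0, subdivisionSlot(0.25f) == 1, subdivisionSlot(0.125f) == 2
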
const QByteArray& ancestorContents = ancestor->getContents(); + + int ancestorSize = glm::sqrt(ancestorContents.size() / (float)COLOR_BYTES); + float ancestorY = minimum.z * ancestorSize; + float ancestorIncrement = size * ancestorSize / destSize; + int ancestorStride = ancestorSize * COLOR_BYTES; + + _contents = QByteArray(destSize * destSize * COLOR_BYTES, 0); + char* dest = _contents.data(); + int stride = image.width() * COLOR_BYTES; + + for (int y = 0; y < destSize; y++, ancestorY += ancestorIncrement) { + const uchar* lineRef = (const uchar*)ancestorContents.constData() + (int)ancestorY * ancestorStride; + float ancestorX = minimum.x * ancestorSize; + for (char* end = dest + stride; dest != end; ancestorX += ancestorIncrement) { + const uchar* ref = lineRef + (int)ancestorX * COLOR_BYTES; + *dest++ = *ref++ + *src++; + *dest++ = *ref++ + *src++; + *dest++ = *ref++ + *src++; } } } +void HeightfieldColorData::write(Bitstream& out) { + QMutexLocker locker(&_encodedMutex); + if (_encoded.isEmpty()) { + QImage image; + int size = glm::sqrt(_contents.size() / (float)COLOR_BYTES); + image = QImage((uchar*)_contents.data(), size, size, QImage::Format_RGB888); + _encoded = encodeHeightfieldImage(image); + } + out << _encoded.size(); + out.writeAligned(_encoded); +} + +void HeightfieldColorData::writeDelta(Bitstream& out, const HeightfieldColorDataPointer& reference) { + if (!reference || reference->getContents().size() != _contents.size()) { + write(out); + return; + } + QMutexLocker locker(&reference->getEncodedDeltaMutex()); + if (reference->getEncodedDelta().isEmpty() || reference->getDeltaData() != this) { + QImage image; + int size = glm::sqrt(_contents.size() / (float)COLOR_BYTES); + int minX = size, minY = size; + int maxX = -1, maxY = -1; + const char* src = _contents.constData(); + const char* ref = reference->getContents().constData(); + for (int y = 0; y < size; y++) { + bool difference = false; + for (int x = 0; x < size; x++, src += COLOR_BYTES, ref += COLOR_BYTES) { + if (src[0] != ref[0] || src[1] != ref[1] || src[2] != ref[2]) { + minX = qMin(minX, x); + maxX = qMax(maxX, x); + difference = true; + } + } + if (difference) { + minY = qMin(minY, y); + maxY = qMax(maxY, y); + } + } + if (maxX >= minX) { + int width = maxX - minX + 1; + int height = maxY - minY + 1; + image = QImage(width, height, QImage::Format_RGB888); + src = _contents.constData() + (minY * size + minX) * COLOR_BYTES; + int srcStride = size * COLOR_BYTES; + int destStride = width * COLOR_BYTES; + for (int y = 0; y < height; y++) { + memcpy(image.scanLine(y), src, destStride); + src += srcStride; + } + } + image.setOffset(QPoint(minX + 1, minY + 1)); + reference->setEncodedDelta(encodeHeightfieldImage(image)); + reference->setDeltaData(HeightfieldDataPointer(this)); + } + out << reference->getEncodedDelta().size(); + out.writeAligned(reference->getEncodedDelta()); +} + +void HeightfieldColorData::writeSubdivided(Bitstream& out, const HeightfieldColorDataPointer& ancestor, + const glm::vec3& minimum, float size) { + QMutexLocker locker(&_encodedSubdivisionsMutex); + int index = (int)glm::round(glm::log(size) / glm::log(0.5f)) - 1; + if (_encodedSubdivisions.size() <= index) { + _encodedSubdivisions.resize(index + 1); + } + EncodedSubdivision& subdivision = _encodedSubdivisions[index]; + if (subdivision.data.isEmpty() || subdivision.ancestor != ancestor) { + QImage image; + const QByteArray& ancestorContents = ancestor->getContents(); + const uchar* src = (const uchar*)_contents.constData(); + + int destSize = 
glm::sqrt(_contents.size() / (float)COLOR_BYTES); + image = QImage(destSize, destSize, QImage::Format_RGB888); + uchar* dest = image.bits(); + int stride = destSize * COLOR_BYTES; + + int ancestorSize = glm::sqrt(ancestorContents.size() / (float)COLOR_BYTES); + float ancestorY = minimum.z * ancestorSize; + float ancestorIncrement = size * ancestorSize / destSize; + int ancestorStride = ancestorSize * COLOR_BYTES; + + for (int y = 0; y < destSize; y++, ancestorY += ancestorIncrement) { + const uchar* lineRef = (const uchar*)ancestorContents.constData() + (int)ancestorY * ancestorStride; + float ancestorX = minimum.x * ancestorSize; + for (const uchar* end = src + stride; src != end; ancestorX += ancestorIncrement) { + const uchar* ref = lineRef + (int)ancestorX * COLOR_BYTES; + *dest++ = *src++ - *ref++; + *dest++ = *src++ - *ref++; + *dest++ = *src++ - *ref++; + } + } + subdivision.data = encodeHeightfieldImage(image, true); + subdivision.ancestor = ancestor; + } + out << subdivision.data.size(); + out.writeAligned(subdivision.data); +} + +void HeightfieldColorData::read(Bitstream& in, int bytes) { + set(decodeHeightfieldImage(_encoded = in.readAligned(bytes)).convertToFormat(QImage::Format_RGB888)); +} + +void HeightfieldColorData::set(const QImage& image) { + _contents.resize(image.width() * image.height() * COLOR_BYTES); + memcpy(_contents.data(), image.constBits(), _contents.size()); +} + +const int TEXTURE_HEADER_SIZE = sizeof(qint32) * 4; + +static QByteArray encodeTexture(int offsetX, int offsetY, int width, int height, const QByteArray& contents) { + QByteArray inflated(TEXTURE_HEADER_SIZE, 0); + qint32* header = (qint32*)inflated.data(); + *header++ = offsetX; + *header++ = offsetY; + *header++ = width; + *header++ = height; + inflated.append(contents); + return qCompress(inflated); +} + +static QByteArray decodeTexture(const QByteArray& encoded, int& offsetX, int& offsetY, int& width, int& height) { + QByteArray inflated = qUncompress(encoded); + const qint32* header = (const qint32*)inflated.constData(); + offsetX = *header++; + offsetY = *header++; + width = *header++; + height = *header++; + return inflated.mid(TEXTURE_HEADER_SIZE); +} + +HeightfieldTextureData::HeightfieldTextureData(const QByteArray& contents, const QVector& textures) : + HeightfieldData(contents), + _textures(textures) { +} + +HeightfieldTextureData::HeightfieldTextureData(Bitstream& in, int bytes) { + read(in, bytes); +} + +HeightfieldTextureData::HeightfieldTextureData(Bitstream& in, int bytes, const HeightfieldTextureDataPointer& reference) { + if (!reference) { + read(in, bytes); + return; + } + QMutexLocker locker(&reference->getEncodedDeltaMutex()); + reference->setEncodedDelta(in.readAligned(bytes)); + in.readDelta(_textures, reference->getTextures()); + reference->setDeltaData(HeightfieldDataPointer(this)); + _contents = reference->getContents(); + + int offsetX, offsetY, width, height; + QByteArray delta = decodeTexture(reference->getEncodedDelta(), offsetX, offsetY, width, height); + if (delta.isEmpty()) { + return; + } + if (offsetX == 0) { + _contents = delta; + return; + } + int minX = offsetX - 1; + int minY = offsetY - 1; + int size = glm::sqrt((float)_contents.size()); + const char* src = delta.constData(); + char* dest = _contents.data() + minY * size + minX; + for (int y = 0; y < height; y++, src += width, dest += size) { + memcpy(dest, src, width); + } +} + +void HeightfieldTextureData::write(Bitstream& out) { + QMutexLocker locker(&_encodedMutex); + if (_encoded.isEmpty()) { + int size = 
glm::sqrt((float)_contents.size()); + _encoded = encodeTexture(0, 0, size, size, _contents); + } + out << _encoded.size(); + out.writeAligned(_encoded); + out << _textures; +} + +void HeightfieldTextureData::writeDelta(Bitstream& out, const HeightfieldTextureDataPointer& reference) { + if (!reference || reference->getContents().size() != _contents.size()) { + write(out); + return; + } + QMutexLocker locker(&reference->getEncodedDeltaMutex()); + if (reference->getEncodedDelta().isEmpty() || reference->getDeltaData() != this) { + int size = glm::sqrt((float)_contents.size()); + int minX = size, minY = size; + int maxX = -1, maxY = -1; + const char* src = _contents.constData(); + const char* ref = reference->getContents().constData(); + for (int y = 0; y < size; y++) { + bool difference = false; + for (int x = 0; x < size; x++) { + if (*src++ != *ref++) { + minX = qMin(minX, x); + maxX = qMax(maxX, x); + difference = true; + } + } + if (difference) { + minY = qMin(minY, y); + maxY = qMax(maxY, y); + } + } + QByteArray delta; + int width = 0, height = 0; + if (maxX >= minX) { + width = maxX - minX + 1; + height = maxY - minY + 1; + delta = QByteArray(width * height, 0); + char* dest = delta.data(); + src = _contents.constData() + minY * size + minX; + for (int y = 0; y < height; y++, src += size, dest += width) { + memcpy(dest, src, width); + } + } + reference->setEncodedDelta(encodeTexture(minX + 1, minY + 1, width, height, delta)); + reference->setDeltaData(HeightfieldDataPointer(this)); + } + out << reference->getEncodedDelta().size(); + out.writeAligned(reference->getEncodedDelta()); + out.writeDelta(_textures, reference->getTextures()); +} + +void HeightfieldTextureData::read(Bitstream& in, int bytes) { + int offsetX, offsetY, width, height; + _contents = decodeTexture(_encoded = in.readAligned(bytes), offsetX, offsetY, width, height); + in >> _textures; +} + +HeightfieldTexture::HeightfieldTexture() : + _scaleS(1.0f), + _scaleT(1.0f) { +} + HeightfieldAttribute::HeightfieldAttribute(const QString& name) : - InlineAttribute(name) { + InlineAttribute(name) { } void HeightfieldAttribute::read(Bitstream& in, void*& value, bool isLeaf) const { @@ -719,9 +1071,9 @@ void HeightfieldAttribute::read(Bitstream& in, void*& value, bool isLeaf) const int size; in >> size; if (size == 0) { - *(HeightfieldDataPointer*)&value = HeightfieldDataPointer(); + *(HeightfieldHeightDataPointer*)&value = HeightfieldHeightDataPointer(); } else { - *(HeightfieldDataPointer*)&value = HeightfieldDataPointer(new HeightfieldData(in, size, false)); + *(HeightfieldHeightDataPointer*)&value = HeightfieldHeightDataPointer(new HeightfieldHeightData(in, size)); } } @@ -729,9 +1081,9 @@ void HeightfieldAttribute::write(Bitstream& out, void* value, bool isLeaf) const if (!isLeaf) { return; } - HeightfieldDataPointer data = decodeInline(value); + HeightfieldHeightDataPointer data = decodeInline(value); if (data) { - data->write(out, false); + data->write(out); } else { out << 0; } @@ -744,10 +1096,10 @@ void HeightfieldAttribute::readDelta(Bitstream& in, void*& value, void* referenc int size; in >> size; if (size == 0) { - *(HeightfieldDataPointer*)&value = HeightfieldDataPointer(); + *(HeightfieldHeightDataPointer*)&value = HeightfieldHeightDataPointer(); } else { - *(HeightfieldDataPointer*)&value = HeightfieldDataPointer(new HeightfieldData( - in, size, decodeInline(reference), false)); + *(HeightfieldHeightDataPointer*)&value = HeightfieldHeightDataPointer(new HeightfieldHeightData( + in, size, decodeInline(reference))); } 
} @@ -755,9 +1107,9 @@ void HeightfieldAttribute::writeDelta(Bitstream& out, void* value, void* referen if (!isLeaf) { return; } - HeightfieldDataPointer data = decodeInline(value); + HeightfieldHeightDataPointer data = decodeInline(value); if (data) { - data->writeDelta(out, decodeInline(reference), false); + data->writeDelta(out, decodeInline(reference)); } else { out << 0; } @@ -766,20 +1118,20 @@ void HeightfieldAttribute::writeDelta(Bitstream& out, void* value, void* referen bool HeightfieldAttribute::merge(void*& parent, void* children[], bool postRead) const { int maxSize = 0; for (int i = 0; i < MERGE_COUNT; i++) { - HeightfieldDataPointer pointer = decodeInline(children[i]); + HeightfieldHeightDataPointer pointer = decodeInline(children[i]); if (pointer) { maxSize = qMax(maxSize, pointer->getContents().size()); } } if (maxSize == 0) { - *(HeightfieldDataPointer*)&parent = HeightfieldDataPointer(); + *(HeightfieldHeightDataPointer*)&parent = HeightfieldHeightDataPointer(); return true; } int size = glm::sqrt((float)maxSize); QByteArray contents(size * size, 0); int halfSize = size / 2; for (int i = 0; i < MERGE_COUNT; i++) { - HeightfieldDataPointer child = decodeInline(children[i]); + HeightfieldHeightDataPointer child = decodeInline(children[i]); if (!child) { continue; } @@ -789,7 +1141,7 @@ bool HeightfieldAttribute::merge(void*& parent, void* children[], bool postRead) int xIndex = i & INDEX_MASK; const int Y_SHIFT = 1; int yIndex = (i >> Y_SHIFT) & INDEX_MASK; - if (yIndex == 0 && decodeInline(children[i | (1 << Y_SHIFT)])) { + if (yIndex == 0 && decodeInline(children[i | (1 << Y_SHIFT)])) { continue; // bottom is overriden by top } const int HALF_RANGE = 128; @@ -828,12 +1180,12 @@ bool HeightfieldAttribute::merge(void*& parent, void* children[], bool postRead) } } } - *(HeightfieldDataPointer*)&parent = HeightfieldDataPointer(new HeightfieldData(contents)); + *(HeightfieldHeightDataPointer*)&parent = HeightfieldHeightDataPointer(new HeightfieldHeightData(contents)); return false; } HeightfieldColorAttribute::HeightfieldColorAttribute(const QString& name) : - InlineAttribute(name) { + InlineAttribute(name) { } void HeightfieldColorAttribute::read(Bitstream& in, void*& value, bool isLeaf) const { @@ -843,9 +1195,9 @@ void HeightfieldColorAttribute::read(Bitstream& in, void*& value, bool isLeaf) c int size; in >> size; if (size == 0) { - *(HeightfieldDataPointer*)&value = HeightfieldDataPointer(); + *(HeightfieldColorDataPointer*)&value = HeightfieldColorDataPointer(); } else { - *(HeightfieldDataPointer*)&value = HeightfieldDataPointer(new HeightfieldData(in, size, true)); + *(HeightfieldColorDataPointer*)&value = HeightfieldColorDataPointer(new HeightfieldColorData(in, size)); } } @@ -853,9 +1205,9 @@ void HeightfieldColorAttribute::write(Bitstream& out, void* value, bool isLeaf) if (!isLeaf) { return; } - HeightfieldDataPointer data = decodeInline(value); + HeightfieldColorDataPointer data = decodeInline(value); if (data) { - data->write(out, true); + data->write(out); } else { out << 0; } @@ -868,10 +1220,10 @@ void HeightfieldColorAttribute::readDelta(Bitstream& in, void*& value, void* ref int size; in >> size; if (size == 0) { - *(HeightfieldDataPointer*)&value = HeightfieldDataPointer(); + *(HeightfieldColorDataPointer*)&value = HeightfieldColorDataPointer(); } else { - *(HeightfieldDataPointer*)&value = HeightfieldDataPointer(new HeightfieldData( - in, size, decodeInline(reference), true)); + *(HeightfieldColorDataPointer*)&value = HeightfieldColorDataPointer(new 
HeightfieldColorData( + in, size, decodeInline(reference))); } } @@ -879,9 +1231,9 @@ void HeightfieldColorAttribute::writeDelta(Bitstream& out, void* value, void* re if (!isLeaf) { return; } - HeightfieldDataPointer data = decodeInline(value); + HeightfieldColorDataPointer data = decodeInline(value); if (data) { - data->writeDelta(out, decodeInline(reference), true); + data->writeDelta(out, decodeInline(reference)); } else { out << 0; } @@ -890,20 +1242,20 @@ void HeightfieldColorAttribute::writeDelta(Bitstream& out, void* value, void* re bool HeightfieldColorAttribute::merge(void*& parent, void* children[], bool postRead) const { int maxSize = 0; for (int i = 0; i < MERGE_COUNT; i++) { - HeightfieldDataPointer pointer = decodeInline(children[i]); + HeightfieldColorDataPointer pointer = decodeInline(children[i]); if (pointer) { maxSize = qMax(maxSize, pointer->getContents().size()); } } if (maxSize == 0) { - *(HeightfieldDataPointer*)&parent = HeightfieldDataPointer(); + *(HeightfieldColorDataPointer*)&parent = HeightfieldColorDataPointer(); return true; } int size = glm::sqrt(maxSize / (float)HeightfieldData::COLOR_BYTES); QByteArray contents(size * size * HeightfieldData::COLOR_BYTES, 0); int halfSize = size / 2; for (int i = 0; i < MERGE_COUNT; i++) { - HeightfieldDataPointer child = decodeInline(children[i]); + HeightfieldColorDataPointer child = decodeInline(children[i]); if (!child) { continue; } @@ -913,7 +1265,7 @@ bool HeightfieldColorAttribute::merge(void*& parent, void* children[], bool post int xIndex = i & INDEX_MASK; const int Y_SHIFT = 1; int yIndex = (i >> Y_SHIFT) & INDEX_MASK; - if (yIndex == 0 && decodeInline(children[i | (1 << Y_SHIFT)])) { + if (yIndex == 0 && decodeInline(children[i | (1 << Y_SHIFT)])) { continue; // bottom is overriden by top } int Z_SHIFT = 2; @@ -967,10 +1319,77 @@ bool HeightfieldColorAttribute::merge(void*& parent, void* children[], bool post } } } - *(HeightfieldDataPointer*)&parent = HeightfieldDataPointer(new HeightfieldData(contents)); + *(HeightfieldColorDataPointer*)&parent = HeightfieldColorDataPointer(new HeightfieldColorData(contents)); return false; } +HeightfieldTextureAttribute::HeightfieldTextureAttribute(const QString& name) : + InlineAttribute(name) { +} + +void HeightfieldTextureAttribute::read(Bitstream& in, void*& value, bool isLeaf) const { + if (!isLeaf) { + return; + } + int size; + in >> size; + if (size == 0) { + *(HeightfieldTextureDataPointer*)&value = HeightfieldTextureDataPointer(); + } else { + *(HeightfieldTextureDataPointer*)&value = HeightfieldTextureDataPointer(new HeightfieldTextureData(in, size)); + } +} + +void HeightfieldTextureAttribute::write(Bitstream& out, void* value, bool isLeaf) const { + if (!isLeaf) { + return; + } + HeightfieldTextureDataPointer data = decodeInline(value); + if (data) { + data->write(out); + } else { + out << 0; + } +} + +void HeightfieldTextureAttribute::readDelta(Bitstream& in, void*& value, void* reference, bool isLeaf) const { + if (!isLeaf) { + return; + } + int size; + in >> size; + if (size == 0) { + *(HeightfieldTextureDataPointer*)&value = HeightfieldTextureDataPointer(); + } else { + *(HeightfieldTextureDataPointer*)&value = HeightfieldTextureDataPointer(new HeightfieldTextureData( + in, size, decodeInline(reference))); + } +} + +void HeightfieldTextureAttribute::writeDelta(Bitstream& out, void* value, void* reference, bool isLeaf) const { + if (!isLeaf) { + return; + } + HeightfieldTextureDataPointer data = decodeInline(value); + if (data) { + data->writeDelta(out, 
decodeInline(reference)); + } else { + out << 0; + } +} + +bool HeightfieldTextureAttribute::merge(void*& parent, void* children[], bool postRead) const { + int maxSize = 0; + for (int i = 0; i < MERGE_COUNT; i++) { + HeightfieldTextureDataPointer pointer = decodeInline(children[i]); + if (pointer) { + maxSize = qMax(maxSize, pointer->getContents().size()); + } + } + *(HeightfieldTextureDataPointer*)&parent = HeightfieldTextureDataPointer(); + return maxSize == 0; +} + SharedObjectAttribute::SharedObjectAttribute(const QString& name, const QMetaObject* metaObject, const SharedObjectPointer& defaultValue) : InlineAttribute(name, defaultValue), @@ -1082,9 +1501,7 @@ MetavoxelNode* SharedObjectSetAttribute::expandMetavoxelRoot(const MetavoxelNode MetavoxelNode* newGrandchild = new MetavoxelNode(attribute); newChild->setChild((index + j) % MetavoxelNode::CHILD_COUNT, newGrandchild); } - newChild->mergeChildren(attribute); } - newParent->mergeChildren(attribute); return newParent; } diff --git a/libraries/metavoxels/src/AttributeRegistry.h b/libraries/metavoxels/src/AttributeRegistry.h index ddf6105662..66da7a9b6f 100644 --- a/libraries/metavoxels/src/AttributeRegistry.h +++ b/libraries/metavoxels/src/AttributeRegistry.h @@ -18,6 +18,7 @@ #include #include #include +#include #include #include "Bitstream.h" @@ -28,7 +29,10 @@ class QScriptEngine; class QScriptValue; class Attribute; +class HeightfieldColorData; class HeightfieldData; +class HeightfieldHeightData; +class HeightfieldTextureData; class MetavoxelData; class MetavoxelLOD; class MetavoxelNode; @@ -96,12 +100,15 @@ public: /// Returns a reference to the standard "spannerMask" attribute. const AttributePointer& getSpannerMaskAttribute() const { return _spannerMaskAttribute; } - /// Returns a reference to the standard HeightfieldPointer "heightfield" attribute. + /// Returns a reference to the standard HeightfieldDataPointer "heightfield" attribute. const AttributePointer& getHeightfieldAttribute() const { return _heightfieldAttribute; } - /// Returns a reference to the standard HeightfieldColorPointer "heightfieldColor" attribute. + /// Returns a reference to the standard HeightfieldDataPointer "heightfieldColor" attribute. const AttributePointer& getHeightfieldColorAttribute() const { return _heightfieldColorAttribute; } + /// Returns a reference to the standard HeightfieldDataPointer "heightfieldTexture" attribute. + const AttributePointer& getHeightfieldTextureAttribute() const { return _heightfieldTextureAttribute; } + private: static QScriptValue getAttribute(QScriptContext* context, QScriptEngine* engine); @@ -119,6 +126,7 @@ private: AttributePointer _spannerMaskAttribute; AttributePointer _heightfieldAttribute; AttributePointer _heightfieldColorAttribute; + AttributePointer _heightfieldTextureAttribute; }; /// Converts a value to a void pointer. 
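[Editor's aside, not part of the patch] With this change the registry exposes the texture layer through the same singleton accessors as the existing height and color layers, so visitors such as the texture-paint edit further below can request all three attributes. A minimal usage sketch, using only identifiers that appear in this diff:

    // Fetch the three heightfield layers from the attribute registry singleton.
    AttributePointer height  = AttributeRegistry::getInstance()->getHeightfieldAttribute();
    AttributePointer color   = AttributeRegistry::getInstance()->getHeightfieldColorAttribute();
    AttributePointer texture = AttributeRegistry::getInstance()->getHeightfieldTextureAttribute();
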
@@ -221,6 +229,11 @@ public: virtual void readDelta(Bitstream& in, void*& value, void* reference, bool isLeaf) const { read(in, value, isLeaf); } virtual void writeDelta(Bitstream& out, void* value, void* reference, bool isLeaf) const { write(out, value, isLeaf); } + virtual void readSubdivided(MetavoxelStreamState& state, void*& value, + const MetavoxelStreamState& ancestorState, void* ancestorValue, bool isLeaf) const; + virtual void writeSubdivided(MetavoxelStreamState& state, void* value, + const MetavoxelStreamState& ancestorState, void* ancestorValue, bool isLeaf) const; + virtual MetavoxelNode* createMetavoxelNode(const AttributeValue& value, const MetavoxelNode* original) const; virtual void readMetavoxelRoot(MetavoxelData& data, MetavoxelStreamState& state); @@ -430,19 +443,20 @@ public: static const int COLOR_BYTES = 3; - HeightfieldData(const QByteArray& contents); - HeightfieldData(Bitstream& in, int bytes, bool color); - HeightfieldData(Bitstream& in, int bytes, const HeightfieldDataPointer& reference, bool color); + HeightfieldData(const QByteArray& contents = QByteArray()); + virtual ~HeightfieldData(); const QByteArray& getContents() const { return _contents; } - void write(Bitstream& out, bool color); - void writeDelta(Bitstream& out, const HeightfieldDataPointer& reference, bool color); - -private: + void setDeltaData(const HeightfieldDataPointer& deltaData) { _deltaData = deltaData; } + const HeightfieldDataPointer& getDeltaData() const { return _deltaData; } - void read(Bitstream& in, int bytes, bool color); - void set(const QImage& image, bool color); + void setEncodedDelta(const QByteArray& encodedDelta) { _encodedDelta = encodedDelta; } + const QByteArray& getEncodedDelta() const { return _encodedDelta; } + + QMutex& getEncodedDeltaMutex() { return _encodedDeltaMutex; } + +protected: QByteArray _contents; QByteArray _encoded; @@ -451,10 +465,110 @@ private: HeightfieldDataPointer _deltaData; QByteArray _encodedDelta; QMutex _encodedDeltaMutex; + + class EncodedSubdivision { + public: + HeightfieldDataPointer ancestor; + QByteArray data; + }; + QVector _encodedSubdivisions; + QMutex _encodedSubdivisionsMutex; +}; + +typedef QExplicitlySharedDataPointer HeightfieldHeightDataPointer; + +/// Contains a block of heightfield height data. +class HeightfieldHeightData : public HeightfieldData { +public: + + HeightfieldHeightData(const QByteArray& contents); + HeightfieldHeightData(Bitstream& in, int bytes); + HeightfieldHeightData(Bitstream& in, int bytes, const HeightfieldHeightDataPointer& reference); + HeightfieldHeightData(Bitstream& in, int bytes, const HeightfieldHeightDataPointer& ancestor, + const glm::vec3& minimum, float size); + + void write(Bitstream& out); + void writeDelta(Bitstream& out, const HeightfieldHeightDataPointer& reference); + void writeSubdivided(Bitstream& out, const HeightfieldHeightDataPointer& ancestor, + const glm::vec3& minimum, float size); + +private: + + void read(Bitstream& in, int bytes); + void set(const QImage& image); +}; + +typedef QExplicitlySharedDataPointer HeightfieldColorDataPointer; + +/// Contains a block of heightfield color data. 
+class HeightfieldColorData : public HeightfieldData { +public: + + HeightfieldColorData(const QByteArray& contents); + HeightfieldColorData(Bitstream& in, int bytes); + HeightfieldColorData(Bitstream& in, int bytes, const HeightfieldColorDataPointer& reference); + HeightfieldColorData(Bitstream& in, int bytes, const HeightfieldColorDataPointer& ancestor, + const glm::vec3& minimum, float size); + + void write(Bitstream& out); + void writeDelta(Bitstream& out, const HeightfieldColorDataPointer& reference); + void writeSubdivided(Bitstream& out, const HeightfieldColorDataPointer& ancestor, + const glm::vec3& minimum, float size); + +private: + + void read(Bitstream& in, int bytes); + void set(const QImage& image); +}; + +typedef QExplicitlySharedDataPointer HeightfieldTextureDataPointer; + +/// Contains a block of heightfield texture data. +class HeightfieldTextureData : public HeightfieldData { +public: + + HeightfieldTextureData(const QByteArray& contents, + const QVector& textures = QVector()); + HeightfieldTextureData(Bitstream& in, int bytes); + HeightfieldTextureData(Bitstream& in, int bytes, const HeightfieldTextureDataPointer& reference); + + const QVector& getTextures() const { return _textures; } + + void write(Bitstream& out); + void writeDelta(Bitstream& out, const HeightfieldTextureDataPointer& reference); + +private: + + void read(Bitstream& in, int bytes); + + QVector _textures; +}; + +/// Contains the description of a heightfield texture. +class HeightfieldTexture : public SharedObject { + Q_OBJECT + Q_PROPERTY(QUrl url MEMBER _url) + Q_PROPERTY(float scaleS MEMBER _scaleS) + Q_PROPERTY(float scaleT MEMBER _scaleT) + +public: + + Q_INVOKABLE HeightfieldTexture(); + + const QUrl& getURL() const { return _url; } + + float getScaleS() const { return _scaleS; } + float getScaleT() const { return _scaleT; } + +private: + + QUrl _url; + float _scaleS; + float _scaleT; }; /// An attribute that stores heightfield data. -class HeightfieldAttribute : public InlineAttribute { +class HeightfieldAttribute : public InlineAttribute { Q_OBJECT public: @@ -471,7 +585,7 @@ public: }; /// An attribute that stores heightfield colors. -class HeightfieldColorAttribute : public InlineAttribute { +class HeightfieldColorAttribute : public InlineAttribute { Q_OBJECT public: @@ -487,6 +601,23 @@ public: virtual bool merge(void*& parent, void* children[], bool postRead = false) const; }; +/// An attribute that stores heightfield textures. +class HeightfieldTextureAttribute : public InlineAttribute { + Q_OBJECT + +public: + + Q_INVOKABLE HeightfieldTextureAttribute(const QString& name = QString()); + + virtual void read(Bitstream& in, void*& value, bool isLeaf) const; + virtual void write(Bitstream& out, void* value, bool isLeaf) const; + + virtual void readDelta(Bitstream& in, void*& value, void* reference, bool isLeaf) const; + virtual void writeDelta(Bitstream& out, void* value, void* reference, bool isLeaf) const; + + virtual bool merge(void*& parent, void* children[], bool postRead = false) const; +}; + /// An attribute that takes the form of QObjects of a given meta-type (a subclass of SharedObject). 
class SharedObjectAttribute : public InlineAttribute { Q_OBJECT diff --git a/libraries/metavoxels/src/MetavoxelData.cpp b/libraries/metavoxels/src/MetavoxelData.cpp index 3607441461..67fafe1633 100644 --- a/libraries/metavoxels/src/MetavoxelData.cpp +++ b/libraries/metavoxels/src/MetavoxelData.cpp @@ -998,7 +998,7 @@ MetavoxelNode* MetavoxelNode::readSubdivision(MetavoxelStreamState& state) { for (int i = 0; i < CHILD_COUNT; i++) { nextState.setMinimum(state.minimum, i); newNode->_children[i] = new MetavoxelNode(state.base.attribute); - newNode->_children[i]->read(nextState); + newNode->_children[i]->readSubdivided(nextState, state, _attributeValue); } return newNode; } @@ -1037,7 +1037,7 @@ void MetavoxelNode::writeSubdivision(MetavoxelStreamState& state) const { MetavoxelStreamState nextState = { state.base, glm::vec3(), state.size * 0.5f }; for (int i = 0; i < CHILD_COUNT; i++) { nextState.setMinimum(state.minimum, i); - _children[i]->write(nextState); + _children[i]->writeSubdivided(nextState, state, _attributeValue); } } } else if (!leaf) { @@ -1051,6 +1051,46 @@ void MetavoxelNode::writeSubdivision(MetavoxelStreamState& state) const { } } +void MetavoxelNode::readSubdivided(MetavoxelStreamState& state, const MetavoxelStreamState& ancestorState, + void* ancestorValue) { + clearChildren(state.base.attribute); + + if (!state.shouldSubdivide()) { + state.base.attribute->readSubdivided(state, _attributeValue, ancestorState, ancestorValue, true); + return; + } + bool leaf; + state.base.stream >> leaf; + state.base.attribute->readSubdivided(state, _attributeValue, ancestorState, ancestorValue, leaf); + if (!leaf) { + MetavoxelStreamState nextState = { state.base, glm::vec3(), state.size * 0.5f }; + for (int i = 0; i < CHILD_COUNT; i++) { + nextState.setMinimum(state.minimum, i); + _children[i] = new MetavoxelNode(state.base.attribute); + _children[i]->readSubdivided(nextState, ancestorState, ancestorValue); + } + mergeChildren(state.base.attribute, true); + } +} + +void MetavoxelNode::writeSubdivided(MetavoxelStreamState& state, const MetavoxelStreamState& ancestorState, + void* ancestorValue) const { + if (!state.shouldSubdivide()) { + state.base.attribute->writeSubdivided(state, _attributeValue, ancestorState, ancestorValue, true); + return; + } + bool leaf = isLeaf(); + state.base.stream << leaf; + state.base.attribute->writeSubdivided(state, _attributeValue, ancestorState, ancestorValue, leaf); + if (!leaf) { + MetavoxelStreamState nextState = { state.base, glm::vec3(), state.size * 0.5f }; + for (int i = 0; i < CHILD_COUNT; i++) { + nextState.setMinimum(state.minimum, i); + _children[i]->writeSubdivided(nextState, ancestorState, ancestorValue); + } + } +} + void MetavoxelNode::writeSpanners(MetavoxelStreamState& state) const { foreach (const SharedObjectPointer& object, decodeInline(_attributeValue)) { if (static_cast(object.data())->testAndSetVisited(state.base.visit)) { diff --git a/libraries/metavoxels/src/MetavoxelData.h b/libraries/metavoxels/src/MetavoxelData.h index 9e5b2f04d1..8308c3c69b 100644 --- a/libraries/metavoxels/src/MetavoxelData.h +++ b/libraries/metavoxels/src/MetavoxelData.h @@ -225,6 +225,9 @@ public: MetavoxelNode* readSubdivision(MetavoxelStreamState& state); void writeSubdivision(MetavoxelStreamState& state) const; + void readSubdivided(MetavoxelStreamState& state, const MetavoxelStreamState& ancestorState, void* ancestorValue); + void writeSubdivided(MetavoxelStreamState& state, const MetavoxelStreamState& ancestorState, void* ancestorValue) const; + void 
writeSpanners(MetavoxelStreamState& state) const; void writeSpannerDelta(const MetavoxelNode& reference, MetavoxelStreamState& state) const; void writeSpannerSubdivision(MetavoxelStreamState& state) const; diff --git a/libraries/metavoxels/src/MetavoxelMessages.cpp b/libraries/metavoxels/src/MetavoxelMessages.cpp index df6e8172e4..1a8f64d935 100644 --- a/libraries/metavoxels/src/MetavoxelMessages.cpp +++ b/libraries/metavoxels/src/MetavoxelMessages.cpp @@ -347,6 +347,8 @@ PaintHeightfieldHeightEditVisitor::PaintHeightfieldHeightEditVisitor(const Paint _bounds = Box(_edit.position - extents, _edit.position + extents); } +const int EIGHT_BIT_MAXIMUM = 255; + int PaintHeightfieldHeightEditVisitor::visit(MetavoxelInfo& info) { if (!info.getBounds().intersects(_bounds)) { return STOP_RECURSION; @@ -354,7 +356,7 @@ int PaintHeightfieldHeightEditVisitor::visit(MetavoxelInfo& info) { if (!info.isLeaf) { return DEFAULT_ORDER; } - HeightfieldDataPointer pointer = info.inputValues.at(0).getInlineValue(); + HeightfieldHeightDataPointer pointer = info.inputValues.at(0).getInlineValue(); if (!pointer) { return STOP_RECURSION; } @@ -375,8 +377,7 @@ int PaintHeightfieldHeightEditVisitor::visit(MetavoxelInfo& info) { float startX = qMax(start.x, 0.0f), endX = qMin(end.x, (float)highest); uchar* lineDest = (uchar*)contents.data() + (int)z * size + (int)startX; float squaredRadius = scaledRadius * scaledRadius; - float squaredRadiusReciprocal = 1.0f / squaredRadius; - const int EIGHT_BIT_MAXIMUM = 255; + float squaredRadiusReciprocal = 1.0f / squaredRadius; float scaledHeight = _edit.height * EIGHT_BIT_MAXIMUM / info.size; bool changed = false; for (float endZ = qMin(end.z, (float)highest); z <= endZ; z += 1.0f) { @@ -396,8 +397,8 @@ int PaintHeightfieldHeightEditVisitor::visit(MetavoxelInfo& info) { lineDest += size; } if (changed) { - HeightfieldDataPointer newPointer(new HeightfieldData(contents)); - info.outputValues[0] = AttributeValue(_outputs.at(0), encodeInline(newPointer)); + HeightfieldHeightDataPointer newPointer(new HeightfieldHeightData(contents)); + info.outputValues[0] = AttributeValue(_outputs.at(0), encodeInline(newPointer)); } return STOP_RECURSION; } @@ -435,25 +436,18 @@ PaintHeightfieldColorEditVisitor::PaintHeightfieldColorEditVisitor(const PaintHe _bounds = Box(_edit.position - extents, _edit.position + extents); } -int PaintHeightfieldColorEditVisitor::visit(MetavoxelInfo& info) { - if (!info.getBounds().intersects(_bounds)) { - return STOP_RECURSION; - } - if (!info.isLeaf) { - return DEFAULT_ORDER; - } - HeightfieldDataPointer pointer = info.inputValues.at(0).getInlineValue(); +static void paintColor(MetavoxelInfo& info, int index, const glm::vec3& position, float radius, const QColor& color) { + HeightfieldColorDataPointer pointer = info.inputValues.at(index).getInlineValue(); if (!pointer) { - return STOP_RECURSION; + return; } QByteArray contents(pointer->getContents()); - const int BYTES_PER_PIXEL = 3; - int size = glm::sqrt((float)contents.size() / BYTES_PER_PIXEL); + int size = glm::sqrt((float)contents.size() / HeightfieldData::COLOR_BYTES); int highest = size - 1; float heightScale = size / info.size; - glm::vec3 center = (_edit.position - info.minimum) * heightScale; - float scaledRadius = _edit.radius * heightScale; + glm::vec3 center = (position - info.minimum) * heightScale; + float scaledRadius = radius * heightScale; glm::vec3 extents(scaledRadius, scaledRadius, scaledRadius); glm::vec3 start = glm::floor(center - extents); @@ -462,14 +456,14 @@ int 
PaintHeightfieldColorEditVisitor::visit(MetavoxelInfo& info) { // paint all points within the radius float z = qMax(start.z, 0.0f); float startX = qMax(start.x, 0.0f), endX = qMin(end.x, (float)highest); - int stride = size * BYTES_PER_PIXEL; - char* lineDest = contents.data() + (int)z * stride + (int)startX * BYTES_PER_PIXEL; + int stride = size * HeightfieldData::COLOR_BYTES; + char* lineDest = contents.data() + (int)z * stride + (int)startX * HeightfieldData::COLOR_BYTES; float squaredRadius = scaledRadius * scaledRadius; - char red = _edit.color.red(), green = _edit.color.green(), blue = _edit.color.blue(); + char red = color.red(), green = color.green(), blue = color.blue(); bool changed = false; for (float endZ = qMin(end.z, (float)highest); z <= endZ; z += 1.0f) { char* dest = lineDest; - for (float x = startX; x <= endX; x += 1.0f, dest += BYTES_PER_PIXEL) { + for (float x = startX; x <= endX; x += 1.0f, dest += HeightfieldData::COLOR_BYTES) { float dx = x - center.x, dz = z - center.z; if (dx * dx + dz * dz <= squaredRadius) { dest[0] = red; @@ -481,9 +475,20 @@ int PaintHeightfieldColorEditVisitor::visit(MetavoxelInfo& info) { lineDest += stride; } if (changed) { - HeightfieldDataPointer newPointer(new HeightfieldData(contents)); - info.outputValues[0] = AttributeValue(_outputs.at(0), encodeInline(newPointer)); + HeightfieldColorDataPointer newPointer(new HeightfieldColorData(contents)); + info.outputValues[index] = AttributeValue(info.inputValues.at(index).getAttribute(), + encodeInline(newPointer)); } +} + +int PaintHeightfieldColorEditVisitor::visit(MetavoxelInfo& info) { + if (!info.getBounds().intersects(_bounds)) { + return STOP_RECURSION; + } + if (!info.isLeaf) { + return DEFAULT_ORDER; + } + paintColor(info, 0, _edit.position, _edit.radius, _edit.color); return STOP_RECURSION; } @@ -492,3 +497,148 @@ void PaintHeightfieldColorEdit::apply(MetavoxelData& data, const WeakSharedObjec data.guide(visitor); } +PaintHeightfieldTextureEdit::PaintHeightfieldTextureEdit(const glm::vec3& position, float radius, + const SharedObjectPointer& texture, const QColor& averageColor) : + position(position), + radius(radius), + texture(texture), + averageColor(averageColor) { +} + +class PaintHeightfieldTextureEditVisitor : public MetavoxelVisitor { +public: + + PaintHeightfieldTextureEditVisitor(const PaintHeightfieldTextureEdit& edit); + + virtual int visit(MetavoxelInfo& info); + +private: + + PaintHeightfieldTextureEdit _edit; + Box _bounds; +}; + +PaintHeightfieldTextureEditVisitor::PaintHeightfieldTextureEditVisitor(const PaintHeightfieldTextureEdit& edit) : + MetavoxelVisitor(QVector() << AttributeRegistry::getInstance()->getHeightfieldTextureAttribute() << + AttributeRegistry::getInstance()->getHeightfieldColorAttribute(), QVector() << + AttributeRegistry::getInstance()->getHeightfieldTextureAttribute() << + AttributeRegistry::getInstance()->getHeightfieldColorAttribute()), + _edit(edit) { + + glm::vec3 extents(_edit.radius, _edit.radius, _edit.radius); + _bounds = Box(_edit.position - extents, _edit.position + extents); +} + +static QHash countIndices(const QByteArray& contents) { + QHash counts; + for (const uchar* src = (const uchar*)contents.constData(), *end = src + contents.size(); src != end; src++) { + if (*src != 0) { + counts[*src]++; + } + } + return counts; +} + +int PaintHeightfieldTextureEditVisitor::visit(MetavoxelInfo& info) { + if (!info.getBounds().intersects(_bounds)) { + return STOP_RECURSION; + } + if (!info.isLeaf) { + return DEFAULT_ORDER; + } + 
HeightfieldTextureDataPointer pointer = info.inputValues.at(0).getInlineValue(); + if (!pointer) { + return STOP_RECURSION; + } + QVector textures = pointer->getTextures(); + QByteArray contents(pointer->getContents()); + uchar textureIndex = 0; + if (_edit.texture && static_cast(_edit.texture.data())->getURL().isValid()) { + // first look for a matching existing texture, noting the first reusable slot + int firstEmptyIndex = -1; + for (int i = 0; i < textures.size(); i++) { + const SharedObjectPointer& texture = textures.at(i); + if (texture) { + if (texture->equals(_edit.texture.data())) { + textureIndex = i + 1; + break; + } + } else if (firstEmptyIndex == -1) { + firstEmptyIndex = i; + } + } + // if nothing found, use the first empty slot or append + if (textureIndex == 0) { + if (firstEmptyIndex != -1) { + textures[firstEmptyIndex] = _edit.texture; + textureIndex = firstEmptyIndex + 1; + + } else if (textures.size() < EIGHT_BIT_MAXIMUM) { + textures.append(_edit.texture); + textureIndex = textures.size(); + + } else { + // last resort: find the least-used texture and remove it + QHash counts = countIndices(contents); + int lowestCount = INT_MAX; + for (QHash::const_iterator it = counts.constBegin(); it != counts.constEnd(); it++) { + if (it.value() < lowestCount) { + textureIndex = it.key(); + lowestCount = it.value(); + } + } + contents.replace((char)textureIndex, (char)0); + } + } + } + int size = glm::sqrt((float)contents.size()); + int highest = size - 1; + float heightScale = highest / info.size; + + glm::vec3 center = (_edit.position - info.minimum) * heightScale; + float scaledRadius = _edit.radius * heightScale; + glm::vec3 extents(scaledRadius, scaledRadius, scaledRadius); + + glm::vec3 start = glm::floor(center - extents); + glm::vec3 end = glm::ceil(center + extents); + + // paint all points within the radius + float z = qMax(start.z, 0.0f); + float startX = qMax(start.x, 0.0f), endX = qMin(end.x, (float)highest); + uchar* lineDest = (uchar*)contents.data() + (int)z * size + (int)startX; + float squaredRadius = scaledRadius * scaledRadius; + bool changed = false; + QHash counts; + for (float endZ = qMin(end.z, (float)highest); z <= endZ; z += 1.0f) { + uchar* dest = lineDest; + for (float x = startX; x <= endX; x += 1.0f, dest++) { + float dx = x - center.x, dz = z - center.z; + if (dx * dx + dz * dz <= squaredRadius) { + *dest = textureIndex; + changed = true; + } + } + lineDest += size; + } + if (changed) { + // clear any unused textures + QHash counts = countIndices(contents); + for (int i = 0; i < textures.size(); i++) { + if (counts.value(i + 1) == 0) { + textures[i] = SharedObjectPointer(); + } + } + while (!(textures.isEmpty() || textures.last())) { + textures.removeLast(); + } + HeightfieldTextureDataPointer newPointer(new HeightfieldTextureData(contents, textures)); + info.outputValues[0] = AttributeValue(_outputs.at(0), encodeInline(newPointer)); + } + paintColor(info, 1, _edit.position, _edit.radius, _edit.averageColor); + return STOP_RECURSION; +} + +void PaintHeightfieldTextureEdit::apply(MetavoxelData& data, const WeakSharedObjectHash& objects) const { + PaintHeightfieldTextureEditVisitor visitor(*this); + data.guide(visitor); +} diff --git a/libraries/metavoxels/src/MetavoxelMessages.h b/libraries/metavoxels/src/MetavoxelMessages.h index 2fc8cbf030..3d610b10df 100644 --- a/libraries/metavoxels/src/MetavoxelMessages.h +++ b/libraries/metavoxels/src/MetavoxelMessages.h @@ -241,4 +241,23 @@ public: DECLARE_STREAMABLE_METATYPE(PaintHeightfieldColorEdit) +/// An 
edit that sets a region of a heightfield texture. +class PaintHeightfieldTextureEdit : public MetavoxelEdit { + STREAMABLE + +public: + + STREAM glm::vec3 position; + STREAM float radius; + STREAM SharedObjectPointer texture; + STREAM QColor averageColor; + + PaintHeightfieldTextureEdit(const glm::vec3& position = glm::vec3(), float radius = 0.0f, + const SharedObjectPointer& texture = SharedObjectPointer(), const QColor& averageColor = QColor()); + + virtual void apply(MetavoxelData& data, const WeakSharedObjectHash& objects) const; +}; + +DECLARE_STREAMABLE_METATYPE(PaintHeightfieldTextureEdit) + #endif // hifi_MetavoxelMessages_h diff --git a/libraries/metavoxels/src/SharedObject.cpp b/libraries/metavoxels/src/SharedObject.cpp index 053ef57bad..bf9b123a36 100644 --- a/libraries/metavoxels/src/SharedObject.cpp +++ b/libraries/metavoxels/src/SharedObject.cpp @@ -158,7 +158,7 @@ SharedObjectEditor::SharedObjectEditor(const QMetaObject* metaObject, bool nulla _type->addItem("(none)"); } foreach (const QMetaObject* metaObject, Bitstream::getMetaObjectSubClasses(metaObject)) { - // add add constructable subclasses + // add constructable subclasses if (metaObject->constructorCount() > 0) { _type->addItem(metaObject->className(), QVariant::fromValue(metaObject)); } @@ -193,7 +193,9 @@ void SharedObjectEditor::detachObject() { for (int i = 0; i < form->rowCount(); i++) { QWidget* widget = form->itemAt(i, QFormLayout::FieldRole)->widget(); QMetaProperty property = metaObject->property(widget->property("propertyIndex").toInt()); - connect(_object.data(), signal(property.notifySignal().methodSignature()), SLOT(updateProperty())); + if (property.hasNotifySignal()) { + connect(_object.data(), signal(property.notifySignal().methodSignature()), SLOT(updateProperty())); + } } } @@ -226,6 +228,7 @@ void SharedObjectEditor::updateType() { const QMetaObject* metaObject = _type->itemData(_type->currentIndex()).value(); if (!metaObject) { _object.reset(); + emit objectChanged(_object); return; } QObject* newObject = metaObject->newInstance(); @@ -259,7 +262,7 @@ void SharedObjectEditor::updateType() { } } } - _object = static_cast(newObject); + emit objectChanged(_object = static_cast(newObject)); } void SharedObjectEditor::propertyChanged() { @@ -275,6 +278,7 @@ void SharedObjectEditor::propertyChanged() { QByteArray valuePropertyName = QItemEditorFactory::defaultFactory()->valuePropertyName(property.userType()); property.write(object, widget->property(valuePropertyName)); } + emit objectChanged(_object); } void SharedObjectEditor::updateProperty() { diff --git a/libraries/metavoxels/src/SharedObject.h b/libraries/metavoxels/src/SharedObject.h index 407fc820c8..157987ed6f 100644 --- a/libraries/metavoxels/src/SharedObject.h +++ b/libraries/metavoxels/src/SharedObject.h @@ -211,7 +211,7 @@ Q_DECLARE_METATYPE(SharedObjectSet) /// Allows editing shared object instances. class SharedObjectEditor : public QWidget { Q_OBJECT - Q_PROPERTY(SharedObjectPointer object READ getObject WRITE setObject USER true) + Q_PROPERTY(SharedObjectPointer object READ getObject WRITE setObject NOTIFY objectChanged USER true) public: @@ -222,6 +222,10 @@ public: /// "Detaches" the object pointer, copying it if anyone else is holding a reference. 
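The NOTIFY clause added to the object property above (the signal itself is declared just below) lets observers react to edits instead of polling the editor. A minimal wiring sketch, where the receiver and its applyEditedObject slot are hypothetical and not part of this change:

    // Sketch: reacting to edits made in a SharedObjectEditor.
    // applyEditedObject(const SharedObjectPointer&) is a hypothetical slot on the receiver.
    void watchEditor(SharedObjectEditor* editor, QObject* receiver) {
        QObject::connect(editor, SIGNAL(objectChanged(const SharedObjectPointer&)),
                         receiver, SLOT(applyEditedObject(const SharedObjectPointer&)));
    }
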
void detachObject(); +signals: + + void objectChanged(const SharedObjectPointer& object); + public slots: void setObject(const SharedObjectPointer& object); diff --git a/libraries/networking/src/PacketHeaders.cpp b/libraries/networking/src/PacketHeaders.cpp index a1306d8610..91de48296f 100644 --- a/libraries/networking/src/PacketHeaders.cpp +++ b/libraries/networking/src/PacketHeaders.cpp @@ -49,8 +49,9 @@ PacketVersion versionForPacketType(PacketType type) { switch (type) { case PacketTypeMicrophoneAudioNoEcho: case PacketTypeMicrophoneAudioWithEcho: - case PacketTypeSilentAudioFrame: return 2; + case PacketTypeSilentAudioFrame: + return 3; case PacketTypeMixedAudio: return 1; case PacketTypeAvatarData: @@ -81,7 +82,7 @@ PacketVersion versionForPacketType(PacketType type) { case PacketTypeAudioStreamStats: return 1; case PacketTypeMetavoxelData: - return 1; + return 3; default: return 0; } diff --git a/libraries/script-engine/src/ArrayBufferPrototype.cpp b/libraries/script-engine/src/ArrayBufferPrototype.cpp index 53ebebc740..9739f67381 100644 --- a/libraries/script-engine/src/ArrayBufferPrototype.cpp +++ b/libraries/script-engine/src/ArrayBufferPrototype.cpp @@ -11,9 +11,15 @@ #include +#include +#include + #include "ArrayBufferClass.h" #include "ArrayBufferPrototype.h" +static const int QCOMPRESS_HEADER_POSITION = 0; +static const int QCOMPRESS_HEADER_SIZE = 4; + Q_DECLARE_METATYPE(QByteArray*) ArrayBufferPrototype::ArrayBufferPrototype(QObject* parent) : QObject(parent) { @@ -43,6 +49,41 @@ QByteArray ArrayBufferPrototype::slice(qint32 begin) const { return ba->mid(begin, -1); } +QByteArray ArrayBufferPrototype::compress() const { + // Compresses the ArrayBuffer data in Zlib format. + QByteArray* ba = thisArrayBuffer(); + + QByteArray buffer = qCompress(*ba); + buffer.remove(QCOMPRESS_HEADER_POSITION, QCOMPRESS_HEADER_SIZE); // Remove Qt's custom header to make it proper Zlib. + + return buffer; +} + +QByteArray ArrayBufferPrototype::recodeImage(const QString& sourceFormat, const QString& targetFormat, qint32 maxSize) const { + // Recodes image data if sourceFormat and targetFormat are different. + // Rescales image data if either dimension is greater than the specified maximum. 
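The body that follows implements exactly that. As a standalone illustration of the underlying Qt pattern (decode, conditionally rescale, re-encode through a QBuffer), here is a hedged sketch; shrinkAndRecode is a made-up name, not the method added in this diff:

    #include <QBuffer>
    #include <QByteArray>
    #include <QImage>
    #include <QString>

    // Illustrative sketch only: decode the bytes, shrink oversized images while
    // preserving aspect ratio, then re-encode in the requested target format.
    QByteArray shrinkAndRecode(const QByteArray& sourceBytes, const QString& targetFormat, int maxSize) {
        QImage image = QImage::fromData(sourceBytes);                     // source format auto-detected
        if (image.width() > maxSize || image.height() > maxSize) {
            image = image.scaled(maxSize, maxSize, Qt::KeepAspectRatio);  // fit within maxSize x maxSize
        }
        QBuffer buffer;
        buffer.open(QIODevice::WriteOnly);
        image.save(&buffer, targetFormat.toUpper().toLatin1().constData());
        return buffer.data();
    }
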
+ QByteArray* ba = thisArrayBuffer(); + + bool mustRecode = sourceFormat.toLower() != targetFormat.toLower(); + + QImage image = QImage::fromData(*ba); + if (image.width() > maxSize || image.height() > maxSize) { + image = image.scaled(maxSize, maxSize, Qt::KeepAspectRatio); + mustRecode = true; + } + + if (mustRecode) { + QBuffer buffer; + buffer.open(QIODevice::WriteOnly); + std::string str = targetFormat.toUpper().toStdString(); + const char* format = str.c_str(); + image.save(&buffer, format); + return buffer.data(); + } + + return *ba; +} + QByteArray* ArrayBufferPrototype::thisArrayBuffer() const { return qscriptvalue_cast(thisObject().data()); } diff --git a/libraries/script-engine/src/ArrayBufferPrototype.h b/libraries/script-engine/src/ArrayBufferPrototype.h index 09d4596f28..f9dd667dc4 100644 --- a/libraries/script-engine/src/ArrayBufferPrototype.h +++ b/libraries/script-engine/src/ArrayBufferPrototype.h @@ -23,6 +23,8 @@ public: public slots: QByteArray slice(qint32 begin, qint32 end) const; QByteArray slice(qint32 begin) const; + QByteArray compress() const; + QByteArray recodeImage(const QString& sourceFormat, const QString& targetFormat, qint32 maxSize) const; private: QByteArray* thisArrayBuffer() const; diff --git a/libraries/script-engine/src/EventTypes.cpp b/libraries/script-engine/src/EventTypes.cpp index 9cf6c5b1a0..0e6a27bc42 100644 --- a/libraries/script-engine/src/EventTypes.cpp +++ b/libraries/script-engine/src/EventTypes.cpp @@ -78,6 +78,8 @@ KeyEvent::KeyEvent(const QKeyEvent& event) { text = "LEFT"; } else if (key == Qt::Key_Right) { text = "RIGHT"; + } else if (key == Qt::Key_Space) { + text = "SPACE"; } else if (key == Qt::Key_Escape) { text = "ESC"; } else if (key == Qt::Key_Tab) { @@ -220,6 +222,8 @@ void keyEventFromScriptValue(const QScriptValue& object, KeyEvent& event) { } else if (event.text.toUpper() == "RIGHT") { event.key = Qt::Key_Right; event.isKeypad = true; + } else if (event.text.toUpper() == "SPACE") { + event.key = Qt::Key_Space; } else if (event.text.toUpper() == "ESC") { event.key = Qt::Key_Escape; } else if (event.text.toUpper() == "TAB") { diff --git a/libraries/script-engine/src/ScriptEngine.cpp b/libraries/script-engine/src/ScriptEngine.cpp index 906eb8e4e0..c114620cbd 100644 --- a/libraries/script-engine/src/ScriptEngine.cpp +++ b/libraries/script-engine/src/ScriptEngine.cpp @@ -493,14 +493,6 @@ void ScriptEngine::run() { // pack a placeholder value for sequence number for now, will be packed when destination node is known int numPreSequenceNumberBytes = audioPacket.size(); packetStream << (quint16) 0; - - // assume scripted avatar audio is mono and set channel flag to zero - packetStream << (quint8) 0; - - // use the orientation and position of this avatar for the source of this audio - packetStream.writeRawData(reinterpret_cast(&_avatarData->getPosition()), sizeof(glm::vec3)); - glm::quat headOrientation = _avatarData->getHeadOrientation(); - packetStream.writeRawData(reinterpret_cast(&headOrientation), sizeof(glm::quat)); if (silentFrame) { if (!_isListeningToAudioStream) { @@ -510,12 +502,20 @@ void ScriptEngine::run() { // write the number of silent samples so the audio-mixer can uphold timing packetStream.writeRawData(reinterpret_cast(&SCRIPT_AUDIO_BUFFER_SAMPLES), sizeof(int16_t)); - } else if (nextSoundOutput) { - // write the raw audio data - packetStream.writeRawData(reinterpret_cast(nextSoundOutput), - numAvailableSamples * sizeof(int16_t)); - } + } else if (nextSoundOutput) { + // assume scripted avatar audio is mono and set 
channel flag to zero + packetStream << (quint8)0; + + // use the orientation and position of this avatar for the source of this audio + packetStream.writeRawData(reinterpret_cast(&_avatarData->getPosition()), sizeof(glm::vec3)); + glm::quat headOrientation = _avatarData->getHeadOrientation(); + packetStream.writeRawData(reinterpret_cast(&headOrientation), sizeof(glm::quat)); + + // write the raw audio data + packetStream.writeRawData(reinterpret_cast(nextSoundOutput), numAvailableSamples * sizeof(int16_t)); + } + // write audio packet to AudioMixer nodes NodeList* nodeList = NodeList::getInstance(); foreach(const SharedNodePointer& node, nodeList->getNodeHash()) { diff --git a/libraries/script-engine/src/XMLHttpRequestClass.cpp b/libraries/script-engine/src/XMLHttpRequestClass.cpp index d9b7312bf4..cb891c2ab1 100644 --- a/libraries/script-engine/src/XMLHttpRequestClass.cpp +++ b/libraries/script-engine/src/XMLHttpRequestClass.cpp @@ -13,10 +13,15 @@ // #include +#include #include +#include #include "XMLHttpRequestClass.h" +#include "ScriptEngine.h" + +Q_DECLARE_METATYPE(QByteArray*) XMLHttpRequestClass::XMLHttpRequestClass(QScriptEngine* engine) : _engine(engine), @@ -33,6 +38,7 @@ XMLHttpRequestClass::XMLHttpRequestClass(QScriptEngine* engine) : _onReadyStateChange(QScriptValue::NullValue), _readyState(XMLHttpRequestClass::UNSENT), _errorCode(QNetworkReply::NoError), + _file(NULL), _timeout(0), _timer(this), _numRedirects(0) { @@ -52,6 +58,20 @@ QScriptValue XMLHttpRequestClass::constructor(QScriptContext* context, QScriptEn QScriptValue XMLHttpRequestClass::getStatus() const { if (_reply) { return QScriptValue(_reply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt()); + } + if(_url.isLocalFile()) { + switch (_errorCode) { + case QNetworkReply::NoError: + return QScriptValue(200); + case QNetworkReply::ContentNotFoundError: + return QScriptValue(404); + case QNetworkReply::ContentAccessDenied: + return QScriptValue(409); + case QNetworkReply::TimeoutError: + return QScriptValue(408); + case QNetworkReply::ContentOperationNotPermittedError: + return QScriptValue(501); + } } return QScriptValue(0); } @@ -60,6 +80,20 @@ QString XMLHttpRequestClass::getStatusText() const { if (_reply) { return _reply->attribute(QNetworkRequest::HttpReasonPhraseAttribute).toString(); } + if (_url.isLocalFile()) { + switch (_errorCode) { + case QNetworkReply::NoError: + return "OK"; + case QNetworkReply::ContentNotFoundError: + return "Not Found"; + case QNetworkReply::ContentAccessDenied: + return "Conflict"; + case QNetworkReply::TimeoutError: + return "Timeout"; + case QNetworkReply::ContentOperationNotPermittedError: + return "Not Implemented"; + } + } return ""; } @@ -104,6 +138,13 @@ QScriptValue XMLHttpRequestClass::getAllResponseHeaders() const { } return QString(headers.data()); } + if (_url.isLocalFile()) { + QString headers = QString("Content-Type: application/octet-stream\n"); + headers.append("Content-Length: "); + headers.append(QString("%1").arg(_rawResponseData.length())); + headers.append("\n"); + return headers; + } return QScriptValue(""); } @@ -111,6 +152,14 @@ QScriptValue XMLHttpRequestClass::getResponseHeader(const QString& name) const { if (_reply && _reply->hasRawHeader(name.toLatin1())) { return QScriptValue(QString(_reply->rawHeader(name.toLatin1()))); } + if (_url.isLocalFile()) { + if (name.toLower() == "content-type") { + return QString("application/octet-stream"); + } + if (name.toLower() == "content-length") { + return QString("%1").arg(_rawResponseData.length()); + } 
+ } return QScriptValue::NullValue; } @@ -126,34 +175,72 @@ void XMLHttpRequestClass::setReadyState(ReadyState readyState) { void XMLHttpRequestClass::open(const QString& method, const QString& url, bool async, const QString& username, const QString& password) { if (_readyState == UNSENT) { - _async = async; - _url.setUrl(url); - if (!username.isEmpty()) { - _url.setUserName(username); - } - if (!password.isEmpty()) { - _url.setPassword(password); - } - _request.setUrl(_url); _method = method; - setReadyState(OPENED); + _url.setUrl(url); + _async = async; + + if (_url.isLocalFile()) { + if (_method.toUpper() == "GET" && !_async && username.isEmpty() && password.isEmpty()) { + _file = new QFile(_url.toLocalFile()); + if (!_file->exists()) { + qDebug() << "Can't find file " << _url.fileName(); + abortRequest(); + _errorCode = QNetworkReply::ContentNotFoundError; + setReadyState(DONE); + emit requestComplete(); + } else if (!_file->open(QIODevice::ReadOnly)) { + qDebug() << "Can't open file " << _url.fileName(); + abortRequest(); + //_errorCode = QNetworkReply::ContentConflictError; // TODO: Use this status when update to Qt 5.3 + _errorCode = QNetworkReply::ContentAccessDenied; + setReadyState(DONE); + emit requestComplete(); + } else { + setReadyState(OPENED); + } + } else { + notImplemented(); + } + } else { + if (url.toLower().left(33) == "https://data.highfidelity.io/api/") { + _url.setQuery("access_token=" + AccountManager::getInstance().getAccountInfo().getAccessToken().token); + } + if (!username.isEmpty()) { + _url.setUserName(username); + } + if (!password.isEmpty()) { + _url.setPassword(password); + } + _request.setUrl(_url); + setReadyState(OPENED); + } } } void XMLHttpRequestClass::send() { - send(QString::Null()); + send(QScriptValue::NullValue); } -void XMLHttpRequestClass::send(const QString& data) { +void XMLHttpRequestClass::send(const QScriptValue& data) { if (_readyState == OPENED && !_reply) { if (!data.isNull()) { - _sendData = new QBuffer(this); - _sendData->setData(data.toUtf8()); + if (_url.isLocalFile()) { + notImplemented(); + return; + } else { + _sendData = new QBuffer(this); + if (data.isObject()) { + QByteArray ba = qscriptvalue_cast(data); + _sendData->setData(ba); + } else { + _sendData->setData(data.toString().toUtf8()); + } + } } doSend(); - if (!_async) { + if (!_async && !_url.isLocalFile()) { QEventLoop loop; connect(this, SIGNAL(requestComplete()), &loop, SLOT(quit())); loop.exec(); @@ -162,14 +249,24 @@ void XMLHttpRequestClass::send(const QString& data) { } void XMLHttpRequestClass::doSend() { - _reply = NetworkAccessManager::getInstance().sendCustomRequest(_request, _method.toLatin1(), _sendData); - - connectToReply(_reply); + + if (!_url.isLocalFile()) { + _reply = NetworkAccessManager::getInstance().sendCustomRequest(_request, _method.toLatin1(), _sendData); + connectToReply(_reply); + } if (_timeout > 0) { _timer.start(_timeout); connect(&_timer, SIGNAL(timeout()), this, SLOT(requestTimeout())); } + + if (_url.isLocalFile()) { + setReadyState(HEADERS_RECEIVED); + setReadyState(LOADING); + _rawResponseData = _file->readAll(); + _file->close(); + requestFinished(); + } } void XMLHttpRequestClass::requestTimeout() { @@ -188,9 +285,16 @@ void XMLHttpRequestClass::requestError(QNetworkReply::NetworkError code) { void XMLHttpRequestClass::requestFinished() { disconnect(&_timer, SIGNAL(timeout()), this, SLOT(requestTimeout())); - _errorCode = _reply->error(); + if (!_url.isLocalFile()) { + _errorCode = _reply->error(); + } else { + _errorCode = 
QNetworkReply::NoError; + } + if (_errorCode == QNetworkReply::NoError) { - _rawResponseData.append(_reply->readAll()); + if (!_url.isLocalFile()) { + _rawResponseData.append(_reply->readAll()); + } if (_responseType == "json") { _responseData = _engine->evaluate("(" + QString(_rawResponseData.data()) + ")"); @@ -199,11 +303,13 @@ void XMLHttpRequestClass::requestFinished() { _responseData = QScriptValue::NullValue; } } else if (_responseType == "arraybuffer") { - _responseData = QScriptValue(_rawResponseData.data()); + QScriptValue data = _engine->newVariant(QVariant::fromValue(_rawResponseData)); + _responseData = _engine->newObject(reinterpret_cast(_engine)->getArrayBufferClass(), data); } else { _responseData = QScriptValue(QString(_rawResponseData.data())); } } + setReadyState(DONE); emit requestComplete(); } @@ -217,6 +323,19 @@ void XMLHttpRequestClass::abortRequest() { delete _reply; _reply = NULL; } + + if (_file != NULL) { + _file->close(); + _file = NULL; + } +} + +void XMLHttpRequestClass::notImplemented() { + abortRequest(); + //_errorCode = QNetworkReply::OperationNotImplementedError; TODO: Use this status code when update to Qt 5.3 + _errorCode = QNetworkReply::ContentOperationNotPermittedError; + setReadyState(DONE); + emit requestComplete(); } void XMLHttpRequestClass::connectToReply(QNetworkReply* reply) { diff --git a/libraries/script-engine/src/XMLHttpRequestClass.h b/libraries/script-engine/src/XMLHttpRequestClass.h index 48f1a596e1..55bf646476 100644 --- a/libraries/script-engine/src/XMLHttpRequestClass.h +++ b/libraries/script-engine/src/XMLHttpRequestClass.h @@ -84,7 +84,7 @@ public slots: void open(const QString& method, const QString& url, bool async = true, const QString& username = "", const QString& password = ""); void send(); - void send(const QString& data); + void send(const QScriptValue& data); QScriptValue getAllResponseHeaders() const; QScriptValue getResponseHeader(const QString& name) const; @@ -97,6 +97,7 @@ private: void connectToReply(QNetworkReply* reply); void disconnectFromReply(QNetworkReply* reply); void abortRequest(); + void notImplemented(); QScriptEngine* _engine; bool _async; @@ -112,6 +113,7 @@ private: QScriptValue _onReadyStateChange; ReadyState _readyState; QNetworkReply::NetworkError _errorCode; + QFile* _file; int _timeout; QTimer _timer; int _numRedirects; diff --git a/libraries/shared/src/CapsuleShape.cpp b/libraries/shared/src/CapsuleShape.cpp index 03bc48bd94..09776a233f 100644 --- a/libraries/shared/src/CapsuleShape.cpp +++ b/libraries/shared/src/CapsuleShape.cpp @@ -18,20 +18,20 @@ #include "SharedUtil.h" -CapsuleShape::CapsuleShape() : Shape(Shape::CAPSULE_SHAPE), _radius(0.0f), _halfHeight(0.0f) {} +CapsuleShape::CapsuleShape() : Shape(CAPSULE_SHAPE), _radius(0.0f), _halfHeight(0.0f) {} -CapsuleShape::CapsuleShape(float radius, float halfHeight) : Shape(Shape::CAPSULE_SHAPE), +CapsuleShape::CapsuleShape(float radius, float halfHeight) : Shape(CAPSULE_SHAPE), _radius(radius), _halfHeight(halfHeight) { updateBoundingRadius(); } CapsuleShape::CapsuleShape(float radius, float halfHeight, const glm::vec3& position, const glm::quat& rotation) : - Shape(Shape::CAPSULE_SHAPE, position, rotation), _radius(radius), _halfHeight(halfHeight) { + Shape(CAPSULE_SHAPE, position, rotation), _radius(radius), _halfHeight(halfHeight) { updateBoundingRadius(); } CapsuleShape::CapsuleShape(float radius, const glm::vec3& startPoint, const glm::vec3& endPoint) : - Shape(Shape::CAPSULE_SHAPE), _radius(radius), _halfHeight(0.0f) { + 
Shape(CAPSULE_SHAPE), _radius(radius), _halfHeight(0.0f) { setEndPoints(startPoint, endPoint); } diff --git a/libraries/shared/src/ContactPoint.cpp b/libraries/shared/src/ContactPoint.cpp index 27a496d445..02cf896594 100644 --- a/libraries/shared/src/ContactPoint.cpp +++ b/libraries/shared/src/ContactPoint.cpp @@ -96,10 +96,10 @@ float ContactPoint::enforce() { bool constraintViolation = (pDotN > CONTACT_PENETRATION_ALLOWANCE); // the contact point will be the average of the two points on the shapes - _contactPoint = 0.5f * (pointA + pointB); + _contactPoint = _relativeMassA * pointA + _relativeMassB * pointB; if (constraintViolation) { - for (int i = 0; i < _numPoints; ++i) { + for (int i = 0; i < _numPointsA; ++i) { VerletPoint* point = _points[i]; glm::vec3 offset = _offsets[i]; @@ -111,8 +111,31 @@ float ContactPoint::enforce() { // use the relative sizes of the components to decide how much perpenducular delta to use // perpendicular < parallel ==> static friction ==> perpFactor = 1.0 // perpendicular > parallel ==> dynamic friction ==> cap to length of paraDelta ==> perpFactor < 1.0 - float paraLength = glm::length(paraDelta); - float perpLength = glm::length(perpDelta); + float paraLength = _relativeMassB * glm::length(paraDelta); + float perpLength = _relativeMassA * glm::length(perpDelta); + float perpFactor = (perpLength > paraLength && perpLength > EPSILON) ? (paraLength / perpLength) : 1.0f; + + // recombine the two components to get the final delta + delta = paraDelta + perpFactor * perpDelta; + + glm::vec3 targetPosition = point->_position + delta; + _distances[i] = glm::distance(_contactPoint, targetPosition); + point->_position += delta; + } + for (int i = _numPointsA; i < _numPoints; ++i) { + VerletPoint* point = _points[i]; + glm::vec3 offset = _offsets[i]; + + // split delta into parallel and perpendicular components + glm::vec3 delta = _contactPoint + offset - point->_position; + glm::vec3 paraDelta = glm::dot(delta, _normal) * _normal; + glm::vec3 perpDelta = delta - paraDelta; + + // use the relative sizes of the components to decide how much perpenducular delta to use + // perpendicular < parallel ==> static friction ==> perpFactor = 1.0 + // perpendicular > parallel ==> dynamic friction ==> cap to length of paraDelta ==> perpFactor < 1.0 + float paraLength = _relativeMassA * glm::length(paraDelta); + float perpLength = _relativeMassB * glm::length(perpDelta); float perpFactor = (perpLength > paraLength && perpLength > EPSILON) ? 
(paraLength / perpLength) : 1.0f; // recombine the two components to get the final delta diff --git a/libraries/shared/src/MovingMinMaxAvg.h b/libraries/shared/src/MovingMinMaxAvg.h index 7d4b3df124..4a044392c1 100644 --- a/libraries/shared/src/MovingMinMaxAvg.h +++ b/libraries/shared/src/MovingMinMaxAvg.h @@ -18,45 +18,63 @@ #include "RingBufferHistory.h" template -class MovingMinMaxAvg { +class MinMaxAvg { +public: + MinMaxAvg() + : _min(std::numeric_limits::max()), + _max(std::numeric_limits::min()), + _average(0.0), + _samples(0) + {} + + void reset() { + _min = std::numeric_limits::max(); + _max = std::numeric_limits::min(); + _average = 0.0; + _samples = 0; + } + + void update(T sample) { + if (sample < _min) { + _min = sample; + } + if (sample > _max) { + _max = sample; + } + double totalSamples = _samples + 1; + _average = _average * ((double)_samples / totalSamples) + + (double)sample / totalSamples; + _samples++; + } + + void update(const MinMaxAvg& other) { + if (other._min < _min) { + _min = other._min; + } + if (other._max > _max) { + _max = other._max; + } + double totalSamples = _samples + other._samples; + _average = _average * ((double)_samples / totalSamples) + + other._average * ((double)other._samples / totalSamples); + _samples += other._samples; + } + + T getMin() const { return _min; } + T getMax() const { return _max; } + double getAverage() const { return _average; } + int getSamples() const { return _samples; } + double getSum() const { return _samples * _average; } private: - class Stats { - public: - Stats() - : _min(std::numeric_limits::max()), - _max(std::numeric_limits::min()), - _average(0.0) {} - - void updateWithSample(T sample, int& numSamplesInAverage) { - if (sample < _min) { - _min = sample; - } - if (sample > _max) { - _max = sample; - } - _average = _average * ((double)numSamplesInAverage / (numSamplesInAverage + 1)) - + (double)sample / (numSamplesInAverage + 1); - numSamplesInAverage++; - } - - void updateWithOtherStats(const Stats& other, int& numStatsInAverage) { - if (other._min < _min) { - _min = other._min; - } - if (other._max > _max) { - _max = other._max; - } - _average = _average * ((double)numStatsInAverage / (numStatsInAverage + 1)) - + other._average / (numStatsInAverage + 1); - numStatsInAverage++; - } - - T _min; - T _max; - double _average; - }; + T _min; + T _max; + double _average; + int _samples; +}; +template +class MovingMinMaxAvg { public: // This class collects 3 stats (min, max, avg) over a moving window of samples. // The moving window contains _windowIntervals * _intervalLength samples. @@ -66,66 +84,98 @@ public: // this class with MovingMinMaxAvg(100, 50). If you want a moving min of the past 100 samples updated on every // new sample, instantiate this class with MovingMinMaxAvg(1, 100). 
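A quick usage sketch of that sample-count-driven mode (assuming the class remains templated on the sample type, as in the original header; the variable names are illustrative only):

    // Sketch: 100-sample intervals x 50 intervals = a 5000-sample moving window,
    // with window stats refreshed every 100 samples.
    static MovingMinMaxAvg<float> frameTimeStats(100, 50);

    void onNewSample(float sample) {
        frameTimeStats.update(sample);
        if (frameTimeStats.getNewStatsAvailableFlag()) {
            float windowMin = frameTimeStats.getWindowMin();
            float windowMax = frameTimeStats.getWindowMax();
            double windowAverage = frameTimeStats.getWindowAverage();
            // ... report windowMin / windowMax / windowAverage somewhere ...
            frameTimeStats.clearNewStatsAvailableFlag();
        }
    }
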
+ + /// use intervalLength = 0 to use in manual mode, where the currentIntervalComplete() function must + /// be called to complete an interval MovingMinMaxAvg(int intervalLength, int windowIntervals) : _intervalLength(intervalLength), _windowIntervals(windowIntervals), _overallStats(), - _samplesCollected(0), _windowStats(), - _existingSamplesInCurrentInterval(0), _currentIntervalStats(), _intervalStats(windowIntervals), _newStatsAvailable(false) {} void reset() { - _overallStats = Stats(); - _samplesCollected = 0; - _windowStats = Stats(); - _existingSamplesInCurrentInterval = 0; - _currentIntervalStats = Stats(); + _overallStats.reset(); + _windowStats.reset(); + _currentIntervalStats.reset(); _intervalStats.clear(); _newStatsAvailable = false; } + void setWindowIntervals(int windowIntervals) { + _windowIntervals = windowIntervals; + _overallStats.reset(); + _windowStats.reset(); + _currentIntervalStats.reset(); + _intervalStats.setCapacity(_windowIntervals); + _newStatsAvailable = false; + } + void update(T newSample) { // update overall stats - _overallStats.updateWithSample(newSample, _samplesCollected); + _overallStats.update(newSample); // update the current interval stats - _currentIntervalStats.updateWithSample(newSample, _existingSamplesInCurrentInterval); + _currentIntervalStats.update(newSample); // if the current interval of samples is now full, record its stats into our past intervals' stats - if (_existingSamplesInCurrentInterval == _intervalLength) { - - // record current interval's stats, then reset them - _intervalStats.insert(_currentIntervalStats); - _currentIntervalStats = Stats(); - _existingSamplesInCurrentInterval = 0; - - // update the window's stats by combining the intervals' stats - typename RingBufferHistory::Iterator i = _intervalStats.begin(); - typename RingBufferHistory::Iterator end = _intervalStats.end(); - _windowStats = Stats(); - int intervalsIncludedInWindowStats = 0; - while (i != end) { - _windowStats.updateWithOtherStats(*i, intervalsIncludedInWindowStats); - i++; - } - - _newStatsAvailable = true; + // NOTE: if _intervalLength is 0 (manual mode), currentIntervalComplete() will not be called here. + if (_currentIntervalStats.getSamples() == _intervalLength) { + currentIntervalComplete(); } } + /// This function can be called to manually control when each interval ends. For example, if each interval + /// needs to last T seconds as opposed to N samples, this function should be called every T seconds. 
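For example (a sketch only, using the same template convention; the quint64 sample type and one-second cadence are arbitrary choices), the method declared just below is driven like this:

    // intervalLength == 0 selects manual mode: an interval closes only when told to.
    static MovingMinMaxAvg<quint64> perSecondStats(0, 30);   // keep the last 30 one-second intervals

    void onSampleArrived(quint64 value) {
        perSecondStats.update(value);                         // accumulates into the current interval
    }

    void onOneSecondTimer() {
        perSecondStats.currentIntervalComplete();             // close out the interval once per second
        MinMaxAvg<quint64> lastSecond = perSecondStats.getLastCompleteIntervalStats();
        // lastSecond.getMin()/getMax()/getAverage()/getSamples() now describe the past second
    }
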
+ void currentIntervalComplete() { + // record current interval's stats, then reset them + _intervalStats.insert(_currentIntervalStats); + _currentIntervalStats.reset(); + + // update the window's stats by combining the intervals' stats + typename RingBufferHistory< MinMaxAvg >::Iterator i = _intervalStats.begin(); + typename RingBufferHistory< MinMaxAvg >::Iterator end = _intervalStats.end(); + _windowStats.reset(); + while (i != end) { + _windowStats.update(*i); + ++i; + } + + _newStatsAvailable = true; + } + bool getNewStatsAvailableFlag() const { return _newStatsAvailable; } void clearNewStatsAvailableFlag() { _newStatsAvailable = false; } - T getMin() const { return _overallStats._min; } - T getMax() const { return _overallStats._max; } - double getAverage() const { return _overallStats._average; } - T getWindowMin() const { return _windowStats._min; } - T getWindowMax() const { return _windowStats._max; } - double getWindowAverage() const { return _windowStats._average; } + T getMin() const { return _overallStats.getMin(); } + T getMax() const { return _overallStats.getMax(); } + double getAverage() const { return _overallStats.getAverage(); } + int getSamples() const { return _overallStats.getSamples(); } + double getSum() const { return _overallStats.getSum(); } + + T getWindowMin() const { return _windowStats.getMin(); } + T getWindowMax() const { return _windowStats.getMax(); } + double getWindowAverage() const { return _windowStats.getAverage(); } + int getWindowSamples() const { return _windowStats.getSamples(); } + double getWindowSum() const { return _windowStats.getSum(); } + + T getCurrentIntervalMin() const { return _currentIntervalStats.getMin(); } + T getCurrentIntervalMax() const { return _currentIntervalStats.getMax(); } + double getCurrentIntervalAverage() const { return _currentIntervalStats.getAverage(); } + int getCurrentIntervalSamples() const { return _currentIntervalStats.getSamples(); } + double getCurrentIntervalSum() const { return _currentIntervalStats.getSum(); } + + const MinMaxAvg& getOverallStats() const{ return _overallStats; } + const MinMaxAvg& getWindowStats() const{ return _windowStats; } + const MinMaxAvg& getCurrentIntervalStats() const { return _currentIntervalStats; } + + MinMaxAvg getLastCompleteIntervalStats() const { + const MinMaxAvg* stats = _intervalStats.getNewestEntry(); + return stats == NULL ? MinMaxAvg() : *stats; + } bool isWindowFilled() const { return _intervalStats.isFilled(); } @@ -134,18 +184,16 @@ private: int _windowIntervals; // these are min/max/avg stats for all samples collected. 
- Stats _overallStats; - int _samplesCollected; + MinMaxAvg _overallStats; // these are the min/max/avg stats for the samples in the moving window - Stats _windowStats; - int _existingSamplesInCurrentInterval; + MinMaxAvg _windowStats; - // these are the min/max/avg stats for the current interval - Stats _currentIntervalStats; + // these are the min/max/avg stats for the samples in the current interval + MinMaxAvg _currentIntervalStats; // these are stored stats for the past intervals in the window - RingBufferHistory _intervalStats; + RingBufferHistory< MinMaxAvg > _intervalStats; bool _newStatsAvailable; }; diff --git a/libraries/shared/src/PhysicsEntity.cpp b/libraries/shared/src/PhysicsEntity.cpp index 09b00c201c..6be37a7528 100644 --- a/libraries/shared/src/PhysicsEntity.cpp +++ b/libraries/shared/src/PhysicsEntity.cpp @@ -12,8 +12,10 @@ #include "PhysicsEntity.h" #include "PhysicsSimulation.h" +#include "PlaneShape.h" #include "Shape.h" #include "ShapeCollider.h" +#include "SphereShape.h" PhysicsEntity::PhysicsEntity() : _translation(0.0f), diff --git a/libraries/shared/src/PhysicsSimulation.cpp b/libraries/shared/src/PhysicsSimulation.cpp index a62b3816af..b58f62dfd4 100644 --- a/libraries/shared/src/PhysicsSimulation.cpp +++ b/libraries/shared/src/PhysicsSimulation.cpp @@ -16,8 +16,9 @@ #include "PerfStat.h" #include "PhysicsEntity.h" #include "Ragdoll.h" -#include "SharedUtil.h" +#include "Shape.h" #include "ShapeCollider.h" +#include "SharedUtil.h" int MAX_DOLLS_PER_SIMULATION = 16; int MAX_ENTITIES_PER_SIMULATION = 64; @@ -163,10 +164,10 @@ bool PhysicsSimulation::addRagdoll(Ragdoll* doll) { } void PhysicsSimulation::removeRagdoll(Ragdoll* doll) { - int numDolls = _otherRagdolls.size(); - if (doll->_simulation != this) { + if (!doll || doll->_simulation != this) { return; } + int numDolls = _otherRagdolls.size(); for (int i = 0; i < numDolls; ++i) { if (doll == _otherRagdolls[i]) { if (i == numDolls - 1) { @@ -205,10 +206,11 @@ void PhysicsSimulation::stepForward(float deltaTime, float minError, int maxIter } } + bool collidedWithOtherRagdoll = false; int iterations = 0; float error = 0.0f; do { - computeCollisions(); + collidedWithOtherRagdoll = computeCollisions() || collidedWithOtherRagdoll; updateContacts(); resolveCollisions(); @@ -225,6 +227,14 @@ void PhysicsSimulation::stepForward(float deltaTime, float minError, int maxIter now = usecTimestampNow(); } while (_collisions.size() != 0 && (iterations < maxIterations) && (error > minError) && (now < expiry)); + // the collisions may have moved the main ragdoll from the simulation center + // so we remove this offset (potentially storing it as movement of the Ragdoll owner) + _ragdoll->removeRootOffset(collidedWithOtherRagdoll); + + // also remove any offsets from the other ragdolls + for (int i = 0; i < numDolls; ++i) { + _otherRagdolls[i]->removeRootOffset(false); + } pruneContacts(); } @@ -237,7 +247,7 @@ void PhysicsSimulation::moveRagdolls(float deltaTime) { } } -void PhysicsSimulation::computeCollisions() { +bool PhysicsSimulation::computeCollisions() { PerformanceTimer perfTimer("collide"); _collisions.clear(); @@ -258,11 +268,13 @@ void PhysicsSimulation::computeCollisions() { } // collide main ragdoll with others + bool otherCollisions = false; int numEntities = _otherEntities.size(); for (int i = 0; i < numEntities; ++i) { const QVector otherShapes = _otherEntities.at(i)->getShapes(); - ShapeCollider::collideShapesWithShapes(shapes, otherShapes, _collisions); + otherCollisions = 
ShapeCollider::collideShapesWithShapes(shapes, otherShapes, _collisions) || otherCollisions; } + return otherCollisions; } void PhysicsSimulation::resolveCollisions() { diff --git a/libraries/shared/src/PhysicsSimulation.h b/libraries/shared/src/PhysicsSimulation.h index 881007208b..1db56a46e2 100644 --- a/libraries/shared/src/PhysicsSimulation.h +++ b/libraries/shared/src/PhysicsSimulation.h @@ -53,9 +53,11 @@ public: protected: void moveRagdolls(float deltaTime); - void computeCollisions(); - void resolveCollisions(); + /// \return true if main ragdoll collides with other avatar + bool computeCollisions(); + + void resolveCollisions(); void enforceContacts(); void applyContactFriction(); void updateContacts(); diff --git a/libraries/shared/src/PlaneShape.cpp b/libraries/shared/src/PlaneShape.cpp index 15ea281510..72704c3116 100644 --- a/libraries/shared/src/PlaneShape.cpp +++ b/libraries/shared/src/PlaneShape.cpp @@ -15,7 +15,7 @@ const glm::vec3 UNROTATED_NORMAL(0.0f, 1.0f, 0.0f); PlaneShape::PlaneShape(const glm::vec4& coefficients) : - Shape(Shape::PLANE_SHAPE) { + Shape(PLANE_SHAPE) { glm::vec3 normal = glm::vec3(coefficients); _translation = -normal * coefficients.w; diff --git a/libraries/shared/src/Ragdoll.cpp b/libraries/shared/src/Ragdoll.cpp index 7eeaf0b609..c0f0eb4b27 100644 --- a/libraries/shared/src/Ragdoll.cpp +++ b/libraries/shared/src/Ragdoll.cpp @@ -19,7 +19,8 @@ #include "PhysicsSimulation.h" #include "SharedUtil.h" // for EPSILON -Ragdoll::Ragdoll() : _massScale(1.0f), _translation(0.0f), _translationInSimulationFrame(0.0f), _simulation(NULL) { +Ragdoll::Ragdoll() : _massScale(1.0f), _translation(0.0f), _translationInSimulationFrame(0.0f), + _rootIndex(0), _accumulatedMovement(0.0f), _simulation(NULL) { } Ragdoll::~Ragdoll() { @@ -34,7 +35,7 @@ void Ragdoll::stepForward(float deltaTime) { updateSimulationTransforms(_translation - _simulation->getTranslation(), _rotation); } int numPoints = _points.size(); - for (int i = 0; i < numPoints; ++i) { + for (int i = _rootIndex; i < numPoints; ++i) { _points[i].integrateForward(); } } @@ -76,7 +77,9 @@ void Ragdoll::initTransform() { } void Ragdoll::setTransform(const glm::vec3& translation, const glm::quat& rotation) { - _translation = translation; + if (translation != _translation) { + _translation = translation; + } _rotation = rotation; } @@ -94,7 +97,7 @@ void Ragdoll::updateSimulationTransforms(const glm::vec3& translation, const glm // apply the deltas to all ragdollPoints int numPoints = _points.size(); - for (int i = 0; i < numPoints; ++i) { + for (int i = _rootIndex; i < numPoints; ++i) { _points[i].move(deltaPosition, deltaRotation, _translationInSimulationFrame); } @@ -110,9 +113,33 @@ void Ragdoll::setMassScale(float scale) { if (scale != _massScale) { float rescale = scale / _massScale; int numPoints = _points.size(); - for (int i = 0; i < numPoints; ++i) { + for (int i = _rootIndex; i < numPoints; ++i) { _points[i].setMass(rescale * _points[i].getMass()); } _massScale = scale; } } + +void Ragdoll::removeRootOffset(bool accumulateMovement) { + const int numPoints = _points.size(); + if (numPoints > 0) { + // shift all points so that the root aligns with the the ragdoll's position in the simulation + glm::vec3 offset = _translationInSimulationFrame - _points[_rootIndex]._position; + float offsetLength = glm::length(offset); + if (offsetLength > EPSILON) { + for (int i = _rootIndex; i < numPoints; ++i) { + _points[i].shift(offset); + } + const float MIN_ROOT_OFFSET = 0.02f; + if (accumulateMovement && offsetLength 
> MIN_ROOT_OFFSET) { + _accumulatedMovement -= (1.0f - MIN_ROOT_OFFSET / offsetLength) * offset; + } + } + } +} + +glm::vec3 Ragdoll::getAndClearAccumulatedMovement() { + glm::vec3 movement = _accumulatedMovement; + _accumulatedMovement = glm::vec3(0.0f); + return movement; +} diff --git a/libraries/shared/src/Ragdoll.h b/libraries/shared/src/Ragdoll.h index c82295d9a5..5234397833 100644 --- a/libraries/shared/src/Ragdoll.h +++ b/libraries/shared/src/Ragdoll.h @@ -52,20 +52,35 @@ public: void setMassScale(float scale); float getMassScale() const { return _massScale; } + // the ragdoll's rootIndex (within a Model's joints) is not always zero so must be settable + void setRootIndex(int index) { _rootIndex = index; } + int getRootIndex() const { return _rootIndex; } + void clearConstraintsAndPoints(); virtual void initPoints() = 0; virtual void buildConstraints() = 0; + void removeRootOffset(bool accumulateMovement); + + glm::vec3 getAndClearAccumulatedMovement(); + protected: float _massScale; glm::vec3 _translation; // world-frame glm::quat _rotation; // world-frame glm::vec3 _translationInSimulationFrame; glm::quat _rotationInSimulationFrame; + int _rootIndex; QVector _points; QVector _boneConstraints; QVector _fixedConstraints; + + // The collisions are typically done in a simulation frame that is slaved to the center of one of the Ragdolls. + // To allow the Ragdoll to provide feedback of its own displacement we store it in _accumulatedMovement. + // The owner of the Ragdoll can harvest this displacement to update the rest of the object positions in the simulation. + glm::vec3 _accumulatedMovement; + private: void updateSimulationTransforms(const glm::vec3& translation, const glm::quat& rotation); diff --git a/libraries/shared/src/RingBufferHistory.h b/libraries/shared/src/RingBufferHistory.h index 27a78c0055..9534b2f1db 100644 --- a/libraries/shared/src/RingBufferHistory.h +++ b/libraries/shared/src/RingBufferHistory.h @@ -35,6 +35,14 @@ public: _numEntries = 0; } + void setCapacity(int capacity) { + _size = capacity + 1; + _capacity = capacity; + _newestEntryAtIndex = 0; + _numEntries = 0; + _buffer.resize(_size); + } + void insert(const T& entry) { // increment newest entry index cyclically _newestEntryAtIndex = (_newestEntryAtIndex == _size - 1) ? 0 : _newestEntryAtIndex + 1; @@ -83,9 +91,14 @@ private: QVector _buffer; public: - class Iterator : public std::iterator < std::forward_iterator_tag, T > { + class Iterator : public std::iterator < std::random_access_iterator_tag, T > { public: - Iterator(T* bufferFirst, T* bufferLast, T* at) : _bufferFirst(bufferFirst), _bufferLast(bufferLast), _at(at) {} + Iterator(T* bufferFirst, T* bufferLast, T* newestAt, T* at) + : _bufferFirst(bufferFirst), + _bufferLast(bufferLast), + _bufferLength(bufferLast - bufferFirst + 1), + _newestAt(newestAt), + _at(at) {} bool operator==(const Iterator& rhs) { return _at == rhs._at; } bool operator!=(const Iterator& rhs) { return _at != rhs._at; } @@ -103,20 +116,95 @@ public: return tmp; } + Iterator& operator--() { + _at = (_at == _bufferLast) ? 
_bufferFirst : _at + 1; + return *this; + } + + Iterator operator--(int) { + Iterator tmp(*this); + --(*this); + return tmp; + } + + Iterator operator+(int add) { + Iterator sum(*this); + sum._at = atShiftedBy(add); + return sum; + } + + Iterator operator-(int sub) { + Iterator sum(*this); + sum._at = atShiftedBy(-sub); + return sum; + } + + Iterator& operator+=(int add) { + _at = atShiftedBy(add); + return *this; + } + + Iterator& operator-=(int sub) { + _at = atShiftedBy(-sub); + return *this; + } + + T& operator[](int i) { + return *(atShiftedBy(i)); + } + + bool operator<(const Iterator& rhs) { + return age() < rhs.age(); + } + + bool operator>(const Iterator& rhs) { + return age() > rhs.age(); + } + + bool operator<=(const Iterator& rhs) { + return age() < rhs.age(); + } + + bool operator>=(const Iterator& rhs) { + return age() >= rhs.age(); + } + + int operator-(const Iterator& rhs) { + return age() - rhs.age(); + } + private: - T* const _bufferFirst; - T* const _bufferLast; + T* atShiftedBy(int i) { // shifts i places towards _bufferFirst (towards older entries) + i = (_at - _bufferFirst - i) % _bufferLength; + if (i < 0) { + i += _bufferLength; + } + return _bufferFirst + i; + } + + int age() { + int age = _newestAt - _at; + if (age < 0) { + age += _bufferLength; + } + return age; + } + + T* _bufferFirst; + T* _bufferLast; + int _bufferLength; + T* _newestAt; T* _at; }; - Iterator begin() { return Iterator(&_buffer.first(), &_buffer.last(), &_buffer[_newestEntryAtIndex]); } + Iterator begin() { return Iterator(&_buffer.first(), &_buffer.last(), &_buffer[_newestEntryAtIndex], &_buffer[_newestEntryAtIndex]); } Iterator end() { int endAtIndex = _newestEntryAtIndex - _numEntries; if (endAtIndex < 0) { endAtIndex += _size; } - return Iterator(&_buffer.first(), &_buffer.last(), &_buffer[endAtIndex]); + return Iterator(&_buffer.first(), &_buffer.last(), &_buffer[_newestEntryAtIndex], &_buffer[endAtIndex]); } }; diff --git a/libraries/shared/src/Shape.h b/libraries/shared/src/Shape.h index 2efa5b824f..9c30a0fdf4 100644 --- a/libraries/shared/src/Shape.h +++ b/libraries/shared/src/Shape.h @@ -22,17 +22,18 @@ class VerletPoint; const float MAX_SHAPE_MASS = 1.0e18f; // something less than sqrt(FLT_MAX) +const quint8 SPHERE_SHAPE = 0; +const quint8 CAPSULE_SHAPE = 1; +const quint8 PLANE_SHAPE = 2; +const quint8 LIST_SHAPE = 3; +const quint8 UNKNOWN_SHAPE = 4; + class Shape { public: - static quint32 getNextID() { static quint32 nextID = 0; return ++nextID; } - enum Type{ - UNKNOWN_SHAPE = 0, - SPHERE_SHAPE, - CAPSULE_SHAPE, - PLANE_SHAPE, - LIST_SHAPE - }; + typedef quint8 Type; + + static quint32 getNextID() { static quint32 nextID = 0; return ++nextID; } Shape() : _type(UNKNOWN_SHAPE), _owningEntity(NULL), _boundingRadius(0.f), _translation(0.f), _rotation(), _mass(MAX_SHAPE_MASS) { @@ -40,7 +41,7 @@ public: } virtual ~Shape() { } - int getType() const { return _type; } + Type getType() const { return _type; } quint32 getID() const { return _id; } void setEntity(PhysicsEntity* entity) { _owningEntity = entity; } @@ -95,8 +96,8 @@ protected: void setBoundingRadius(float radius) { _boundingRadius = radius; } - int _type; - unsigned int _id; + Type _type; + quint32 _id; PhysicsEntity* _owningEntity; float _boundingRadius; glm::vec3 _translation; diff --git a/libraries/shared/src/ShapeCollider.cpp b/libraries/shared/src/ShapeCollider.cpp index 805e7f30f6..ec0c88bd0f 100644 --- a/libraries/shared/src/ShapeCollider.cpp +++ b/libraries/shared/src/ShapeCollider.cpp @@ -15,85 +15,70 @@ #include 
"GeometryUtil.h" #include "ShapeCollider.h" +#include "CapsuleShape.h" +#include "ListShape.h" +#include "PlaneShape.h" +#include "SphereShape.h" // NOTE: // // * Large ListShape's are inefficient keep the lists short. // * Collisions between lists of lists work in theory but are not recommended. +const Shape::Type NUM_SHAPE_TYPES = 5; +const quint8 NUM__DISPATCH_CELLS = NUM_SHAPE_TYPES * NUM_SHAPE_TYPES; + +Shape::Type getDispatchKey(Shape::Type typeA, Shape::Type typeB) { + return typeA + NUM_SHAPE_TYPES * typeB; +} + +// dummy dispatch for any non-implemented pairings +bool notImplemented(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) { + return false; +} + +// NOTE: hardcode the number of dispatchTable entries (NUM_SHAPE_TYPES ^2) +bool (*dispatchTable[NUM__DISPATCH_CELLS])(const Shape*, const Shape*, CollisionList&); + namespace ShapeCollider { -bool collideShapes(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) { - // TODO: make a fast lookup for correct method - int typeA = shapeA->getType(); - int typeB = shapeB->getType(); - if (typeA == Shape::SPHERE_SHAPE) { - const SphereShape* sphereA = static_cast(shapeA); - if (typeB == Shape::SPHERE_SHAPE) { - return sphereSphere(sphereA, static_cast(shapeB), collisions); - } else if (typeB == Shape::CAPSULE_SHAPE) { - return sphereCapsule(sphereA, static_cast(shapeB), collisions); - } else if (typeB == Shape::PLANE_SHAPE) { - return spherePlane(sphereA, static_cast(shapeB), collisions); - } - } else if (typeA == Shape::CAPSULE_SHAPE) { - const CapsuleShape* capsuleA = static_cast(shapeA); - if (typeB == Shape::SPHERE_SHAPE) { - return capsuleSphere(capsuleA, static_cast(shapeB), collisions); - } else if (typeB == Shape::CAPSULE_SHAPE) { - return capsuleCapsule(capsuleA, static_cast(shapeB), collisions); - } else if (typeB == Shape::PLANE_SHAPE) { - return capsulePlane(capsuleA, static_cast(shapeB), collisions); - } - } else if (typeA == Shape::PLANE_SHAPE) { - const PlaneShape* planeA = static_cast(shapeA); - if (typeB == Shape::SPHERE_SHAPE) { - return planeSphere(planeA, static_cast(shapeB), collisions); - } else if (typeB == Shape::CAPSULE_SHAPE) { - return planeCapsule(planeA, static_cast(shapeB), collisions); - } else if (typeB == Shape::PLANE_SHAPE) { - return planePlane(planeA, static_cast(shapeB), collisions); - } - } else if (typeA == Shape::LIST_SHAPE) { - const ListShape* listA = static_cast(shapeA); - if (typeB == Shape::SPHERE_SHAPE) { - return listSphere(listA, static_cast(shapeB), collisions); - } else if (typeB == Shape::CAPSULE_SHAPE) { - return listCapsule(listA, static_cast(shapeB), collisions); - } else if (typeB == Shape::PLANE_SHAPE) { - return listPlane(listA, static_cast(shapeB), collisions); - } +// NOTE: the dispatch table must be initialized before the ShapeCollider is used. +void initDispatchTable() { + for (Shape::Type i = 0; i < NUM__DISPATCH_CELLS; ++i) { + dispatchTable[i] = ¬Implemented; } - return false; + + // NOTE: no need to update any that are notImplemented, but we leave them + // commented out in the code so that we remember that they exist. 
+ dispatchTable[getDispatchKey(SPHERE_SHAPE, SPHERE_SHAPE)] = &sphereVsSphere; + dispatchTable[getDispatchKey(SPHERE_SHAPE, CAPSULE_SHAPE)] = &sphereVsCapsule; + dispatchTable[getDispatchKey(SPHERE_SHAPE, PLANE_SHAPE)] = &sphereVsPlane; + dispatchTable[getDispatchKey(SPHERE_SHAPE, LIST_SHAPE)] = &shapeVsList; + + dispatchTable[getDispatchKey(CAPSULE_SHAPE, SPHERE_SHAPE)] = &capsuleVsSphere; + dispatchTable[getDispatchKey(CAPSULE_SHAPE, CAPSULE_SHAPE)] = &capsuleVsCapsule; + dispatchTable[getDispatchKey(CAPSULE_SHAPE, PLANE_SHAPE)] = &capsuleVsPlane; + dispatchTable[getDispatchKey(CAPSULE_SHAPE, LIST_SHAPE)] = &shapeVsList; + + dispatchTable[getDispatchKey(PLANE_SHAPE, SPHERE_SHAPE)] = &planeVsSphere; + dispatchTable[getDispatchKey(PLANE_SHAPE, CAPSULE_SHAPE)] = &planeVsCapsule; + dispatchTable[getDispatchKey(PLANE_SHAPE, PLANE_SHAPE)] = &planeVsPlane; + dispatchTable[getDispatchKey(PLANE_SHAPE, LIST_SHAPE)] = &shapeVsList; + + dispatchTable[getDispatchKey(LIST_SHAPE, SPHERE_SHAPE)] = &listVsShape; + dispatchTable[getDispatchKey(LIST_SHAPE, CAPSULE_SHAPE)] = &listVsShape; + dispatchTable[getDispatchKey(LIST_SHAPE, PLANE_SHAPE)] = &listVsShape; + dispatchTable[getDispatchKey(LIST_SHAPE, LIST_SHAPE)] = &listVsList; + + // all of the UNKNOWN_SHAPE pairings are notImplemented +} + +bool collideShapes(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) { + return (*dispatchTable[shapeA->getType() + NUM_SHAPE_TYPES * shapeB->getType()])(shapeA, shapeB, collisions); } static CollisionList tempCollisions(32); -bool collideShapesCoarse(const QVector& shapesA, const QVector& shapesB, CollisionInfo& collision) { - tempCollisions.clear(); - foreach (const Shape* shapeA, shapesA) { - foreach (const Shape* shapeB, shapesB) { - collideShapes(shapeA, shapeB, tempCollisions); - } - } - if (tempCollisions.size() > 0) { - glm::vec3 totalPenetration(0.0f); - glm::vec3 averageContactPoint(0.0f); - for (int j = 0; j < tempCollisions.size(); ++j) { - CollisionInfo* c = tempCollisions.getCollision(j); - totalPenetration = addPenetrations(totalPenetration, c->_penetration); - averageContactPoint += c->_contactPoint; - } - collision._penetration = totalPenetration; - collision._contactPoint = averageContactPoint / (float)(tempCollisions.size()); - // there are no valid shape pointers for this collision so we set them NULL - collision._shapeA = NULL; - collision._shapeB = NULL; - return true; - } - return false; -} - bool collideShapeWithShapes(const Shape* shapeA, const QVector& shapes, int startIndex, CollisionList& collisions) { bool collided = false; if (shapeA) { @@ -133,21 +118,21 @@ bool collideShapesWithShapes(const QVector& shapesA, const QVectorgetType(); - if (typeA == Shape::SPHERE_SHAPE) { - return sphereAACube(static_cast(shapeA), cubeCenter, cubeSide, collisions); - } else if (typeA == Shape::CAPSULE_SHAPE) { - return capsuleAACube(static_cast(shapeA), cubeCenter, cubeSide, collisions); - } else if (typeA == Shape::LIST_SHAPE) { + Shape::Type typeA = shapeA->getType(); + if (typeA == SPHERE_SHAPE) { + return sphereVsAACube(static_cast(shapeA), cubeCenter, cubeSide, collisions); + } else if (typeA == CAPSULE_SHAPE) { + return capsuleVsAACube(static_cast(shapeA), cubeCenter, cubeSide, collisions); + } else if (typeA == LIST_SHAPE) { const ListShape* listA = static_cast(shapeA); bool touching = false; for (int i = 0; i < listA->size() && !collisions.isFull(); ++i) { const Shape* subShape = listA->getSubShape(i); int subType = subShape->getType(); - if (subType == Shape::SPHERE_SHAPE) { - 
touching = sphereAACube(static_cast(subShape), cubeCenter, cubeSide, collisions) || touching; - } else if (subType == Shape::CAPSULE_SHAPE) { - touching = capsuleAACube(static_cast(subShape), cubeCenter, cubeSide, collisions) || touching; + if (subType == SPHERE_SHAPE) { + touching = sphereVsAACube(static_cast(subShape), cubeCenter, cubeSide, collisions) || touching; + } else if (subType == CAPSULE_SHAPE) { + touching = capsuleVsAACube(static_cast(subShape), cubeCenter, cubeSide, collisions) || touching; } } return touching; @@ -155,7 +140,9 @@ bool collideShapeWithAACube(const Shape* shapeA, const glm::vec3& cubeCenter, fl return false; } -bool sphereSphere(const SphereShape* sphereA, const SphereShape* sphereB, CollisionList& collisions) { +bool sphereVsSphere(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) { + const SphereShape* sphereA = static_cast(shapeA); + const SphereShape* sphereB = static_cast(shapeB); glm::vec3 BA = sphereB->getTranslation() - sphereA->getTranslation(); float distanceSquared = glm::dot(BA, BA); float totalRadius = sphereA->getRadius() + sphereB->getRadius(); @@ -183,7 +170,9 @@ bool sphereSphere(const SphereShape* sphereA, const SphereShape* sphereB, Collis return false; } -bool sphereCapsule(const SphereShape* sphereA, const CapsuleShape* capsuleB, CollisionList& collisions) { +bool sphereVsCapsule(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) { + const SphereShape* sphereA = static_cast(shapeA); + const CapsuleShape* capsuleB = static_cast(shapeB); // find sphereA's closest approach to axis of capsuleB glm::vec3 BA = capsuleB->getTranslation() - sphereA->getTranslation(); glm::vec3 capsuleAxis; @@ -252,7 +241,9 @@ bool sphereCapsule(const SphereShape* sphereA, const CapsuleShape* capsuleB, Col return false; } -bool spherePlane(const SphereShape* sphereA, const PlaneShape* planeB, CollisionList& collisions) { +bool sphereVsPlane(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) { + const SphereShape* sphereA = static_cast(shapeA); + const PlaneShape* planeB = static_cast(shapeB); glm::vec3 penetration; if (findSpherePlanePenetration(sphereA->getTranslation(), sphereA->getRadius(), planeB->getCoefficients(), penetration)) { CollisionInfo* collision = collisions.getNewCollision(); @@ -268,79 +259,8 @@ bool spherePlane(const SphereShape* sphereA, const PlaneShape* planeB, Collision return false; } -bool capsuleSphere(const CapsuleShape* capsuleA, const SphereShape* sphereB, CollisionList& collisions) { - // find sphereB's closest approach to axis of capsuleA - glm::vec3 AB = capsuleA->getTranslation() - sphereB->getTranslation(); - glm::vec3 capsuleAxis; - capsuleA->computeNormalizedAxis(capsuleAxis); - float axialDistance = - glm::dot(AB, capsuleAxis); - float absAxialDistance = fabsf(axialDistance); - float totalRadius = sphereB->getRadius() + capsuleA->getRadius(); - if (absAxialDistance < totalRadius + capsuleA->getHalfHeight()) { - glm::vec3 radialAxis = AB + axialDistance * capsuleAxis; // from sphereB to axis of capsuleA - float radialDistance2 = glm::length2(radialAxis); - float totalRadius2 = totalRadius * totalRadius; - if (radialDistance2 > totalRadius2) { - // sphere is too far from capsule axis - return false; - } - - // closestApproach = point on capsuleA's axis that is closest to sphereB's center - glm::vec3 closestApproach = capsuleA->getTranslation() + axialDistance * capsuleAxis; - - if (absAxialDistance > capsuleA->getHalfHeight()) { - // sphere hits capsule on a cap - // --> 
recompute radialAxis and closestApproach - float sign = (axialDistance > 0.0f) ? 1.0f : -1.0f; - closestApproach = capsuleA->getTranslation() + (sign * capsuleA->getHalfHeight()) * capsuleAxis; - radialAxis = closestApproach - sphereB->getTranslation(); - radialDistance2 = glm::length2(radialAxis); - if (radialDistance2 > totalRadius2) { - return false; - } - } - if (radialDistance2 > EPSILON * EPSILON) { - CollisionInfo* collision = collisions.getNewCollision(); - if (!collision) { - // collisions list is full - return false; - } - // normalize the radialAxis - float radialDistance = sqrtf(radialDistance2); - radialAxis /= radialDistance; - // penetration points from A into B - collision->_penetration = (radialDistance - totalRadius) * radialAxis; // points from A into B - // contactPoint is on surface of capsuleA - collision->_contactPoint = closestApproach - capsuleA->getRadius() * radialAxis; - collision->_shapeA = capsuleA; - collision->_shapeB = sphereB; - } else { - // A is on B's axis, so the penetration is undefined... - if (absAxialDistance > capsuleA->getHalfHeight()) { - // ...for the cylinder case (for now we pretend the collision doesn't exist) - return false; - } else { - CollisionInfo* collision = collisions.getNewCollision(); - if (!collision) { - // collisions list is full - return false; - } - // ... but still defined for the cap case - if (axialDistance < 0.0f) { - // we're hitting the start cap, so we negate the capsuleAxis - capsuleAxis *= -1; - } - float sign = (axialDistance > 0.0f) ? 1.0f : -1.0f; - collision->_penetration = (sign * (totalRadius + capsuleA->getHalfHeight() - absAxialDistance)) * capsuleAxis; - // contactPoint is on surface of sphereA - collision->_contactPoint = closestApproach + (sign * capsuleA->getRadius()) * capsuleAxis; - collision->_shapeA = capsuleA; - collision->_shapeB = sphereB; - } - } - return true; - } - return false; +bool capsuleVsSphere(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) { + return sphereVsCapsule(shapeB, shapeA, collisions); } /// \param lineP point on line @@ -409,7 +329,9 @@ bool lineCylinder(const glm::vec3& lineP, const glm::vec3& lineDir, return true; } -bool capsuleCapsule(const CapsuleShape* capsuleA, const CapsuleShape* capsuleB, CollisionList& collisions) { +bool capsuleVsCapsule(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) { + const CapsuleShape* capsuleA = static_cast(shapeA); + const CapsuleShape* capsuleB = static_cast(shapeB); glm::vec3 axisA; capsuleA->computeNormalizedAxis(axisA); glm::vec3 axisB; @@ -568,7 +490,9 @@ bool capsuleCapsule(const CapsuleShape* capsuleA, const CapsuleShape* capsuleB, return false; } -bool capsulePlane(const CapsuleShape* capsuleA, const PlaneShape* planeB, CollisionList& collisions) { +bool capsuleVsPlane(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) { + const CapsuleShape* capsuleA = static_cast(shapeA); + const PlaneShape* planeB = static_cast(shapeB); glm::vec3 start, end, penetration; capsuleA->getStartPoint(start); capsuleA->getEndPoint(end); @@ -588,147 +512,44 @@ bool capsulePlane(const CapsuleShape* capsuleA, const PlaneShape* planeB, Collis return false; } -bool planeSphere(const PlaneShape* planeA, const SphereShape* sphereB, CollisionList& collisions) { - glm::vec3 penetration; - if (findSpherePlanePenetration(sphereB->getTranslation(), sphereB->getRadius(), planeA->getCoefficients(), penetration)) { - CollisionInfo* collision = collisions.getNewCollision(); - if (!collision) { - return false; // 
collision list is full - } - collision->_penetration = -penetration; - collision->_contactPoint = sphereB->getTranslation() + - (sphereB->getRadius() / glm::length(penetration) - 1.0f) * penetration; - collision->_shapeA = planeA; - collision->_shapeB = sphereB; - return true; - } - return false; +bool planeVsSphere(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) { + return sphereVsPlane(shapeB, shapeA, collisions); } -bool planeCapsule(const PlaneShape* planeA, const CapsuleShape* capsuleB, CollisionList& collisions) { - glm::vec3 start, end, penetration; - capsuleB->getStartPoint(start); - capsuleB->getEndPoint(end); - glm::vec4 plane = planeA->getCoefficients(); - if (findCapsulePlanePenetration(start, end, capsuleB->getRadius(), plane, penetration)) { - CollisionInfo* collision = collisions.getNewCollision(); - if (!collision) { - return false; // collision list is full - } - collision->_penetration = -penetration; - glm::vec3 deepestEnd = (glm::dot(start, glm::vec3(plane)) < glm::dot(end, glm::vec3(plane))) ? start : end; - collision->_contactPoint = deepestEnd + (capsuleB->getRadius() / glm::length(penetration) - 1.0f) * penetration; - collision->_shapeA = planeA; - collision->_shapeB = capsuleB; - return true; - } - return false; +bool planeVsCapsule(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) { + return capsuleVsPlane(shapeB, shapeA, collisions); } -bool planePlane(const PlaneShape* planeA, const PlaneShape* planeB, CollisionList& collisions) { +bool planeVsPlane(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) { // technically, planes always collide unless they're parallel and not coincident; however, that's // not going to give us any useful information return false; } -bool sphereList(const SphereShape* sphereA, const ListShape* listB, CollisionList& collisions) { +bool shapeVsList(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) { bool touching = false; + const ListShape* listB = static_cast(shapeB); for (int i = 0; i < listB->size() && !collisions.isFull(); ++i) { const Shape* subShape = listB->getSubShape(i); - int subType = subShape->getType(); - if (subType == Shape::SPHERE_SHAPE) { - touching = sphereSphere(sphereA, static_cast(subShape), collisions) || touching; - } else if (subType == Shape::CAPSULE_SHAPE) { - touching = sphereCapsule(sphereA, static_cast(subShape), collisions) || touching; - } else if (subType == Shape::PLANE_SHAPE) { - touching = spherePlane(sphereA, static_cast(subShape), collisions) || touching; - } + touching = collideShapes(shapeA, subShape, collisions) || touching; } return touching; } -bool capsuleList(const CapsuleShape* capsuleA, const ListShape* listB, CollisionList& collisions) { - bool touching = false; - for (int i = 0; i < listB->size() && !collisions.isFull(); ++i) { - const Shape* subShape = listB->getSubShape(i); - int subType = subShape->getType(); - if (subType == Shape::SPHERE_SHAPE) { - touching = capsuleSphere(capsuleA, static_cast(subShape), collisions) || touching; - } else if (subType == Shape::CAPSULE_SHAPE) { - touching = capsuleCapsule(capsuleA, static_cast(subShape), collisions) || touching; - } else if (subType == Shape::PLANE_SHAPE) { - touching = capsulePlane(capsuleA, static_cast(subShape), collisions) || touching; - } - } - return touching; -} - -bool planeList(const PlaneShape* planeA, const ListShape* listB, CollisionList& collisions) { - bool touching = false; - for (int i = 0; i < listB->size() && !collisions.isFull(); ++i) { - 
const Shape* subShape = listB->getSubShape(i); - int subType = subShape->getType(); - if (subType == Shape::SPHERE_SHAPE) { - touching = planeSphere(planeA, static_cast(subShape), collisions) || touching; - } else if (subType == Shape::CAPSULE_SHAPE) { - touching = planeCapsule(planeA, static_cast(subShape), collisions) || touching; - } else if (subType == Shape::PLANE_SHAPE) { - touching = planePlane(planeA, static_cast(subShape), collisions) || touching; - } - } - return touching; -} - -bool listSphere(const ListShape* listA, const SphereShape* sphereB, CollisionList& collisions) { +bool listVsShape(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) { bool touching = false; + const ListShape* listA = static_cast(shapeA); for (int i = 0; i < listA->size() && !collisions.isFull(); ++i) { const Shape* subShape = listA->getSubShape(i); - int subType = subShape->getType(); - if (subType == Shape::SPHERE_SHAPE) { - touching = sphereSphere(static_cast(subShape), sphereB, collisions) || touching; - } else if (subType == Shape::CAPSULE_SHAPE) { - touching = capsuleSphere(static_cast(subShape), sphereB, collisions) || touching; - } else if (subType == Shape::PLANE_SHAPE) { - touching = planeSphere(static_cast(subShape), sphereB, collisions) || touching; - } + touching = collideShapes(subShape, shapeB, collisions) || touching; } return touching; } -bool listCapsule(const ListShape* listA, const CapsuleShape* capsuleB, CollisionList& collisions) { - bool touching = false; - for (int i = 0; i < listA->size() && !collisions.isFull(); ++i) { - const Shape* subShape = listA->getSubShape(i); - int subType = subShape->getType(); - if (subType == Shape::SPHERE_SHAPE) { - touching = sphereCapsule(static_cast(subShape), capsuleB, collisions) || touching; - } else if (subType == Shape::CAPSULE_SHAPE) { - touching = capsuleCapsule(static_cast(subShape), capsuleB, collisions) || touching; - } else if (subType == Shape::PLANE_SHAPE) { - touching = planeCapsule(static_cast(subShape), capsuleB, collisions) || touching; - } - } - return touching; -} - -bool listPlane(const ListShape* listA, const PlaneShape* planeB, CollisionList& collisions) { - bool touching = false; - for (int i = 0; i < listA->size() && !collisions.isFull(); ++i) { - const Shape* subShape = listA->getSubShape(i); - int subType = subShape->getType(); - if (subType == Shape::SPHERE_SHAPE) { - touching = spherePlane(static_cast(subShape), planeB, collisions) || touching; - } else if (subType == Shape::CAPSULE_SHAPE) { - touching = capsulePlane(static_cast(subShape), planeB, collisions) || touching; - } else if (subType == Shape::PLANE_SHAPE) { - touching = planePlane(static_cast(subShape), planeB, collisions) || touching; - } - } - return touching; -} - -bool listList(const ListShape* listA, const ListShape* listB, CollisionList& collisions) { +bool listVsList(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) { bool touching = false; + const ListShape* listA = static_cast(shapeA); + const ListShape* listB = static_cast(shapeB); for (int i = 0; i < listA->size() && !collisions.isFull(); ++i) { const Shape* subShape = listA->getSubShape(i); for (int j = 0; j < listB->size() && !collisions.isFull(); ++j) { @@ -739,7 +560,7 @@ bool listList(const ListShape* listA, const ListShape* listB, CollisionList& col } // helper function -bool sphereAACube(const glm::vec3& sphereCenter, float sphereRadius, const glm::vec3& cubeCenter, +bool sphereVsAACube(const glm::vec3& sphereCenter, float sphereRadius, const glm::vec3& 
cubeCenter, float cubeSide, CollisionList& collisions) { // sphere is A // cube is B @@ -887,11 +708,11 @@ bool sphereAACube_StarkAngles(const glm::vec3& sphereCenter, float sphereRadius, } */ -bool sphereAACube(const SphereShape* sphereA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions) { - return sphereAACube(sphereA->getTranslation(), sphereA->getRadius(), cubeCenter, cubeSide, collisions); +bool sphereVsAACube(const SphereShape* sphereA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions) { + return sphereVsAACube(sphereA->getTranslation(), sphereA->getRadius(), cubeCenter, cubeSide, collisions); } -bool capsuleAACube(const CapsuleShape* capsuleA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions) { +bool capsuleVsAACube(const CapsuleShape* capsuleA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions) { // find nerest approach of capsule line segment to cube glm::vec3 capsuleAxis; capsuleA->computeNormalizedAxis(capsuleAxis); @@ -904,7 +725,7 @@ bool capsuleAACube(const CapsuleShape* capsuleA, const glm::vec3& cubeCenter, fl } glm::vec3 nearestApproach = capsuleA->getTranslation() + offset * capsuleAxis; // collide nearest approach like a sphere at that point - return sphereAACube(nearestApproach, capsuleA->getRadius(), cubeCenter, cubeSide, collisions); + return sphereVsAACube(nearestApproach, capsuleA->getRadius(), cubeCenter, cubeSide, collisions); } bool findRayIntersectionWithShapes(const QVector shapes, const glm::vec3& rayStart, const glm::vec3& rayDirection, float& minDistance) { diff --git a/libraries/shared/src/ShapeCollider.h b/libraries/shared/src/ShapeCollider.h index b1be75fa40..279cbe3810 100644 --- a/libraries/shared/src/ShapeCollider.h +++ b/libraries/shared/src/ShapeCollider.h @@ -14,27 +14,24 @@ #include -#include "CapsuleShape.h" #include "CollisionInfo.h" -#include "ListShape.h" -#include "PlaneShape.h" #include "SharedUtil.h" -#include "SphereShape.h" + +class Shape; +class SphereShape; +class CapsuleShape; namespace ShapeCollider { + /// MUST CALL this FIRST before using the ShapeCollider + void initDispatchTable(); + /// \param shapeA pointer to first shape (cannot be NULL) /// \param shapeB pointer to second shape (cannot be NULL) /// \param collisions[out] collision details /// \return true if shapes collide bool collideShapes(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions); - /// \param shapesA list of shapes - /// \param shapeB list of shapes - /// \param collisions[out] average collision details - /// \return true if any shapes collide - bool collideShapesCoarse(const QVector& shapesA, const QVector& shapesB, CollisionInfo& collision); - bool collideShapeWithShapes(const Shape* shapeA, const QVector& shapes, int startIndex, CollisionList& collisions); bool collideShapesWithShapes(const QVector& shapesA, const QVector& shapesB, CollisionList& collisions); @@ -49,111 +46,87 @@ namespace ShapeCollider { /// \param sphereB pointer to second shape (cannot be NULL) /// \param[out] collisions where to append collision details /// \return true if shapes collide - bool sphereSphere(const SphereShape* sphereA, const SphereShape* sphereB, CollisionList& collisions); + bool sphereVsSphere(const Shape* sphereA, const Shape* sphereB, CollisionList& collisions); /// \param sphereA pointer to first shape (cannot be NULL) /// \param capsuleB pointer to second shape (cannot be NULL) /// \param[out] collisions where to append collision details /// \return true if shapes 
collide - bool sphereCapsule(const SphereShape* sphereA, const CapsuleShape* capsuleB, CollisionList& collisions); + bool sphereVsCapsule(const Shape* sphereA, const Shape* capsuleB, CollisionList& collisions); /// \param sphereA pointer to first shape (cannot be NULL) /// \param planeB pointer to second shape (cannot be NULL) /// \param[out] collisions where to append collision details /// \return true if shapes collide - bool spherePlane(const SphereShape* sphereA, const PlaneShape* planeB, CollisionList& collisions); + bool sphereVsPlane(const Shape* sphereA, const Shape* planeB, CollisionList& collisions); /// \param capsuleA pointer to first shape (cannot be NULL) /// \param sphereB pointer to second shape (cannot be NULL) /// \param[out] collisions where to append collision details /// \return true if shapes collide - bool capsuleSphere(const CapsuleShape* capsuleA, const SphereShape* sphereB, CollisionList& collisions); + bool capsuleVsSphere(const Shape* capsuleA, const Shape* sphereB, CollisionList& collisions); /// \param capsuleA pointer to first shape (cannot be NULL) /// \param capsuleB pointer to second shape (cannot be NULL) /// \param[out] collisions where to append collision details /// \return true if shapes collide - bool capsuleCapsule(const CapsuleShape* capsuleA, const CapsuleShape* capsuleB, CollisionList& collisions); + bool capsuleVsCapsule(const Shape* capsuleA, const Shape* capsuleB, CollisionList& collisions); /// \param capsuleA pointer to first shape (cannot be NULL) /// \param planeB pointer to second shape (cannot be NULL) /// \param[out] collisions where to append collision details /// \return true if shapes collide - bool capsulePlane(const CapsuleShape* capsuleA, const PlaneShape* planeB, CollisionList& collisions); + bool capsuleVsPlane(const Shape* capsuleA, const Shape* planeB, CollisionList& collisions); /// \param planeA pointer to first shape (cannot be NULL) /// \param sphereB pointer to second shape (cannot be NULL) /// \param[out] collisions where to append collision details /// \return true if shapes collide - bool planeSphere(const PlaneShape* planeA, const SphereShape* sphereB, CollisionList& collisions); + bool planeVsSphere(const Shape* planeA, const Shape* sphereB, CollisionList& collisions); /// \param planeA pointer to first shape (cannot be NULL) /// \param capsuleB pointer to second shape (cannot be NULL) /// \param[out] collisions where to append collision details /// \return true if shapes collide - bool planeCapsule(const PlaneShape* planeA, const CapsuleShape* capsuleB, CollisionList& collisions); + bool planeVsCapsule(const Shape* planeA, const Shape* capsuleB, CollisionList& collisions); /// \param planeA pointer to first shape (cannot be NULL) /// \param planeB pointer to second shape (cannot be NULL) /// \param[out] collisions where to append collision details /// \return true if shapes collide - bool planePlane(const PlaneShape* planeA, const PlaneShape* planeB, CollisionList& collisions); + bool planeVsPlane(const Shape* planeA, const Shape* planeB, CollisionList& collisions); - /// \param sphereA pointer to first shape (cannot be NULL) + /// \param shapeA pointer to first shape (cannot be NULL) /// \param listB pointer to second shape (cannot be NULL) /// \param[out] collisions where to append collision details /// \return true if shapes collide - bool sphereList(const SphereShape* sphereA, const ListShape* listB, CollisionList& collisions); + bool shapeVsList(const Shape* shapeA, const Shape* listB, CollisionList& 
collisions); - /// \param capuleA pointer to first shape (cannot be NULL) + /// \param listA pointer to first shape (cannot be NULL) + /// \param shapeB pointer to second shape (cannot be NULL) + /// \param[out] collisions where to append collision details + /// \return true if shapes collide + bool listVsShape(const Shape* listA, const Shape* shapeB, CollisionList& collisions); + + /// \param listA pointer to first shape (cannot be NULL) /// \param listB pointer to second shape (cannot be NULL) /// \param[out] collisions where to append collision details /// \return true if shapes collide - bool capsuleList(const CapsuleShape* capsuleA, const ListShape* listB, CollisionList& collisions); - - /// \param planeA pointer to first shape (cannot be NULL) - /// \param listB pointer to second shape (cannot be NULL) - /// \param[out] collisions where to append collision details - /// \return true if shapes collide - bool planeList(const PlaneShape* planeA, const ListShape* listB, CollisionList& collisions); - - /// \param listA pointer to first shape (cannot be NULL) - /// \param sphereB pointer to second shape (cannot be NULL) - /// \param[out] collisions where to append collision details - /// \return true if shapes collide - bool listSphere(const ListShape* listA, const SphereShape* sphereB, CollisionList& collisions); - - /// \param listA pointer to first shape (cannot be NULL) - /// \param capsuleB pointer to second shape (cannot be NULL) - /// \param[out] collisions where to append collision details - /// \return true if shapes collide - bool listCapsule(const ListShape* listA, const CapsuleShape* capsuleB, CollisionList& collisions); - - /// \param listA pointer to first shape (cannot be NULL) - /// \param planeB pointer to second shape (cannot be NULL) - /// \param[out] collisions where to append collision details - /// \return true if shapes collide - bool listPlane(const ListShape* listA, const PlaneShape* planeB, CollisionList& collisions); - - /// \param listA pointer to first shape (cannot be NULL) - /// \param capsuleB pointer to second shape (cannot be NULL) - /// \param[out] collisions where to append collision details - /// \return true if shapes collide - bool listList(const ListShape* listA, const ListShape* listB, CollisionList& collisions); + bool listVsList(const Shape* listA, const Shape* listB, CollisionList& collisions); /// \param sphereA pointer to sphere (cannot be NULL) /// \param cubeCenter center of cube /// \param cubeSide lenght of side of cube /// \param[out] collisions where to append collision details /// \return true if sphereA collides with axis aligned cube - bool sphereAACube(const SphereShape* sphereA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions); + bool sphereVsAACube(const SphereShape* sphereA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions); /// \param capsuleA pointer to capsule (cannot be NULL) /// \param cubeCenter center of cube /// \param cubeSide lenght of side of cube /// \param[out] collisions where to append collision details /// \return true if capsuleA collides with axis aligned cube - bool capsuleAACube(const CapsuleShape* capsuleA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions); + bool capsuleVsAACube(const CapsuleShape* capsuleA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions); /// \param shapes list of pointers to shapes (shape pointers may be NULL) /// \param startPoint beginning of ray diff --git a/libraries/shared/src/SphereShape.h 
b/libraries/shared/src/SphereShape.h index d2f2a8596f..0626927453 100644 --- a/libraries/shared/src/SphereShape.h +++ b/libraries/shared/src/SphereShape.h @@ -18,13 +18,13 @@ class SphereShape : public Shape { public: - SphereShape() : Shape(Shape::SPHERE_SHAPE) {} + SphereShape() : Shape(SPHERE_SHAPE) {} - SphereShape(float radius) : Shape(Shape::SPHERE_SHAPE) { + SphereShape(float radius) : Shape(SPHERE_SHAPE) { _boundingRadius = radius; } - SphereShape(float radius, const glm::vec3& position) : Shape(Shape::SPHERE_SHAPE, position) { + SphereShape(float radius, const glm::vec3& position) : Shape(SPHERE_SHAPE, position) { _boundingRadius = radius; } diff --git a/libraries/shared/src/VerletPoint.cpp b/libraries/shared/src/VerletPoint.cpp index d2dd985587..cf9aeca149 100644 --- a/libraries/shared/src/VerletPoint.cpp +++ b/libraries/shared/src/VerletPoint.cpp @@ -39,6 +39,11 @@ void VerletPoint::move(const glm::vec3& deltaPosition, const glm::quat& deltaRot _lastPosition += deltaPosition + (deltaRotation * arm - arm); } +void VerletPoint::shift(const glm::vec3& deltaPosition) { + _position += deltaPosition; + _lastPosition += deltaPosition; +} + void VerletPoint::setMass(float mass) { const float MIN_MASS = 1.0e-6f; const float MAX_MASS = 1.0e18f; diff --git a/libraries/shared/src/VerletPoint.h b/libraries/shared/src/VerletPoint.h index 6f94656966..3c73e5eb01 100644 --- a/libraries/shared/src/VerletPoint.h +++ b/libraries/shared/src/VerletPoint.h @@ -25,6 +25,7 @@ public: void accumulateDelta(const glm::vec3& delta); void applyAccumulatedDelta(); void move(const glm::vec3& deltaPosition, const glm::quat& deltaRotation, const glm::vec3& oldPivot); + void shift(const glm::vec3& deltaPosition); void setMass(float mass); float getMass() const { return _mass; } diff --git a/tests/audio/src/AudioRingBufferTests.cpp b/tests/audio/src/AudioRingBufferTests.cpp index b9ed596e52..f31f9988d6 100644 --- a/tests/audio/src/AudioRingBufferTests.cpp +++ b/tests/audio/src/AudioRingBufferTests.cpp @@ -27,28 +27,28 @@ void AudioRingBufferTests::runAllTests() { int16_t readData[10000]; int readIndexAt; - + AudioRingBuffer ringBuffer(10, false, 10); // makes buffer of 100 int16_t samples for (int T = 0; T < 300; T++) { - + writeIndexAt = 0; readIndexAt = 0; // write 73 samples, 73 samples in buffer - writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 73) / sizeof(int16_t); + writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 73); assertBufferSize(ringBuffer, 73); // read 43 samples, 30 samples in buffer - readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 43) / sizeof(int16_t); + readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 43); assertBufferSize(ringBuffer, 30); // write 70 samples, 100 samples in buffer (full) - writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 70) / sizeof(int16_t); + writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 70); assertBufferSize(ringBuffer, 100); // read 100 samples, 0 samples in buffer (empty) - readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 100) / sizeof(int16_t); + readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 100); assertBufferSize(ringBuffer, 0); @@ -65,15 +65,15 @@ void AudioRingBufferTests::runAllTests() { readIndexAt = 0; // write 59 samples, 59 samples in buffer - writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 59) / sizeof(int16_t); + writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 59); assertBufferSize(ringBuffer, 59); 
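
Note on the AudioRingBufferTests changes above: writeSamples(), readSamples() and addSilentSamples() are now treated as returning counts of int16_t samples rather than bytes, which is why the / sizeof(int16_t) conversions were removed. Below is a minimal sketch of the accounting the first write/read sequence verifies; it uses only the constructor and calls that already appear in the test, with the running sample counts spelled out in comments.

    AudioRingBuffer ringBuffer(10, false, 10);    // 100 int16_t samples of capacity, per the test's comment
    int16_t writeData[200];                       // test data, filled elsewhere in the test
    int16_t readData[200];
    int writeIndexAt = 0;
    int readIndexAt = 0;

    writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 73);   // return value is a sample count
    // 73 samples buffered
    readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 43);
    // 73 - 43 = 30 samples buffered
    writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 70);
    // 30 + 70 = 100 samples buffered, the buffer is full
    readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 100);
    // 0 samples buffered, the buffer is empty again
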
// write 99 samples, 100 samples in buffer - writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 99) / sizeof(int16_t); + writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 99); assertBufferSize(ringBuffer, 100); // read 100 samples, 0 samples in buffer - readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 100) / sizeof(int16_t); + readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 100); assertBufferSize(ringBuffer, 0); // verify 100 samples of read data @@ -88,23 +88,23 @@ void AudioRingBufferTests::runAllTests() { readIndexAt = 0; // write 77 samples, 77 samples in buffer - writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 77) / sizeof(int16_t); + writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 77); assertBufferSize(ringBuffer, 77); // write 24 samples, 100 samples in buffer (overwrote one sample: "0") - writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 24) / sizeof(int16_t); + writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 24); assertBufferSize(ringBuffer, 100); // write 29 silent samples, 100 samples in buffer, make sure non were added int samplesWritten; - if ((samplesWritten = ringBuffer.addSilentFrame(29)) != 0) { - qDebug("addSilentFrame(29) incorrect! Expected: 0 Actual: %d", samplesWritten); + if ((samplesWritten = ringBuffer.addSilentSamples(29)) != 0) { + qDebug("addSilentSamples(29) incorrect! Expected: 0 Actual: %d", samplesWritten); return; } assertBufferSize(ringBuffer, 100); // read 3 samples, 97 samples in buffer (expect to read "1", "2", "3") - readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 3) / sizeof(int16_t); + readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 3); for (int i = 0; i < 3; i++) { if (readData[i] != i + 1) { qDebug("Second readData[%d] incorrect! Expcted: %d Actual: %d", i, i + 1, readData[i]); @@ -114,14 +114,14 @@ void AudioRingBufferTests::runAllTests() { assertBufferSize(ringBuffer, 97); // write 4 silent samples, 100 samples in buffer - if ((samplesWritten = ringBuffer.addSilentFrame(4) / sizeof(int16_t)) != 3) { - qDebug("addSilentFrame(4) incorrect! Exptected: 3 Actual: %d", samplesWritten); + if ((samplesWritten = ringBuffer.addSilentSamples(4)) != 3) { + qDebug("addSilentSamples(4) incorrect! Exptected: 3 Actual: %d", samplesWritten); return; } assertBufferSize(ringBuffer, 100); // read back 97 samples (the non-silent samples), 3 samples in buffer (expect to read "4" thru "100") - readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 97) / sizeof(int16_t); + readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 97); for (int i = 3; i < 100; i++) { if (readData[i] != i + 1) { qDebug("third readData[%d] incorrect! Expcted: %d Actual: %d", i, i + 1, readData[i]); @@ -131,7 +131,7 @@ void AudioRingBufferTests::runAllTests() { assertBufferSize(ringBuffer, 3); // read back 3 silent samples, 0 samples in buffer - readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 3) / sizeof(int16_t); + readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 3); for (int i = 100; i < 103; i++) { if (readData[i] != 0) { qDebug("Fourth readData[%d] incorrect! 
Expcted: %d Actual: %d", i, 0, readData[i]); @@ -143,4 +143,3 @@ void AudioRingBufferTests::runAllTests() { qDebug() << "PASSED"; } - diff --git a/tests/jitter/src/main.cpp b/tests/jitter/src/main.cpp index 8c93b7dbec..985ce99530 100644 --- a/tests/jitter/src/main.cpp +++ b/tests/jitter/src/main.cpp @@ -271,9 +271,10 @@ void runReceive(const char* addressOption, int port, int gap, int size, int repo quint64 networkStart = usecTimestampNow(); n = recvfrom(sockfd, inputBuffer, size, 0, NULL, NULL); // we don't care about where it came from + quint64 networkEnd = usecTimestampNow(); float networkElapsed = (float)(networkEnd - networkStart); - + if (n < 0) { std::cout << "Receive error: " << strerror(errno) << "\n"; } diff --git a/tests/physics/src/ShapeColliderTests.cpp b/tests/physics/src/ShapeColliderTests.cpp index bde29ea588..45d3ed6508 100644 --- a/tests/physics/src/ShapeColliderTests.cpp +++ b/tests/physics/src/ShapeColliderTests.cpp @@ -16,7 +16,9 @@ #include #include +#include #include +#include #include #include #include @@ -71,8 +73,7 @@ void ShapeColliderTests::sphereMissesSphere() { if (collisions.size() > 0) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: expected empty collision list but size is " << collisions.size() - << std::endl; + << " ERROR: expected empty collision list but size is " << collisions.size() << std::endl; } } @@ -112,6 +113,7 @@ void ShapeColliderTests::sphereTouchesSphere() { if (!collision) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: null collision" << std::endl; + return; } // penetration points from sphereA into sphereB @@ -119,7 +121,7 @@ void ShapeColliderTests::sphereTouchesSphere() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of sphereA @@ -129,7 +131,7 @@ void ShapeColliderTests::sphereTouchesSphere() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } } @@ -149,7 +151,7 @@ void ShapeColliderTests::sphereTouchesSphere() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of sphereA @@ -159,7 +161,7 @@ void ShapeColliderTests::sphereTouchesSphere() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } } } @@ -199,23 +201,20 @@ void ShapeColliderTests::sphereMissesCapsule() { if (ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: sphere and capsule should NOT touch" - << std::endl; + << " ERROR: sphere and capsule should NOT touch" << std::endl; } // capsuleB against sphereA if (ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: sphere and capsule should NOT touch" - << std::endl; + << " ERROR: sphere and capsule should NOT touch" << std::endl; } } if 
(collisions.size() > 0) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: expected empty collision list but size is " << collisions.size() - << std::endl; + << " ERROR: expected empty collision list but size is " << collisions.size() << std::endl; } } @@ -241,8 +240,7 @@ void ShapeColliderTests::sphereTouchesCapsule() { if (!ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: sphere and capsule should touch" - << std::endl; + << " ERROR: sphere and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -254,7 +252,7 @@ void ShapeColliderTests::sphereTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of sphereA @@ -263,15 +261,14 @@ void ShapeColliderTests::sphereTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } // capsuleB collides with sphereA if (!ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and sphere should touch" - << std::endl; + << " ERROR: capsule and sphere should touch" << std::endl; } else { ++numCollisions; } @@ -279,33 +276,41 @@ void ShapeColliderTests::sphereTouchesCapsule() { // penetration points from sphereA into capsuleB collision = collisions.getCollision(numCollisions - 1); expectedPenetration = - (radialOffset - totalRadius) * xAxis; + if (collision->_shapeA == &sphereA) { + // the ShapeCollider swapped the order of the shapes + expectedPenetration *= -1.0f; + } inaccuracy = glm::length(collision->_penetration - expectedPenetration); if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of capsuleB glm::vec3 BtoA = sphereA.getTranslation() - capsuleB.getTranslation(); glm::vec3 closestApproach = capsuleB.getTranslation() + glm::dot(BtoA, yAxis) * yAxis; expectedContactPoint = closestApproach + radiusB * glm::normalize(BtoA - closestApproach); + if (collision->_shapeA == &sphereA) { + // the ShapeCollider swapped the order of the shapes + closestApproach = sphereA.getTranslation() - glm::dot(BtoA, yAxis) * yAxis; + expectedContactPoint = closestApproach - radiusB * glm::normalize(BtoA - closestApproach); + } inaccuracy = glm::length(collision->_contactPoint - expectedContactPoint); if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } } { // sphereA hits end cap at axis glm::vec3 axialOffset = (halfHeightB + alpha * radiusA + beta * radiusB) * yAxis; - sphereA.setTranslation(axialOffset * yAxis); + sphereA.setTranslation(axialOffset); if (!ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: sphere and capsule should touch" - << std::endl; + << " ERROR: sphere and 
capsule should touch" << std::endl; } else { ++numCollisions; } @@ -317,7 +322,7 @@ void ShapeColliderTests::sphereTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of sphereA @@ -326,15 +331,14 @@ void ShapeColliderTests::sphereTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } // capsuleB collides with sphereA if (!ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and sphere should touch" - << std::endl; + << " ERROR: capsule and sphere should touch" << std::endl; } else { ++numCollisions; } @@ -342,33 +346,40 @@ void ShapeColliderTests::sphereTouchesCapsule() { // penetration points from sphereA into capsuleB collision = collisions.getCollision(numCollisions - 1); expectedPenetration = ((1.0f - alpha) * radiusA + (1.0f - beta) * radiusB) * yAxis; + if (collision->_shapeA == &sphereA) { + // the ShapeCollider swapped the order of the shapes + expectedPenetration *= -1.0f; + } inaccuracy = glm::length(collision->_penetration - expectedPenetration); if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of capsuleB glm::vec3 endPoint; capsuleB.getEndPoint(endPoint); expectedContactPoint = endPoint + radiusB * yAxis; + if (collision->_shapeA == &sphereA) { + // the ShapeCollider swapped the order of the shapes + expectedContactPoint = axialOffset - radiusA * yAxis; + } inaccuracy = glm::length(collision->_contactPoint - expectedContactPoint); if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } } { // sphereA hits start cap at axis glm::vec3 axialOffset = - (halfHeightB + alpha * radiusA + beta * radiusB) * yAxis; - sphereA.setTranslation(axialOffset * yAxis); + sphereA.setTranslation(axialOffset); if (!ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: sphere and capsule should touch" - << std::endl; + << " ERROR: sphere and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -380,7 +391,7 @@ void ShapeColliderTests::sphereTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of sphereA @@ -389,15 +400,14 @@ void ShapeColliderTests::sphereTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } // capsuleB collides 
with sphereA if (!ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and sphere should touch" - << std::endl; + << " ERROR: capsule and sphere should touch" << std::endl; } else { ++numCollisions; } @@ -405,22 +415,30 @@ void ShapeColliderTests::sphereTouchesCapsule() { // penetration points from sphereA into capsuleB collision = collisions.getCollision(numCollisions - 1); expectedPenetration = - ((1.0f - alpha) * radiusA + (1.0f - beta) * radiusB) * yAxis; + if (collision->_shapeA == &sphereA) { + // the ShapeCollider swapped the order of the shapes + expectedPenetration *= -1.0f; + } inaccuracy = glm::length(collision->_penetration - expectedPenetration); if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of capsuleB glm::vec3 startPoint; capsuleB.getStartPoint(startPoint); expectedContactPoint = startPoint - radiusB * yAxis; + if (collision->_shapeA == &sphereA) { + // the ShapeCollider swapped the order of the shapes + expectedContactPoint = axialOffset + radiusA * yAxis; + } inaccuracy = glm::length(collision->_contactPoint - expectedContactPoint); if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } } if (collisions.size() != numCollisions) { @@ -450,14 +468,12 @@ void ShapeColliderTests::capsuleMissesCapsule() { if (ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should NOT touch" - << std::endl; + << " ERROR: capsule and capsule should NOT touch" << std::endl; } if (ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should NOT touch" - << std::endl; + << " ERROR: capsule and capsule should NOT touch" << std::endl; } // end to end @@ -465,14 +481,12 @@ void ShapeColliderTests::capsuleMissesCapsule() { if (ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should NOT touch" - << std::endl; + << " ERROR: capsule and capsule should NOT touch" << std::endl; } if (ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should NOT touch" - << std::endl; + << " ERROR: capsule and capsule should NOT touch" << std::endl; } // rotate B and move it to the side @@ -482,20 +496,17 @@ void ShapeColliderTests::capsuleMissesCapsule() { if (ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should NOT touch" - << std::endl; + << " ERROR: capsule and capsule should NOT touch" << std::endl; } if (ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should NOT touch" - << std::endl; + << " ERROR: capsule and capsule should NOT touch" << std::endl; } if (collisions.size() > 0) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: expected empty collision list but size is " << 
collisions.size() - << std::endl; + << " ERROR: expected empty collision list but size is " << collisions.size() << std::endl; } } @@ -520,16 +531,14 @@ void ShapeColliderTests::capsuleTouchesCapsule() { if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -541,16 +550,14 @@ void ShapeColliderTests::capsuleTouchesCapsule() { if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -564,16 +571,14 @@ void ShapeColliderTests::capsuleTouchesCapsule() { if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -590,8 +595,7 @@ void ShapeColliderTests::capsuleTouchesCapsule() { if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -602,7 +606,7 @@ void ShapeColliderTests::capsuleTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } glm::vec3 expectedContactPoint = capsuleA.getTranslation() + radiusA * xAxis; @@ -610,15 +614,14 @@ void ShapeColliderTests::capsuleTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } // capsuleB vs capsuleA if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -629,8 +632,7 @@ void ShapeColliderTests::capsuleTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration - << 
std::endl; + << " actual = " << collision->_penetration << std::endl; } expectedContactPoint = capsuleB.getTranslation() - (radiusB + halfHeightB) * xAxis; @@ -638,8 +640,7 @@ void ShapeColliderTests::capsuleTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint - << std::endl; + << " actual = " << collision->_contactPoint << std::endl; } } @@ -655,8 +656,7 @@ void ShapeColliderTests::capsuleTouchesCapsule() { if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -667,8 +667,7 @@ void ShapeColliderTests::capsuleTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration - << std::endl; + << " actual = " << collision->_penetration << std::endl; } glm::vec3 expectedContactPoint = capsuleA.getTranslation() + radiusA * zAxis + shift * yAxis; @@ -676,8 +675,7 @@ void ShapeColliderTests::capsuleTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint - << std::endl; + << " actual = " << collision->_contactPoint << std::endl; } } } @@ -710,8 +708,9 @@ void ShapeColliderTests::sphereTouchesAACubeFaces() { sphereCenter = cubeCenter + sphereOffset * axis; sphere.setTranslation(sphereCenter); - if (!ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){ - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should collide with cube. axis = " << axis << std::endl; + if (!ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){ + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should collide with cube. axis = " << axis + << std::endl; } CollisionInfo* collision = collisions[0]; if (!collision) { @@ -721,17 +720,13 @@ void ShapeColliderTests::sphereTouchesAACubeFaces() { glm::vec3 expectedPenetration = - overlap * axis; if (glm::distance(expectedPenetration, collision->_penetration) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: penetration = " << collision->_penetration - << " expected " << expectedPenetration - << " axis = " << axis - << std::endl; + << " expected " << expectedPenetration << " axis = " << axis << std::endl; } glm::vec3 expectedContact = sphereCenter - sphereRadius * axis; if (glm::distance(expectedContact, collision->_contactPoint) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: contactaPoint = " << collision->_contactPoint - << " expected " << expectedContact - << " axis = " << axis - << std::endl; + << " expected " << expectedContact << " axis = " << axis << std::endl; } } @@ -743,32 +738,26 @@ void ShapeColliderTests::sphereTouchesAACubeFaces() { sphereCenter = cubeCenter + sphereOffset * axis; sphere.setTranslation(sphereCenter); - if (!ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){ + if (!ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){ std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should collide with cube." 
- << " axis = " << axis - << std::endl; + << " axis = " << axis << std::endl; } CollisionInfo* collision = collisions[0]; if (!collision) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: no CollisionInfo on y-axis." - << " axis = " << axis - << std::endl; + << " axis = " << axis << std::endl; } glm::vec3 expectedPenetration = - overlap * axis; if (glm::distance(expectedPenetration, collision->_penetration) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: penetration = " << collision->_penetration - << " expected " << expectedPenetration - << " axis = " << axis - << std::endl; + << " expected " << expectedPenetration << " axis = " << axis << std::endl; } glm::vec3 expectedContact = sphereCenter - sphereRadius * axis; if (glm::distance(expectedContact, collision->_contactPoint) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: contactaPoint = " << collision->_contactPoint - << " expected " << expectedContact - << " axis = " << axis - << std::endl; + << " expected " << expectedContact << " axis = " << axis << std::endl; } } } @@ -817,7 +806,7 @@ void ShapeColliderTests::sphereTouchesAACubeEdges() { sphereCenter = cubeCenter + (lengthAxis * 0.5f * cubeSide + sphereRadius - overlap) * axis; sphere.setTranslation(sphereCenter); - if (!ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){ + if (!ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){ std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should collide with cube. axis = " << axis << std::endl; } CollisionInfo* collision = collisions[i]; @@ -828,17 +817,13 @@ void ShapeColliderTests::sphereTouchesAACubeEdges() { glm::vec3 expectedPenetration = - overlap * axis; if (glm::distance(expectedPenetration, collision->_penetration) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: penetration = " << collision->_penetration - << " expected " << expectedPenetration - << " axis = " << axis - << std::endl; + << " expected " << expectedPenetration << " axis = " << axis << std::endl; } glm::vec3 expectedContact = sphereCenter - sphereRadius * axis; if (glm::distance(expectedContact, collision->_contactPoint) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: contactaPoint = " << collision->_contactPoint - << " expected " << expectedContact - << " axis = " << axis - << std::endl; + << " expected " << expectedContact << " axis = " << axis << std::endl; } } } @@ -858,42 +843,42 @@ void ShapeColliderTests::sphereMissesAACube() { // top sphereCenter = cubeCenter + sphereOffset * yAxis; sphere.setTranslation(sphereCenter); - if (ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){ + if (ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){ std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should NOT collide with cube" << std::endl; } // bottom sphereCenter = cubeCenter - sphereOffset * yAxis; sphere.setTranslation(sphereCenter); - if (ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){ + if (ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){ std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should NOT collide with cube" << std::endl; } // left sphereCenter = cubeCenter + sphereOffset * xAxis; sphere.setTranslation(sphereCenter); - if (ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){ + if (ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){ std::cout << __FILE__ << ":" << __LINE__ << " 
ERROR: sphere should NOT collide with cube" << std::endl; } // right sphereCenter = cubeCenter - sphereOffset * xAxis; sphere.setTranslation(sphereCenter); - if (ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){ + if (ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){ std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should NOT collide with cube" << std::endl; } // forward sphereCenter = cubeCenter + sphereOffset * zAxis; sphere.setTranslation(sphereCenter); - if (ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){ + if (ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){ std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should NOT collide with cube" << std::endl; } // back sphereCenter = cubeCenter - sphereOffset * zAxis; sphere.setTranslation(sphereCenter); - if (ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){ + if (ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){ std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should NOT collide with cube" << std::endl; } } @@ -965,7 +950,8 @@ void ShapeColliderTests::rayHitsSphere() { float expectedDistance = startDistance - radius; float relativeError = fabsf(distance - expectedDistance) / startDistance; if (relativeError > EPSILON) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray sphere intersection distance error = " << relativeError << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray sphere intersection distance error = " + << relativeError << std::endl; } } } @@ -1022,7 +1008,8 @@ void ShapeColliderTests::rayBarelyMissesSphere() { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should just barely miss sphere" << std::endl; } if (distance != FLT_MAX) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" + << std::endl; } // translate and rotate the whole system... 
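
A note on the ray-test conventions used in the hunks above and below: each query seeds distance with FLT_MAX, a miss must leave it untouched (hence the distance != FLT_MAX checks), and a hit is accepted when its error relative to the ray's start distance is within EPSILON (or EDGE_CASE_SLOP_FACTOR * EPSILON near cap/cylinder edges). A minimal sketch of that pattern follows; the findRayIntersection name is assumed here for illustration and is not taken from these hunks.

    float distance = FLT_MAX;
    if (shape->findRayIntersection(rayStart, rayDirection, distance)) {   // method name assumed
        // hit: compare against the analytically expected distance,
        // e.g. startDistance - radius for a ray aimed at a sphere's center
        float relativeError = fabsf(distance - expectedDistance) / startDistance;
        // the tests report an error when relativeError > EPSILON
    } else {
        // miss: the query must not have modified distance,
        // so distance == FLT_MAX still holds
    }
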
@@ -1040,7 +1027,8 @@ void ShapeColliderTests::rayBarelyMissesSphere() { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should just barely miss sphere" << std::endl; } if (distance != FLT_MAX) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" + << std::endl; } } @@ -1062,7 +1050,8 @@ void ShapeColliderTests::rayHitsCapsule() { float expectedDistance = startDistance - radius; float relativeError = fabsf(distance - expectedDistance) / startDistance; if (relativeError > EPSILON) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " << relativeError << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " + << relativeError << std::endl; } // toward top of cylindrical wall @@ -1073,7 +1062,8 @@ void ShapeColliderTests::rayHitsCapsule() { } relativeError = fabsf(distance - expectedDistance) / startDistance; if (relativeError > EPSILON) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " << relativeError << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " + << relativeError << std::endl; } // toward top cap @@ -1085,7 +1075,8 @@ void ShapeColliderTests::rayHitsCapsule() { } relativeError = fabsf(distance - expectedDistance) / startDistance; if (relativeError > EPSILON) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " << relativeError << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " + << relativeError << std::endl; } const float EDGE_CASE_SLOP_FACTOR = 20.0f; @@ -1100,7 +1091,8 @@ void ShapeColliderTests::rayHitsCapsule() { relativeError = fabsf(distance - expectedDistance) / startDistance; // for edge cases we allow a LOT of error if (relativeError > EDGE_CASE_SLOP_FACTOR * EPSILON) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " << relativeError << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " + << relativeError << std::endl; } // toward tip of bottom cap @@ -1113,7 +1105,8 @@ void ShapeColliderTests::rayHitsCapsule() { relativeError = fabsf(distance - expectedDistance) / startDistance; // for edge cases we allow a LOT of error if (relativeError > EDGE_CASE_SLOP_FACTOR * EPSILON) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " << relativeError << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " + << relativeError << std::endl; } // toward edge of capsule cylindrical face @@ -1127,7 +1120,8 @@ void ShapeColliderTests::rayHitsCapsule() { relativeError = fabsf(distance - expectedDistance) / startDistance; // for edge cases we allow a LOT of error if (relativeError > EDGE_CASE_SLOP_FACTOR * EPSILON) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " << relativeError << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " + << relativeError << std::endl; } } // TODO: test at steep angles near cylinder/cap junction @@ -1154,7 +1148,8 @@ void 
ShapeColliderTests::rayMissesCapsule() { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should miss capsule" << std::endl; } if (distance != FLT_MAX) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" + << std::endl; } // below bottom cap @@ -1164,7 +1159,8 @@ void ShapeColliderTests::rayMissesCapsule() { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should miss capsule" << std::endl; } if (distance != FLT_MAX) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" + << std::endl; } // past edge of capsule cylindrical face @@ -1175,7 +1171,8 @@ void ShapeColliderTests::rayMissesCapsule() { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should miss capsule" << std::endl; } if (distance != FLT_MAX) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" + << std::endl; } } // TODO: test at steep angles near edge @@ -1201,7 +1198,8 @@ void ShapeColliderTests::rayHitsPlane() { float expectedDistance = SQUARE_ROOT_OF_3 * planeDistanceFromOrigin; float relativeError = fabsf(distance - expectedDistance) / planeDistanceFromOrigin; if (relativeError > EPSILON) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray plane intersection distance error = " << relativeError << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray plane intersection distance error = " + << relativeError << std::endl; } // rotate the whole system and try again @@ -1222,7 +1220,8 @@ void ShapeColliderTests::rayHitsPlane() { expectedDistance = SQUARE_ROOT_OF_3 * planeDistanceFromOrigin; relativeError = fabsf(distance - expectedDistance) / planeDistanceFromOrigin; if (relativeError > EPSILON) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray plane intersection distance error = " << relativeError << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray plane intersection distance error = " + << relativeError << std::endl; } } @@ -1243,7 +1242,8 @@ void ShapeColliderTests::rayMissesPlane() { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should miss plane" << std::endl; } if (distance != FLT_MAX) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" + << std::endl; } // rotate the whole system and try again @@ -1261,7 +1261,8 @@ void ShapeColliderTests::rayMissesPlane() { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should miss plane" << std::endl; } if (distance != FLT_MAX) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" + << std::endl; } } @@ -1275,7 +1276,8 @@ void ShapeColliderTests::rayMissesPlane() { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should miss plane" << std::endl; } if (distance != FLT_MAX) { - std::cout << 
__FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" + << std::endl; } // rotate the whole system and try again @@ -1293,12 +1295,47 @@ void ShapeColliderTests::rayMissesPlane() { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should miss plane" << std::endl; } if (distance != FLT_MAX) { - std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl; + std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" + << std::endl; } } } +void ShapeColliderTests::measureTimeOfCollisionDispatch() { + /* KEEP for future manual testing + // create two non-colliding spheres + float radiusA = 7.0f; + float radiusB = 3.0f; + float alpha = 1.2f; + float beta = 1.3f; + glm::vec3 offsetDirection = glm::normalize(glm::vec3(1.0f, 2.0f, 3.0f)); + float offsetDistance = alpha * radiusA + beta * radiusB; + + SphereShape sphereA(radiusA, origin); + SphereShape sphereB(radiusB, offsetDistance * offsetDirection); + CollisionList collisions(16); + + //int numTests = 1; + quint64 oldTime; + quint64 newTime; + int numTests = 100000000; + { + quint64 startTime = usecTimestampNow(); + for (int i = 0; i < numTests; ++i) { + ShapeCollider::collideShapes(&sphereA, &sphereB, collisions); + } + quint64 endTime = usecTimestampNow(); + std::cout << numTests << " non-colliding collisions in " << (endTime - startTime) << " usec" << std::endl; + newTime = endTime - startTime; + } + */ +} + void ShapeColliderTests::runAllTests() { + ShapeCollider::initDispatchTable(); + + //measureTimeOfCollisionDispatch(); + sphereMissesSphere(); sphereTouchesSphere(); diff --git a/tests/physics/src/ShapeColliderTests.h b/tests/physics/src/ShapeColliderTests.h index fd9f1f9706..4a51651cb8 100644 --- a/tests/physics/src/ShapeColliderTests.h +++ b/tests/physics/src/ShapeColliderTests.h @@ -35,6 +35,8 @@ namespace ShapeColliderTests { void rayHitsPlane(); void rayMissesPlane(); + void measureTimeOfCollisionDispatch(); + void runAllTests(); } diff --git a/tests/physics/src/VerletShapeTests.cpp b/tests/physics/src/VerletShapeTests.cpp index 3a3bd43278..df5cdc5c6b 100644 --- a/tests/physics/src/VerletShapeTests.cpp +++ b/tests/physics/src/VerletShapeTests.cpp @@ -102,8 +102,7 @@ void VerletShapeTests::sphereMissesSphere() { if (collisions.size() > 0) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: expected empty collision list but size is " << collisions.size() - << std::endl; + << " ERROR: expected empty collision list but size is " << collisions.size() << std::endl; } } @@ -159,7 +158,7 @@ void VerletShapeTests::sphereTouchesSphere() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of sphereA @@ -169,7 +168,7 @@ void VerletShapeTests::sphereTouchesSphere() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } } @@ -189,7 +188,7 @@ void VerletShapeTests::sphereTouchesSphere() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << 
__LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of sphereA @@ -199,7 +198,7 @@ void VerletShapeTests::sphereTouchesSphere() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } } } @@ -247,23 +246,20 @@ void VerletShapeTests::sphereMissesCapsule() { if (ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: sphere and capsule should NOT touch" - << std::endl; + << " ERROR: sphere and capsule should NOT touch" << std::endl; } // capsuleB against sphereA if (ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: sphere and capsule should NOT touch" - << std::endl; + << " ERROR: sphere and capsule should NOT touch" << std::endl; } } if (collisions.size() > 0) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: expected empty collision list but size is " << collisions.size() - << std::endl; + << " ERROR: expected empty collision list but size is " << collisions.size() << std::endl; } } @@ -297,8 +293,7 @@ void VerletShapeTests::sphereTouchesCapsule() { if (!ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: sphere and capsule should touch" - << std::endl; + << " ERROR: sphere and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -310,7 +305,7 @@ void VerletShapeTests::sphereTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of sphereA @@ -319,15 +314,14 @@ void VerletShapeTests::sphereTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } // capsuleB collides with sphereA if (!ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and sphere should touch" - << std::endl; + << " ERROR: capsule and sphere should touch" << std::endl; } else { ++numCollisions; } @@ -335,33 +329,41 @@ void VerletShapeTests::sphereTouchesCapsule() { // penetration points from sphereA into capsuleB collision = collisions.getCollision(numCollisions - 1); expectedPenetration = - (radialOffset - totalRadius) * xAxis; + if (collision->_shapeA == &sphereA) { + // the ShapeCollider swapped the order of the shapes + expectedPenetration *= -1.0f; + } inaccuracy = glm::length(collision->_penetration - expectedPenetration); if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of capsuleB glm::vec3 BtoA = sphereA.getTranslation() - capsuleB.getTranslation(); glm::vec3 closestApproach = capsuleB.getTranslation() + 
glm::dot(BtoA, yAxis) * yAxis; expectedContactPoint = closestApproach + radiusB * glm::normalize(BtoA - closestApproach); + if (collision->_shapeA == &sphereA) { + // the ShapeCollider swapped the order of the shapes + closestApproach = sphereA.getTranslation() - glm::dot(BtoA, yAxis) * yAxis; + expectedContactPoint = closestApproach - radiusB * glm::normalize(BtoA - closestApproach); + } inaccuracy = glm::length(collision->_contactPoint - expectedContactPoint); if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } } { // sphereA hits end cap at axis glm::vec3 axialOffset = (halfHeightB + alpha * radiusA + beta * radiusB) * yAxis; - sphereA.setTranslation(axialOffset * yAxis); + sphereA.setTranslation(axialOffset); if (!ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: sphere and capsule should touch" - << std::endl; + << " ERROR: sphere and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -373,7 +375,7 @@ void VerletShapeTests::sphereTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of sphereA @@ -382,15 +384,14 @@ void VerletShapeTests::sphereTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } // capsuleB collides with sphereA if (!ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and sphere should touch" - << std::endl; + << " ERROR: capsule and sphere should touch" << std::endl; } else { ++numCollisions; } @@ -398,33 +399,40 @@ void VerletShapeTests::sphereTouchesCapsule() { // penetration points from sphereA into capsuleB collision = collisions.getCollision(numCollisions - 1); expectedPenetration = ((1.0f - alpha) * radiusA + (1.0f - beta) * radiusB) * yAxis; + if (collision->_shapeA == &sphereA) { + // the ShapeCollider swapped the order of the shapes + expectedPenetration *= -1.0f; + } inaccuracy = glm::length(collision->_penetration - expectedPenetration); if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of capsuleB glm::vec3 endPoint; capsuleB.getEndPoint(endPoint); expectedContactPoint = endPoint + radiusB * yAxis; + if (collision->_shapeA == &sphereA) { + // the ShapeCollider swapped the order of the shapes + expectedContactPoint = axialOffset - radiusA * yAxis; + } inaccuracy = glm::length(collision->_contactPoint - expectedContactPoint); if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } } { // sphereA hits start cap at axis glm::vec3 axialOffset 
= - (halfHeightB + alpha * radiusA + beta * radiusB) * yAxis; - sphereA.setTranslation(axialOffset * yAxis); + sphereA.setTranslation(axialOffset); if (!ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: sphere and capsule should touch" - << std::endl; + << " ERROR: sphere and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -436,7 +444,7 @@ void VerletShapeTests::sphereTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of sphereA @@ -445,15 +453,14 @@ void VerletShapeTests::sphereTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } // capsuleB collides with sphereA if (!ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and sphere should touch" - << std::endl; + << " ERROR: capsule and sphere should touch" << std::endl; } else { ++numCollisions; } @@ -461,22 +468,30 @@ void VerletShapeTests::sphereTouchesCapsule() { // penetration points from sphereA into capsuleB collision = collisions.getCollision(numCollisions - 1); expectedPenetration = - ((1.0f - alpha) * radiusA + (1.0f - beta) * radiusB) * yAxis; + if (collision->_shapeA == &sphereA) { + // the ShapeCollider swapped the order of the shapes + expectedPenetration *= -1.0f; + } inaccuracy = glm::length(collision->_penetration - expectedPenetration); if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } // contactPoint is on surface of capsuleB glm::vec3 startPoint; capsuleB.getStartPoint(startPoint); expectedContactPoint = startPoint - radiusB * yAxis; + if (collision->_shapeA == &sphereA) { + // the ShapeCollider swapped the order of the shapes + expectedContactPoint = axialOffset + radiusA * yAxis; + } inaccuracy = glm::length(collision->_contactPoint - expectedContactPoint); if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } } if (collisions.size() != numCollisions) { @@ -515,14 +530,12 @@ void VerletShapeTests::capsuleMissesCapsule() { if (ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should NOT touch" - << std::endl; + << " ERROR: capsule and capsule should NOT touch" << std::endl; } if (ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should NOT touch" - << std::endl; + << " ERROR: capsule and capsule should NOT touch" << std::endl; } // end to end @@ -530,14 +543,12 @@ void VerletShapeTests::capsuleMissesCapsule() { if (ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: 
capsule and capsule should NOT touch" - << std::endl; + << " ERROR: capsule and capsule should NOT touch" << std::endl; } if (ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should NOT touch" - << std::endl; + << " ERROR: capsule and capsule should NOT touch" << std::endl; } // rotate B and move it to the side @@ -547,20 +558,17 @@ void VerletShapeTests::capsuleMissesCapsule() { if (ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should NOT touch" - << std::endl; + << " ERROR: capsule and capsule should NOT touch" << std::endl; } if (ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should NOT touch" - << std::endl; + << " ERROR: capsule and capsule should NOT touch" << std::endl; } if (collisions.size() > 0) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: expected empty collision list but size is " << collisions.size() - << std::endl; + << " ERROR: expected empty collision list but size is " << collisions.size() << std::endl; } } @@ -594,16 +602,14 @@ void VerletShapeTests::capsuleTouchesCapsule() { if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -615,16 +621,14 @@ void VerletShapeTests::capsuleTouchesCapsule() { if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -638,16 +642,14 @@ void VerletShapeTests::capsuleTouchesCapsule() { if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -664,8 +666,7 @@ void VerletShapeTests::capsuleTouchesCapsule() { if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -676,7 +677,7 @@ void VerletShapeTests::capsuleTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: 
expected = " << expectedPenetration - << " actual = " << collision->_penetration; + << " actual = " << collision->_penetration << std::endl; } glm::vec3 expectedContactPoint = capsuleA.getTranslation() + radiusA * xAxis; @@ -684,15 +685,14 @@ void VerletShapeTests::capsuleTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint; + << " actual = " << collision->_contactPoint << std::endl; } // capsuleB vs capsuleA if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -703,8 +703,7 @@ void VerletShapeTests::capsuleTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration - << std::endl; + << " actual = " << collision->_penetration << std::endl; } expectedContactPoint = capsuleB.getTranslation() - (radiusB + halfHeightB) * xAxis; @@ -712,8 +711,7 @@ void VerletShapeTests::capsuleTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint - << std::endl; + << " actual = " << collision->_contactPoint << std::endl; } } @@ -729,8 +727,7 @@ void VerletShapeTests::capsuleTouchesCapsule() { if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions)) { std::cout << __FILE__ << ":" << __LINE__ - << " ERROR: capsule and capsule should touch" - << std::endl; + << " ERROR: capsule and capsule should touch" << std::endl; } else { ++numCollisions; } @@ -741,8 +738,7 @@ void VerletShapeTests::capsuleTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad penetration: expected = " << expectedPenetration - << " actual = " << collision->_penetration - << std::endl; + << " actual = " << collision->_penetration << std::endl; } glm::vec3 expectedContactPoint = capsuleA.getTranslation() + radiusA * zAxis + shift * yAxis; @@ -750,13 +746,14 @@ void VerletShapeTests::capsuleTouchesCapsule() { if (fabs(inaccuracy) > EPSILON) { std::cout << __FILE__ << ":" << __LINE__ << " ERROR: bad contactPoint: expected = " << expectedContactPoint - << " actual = " << collision->_contactPoint - << std::endl; + << " actual = " << collision->_contactPoint << std::endl; } } } void VerletShapeTests::runAllTests() { + ShapeCollider::initDispatchTable(); + setSpherePosition(); sphereMissesSphere(); sphereTouchesSphere(); diff --git a/tests/shared/src/main.cpp b/tests/shared/src/main.cpp index d4251eef7a..34ba515062 100644 --- a/tests/shared/src/main.cpp +++ b/tests/shared/src/main.cpp @@ -16,6 +16,7 @@ int main(int argc, char** argv) { MovingMinMaxAvgTests::runAllTests(); MovingPercentileTests::runAllTests(); AngularConstraintTests::runAllTests(); + printf("tests complete, press enter to exit\n"); getchar(); return 0; }
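Reviewer note: the new ShapeCollider::initDispatchTable() calls in both runAllTests() functions (together with the kept-for-manual-use measureTimeOfCollisionDispatch() timing harness) suggest collision routines are now looked up through a shape-type-indexed function-pointer table. A minimal sketch of that pattern follows; the types, enum values, and function names here are assumptions for illustration, not the engine's actual ShapeCollider API:

    // Hypothetical minimal types for illustration only.
    struct Shape { int type; };
    struct CollisionList {};

    enum { SPHERE_SHAPE = 0, CAPSULE_SHAPE, PLANE_SHAPE, NUM_SHAPE_TYPES };

    typedef bool (*CollideFunction)(const Shape* a, const Shape* b, CollisionList& collisions);

    static bool notImplemented(const Shape*, const Shape*, CollisionList&) { return false; }

    static CollideFunction dispatchTable[NUM_SHAPE_TYPES][NUM_SHAPE_TYPES];

    void initDispatchTable() {
        // Fill every pair with a safe default, then register specific handlers,
        // e.g. dispatchTable[SPHERE_SHAPE][SPHERE_SHAPE] = sphereVsSphere;
        for (int i = 0; i < NUM_SHAPE_TYPES; ++i) {
            for (int j = 0; j < NUM_SHAPE_TYPES; ++j) {
                dispatchTable[i][j] = notImplemented;
            }
        }
    }

    bool collideShapes(const Shape* a, const Shape* b, CollisionList& collisions) {
        // One table lookup per pair instead of a chain of type checks.
        return dispatchTable[a->type][b->type](a, b, collisions);
    }

In this sketch, pre-filling every slot with a no-op keeps unregistered shape pairs from calling through a null pointer, which would explain why the tests now call initDispatchTable() before any collideShapes() use; the commented-out timing loop in measureTimeOfCollisionDispatch() is the natural way to compare this lookup against the previous dispatch path.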