Merge branch 'master' of git://github.com/worklist/hifi into 19498

Conflicts:
    interface/src/Application.cpp

commit 8ed30d9da2

140 changed files with 1723 additions and 683 deletions

2  BUILD.md
@@ -38,7 +38,7 @@ Any variables that need to be set for CMake to find dependencies can be set as E
 
 For example, to pass the QT_CMAKE_PREFIX_PATH variable during build file generation:
 
-    cmake .. -DQT_CMAKE_PREFIX_PATH=/usr/local/qt/5.2.0/clang_64/lib/cmake
+    cmake .. -DQT_CMAKE_PREFIX_PATH=/usr/local/qt/5.2.0/lib/cmake
 
 
 UNIX
 
@@ -16,7 +16,11 @@ elseif (CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_GNUCXX)
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall")
 endif(WIN32)
 
-set(CMAKE_PREFIX_PATH ${CMAKE_PREFIX_PATH} $ENV{QT_CMAKE_PREFIX_PATH})
+if (NOT QT_CMAKE_PREFIX_PATH)
+    set(QT_CMAKE_PREFIX_PATH $ENV{QT_CMAKE_PREFIX_PATH})
+endif ()
+
+set(CMAKE_PREFIX_PATH ${CMAKE_PREFIX_PATH} ${QT_CMAKE_PREFIX_PATH})
 
 # set our Base SDK to 10.8
 set(CMAKE_OSX_SYSROOT /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.8.sdk)
@@ -63,4 +63,5 @@ To test things out you'll want to run the Interface client.
 
 To access your local domain in Interface, open your Preferences -- on OS X this is available in the Interface menu, on Linux you'll find it in the File menu. Enter "localhost" in the "Domain server" field.
 
-If everything worked you should see that you are connected to at least one server. Nice work!
+If everything worked you should see that you are connected to at least one server.
+Nice work!
@@ -120,10 +120,12 @@ void Agent::readPendingDatagrams() {
     }
 }
 
+const QString AGENT_LOGGING_NAME = "agent";
+
 void Agent::run() {
-    NodeList* nodeList = NodeList::getInstance();
-    nodeList->setOwnerType(NodeType::Agent);
+    ThreadedAssignment::commonInit(AGENT_LOGGING_NAME, NodeType::Agent);
 
+    NodeList* nodeList = NodeList::getInstance();
     nodeList->addSetOfNodeTypesToNodeInterestSet(NodeSet() << NodeType::AudioMixer << NodeType::AvatarMixer);
 
     // figure out the URL for the script for this agent assignment
@@ -148,17 +150,6 @@ void Agent::run() {
 
     qDebug() << "Downloaded script:" << scriptContents;
 
-    timeval startTime;
-    gettimeofday(&startTime, NULL);
-
-    QTimer* domainServerTimer = new QTimer(this);
-    connect(domainServerTimer, SIGNAL(timeout()), this, SLOT(checkInWithDomainServerOrExit()));
-    domainServerTimer->start(DOMAIN_SERVER_CHECK_IN_USECS / 1000);
-
-    QTimer* silentNodeTimer = new QTimer(this);
-    connect(silentNodeTimer, SIGNAL(timeout()), nodeList, SLOT(removeSilentNodes()));
-    silentNodeTimer->start(NODE_SILENCE_THRESHOLD_USECS / 1000);
-
     // setup an Avatar for the script to use
     AvatarData scriptedAvatar;
 
@@ -189,4 +180,9 @@ void Agent::run() {
 
     _scriptEngine.setScriptContents(scriptContents);
     _scriptEngine.run();
+    setFinished(true);
+}
+
+void Agent::aboutToFinish() {
+    _scriptEngine.stop();
 }
@@ -41,6 +41,8 @@ public:
    bool isListeningToAudioStream() const { return _scriptEngine.isListeningToAudioStream(); }
    void setIsListeningToAudioStream(bool isListeningToAudioStream)
        { _scriptEngine.setIsListeningToAudioStream(isListeningToAudioStream); }
 
+    virtual void aboutToFinish();
+
 public slots:
     void run();
@@ -33,6 +33,7 @@
 #include <glm/gtx/vector_angle.hpp>
 
 #include <QtCore/QCoreApplication>
+#include <QtCore/QJsonObject>
 #include <QtCore/QTimer>
 
 #include <Logging.h>
@@ -53,6 +54,8 @@
 const short JITTER_BUFFER_MSECS = 12;
 const short JITTER_BUFFER_SAMPLES = JITTER_BUFFER_MSECS * (SAMPLE_RATE / 1000.0);
 
+const float LOUDNESS_TO_DISTANCE_RATIO = 0.00305f;
+
 const QString AUDIO_MIXER_LOGGING_TARGET_NAME = "audio-mixer";
 
 void attachNewBufferToNode(Node *newNode) {
@@ -64,10 +67,11 @@ void attachNewBufferToNode(Node *newNode) {
 AudioMixer::AudioMixer(const QByteArray& packet) :
     ThreadedAssignment(packet),
     _trailingSleepRatio(1.0f),
-    _minSourceLoudnessInFrame(1.0f),
-    _maxSourceLoudnessInFrame(0.0f),
-    _loudnessCutoffRatio(0.0f),
-    _minRequiredLoudness(0.0f)
+    _minAudibilityThreshold(LOUDNESS_TO_DISTANCE_RATIO / 2.0f),
+    _performanceThrottlingRatio(0.0f),
+    _numStatFrames(0),
+    _sumListeners(0),
+    _sumMixes(0)
 {
 
 }
@@ -81,10 +85,24 @@ void AudioMixer::addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuf
 
     if (bufferToAdd != listeningNodeBuffer) {
         // if the two buffer pointers do not match then these are different buffers
 
         glm::vec3 relativePosition = bufferToAdd->getPosition() - listeningNodeBuffer->getPosition();
 
+        float distanceBetween = glm::length(relativePosition);
+
+        if (distanceBetween < EPSILON) {
+            distanceBetween = EPSILON;
+        }
+
+        if (bufferToAdd->getAverageLoudness() / distanceBetween <= _minAudibilityThreshold) {
+            // according to mixer performance we have decided this does not get to be mixed in
+            // bail out
+            return;
+        }
+
+        ++_sumMixes;
+
         glm::quat inverseOrientation = glm::inverse(listeningNodeBuffer->getOrientation());
 
         float distanceSquareToSource = glm::dot(relativePosition, relativePosition);
         float radius = 0.0f;
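Note: the hunk above skips any source whose average loudness, divided by its distance from the listener, falls at or below the mixer's current audibility threshold. A minimal, hedged C++ sketch of that cull follows; it is not the project's API, and the EPSILON value used here is a placeholder, not the project's constant.

    #include <cmath>

    // Hypothetical stand-alone version of the audibility cull: a source is mixed
    // only when loudness attenuated by distance clears the threshold.
    static const float EPSILON = 0.01f; // placeholder guard against division by zero

    bool shouldMixSource(float averageLoudness, float distance, float minAudibilityThreshold) {
        if (distance < EPSILON) {
            distance = EPSILON;
        }
        return (averageLoudness / distance) > minAudibilityThreshold;
    }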
@@ -306,7 +324,7 @@ void AudioMixer::prepareMixForListeningNode(Node* node) {
             if ((*otherNode != *node
                  || otherNodeBuffer->shouldLoopbackForNode())
                 && otherNodeBuffer->willBeAddedToMix()
-                && otherNodeBuffer->getAverageLoudness() > _minRequiredLoudness) {
+                && otherNodeBuffer->getAverageLoudness() > 0) {
                 addBufferToMixForListeningNodeWithBuffer(otherNodeBuffer, nodeRingBuffer);
             }
         }
@@ -338,9 +356,29 @@ void AudioMixer::readPendingDatagrams() {
     }
 }
 
+void AudioMixer::sendStatsPacket() {
+    static QJsonObject statsObject;
+    statsObject["trailing_sleep_percentage"] = _trailingSleepRatio * 100.0f;
+    statsObject["performance_throttling_ratio"] = _performanceThrottlingRatio;
+
+    statsObject["average_listeners_per_frame"] = (float) _sumListeners / (float) _numStatFrames;
+
+    if (_sumListeners > 0) {
+        statsObject["average_mixes_per_listener"] = (float) _sumMixes / (float) _sumListeners;
+    } else {
+        statsObject["average_mixes_per_listener"] = 0.0;
+    }
+
+    ThreadedAssignment::addPacketStatsAndSendStatsPacket(statsObject);
+
+    _sumListeners = 0;
+    _sumMixes = 0;
+    _numStatFrames = 0;
+}
+
 void AudioMixer::run() {
 
-    commonInit(AUDIO_MIXER_LOGGING_TARGET_NAME, NodeType::AudioMixer);
+    ThreadedAssignment::commonInit(AUDIO_MIXER_LOGGING_TARGET_NAME, NodeType::AudioMixer);
 
     NodeList* nodeList = NodeList::getInstance();
 
@@ -357,20 +395,66 @@ void AudioMixer::run() {
         + numBytesForPacketHeaderGivenPacketType(PacketTypeMixedAudio)];
 
     int usecToSleep = BUFFER_SEND_INTERVAL_USECS;
 
+    const int TRAILING_AVERAGE_FRAMES = 100;
+    int framesSinceCutoffEvent = TRAILING_AVERAGE_FRAMES;
+
     while (!_isFinished) {
 
-        _minSourceLoudnessInFrame = 1.0f;
-        _maxSourceLoudnessInFrame = 0.0f;
-
         foreach (const SharedNodePointer& node, nodeList->getNodeHash()) {
             if (node->getLinkedData()) {
-                ((AudioMixerClientData*) node->getLinkedData())->checkBuffersBeforeFrameSend(JITTER_BUFFER_SAMPLES,
-                                                                                             _minSourceLoudnessInFrame,
-                                                                                             _maxSourceLoudnessInFrame);
+                ((AudioMixerClientData*) node->getLinkedData())->checkBuffersBeforeFrameSend(JITTER_BUFFER_SAMPLES);
             }
         }
 
+        const float STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.10f;
+        const float BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.20f;
+
+        const float RATIO_BACK_OFF = 0.02f;
+
+        const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
+        const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
+
+        if (usecToSleep < 0) {
+            usecToSleep = 0;
+        }
+
+        _trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio)
+            + (usecToSleep * CURRENT_FRAME_RATIO / (float) BUFFER_SEND_INTERVAL_USECS);
+
+        float lastCutoffRatio = _performanceThrottlingRatio;
+        bool hasRatioChanged = false;
+
+        if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
+            if (_trailingSleepRatio <= STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD) {
+                // we're struggling - change our min required loudness to reduce some load
+                _performanceThrottlingRatio = _performanceThrottlingRatio + (0.5f * (1.0f - _performanceThrottlingRatio));
+
+                qDebug() << "Mixer is struggling, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
+                    << lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
+                hasRatioChanged = true;
+            } else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && _performanceThrottlingRatio != 0) {
+                // we've recovered and can back off the required loudness
+                _performanceThrottlingRatio = _performanceThrottlingRatio - RATIO_BACK_OFF;
+
+                if (_performanceThrottlingRatio < 0) {
+                    _performanceThrottlingRatio = 0;
+                }
+
+                qDebug() << "Mixer is recovering, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
+                    << lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
+                hasRatioChanged = true;
+            }
+
+            if (hasRatioChanged) {
+                // set out min audability threshold from the new ratio
+                _minAudibilityThreshold = LOUDNESS_TO_DISTANCE_RATIO / (2.0f * (1.0f - _performanceThrottlingRatio));
+                qDebug() << "Minimum audability required to be mixed is now" << _minAudibilityThreshold;
+
+                framesSinceCutoffEvent = 0;
+            }
+        }
+
         foreach (const SharedNodePointer& node, nodeList->getNodeHash()) {
             if (node->getType() == NodeType::Agent && node->getActiveSocket() && node->getLinkedData()
                 && ((AudioMixerClientData*) node->getLinkedData())->getAvatarAudioRingBuffer()) {
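Note: the throttling added above keeps an exponential moving average of how much of each frame the mixer spends sleeping and adjusts the audibility cutoff from it. A hedged, self-contained C++ sketch of that math follows; the constants are taken from the diff, but the wrapper struct and function are hypothetical, and the once-per-window gating on framesSinceCutoffEvent is omitted for brevity.

    // Minimal sketch of the trailing-sleep-ratio update and throttle adjustment.
    struct ThrottleState {
        float trailingSleepRatio = 1.0f;
        float performanceThrottlingRatio = 0.0f;
    };

    void updateThrottle(ThrottleState& state, int usecSlept, int frameIntervalUsecs) {
        const int TRAILING_AVERAGE_FRAMES = 100;
        const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
        const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;

        if (usecSlept < 0) {
            usecSlept = 0;
        }

        // moving average of the fraction of each frame spent sleeping
        state.trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * state.trailingSleepRatio)
            + (usecSlept * CURRENT_FRAME_RATIO / (float) frameIntervalUsecs);

        if (state.trailingSleepRatio <= 0.10f) {
            // struggling: move the throttle halfway toward full throttling
            state.performanceThrottlingRatio += 0.5f * (1.0f - state.performanceThrottlingRatio);
        } else if (state.trailingSleepRatio >= 0.20f && state.performanceThrottlingRatio > 0.0f) {
            // recovering: back the throttle off gradually
            state.performanceThrottlingRatio -= 0.02f;
            if (state.performanceThrottlingRatio < 0.0f) {
                state.performanceThrottlingRatio = 0.0f;
            }
        }
    }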
@@ -380,6 +464,8 @@ void AudioMixer::run() {
 
                 memcpy(clientMixBuffer + numBytesPacketHeader, _clientSamples, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
                 nodeList->writeDatagram(clientMixBuffer, NETWORK_BUFFER_LENGTH_BYTES_STEREO + numBytesPacketHeader, node);
+
+                ++_sumListeners;
             }
         }
 
@@ -390,6 +476,8 @@ void AudioMixer::run() {
             }
         }
 
+        ++_numStatFrames;
+
         QCoreApplication::processEvents();
 
         if (_isFinished) {
@@ -400,10 +488,7 @@ void AudioMixer::run() {
 
         if (usecToSleep > 0) {
             usleep(usecToSleep);
-        } else {
-            qDebug() << "AudioMixer loop took" << -usecToSleep << "of extra time. Not sleeping.";
         }
-
     }
 
     delete[] clientMixBuffer;
@@ -28,6 +28,8 @@ public slots:
     void run();
 
     void readPendingDatagrams();
+
+    void sendStatsPacket();
 private:
     /// adds one buffer to the mix for a listening node
     void addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuffer* bufferToAdd,
@@ -41,10 +43,11 @@ private:
     int16_t _clientSamples[NETWORK_BUFFER_LENGTH_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)];
 
     float _trailingSleepRatio;
-    float _minSourceLoudnessInFrame;
-    float _maxSourceLoudnessInFrame;
-    float _loudnessCutoffRatio;
-    float _minRequiredLoudness;
+    float _minAudibilityThreshold;
+    float _performanceThrottlingRatio;
+    int _numStatFrames;
+    int _sumListeners;
+    int _sumMixes;
 };
 
 #endif /* defined(__hifi__AudioMixer__) */
@@ -83,20 +83,16 @@ int AudioMixerClientData::parseData(const QByteArray& packet) {
     return 0;
 }
 
-void AudioMixerClientData::checkBuffersBeforeFrameSend(int jitterBufferLengthSamples,
-                                                       float& currentMinLoudness,
-                                                       float& currentMaxLoudness) {
+void AudioMixerClientData::checkBuffersBeforeFrameSend(int jitterBufferLengthSamples) {
     for (unsigned int i = 0; i < _ringBuffers.size(); i++) {
         if (_ringBuffers[i]->shouldBeAddedToMix(jitterBufferLengthSamples)) {
             // this is a ring buffer that is ready to go
             // set its flag so we know to push its buffer when all is said and done
             _ringBuffers[i]->setWillBeAddedToMix(true);
 
             // calculate the average loudness for the next NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL
             // that would be mixed in
             _ringBuffers[i]->updateAverageLoudnessForBoundarySamples(NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
         }
     }
 }
@@ -25,7 +25,7 @@ public:
     AvatarAudioRingBuffer* getAvatarAudioRingBuffer() const;
 
     int parseData(const QByteArray& packet);
-    void checkBuffersBeforeFrameSend(int jitterBufferLengthSamples, float& currentMinLoudness, float& currentMaxLoudness);
+    void checkBuffersBeforeFrameSend(int jitterBufferLengthSamples);
     void pushBuffersAfterFrameSend();
 private:
     std::vector<PositionalAudioRingBuffer*> _ringBuffers;
@@ -12,6 +12,7 @@
 
 #include <QtCore/QCoreApplication>
 #include <QtCore/QElapsedTimer>
+#include <QtCore/QJsonObject>
 #include <QtCore/QTimer>
 
 #include <Logging.h>
@@ -29,7 +30,11 @@ const QString AVATAR_MIXER_LOGGING_NAME = "avatar-mixer";
 const unsigned int AVATAR_DATA_SEND_INTERVAL_USECS = (1 / 60.0) * 1000 * 1000;
 
 AvatarMixer::AvatarMixer(const QByteArray& packet) :
-    ThreadedAssignment(packet)
+    ThreadedAssignment(packet),
+    _trailingSleepRatio(1.0f),
+    _performanceThrottlingRatio(0.0f),
+    _sumListeners(0),
+    _numStatFrames(0)
 {
     // make sure we hear about node kills so we can tell the other nodes
     connect(NodeList::getInstance(), &NodeList::nodeKilled, this, &AvatarMixer::nodeKilled);
@@ -48,7 +53,7 @@ void attachAvatarDataToNode(Node* newNode) {
 //    3) if we need to rate limit the amount of data we send, we can use a distance weighted "semi-random" function to
 //       determine which avatars are included in the packet stream
 //    4) we should optimize the avatar data format to be more compact (100 bytes is pretty wasteful).
-void broadcastAvatarData() {
+void AvatarMixer::broadcastAvatarData() {
     static QByteArray mixedAvatarByteArray;
 
     int numPacketHeaderBytes = populatePacketHeader(mixedAvatarByteArray, PacketTypeBulkAvatarData);
@@ -57,6 +62,7 @@ void broadcastAvatarData() {
 
     foreach (const SharedNodePointer& node, nodeList->getNodeHash()) {
         if (node->getLinkedData() && node->getType() == NodeType::Agent && node->getActiveSocket()) {
+            ++_sumListeners;
 
             // reset packet pointers for this node
             mixedAvatarByteArray.resize(numPacketHeaderBytes);
@@ -78,7 +84,8 @@ void broadcastAvatarData() {
                 // at a distance of twice the full rate distance, there will be a 50% chance of sending this avatar's update
                 const float FULL_RATE_DISTANCE = 2.f;
                 // Decide whether to send this avatar's data based on it's distance from us
-                if ((distanceToAvatar == 0.f) || (randFloat() < FULL_RATE_DISTANCE / distanceToAvatar)) {
+                if ((distanceToAvatar == 0.f) || (randFloat() < FULL_RATE_DISTANCE / distanceToAvatar)
+                    * (1 - _performanceThrottlingRatio)) {
                     QByteArray avatarByteArray;
                     avatarByteArray.append(otherNode->getUUID().toRfc4122());
                     avatarByteArray.append(otherAvatar.toByteArray());
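Note: a hedged reading of the send-probability change above is that nearby avatars are always sent, farther ones are sent with a probability that falls off with distance, and the performance throttling ratio scales that probability down further. The C++ sketch below expresses that reading; randomUnit() is a hypothetical stand-in for the project's randFloat(), and this is an illustration of the intent rather than a copy of the diff's exact expression.

    #include <cstdlib>

    // Hypothetical helper returning a uniform value in [0, 1].
    static float randomUnit() {
        return (float) rand() / (float) RAND_MAX;
    }

    // Send always when on top of the avatar; otherwise with a distance- and
    // throttle-scaled probability.
    bool shouldSendAvatar(float distanceToAvatar, float performanceThrottlingRatio) {
        const float FULL_RATE_DISTANCE = 2.0f;
        return (distanceToAvatar == 0.0f)
            || (randomUnit() < (FULL_RATE_DISTANCE / distanceToAvatar) * (1.0f - performanceThrottlingRatio));
    }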
@@ -241,11 +248,24 @@ void AvatarMixer::readPendingDatagrams() {
     }
 }
 
+void AvatarMixer::sendStatsPacket() {
+    QJsonObject statsObject;
+    statsObject["average_listeners_last_second"] = (float) _sumListeners / (float) _numStatFrames;
+
+    statsObject["trailing_sleep_percentage"] = _trailingSleepRatio * 100;
+    statsObject["performance_throttling_ratio"] = _performanceThrottlingRatio;
+
+    ThreadedAssignment::addPacketStatsAndSendStatsPacket(statsObject);
+
+    _sumListeners = 0;
+    _numStatFrames = 0;
+}
+
 const qint64 AVATAR_IDENTITY_KEYFRAME_MSECS = 5000;
 const qint64 AVATAR_BILLBOARD_KEYFRAME_MSECS = 5000;
 
 void AvatarMixer::run() {
-    commonInit(AVATAR_MIXER_LOGGING_NAME, NodeType::AvatarMixer);
+    ThreadedAssignment::commonInit(AVATAR_MIXER_LOGGING_NAME, NodeType::AvatarMixer);
 
     NodeList* nodeList = NodeList::getInstance();
     nodeList->addNodeTypeToInterestSet(NodeType::Agent);
@@ -263,12 +283,57 @@ void AvatarMixer::run() {
     QElapsedTimer billboardTimer;
     billboardTimer.start();
 
+    int usecToSleep = AVATAR_DATA_SEND_INTERVAL_USECS;
+
+    const int TRAILING_AVERAGE_FRAMES = 100;
+    int framesSinceCutoffEvent = TRAILING_AVERAGE_FRAMES;
+
     while (!_isFinished) {
 
-        QCoreApplication::processEvents();
+        ++_numStatFrames;
 
-        if (_isFinished) {
-            break;
+        const float STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.10f;
+        const float BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.20f;
+
+        const float RATIO_BACK_OFF = 0.02f;
+
+        const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
+        const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
+
+        if (usecToSleep < 0) {
+            usecToSleep = 0;
+        }
+
+        _trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio)
+            + (usecToSleep * CURRENT_FRAME_RATIO / (float) AVATAR_DATA_SEND_INTERVAL_USECS);
+
+        float lastCutoffRatio = _performanceThrottlingRatio;
+        bool hasRatioChanged = false;
+
+        if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
+            if (_trailingSleepRatio <= STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD) {
+                // we're struggling - change our min required loudness to reduce some load
+                _performanceThrottlingRatio = _performanceThrottlingRatio + (0.5f * (1.0f - _performanceThrottlingRatio));
+
+                qDebug() << "Mixer is struggling, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
+                    << lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
+                hasRatioChanged = true;
+            } else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && _performanceThrottlingRatio != 0) {
+                // we've recovered and can back off the required loudness
+                _performanceThrottlingRatio = _performanceThrottlingRatio - RATIO_BACK_OFF;
+
+                if (_performanceThrottlingRatio < 0) {
+                    _performanceThrottlingRatio = 0;
+                }
+
+                qDebug() << "Mixer is recovering, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
+                    << lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
+                hasRatioChanged = true;
+            }
+
+            if (hasRatioChanged) {
+                framesSinceCutoffEvent = 0;
+            }
         }
 
         broadcastAvatarData();
@@ -286,7 +351,13 @@ void AvatarMixer::run() {
             billboardTimer.restart();
         }
 
-        int usecToSleep = usecTimestamp(&startTime) + (++nextFrame * AVATAR_DATA_SEND_INTERVAL_USECS) - usecTimestampNow();
+        QCoreApplication::processEvents();
+
+        if (_isFinished) {
+            break;
+        }
+
+        usecToSleep = usecTimestamp(&startTime) + (++nextFrame * AVATAR_DATA_SEND_INTERVAL_USECS) - usecTimestampNow();
 
         if (usecToSleep > 0) {
             usleep(usecToSleep);
@@ -24,6 +24,17 @@ public slots:
     void nodeKilled(SharedNodePointer killedNode);
 
     void readPendingDatagrams();
+
+    void sendStatsPacket();
+
+private:
+    void broadcastAvatarData();
+
+    float _trailingSleepRatio;
+    float _performanceThrottlingRatio;
+
+    int _sumListeners;
+    int _numStatFrames;
 };
 
 #endif /* defined(__hifi__AvatarMixer__) */
@@ -15,8 +15,10 @@
 
 int main(int argc, char* argv[]) {
 
+#ifndef WIN32
     setvbuf(stdout, NULL, _IOLBF, 0);
+#endif
 
     // use the verbose message handler in Logging
     qInstallMessageHandler(Logging::verboseMessageHandler);
 
@@ -35,7 +35,7 @@ public:
     virtual void run();
 
     virtual void readPendingDatagrams();
 
 private slots:
 
     void maybeAttachSession(const SharedNodePointer& node);
@@ -67,6 +67,14 @@ OctreeQueryNode::~OctreeQueryNode() {
 }
 
 
+void OctreeQueryNode::deleteLater() {
+    _isShuttingDown = true;
+    if (_octreeSendThread) {
+        _octreeSendThread->setIsShuttingDown();
+    }
+    OctreeQuery::deleteLater();
+}
+
 
 void OctreeQueryNode::initializeOctreeSendThread(OctreeServer* octreeServer, const QUuid& nodeUUID) {
     // Create octree sending thread...
@@ -27,6 +27,7 @@ class OctreeQueryNode : public OctreeQuery {
 public:
     OctreeQueryNode();
     virtual ~OctreeQueryNode();
+    virtual void deleteLater();
 
     void init(); // called after creation to set up some virtual items
     virtual PacketType getMyPacketType() const = 0;
@@ -5,6 +5,8 @@
 //  Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
 //
 
+#include <QMutexLocker>
+
 #include <NodeList.h>
 #include <PacketHeaders.h>
 #include <PerfStat.h>
@@ -21,7 +23,9 @@ OctreeSendThread::OctreeSendThread(const QUuid& nodeUUID, OctreeServer* myServer
     _nodeUUID(nodeUUID),
     _myServer(myServer),
     _packetData(),
-    _nodeMissingCount(0)
+    _nodeMissingCount(0),
+    _processLock(),
+    _isShuttingDown(false)
 {
     QString safeServerName("Octree");
     if (_myServer) {
@@ -43,8 +47,19 @@ OctreeSendThread::~OctreeSendThread() {
     OctreeServer::clientDisconnected();
 }
 
+void OctreeSendThread::setIsShuttingDown() {
+    QMutexLocker locker(&_processLock); // this will cause us to wait till the process loop is complete
+    _isShuttingDown = true;
+}
+
 
 bool OctreeSendThread::process() {
+    QMutexLocker locker(&_processLock);
+
+    if (_isShuttingDown) {
+        return false; // exit early if we're shutting down
+    }
+
     const int MAX_NODE_MISSING_CHECKS = 10;
     if (_nodeMissingCount > MAX_NODE_MISSING_CHECKS) {
         qDebug() << "our target node:" << _nodeUUID << "has been missing the last" << _nodeMissingCount
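Note: the hunk above adds a mutex-guarded shutdown handshake between the query node and its send thread. A minimal, hedged C++ sketch of that pattern follows; the class and member names here are hypothetical, not the project's.

    #include <QtCore/QMutex>
    #include <QtCore/QMutexLocker>

    // setIsShuttingDown() blocks until any in-flight process() call releases the
    // lock, so once it returns the worker will no longer touch data that is about
    // to be deleted; subsequent process() calls bail out immediately.
    class Worker {
    public:
        void setIsShuttingDown() {
            QMutexLocker locker(&_processLock); // waits for a running process() to finish
            _isShuttingDown = true;
        }

        bool process() {
            QMutexLocker locker(&_processLock);
            if (_isShuttingDown) {
                return false; // nothing else is safe to do
            }
            // ... normal per-frame work would go here ...
            return true;
        }

    private:
        QMutex _processLock;
        bool _isShuttingDown = false;
    };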
@@ -56,7 +71,10 @@ bool OctreeSendThread::process() {
 
     // don't do any send processing until the initial load of the octree is complete...
     if (_myServer->isInitialLoadComplete()) {
-        SharedNodePointer node = NodeList::getInstance()->nodeWithUUID(_nodeUUID);
+
+        // see if we can get access to our node, but don't wait on the lock, if the nodeList is busy
+        // it might not return a node that is known, but that's ok we can handle that case.
+        SharedNodePointer node = NodeList::getInstance()->nodeWithUUID(_nodeUUID, false);
+
         if (node) {
             _nodeMissingCount = 0;
@@ -113,19 +131,6 @@ int OctreeSendThread::handlePacketSend(const SharedNodePointer& node,
     bool packetSent = false; // did we send a packet?
     int packetsSent = 0;
 
-    // double check that the node has an active socket, otherwise, don't send...
-
-    quint64 lockWaitStart = usecTimestampNow();
-    QMutexLocker locker(&node->getMutex());
-    quint64 lockWaitEnd = usecTimestampNow();
-    float lockWaitElapsedUsec = (float)(lockWaitEnd - lockWaitStart);
-    OctreeServer::trackNodeWaitTime(lockWaitElapsedUsec);
-
-    const HifiSockAddr* nodeAddress = node->getActiveSocket();
-    if (!nodeAddress) {
-        return packetsSent; // without sending...
-    }
-
     // Here's where we check to see if this packet is a duplicate of the last packet. If it is, we will silently
     // obscure the packet and not send it. This allows the callers and upper level logic to not need to know about
     // this rate control savings.
@@ -23,6 +23,8 @@ class OctreeSendThread : public GenericThread {
 public:
     OctreeSendThread(const QUuid& nodeUUID, OctreeServer* myServer);
     virtual ~OctreeSendThread();
 
+    void setIsShuttingDown();
+
     static quint64 _totalBytes;
     static quint64 _totalWastedBytes;
@@ -45,6 +47,8 @@ private:
     OctreePacketData _packetData;
 
     int _nodeMissingCount;
+    QMutex _processLock; // don't allow us to have our nodeData, or our thread to be deleted while we're processing
+    bool _isShuttingDown;
 };
 
 #endif // __octree_server__OctreeSendThread__
@@ -236,7 +236,7 @@ void OctreeServer::initHTTPManager(int port) {
     _httpManager = new HTTPManager(port, documentRoot, this, this);
 }
 
-bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QString& path) {
+bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url) {
 
 #ifdef FORCE_CRASH
     if (connection->requestOperation() == QNetworkAccessManager::GetOperation
@@ -259,9 +259,9 @@ bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QString&
     bool showStats = false;
 
     if (connection->requestOperation() == QNetworkAccessManager::GetOperation) {
-        if (path == "/") {
+        if (url.path() == "/") {
             showStats = true;
-        } else if (path == "/resetStats") {
+        } else if (url.path() == "/resetStats") {
             _octreeInboundPacketProcessor->resetStats();
             resetSendingStats();
             showStats = true;
@@ -823,9 +823,9 @@ void OctreeServer::run() {
     _safeServerName = getMyServerName();
     // Before we do anything else, create our tree...
     _tree = createTree();
 
-    // change the logging target name while this is running
-    Logging::setTargetName(getMyLoggingServerTargetName());
+    // use common init to setup common timers and logging
+    commonInit(getMyLoggingServerTargetName(), getMyNodeType());
 
     // Now would be a good time to parse our arguments, if we got them as assignment
     if (getPayload().size() > 0) {
@@ -880,7 +880,9 @@ void OctreeServer::run() {
     // we need to ask the DS about agents so we can ping/reply with them
     nodeList->addNodeTypeToInterestSet(NodeType::Agent);
 
+#ifndef WIN32
     setvbuf(stdout, NULL, _IOLBF, 0);
+#endif
 
     nodeList->linkedDataCreateCallback = &OctreeServer::attachQueryNodeToNode;
 
@@ -986,14 +988,6 @@ void OctreeServer::run() {
         strftime(utcBuffer, MAX_TIME_LENGTH, " [%m/%d/%Y %X UTC]", gmtm);
     }
     qDebug() << "Now running... started at: " << localBuffer << utcBuffer;
-
-    QTimer* domainServerTimer = new QTimer(this);
-    connect(domainServerTimer, SIGNAL(timeout()), this, SLOT(checkInWithDomainServerOrExit()));
-    domainServerTimer->start(DOMAIN_SERVER_CHECK_IN_USECS / 1000);
-
-    QTimer* silentNodeTimer = new QTimer(this);
-    connect(silentNodeTimer, SIGNAL(timeout()), nodeList, SLOT(removeSilentNodes()));
-    silentNodeTimer->start(NODE_SILENCE_THRESHOLD_USECS / 1000);
 }
 
 void OctreeServer::nodeAdded(SharedNodePointer node) {
@@ -97,7 +97,7 @@ public:
     static void trackPacketSendingTime(float time);
     static float getAveragePacketSendingTime() { return _averagePacketSendingTime.getAverage(); }
 
-    bool handleHTTPRequest(HTTPConnection* connection, const QString& path);
+    bool handleHTTPRequest(HTTPConnection* connection, const QUrl& url);
 
     virtual void aboutToFinish();
 
@@ -46,7 +46,6 @@ public:
     virtual bool hasSpecialPacketToSend(const SharedNodePointer& node);
     virtual int sendSpecialPacket(const SharedNodePointer& node);
 
-
 private:
     bool _sendEnvironments;
     bool _sendMinimalEnvironment;
@@ -38,4 +38,8 @@
 
 span.port {
     color: #666666;
+}
+
+.stale {
+    color: red;
 }
@@ -1,3 +1,3 @@
 </div>
-<script src='js/jquery-2.0.3.min.js'></script>
-<script src="js/bootstrap.min.js"></script>
+<script src='/js/jquery-2.0.3.min.js'></script>
+<script src='/js/bootstrap.min.js'></script>
@@ -4,8 +4,8 @@
     <title>domain-server</title>
     <meta name="viewport" content="width=device-width, initial-scale=1.0">
     <!-- Bootstrap -->
-    <link href="css/bootstrap.min.css" rel="stylesheet" media="screen">
-    <link href="css/style.css" rel="stylesheet" media="screen">
+    <link href="/css/bootstrap.min.css" rel="stylesheet" media="screen">
+    <link href="/css/style.css" rel="stylesheet" media="screen">
   </head>
   <body>
     <div class="container">
@@ -7,7 +7,7 @@ $(document).ready(function(){
       $.each(json.nodes, function (uuid, data) {
         nodesTableBody += "<tr>";
         nodesTableBody += "<td>" + data.type + "</td>";
-        nodesTableBody += "<td>" + uuid + "</td>";
+        nodesTableBody += "<td><a href='stats/?uuid=" + uuid + "'>" + uuid + "</a></td>";
         nodesTableBody += "<td>" + (data.pool ? data.pool : "") + "</td>";
         nodesTableBody += "<td>" + data.public.ip + "<span class='port'>:" + data.public.port + "</span></td>";
         nodesTableBody += "<td>" + data.local.ip + "<span class='port'>:" + data.local.port + "</span></td>";
@@ -42,7 +42,7 @@ $(document).ready(function(){
   $(document.body).on('click', '.glyphicon-remove', function(){
     // fire off a delete for this node
     $.ajax({
-      url: "/node/" + $(this).data('uuid'),
+      url: "/nodes/" + $(this).data('uuid'),
       type: 'DELETE',
       success: function(result) {
         console.log("Succesful request to delete node.");
6  domain-server/resources/web/stats/index.shtml  (new file)
@@ -0,0 +1,6 @@
+<!--#include virtual="header.html"-->
+<div id="stats-lead" class="table-lead"><h3>Stats</h3><div class="lead-line"></div></div>
+<table id="stats-table" class="table"><tbody></tbody></table>
+<!--#include virtual="footer.html"-->
+<script src='js/stats.js'></script>
+<!--#include virtual="page-end.html"-->
41  domain-server/resources/web/stats/js/stats.js  (new file)
@@ -0,0 +1,41 @@
+function qs(key) {
+    key = key.replace(/[*+?^$.\[\]{}()|\\\/]/g, "\\$&"); // escape RegEx meta chars
+    var match = location.search.match(new RegExp("[?&]"+key+"=([^&]+)(&|$)"));
+    return match && decodeURIComponent(match[1].replace(/\+/g, " "));
+}
+
+$(document).ready(function(){
+  // setup a function to grab the nodeStats
+  function getNodeStats() {
+
+    var uuid = qs("uuid");
+
+    var statsTableBody = "";
+
+    $.getJSON("/nodes/" + uuid + ".json", function(json){
+
+      // update the table header with the right node type
+      $('#stats-lead h3').html(json.node_type + " stats (" + uuid + ")");
+
+      delete json.node_type;
+
+      $.each(json, function(key, value) {
+        statsTableBody += "<tr>";
+        statsTableBody += "<td>" + key + "</td>";
+        statsTableBody += "<td>" + value + "</td>";
+        statsTableBody += "</tr>";
+      });
+
+      $('#stats-table tbody').html(statsTableBody);
+    }).fail(function(data) {
+      $('#stats-table td').each(function(){
+        $(this).addClass('stale');
+      });
+    });
+  }
+
+  // do the first GET on page load
+  getNodeStats();
+  // grab the new assignments JSON every second
+  var getNodeStatsInterval = setInterval(getNodeStats, 1000);
+});
@@ -603,6 +603,11 @@ void DomainServer::readAvailableDatagrams() {
                 if (noisyMessage) {
                     lastNoisyMessage = timeNow;
                 }
+            } else if (requestType == PacketTypeNodeJsonStats) {
+                SharedNodePointer matchingNode = nodeList->sendingNodeForPacket(receivedPacket);
+                if (matchingNode) {
+                    reinterpret_cast<DomainServerNodeData*>(matchingNode->getLinkedData())->parseJSONStatsPacket(receivedPacket);
+                }
             }
         }
     }
@@ -646,14 +651,14 @@ QJsonObject DomainServer::jsonObjectForNode(const SharedNodePointer& node) {
     return nodeJson;
 }
 
-bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QString& path) {
+bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url) {
     const QString JSON_MIME_TYPE = "application/json";
 
     const QString URI_ASSIGNMENT = "/assignment";
-    const QString URI_NODE = "/node";
+    const QString URI_NODES = "/nodes";
 
     if (connection->requestOperation() == QNetworkAccessManager::GetOperation) {
-        if (path == "/assignments.json") {
+        if (url.path() == "/assignments.json") {
             // user is asking for json list of assignments
 
             // setup the JSON
@@ -697,7 +702,7 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QString&
 
             // we've processed this request
             return true;
-        } else if (path == "/nodes.json") {
+        } else if (url.path() == QString("%1.json").arg(URI_NODES)) {
             // setup the JSON
             QJsonObject rootJSON;
             QJsonObject nodesJSON;
@@ -718,14 +723,41 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QString&
 
             // send the response
             connection->respond(HTTPConnection::StatusCode200, nodesDocument.toJson(), qPrintable(JSON_MIME_TYPE));
 
+            return true;
+        } else {
+            const QString NODE_REGEX_STRING =
+                QString("\\%1\\/([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}).json\\/?$").arg(URI_NODES);
+            QRegExp nodeShowRegex(NODE_REGEX_STRING);
+
+            if (nodeShowRegex.indexIn(url.path()) != -1) {
+                QUuid matchingUUID = QUuid(nodeShowRegex.cap(1));
+
+                // see if we have a node that matches this ID
+                SharedNodePointer matchingNode = NodeList::getInstance()->nodeWithUUID(matchingUUID);
+                if (matchingNode) {
+                    // create a QJsonDocument with the stats QJsonObject
+                    QJsonObject statsObject =
+                        reinterpret_cast<DomainServerNodeData*>(matchingNode->getLinkedData())->getStatsJSONObject();
+
+                    // add the node type to the JSON data for output purposes
+                    statsObject["node_type"] = NodeType::getNodeTypeName(matchingNode->getType()).toLower().replace(' ', '-');
+
+                    QJsonDocument statsDocument(statsObject);
+
+                    // send the response
+                    connection->respond(HTTPConnection::StatusCode200, statsDocument.toJson(), qPrintable(JSON_MIME_TYPE));
+
+                    // tell the caller we processed the request
+                    return true;
+                }
+            }
         }
     } else if (connection->requestOperation() == QNetworkAccessManager::PostOperation) {
-        if (path == URI_ASSIGNMENT) {
+        if (url.path() == URI_ASSIGNMENT) {
             // this is a script upload - ask the HTTPConnection to parse the form data
             QList<FormData> formData = connection->parseFormData();
 
 
             // check how many instances of this assignment the user wants by checking the ASSIGNMENT-INSTANCES header
             const QString ASSIGNMENT_INSTANCES_HEADER = "ASSIGNMENT-INSTANCES";
 
@@ -765,13 +797,15 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QString&
 
             // respond with a 200 code for successful upload
             connection->respond(HTTPConnection::StatusCode200);
+
+            return true;
         }
     } else if (connection->requestOperation() == QNetworkAccessManager::DeleteOperation) {
-        if (path.startsWith(URI_NODE)) {
+        if (url.path().startsWith(URI_NODES)) {
             // this is a request to DELETE a node by UUID
 
             // pull the UUID from the url
-            QUuid deleteUUID = QUuid(path.mid(URI_NODE.size() + sizeof('/')));
+            QUuid deleteUUID = QUuid(url.path().mid(URI_NODES.size() + sizeof('/')));
 
             if (!deleteUUID.isNull()) {
                 SharedNodePointer nodeToKill = NodeList::getInstance()->nodeWithUUID(deleteUUID);
@@ -30,7 +30,7 @@ public:
 
     bool requiresAuthentication() const { return !_nodeAuthenticationURL.isEmpty(); }
 
-    bool handleHTTPRequest(HTTPConnection* connection, const QString& path);
+    bool handleHTTPRequest(HTTPConnection* connection, const QUrl& url);
 
     void exit(int retCode = 0);
 
@@ -6,11 +6,43 @@
 //  Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
 //
 
+#include <QtCore/QDataStream>
+#include <QtCore/QJsonObject>
+#include <QtCore/QVariant>
+
+#include <PacketHeaders.h>
+
 #include "DomainServerNodeData.h"
 
 DomainServerNodeData::DomainServerNodeData() :
     _sessionSecretHash(),
-    _staticAssignmentUUID()
+    _staticAssignmentUUID(),
+    _statsJSONObject()
 {
 
+}
+
+void DomainServerNodeData::parseJSONStatsPacket(const QByteArray& statsPacket) {
+    // push past the packet header
+    QDataStream packetStream(statsPacket);
+    packetStream.skipRawData(numBytesForPacketHeader(statsPacket));
+
+    QVariantMap unpackedVariantMap;
+
+    packetStream >> unpackedVariantMap;
+
+    QJsonObject unpackedStatsJSON = QJsonObject::fromVariantMap(unpackedVariantMap);
+    _statsJSONObject = mergeJSONStatsFromNewObject(unpackedStatsJSON, _statsJSONObject);
+}
+
+QJsonObject DomainServerNodeData::mergeJSONStatsFromNewObject(const QJsonObject& newObject, QJsonObject destinationObject) {
+    foreach(const QString& key, newObject.keys()) {
+        if (newObject[key].isObject() && destinationObject.contains(key)) {
+            destinationObject[key] = mergeJSONStatsFromNewObject(newObject[key].toObject(), destinationObject[key].toObject());
+        } else {
+            destinationObject[key] = newObject[key];
+        }
+    }
+
+    return destinationObject;
 }
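Note: the recursive merge added above means a later stats packet only overwrites the keys it actually carries; sibling keys from earlier packets survive. A hedged usage sketch follows. The merge function is restated locally so the example is self-contained, and the driver and its sample JSON are hypothetical, not from the repository.

    #include <QtCore/QDebug>
    #include <QtCore/QJsonDocument>
    #include <QtCore/QJsonObject>

    // Local restatement of the recursive key-by-key merge from the diff.
    QJsonObject mergeStats(const QJsonObject& newObject, QJsonObject destinationObject) {
        foreach(const QString& key, newObject.keys()) {
            if (newObject[key].isObject() && destinationObject.contains(key)) {
                destinationObject[key] = mergeStats(newObject[key].toObject(), destinationObject[key].toObject());
            } else {
                destinationObject[key] = newObject[key];
            }
        }
        return destinationObject;
    }

    int main() {
        QJsonObject previous = QJsonDocument::fromJson("{\"audio\":{\"frames\":10,\"listeners\":2}}").object();
        QJsonObject update = QJsonDocument::fromJson("{\"audio\":{\"frames\":11}}").object();

        // Prints {"audio":{"frames":11,"listeners":2}} - "listeners" survives the merge.
        qDebug() << QJsonDocument(mergeStats(update, previous)).toJson(QJsonDocument::Compact);
        return 0;
    }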
@@ -19,13 +19,20 @@ public:
     DomainServerNodeData();
     int parseData(const QByteArray& packet) { return 0; }
 
+    const QJsonObject& getStatsJSONObject() const { return _statsJSONObject; }
+
+    void parseJSONStatsPacket(const QByteArray& statsPacket);
+
     void setStaticAssignmentUUID(const QUuid& staticAssignmentUUID) { _staticAssignmentUUID = staticAssignmentUUID; }
     const QUuid& getStaticAssignmentUUID() const { return _staticAssignmentUUID; }
 
     QHash<QUuid, QUuid>& getSessionSecretHash() { return _sessionSecretHash; }
 private:
+    QJsonObject mergeJSONStatsFromNewObject(const QJsonObject& newObject, QJsonObject destinationObject);
+
     QHash<QUuid, QUuid> _sessionSecretHash;
     QUuid _staticAssignmentUUID;
+    QJsonObject _statsJSONObject;
 };
 
 #endif /* defined(__hifi__DomainServerNodeData__) */
@@ -20,10 +20,11 @@
 
 int main(int argc, char* argv[]) {
-
+#ifndef WIN32
     setvbuf(stdout, NULL, _IOLBF, 0);
+#endif
 
     qInstallMessageHandler(Logging::verboseMessageHandler);
 
     DomainServer domainServer(argc, argv);
 
     return domainServer.exec();
52  examples/audioDeviceExample.js  (new file)
@@ -0,0 +1,52 @@
+//
+//  audioDeviceExample.js
+//  hifi
+//
+//  Created by Brad Hefta-Gaub on 3/22/14
+//  Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
+//
+//  This is an example script that demonstrates use of the Menu object
+//
+
+
+var outputDevices = AudioDevice.getOutputDevices();
+var defaultOutputDevice = AudioDevice.getDefaultOutputDevice();
+var selectOutputDevice = outputDevices[0];
+print("Output Devices:");
+for(var i = 0; i < outputDevices.length; i++) {
+    if (outputDevices[i] == defaultOutputDevice) {
+        print("    " + outputDevices[i] + " << default");
+    } else {
+        print("    " + outputDevices[i]);
+    }
+}
+
+print("Default Output Device:" + defaultOutputDevice);
+print("Selected Output Device:" + selectOutputDevice);
+print("Current Audio Output Device: " + AudioDevice.getOutputDevice());
+AudioDevice.setOutputDevice(selectOutputDevice);
+print("Audio Output Device: " + AudioDevice.getOutputDevice());
+
+var inputDevices = AudioDevice.getInputDevices();
+var selectInputDevice = inputDevices[0];
+var defaultInputDevice = AudioDevice.getDefaultInputDevice();
+print("Input Devices:");
+for(var i = 0; i < inputDevices.length; i++) {
+    if (inputDevices[i] == defaultInputDevice) {
+        print("    " + inputDevices[i] + " << default");
+    } else {
+        print("    " + inputDevices[i]);
+    }
+}
+
+print("Default Input Device:" + defaultInputDevice);
+print("Selected Input Device:" + selectInputDevice);
+print("Current Audio Input Device: " + AudioDevice.getInputDevice());
+AudioDevice.setInputDevice(selectInputDevice);
+print("Audio Input Device: " + AudioDevice.getInputDevice());
+
+print("Audio Input Device Level: " + AudioDevice.getInputVolume());
+AudioDevice.setInputVolume(AudioDevice.getInputVolume() * 2); // twice as loud!
+print("Audio Input Device Level: " + AudioDevice.getInputVolume());
+
+Script.stop();
@@ -26,15 +26,25 @@ var CHANCE_OF_MOVING = 0.005;
 var CHANCE_OF_SOUND = 0.005;
 var CHANCE_OF_HEAD_TURNING = 0.05;
 var CHANCE_OF_BIG_MOVE = 0.1;
+var CHANCE_OF_WAVING = 0.005; // Currently this isn't working
+
+var shouldReceiveVoxels = true;
+var VOXEL_FPS = 60.0;
+var lastVoxelQueryTime = 0.0;
+
 var isMoving = false;
 var isTurningHead = false;
+var isPlayingAudio = false;
+var isWaving = false;
+var waveFrequency = 0.0;
+var waveAmplitude = 0.0;
 
 var X_MIN = 0.0;
 var X_MAX = 5.0;
 var Z_MIN = 0.0;
 var Z_MAX = 5.0;
 var Y_PELVIS = 2.5;
+var SHOULDER_JOINT_NUMBER = 15;
 
 var MOVE_RANGE_SMALL = 0.5;
 var MOVE_RANGE_BIG = Math.max(X_MAX - X_MIN, Z_MAX - Z_MIN) / 2.0;
@@ -51,6 +61,8 @@ var targetDirection = { x: 0, y: 0, z: 0, w: 0 };
 var currentDirection = { x: 0, y: 0, z: 0, w: 0 };
 var targetHeadPitch = 0.0;
 
+var cumulativeTime = 0.0;
+
 var sounds = [];
 loadSounds();
@@ -100,13 +112,37 @@ Agent.isListeningToAudioStream = true;
 Avatar.position = firstPosition;
 printVector("New bot, position = ", Avatar.position);
 
+function stopWaving() {
+    isWaving = false;
+    Avatar.clearJointData(SHOULDER_JOINT_NUMBER);
+}
+
 function updateBehavior(deltaTime) {
-    if (Math.random() < CHANCE_OF_SOUND) {
-        playRandomSound();
+    cumulativeTime += deltaTime;
+
+    if (shouldReceiveVoxels && ((cumulativeTime - lastVoxelQueryTime) > (1.0 / VOXEL_FPS))) {
+        VoxelViewer.setPosition(Avatar.position);
+        VoxelViewer.setOrientation(Avatar.orientation);
+        VoxelViewer.queryOctree();
+        lastVoxelQueryTime = cumulativeTime;
+        /*
+        if (Math.random() < (1.0 / VOXEL_FPS)) {
+            print("Voxels in view = " + VoxelViewer.getOctreeElementsCount());
+        }*/
     }
-    if (Agent.isPlayingAvatarSound) {
-        Avatar.handPosition = Vec3.sum(Avatar.position, Quat.getFront(Avatar.orientation));
+
+    if (!isWaving && (Math.random() < CHANCE_OF_WAVING)) {
+        isWaving = true;
+        waveFrequency = 1.0 + Math.random() * 5.0;
+        waveAmplitude = 5.0 + Math.random() * 60.0;
+        Script.setTimeout(stopWaving, 1000 + Math.random() * 2000);
+    } else if (isWaving) {
+        Avatar.setJointData(SHOULDER_JOINT_NUMBER, Quat.fromPitchYawRollDegrees(0.0, 0.0, waveAmplitude * Math.sin(cumulativeTime * waveFrequency)));
+    }
+
+    if (Math.random() < CHANCE_OF_SOUND) {
+        playRandomSound();
     }
 
     if (!isTurningHead && (Math.random() < CHANCE_OF_HEAD_TURNING)) {
@@ -12,6 +12,10 @@ var AMPLITUDE = 45.0;
 
 var cumulativeTime = 0.0;
 
+print("Joint List:");
+var jointList = MyAvatar.getJointNames();
+print(jointList);
+
 Script.update.connect(function(deltaTime) {
     cumulativeTime += deltaTime;
     MyAvatar.setJointData("joint_R_hip", Quat.fromPitchYawRollDegrees(0.0, 0.0, AMPLITUDE * Math.sin(cumulativeTime * FREQUENCY)));
5  examples/defaultScripts.js  Normal file
@@ -0,0 +1,5 @@
//  defaultScripts.js
//  Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
Script.include("lookWithTouch.js");
Script.include("editVoxels.js");
Script.include("selectAudioDevice.js");
@@ -32,8 +32,6 @@ var MIN_PASTE_VOXEL_SCALE = .256;
 var zFightingSizeAdjust = 0.002; // used to adjust preview voxels to prevent z fighting
 var previewLineWidth = 1.5;
 
-var oldMode = Camera.getMode();
-
 var isAdding = false;
 var isExtruding = false;
 var extrudeDirection = { x: 0, y: 0, z: 0 };
@@ -614,8 +612,6 @@ function showPreviewVoxel() {
     var guidePosition;
     if (trackAsRecolor || recolorToolSelected || trackAsEyedropper || eyedropperToolSelected) {
         Overlays.editOverlay(voxelPreview, { visible: true });
-    } else if (trackAsOrbitOrPan) {
-        Overlays.editOverlay(voxelPreview, { visible: false });
     } else if (voxelToolSelected && !isExtruding) {
         Overlays.editOverlay(voxelPreview, { visible: true });
     } else if (isExtruding) {
@@ -706,15 +702,12 @@ function showPreviewGuides() {
 }
 
 function trackMouseEvent(event) {
-    if (!trackAsOrbitOrPan) {
-        trackLastMouseX = event.x;
-        trackLastMouseY = event.y;
-        trackAsDelete = event.isControl;
-        trackAsRecolor = event.isShifted;
-        trackAsEyedropper = event.isMeta;
-        trackAsOrbitOrPan = event.isAlt; // TODO: double check this...??
-        showPreviewGuides();
-    }
+    trackLastMouseX = event.x;
+    trackLastMouseY = event.y;
+    trackAsDelete = event.isControl;
+    trackAsRecolor = event.isShifted;
+    trackAsEyedropper = event.isMeta;
+    showPreviewGuides();
 }
 
 function trackKeyPressEvent(event) {
@@ -742,6 +735,7 @@ function trackKeyReleaseEvent(event) {
     if (event.text == "TAB") {
         editToolsOn = !editToolsOn;
         moveTools();
+        showPreviewGuides();
         Audio.playSound(clickSound, audioOptions);
     }
@@ -788,68 +782,64 @@ function mousePressEvent(event) {
         return;
     }
 
-    // no clicking on overlays while in panning mode
-    if (!trackAsOrbitOrPan) {
     var clickedOnSomething = false;
     var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
 
     // If the user clicked on the thumb, handle the slider logic
     if (clickedOverlay == thumb) {
         isMovingSlider = true;
         thumbClickOffsetX = event.x - (sliderX + thumbX); // this should be the position of the mouse relative to the thumb
         clickedOnSomething = true;
 
         Overlays.editOverlay(thumb, { imageURL: toolIconUrl + "voxel-size-slider-handle.svg", });
 
     } else if (clickedOverlay == voxelTool) {
         voxelToolSelected = true;
         recolorToolSelected = false;
         eyedropperToolSelected = false;
         moveTools();
         clickedOnSomething = true;
     } else if (clickedOverlay == recolorTool) {
         voxelToolSelected = false;
         recolorToolSelected = true;
         eyedropperToolSelected = false;
         moveTools();
         clickedOnSomething = true;
     } else if (clickedOverlay == eyedropperTool) {
         voxelToolSelected = false;
         recolorToolSelected = false;
         eyedropperToolSelected = true;
         moveTools();
         clickedOnSomething = true;
     } else if (clickedOverlay == slider) {
 
         if (event.x < sliderX + minThumbX) {
             thumbX -= thumbDeltaPerStep;
             calcScaleFromThumb(thumbX);
         }
 
         if (event.x > sliderX + maxThumbX) {
             thumbX += thumbDeltaPerStep;
             calcScaleFromThumb(thumbX);
         }
 
         moveTools();
         clickedOnSomething = true;
     } else {
         // if the user clicked on one of the color swatches, update the selectedSwatch
         for (s = 0; s < numColors; s++) {
             if (clickedOverlay == swatches[s]) {
                 whichColor = s;
                 moveTools();
                 clickedOnSomething = true;
                 break;
             }
         }
     }
     if (clickedOnSomething) {
         return; // no further processing
     }
-    }
 
     // TODO: does any of this stuff need to execute if we're panning or orbiting?
     trackMouseEvent(event); // used by preview support
     mouseX = event.x;
@@ -1072,7 +1062,7 @@ function mouseMoveEvent(event) {
     }
 
 
-    if (!trackAsOrbitOrPan && isMovingSlider) {
+    if (isMovingSlider) {
         thumbX = (event.x - thumbClickOffsetX) - sliderX;
         if (thumbX < minThumbX) {
             thumbX = minThumbX;
@@ -1082,7 +1072,7 @@ function mouseMoveEvent(event) {
         }
         calcScaleFromThumb(thumbX);
 
-    } else if (!trackAsOrbitOrPan && isAdding) {
+    } else if (isAdding) {
         // Watch the drag direction to tell which way to 'extrude' this voxel
         if (!isExtruding) {
             var pickRay = Camera.computePickRay(event.x, event.y);
@@ -1113,7 +1103,6 @@ function mouseMoveEvent(event) {
             var dy = event.y - mouseY;
             if (Math.sqrt(dx*dx + dy*dy) > PIXELS_PER_EXTRUDE_VOXEL) {
                 lastVoxelPosition = Vec3.sum(lastVoxelPosition, extrudeDirection);
-                Voxels.eraseVoxel(voxelDetails.x, voxelDetails.y, voxelDetails.z, voxelDetails.s);
                 Voxels.setVoxel(lastVoxelPosition.x, lastVoxelPosition.y, lastVoxelPosition.z,
                     extrudeScale, lastVoxelColor.red, lastVoxelColor.green, lastVoxelColor.blue);
                 mouseX = event.x;
@@ -12,6 +12,11 @@
 //
 //
 
+function getRandomFloat(min, max) {
+    return Math.random() * (max - min) + min;
+}
+
 var lastX = 0;
 var lastY = 0;
 var yawFromMouse = 0;
@@ -19,17 +24,22 @@ var pitchFromMouse = 0;
 var isMouseDown = false;
 
 var BULLET_VELOCITY = 5.0;
+var MIN_THROWER_DELAY = 1000;
+var MAX_THROWER_DELAY = 1000;
 var LEFT_BUTTON_3 = 3;
 
 // Load some sound to use for loading and firing
 var fireSound = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Guns/GUN-SHOT2.raw");
 var loadSound = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Guns/Gun_Reload_Weapon22.raw");
 var impactSound = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Guns/BulletImpact2.raw");
-var targetLaunchSound = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Guns/GUN-SHOT2.raw");
+var targetHitSound = new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/Space%20Invaders/hit.raw");
+var targetLaunchSound = new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/Space%20Invaders/shoot.raw");
 
 var audioOptions = new AudioInjectionOptions();
 audioOptions.volume = 0.9;
 
+var shotTime = new Date();
+
 // initialize our triggers
 var triggerPulled = new Array();
 var numberOfTriggers = Controller.getNumberOfTriggers();
@@ -94,7 +104,9 @@ function shootTarget() {
     var DISTANCE_TO_LAUNCH_FROM = 3.0;
     var camera = Camera.getPosition();
     //printVector("camera", camera);
-    var forwardVector = Quat.getFront(Camera.getOrientation());
+    var targetDirection = Quat.angleAxis(getRandomFloat(-20.0, 20.0), { x:0, y:1, z:0 });
+    targetDirection = Quat.multiply(Camera.getOrientation(), targetDirection);
+    var forwardVector = Quat.getFront(targetDirection);
     //printVector("forwardVector", forwardVector);
     var newPosition = Vec3.sum(camera, Vec3.multiply(forwardVector, DISTANCE_TO_LAUNCH_FROM));
     //printVector("newPosition", newPosition);
@@ -111,6 +123,9 @@ function shootTarget() {
                     lifetime: 1000.0,
                     damping: 0.99 });
 
+    // Record start time
+    shotTime = new Date();
+
     // Play target shoot sound
     audioOptions.position = newPosition;
     Audio.playSound(targetLaunchSound, audioOptions);
@@ -119,31 +134,43 @@ function shootTarget() {
 
 
 function particleCollisionWithVoxel(particle, voxel, penetration) {
-    Vec3.print('particleCollisionWithVoxel() ... penetration=', penetration);
-
     var HOLE_SIZE = 0.125;
     var particleProperties = Particles.getParticleProperties(particle);
     var position = particleProperties.position;
     Particles.deleteParticle(particle);
     // Make a hole in this voxel
+    Vec3.print("penetration", penetration);
+    Vec3.print("position", position);
+    var pointOfEntry = Vec3.subtract(position, penetration);
+    Vec3.print("pointOfEntry", pointOfEntry);
+    Voxels.eraseVoxel(pointOfEntry.x, pointOfEntry.y, pointOfEntry.z, HOLE_SIZE);
     Voxels.eraseVoxel(position.x, position.y, position.z, HOLE_SIZE);
     //audioOptions.position = position;
     audioOptions.position = Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()));
-    Audio.playSound(impactSound, audioOptions);
+    Audio.playSound(targetHitSound, audioOptions);
 }
 
 function particleCollisionWithParticle(particle1, particle2) {
-    print("Particle/Particle!");
     score++;
     Overlays.editOverlay(text, { text: "Score: " + score } );
+
+    // Sort out which particle is which
+
+    // Record shot time
+    var endTime = new Date();
+    var msecs = endTime.valueOf() - shotTime.valueOf();
+    print("hit, msecs = " + msecs);
+
     Particles.deleteParticle(particle1);
     Particles.deleteParticle(particle2);
+    audioOptions.position = newPosition;
+    audioOptions.position = Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()));
+    Audio.playSound(targetHitSound, audioOptions);
 }
 
 function keyPressEvent(event) {
     // if our tools are off, then don't do anything
     if (event.text == "t") {
-        shootTarget();
+        var time = MIN_THROWER_DELAY + Math.random() * MAX_THROWER_DELAY;
+        Script.setTimeout(shootTarget, time);
     }
 }
@@ -164,7 +191,8 @@ function update(deltaTime) {
     // Check hydra controller for launch button press
     if (!isLaunchButtonPressed && Controller.isButtonPressed(LEFT_BUTTON_3)) {
         isLaunchButtonPressed = true;
-        shootTarget();
+        var time = MIN_THROWER_DELAY + Math.random() * MAX_THROWER_DELAY;
+        Script.setTimeout(shootTarget, time);
     } else if (isLaunchButtonPressed && !Controller.isButtonPressed(LEFT_BUTTON_3)) {
         isLaunchButtonPressed = false;
@@ -14,9 +14,11 @@
 //  Dragging the mouse will move your camera according to the mode you are in.
 //
 
+var PI = 3.14 // No need for something more precise
+
 var AZIMUTH_RATE = 90.0;
 var ALTITUDE_RATE = 200.0;
-var RADIUS_RATE = 20.0;
+var RADIUS_RATE = 1.0 / 100.0;
 var PAN_RATE = 50.0;
 
 var alt = false;
@@ -46,7 +48,7 @@ var altitude = 0.0;
 
 function handleRadialMode(dx, dy) {
     azimuth += dx / AZIMUTH_RATE;
-    radius += radius * dy / RADIUS_RATE;
+    radius += radius * dy * RADIUS_RATE;
     if (radius < 1) {
         radius = 1;
     }
@@ -61,6 +63,12 @@ function handleRadialMode(dx, dy) {
 function handleOrbitMode(dx, dy) {
     azimuth += dx / AZIMUTH_RATE;
     altitude += dy / ALTITUDE_RATE;
+    if (altitude > PI / 2.0) {
+        altitude = PI / 2.0;
+    }
+    if (altitude < -PI / 2.0) {
+        altitude = -PI / 2.0;
+    }
 
     vector = { x:(Math.cos(altitude) * Math.cos(azimuth)) * radius,
                y:Math.sin(altitude) * radius,
@@ -165,7 +173,7 @@ function keyReleaseEvent(event) {
 }
 
 function mousePressEvent(event) {
-    if (alt) {
+    if (alt && !isActive) {
         isActive = true;
         mouseLastX = event.x;
         mouseLastY = event.y;
@@ -32,6 +32,7 @@ function setupMenus() {
     Menu.addSeparator("Foo","Removable Tools");
     Menu.addMenuItem("Foo","Remove Foo item 4");
     Menu.addMenuItem("Foo","Remove Foo");
+    Menu.addMenuItem("Foo","Remove Bar-Spam");
     Menu.addMenu("Bar");
 
     Menu.addMenuItem("Bar","Bar item 1", "b");
@@ -91,6 +92,10 @@ function menuItemEvent(menuItem) {
     if (menuItem == "Remove Foo") {
         Menu.removeMenu("Foo");
     }
+    if (menuItem == "Remove Bar-Spam") {
+        Menu.removeMenu("Bar > Spam");
+    }
+
     if (menuItem == "Remove Spam item 2") {
         Menu.removeMenuItem("Bar > Spam", "Spam item 2");
     }
114  examples/selectAudioDevice.js  Normal file
@@ -0,0 +1,114 @@
//
//  audioDeviceExample.js
//  hifi
//
//  Created by Brad Hefta-Gaub on 3/22/14
//  Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
//
//  This is an example script that demonstrates use of the Menu object
//

if (typeof String.prototype.startsWith != 'function') {
    String.prototype.startsWith = function (str){
        return this.slice(0, str.length) == str;
    };
}

if (typeof String.prototype.endsWith != 'function') {
    String.prototype.endsWith = function (str){
        return this.slice(-str.length) == str;
    };
}

if (typeof String.prototype.trimStartsWith != 'function') {
    String.prototype.trimStartsWith = function (str){
        if (this.startsWith(str)) {
            return this.substr(str.length);
        }
        return this;
    };
}

if (typeof String.prototype.trimEndsWith != 'function') {
    String.prototype.trimEndsWith = function (str){
        if (this.endsWith(str)) {
            return this.substr(0,this.length - str.length);
        }
        return this;
    };
}

var selectedInputMenu = "";
var selectedOutputMenu = "";

function setupAudioMenus() {
    Menu.addMenu("Tools > Audio");
    Menu.addSeparator("Tools > Audio","Output Audio Device");

    var outputDevices = AudioDevice.getOutputDevices();
    var selectedOutputDevice = AudioDevice.getOutputDevice();

    for(var i = 0; i < outputDevices.length; i++) {
        var thisDeviceSelected = (outputDevices[i] == selectedOutputDevice);
        var menuItem = "Use " + outputDevices[i] + " for Output";
        Menu.addMenuItem({
            menuName: "Tools > Audio",
            menuItemName: menuItem,
            isCheckable: true,
            isChecked: thisDeviceSelected
        });
        if (thisDeviceSelected) {
            selectedOutputMenu = menuItem;
        }
    }

    Menu.addSeparator("Tools > Audio","Input Audio Device");

    var inputDevices = AudioDevice.getInputDevices();
    var selectedInputDevice = AudioDevice.getInputDevice();

    for(var i = 0; i < inputDevices.length; i++) {
        var thisDeviceSelected = (inputDevices[i] == selectedInputDevice);
        var menuItem = "Use " + inputDevices[i] + " for Input";
        Menu.addMenuItem({
            menuName: "Tools > Audio",
            menuItemName: menuItem,
            isCheckable: true,
            isChecked: thisDeviceSelected
        });
        if (thisDeviceSelected) {
            selectedInputMenu = menuItem;
        }
    }
}

setupAudioMenus();

function scriptEnding() {
    Menu.removeMenu("Tools > Audio");
}
Script.scriptEnding.connect(scriptEnding);


function menuItemEvent(menuItem) {
    if (menuItem.startsWith("Use ")) {
        if (menuItem.endsWith(" for Output")) {
            var selectedDevice = menuItem.trimStartsWith("Use ").trimEndsWith(" for Output");
            print("output audio selection..." + selectedDevice);
            Menu.setIsOptionChecked(selectedOutputMenu, false);
            selectedOutputMenu = menuItem;
            Menu.setIsOptionChecked(selectedOutputMenu, true);
            AudioDevice.setOutputDevice(selectedDevice);

        } else if (menuItem.endsWith(" for Input")) {
            var selectedDevice = menuItem.trimStartsWith("Use ").trimEndsWith(" for Input");
            print("input audio selection..." + selectedDevice);
            Menu.setIsOptionChecked(selectedInputMenu, false);
            selectedInputMenu = menuItem;
            Menu.setIsOptionChecked(selectedInputMenu, true);
            AudioDevice.setInputDevice(selectedDevice);
        }
    }
}

Menu.menuItemEvent.connect(menuItemEvent);
18  examples/settingsExample.js  Normal file
@@ -0,0 +1,18 @@
//
//  settingsExample.js
//  hifi
//
//  Created by Brad Hefta-Gaub on 3/22/14
//  Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
//
//  This is an example script that demonstrates use of the Menu object
//


print("mySetting: " + Settings.getValue("mySetting"));
Settings.setValue("mySetting", "spam");
print("mySetting: " + Settings.getValue("mySetting"));

Script.stop();
@@ -1,133 +0,0 @@
-//
-//  This sample script moves a voxel around like a bird and sometimes makes tweeting noises
-//
-
-function vLength(v) {
-    return Math.sqrt(v.x * v.x + v.y * v.y + v.z * v.z);
-}
-
-function printVector(v) {
-    print(v.x + ", " + v.y + ", " + v.z + "\n");
-}
-
-//  Create a random vector with individual lengths between a,b
-function randVector(a, b) {
-    var rval = { x: a + Math.random() * (b - a), y: a + Math.random() * (b - a), z: a + Math.random() * (b - a) };
-    return rval;
-}
-
-function vMinus(a, b) {
-    var rval = { x: a.x - b.x, y: a.y - b.y, z: a.z - b.z };
-    return rval;
-}
-
-function vPlus(a, b) {
-    var rval = { x: a.x + b.x, y: a.y + b.y, z: a.z + b.z };
-    return rval;
-}
-
-function vCopy(a, b) {
-    a.x = b.x;
-    a.y = b.y;
-    a.z = b.z;
-    return;
-}
-
-//  Returns a vector which is fraction of the way between a and b
-function vInterpolate(a, b, fraction) {
-    var rval = { x: a.x + (b.x - a.x) * fraction, y: a.y + (b.y - a.y) * fraction, z: a.z + (b.z - a.z) * fraction };
-    return rval;
-}
-
-//  Decide what kind of bird we are
-var tweet;
-
-var which = Math.random();
-if (which < 0.2) {
-    tweet = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/bushtit_1.raw");
-} else if (which < 0.4) {
-    tweet = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/rosyfacedlovebird.raw");
-} else if (which < 0.6) {
-    tweet = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/saysphoebe.raw");
-} else if (which < 0.8) {
-    tweet = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/mexicanWhipoorwill.raw");
-} else {
-    tweet = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/westernscreechowl.raw");
-}
-
-var position = { x: 0, y: 0, z: 0 };
-var lastPosition = { x: 0, y: 0, z: 0 };
-var oldPosition = { x: 0, y: 0, z:0 };
-var targetPosition = { x: 0, y: 0, z: 0 };
-
-var size = 0.125;
-var range = 50.0;   //  Over what distance in meters do you want your bird to fly around
-var color = { r: 100, g: 50, b: 150 };
-var colorEdge = { r:255, g:250, b:175 };
-var frame = 0;
-var thisColor = color;
-var moving = false;
-var tweeting = 0;
-var moved = true;
-
-var CHANCE_OF_MOVING = 0.05;
-var CHANCE_OF_TWEETING = 0.05;
-
-function moveBird(deltaTime) {
-    frame++;
-    if (frame % 3 == 0) {
-        // Tweeting behavior
-        if (tweeting == 0) {
-            if (Math.random() < CHANCE_OF_TWEETING) {
-                //print("tweet!" + "\n");
-                var options = new AudioInjectionOptions();
-                options.position = position;
-                options.volume = 0.75;
-                Audio.playSound(tweet, options);
-                tweeting = 10;
-            }
-        } else {
-            tweeting -= 1;
-        }
-        // Moving behavior
-        if (moving == false) {
-            if (Math.random() < CHANCE_OF_MOVING) {
-                targetPosition = randVector(0, range);
-                //printVector(position);
-                moving = true;
-            }
-        }
-        if (moving) {
-            position = vInterpolate(position, targetPosition, 0.5);
-            if (vLength(vMinus(position, targetPosition)) < (size / 2.0)) {
-                moved = false;
-                moving = false;
-            } else {
-                moved = true;
-            }
-        }
-
-        if (tweeting > 0) {
-            // Change color of voxel to blinky red a bit while playing the sound
-            var blinkColor = { r: Math.random() * 255, g: 0, b: 0 };
-            Voxels.setVoxel(position.x,
-                            position.y,
-                            position.z,
-                            size,
-                            blinkColor.r, blinkColor.g, blinkColor.b);
-        }
-        if (moved) {
-            Voxels.setVoxel(position.x, position.y, position.z, size, thisColor.r, thisColor.g, thisColor.b);
-            // delete old voxel
-
-            Voxels.eraseVoxel(oldPosition.x, oldPosition.y, oldPosition.z, size);
-            //  Copy old location to new
-            vCopy(oldPosition, position);
-            moved = false;
-        }
-    }
-}
-
-Voxels.setPacketsPerSecond(10000);
-// Connect a call back that happens every frame
-Script.update.connect(moveBird);
@@ -48,7 +48,7 @@ configure_file(InterfaceVersion.h.in "${PROJECT_BINARY_DIR}/includes/InterfaceVe
 
 # grab the implementation and header files from src dirs
 file(GLOB INTERFACE_SRCS src/*.cpp src/*.h)
-foreach(SUBDIR avatar devices renderer ui starfield location)
+foreach(SUBDIR avatar devices renderer ui starfield location scripting voxels)
   file(GLOB_RECURSE SUBDIR_SRCS src/${SUBDIR}/*.cpp src/${SUBDIR}/*.h)
   set(INTERFACE_SRCS ${INTERFACE_SRCS} "${SUBDIR_SRCS}")
 endforeach(SUBDIR)
@@ -4,22 +4,22 @@
 <context>
     <name>Application</name>
     <message>
-        <location filename="src/Application.cpp" line="1364"/>
+        <location filename="src/Application.cpp" line="1368"/>
         <source>Export Voxels</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="src/Application.cpp" line="1365"/>
+        <location filename="src/Application.cpp" line="1369"/>
         <source>Sparse Voxel Octree Files (*.svo)</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="src/Application.cpp" line="3577"/>
+        <location filename="src/Application.cpp" line="3596"/>
         <source>Open Script</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="src/Application.cpp" line="3578"/>
+        <location filename="src/Application.cpp" line="3597"/>
         <source>JavaScript Files (*.js)</source>
         <translation type="unfinished"></translation>
     </message>
@@ -113,18 +113,18 @@
 <context>
     <name>Menu</name>
     <message>
-        <location filename="src/Menu.cpp" line="439"/>
+        <location filename="src/Menu.cpp" line="449"/>
         <source>Open .ini config file</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="src/Menu.cpp" line="441"/>
-        <location filename="src/Menu.cpp" line="453"/>
+        <location filename="src/Menu.cpp" line="451"/>
+        <location filename="src/Menu.cpp" line="463"/>
         <source>Text files (*.ini)</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="src/Menu.cpp" line="451"/>
+        <location filename="src/Menu.cpp" line="461"/>
         <source>Save .ini config file</source>
         <translation type="unfinished"></translation>
     </message>
@@ -132,28 +132,28 @@
 <context>
     <name>QObject</name>
     <message>
-        <location filename="src/ImportDialog.cpp" line="22"/>
-        <location filename="src/ImportDialog.cpp" line="23"/>
+        <location filename="src/ui/ImportDialog.cpp" line="22"/>
+        <location filename="src/ui/ImportDialog.cpp" line="23"/>
         <source>Import Voxels</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="src/ImportDialog.cpp" line="24"/>
+        <location filename="src/ui/ImportDialog.cpp" line="24"/>
         <source>Loading ...</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="src/ImportDialog.cpp" line="25"/>
+        <location filename="src/ui/ImportDialog.cpp" line="25"/>
         <source>Place voxels</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="src/ImportDialog.cpp" line="26"/>
+        <location filename="src/ui/ImportDialog.cpp" line="26"/>
         <source><b>Import</b> %1 as voxels</source>
         <translation type="unfinished"></translation>
     </message>
     <message>
-        <location filename="src/ImportDialog.cpp" line="27"/>
+        <location filename="src/ui/ImportDialog.cpp" line="27"/>
         <source>Cancel</source>
         <translation type="unfinished"></translation>
     </message>
14  interface/resources/shaders/model_shadow.frag  Normal file
@@ -0,0 +1,14 @@
#version 120

//
//  model_shadow.frag
//  fragment shader
//
//  Created by Andrzej Kapolka on 3/24/14.
//  Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
//

void main(void) {
    // fixed color for now (we may eventually want to use texture alpha)
    gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);
}
14  interface/resources/shaders/model_shadow.vert  Normal file
@@ -0,0 +1,14 @@
#version 120

//
//  model_shadow.vert
//  vertex shader
//
//  Created by Andrzej Kapolka on 3/24/14.
//  Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
//

void main(void) {
    // just use standard pipeline transform
    gl_Position = ftransform();
}
27  interface/resources/shaders/skin_model_shadow.vert  Normal file
@@ -0,0 +1,27 @@
#version 120

//
//  skin_model_shadow.vert
//  vertex shader
//
//  Created by Andrzej Kapolka on 3/24/14.
//  Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
//

const int MAX_CLUSTERS = 128;
const int INDICES_PER_VERTEX = 4;

uniform mat4 clusterMatrices[MAX_CLUSTERS];

attribute vec4 clusterIndices;
attribute vec4 clusterWeights;

void main(void) {
    vec4 position = vec4(0.0, 0.0, 0.0, 0.0);
    for (int i = 0; i < INDICES_PER_VERTEX; i++) {
        mat4 clusterMatrix = clusterMatrices[int(clusterIndices[i])];
        float clusterWeight = clusterWeights[i];
        position += clusterMatrix * gl_Vertex * clusterWeight;
    }
    gl_Position = gl_ModelViewProjectionMatrix * position;
}
@@ -40,7 +40,7 @@ detect_strip_roi_width 2
 detect_strip_roi_height 4
 
 smoothing_factors
-150 5 -2 100 -1 50 50 0
+5 25 -2 100 -1 50 25 0
 #translation rotation action_units eyebrows mouth gaze eye_closure other
 
 process_eyes 1
@@ -65,17 +65,21 @@
 #include <FstReader.h>
 
 #include "Application.h"
-#include "ClipboardScriptingInterface.h"
 #include "InterfaceVersion.h"
 #include "Menu.h"
-#include "MenuScriptingInterface.h"
 #include "Util.h"
 #include "devices/OculusManager.h"
 #include "devices/TV3DManager.h"
 #include "renderer/ProgramObject.h"
-#include "ui/TextRenderer.h"
-#include "InfoView.h"
+#include "scripting/AudioDeviceScriptingInterface.h"
+#include "scripting/ClipboardScriptingInterface.h"
+#include "scripting/MenuScriptingInterface.h"
+#include "scripting/SettingsScriptingInterface.h"
+
+#include "ui/InfoView.h"
 #include "ui/Snapshot.h"
+#include "ui/TextRenderer.h"
 #include "ui/Stats.h"
 
 using namespace std;
@@ -2171,21 +2175,22 @@ void Application::updateShadowMap() {
     glViewport(0, 0, fbo->width(), fbo->height());
 
     glm::vec3 lightDirection = -getSunDirection();
-    glm::quat rotation = glm::inverse(rotationBetween(IDENTITY_FRONT, lightDirection));
-    glm::vec3 translation = glm::vec3();
+    glm::quat rotation = rotationBetween(IDENTITY_FRONT, lightDirection);
+    glm::quat inverseRotation = glm::inverse(rotation);
     float nearScale = 0.0f;
     const float MAX_SHADOW_DISTANCE = 2.0f;
-    float farScale = (MAX_SHADOW_DISTANCE - _viewFrustum.getNearClip()) / (_viewFrustum.getFarClip() - _viewFrustum.getNearClip());
+    float farScale = (MAX_SHADOW_DISTANCE - _viewFrustum.getNearClip()) /
+        (_viewFrustum.getFarClip() - _viewFrustum.getNearClip());
     loadViewFrustum(_myCamera, _viewFrustum);
     glm::vec3 points[] = {
-        rotation * (glm::mix(_viewFrustum.getNearTopLeft(), _viewFrustum.getFarTopLeft(), nearScale) + translation),
-        rotation * (glm::mix(_viewFrustum.getNearTopRight(), _viewFrustum.getFarTopRight(), nearScale) + translation),
-        rotation * (glm::mix(_viewFrustum.getNearBottomLeft(), _viewFrustum.getFarBottomLeft(), nearScale) + translation),
-        rotation * (glm::mix(_viewFrustum.getNearBottomRight(), _viewFrustum.getFarBottomRight(), nearScale) + translation),
-        rotation * (glm::mix(_viewFrustum.getNearTopLeft(), _viewFrustum.getFarTopLeft(), farScale) + translation),
-        rotation * (glm::mix(_viewFrustum.getNearTopRight(), _viewFrustum.getFarTopRight(), farScale) + translation),
-        rotation * (glm::mix(_viewFrustum.getNearBottomLeft(), _viewFrustum.getFarBottomLeft(), farScale) + translation),
-        rotation * (glm::mix(_viewFrustum.getNearBottomRight(), _viewFrustum.getFarBottomRight(), farScale) + translation) };
+        inverseRotation * (glm::mix(_viewFrustum.getNearTopLeft(), _viewFrustum.getFarTopLeft(), nearScale)),
+        inverseRotation * (glm::mix(_viewFrustum.getNearTopRight(), _viewFrustum.getFarTopRight(), nearScale)),
+        inverseRotation * (glm::mix(_viewFrustum.getNearBottomLeft(), _viewFrustum.getFarBottomLeft(), nearScale)),
+        inverseRotation * (glm::mix(_viewFrustum.getNearBottomRight(), _viewFrustum.getFarBottomRight(), nearScale)),
+        inverseRotation * (glm::mix(_viewFrustum.getNearTopLeft(), _viewFrustum.getFarTopLeft(), farScale)),
+        inverseRotation * (glm::mix(_viewFrustum.getNearTopRight(), _viewFrustum.getFarTopRight(), farScale)),
+        inverseRotation * (glm::mix(_viewFrustum.getNearBottomLeft(), _viewFrustum.getFarBottomLeft(), farScale)),
+        inverseRotation * (glm::mix(_viewFrustum.getNearBottomRight(), _viewFrustum.getFarBottomRight(), farScale)) };
     glm::vec3 minima(FLT_MAX, FLT_MAX, FLT_MAX), maxima(-FLT_MAX, -FLT_MAX, -FLT_MAX);
     for (size_t i = 0; i < sizeof(points) / sizeof(points[0]); i++) {
         minima = glm::min(minima, points[i]);
@@ -2198,9 +2203,20 @@ void Application::updateShadowMap() {
 
     // save the combined matrix for rendering
     _shadowMatrix = glm::transpose(glm::translate(glm::vec3(0.5f, 0.5f, 0.5f)) * glm::scale(glm::vec3(0.5f, 0.5f, 0.5f)) *
-        glm::ortho(minima.x, maxima.x, minima.y, maxima.y, -maxima.z, -minima.z) *
-        glm::mat4_cast(rotation) * glm::translate(translation));
+        glm::ortho(minima.x, maxima.x, minima.y, maxima.y, -maxima.z, -minima.z) * glm::mat4_cast(inverseRotation));
+
+    // update the shadow view frustum
+    _shadowViewFrustum.setPosition(rotation * ((minima + maxima) * 0.5f));
+    _shadowViewFrustum.setOrientation(rotation);
+    _shadowViewFrustum.setOrthographic(true);
+    _shadowViewFrustum.setWidth(maxima.x - minima.x);
+    _shadowViewFrustum.setHeight(maxima.y - minima.y);
+    _shadowViewFrustum.setNearClip(minima.z);
+    _shadowViewFrustum.setFarClip(maxima.z);
+    _shadowViewFrustum.setEyeOffsetPosition(glm::vec3());
+    _shadowViewFrustum.setEyeOffsetOrientation(glm::quat());
+    _shadowViewFrustum.calculate();
 
     glMatrixMode(GL_PROJECTION);
     glPushMatrix();
     glLoadIdentity();
@@ -2209,16 +2225,14 @@ void Application::updateShadowMap() {
     glMatrixMode(GL_MODELVIEW);
     glPushMatrix();
     glLoadIdentity();
-    glm::vec3 axis = glm::axis(rotation);
-    glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
+    glm::vec3 axis = glm::axis(inverseRotation);
+    glRotatef(glm::degrees(glm::angle(inverseRotation)), axis.x, axis.y, axis.z);
 
     // store view matrix without translation, which we'll use for precision-sensitive objects
     glGetFloatv(GL_MODELVIEW_MATRIX, (GLfloat*)&_untranslatedViewMatrix);
-    _viewMatrixTranslation = translation;
+    _viewMatrixTranslation = glm::vec3();
 
-    glTranslatef(translation.x, translation.y, translation.z);
-
-    _avatarManager.renderAvatars(true);
+    _avatarManager.renderAvatars(Avatar::SHADOW_RENDER_MODE);
     _particles.render();
 
     glPopMatrix();
@@ -2396,7 +2410,7 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
     }
 
     bool mirrorMode = (whichCamera.getInterpolatedMode() == CAMERA_MODE_MIRROR);
-    _avatarManager.renderAvatars(mirrorMode, selfAvatarOnly);
+    _avatarManager.renderAvatars(mirrorMode ? Avatar::MIRROR_RENDER_MODE : Avatar::NORMAL_RENDER_MODE, selfAvatarOnly);
 
     if (!selfAvatarOnly) {
         // Render the world box
@@ -2868,6 +2882,9 @@ void Application::domainChanged(const QString& domainHostname) {
 
     // reset the particle renderer
     _particles.clear();
+
+    // reset the voxels renderer
+    _voxels.killLocalVoxels();
 }
 
 void Application::connectedToDomain(const QString& hostname) {
@@ -3161,6 +3178,8 @@ void Application::loadScript(const QString& fileNameString) {
 
     scriptEngine->registerGlobalObject("Overlays", &_overlays);
     scriptEngine->registerGlobalObject("Menu", MenuScriptingInterface::getInstance());
+    scriptEngine->registerGlobalObject("Settings", SettingsScriptingInterface::getInstance());
+    scriptEngine->registerGlobalObject("AudioDevice", AudioDeviceScriptingInterface::getInstance());
 
     QThread* workerThread = new QThread(this);
@@ -29,12 +29,12 @@

 #include <ParticleEditPacketSender.h>
 #include <ScriptEngine.h>
 #include <OctreeQuery.h>
+#include <ViewFrustum.h>
+#include <VoxelEditPacketSender.h>

 #include "Audio.h"
-#include "BandwidthMeter.h"
 #include "BuckyBalls.h"
 #include "Camera.h"
-#include "ControllerScriptingInterface.h"
 #include "DatagramProcessor.h"
 #include "Environment.h"
 #include "FileLogger.h"

@@ -44,13 +44,6 @@

 #include "PacketHeaders.h"
 #include "ParticleTreeRenderer.h"
 #include "Stars.h"
-#include "ViewFrustum.h"
-#include "VoxelFade.h"
-#include "VoxelEditPacketSender.h"
-#include "VoxelHideShowThread.h"
-#include "VoxelPacketProcessor.h"
-#include "VoxelSystem.h"
-#include "VoxelImporter.h"
 #include "avatar/Avatar.h"
 #include "avatar/AvatarManager.h"
 #include "avatar/MyAvatar.h"

@@ -63,13 +56,20 @@

 #include "renderer/PointShader.h"
 #include "renderer/TextureCache.h"
 #include "renderer/VoxelShader.h"
+#include "scripting/ControllerScriptingInterface.h"
 #include "ui/BandwidthDialog.h"
+#include "ui/BandwidthMeter.h"
 #include "ui/OctreeStatsDialog.h"
 #include "ui/RearMirrorTools.h"
 #include "ui/LodToolsDialog.h"
 #include "ui/LogDialog.h"
 #include "ui/UpdateDialog.h"
-#include "ui/Overlays.h"
+#include "ui/overlays/Overlays.h"
+#include "voxels/VoxelFade.h"
+#include "voxels/VoxelHideShowThread.h"
+#include "voxels/VoxelImporter.h"
+#include "voxels/VoxelPacketProcessor.h"
+#include "voxels/VoxelSystem.h"

 class QAction;

@@ -155,6 +155,7 @@ public:

     Audio* getAudio() { return &_audio; }
     Camera* getCamera() { return &_myCamera; }
     ViewFrustum* getViewFrustum() { return &_viewFrustum; }
+    ViewFrustum* getShadowViewFrustum() { return &_shadowViewFrustum; }
     VoxelSystem* getVoxels() { return &_voxels; }
     VoxelTree* getVoxelTree() { return _voxels.getTree(); }
     ParticleTreeRenderer* getParticles() { return &_particles; }

@@ -171,7 +172,11 @@ public:

     Visage* getVisage() { return &_visage; }
     SixenseManager* getSixenseManager() { return &_sixenseManager; }
     BandwidthMeter* getBandwidthMeter() { return &_bandwidthMeter; }
-    QSettings* getSettings() { return _settings; }
+    /// if you need to access the application settings, use lockSettings()/unlockSettings()
+    QSettings* lockSettings() { _settingsMutex.lock(); return _settings; }
+    void unlockSettings() { _settingsMutex.unlock(); }

     QMainWindow* getWindow() { return _window; }
     NodeToOctreeSceneStats* getOcteeSceneStats() { return &_octreeServerSceneStats; }
     void lockOctreeSceneStats() { _octreeSceneStatsLock.lockForRead(); }

@@ -347,6 +352,7 @@ private:

     DatagramProcessor _datagramProcessor;

     QNetworkAccessManager* _networkAccessManager;
+    QMutex _settingsMutex;
     QSettings* _settings;

     glm::vec3 _gravity;

@@ -380,6 +386,7 @@ private:

     ViewFrustum _viewFrustum; // current state of view frustum, perspective, orientation, etc.
     ViewFrustum _lastQueriedViewFrustum; /// last view frustum used to query octree servers (voxels, particles)
+    ViewFrustum _shadowViewFrustum;
     quint64 _lastQueriedTime;

     Oscilloscope _audioScope;
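Application now guards its shared QSettings object with _settingsMutex; getSettings() is replaced by a lockSettings()/unlockSettings() pair so concurrent readers and writers are serialized. A short usage sketch of the new API (the scoped guard at the end is an illustration, not something this commit adds):

    // Direct use of the new accessors: lock, read, unlock.
    QSettings* settings = Application::getInstance()->lockSettings();
    int jitterSamples = settings->value("audioJitterBufferSamples", 0).toInt();
    Application::getInstance()->unlockSettings();
    qDebug() << "audioJitterBufferSamples =" << jitterSamples;

    // Hypothetical scoped guard (illustration only) that guarantees unlockSettings()
    // runs even on early returns:
    class ScopedSettings {
    public:
        ScopedSettings() : _settings(Application::getInstance()->lockSettings()) {}
        ~ScopedSettings() { Application::getInstance()->unlockSettings(); }
        QSettings* operator->() { return _settings; }
    private:
        QSettings* _settings;
    };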
@@ -92,6 +92,16 @@ void Audio::reset() {
     _ringBuffer.reset();
 }

+QAudioDeviceInfo getNamedAudioDeviceForMode(QAudio::Mode mode, const QString& deviceName) {
+    QAudioDeviceInfo result;
+    foreach(QAudioDeviceInfo audioDevice, QAudioDeviceInfo::availableDevices(mode)) {
+        if (audioDevice.deviceName().trimmed() == deviceName.trimmed()) {
+            result = audioDevice;
+        }
+    }
+    return result;
+}

 QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
 #ifdef __APPLE__
     if (QAudioDeviceInfo::availableDevices(mode).size() > 1) {
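The new free function getNamedAudioDeviceForMode() resolves a saved device name back to a QAudioDeviceInfo by scanning QAudioDeviceInfo::availableDevices(mode); callers check isNull() on the result. A self-contained sketch of the same lookup — the fall-back to the default device is an assumption added for illustration, not what Audio.cpp does:

    #include <QAudioDeviceInfo>
    #include <QDebug>

    // Sketch: resolve an input device by name, falling back to the system default
    // when the name is unknown (Audio.cpp instead returns a null QAudioDeviceInfo).
    QAudioDeviceInfo findInputDeviceOrDefault(const QString& name) {
        foreach (const QAudioDeviceInfo& device, QAudioDeviceInfo::availableDevices(QAudio::AudioInput)) {
            if (device.deviceName().trimmed() == name.trimmed()) {
                return device;
            }
        }
        qDebug() << "Device" << name << "not found, using the default input device";
        return QAudioDeviceInfo::defaultInputDevice();
    }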
@@ -249,27 +259,105 @@ void Audio::start() {
     _desiredOutputFormat.setChannelCount(2);

     QAudioDeviceInfo inputDeviceInfo = defaultAudioDeviceForMode(QAudio::AudioInput);
-    qDebug() << "The audio input device is" << inputDeviceInfo.deviceName();
+    qDebug() << "The default audio input device is" << inputDeviceInfo.deviceName();
+    bool inputFormatSupported = switchInputToAudioDevice(inputDeviceInfo.deviceName());

+    QAudioDeviceInfo outputDeviceInfo = defaultAudioDeviceForMode(QAudio::AudioOutput);
+    qDebug() << "The default audio output device is" << outputDeviceInfo.deviceName();
+    bool outputFormatSupported = switchOutputToAudioDevice(outputDeviceInfo.deviceName());

-    if (adjustedFormatForAudioDevice(inputDeviceInfo, _desiredInputFormat, _inputFormat)) {
-        qDebug() << "The format to be used for audio input is" << _inputFormat;
+    if (!inputFormatSupported || !outputFormatSupported) {
+        qDebug() << "Unable to set up audio I/O because of a problem with input or output formats.";
+    }
+}

+QString Audio::getDefaultDeviceName(QAudio::Mode mode) {
+    QAudioDeviceInfo deviceInfo = defaultAudioDeviceForMode(mode);
+    return deviceInfo.deviceName();
+}

+QVector<QString> Audio::getDeviceNames(QAudio::Mode mode) {
+    QVector<QString> deviceNames;
+    foreach(QAudioDeviceInfo audioDevice, QAudioDeviceInfo::availableDevices(mode)) {
+        deviceNames << audioDevice.deviceName().trimmed();
+    }
+    return deviceNames;
+}

+bool Audio::switchInputToAudioDevice(const QString& inputDeviceName) {
+    bool supportedFormat = false;

+    // cleanup any previously initialized device
+    if (_audioInput) {
+        _audioInput->stop();
+        disconnect(_inputDevice, 0, 0, 0);
+        _inputDevice = NULL;

+        delete _audioInput;
+        _audioInput = NULL;
+        _numInputCallbackBytes = 0;

+        _inputAudioDeviceName = "";
+    }

+    QAudioDeviceInfo inputDeviceInfo = getNamedAudioDeviceForMode(QAudio::AudioInput, inputDeviceName);

+    if (!inputDeviceInfo.isNull()) {
+        qDebug() << "The audio input device " << inputDeviceInfo.deviceName() << "is available.";
+        _inputAudioDeviceName = inputDeviceInfo.deviceName().trimmed();

+        if (adjustedFormatForAudioDevice(inputDeviceInfo, _desiredInputFormat, _inputFormat)) {
+            qDebug() << "The format to be used for audio input is" << _inputFormat;

             _audioInput = new QAudioInput(inputDeviceInfo, _inputFormat, this);
             _numInputCallbackBytes = NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL * _inputFormat.channelCount()
                 * (_inputFormat.sampleRate() / SAMPLE_RATE)
                 / CALLBACK_ACCELERATOR_RATIO;
             _audioInput->setBufferSize(_numInputCallbackBytes);

-            QAudioDeviceInfo outputDeviceInfo = defaultAudioDeviceForMode(QAudio::AudioOutput);
-            qDebug() << "The audio output device is" << outputDeviceInfo.deviceName();
+            // how do we want to handle input working, but output not working?

-            if (adjustedFormatForAudioDevice(outputDeviceInfo, _desiredOutputFormat, _outputFormat)) {
-                qDebug() << "The format to be used for audio output is" << _outputFormat;

             _inputRingBuffer.resizeForFrameSize(_numInputCallbackBytes * CALLBACK_ACCELERATOR_RATIO / sizeof(int16_t));
             _inputDevice = _audioInput->start();
             connect(_inputDevice, SIGNAL(readyRead()), this, SLOT(handleAudioInput()));

+            supportedFormat = true;
+        }
+    }
+    return supportedFormat;
+}

+bool Audio::switchOutputToAudioDevice(const QString& outputDeviceName) {
+    bool supportedFormat = false;

+    // cleanup any previously initialized device
+    if (_audioOutput) {
+        _audioOutput->stop();
+        disconnect(_outputDevice, 0, 0, 0);
+        _outputDevice = NULL;

+        delete _audioOutput;
+        _audioOutput = NULL;
+        _numInputCallbackBytes = 0;

+        _loopbackOutputDevice = NULL;
+        delete _loopbackAudioOutput;
+        _loopbackAudioOutput = NULL;

+        _proceduralOutputDevice = NULL;
+        delete _proceduralAudioOutput;
+        _proceduralAudioOutput = NULL;
+        _outputAudioDeviceName = "";
+    }

+    QAudioDeviceInfo outputDeviceInfo = getNamedAudioDeviceForMode(QAudio::AudioOutput, outputDeviceName);

+    if (!outputDeviceInfo.isNull()) {
+        qDebug() << "The audio output device " << outputDeviceInfo.deviceName() << "is available.";
+        _outputAudioDeviceName = outputDeviceInfo.deviceName().trimmed();

+        if (adjustedFormatForAudioDevice(outputDeviceInfo, _desiredOutputFormat, _outputFormat)) {
+            qDebug() << "The format to be used for audio output is" << _outputFormat;

             // setup our general output device for audio-mixer audio
             _audioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
             _audioOutput->setBufferSize(_ringBuffer.getSampleCapacity() * sizeof(int16_t));
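With switchInputToAudioDevice() and switchOutputToAudioDevice() factored out of Audio::start(), the input and output sides can be torn down and rebuilt at runtime rather than only at startup. A hedged sketch of a caller applying a user-selected device and reverting to the default on failure (the fallback policy is an assumption, not shown in this diff):

    // Sketch: apply a device chosen in a preferences dialog; revert to the default on failure.
    void applyInputDeviceSelection(Audio* audio, const QString& requestedDevice) {
        if (!audio->switchInputToAudioDevice(requestedDevice)) {
            qDebug() << "Could not switch to" << requestedDevice << "- reverting to the default device";
            audio->switchInputToAudioDevice(audio->getDefaultDeviceName(QAudio::AudioInput));
        }
    }

If the Audio object lives on another thread, the call should go through QMetaObject::invokeMethod, which is exactly what the new AudioDeviceScriptingInterface at the end of this diff does.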
@@ -278,17 +366,15 @@ void Audio::start() {

             // setup a loopback audio output device
             _loopbackAudioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);

             // setup a procedural audio output device
             _proceduralAudioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);

             gettimeofday(&_lastReceiveTime, NULL);
+            supportedFormat = true;
         }

-        return;
     }
-    qDebug() << "Unable to set up audio I/O because of a problem with input or output formats.";
+    return supportedFormat;
 }

 void Audio::handleAudioInput() {
@@ -309,13 +395,15 @@ void Audio::handleAudioInput() {
     if (Menu::getInstance()->isOptionChecked(MenuOption::EchoLocalAudio) && !_muted) {
         // if this person wants local loopback add that to the locally injected audio

-        if (!_loopbackOutputDevice) {
+        if (!_loopbackOutputDevice && _loopbackAudioOutput) {
             // we didn't have the loopback output device going so set that up now
             _loopbackOutputDevice = _loopbackAudioOutput->start();
         }

         if (_inputFormat == _outputFormat) {
-            _loopbackOutputDevice->write(inputByteArray);
+            if (_loopbackOutputDevice) {
+                _loopbackOutputDevice->write(inputByteArray);
+            }
         } else {
             static float loopbackOutputToInputRatio = (_outputFormat.sampleRate() / (float) _inputFormat.sampleRate())
                 * (_outputFormat.channelCount() / _inputFormat.channelCount());

@@ -326,7 +414,9 @@ void Audio::handleAudioInput() {
                 inputByteArray.size() / sizeof(int16_t),
                 loopBackByteArray.size() / sizeof(int16_t), _inputFormat, _outputFormat);

-            _loopbackOutputDevice->write(loopBackByteArray);
+            if (_loopbackOutputDevice) {
+                _loopbackOutputDevice->write(loopBackByteArray);
+            }
         }
     }

@@ -455,7 +545,7 @@ void Audio::handleAudioInput() {
         addProceduralSounds(monoAudioSamples,
                             NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);

-        if (!_proceduralOutputDevice) {
+        if (!_proceduralOutputDevice && _proceduralAudioOutput) {
            _proceduralOutputDevice = _proceduralAudioOutput->start();
        }

@@ -469,7 +559,9 @@ void Audio::handleAudioInput() {
                             NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL * 4,
                             _desiredInputFormat, _outputFormat);

-        _proceduralOutputDevice->write(proceduralOutput);
+        if (_proceduralOutputDevice) {
+            _proceduralOutputDevice->write(proceduralOutput);
+        }

         NodeList* nodeList = NodeList::getInstance();
         SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
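Because the output side can now be rebuilt while capture keeps running, every use of _loopbackOutputDevice, _proceduralOutputDevice, and _audioOutput is wrapped in a null check. A condensed sketch of the guard pattern, using illustrative names rather than the actual Audio members:

    // Sketch: lazily start the sink and drop frames while a device switch is in flight.
    void writeIfReady(QIODevice*& sink, QAudioOutput* output, const QByteArray& samples) {
        if (!sink && output) {
            sink = output->start();   // first use: start the device and remember its QIODevice
        }
        if (sink) {
            sink->write(samples);     // otherwise silently skip this frame
        }
    }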
@@ -553,7 +645,7 @@ void Audio::addReceivedAudioToBuffer(const QByteArray& audioByteArray) {
     static float networkOutputToOutputRatio = (_desiredOutputFormat.sampleRate() / (float) _outputFormat.sampleRate())
         * (_desiredOutputFormat.channelCount() / (float) _outputFormat.channelCount());

-    if (!_ringBuffer.isStarved() && _audioOutput->bytesFree() == _audioOutput->bufferSize()) {
+    if (!_ringBuffer.isStarved() && _audioOutput && _audioOutput->bytesFree() == _audioOutput->bufferSize()) {
         // we don't have any audio data left in the output buffer
         // we just starved
         //qDebug() << "Audio output just starved.";
@@ -19,17 +19,20 @@

 #include "InterfaceConfig.h"

+#include <QAudio>
+#include <QAudioInput>
+#include <QGLWidget>
 #include <QtCore/QObject>
 #include <QtCore/QVector>
 #include <QtMultimedia/QAudioFormat>
-#include <QVector>

 #include <AbstractAudioInterface.h>
 #include <AudioRingBuffer.h>
 #include <StdDev.h>

-#include "Oscilloscope.h"
+#include "ui/Oscilloscope.h"

-#include <QGLWidget>

 static const int NUM_AUDIO_CHANNELS = 2;

@@ -72,7 +75,7 @@ public:

     int getNetworkSampleRate() { return SAMPLE_RATE; }
     int getNetworkBufferLengthSamplesPerChannel() { return NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL; }

 public slots:
     void start();
     void addReceivedAudioToBuffer(const QByteArray& audioByteArray);

@@ -83,10 +86,21 @@ public slots:

     virtual void handleAudioByteArray(const QByteArray& audioByteArray);

+    bool switchInputToAudioDevice(const QString& inputDeviceName);
+    bool switchOutputToAudioDevice(const QString& outputDeviceName);
+    QString getDeviceName(QAudio::Mode mode) const { return (mode == QAudio::AudioInput) ?
+                                                            _inputAudioDeviceName : _outputAudioDeviceName; }
+    QString getDefaultDeviceName(QAudio::Mode mode);
+    QVector<QString> getDeviceNames(QAudio::Mode mode);

+    float getInputVolume() const { return (_audioInput) ? _audioInput->volume() : 0.0f; }
+    void setInputVolume(float volume) { if (_audioInput) _audioInput->setVolume(volume); }

 signals:
     bool muteToggled();

 private:

     QByteArray firstInputFrame;
     QAudioInput* _audioInput;
     QAudioFormat _desiredInputFormat;

@@ -105,6 +119,9 @@ private:

     QIODevice* _proceduralOutputDevice;
     AudioRingBuffer _inputRingBuffer;
     AudioRingBuffer _ringBuffer;

+    QString _inputAudioDeviceName;
+    QString _outputAudioDeviceName;

     Oscilloscope* _scope;
     StDev _stdev;
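getDeviceNames() and getDeviceName() give UI code what it needs to build a device picker. A hedged sketch of wiring the new accessors into a QComboBox (the function and widget are illustrative, not from this commit):

    #include <QComboBox>

    // Sketch: list the available input devices and pre-select the one currently in use.
    void populateInputDeviceBox(QComboBox* box, Audio* audio) {
        box->clear();
        foreach (const QString& deviceName, audio->getDeviceNames(QAudio::AudioInput)) {
            box->addItem(deviceName);
        }
        int current = box->findText(audio->getDeviceName(QAudio::AudioInput));
        if (current != -1) {
            box->setCurrentIndex(current);
        }
    }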
@@ -33,11 +33,11 @@

 #include "Application.h"
 #include "Menu.h"
-#include "MenuScriptingInterface.h"
+#include "scripting/MenuScriptingInterface.h"
 #include "Util.h"
-#include "InfoView.h"
+#include "ui/InfoView.h"
 #include "ui/MetavoxelEditor.h"
-#include "ModelBrowser.h"
+#include "ui/ModelBrowser.h"

 Menu* Menu::_instance = NULL;

@@ -374,8 +374,10 @@ Menu::~Menu() {
 }

 void Menu::loadSettings(QSettings* settings) {
+    bool lockedSettings = false;
     if (!settings) {
-        settings = Application::getInstance()->getSettings();
+        settings = Application::getInstance()->lockSettings();
+        lockedSettings = true;
     }

     _audioJitterBufferSamples = loadSetting(settings, "audioJitterBufferSamples", 0);

@@ -404,11 +406,17 @@ void Menu::loadSettings(QSettings* settings) {
     // TODO: cache more settings in MyAvatar that are checked with very high frequency.
     MyAvatar* myAvatar = Application::getInstance()->getAvatar();
     myAvatar->updateCollisionFlags();

+    if (lockedSettings) {
+        Application::getInstance()->unlockSettings();
+    }
 }

 void Menu::saveSettings(QSettings* settings) {
+    bool lockedSettings = false;
     if (!settings) {
-        settings = Application::getInstance()->getSettings();
+        settings = Application::getInstance()->lockSettings();
+        lockedSettings = true;
     }

     settings->setValue("audioJitterBufferSamples", _audioJitterBufferSamples);

@@ -430,6 +438,9 @@ void Menu::saveSettings(QSettings* settings) {
     Application::getInstance()->getAvatar()->saveData(settings);
     NodeList::getInstance()->saveData(settings);

+    if (lockedSettings) {
+        Application::getInstance()->unlockSettings();
+    }
 }

 void Menu::importSettings() {

@@ -1403,9 +1414,8 @@ void Menu::removeMenu(const QString& menuName) {
     if (action) {
         QString finalMenuPart;
         QMenu* parent = getMenuParent(menuName, finalMenuPart);

         if (parent) {
-            removeAction(parent, finalMenuPart);
+            parent->removeAction(action);
         } else {
             QMenuBar::removeAction(action);
         }
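loadSettings() and saveSettings() only call unlockSettings() when they were the ones to lock: a caller that already holds the lock can pass in its own QSettings pointer, lockedSettings stays false, and a double unlock is avoided. A compact sketch of the same convention with illustrative names:

    // Sketch: lock only if we acquired the settings ourselves, and unlock only in that case.
    void readSomeSetting(QSettings* settings = NULL) {
        bool lockedHere = false;
        if (!settings) {
            settings = Application::getInstance()->lockSettings();
            lockedHere = true;
        }

        int value = settings->value("exampleKey", 0).toInt();   // "exampleKey" is hypothetical
        Q_UNUSED(value);

        if (lockedHere) {
            Application::getInstance()->unlockSettings();
        }
    }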
@@ -189,10 +189,12 @@ static TextRenderer* textRenderer(TextRendererType type) {
     return displayNameRenderer;
 }

-void Avatar::render(const glm::vec3& cameraPosition, bool forShadowMap) {
+void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode) {
     // simple frustum check
     float boundingRadius = getBillboardSize();
-    if (Application::getInstance()->getViewFrustum()->sphereInFrustum(cameraPosition, boundingRadius) == ViewFrustum::OUTSIDE) {
+    ViewFrustum* frustum = (renderMode == Avatar::SHADOW_RENDER_MODE) ?
+        Application::getInstance()->getShadowViewFrustum() : Application::getInstance()->getViewFrustum();
+    if (frustum->sphereInFrustum(_position, boundingRadius) == ViewFrustum::OUTSIDE) {
         return;
     }

@@ -202,11 +204,11 @@ void Avatar::render(const glm::vec3& cameraPosition, bool forShadowMap) {
     {
         // glow when moving far away
         const float GLOW_DISTANCE = 20.0f;
-        Glower glower(_moving && distanceToTarget > GLOW_DISTANCE && !forShadowMap ? 1.0f : 0.0f);
+        Glower glower(_moving && distanceToTarget > GLOW_DISTANCE && renderMode == NORMAL_RENDER_MODE ? 1.0f : 0.0f);

         // render body
         if (Menu::getInstance()->isOptionChecked(MenuOption::Avatars)) {
-            renderBody(forShadowMap);
+            renderBody(renderMode);
         }
         if (Menu::getInstance()->isOptionChecked(MenuOption::RenderSkeletonCollisionProxies)) {
             _skeletonModel.renderCollisionProxies(0.7f);

@@ -230,7 +232,8 @@ void Avatar::render(const glm::vec3& cameraPosition, bool forShadowMap) {
         float angle = abs(angleBetween(toTarget + delta, toTarget - delta));
         float sphereRadius = getHead()->getAverageLoudness() * SPHERE_LOUDNESS_SCALING;

-        if (!forShadowMap && (sphereRadius > MIN_SPHERE_SIZE) && (angle < MAX_SPHERE_ANGLE) && (angle > MIN_SPHERE_ANGLE)) {
+        if (renderMode == NORMAL_RENDER_MODE && (sphereRadius > MIN_SPHERE_SIZE) &&
+                (angle < MAX_SPHERE_ANGLE) && (angle > MIN_SPHERE_ANGLE)) {
             glColor4f(SPHERE_COLOR[0], SPHERE_COLOR[1], SPHERE_COLOR[2], 1.f - angle / MAX_SPHERE_ANGLE);
             glPushMatrix();
             glTranslatef(_position.x, _position.y, _position.z);

@@ -242,8 +245,8 @@ void Avatar::render(const glm::vec3& cameraPosition, bool forShadowMap) {
     }

     const float DISPLAYNAME_DISTANCE = 10.0f;
-    setShowDisplayName(!forShadowMap && distanceToTarget < DISPLAYNAME_DISTANCE);
-    if (forShadowMap) {
+    setShowDisplayName(renderMode == NORMAL_RENDER_MODE && distanceToTarget < DISPLAYNAME_DISTANCE);
+    if (renderMode != NORMAL_RENDER_MODE) {
         return;
     }
     renderDisplayName();

@@ -306,17 +309,16 @@ glm::quat Avatar::computeRotationFromBodyToWorldUp(float proportion) const {
     return glm::angleAxis(angle * proportion, axis);
 }

-void Avatar::renderBody(bool forShadowMap) {
+void Avatar::renderBody(RenderMode renderMode) {
     if (_shouldRenderBillboard || !(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
         // render the billboard until both models are loaded
-        if (forShadowMap) {
-            return;
+        if (renderMode != SHADOW_RENDER_MODE) {
+            renderBillboard();
         }
-        renderBillboard();
         return;
     }
-    _skeletonModel.render(1.0f);
-    getHead()->render(1.0f);
+    _skeletonModel.render(1.0f, renderMode == SHADOW_RENDER_MODE);
+    getHead()->render(1.0f, renderMode == SHADOW_RENDER_MODE);
     getHand()->render(false);
 }

@@ -74,7 +74,10 @@ public:

     void init();
     void simulate(float deltaTime);
-    virtual void render(const glm::vec3& cameraPosition, bool forShadowMap);
+    enum RenderMode { NORMAL_RENDER_MODE, SHADOW_RENDER_MODE, MIRROR_RENDER_MODE };

+    virtual void render(const glm::vec3& cameraPosition, RenderMode renderMode = NORMAL_RENDER_MODE);

     //setters
     void setDisplayingLookatVectors(bool displayingLookatVectors) { getHead()->setRenderLookatVectors(displayingLookatVectors); }

@@ -133,7 +136,7 @@ public:

     void setShowDisplayName(bool showDisplayName);

-    int parseDataAtOffset(const QByteArray& packet, int offset);
+    virtual int parseDataAtOffset(const QByteArray& packet, int offset);

     static void renderJointConnectingCone(glm::vec3 position1, glm::vec3 position2, float radius1, float radius2);

@@ -181,7 +184,7 @@ protected:

     float getPelvisToHeadLength() const;

     void renderDisplayName();
-    virtual void renderBody(bool forShadowMap);
+    virtual void renderBody(RenderMode renderMode);

 private:
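The boolean forShadowMap flag becomes a three-valued Avatar::RenderMode, so shadow and mirror passes can be told apart: shadow rendering culls against the shadow frustum, and any non-normal mode skips the glow and the display name. A hedged sketch of how render passes might pick a mode (the pass functions are illustrative; the enum and the renderAvatars() signature are from this diff):

    // Sketch: each pass asks for the mode that matches its purpose.
    void renderShadowPass(AvatarManager& avatarManager) {
        avatarManager.renderAvatars(Avatar::SHADOW_RENDER_MODE);   // shadow frustum, shadow shaders
    }

    void renderMirrorPass(AvatarManager& avatarManager) {
        avatarManager.renderAvatars(Avatar::MIRROR_RENDER_MODE);   // full shading, no display names
    }

    void renderMainPass(AvatarManager& avatarManager) {
        avatarManager.renderAvatars(Avatar::NORMAL_RENDER_MODE);
    }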
@@ -72,7 +72,7 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
     simulateAvatarFades(deltaTime);
 }

-void AvatarManager::renderAvatars(bool forShadowMapOrMirror, bool selfAvatarOnly) {
+void AvatarManager::renderAvatars(Avatar::RenderMode renderMode, bool selfAvatarOnly) {
     PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
                             "Application::renderAvatars()");
     bool renderLookAtVectors = Menu::getInstance()->isOptionChecked(MenuOption::LookAtVectors);

@@ -85,13 +85,13 @@ void AvatarManager::renderAvatars(bool forShadowMapOrMirror, bool selfAvatarOnly
             if (!avatar->isInitialized()) {
                 continue;
             }
-            avatar->render(cameraPosition, forShadowMapOrMirror);
+            avatar->render(cameraPosition, renderMode);
             avatar->setDisplayingLookatVectors(renderLookAtVectors);
         }
-        renderAvatarFades(cameraPosition, forShadowMapOrMirror);
+        renderAvatarFades(cameraPosition, renderMode);
     } else {
         // just render myAvatar
-        _myAvatar->render(cameraPosition, forShadowMapOrMirror);
+        _myAvatar->render(cameraPosition, renderMode);
         _myAvatar->setDisplayingLookatVectors(renderLookAtVectors);
     }
 }

@@ -114,14 +114,14 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
     }
 }

-void AvatarManager::renderAvatarFades(const glm::vec3& cameraPosition, bool forShadowMap) {
+void AvatarManager::renderAvatarFades(const glm::vec3& cameraPosition, Avatar::RenderMode renderMode) {
     // render avatar fades
-    Glower glower(forShadowMap ? 0.0f : 1.0f);
+    Glower glower(renderMode == Avatar::NORMAL_RENDER_MODE ? 1.0f : 0.0f);

     foreach(const AvatarSharedPointer& fadingAvatar, _avatarFades) {
         Avatar* avatar = static_cast<Avatar*>(fadingAvatar.data());
         if (avatar != static_cast<Avatar*>(_myAvatar.data())) {
-            avatar->render(cameraPosition, forShadowMap);
+            avatar->render(cameraPosition, renderMode);
         }
     }
 }

@@ -29,7 +29,7 @@ public:

     MyAvatar* getMyAvatar() { return _myAvatar.data(); }

     void updateOtherAvatars(float deltaTime);
-    void renderAvatars(bool forShadowMapOrMirror = false, bool selfAvatarOnly = false);
+    void renderAvatars(Avatar::RenderMode renderMode, bool selfAvatarOnly = false);

     void clearOtherAvatars();

@@ -45,7 +45,7 @@ private:

     void processKillAvatar(const QByteArray& datagram);

     void simulateAvatarFades(float deltaTime);
-    void renderAvatarFades(const glm::vec3& cameraPosition, bool forShadowMap);
+    void renderAvatarFades(const glm::vec3& cameraPosition, Avatar::RenderMode renderMode);

     // virtual override
     AvatarHash::iterator erase(const AvatarHash::iterator& iterator);
@@ -45,13 +45,6 @@ void FaceModel::simulate(float deltaTime) {
     Model::simulate(deltaTime, true, newJointStates);
 }

-bool FaceModel::render(float alpha) {
-    if (!Model::render(alpha)) {
-        return false;
-    }
-    return true;
-}

 void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
     // get the rotation axes in joint space and use them to adjust the rotation
     glm::mat3 axes = glm::mat3_cast(_rotation);

@@ -22,7 +22,6 @@ public:

     FaceModel(Head* owningHead);

     void simulate(float deltaTime);
-    bool render(float alpha);

 protected:

@@ -27,10 +27,7 @@ Hand::Hand(Avatar* owningAvatar) :
     HandData((AvatarData*)owningAvatar),

     _owningAvatar(owningAvatar),
-    _renderAlpha(1.0),
-    _collisionCenter(0,0,0),
-    _collisionAge(0),
-    _collisionDuration(0)
+    _renderAlpha(1.0)
 {
 }

@@ -42,10 +39,6 @@ void Hand::reset() {

 void Hand::simulate(float deltaTime, bool isMine) {

-    if (_collisionAge > 0.f) {
-        _collisionAge += deltaTime;
-    }

     calculateGeometry();

     if (isMine) {

@@ -222,26 +215,6 @@ void Hand::collideAgainstOurself() {
     }
 }

-void Hand::handleVoxelCollision(PalmData* palm, const glm::vec3& fingerTipPosition, VoxelTreeElement* voxel, float deltaTime) {
-    // Collision between finger and a voxel plays sound
-    const float LOWEST_FREQUENCY = 100.f;
-    const float HERTZ_PER_RGB = 3.f;
-    const float DECAY_PER_SAMPLE = 0.0005f;
-    const float DURATION_MAX = 2.0f;
-    const float MIN_VOLUME = 0.1f;
-    float volume = MIN_VOLUME + glm::clamp(glm::length(palm->getRawVelocity()), 0.f, (1.f - MIN_VOLUME));
-    float duration = volume;
-    _collisionCenter = fingerTipPosition;
-    _collisionAge = deltaTime;
-    _collisionDuration = duration;
-    int voxelBrightness = voxel->getColor()[0] + voxel->getColor()[1] + voxel->getColor()[2];
-    float frequency = LOWEST_FREQUENCY + (voxelBrightness * HERTZ_PER_RGB);
-    Application::getInstance()->getAudio()->startDrumSound(volume,
-                                                           frequency,
-                                                           DURATION_MAX,
-                                                           DECAY_PER_SAMPLE);
-}

 void Hand::calculateGeometry() {
     // generate finger tip balls....
     _leapFingerTipBalls.clear();

@@ -312,21 +285,6 @@ void Hand::render(bool isMine) {
         renderLeapHands(isMine);
     }

-    if (isMine) {
-        // If hand/voxel collision has happened, render a little expanding sphere
-        if (_collisionAge > 0.f) {
-            float opacity = glm::clamp(1.f - (_collisionAge / _collisionDuration), 0.f, 1.f);
-            glColor4f(1, 0, 0, 0.5 * opacity);
-            glPushMatrix();
-            glTranslatef(_collisionCenter.x, _collisionCenter.y, _collisionCenter.z);
-            glutSolidSphere(_collisionAge * 0.25f, 20, 20);
-            glPopMatrix();
-            if (_collisionAge > _collisionDuration) {
-                _collisionAge = 0.f;
-            }
-        }
-    }

     glEnable(GL_DEPTH_TEST);
     glEnable(GL_RESCALE_NORMAL);

@@ -22,7 +22,6 @@

 #include "InterfaceConfig.h"
 #include "world.h"
-#include "VoxelSystem.h"

 class Avatar;

@@ -72,13 +71,6 @@ private:

     std::vector<HandBall> _leapFingerTipBalls;
     std::vector<HandBall> _leapFingerRootBalls;

-    glm::vec3 _lastFingerAddVoxel, _lastFingerDeleteVoxel;
-    VoxelDetail _collidingVoxel;

-    glm::vec3 _collisionCenter;
-    float _collisionAge;
-    float _collisionDuration;

     // private methods
     void setLeapHands(const std::vector<glm::vec3>& handPositions,
                       const std::vector<glm::vec3>& handNormals);

@@ -88,8 +80,6 @@ private:

     void calculateGeometry();

-    void handleVoxelCollision(PalmData* palm, const glm::vec3& fingerTipPosition, VoxelTreeElement* voxel, float deltaTime);

     void playSlaps(PalmData& palm, Avatar* avatar);
 };
@@ -168,8 +168,8 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
     _eyePosition = calculateAverageEyePosition();
 }

-void Head::render(float alpha) {
-    if (_faceModel.render(alpha) && _renderLookatVectors) {
+void Head::render(float alpha, bool forShadowMap) {
+    if (_faceModel.render(alpha, forShadowMap) && _renderLookatVectors) {
         renderLookatVectors(_leftEyePosition, _rightEyePosition, _lookAtPosition);
     }
 }

@@ -37,7 +37,7 @@ public:

     void init();
     void reset();
     void simulate(float deltaTime, bool isMine, bool billboard = false);
-    void render(float alpha);
+    void render(float alpha, bool forShadowMap);
     void setScale(float scale);
     void setPosition(glm::vec3 position) { _position = position; }
     void setGravity(glm::vec3 gravity) { _gravity = gravity; }

@@ -26,7 +26,6 @@

 #include "Menu.h"
 #include "MyAvatar.h"
 #include "Physics.h"
-#include "VoxelSystem.h"
 #include "devices/Faceshift.h"
 #include "devices/OculusManager.h"
 #include "ui/TextRenderer.h"

@@ -451,12 +450,12 @@ void MyAvatar::renderDebugBodyPoints() {
 }

 // virtual
-void MyAvatar::render(const glm::vec3& cameraPosition, bool forShadowMapOrMirror) {
+void MyAvatar::render(const glm::vec3& cameraPosition, RenderMode renderMode) {
     // don't render if we've been asked to disable local rendering
     if (!_shouldRender) {
         return; // exit early
     }
-    Avatar::render(cameraPosition, forShadowMapOrMirror);
+    Avatar::render(cameraPosition, renderMode);
 }

 void MyAvatar::renderHeadMouse() const {

@@ -551,6 +550,14 @@ void MyAvatar::loadData(QSettings* settings) {
     settings->endGroup();
 }

+int MyAvatar::parseDataAtOffset(const QByteArray& packet, int offset) {
+    qDebug() << "Error: ignoring update packet for MyAvatar"
+        << " packetLength = " << packet.size()
+        << " offset = " << offset;
+    // this packet is just bad, so we pretend that we unpacked it ALL
+    return packet.size() - offset;
+}

 void MyAvatar::sendKillAvatar() {
     QByteArray killPacket = byteArrayWithPopulatedHeader(PacketTypeKillAvatar);
     NodeList::getInstance()->broadcastToNodes(killPacket, NodeSet() << NodeType::AvatarMixer);

@@ -631,20 +638,20 @@ void MyAvatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
     _billboardValid = false;
 }

-void MyAvatar::renderBody(bool forceRenderHead) {
+void MyAvatar::renderBody(RenderMode renderMode) {
     if (!(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
         return; // wait until both models are loaded
     }

     // Render the body's voxels and head
-    _skeletonModel.render(1.0f);
+    _skeletonModel.render(1.0f, renderMode == SHADOW_RENDER_MODE);

     // Render head so long as the camera isn't inside it
     const float RENDER_HEAD_CUTOFF_DISTANCE = 0.40f;
     Camera* myCamera = Application::getInstance()->getCamera();
-    if (forceRenderHead || (glm::length(myCamera->getPosition() - getHead()->calculateAverageEyePosition()) >
+    if (renderMode != NORMAL_RENDER_MODE || (glm::length(myCamera->getPosition() - getHead()->calculateAverageEyePosition()) >
             RENDER_HEAD_CUTOFF_DISTANCE * _scale)) {
-        getHead()->render(1.0f);
+        getHead()->render(1.0f, renderMode == SHADOW_RENDER_MODE);
     }
     getHand()->render(true);
 }
@@ -35,8 +35,8 @@ public:

     void simulate(float deltaTime);
     void updateFromGyros(float deltaTime);

-    void render(const glm::vec3& cameraPosition, bool forShadowMapOrMirror = false);
-    void renderBody(bool forceRenderHead);
+    void render(const glm::vec3& cameraPosition, RenderMode renderMode = NORMAL_RENDER_MODE);
+    void renderBody(RenderMode renderMode);
     void renderDebugBodyPoints();
     void renderHeadMouse() const;

@@ -71,6 +71,8 @@ public:

     void jump() { _shouldJump = true; };

     bool isMyAvatar() { return true; }

+    virtual int parseDataAtOffset(const QByteArray& packet, int offset);

     static void sendKillAvatar();
@@ -62,17 +62,6 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
     }
 }

-bool SkeletonModel::render(float alpha) {
-
-    if (_jointStates.isEmpty()) {
-        return false;
-    }
-
-    Model::render(alpha);
-
-    return true;
-}

 void SkeletonModel::getHandShapes(int jointIndex, QVector<const Shape*>& shapes) const {
     if (jointIndex == -1) {
         return;

@@ -23,7 +23,6 @@ public:

     SkeletonModel(Avatar* owningAvatar);

     void simulate(float deltaTime, bool fullUpdate = true);
-    bool render(float alpha);

     /// \param jointIndex index of hand joint
     /// \param shapes[out] list in which is stored pointers to hand shapes
@@ -43,11 +43,14 @@ Model::~Model() {

 ProgramObject Model::_program;
 ProgramObject Model::_normalMapProgram;
+ProgramObject Model::_shadowProgram;
 ProgramObject Model::_skinProgram;
 ProgramObject Model::_skinNormalMapProgram;
+ProgramObject Model::_skinShadowProgram;
 int Model::_normalMapTangentLocation;
 Model::SkinLocations Model::_skinLocations;
 Model::SkinLocations Model::_skinNormalMapLocations;
+Model::SkinLocations Model::_skinShadowLocations;

 void Model::initSkinProgram(ProgramObject& program, Model::SkinLocations& locations) {
     program.bind();

@@ -93,6 +96,11 @@ void Model::init() {
         _normalMapTangentLocation = _normalMapProgram.attributeLocation("tangent");
         _normalMapProgram.release();

+        _shadowProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() + "shaders/model_shadow.vert");
+        _shadowProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() +
+            "shaders/model_shadow.frag");
+        _shadowProgram.link();

         _skinProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath()
                                              + "shaders/skin_model.vert");
         _skinProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath()

@@ -108,6 +116,14 @@ void Model::init() {
         _skinNormalMapProgram.link();

         initSkinProgram(_skinNormalMapProgram, _skinNormalMapLocations);

+        _skinShadowProgram.addShaderFromSourceFile(QGLShader::Vertex,
+            Application::resourcesPath() + "shaders/skin_model_shadow.vert");
+        _skinShadowProgram.addShaderFromSourceFile(QGLShader::Fragment,
+            Application::resourcesPath() + "shaders/model_shadow.frag");
+        _skinShadowProgram.link();

+        initSkinProgram(_skinShadowProgram, _skinShadowLocations);
     }
 }

@@ -167,7 +183,7 @@ void Model::simulate(float deltaTime, bool fullUpdate) {
     simulate(deltaTime, fullUpdate, updateGeometry());
 }

-bool Model::render(float alpha) {
+bool Model::render(float alpha, bool forShadowMap) {
     // render the attachments
     foreach (Model* attachment, _attachments) {
         attachment->render(alpha);

@@ -198,13 +214,13 @@ bool Model::render(float alpha) {
     glEnable(GL_ALPHA_TEST);
     glAlphaFunc(GL_GREATER, 0.5f * alpha);

-    renderMeshes(alpha, false);
+    renderMeshes(alpha, forShadowMap, false);

     glDisable(GL_ALPHA_TEST);

     // render translucent meshes afterwards, with back face culling

-    renderMeshes(alpha, true);
+    renderMeshes(alpha, forShadowMap, true);

     glDisable(GL_CULL_FACE);

@@ -960,7 +976,7 @@ void Model::deleteGeometry() {
     }
 }

-void Model::renderMeshes(float alpha, bool translucent) {
+void Model::renderMeshes(float alpha, bool forShadowMap, bool translucent) {
     const FBXGeometry& geometry = _geometry->getFBXGeometry();
     const QVector<NetworkMesh>& networkMeshes = _geometry->getMeshes();

@@ -985,7 +1001,12 @@ void Model::renderMeshes(float alpha, bool translucent) {
         ProgramObject* program = &_program;
         ProgramObject* skinProgram = &_skinProgram;
         SkinLocations* skinLocations = &_skinLocations;
-        if (!mesh.tangents.isEmpty()) {
+        if (forShadowMap) {
+            program = &_shadowProgram;
+            skinProgram = &_skinShadowProgram;
+            skinLocations = &_skinShadowLocations;
+
+        } else if (!mesh.tangents.isEmpty()) {
             program = &_normalMapProgram;
             skinProgram = &_skinNormalMapProgram;
             skinLocations = &_skinNormalMapLocations;

@@ -1018,7 +1039,7 @@ void Model::renderMeshes(float alpha, bool translucent) {
         }

         if (mesh.blendshapes.isEmpty()) {
-            if (!mesh.tangents.isEmpty()) {
+            if (!(mesh.tangents.isEmpty() || forShadowMap)) {
                 activeProgram->setAttributeBuffer(tangentLocation, GL_FLOAT, vertexCount * 2 * sizeof(glm::vec3), 3);
                 activeProgram->enableAttributeArray(tangentLocation);
             }

@@ -1028,7 +1049,7 @@ void Model::renderMeshes(float alpha, bool translucent) {
                 (mesh.tangents.size() + mesh.colors.size()) * sizeof(glm::vec3)));

         } else {
-            if (!mesh.tangents.isEmpty()) {
+            if (!(mesh.tangents.isEmpty() || forShadowMap)) {
                 activeProgram->setAttributeBuffer(tangentLocation, GL_FLOAT, 0, 3);
                 activeProgram->enableAttributeArray(tangentLocation);
             }

@@ -1057,31 +1078,33 @@ void Model::renderMeshes(float alpha, bool translucent) {
                 continue;
             }
             // apply material properties
-            glm::vec4 diffuse = glm::vec4(part.diffuseColor, alpha);
-            glm::vec4 specular = glm::vec4(part.specularColor, alpha);
-            glMaterialfv(GL_FRONT, GL_AMBIENT, (const float*)&diffuse);
-            glMaterialfv(GL_FRONT, GL_DIFFUSE, (const float*)&diffuse);
-            glMaterialfv(GL_FRONT, GL_SPECULAR, (const float*)&specular);
-            glMaterialf(GL_FRONT, GL_SHININESS, part.shininess);
-
-            Texture* diffuseMap = networkPart.diffuseTexture.data();
-            if (mesh.isEye) {
-                if (diffuseMap) {
+            if (forShadowMap) {
+                glBindTexture(GL_TEXTURE_2D, 0);
+
+            } else {
+                glm::vec4 diffuse = glm::vec4(part.diffuseColor, alpha);
+                glm::vec4 specular = glm::vec4(part.specularColor, alpha);
+                glMaterialfv(GL_FRONT, GL_AMBIENT, (const float*)&diffuse);
+                glMaterialfv(GL_FRONT, GL_DIFFUSE, (const float*)&diffuse);
+                glMaterialfv(GL_FRONT, GL_SPECULAR, (const float*)&specular);
+                glMaterialf(GL_FRONT, GL_SHININESS, part.shininess);
+
+                Texture* diffuseMap = networkPart.diffuseTexture.data();
+                if (mesh.isEye && diffuseMap) {
                     diffuseMap = (_dilatedTextures[i][j] =
                         static_cast<DilatableNetworkTexture*>(diffuseMap)->getDilatedTexture(_pupilDilation)).data();
                 }
+                glBindTexture(GL_TEXTURE_2D, !diffuseMap ?
+                    Application::getInstance()->getTextureCache()->getWhiteTextureID() : diffuseMap->getID());
+
+                if (!mesh.tangents.isEmpty()) {
+                    glActiveTexture(GL_TEXTURE1);
+                    Texture* normalMap = networkPart.normalTexture.data();
+                    glBindTexture(GL_TEXTURE_2D, !normalMap ?
+                        Application::getInstance()->getTextureCache()->getBlueTextureID() : normalMap->getID());
+                    glActiveTexture(GL_TEXTURE0);
+                }
             }
-            glBindTexture(GL_TEXTURE_2D, !diffuseMap ?
-                Application::getInstance()->getTextureCache()->getWhiteTextureID() : diffuseMap->getID());
-
-            if (!mesh.tangents.isEmpty()) {
-                glActiveTexture(GL_TEXTURE1);
-                Texture* normalMap = networkPart.normalTexture.data();
-                glBindTexture(GL_TEXTURE_2D, !normalMap ?
-                    Application::getInstance()->getTextureCache()->getBlueTextureID() : normalMap->getID());
-                glActiveTexture(GL_TEXTURE0);
-            }

             glDrawRangeElementsEXT(GL_QUADS, 0, vertexCount - 1, part.quadIndices.size(), GL_UNSIGNED_INT, (void*)offset);
             offset += part.quadIndices.size() * sizeof(int);
             glDrawRangeElementsEXT(GL_TRIANGLES, 0, vertexCount - 1, part.triangleIndices.size(),

@@ -1096,7 +1119,7 @@ void Model::renderMeshes(float alpha, bool translucent) {
             glDisableClientState(GL_TEXTURE_COORD_ARRAY);
         }

-        if (!mesh.tangents.isEmpty()) {
+        if (!(mesh.tangents.isEmpty() || forShadowMap)) {
             glActiveTexture(GL_TEXTURE1);
             glBindTexture(GL_TEXTURE_2D, 0);
             glActiveTexture(GL_TEXTURE0);
@@ -58,7 +58,7 @@ public:
     void createCollisionShapes();
     void updateShapePositions();
     void simulate(float deltaTime, bool fullUpdate = true);
-    bool render(float alpha);
+    bool render(float alpha = 1.0f, bool forShadowMap = false);

     /// Sets the URL of the model to render.
     /// \param fallback the URL of a fallback model to render if the requested model fails to load
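The defaulted arguments above keep existing render() call sites compiling while letting a shadow pass reuse the same entry point. A minimal caller-side sketch, assuming a Model instance named model; the two-pass structure here is illustrative and not taken from this diff:

    // Shadow pass first: depth only, so materials and textures are skipped.
    model.render(1.0f, true);    // forShadowMap = true

    // Normal pass: full material, diffuse-map and normal-map setup.
    model.render(alpha);         // forShadowMap defaults to false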
@@ -261,7 +261,7 @@ private:

     void applyNextGeometry();
     void deleteGeometry();
-    void renderMeshes(float alpha, bool translucent);
+    void renderMeshes(float alpha, bool forShadowMap, bool translucent);

     QSharedPointer<NetworkGeometry> _baseGeometry; ///< reference required to prevent collection of base
     QSharedPointer<NetworkGeometry> _nextBaseGeometry;
@@ -283,8 +283,10 @@ private:

     static ProgramObject _program;
     static ProgramObject _normalMapProgram;
+    static ProgramObject _shadowProgram;
     static ProgramObject _skinProgram;
     static ProgramObject _skinNormalMapProgram;
+    static ProgramObject _skinShadowProgram;

     static int _normalMapTangentLocation;

@@ -298,6 +300,7 @@ private:

     static SkinLocations _skinLocations;
     static SkinLocations _skinNormalMapLocations;
+    static SkinLocations _skinShadowLocations;

     static void initSkinProgram(ProgramObject& program, SkinLocations& locations);
     static QVector<JointState> createJointStates(const FBXGeometry& geometry);
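The new _shadowProgram, _skinShadowProgram, and _skinShadowLocations members imply that renderMeshes() now picks a depth-only program when forShadowMap is set. The actual branching in the .cpp is not part of this excerpt; the sketch below is a hedged illustration of the selection those members suggest, with isSkinned standing in for however the code distinguishes skinned meshes:

    // Illustrative only: choose a program for one mesh; not the literal code from this commit.
    ProgramObject* program;
    if (forShadowMap) {
        program = isSkinned ? &_skinShadowProgram : &_shadowProgram;        // depth-only pass
    } else if (!mesh.tangents.isEmpty()) {
        program = isSkinned ? &_skinNormalMapProgram : &_normalMapProgram;  // tangents present: normal mapping
    } else {
        program = isSkinned ? &_skinProgram : &_program;
    }
    program->bind();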
interface/src/scripting/AudioDeviceScriptingInterface.cpp (new file)
@@ -0,0 +1,69 @@
//
//  AudioDeviceScriptingInterface.cpp
//  hifi
//
//  Created by Brad Hefta-Gaub on 3/23/14
//  Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
//

#include "Application.h"
#include "AudioDeviceScriptingInterface.h"


AudioDeviceScriptingInterface* AudioDeviceScriptingInterface::getInstance() {
    static AudioDeviceScriptingInterface sharedInstance;
    return &sharedInstance;
}

bool AudioDeviceScriptingInterface::setInputDevice(const QString& deviceName) {
    bool result;
    QMetaObject::invokeMethod(Application::getInstance()->getAudio(), "switchInputToAudioDevice",
        Qt::BlockingQueuedConnection,
        Q_RETURN_ARG(bool, result),
        Q_ARG(const QString&, deviceName));

    return result;
}

bool AudioDeviceScriptingInterface::setOutputDevice(const QString& deviceName) {
    bool result;
    QMetaObject::invokeMethod(Application::getInstance()->getAudio(), "switchOutputToAudioDevice",
        Qt::BlockingQueuedConnection,
        Q_RETURN_ARG(bool, result),
        Q_ARG(const QString&, deviceName));

    return result;
}

QString AudioDeviceScriptingInterface::getInputDevice() {
    return Application::getInstance()->getAudio()->getDeviceName(QAudio::AudioInput);
}

QString AudioDeviceScriptingInterface::getOutputDevice() {
    return Application::getInstance()->getAudio()->getDeviceName(QAudio::AudioOutput);
}

QString AudioDeviceScriptingInterface::getDefaultInputDevice() {
    return Application::getInstance()->getAudio()->getDefaultDeviceName(QAudio::AudioInput);
}

QString AudioDeviceScriptingInterface::getDefaultOutputDevice() {
    return Application::getInstance()->getAudio()->getDefaultDeviceName(QAudio::AudioOutput);
}

QVector<QString> AudioDeviceScriptingInterface::getInputDevices() {
    return Application::getInstance()->getAudio()->getDeviceNames(QAudio::AudioInput);
}

QVector<QString> AudioDeviceScriptingInterface::getOutputDevices() {
    return Application::getInstance()->getAudio()->getDeviceNames(QAudio::AudioOutput);
}


float AudioDeviceScriptingInterface::getInputVolume() {
    return Application::getInstance()->getAudio()->getInputVolume();
}

void AudioDeviceScriptingInterface::setInputVolume(float volume) {
    Application::getInstance()->getAudio()->setInputVolume(volume);
}
interface/src/scripting/AudioDeviceScriptingInterface.h (new file)
@@ -0,0 +1,41 @@
//
//  AudioDeviceScriptingInterface.h
//  hifi
//
//  Created by Brad Hefta-Gaub on 3/22/14
//  Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
//

#ifndef __hifi__AudioDeviceScriptingInterface__
#define __hifi__AudioDeviceScriptingInterface__

#include <QDebug>
#include <QObject>
#include <QString>

#include "Application.h"

class AudioDeviceScriptingInterface : public QObject {
    Q_OBJECT
    AudioDeviceScriptingInterface() { };
public:
    static AudioDeviceScriptingInterface* getInstance();

public slots:
    bool setInputDevice(const QString& deviceName);
    bool setOutputDevice(const QString& deviceName);

    QString getInputDevice();
    QString getOutputDevice();

    QString getDefaultInputDevice();
    QString getDefaultOutputDevice();

    QVector<QString> getInputDevices();
    QVector<QString> getOutputDevices();

    float getInputVolume();
    void setInputVolume(float volume);
};

#endif /* defined(__hifi__AudioDeviceScriptingInterface__) */
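A QObject singleton whose whole API is public slots is exactly the shape Qt Script can wrap directly. Where and under what name this interface is registered with the script engine is not shown in this excerpt; a hedged sketch of the usual registration, with the property name "AudioDevice" as an assumption:

    // Hypothetical registration; the property name and call site are assumptions.
    #include <QScriptEngine>
    #include "AudioDeviceScriptingInterface.h"

    void registerAudioDeviceInterface(QScriptEngine& engine) {
        QScriptValue wrapper = engine.newQObject(AudioDeviceScriptingInterface::getInstance());
        engine.globalObject().setProperty("AudioDevice", wrapper);
        // Scripts could then call the public slots, e.g. AudioDevice.getOutputDevices().
    }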
interface/src/scripting/SettingsScriptingInterface.cpp (new file)
@@ -0,0 +1,36 @@
//
//  SettingsScriptingInterface.cpp
//  hifi
//
//  Created by Brad Hefta-Gaub on 2/25/14
//  Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
//

#include "Application.h"
#include "SettingsScriptingInterface.h"


SettingsScriptingInterface* SettingsScriptingInterface::getInstance() {
    static SettingsScriptingInterface sharedInstance;
    return &sharedInstance;
}

QVariant SettingsScriptingInterface::getValue(const QString& setting) {
    QSettings* settings = Application::getInstance()->lockSettings();
    QVariant value = settings->value(setting);
    Application::getInstance()->unlockSettings();
    return value;
}

QVariant SettingsScriptingInterface::getValue(const QString& setting, const QVariant& defaultValue) {
    QSettings* settings = Application::getInstance()->lockSettings();
    QVariant value = settings->value(setting, defaultValue);
    Application::getInstance()->unlockSettings();
    return value;
}

void SettingsScriptingInterface::setValue(const QString& setting, const QVariant& value) {
    QSettings* settings = Application::getInstance()->lockSettings();
    settings->setValue(setting, value);
    Application::getInstance()->unlockSettings();
}
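Every accessor above brackets the QSettings access with lockSettings()/unlockSettings(), and the InfoView change later in this diff restructures shouldShow() so the unlock cannot be skipped by an early return. A hypothetical RAII guard, not part of this commit, would make that guarantee automatic:

    // Hypothetical helper built on the lockSettings()/unlockSettings() API from this diff.
    class ScopedSettings {
    public:
        ScopedSettings() : _settings(Application::getInstance()->lockSettings()) {}
        ~ScopedSettings() { Application::getInstance()->unlockSettings(); }
        QSettings* operator->() const { return _settings; }
    private:
        QSettings* _settings;
    };

    // Usage sketch: the destructor releases the settings lock even on early return.
    // ScopedSettings settings;
    // QVariant value = settings->value("someSetting");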
interface/src/scripting/SettingsScriptingInterface.h (new file)
@@ -0,0 +1,30 @@
//
//  SettingsScriptingInterface.h
//  hifi
//
//  Created by Brad Hefta-Gaub on 3/22/14
//  Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
//

#ifndef __hifi__SettingsScriptingInterface__
#define __hifi__SettingsScriptingInterface__

#include <QDebug>
#include <QObject>
#include <QString>

#include "Application.h"

class SettingsScriptingInterface : public QObject {
    Q_OBJECT
    SettingsScriptingInterface() { };
public:
    static SettingsScriptingInterface* getInstance();

public slots:
    QVariant getValue(const QString& setting);
    QVariant getValue(const QString& setting, const QVariant& defaultValue);
    void setValue(const QString& setting, const QVariant& value);
};

#endif /* defined(__hifi__SettingsScriptingInterface__) */
@@ -38,11 +38,12 @@ void InfoView::forcedShow() {
 }

 bool InfoView::shouldShow() {
+    bool shouldShow = false;
     if (_forced) {
         return true;
     }

-    QSettings* settings = Application::getInstance()->getSettings();
+    QSettings* settings = Application::getInstance()->lockSettings();

     QString lastVersion = settings->value(SETTINGS_VERSION_KEY).toString();

@@ -51,10 +52,12 @@ bool InfoView::shouldShow() {

     if (version != QString::null && (lastVersion == QString::null || lastVersion != version)) {
         settings->setValue(SETTINGS_VERSION_KEY, version);
-        return true;
+        shouldShow = true;
     } else {
-        return false;
+        shouldShow = false;
     }
+    Application::getInstance()->unlockSettings();
+    return shouldShow;
 }

 void InfoView::loaded(bool ok) {
@@ -12,7 +12,6 @@
 #include <SharedUtil.h>

 #include "Base3DOverlay.h"
-#include "TextRenderer.h"

 const glm::vec3 DEFAULT_POSITION = glm::vec3(0.0f, 0.0f, 0.0f);
 const float DEFAULT_LINE_WIDTH = 1.0f;
@@ -12,10 +12,10 @@
 #include <QGLWidget>
 #include <QScriptValue>

-#include <VoxelSystem.h>
 #include <Application.h>

 #include "LocalVoxelsOverlay.h"
+#include "voxels/VoxelSystem.h"

 QMap<QString, WeakVoxelSystemPointer> LocalVoxelsOverlay::_voxelSystemMap;
Some files were not shown because too many files have changed in this diff.