
Merge branch 'master' of git://github.com/highfidelity/hifi into 19644

Conflicts:
	interface/src/Application.cpp
	interface/src/Application.h
	interface/src/ui/PreferencesDialog.cpp
	interface/src/ui/RunningScriptsWidget.cpp
	interface/ui/preferencesDialog.ui
	interface/ui/runningScriptsWidget.ui
Ryan Huffman 2014-06-11 13:30:47 -07:00
commit 355fe01490
217 changed files with 11672 additions and 2577 deletions
Changed directories: assignment-client/src, domain-server/src, examples, interface


@@ -213,6 +213,8 @@ void Agent::run() {
loop.exec();
// let the AvatarData and ResourceCache classes use our QNetworkAccessManager
AvatarData::setNetworkAccessManager(networkManager);
ResourceCache::setNetworkAccessManager(networkManager);


@@ -173,134 +173,151 @@ void AudioMixer::addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuf
weakChannelAmplitudeRatio = 1 - (PHASE_AMPLITUDE_RATIO_AT_90 * sinRatio);
}
}
// if the bearing relative angle to source is > 0 then the delayed channel is the right one
int delayedChannelOffset = (bearingRelativeAngleToSource > 0.0f) ? 1 : 0;
int goodChannelOffset = delayedChannelOffset == 0 ? 1 : 0;
const int16_t* nextOutputStart = bufferToAdd->getNextOutput();
const int16_t* bufferStart = bufferToAdd->getBuffer();
int ringBufferSampleCapacity = bufferToAdd->getSampleCapacity();
int16_t correctBufferSample[2], delayBufferSample[2];
int delayedChannelIndex = 0;
const int SINGLE_STEREO_OFFSET = 2;
for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s += 4) {
if (!bufferToAdd->isStereo()) {
// this is a mono buffer, which means it gets full attenuation and spatialization
// setup the int16_t variables for the two sample sets
correctBufferSample[0] = nextOutputStart[s / 2] * attenuationCoefficient;
correctBufferSample[1] = nextOutputStart[(s / 2) + 1] * attenuationCoefficient;
// if the bearing relative angle to source is > 0 then the delayed channel is the right one
int delayedChannelOffset = (bearingRelativeAngleToSource > 0.0f) ? 1 : 0;
int goodChannelOffset = delayedChannelOffset == 0 ? 1 : 0;
delayedChannelIndex = s + (numSamplesDelay * 2) + delayedChannelOffset;
const int16_t* bufferStart = bufferToAdd->getBuffer();
int ringBufferSampleCapacity = bufferToAdd->getSampleCapacity();
delayBufferSample[0] = correctBufferSample[0] * weakChannelAmplitudeRatio;
delayBufferSample[1] = correctBufferSample[1] * weakChannelAmplitudeRatio;
int16_t correctBufferSample[2], delayBufferSample[2];
int delayedChannelIndex = 0;
__m64 bufferSamples = _mm_set_pi16(_clientSamples[s + goodChannelOffset],
_clientSamples[s + goodChannelOffset + SINGLE_STEREO_OFFSET],
_clientSamples[delayedChannelIndex],
_clientSamples[delayedChannelIndex + SINGLE_STEREO_OFFSET]);
__m64 addedSamples = _mm_set_pi16(correctBufferSample[0], correctBufferSample[1],
delayBufferSample[0], delayBufferSample[1]);
const int SINGLE_STEREO_OFFSET = 2;
// perform the MMX add (with saturation) of two correct and delayed samples
__m64 mmxResult = _mm_adds_pi16(bufferSamples, addedSamples);
int16_t* shortResults = reinterpret_cast<int16_t*>(&mmxResult);
// assign the results from the result of the mmx arithmetic
_clientSamples[s + goodChannelOffset] = shortResults[3];
_clientSamples[s + goodChannelOffset + SINGLE_STEREO_OFFSET] = shortResults[2];
_clientSamples[delayedChannelIndex] = shortResults[1];
_clientSamples[delayedChannelIndex + SINGLE_STEREO_OFFSET] = shortResults[0];
}
// The following code is pretty gross and redundant, but AFAIK it's the best way to avoid
// too many conditionals in handling the delay samples at the beginning of _clientSamples.
// Basically we try to take the samples in batches of four, and then handle the remainder
// conditionally to get rid of the rest.
const int DOUBLE_STEREO_OFFSET = 4;
const int TRIPLE_STEREO_OFFSET = 6;
if (numSamplesDelay > 0) {
// if there was a sample delay for this buffer, we need to pull samples prior to the nextOutput
// to stick at the beginning
float attenuationAndWeakChannelRatio = attenuationCoefficient * weakChannelAmplitudeRatio;
const int16_t* delayNextOutputStart = nextOutputStart - numSamplesDelay;
if (delayNextOutputStart < bufferStart) {
delayNextOutputStart = bufferStart + ringBufferSampleCapacity - numSamplesDelay;
for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s += 4) {
// setup the int16_t variables for the two sample sets
correctBufferSample[0] = nextOutputStart[s / 2] * attenuationCoefficient;
correctBufferSample[1] = nextOutputStart[(s / 2) + 1] * attenuationCoefficient;
delayedChannelIndex = s + (numSamplesDelay * 2) + delayedChannelOffset;
delayBufferSample[0] = correctBufferSample[0] * weakChannelAmplitudeRatio;
delayBufferSample[1] = correctBufferSample[1] * weakChannelAmplitudeRatio;
__m64 bufferSamples = _mm_set_pi16(_clientSamples[s + goodChannelOffset],
_clientSamples[s + goodChannelOffset + SINGLE_STEREO_OFFSET],
_clientSamples[delayedChannelIndex],
_clientSamples[delayedChannelIndex + SINGLE_STEREO_OFFSET]);
__m64 addedSamples = _mm_set_pi16(correctBufferSample[0], correctBufferSample[1],
delayBufferSample[0], delayBufferSample[1]);
// perform the MMX add (with saturation) of two correct and delayed samples
__m64 mmxResult = _mm_adds_pi16(bufferSamples, addedSamples);
int16_t* shortResults = reinterpret_cast<int16_t*>(&mmxResult);
// assign the results from the result of the mmx arithmetic
_clientSamples[s + goodChannelOffset] = shortResults[3];
_clientSamples[s + goodChannelOffset + SINGLE_STEREO_OFFSET] = shortResults[2];
_clientSamples[delayedChannelIndex] = shortResults[1];
_clientSamples[delayedChannelIndex + SINGLE_STEREO_OFFSET] = shortResults[0];
}
int i = 0;
// The following code is pretty gross and redundant, but AFAIK it's the best way to avoid
// too many conditionals in handling the delay samples at the beginning of _clientSamples.
// Basically we try to take the samples in batches of four, and then handle the remainder
// conditionally to get rid of the rest.
while (i + 3 < numSamplesDelay) {
// handle the first cases where we can MMX add four samples at once
const int DOUBLE_STEREO_OFFSET = 4;
const int TRIPLE_STEREO_OFFSET = 6;
if (numSamplesDelay > 0) {
// if there was a sample delay for this buffer, we need to pull samples prior to the nextOutput
// to stick at the beginning
float attenuationAndWeakChannelRatio = attenuationCoefficient * weakChannelAmplitudeRatio;
const int16_t* delayNextOutputStart = nextOutputStart - numSamplesDelay;
if (delayNextOutputStart < bufferStart) {
delayNextOutputStart = bufferStart + ringBufferSampleCapacity - numSamplesDelay;
}
int i = 0;
while (i + 3 < numSamplesDelay) {
// handle the first cases where we can MMX add four samples at once
int parentIndex = i * 2;
__m64 bufferSamples = _mm_set_pi16(_clientSamples[parentIndex + delayedChannelOffset],
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset],
_clientSamples[parentIndex + DOUBLE_STEREO_OFFSET + delayedChannelOffset],
_clientSamples[parentIndex + TRIPLE_STEREO_OFFSET + delayedChannelOffset]);
__m64 addSamples = _mm_set_pi16(delayNextOutputStart[i] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 1] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 2] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 3] * attenuationAndWeakChannelRatio);
__m64 mmxResult = _mm_adds_pi16(bufferSamples, addSamples);
int16_t* shortResults = reinterpret_cast<int16_t*>(&mmxResult);
_clientSamples[parentIndex + delayedChannelOffset] = shortResults[3];
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[2];
_clientSamples[parentIndex + DOUBLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[1];
_clientSamples[parentIndex + TRIPLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[0];
// push the index
i += 4;
}
int parentIndex = i * 2;
__m64 bufferSamples = _mm_set_pi16(_clientSamples[parentIndex + delayedChannelOffset],
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset],
_clientSamples[parentIndex + DOUBLE_STEREO_OFFSET + delayedChannelOffset],
_clientSamples[parentIndex + TRIPLE_STEREO_OFFSET + delayedChannelOffset]);
__m64 addSamples = _mm_set_pi16(delayNextOutputStart[i] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 1] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 2] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 3] * attenuationAndWeakChannelRatio);
__m64 mmxResult = _mm_adds_pi16(bufferSamples, addSamples);
int16_t* shortResults = reinterpret_cast<int16_t*>(&mmxResult);
_clientSamples[parentIndex + delayedChannelOffset] = shortResults[3];
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[2];
_clientSamples[parentIndex + DOUBLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[1];
_clientSamples[parentIndex + TRIPLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[0];
// push the index
i += 4;
if (i + 2 < numSamplesDelay) {
// MMX add only three delayed samples
__m64 bufferSamples = _mm_set_pi16(_clientSamples[parentIndex + delayedChannelOffset],
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset],
_clientSamples[parentIndex + DOUBLE_STEREO_OFFSET + delayedChannelOffset],
0);
__m64 addSamples = _mm_set_pi16(delayNextOutputStart[i] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 1] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 2] * attenuationAndWeakChannelRatio,
0);
__m64 mmxResult = _mm_adds_pi16(bufferSamples, addSamples);
int16_t* shortResults = reinterpret_cast<int16_t*>(&mmxResult);
_clientSamples[parentIndex + delayedChannelOffset] = shortResults[3];
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[2];
_clientSamples[parentIndex + DOUBLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[1];
} else if (i + 1 < numSamplesDelay) {
// MMX add two delayed samples
__m64 bufferSamples = _mm_set_pi16(_clientSamples[parentIndex + delayedChannelOffset],
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset], 0, 0);
__m64 addSamples = _mm_set_pi16(delayNextOutputStart[i] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 1] * attenuationAndWeakChannelRatio, 0, 0);
__m64 mmxResult = _mm_adds_pi16(bufferSamples, addSamples);
int16_t* shortResults = reinterpret_cast<int16_t*>(&mmxResult);
_clientSamples[parentIndex + delayedChannelOffset] = shortResults[3];
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[2];
} else if (i < numSamplesDelay) {
// MMX add a single delayed sample
__m64 bufferSamples = _mm_set_pi16(_clientSamples[parentIndex + delayedChannelOffset], 0, 0, 0);
__m64 addSamples = _mm_set_pi16(delayNextOutputStart[i] * attenuationAndWeakChannelRatio, 0, 0, 0);
__m64 mmxResult = _mm_adds_pi16(bufferSamples, addSamples);
int16_t* shortResults = reinterpret_cast<int16_t*>(&mmxResult);
_clientSamples[parentIndex + delayedChannelOffset] = shortResults[3];
}
}
int parentIndex = i * 2;
if (i + 2 < numSamplesDelay) {
// MMX add only three delayed samples
__m64 bufferSamples = _mm_set_pi16(_clientSamples[parentIndex + delayedChannelOffset],
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset],
_clientSamples[parentIndex + DOUBLE_STEREO_OFFSET + delayedChannelOffset],
0);
__m64 addSamples = _mm_set_pi16(delayNextOutputStart[i] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 1] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 2] * attenuationAndWeakChannelRatio,
0);
__m64 mmxResult = _mm_adds_pi16(bufferSamples, addSamples);
int16_t* shortResults = reinterpret_cast<int16_t*>(&mmxResult);
_clientSamples[parentIndex + delayedChannelOffset] = shortResults[3];
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[2];
_clientSamples[parentIndex + DOUBLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[1];
} else if (i + 1 < numSamplesDelay) {
// MMX add two delayed samples
__m64 bufferSamples = _mm_set_pi16(_clientSamples[parentIndex + delayedChannelOffset],
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset], 0, 0);
__m64 addSamples = _mm_set_pi16(delayNextOutputStart[i] * attenuationAndWeakChannelRatio,
delayNextOutputStart[i + 1] * attenuationAndWeakChannelRatio, 0, 0);
__m64 mmxResult = _mm_adds_pi16(bufferSamples, addSamples);
int16_t* shortResults = reinterpret_cast<int16_t*>(&mmxResult);
_clientSamples[parentIndex + delayedChannelOffset] = shortResults[3];
_clientSamples[parentIndex + SINGLE_STEREO_OFFSET + delayedChannelOffset] = shortResults[2];
} else if (i < numSamplesDelay) {
// MMX add a single delayed sample
__m64 bufferSamples = _mm_set_pi16(_clientSamples[parentIndex + delayedChannelOffset], 0, 0, 0);
__m64 addSamples = _mm_set_pi16(delayNextOutputStart[i] * attenuationAndWeakChannelRatio, 0, 0, 0);
__m64 mmxResult = _mm_adds_pi16(bufferSamples, addSamples);
int16_t* shortResults = reinterpret_cast<int16_t*>(&mmxResult);
_clientSamples[parentIndex + delayedChannelOffset] = shortResults[3];
} else {
// stereo buffer - do attenuation but no sample delay for spatialization
for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s += 4) {
// use MMX to clamp four additions at a time
_clientSamples[s] = glm::clamp(_clientSamples[s] + (int) (nextOutputStart[s] * attenuationCoefficient),
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
_clientSamples[s + 1] = glm::clamp(_clientSamples[s + 1] + (int) (nextOutputStart[s + 1] * attenuationCoefficient),
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
_clientSamples[s + 2] = glm::clamp(_clientSamples[s + 2] + (int) (nextOutputStart[s + 2] * attenuationCoefficient),
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
_clientSamples[s + 3] = glm::clamp(_clientSamples[s + 3] + (int) (nextOutputStart[s + 3] * attenuationCoefficient),
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
}
}
}
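Note on the intrinsics above: `_mm_adds_pi16` adds four pairs of signed 16-bit samples with saturation, so a hot mix clamps at the int16 limits instead of wrapping. Also, `_mm_set_pi16(e3, e2, e1, e0)` places its first argument in the highest lane, which is why the results are read back through `shortResults[3]` down to `shortResults[0]`. A minimal scalar sketch of what one lane does (hypothetical helper, not part of this commit):

#include <algorithm>
#include <cstdint>

// Scalar equivalent of a single _mm_adds_pi16 lane: a signed 16-bit add
// that saturates at INT16_MIN/INT16_MAX instead of wrapping around.
static inline int16_t addSaturated(int16_t a, int16_t b) {
    int32_t sum = int32_t(a) + int32_t(b);
    return int16_t(std::min<int32_t>(INT16_MAX, std::max<int32_t>(INT16_MIN, sum)));
}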


@@ -50,10 +50,22 @@ int AudioMixerClientData::parseData(const QByteArray& packet) {
// grab the AvatarAudioRingBuffer from the vector (or create it if it doesn't exist)
AvatarAudioRingBuffer* avatarRingBuffer = getAvatarAudioRingBuffer();
// read the first byte after the header to see if this is a stereo or mono buffer
quint8 channelFlag = packet.at(numBytesForPacketHeader(packet));
bool isStereo = channelFlag == 1;
if (avatarRingBuffer && avatarRingBuffer->isStereo() != isStereo) {
// there's a mismatch in the buffer channels for the incoming and current buffer
// so delete our current buffer and create a new one
_ringBuffers.removeOne(avatarRingBuffer);
avatarRingBuffer->deleteLater();
avatarRingBuffer = NULL;
}
if (!avatarRingBuffer) {
// we don't have an AvatarAudioRingBuffer yet, so add it
avatarRingBuffer = new AvatarAudioRingBuffer();
avatarRingBuffer = new AvatarAudioRingBuffer(isStereo);
_ringBuffers.push_back(avatarRingBuffer);
}
@@ -106,7 +118,8 @@ void AudioMixerClientData::pushBuffersAfterFrameSend() {
PositionalAudioRingBuffer* audioBuffer = _ringBuffers[i];
if (audioBuffer->willBeAddedToMix()) {
audioBuffer->shiftReadPosition(NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
audioBuffer->shiftReadPosition(audioBuffer->isStereo()
? NETWORK_BUFFER_LENGTH_SAMPLES_STEREO : NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
audioBuffer->setWillBeAddedToMix(false);
} else if (audioBuffer->getType() == PositionalAudioRingBuffer::Injector
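The shiftReadPosition() change above branches on channel count because a stereo frame carries twice as many samples as a mono frame. A hedged sketch of that relationship (the concrete frame size is an assumption here; only the 2x ratio matters):

// Assumed per-channel frame size; the real constant lives elsewhere in the tree.
const int NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL = 512;
const int NETWORK_BUFFER_LENGTH_SAMPLES_STEREO =
    2 * NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL;

// How far a ring buffer's read position advances after one mixed frame.
int samplesPerFrame(bool isStereo) {
    return isStereo ? NETWORK_BUFFER_LENGTH_SAMPLES_STEREO
                    : NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL;
}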


@@ -24,14 +24,14 @@ public:
AudioMixerClientData();
~AudioMixerClientData();
const std::vector<PositionalAudioRingBuffer*> getRingBuffers() const { return _ringBuffers; }
const QList<PositionalAudioRingBuffer*> getRingBuffers() const { return _ringBuffers; }
AvatarAudioRingBuffer* getAvatarAudioRingBuffer() const;
int parseData(const QByteArray& packet);
void checkBuffersBeforeFrameSend(int jitterBufferLengthSamples);
void pushBuffersAfterFrameSend();
private:
std::vector<PositionalAudioRingBuffer*> _ringBuffers;
QList<PositionalAudioRingBuffer*> _ringBuffers;
};
#endif // hifi_AudioMixerClientData_h


@@ -13,8 +13,8 @@
#include "AvatarAudioRingBuffer.h"
AvatarAudioRingBuffer::AvatarAudioRingBuffer() :
PositionalAudioRingBuffer(PositionalAudioRingBuffer::Microphone) {
AvatarAudioRingBuffer::AvatarAudioRingBuffer(bool isStereo) :
PositionalAudioRingBuffer(PositionalAudioRingBuffer::Microphone, isStereo) {
}


@@ -18,7 +18,7 @@
class AvatarAudioRingBuffer : public PositionalAudioRingBuffer {
public:
AvatarAudioRingBuffer();
AvatarAudioRingBuffer(bool isStereo = false);
int parseData(const QByteArray& packet);
private:


@@ -86,7 +86,7 @@ bool ModelServer::hasSpecialPacketToSend(const SharedNodePointer& node) {
return shouldSendDeletedModels;
}
int ModelServer::sendSpecialPacket(const SharedNodePointer& node) {
int ModelServer::sendSpecialPacket(OctreeQueryNode* queryNode, const SharedNodePointer& node) {
unsigned char outputBuffer[MAX_PACKET_SIZE];
size_t packetLength = 0;
@@ -100,12 +100,13 @@ int ModelServer::sendSpecialPacket(const SharedNodePointer& node) {
// TODO: is it possible to send too many of these packets? what if you deleted 1,000,000 models?
while (hasMoreToSend) {
hasMoreToSend = tree->encodeModelsDeletedSince(deletedModelsSentAt,
hasMoreToSend = tree->encodeModelsDeletedSince(queryNode->getSequenceNumber(), deletedModelsSentAt,
outputBuffer, MAX_PACKET_SIZE, packetLength);
//qDebug() << "sending PacketType_MODEL_ERASE packetLength:" << packetLength;
NodeList::getInstance()->writeDatagram((char*) outputBuffer, packetLength, SharedNodePointer(node));
queryNode->packetSent(outputBuffer, packetLength);
}
nodeData->setLastDeletedModelsSentAt(deletePacketSentAt);


@@ -37,7 +37,7 @@ public:
// subclass may implement these method
virtual void beforeRun();
virtual bool hasSpecialPacketToSend(const SharedNodePointer& node);
virtual int sendSpecialPacket(const SharedNodePointer& node);
virtual int sendSpecialPacket(OctreeQueryNode* queryNode, const SharedNodePointer& node);
virtual void modelCreated(const ModelItem& newModel, const SharedNodePointer& senderNode);


@@ -41,7 +41,8 @@ OctreeQueryNode::OctreeQueryNode() :
_sequenceNumber(0),
_lastRootTimestamp(0),
_myPacketType(PacketTypeUnknown),
_isShuttingDown(false)
_isShuttingDown(false),
_sentPacketHistory(1000)
{
}
@@ -158,10 +159,11 @@ bool OctreeQueryNode::shouldSuppressDuplicatePacket() {
void OctreeQueryNode::init() {
_myPacketType = getMyPacketType();
resetOctreePacket(true); // don't bump sequence
resetOctreePacket(); // don't bump sequence
}
void OctreeQueryNode::resetOctreePacket(bool lastWasSurpressed) {
void OctreeQueryNode::resetOctreePacket() {
// if shutting down, return immediately
if (_isShuttingDown) {
return;
@@ -196,15 +198,12 @@ void OctreeQueryNode::resetOctreePacket(bool lastWasSurpressed) {
*flagsAt = flags;
_octreePacketAt += sizeof(OCTREE_PACKET_FLAGS);
_octreePacketAvailableBytes -= sizeof(OCTREE_PACKET_FLAGS);
// pack in sequence number
OCTREE_PACKET_SEQUENCE* sequenceAt = (OCTREE_PACKET_SEQUENCE*)_octreePacketAt;
*sequenceAt = _sequenceNumber;
_octreePacketAt += sizeof(OCTREE_PACKET_SEQUENCE);
_octreePacketAvailableBytes -= sizeof(OCTREE_PACKET_SEQUENCE);
if (!(lastWasSurpressed || _lastOctreePacketLength == (numBytesPacketHeader + OCTREE_PACKET_EXTRA_HEADERS_SIZE))) {
_sequenceNumber++;
}
// pack in timestamp
OCTREE_PACKET_SENT_TIME now = usecTimestampNow();
@@ -365,3 +364,44 @@ void OctreeQueryNode::dumpOutOfView() {
}
}
void OctreeQueryNode::octreePacketSent() {
packetSent(_octreePacket, getPacketLength());
}
void OctreeQueryNode::packetSent(unsigned char* packet, int packetLength) {
packetSent(QByteArray((char*)packet, packetLength));
}
void OctreeQueryNode::packetSent(const QByteArray& packet) {
_sentPacketHistory.packetSent(_sequenceNumber, packet);
_sequenceNumber++;
}
bool OctreeQueryNode::hasNextNackedPacket() const {
return !_nackedSequenceNumbers.isEmpty();
}
const QByteArray* OctreeQueryNode::getNextNackedPacket() {
if (!_nackedSequenceNumbers.isEmpty()) {
// could return null if packet is not in the history
return _sentPacketHistory.getPacket(_nackedSequenceNumbers.dequeue());
}
return NULL;
}
void OctreeQueryNode::parseNackPacket(QByteArray& packet) {
int numBytesPacketHeader = numBytesForPacketHeader(packet);
const unsigned char* dataAt = reinterpret_cast<const unsigned char*>(packet.data()) + numBytesPacketHeader;
// read number of sequence numbers
uint16_t numSequenceNumbers = (*(uint16_t*)dataAt);
dataAt += sizeof(uint16_t);
// read sequence numbers
for (int i = 0; i < numSequenceNumbers; i++) {
OCTREE_PACKET_SEQUENCE sequenceNumber = (*(OCTREE_PACKET_SEQUENCE*)dataAt);
_nackedSequenceNumbers.enqueue(sequenceNumber);
dataAt += sizeof(OCTREE_PACKET_SEQUENCE);
}
}
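The nack payload parsed above is, by all appearances, a packet header followed by a uint16 count and then that many sequence numbers. A self-contained sketch of the same read loop over just the payload portion (the typedef width is assumed to be uint16_t; memcpy replaces the raw pointer casts to sidestep alignment concerns, the wire layout is unchanged):

#include <QByteArray>
#include <QQueue>
#include <cstdint>
#include <cstring>

typedef uint16_t OCTREE_PACKET_SEQUENCE; // assumed width, matching the usage above

// Assumed nack wire format: [uint16 numSequenceNumbers][seq0][seq1]...
// (packet header already stripped by the caller).
void parseNackPayload(const QByteArray& payload, QQueue<OCTREE_PACKET_SEQUENCE>& out) {
    const char* dataAt = payload.constData();
    uint16_t numSequenceNumbers = 0;
    std::memcpy(&numSequenceNumbers, dataAt, sizeof(numSequenceNumbers));
    dataAt += sizeof(numSequenceNumbers);
    for (uint16_t i = 0; i < numSequenceNumbers; i++) {
        OCTREE_PACKET_SEQUENCE sequenceNumber;
        std::memcpy(&sequenceNumber, dataAt, sizeof(sequenceNumber));
        out.enqueue(sequenceNumber);
        dataAt += sizeof(sequenceNumber);
    }
}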


@@ -23,6 +23,8 @@
#include <OctreeQuery.h>
#include <OctreeSceneStats.h>
#include <ThreadedAssignment.h> // for SharedAssignmentPointer
#include "SentPacketHistory.h"
#include <qqueue.h>
class OctreeSendThread;
@@ -35,7 +37,7 @@ public:
void init(); // called after creation to set up some virtual items
virtual PacketType getMyPacketType() const = 0;
void resetOctreePacket(bool lastWasSurpressed = false); // resets octree packet to after "V" header
void resetOctreePacket(); // resets octree packet to after "V" header
void writeToPacket(const unsigned char* buffer, unsigned int bytes); // writes to end of packet
@@ -99,7 +101,17 @@ public:
void nodeKilled();
void forceNodeShutdown();
bool isShuttingDown() const { return _isShuttingDown; }
void octreePacketSent();
void packetSent(unsigned char* packet, int packetLength);
void packetSent(const QByteArray& packet);
OCTREE_PACKET_SEQUENCE getSequenceNumber() const { return _sequenceNumber; }
void parseNackPacket(QByteArray& packet);
bool hasNextNackedPacket() const;
const QByteArray* getNextNackedPacket();
private slots:
void sendThreadFinished();
@@ -135,12 +147,16 @@ private:
float _lastClientOctreeSizeScale;
bool _lodChanged;
bool _lodInitialized;
OCTREE_PACKET_SEQUENCE _sequenceNumber;
quint64 _lastRootTimestamp;
PacketType _myPacketType;
bool _isShuttingDown;
SentPacketHistory _sentPacketHistory;
QQueue<OCTREE_PACKET_SEQUENCE> _nackedSequenceNumbers;
};
#endif // hifi_OctreeQueryNode_h


@@ -9,8 +9,6 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QMutexLocker>
#include <NodeList.h>
#include <PacketHeaders.h>
#include <PerfStat.h>
@@ -87,6 +85,7 @@ bool OctreeSendThread::process() {
if (nodeData && !nodeData->isShuttingDown()) {
bool viewFrustumChanged = nodeData->updateCurrentViewFrustum();
packetDistributor(nodeData, viewFrustumChanged);
resendNackedPackets(nodeData);
}
}
}
@@ -139,18 +138,10 @@ int OctreeSendThread::handlePacketSend(OctreeQueryNode* nodeData, int& trueBytes
// obscure the packet and not send it. This allows the callers and upper level logic to not need to know about
// this rate control savings.
if (nodeData->shouldSuppressDuplicatePacket()) {
nodeData->resetOctreePacket(true); // we still need to reset it though!
nodeData->resetOctreePacket(); // we still need to reset it though!
return packetsSent; // without sending...
}
const unsigned char* messageData = nodeData->getPacket();
int numBytesPacketHeader = numBytesForPacketHeader(reinterpret_cast<const char*>(messageData));
const unsigned char* dataAt = messageData + numBytesPacketHeader;
dataAt += sizeof(OCTREE_PACKET_FLAGS);
OCTREE_PACKET_SEQUENCE sequence = (*(OCTREE_PACKET_SEQUENCE*)dataAt);
dataAt += sizeof(OCTREE_PACKET_SEQUENCE);
// If we've got a stats message ready to send, then see if we can piggyback them together
if (nodeData->stats.isReadyToSend() && !nodeData->isShuttingDown()) {
// Send the stats message to the client
@@ -172,7 +163,17 @@ int OctreeSendThread::handlePacketSend(OctreeQueryNode* nodeData, int& trueBytes
_totalBytes += nodeData->getPacketLength();
_totalPackets++;
if (debug) {
const unsigned char* messageData = nodeData->getPacket();
int numBytesPacketHeader = numBytesForPacketHeader(reinterpret_cast<const char*>(messageData));
const unsigned char* dataAt = messageData + numBytesPacketHeader;
dataAt += sizeof(OCTREE_PACKET_FLAGS);
OCTREE_PACKET_SEQUENCE sequence = (*(OCTREE_PACKET_SEQUENCE*)dataAt);
dataAt += sizeof(OCTREE_PACKET_SEQUENCE);
OCTREE_PACKET_SENT_TIME timestamp = (*(OCTREE_PACKET_SENT_TIME*)dataAt);
dataAt += sizeof(OCTREE_PACKET_SENT_TIME);
qDebug() << "Adding stats to packet at " << now << " [" << _totalPackets <<"]: sequence: " << sequence <<
" timestamp: " << timestamp <<
" statsMessageLength: " << statsMessageLength <<
" original size: " << nodeData->getPacketLength() << " [" << _totalBytes <<
"] wasted bytes:" << thisWastedBytes << " [" << _totalWastedBytes << "]";
@@ -194,7 +195,17 @@ int OctreeSendThread::handlePacketSend(OctreeQueryNode* nodeData, int& trueBytes
_totalBytes += statsMessageLength;
_totalPackets++;
if (debug) {
const unsigned char* messageData = nodeData->getPacket();
int numBytesPacketHeader = numBytesForPacketHeader(reinterpret_cast<const char*>(messageData));
const unsigned char* dataAt = messageData + numBytesPacketHeader;
dataAt += sizeof(OCTREE_PACKET_FLAGS);
OCTREE_PACKET_SEQUENCE sequence = (*(OCTREE_PACKET_SEQUENCE*)dataAt);
dataAt += sizeof(OCTREE_PACKET_SEQUENCE);
OCTREE_PACKET_SENT_TIME timestamp = (*(OCTREE_PACKET_SENT_TIME*)dataAt);
dataAt += sizeof(OCTREE_PACKET_SENT_TIME);
qDebug() << "Sending separate stats packet at " << now << " [" << _totalPackets <<"]: sequence: " << sequence <<
" timestamp: " << timestamp <<
" size: " << statsMessageLength << " [" << _totalBytes <<
"] wasted bytes:" << thisWastedBytes << " [" << _totalWastedBytes << "]";
}
@@ -204,8 +215,7 @@ int OctreeSendThread::handlePacketSend(OctreeQueryNode* nodeData, int& trueBytes
packetsSent++;
OctreeServer::didCallWriteDatagram(this);
NodeList::getInstance()->writeDatagram((char*) nodeData->getPacket(), nodeData->getPacketLength(), _node);
NodeList::getInstance()->writeDatagram((char*)nodeData->getPacket(), nodeData->getPacketLength(), _node);
packetSent = true;
thisWastedBytes = MAX_PACKET_SIZE - nodeData->getPacketLength();
@@ -213,7 +223,17 @@ int OctreeSendThread::handlePacketSend(OctreeQueryNode* nodeData, int& trueBytes
_totalBytes += nodeData->getPacketLength();
_totalPackets++;
if (debug) {
const unsigned char* messageData = nodeData->getPacket();
int numBytesPacketHeader = numBytesForPacketHeader(reinterpret_cast<const char*>(messageData));
const unsigned char* dataAt = messageData + numBytesPacketHeader;
dataAt += sizeof(OCTREE_PACKET_FLAGS);
OCTREE_PACKET_SEQUENCE sequence = (*(OCTREE_PACKET_SEQUENCE*)dataAt);
dataAt += sizeof(OCTREE_PACKET_SEQUENCE);
OCTREE_PACKET_SENT_TIME timestamp = (*(OCTREE_PACKET_SENT_TIME*)dataAt);
dataAt += sizeof(OCTREE_PACKET_SENT_TIME);
qDebug() << "Sending packet at " << now << " [" << _totalPackets <<"]: sequence: " << sequence <<
" timestamp: " << timestamp <<
" size: " << nodeData->getPacketLength() << " [" << _totalBytes <<
"] wasted bytes:" << thisWastedBytes << " [" << _totalWastedBytes << "]";
}
@@ -224,7 +244,7 @@ int OctreeSendThread::handlePacketSend(OctreeQueryNode* nodeData, int& trueBytes
if (nodeData->isPacketWaiting() && !nodeData->isShuttingDown()) {
// just send the voxel packet
OctreeServer::didCallWriteDatagram(this);
NodeList::getInstance()->writeDatagram((char*) nodeData->getPacket(), nodeData->getPacketLength(), _node);
NodeList::getInstance()->writeDatagram((char*)nodeData->getPacket(), nodeData->getPacketLength(), _node);
packetSent = true;
int thisWastedBytes = MAX_PACKET_SIZE - nodeData->getPacketLength();
@@ -232,7 +252,17 @@ int OctreeSendThread::handlePacketSend(OctreeQueryNode* nodeData, int& trueBytes
_totalBytes += nodeData->getPacketLength();
_totalPackets++;
if (debug) {
const unsigned char* messageData = nodeData->getPacket();
int numBytesPacketHeader = numBytesForPacketHeader(reinterpret_cast<const char*>(messageData));
const unsigned char* dataAt = messageData + numBytesPacketHeader;
dataAt += sizeof(OCTREE_PACKET_FLAGS);
OCTREE_PACKET_SEQUENCE sequence = (*(OCTREE_PACKET_SEQUENCE*)dataAt);
dataAt += sizeof(OCTREE_PACKET_SEQUENCE);
OCTREE_PACKET_SENT_TIME timestamp = (*(OCTREE_PACKET_SENT_TIME*)dataAt);
dataAt += sizeof(OCTREE_PACKET_SENT_TIME);
qDebug() << "Sending packet at " << now << " [" << _totalPackets <<"]: sequence: " << sequence <<
" timestamp: " << timestamp <<
" size: " << nodeData->getPacketLength() << " [" << _totalBytes <<
"] wasted bytes:" << thisWastedBytes << " [" << _totalWastedBytes << "]";
}
@@ -244,12 +274,33 @@ int OctreeSendThread::handlePacketSend(OctreeQueryNode* nodeData, int& trueBytes
trueBytesSent += nodeData->getPacketLength();
truePacketsSent++;
packetsSent++;
nodeData->octreePacketSent();
nodeData->resetOctreePacket();
}
return packetsSent;
}
int OctreeSendThread::resendNackedPackets(OctreeQueryNode* nodeData) {
const int MAX_PACKETS_RESEND = 10;
int packetsSent = 0;
const QByteArray* packet;
while (nodeData->hasNextNackedPacket() && packetsSent < MAX_PACKETS_RESEND) {
packet = nodeData->getNextNackedPacket();
if (packet) {
NodeList::getInstance()->writeDatagram(*packet, _node);
packetsSent++;
_totalBytes += packet->size();
_totalPackets++;
_totalWastedBytes += MAX_PACKET_SIZE - packet->size();
}
}
return packetsSent;
}
/// Version of voxel distributor that sends the deepest LOD level at once
int OctreeSendThread::packetDistributor(OctreeQueryNode* nodeData, bool viewFrustumChanged) {
@@ -530,7 +581,8 @@ int OctreeSendThread::packetDistributor(OctreeQueryNode* nodeData, bool viewFrus
// send the environment packet
// TODO: should we turn this into a while loop to better handle sending multiple special packets
if (_myServer->hasSpecialPacketToSend(_node) && !nodeData->isShuttingDown()) {
trueBytesSent += _myServer->sendSpecialPacket(_node);
trueBytesSent += _myServer->sendSpecialPacket(nodeData, _node);
nodeData->resetOctreePacket(); // because nodeData's _sequenceNumber has changed
truePacketsSent++;
packetsSentThisInterval++;
}


@@ -55,6 +55,9 @@ private:
int _nodeMissingCount;
bool _isShuttingDown;
int resendNackedPackets(OctreeQueryNode* nodeData);
};
#endif // hifi_OctreeSendThread_h


@@ -832,14 +832,13 @@ void OctreeServer::readPendingDatagrams() {
PacketType packetType = packetTypeForPacket(receivedPacket);
SharedNodePointer matchingNode = nodeList->sendingNodeForPacket(receivedPacket);
if (packetType == getMyQueryMessageType()) {
// If we got a query packet, then we're talking to an agent, and we
// need to make sure we have it in our nodeList.
if (matchingNode) {
nodeList->updateNodeWithDataFromPacket(matchingNode, receivedPacket);
OctreeQueryNode* nodeData = (OctreeQueryNode*) matchingNode->getLinkedData();
OctreeQueryNode* nodeData = (OctreeQueryNode*)matchingNode->getLinkedData();
if (nodeData && !nodeData->isOctreeSendThreadInitalized()) {
// NOTE: this is an important aspect of the proper ref counting. The send threads/node data need to
// know that the OctreeServer/Assignment will not get deleted on it while it's still active. The
// solution is to get the shared pointer for the current assignment. We need to make sure this is the
@@ -848,6 +847,16 @@ void OctreeServer::readPendingDatagrams() {
nodeData->initializeOctreeSendThread(sharedAssignment, matchingNode);
}
}
} else if (packetType == PacketTypeOctreeDataNack) {
// If we got a nack packet, then we're talking to an agent, and we
// need to make sure we have it in our nodeList.
if (matchingNode) {
nodeList->updateNodeWithDataFromPacket(matchingNode, receivedPacket);
OctreeQueryNode* nodeData = (OctreeQueryNode*)matchingNode->getLinkedData();
if (nodeData) {
nodeData->parseNackPacket(receivedPacket);
}
}
} else if (packetType == PacketTypeJurisdictionRequest) {
_jurisdictionSender->queueReceivedPacket(matchingNode, receivedPacket);
} else if (_octreeInboundPacketProcessor && getOctree()->handlesEditPacketType(packetType)) {


@@ -72,7 +72,7 @@ public:
// subclass may implement these method
virtual void beforeRun() { };
virtual bool hasSpecialPacketToSend(const SharedNodePointer& node) { return false; }
virtual int sendSpecialPacket(const SharedNodePointer& node) { return 0; }
virtual int sendSpecialPacket(OctreeQueryNode* queryNode, const SharedNodePointer& node) { return 0; }
static void attachQueryNodeToNode(Node* newNode);


@@ -0,0 +1,44 @@
//
// SentPacketHistory.cpp
// assignement-client/src/octree
//
// Created by Yixin Wang on 6/5/2014
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "SentPacketHistory.h"
SentPacketHistory::SentPacketHistory(int size)
: _sentPackets(size),
_newestPacketAt(0),
_numExistingPackets(0),
_newestSequenceNumber(0)
{
}
void SentPacketHistory::packetSent(OCTREE_PACKET_SEQUENCE sequenceNumber, const QByteArray& packet) {
_newestSequenceNumber = sequenceNumber;
// increment _newestPacketAt cyclically, insert new packet there.
// this will overwrite the oldest packet in the buffer
_newestPacketAt = (_newestPacketAt == _sentPackets.size() - 1) ? 0 : _newestPacketAt + 1;
_sentPackets[_newestPacketAt] = packet;
if (_numExistingPackets < _sentPackets.size()) {
_numExistingPackets++;
}
}
const QByteArray* SentPacketHistory::getPacket(OCTREE_PACKET_SEQUENCE sequenceNumber) const {
OCTREE_PACKET_SEQUENCE seqDiff = _newestSequenceNumber - sequenceNumber;
if (!(seqDiff >= 0 && seqDiff < _numExistingPackets)) {
return NULL;
}
int packetAt = _newestPacketAt - seqDiff;
if (packetAt < 0) {
packetAt += _sentPackets.size();
}
return &_sentPackets.at(packetAt);
}
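A hedged usage sketch of the history class above: each outgoing packet is recorded under its sequence number, and a later NACK looks the payload up again. Slots are reused cyclically, so only the most recent `size` packets remain reachable:

#include <QByteArray>
#include "SentPacketHistory.h"

void exampleHistoryUsage() {
    SentPacketHistory history(1000);
    for (OCTREE_PACKET_SEQUENCE seq = 0; seq < 5; seq++) {
        history.packetSent(seq, QByteArray(100, 'x')); // dummy payload
    }
    // Sequence 3 is one step behind the newest (4), so it is still held.
    const QByteArray* resend = history.getPacket(3);
    // A sequence number that was never recorded, or that has since been
    // overwritten by newer packets, falls outside the window and yields NULL.
    const QByteArray* gone = history.getPacket(4999);
    (void)resend; (void)gone;
}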


@@ -0,0 +1,35 @@
//
// SentPacketHistory.h
// assignement-client/src/octree
//
// Created by Yixin Wang on 6/5/2014
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_SentPacketHistory_h
#define hifi_SentPacketHistory_h
#include <qbytearray.h>
#include <qvector.h>
#include "OctreePacketData.h"
class SentPacketHistory {
public:
SentPacketHistory(int size);
void packetSent(OCTREE_PACKET_SEQUENCE sequenceNumber, const QByteArray& packet);
const QByteArray* getPacket(OCTREE_PACKET_SEQUENCE sequenceNumber) const;
private:
QVector<QByteArray> _sentPackets; // circular buffer
int _newestPacketAt;
int _numExistingPackets;
OCTREE_PACKET_SEQUENCE _newestSequenceNumber;
};
#endif


@@ -86,7 +86,7 @@ bool ParticleServer::hasSpecialPacketToSend(const SharedNodePointer& node) {
return shouldSendDeletedParticles;
}
int ParticleServer::sendSpecialPacket(const SharedNodePointer& node) {
int ParticleServer::sendSpecialPacket(OctreeQueryNode* queryNode, const SharedNodePointer& node) {
unsigned char outputBuffer[MAX_PACKET_SIZE];
size_t packetLength = 0;
@@ -100,12 +100,13 @@ int ParticleServer::sendSpecialPacket(const SharedNodePointer& node) {
// TODO: is it possible to send too many of these packets? what if you deleted 1,000,000 particles?
while (hasMoreToSend) {
hasMoreToSend = tree->encodeParticlesDeletedSince(deletedParticlesSentAt,
hasMoreToSend = tree->encodeParticlesDeletedSince(queryNode->getSequenceNumber(), deletedParticlesSentAt,
outputBuffer, MAX_PACKET_SIZE, packetLength);
//qDebug() << "sending PacketType_PARTICLE_ERASE packetLength:" << packetLength;
NodeList::getInstance()->writeDatagram((char*) outputBuffer, packetLength, SharedNodePointer(node));
queryNode->packetSent(outputBuffer, packetLength);
}
nodeData->setLastDeletedParticlesSentAt(deletePacketSentAt);


@@ -37,7 +37,7 @@ public:
// subclass may implement these method
virtual void beforeRun();
virtual bool hasSpecialPacketToSend(const SharedNodePointer& node);
virtual int sendSpecialPacket(const SharedNodePointer& node);
virtual int sendSpecialPacket(OctreeQueryNode* queryNode, const SharedNodePointer& node);
virtual void particleCreated(const Particle& newParticle, const SharedNodePointer& senderNode);


@@ -40,9 +40,34 @@ bool VoxelServer::hasSpecialPacketToSend(const SharedNodePointer& node) {
return shouldSendEnvironments;
}
int VoxelServer::sendSpecialPacket(const SharedNodePointer& node) {
int VoxelServer::sendSpecialPacket(OctreeQueryNode* queryNode, const SharedNodePointer& node) {
unsigned char* copyAt = _tempOutputBuffer;
int numBytesPacketHeader = populatePacketHeader(reinterpret_cast<char*>(_tempOutputBuffer), PacketTypeEnvironmentData);
copyAt += numBytesPacketHeader;
int envPacketLength = numBytesPacketHeader;
// pack in flags
OCTREE_PACKET_FLAGS flags = 0;
OCTREE_PACKET_FLAGS* flagsAt = (OCTREE_PACKET_FLAGS*)copyAt;
*flagsAt = flags;
copyAt += sizeof(OCTREE_PACKET_FLAGS);
envPacketLength += sizeof(OCTREE_PACKET_FLAGS);
// pack in sequence number
OCTREE_PACKET_SEQUENCE* sequenceAt = (OCTREE_PACKET_SEQUENCE*)copyAt;
*sequenceAt = queryNode->getSequenceNumber();
copyAt += sizeof(OCTREE_PACKET_SEQUENCE);
envPacketLength += sizeof(OCTREE_PACKET_SEQUENCE);
// pack in timestamp
OCTREE_PACKET_SENT_TIME now = usecTimestampNow();
OCTREE_PACKET_SENT_TIME* timeAt = (OCTREE_PACKET_SENT_TIME*)copyAt;
*timeAt = now;
copyAt += sizeof(OCTREE_PACKET_SENT_TIME);
envPacketLength += sizeof(OCTREE_PACKET_SENT_TIME);
int environmentsToSend = getSendMinimalEnvironment() ? 1 : getEnvironmentDataCount();
for (int i = 0; i < environmentsToSend; i++) {
@@ -50,6 +75,8 @@ int VoxelServer::sendSpecialPacket(const SharedNodePointer& node) {
}
NodeList::getInstance()->writeDatagram((char*) _tempOutputBuffer, envPacketLength, SharedNodePointer(node));
queryNode->packetSent(_tempOutputBuffer, envPacketLength);
return envPacketLength;
}
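The three header fields above are packed with the same cast-advance-count pattern. A generic sketch of that pattern (hypothetical helper; memcpy is used instead of the pointer casts to avoid alignment and aliasing issues, the bytes written are identical):

#include <cstring>

// Copy one POD field to the write cursor, advancing cursor and length together.
template <typename T>
void packField(unsigned char*& copyAt, int& packetLength, const T& value) {
    std::memcpy(copyAt, &value, sizeof(T));
    copyAt += sizeof(T);
    packetLength += sizeof(T);
}

// e.g. packField(copyAt, envPacketLength, queryNode->getSequenceNumber());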


@@ -46,7 +46,7 @@ public:
// subclass may implement these method
virtual void beforeRun();
virtual bool hasSpecialPacketToSend(const SharedNodePointer& node);
virtual int sendSpecialPacket(const SharedNodePointer& node);
virtual int sendSpecialPacket(OctreeQueryNode* queryNode, const SharedNodePointer& node);
private:
bool _sendEnvironments;


@@ -218,6 +218,8 @@ bool DomainServer::optionallySetupAssignmentPayment() {
}
}
qDebug() << "Assignments will be paid for via" << qPrintable(_oauthProviderURL.toString());
// assume that the fact we are authing against HF data server means we will pay for assignments
// setup a timer to send transactions to pay assigned nodes every 30 seconds
QTimer* creditSetupTimer = new QTimer(this);
@@ -310,6 +312,8 @@ void DomainServer::createScriptedAssignmentsFromArray(const QJsonArray &configAr
int numInstances = jsonObject[ASSIGNMENT_INSTANCES_KEY].toInt();
numInstances = (numInstances == 0 ? 1 : numInstances);
qDebug() << "Adding a static scripted assignment from" << assignmentURL;
for (int i = 0; i < numInstances; i++) {
// add a scripted assignment to the queue for this instance
Assignment* scriptAssignment = new Assignment(Assignment::CreateCommand,
@@ -317,13 +321,8 @@
assignmentPool);
scriptAssignment->setPayload(assignmentURL.toUtf8());
qDebug() << "Adding scripted assignment to queue -" << *scriptAssignment;
qDebug() << "URL for script is" << assignmentURL;
// scripts passed on CL or via JSON are static - so they are added back to the queue if the node dies
SharedAssignmentPointer sharedScriptAssignment(scriptAssignment);
_unfulfilledAssignments.enqueue(sharedScriptAssignment);
_allAssignments.insert(sharedScriptAssignment->getUUID(), sharedScriptAssignment);
addStaticAssignmentToAssignmentHash(scriptAssignment);
}
}
}
@@ -405,7 +404,7 @@ void DomainServer::handleConnectRequest(const QByteArray& packet, const HifiSock
PendingAssignedNodeData* pendingAssigneeData = NULL;
if (isAssignment) {
pendingAssigneeData = _pendingAssignedNodes.take(packetUUID);
pendingAssigneeData = _pendingAssignedNodes.value(packetUUID);
if (pendingAssigneeData) {
matchingQueuedAssignment = matchingQueuedAssignmentForCheckIn(pendingAssigneeData->getAssignmentUUID(), nodeType);
@@ -414,12 +413,21 @@
qDebug() << "Assignment deployed with" << uuidStringWithoutCurlyBraces(packetUUID)
<< "matches unfulfilled assignment"
<< uuidStringWithoutCurlyBraces(matchingQueuedAssignment->getUUID());
// remove this unique assignment deployment from the hash of pending assigned nodes
// cleanup of the PendingAssignedNodeData happens below after the node has been added to the LimitedNodeList
_pendingAssignedNodes.remove(packetUUID);
} else {
// this is a node connecting to fulfill an assignment that doesn't exist
// don't reply back to them so they cycle back and re-request an assignment
qDebug() << "No match for assignment deployed with" << uuidStringWithoutCurlyBraces(packetUUID);
return;
}
}
}
if (!matchingQueuedAssignment && !_oauthProviderURL.isEmpty() && _argumentVariantMap.contains(ALLOWED_ROLES_CONFIG_KEY)) {
if (!isAssignment && !_oauthProviderURL.isEmpty() && _argumentVariantMap.contains(ALLOWED_ROLES_CONFIG_KEY)) {
// this is an Agent, and we require authentication so we can compare the user's roles to our list of allowed ones
if (_sessionAuthenticationHash.contains(packetUUID)) {
if (!_sessionAuthenticationHash.value(packetUUID)) {
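One subtlety in this hunk: `QHash::take()` removes the entry while returning it, whereas `QHash::value()` only reads it; the change defers the removal until the assignment is confirmed, so an unmatched check-in can retry. A tiny illustration (hypothetical hash, not from the diff):

#include <QHash>
#include <QUuid>

void takeVersusValue() {
    QHash<QUuid, int> pending;
    QUuid key = QUuid::createUuid();
    pending.insert(key, 42);

    int peeked = pending.value(key); // 42; entry stays in the hash
    int taken = pending.take(key);   // 42; entry is removed from the hash
    // After take(), pending.contains(key) is false - a retry with the same
    // UUID would no longer find its assignment, which is what the switch
    // to value() plus an explicit remove() later avoids.
    (void)peeked; (void)taken;
}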
@@ -731,10 +739,11 @@ void DomainServer::setupPendingAssignmentCredits() {
qint64 elapsedMsecsSinceLastPayment = nodeData->getPaymentIntervalTimer().elapsed();
nodeData->getPaymentIntervalTimer().restart();
const float CREDITS_PER_HOUR = 3;
const float CREDITS_PER_HOUR = 0.10f;
const float CREDITS_PER_MSEC = CREDITS_PER_HOUR / (60 * 60 * 1000);
const int SATOSHIS_PER_MSEC = CREDITS_PER_MSEC * SATOSHIS_PER_CREDIT;
float pendingCredits = elapsedMsecsSinceLastPayment * CREDITS_PER_MSEC;
float pendingCredits = elapsedMsecsSinceLastPayment * SATOSHIS_PER_MSEC;
if (existingTransaction) {
existingTransaction->incrementAmount(pendingCredits);
@@ -881,10 +890,10 @@ QJsonObject DomainServer::jsonObjectForNode(const SharedNodePointer& node) {
if (!nodeData->getWalletUUID().isNull()) {
TransactionHash::iterator i = _pendingAssignmentCredits.find(nodeData->getWalletUUID());
double pendingCreditAmount = 0;
float pendingCreditAmount = 0;
while (i != _pendingAssignmentCredits.end() && i.key() == nodeData->getWalletUUID()) {
pendingCreditAmount += i.value()->getAmount();
pendingCreditAmount += i.value()->getAmount() / SATOSHIS_PER_CREDIT;
++i;
}
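The payment hunks move the ledger from fractional credits to integer satoshis. A hedged back-of-the-envelope check of the new rate (SATOSHIS_PER_CREDIT does not appear in this diff; 100,000,000 is assumed purely to make the arithmetic concrete):

const float CREDITS_PER_HOUR = 0.10f;
const float CREDITS_PER_MSEC = CREDITS_PER_HOUR / (60 * 60 * 1000); // ~2.78e-8
const float ASSUMED_SATOSHIS_PER_CREDIT = 100000000.0f;             // assumption
const float SATOSHIS_PER_MSEC_F = CREDITS_PER_MSEC * ASSUMED_SATOSHIS_PER_CREDIT; // ~2.78

// A 30-second payment interval then accrues roughly
//   30,000 msec * ~2.78 satoshis/msec ~= 83,333 satoshis
//   = ~0.00083 credits, consistent with 0.10 credits/hour.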
@@ -1001,6 +1010,7 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
return true;
} else {
// check if this is for json stats for a node
const QString NODE_JSON_REGEX_STRING = QString("\\%1\\/(%2).json\\/?$").arg(URI_NODES).arg(UUID_REGEX_STRING);
QRegExp nodeShowRegex(NODE_JSON_REGEX_STRING);
@@ -1025,6 +1035,40 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
// tell the caller we processed the request
return true;
}
return false;
}
// check if this is a request for a scripted assignment (with a temp unique UUID)
const QString ASSIGNMENT_REGEX_STRING = QString("\\%1\\/(%2)\\/?$").arg(URI_ASSIGNMENT).arg(UUID_REGEX_STRING);
QRegExp assignmentRegex(ASSIGNMENT_REGEX_STRING);
if (assignmentRegex.indexIn(url.path()) != -1) {
QUuid matchingUUID = QUuid(assignmentRegex.cap(1));
SharedAssignmentPointer matchingAssignment = _allAssignments.value(matchingUUID);
if (!matchingAssignment) {
// check if we have a pending assignment that matches this temp UUID, and it is a scripted assignment
PendingAssignedNodeData* pendingData = _pendingAssignedNodes.value(matchingUUID);
if (pendingData) {
matchingAssignment = _allAssignments.value(pendingData->getAssignmentUUID());
if (matchingAssignment && matchingAssignment->getType() == Assignment::AgentType) {
// we have a matching assignment and it is for the right type, have the HTTP manager handle it
// via correct URL for the script so the client can download
QUrl scriptURL = url;
scriptURL.setPath(URI_ASSIGNMENT + "/"
+ uuidStringWithoutCurlyBraces(pendingData->getAssignmentUUID()));
// have the HTTPManager serve the appropriate script file
return _httpManager.handleHTTPRequest(connection, scriptURL);
}
}
}
// request not handled
return false;
}
}
} else if (connection->requestOperation() == QNetworkAccessManager::PostOperation) {


@@ -18,13 +18,13 @@
WalletTransaction::WalletTransaction() :
_uuid(),
_destinationUUID(),
_amount(),
_amount(0),
_isFinalized(false)
{
}
WalletTransaction::WalletTransaction(const QUuid& destinationUUID, double amount) :
WalletTransaction::WalletTransaction(const QUuid& destinationUUID, qint64 amount) :
_uuid(QUuid::createUuid()),
_destinationUUID(destinationUUID),
_amount(amount),
@@ -63,5 +63,5 @@ void WalletTransaction::loadFromJson(const QJsonObject& jsonObject) {
_uuid = QUuid(transactionObject.value(TRANSACTION_ID_KEY).toString());
_destinationUUID = QUuid(transactionObject.value(TRANSACTION_DESTINATION_WALLET_ID_KEY).toString());
_amount = transactionObject.value(TRANSACTION_AMOUNT_KEY).toDouble();
_amount = transactionObject.value(TRANSACTION_AMOUNT_KEY).toInt();
}


@@ -19,16 +19,16 @@
class WalletTransaction : public QObject {
public:
WalletTransaction();
WalletTransaction(const QUuid& destinationUUID, double amount);
WalletTransaction(const QUuid& destinationUUID, qint64 amount);
const QUuid& getUUID() const { return _uuid; }
void setDestinationUUID(const QUuid& destinationUUID) { _destinationUUID = destinationUUID; }
const QUuid& getDestinationUUID() const { return _destinationUUID; }
double getAmount() const { return _amount; }
void setAmount(double amount) { _amount = amount; }
void incrementAmount(double increment) { _amount += increment; }
qint64 getAmount() const { return _amount; }
void setAmount(qint64 amount) { _amount = amount; }
void incrementAmount(qint64 increment) { _amount += increment; }
bool isFinalized() const { return _isFinalized; }
void setIsFinalized(bool isFinalized) { _isFinalized = isFinalized; }
@@ -39,7 +39,7 @@ public:
private:
QUuid _uuid;
QUuid _destinationUUID;
double _amount;
qint64 _amount;
bool _isFinalized;
};
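Switching `_amount` from `double` to `qint64` presumably exists to keep the ledger in exact integer minor units: repeated fractional increments drift in floating point but stay exact as integers. A minimal illustration of the failure mode being avoided (hypothetical, not from the diff):

#include <cstdio>

int main() {
    double floatTotal = 0.0;
    long long intTotal = 0;     // satoshi-style minor units
    for (int i = 0; i < 1000000; i++) {
        floatTotal += 0.01;     // one "cent" at a time, in floating point
        intTotal += 1;          // the same cent, as an integer
    }
    // floatTotal ends up slightly off from 10000.0; intTotal is exactly 1000000.
    std::printf("%.12f vs %lld\n", floatTotal, intTotal);
    return 0;
}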


@@ -20,21 +20,21 @@ var jointMappings = "\n# Joint list start";
for (var i = 0; i < jointList.length; i++) {
jointMappings = jointMappings + "\njointIndex = " + jointList[i] + " = " + i;
}
print(jointMappings + "\n# Joint list end");
print(jointMappings + "\n# Joint list end");
Script.update.connect(function(deltaTime) {
cumulativeTime += deltaTime;
MyAvatar.setJointData("joint_R_hip", Quat.fromPitchYawRollDegrees(0.0, 0.0, AMPLITUDE * Math.sin(cumulativeTime * FREQUENCY)));
MyAvatar.setJointData("joint_L_hip", Quat.fromPitchYawRollDegrees(0.0, 0.0, -AMPLITUDE * Math.sin(cumulativeTime * FREQUENCY)));
MyAvatar.setJointData("joint_R_knee", Quat.fromPitchYawRollDegrees(0.0, 0.0,
AMPLITUDE * (1.0 + Math.sin(cumulativeTime * FREQUENCY))));
MyAvatar.setJointData("joint_L_knee", Quat.fromPitchYawRollDegrees(0.0, 0.0,
AMPLITUDE * (1.0 - Math.sin(cumulativeTime * FREQUENCY))));
MyAvatar.setJointData("RightUpLeg", Quat.fromPitchYawRollDegrees(AMPLITUDE * Math.sin(cumulativeTime * FREQUENCY), 0.0, 0.0));
MyAvatar.setJointData("LeftUpLeg", Quat.fromPitchYawRollDegrees(-AMPLITUDE * Math.sin(cumulativeTime * FREQUENCY), 0.0, 0.0));
MyAvatar.setJointData("RightLeg", Quat.fromPitchYawRollDegrees(
AMPLITUDE * (1.0 + Math.sin(cumulativeTime * FREQUENCY)),0.0, 0.0));
MyAvatar.setJointData("LeftLeg", Quat.fromPitchYawRollDegrees(
AMPLITUDE * (1.0 - Math.sin(cumulativeTime * FREQUENCY)),0.0, 0.0));
});
Script.scriptEnding.connect(function() {
MyAvatar.clearJointData("joint_R_hip");
MyAvatar.clearJointData("joint_L_hip");
MyAvatar.clearJointData("joint_R_knee");
MyAvatar.clearJointData("joint_L_knee");
MyAvatar.clearJointData("RightUpLeg");
MyAvatar.clearJointData("LeftUpLeg");
MyAvatar.clearJointData("RightLeg");
MyAvatar.clearJointData("LeftLeg");
});

examples/currentAPI.js (new file)

@@ -0,0 +1,42 @@
//
// currentAPI.js
// examples
//
// Created by Clément Brisset on 5/30/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var array = [];
var buffer = "\n\n\n\n\n======= JS API list =======";
function listKeys(string, object) {
if (string == "listKeys" || string == "array" || string == "buffer" || string == "i") {
return;
}
if (typeof(object) != "object") {
array.push(string + " " + typeof(object));
return;
}
var keys = Object.keys(object);
for (var i = 0; i < keys.length; ++i) {
if (string == "") {
listKeys(keys[i], object[keys[i]]);
} else {
listKeys(string + "." + keys[i], object[keys[i]]);
}
}
}
listKeys("", this);
array.sort();
for (var i = 0; i < array.length; ++i) {
buffer = buffer + "\n" + array[i];
}
buffer = buffer + "\n========= API END =========\n\n\n\n\n";
print(buffer);


@@ -10,6 +10,7 @@
Script.include("lookWithTouch.js");
Script.include("editVoxels.js");
Script.include("editModels.js");
Script.include("selectAudioDevice.js");
Script.include("hydraMove.js");
Script.include("inspect.js");


@@ -31,12 +31,12 @@ var originalProperties = {
green: 255,
blue: 0 },
//modelURL: "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/Feisar_Ship.FBX",
modelURL: "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/Feisar_Ship.FBX",
//modelURL: "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/birarda/birarda_head.fbx",
//modelURL: "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/pug.fbx",
//modelURL: "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/newInvader16x16-large-purple.svo",
//modelURL: "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/minotaur/mino_full.fbx",
modelURL: "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/Combat_tank_V01.FBX",
//modelURL: "http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/Combat_tank_V01.FBX",
modelRotation: rotation
};


@@ -18,7 +18,8 @@ var toolWidth = 50;
var LASER_WIDTH = 4;
var LASER_COLOR = { red: 255, green: 0, blue: 0 };
var LASER_LENGTH_FACTOR = 5;
var LASER_LENGTH_FACTOR = 500
;
var LEFT = 0;
var RIGHT = 1;
@@ -40,11 +41,26 @@ var modelURLs = [
var toolBar;
var jointList = MyAvatar.getJointNames();
var mode = 0;
function isLocked(properties) {
// special case to lock the ground plane model in hq.
if (location.hostname == "hq.highfidelity.io" &&
properties.modelURL == "https://s3-us-west-1.amazonaws.com/highfidelity-public/ozan/Terrain_Reduce_forAlpha.fbx") {
return true;
}
return false;
}
function controller(wichSide) {
this.side = wichSide;
this.palm = 2 * wichSide;
this.tip = 2 * wichSide + 1;
this.trigger = wichSide;
this.bumper = 6 * wichSide + 5;
this.oldPalmPosition = Controller.getSpatialControlPosition(this.palm);
this.palmPosition = Controller.getSpatialControlPosition(this.palm);
@@ -65,16 +81,25 @@ function controller(wichSide) {
this.rotation = this.oldRotation;
this.triggerValue = Controller.getTriggerValue(this.trigger);
this.bumperValue = Controller.isButtonPressed(this.bumper);
this.pressed = false; // is trigger pressed
this.pressing = false; // is trigger being pressed (is pressed now but wasn't previously)
this.grabbing = false;
this.modelID = { isKnownID: false };
this.modelURL = "";
this.oldModelRotation;
this.oldModelPosition;
this.oldModelRadius;
this.positionAtGrab;
this.rotationAtGrab;
this.modelPositionAtGrab;
this.modelRotationAtGrab;
this.jointsIntersectingFromStart = [];
this.laser = Overlays.addOverlay("line3d", {
position: { x: 0, y: 0, z: 0 },
end: { x: 0, y: 0, z: 0 },
@@ -117,23 +142,82 @@ function controller(wichSide) {
this.grab = function (modelID, properties) {
if (this.isLocked(properties)) {
if (isLocked(properties)) {
print("Model locked " + modelID.id);
} else {
print("Grabbing " + modelID.id);
this.grabbing = true;
this.modelID = modelID;
this.modelURL = properties.modelURL;
this.oldModelPosition = properties.position;
this.oldModelRotation = properties.modelRotation;
this.oldModelRadius = properties.radius;
this.positionAtGrab = this.palmPosition;
this.rotationAtGrab = this.rotation;
this.modelPositionAtGrab = properties.position;
this.modelRotationAtGrab = properties.modelRotation;
this.jointsIntersectingFromStart = [];
for (var i = 0; i < jointList.length; i++) {
var distance = Vec3.distance(MyAvatar.getJointPosition(jointList[i]), this.oldModelPosition);
if (distance < this.oldModelRadius) {
this.jointsIntersectingFromStart.push(i);
}
}
this.showLaser(false);
}
}
this.release = function () {
if (this.grabbing) {
jointList = MyAvatar.getJointNames();
var closestJointIndex = -1;
var closestJointDistance = 10;
for (var i = 0; i < jointList.length; i++) {
var distance = Vec3.distance(MyAvatar.getJointPosition(jointList[i]), this.oldModelPosition);
if (distance < closestJointDistance) {
closestJointDistance = distance;
closestJointIndex = i;
}
}
if (closestJointIndex != -1) {
print("closestJoint: " + jointList[closestJointIndex]);
print("closestJointDistance (attach max distance): " + closestJointDistance + " (" + this.oldModelRadius + ")");
}
if (closestJointDistance < this.oldModelRadius) {
if (this.jointsIntersectingFromStart.indexOf(closestJointIndex) != -1 ||
(leftController.grabbing && rightController.grabbing &&
leftController.modelID.id == rightController.modelID.id)) {
// Do nothing
} else {
print("Attaching to " + jointList[closestJointIndex]);
var jointPosition = MyAvatar.getJointPosition(jointList[closestJointIndex]);
var jointRotation = MyAvatar.getJointCombinedRotation(jointList[closestJointIndex]);
var attachmentOffset = Vec3.subtract(this.oldModelPosition, jointPosition);
attachmentOffset = Vec3.multiplyQbyV(Quat.inverse(jointRotation), attachmentOffset);
var attachmentRotation = Quat.multiply(Quat.inverse(jointRotation), this.oldModelRotation);
MyAvatar.attach(this.modelURL, jointList[closestJointIndex],
attachmentOffset, attachmentRotation, 2.0 * this.oldModelRadius,
true, false);
Models.deleteModel(this.modelID);
}
}
}
this.grabbing = false;
this.modelID.isKnownID = false;
this.jointsIntersectingFromStart = [];
this.showLaser(true);
}
this.checkTrigger = function () {
@@ -150,18 +234,9 @@ function controller(wichSide) {
}
}
this.isLocked = function (properties) {
// special case to lock the ground plane model in hq.
if (location.hostname == "hq.highfidelity.io" &&
properties.modelURL == "https://s3-us-west-1.amazonaws.com/highfidelity-public/ozan/Terrain_Reduce_forAlpha.fbx") {
return true;
}
return false;
}
this.checkModel = function (properties) {
// special case to lock the ground plane model in hq.
if (this.isLocked(properties)) {
if (isLocked(properties)) {
return { valid: false };
}
@@ -187,12 +262,13 @@ function controller(wichSide) {
var X = Vec3.sum(A, Vec3.multiply(B, x));
var d = Vec3.length(Vec3.subtract(P, X));
if (d < properties.radius && 0 < x && x < LASER_LENGTH_FACTOR) {
if (0 < x && x < LASER_LENGTH_FACTOR) {
return { valid: true, x: x, y: y, z: z };
}
return { valid: false };
}
this.glowedIntersectingModel = { isKnownID: false };
this.moveLaser = function () {
// the overlays here are anchored to the avatar, which means they are specified in the avatar's local frame
@@ -205,59 +281,119 @@ function controller(wichSide) {
Overlays.editOverlay(this.laser, {
position: startPosition,
end: endPosition,
visible: true
end: endPosition
});
Overlays.editOverlay(this.ball, {
position: endPosition,
visible: true
position: endPosition
});
Overlays.editOverlay(this.leftRight, {
position: Vec3.sum(endPosition, Vec3.multiply(this.right, 2 * this.guideScale)),
end: Vec3.sum(endPosition, Vec3.multiply(this.right, -2 * this.guideScale)),
visible: true
end: Vec3.sum(endPosition, Vec3.multiply(this.right, -2 * this.guideScale))
});
Overlays.editOverlay(this.topDown, {position: Vec3.sum(endPosition, Vec3.multiply(this.up, 2 * this.guideScale)),
end: Vec3.sum(endPosition, Vec3.multiply(this.up, -2 * this.guideScale)),
visible: true
end: Vec3.sum(endPosition, Vec3.multiply(this.up, -2 * this.guideScale))
});
this.showLaser(!this.grabbing || mode == 0);
if (this.glowedIntersectingModel.isKnownID) {
Models.editModel(this.glowedIntersectingModel, { glowLevel: 0.0 });
this.glowedIntersectingModel.isKnownID = false;
}
if (!this.grabbing) {
var intersection = Models.findRayIntersection({
origin: this.palmPosition,
direction: this.front
});
if (intersection.accurate && intersection.modelID.isKnownID) {
this.glowedIntersectingModel = intersection.modelID;
Models.editModel(this.glowedIntersectingModel, { glowLevel: 0.25 });
}
}
}
this.hideLaser = function() {
Overlays.editOverlay(this.laser, { visible: false });
Overlays.editOverlay(this.ball, { visible: false });
Overlays.editOverlay(this.leftRight, { visible: false });
Overlays.editOverlay(this.topDown, { visible: false });
this.showLaser = function(show) {
Overlays.editOverlay(this.laser, { visible: show });
Overlays.editOverlay(this.ball, { visible: show });
Overlays.editOverlay(this.leftRight, { visible: show });
Overlays.editOverlay(this.topDown, { visible: show });
}
this.moveModel = function () {
if (this.grabbing) {
var newPosition = Vec3.sum(this.palmPosition,
Vec3.multiply(this.front, this.x));
newPosition = Vec3.sum(newPosition,
Vec3.multiply(this.up, this.y));
newPosition = Vec3.sum(newPosition,
Vec3.multiply(this.right, this.z));
if (!this.modelID.isKnownID) {
print("Unknown grabbed ID " + this.modelID.id + ", isKnown: " + this.modelID.isKnownID);
this.modelID = Models.findRayIntersection({
origin: this.palmPosition,
direction: this.front
}).modelID;
print("Identified ID " + this.modelID.id + ", isKnown: " + this.modelID.isKnownID);
}
var newPosition;
var newRotation;
var newRotation = Quat.multiply(this.rotation,
Quat.inverse(this.oldRotation));
newRotation = Quat.multiply(newRotation,
this.oldModelRotation);
switch (mode) {
case 0:
newPosition = Vec3.sum(this.palmPosition,
Vec3.multiply(this.front, this.x));
newPosition = Vec3.sum(newPosition,
Vec3.multiply(this.up, this.y));
newPosition = Vec3.sum(newPosition,
Vec3.multiply(this.right, this.z));
newRotation = Quat.multiply(this.rotation,
Quat.inverse(this.oldRotation));
newRotation = Quat.multiply(newRotation,
this.oldModelRotation);
break;
case 1:
var forward = Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0, z: -1 });
var d = Vec3.dot(forward, MyAvatar.position);
var factor1 = Vec3.dot(forward, this.positionAtGrab) - d;
var factor2 = Vec3.dot(forward, this.modelPositionAtGrab) - d;
var vector = Vec3.subtract(this.palmPosition, this.positionAtGrab);
if (factor2 < 0) {
factor2 = 0;
}
if (factor1 <= 0) {
factor1 = 1;
factor2 = 1;
}
newPosition = Vec3.sum(this.modelPositionAtGrab,
Vec3.multiply(vector,
factor2 / factor1));
newRotation = Quat.multiply(this.rotation,
Quat.inverse(this.rotationAtGrab));
newRotation = Quat.multiply(newRotation,
this.modelRotationAtGrab);
break;
}
Models.editModel(this.modelID, {
position: newPosition,
modelRotation: newRotation
});
// print("Moving " + this.modelID.id);
// Vec3.print("Old Position: ", this.oldModelPosition);
// Vec3.print("Sav Position: ", newPosition);
// Quat.print("Old Rotation: ", this.oldModelRotation);
// Quat.print("New Rotation: ", newRotation);
this.oldModelRotation = newRotation;
this.oldModelPosition = newPosition;
var indicesToRemove = [];
for (var i = 0; i < this.jointsIntersectingFromStart.length; ++i) {
var distance = Vec3.distance(MyAvatar.getJointPosition(this.jointsIntersectingFromStart[i]), this.oldModelPosition);
if (distance >= this.oldModelRadius) {
indicesToRemove.push(this.jointsIntersectingFromStart[i]);
}
}
for (var i = 0; i < indicesToRemove.length; ++i) {
this.jointsIntersectingFromStart.splice(this.jointsIntersectingFromStart.indexOf(indicesToRemove[i]), 1);
}
}
}
@ -281,6 +417,21 @@ function controller(wichSide) {
this.triggerValue = Controller.getTriggerValue(this.trigger);
var bumperValue = Controller.isButtonPressed(this.bumper);
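// On a rising edge of the bumper, toggle between mode 0 (the model follows the hand
// directly; red laser) and mode 1 (hand motion scaled by the grab-distance ratio in
// moveModel(); blue laser).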
if (bumperValue && !this.bumperValue) {
if (mode == 0) {
mode = 1;
Overlays.editOverlay(leftController.laser, { color: { red: 0, green: 0, blue: 255 } });
Overlays.editOverlay(rightController.laser, { color: { red: 0, green: 0, blue: 255 } });
} else {
mode = 0;
Overlays.editOverlay(leftController.laser, { color: { red: 255, green: 0, blue: 0 } });
Overlays.editOverlay(rightController.laser, { color: { red: 255, green: 0, blue: 0 } });
}
}
this.bumperValue = bumperValue;
this.checkTrigger();
this.moveLaser();
@ -291,37 +442,88 @@ function controller(wichSide) {
}
if (this.pressing) {
Vec3.print("Looking at: ", this.palmPosition);
var foundModels = Models.findModels(this.palmPosition, LASER_LENGTH_FACTOR);
for (var i = 0; i < foundModels.length; i++) {
// Check whether the pick ray intersects any of the avatar's attachments
var attachments = MyAvatar.getAttachmentData();
var attachmentIndex = -1;
var attachmentX = LASER_LENGTH_FACTOR;
var newModel;
var newProperties;
for (var i = 0; i < attachments.length; ++i) {
var position = Vec3.sum(MyAvatar.getJointPosition(attachments[i].jointName),
Vec3.multiplyQbyV(MyAvatar.getJointCombinedRotation(attachments[i].jointName), attachments[i].translation));
var scale = attachments[i].scale;
if (!foundModels[i].isKnownID) {
var identify = Models.identifyModel(foundModels[i]);
var A = this.palmPosition;
var B = this.front;
var P = position;
var x = Vec3.dot(Vec3.subtract(P, A), B);
var X = Vec3.sum(A, Vec3.multiply(B, x));
var d = Vec3.length(Vec3.subtract(P, X));
if (d < scale / 2.0 && 0 < x && x < attachmentX) {
attachmentIndex = i;
attachmentX = d;
}
}
if (attachmentIndex != -1) {
print("Detaching: " + attachments[attachmentIndex].modelURL);
MyAvatar.detachOne(attachments[attachmentIndex].modelURL, attachments[attachmentIndex].jointName);
newProperties = {
position: Vec3.sum(MyAvatar.getJointPosition(attachments[attachmentIndex].jointName),
Vec3.multiplyQbyV(MyAvatar.getJointCombinedRotation(attachments[attachmentIndex].jointName), attachments[attachmentIndex].translation)),
modelRotation: Quat.multiply(MyAvatar.getJointCombinedRotation(attachments[attachmentIndex].jointName),
attachments[attachmentIndex].rotation),
radius: attachments[attachmentIndex].scale / 2.0,
modelURL: attachments[attachmentIndex].modelURL
};
newModel = Models.addModel(newProperties);
} else {
// No attachment was hit, so check the model tree instead
Vec3.print("Looking at: ", this.palmPosition);
var pickRay = { origin: this.palmPosition,
direction: Vec3.normalize(Vec3.subtract(this.tipPosition, this.palmPosition)) };
var foundIntersection = Models.findRayIntersection(pickRay);
if(!foundIntersection.accurate) {
print("No accurate intersection");
return;
}
newModel = foundIntersection.modelID;
if (!newModel.isKnownID) {
var identify = Models.identifyModel(newModel);
if (!identify.isKnownID) {
print("Unknown ID " + identify.id + "(update loop)");
print("Unknown ID " + identify.id + " (update loop " + newModel.id + ")");
return;
}
foundModels[i] = identify;
newModel = identify;
}
newProperties = Models.getModelProperties(newModel);
}
print("foundModel.modelURL=" + newProperties.modelURL);
if (isLocked(newProperties)) {
print("Model locked " + newProperties.id);
} else {
this.grab(newModel, newProperties);
var properties = Models.getModelProperties(foundModels[i]);
if (this.isLocked(properties)) {
print("Model locked " + properties.id);
} else {
print("Checking properties: " + properties.id + " " + properties.isKnownID);
var check = this.checkModel(properties);
if (check.valid) {
this.grab(foundModels[i], properties);
this.x = check.x;
this.y = check.y;
this.z = check.z;
return;
}
}
var check = this.checkModel(newProperties);
this.x = check.x;
this.y = check.y;
this.z = check.z;
return;
}
}
}
this.cleanup = function () {
Overlays.deleteOverlay(this.laser);
Overlays.deleteOverlay(this.ball);
@ -335,38 +537,74 @@ var rightController = new controller(RIGHT);
function moveModels() {
if (leftController.grabbing && rightController.grabbing && rightController.modelID.id == leftController.modelID.id) {
//print("Both controllers");
var oldLeftPoint = Vec3.sum(leftController.oldPalmPosition, Vec3.multiply(leftController.oldFront, leftController.x));
var oldRightPoint = Vec3.sum(rightController.oldPalmPosition, Vec3.multiply(rightController.oldFront, rightController.x));
var oldMiddle = Vec3.multiply(Vec3.sum(oldLeftPoint, oldRightPoint), 0.5);
var oldLength = Vec3.length(Vec3.subtract(oldLeftPoint, oldRightPoint));
var newPosition = leftController.oldModelPosition;
var rotation = leftController.oldModelRotation;
var ratio = 1;
var leftPoint = Vec3.sum(leftController.palmPosition, Vec3.multiply(leftController.front, leftController.x));
var rightPoint = Vec3.sum(rightController.palmPosition, Vec3.multiply(rightController.front, rightController.x));
var middle = Vec3.multiply(Vec3.sum(leftPoint, rightPoint), 0.5);
var length = Vec3.length(Vec3.subtract(leftPoint, rightPoint));
var ratio = length / oldLength;
var newPosition = Vec3.sum(middle,
Vec3.multiply(Vec3.subtract(leftController.oldModelPosition, oldMiddle), ratio));
//Vec3.print("Ratio : " + ratio + " New position: ", newPosition);
var rotation = Quat.multiply(leftController.rotation,
Quat.inverse(leftController.oldRotation));
rotation = Quat.multiply(rotation, leftController.oldModelRotation);
switch (mode) {
case 0:
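// Two-handed move/scale: keep the model's offset from the midpoint between the two
// laser points, scaled by the ratio of the new hand separation to the old one.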
var oldLeftPoint = Vec3.sum(leftController.oldPalmPosition, Vec3.multiply(leftController.oldFront, leftController.x));
var oldRightPoint = Vec3.sum(rightController.oldPalmPosition, Vec3.multiply(rightController.oldFront, rightController.x));
var oldMiddle = Vec3.multiply(Vec3.sum(oldLeftPoint, oldRightPoint), 0.5);
var oldLength = Vec3.length(Vec3.subtract(oldLeftPoint, oldRightPoint));
var leftPoint = Vec3.sum(leftController.palmPosition, Vec3.multiply(leftController.front, leftController.x));
var rightPoint = Vec3.sum(rightController.palmPosition, Vec3.multiply(rightController.front, rightController.x));
var middle = Vec3.multiply(Vec3.sum(leftPoint, rightPoint), 0.5);
var length = Vec3.length(Vec3.subtract(leftPoint, rightPoint));
ratio = length / oldLength;
newPosition = Vec3.sum(middle,
Vec3.multiply(Vec3.subtract(leftController.oldModelPosition, oldMiddle), ratio));
break;
case 1:
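// Two-handed rotate: apply the rotation that carries the old inter-palm vector onto
// the new one (a standalone sketch, rotationBetween(), follows this function).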
var u = Vec3.normalize(Vec3.subtract(rightController.oldPalmPosition, leftController.oldPalmPosition));
var v = Vec3.normalize(Vec3.subtract(rightController.palmPosition, leftController.palmPosition));
var cos_theta = Vec3.dot(u, v);
if (cos_theta > 1) {
cos_theta = 1;
}
var angle = Math.acos(cos_theta) / Math.PI * 180;
if (angle < 0.1) {
return;
}
var w = Vec3.normalize(Vec3.cross(u, v));
rotation = Quat.multiply(Quat.angleAxis(angle, w), leftController.oldModelRotation);
leftController.positionAtGrab = leftController.palmPosition;
leftController.rotationAtGrab = leftController.rotation;
leftController.modelPositionAtGrab = leftController.oldModelPosition;
leftController.modelRotationAtGrab = rotation;
rightController.positionAtGrab = rightController.palmPosition;
rightController.rotationAtGrab = rightController.rotation;
rightController.modelPositionAtGrab = rightController.oldModelPosition;
rightController.modelRotationAtGrab = rotation;
break;
}
Models.editModel(leftController.modelID, {
position: newPosition,
//modelRotation: rotation,
modelRotation: rotation,
radius: leftController.oldModelRadius * ratio
});
leftController.oldModelPosition = newPosition;
leftController.oldModelRotation = rotation;
leftController.oldModelRadius *= ratio;
rightController.oldModelPosition = newPosition;
rightController.oldModelRotation = rotation;
rightController.oldModelRadius *= ratio;
return;
}
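// A hedged sketch of the two-hand rotation used in case 1 above: the rotation carrying
// the old inter-palm vector u onto the new one v is acos(u . v) degrees about u x v.
// rotationBetween is a hypothetical helper name, not part of the original script.
function rotationBetween(u, v) {
    var cosTheta = Vec3.dot(Vec3.normalize(u), Vec3.normalize(v));
    if (cosTheta > 1) {
        cosTheta = 1; // guard acos() against floating-point rounding
    }
    var angle = Math.acos(cosTheta) / Math.PI * 180; // Quat.angleAxis takes degrees
    return Quat.angleAxis(angle, Vec3.normalize(Vec3.cross(u, v)));
}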
@ -394,21 +632,19 @@ function checkController(deltaTime) {
if (hydraConnected) {
hydraConnected = false;
leftController.hideLaser();
rightController.hideLaser();
leftController.showLaser(false);
rightController.showLaser(false);
}
}
moveOverlays();
}
function initToolBar() {
toolBar = new ToolBar(0, 0, ToolBar.VERTICAL);
// New Model
newModel = toolBar.addTool({
imageURL: toolIconUrl + "voxel-tool.svg",
imageURL: toolIconUrl + "add-model-tool.svg",
subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
width: toolWidth, height: toolHeight,
visible: true,
@ -466,84 +702,110 @@ function mousePressEvent(event) {
}
var position = Vec3.sum(MyAvatar.position, Vec3.multiply(Quat.getFront(MyAvatar.orientation), SPAWN_DISTANCE));
Models.addModel({ position: position,
radius: radiusDefault,
modelURL: url
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
Models.addModel({ position: position,
radius: radiusDefault,
modelURL: url
});
} else {
print("Can't create model: Model would be out of bounds.");
}
} else {
var pickRay = Camera.computePickRay(event.x, event.y);
Vec3.print("[Mouse] Looking at: ", pickRay.origin);
var foundModels = Models.findModels(pickRay.origin, LASER_LENGTH_FACTOR);
for (var i = 0; i < foundModels.length; i++) {
if (!foundModels[i].isKnownID) {
var identify = Models.identifyModel(foundModels[i]);
if (!identify.isKnownID) {
print("Unknown ID " + identify.id + "(update loop)");
continue;
}
foundModels[i] = identify;
var foundIntersection = Models.findRayIntersection(pickRay);
if(!foundIntersection.accurate) {
return;
}
var foundModel = foundIntersection.modelID;
if (!foundModel.isKnownID) {
var identify = Models.identifyModel(foundModel);
if (!identify.isKnownID) {
print("Unknown ID " + identify.id + " (update loop " + foundModel.id + ")");
return;
}
foundModel = identify;
}
var properties = Models.getModelProperties(foundModel);
if (isLocked(properties)) {
print("Model locked " + properties.id);
} else {
print("Checking properties: " + properties.id + " " + properties.isKnownID);
// P P - Model
// /| A - Palm
// / | d B - unit vector toward tip
// / | X - base of the perpendicular line
// A---X----->B d - distance from axis
// x x - distance from A
//
// |X-A| = (P-A).B
// X == A + ((P-A).B)B
// d = |P-X|
var properties = Models.getModelProperties(foundModels[i]);
if (this.isLocked(properties)) {
print("Model locked " + properties.id);
} else {
print("Checking properties: " + properties.id + " " + properties.isKnownID);
// P P - Model
// /| A - Palm
// / | d B - unit vector toward tip
// / | X - base of the perpendicular line
// A---X----->B d - distance from axis
// x x - distance from A
//
// |X-A| = (P-A).B
// X == A + ((P-A).B)B
// d = |P-X|
var A = pickRay.origin;
var B = Vec3.normalize(pickRay.direction);
var P = properties.position;
var A = pickRay.origin;
var B = Vec3.normalize(pickRay.direction);
var P = properties.position;
var x = Vec3.dot(Vec3.subtract(P, A), B);
var X = Vec3.sum(A, Vec3.multiply(B, x));
var d = Vec3.length(Vec3.subtract(P, X));
var x = Vec3.dot(Vec3.subtract(P, A), B);
var X = Vec3.sum(A, Vec3.multiply(B, x));
var d = Vec3.length(Vec3.subtract(P, X));
if (d < properties.radius && 0 < x && x < LASER_LENGTH_FACTOR) {
modelSelected = true;
selectedModelID = foundModels[i];
selectedModelProperties = properties;
if (0 < x && x < LASER_LENGTH_FACTOR) {
modelSelected = true;
selectedModelID = foundModel;
selectedModelProperties = properties;
selectedModelProperties.oldRadius = selectedModelProperties.radius;
selectedModelProperties.oldPosition = {
x: selectedModelProperties.position.x,
y: selectedModelProperties.position.y,
z: selectedModelProperties.position.z,
};
selectedModelProperties.oldRotation = {
x: selectedModelProperties.modelRotation.x,
y: selectedModelProperties.modelRotation.y,
z: selectedModelProperties.modelRotation.z,
w: selectedModelProperties.modelRotation.w,
};
orientation = MyAvatar.orientation;
intersection = rayPlaneIntersection(pickRay, P, Quat.getFront(orientation));
print("Clicked on " + selectedModelID.id + " " + modelSelected);
return;
}
orientation = MyAvatar.orientation;
intersection = rayPlaneIntersection(pickRay, P, Quat.getFront(orientation));
}
}
}
if (modelSelected) {
selectedModelProperties.oldRadius = selectedModelProperties.radius;
selectedModelProperties.oldPosition = {
x: selectedModelProperties.position.x,
y: selectedModelProperties.position.y,
z: selectedModelProperties.position.z,
};
selectedModelProperties.oldRotation = {
x: selectedModelProperties.modelRotation.x,
y: selectedModelProperties.modelRotation.y,
z: selectedModelProperties.modelRotation.z,
w: selectedModelProperties.modelRotation.w,
};
selectedModelProperties.glowLevel = 0.0;
print("Clicked on " + selectedModelID.id + " " + modelSelected);
}
}
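// A minimal standalone sketch of the distance test derived in the diagram above.
// pointToRayDistance is a hypothetical helper name, not part of the original script:
//   x = (P - A) . B   -- distance along the ray to the foot of the perpendicular
//   X = A + x * B     -- the foot of the perpendicular itself
//   d = |P - X|       -- distance from the point to the ray axis
function pointToRayDistance(rayOrigin, rayDirection, point) {
    var B = Vec3.normalize(rayDirection);
    var x = Vec3.dot(Vec3.subtract(point, rayOrigin), B);
    var X = Vec3.sum(rayOrigin, Vec3.multiply(B, x));
    return { alongRay: x, fromAxis: Vec3.length(Vec3.subtract(point, X)) };
}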
var glowedModelID = { id: -1, isKnownID: false };
var oldModifier = 0;
var modifier = 0;
var wasShifted = false;
function mouseMoveEvent(event) {
var pickRay = Camera.computePickRay(event.x, event.y);
if (!modelSelected) {
var modelIntersection = Models.findRayIntersection(pickRay);
if (modelIntersection.accurate) {
if(glowedModelID.isKnownID && glowedModelID.id != modelIntersection.modelID.id) {
Models.editModel(glowedModelID, { glowLevel: 0.0 });
glowedModelID.id = -1;
glowedModelID.isKnownID = false;
}
if (modelIntersection.modelID.isKnownID) {
Models.editModel(modelIntersection.modelID, { glowLevel: 0.25 });
glowedModelID = modelIntersection.modelID;
}
}
return;
}
@ -558,8 +820,7 @@ function mouseMoveEvent(event) {
} else {
modifier = 0;
}
var pickRay = Camera.computePickRay(event.x, event.y);
pickRay = Camera.computePickRay(event.x, event.y);
if (wasShifted != event.isShifted || modifier != oldModifier) {
selectedModelProperties.oldRadius = selectedModelProperties.radius;
@ -632,10 +893,44 @@ function mouseMoveEvent(event) {
Models.editModel(selectedModelID, selectedModelProperties);
}
function mouseReleaseEvent(event) {
modelSelected = false;
glowedModelID.id = -1;
glowedModelID.isKnownID = false;
}
// In order for editVoxels and editModels to play nice together, they each check to see if a "delete" menu item already
// exists. If it doesn't they add it. If it does they don't. They also only delete the menu item if they were the one that
// added it.
var modelMenuAddedDelete = false;
function setupModelMenus() {
print("setupModelMenus()");
// add our menuitems
if (!Menu.menuItemExists("Edit","Delete")) {
print("no delete... adding ours");
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Models", isSeparator: true, beforeItem: "Physics" });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Delete",
shortcutKeyEvent: { text: "backspace" }, afterItem: "Models" });
modelMenuAddedDelete = true;
} else {
print("delete exists... don't add ours");
}
}
function cleanupModelMenus() {
if (modelMenuAddedDelete) {
// delete our menuitems
Menu.removeSeparator("Edit", "Models");
Menu.removeMenuItem("Edit", "Delete");
}
}
function scriptEnding() {
leftController.cleanup();
rightController.cleanup();
toolBar.cleanup();
cleanupModelMenus();
}
Script.scriptEnding.connect(scriptEnding);
@ -643,6 +938,28 @@ Script.scriptEnding.connect(scriptEnding);
Script.update.connect(checkController);
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
setupModelMenus();
Menu.menuItemEvent.connect(function(menuItem){
print("menuItemEvent() in JS... menuItem=" + menuItem);
if (menuItem == "Delete") {
if (leftController.grabbing) {
print(" Delete Model.... leftController.modelID="+ leftController.modelID);
Models.deleteModel(leftController.modelID);
leftController.grabbing = false;
} else if (rightController.grabbing) {
print(" Delete Model.... rightController.modelID="+ rightController.modelID);
Models.deleteModel(rightController.modelID);
rightController.grabbing = false;
} else if (modelSelected) {
print(" Delete Model.... selectedModelID="+ selectedModelID);
Models.deleteModel(selectedModelID);
modelSelected = false;
} else {
print(" Delete Model.... not holding...");
}
}
});

View file

@ -28,8 +28,10 @@ var NEW_VOXEL_SIZE = 1.0;
var NEW_VOXEL_DISTANCE_FROM_CAMERA = 3.0;
var PIXELS_PER_EXTRUDE_VOXEL = 16;
var WHEEL_PIXELS_PER_SCALE_CHANGE = 100;
var MAX_VOXEL_SCALE = 1.0;
var MIN_VOXEL_SCALE = 1.0 / Math.pow(2.0, 8.0);
var MAX_VOXEL_SCALE_POWER = 4;
var MIN_VOXEL_SCALE_POWER = -8;
var MAX_VOXEL_SCALE = Math.pow(2.0, MAX_VOXEL_SCALE_POWER);
var MIN_VOXEL_SCALE = Math.pow(2.0, MIN_VOXEL_SCALE_POWER);
var WHITE_COLOR = { red: 255, green: 255, blue: 255 };
var MAX_PASTE_VOXEL_SCALE = 256;
@ -330,6 +332,13 @@ function ScaleSelector() {
visible: false
});
this.setScale = function(scale) {
if (scale > MAX_VOXEL_SCALE) {
scale = MAX_VOXEL_SCALE;
}
if (scale < MIN_VOXEL_SCALE) {
scale = MIN_VOXEL_SCALE;
}
this.scale = scale;
this.power = Math.floor(Math.log(scale) / Math.log(2));
rescaleImport();
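// A minimal sketch (not in the original script) of the clamp-and-power logic that
// setScale() applies above: the scale is pinned to [MIN_VOXEL_SCALE, MAX_VOXEL_SCALE]
// and its power of two recovered with a base-2 logarithm. clampVoxelScale is a
// hypothetical helper name.
function clampVoxelScale(scale) {
    scale = Math.min(Math.max(scale, MIN_VOXEL_SCALE), MAX_VOXEL_SCALE);
    var power = Math.floor(Math.log(scale) / Math.log(2)); // floor(log2(scale))
    return { scale: scale, power: power };
}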
@ -391,7 +400,7 @@ function ScaleSelector() {
this.incrementScale = function() {
copyScale = false;
if (this.power < 13) {
if (this.power < MAX_VOXEL_SCALE_POWER) {
++this.power;
this.scale *= 2.0;
this.update();
@ -402,7 +411,7 @@ function ScaleSelector() {
this.decrementScale = function() {
copyScale = false;
if (-4 < this.power) {
if (MIN_VOXEL_SCALE_POWER < this.power) {
--this.power;
this.scale /= 2.0;
this.update();
@ -1056,6 +1065,9 @@ function mousePressEvent(event) {
lastVoxelPosition = { x: voxelDetails.x, y: voxelDetails.y, z: voxelDetails.z };
lastVoxelColor = { red: newColor.red, green: newColor.green, blue: newColor.blue };
lastVoxelScale = voxelDetails.s;
if (lastVoxelScale > MAX_VOXEL_SCALE) {
lastVoxelScale = MAX_VOXEL_SCALE;
}
addVoxelSound.playRandom();
@ -1105,7 +1117,12 @@ function keyReleaseEvent(event) {
trackKeyReleaseEvent(event); // used by preview support
}
function setupMenus() {
// In order for editVoxels and editModels to play nice together, they each check to see if a "delete" menu item already
// exists. If it doesn't they add it. If it does they don't. They also only delete the menu item if they were the one that
// added it.
var voxelMenuAddedDelete = false;
function setupVoxelMenus() {
// hook up menus
Menu.menuItemEvent.connect(menuItemEvent);
@ -1115,7 +1132,13 @@ function setupMenus() {
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Copy", shortcutKey: "CTRL+C", afterItem: "Cut" });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Paste", shortcutKey: "CTRL+V", afterItem: "Copy" });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Nudge", shortcutKey: "CTRL+N", afterItem: "Paste" });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Delete", shortcutKeyEvent: { text: "backspace" }, afterItem: "Nudge" });
if (!Menu.menuItemExists("Edit","Delete")) {
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Delete",
shortcutKeyEvent: { text: "backspace" }, afterItem: "Nudge" });
voxelMenuAddedDelete = true;
}
Menu.addMenuItem({ menuName: "File", menuItemName: "Voxels", isSeparator: true, beforeItem: "Settings" });
Menu.addMenuItem({ menuName: "File", menuItemName: "Export Voxels", shortcutKey: "CTRL+E", afterItem: "Voxels" });
@ -1129,7 +1152,9 @@ function cleanupMenus() {
Menu.removeMenuItem("Edit", "Copy");
Menu.removeMenuItem("Edit", "Paste");
Menu.removeMenuItem("Edit", "Nudge");
Menu.removeMenuItem("Edit", "Delete");
if (voxelMenuAddedDelete) {
Menu.removeMenuItem("Edit", "Delete");
}
Menu.removeSeparator("File", "Voxels");
Menu.removeMenuItem("File", "Export Voxels");
Menu.removeMenuItem("File", "Import Voxels");
@ -1470,4 +1495,4 @@ Script.scriptEnding.connect(scriptEnding);
Script.update.connect(update);
setupMenus();
setupVoxelMenus();

View file

@ -51,7 +51,7 @@ var flockGravity = { x: 0, y: -1, z: 0};
var enableRandomXZThrust = false; // leading birds randomly decide to thrust in some random direction.
var enableSomeBirdsLead = false; // birds randomly decide not fly toward flock, causing other birds to follow
var leaders = 0; // number of birds leading
var PROBABILITY_TO_LEAD = 0.1; // probabolity a bird will choose to lead
var PROBABILITY_TO_LEAD = 0.1; // probability a bird will choose to lead
var birds = new Array(); // array of bird state data

View file

@ -8,6 +8,10 @@
// This is an example script that demonstrates use of the Controller and MyAvatar classes to implement
// avatar flying through the hydra/controller joysticks
//
// The joysticks (on hydra) will drive the avatar much like a playstation controller.
//
// Pressing the '4' or the 'FWD' button and moving/banking the hand will allow you to move and fly.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
@ -15,13 +19,14 @@
var damping = 0.9;
var position = { x: MyAvatar.position.x, y: MyAvatar.position.y, z: MyAvatar.position.z };
var joysticksCaptured = false;
var THRUST_CONTROLLER = 0;
var VIEW_CONTROLLER = 1;
var THRUST_CONTROLLER = 1;
var VIEW_CONTROLLER = 0;
var INITIAL_THRUST_MULTPLIER = 1.0;
var THRUST_INCREASE_RATE = 1.05;
var MAX_THRUST_MULTIPLIER = 75.0;
var thrustMultiplier = INITIAL_THRUST_MULTPLIER;
var grabDelta = { x: 0, y: 0, z: 0};
var grabStartPosition = { x: 0, y: 0, z: 0};
var grabDeltaVelocity = { x: 0, y: 0, z: 0};
var grabStartRotation = { x: 0, y: 0, z: 0, w: 1};
var grabCurrentRotation = { x: 0, y: 0, z: 0, w: 1};
@ -47,8 +52,11 @@ var JOYSTICK_PITCH_MAG = PITCH_MAG * 0.5;
var LEFT_PALM = 0;
var LEFT_BUTTON_4 = 4;
var LEFT_BUTTON_FWD = 5;
var RIGHT_PALM = 2;
var RIGHT_BUTTON_4 = 10;
var RIGHT_BUTTON_FWD = 11;
function printVector(text, v, decimals) {
@ -56,6 +64,72 @@ function printVector(text, v, decimals) {
}
var debug = false;
var RED_COLOR = { red: 255, green: 0, blue: 0 };
var GRAY_COLOR = { red: 25, green: 25, blue: 25 };
var defaultPosition = { x: 0, y: 0, z: 0};
var RADIUS = 0.05;
var greenSphere = -1;
var redSphere = -1;
function createDebugOverlay() {
if (greenSphere == -1) {
greenSphere = Overlays.addOverlay("sphere", {
position: defaultPosition,
size: RADIUS,
color: GRAY_COLOR,
alpha: 0.75,
visible: true,
solid: true,
anchor: "MyAvatar"
});
redSphere = Overlays.addOverlay("sphere", {
position: defaultPosition,
size: RADIUS,
color: RED_COLOR,
alpha: 0.5,
visible: true,
solid: true,
anchor: "MyAvatar"
});
}
}
function destroyDebugOverlay() {
if (greenSphere != -1) {
Overlays.deleteOverlay(greenSphere);
Overlays.deleteOverlay(redSphere);
greenSphere = -1;
redSphere = -1;
}
}
function displayDebug() {
if (!(grabbingWithRightHand || grabbingWithLeftHand)) {
if (greenSphere != -1) {
destroyDebugOverlay();
}
} else {
// update debug indicator
if (greenSphere == -1) {
createDebugOverlay();
}
var displayOffset = { x:0, y:0.5, z:-0.5 };
Overlays.editOverlay(greenSphere, { position: Vec3.sum(grabStartPosition, displayOffset) } );
Overlays.editOverlay(redSphere, { position: Vec3.sum(Vec3.sum(grabStartPosition, grabDelta), displayOffset), size: RADIUS + (0.25 * Vec3.length(grabDelta)) } );
}
}
function getJoystickPosition(palm) {
// returns the given palm controller's position in the avatar's local frame
var invRotation = Quat.inverse(MyAvatar.orientation);
var palmWorld = Controller.getSpatialControlPosition(palm);
var palmRelative = Vec3.subtract(palmWorld, MyAvatar.position);
var palmLocal = Vec3.multiplyQbyV(invRotation, palmRelative);
return palmLocal;
}
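// For reference, a hedged sketch of the inverse mapping (localToWorld is a
// hypothetical name, not part of this script): rotate the local point back into
// world orientation and re-add the avatar's position.
function localToWorld(palmLocal) {
    return Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, palmLocal));
}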
// Used by handleGrabBehavior() for managing the grab position changes
function getAndResetGrabDelta() {
@ -83,11 +157,11 @@ function handleGrabBehavior(deltaTime) {
if (grabbingWithRightHand && !wasGrabbingWithRightHand) {
// Just starting grab, capture starting rotation
grabStartRotation = Controller.getSpatialControlRawRotation(RIGHT_PALM);
grabStartPosition = Controller.getSpatialControlPosition(RIGHT_PALM);
grabStartPosition = getJoystickPosition(RIGHT_PALM);
if (debug) printVector("start position", grabStartPosition, 3);
}
if (grabbingWithRightHand) {
grabDelta = Vec3.subtract(Controller.getSpatialControlPosition(RIGHT_PALM), grabStartPosition);
grabDelta = Vec3.subtract(getJoystickPosition(RIGHT_PALM), grabStartPosition);
grabCurrentRotation = Controller.getSpatialControlRawRotation(RIGHT_PALM);
}
if (!grabbingWithRightHand && wasGrabbingWithRightHand) {
@ -99,12 +173,12 @@ function handleGrabBehavior(deltaTime) {
if (grabbingWithLeftHand && !wasGrabbingWithLeftHand) {
// Just starting grab, capture starting rotation
grabStartRotation = Controller.getSpatialControlRawRotation(LEFT_PALM);
grabStartPosition = Controller.getSpatialControlPosition(LEFT_PALM);
grabStartPosition = getJoystickPosition(LEFT_PALM);
if (debug) printVector("start position", grabStartPosition, 3);
}
if (grabbingWithLeftHand) {
grabDelta = Vec3.subtract(Controller.getSpatialControlPosition(LEFT_PALM), grabStartPosition);
grabDelta = Vec3.subtract(getJoystickPosition(LEFT_PALM), grabStartPosition);
grabCurrentRotation = Controller.getSpatialControlRawRotation(LEFT_PALM);
}
if (!grabbingWithLeftHand && wasGrabbingWithLeftHand) {
@ -122,28 +196,27 @@ function handleGrabBehavior(deltaTime) {
var front = Quat.getFront(headOrientation);
var right = Quat.getRight(headOrientation);
var up = Quat.getUp(headOrientation);
grabDelta = Vec3.multiplyQbyV(MyAvatar.orientation, Vec3.multiply(grabDelta, -1));
if (debug) {
printVector("grabDelta: ", grabDelta, 3);
}
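// Multiplying grabDelta by its own magnitude (below) gives a quadratic response:
// small hand offsets produce gentle thrust, larger offsets ramp up quickly.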
var THRUST_GRAB_SCALING = 0.0;
var thrust = Vec3.multiply(grabDelta, Math.abs(Vec3.length(grabDelta)));
var THRUST_GRAB_SCALING = 100000.0;
var thrustFront = Vec3.multiply(front, MyAvatar.scale * grabDelta.z * THRUST_GRAB_SCALING * deltaTime);
var thrustFront = Vec3.multiply(front, MyAvatar.scale * -thrust.z * THRUST_GRAB_SCALING * deltaTime);
MyAvatar.addThrust(thrustFront);
var thrustRight = Vec3.multiply(right, MyAvatar.scale * grabDelta.x * THRUST_GRAB_SCALING * deltaTime);
var thrustRight = Vec3.multiply(right, MyAvatar.scale * thrust.x * THRUST_GRAB_SCALING * deltaTime);
MyAvatar.addThrust(thrustRight);
var thrustUp = Vec3.multiply(up, MyAvatar.scale * grabDelta.y * THRUST_GRAB_SCALING * deltaTime);
var thrustUp = Vec3.multiply(up, MyAvatar.scale * thrust.y * THRUST_GRAB_SCALING * deltaTime);
MyAvatar.addThrust(thrustUp);
// add some rotation...
var deltaRotation = getGrabRotation();
var PITCH_SCALING = 2.0;
var PITCH_SCALING = 2.5;
var PITCH_DEAD_ZONE = 2.0;
var YAW_SCALING = 2.0;
var YAW_SCALING = 2.5;
var ROLL_SCALING = 2.0;
var euler = Quat.safeEulerAngles(deltaRotation);
@ -207,6 +280,8 @@ function flyWithHydra(deltaTime) {
MyAvatar.headPitch = newPitch;
}
handleGrabBehavior(deltaTime);
displayDebug();
}
Script.update.connect(flyWithHydra);

302
examples/locationsMenu.js Normal file
View file

@ -0,0 +1,302 @@
//
// locationsMenu.js
// examples
//
// Created by Ryan Huffman on 5/28/14
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var scriptUrl = "https://script.google.com/macros/s/AKfycbwIo4lmF-qUwX1Z-9eA_P-g2gse9oFhNcjVyyksGukyDDEFXgU/exec?action=listOwners&domain=alpha.highfidelity.io";
var LocationMenu = function(opts) {
var self = this;
var pageSize = opts.pageSize || 10;
var menuWidth = opts.menuWidth || 150;
var menuHeight = opts.menuItemHeight || 24;
var inactiveColor = { red: 51, green: 102, blue: 102 };
var activeColor = { red: 18, green: 66, blue: 66 };
var prevNextColor = { red: 192, green: 192, blue: 192 };
var disabledColor = { red: 64, green: 64, blue: 64};
var position = { x: 0, y: 0 };
var locationIconUrl = "http://highfidelity-public.s3-us-west-1.amazonaws.com/images/tools/location.svg";
var toolHeight = 50;
var toolWidth = 50;
var visible = false;
var menuItemOffset = {
x: 55,
y: 0,
};
var menuItemPadding = 5;
var margin = 7;
var fullMenuHeight = (2 * menuItemOffset.y) + (menuHeight * (pageSize + 1));
var menuOffset = -fullMenuHeight + toolHeight;
var windowDimensions = Controller.getViewportDimensions();
this.locations = [];
this.numPages = 1;
this.page = 0;
this.menuToggleButton = Overlays.addOverlay("image", {
x: position.x,
y: position.y,
width: toolWidth, height: toolHeight,
subImage: { x: 0, y: toolHeight, width: toolWidth, height: toolHeight },
imageURL: locationIconUrl,
alpha: 0.9
});
this.background = Overlays.addOverlay("text", {
x: 0,
y: 0,
width: menuWidth + 10,
height: (menuHeight * (pageSize + 1)) + 10,
color: { red: 0, green: 0, blue: 0},
topMargin: 4,
leftMargin: 4,
text: "",
visible: visible,
});
this.menuItems = [];
for (var i = 0; i < pageSize; i++) {
var menuItem = Overlays.addOverlay("text", {
x: 0,
y: 0,
width: menuWidth,
height: menuHeight,
color: inactiveColor,
topMargin: margin,
leftMargin: margin,
text: (i == 0) ? "Loading..." : "",
visible: visible,
});
this.menuItems.push({ overlay: menuItem, location: null });
}
this.previousButton = Overlays.addOverlay("text", {
x: 0,
y: 0,
width: menuWidth / 2,
height: menuHeight,
color: disabledColor,
topMargin: margin,
leftMargin: margin,
text: "Previous",
visible: visible,
});
this.nextButton = Overlays.addOverlay("text", {
x: 0,
y: 0,
width: menuWidth / 2,
height: menuHeight,
color: disabledColor,
topMargin: margin,
leftMargin: margin,
text: "Next",
visible: visible,
});
this.reposition = function(force) {
var newWindowDimensions = Controller.getViewportDimensions();
if (force || newWindowDimensions.y != windowDimensions.y) {
windowDimensions = newWindowDimensions;
position.x = 8;
position.y = Math.floor(windowDimensions.y / 2) + 25 + 50 + 8;
Overlays.editOverlay(self.menuToggleButton, {
x: position.x,
y: position.y,
});
Overlays.editOverlay(self.background, {
x: position.x + menuItemOffset.x,
y: position.y + menuItemOffset.y - 2 * menuItemPadding + menuOffset,
});
for (var i = 0; i < pageSize; i++) {
Overlays.editOverlay(self.menuItems[i].overlay, {
x: position.x + menuItemOffset.x + menuItemPadding,
y: position.y + menuItemOffset.y - menuItemPadding + (i * menuHeight) + menuOffset,
});
}
Overlays.editOverlay(self.previousButton, {
x: position.x + menuItemOffset.x + menuItemPadding,
y: position.y + menuItemOffset.y - menuItemPadding + (pageSize * menuHeight) + menuOffset,
});
Overlays.editOverlay(self.nextButton, {
x: position.x + menuItemOffset.x + menuItemPadding + (menuWidth / 2),
y: position.y + menuItemOffset.y - menuItemPadding + (pageSize * menuHeight) + menuOffset,
});
}
}
this.updateLocations = function(locations) {
this.locations = locations;
this.numPages = Math.ceil(locations.length / pageSize);
this.goToPage(0);
}
this.setError = function() {
Overlays.editOverlay(this.menuItems[0].overlay, { text: "Error loading data" });
}
this.toggleMenu = function() {
visible = !visible;
for (var i = 0; i < this.menuItems.length; i++) {
Overlays.editOverlay(this.menuItems[i].overlay, { visible: visible});
}
Overlays.editOverlay(this.previousButton, { visible: visible});
Overlays.editOverlay(this.nextButton, { visible: visible});
Overlays.editOverlay(this.background, { visible: visible});
if (visible) {
Overlays.editOverlay(this.menuToggleButton, { subImage: { x: 0, y: 0, width: toolWidth, height: toolHeight } });
} else {
Overlays.editOverlay(this.menuToggleButton, { subImage: { x: 0, y: toolHeight, width: toolWidth, height: toolHeight } });
}
}
this.goToPage = function(pageNumber) {
if (pageNumber < 0 || pageNumber >= this.numPages) {
return;
}
this.page = pageNumber;
var start = pageNumber * pageSize;
for (var i = 0; i < pageSize; i++) {
var update = {};
var location = null;
if (start + i < this.locations.length) {
location = this.locations[start + i];
update.text = (start + i + 1) + ". " + location.username;
update.color = inactiveColor;
} else {
update.text = "";
update.color = disabledColor;
}
Overlays.editOverlay(this.menuItems[i].overlay, update);
this.menuItems[i].location = location;
}
this.previousEnabled = pageNumber > 0;
this.nextEnabled = pageNumber < (this.numPages - 1);
Overlays.editOverlay(this.previousButton, { color: this.previousEnabled ? prevNextColor : disabledColor});
Overlays.editOverlay(this.nextButton, { color: this.nextEnabled ? prevNextColor : disabledColor });
}
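// Note: numPages is Math.ceil(locations.length / pageSize), so the last page may have
// empty slots; those are drawn with disabledColor and carry no location.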
this.mousePressEvent = function(event) {
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
if (clickedOverlay == self.menuToggleButton) {
self.toggleMenu();
} else if (clickedOverlay == self.previousButton) {
if (self.previousEnabled) {
Overlays.editOverlay(clickedOverlay, { color: activeColor });
}
} else if (clickedOverlay == self.nextButton) {
if (self.nextEnabled) {
Overlays.editOverlay(clickedOverlay, { color: activeColor });
}
} else {
for (var i = 0; i < self.menuItems.length; i++) {
if (clickedOverlay == self.menuItems[i].overlay) {
if (self.menuItems[i].location != null) {
Overlays.editOverlay(clickedOverlay, { color: activeColor });
}
break;
}
}
}
}
this.mouseReleaseEvent = function(event) {
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
if (clickedOverlay == self.previousButton) {
if (self.previousEnabled) {
Overlays.editOverlay(clickedOverlay, { color: inactiveColor });
self.goToPage(self.page - 1);
}
} else if (clickedOverlay == self.nextButton) {
if (self.nextEnabled) {
Overlays.editOverlay(clickedOverlay, { color: inactiveColor });
self.goToPage(self.page + 1);
}
} else {
for (var i = 0; i < self.menuItems.length; i++) {
if (clickedOverlay == self.menuItems[i].overlay) {
if (self.menuItems[i].location != null) {
Overlays.editOverlay(clickedOverlay, { color: inactiveColor });
var location = self.menuItems[i].location;
Window.location = "hifi://" + location.domain + "/"
+ location.x + "," + location.y + "," + location.z;
}
break;
}
}
}
}
this.cleanup = function() {
for (var i = 0; i < self.menuItems.length; i++) {
Overlays.deleteOverlay(self.menuItems[i].overlay);
}
Overlays.deleteOverlay(self.menuToggleButton);
Overlays.deleteOverlay(self.previousButton);
Overlays.deleteOverlay(self.nextButton);
Overlays.deleteOverlay(self.background);
}
Controller.mousePressEvent.connect(this.mousePressEvent);
Controller.mouseReleaseEvent.connect(this.mouseReleaseEvent);
Script.update.connect(this.reposition);
Script.scriptEnding.connect(this.cleanup);
this.reposition(true);
};
var locationMenu = new LocationMenu({ pageSize: 8 });
print("Loading strip data from " + scriptUrl);
var req = new XMLHttpRequest();
req.responseType = 'json';
req.onreadystatechange = function() {
if (req.readyState == req.DONE) {
if (req.status == 200 && req.response != null) {
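// Each response entry is read as [username, x1, x2, y1, y2]; only x1 and y1 are
// used below, with the spawn height fixed at y = 300.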
for (var domain in req.response) {
var locations = req.response[domain];
var users = [];
for (var i = 0; i < locations.length; i++) {
var loc = locations[i];
var x1 = loc[1],
x2 = loc[2],
y1 = loc[3],
y2 = loc[4];
users.push({
domain: domain,
username: loc[0],
x: x1,
y: 300,
z: y1,
});
}
locationMenu.updateLocations(users);
}
} else {
print("Error loading data: " + req.status + " " + req.statusText + ", " + req.errorCode + ": " + req.responseText);
locationMenu.setError();
}
}
}
req.open("GET", scriptUrl);
req.send();

View file

@ -11,7 +11,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var alwaysLook = true; // if you want the mouse look to happen only when you click, change this to false
var alwaysLook = false; // if you want the mouse look to happen only when you click, change this to false
var isMouseDown = false;
var lastX = 0;
var lastY = 0;

View file

@ -13,68 +13,88 @@
function setupMenus() {
Menu.addMenu("Foo");
Menu.addMenuItem("Foo","Foo item 1", "SHIFT+CTRL+F" );
Menu.addMenuItem("Foo","Foo item 2", "SHIFT+F" );
Menu.addMenuItem("Foo","Foo item 3", "META+F" );
Menu.addMenuItem({
menuName: "Foo",
menuItemName: "Foo item 4",
isCheckable: true,
isChecked: true
});
if (!Menu.menuExists("Foo")) {
Window.alert("Adding Menu Foo!");
Menu.addMenu("Foo");
Menu.addMenuItem("Foo","Foo item 1", "SHIFT+CTRL+F" );
Menu.addMenuItem("Foo","Foo item 2", "SHIFT+F" );
Menu.addMenuItem("Foo","Foo item 3", "META+F" );
Menu.addMenuItem({
menuName: "Foo",
menuItemName: "Foo item 5",
shortcutKey: "ALT+F",
isCheckable: true
});
Menu.addMenuItem({
menuName: "Foo",
menuItemName: "Foo item 4",
isCheckable: true,
isChecked: true
});
Menu.addMenuItem({
menuName: "Foo",
menuItemName: "Foo item 5",
shortcutKey: "ALT+F",
isCheckable: true
});
Menu.addSeparator("Foo","Removable Tools");
Menu.addMenuItem("Foo","Remove Foo item 4");
Menu.addMenuItem("Foo","Remove Foo");
Menu.addMenuItem("Foo","Remove Bar-Spam");
Menu.addMenu("Bar");
Menu.addSeparator("Foo","Removable Tools");
Menu.addMenuItem("Foo","Remove Foo item 4");
Menu.addMenuItem("Foo","Remove Foo");
Menu.addMenuItem("Foo","Remove Bar-Spam");
Menu.addMenuItem("Bar","Bar item 1", "b");
Menu.addMenuItem({
menuName: "Bar",
menuItemName: "Bar item 2",
shortcutKeyEvent: { text: "B", isControl: true }
});
Menu.addMenuItem("Foo","Remove Spam item 2");
Menu.addMenu("Bar > Spam");
Menu.addMenuItem("Bar > Spam","Spam item 1");
Menu.addMenuItem({
menuName: "Bar > Spam",
menuItemName: "Spam item 2",
isCheckable: true,
isChecked: false
});
Menu.addMenuItem({
menuName: "Foo",
menuItemName: "Remove Spam item 2"
});
} else {
Window.alert("Menu Foo already exists!");
}
if (!Menu.menuExists("Bar")) {
Window.alert("Adding Menu Bar!");
Menu.addMenu("Bar");
Menu.addMenuItem("Bar","Bar item 1", "b");
Menu.addMenuItem({
menuName: "Bar",
menuItemName: "Bar item 2",
shortcutKeyEvent: { text: "B", isControl: true }
});
Menu.addMenu("Bar > Spam");
Menu.addMenuItem("Bar > Spam","Spam item 1");
Menu.addMenuItem({
menuName: "Bar > Spam",
menuItemName: "Spam item 2",
isCheckable: true,
isChecked: false
});
Menu.addSeparator("Bar > Spam","Other Items");
Menu.addMenuItem("Bar > Spam","Remove Spam item 2");
Menu.addMenuItem("Foo","Remove Spam item 2");
Menu.addSeparator("Bar > Spam","Other Items");
Menu.addMenuItem("Bar > Spam","Remove Spam item 2");
}
Menu.addMenuItem({
menuName: "Foo",
menuItemName: "Remove Spam item 2"
});
Menu.addMenuItem({
menuName: "Edit",
menuItemName: "before Cut",
beforeItem: "Cut"
});
Menu.addMenuItem({
menuName: "Edit",
menuItemName: "after Nudge",
afterItem: "Nudge"
});
if (Menu.menuItemExists("Edit","Cut")) {
Window.alert("Menu Item Cut exist adding 'before Cut'.");
Menu.addMenuItem({
menuName: "Edit",
menuItemName: "before Cut",
beforeItem: "Cut"
});
} else {
Window.alert("Menu Item Cut doesn't exist!");
}
if (Menu.menuItemExists("Edit","Nudge")) {
Window.alert("Menu Item Nudge exist adding 'after Nudge'.");
Menu.addMenuItem({
menuName: "Edit",
menuItemName: "after Nudge",
afterItem: "Nudge"
});
} else {
Window.alert("Menu Item Nudge doesn't exist!");
}
}
function scriptEnding() {
@ -82,6 +102,10 @@ function scriptEnding() {
Menu.removeMenu("Foo");
Menu.removeMenu("Bar");
Menu.removeMenuItem("Edit", "before Cut");
Menu.removeMenuItem("Edit", "after Nudge");
}
function menuItemEvent(menuItem) {

View file

@ -1,202 +0,0 @@
//
// particleBird.js
// examples
//
// Copyright 2014 High Fidelity, Inc.
//
// This sample script moves a voxel around like a bird and sometimes makes tweeting noises
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
function vLength(v) {
return Math.sqrt(v.x * v.x + v.y * v.y + v.z * v.z);
}
function printVector(v) {
print(v.x + ", " + v.y + ", " + v.z + "\n");
}
// Create a random vector with individual lengths between a,b
function randVector(a, b) {
var rval = { x: a + Math.random() * (b - a), y: a + Math.random() * (b - a), z: a + Math.random() * (b - a) };
return rval;
}
function vMinus(a, b) {
var rval = { x: a.x - b.x, y: a.y - b.y, z: a.z - b.z };
return rval;
}
function vPlus(a, b) {
var rval = { x: a.x + b.x, y: a.y + b.y, z: a.z + b.z };
return rval;
}
function vCopy(a, b) {
a.x = b.x;
a.y = b.y;
a.z = b.z;
return;
}
// Returns a vector which is fraction of the way between a and b
function vInterpolate(a, b, fraction) {
var rval = { x: a.x + (b.x - a.x) * fraction, y: a.y + (b.y - a.y) * fraction, z: a.z + (b.z - a.z) * fraction };
return rval;
}
// Decide what kind of bird we are
var tweet;
var color;
var size;
var which = Math.random();
if (which < 0.2) {
tweet = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/bushtit_1.raw");
color = { r: 100, g: 50, b: 120 };
size = 0.08;
} else if (which < 0.4) {
tweet = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/rosyfacedlovebird.raw");
color = { r: 100, g: 150, b: 75 };
size = 0.09;
} else if (which < 0.6) {
tweet = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/saysphoebe.raw");
color = { r: 84, g: 121, b: 36 };
size = 0.05;
} else if (which < 0.8) {
tweet = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/mexicanWhipoorwill.raw");
color = { r: 23, g: 197, b: 230 };
size = 0.12;
} else {
tweet = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/westernscreechowl.raw");
color = { r: 50, g: 67, b: 144 };
size = 0.15;
}
var startTimeInSeconds = new Date().getTime() / 1000;
var birdLifetime = 20; // lifetime of the bird in seconds!
var position = { x: 0, y: 0, z: 0 };
var targetPosition = { x: 0, y: 0, z: 0 };
var range = 1.0; // Over what distance in meters do you want your bird to fly around
var frame = 0;
var moving = false;
var tweeting = 0;
var moved = false;
var CHANCE_OF_MOVING = 0.00;
var CHANCE_OF_FLAPPING = 0.05;
var CHANCE_OF_TWEETING = 0.05;
var START_HEIGHT_ABOVE_ME = 1.5;
var BIRD_GRAVITY = -0.1;
var BIRD_FLAP = 1.0;
var myPosition = MyAvatar.position;
var properties = {
lifetime: birdLifetime,
position: { x: myPosition.x, y: myPosition.y + START_HEIGHT_ABOVE_ME, z: myPosition.z },
velocity: { x: 0, y: Math.random() * BIRD_FLAP, z: 0 },
gravity: { x: 0, y: BIRD_GRAVITY, z: 0 },
radius : 0.1,
color: { red: 0,
green: 255,
blue: 0 }
};
var range = 1.0; // Distance around avatar where I can move
// Create the actual bird
var particleID = Particles.addParticle(properties);
function moveBird(deltaTime) {
// check to see if we've been running long enough that our bird is dead
var nowTimeInSeconds = new Date().getTime() / 1000;
if ((nowTimeInSeconds - startTimeInSeconds) >= birdLifetime) {
print("our bird is dying, stop our script");
Script.stop();
return;
}
myPosition = MyAvatar.position;
frame++;
if (frame % 3 == 0) {
// Tweeting behavior
if (tweeting == 0) {
if (Math.random() < CHANCE_OF_TWEETING) {
//print("tweet!" + "\n");
var options = new AudioInjectionOptions();
options.position = position;
options.volume = 0.75;
Audio.playSound(tweet, options);
tweeting = 10;
}
} else {
tweeting -= 1;
}
if (Math.random() < CHANCE_OF_FLAPPING) {
// Add a little upward impulse to our bird
// TODO: Get velocity
//
var newProperties = {
velocity: { x:0.0, y: Math.random() * BIRD_FLAP, z: 0.0 }
};
Particles.editParticle(particleID, newProperties);
print("flap!");
}
// Moving behavior
if (moving == false) {
if (Math.random() < CHANCE_OF_MOVING) {
targetPosition = randVector(-range, range);
targetPosition = vPlus(targetPosition, myPosition);
if (targetPosition.x < 0) {
targetPosition.x = 0;
}
if (targetPosition.y < 0) {
targetPosition.y = 0;
}
if (targetPosition.z < 0) {
targetPosition.z = 0;
}
if (targetPosition.x > TREE_SCALE) {
targetPosition.x = TREE_SCALE;
}
if (targetPosition.y > TREE_SCALE) {
targetPosition.y = TREE_SCALE;
}
if (targetPosition.z > TREE_SCALE) {
targetPosition.z = TREE_SCALE;
}
//printVector(position);
moving = true;
}
}
if (moving) {
position = vInterpolate(position, targetPosition, 0.5);
if (vLength(vMinus(position, targetPosition)) < (size / 5.0)) {
moved = false;
moving = false;
} else {
moved = true;
}
}
if (moved || (tweeting > 0)) {
if (tweeting > 0) {
var newProperties = {
position: position,
radius : size * 1.5,
color: { red: Math.random() * 255, green: 0, blue: 0 }
};
} else {
var newProperties = {
position: position,
radius : size,
color: { red: color.r, green: color.g, blue: color.b }
};
}
Particles.editParticle(particleID, newProperties);
moved = false;
}
}
}
// register the call back so it fires before each data send
Script.update.connect(moveBird);

199
examples/particleBirds.js Normal file
View file

@ -0,0 +1,199 @@
//
// particleBirds.js
// examples
//
// Created by Benjamin Arnold on May 29, 2014
// Copyright 2014 High Fidelity, Inc.
//
// This sample script creates a swarm of tweeting bird particles that fly around the avatar.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Multiply vector by scalar
function vScalarMult(v, s) {
var rval = { x: v.x * s, y: v.y * s, z: v.z * s };
return rval;
}
function printVector(v) {
print(v.x + ", " + v.y + ", " + v.z + "\n");
}
// Create a random vector with individual lengths between a,b
function randVector(a, b) {
var rval = { x: a + Math.random() * (b - a), y: a + Math.random() * (b - a), z: a + Math.random() * (b - a) };
return rval;
}
// Returns a vector which is fraction of the way between a and b
function vInterpolate(a, b, fraction) {
var rval = { x: a.x + (b.x - a.x) * fraction, y: a.y + (b.y - a.y) * fraction, z: a.z + (b.z - a.z) * fraction };
return rval;
}
var startTimeInSeconds = new Date().getTime() / 1000;
var birdLifetime = 20; // lifetime of the birds in seconds!
var range = 1.0; // Over what distance in meters do you want the flock to fly around
var frame = 0;
var CHANCE_OF_MOVING = 0.1;
var CHANCE_OF_TWEETING = 0.05;
var BIRD_GRAVITY = -0.1;
var BIRD_FLAP_SPEED = 10.0;
var BIRD_VELOCITY = 0.5;
var myPosition = MyAvatar.position;
var range = 1.0; // Distance around avatar where I can move
// This is our Bird object
function Bird (particleID, tweetSound, targetPosition) {
this.particleID = particleID;
this.tweetSound = tweetSound;
this.previousFlapOffset = 0;
this.targetPosition = targetPosition;
this.moving = false;
this.tweeting = -1;
}
// Array of birds
var birds = [];
function addBird()
{
// Decide what kind of bird we are
var tweet;
var color;
var size;
var which = Math.random();
if (which < 0.2) {
tweet = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/bushtit_1.raw");
color = { red: 100, green: 50, blue: 120 };
size = 0.08;
} else if (which < 0.4) {
tweet = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/rosyfacedlovebird.raw");
color = { red: 100, green: 150, blue: 75 };
size = 0.09;
} else if (which < 0.6) {
tweet = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/saysphoebe.raw");
color = { red: 84, green: 121, blue: 36 };
size = 0.05;
} else if (which < 0.8) {
tweet = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/mexicanWhipoorwill.raw");
color = { red: 23, green: 197, blue: 230 };
size = 0.12;
} else {
tweet = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/westernscreechowl.raw");
color = { red: 50, green: 67, blue: 144 };
size = 0.15;
}
var properties = {
lifetime: birdLifetime,
position: Vec3.sum(randVector(-range, range), myPosition),
velocity: { x: 0, y: 0, z: 0 },
gravity: { x: 0, y: BIRD_GRAVITY, z: 0 },
radius : size,
color: color
};
birds.push(new Bird(Particles.addParticle(properties), tweet, properties.position));
}
var numBirds = 30;
// Generate the birds
for (var i = 0; i < numBirds; i++) {
addBird();
}
// Main update function
function updateBirds(deltaTime) {
// Check to see if we've been running long enough that our birds are dead
var nowTimeInSeconds = new Date().getTime() / 1000;
if ((nowTimeInSeconds - startTimeInSeconds) >= birdLifetime) {
print("our birds are dying, stop our script");
Script.stop();
return;
}
frame++;
// Only update every third frame
if ((frame % 3) == 0) {
myPosition = MyAvatar.position;
// Update all the birds
for (var i = 0; i < numBirds; i++) {
particleID = birds[i].particleID;
var properties = Particles.getParticleProperties(particleID);
// Tweeting behavior
if (birds[i].tweeting == 0) {
if (Math.random() < CHANCE_OF_TWEETING) {
var options = new AudioInjectionOptions();
options.position = properties.position;
options.volume = 0.75;
Audio.playSound(birds[i].tweetSound, options);
birds[i].tweeting = 10;
}
} else {
birds[i].tweeting -= 1;
}
// Begin movement by getting a target
if (birds[i].moving == false) {
if (Math.random() < CHANCE_OF_MOVING) {
var targetPosition = Vec3.sum(randVector(-range, range), myPosition);
if (targetPosition.x < 0) {
targetPosition.x = 0;
}
if (targetPosition.y < 0) {
targetPosition.y = 0;
}
if (targetPosition.z < 0) {
targetPosition.z = 0;
}
if (targetPosition.x > TREE_SCALE) {
targetPosition.x = TREE_SCALE;
}
if (targetPosition.y > TREE_SCALE) {
targetPosition.y = TREE_SCALE;
}
if (targetPosition.z > TREE_SCALE) {
targetPosition.z = TREE_SCALE;
}
birds[i].targetPosition = targetPosition;
birds[i].moving = true;
}
}
// If we are moving, move towards the target
if (birds[i].moving) {
var desiredVelocity = Vec3.subtract(birds[i].targetPosition, properties.position);
desiredVelocity = vScalarMult(Vec3.normalize(desiredVelocity), BIRD_VELOCITY);
properties.velocity = vInterpolate(properties.velocity, desiredVelocity, 0.2);
// If we are near the target, we should get a new target
if (Vec3.length(Vec3.subtract(properties.position, birds[i].targetPosition)) < (properties.radius / 5.0)) {
birds[i].moving = false;
}
}
// Use a cosine wave offset to make it look like it's flapping.
var offset = Math.cos(nowTimeInSeconds * BIRD_FLAP_SPEED) * properties.radius;
properties.position.y = properties.position.y + (offset - birds[i].previousFlapOffset);
// Change position relative to previous offset.
birds[i].previousFlapOffset = offset;
// Update the particle
Particles.editParticle(particleID, properties);
}
}
}
// register the call back so it fires before each data send
Script.update.connect(updateBirds);
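// A hedged sketch of the flapping offset applied above (flapDelta is a hypothetical
// name): applying the *difference* between successive cosine samples nudges the bird
// around its current height each frame instead of jumping to an absolute offset.
function flapDelta(timeNowInSeconds, previousOffset, radius) {
    var offset = Math.cos(timeNowInSeconds * BIRD_FLAP_SPEED) * radius;
    return { delta: offset - previousOffset, offset: offset };
}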

View file

@ -236,7 +236,7 @@ function checkControllerSide(whichSide) {
debugPrint("modelRadius=" +modelRadius);
newModel = Models.addModel(properties);
//newModel = Models.addModel(properties);
print("just added model... newModel=" + newModel.creatorTokenID);
print("properties.animationURL=" + properties.animationURL);

View file

@ -3,22 +3,22 @@
// examples
//
// Copyright 2014 High Fidelity, Inc.
// This sample script loads a sound file and plays it at the 'fingertip' of the
// Plays a sample audio file at the avatar's current location
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// First, load the clap sound from a URL
var clap = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/bushtit_1.raw");
// First, load a sample sound from a URL
var bird = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Animals/bushtit_1.raw");
function maybePlaySound(deltaTime) {
if (Math.random() < 0.01) {
// Set the location and other info for the sound to play
var options = new AudioInjectionOptions();
var palmPosition = Controller.getSpatialControlPosition(0);
options.position = palmPosition;
var position = MyAvatar.position;
options.position = position;
options.volume = 0.5;
Audio.playSound(clap, options);
Audio.playSound(bird, options);
}
}

43
examples/playSoundLoop.js Normal file
View file

@ -0,0 +1,43 @@
//
// playSoundLoop.js
// examples
//
// Created by David Rowe on 5/29/14.
// Copyright 2014 High Fidelity, Inc.
//
// This example script plays a sound in a continuous loop.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var sound = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Guitars/Guitar+-+Nylon+A.raw");
var soundPlaying = false;
function keyPressEvent(event) {
if (event.text === "1") {
if (!Audio.isInjectorPlaying(soundPlaying)) {
var options = new AudioInjectionOptions();
options.position = MyAvatar.position;
options.volume = 0.5;
options.loop = true;
soundPlaying = Audio.playSound(sound, options);
print("Started sound loop");
} else {
Audio.stopInjector(soundPlaying);
print("Stopped sound loop");
}
}
}
function scriptEnding() {
if (Audio.isInjectorPlaying(soundPlaying)) {
Audio.stopInjector(soundPlaying);
print("Stopped sound loop");
}
}
// Connect a call back that happens every frame
Script.scriptEnding.connect(scriptEnding);
Controller.keyPressEvent.connect(keyPressEvent);

22
examples/playSoundWave.js Normal file
View file

@ -0,0 +1,22 @@
//
// playSoundWave.js
// examples
//
// Created by Ryan Huffman on 05/27/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var soundClip = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Cocktail%20Party%20Snippets/Walken1.wav");
function playSound() {
var options = new AudioInjectionOptions();
var position = MyAvatar.position;
options.position = position;
options.volume = 0.5;
Audio.playSound(soundClip, options);
}
Script.setInterval(playSound, 10000);

View file

@ -41,6 +41,22 @@ function mouseMoveEvent(event) {
print("voxelAt.x/y/z/s=" + voxelAt.x + ", " + voxelAt.y + ", " + voxelAt.z + ": " + voxelAt.s);
print("voxelAt.red/green/blue=" + voxelAt.red + ", " + voxelAt.green + ", " + voxelAt.blue);
}
intersection = Models.findRayIntersection(pickRay);
if (!intersection.accurate) {
print(">>> NOTE: intersection not accurate. will try calling Models.findRayIntersectionBlocking()");
intersection = Models.findRayIntersectionBlocking(pickRay);
print(">>> AFTER BLOCKING CALL intersection.accurate=" + intersection.accurate);
}
if (intersection.intersects) {
print("intersection modelID.id=" + intersection.modelID.id);
print("intersection modelProperties.modelURL=" + intersection.modelProperties.modelURL);
print("intersection face=" + intersection.face);
print("intersection distance=" + intersection.distance);
print("intersection intersection.x/y/z=" + intersection.intersection.x + ", "
+ intersection.intersection.y + ", " + intersection.intersection.z);
}
}
Controller.mouseMoveEvent.connect(mouseMoveEvent);

View file

@ -40,6 +40,8 @@ var passedTime = 0.0;
var startPosition = null;
var animationLenght = 2.0;
var sitting = false;
// This is the pose we would like to end up in
var pose = [
{joint:"RightUpLeg", rotation: {x:100.0, y:15.0, z:0.0}},
@ -101,31 +103,41 @@ var standingUpAnimation = function(deltaTime){
}
}
function sitDown() {
sitting = true;
passedTime = 0.0;
startPosition = MyAvatar.position;
storeStartPoseAndTransition();
try{
Script.update.disconnect(standingUpAnimation);
} catch(e){
// no need to handle; if it wasn't connected, no harm done
}
Script.update.connect(sittingDownAnimation);
Overlays.editOverlay(sitDownButton, { visible: false });
Overlays.editOverlay(standUpButton, { visible: true });
}
function standUp() {
sitting = false;
passedTime = 0.0;
startPosition = MyAvatar.position;
try{
Script.update.disconnect(sittingDownAnimation);
} catch (e){}
Script.update.connect(standingUpAnimation);
Overlays.editOverlay(standUpButton, { visible: false });
Overlays.editOverlay(sitDownButton, { visible: true });
}
Controller.mousePressEvent.connect(function(event){
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
if (clickedOverlay == sitDownButton) {
passedTime = 0.0;
startPosition = MyAvatar.position;
storeStartPoseAndTransition();
try{
Script.update.disconnect(standingUpAnimation);
} catch(e){
// no need to handle; if it wasn't connected, no harm done
}
Script.update.connect(sittingDownAnimation);
Overlays.editOverlay(sitDownButton, { visible: false });
Overlays.editOverlay(standUpButton, { visible: true });
sitDown();
} else if (clickedOverlay == standUpButton) {
passedTime = 0.0;
startPosition = MyAvatar.position;
try{
Script.update.disconnect(sittingDownAnimation);
} catch (e){}
Script.update.connect(standingUpAnimation);
Overlays.editOverlay(standUpButton, { visible: false });
Overlays.editOverlay(sitDownButton, { visible: true });
standUp();
}
})
@ -140,12 +152,24 @@ function update(deltaTime){
}
}
function keyPressEvent(event) {
if (event.text === ".") {
if (sitting) {
standUp();
} else {
sitDown();
}
}
}
Script.update.connect(update);
Controller.keyPressEvent.connect(keyPressEvent);
Script.scriptEnding.connect(function() {
for (var i = 0; i < pose.length; i++){
MyAvatar.clearJointData(pose[i][0]);
MyAvatar.clearJointData(pose[i].joint);
}
Overlays.deleteOverlay(sitDownButton);
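
Note (not part of this commit): sitDown() and standUp() above both wrap Script.update.disconnect in a try/catch because disconnecting a handler that was never connected throws. If the guard keeps recurring, it can be factored into a helper; a sketch assuming signal objects expose disconnect as shown above:

function safeDisconnect(signal, handler) {
    try {
        signal.disconnect(handler);
    } catch (e) {
        // handler was not connected; nothing to undo
    }
}
// usage: safeDisconnect(Script.update, standingUpAnimation);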

examples/squeezeHands.js (new file, 52 lines)

@ -0,0 +1,52 @@
//
// squeezeHands.js
// examples
//
// Created by Philip Rosedale on June 4, 2014
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var rightHandAnimation = "https://s3-us-west-1.amazonaws.com/highfidelity-public/animations/RightHandAnim.fbx";
var leftHandAnimation = "https://s3-us-west-1.amazonaws.com/highfidelity-public/animations/LeftHandAnim.fbx";
var LEFT = 0;
var RIGHT = 1;
var lastLeftFrame = 0;
var lastRightFrame = 0;
var LAST_FRAME = 11.0; // the number of the last frame we want to use in the animation
var SMOOTH_FACTOR = 0.80;
Script.update.connect(function(deltaTime) {
var leftTriggerValue = Math.sqrt(Controller.getTriggerValue(LEFT));
var rightTriggerValue = Math.sqrt(Controller.getTriggerValue(RIGHT));
var leftFrame, rightFrame;
// Average last few trigger frames together for a bit of smoothing
leftFrame = (leftTriggerValue * LAST_FRAME) * (1.0 - SMOOTH_FACTOR) + lastLeftFrame * SMOOTH_FACTOR;
rightFrame = (rightTriggerValue * LAST_FRAME) * (1.0 - SMOOTH_FACTOR) + lastRightFrame * SMOOTH_FACTOR;
if ((leftFrame != lastLeftFrame) && leftHandAnimation.length){
MyAvatar.stopAnimation(leftHandAnimation);
MyAvatar.startAnimation(leftHandAnimation, 30.0, 1.0, false, true, leftFrame, leftFrame);
}
if ((rightFrame != lastRightFrame) && rightHandAnimation.length) {
MyAvatar.stopAnimation(rightHandAnimation);
MyAvatar.startAnimation(rightHandAnimation, 30.0, 1.0, false, true, rightFrame, rightFrame);
}
lastLeftFrame = leftFrame;
lastRightFrame = rightFrame;
});
Script.scriptEnding.connect(function() {
MyAvatar.stopAnimation(leftHandAnimation);
MyAvatar.stopAnimation(rightHandAnimation);
});
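
Note (not part of this commit): the SMOOTH_FACTOR blend above is an exponential moving average (a one-pole low-pass filter): each update moves the frame 20% of the remaining distance toward the trigger target, so after n updates only SMOOTH_FACTOR^n of a step change remains. The recurrence in isolation:

function smooth(previousValue, targetValue) {
    var SMOOTH_FACTOR = 0.80; // 0 = no smoothing, nearer 1 = heavier smoothing
    return targetValue * (1.0 - SMOOTH_FACTOR) + previousValue * SMOOTH_FACTOR;
}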

@ -0,0 +1,40 @@
#version 120
//
// cascaded_shadow_map.frag
// fragment shader
//
// Created by Andrzej Kapolka on 5/29/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the shadow texture
uniform sampler2DShadow shadowMap;
// the distances to the cascade sections
uniform vec3 shadowDistances;
// the inverse of the size of the shadow map
const float shadowScale = 1.0 / 2048.0;
// the color in shadow
varying vec4 shadowColor;
// the interpolated position
varying vec4 position;
void main(void) {
// compute the index of the cascade to use and the corresponding texture coordinates
int shadowIndex = int(dot(step(vec3(position.z), shadowDistances), vec3(1.0, 1.0, 1.0)));
vec3 shadowTexCoord = vec3(dot(gl_EyePlaneS[shadowIndex], position), dot(gl_EyePlaneT[shadowIndex], position),
dot(gl_EyePlaneR[shadowIndex], position));
gl_FragColor = mix(shadowColor, gl_Color, 0.25 *
(shadow2D(shadowMap, shadowTexCoord + vec3(-shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(-shadowScale, shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, shadowScale, 0.0)).r));
}
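
Note (not part of this commit): int(dot(step(vec3(position.z), shadowDistances), vec3(1.0, 1.0, 1.0))) above is a branchless cascade selector. step(edge, x) yields 1.0 wherever x >= edge, so the dot product counts how many of the three split distances are still >= the fragment's view-space z. An equivalent scalar sketch:

function cascadeIndex(fragmentZ, shadowDistances) {
    var index = 0;
    for (var i = 0; i < 3; i++) {
        if (shadowDistances[i] >= fragmentZ) {
            index++;
        }
    }
    return index; // 0..3, selecting among the four cascades
}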

@ -0,0 +1,33 @@
#version 120
//
// cascaded_shadow_map.vert
// vertex shader
//
// Created by Andrzej Kapolka on 5/29/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the color in shadow
varying vec4 shadowColor;
// the interpolated position
varying vec4 position;
void main(void) {
// the shadow color includes only the ambient terms
shadowColor = gl_Color * (gl_LightModel.ambient + gl_LightSource[0].ambient);
// the normal color includes diffuse
vec4 normal = normalize(gl_ModelViewMatrix * vec4(gl_Normal, 0.0));
gl_FrontColor = shadowColor + gl_Color * (gl_LightSource[0].diffuse * max(0.0, dot(normal, gl_LightSource[0].position)));
// generate the shadow texture coordinates using the eye position
position = gl_ModelViewMatrix * gl_Vertex;
// use the fixed function transform
gl_Position = ftransform();
}

@ -14,6 +14,9 @@
// the diffuse texture
uniform sampler2D diffuseMap;
// the interpolated position
varying vec4 position;
// the interpolated normal
varying vec4 normal;
@ -26,7 +29,7 @@ void main(void) {
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position + vec4(0.0, 0.0, 1.0, 0.0)),
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position - normalize(vec4(position.xyz, 0.0))),
normalizedNormal));
// modulate texture by base color and add specular contribution
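
Note (not part of this commit): the specular rewrite above (repeated in the other model shaders below) replaces a fixed +z view direction with the true per-fragment view vector. In eye space the camera sits at the origin, so the direction from the fragment toward the eye is -normalize(position), and the Blinn-Phong half-vector normalize(L + V) becomes normalize(lightPosition - normalize(position)). A sketch of the same construction, assuming the Interface script Vec3 helpers:

function blinnPhongHalfVector(lightDirection, fragmentPosition) {
    // view vector: from the fragment back toward the eye at the origin
    var view = Vec3.multiply(-1.0, Vec3.normalize(fragmentPosition));
    return Vec3.normalize(Vec3.sum(lightDirection, view));
}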

@ -11,6 +11,9 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the interpolated position
varying vec4 position;
// the interpolated normal
varying vec4 normal;
@ -19,12 +22,18 @@ void main(void) {
// transform and store the normal for interpolation
normal = normalize(gl_ModelViewMatrix * vec4(gl_Normal, 0.0));
// likewise with the position
position = gl_ModelViewMatrix * gl_Vertex;
// pass along the vertex color
gl_FrontColor = gl_Color;
// and the texture coordinates
gl_TexCoord[0] = gl_MultiTexCoord0;
// and the shadow texture coordinates
gl_TexCoord[1] = vec4(dot(gl_EyePlaneS[0], position), dot(gl_EyePlaneT[0], position), dot(gl_EyePlaneR[0], position), 1.0);
// use standard pipeline transform
gl_Position = ftransform();
}

@ -0,0 +1,56 @@
#version 120
//
// model_cascaded_shadow_map.frag
// fragment shader
//
// Created by Andrzej Kapolka on 5/29/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the diffuse texture
uniform sampler2D diffuseMap;
// the shadow texture
uniform sampler2DShadow shadowMap;
// the distances to the cascade sections
uniform vec3 shadowDistances;
// the inverse of the size of the shadow map
const float shadowScale = 1.0 / 2048.0;
// the interpolated position
varying vec4 position;
// the interpolated normal
varying vec4 normal;
void main(void) {
// compute the index of the cascade to use and the corresponding texture coordinates
int shadowIndex = int(dot(step(vec3(position.z), shadowDistances), vec3(1.0, 1.0, 1.0)));
vec3 shadowTexCoord = vec3(dot(gl_EyePlaneS[shadowIndex], position), dot(gl_EyePlaneT[shadowIndex], position),
dot(gl_EyePlaneR[shadowIndex], position));
// compute the base color based on OpenGL lighting model
vec4 normalizedNormal = normalize(normal);
float diffuse = dot(normalizedNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, shadowTexCoord + vec3(-shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(-shadowScale, shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position - normalize(vec4(position.xyz, 0.0))),
normalizedNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb, 0.0);
}

@ -0,0 +1,69 @@
#version 120
//
// model_cascaded_shadow_normal_map.frag
// fragment shader
//
// Created by Andrzej Kapolka on 5/29/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the diffuse texture
uniform sampler2D diffuseMap;
// the normal map texture
uniform sampler2D normalMap;
// the shadow texture
uniform sampler2DShadow shadowMap;
// the distances to the cascade sections
uniform vec3 shadowDistances;
// the inverse of the size of the shadow map
const float shadowScale = 1.0 / 2048.0;
// the interpolated position
varying vec4 interpolatedPosition;
// the interpolated normal
varying vec4 interpolatedNormal;
// the interpolated tangent
varying vec4 interpolatedTangent;
void main(void) {
vec3 normalizedNormal = normalize(vec3(interpolatedNormal));
vec3 normalizedTangent = normalize(vec3(interpolatedTangent));
vec3 normalizedBitangent = normalize(cross(normalizedNormal, normalizedTangent));
vec3 localNormal = vec3(texture2D(normalMap, gl_TexCoord[0].st)) * 2.0 - vec3(1.0, 1.0, 1.0);
// compute the index of the cascade to use and the corresponding texture coordinates
int shadowIndex = int(dot(step(vec3(interpolatedPosition.z), shadowDistances), vec3(1.0, 1.0, 1.0)));
vec3 shadowTexCoord = vec3(dot(gl_EyePlaneS[shadowIndex], interpolatedPosition),
dot(gl_EyePlaneT[shadowIndex], interpolatedPosition),
dot(gl_EyePlaneR[shadowIndex], interpolatedPosition));
// compute the base color based on OpenGL lighting model
vec4 viewNormal = vec4(normalizedTangent * localNormal.x +
normalizedBitangent * localNormal.y + normalizedNormal * localNormal.z, 0.0);
float diffuse = dot(viewNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, shadowTexCoord + vec3(-shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(-shadowScale, shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position -
normalize(vec4(vec3(interpolatedPosition), 0.0))), viewNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb, 0.0);
}

@ -0,0 +1,72 @@
#version 120
//
// model_cascaded_shadow_normal_specular_map.frag
// fragment shader
//
// Created by Andrzej Kapolka on 5/29/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the diffuse texture
uniform sampler2D diffuseMap;
// the normal map texture
uniform sampler2D normalMap;
// the specular map texture
uniform sampler2D specularMap;
// the shadow texture
uniform sampler2DShadow shadowMap;
// the distances to the cascade sections
uniform vec3 shadowDistances;
// the inverse of the size of the shadow map
const float shadowScale = 1.0 / 2048.0;
// the interpolated position
varying vec4 interpolatedPosition;
// the interpolated normal
varying vec4 interpolatedNormal;
// the interpolated tangent
varying vec4 interpolatedTangent;
void main(void) {
vec3 normalizedNormal = normalize(vec3(interpolatedNormal));
vec3 normalizedTangent = normalize(vec3(interpolatedTangent));
vec3 normalizedBitangent = normalize(cross(normalizedNormal, normalizedTangent));
vec3 localNormal = vec3(texture2D(normalMap, gl_TexCoord[0].st)) * 2.0 - vec3(1.0, 1.0, 1.0);
// compute the index of the cascade to use and the corresponding texture coordinates
int shadowIndex = int(dot(step(vec3(interpolatedPosition.z), shadowDistances), vec3(1.0, 1.0, 1.0)));
vec3 shadowTexCoord = vec3(dot(gl_EyePlaneS[shadowIndex], interpolatedPosition),
dot(gl_EyePlaneT[shadowIndex], interpolatedPosition),
dot(gl_EyePlaneR[shadowIndex], interpolatedPosition));
// compute the base color based on OpenGL lighting model
vec4 viewNormal = vec4(normalizedTangent * localNormal.x +
normalizedBitangent * localNormal.y + normalizedNormal * localNormal.z, 0.0);
float diffuse = dot(viewNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, shadowTexCoord + vec3(-shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(-shadowScale, shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position -
normalize(vec4(interpolatedPosition.xyz, 0.0))), viewNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) + vec4(pow(specular, gl_FrontMaterial.shininess) *
gl_FrontLightProduct[0].specular.rgb * texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
}

@ -0,0 +1,59 @@
#version 120
//
// model_cascaded_shadow_specular_map.frag
// fragment shader
//
// Created by Andrzej Kapolka on 5/29/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the diffuse texture
uniform sampler2D diffuseMap;
// the specular texture
uniform sampler2D specularMap;
// the shadow texture
uniform sampler2DShadow shadowMap;
// the distances to the cascade sections
uniform vec3 shadowDistances;
// the inverse of the size of the shadow map
const float shadowScale = 1.0 / 2048.0;
// the interpolated position in view space
varying vec4 position;
// the interpolated normal
varying vec4 normal;
void main(void) {
// compute the index of the cascade to use and the corresponding texture coordinates
int shadowIndex = int(dot(step(vec3(position.z), shadowDistances), vec3(1.0, 1.0, 1.0)));
vec3 shadowTexCoord = vec3(dot(gl_EyePlaneS[shadowIndex], position), dot(gl_EyePlaneT[shadowIndex], position),
dot(gl_EyePlaneR[shadowIndex], position));
// compute the base color based on OpenGL lighting model
vec4 normalizedNormal = normalize(normal);
float diffuse = dot(normalizedNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position - normalize(vec4(position.xyz, 0.0))),
normalizedNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) + vec4(pow(specular, gl_FrontMaterial.shininess) *
gl_FrontLightProduct[0].specular.rgb * texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
}

@ -17,6 +17,9 @@ uniform sampler2D diffuseMap;
// the normal map texture
uniform sampler2D normalMap;
// the interpolated position
varying vec4 interpolatedPosition;
// the interpolated normal
varying vec4 interpolatedNormal;
@ -38,8 +41,8 @@ void main(void) {
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position + vec4(0.0, 0.0, 1.0, 0.0)),
viewNormal));
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position -
normalize(vec4(vec3(interpolatedPosition), 0.0))), viewNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) +

@ -14,6 +14,9 @@
// the tangent vector
attribute vec3 tangent;
// the interpolated position
varying vec4 interpolatedPosition;
// the interpolated normal
varying vec4 interpolatedNormal;
@ -22,7 +25,8 @@ varying vec4 interpolatedTangent;
void main(void) {
// transform and store the normal and tangent for interpolation
// transform and store the position, normal and tangent for interpolation
interpolatedPosition = gl_ModelViewMatrix * gl_Vertex;
interpolatedNormal = gl_ModelViewMatrix * vec4(gl_Normal, 0.0);
interpolatedTangent = gl_ModelViewMatrix * vec4(tangent, 0.0);
@ -32,6 +36,10 @@ void main(void) {
// and the texture coordinates
gl_TexCoord[0] = gl_MultiTexCoord0;
// and the shadow texture coordinates
gl_TexCoord[1] = vec4(dot(gl_EyePlaneS[0], interpolatedPosition), dot(gl_EyePlaneT[0], interpolatedPosition),
dot(gl_EyePlaneR[0], interpolatedPosition), 1.0);
// use standard pipeline transform
gl_Position = ftransform();
}

@ -20,6 +20,9 @@ uniform sampler2D normalMap;
// the specular map texture
uniform sampler2D specularMap;
// the interpolated position
varying vec4 interpolatedPosition;
// the interpolated normal
varying vec4 interpolatedNormal;
@ -41,8 +44,8 @@ void main(void) {
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position + vec4(0.0, 0.0, 1.0, 0.0)),
viewNormal));
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position -
normalize(vec4(interpolatedPosition.xyz, 0.0))), viewNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) + vec4(pow(specular, gl_FrontMaterial.shininess) *

@ -0,0 +1,48 @@
#version 120
//
// model_shadow_map.frag
// fragment shader
//
// Created by Andrzej Kapolka on 5/23/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the diffuse texture
uniform sampler2D diffuseMap;
// the shadow texture
uniform sampler2DShadow shadowMap;
// the inverse of the size of the shadow map
const float shadowScale = 1.0 / 2048.0;
// the interpolated position
varying vec4 position;
// the interpolated normal
varying vec4 normal;
void main(void) {
// compute the base color based on OpenGL lighting model
vec4 normalizedNormal = normalize(normal);
float diffuse = dot(normalizedNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position - normalize(vec4(position.xyz, 0.0))),
normalizedNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb, 0.0);
}

@ -0,0 +1,60 @@
#version 120
//
// model_shadow_normal_map.frag
// fragment shader
//
// Created by Andrzej Kapolka on 5/23/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the diffuse texture
uniform sampler2D diffuseMap;
// the normal map texture
uniform sampler2D normalMap;
// the shadow texture
uniform sampler2DShadow shadowMap;
// the inverse of the size of the shadow map
const float shadowScale = 1.0 / 2048.0;
// the interpolated position
varying vec4 interpolatedPosition;
// the interpolated normal
varying vec4 interpolatedNormal;
// the interpolated tangent
varying vec4 interpolatedTangent;
void main(void) {
vec3 normalizedNormal = normalize(vec3(interpolatedNormal));
vec3 normalizedTangent = normalize(vec3(interpolatedTangent));
vec3 normalizedBitangent = normalize(cross(normalizedNormal, normalizedTangent));
vec3 localNormal = vec3(texture2D(normalMap, gl_TexCoord[0].st)) * 2.0 - vec3(1.0, 1.0, 1.0);
// compute the base color based on OpenGL lighting model
vec4 viewNormal = vec4(normalizedTangent * localNormal.x +
normalizedBitangent * localNormal.y + normalizedNormal * localNormal.z, 0.0);
float diffuse = dot(viewNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position -
normalize(vec4(vec3(interpolatedPosition), 0.0))), viewNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb, 0.0);
}

@ -0,0 +1,63 @@
#version 120
//
// model_shadow_normal_specular_map.frag
// fragment shader
//
// Created by Andrzej Kapolka on 5/23/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the diffuse texture
uniform sampler2D diffuseMap;
// the normal map texture
uniform sampler2D normalMap;
// the specular map texture
uniform sampler2D specularMap;
// the shadow texture
uniform sampler2DShadow shadowMap;
// the inverse of the size of the shadow map
const float shadowScale = 1.0 / 2048.0;
// the interpolated position
varying vec4 interpolatedPosition;
// the interpolated normal
varying vec4 interpolatedNormal;
// the interpolated tangent
varying vec4 interpolatedTangent;
void main(void) {
vec3 normalizedNormal = normalize(vec3(interpolatedNormal));
vec3 normalizedTangent = normalize(vec3(interpolatedTangent));
vec3 normalizedBitangent = normalize(cross(normalizedNormal, normalizedTangent));
vec3 localNormal = vec3(texture2D(normalMap, gl_TexCoord[0].st)) * 2.0 - vec3(1.0, 1.0, 1.0);
// compute the base color based on OpenGL lighting model
vec4 viewNormal = vec4(normalizedTangent * localNormal.x +
normalizedBitangent * localNormal.y + normalizedNormal * localNormal.z, 0.0);
float diffuse = dot(viewNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position -
normalize(vec4(interpolatedPosition.xyz, 0.0))), viewNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) + vec4(pow(specular, gl_FrontMaterial.shininess) *
gl_FrontLightProduct[0].specular.rgb * texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
}

@ -0,0 +1,51 @@
#version 120
//
// model_shadow_specular_map.frag
// fragment shader
//
// Created by Andrzej Kapolka on 5/23/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the diffuse texture
uniform sampler2D diffuseMap;
// the specular texture
uniform sampler2D specularMap;
// the shadow texture
uniform sampler2DShadow shadowMap;
// the inverse of the size of the shadow map
const float shadowScale = 1.0 / 2048.0;
// the interpolated position in view space
varying vec4 position;
// the interpolated normal
varying vec4 normal;
void main(void) {
// compute the base color based on OpenGL lighting model
vec4 normalizedNormal = normalize(normal);
float diffuse = dot(normalizedNormal, gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, shadowScale, 0.0)).r);
vec4 base = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position - normalize(vec4(position.xyz, 0.0))),
normalizedNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) + vec4(pow(specular, gl_FrontMaterial.shininess) *
gl_FrontLightProduct[0].specular.rgb * texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
}

@ -17,6 +17,9 @@ uniform sampler2D diffuseMap;
// the specular texture
uniform sampler2D specularMap;
// the interpolated position in view space
varying vec4 position;
// the interpolated normal
varying vec4 normal;
@ -29,7 +32,7 @@ void main(void) {
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
// compute the specular component (sans exponent)
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position + vec4(0.0, 0.0, 1.0, 0.0)),
float specular = facingLight * max(0.0, dot(normalize(gl_LightSource[0].position - normalize(vec4(position.xyz, 0.0))),
normalizedNormal));
// modulate texture by base color and add specular contribution

@ -13,8 +13,16 @@
uniform sampler2DShadow shadowMap;
// the inverse of the size of the shadow map
const float shadowScale = 1.0 / 2048.0;
// the color in shadow
varying vec4 shadowColor;
void main(void) {
gl_FragColor = mix(shadowColor, gl_Color, shadow2D(shadowMap, gl_TexCoord[0].stp));
gl_FragColor = mix(shadowColor, gl_Color, 0.25 *
(shadow2D(shadowMap, gl_TexCoord[0].stp + vec3(-shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[0].stp + vec3(-shadowScale, shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[0].stp + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[0].stp + vec3(shadowScale, shadowScale, 0.0)).r));
}
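
Note (not part of this commit): the change above replaces a single shadow-map tap with the average of four taps offset by one texel along each diagonal, i.e. 2x2 percentage-closer filtering, which softens shadow edges; shadowScale = 1/2048 is one texel for the 2048-pixel map. The same averaging as a sketch, with tap standing in for shadow2D:

var SHADOW_MAP_SIZE = 2048;
var shadowScale = 1.0 / SHADOW_MAP_SIZE; // one texel in texture coordinates
function pcf2x2(tap, s, t, depth) {
    return 0.25 * (tap(s - shadowScale, t - shadowScale, depth) +
                   tap(s - shadowScale, t + shadowScale, depth) +
                   tap(s + shadowScale, t - shadowScale, depth) +
                   tap(s + shadowScale, t + shadowScale, depth));
}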

@ -11,6 +11,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the color in shadow
varying vec4 shadowColor;
void main(void) {
@ -21,10 +22,10 @@ void main(void) {
vec4 normal = normalize(gl_ModelViewMatrix * vec4(gl_Normal, 0.0));
gl_FrontColor = shadowColor + gl_Color * (gl_LightSource[0].diffuse * max(0.0, dot(normal, gl_LightSource[0].position)));
// generate the shadow texture coordinate using the eye position
// generate the shadow texture coordinates using the eye position
vec4 eyePosition = gl_ModelViewMatrix * gl_Vertex;
gl_TexCoord[0] = vec4(dot(gl_EyePlaneS[0], eyePosition), dot(gl_EyePlaneT[0], eyePosition),
dot(gl_EyePlaneR[0], eyePosition), 1.0);
dot(gl_EyePlaneR[0], eyePosition), 1.0);
// use the fixed function transform
gl_Position = ftransform();

@ -19,11 +19,14 @@ uniform mat4 clusterMatrices[MAX_CLUSTERS];
attribute vec4 clusterIndices;
attribute vec4 clusterWeights;
// the interpolated position
varying vec4 position;
// the interpolated normal
varying vec4 normal;
void main(void) {
vec4 position = vec4(0.0, 0.0, 0.0, 0.0);
position = vec4(0.0, 0.0, 0.0, 0.0);
normal = vec4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < INDICES_PER_VERTEX; i++) {
mat4 clusterMatrix = clusterMatrices[int(clusterIndices[i])];
@ -31,7 +34,7 @@ void main(void) {
position += clusterMatrix * gl_Vertex * clusterWeight;
normal += clusterMatrix * vec4(gl_Normal, 0.0) * clusterWeight;
}
position = gl_ModelViewProjectionMatrix * position;
position = gl_ModelViewMatrix * position;
normal = normalize(gl_ModelViewMatrix * normal);
// pass along the vertex color
@ -40,5 +43,8 @@ void main(void) {
// and the texture coordinates
gl_TexCoord[0] = gl_MultiTexCoord0;
gl_Position = position;
// and the shadow texture coordinates
gl_TexCoord[1] = vec4(dot(gl_EyePlaneS[0], position), dot(gl_EyePlaneT[0], position), dot(gl_EyePlaneR[0], position), 1.0);
gl_Position = gl_ProjectionMatrix * position;
}

@ -22,6 +22,9 @@ attribute vec3 tangent;
attribute vec4 clusterIndices;
attribute vec4 clusterWeights;
// the interpolated position
varying vec4 interpolatedPosition;
// the interpolated normal
varying vec4 interpolatedNormal;
@ -29,17 +32,17 @@ varying vec4 interpolatedNormal;
varying vec4 interpolatedTangent;
void main(void) {
vec4 position = vec4(0.0, 0.0, 0.0, 0.0);
interpolatedPosition = vec4(0.0, 0.0, 0.0, 0.0);
interpolatedNormal = vec4(0.0, 0.0, 0.0, 0.0);
interpolatedTangent = vec4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < INDICES_PER_VERTEX; i++) {
mat4 clusterMatrix = clusterMatrices[int(clusterIndices[i])];
float clusterWeight = clusterWeights[i];
position += clusterMatrix * gl_Vertex * clusterWeight;
interpolatedPosition += clusterMatrix * gl_Vertex * clusterWeight;
interpolatedNormal += clusterMatrix * vec4(gl_Normal, 0.0) * clusterWeight;
interpolatedTangent += clusterMatrix * vec4(tangent, 0.0) * clusterWeight;
}
position = gl_ModelViewProjectionMatrix * position;
interpolatedPosition = gl_ModelViewMatrix * interpolatedPosition;
interpolatedNormal = gl_ModelViewMatrix * interpolatedNormal;
interpolatedTangent = gl_ModelViewMatrix * interpolatedTangent;
@ -49,5 +52,9 @@ void main(void) {
// and the texture coordinates
gl_TexCoord[0] = gl_MultiTexCoord0;
gl_Position = position;
// and the shadow texture coordinates
gl_TexCoord[1] = vec4(dot(gl_EyePlaneS[0], interpolatedPosition), dot(gl_EyePlaneT[0], interpolatedPosition),
dot(gl_EyePlaneR[0], interpolatedPosition), 1.0);
gl_Position = gl_ProjectionMatrix * interpolatedPosition;
}

@ -0,0 +1,20 @@
* {
font-family: Inconsolata, "Lucida Console", "Andale Mono", Monaco;
font-size: 14px;
}
#promptTextEdit {
color: #425d72;
}
#promptTextEdit:!enabled {
color: #7f7f7f;
}
#promptGutterLabel {
color: #a9bbc3;
}
#promptGutterLabel:!enabled {
color: #7f7f7f;
}

File diff suppressed because it is too large.

@ -28,6 +28,7 @@
#include <QHash>
#include <QTouchEvent>
#include <QUndoStack>
#include <QSystemTrayIcon>
#include <ModelEditPacketSender.h>
#include <NetworkPacket.h>
@ -82,6 +83,7 @@
#include "ui/LogDialog.h"
#include "ui/UpdateDialog.h"
#include "ui/overlays/Overlays.h"
#include "ui/ApplicationOverlay.h"
#include "ui/RunningScriptsWidget.h"
#include "voxels/VoxelFade.h"
#include "voxels/VoxelHideShowThread.h"
@ -115,6 +117,15 @@ static const QString CUSTOM_URL_SCHEME = "hifi:";
static const float BILLBOARD_FIELD_OF_VIEW = 30.0f; // degrees
static const float BILLBOARD_DISTANCE = 5.0f; // meters
static const int MIRROR_VIEW_TOP_PADDING = 5;
static const int MIRROR_VIEW_LEFT_PADDING = 10;
static const int MIRROR_VIEW_WIDTH = 265;
static const int MIRROR_VIEW_HEIGHT = 215;
static const float MIRROR_FULLSCREEN_DISTANCE = 0.35f;
static const float MIRROR_REARVIEW_DISTANCE = 0.65f;
static const float MIRROR_REARVIEW_BODY_DISTANCE = 2.3f;
static const float MIRROR_FIELD_OF_VIEW = 30.0f;
class Application : public QApplication {
Q_OBJECT
@ -181,6 +192,7 @@ public:
ViewFrustum* getShadowViewFrustum() { return &_shadowViewFrustum; }
VoxelSystem* getVoxels() { return &_voxels; }
VoxelTree* getVoxelTree() { return _voxels.getTree(); }
const VoxelPacketProcessor& getVoxelPacketProcessor() const { return _voxelProcessor; }
ParticleTreeRenderer* getParticles() { return &_particles; }
MetavoxelSystem* getMetavoxels() { return &_metavoxels; }
ModelTreeRenderer* getModels() { return &_models; }
@ -203,6 +215,14 @@ public:
JoystickManager* getJoystickManager() { return &_joystickManager; }
BandwidthMeter* getBandwidthMeter() { return &_bandwidthMeter; }
QUndoStack* getUndoStack() { return &_undoStack; }
QSystemTrayIcon* getTrayIcon() { return _trayIcon; }
ApplicationOverlay& getApplicationOverlay() { return _applicationOverlay; }
Overlays& getOverlays() { return _overlays; }
float getFps() const { return _fps; }
float getPacketsPerSecond() const { return _packetsPerSecond; }
float getBytesPerSecond() const { return _bytesPerSecond; }
const glm::vec3& getViewMatrixTranslation() const { return _viewMatrixTranslation; }
/// if you need to access the application settings, use lockSettings()/unlockSettings()
QSettings* lockSettings() { _settingsMutex.lock(); return _settings; }
@ -241,11 +261,11 @@ public:
/// result from matrix multiplication at high translation magnitudes.
void loadTranslatedViewMatrix(const glm::vec3& translation);
const glm::mat4& getShadowMatrix() const { return _shadowMatrix; }
void getModelViewMatrix(glm::dmat4* modelViewMatrix);
void getProjectionMatrix(glm::dmat4* projectionMatrix);
const glm::vec3& getShadowDistances() const { return _shadowDistances; }
/// Computes the off-axis frustum parameters for the view frustum, taking mirroring into account.
void computeOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) const;
@ -303,11 +323,12 @@ public slots:
void loadScriptURLDialog();
void toggleLogDialog();
void initAvatarAndViewFrustum();
ScriptEngine* loadScript(const QString& fileNameString, bool loadScriptFromEditor = false, bool activateMainWindow = false);
ScriptEngine* loadScript(const QString& fileNameString = QString(), bool loadScriptFromEditor = false, bool activateMainWindow = false);
void scriptFinished(const QString& scriptName);
void stopAllScripts(bool restart = false);
void stopScript(const QString& scriptName);
void reloadAllScripts();
void loadDefaultScripts();
void toggleRunningScriptsWidget();
void uploadHead();
@ -375,7 +396,6 @@ private:
glm::vec3 getSunDirection();
void updateShadowMap();
void displayOverlay();
void renderRearViewMirror(const QRect& region, bool billboard = false);
void renderViewFrustum(ViewFrustum& viewFrustum);
@ -391,6 +411,8 @@ private:
static void attachNewHeadToNode(Node *newNode);
static void* networkReceive(void* args); // network receive thread
void sendNack();
MainWindow* _window;
GLCanvas* _glWidget; // our GLCanvas has a couple extra features
@ -467,12 +489,14 @@ private:
glm::mat4 _untranslatedViewMatrix;
glm::vec3 _viewMatrixTranslation;
glm::mat4 _projectionMatrix;
float _scaleMirror;
float _rotateMirror;
float _raiseMirror;
glm::mat4 _shadowMatrix;
static const int CASCADED_SHADOW_MATRIX_COUNT = 4;
glm::mat4 _shadowMatrices[CASCADED_SHADOW_MATRIX_COUNT];
glm::vec3 _shadowDistances;
Environment _environment;
@ -533,6 +557,7 @@ private:
NodeBounds _nodeBoundsDisplay;
std::vector<VoxelFade> _voxelFades;
QReadWriteLock _voxelFadesLock;
ControllerScriptingInterface _controllerScriptingInterface;
QPointer<LogDialog> _logDialog;
QPointer<SnapshotShareDialog> _snapshotShareDialog;
@ -549,11 +574,16 @@ private:
TouchEvent _lastTouchEvent;
Overlays _overlays;
ApplicationOverlay _applicationOverlay;
AudioReflector _audioReflector;
RunningScriptsWidget* _runningScriptsWidget;
QHash<QString, ScriptEngine*> _scriptEnginesHash;
bool _runningScriptsWidgetWasVisible;
QSystemTrayIcon* _trayIcon;
quint64 _lastNackTime;
};
#endif // hifi_Application_h

@ -68,6 +68,7 @@ Audio::Audio(int16_t initialJitterBufferSamples, QObject* parent) :
_proceduralOutputDevice(NULL),
_inputRingBuffer(0),
_ringBuffer(NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL),
_isStereoInput(false),
_averagedLatency(0.0),
_measuredJitter(0),
_jitterBufferSamples(initialJitterBufferSamples),
@ -97,9 +98,11 @@ Audio::Audio(int16_t initialJitterBufferSamples, QObject* parent) :
_scopeEnabledPause(false),
_scopeInputOffset(0),
_scopeOutputOffset(0),
_scopeInput(SAMPLES_PER_SCOPE_WIDTH * sizeof(int16_t), 0),
_scopeOutputLeft(SAMPLES_PER_SCOPE_WIDTH * sizeof(int16_t), 0),
_scopeOutputRight(SAMPLES_PER_SCOPE_WIDTH * sizeof(int16_t), 0)
_framesPerScope(DEFAULT_FRAMES_PER_SCOPE),
_samplesPerScope(NETWORK_SAMPLES_PER_FRAME * _framesPerScope),
_scopeInput(0),
_scopeOutputLeft(0),
_scopeOutputRight(0)
{
// clear the array of locally injected samples
memset(_localProceduralSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL);
@ -202,7 +205,7 @@ QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
CoCreateInstance(__uuidof(MMDeviceEnumerator), NULL, CLSCTX_ALL, __uuidof(IMMDeviceEnumerator), (void**)&pMMDeviceEnumerator);
IMMDevice* pEndpoint;
hr = pMMDeviceEnumerator->GetDefaultAudioEndpoint(mode == QAudio::AudioOutput ? eRender : eCapture, eMultimedia, &pEndpoint);
if (hr == E_NOTFOUND){
if (hr == E_NOTFOUND) {
printf("Audio Error: device not found\n");
deviceName = QString("NONE");
} else {
@ -287,20 +290,27 @@ void linearResampling(int16_t* sourceSamples, int16_t* destinationSamples,
if (sourceToDestinationFactor >= 2) {
// we need to downsample from 48 to 24
// for now this only supports a mono output - this would be the case for audio input
for (unsigned int i = sourceAudioFormat.channelCount(); i < numSourceSamples; i += 2 * sourceAudioFormat.channelCount()) {
if (i + (sourceAudioFormat.channelCount()) >= numSourceSamples) {
destinationSamples[(i - sourceAudioFormat.channelCount()) / (int) sourceToDestinationFactor] =
if (destinationAudioFormat.channelCount() == 1) {
for (unsigned int i = sourceAudioFormat.channelCount(); i < numSourceSamples; i += 2 * sourceAudioFormat.channelCount()) {
if (i + (sourceAudioFormat.channelCount()) >= numSourceSamples) {
destinationSamples[(i - sourceAudioFormat.channelCount()) / (int) sourceToDestinationFactor] =
(sourceSamples[i - sourceAudioFormat.channelCount()] / 2)
+ (sourceSamples[i] / 2);
} else {
destinationSamples[(i - sourceAudioFormat.channelCount()) / (int) sourceToDestinationFactor] =
} else {
destinationSamples[(i - sourceAudioFormat.channelCount()) / (int) sourceToDestinationFactor] =
(sourceSamples[i - sourceAudioFormat.channelCount()] / 4)
+ (sourceSamples[i] / 2)
+ (sourceSamples[i + sourceAudioFormat.channelCount()] / 4);
}
}
} else {
// this is a 48 to 24 resampling but both source and destination are two channels
// squish two samples into one in each channel
for (int i = 0; i < numSourceSamples; i += 4) {
destinationSamples[i / 2] = (sourceSamples[i] / 2) + (sourceSamples[i + 2] / 2);
destinationSamples[(i / 2) + 1] = (sourceSamples[i + 1] / 2) + (sourceSamples[i + 3] / 2);
}
}
} else {
if (sourceAudioFormat.sampleRate() == destinationAudioFormat.sampleRate()) {
// mono to stereo, same sample rate
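
Note (not part of this commit): the new stereo branch above halves 48 kHz to 24 kHz by averaging adjacent frames per channel, and it halves each operand before adding (a/2 + b/2 rather than (a + b)/2) so the intermediate sum stays inside int16_t range. The same loop as a sketch over an interleaved left/right array:

function downsampleStereoByTwo(source) {
    var destination = new Array(source.length / 2);
    for (var i = 0; i < source.length; i += 4) {
        destination[i / 2] = (source[i] / 2) + (source[i + 2] / 2);           // left
        destination[(i / 2) + 1] = (source[i + 1] / 2) + (source[i + 3] / 2); // right
    }
    return destination;
}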
@ -403,12 +413,12 @@ bool Audio::switchOutputToAudioDevice(const QString& outputDeviceName) {
}
void Audio::handleAudioInput() {
static char monoAudioDataPacket[MAX_PACKET_SIZE];
static char audioDataPacket[MAX_PACKET_SIZE];
static int numBytesPacketHeader = numBytesForPacketHeaderGivenPacketType(PacketTypeMicrophoneAudioNoEcho);
static int leadingBytes = numBytesPacketHeader + sizeof(glm::vec3) + sizeof(glm::quat);
static int leadingBytes = numBytesPacketHeader + sizeof(glm::vec3) + sizeof(glm::quat) + sizeof(quint8);
static int16_t* monoAudioSamples = (int16_t*) (monoAudioDataPacket + leadingBytes);
static int16_t* networkAudioSamples = (int16_t*) (audioDataPacket + leadingBytes);
float inputToNetworkInputRatio = calculateDeviceToNetworkInputRatio(_numInputCallbackBytes);
@ -450,126 +460,139 @@ void Audio::handleAudioInput() {
int16_t* inputAudioSamples = new int16_t[inputSamplesRequired];
_inputRingBuffer.readSamples(inputAudioSamples, inputSamplesRequired);
int numNetworkBytes = _isStereoInput ? NETWORK_BUFFER_LENGTH_BYTES_STEREO : NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL;
int numNetworkSamples = _isStereoInput ? NETWORK_BUFFER_LENGTH_SAMPLES_STEREO : NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL;
// zero out the monoAudioSamples array and the locally injected audio
memset(monoAudioSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL);
memset(networkAudioSamples, 0, numNetworkBytes);
if (!_muted) {
// we aren't muted, downsample the input audio
linearResampling((int16_t*) inputAudioSamples,
monoAudioSamples,
inputSamplesRequired,
NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL,
linearResampling((int16_t*) inputAudioSamples, networkAudioSamples,
inputSamplesRequired, numNetworkSamples,
_inputFormat, _desiredInputFormat);
//
// Impose Noise Gate
//
// The Noise Gate is used to reject constant background noise by measuring the noise
// floor observed at the microphone and then opening the 'gate' to allow microphone
// signals to be transmitted when the microphone samples average level exceeds a multiple
// of the noise floor.
//
// NOISE_GATE_HEIGHT: How loud you have to speak relative to noise background to open the gate.
// Make this value lower for more sensitivity and less rejection of noise.
// NOISE_GATE_WIDTH: The number of samples in an audio frame for which the height must be exceeded
// to open the gate.
// NOISE_GATE_CLOSE_FRAME_DELAY: Once the noise is below the gate height for the frame, how many frames
// will we wait before closing the gate.
// NOISE_GATE_FRAMES_TO_AVERAGE: How many audio frames should we average together to compute noise floor.
// More means better rejection but also can reject continuous things like singing.
// NUMBER_OF_NOISE_SAMPLE_FRAMES: How often should we re-evaluate the noise floor?
float loudness = 0;
float thisSample = 0;
int samplesOverNoiseGate = 0;
const float NOISE_GATE_HEIGHT = 7.0f;
const int NOISE_GATE_WIDTH = 5;
const int NOISE_GATE_CLOSE_FRAME_DELAY = 5;
const int NOISE_GATE_FRAMES_TO_AVERAGE = 5;
const float DC_OFFSET_AVERAGING = 0.99f;
const float CLIPPING_THRESHOLD = 0.90f;
//
// Check clipping, adjust DC offset, and check if should open noise gate
//
float measuredDcOffset = 0.0f;
// Increment the time since the last clip
if (_timeSinceLastClip >= 0.0f) {
_timeSinceLastClip += (float) NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL / (float) SAMPLE_RATE;
}
for (int i = 0; i < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL; i++) {
measuredDcOffset += monoAudioSamples[i];
monoAudioSamples[i] -= (int16_t) _dcOffset;
thisSample = fabsf(monoAudioSamples[i]);
if (thisSample >= (32767.0f * CLIPPING_THRESHOLD)) {
_timeSinceLastClip = 0.0f;
// only impose the noise gate and perform tone injection if we're sending mono audio
if (!_isStereoInput) {
//
// Impose Noise Gate
//
// The Noise Gate is used to reject constant background noise by measuring the noise
// floor observed at the microphone and then opening the 'gate' to allow microphone
// signals to be transmitted when the microphone samples average level exceeds a multiple
// of the noise floor.
//
// NOISE_GATE_HEIGHT: How loud you have to speak relative to noise background to open the gate.
// Make this value lower for more sensitivity and less rejection of noise.
// NOISE_GATE_WIDTH: The number of samples in an audio frame for which the height must be exceeded
// to open the gate.
// NOISE_GATE_CLOSE_FRAME_DELAY: Once the noise is below the gate height for the frame, how many frames
// will we wait before closing the gate.
// NOISE_GATE_FRAMES_TO_AVERAGE: How many audio frames should we average together to compute noise floor.
// More means better rejection but also can reject continuous things like singing.
// NUMBER_OF_NOISE_SAMPLE_FRAMES: How often should we re-evaluate the noise floor?
float loudness = 0;
float thisSample = 0;
int samplesOverNoiseGate = 0;
const float NOISE_GATE_HEIGHT = 7.0f;
const int NOISE_GATE_WIDTH = 5;
const int NOISE_GATE_CLOSE_FRAME_DELAY = 5;
const int NOISE_GATE_FRAMES_TO_AVERAGE = 5;
const float DC_OFFSET_AVERAGING = 0.99f;
const float CLIPPING_THRESHOLD = 0.90f;
//
// Check clipping, adjust DC offset, and check if should open noise gate
//
float measuredDcOffset = 0.0f;
// Increment the time since the last clip
if (_timeSinceLastClip >= 0.0f) {
_timeSinceLastClip += (float) NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL / (float) SAMPLE_RATE;
}
loudness += thisSample;
// Noise Reduction: Count peaks above the average loudness
if (_noiseGateEnabled && (thisSample > (_noiseGateMeasuredFloor * NOISE_GATE_HEIGHT))) {
samplesOverNoiseGate++;
}
}
measuredDcOffset /= NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL;
if (_dcOffset == 0.0f) {
// On first frame, copy over measured offset
_dcOffset = measuredDcOffset;
} else {
_dcOffset = DC_OFFSET_AVERAGING * _dcOffset + (1.0f - DC_OFFSET_AVERAGING) * measuredDcOffset;
}
// Add tone injection if enabled
const float TONE_FREQ = 220.0f / SAMPLE_RATE * TWO_PI;
const float QUARTER_VOLUME = 8192.0f;
if (_toneInjectionEnabled) {
loudness = 0.0f;
for (int i = 0; i < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL; i++) {
monoAudioSamples[i] = QUARTER_VOLUME * sinf(TONE_FREQ * (float)(i + _proceduralEffectSample));
loudness += fabsf(monoAudioSamples[i]);
}
}
_lastInputLoudness = fabs(loudness / NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
// If Noise Gate is enabled, check and turn the gate on and off
if (!_toneInjectionEnabled && _noiseGateEnabled) {
float averageOfAllSampleFrames = 0.0f;
_noiseSampleFrames[_noiseGateSampleCounter++] = _lastInputLoudness;
if (_noiseGateSampleCounter == NUMBER_OF_NOISE_SAMPLE_FRAMES) {
float smallestSample = FLT_MAX;
for (int i = 0; i <= NUMBER_OF_NOISE_SAMPLE_FRAMES - NOISE_GATE_FRAMES_TO_AVERAGE; i += NOISE_GATE_FRAMES_TO_AVERAGE) {
float thisAverage = 0.0f;
for (int j = i; j < i + NOISE_GATE_FRAMES_TO_AVERAGE; j++) {
thisAverage += _noiseSampleFrames[j];
averageOfAllSampleFrames += _noiseSampleFrames[j];
}
thisAverage /= NOISE_GATE_FRAMES_TO_AVERAGE;
if (thisAverage < smallestSample) {
smallestSample = thisAverage;
}
measuredDcOffset += networkAudioSamples[i];
networkAudioSamples[i] -= (int16_t) _dcOffset;
thisSample = fabsf(networkAudioSamples[i]);
if (thisSample >= (32767.0f * CLIPPING_THRESHOLD)) {
_timeSinceLastClip = 0.0f;
}
loudness += thisSample;
// Noise Reduction: Count peaks above the average loudness
if (_noiseGateEnabled && (thisSample > (_noiseGateMeasuredFloor * NOISE_GATE_HEIGHT))) {
samplesOverNoiseGate++;
}
averageOfAllSampleFrames /= NUMBER_OF_NOISE_SAMPLE_FRAMES;
_noiseGateMeasuredFloor = smallestSample;
_noiseGateSampleCounter = 0;
}
if (samplesOverNoiseGate > NOISE_GATE_WIDTH) {
_noiseGateOpen = true;
_noiseGateFramesToClose = NOISE_GATE_CLOSE_FRAME_DELAY;
measuredDcOffset /= NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL;
if (_dcOffset == 0.0f) {
// On first frame, copy over measured offset
_dcOffset = measuredDcOffset;
} else {
if (--_noiseGateFramesToClose == 0) {
_noiseGateOpen = false;
_dcOffset = DC_OFFSET_AVERAGING * _dcOffset + (1.0f - DC_OFFSET_AVERAGING) * measuredDcOffset;
}
// Add tone injection if enabled
const float TONE_FREQ = 220.0f / SAMPLE_RATE * TWO_PI;
const float QUARTER_VOLUME = 8192.0f;
if (_toneInjectionEnabled) {
loudness = 0.0f;
for (int i = 0; i < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL; i++) {
networkAudioSamples[i] = QUARTER_VOLUME * sinf(TONE_FREQ * (float)(i + _proceduralEffectSample));
loudness += fabsf(networkAudioSamples[i]);
}
}
if (!_noiseGateOpen) {
memset(monoAudioSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL);
_lastInputLoudness = 0;
_lastInputLoudness = fabs(loudness / NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
// If Noise Gate is enabled, check and turn the gate on and off
if (!_toneInjectionEnabled && _noiseGateEnabled) {
float averageOfAllSampleFrames = 0.0f;
_noiseSampleFrames[_noiseGateSampleCounter++] = _lastInputLoudness;
if (_noiseGateSampleCounter == NUMBER_OF_NOISE_SAMPLE_FRAMES) {
float smallestSample = FLT_MAX;
for (int i = 0; i <= NUMBER_OF_NOISE_SAMPLE_FRAMES - NOISE_GATE_FRAMES_TO_AVERAGE; i += NOISE_GATE_FRAMES_TO_AVERAGE) {
float thisAverage = 0.0f;
for (int j = i; j < i + NOISE_GATE_FRAMES_TO_AVERAGE; j++) {
thisAverage += _noiseSampleFrames[j];
averageOfAllSampleFrames += _noiseSampleFrames[j];
}
thisAverage /= NOISE_GATE_FRAMES_TO_AVERAGE;
if (thisAverage < smallestSample) {
smallestSample = thisAverage;
}
}
averageOfAllSampleFrames /= NUMBER_OF_NOISE_SAMPLE_FRAMES;
_noiseGateMeasuredFloor = smallestSample;
_noiseGateSampleCounter = 0;
}
if (samplesOverNoiseGate > NOISE_GATE_WIDTH) {
_noiseGateOpen = true;
_noiseGateFramesToClose = NOISE_GATE_CLOSE_FRAME_DELAY;
} else {
if (--_noiseGateFramesToClose == 0) {
_noiseGateOpen = false;
}
}
if (!_noiseGateOpen) {
memset(networkAudioSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL);
_lastInputLoudness = 0;
}
}
} else {
float loudness = 0.0f;
for (int i = 0; i < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; i++) {
loudness += fabsf(networkAudioSamples[i]);
}
_lastInputLoudness = fabs(loudness / NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
}
} else {
// our input loudness is 0, since we're muted
@ -578,21 +601,21 @@ void Audio::handleAudioInput() {
// at this point we have clean monoAudioSamples, which match our target output...
// this is what we should send to our interested listeners
if (_processSpatialAudio && !_muted && _audioOutput) {
QByteArray monoInputData((char*)monoAudioSamples, NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL * sizeof(int16_t));
if (_processSpatialAudio && !_muted && !_isStereoInput && _audioOutput) {
QByteArray monoInputData((char*)networkAudioSamples, NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL * sizeof(int16_t));
emit processLocalAudio(_spatialAudioStart, monoInputData, _desiredInputFormat);
}
if (_proceduralAudioOutput) {
processProceduralAudio(monoAudioSamples, NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
if (!_isStereoInput && _proceduralAudioOutput) {
processProceduralAudio(networkAudioSamples, NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
}
if (_scopeEnabled && !_scopeEnabledPause) {
if (!_isStereoInput && _scopeEnabled && !_scopeEnabledPause) {
unsigned int numMonoAudioChannels = 1;
unsigned int monoAudioChannel = 0;
addBufferToScope(_scopeInput, _scopeInputOffset, monoAudioSamples, monoAudioChannel, numMonoAudioChannels);
addBufferToScope(_scopeInput, _scopeInputOffset, networkAudioSamples, monoAudioChannel, numMonoAudioChannels);
_scopeInputOffset += NETWORK_SAMPLES_PER_FRAME;
_scopeInputOffset %= SAMPLES_PER_SCOPE_WIDTH;
_scopeInputOffset %= _samplesPerScope;
}
NodeList* nodeList = NodeList::getInstance();
@ -601,10 +624,8 @@ void Audio::handleAudioInput() {
if (audioMixer && audioMixer->getActiveSocket()) {
MyAvatar* interfaceAvatar = Application::getInstance()->getAvatar();
glm::vec3 headPosition = interfaceAvatar->getHead()->getPosition();
glm::quat headOrientation = interfaceAvatar->getHead()->getFinalOrientation();
// we need the amount of bytes in the buffer + 1 for type
// + 12 for 3 floats for position + float for bearing + 1 attenuation byte
glm::quat headOrientation = interfaceAvatar->getHead()->getFinalOrientationInWorldFrame();
quint8 isStereo = _isStereoInput ? 1 : 0;
int numAudioBytes = 0;
@ -613,11 +634,12 @@ void Audio::handleAudioInput() {
packetType = PacketTypeSilentAudioFrame;
// we need to indicate how many silent samples this is to the audio mixer
monoAudioSamples[0] = NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL;
audioDataPacket[0] = _isStereoInput
? NETWORK_BUFFER_LENGTH_SAMPLES_STEREO
: NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL;
numAudioBytes = sizeof(int16_t);
} else {
numAudioBytes = NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL;
numAudioBytes = _isStereoInput ? NETWORK_BUFFER_LENGTH_BYTES_STEREO : NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL;
if (Menu::getInstance()->isOptionChecked(MenuOption::EchoServerAudio)) {
packetType = PacketTypeMicrophoneAudioWithEcho;
@ -626,7 +648,10 @@ void Audio::handleAudioInput() {
}
}
char* currentPacketPtr = monoAudioDataPacket + populatePacketHeader(monoAudioDataPacket, packetType);
char* currentPacketPtr = audioDataPacket + populatePacketHeader(audioDataPacket, packetType);
// set the mono/stereo byte
*currentPacketPtr++ = isStereo;
// memcpy the three float positions
memcpy(currentPacketPtr, &headPosition, sizeof(headPosition));
@ -636,7 +661,7 @@ void Audio::handleAudioInput() {
memcpy(currentPacketPtr, &headOrientation, sizeof(headOrientation));
currentPacketPtr += sizeof(headOrientation);
nodeList->writeDatagram(monoAudioDataPacket, numAudioBytes + leadingBytes, audioMixer);
nodeList->writeDatagram(audioDataPacket, numAudioBytes + leadingBytes, audioMixer);
Application::getInstance()->getBandwidthMeter()->outputStream(BandwidthMeter::AUDIO)
.updateValue(numAudioBytes + leadingBytes);
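
Note (not part of this commit): a compact sketch of the gate state machine described in the comments above, with the same constants; noiseFloor stands in for _noiseGateMeasuredFloor, which the C++ derives from the quietest recent frame averages:

var NOISE_GATE_HEIGHT = 7.0;          // how far above the floor a sample must be
var NOISE_GATE_WIDTH = 5;             // samples per frame needed to open the gate
var NOISE_GATE_CLOSE_FRAME_DELAY = 5; // quiet frames before the gate closes
var gateOpen = false;
var framesToClose = 0;
function applyNoiseGate(samples, noiseFloor) {
    var samplesOverGate = 0;
    for (var i = 0; i < samples.length; i++) {
        if (Math.abs(samples[i]) > noiseFloor * NOISE_GATE_HEIGHT) {
            samplesOverGate++;
        }
    }
    if (samplesOverGate > NOISE_GATE_WIDTH) {
        gateOpen = true;
        framesToClose = NOISE_GATE_CLOSE_FRAME_DELAY;
    } else if (gateOpen && --framesToClose === 0) {
        gateOpen = false;
    }
    if (!gateOpen) {
        for (var j = 0; j < samples.length; j++) {
            samples[j] = 0; // gate closed: transmit silence
        }
    }
    return samples;
}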
@ -759,6 +784,24 @@ void Audio::toggleAudioNoiseReduction() {
_noiseGateEnabled = !_noiseGateEnabled;
}
void Audio::toggleStereoInput() {
int oldChannelCount = _desiredInputFormat.channelCount();
QAction* stereoAudioOption = Menu::getInstance()->getActionForOption(MenuOption::StereoAudio);
if (stereoAudioOption->isChecked()) {
_desiredInputFormat.setChannelCount(2);
_isStereoInput = true;
} else {
_desiredInputFormat.setChannelCount(1);
_isStereoInput = false;
}
if (oldChannelCount != _desiredInputFormat.channelCount()) {
// change in channel count for desired input format, restart the input device
switchInputToAudioDevice(_inputAudioDeviceName);
}
}
void Audio::processReceivedAudio(const QByteArray& audioByteArray) {
_ringBuffer.parseData(audioByteArray);
@ -849,7 +892,7 @@ void Audio::processReceivedAudio(const QByteArray& audioByteArray) {
samples, audioChannel, numAudioChannels);
_scopeOutputOffset += NETWORK_SAMPLES_PER_FRAME;
_scopeOutputOffset %= SAMPLES_PER_SCOPE_WIDTH;
_scopeOutputOffset %= _samplesPerScope;
samples += NETWORK_SAMPLES_PER_FRAME * numAudioChannels;
}
}
@ -1060,25 +1103,73 @@ void Audio::renderToolBox(int x, int y, bool boxed) {
glDisable(GL_TEXTURE_2D);
}
void Audio::toggleScope() {
_scopeEnabled = !_scopeEnabled;
if (_scopeEnabled) {
_scopeInputOffset = 0;
_scopeOutputOffset = 0;
allocateScope();
} else {
freeScope();
}
}
void Audio::toggleScopePause() {
_scopeEnabledPause = !_scopeEnabledPause;
}
void Audio::toggleScope() {
_scopeEnabled = !_scopeEnabled;
if (_scopeEnabled) {
static const int width = SAMPLES_PER_SCOPE_WIDTH;
_scopeInputOffset = 0;
_scopeOutputOffset = 0;
memset(_scopeInput.data(), 0, width * sizeof(int16_t));
memset(_scopeOutputLeft.data(), 0, width * sizeof(int16_t));
memset(_scopeOutputRight.data(), 0, width * sizeof(int16_t));
_scopeEnabledPause = false;
void Audio::selectAudioScopeFiveFrames() {
if (Menu::getInstance()->isOptionChecked(MenuOption::AudioScopeFiveFrames)) {
reallocateScope(5);
}
}
void Audio::selectAudioScopeTwentyFrames() {
if (Menu::getInstance()->isOptionChecked(MenuOption::AudioScopeTwentyFrames)) {
reallocateScope(20);
}
}
void Audio::selectAudioScopeFiftyFrames() {
if (Menu::getInstance()->isOptionChecked(MenuOption::AudioScopeFiftyFrames)) {
reallocateScope(50);
}
}
void Audio::allocateScope() {
int num = _samplesPerScope * sizeof(int16_t);
_scopeInput = new QByteArray(num, 0);
_scopeOutputLeft = new QByteArray(num, 0);
_scopeOutputRight = new QByteArray(num, 0);
}
void Audio::reallocateScope(int frames) {
if (_framesPerScope != frames) {
_framesPerScope = frames;
_samplesPerScope = NETWORK_SAMPLES_PER_FRAME * _framesPerScope;
QMutexLocker lock(&_guard);
freeScope();
allocateScope();
}
}
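reallocateScope takes _guard before swapping the buffers because the capture path (addBufferToScope) and the render path (renderScope) dereference the same QByteArray pointers; without the lock a reader could touch freed memory mid-swap. The pattern in isolation:

#include <QByteArray>
#include <QMutex>
#include <QMutexLocker>

// Resize a buffer shared across threads without exposing a dangling pointer.
// Readers must take the same mutex before dereferencing `buffer`.
void resizeSharedBuffer(QMutex& guard, QByteArray*& buffer, int newBytes) {
    QMutexLocker lock(&guard);
    delete buffer;
    buffer = new QByteArray(newBytes, 0);
}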
void Audio::freeScope() {
if (_scopeInput) {
delete _scopeInput;
_scopeInput = 0;
}
if (_scopeOutputLeft) {
delete _scopeOutputLeft;
_scopeOutputLeft = 0;
}
if (_scopeOutputRight) {
delete _scopeOutputRight;
_scopeOutputRight = 0;
}
}
void Audio::addBufferToScope(
QByteArray& byteArray, unsigned int frameOffset, const int16_t* source, unsigned int sourceChannel, unsigned int sourceNumberOfChannels) {
QByteArray* byteArray, unsigned int frameOffset, const int16_t* source, unsigned int sourceChannel, unsigned int sourceNumberOfChannels) {
// Constant multiplier to map sample value to vertical size of scope
float multiplier = (float)MULTIPLIER_SCOPE_HEIGHT / logf(2.0f);
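With multiplier = MULTIPLIER_SCOPE_HEIGHT / ln(2), multiplying a natural log by it yields MULTIPLIER_SCOPE_HEIGHT pixels per doubling of amplitude, i.e. a log2 vertical scale. The hunk elides the mapping body itself, so the following is only a plausible shape, not the committed code:

#include <cmath>
#include <cstdint>

// Hypothetical signed log2 compression consistent with the multiplier above.
int16_t mapSampleToScope(float sample, float multiplier) {
    if (sample > 1.0f) {
        return (int16_t)(multiplier * logf(sample));   // upper half of the trace
    } else if (sample < -1.0f) {
        return (int16_t)(-multiplier * logf(-sample)); // mirrored lower half
    }
    return 0; // |sample| <= 1 collapses to the center line
}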
@ -1089,8 +1180,9 @@ void Audio::addBufferToScope(
// Temporary variable receives mapping of sample value
int16_t value;
QMutexLocker lock(&_guard);
// Short int pointer to mapped samples in byte array
int16_t* destination = (int16_t*) byteArray.data();
int16_t* destination = (int16_t*) byteArray->data();
for (unsigned int i = 0; i < NETWORK_SAMPLES_PER_FRAME; i++) {
sample = (float)source[i * sourceNumberOfChannels + sourceChannel];
@ -1116,18 +1208,20 @@ void Audio::renderScope(int width, int height) {
static const float outputLeftColor[4] = { 0.7f, .3f, 0.3f, 0.6f };
static const float outputRightColor[4] = { 0.3f, .3f, 0.7f, 0.6f };
static const int gridRows = 2;
static const int gridCols = 5;
int gridCols = _framesPerScope;
int x = (width - SAMPLES_PER_SCOPE_WIDTH) / 2;
int y = (height - SAMPLES_PER_SCOPE_HEIGHT) / 2;
int w = SAMPLES_PER_SCOPE_WIDTH;
int h = SAMPLES_PER_SCOPE_HEIGHT;
int x = (width - SCOPE_WIDTH) / 2;
int y = (height - SCOPE_HEIGHT) / 2;
int w = SCOPE_WIDTH;
int h = SCOPE_HEIGHT;
renderBackground(backgroundColor, x, y, w, h);
renderGrid(gridColor, x, y, w, h, gridRows, gridCols);
renderLineStrip(inputColor, x, y, w, _scopeInputOffset, _scopeInput);
renderLineStrip(outputLeftColor, x, y, w, _scopeOutputOffset, _scopeOutputLeft);
renderLineStrip(outputRightColor, x, y, w, _scopeOutputOffset, _scopeOutputRight);
QMutexLocker lock(&_guard);
renderLineStrip(inputColor, x, y, _samplesPerScope, _scopeInputOffset, _scopeInput);
renderLineStrip(outputLeftColor, x, y, _samplesPerScope, _scopeOutputOffset, _scopeOutputLeft);
renderLineStrip(outputRightColor, x, y, _samplesPerScope, _scopeOutputOffset, _scopeOutputRight);
}
void Audio::renderBackground(const float* color, int x, int y, int width, int height) {
@ -1170,21 +1264,54 @@ void Audio::renderGrid(const float* color, int x, int y, int width, int height,
glColor4f(1, 1, 1, 1);
}
void Audio::renderLineStrip(const float* color, int x, int y, int n, int offset, const QByteArray& byteArray) {
void Audio::renderLineStrip(const float* color, int x, int y, int n, int offset, const QByteArray* byteArray) {
glColor4fv(color);
glBegin(GL_LINE_STRIP);
int16_t sample;
int16_t* samples = ((int16_t*) byteArray.data()) + offset;
y += SAMPLES_PER_SCOPE_HEIGHT / 2;
for (int i = n - offset; --i >= 0; ) {
sample = *samples++;
int16_t* samples = ((int16_t*) byteArray->data()) + offset;
int numSamplesToAverage = _framesPerScope / DEFAULT_FRAMES_PER_SCOPE;
int count = (n - offset) / numSamplesToAverage;
int remainder = (n - offset) % numSamplesToAverage;
y += SCOPE_HEIGHT / 2;
// Compute and draw the sample averages from the offset position
for (int i = count; --i >= 0; ) {
sample = 0;
for (int j = numSamplesToAverage; --j >= 0; ) {
sample += *samples++;
}
sample /= numSamplesToAverage;
glVertex2i(x++, y - sample);
}
samples = (int16_t*) byteArray.data();
for (int i = offset; --i >= 0; ) {
sample = *samples++;
// Compute and draw the sample average across the wrap boundary
if (remainder != 0) {
sample = 0;
for (int j = remainder; --j >= 0; ) {
sample += *samples++;
}
samples = (int16_t*) byteArray->data();
for (int j = numSamplesToAverage - remainder; --j >= 0; ) {
sample += *samples++;
}
sample /= numSamplesToAverage;
glVertex2i(x++, y - sample);
} else {
samples = (int16_t*) byteArray->data();
}
// Compute and draw the sample average from the beginning to the offset
count = (offset - remainder) / numSamplesToAverage;
for (int i = count; --i >= 0; ) {
sample = 0;
for (int j = numSamplesToAverage; --j >= 0; ) {
sample += *samples++;
}
sample /= numSamplesToAverage;
glVertex2i(x++, y - sample);
}
glEnd();
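The three loops above downsample the ring so the scope stays the same on-screen width no matter how many frames it holds: every _framesPerScope / DEFAULT_FRAMES_PER_SCOPE consecutive samples are averaged into one vertex, with one extra average stitched across the buffer's wrap point. A standalone sketch of the same reduction, minus the GL calls; it yields identical averages whenever the block size divides n, which holds here since both scale with the frame count:

#include <cstdint>
#include <vector>

// Average a ring of n samples, reading from `offset`, into n/blockSize points.
// The modulo walk makes the wrap-straddling block fall out naturally.
std::vector<int16_t> averageRing(const int16_t* ring, int n, int offset, int blockSize) {
    std::vector<int16_t> points;
    points.reserve(n / blockSize);
    int index = offset, sum = 0, inBlock = 0;
    for (int consumed = 0; consumed < n; consumed++) {
        sum += ring[index];
        index = (index + 1) % n;
        if (++inBlock == blockSize) {
            points.push_back((int16_t)(sum / blockSize));
            sum = 0;
            inBlock = 0;
        }
    }
    return points;
}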
@ -1214,18 +1341,21 @@ bool Audio::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceInfo) {
if (adjustedFormatForAudioDevice(inputDeviceInfo, _desiredInputFormat, _inputFormat)) {
qDebug() << "The format to be used for audio input is" << _inputFormat;
_audioInput = new QAudioInput(inputDeviceInfo, _inputFormat, this);
_numInputCallbackBytes = calculateNumberOfInputCallbackBytes(_inputFormat);
_audioInput->setBufferSize(_numInputCallbackBytes);
// how do we want to handle input working, but output not working?
int numFrameSamples = calculateNumberOfFrameSamples(_numInputCallbackBytes);
_inputRingBuffer.resizeForFrameSize(numFrameSamples);
_inputDevice = _audioInput->start();
connect(_inputDevice, SIGNAL(readyRead()), this, SLOT(handleAudioInput()));
supportedFormat = true;
// if the user wants stereo but this device can't provide then bail
if (!_isStereoInput || _inputFormat.channelCount() == 2) {
_audioInput = new QAudioInput(inputDeviceInfo, _inputFormat, this);
_numInputCallbackBytes = calculateNumberOfInputCallbackBytes(_inputFormat);
_audioInput->setBufferSize(_numInputCallbackBytes);
// how do we want to handle input working, but output not working?
int numFrameSamples = calculateNumberOfFrameSamples(_numInputCallbackBytes);
_inputRingBuffer.resizeForFrameSize(numFrameSamples);
_inputDevice = _audioInput->start();
connect(_inputDevice, SIGNAL(readyRead()), this, SLOT(handleAudioInput()));
supportedFormat = true;
}
}
}
return supportedFormat;

View file

@ -85,6 +85,10 @@ public slots:
void toggleScope();
void toggleScopePause();
void toggleAudioSpatialProcessing();
void toggleStereoInput();
void selectAudioScopeFiveFrames();
void selectAudioScopeTwentyFrames();
void selectAudioScopeFiftyFrames();
virtual void handleAudioByteArray(const QByteArray& audioByteArray);
@ -124,6 +128,7 @@ private:
QIODevice* _proceduralOutputDevice;
AudioRingBuffer _inputRingBuffer;
AudioRingBuffer _ringBuffer;
bool _isStereoInput;
QString _inputAudioDeviceName;
QString _outputAudioDeviceName;
@ -197,28 +202,36 @@ private:
int calculateNumberOfFrameSamples(int numBytes);
float calculateDeviceToNetworkInputRatio(int numBytes);
// Audio scope methods for allocation/deallocation
void allocateScope();
void freeScope();
void reallocateScope(int frames);
// Audio scope methods for data acquisition
void addBufferToScope(
QByteArray& byteArray, unsigned int frameOffset, const int16_t* source, unsigned int sourceChannel, unsigned int sourceNumberOfChannels);
QByteArray* byteArray, unsigned int frameOffset, const int16_t* source, unsigned int sourceChannel, unsigned int sourceNumberOfChannels);
// Audio scope methods for rendering
void renderBackground(const float* color, int x, int y, int width, int height);
void renderGrid(const float* color, int x, int y, int width, int height, int rows, int cols);
void renderLineStrip(const float* color, int x, int y, int n, int offset, const QByteArray& byteArray);
void renderLineStrip(const float* color, int x, int y, int n, int offset, const QByteArray* byteArray);
// Audio scope data
static const unsigned int NETWORK_SAMPLES_PER_FRAME = NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL;
static const unsigned int FRAMES_PER_SCOPE = 5;
static const unsigned int SAMPLES_PER_SCOPE_WIDTH = FRAMES_PER_SCOPE * NETWORK_SAMPLES_PER_FRAME;
static const unsigned int DEFAULT_FRAMES_PER_SCOPE = 5;
static const unsigned int SCOPE_WIDTH = NETWORK_SAMPLES_PER_FRAME * DEFAULT_FRAMES_PER_SCOPE;
static const unsigned int MULTIPLIER_SCOPE_HEIGHT = 20;
static const unsigned int SAMPLES_PER_SCOPE_HEIGHT = 2 * 15 * MULTIPLIER_SCOPE_HEIGHT;
static const unsigned int SCOPE_HEIGHT = 2 * 15 * MULTIPLIER_SCOPE_HEIGHT;
bool _scopeEnabled;
bool _scopeEnabledPause;
int _scopeInputOffset;
int _scopeOutputOffset;
QByteArray _scopeInput;
QByteArray _scopeOutputLeft;
QByteArray _scopeOutputRight;
int _framesPerScope;
int _samplesPerScope;
QMutex _guard;
QByteArray* _scopeInput;
QByteArray* _scopeOutputLeft;
QByteArray* _scopeOutputRight;
};

View file

@ -17,7 +17,7 @@ const float MINIMUM_ATTENUATION_TO_REFLECT = 1.0f / 256.0f;
const float DEFAULT_DISTANCE_SCALING_FACTOR = 2.0f;
const float MAXIMUM_DELAY_MS = 1000.0 * 20.0f; // stop reflecting after path is this long
const int DEFAULT_DIFFUSION_FANOUT = 5;
const int ABSOLUTE_MAXIMUM_BOUNCE_COUNT = 10;
const unsigned int ABSOLUTE_MAXIMUM_BOUNCE_COUNT = 10;
const float DEFAULT_LOCAL_ATTENUATION_FACTOR = 0.125;
const float DEFAULT_COMB_FILTER_WINDOW = 0.05f; //ms delay differential to avoid
@ -459,7 +459,7 @@ void AudioReflector::calculateAllReflections() {
// only recalculate when we've moved, or if the attributes have changed
// TODO: what about case where new voxels are added in front of us???
bool wantHeadOrientation = Menu::getInstance()->isOptionChecked(MenuOption::AudioSpatialProcessingHeadOriented);
glm::quat orientation = wantHeadOrientation ? _myAvatar->getHead()->getFinalOrientation() : _myAvatar->getOrientation();
glm::quat orientation = wantHeadOrientation ? _myAvatar->getHead()->getFinalOrientationInWorldFrame() : _myAvatar->getOrientation();
glm::vec3 origin = _myAvatar->getHead()->getPosition();
glm::vec3 listenerPosition = _myAvatar->getHead()->getPosition();

View file

@ -18,6 +18,7 @@
#include "Camera.h"
#include "Menu.h"
#include "Util.h"
#include "devices/OculusManager.h"
const float CAMERA_FIRST_PERSON_MODE_UP_SHIFT = 0.0f;
const float CAMERA_FIRST_PERSON_MODE_DISTANCE = 0.0f;
@ -264,7 +265,12 @@ PickRay CameraScriptableObject::computePickRay(float x, float y) {
float screenWidth = Application::getInstance()->getGLWidget()->width();
float screenHeight = Application::getInstance()->getGLWidget()->height();
PickRay result;
_viewFrustum->computePickRay(x / screenWidth, y / screenHeight, result.origin, result.direction);
if (OculusManager::isConnected()) {
result.origin = _camera->getPosition();
Application::getInstance()->getApplicationOverlay().computeOculusPickRay(x / screenWidth, y / screenHeight, result.direction);
} else {
_viewFrustum->computePickRay(x / screenWidth, y / screenHeight, result.origin, result.direction);
}
return result;
}

View file

@ -160,6 +160,11 @@ bool Environment::findCapsulePenetration(const glm::vec3& start, const glm::vec3
int Environment::parseData(const HifiSockAddr& senderAddress, const QByteArray& packet) {
// push past the packet header
int bytesRead = numBytesForPacketHeader(packet);
// push past flags, sequence, timestamp
bytesRead += sizeof(OCTREE_PACKET_FLAGS);
bytesRead += sizeof(OCTREE_PACKET_SEQUENCE);
bytesRead += sizeof(OCTREE_PACKET_SENT_TIME);
// get the lock for the duration of the call
QMutexLocker locker(&_mutex);
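The three sizeof additions skip the octree packet preamble that now precedes the environment payload. The same arithmetic as a helper; the field meanings in the comments are inferred from the type names, not from this commit:

// Bytes to skip before the payload of an octree-framed packet.
int octreePreambleBytes(const QByteArray& packet) {
    return numBytesForPacketHeader(packet)
         + sizeof(OCTREE_PACKET_FLAGS)      // per-packet flag bits
         + sizeof(OCTREE_PACKET_SEQUENCE)   // sequence number
         + sizeof(OCTREE_PACKET_SENT_TIME); // sender timestamp
}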

View file

@ -34,6 +34,7 @@
#include <UUID.h>
#include "Application.h"
#include "AccountManager.h"
#include "Menu.h"
#include "scripting/MenuScriptingInterface.h"
#include "Util.h"
@ -72,6 +73,11 @@ const int ONE_SECOND_OF_FRAMES = 60;
const int FIVE_SECONDS_OF_FRAMES = 5 * ONE_SECOND_OF_FRAMES;
const float MUTE_RADIUS = 50;
const QString CONSOLE_TITLE = "Scripting Console";
const float CONSOLE_WINDOW_OPACITY = 0.95f;
const int CONSOLE_WIDTH = 800;
const int CONSOLE_HEIGHT = 200;
Menu::Menu() :
_actionHash(),
_audioJitterBufferSamples(0),
@ -80,6 +86,7 @@ Menu::Menu() :
_faceshiftEyeDeflection(DEFAULT_FACESHIFT_EYE_DEFLECTION),
_frustumDrawMode(FRUSTUM_DRAW_MODE_ALL),
_viewFrustumOffset(DEFAULT_FRUSTUM_OFFSET),
_jsConsole(NULL),
_octreeStatsDialog(NULL),
_lodToolsDialog(NULL),
_maxVoxels(DEFAULT_MAX_VOXELS_PER_SYSTEM),
@ -195,12 +202,13 @@ Menu::Menu() :
addActionToQMenuAndActionHash(editMenu, MenuOption::Attachments, 0, this, SLOT(editAttachments()));
addActionToQMenuAndActionHash(editMenu, MenuOption::Animations, 0, this, SLOT(editAnimations()));
addDisabledActionAndSeparator(editMenu, "Physics");
QObject* avatar = appInstance->getAvatar();
addCheckableActionToQMenuAndActionHash(editMenu, MenuOption::ObeyEnvironmentalGravity, Qt::SHIFT | Qt::Key_G, false,
addCheckableActionToQMenuAndActionHash(editMenu, MenuOption::ObeyEnvironmentalGravity, Qt::SHIFT | Qt::Key_G, false,
avatar, SLOT(updateMotionBehaviorsFromMenu()));
addCheckableActionToQMenuAndActionHash(editMenu, MenuOption::StandOnNearbyFloors, 0, true,
avatar, SLOT(updateMotionBehaviorsFromMenu()));
addAvatarCollisionSubMenu(editMenu);
@ -225,6 +233,12 @@ Menu::Menu() :
_chatWindow = new ChatWindow(Application::getInstance()->getWindow());
#endif
addActionToQMenuAndActionHash(toolsMenu,
MenuOption::Console,
Qt::CTRL | Qt::ALT | Qt::Key_J,
this,
SLOT(toggleConsole()));
QMenu* viewMenu = addMenu("View");
addCheckableActionToQMenuAndActionHash(viewMenu,
@ -301,7 +315,12 @@ Menu::Menu() :
appInstance->getGlowEffect(),
SLOT(cycleRenderMode()));
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Shadows, 0, false);
QMenu* shadowMenu = renderOptionsMenu->addMenu("Shadows");
QActionGroup* shadowGroup = new QActionGroup(shadowMenu);
shadowGroup->addAction(addCheckableActionToQMenuAndActionHash(shadowMenu, "None", 0, true));
shadowGroup->addAction(addCheckableActionToQMenuAndActionHash(shadowMenu, MenuOption::SimpleShadows, 0, false));
shadowGroup->addAction(addCheckableActionToQMenuAndActionHash(shadowMenu, MenuOption::CascadedShadows, 0, false));
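Grouping the three shadow actions in one QActionGroup turns them into mutually exclusive radio-style checkables, which is what lets the single Shadows checkbox become a None/Simple/Cascaded choice. The idiom in isolation:

#include <QAction>
#include <QActionGroup>
#include <QMenu>
#include <QStringList>

// Build an exclusive set of checkable menu entries; checking one unchecks the rest.
void addExclusiveOptions(QMenu* menu, const QStringList& names) {
    QActionGroup* group = new QActionGroup(menu); // exclusive by default
    foreach (const QString& name, names) {
        QAction* action = menu->addAction(name);
        action->setCheckable(true);
        group->addAction(action);
    }
    if (!group->actions().isEmpty()) {
        group->actions().first()->setChecked(true);
    }
}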
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Metavoxels, 0, true);
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::BuckyBalls, 0, false);
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Particles, 0, true);
@ -330,9 +349,11 @@ Menu::Menu() :
QMenu* avatarOptionsMenu = developerMenu->addMenu("Avatar Options");
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::Avatars, 0, true);
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::AvatarsReceiveShadows, 0, true);
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::RenderSkeletonCollisionShapes);
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::RenderHeadCollisionShapes);
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::RenderBoundingCollisionShapes);
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::CollideAsRagDoll);
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::LookAtVectors, 0, false);
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu,
@ -354,6 +375,10 @@ Menu::Menu() :
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::GlowWhenSpeaking, 0, true);
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::ChatCircling, 0, false);
QMenu* oculusOptionsMenu = developerMenu->addMenu("Oculus Options");
addCheckableActionToQMenuAndActionHash(oculusOptionsMenu, MenuOption::AllowOculusCameraModeChange, 0, false);
addCheckableActionToQMenuAndActionHash(oculusOptionsMenu, MenuOption::DisplayOculusOverlays, 0, true);
QMenu* handOptionsMenu = developerMenu->addMenu("Hand Options");
addCheckableActionToQMenuAndActionHash(handOptionsMenu,
@ -369,9 +394,21 @@ Menu::Menu() :
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlignForearmsWithWrists, 0, true);
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlternateIK, 0, false);
addDisabledActionAndSeparator(developerMenu, "Testing");
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::DisableNackPackets, 0, false);
addDisabledActionAndSeparator(developerMenu, "Testing");
QMenu* timingMenu = developerMenu->addMenu("Timing and Statistics Tools");
QMenu* perfTimerMenu = timingMenu->addMenu("Performance Timer");
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::DisplayTimingDetails, 0, true);
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandDisplaySideTiming, 0, false);
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandAvatarSimulateTiming, 0, false);
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandAvatarUpdateTiming, 0, false);
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandMiscAvatarTiming, 0, false);
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandIdleTiming, 0, false);
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandPaintGLTiming, 0, false);
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandUpdateTiming, 0, false);
addCheckableActionToQMenuAndActionHash(timingMenu, MenuOption::TestPing, 0, true);
addCheckableActionToQMenuAndActionHash(timingMenu, MenuOption::FrameTimer);
addActionToQMenuAndActionHash(timingMenu, MenuOption::RunTimingTests, 0, this, SLOT(runTests()));
@ -398,6 +435,8 @@ Menu::Menu() :
SLOT(toggleAudioNoiseReduction()));
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::EchoServerAudio);
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::EchoLocalAudio);
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::StereoAudio, 0, false,
appInstance->getAudio(), SLOT(toggleStereoInput()));
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::MuteAudio,
Qt::CTRL | Qt::Key_M,
false,
@ -413,7 +452,8 @@ Menu::Menu() :
false,
appInstance->getAudio(),
SLOT(toggleToneInjection()));
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioScope, Qt::CTRL | Qt::Key_P, false,
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioScope,
Qt::CTRL | Qt::Key_P, false,
appInstance->getAudio(),
SLOT(toggleScope()));
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioScopePause,
@ -422,6 +462,33 @@ Menu::Menu() :
appInstance->getAudio(),
SLOT(toggleScopePause()));
QMenu* audioScopeMenu = audioDebugMenu->addMenu("Audio Scope Options");
addDisabledActionAndSeparator(audioScopeMenu, "Display Frames");
{
QAction *fiveFrames = addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopeFiveFrames,
0,
true,
appInstance->getAudio(),
SLOT(selectAudioScopeFiveFrames()));
QAction *twentyFrames = addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopeTwentyFrames,
0,
false,
appInstance->getAudio(),
SLOT(selectAudioScopeTwentyFrames()));
QAction *fiftyFrames = addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopeFiftyFrames,
0,
false,
appInstance->getAudio(),
SLOT(selectAudioScopeFiftyFrames()));
QActionGroup* audioScopeFramesGroup = new QActionGroup(audioScopeMenu);
audioScopeFramesGroup->addAction(fiveFrames);
audioScopeFramesGroup->addAction(twentyFrames);
audioScopeFramesGroup->addAction(fiftyFrames);
}
QMenu* spatialAudioMenu = audioDebugMenu->addMenu("Spatial Audio");
addCheckableActionToQMenuAndActionHash(spatialAudioMenu, MenuOption::AudioSpatialProcessing,
@ -636,6 +703,10 @@ void Menu::scanMenu(QMenu* menu, settingsAction modifySetting, QSettings* set) {
set->endGroup();
}
bool Menu::getShadowsEnabled() const {
return isOptionChecked(MenuOption::SimpleShadows) || isOptionChecked(MenuOption::CascadedShadows);
}
void Menu::handleViewFrustumOffsetKeyModifier(int key) {
const float VIEW_FRUSTUM_OFFSET_DELTA = 0.5f;
const float VIEW_FRUSTUM_OFFSET_UP_DELTA = 0.05f;
@ -797,6 +868,7 @@ QAction* Menu::addCheckableActionToQMenuAndActionHash(QMenu* destinationMenu,
void Menu::removeAction(QMenu* menu, const QString& actionName) {
menu->removeAction(_actionHash.value(actionName));
_actionHash.remove(actionName);
}
void Menu::setIsOptionChecked(const QString& menuOption, bool isChecked) {
@ -806,8 +878,8 @@ void Menu::setIsOptionChecked(const QString& menuOption, bool isChecked) {
}
}
bool Menu::isOptionChecked(const QString& menuOption) {
QAction* menu = _actionHash.value(menuOption);
bool Menu::isOptionChecked(const QString& menuOption) const {
const QAction* menu = _actionHash.value(menuOption);
if (menu) {
return menu->isChecked();
}
@ -1026,24 +1098,24 @@ void Menu::multipleDestinationsDecision(const QJsonObject& userData, const QJson
void Menu::muteEnvironment() {
int headerSize = numBytesForPacketHeaderGivenPacketType(PacketTypeMuteEnvironment);
int packetSize = headerSize + sizeof(glm::vec3) + sizeof(float);
glm::vec3 position = Application::getInstance()->getAvatar()->getPosition();
char* packet = (char*)malloc(packetSize);
populatePacketHeader(packet, PacketTypeMuteEnvironment);
memcpy(packet + headerSize, &position, sizeof(glm::vec3));
memcpy(packet + headerSize + sizeof(glm::vec3), &MUTE_RADIUS, sizeof(float));
QByteArray mutePacket(packet, packetSize);
// grab our audio mixer from the NodeList, if it exists
SharedNodePointer audioMixer = NodeList::getInstance()->soloNodeOfType(NodeType::AudioMixer);
if (audioMixer) {
// send off this mute packet
NodeList::getInstance()->writeDatagram(mutePacket, audioMixer);
}
free(packet);
}
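The payload here is fixed-size: the avatar position followed by the mute radius, immediately after the header. As a design note, the malloc/free pair could be avoided by building straight into a QByteArray; a sketch under the assumption (already implicit above) that populatePacketHeader writes exactly headerSize bytes:

// Same packet without manual memory management.
int headerSize = numBytesForPacketHeaderGivenPacketType(PacketTypeMuteEnvironment);
QByteArray mutePacket(headerSize + sizeof(glm::vec3) + sizeof(float), 0);
populatePacketHeader(mutePacket.data(), PacketTypeMuteEnvironment);
memcpy(mutePacket.data() + headerSize, &position, sizeof(glm::vec3));
memcpy(mutePacket.data() + headerSize + sizeof(glm::vec3), &MUTE_RADIUS, sizeof(float));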
@ -1201,19 +1273,24 @@ void Menu::showScriptEditor() {
}
void Menu::showChat() {
QMainWindow* mainWindow = Application::getInstance()->getWindow();
if (!_chatWindow) {
_chatWindow = new ChatWindow(mainWindow);
}
if (_chatWindow->isHidden()) {
_chatWindow->show();
if (AccountManager::getInstance().isLoggedIn()) {
QMainWindow* mainWindow = Application::getInstance()->getWindow();
if (!_chatWindow) {
_chatWindow = new ChatWindow(mainWindow);
}
if (_chatWindow->isHidden()) {
_chatWindow->show();
}
} else {
Application::getInstance()->getTrayIcon()->showMessage("Interface", "You need to log in to be able to chat with others on this domain.");
}
}
void Menu::toggleChat() {
#ifdef HAVE_QXMPP
_chatAction->setEnabled(XmppClient::getInstance().getXMPPClient().isConnected());
if (!_chatAction->isEnabled() && _chatWindow) {
if (!_chatAction->isEnabled() && _chatWindow && AccountManager::getInstance().isLoggedIn()) {
if (_chatWindow->isHidden()) {
_chatWindow->show();
} else {
@ -1223,6 +1300,25 @@ void Menu::toggleChat() {
#endif
}
void Menu::toggleConsole() {
QMainWindow* mainWindow = Application::getInstance()->getWindow();
if (!_jsConsole) {
QDialog* dialog = new QDialog(mainWindow, Qt::WindowStaysOnTopHint);
QVBoxLayout* layout = new QVBoxLayout(dialog);
dialog->setLayout(new QVBoxLayout(dialog));
dialog->resize(QSize(CONSOLE_WIDTH, CONSOLE_HEIGHT));
layout->setMargin(0);
layout->setSpacing(0);
layout->addWidget(new JSConsole(dialog));
dialog->setWindowOpacity(CONSOLE_WINDOW_OPACITY);
dialog->setWindowTitle(CONSOLE_TITLE);
_jsConsole = dialog;
}
_jsConsole->setVisible(!_jsConsole->isVisible());
}
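toggleConsole builds the dialog lazily on first use and afterwards only flips visibility, so the console keeps its contents across toggles. One quirk worth flagging: `new QVBoxLayout(dialog)` already installs itself as the dialog's layout, so the subsequent `dialog->setLayout(new QVBoxLayout(dialog))` constructs a second, redundant layout that Qt warns about and discards, keeping the first. A trimmed sketch without the duplication:

// Lazily-created console dialog; constants come from the code above.
void toggleConsoleSketch(QDialog*& console, QMainWindow* mainWindow) {
    if (!console) {
        console = new QDialog(mainWindow, Qt::WindowStaysOnTopHint);
        QVBoxLayout* layout = new QVBoxLayout(console); // installs itself on console
        layout->setMargin(0);
        layout->setSpacing(0);
        layout->addWidget(new JSConsole(console));
        console->resize(QSize(CONSOLE_WIDTH, CONSOLE_HEIGHT));
        console->setWindowOpacity(CONSOLE_WINDOW_OPACITY);
        console->setWindowTitle(CONSOLE_TITLE);
    }
    console->setVisible(!console->isVisible());
}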
void Menu::audioMuteToggled() {
QAction *muteAction = _actionHash.value(MenuOption::MuteAudio);
muteAction->setChecked(Application::getInstance()->getAudio()->getMuted());
@ -1565,6 +1661,16 @@ void Menu::removeMenu(const QString& menuName) {
}
}
bool Menu::menuExists(const QString& menuName) {
QAction* action = getMenuAction(menuName);
// only proceed if the menu actually exists
if (action) {
return true;
}
return false;
}
void Menu::addSeparator(const QString& menuName, const QString& separatorName) {
QMenu* menuObj = getMenu(menuName);
if (menuObj) {
@ -1640,8 +1746,17 @@ void Menu::removeMenuItem(const QString& menu, const QString& menuitem) {
QMenu* menuObj = getMenu(menu);
if (menuObj) {
removeAction(menuObj, menuitem);
QMenuBar::repaint();
}
QMenuBar::repaint();
};
bool Menu::menuItemExists(const QString& menu, const QString& menuitem) {
QMenu* menuObj = getMenu(menu);
QAction* menuItemAction = _actionHash.value(menuitem);
if (menuObj && menuItemAction) {
return true;
}
return false;
};
QString Menu::getSnapshotsLocation() const {

View file

@ -26,6 +26,7 @@
#include "location/LocationManager.h"
#include "ui/PreferencesDialog.h"
#include "ui/ChatWindow.h"
#include "ui/JSConsole.h"
#include "ui/ScriptEditorWindow.h"
const float ADJUST_LOD_DOWN_FPS = 40.0;
@ -105,6 +106,8 @@ public:
int getMaxVoxels() const { return _maxVoxels; }
QAction* getUseVoxelShader() const { return _useVoxelShader; }
bool getShadowsEnabled() const;
void handleViewFrustumOffsetKeyModifier(int key);
// User Tweakable LOD Items
@ -172,11 +175,13 @@ public slots:
QMenu* addMenu(const QString& menuName);
void removeMenu(const QString& menuName);
bool menuExists(const QString& menuName);
void addSeparator(const QString& menuName, const QString& separatorName);
void removeSeparator(const QString& menuName, const QString& separatorName);
void addMenuItem(const MenuItemProperties& properties);
void removeMenuItem(const QString& menuName, const QString& menuitem);
bool isOptionChecked(const QString& menuOption);
bool menuItemExists(const QString& menuName, const QString& menuitem);
bool isOptionChecked(const QString& menuOption) const;
void setIsOptionChecked(const QString& menuOption, bool isChecked);
private slots:
@ -195,6 +200,7 @@ private slots:
void showMetavoxelEditor();
void showScriptEditor();
void showChat();
void toggleConsole();
void toggleChat();
void audioMuteToggled();
void namedLocationCreated(LocationManager::NamedLocationCreateResponse response);
@ -249,6 +255,7 @@ private:
QPointer<MetavoxelEditor> _MetavoxelEditor;
QPointer<ScriptEditorWindow> _ScriptEditor;
QPointer<ChatWindow> _chatWindow;
QDialog* _jsConsole;
OctreeStatsDialog* _octreeStatsDialog;
LodToolsDialog* _lodToolsDialog;
int _maxVoxels;
@ -277,6 +284,7 @@ private:
namespace MenuOption {
const QString AboutApp = "About Interface";
const QString AlignForearmsWithWrists = "Align Forearms with Wrists";
const QString AllowOculusCameraModeChange = "Allow Oculus Camera Mode Change (Nausea)";
const QString AlternateIK = "Alternate IK";
const QString AmbientOcclusion = "Ambient Occlusion";
const QString Animations = "Animations...";
@ -285,6 +293,10 @@ namespace MenuOption {
const QString AudioNoiseReduction = "Audio Noise Reduction";
const QString AudioScope = "Audio Scope";
const QString AudioScopePause = "Pause Audio Scope";
const QString AudioScopeFrames = "Display Frames";
const QString AudioScopeFiveFrames = "Five";
const QString AudioScopeTwentyFrames = "Twenty";
const QString AudioScopeFiftyFrames = "Fifty";
const QString AudioToneInjection = "Inject Test Tone";
const QString AudioSpatialProcessing = "Audio Spatial Processing";
const QString AudioSpatialProcessingHeadOriented = "Head Oriented";
@ -299,29 +311,43 @@ namespace MenuOption {
const QString AudioSpatialProcessingDontDistanceAttenuate = "Don't calculate distance attenuation";
const QString AudioSpatialProcessingAlternateDistanceAttenuate = "Alternate distance attenuation";
const QString Avatars = "Avatars";
const QString AvatarsReceiveShadows = "Avatars Receive Shadows";
const QString Bandwidth = "Bandwidth Display";
const QString BandwidthDetails = "Bandwidth Details";
const QString BuckyBalls = "Bucky Balls";
const QString CascadedShadows = "Cascaded";
const QString Chat = "Chat...";
const QString ChatCircling = "Chat Circling";
const QString CollideAsRagDoll = "Collide As RagDoll";
const QString CollideWithAvatars = "Collide With Avatars";
const QString CollideWithEnvironment = "Collide With World Boundaries";
const QString CollideWithParticles = "Collide With Particles";
const QString CollideWithVoxels = "Collide With Voxels";
const QString Collisions = "Collisions";
const QString Console = "Console...";
const QString DecreaseAvatarSize = "Decrease Avatar Size";
const QString DecreaseVoxelSize = "Decrease Voxel Size";
const QString DisableAutoAdjustLOD = "Disable Automatically Adjusting LOD";
const QString DisableNackPackets = "Disable NACK Packets";
const QString DisplayFrustum = "Display Frustum";
const QString DisplayHands = "Display Hands";
const QString DisplayHandTargets = "Display Hand Targets";
const QString DisplayModelBounds = "Display Model Bounds";
const QString DisplayModelElementProxy = "Display Model Element Bounds";
const QString DisplayModelElementChildProxies = "Display Model Element Children";
const QString DisplayOculusOverlays = "Display Oculus Overlays";
const QString DisplayTimingDetails = "Display Timing Details";
const QString DontFadeOnVoxelServerChanges = "Don't Fade In/Out on Voxel Server Changes";
const QString EchoLocalAudio = "Echo Local Audio";
const QString EchoServerAudio = "Echo Server Audio";
const QString Enable3DTVMode = "Enable 3DTV Mode";
const QString ExpandMiscAvatarTiming = "Expand Misc MyAvatar Timing";
const QString ExpandAvatarUpdateTiming = "Expand MyAvatar update Timing";
const QString ExpandAvatarSimulateTiming = "Expand MyAvatar simulate Timing";
const QString ExpandDisplaySideTiming = "Expand Display Side Timing";
const QString ExpandIdleTiming = "Expand Idle Timing";
const QString ExpandPaintGLTiming = "Expand PaintGL Timing";
const QString ExpandUpdateTiming = "Expand Update Timing";
const QString Faceplus = "Faceplus";
const QString Faceshift = "Faceshift";
const QString FilterSixense = "Smooth Sixense Movement";
@ -377,13 +403,15 @@ namespace MenuOption {
const QString ScriptEditor = "Script Editor...";
const QString SettingsExport = "Export Settings";
const QString SettingsImport = "Import Settings";
const QString Shadows = "Shadows";
const QString SimpleShadows = "Simple";
const QString ShowBordersVoxelNodes = "Show Voxel Nodes";
const QString ShowBordersModelNodes = "Show Model Nodes";
const QString ShowBordersParticleNodes = "Show Particle Nodes";
const QString ShowIKConstraints = "Show IK Constraints";
const QString StandOnNearbyFloors = "Stand on nearby floors";
const QString Stars = "Stars";
const QString Stats = "Stats";
const QString StereoAudio = "Stereo Audio";
const QString StopAllScripts = "Stop All Scripts";
const QString SuppressShortTimings = "Suppress Timings Less than 10ms";
const QString TestPing = "Test Ping";

View file

@ -50,7 +50,6 @@ static const QString JOINT_FIELD = "joint";
static const QString FREE_JOINT_FIELD = "freeJoint";
static const QString S3_URL = "http://public.highfidelity.io";
static const QString DATA_SERVER_URL = "https://data-web.highfidelity.io";
static const QString MODEL_URL = "/api/v1/models";
static const QString SETTING_NAME = "LastModelUploadLocation";

View file

@ -400,9 +400,9 @@ bool closeEnoughForGovernmentWork(float a, float b) {
void runTimingTests() {
// How long does it take to make a call to get the time?
const int numTests = 1000000;
int iResults[numTests];
int* iResults = (int*)malloc(sizeof(int) * numTests);
float fTest = 1.0;
float fResults[numTests];
float* fResults = (float*)malloc(sizeof(float) * numTests);
QElapsedTimer startTime;
startTime.start();
float elapsedUsecs;
@ -413,7 +413,7 @@ void runTimingTests() {
// Random number generation
startTime.start();
for (int i = 1; i < numTests; i++) {
for (int i = 0; i < numTests; i++) {
iResults[i] = rand();
}
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
@ -421,16 +421,19 @@ void runTimingTests() {
// Random number generation using randFloat()
startTime.start();
for (int i = 1; i < numTests; i++) {
for (int i = 0; i < numTests; i++) {
fResults[i] = randFloat();
}
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
qDebug("randFloat() stored in array usecs: %f, first result: %f", elapsedUsecs / (float) numTests, fResults[0]);
free(iResults);
free(fResults);
// PowF function
fTest = 1145323.2342f;
startTime.start();
for (int i = 1; i < numTests; i++) {
for (int i = 0; i < numTests; i++) {
fTest = powf(fTest, 0.5f);
}
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
@ -440,7 +443,7 @@ void runTimingTests() {
float distance;
glm::vec3 pointA(randVector()), pointB(randVector());
startTime.start();
for (int i = 1; i < numTests; i++) {
for (int i = 0; i < numTests; i++) {
//glm::vec3 temp = pointA - pointB;
//float distanceSquared = glm::dot(temp, temp);
distance = glm::distance(pointA, pointB);
@ -454,7 +457,7 @@ void runTimingTests() {
float result;
startTime.start();
for (int i = 1; i < numTests; i++) {
for (int i = 0; i < numTests; i++) {
glm::vec3 temp = vecA-vecB;
result = glm::dot(temp,temp);
}

View file

@ -237,11 +237,15 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode) {
// If this is the avatar being looked at, render a little ball above their head
if (_isLookAtTarget) {
const float LOOK_AT_INDICATOR_RADIUS = 0.03f;
const float LOOK_AT_INDICATOR_HEIGHT = 0.60f;
const float LOOK_AT_INDICATOR_COLOR[] = { 0.8f, 0.0f, 0.0f, 0.5f };
const float LOOK_AT_INDICATOR_OFFSET = 0.22f;
const float LOOK_AT_INDICATOR_COLOR[] = { 0.8f, 0.0f, 0.0f, 0.75f };
glPushMatrix();
glColor4fv(LOOK_AT_INDICATOR_COLOR);
glTranslatef(_position.x, _position.y + (getSkeletonHeight() * LOOK_AT_INDICATOR_HEIGHT), _position.z);
if (_displayName.isEmpty() || _displayNameAlpha == 0.0f) {
glTranslatef(_position.x, getDisplayNamePosition().y, _position.z);
} else {
glTranslatef(_position.x, getDisplayNamePosition().y + LOOK_AT_INDICATOR_OFFSET, _position.z);
}
glutSolidSphere(LOOK_AT_INDICATOR_RADIUS, 15, 15);
glPopMatrix();
}
@ -343,7 +347,6 @@ glm::quat Avatar::computeRotationFromBodyToWorldUp(float proportion) const {
void Avatar::renderBody(RenderMode renderMode, float glowLevel) {
Model::RenderMode modelRenderMode = (renderMode == SHADOW_RENDER_MODE) ?
Model::SHADOW_RENDER_MODE : Model::DEFAULT_RENDER_MODE;
{
Glower glower(glowLevel);
@ -352,7 +355,8 @@ void Avatar::renderBody(RenderMode renderMode, float glowLevel) {
renderBillboard();
return;
}
_skeletonModel.render(1.0f, modelRenderMode);
_skeletonModel.render(1.0f, modelRenderMode, Menu::getInstance()->isOptionChecked(MenuOption::AvatarsReceiveShadows));
renderAttachments(renderMode);
getHand()->render(false, modelRenderMode);
}
@ -373,11 +377,11 @@ void Avatar::simulateAttachments(float deltaTime) {
if (!isMyAvatar()) {
model->setLODDistance(getLODDistance());
}
if (_skeletonModel.getJointPosition(jointIndex, jointPosition) &&
_skeletonModel.getJointRotation(jointIndex, jointRotation)) {
if (_skeletonModel.getJointPositionInWorldFrame(jointIndex, jointPosition) &&
_skeletonModel.getJointCombinedRotation(jointIndex, jointRotation)) {
model->setTranslation(jointPosition + jointRotation * attachment.translation * _scale);
model->setRotation(jointRotation * attachment.rotation);
model->setScale(_skeletonModel.getScale() * attachment.scale);
model->setScaleToFit(true, _scale * attachment.scale);
model->simulate(deltaTime);
}
}
@ -386,8 +390,9 @@ void Avatar::simulateAttachments(float deltaTime) {
void Avatar::renderAttachments(RenderMode renderMode) {
Model::RenderMode modelRenderMode = (renderMode == SHADOW_RENDER_MODE) ?
Model::SHADOW_RENDER_MODE : Model::DEFAULT_RENDER_MODE;
bool receiveShadows = Menu::getInstance()->isOptionChecked(MenuOption::AvatarsReceiveShadows);
foreach (Model* model, _attachmentModels) {
model->render(1.0f, modelRenderMode);
model->render(1.0f, modelRenderMode, receiveShadows);
}
}
@ -460,6 +465,17 @@ float Avatar::getBillboardSize() const {
return _scale * BILLBOARD_DISTANCE * tanf(glm::radians(BILLBOARD_FIELD_OF_VIEW / 2.0f));
}
glm::vec3 Avatar::getDisplayNamePosition() {
glm::vec3 namePosition;
if (getSkeletonModel().getNeckPosition(namePosition)) {
namePosition += getBodyUpDirection() * getHeadHeight() * 1.1f;
} else {
const float HEAD_PROPORTION = 0.75f;
namePosition = _position + getBodyUpDirection() * (getBillboardSize() * HEAD_PROPORTION);
}
return namePosition;
}
void Avatar::renderDisplayName() {
if (_displayName.isEmpty() || _displayNameAlpha == 0.0f) {
@ -469,13 +485,7 @@ void Avatar::renderDisplayName() {
glDisable(GL_LIGHTING);
glPushMatrix();
glm::vec3 textPosition;
if (getSkeletonModel().getNeckPosition(textPosition)) {
textPosition += getBodyUpDirection() * getHeadHeight() * 1.1f;
} else {
const float HEAD_PROPORTION = 0.75f;
textPosition = _position + getBodyUpDirection() * (getBillboardSize() * HEAD_PROPORTION);
}
glm::vec3 textPosition = getDisplayNamePosition();
glTranslatef(textPosition.x, textPosition.y, textPosition.z);
@ -695,6 +705,54 @@ QStringList Avatar::getJointNames() const {
return _skeletonModel.isActive() ? _skeletonModel.getGeometry()->getFBXGeometry().getJointNames() : QStringList();
}
glm::vec3 Avatar::getJointPosition(int index) const {
if (QThread::currentThread() != thread()) {
glm::vec3 position;
QMetaObject::invokeMethod(const_cast<Avatar*>(this), "getJointPosition", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(glm::vec3, position), Q_ARG(const int, index));
return position;
}
glm::vec3 position;
_skeletonModel.getJointPositionInWorldFrame(index, position);
return position;
}
glm::vec3 Avatar::getJointPosition(const QString& name) const {
if (QThread::currentThread() != thread()) {
glm::vec3 position;
QMetaObject::invokeMethod(const_cast<Avatar*>(this), "getJointPosition", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(glm::vec3, position), Q_ARG(const QString&, name));
return position;
}
glm::vec3 position;
_skeletonModel.getJointPositionInWorldFrame(getJointIndex(name), position);
return position;
}
glm::quat Avatar::getJointCombinedRotation(int index) const {
if (QThread::currentThread() != thread()) {
glm::quat rotation;
QMetaObject::invokeMethod(const_cast<Avatar*>(this), "getJointCombinedRotation", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(glm::quat, rotation), Q_ARG(const int, index));
return rotation;
}
glm::quat rotation;
_skeletonModel.getJointCombinedRotation(index, rotation);
return rotation;
}
glm::quat Avatar::getJointCombinedRotation(const QString& name) const {
if (QThread::currentThread() != thread()) {
glm::quat rotation;
QMetaObject::invokeMethod(const_cast<Avatar*>(this), "getJointCombinedRotation", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(glm::quat, rotation), Q_ARG(const QString&, name));
return rotation;
}
glm::quat rotation;
_skeletonModel.getJointCombinedRotation(getJointIndex(name), rotation);
return rotation;
}
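All four joint accessors share one marshalling pattern: when called off the avatar's thread (from a script engine, for instance), the call is re-posted to the owning thread with Qt::BlockingQueuedConnection and the result comes back through Q_RETURN_ARG, so _skeletonModel is only ever read on its own thread. The pattern reduced to a toy QObject (Worker is hypothetical):

#include <QObject>
#include <QThread>

// Hypothetical worker illustrating the blocking cross-thread getter above.
class Worker : public QObject {
    Q_OBJECT
public:
    Q_INVOKABLE int value() const {
        if (QThread::currentThread() != thread()) {
            int result = 0;
            // Blocks the caller until the owning thread has run value().
            QMetaObject::invokeMethod(const_cast<Worker*>(this), "value",
                                      Qt::BlockingQueuedConnection,
                                      Q_RETURN_ARG(int, result));
            return result;
        }
        return _value; // on the owning thread: plain member access is safe
    }
private:
    int _value = 42;
};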
void Avatar::setFaceModelURL(const QUrl& faceModelURL) {
AvatarData::setFaceModelURL(faceModelURL);
const QUrl DEFAULT_FACE_MODEL_URL = QUrl::fromLocalFile(Application::resourcesPath() + "meshes/defaultAvatar_head.fst");
@ -724,6 +782,8 @@ void Avatar::setAttachmentData(const QVector<AttachmentData>& attachmentData) {
// update the urls
for (int i = 0; i < attachmentData.size(); i++) {
_attachmentModels[i]->setSnapModelToCenter(true);
_attachmentModels[i]->setScaleToFit(true, _scale * _attachmentData.at(i).scale);
_attachmentModels[i]->setURL(attachmentData.at(i).modelURL);
}
}

View file

@ -152,7 +152,12 @@ public:
quint32 getCollisionGroups() const { return _collisionGroups; }
virtual void setCollisionGroups(quint32 collisionGroups) { _collisionGroups = (collisionGroups & VALID_COLLISION_GROUPS); }
Q_INVOKABLE glm::vec3 getJointPosition(int index) const;
Q_INVOKABLE glm::vec3 getJointPosition(const QString& name) const;
Q_INVOKABLE glm::quat getJointCombinedRotation(int index) const;
Q_INVOKABLE glm::quat getJointCombinedRotation(const QString& name) const;
public slots:
void updateCollisionGroups();
@ -185,6 +190,7 @@ protected:
float getHeadHeight() const;
float getPelvisFloatingHeight() const;
float getPelvisToHeadLength() const;
glm::vec3 getDisplayNamePosition();
void renderDisplayName();
virtual void renderBody(RenderMode renderMode, float glowLevel = 0.0f);

View file

@ -30,7 +30,7 @@ void FaceModel::simulate(float deltaTime, bool fullUpdate) {
}
setTranslation(neckPosition);
glm::quat neckParentRotation;
if (!owningAvatar->getSkeletonModel().getNeckParentRotation(neckParentRotation)) {
if (!owningAvatar->getSkeletonModel().getNeckParentRotationFromDefaultOrientation(neckParentRotation)) {
neckParentRotation = owningAvatar->getOrientation();
}
setRotation(neckParentRotation);
@ -48,10 +48,10 @@ void FaceModel::simulate(float deltaTime, bool fullUpdate) {
void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
// get the rotation axes in joint space and use them to adjust the rotation
glm::mat3 axes = glm::mat3_cast(_rotation);
glm::mat3 inverse = glm::mat3(glm::inverse(parentState.transform * glm::translate(state.translation) *
glm::mat3 axes = glm::mat3_cast(glm::quat());
glm::mat3 inverse = glm::mat3(glm::inverse(parentState.getTransform() * glm::translate(state.getDefaultTranslationInParentFrame()) *
joint.preTransform * glm::mat4_cast(joint.preRotation)));
state.rotation = glm::angleAxis(- RADIANS_PER_DEGREE * _owningHead->getFinalRoll(), glm::normalize(inverse * axes[2]))
state._rotationInParentFrame = glm::angleAxis(- RADIANS_PER_DEGREE * _owningHead->getFinalRoll(), glm::normalize(inverse * axes[2]))
* glm::angleAxis(RADIANS_PER_DEGREE * _owningHead->getFinalYaw(), glm::normalize(inverse * axes[1]))
* glm::angleAxis(- RADIANS_PER_DEGREE * _owningHead->getFinalPitch(), glm::normalize(inverse * axes[0]))
* joint.rotation;
@ -59,13 +59,41 @@ void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const FBX
void FaceModel::maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
// likewise with the eye joints
glm::mat4 inverse = glm::inverse(parentState.transform * glm::translate(state.translation) *
joint.preTransform * glm::mat4_cast(joint.preRotation * joint.rotation));
glm::vec3 front = glm::vec3(inverse * glm::vec4(_owningHead->getFinalOrientation() * IDENTITY_FRONT, 0.0f));
// NOTE: at the moment we do the math in the world-frame, hence the inverse transform is more complex than usual.
glm::mat4 inverse = glm::inverse(glm::mat4_cast(_rotation) * parentState.getTransform() *
glm::translate(state.getDefaultTranslationInParentFrame()) *
joint.preTransform * glm::mat4_cast(joint.preRotation * joint.rotation));
glm::vec3 front = glm::vec3(inverse * glm::vec4(_owningHead->getFinalOrientationInWorldFrame() * IDENTITY_FRONT, 0.0f));
glm::vec3 lookAt = glm::vec3(inverse * glm::vec4(_owningHead->getLookAtPosition() +
_owningHead->getSaccade() - _translation, 1.0f));
glm::quat between = rotationBetween(front, lookAt);
const float MAX_ANGLE = 30.0f * RADIANS_PER_DEGREE;
state.rotation = glm::angleAxis(glm::clamp(glm::angle(between), -MAX_ANGLE, MAX_ANGLE), glm::axis(between)) *
state._rotationInParentFrame = glm::angleAxis(glm::clamp(glm::angle(between), -MAX_ANGLE, MAX_ANGLE), glm::axis(between)) *
joint.rotation;
}
void FaceModel::updateJointState(int index) {
JointState& state = _jointStates[index];
const FBXJoint& joint = state.getFBXJoint();
if (joint.parentIndex != -1) {
const JointState& parentState = _jointStates.at(joint.parentIndex);
const FBXGeometry& geometry = _geometry->getFBXGeometry();
if (index == geometry.neckJointIndex) {
maybeUpdateNeckRotation(parentState, joint, state);
} else if (index == geometry.leftEyeJointIndex || index == geometry.rightEyeJointIndex) {
maybeUpdateEyeRotation(parentState, joint, state);
}
}
Model::updateJointState(index);
}
bool FaceModel::getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const {
if (!isActive()) {
return false;
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
return getJointPositionInWorldFrame(geometry.leftEyeJointIndex, firstEyePosition) &&
getJointPositionInWorldFrame(geometry.rightEyeJointIndex, secondEyePosition);
}

View file

@ -28,6 +28,11 @@ public:
virtual void maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
virtual void maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
virtual void updateJointState(int index);
/// Retrieve the positions of up to two eye meshes.
/// \return whether or not both eye meshes were found
bool getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
private:

View file

@ -103,7 +103,7 @@ void Hand::collideAgainstOurself() {
getLeftRightPalmIndices(leftPalmIndex, rightPalmIndex);
float scaledPalmRadius = PALM_COLLISION_RADIUS * _owningAvatar->getScale();
const Model& skeletonModel = _owningAvatar->getSkeletonModel();
const SkeletonModel& skeletonModel = _owningAvatar->getSkeletonModel();
for (int i = 0; i < int(getNumPalms()); i++) {
PalmData& palm = getPalms()[i];
if (!palm.isActive()) {

View file

@ -31,7 +31,6 @@ Head::Head(Avatar* owningAvatar) :
_rightEyePosition(0.0f, 0.0f, 0.0f),
_eyePosition(0.0f, 0.0f, 0.0f),
_scale(1.0f),
_gravity(0.0f, -1.0f, 0.0f),
_lastLoudness(0.0f),
_audioAttack(0.0f),
_angularVelocity(0,0,0),
@ -176,7 +175,8 @@ void Head::relaxLean(float deltaTime) {
}
void Head::render(float alpha, Model::RenderMode mode) {
if (_faceModel.render(alpha, mode) && _renderLookatVectors && mode != Model::SHADOW_RENDER_MODE) {
if (_faceModel.render(alpha, mode, Menu::getInstance()->isOptionChecked(MenuOption::AvatarsReceiveShadows)) &&
_renderLookatVectors && mode != Model::SHADOW_RENDER_MODE) {
renderLookatVectors(_leftEyePosition, _rightEyePosition, _lookAtPosition);
}
}
@ -188,9 +188,12 @@ void Head::setScale (float scale) {
_scale = scale;
}
glm::quat Head::getFinalOrientation() const {
return _owningAvatar->getOrientation() * glm::quat(glm::radians(
glm::vec3(getFinalPitch(), getFinalYaw(), getFinalRoll() )));
glm::quat Head::getFinalOrientationInWorldFrame() const {
return _owningAvatar->getOrientation() * getFinalOrientationInLocalFrame();
}
glm::quat Head::getFinalOrientationInLocalFrame() const {
return glm::quat(glm::radians(glm::vec3(getFinalPitch(), getFinalYaw(), getFinalRoll() )));
}
glm::quat Head::getCameraOrientation () const {

View file

@ -45,15 +45,18 @@ public:
void render(float alpha, Model::RenderMode mode);
void setScale(float scale);
void setPosition(glm::vec3 position) { _position = position; }
void setGravity(glm::vec3 gravity) { _gravity = gravity; }
void setAverageLoudness(float averageLoudness) { _averageLoudness = averageLoudness; }
void setReturnToCenter (bool returnHeadToCenter) { _returnHeadToCenter = returnHeadToCenter; }
void setRenderLookatVectors(bool onOff) { _renderLookatVectors = onOff; }
void setLeanSideways(float leanSideways) { _leanSideways = leanSideways; }
void setLeanForward(float leanForward) { _leanForward = leanForward; }
/// \return orientationBase+Delta
glm::quat getFinalOrientationInLocalFrame() const;
/// \return orientationBody * orientationBase+Delta
glm::quat getFinalOrientation() const;
/// \return orientationBody * (orientationBase+Delta)
glm::quat getFinalOrientationInWorldFrame() const;
/// \return orientationBody * orientationBasePitch
glm::quat getCameraOrientation () const;
@ -118,7 +121,6 @@ private:
glm::vec3 _rightEyePosition;
glm::vec3 _eyePosition;
float _scale;
glm::vec3 _gravity;
float _lastLoudness;
float _audioAttack;
glm::vec3 _angularVelocity;

View file

@ -24,9 +24,9 @@
#include <GeometryUtil.h>
#include <NodeList.h>
#include <PacketHeaders.h>
#include <SharedUtil.h>
#include <PerfStat.h>
#include <ShapeCollider.h>
#include <SharedUtil.h>
#include "Application.h"
#include "Audio.h"
@ -65,6 +65,7 @@ MyAvatar::MyAvatar() :
_distanceToNearestAvatar(std::numeric_limits<float>::max()),
_wasPushing(false),
_isPushing(false),
_isBraking(false),
_trapDuration(0.0f),
_thrust(0.0f),
_motorVelocity(0.0f),
@ -75,8 +76,7 @@ MyAvatar::MyAvatar() :
_lastFloorContactPoint(0.0f),
_lookAtTargetAvatar(),
_shouldRender(true),
_billboardValid(false),
_oculusYawOffset(0.0f)
_billboardValid(false)
{
for (int i = 0; i < MAX_DRIVE_KEYS; i++) {
_driveKeys[i] = 0.0f;
@ -89,8 +89,7 @@ MyAvatar::~MyAvatar() {
void MyAvatar::reset() {
_skeletonModel.reset();
getHead()->reset();
_oculusYawOffset = 0.0f;
getHead()->reset();
setVelocity(glm::vec3(0.0f));
setThrust(glm::vec3(0.0f));
@ -102,10 +101,15 @@ void MyAvatar::reset() {
}
void MyAvatar::update(float deltaTime) {
PerformanceTimer perfTimer("MyAvatar::update/");
Head* head = getHead();
head->relaxLean(deltaTime);
updateFromTrackers(deltaTime);
{
PerformanceTimer perfTimer("MyAvatar::update/updateFromTrackers");
updateFromTrackers(deltaTime);
}
if (Menu::getInstance()->isOptionChecked(MenuOption::MoveWithLean)) {
PerformanceTimer perfTimer("MyAvatar::update/moveWithLean");
// Faceshift drive is enabled, set the avatar drive based on the head position
moveWithLean();
}
@ -116,13 +120,18 @@ void MyAvatar::update(float deltaTime) {
head->setAudioAverageLoudness(audio->getAudioAverageInputLoudness());
if (_motionBehaviors & AVATAR_MOTION_OBEY_ENVIRONMENTAL_GRAVITY) {
PerformanceTimer perfTimer("MyAvatar::update/gravityWork");
setGravity(Application::getInstance()->getEnvironment()->getGravity(getPosition()));
}
simulate(deltaTime);
{
PerformanceTimer perfTimer("MyAvatar::update/simulate");
simulate(deltaTime);
}
}
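The new braces are not noise: PerformanceTimer is used as an RAII timer here, so each block scope delimits exactly the interval being measured, and the slash-separated names produce the nested readout that the Performance Timer menu entries above expand. The idiom with a stand-in timer (ScopedTimer is hypothetical, not the real class):

#include <chrono>
#include <cstdio>
#include <string>

// Stand-in for PerformanceTimer: starts on construction, records on scope exit.
class ScopedTimer {
public:
    explicit ScopedTimer(std::string name)
        : _name(std::move(name)), _start(std::chrono::steady_clock::now()) {}
    ~ScopedTimer() {
        auto us = std::chrono::duration_cast<std::chrono::microseconds>(
                      std::chrono::steady_clock::now() - _start).count();
        printf("%s: %lld us\n", _name.c_str(), (long long)us);
    }
private:
    std::string _name;
    std::chrono::steady_clock::time_point _start;
};

// Usage mirroring the code above:
//   { ScopedTimer t("MyAvatar::update/simulate"); simulate(deltaTime); }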
void MyAvatar::simulate(float deltaTime) {
PerformanceTimer perfTimer("MyAvatar::simulate");
if (_scale != _targetScale) {
float scale = (1.0f - SMOOTHING_RATIO) * _scale + SMOOTHING_RATIO * _targetScale;
@ -133,72 +142,56 @@ void MyAvatar::simulate(float deltaTime) {
// no extra movement of the hand here any more ...
_handState = HAND_STATE_NULL;
updateOrientation(deltaTime);
float keyboardInput = fabsf(_driveKeys[FWD] - _driveKeys[BACK]) +
fabsf(_driveKeys[RIGHT] - _driveKeys[LEFT]) +
fabsf(_driveKeys[UP] - _driveKeys[DOWN]);
bool walkingOnFloor = false;
float gravityLength = glm::length(_gravity);
if (gravityLength > EPSILON) {
const CapsuleShape& boundingShape = _skeletonModel.getBoundingShape();
glm::vec3 startCap;
boundingShape.getStartPoint(startCap);
glm::vec3 bottomOfBoundingCapsule = startCap + (boundingShape.getRadius() / gravityLength) * _gravity;
float fallThreshold = 2.0f * deltaTime * gravityLength;
walkingOnFloor = (glm::distance(bottomOfBoundingCapsule, _lastFloorContactPoint) < fallThreshold);
{
PerformanceTimer perfTimer("MyAvatar::simulate/updateOrientation");
updateOrientation(deltaTime);
}
{
PerformanceTimer perfTimer("MyAvatar::simulate/updatePosition");
updatePosition(deltaTime);
}
if (keyboardInput > 0.0f || glm::length2(_velocity) > 0.0f || glm::length2(_thrust) > 0.0f ||
! walkingOnFloor) {
// apply gravity
_velocity += _scale * _gravity * (GRAVITY_EARTH * deltaTime);
// update motor and thrust
updateMotorFromKeyboard(deltaTime, walkingOnFloor);
applyMotor(deltaTime);
applyThrust(deltaTime);
{
PerformanceTimer perfTimer("MyAvatar::simulate/hand Collision,simulate");
// update avatar skeleton and simulate hand and head
getHand()->collideAgainstOurself();
getHand()->simulate(deltaTime, true);
}
// update position
if (glm::length2(_velocity) < EPSILON) {
_velocity = glm::vec3(0.0f);
} else {
_position += _velocity * deltaTime;
{
PerformanceTimer perfTimer("MyAvatar::simulate/_skeletonModel.simulate()");
_skeletonModel.simulate(deltaTime);
}
{
PerformanceTimer perfTimer("MyAvatar::simulate/simulateAttachments");
simulateAttachments(deltaTime);
}
{
PerformanceTimer perfTimer("MyAvatar::simulate/copy joints");
// copy out the skeleton joints from the model
_jointData.resize(_skeletonModel.getJointStateCount());
for (int i = 0; i < _jointData.size(); i++) {
JointData& data = _jointData[i];
data.valid = _skeletonModel.getJointState(i, data.rotation);
}
}
// update moving flag based on speed
const float MOVING_SPEED_THRESHOLD = 0.01f;
_moving = glm::length(_velocity) > MOVING_SPEED_THRESHOLD;
updateChatCircle(deltaTime);
// update avatar skeleton and simulate hand and head
getHand()->collideAgainstOurself();
getHand()->simulate(deltaTime, true);
_skeletonModel.simulate(deltaTime);
simulateAttachments(deltaTime);
// copy out the skeleton joints from the model
_jointData.resize(_skeletonModel.getJointStateCount());
for (int i = 0; i < _jointData.size(); i++) {
JointData& data = _jointData[i];
data.valid = _skeletonModel.getJointState(i, data.rotation);
{
PerformanceTimer perfTimer("MyAvatar::simulate/head Simulate");
Head* head = getHead();
glm::vec3 headPosition;
if (!_skeletonModel.getHeadPosition(headPosition)) {
headPosition = _position;
}
head->setPosition(headPosition);
head->setScale(_scale);
head->simulate(deltaTime, true);
}
Head* head = getHead();
glm::vec3 headPosition;
if (!_skeletonModel.getHeadPosition(headPosition)) {
headPosition = _position;
}
head->setPosition(headPosition);
head->setScale(_scale);
head->simulate(deltaTime, true);
// now that we're done stepping the avatar forward in time, compute new collisions
if (_collisionGroups != 0) {
PerformanceTimer perfTimer("MyAvatar::simulate/_collisionGroups");
Camera* myCamera = Application::getInstance()->getCamera();
float radius = getSkeletonHeight() * COLLISION_RADIUS_SCALE;
@ -206,19 +199,20 @@ void MyAvatar::simulate(float deltaTime) {
radius = myCamera->getAspectRatio() * (myCamera->getNearClip() / cos(myCamera->getFieldOfView() / 2.0f));
radius *= COLLISION_RADIUS_SCALAR;
}
if (_collisionGroups) {
updateShapePositions();
if (_collisionGroups & COLLISION_GROUP_ENVIRONMENT) {
updateCollisionWithEnvironment(deltaTime, radius);
}
if (_collisionGroups & COLLISION_GROUP_VOXELS) {
updateCollisionWithVoxels(deltaTime, radius);
} else {
_trapDuration = 0.0f;
}
if (_collisionGroups & COLLISION_GROUP_AVATARS) {
updateCollisionWithAvatars(deltaTime);
}
updateShapePositions();
if (_collisionGroups & COLLISION_GROUP_ENVIRONMENT) {
PerformanceTimer perfTimer("MyAvatar::simulate/updateCollisionWithEnvironment");
updateCollisionWithEnvironment(deltaTime, radius);
}
if (_collisionGroups & COLLISION_GROUP_VOXELS) {
PerformanceTimer perfTimer("MyAvatar::simulate/updateCollisionWithVoxels");
updateCollisionWithVoxels(deltaTime, radius);
} else {
_trapDuration = 0.0f;
}
if (_collisionGroups & COLLISION_GROUP_AVATARS) {
PerformanceTimer perfTimer("MyAvatar::simulate/updateCollisionWithAvatars");
updateCollisionWithAvatars(deltaTime);
}
}
@ -242,7 +236,7 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
estimatedRotation = glm::degrees(safeEulerAngles(tracker->getHeadRotation()));
}
}
// Rotate the body if the head is turned beyond the screen
if (Menu::getInstance()->isOptionChecked(MenuOption::TurnWithHead)) {
const float TRACKER_YAW_TURN_SENSITIVITY = 0.5f;
@ -266,7 +260,7 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
Head* head = getHead();
if (OculusManager::isConnected()){
if (OculusManager::isConnected()) {
head->setDeltaPitch(estimatedRotation.x);
head->setDeltaYaw(estimatedRotation.y);
} else {
@ -417,21 +411,18 @@ void MyAvatar::setLocalGravity(glm::vec3 gravity) {
void MyAvatar::setGravity(const glm::vec3& gravity) {
_gravity = gravity;
getHead()->setGravity(_gravity);
// use the gravity to determine the new world up direction, if possible
float gravityLength = glm::length(gravity);
if (gravityLength > EPSILON) {
_worldUpDirection = _gravity / -gravityLength;
} else {
_worldUpDirection = DEFAULT_UP_DIRECTION;
}
// NOTE: the else case here is to leave _worldUpDirection unchanged
// so it continues to point opposite to the previous gravity setting.
}
AnimationHandlePointer MyAvatar::addAnimationHandle() {
AnimationHandlePointer handle = _skeletonModel.createAnimationHandle();
handle->setLoop(true);
handle->start();
_animationHandles.append(handle);
return handle;
}
@ -441,10 +432,12 @@ void MyAvatar::removeAnimationHandle(const AnimationHandlePointer& handle) {
_animationHandles.removeOne(handle);
}
void MyAvatar::startAnimation(const QString& url, float fps, float priority, bool loop) {
void MyAvatar::startAnimation(const QString& url, float fps, float priority,
bool loop, bool hold, float firstFrame, float lastFrame, const QStringList& maskedJoints) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "startAnimation", Q_ARG(const QString&, url),
Q_ARG(float, fps), Q_ARG(float, priority), Q_ARG(bool, loop));
QMetaObject::invokeMethod(this, "startAnimation", Q_ARG(const QString&, url), Q_ARG(float, fps),
Q_ARG(float, priority), Q_ARG(bool, loop), Q_ARG(bool, hold), Q_ARG(float, firstFrame),
Q_ARG(float, lastFrame), Q_ARG(const QStringList&, maskedJoints));
return;
}
AnimationHandlePointer handle = _skeletonModel.createAnimationHandle();
@ -452,9 +445,54 @@ void MyAvatar::startAnimation(const QString& url, float fps, float priority, boo
handle->setFPS(fps);
handle->setPriority(priority);
handle->setLoop(loop);
handle->setHold(hold);
handle->setFirstFrame(firstFrame);
handle->setLastFrame(lastFrame);
handle->setMaskedJoints(maskedJoints);
handle->start();
}
void MyAvatar::startAnimationByRole(const QString& role, const QString& url, float fps, float priority,
bool loop, bool hold, float firstFrame, float lastFrame, const QStringList& maskedJoints) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "startAnimationByRole", Q_ARG(const QString&, role), Q_ARG(const QString&, url),
Q_ARG(float, fps), Q_ARG(float, priority), Q_ARG(bool, loop), Q_ARG(bool, hold), Q_ARG(float, firstFrame),
Q_ARG(float, lastFrame), Q_ARG(const QStringList&, maskedJoints));
return;
}
// check for a configured animation for the role
foreach (const AnimationHandlePointer& handle, _animationHandles) {
if (handle->getRole() == role) {
handle->start();
return;
}
}
// no joy; use the parameters provided
AnimationHandlePointer handle = _skeletonModel.createAnimationHandle();
handle->setRole(role);
handle->setURL(url);
handle->setFPS(fps);
handle->setPriority(priority);
handle->setLoop(loop);
handle->setHold(hold);
handle->setFirstFrame(firstFrame);
handle->setLastFrame(lastFrame);
handle->setMaskedJoints(maskedJoints);
handle->start();
}
void MyAvatar::stopAnimationByRole(const QString& role) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "stopAnimationByRole", Q_ARG(const QString&, role));
return;
}
foreach (const AnimationHandlePointer& handle, _skeletonModel.getRunningAnimations()) {
if (handle->getRole() == role) {
handle->stop();
}
}
}
void MyAvatar::stopAnimation(const QString& url) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "stopAnimation", Q_ARG(const QString&, url));
@ -463,7 +501,6 @@ void MyAvatar::stopAnimation(const QString& url) {
foreach (const AnimationHandlePointer& handle, _skeletonModel.getRunningAnimations()) {
if (handle->getURL() == url) {
handle->stop();
return;
}
}
}
@ -510,9 +547,16 @@ void MyAvatar::saveData(QSettings* settings) {
for (int i = 0; i < _animationHandles.size(); i++) {
settings->setArrayIndex(i);
const AnimationHandlePointer& pointer = _animationHandles.at(i);
settings->setValue("role", pointer->getRole());
settings->setValue("url", pointer->getURL());
settings->setValue("fps", pointer->getFPS());
settings->setValue("priority", pointer->getPriority());
settings->setValue("loop", pointer->getLoop());
settings->setValue("hold", pointer->getHold());
settings->setValue("startAutomatically", pointer->getStartAutomatically());
settings->setValue("firstFrame", pointer->getFirstFrame());
settings->setValue("lastFrame", pointer->getLastFrame());
settings->setValue("maskedJoints", pointer->getMaskedJoints());
}
settings->endArray();
@ -576,9 +620,16 @@ void MyAvatar::loadData(QSettings* settings) {
for (int i = 0; i < animationCount; i++) {
settings->setArrayIndex(i);
const AnimationHandlePointer& handle = _animationHandles.at(i);
handle->setRole(settings->value("role", "idle").toString());
handle->setURL(settings->value("url").toUrl());
handle->setFPS(loadSetting(settings, "fps", 30.0f));
handle->setPriority(loadSetting(settings, "priority", 1.0f));
handle->setLoop(settings->value("loop", true).toBool());
handle->setHold(settings->value("hold", false).toBool());
handle->setStartAutomatically(settings->value("startAutomatically", true).toBool());
handle->setFirstFrame(settings->value("firstFrame", 0.0f).toFloat());
handle->setLastFrame(settings->value("lastFrame", INT_MAX).toFloat());
handle->setMaskedJoints(settings->value("maskedJoints").toStringList());
}
settings->endArray();
@ -673,7 +724,8 @@ void MyAvatar::updateLookAtTargetAvatar() {
Avatar* avatar = static_cast<Avatar*>(avatarPointer.data());
avatar->setIsLookAtTarget(false);
if (!avatar->isMyAvatar()) {
float angleTo = glm::angle(getHead()->getFinalOrientation() * glm::vec3(0.0f, 0.0f, -1.0f),
glm::vec3 DEFAULT_GAZE_IN_HEAD_FRAME = glm::vec3(0.0f, 0.0f, -1.0f);
float angleTo = glm::angle(getHead()->getFinalOrientationInWorldFrame() * DEFAULT_GAZE_IN_HEAD_FRAME,
glm::normalize(avatar->getHead()->getEyePosition() - getHead()->getEyePosition()));
if (angleTo < smallestAngleTo) {
_lookAtTargetAvatar = avatarPointer;
@ -695,17 +747,19 @@ glm::vec3 MyAvatar::getUprightHeadPosition() const {
return _position + getWorldAlignedOrientation() * glm::vec3(0.0f, getPelvisToHeadLength(), 0.0f);
}
const float JOINT_PRIORITY = 2.0f;
void MyAvatar::setJointData(int index, const glm::quat& rotation) {
Avatar::setJointData(index, rotation);
if (QThread::currentThread() == thread()) {
_skeletonModel.setJointState(index, true, rotation);
_skeletonModel.setJointState(index, true, rotation, JOINT_PRIORITY);
}
}
void MyAvatar::clearJointData(int index) {
Avatar::clearJointData(index);
if (QThread::currentThread() == thread()) {
_skeletonModel.setJointState(index, false);
_skeletonModel.setJointState(index, false, glm::quat(), JOINT_PRIORITY);
}
}
@ -752,7 +806,7 @@ void MyAvatar::renderBody(RenderMode renderMode, float glowLevel) {
// Render the body's voxels and head
Model::RenderMode modelRenderMode = (renderMode == SHADOW_RENDER_MODE) ?
Model::SHADOW_RENDER_MODE : Model::DEFAULT_RENDER_MODE;
_skeletonModel.render(1.0f, modelRenderMode);
_skeletonModel.render(1.0f, modelRenderMode, Menu::getInstance()->isOptionChecked(MenuOption::AvatarsReceiveShadows));
renderAttachments(renderMode);
// Render head so long as the camera isn't inside it
@ -770,6 +824,16 @@ bool MyAvatar::shouldRenderHead(const glm::vec3& cameraPosition, RenderMode rend
(glm::length(cameraPosition - head->calculateAverageEyePosition()) > RENDER_HEAD_CUTOFF_DISTANCE * _scale);
}
float MyAvatar::computeDistanceToFloor(const glm::vec3& startPoint) {
PerformanceTimer perfTimer("MyAvatar::computeDistanceToFloor()");
glm::vec3 direction = -_worldUpDirection;
OctreeElement* elementHit; // output from findRayIntersection
float distance = FLT_MAX; // output from findRayIntersection
BoxFace face; // output from findRayIntersection
Application::getInstance()->getVoxelTree()->findRayIntersection(startPoint, direction, elementHit, distance, face);
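// if the ray hits nothing, distance is left at FLT_MAX, which callers
// read as "no floor anywhere below" (see the NEARBY_FLOOR_THRESHOLD checks)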
return distance;
}
void MyAvatar::updateOrientation(float deltaTime) {
// Gather rotation information from keyboard
_bodyYawDelta -= _driveKeys[ROT_RIGHT] * YAW_SPEED * deltaTime;
@ -799,44 +863,8 @@ void MyAvatar::updateOrientation(float deltaTime) {
OculusManager::getEulerAngles(yaw, pitch, roll);
// ... so they need to be converted to degrees before we do math...
// The neck is limited in how much it can yaw, so we check its relative
// yaw from the body and yaw the body if necessary.
yaw *= DEGREES_PER_RADIAN;
float bodyToHeadYaw = yaw - _oculusYawOffset;
const float MAX_NECK_YAW = 85.0f; // degrees
if ((fabs(bodyToHeadYaw) > 2.0f * MAX_NECK_YAW) && (yaw * _oculusYawOffset < 0.0f)) {
// We've wrapped around the range for yaw so adjust
// the measured yaw to be relative to _oculusYawOffset.
if (yaw > 0.0f) {
yaw -= 360.0f;
} else {
yaw += 360.0f;
}
bodyToHeadYaw = yaw - _oculusYawOffset;
}
float delta = fabs(bodyToHeadYaw) - MAX_NECK_YAW;
if (delta > 0.0f) {
yaw = MAX_NECK_YAW;
if (bodyToHeadYaw < 0.0f) {
delta *= -1.0f;
bodyToHeadYaw = -MAX_NECK_YAW;
} else {
bodyToHeadYaw = MAX_NECK_YAW;
}
// constrain _oculusYawOffset to be within range [-180,180]
_oculusYawOffset = fmod((_oculusYawOffset + delta) + 180.0f, 360.0f) - 180.0f;
// We must adjust the body orientation using a delta rotation (rather than
// doing yaw math) because the body's yaw ranges are not the same
// as what the Oculus API provides.
glm::vec3 UP_AXIS = glm::vec3(0.0f, 1.0f, 0.0f);
glm::quat bodyCorrection = glm::angleAxis(glm::radians(delta), UP_AXIS);
orientation = orientation * bodyCorrection;
}
Head* head = getHead();
head->setBaseYaw(bodyToHeadYaw);
head->setBaseYaw(yaw * DEGREES_PER_RADIAN);
head->setBasePitch(pitch * DEGREES_PER_RADIAN);
head->setBaseRoll(roll * DEGREES_PER_RADIAN);
}
@ -845,6 +873,97 @@ void MyAvatar::updateOrientation(float deltaTime) {
setOrientation(orientation);
}
const float NEARBY_FLOOR_THRESHOLD = 5.0f;
void MyAvatar::updatePosition(float deltaTime) {
PerformanceTimer perfTimer("MyAvatar::updatePosition");
float keyboardInput = fabsf(_driveKeys[FWD] - _driveKeys[BACK]) +
fabsf(_driveKeys[RIGHT] - _driveKeys[LEFT]) +
fabsf(_driveKeys[UP] - _driveKeys[DOWN]);
bool walkingOnFloor = false;
float gravityLength = glm::length(_gravity) * GRAVITY_EARTH;
const CapsuleShape& boundingShape = _skeletonModel.getBoundingShape();
glm::vec3 startCap;
boundingShape.getStartPoint(startCap);
glm::vec3 bottom = startCap - boundingShape.getRadius() * _worldUpDirection;
if (gravityLength > EPSILON) {
float speedFromGravity = _scale * deltaTime * gravityLength;
float distanceToFall = glm::distance(bottom, _lastFloorContactPoint);
walkingOnFloor = (distanceToFall < 2.0f * deltaTime * speedFromGravity);
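// i.e. the bottom of the capsule is within about two frames of gravity-driven
// fall of the last known floor contact point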
if (walkingOnFloor) {
// BEGIN HACK: to prevent the avatar from bouncing on a floor surface
if (distanceToFall < deltaTime * speedFromGravity) {
float verticalSpeed = glm::dot(_velocity, _worldUpDirection);
if (fabs(verticalSpeed) < speedFromGravity) {
// we're standing on a floor, and nearly at rest so we zero the vertical velocity component
_velocity -= verticalSpeed * _worldUpDirection;
}
} else {
// fall with gravity against floor
_velocity -= speedFromGravity * _worldUpDirection;
}
// END HACK
} else {
if (!_isBraking) {
// fall with gravity toward floor
_velocity -= speedFromGravity * _worldUpDirection;
}
if (_motionBehaviors & AVATAR_MOTION_STAND_ON_NEARBY_FLOORS) {
const float MAX_VERTICAL_FLOOR_DETECTION_SPEED = _scale * MAX_WALKING_SPEED;
if (keyboardInput && glm::dot(_motorVelocity, _worldUpDirection) > 0.0f &&
glm::dot(_velocity, _worldUpDirection) > MAX_VERTICAL_FLOOR_DETECTION_SPEED) {
// disable local gravity when flying up
setLocalGravity(glm::vec3(0.0f));
} else {
const float maxFloorDistance = _scale * NEARBY_FLOOR_THRESHOLD;
if (computeDistanceToFloor(bottom) > maxFloorDistance) {
// disable local gravity when floor is too far
setLocalGravity(glm::vec3(0.0f));
}
}
}
}
} else {
if ((_collisionGroups & COLLISION_GROUP_VOXELS) &&
_motionBehaviors & AVATAR_MOTION_STAND_ON_NEARBY_FLOORS) {
const float MIN_FLOOR_DETECTION_SPEED = _scale * 1.0f;
if (glm::length(_velocity) < MIN_FLOOR_DETECTION_SPEED ) {
// scan for floor under avatar
const float maxFloorDistance = _scale * NEARBY_FLOOR_THRESHOLD;
if (computeDistanceToFloor(bottom) < maxFloorDistance) {
// enable local gravity
setLocalGravity(-_worldUpDirection);
}
}
}
}
if (keyboardInput > 0.0f || glm::length2(_velocity) > 0.0f || glm::length2(_thrust) > 0.0f || ! walkingOnFloor) {
// update motor and thrust
updateMotorFromKeyboard(deltaTime, walkingOnFloor);
applyMotor(deltaTime);
applyThrust(deltaTime);
// update position
if (glm::length2(_velocity) < EPSILON) {
_velocity = glm::vec3(0.0f);
} else {
_position += _velocity * deltaTime;
}
}
// update moving flag based on speed
const float MOVING_SPEED_THRESHOLD = 0.01f;
_moving = glm::length(_velocity) > MOVING_SPEED_THRESHOLD;
updateChatCircle(deltaTime);
}
void MyAvatar::updateMotorFromKeyboard(float deltaTime, bool walking) {
// Increase motor velocity until its length is equal to _maxMotorSpeed.
if (!(_motionBehaviors & AVATAR_MOTION_MOTOR_KEYBOARD_ENABLED)) {
@ -870,12 +989,12 @@ void MyAvatar::updateMotorFromKeyboard(float deltaTime, bool walking) {
if (directionLength > EPSILON) {
direction /= directionLength;
// the finalMotorSpeed depends on whether we are walking or not
float finalMaxMotorSpeed = walking ? MAX_WALKING_SPEED : _maxMotorSpeed;
float finalMaxMotorSpeed = walking ? _scale * MAX_WALKING_SPEED : _scale * _maxMotorSpeed;
float motorLength = glm::length(_motorVelocity);
if (motorLength < MIN_KEYBOARD_CONTROL_SPEED) {
if (motorLength < _scale * MIN_KEYBOARD_CONTROL_SPEED) {
// an active keyboard motor should never be slower than this
_motorVelocity = MIN_KEYBOARD_CONTROL_SPEED * direction;
_motorVelocity = _scale * MIN_KEYBOARD_CONTROL_SPEED * direction;
} else {
float MOTOR_LENGTH_TIMESCALE = 1.5f;
float tau = glm::clamp(deltaTime / MOTOR_LENGTH_TIMESCALE, 0.0f, 1.0f);
@ -903,32 +1022,30 @@ float MyAvatar::computeMotorTimescale() {
// (1) braking --> short timescale (aggressive motor assertion)
// (2) pushing --> medium timescale (mild motor assertion)
// (3) inactive --> long timescale (gentle friction for low speeds)
//
// TODO: recover extra braking behavior when flying close to nearest avatar
float MIN_MOTOR_TIMESCALE = 0.125f;
float MAX_MOTOR_TIMESCALE = 0.5f;
float MIN_BRAKE_SPEED = 0.4f;
float timescale = MAX_MOTOR_TIMESCALE;
float speed = glm::length(_velocity);
bool areThrusting = (glm::length2(_thrust) > EPSILON);
if (_wasPushing && !(_isPushing || areThrusting) && speed > MIN_BRAKE_SPEED) {
// we don't change _wasPushing for this case -->
// keeps the brakes on until we go below MIN_BRAKE_SPEED
timescale = MIN_MOTOR_TIMESCALE;
bool isThrust = (glm::length2(_thrust) > EPSILON);
if (_isPushing || isThrust) {
timescale = _motorTimescale;
_isBraking = false;
} else {
if (_isPushing) {
timescale = _motorTimescale;
}
_wasPushing = _isPushing || areThrusting;
float speed = glm::length(_velocity);
_isBraking = _wasPushing || (_isBraking && speed > MIN_BRAKE_SPEED);
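// braking latches on when a push has just ended and stays latched until speed
// decays below MIN_BRAKE_SPEED; the short timescale below stops the avatar quickly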
if (_isBraking) {
timescale = MIN_MOTOR_TIMESCALE;
}
}
_wasPushing = _isPushing || isThrust;
_isPushing = false;
return timescale;
}
void MyAvatar::applyMotor(float deltaTime) {
// TODO: recover extra braking behavior when flying close to nearest avatar
if (!( _motionBehaviors & AVATAR_MOTION_MOTOR_ENABLED)) {
// nothing to do --> early exit
return;
@ -1118,6 +1235,8 @@ void MyAvatar::updateCollisionWithVoxels(float deltaTime, float radius) {
const float MIN_STEP_HEIGHT = 0.0f;
glm::vec3 footBase = boundingShape.getPosition() - (capsuleRadius + capsuleHalfHeight) * _worldUpDirection;
float highestStep = 0.0f;
float lowestStep = MAX_STEP_HEIGHT;
glm::vec3 floorPoint;
glm::vec3 stepPenetration(0.0f);
glm::vec3 totalPenetration(0.0f);
@ -1151,8 +1270,15 @@ void MyAvatar::updateCollisionWithVoxels(float deltaTime, float radius) {
highestStep = stepHeight;
stepPenetration = collision->_penetration;
}
if (stepHeight < lowestStep) {
lowestStep = stepHeight;
floorPoint = collision->_contactPoint - collision->_penetration;
}
}
}
if (lowestStep < MAX_STEP_HEIGHT) {
_lastFloorContactPoint = floorPoint;
}
float penetrationLength = glm::length(totalPenetration);
if (penetrationLength < EPSILON) {
@ -1191,8 +1317,9 @@ void MyAvatar::updateCollisionWithVoxels(float deltaTime, float radius) {
applyHardCollision(totalPenetration, VOXEL_ELASTICITY, VOXEL_DAMPING);
}
const float VOXEL_COLLISION_FREQUENCY = 0.5f;
updateCollisionSound(myCollisions[0]->_penetration, deltaTime, VOXEL_COLLISION_FREQUENCY);
// Don't make a collision sound against voxels by default -- too annoying when walking
//const float VOXEL_COLLISION_FREQUENCY = 0.5f;
//updateCollisionSound(myCollisions[0]->_penetration, deltaTime, VOXEL_COLLISION_FREQUENCY);
}
_trapDuration = isTrapped ? _trapDuration + deltaTime : 0.0f;
}
@ -1538,7 +1665,8 @@ void MyAvatar::goToLocationFromResponse(const QJsonObject& jsonObject) {
}
void MyAvatar::updateMotionBehaviorsFromMenu() {
if (Menu::getInstance()->isOptionChecked(MenuOption::ObeyEnvironmentalGravity)) {
Menu* menu = Menu::getInstance();
if (menu->isOptionChecked(MenuOption::ObeyEnvironmentalGravity)) {
_motionBehaviors |= AVATAR_MOTION_OBEY_ENVIRONMENTAL_GRAVITY;
// Environmental and Local gravities are incompatible. Environmental setting trumps local.
_motionBehaviors &= ~AVATAR_MOTION_OBEY_LOCAL_GRAVITY;
@ -1548,6 +1676,14 @@ void MyAvatar::updateMotionBehaviorsFromMenu() {
if (! (_motionBehaviors & (AVATAR_MOTION_OBEY_ENVIRONMENTAL_GRAVITY | AVATAR_MOTION_OBEY_LOCAL_GRAVITY))) {
setGravity(glm::vec3(0.0f));
}
if (menu->isOptionChecked(MenuOption::StandOnNearbyFloors)) {
_motionBehaviors |= AVATAR_MOTION_STAND_ON_NEARBY_FLOORS;
// standing on floors requires collision with voxels
_collisionGroups |= COLLISION_GROUP_VOXELS;
menu->setIsOptionChecked(MenuOption::CollideWithVoxels, true);
} else {
_motionBehaviors &= ~AVATAR_MOTION_STAND_ON_NEARBY_FLOORS;
}
}
void MyAvatar::renderAttachments(RenderMode renderMode) {
@ -1559,10 +1695,11 @@ void MyAvatar::renderAttachments(RenderMode renderMode) {
QString headJointName = (geometry.headJointIndex == -1) ? QString() : geometry.joints.at(geometry.headJointIndex).name;
Model::RenderMode modelRenderMode = (renderMode == SHADOW_RENDER_MODE) ?
Model::SHADOW_RENDER_MODE : Model::DEFAULT_RENDER_MODE;
bool receiveShadows = Menu::getInstance()->isOptionChecked(MenuOption::AvatarsReceiveShadows);
for (int i = 0; i < _attachmentData.size(); i++) {
const QString& jointName = _attachmentData.at(i).jointName;
if (jointName != headJointName && jointName != "Head") {
_attachmentModels.at(i)->render(1.0f, modelRenderMode);
_attachmentModels.at(i)->render(1.0f, modelRenderMode, receiveShadows);
}
}
}
@ -1574,6 +1711,11 @@ void MyAvatar::setCollisionGroups(quint32 collisionGroups) {
menu->setIsOptionChecked(MenuOption::CollideWithAvatars, (bool)(_collisionGroups & COLLISION_GROUP_AVATARS));
menu->setIsOptionChecked(MenuOption::CollideWithVoxels, (bool)(_collisionGroups & COLLISION_GROUP_VOXELS));
menu->setIsOptionChecked(MenuOption::CollideWithParticles, (bool)(_collisionGroups & COLLISION_GROUP_PARTICLES));
if (! (_collisionGroups & COLLISION_GROUP_VOXELS)) {
// no collision with voxels --> disable standing on floors
_motionBehaviors &= ~AVATAR_MOTION_STAND_ON_NEARBY_FLOORS;
menu->setIsOptionChecked(MenuOption::StandOnNearbyFloors, false);
}
}
void MyAvatar::setMotionBehaviorsByScript(quint32 flags) {

View file

@ -67,12 +67,21 @@ public:
void removeAnimationHandle(const AnimationHandlePointer& handle);
/// Allows scripts to run animations.
Q_INVOKABLE void startAnimation(const QString& url, float fps = 30.0f,
float priority = 1.0f, bool loop = false);
Q_INVOKABLE void startAnimation(const QString& url, float fps = 30.0f, float priority = 1.0f, bool loop = false,
bool hold = false, float firstFrame = 0.0f, float lastFrame = FLT_MAX, const QStringList& maskedJoints = QStringList());
/// Stops an animation as identified by a URL.
Q_INVOKABLE void stopAnimation(const QString& url);
/// Starts an animation by its role, using the provided URL and parameters if the avatar doesn't have a custom
/// animation for the role.
Q_INVOKABLE void startAnimationByRole(const QString& role, const QString& url = QString(), float fps = 30.0f,
float priority = 1.0f, bool loop = false, bool hold = false, float firstFrame = 0.0f,
float lastFrame = FLT_MAX, const QStringList& maskedJoints = QStringList());
/// Stops an animation identified by its role.
Q_INVOKABLE void stopAnimationByRole(const QString& role);
// get/set avatar data
void saveData(QSettings* settings);
void loadData(QSettings* settings);
@ -141,11 +150,11 @@ private:
bool _shouldJump;
float _driveKeys[MAX_DRIVE_KEYS];
glm::vec3 _gravity;
glm::vec3 _environmentGravity;
float _distanceToNearestAvatar; // How close is the nearest avatar?
bool _wasPushing;
bool _isPushing;
bool _isBraking;
float _trapDuration; // seconds that avatar has been trapped by collisions
glm::vec3 _thrust; // final acceleration from outside sources for the current frame
@ -165,7 +174,9 @@ private:
QList<AnimationHandlePointer> _animationHandles;
// private methods
float computeDistanceToFloor(const glm::vec3& startPoint);
void updateOrientation(float deltaTime);
void updatePosition(float deltaTime);
void updateMotorFromKeyboard(float deltaTime, bool walking);
float computeMotorTimescale();
void applyMotor(float deltaTime);

View file

@ -21,6 +21,16 @@ SkeletonModel::SkeletonModel(Avatar* owningAvatar) :
_owningAvatar(owningAvatar) {
}
void SkeletonModel::setJointStates(QVector<JointState> states) {
Model::setJointStates(states);
if (isActive() && _owningAvatar->isMyAvatar()) {
_ragDoll.init(_jointStates);
}
}
const float PALM_PRIORITY = 3.0f;
void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
setTranslation(_owningAvatar->getPosition());
setRotation(_owningAvatar->getOrientation() * glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f)));
@ -43,7 +53,8 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
}
int jointIndex = geometry.humanIKJointIndices.at(humanIKJointIndex);
if (jointIndex != -1) {
setJointRotation(jointIndex, _rotation * prioVR->getJointRotations().at(i), true);
JointState& state = _jointStates[jointIndex];
state.setRotationFromBindFrame(prioVR->getJointRotations().at(i), PALM_PRIORITY);
}
}
return;
@ -58,21 +69,38 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
if (leftPalmIndex == -1) {
// palms are not yet set, use mouse
if (_owningAvatar->getHandState() == HAND_STATE_NULL) {
restoreRightHandPosition(HAND_RESTORATION_RATE);
restoreRightHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
} else {
applyHandPosition(geometry.rightHandJointIndex, _owningAvatar->getHandPosition());
// transform into model-frame
glm::vec3 handPosition = glm::inverse(_rotation) * (_owningAvatar->getHandPosition() - _translation);
applyHandPosition(geometry.rightHandJointIndex, handPosition);
}
restoreLeftHandPosition(HAND_RESTORATION_RATE);
restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
} else if (leftPalmIndex == rightPalmIndex) {
// right hand only
applyPalmData(geometry.rightHandJointIndex, hand->getPalms()[leftPalmIndex]);
restoreLeftHandPosition(HAND_RESTORATION_RATE);
restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
} else {
applyPalmData(geometry.leftHandJointIndex, hand->getPalms()[leftPalmIndex]);
applyPalmData(geometry.rightHandJointIndex, hand->getPalms()[rightPalmIndex]);
}
simulateRagDoll(deltaTime);
}
void SkeletonModel::simulateRagDoll(float deltaTime) {
_ragDoll.slaveToSkeleton(_jointStates, 0.1f); // fraction = 0.1f left intentionally low for demo purposes
float MIN_CONSTRAINT_ERROR = 0.005f; // 5mm
int MAX_ITERATIONS = 4;
int iterations = 0;
float delta = 0.0f;
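// relaxation loop: keep enforcing all constraints until the reported error
// drops below MIN_CONSTRAINT_ERROR or we give up after MAX_ITERATIONS passes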
do {
delta = _ragDoll.enforceConstraints();
++iterations;
} while (delta > MIN_CONSTRAINT_ERROR && iterations < MAX_ITERATIONS);
}
void SkeletonModel::getHandShapes(int jointIndex, QVector<const Shape*>& shapes) const {
@ -116,6 +144,9 @@ void SkeletonModel::getBodyShapes(QVector<const Shape*>& shapes) const {
void SkeletonModel::renderIKConstraints() {
renderJointConstraints(getRightHandJointIndex());
renderJointConstraints(getLeftHandJointIndex());
//if (isActive() && _owningAvatar->isMyAvatar()) {
// renderRagDoll();
//}
}
class IndexValue {
@ -129,10 +160,11 @@ bool operator<(const IndexValue& firstIndex, const IndexValue& secondIndex) {
}
void SkeletonModel::applyHandPosition(int jointIndex, const glm::vec3& position) {
if (jointIndex == -1) {
if (jointIndex == -1 || jointIndex >= _jointStates.size()) {
return;
}
setJointPosition(jointIndex, position);
// NOTE: 'position' is in model-frame
setJointPosition(jointIndex, position, glm::quat(), false, -1, false, glm::vec3(0.0f, -1.0f, 0.0f), PALM_PRIORITY);
const FBXGeometry& geometry = _geometry->getFBXGeometry();
glm::vec3 handPosition, elbowPosition;
@ -143,16 +175,16 @@ void SkeletonModel::applyHandPosition(int jointIndex, const glm::vec3& position)
if (forearmLength < EPSILON) {
return;
}
glm::quat handRotation;
getJointRotation(jointIndex, handRotation, true);
JointState& state = _jointStates[jointIndex];
glm::quat handRotation = state.getRotation();
// align hand with forearm
float sign = (jointIndex == geometry.rightHandJointIndex) ? 1.0f : -1.0f;
applyRotationDelta(jointIndex, rotationBetween(handRotation * glm::vec3(-sign, 0.0f, 0.0f), forearmVector));
state.applyRotationDelta(rotationBetween(handRotation * glm::vec3(-sign, 0.0f, 0.0f), forearmVector), true, PALM_PRIORITY);
}
void SkeletonModel::applyPalmData(int jointIndex, PalmData& palm) {
if (jointIndex == -1) {
if (jointIndex == -1 || jointIndex >= _jointStates.size()) {
return;
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
@ -161,45 +193,63 @@ void SkeletonModel::applyPalmData(int jointIndex, PalmData& palm) {
if (parentJointIndex == -1) {
return;
}
// rotate palm to align with its normal (normal points out of hand's palm)
glm::quat palmRotation;
if (!Menu::getInstance()->isOptionChecked(MenuOption::AlternateIK) &&
Menu::getInstance()->isOptionChecked(MenuOption::AlignForearmsWithWrists)) {
getJointRotation(parentJointIndex, palmRotation, true);
} else {
getJointRotation(jointIndex, palmRotation, true);
}
palmRotation = rotationBetween(palmRotation * geometry.palmDirection, palm.getNormal()) * palmRotation;
// rotate palm to align with finger direction
glm::vec3 direction = palm.getFingerDirection();
palmRotation = rotationBetween(palmRotation * glm::vec3(-sign, 0.0f, 0.0f), direction) * palmRotation;
glm::quat inverseRotation = glm::inverse(_rotation);
glm::vec3 palmPosition = inverseRotation * (palm.getPosition() - _translation);
glm::vec3 palmNormal = inverseRotation * palm.getNormal();
glm::vec3 fingerDirection = inverseRotation * palm.getFingerDirection();
glm::quat palmRotation = rotationBetween(geometry.palmDirection, palmNormal);
palmRotation = rotationBetween(palmRotation * glm::vec3(-sign, 0.0f, 0.0f), fingerDirection) * palmRotation;
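// two-step alignment: first rotate the model's palm direction onto the measured
// palm normal, then rotate the resulting finger axis onto the measured finger direction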
// set hand position, rotation
if (Menu::getInstance()->isOptionChecked(MenuOption::AlternateIK)) {
setHandPosition(jointIndex, palm.getPosition(), palmRotation);
setHandPosition(jointIndex, palmPosition, palmRotation);
} else if (Menu::getInstance()->isOptionChecked(MenuOption::AlignForearmsWithWrists)) {
glm::vec3 forearmVector = palmRotation * glm::vec3(sign, 0.0f, 0.0f);
setJointPosition(parentJointIndex, palm.getPosition() + forearmVector *
geometry.joints.at(jointIndex).distanceToParent * extractUniformScale(_scale));
setJointRotation(parentJointIndex, palmRotation, true);
_jointStates[jointIndex].rotation = glm::quat();
float forearmLength = geometry.joints.at(jointIndex).distanceToParent * extractUniformScale(_scale);
glm::vec3 forearm = palmRotation * glm::vec3(sign * forearmLength, 0.0f, 0.0f);
setJointPosition(parentJointIndex, palmPosition + forearm,
glm::quat(), false, -1, false, glm::vec3(0.0f, -1.0f, 0.0f), PALM_PRIORITY);
JointState& parentState = _jointStates[parentJointIndex];
parentState.setRotationFromBindFrame(palmRotation, PALM_PRIORITY);
// lock hand to forearm by slamming its rotation (in parent-frame) to identity
_jointStates[jointIndex]._rotationInParentFrame = glm::quat();
} else {
setJointPosition(jointIndex, palm.getPosition(), palmRotation, true);
setJointPosition(jointIndex, palmPosition, palmRotation,
true, -1, false, glm::vec3(0.0f, -1.0f, 0.0f), PALM_PRIORITY);
}
}
void SkeletonModel::updateJointState(int index) {
JointState& state = _jointStates[index];
const FBXJoint& joint = state.getFBXJoint();
if (joint.parentIndex != -1) {
const JointState& parentState = _jointStates.at(joint.parentIndex);
const FBXGeometry& geometry = _geometry->getFBXGeometry();
if (index == geometry.leanJointIndex) {
maybeUpdateLeanRotation(parentState, joint, state);
} else if (index == geometry.neckJointIndex) {
maybeUpdateNeckRotation(parentState, joint, state);
} else if (index == geometry.leftEyeJointIndex || index == geometry.rightEyeJointIndex) {
maybeUpdateEyeRotation(parentState, joint, state);
}
}
Model::updateJointState(index);
if (index == _geometry->getFBXGeometry().rootJointIndex) {
JointState& state = _jointStates[index];
state.transform[3][0] = 0.0f;
state.transform[3][1] = 0.0f;
state.transform[3][2] = 0.0f;
state.clearTransformTranslation();
}
}
void SkeletonModel::updateShapePositions() {
if (isActive() && _owningAvatar->isMyAvatar() &&
Menu::getInstance()->isOptionChecked(MenuOption::CollideAsRagDoll)) {
_ragDoll.updateShapes(_jointShapes, _rotation, _translation);
} else {
Model::updateShapePositions();
}
}
@ -208,10 +258,10 @@ void SkeletonModel::maybeUpdateLeanRotation(const JointState& parentState, const
return;
}
// get the rotation axes in joint space and use them to adjust the rotation
glm::mat3 axes = glm::mat3_cast(_rotation);
glm::mat3 inverse = glm::mat3(glm::inverse(parentState.transform * glm::translate(state.translation) *
glm::mat3 axes = glm::mat3_cast(glm::quat());
glm::mat3 inverse = glm::mat3(glm::inverse(parentState.getTransform() * glm::translate(state.getDefaultTranslationInParentFrame()) *
joint.preTransform * glm::mat4_cast(joint.preRotation * joint.rotation)));
state.rotation = glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanSideways(),
state._rotationInParentFrame = glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanSideways(),
glm::normalize(inverse * axes[2])) * glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanForward(),
glm::normalize(inverse * axes[0])) * joint.rotation;
}
@ -225,7 +275,7 @@ void SkeletonModel::maybeUpdateEyeRotation(const JointState& parentState, const
}
void SkeletonModel::renderJointConstraints(int jointIndex) {
if (jointIndex == -1) {
if (jointIndex == -1 || jointIndex >= _jointStates.size()) {
return;
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
@ -235,11 +285,11 @@ void SkeletonModel::renderJointConstraints(int jointIndex) {
do {
const FBXJoint& joint = geometry.joints.at(jointIndex);
const JointState& jointState = _jointStates.at(jointIndex);
glm::vec3 position = extractTranslation(jointState.transform) + _translation;
glm::vec3 position = _rotation * jointState.getPosition() + _translation;
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glm::quat parentRotation = (joint.parentIndex == -1) ? _rotation : _jointStates.at(joint.parentIndex).combinedRotation;
glm::quat parentRotation = (joint.parentIndex == -1) ? _rotation : _rotation * _jointStates.at(joint.parentIndex).getRotation();
glm::vec3 rotationAxis = glm::axis(parentRotation);
glRotatef(glm::degrees(glm::angle(parentRotation)), rotationAxis.x, rotationAxis.y, rotationAxis.z);
float fanScale = directionSize * 0.75f;
@ -272,7 +322,7 @@ void SkeletonModel::renderJointConstraints(int jointIndex) {
}
glPopMatrix();
renderOrientationDirections(position, jointState.combinedRotation, directionSize);
renderOrientationDirections(position, _rotation * jointState.getRotation(), directionSize);
jointIndex = joint.parentIndex;
} while (jointIndex != -1 && geometry.joints.at(jointIndex).isFree);
@ -331,15 +381,127 @@ void SkeletonModel::setHandPosition(int jointIndex, const glm::vec3& position, c
// ik solution
glm::vec3 elbowPosition = shoulderPosition + glm::normalize(shoulderToWrist) * mid - tangent * height;
glm::vec3 forwardVector(rightHand ? -1.0f : 1.0f, 0.0f, 0.0f);
glm::quat shoulderRotation = rotationBetween(forwardVector, elbowPosition - shoulderPosition);
setJointRotation(shoulderJointIndex, shoulderRotation, true);
JointState& shoulderState = _jointStates[shoulderJointIndex];
shoulderState.setRotationFromBindFrame(shoulderRotation, PALM_PRIORITY);
setJointRotation(elbowJointIndex, rotationBetween(shoulderRotation * forwardVector,
wristPosition - elbowPosition) * shoulderRotation, true);
JointState& elbowState = _jointStates[elbowJointIndex];
elbowState.setRotationFromBindFrame(rotationBetween(shoulderRotation * forwardVector, wristPosition - elbowPosition) * shoulderRotation, PALM_PRIORITY);
setJointRotation(jointIndex, rotation, true);
JointState& handState = _jointStates[jointIndex];
handState.setRotationFromBindFrame(rotation, PALM_PRIORITY);
}
bool SkeletonModel::getLeftHandPosition(glm::vec3& position) const {
return getJointPositionInWorldFrame(getLeftHandJointIndex(), position);
}
bool SkeletonModel::getRightHandPosition(glm::vec3& position) const {
return getJointPositionInWorldFrame(getRightHandJointIndex(), position);
}
bool SkeletonModel::restoreLeftHandPosition(float fraction, float priority) {
return restoreJointPosition(getLeftHandJointIndex(), fraction, priority);
}
bool SkeletonModel::getLeftShoulderPosition(glm::vec3& position) const {
return getJointPositionInWorldFrame(getLastFreeJointIndex(getLeftHandJointIndex()), position);
}
float SkeletonModel::getLeftArmLength() const {
return getLimbLength(getLeftHandJointIndex());
}
bool SkeletonModel::restoreRightHandPosition(float fraction, float priority) {
return restoreJointPosition(getRightHandJointIndex(), fraction, priority);
}
bool SkeletonModel::getRightShoulderPosition(glm::vec3& position) const {
return getJointPositionInWorldFrame(getLastFreeJointIndex(getRightHandJointIndex()), position);
}
float SkeletonModel::getRightArmLength() const {
return getLimbLength(getRightHandJointIndex());
}
bool SkeletonModel::getHeadPosition(glm::vec3& headPosition) const {
return isActive() && getJointPositionInWorldFrame(_geometry->getFBXGeometry().headJointIndex, headPosition);
}
bool SkeletonModel::getNeckPosition(glm::vec3& neckPosition) const {
return isActive() && getJointPositionInWorldFrame(_geometry->getFBXGeometry().neckJointIndex, neckPosition);
}
bool SkeletonModel::getNeckParentRotationFromDefaultOrientation(glm::quat& neckParentRotation) const {
if (!isActive()) {
return false;
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
if (geometry.neckJointIndex == -1) {
return false;
}
int parentIndex = geometry.joints.at(geometry.neckJointIndex).parentIndex;
glm::quat worldFrameRotation;
if (getJointRotationInWorldFrame(parentIndex, worldFrameRotation)) {
neckParentRotation = worldFrameRotation * _jointStates[parentIndex].getFBXJoint().inverseDefaultRotation;
return true;
}
return false;
}
bool SkeletonModel::getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const {
if (!isActive()) {
return false;
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
if (getJointPositionInWorldFrame(geometry.leftEyeJointIndex, firstEyePosition) &&
getJointPositionInWorldFrame(geometry.rightEyeJointIndex, secondEyePosition)) {
return true;
}
// no eye joints; try to estimate based on head/neck joints
glm::vec3 neckPosition, headPosition;
if (getJointPositionInWorldFrame(geometry.neckJointIndex, neckPosition) &&
getJointPositionInWorldFrame(geometry.headJointIndex, headPosition)) {
const float EYE_PROPORTION = 0.6f;
glm::vec3 baseEyePosition = glm::mix(neckPosition, headPosition, EYE_PROPORTION);
glm::quat headRotation;
getJointRotationInWorldFrame(geometry.headJointIndex, headRotation);
const float EYES_FORWARD = 0.25f;
const float EYE_SEPARATION = 0.1f;
float headHeight = glm::distance(neckPosition, headPosition);
firstEyePosition = baseEyePosition + headRotation * glm::vec3(EYE_SEPARATION, 0.0f, EYES_FORWARD) * headHeight;
secondEyePosition = baseEyePosition + headRotation * glm::vec3(-EYE_SEPARATION, 0.0f, EYES_FORWARD) * headHeight;
return true;
}
return false;
}
void SkeletonModel::renderRagDoll() {
const int BALL_SUBDIVISIONS = 6;
glDisable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
glPushMatrix();
Application::getInstance()->loadTranslatedViewMatrix(_translation);
QVector<glm::vec3> points = _ragDoll.getPoints();
int numPoints = points.size();
float alpha = 0.3f;
float radius1 = 0.008f;
float radius2 = 0.01f;
for (int i = 0; i < numPoints; ++i) {
glPushMatrix();
// draw each point as a yellow sphere with a black border
glm::vec3 position = _rotation * points[i];
glTranslatef(position.x, position.y, position.z);
glColor4f(0.0f, 0.0f, 0.0f, alpha);
glutSolidSphere(radius2, BALL_SUBDIVISIONS, BALL_SUBDIVISIONS);
glColor4f(1.0f, 1.0f, 0.0f, alpha);
glutSolidSphere(radius1, BALL_SUBDIVISIONS, BALL_SUBDIVISIONS);
glPopMatrix();
}
glPopMatrix();
glEnable(GL_DEPTH_TEST);
glEnable(GL_LIGHTING);
}

View file

@ -13,6 +13,7 @@
#define hifi_SkeletonModel_h
#include "renderer/Model.h"
#include "renderer/RagDoll.h"
class Avatar;
@ -23,8 +24,12 @@ class SkeletonModel : public Model {
public:
SkeletonModel(Avatar* owningAvatar);
void setJointStates(QVector<JointState> states);
void simulate(float deltaTime, bool fullUpdate = true);
void simulateRagDoll(float deltaTime);
void updateShapePositions();
/// \param jointIndex index of hand joint
/// \param shapes[out] list in which is stored pointers to hand shapes
@ -34,9 +39,66 @@ public:
void getBodyShapes(QVector<const Shape*>& shapes) const;
void renderIKConstraints();
/// Returns the index of the left hand joint, or -1 if not found.
int getLeftHandJointIndex() const { return isActive() ? _geometry->getFBXGeometry().leftHandJointIndex : -1; }
/// Returns the index of the right hand joint, or -1 if not found.
int getRightHandJointIndex() const { return isActive() ? _geometry->getFBXGeometry().rightHandJointIndex : -1; }
/// Retrieve the position of the left hand
/// \return whether or not the position was found
bool getLeftHandPosition(glm::vec3& position) const;
/// Retrieve the position of the right hand
/// \return whether or not the position was found
bool getRightHandPosition(glm::vec3& position) const;
/// Restores some fraction of the default position of the left hand.
/// \param fraction the fraction of the default position to restore
/// \return whether or not the left hand joint was found
bool restoreLeftHandPosition(float fraction = 1.0f, float priority = 1.0f);
/// Gets the position of the left shoulder.
/// \return whether or not the left shoulder joint was found
bool getLeftShoulderPosition(glm::vec3& position) const;
/// Returns the extended length from the left hand to its last free ancestor.
float getLeftArmLength() const;
/// Restores some fraction of the default position of the right hand.
/// \param fraction the fraction of the default position to restore
/// \return whether or not the right hand joint was found
bool restoreRightHandPosition(float fraction = 1.0f, float priority = 1.0f);
/// Gets the position of the right shoulder.
/// \return whether or not the right shoulder joint was found
bool getRightShoulderPosition(glm::vec3& position) const;
/// Returns the extended length from the right hand to its last free ancestor.
float getRightArmLength() const;
/// Returns the position of the head joint.
/// \return whether or not the head was found
bool getHeadPosition(glm::vec3& headPosition) const;
/// Returns the position of the neck joint.
/// \return whether or not the neck was found
bool getNeckPosition(glm::vec3& neckPosition) const;
/// Returns the rotation of the neck joint's parent from default orientation
/// \return whether or not the neck was found
bool getNeckParentRotationFromDefaultOrientation(glm::quat& neckParentRotation) const;
/// Retrieve the positions of up to two eye meshes.
/// \return whether or not both eye meshes were found
bool getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
void renderRagDoll();
protected:
/// \param jointIndex index of joint in model
/// \param position position of joint in model-frame
void applyHandPosition(int jointIndex, const glm::vec3& position);
void applyPalmData(int jointIndex, PalmData& palm);
@ -44,16 +106,21 @@ protected:
/// Updates the state of the joint at the specified index.
virtual void updateJointState(int index);
virtual void maybeUpdateLeanRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
virtual void maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
virtual void maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
void maybeUpdateLeanRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
void maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
void maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
private:
void renderJointConstraints(int jointIndex);
/// \param jointIndex index of joint in model
/// \param position position of joint in model-frame
/// \param rotation rotation of joint in model-frame
void setHandPosition(int jointIndex, const glm::vec3& position, const glm::quat& rotation);
Avatar* _owningAvatar;
RagDoll _ragDoll;
};
#endif // hifi_SkeletonModel_h

View file

@ -50,7 +50,8 @@ void OculusManager::connect() {
_sensorDevice = *_hmdDevice->GetSensor();
_sensorFusion = new SensorFusion;
_sensorFusion->AttachToSensor(_sensorDevice);
_sensorFusion->SetPredictionEnabled(true);
HMDInfo info;
_hmdDevice->GetDeviceInfo(&info);
_stereoConfig.SetHMDInfo(info);
@ -81,7 +82,13 @@ void OculusManager::configureCamera(Camera& camera, int screenWidth, int screenH
void OculusManager::display(Camera& whichCamera) {
#ifdef HAVE_LIBOVR
Application::getInstance()->getGlowEffect()->prepare();
ApplicationOverlay& applicationOverlay = Application::getInstance()->getApplicationOverlay();
// We only need to render the overlays to a texture once, then we just render the texture as a quad
// PrioVR will only work if renderOverlay is called; calibration is connected to Application::renderingOverlay()
applicationOverlay.renderOverlay(true);
const bool displayOverlays = Menu::getInstance()->isOptionChecked(MenuOption::DisplayOculusOverlays);
Application::getInstance()->getGlowEffect()->prepare();
// render the left eye view to the left side of the screen
const StereoEyeParams& leftEyeParams = _stereoConfig.GetEyeRenderParams(StereoEye_Left);
@ -100,6 +107,10 @@ void OculusManager::display(Camera& whichCamera) {
Application::getInstance()->displaySide(whichCamera);
if (displayOverlays) {
applicationOverlay.displayOverlayTextureOculus(whichCamera);
}
// and the right eye to the right side
const StereoEyeParams& rightEyeParams = _stereoConfig.GetEyeRenderParams(StereoEye_Right);
glMatrixMode(GL_PROJECTION);
@ -115,6 +126,10 @@ void OculusManager::display(Camera& whichCamera) {
Application::getInstance()->displaySide(whichCamera);
if (displayOverlays) {
applicationOverlay.displayOverlayTextureOculus(whichCamera);
}
glPopMatrix();
// restore our normal viewport
@ -187,7 +202,7 @@ void OculusManager::reset() {
void OculusManager::getEulerAngles(float& yaw, float& pitch, float& roll) {
#ifdef HAVE_LIBOVR
_sensorFusion->GetOrientation().GetEulerAngles<Axis_Y, Axis_X, Axis_Z, Rotate_CCW, Handed_R>(&yaw, &pitch, &roll);
_sensorFusion->GetPredictedOrientation().GetEulerAngles<Axis_Y, Axis_X, Axis_Z, Rotate_CCW, Handed_R>(&yaw, &pitch, &roll);
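// the predicted orientation extrapolates the head pose slightly ahead in time
// (prediction is enabled in connect() above) to hide sensor-to-display latency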
#endif
}

View file

@ -63,7 +63,7 @@ static void setPalm(float deltaTime, int index) {
if (!Application::getInstance()->getJoystickManager()->getJoystickStates().isEmpty()) {
const JoystickState& state = Application::getInstance()->getJoystickManager()->getJoystickStates().at(0);
if (state.axes.size() >= 4 && state.buttons.size() >= 4) {
if (index == SIXENSE_CONTROLLER_ID_LEFT_HAND) {
if (index == LEFT_HAND_INDEX) {
palm->setControllerButtons(state.buttons.at(1) ? BUTTON_FWD : 0);
palm->setTrigger(state.buttons.at(0) ? 1.0f : 0.0f);
palm->setJoystick(state.axes.at(0), -state.axes.at(1));
@ -76,23 +76,24 @@ static void setPalm(float deltaTime, int index) {
}
}
// NOTE: this math is done in the world-frame with unnecessary complexity.
// TODO: transform this to stay in the model-frame.
glm::vec3 position;
glm::quat rotation;
Model* skeletonModel = &Application::getInstance()->getAvatar()->getSkeletonModel();
SkeletonModel* skeletonModel = &Application::getInstance()->getAvatar()->getSkeletonModel();
int jointIndex;
glm::quat inverseRotation = glm::inverse(Application::getInstance()->getAvatar()->getOrientation());
if (index == SIXENSE_CONTROLLER_ID_LEFT_HAND) {
if (index == LEFT_HAND_INDEX) {
jointIndex = skeletonModel->getLeftHandJointIndex();
skeletonModel->getJointRotation(jointIndex, rotation, true);
skeletonModel->getJointRotationInWorldFrame(jointIndex, rotation);
rotation = inverseRotation * rotation * glm::quat(glm::vec3(0.0f, PI_OVER_TWO, 0.0f));
} else {
jointIndex = skeletonModel->getRightHandJointIndex();
skeletonModel->getJointRotation(jointIndex, rotation, true);
skeletonModel->getJointRotationInWorldFrame(jointIndex, rotation);
rotation = inverseRotation * rotation * glm::quat(glm::vec3(0.0f, -PI_OVER_TWO, 0.0f));
}
skeletonModel->getJointPosition(jointIndex, position);
skeletonModel->getJointPositionInWorldFrame(jointIndex, position);
position = inverseRotation * (position - skeletonModel->getTranslation());
palm->setRawRotation(rotation);
@ -180,8 +181,8 @@ void PrioVR::update(float deltaTime) {
}
// convert the joysticks into palm data
setPalm(deltaTime, SIXENSE_CONTROLLER_ID_LEFT_HAND);
setPalm(deltaTime, SIXENSE_CONTROLLER_ID_RIGHT_HAND);
setPalm(deltaTime, LEFT_HAND_INDEX);
setPalm(deltaTime, RIGHT_HAND_INDEX);
#endif
}

View file

@ -120,7 +120,6 @@ void SixenseManager::update(float deltaTime) {
// Rotation of Palm
glm::quat rotation(data->rot_quat[3], -data->rot_quat[0], data->rot_quat[1], -data->rot_quat[2]);
rotation = glm::angleAxis(PI, glm::vec3(0.f, 1.f, 0.f)) * _orbRotation * rotation;
palm->setRawRotation(rotation);
// Compute current velocity from position change
glm::vec3 rawVelocity;
@ -130,7 +129,12 @@ void SixenseManager::update(float deltaTime) {
rawVelocity = glm::vec3(0.0f);
}
palm->setRawVelocity(rawVelocity); // meters/sec
palm->setRawPosition(position);
// Use a velocity sensitive filter to damp small motions and preserve large ones with
// no latency.
float velocityFilter = glm::clamp(1.0f - glm::length(rawVelocity), 0.0f, 1.0f);
palm->setRawPosition(palm->getRawPosition() * velocityFilter + position * (1.0f - velocityFilter));
palm->setRawRotation(safeMix(palm->getRawRotation(), rotation, 1.0f - velocityFilter));
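// e.g. at |rawVelocity| = 0.2 m/s the filter keeps 80% of the previous pose,
// damping jitter; at 1 m/s or more the raw data passes straight through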
// use the velocity to determine whether there's any movement (if the hand isn't new)
const float MOVEMENT_DISTANCE_THRESHOLD = 0.003f;

View file

@ -11,6 +11,8 @@
#include <glm/gtx/quaternion.hpp>
#include <FBXReader.h>
#include "InterfaceConfig.h"
#include "Menu.h"
#include "ModelTreeRenderer.h"
@ -34,8 +36,13 @@ ModelTreeRenderer::~ModelTreeRenderer() {
void ModelTreeRenderer::init() {
OctreeRenderer::init();
static_cast<ModelTree*>(_tree)->setFBXService(this);
}
void ModelTreeRenderer::setTree(Octree* newTree) {
OctreeRenderer::setTree(newTree);
static_cast<ModelTree*>(_tree)->setFBXService(this);
}
void ModelTreeRenderer::update() {
if (_tree) {
@ -48,13 +55,31 @@ void ModelTreeRenderer::render(RenderMode renderMode) {
OctreeRenderer::render(renderMode);
}
const FBXGeometry* ModelTreeRenderer::getGeometryForModel(const ModelItem& modelItem) {
const FBXGeometry* result = NULL;
Model* model = getModel(modelItem);
if (model) {
result = &model->getGeometry()->getFBXGeometry();
}
return result;
}
Model* ModelTreeRenderer::getModel(const ModelItem& modelItem) {
Model* model = NULL;
if (modelItem.isKnownID()) {
if (_knownModelsItemModels.find(modelItem.getID()) != _knownModelsItemModels.end()) {
model = _knownModelsItemModels[modelItem.getID()];
} else {
// Make sure we only create new models on the thread that owns the ModelTreeRenderer
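// (Qt::BlockingQueuedConnection parks this thread until the owning thread
// has executed getModel() and filled in the Q_RETURN_ARG result)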
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "getModel", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(Model*, model), Q_ARG(const ModelItem&, modelItem));
return model;
}
model = new Model();
model->init();
model->setURL(QUrl(modelItem.getModelURL()));
@ -64,6 +89,13 @@ Model* ModelTreeRenderer::getModel(const ModelItem& modelItem) {
if (_unknownModelsItemModels.find(modelItem.getCreatorTokenID()) != _unknownModelsItemModels.end()) {
model = _unknownModelsItemModels[modelItem.getCreatorTokenID()];
} else {
// Make sure we only create new models on the thread that owns the ModelTreeRenderer
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "getModel", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(Model*, model), Q_ARG(const ModelItem&, modelItem));
return model;
}
model = new Model();
model->init();
model->setURL(QUrl(modelItem.getModelURL()));
@ -91,7 +123,7 @@ void ModelTreeRenderer::renderElement(OctreeElement* element, RenderArgs* args)
if (!isShadowMode && displayElementProxy && numberOfModels > 0) {
glm::vec3 elementCenter = modelTreeElement->getAABox().calcCenter() * (float)TREE_SCALE;
glm::vec3 elementCenter = modelTreeElement->getAACube().calcCenter() * (float)TREE_SCALE;
float elementSize = modelTreeElement->getScale() * (float)TREE_SCALE;
glColor3f(1.0f, 0.0f, 0.0f);
glPushMatrix();
@ -157,9 +189,9 @@ void ModelTreeRenderer::renderElement(OctreeElement* element, RenderArgs* args)
for (uint16_t i = 0; i < numberOfModels; i++) {
ModelItem& modelItem = modelItems[i];
// render modelItem as points
AABox modelBox = modelItem.getAABox();
modelBox.scale(TREE_SCALE);
if (args->_viewFrustum->boxInFrustum(modelBox) != ViewFrustum::OUTSIDE) {
AACube modelCube = modelItem.getAACube();
modelCube.scale(TREE_SCALE);
if (args->_viewFrustum->cubeInFrustum(modelCube) != ViewFrustum::OUTSIDE) {
glm::vec3 position = modelItem.getPosition() * (float)TREE_SCALE;
float radius = modelItem.getRadius() * (float)TREE_SCALE;
float size = modelItem.getSize() * (float)TREE_SCALE;
@ -170,52 +202,122 @@ void ModelTreeRenderer::renderElement(OctreeElement* element, RenderArgs* args)
if (drawAsModel) {
glPushMatrix();
{
const float alpha = 1.0f;
Model* model = getModel(modelItem);
model->setScaleToFit(true, radius * 2.0f);
model->setSnapModelToCenter(true);
// set the rotation
glm::quat rotation = modelItem.getModelRotation();
model->setRotation(rotation);
// set the position
model->setTranslation(position);
// handle animations..
if (modelItem.hasAnimation()) {
if (!modelItem.jointsMapped()) {
QStringList modelJointNames = model->getJointNames();
modelItem.mapJoints(modelJointNames);
if (model) {
model->setScaleToFit(true, radius * 2.0f);
model->setSnapModelToCenter(true);
// set the rotation
glm::quat rotation = modelItem.getModelRotation();
model->setRotation(rotation);
// set the position
model->setTranslation(position);
// handle animations..
if (modelItem.hasAnimation()) {
if (!modelItem.jointsMapped()) {
QStringList modelJointNames = model->getJointNames();
modelItem.mapJoints(modelJointNames);
}
QVector<glm::quat> frameData = modelItem.getAnimationFrame();
for (int i = 0; i < frameData.size(); i++) {
model->setJointState(i, true, frameData[i]);
}
}
// make sure to simulate so everything gets set up correctly for rendering
model->simulate(0.0f);
// TODO: should we allow modelItems to have alpha on their models?
Model::RenderMode modelRenderMode = args->_renderMode == OctreeRenderer::SHADOW_RENDER_MODE
? Model::SHADOW_RENDER_MODE : Model::DEFAULT_RENDER_MODE;
if (modelItem.getGlowLevel() > 0.0f) {
Glower glower(modelItem.getGlowLevel());
if (model->isActive()) {
model->render(alpha, modelRenderMode);
} else {
// if we couldn't get a model, then just draw a sphere
glColor3ub(modelItem.getColor()[RED_INDEX],modelItem.getColor()[GREEN_INDEX],modelItem.getColor()[BLUE_INDEX]);
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glutSolidSphere(radius, 15, 15);
glPopMatrix();
}
} else {
if (model->isActive()) {
model->render(alpha, modelRenderMode);
} else {
// if we couldn't get a model, then just draw a sphere
glColor3ub(modelItem.getColor()[RED_INDEX],modelItem.getColor()[GREEN_INDEX],modelItem.getColor()[BLUE_INDEX]);
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glutSolidSphere(radius, 15, 15);
glPopMatrix();
}
}
QVector<glm::quat> frameData = modelItem.getAnimationFrame();
for (int i = 0; i < frameData.size(); i++) {
model->setJointState(i, true, frameData[i]);
if (!isShadowMode && displayModelBounds) {
glm::vec3 unRotatedMinimum = model->getUnscaledMeshExtents().minimum;
glm::vec3 unRotatedMaximum = model->getUnscaledMeshExtents().maximum;
glm::vec3 unRotatedExtents = unRotatedMaximum - unRotatedMinimum;
float width = unRotatedExtents.x;
float height = unRotatedExtents.y;
float depth = unRotatedExtents.z;
Extents rotatedExtents = model->getUnscaledMeshExtents();
calculateRotatedExtents(rotatedExtents, rotation);
glm::vec3 rotatedSize = rotatedExtents.maximum - rotatedExtents.minimum;
const glm::vec3& modelScale = model->getScale();
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
// draw the original bounding cube
glColor4f(1.0f, 1.0f, 0.0f, 1.0f);
glutWireCube(size);
// draw the rotated bounding cube
glColor4f(0.0f, 0.0f, 1.0f, 1.0f);
glPushMatrix();
glScalef(rotatedSize.x * modelScale.x, rotatedSize.y * modelScale.y, rotatedSize.z * modelScale.z);
glutWireCube(1.0);
glPopMatrix();
// draw the model relative bounding box
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glScalef(width * modelScale.x, height * modelScale.y, depth * modelScale.z);
glColor3f(0.0f, 1.0f, 0.0f);
glutWireCube(1.0);
glPopMatrix();
}
}
// make sure to simulate so everything gets set up correctly for rendering
model->simulate(0.0f);
// TODO: should we allow modelItems to have alpha on their models?
Model::RenderMode modelRenderMode = args->_renderMode == OctreeRenderer::SHADOW_RENDER_MODE
? Model::SHADOW_RENDER_MODE : Model::DEFAULT_RENDER_MODE;
model->render(alpha, modelRenderMode);
if (!isShadowMode && displayModelBounds) {
glColor3f(0.0f, 1.0f, 0.0f);
} else {
// if we couldn't get a model, then just draw a sphere
glColor3ub(modelItem.getColor()[RED_INDEX],modelItem.getColor()[GREEN_INDEX],modelItem.getColor()[BLUE_INDEX]);
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glutWireCube(size);
glutSolidSphere(radius, 15, 15);
glPopMatrix();
}
}
glPopMatrix();
} else {
glColor3ub(modelItem.getColor()[RED_INDEX],modelItem.getColor()[GREEN_INDEX],modelItem.getColor()[BLUE_INDEX]);
//glColor3ub(modelItem.getColor()[RED_INDEX],modelItem.getColor()[GREEN_INDEX],modelItem.getColor()[BLUE_INDEX]);
glColor3f(1.0f, 0.0f, 0.0f);
glPushMatrix();
glTranslatef(position.x, position.y, position.z);
glutSolidSphere(radius, 15, 15);

View file

@ -26,7 +26,7 @@
#include "renderer/Model.h"
// Generic client side Octree renderer class.
class ModelTreeRenderer : public OctreeRenderer {
class ModelTreeRenderer : public OctreeRenderer, public ModelItemFBXService {
public:
ModelTreeRenderer();
virtual ~ModelTreeRenderer();
@ -38,6 +38,7 @@ public:
virtual void renderElement(OctreeElement* element, RenderArgs* args);
virtual float getSizeScale() const;
virtual int getBoundaryLevelAdjust() const;
virtual void setTree(Octree* newTree);
void update();
@ -48,6 +49,8 @@ public:
virtual void init();
virtual void render(RenderMode renderMode = DEFAULT_RENDER_MODE);
virtual const FBXGeometry* getGeometryForModel(const ModelItem& modelItem);
protected:
Model* getModel(const ModelItem& modelItem);
QMap<uint32_t, Model*> _knownModelsItemModels;

View file

@ -127,5 +127,9 @@ void ParticleTreeRenderer::renderElement(OctreeElement* element, RenderArgs* arg
}
void ParticleTreeRenderer::processEraseMessage(const QByteArray& dataByteArray, const SharedNodePointer& sourceNode) {
static_cast<ParticleTree*>(_tree)->processEraseMessage(dataByteArray, sourceNode);
if (_tree) {
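// the erase message may arrive off the renderer's thread, so guard the tree
// with its write lock before mutating it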
_tree->lockForWrite();
static_cast<ParticleTree*>(_tree)->processEraseMessage(dataByteArray, sourceNode);
_tree->unlock();
}
}

View file

@ -14,6 +14,8 @@
#include <QOpenGLFramebufferObject>
#include <PerfStat.h>
#include "Application.h"
#include "GlowEffect.h"
#include "ProgramObject.h"
@ -119,6 +121,8 @@ static void maybeRelease(QOpenGLFramebufferObject* fbo) {
}
QOpenGLFramebufferObject* GlowEffect::render(bool toTexture) {
PerformanceTimer perfTimer("paintGL/glowEffect");
QOpenGLFramebufferObject* primaryFBO = Application::getInstance()->getTextureCache()->getPrimaryFramebufferObject();
primaryFBO->release();
glBindTexture(GL_TEXTURE_2D, primaryFBO->texture());
@ -139,8 +143,7 @@ QOpenGLFramebufferObject* GlowEffect::render(bool toTexture) {
if (_isEmpty && _renderMode != DIFFUSE_ADD_MODE) {
// copy the primary to the screen
if (QOpenGLFramebufferObject::hasOpenGLFramebufferBlit()) {
QOpenGLFramebufferObject::blitFramebuffer(destFBO, primaryFBO);
QOpenGLFramebufferObject::blitFramebuffer(destFBO, primaryFBO);
} else {
maybeBind(destFBO);
glEnable(GL_TEXTURE_2D);

View file

@ -0,0 +1,101 @@
//
// JointState.cpp
// interface/src/renderer
//
// Created by Andrzej Kapolka on 10/18/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <glm/gtx/norm.hpp>
//#include <GeometryUtil.h>
#include <SharedUtil.h>
#include "JointState.h"
JointState::JointState() :
_animationPriority(0.0f),
_fbxJoint(NULL) {
}
void JointState::setFBXJoint(const FBXJoint* joint) {
assert(joint != NULL);
_rotationInParentFrame = joint->rotation;
// NOTE: JointState does not own the FBXJoint to which it points.
_fbxJoint = joint;
}
void JointState::copyState(const JointState& state) {
_rotationInParentFrame = state._rotationInParentFrame;
_transform = state._transform;
_rotation = extractRotation(_transform);
_animationPriority = state._animationPriority;
// DO NOT copy _fbxJoint
}
void JointState::computeTransform(const glm::mat4& parentTransform) {
glm::quat modifiedRotation = _fbxJoint->preRotation * _rotationInParentFrame * _fbxJoint->postRotation;
glm::mat4 modifiedTransform = _fbxJoint->preTransform * glm::mat4_cast(modifiedRotation) * _fbxJoint->postTransform;
_transform = parentTransform * glm::translate(_fbxJoint->translation) * modifiedTransform;
_rotation = extractRotation(_transform);
}
glm::quat JointState::getRotationFromBindToModelFrame() const {
return _rotation * _fbxJoint->inverseBindRotation;
}
void JointState::restoreRotation(float fraction, float priority) {
assert(_fbxJoint != NULL);
if (priority == _animationPriority || _animationPriority == 0.0f) {
_rotationInParentFrame = safeMix(_rotationInParentFrame, _fbxJoint->rotation, fraction);
_animationPriority = 0.0f;
}
}
void JointState::setRotationFromBindFrame(const glm::quat& rotation, float priority) {
assert(_fbxJoint != NULL);
if (priority >= _animationPriority) {
// rotation is from bind- to model-frame
_rotationInParentFrame = _rotationInParentFrame * glm::inverse(_rotation) * rotation * glm::inverse(_fbxJoint->inverseBindRotation);
_animationPriority = priority;
}
}
void JointState::clearTransformTranslation() {
_transform[3][0] = 0.0f;
_transform[3][1] = 0.0f;
_transform[3][2] = 0.0f;
}
void JointState::setRotation(const glm::quat& rotation, bool constrain, float priority) {
applyRotationDelta(rotation * glm::inverse(_rotation), constrain, priority);
}
void JointState::applyRotationDelta(const glm::quat& delta, bool constrain, float priority) {
// NOTE: delta is in jointParent-frame
assert(_fbxJoint != NULL);
if (priority < _animationPriority) {
return;
}
_animationPriority = priority;
if (!constrain || (_fbxJoint->rotationMin == glm::vec3(-PI, -PI, -PI) &&
_fbxJoint->rotationMax == glm::vec3(PI, PI, PI))) {
// no constraints
_rotationInParentFrame = _rotationInParentFrame * glm::inverse(_rotation) * delta * _rotation;
_rotation = delta * _rotation;
return;
}
glm::quat targetRotation = delta * _rotation;
glm::vec3 eulers = safeEulerAngles(_rotationInParentFrame * glm::inverse(_rotation) * targetRotation);
glm::quat newRotation = glm::quat(glm::clamp(eulers, _fbxJoint->rotationMin, _fbxJoint->rotationMax));
_rotation = _rotation * glm::inverse(_rotationInParentFrame) * newRotation;
_rotationInParentFrame = newRotation;
}
const glm::vec3& JointState::getDefaultTranslationInParentFrame() const {
assert(_fbxJoint != NULL);
return _fbxJoint->translation;
}
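
Throughout this file, writes are gated on _animationPriority: a delta is ignored when a higher-priority animation owns the joint, and restoreRotation only resets joints whose priority the caller holds (or that nobody owns). A standalone sketch of that ownership rule, with the rotation math stripped out:

// Standalone sketch of the priority gate used by JointState; rotation math omitted.
struct PrioritizedValue {
    PrioritizedValue() : value(0.0f), priority(0.0f) {}

    float value;
    float priority;   // 0.0f means no animation owns this joint

    // mirrors applyRotationDelta/setRotationFromBindFrame: equal or higher priority wins
    bool trySet(float newValue, float newPriority) {
        if (newPriority < priority) {
            return false;   // a stronger animation owns the joint; ignore the write
        }
        value = newValue;
        priority = newPriority;
        return true;
    }

    // mirrors restoreRotation: only the current owner (or nobody) may reset
    void tryRestore(float defaultValue, float callerPriority) {
        if (callerPriority == priority || priority == 0.0f) {
            value = defaultValue;
            priority = 0.0f;   // release ownership
        }
    }
};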

View file

@ -0,0 +1,66 @@
//
// JointState.h
// interface/src/renderer
//
// Created by Andrzej Kapolka on 10/18/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_JointState_h
#define hifi_JointState_h
#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>
#include <glm/gtx/transform.hpp>
#include <FBXReader.h>
class JointState {
public:
JointState();
void setFBXJoint(const FBXJoint* joint);
const FBXJoint& getFBXJoint() const { return *_fbxJoint; }
void copyState(const JointState& state);
void computeTransform(const glm::mat4& parentTransform);
const glm::mat4& getTransform() const { return _transform; }
glm::quat getRotation() const { return _rotation; }
glm::vec3 getPosition() const { return extractTranslation(_transform); }
/// \return rotation from bind to model frame
glm::quat getRotationFromBindToModelFrame() const;
/// \param rotation rotation of joint in model-frame
void setRotation(const glm::quat& rotation, bool constrain, float priority);
/// \param delta is in the jointParent-frame
void applyRotationDelta(const glm::quat& delta, bool constrain = true, float priority = 1.0f);
const glm::vec3& getDefaultTranslationInParentFrame() const;
void restoreRotation(float fraction, float priority);
/// \param rotation is from bind- to model-frame
/// computes and sets new _rotationInParentFrame
/// NOTE: the JointState's model-frame transform/rotation are NOT updated!
void setRotationFromBindFrame(const glm::quat& rotation, float priority);
void clearTransformTranslation();
glm::quat _rotationInParentFrame; // joint- to parentJoint-frame
float _animationPriority; // the priority of the animation affecting this joint
private:
glm::mat4 _transform; // joint- to model-frame
glm::quat _rotation; // joint- to model-frame
const FBXJoint* _fbxJoint; // JointState does NOT own its FBXJoint
};
#endif // hifi_JointState_h

File diff suppressed because it is too large

View file

@ -22,6 +22,7 @@
#include "GeometryCache.h"
#include "InterfaceConfig.h"
#include "JointState.h"
#include "ProgramObject.h"
#include "TextureCache.h"
@ -79,7 +80,7 @@ public:
enum RenderMode { DEFAULT_RENDER_MODE, SHADOW_RENDER_MODE, DIFFUSE_RENDER_MODE, NORMAL_RENDER_MODE };
bool render(float alpha = 1.0f, RenderMode mode = DEFAULT_RENDER_MODE);
bool render(float alpha = 1.0f, RenderMode mode = DEFAULT_RENDER_MODE, bool receiveShadows = true);
/// Sets the URL of the model to render.
/// \param fallback the URL of a fallback model to render if the requested model fails to load
@ -113,13 +114,7 @@ public:
bool getJointState(int index, glm::quat& rotation) const;
/// Sets the joint state at the specified index.
void setJointState(int index, bool valid, const glm::quat& rotation = glm::quat());
/// Returns the index of the left hand joint, or -1 if not found.
int getLeftHandJointIndex() const { return isActive() ? _geometry->getFBXGeometry().leftHandJointIndex : -1; }
/// Returns the index of the right hand joint, or -1 if not found.
int getRightHandJointIndex() const { return isActive() ? _geometry->getFBXGeometry().rightHandJointIndex : -1; }
void setJointState(int index, bool valid, const glm::quat& rotation = glm::quat(), float priority = 1.0f);
/// Returns the index of the parent of the indexed joint, or -1 if not found.
int getParentJointIndex(int jointIndex) const;
@ -127,68 +122,14 @@ public:
/// Returns the index of the last free ancestor of the indexed joint, or -1 if not found.
int getLastFreeJointIndex(int jointIndex) const;
/// Returns the position of the head joint.
/// \return whether or not the head was found
bool getHeadPosition(glm::vec3& headPosition) const;
/// Returns the position of the neck joint.
/// \return whether or not the neck was found
bool getNeckPosition(glm::vec3& neckPosition) const;
/// Returns the rotation of the neck joint.
/// \return whether or not the neck was found
bool getNeckRotation(glm::quat& neckRotation) const;
/// Returns the rotation of the neck joint's parent.
/// \return whether or not the neck was found
bool getNeckParentRotation(glm::quat& neckRotation) const;
/// Retrieve the positions of up to two eye meshes.
/// \return whether or not both eye meshes were found
bool getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
/// Retrieve the position of the left hand
/// \return whether or not the position was found
bool getLeftHandPosition(glm::vec3& position) const;
/// Retrieve the rotation of the left hand
/// \return whether or not the rotation was found
bool getLeftHandRotation(glm::quat& rotation) const;
/// Retrieve the position of the right hand
/// \return whether or not the position was found
bool getRightHandPosition(glm::vec3& position) const;
/// Retrieve the rotation of the right hand
/// \return whether or not the rotation was found
bool getRightHandRotation(glm::quat& rotation) const;
/// Restores some percentage of the default position of the left hand.
/// \param percent the percentage of the default position to restore
/// \return whether or not the left hand joint was found
bool restoreLeftHandPosition(float percent = 1.0f);
/// Gets the position of the left shoulder.
/// \return whether or not the left shoulder joint was found
bool getLeftShoulderPosition(glm::vec3& position) const;
/// Returns the extended length from the left hand to its last free ancestor.
float getLeftArmLength() const;
/// Restores some percentage of the default position of the right hand.
/// \param percent the percentage of the default position to restore
/// \return whether or not the right hand joint was found
bool restoreRightHandPosition(float percent = 1.0f);
/// Gets the position of the right shoulder.
/// \return whether or not the right shoulder joint was found
bool getRightShoulderPosition(glm::vec3& position) const;
/// Returns the extended length from the right hand to its first free ancestor.
float getRightArmLength() const;
bool getJointPositionInWorldFrame(int jointIndex, glm::vec3& position) const;
bool getJointRotationInWorldFrame(int jointIndex, glm::quat& rotation) const;
bool getJointCombinedRotation(int jointIndex, glm::quat& rotation) const;
/// \param jointIndex index of joint in model structure
/// \param position[out] position of joint in model-frame
/// \return true if joint exists
bool getJointPosition(int jointIndex, glm::vec3& position) const;
bool getJointRotation(int jointIndex, glm::quat& rotation, bool fromBind = false) const;
QStringList getJointNames() const;
@ -199,7 +140,7 @@ public:
void clearShapes();
void rebuildShapes();
void resetShapePositions();
void updateShapePositions();
virtual void updateShapePositions();
void renderJointCollisionShapes(float alpha);
void renderBoundingCollisionShapes(float alpha);
@ -248,15 +189,6 @@ protected:
bool _snappedToCenter; /// are we currently snapped to center
int _rootIndex;
class JointState {
public:
glm::vec3 translation; // translation relative to parent
glm::quat rotation; // rotation relative to parent
glm::mat4 transform; // rotation to world frame + translation in model frame
glm::quat combinedRotation; // rotation from joint local to world frame
bool animationDisabled; // if true, animations do not affect this joint
};
bool _shapesAreDirty;
QVector<JointState> _jointStates;
QVector<Shape*> _jointShapes;
@ -274,6 +206,8 @@ protected:
// returns 'true' if needs fullUpdate after geometry change
bool updateGeometry();
virtual void setJointStates(QVector<JointState> states);
void setScaleInternal(const glm::vec3& scale);
void scaleToFit();
@ -284,29 +218,28 @@ protected:
/// Updates the state of the joint at the specified index.
virtual void updateJointState(int index);
virtual void maybeUpdateLeanRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
virtual void maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
virtual void maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
bool setJointPosition(int jointIndex, const glm::vec3& translation, const glm::quat& rotation = glm::quat(),
/// \param jointIndex index of joint in model structure
/// \param position position of joint in model-frame
/// \param rotation rotation of joint in model-frame
/// \param useRotation false if rotation should be ignored
/// \param lastFreeIndex
/// \param allIntermediatesFree
/// \param alignment
/// \return true if joint exists
bool setJointPosition(int jointIndex, const glm::vec3& position, const glm::quat& rotation = glm::quat(),
bool useRotation = false, int lastFreeIndex = -1, bool allIntermediatesFree = false,
const glm::vec3& alignment = glm::vec3(0.0f, -1.0f, 0.0f));
bool setJointRotation(int jointIndex, const glm::quat& rotation, bool fromBind = false);
void setJointTranslation(int jointIndex, const glm::vec3& translation);
const glm::vec3& alignment = glm::vec3(0.0f, -1.0f, 0.0f), float priority = 1.0f);
/// Restores the indexed joint to its default position.
/// \param percent the percentage of the default position to apply (i.e., 0.25f to slerp one fourth of the way to
/// \param fraction the fraction of the default position to apply (i.e., 0.25f to slerp one fourth of the way to
/// the original position
/// \return true if the joint was found
bool restoreJointPosition(int jointIndex, float percent = 1.0f);
bool restoreJointPosition(int jointIndex, float fraction = 1.0f, float priority = 0.0f);
/// Computes and returns the extended length of the limb terminating at the specified joint and starting at the joint's
/// first free ancestor.
float getLimbLength(int jointIndex) const;
void applyRotationDelta(int jointIndex, const glm::quat& delta, bool constrain = true);
void computeBoundingShape(const FBXGeometry& geometry);
private:
@ -315,7 +248,7 @@ private:
void applyNextGeometry();
void deleteGeometry();
void renderMeshes(float alpha, RenderMode mode, bool translucent);
void renderMeshes(float alpha, RenderMode mode, bool translucent, bool receiveShadows);
QVector<JointState> createJointStates(const FBXGeometry& geometry);
QSharedPointer<NetworkGeometry> _baseGeometry; ///< reference required to prevent collection of base
@ -344,15 +277,47 @@ private:
static ProgramObject _normalMapProgram;
static ProgramObject _specularMapProgram;
static ProgramObject _normalSpecularMapProgram;
static ProgramObject _shadowMapProgram;
static ProgramObject _shadowNormalMapProgram;
static ProgramObject _shadowSpecularMapProgram;
static ProgramObject _shadowNormalSpecularMapProgram;
static ProgramObject _cascadedShadowMapProgram;
static ProgramObject _cascadedShadowNormalMapProgram;
static ProgramObject _cascadedShadowSpecularMapProgram;
static ProgramObject _cascadedShadowNormalSpecularMapProgram;
static ProgramObject _shadowProgram;
static ProgramObject _skinProgram;
static ProgramObject _skinNormalMapProgram;
static ProgramObject _skinSpecularMapProgram;
static ProgramObject _skinNormalSpecularMapProgram;
static ProgramObject _skinShadowMapProgram;
static ProgramObject _skinShadowNormalMapProgram;
static ProgramObject _skinShadowSpecularMapProgram;
static ProgramObject _skinShadowNormalSpecularMapProgram;
static ProgramObject _skinCascadedShadowMapProgram;
static ProgramObject _skinCascadedShadowNormalMapProgram;
static ProgramObject _skinCascadedShadowSpecularMapProgram;
static ProgramObject _skinCascadedShadowNormalSpecularMapProgram;
static ProgramObject _skinShadowProgram;
static int _normalMapTangentLocation;
static int _normalSpecularMapTangentLocation;
static int _shadowNormalMapTangentLocation;
static int _shadowNormalSpecularMapTangentLocation;
static int _cascadedShadowNormalMapTangentLocation;
static int _cascadedShadowNormalSpecularMapTangentLocation;
static int _cascadedShadowMapDistancesLocation;
static int _cascadedShadowNormalMapDistancesLocation;
static int _cascadedShadowSpecularMapDistancesLocation;
static int _cascadedShadowNormalSpecularMapDistancesLocation;
class SkinLocations {
public:
@ -360,15 +325,25 @@ private:
int clusterIndices;
int clusterWeights;
int tangent;
int shadowDistances;
};
static SkinLocations _skinLocations;
static SkinLocations _skinNormalMapLocations;
static SkinLocations _skinSpecularMapLocations;
static SkinLocations _skinNormalSpecularMapLocations;
static SkinLocations _skinShadowMapLocations;
static SkinLocations _skinShadowNormalMapLocations;
static SkinLocations _skinShadowSpecularMapLocations;
static SkinLocations _skinShadowNormalSpecularMapLocations;
static SkinLocations _skinCascadedShadowMapLocations;
static SkinLocations _skinCascadedShadowNormalMapLocations;
static SkinLocations _skinCascadedShadowSpecularMapLocations;
static SkinLocations _skinCascadedShadowNormalSpecularMapLocations;
static SkinLocations _skinShadowLocations;
static void initSkinProgram(ProgramObject& program, SkinLocations& locations, int specularTextureUnit = 1);
static void initSkinProgram(ProgramObject& program, SkinLocations& locations,
int specularTextureUnit = 1, int shadowTextureUnit = 1);
};
Q_DECLARE_METATYPE(QPointer<Model>)
@ -381,6 +356,9 @@ class AnimationHandle : public QObject {
public:
void setRole(const QString& role) { _role = role; }
const QString& getRole() const { return _role; }
void setURL(const QUrl& url);
const QUrl& getURL() const { return _url; }
@ -393,9 +371,30 @@ public:
void setLoop(bool loop) { _loop = loop; }
bool getLoop() const { return _loop; }
void setHold(bool hold) { _hold = hold; }
bool getHold() const { return _hold; }
void setStartAutomatically(bool startAutomatically);
bool getStartAutomatically() const { return _startAutomatically; }
void setFirstFrame(float firstFrame) { _firstFrame = firstFrame; }
float getFirstFrame() const { return _firstFrame; }
void setLastFrame(float lastFrame) { _lastFrame = lastFrame; }
float getLastFrame() const { return _lastFrame; }
void setMaskedJoints(const QStringList& maskedJoints);
const QStringList& getMaskedJoints() const { return _maskedJoints; }
void setRunning(bool running);
bool isRunning() const { return _running; }
signals:
void runningChanged(bool running);
public slots:
void start() { setRunning(true); }
void stop() { setRunning(false); }
@ -406,14 +405,22 @@ private:
AnimationHandle(Model* model);
void simulate(float deltaTime);
void applyFrame(float frameIndex);
void replaceMatchingPriorities(float newPriority);
Model* _model;
WeakAnimationHandlePointer _self;
AnimationPointer _animation;
QString _role;
QUrl _url;
float _fps;
float _priority;
bool _loop;
bool _hold;
bool _startAutomatically;
float _firstFrame;
float _lastFrame;
QStringList _maskedJoints;
bool _running;
QVector<int> _jointMappings;
float _frameIndex;
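
AnimationHandle is a thin property object: configure role, URL, loop behavior, frame range, and masked joints, then drive it with the start()/stop() slots. A hedged usage sketch follows; createAnimationHandle() and the AnimationHandlePointer typedef are assumptions not shown in this diff, while the setters appear in the header above:

// Hypothetical usage; createAnimationHandle() and AnimationHandlePointer are assumed.
AnimationHandlePointer handle = model->createAnimationHandle();   // assumed factory
handle->setRole("idle");                              // tag used to group animations
handle->setURL(QUrl("http://example.com/idle.fbx"));
handle->setLoop(true);                                // wrap from lastFrame back to firstFrame
handle->setFirstFrame(0.0f);
handle->setLastFrame(100.0f);
handle->setMaskedJoints(QStringList() << "LeftHand" << "RightHand");  // joints to leave alone
handle->start();                                      // slot, equivalent to setRunning(true)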

View file

@ -0,0 +1,167 @@
//
// RagDoll.cpp
// interface/src/avatar
//
// Created by Andrew Meadows 2014.05.30
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>
#include <glm/gtx/transform.hpp>
#include <CollisionInfo.h>
#include <SharedUtil.h>
#include <CapsuleShape.h>
#include <SphereShape.h>
#include "RagDoll.h"
// ----------------------------------------------------------------------------
// FixedConstraint
// ----------------------------------------------------------------------------
FixedConstraint::FixedConstraint(glm::vec3* point, const glm::vec3& anchor) : _point(point), _anchor(anchor) {
}
float FixedConstraint::enforce() {
assert(_point != NULL);
float distance = glm::distance(_anchor, *_point);
*_point = _anchor;
return distance;
}
void FixedConstraint::setPoint(glm::vec3* point) {
_point = point;
}
void FixedConstraint::setAnchor(const glm::vec3& anchor) {
_anchor = anchor;
}
// ----------------------------------------------------------------------------
// DistanceConstraint
// ----------------------------------------------------------------------------
DistanceConstraint::DistanceConstraint(glm::vec3* startPoint, glm::vec3* endPoint) : _distance(-1.0f) {
_points[0] = startPoint;
_points[1] = endPoint;
_distance = glm::distance(*(_points[0]), *(_points[1]));
}
DistanceConstraint::DistanceConstraint(const DistanceConstraint& other) {
_distance = other._distance;
_points[0] = other._points[0];
_points[1] = other._points[1];
}
void DistanceConstraint::setDistance(float distance) {
_distance = fabsf(distance);
}
float DistanceConstraint::enforce() {
float newDistance = glm::distance(*(_points[0]), *(_points[1]));
glm::vec3 direction(0.0f, 1.0f, 0.0f);
if (newDistance > EPSILON) {
direction = (*(_points[0]) - *(_points[1])) / newDistance;
}
glm::vec3 center = 0.5f * (*(_points[0]) + *(_points[1]));
*(_points[0]) = center + (0.5f * _distance) * direction;
*(_points[1]) = center - (0.5f * _distance) * direction;
return glm::abs(newDistance - _distance);
}
void DistanceConstraint::updateProxyShape(Shape* shape, const glm::quat& rotation, const glm::vec3& translation) const {
if (!shape) {
return;
}
switch (shape->getType()) {
case Shape::SPHERE_SHAPE: {
// sphere collides at endPoint
SphereShape* sphere = static_cast<SphereShape*>(shape);
sphere->setPosition(translation + rotation * (*_points[1]));
}
break;
case Shape::CAPSULE_SHAPE: {
// capsule collides from startPoint to endPoint
CapsuleShape* capsule = static_cast<CapsuleShape*>(shape);
capsule->setEndPoints(translation + rotation * (*_points[0]), translation + rotation * (*_points[1]));
}
break;
default:
break;
}
}
// ----------------------------------------------------------------------------
// RagDoll
// ----------------------------------------------------------------------------
RagDoll::RagDoll() {
}
RagDoll::~RagDoll() {
clear();
}
void RagDoll::init(const QVector<JointState>& states) {
clear();
const int numStates = states.size();
_points.reserve(numStates);
for (int i = 0; i < numStates; ++i) {
const JointState& state = states[i];
_points.push_back(state.getPosition());
int parentIndex = state.getFBXJoint().parentIndex;
assert(parentIndex < i);
if (parentIndex == -1) {
FixedConstraint* anchor = new FixedConstraint(&(_points[i]), glm::vec3(0.0f));
_constraints.push_back(anchor);
} else {
DistanceConstraint* stick = new DistanceConstraint(&(_points[i]), &(_points[parentIndex]));
_constraints.push_back(stick);
}
}
}
/// Delete all data.
void RagDoll::clear() {
int numConstraints = _constraints.size();
for (int i = 0; i < numConstraints; ++i) {
delete _constraints[i];
}
_constraints.clear();
_points.clear();
}
float RagDoll::slaveToSkeleton(const QVector<JointState>& states, float fraction) {
const int numStates = states.size();
assert(numStates == _points.size());
fraction = glm::clamp(fraction, 0.0f, 1.0f);
float maxDistance = 0.0f;
for (int i = 0; i < numStates; ++i) {
glm::vec3 oldPoint = _points[i];
_points[i] = (1.0f - fraction) * _points[i] + fraction * states[i].getPosition();
maxDistance = glm::max(maxDistance, glm::distance(oldPoint, _points[i]));
}
return maxDistance;
}
float RagDoll::enforceConstraints() {
float maxDistance = 0.0f;
const int numConstraints = _constraints.size();
for (int i = 0; i < numConstraints; ++i) {
// enforce() is virtual, so call through the base pointer; downcasting the
// FixedConstraint anchors to DistanceConstraint would be undefined behavior
maxDistance = glm::max(maxDistance, _constraints[i]->enforce());
}
return maxDistance;
}
void RagDoll::updateShapes(const QVector<Shape*>& shapes, const glm::quat& rotation, const glm::vec3& translation) const {
int numShapes = shapes.size();
int numConstraints = _constraints.size();
for (int i = 0; i < numShapes && i < numConstraints; ++i) {
_constraints[i]->updateProxyShape(shapes[i], rotation, translation);
}
}
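
enforceConstraints() returns the largest correction applied in one pass, which makes the intended outer loop clear: blend the points toward the skeleton with slaveToSkeleton(), then iterate constraint passes until the corrections die down. A sketch of that relaxation loop; the tolerance and iteration cap are assumptions:

// Sketch of a position-based relaxation step driving RagDoll (tolerance/cap assumed).
void simulateRagDollStep(RagDoll& doll, const QVector<JointState>& states) {
    const float BLEND_FRACTION = 0.5f;   // how hard to pull points toward the skeleton
    const float TOLERANCE = 0.001f;      // stop once corrections get this small
    const int MAX_ITERATIONS = 16;       // safety cap on the relaxation passes

    doll.slaveToSkeleton(states, BLEND_FRACTION);
    for (int i = 0; i < MAX_ITERATIONS; ++i) {
        // enforceConstraints() reports the largest point movement this pass
        if (doll.enforceConstraints() < TOLERANCE) {
            break;   // converged
        }
    }
}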

View file

@ -0,0 +1,95 @@
//
// RagDoll.h
// interface/src/avatar
//
// Created by Andrew Meadows 2014.05.30
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_RagDoll_h
#define hifi_RagDoll_h
#include "renderer/Model.h"
class Shape;
class Constraint {
public:
Constraint() {}
virtual ~Constraint() {}
/// Enforce constraint by moving relevant points.
/// \return max distance of point movement
virtual float enforce() = 0;
/// \param shape pointer to shape that will be this Constraint's collision proxy
/// \param rotation rotation into shape's collision frame
/// \param translation translation into shape's collision frame
/// Moves the shape such that it will collide at this constraint's position
virtual void updateProxyShape(Shape* shape, const glm::quat& rotation, const glm::vec3& translation) const {}
protected:
int _type;
};
class FixedConstraint : public Constraint {
public:
FixedConstraint(glm::vec3* point, const glm::vec3& anchor);
float enforce();
void setPoint(glm::vec3* point);
void setAnchor(const glm::vec3& anchor);
private:
glm::vec3* _point;
glm::vec3 _anchor;
};
class DistanceConstraint : public Constraint {
public:
DistanceConstraint(glm::vec3* startPoint, glm::vec3* endPoint);
DistanceConstraint(const DistanceConstraint& other);
float enforce();
void setDistance(float distance);
void updateProxyShape(Shape* shape, const glm::quat& rotation, const glm::vec3& translation) const;
private:
float _distance;
glm::vec3* _points[2];
};
class RagDoll {
public:
RagDoll();
virtual ~RagDoll();
/// Create points and constraints based on the topology of a collection of joints.
/// \param states list of connected joint states
void init(const QVector<JointState>& states);
/// Delete all data.
void clear();
/// \param states list of joint states
/// \param fraction range from 0.0 (no movement) to 1.0 (use joint locations)
/// \return max distance of point movement
float slaveToSkeleton(const QVector<JointState>& states, float fraction);
/// Enforce constraints.
/// \return max distance of point movement
float enforceConstraints();
const QVector<glm::vec3>& getPoints() const { return _points; }
/// \param shapes list of shapes to be updated with new positions
/// \param rotation rotation into shapes' collision frame
/// \param translation translation into shapes' collision frame
void updateShapes(const QVector<Shape*>& shapes, const glm::quat& rotation, const glm::vec3& translation) const;
private:
QVector<Constraint*> _constraints;
QVector<glm::vec3> _points;
};
#endif // hifi_RagDoll_h

View file

@ -35,6 +35,14 @@ void MenuScriptingInterface::removeMenu(const QString& menu) {
QMetaObject::invokeMethod(Menu::getInstance(), "removeMenu", Q_ARG(const QString&, menu));
}
bool MenuScriptingInterface::menuExists(const QString& menu) {
bool result;
QMetaObject::invokeMethod(Menu::getInstance(), "menuExists", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(bool, result),
Q_ARG(const QString&, menu));
return result;
}
void MenuScriptingInterface::addSeparator(const QString& menuName, const QString& separatorName) {
QMetaObject::invokeMethod(Menu::getInstance(), "addSeparator",
Q_ARG(const QString&, menuName),
@ -67,6 +75,15 @@ void MenuScriptingInterface::removeMenuItem(const QString& menu, const QString&
Q_ARG(const QString&, menuitem));
}
bool MenuScriptingInterface::menuItemExists(const QString& menu, const QString& menuitem) {
bool result;
QMetaObject::invokeMethod(Menu::getInstance(), "menuItemExists", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(bool, result),
Q_ARG(const QString&, menu),
Q_ARG(const QString&, menuitem));
return result;
}
bool MenuScriptingInterface::isOptionChecked(const QString& menuOption) {
bool result;
QMetaObject::invokeMethod(Menu::getInstance(), "isOptionChecked", Qt::BlockingQueuedConnection,

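Both new wrappers use Qt::BlockingQueuedConnection, so the calling (script) thread parks until the slot has executed on the thread that owns the Menu and the result has been written back through Q_RETURN_ARG. A self-contained sketch of the pattern with a hypothetical Worker object:

// Sketch of a blocking cross-thread query; Worker is hypothetical.
// Q_OBJECT classes need moc, as usual; deadlocks if caller and worker share a thread.
#include <QMetaObject>
#include <QObject>
#include <QSet>
#include <QString>

class Worker : public QObject {
    Q_OBJECT
public slots:
    bool itemExists(const QString& name) { return _items.contains(name); }
private:
    QSet<QString> _items;
};

bool queryFromOtherThread(Worker* worker, const QString& name) {
    bool result = false;
    // parks the calling thread until the slot has run on worker's thread
    QMetaObject::invokeMethod(worker, "itemExists", Qt::BlockingQueuedConnection,
                              Q_RETURN_ARG(bool, result),
                              Q_ARG(const QString&, name));
    return result;
}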
Some files were not shown because too many files have changed in this diff