enable -Wdouble-promotion

Seth Alves 2015-06-16 18:23:10 -07:00
parent 49a68b4565
commit 508ae276c6
55 changed files with 1844 additions and 1740 deletions
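The warning enabled here, -Wdouble-promotion, fires whenever a float is implicitly widened to double, which happens silently at every printf-style variadic call because the default argument promotions convert float arguments to double. Most of the hunks below keep the arithmetic in float and make the promotion explicit with a (double) cast at the call site. A minimal sketch of the pattern (plain printf used for illustration; the repository mostly goes through QString().sprintf, qCDebug, and friends):

```cpp
#include <cstdio>

int main() {
    float lostRate = 0.125f;

    // Implicit promotion: a float passed through "..." is widened to double
    // by the default argument promotions. With -Wdouble-promotion enabled,
    // GCC and Clang warn about this silent conversion.
    printf("lost: %5.2f%%\n", lostRate * 100.0f);

    // The pattern used throughout this commit: keep the arithmetic in float,
    // then spell the promotion out explicitly at the call site.
    printf("lost: %5.2f%%\n", (double)(lostRate * 100.0f));
    return 0;
}
```

Compiling the first call with g++ or clang++ and -Wdouble-promotion produces an implicit float-to-double conversion warning; the second, cast form is quiet and prints the same value.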

View file

@ -47,7 +47,7 @@ if (WIN32)
# TODO: Remove when building 64-bit.
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /LARGEADDRESSAWARE")
else ()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra -fno-strict-aliasing -Wno-unused-parameter")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra -Wdouble-promotion -fno-strict-aliasing -Wno-unused-parameter")
if (CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_GNUCXX)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -ggdb")
endif ()

View file

@ -295,10 +295,10 @@ void AudioMixerClientData::printUpstreamDownstreamStats() const {
void AudioMixerClientData::printAudioStreamStats(const AudioStreamStats& streamStats) const {
printf(" Packet loss | overall: %5.2f%% (%d lost), last_30s: %5.2f%% (%d lost)\n",
streamStats._packetStreamStats.getLostRate() * 100.0f,
streamStats._packetStreamStats._lost,
streamStats._packetStreamWindowStats.getLostRate() * 100.0f,
streamStats._packetStreamWindowStats._lost);
(double)(streamStats._packetStreamStats.getLostRate() * 100.0f),
streamStats._packetStreamStats._lost,
(double)(streamStats._packetStreamWindowStats.getLostRate() * 100.0f),
streamStats._packetStreamWindowStats._lost);
printf(" Ringbuffer frames | desired: %u, avg_available(10s): %u, available: %u\n",
streamStats._desiredJitterBufferFrames,

View file

@ -127,7 +127,7 @@ void OctreeServer::resetSendingStats() {
_noProcessWait = 0;
}
void OctreeServer::trackEncodeTime(float time) {
void OctreeServer::trackEncodeTime(float time) {
const float MAX_SHORT_TIME = 10.0f;
const float MAX_LONG_TIME = 100.0f;
@ -144,10 +144,10 @@ void OctreeServer::trackEncodeTime(float time) {
_extraLongEncode++;
_averageExtraLongEncodeTime.updateAverage(time);
}
_averageEncodeTime.updateAverage(time);
_averageEncodeTime.updateAverage(time);
}
void OctreeServer::trackTreeWaitTime(float time) {
void OctreeServer::trackTreeWaitTime(float time) {
const float MAX_SHORT_TIME = 10.0f;
const float MAX_LONG_TIME = 100.0f;
if (time == SKIP_TIME) {
@ -166,7 +166,7 @@ void OctreeServer::trackTreeWaitTime(float time) {
_averageTreeWaitTime.updateAverage(time);
}
void OctreeServer::trackCompressAndWriteTime(float time) {
void OctreeServer::trackCompressAndWriteTime(float time) {
const float MAX_SHORT_TIME = 10.0f;
const float MAX_LONG_TIME = 100.0f;
if (time == SKIP_TIME) {
@ -182,19 +182,19 @@ void OctreeServer::trackCompressAndWriteTime(float time) {
_extraLongCompress++;
_averageExtraLongCompressTime.updateAverage(time);
}
_averageCompressAndWriteTime.updateAverage(time);
_averageCompressAndWriteTime.updateAverage(time);
}
void OctreeServer::trackPacketSendingTime(float time) {
void OctreeServer::trackPacketSendingTime(float time) {
if (time == SKIP_TIME) {
_noSend++;
time = 0.0f;
}
_averagePacketSendingTime.updateAverage(time);
_averagePacketSendingTime.updateAverage(time);
}
void OctreeServer::trackProcessWaitTime(float time) {
void OctreeServer::trackProcessWaitTime(float time) {
const float MAX_SHORT_TIME = 10.0f;
const float MAX_LONG_TIME = 100.0f;
if (time == SKIP_TIME) {
@ -243,9 +243,9 @@ OctreeServer::OctreeServer(const QByteArray& packet) :
_averageLoopTime.updateAverage(0);
qDebug() << "Octree server starting... [" << this << "]";
// make sure the AccountManager has an Auth URL for payment redemptions
AccountManager::getInstance().setAuthURL(NetworkingConstants::METAVERSE_SERVER_URL);
}
@ -278,13 +278,13 @@ OctreeServer::~OctreeServer() {
delete _jurisdiction;
_jurisdiction = NULL;
// cleanup our tree here...
qDebug() << qPrintable(_safeServerName) << "server START cleaning up octree... [" << this << "]";
delete _tree;
_tree = NULL;
qDebug() << qPrintable(_safeServerName) << "server DONE cleaning up octree... [" << this << "]";
if (_instance == this) {
_instance = NULL; // we are gone
}
@ -400,10 +400,10 @@ bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
statsString += QString().sprintf(" Internal Elements: %s nodes (%5.2f%%)\r\n",
locale.toString((uint)internalNodeCount).rightJustified(16,
' ').toLocal8Bit().constData(),
((float)internalNodeCount / (float)nodeCount) * AS_PERCENT);
(double)((internalNodeCount / nodeCount) * AS_PERCENT));
statsString += QString().sprintf(" Leaf Elements: %s nodes (%5.2f%%)\r\n",
locale.toString((uint)leafNodeCount).rightJustified(16, ' ').toLocal8Bit().constData(),
((float)leafNodeCount / (float)nodeCount) * AS_PERCENT);
(double)((leafNodeCount / nodeCount) * AS_PERCENT));
statsString += "\r\n";
statsString += "\r\n";
@ -422,7 +422,7 @@ bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
.arg(locale.toString((uint)getCurrentClientCount()).rightJustified(COLUMN_WIDTH, ' '));
quint64 oneSecondAgo = usecTimestampNow() - USECS_PER_SECOND;
statsString += QString(" process() last second: %1 clients\r\n")
.arg(locale.toString((uint)howManyThreadsDidProcess(oneSecondAgo)).rightJustified(COLUMN_WIDTH, ' '));
statsString += QString(" packetDistributor() last second: %1 clients\r\n")
@ -434,13 +434,13 @@ bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
float averageLoopTime = getAverageLoopTime();
statsString += QString().sprintf(" Average packetLoop() time: %7.2f msecs"
" samples: %12d \r\n",
averageLoopTime, _averageLoopTime.getSampleCount());
" samples: %12d \r\n",
(double)averageLoopTime, _averageLoopTime.getSampleCount());
float averageInsideTime = getAverageInsideTime();
statsString += QString().sprintf(" Average 'inside' time: %9.2f usecs"
" samples: %12d \r\n\r\n",
averageInsideTime, _averageInsideTime.getSampleCount());
" samples: %12d \r\n\r\n",
(double)averageInsideTime, _averageInsideTime.getSampleCount());
// Process Wait
@ -450,30 +450,30 @@ bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
float averageProcessWaitTime = getAverageProcessWaitTime();
statsString += QString().sprintf(" Average process lock wait time:"
" %9.2f usecs samples: %12d \r\n",
averageProcessWaitTime, allWaitTimes);
(double)averageProcessWaitTime, allWaitTimes);
float zeroVsTotal = (allWaitTimes > 0) ? ((float)_noProcessWait / (float)allWaitTimes) : 0.0f;
statsString += QString().sprintf(" No Lock Wait:"
" (%6.2f%%) samples: %12d \r\n",
zeroVsTotal * AS_PERCENT, _noProcessWait);
(double)(zeroVsTotal * AS_PERCENT), _noProcessWait);
float shortVsTotal = (allWaitTimes > 0) ? ((float)_shortProcessWait / (float)allWaitTimes) : 0.0f;
statsString += QString().sprintf(" Avg process lock short wait time:"
" %9.2f usecs (%6.2f%%) samples: %12d \r\n",
_averageProcessShortWaitTime.getAverage(),
shortVsTotal * AS_PERCENT, _shortProcessWait);
(double)_averageProcessShortWaitTime.getAverage(),
(double)(shortVsTotal * AS_PERCENT), _shortProcessWait);
float longVsTotal = (allWaitTimes > 0) ? ((float)_longProcessWait / (float)allWaitTimes) : 0.0f;
statsString += QString().sprintf(" Avg process lock long wait time:"
" %9.2f usecs (%6.2f%%) samples: %12d \r\n",
_averageProcessLongWaitTime.getAverage(),
longVsTotal * AS_PERCENT, _longProcessWait);
(double)_averageProcessLongWaitTime.getAverage(),
(double)(longVsTotal * AS_PERCENT), _longProcessWait);
float extraLongVsTotal = (allWaitTimes > 0) ? ((float)_extraLongProcessWait / (float)allWaitTimes) : 0.0f;
statsString += QString().sprintf("Avg process lock extralong wait time:"
" %9.2f usecs (%6.2f%%) samples: %12d \r\n\r\n",
_averageProcessExtraLongWaitTime.getAverage(),
extraLongVsTotal * AS_PERCENT, _extraLongProcessWait);
(double)_averageProcessExtraLongWaitTime.getAverage(),
(double)(extraLongVsTotal * AS_PERCENT), _extraLongProcessWait);
}
// Tree Wait
@ -482,122 +482,125 @@ bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
float averageTreeWaitTime = getAverageTreeWaitTime();
statsString += QString().sprintf(" Average tree lock wait time:"
" %9.2f usecs samples: %12d \r\n",
averageTreeWaitTime, allWaitTimes);
(double)averageTreeWaitTime, allWaitTimes);
float zeroVsTotal = (allWaitTimes > 0) ? ((float)_noTreeWait / (float)allWaitTimes) : 0.0f;
statsString += QString().sprintf(" No Lock Wait:"
" (%6.2f%%) samples: %12d \r\n",
zeroVsTotal * AS_PERCENT, _noTreeWait);
(double)(zeroVsTotal * AS_PERCENT), _noTreeWait);
float shortVsTotal = (allWaitTimes > 0) ? ((float)_shortTreeWait / (float)allWaitTimes) : 0.0f;
statsString += QString().sprintf(" Avg tree lock short wait time:"
" %9.2f usecs (%6.2f%%) samples: %12d \r\n",
_averageTreeShortWaitTime.getAverage(),
shortVsTotal * AS_PERCENT, _shortTreeWait);
(double)_averageTreeShortWaitTime.getAverage(),
(double)(shortVsTotal * AS_PERCENT), _shortTreeWait);
float longVsTotal = (allWaitTimes > 0) ? ((float)_longTreeWait / (float)allWaitTimes) : 0.0f;
statsString += QString().sprintf(" Avg tree lock long wait time:"
" %9.2f usecs (%6.2f%%) samples: %12d \r\n",
_averageTreeLongWaitTime.getAverage(),
longVsTotal * AS_PERCENT, _longTreeWait);
(double)_averageTreeLongWaitTime.getAverage(),
(double)(longVsTotal * AS_PERCENT), _longTreeWait);
float extraLongVsTotal = (allWaitTimes > 0) ? ((float)_extraLongTreeWait / (float)allWaitTimes) : 0.0f;
statsString += QString().sprintf(" Avg tree lock extra long wait time:"
" %9.2f usecs (%6.2f%%) samples: %12d \r\n\r\n",
_averageTreeExtraLongWaitTime.getAverage(),
extraLongVsTotal * AS_PERCENT, _extraLongTreeWait);
(double)_averageTreeExtraLongWaitTime.getAverage(),
(double)(extraLongVsTotal * AS_PERCENT), _extraLongTreeWait);
// encode
float averageEncodeTime = getAverageEncodeTime();
statsString += QString().sprintf(" Average encode time: %9.2f usecs\r\n", averageEncodeTime);
statsString += QString().sprintf(" Average encode time: %9.2f usecs\r\n", (double)averageEncodeTime);
int allEncodeTimes = _noEncode + _shortEncode + _longEncode + _extraLongEncode;
float zeroVsTotalEncode = (allEncodeTimes > 0) ? ((float)_noEncode / (float)allEncodeTimes) : 0.0f;
statsString += QString().sprintf(" No Encode:"
" (%6.2f%%) samples: %12d \r\n",
zeroVsTotalEncode * AS_PERCENT, _noEncode);
(double)(zeroVsTotalEncode * AS_PERCENT), _noEncode);
float shortVsTotalEncode = (allEncodeTimes > 0) ? ((float)_shortEncode / (float)allEncodeTimes) : 0.0f;
statsString += QString().sprintf(" Avg short encode time:"
" %9.2f usecs (%6.2f%%) samples: %12d \r\n",
_averageShortEncodeTime.getAverage(),
shortVsTotalEncode * AS_PERCENT, _shortEncode);
(double)_averageShortEncodeTime.getAverage(),
(double)(shortVsTotalEncode * AS_PERCENT), _shortEncode);
float longVsTotalEncode = (allEncodeTimes > 0) ? ((float)_longEncode / (float)allEncodeTimes) : 0.0f;
statsString += QString().sprintf(" Avg long encode time:"
" %9.2f usecs (%6.2f%%) samples: %12d \r\n",
_averageLongEncodeTime.getAverage(),
longVsTotalEncode * AS_PERCENT, _longEncode);
(double)_averageLongEncodeTime.getAverage(),
(double)(longVsTotalEncode * AS_PERCENT), _longEncode);
float extraLongVsTotalEncode = (allEncodeTimes > 0) ? ((float)_extraLongEncode / (float)allEncodeTimes) : 0.0f;
statsString += QString().sprintf(" Avg extra long encode time:"
" %9.2f usecs (%6.2f%%) samples: %12d \r\n\r\n",
_averageExtraLongEncodeTime.getAverage(),
extraLongVsTotalEncode * AS_PERCENT, _extraLongEncode);
(double)_averageExtraLongEncodeTime.getAverage(),
(double)(extraLongVsTotalEncode * AS_PERCENT), _extraLongEncode);
float averageCompressAndWriteTime = getAverageCompressAndWriteTime();
statsString += QString().sprintf(" Average compress and write time: %9.2f usecs\r\n",
averageCompressAndWriteTime);
statsString += QString().sprintf(" Average compress and write time: %9.2f usecs\r\n",
(double)averageCompressAndWriteTime);
int allCompressTimes = _noCompress + _shortCompress + _longCompress + _extraLongCompress;
float zeroVsTotalCompress = (allCompressTimes > 0) ? ((float)_noCompress / (float)allCompressTimes) : 0.0f;
statsString += QString().sprintf(" No compression:"
" (%6.2f%%) samples: %12d \r\n",
zeroVsTotalCompress * AS_PERCENT, _noCompress);
(double)(zeroVsTotalCompress * AS_PERCENT), _noCompress);
float shortVsTotalCompress = (allCompressTimes > 0) ? ((float)_shortCompress / (float)allCompressTimes) : 0.0f;
statsString += QString().sprintf(" Avg short compress time:"
" %9.2f usecs (%6.2f%%) samples: %12d \r\n",
_averageShortCompressTime.getAverage(),
shortVsTotalCompress * AS_PERCENT, _shortCompress);
(double)_averageShortCompressTime.getAverage(),
(double)(shortVsTotalCompress * AS_PERCENT), _shortCompress);
float longVsTotalCompress = (allCompressTimes > 0) ? ((float)_longCompress / (float)allCompressTimes) : 0.0f;
statsString += QString().sprintf(" Avg long compress time:"
" %9.2f usecs (%6.2f%%) samples: %12d \r\n",
_averageLongCompressTime.getAverage(),
longVsTotalCompress * AS_PERCENT, _longCompress);
(double)_averageLongCompressTime.getAverage(),
(double)(longVsTotalCompress * AS_PERCENT), _longCompress);
float extraLongVsTotalCompress = (allCompressTimes > 0) ? ((float)_extraLongCompress / (float)allCompressTimes) : 0.0f;
statsString += QString().sprintf(" Avg extra long compress time:"
" %9.2f usecs (%6.2f%%) samples: %12d \r\n\r\n",
_averageExtraLongCompressTime.getAverage(),
extraLongVsTotalCompress * AS_PERCENT, _extraLongCompress);
(double)_averageExtraLongCompressTime.getAverage(),
(double)(extraLongVsTotalCompress * AS_PERCENT), _extraLongCompress);
float averagePacketSendingTime = getAveragePacketSendingTime();
statsString += QString().sprintf(" Average packet sending time: %9.2f usecs (includes node lock)\r\n",
averagePacketSendingTime);
statsString += QString().sprintf(" Average packet sending time: %9.2f usecs (includes node lock)\r\n",
(double)averagePacketSendingTime);
float noVsTotalSend = (_averagePacketSendingTime.getSampleCount() > 0) ?
float noVsTotalSend = (_averagePacketSendingTime.getSampleCount() > 0) ?
((float)_noSend / (float)_averagePacketSendingTime.getSampleCount()) : 0.0f;
statsString += QString().sprintf(" Not sending:"
" (%6.2f%%) samples: %12d \r\n",
noVsTotalSend * AS_PERCENT, _noSend);
(double)(noVsTotalSend * AS_PERCENT), _noSend);
float averageNodeWaitTime = getAverageNodeWaitTime();
statsString += QString().sprintf(" Average node lock wait time: %9.2f usecs\r\n", averageNodeWaitTime);
statsString += QString().sprintf(" Average node lock wait time: %9.2f usecs\r\n",
(double)averageNodeWaitTime);
statsString += QString().sprintf("--------------------------------------------------------------\r\n");
float encodeToInsidePercent = averageInsideTime == 0.0f ? 0.0f : (averageEncodeTime / averageInsideTime) * AS_PERCENT;
statsString += QString().sprintf(" encode ratio: %5.2f%%\r\n",
encodeToInsidePercent);
statsString += QString().sprintf(" encode ratio: %5.2f%%\r\n",
(double)encodeToInsidePercent);
float waitToInsidePercent = averageInsideTime == 0.0f ? 0.0f
float waitToInsidePercent = averageInsideTime == 0.0f ? 0.0f
: ((averageTreeWaitTime + averageNodeWaitTime) / averageInsideTime) * AS_PERCENT;
statsString += QString().sprintf(" waiting ratio: %5.2f%%\r\n", waitToInsidePercent);
statsString += QString().sprintf(" waiting ratio: %5.2f%%\r\n",
(double)waitToInsidePercent);
float compressAndWriteToInsidePercent = averageInsideTime == 0.0f ? 0.0f
float compressAndWriteToInsidePercent = averageInsideTime == 0.0f ? 0.0f
: (averageCompressAndWriteTime / averageInsideTime) * AS_PERCENT;
statsString += QString().sprintf(" compress and write ratio: %5.2f%%\r\n",
compressAndWriteToInsidePercent);
statsString += QString().sprintf(" compress and write ratio: %5.2f%%\r\n",
(double)compressAndWriteToInsidePercent);
float sendingToInsidePercent = averageInsideTime == 0.0f ? 0.0f
float sendingToInsidePercent = averageInsideTime == 0.0f ? 0.0f
: (averagePacketSendingTime / averageInsideTime) * AS_PERCENT;
statsString += QString().sprintf(" sending ratio: %5.2f%%\r\n", sendingToInsidePercent);
statsString += QString().sprintf(" sending ratio: %5.2f%%\r\n",
(double)sendingToInsidePercent);
statsString += QString("\r\n");
@ -610,13 +613,13 @@ bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
.arg(locale.toString((uint)totalWastedBytes).rightJustified(COLUMN_WIDTH, ' '));
statsString += QString().sprintf(" Total OctalCode Bytes: %s bytes (%5.2f%%)\r\n",
locale.toString((uint)totalBytesOfOctalCodes).rightJustified(COLUMN_WIDTH, ' ').toLocal8Bit().constData(),
((float)totalBytesOfOctalCodes / (float)totalOutboundBytes) * AS_PERCENT);
(double)((totalBytesOfOctalCodes / totalOutboundBytes) * AS_PERCENT));
statsString += QString().sprintf(" Total BitMasks Bytes: %s bytes (%5.2f%%)\r\n",
locale.toString((uint)totalBytesOfBitMasks).rightJustified(COLUMN_WIDTH, ' ').toLocal8Bit().constData(),
((float)totalBytesOfBitMasks / (float)totalOutboundBytes) * AS_PERCENT);
(double)((totalBytesOfBitMasks / totalOutboundBytes) * AS_PERCENT));
statsString += QString().sprintf(" Total Color Bytes: %s bytes (%5.2f%%)\r\n",
locale.toString((uint)totalBytesOfColor).rightJustified(COLUMN_WIDTH, ' ').toLocal8Bit().constData(),
((float)totalBytesOfColor / (float)totalOutboundBytes) * AS_PERCENT);
(double)((totalBytesOfColor / totalOutboundBytes) * AS_PERCENT));
statsString += "\r\n";
statsString += "\r\n";
@ -638,7 +641,8 @@ bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
.arg(locale.toString((uint)totalPacketsProcessed).rightJustified(COLUMN_WIDTH, ' '));
statsString += QString(" Total Inbound Elements: %1 elements\r\n")
.arg(locale.toString((uint)totalElementsProcessed).rightJustified(COLUMN_WIDTH, ' '));
statsString += QString().sprintf(" Average Inbound Elements/Packet: %f elements/packet\r\n", averageElementsPerPacket);
statsString += QString().sprintf(" Average Inbound Elements/Packet: %f elements/packet\r\n",
(double)averageElementsPerPacket);
statsString += QString(" Average Transit Time/Packet: %1 usecs\r\n")
.arg(locale.toString((uint)averageTransitTimePerPacket).rightJustified(COLUMN_WIDTH, ' '));
statsString += QString(" Average Process Time/Packet: %1 usecs\r\n")
@ -676,7 +680,7 @@ bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
statsString += QString(" Total Inbound Elements: %1 elements\r\n")
.arg(locale.toString((uint)totalElementsProcessed).rightJustified(COLUMN_WIDTH, ' '));
statsString += QString().sprintf(" Average Inbound Elements/Packet: %f elements/packet\r\n",
averageElementsPerPacket);
(double)averageElementsPerPacket);
statsString += QString(" Average Transit Time/Packet: %1 usecs\r\n")
.arg(locale.toString((uint)averageTransitTimePerPacket).rightJustified(COLUMN_WIDTH, ' '));
statsString += QString(" Average Process Time/Packet: %1 usecs\r\n")
@ -710,14 +714,15 @@ bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
}
statsString += QString().sprintf("Element Node Memory Usage: %8.2f %s\r\n",
OctreeElement::getOctreeMemoryUsage() / memoryScale, memoryScaleLabel);
OctreeElement::getOctreeMemoryUsage() / (double)memoryScale, memoryScaleLabel);
statsString += QString().sprintf("Octcode Memory Usage: %8.2f %s\r\n",
OctreeElement::getOctcodeMemoryUsage() / memoryScale, memoryScaleLabel);
OctreeElement::getOctcodeMemoryUsage() / (double)memoryScale, memoryScaleLabel);
statsString += QString().sprintf("External Children Memory Usage: %8.2f %s\r\n",
OctreeElement::getExternalChildrenMemoryUsage() / memoryScale, memoryScaleLabel);
OctreeElement::getExternalChildrenMemoryUsage() / (double)memoryScale,
memoryScaleLabel);
statsString += " -----------\r\n";
statsString += QString().sprintf(" Total: %8.2f %s\r\n",
OctreeElement::getTotalMemoryUsage() / memoryScale, memoryScaleLabel);
OctreeElement::getTotalMemoryUsage() / (double)memoryScale, memoryScaleLabel);
statsString += "\r\n";
statsString += "OctreeElement Children Population Statistics...\r\n";
@ -726,7 +731,7 @@ bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
checkSum += OctreeElement::getChildrenCount(i);
statsString += QString().sprintf(" Nodes with %d children: %s nodes (%5.2f%%)\r\n", i,
locale.toString((uint)OctreeElement::getChildrenCount(i)).rightJustified(16, ' ').toLocal8Bit().constData(),
((float)OctreeElement::getChildrenCount(i) / (float)nodeCount) * AS_PERCENT);
(double)((OctreeElement::getChildrenCount(i) / nodeCount) * AS_PERCENT));
}
statsString += " ----------------------\r\n";
statsString += QString(" Total: %1 nodes\r\n")
@ -831,7 +836,7 @@ void OctreeServer::readPendingDatagram(const QByteArray& receivedPacket, const H
// If we know we're shutting down we just drop these packets on the floor.
// This stops us from initializing send threads we just shut down.
if (!_isShuttingDown) {
if (nodeList->packetVersionAndHashMatch(receivedPacket)) {
PacketType packetType = packetTypeForPacket(receivedPacket);
@ -841,7 +846,7 @@ void OctreeServer::readPendingDatagram(const QByteArray& receivedPacket, const H
// need to make sure we have it in our nodeList.
if (matchingNode) {
nodeList->updateNodeWithDataFromPacket(matchingNode, receivedPacket);
OctreeQueryNode* nodeData = (OctreeQueryNode*) matchingNode->getLinkedData();
if (nodeData && !nodeData->isOctreeSendThreadInitalized()) {
nodeData->initializeOctreeSendThread(this, matchingNode);
@ -870,33 +875,33 @@ void OctreeServer::readPendingDatagram(const QByteArray& receivedPacket, const H
void OctreeServer::setupDatagramProcessingThread() {
auto nodeList = DependencyManager::get<NodeList>();
// we do not want this event loop to be the handler for UDP datagrams, so disconnect
disconnect(&nodeList->getNodeSocket(), 0, this, 0);
// setup a QThread with us as parent that will house the OctreeServerDatagramProcessor
_datagramProcessingThread = new QThread(this);
_datagramProcessingThread->setObjectName("Octree Datagram Processor");
// create an OctreeServerDatagramProcessor and move it to that thread
OctreeServerDatagramProcessor* datagramProcessor = new OctreeServerDatagramProcessor(nodeList->getNodeSocket(), thread());
datagramProcessor->moveToThread(_datagramProcessingThread);
// remove the NodeList as the parent of the node socket
nodeList->getNodeSocket().setParent(NULL);
nodeList->getNodeSocket().moveToThread(_datagramProcessingThread);
// let the datagram processor handle readyRead from node socket
connect(&nodeList->getNodeSocket(), &QUdpSocket::readyRead,
datagramProcessor, &OctreeServerDatagramProcessor::readPendingDatagrams);
// connect to the datagram processing thread signal that tells us we have to handle a packet
connect(datagramProcessor, &OctreeServerDatagramProcessor::packetRequiresProcessing, this, &OctreeServer::readPendingDatagram);
// delete the datagram processor and the associated thread when the QThread quits
connect(_datagramProcessingThread, &QThread::finished, datagramProcessor, &QObject::deleteLater);
connect(datagramProcessor, &QObject::destroyed, _datagramProcessingThread, &QThread::deleteLater);
// start the datagram processing thread
_datagramProcessingThread->start();
}
@ -961,16 +966,16 @@ void OctreeServer::readConfiguration() {
// wait until we have the domain-server settings, otherwise we bail
auto nodeList = DependencyManager::get<NodeList>();
DomainHandler& domainHandler = nodeList->getDomainHandler();
qDebug() << "Waiting for domain settings from domain-server.";
// block until we get the settingsRequestComplete signal
QEventLoop loop;
connect(&domainHandler, &DomainHandler::settingsReceived, &loop, &QEventLoop::quit);
connect(&domainHandler, &DomainHandler::settingsReceiveFail, &loop, &QEventLoop::quit);
domainHandler.requestDomainSettings();
loop.exec();
if (domainHandler.getSettingsObject().isEmpty()) {
qDebug() << "No settings object from domain-server.";
}
@ -978,7 +983,7 @@ void OctreeServer::readConfiguration() {
QString settingsKey = getMyDomainSettingsKey();
QJsonObject settingsSectionObject = settingsObject[settingsKey].toObject();
_settings = settingsSectionObject; // keep this for later
if (!readOptionString(QString("statusHost"), settingsSectionObject, _statusHost) || _statusHost.isEmpty()) {
_statusHost = getLocalAddress().toString();
}
@ -1002,7 +1007,7 @@ void OctreeServer::readConfiguration() {
bool hasRoot = readOptionString(QString("jurisdictionRoot"), settingsSectionObject, jurisdictionRoot);
QString jurisdictionEndNodes;
bool hasEndNodes = readOptionString(QString("jurisdictionEndNodes"), settingsSectionObject, jurisdictionEndNodes);
if (hasRoot || hasEndNodes) {
_jurisdiction = new JurisdictionMap(qPrintable(jurisdictionRoot), qPrintable(jurisdictionEndNodes));
}
@ -1050,7 +1055,7 @@ void OctreeServer::readConfiguration() {
qDebug() << "wantBackup=" << _wantBackup;
//qDebug() << "settingsSectionObject:" << settingsSectionObject;
} else {
qDebug("persistFilename= DISABLED");
}
@ -1072,7 +1077,7 @@ void OctreeServer::readConfiguration() {
_packetsPerClientPerInterval = 1;
}
}
qDebug("packetsPerSecondPerClientMax=%d _packetsPerClientPerInterval=%d",
qDebug("packetsPerSecondPerClientMax=%d _packetsPerClientPerInterval=%d",
packetsPerSecondPerClientMax, _packetsPerClientPerInterval);
// Check to see if the user passed in a command line option for setting packet send rate
@ -1083,10 +1088,10 @@ void OctreeServer::readConfiguration() {
_packetsTotalPerInterval = 1;
}
}
qDebug("packetsPerSecondTotalMax=%d _packetsTotalPerInterval=%d",
qDebug("packetsPerSecondTotalMax=%d _packetsTotalPerInterval=%d",
packetsPerSecondTotalMax, _packetsTotalPerInterval);
readAdditionalConfiguration(settingsSectionObject);
}
@ -1101,8 +1106,8 @@ void OctreeServer::run() {
// make sure our NodeList knows what type we are
auto nodeList = DependencyManager::get<NodeList>();
nodeList->setOwnerType(getMyNodeType());
// use common init to setup common timers and logging
commonInit(getMyLoggingServerTargetName(), getMyNodeType());
@ -1110,7 +1115,7 @@ void OctreeServer::run() {
// read the configuration from either the payload or the domain server configuration
readConfiguration();
beforeRun(); // after payload has been processed
connect(nodeList.data(), SIGNAL(nodeAdded(SharedNodePointer)), SLOT(nodeAdded(SharedNodePointer)));
@ -1119,7 +1124,7 @@ void OctreeServer::run() {
// we need to ask the DS about agents so we can ping/reply with them
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
#ifndef WIN32
setvbuf(stdout, NULL, _IOLBF, 0);
#endif
@ -1209,7 +1214,7 @@ void OctreeServer::forceNodeShutdown(SharedNodePointer node) {
quint64 end = usecTimestampNow();
quint64 usecsElapsed = (end - start);
qDebug() << qPrintable(_safeServerName) << "server forceNodeShutdown() took: "
qDebug() << qPrintable(_safeServerName) << "server forceNodeShutdown() took: "
<< usecsElapsed << " usecs for node:" << *node;
}
@ -1225,7 +1230,7 @@ void OctreeServer::aboutToFinish() {
// This ensures that when we forceNodeShutdown below for each node we don't get any more newly connecting nodes
auto nodeList = DependencyManager::get<NodeList>();
nodeList->linkedDataCreateCallback = NULL;
if (_octreeInboundPacketProcessor) {
_octreeInboundPacketProcessor->terminating();
}
@ -1240,7 +1245,7 @@ void OctreeServer::aboutToFinish() {
qDebug() << qPrintable(_safeServerName) << "server about to finish while node still connected node:" << *node;
forceNodeShutdown(node);
});
if (_persistThread) {
_persistThread->aboutToFinish();
_persistThread->terminating();
@ -1282,7 +1287,7 @@ QString OctreeServer::getUptime() {
if (hours > 0 || minutes > 0) {
formattedUptime += QString(" ");
}
formattedUptime += QString().sprintf("%.3f seconds", seconds);
formattedUptime += QString().sprintf("%.3f seconds", (double)seconds);
}
return formattedUptime;
}
@ -1290,13 +1295,13 @@ QString OctreeServer::getUptime() {
QString OctreeServer::getFileLoadTime() {
QString result;
if (isInitialLoadComplete()) {
const int USECS_PER_MSEC = 1000;
const int MSECS_PER_SEC = 1000;
const int SECS_PER_MIN = 60;
const int MIN_PER_HOUR = 60;
const int MSECS_PER_MIN = MSECS_PER_SEC * SECS_PER_MIN;
quint64 msecsElapsed = getLoadElapsedTime() / USECS_PER_MSEC;;
float seconds = (msecsElapsed % MSECS_PER_MIN)/(float)MSECS_PER_SEC;
int minutes = (msecsElapsed/(MSECS_PER_MIN)) % MIN_PER_HOUR;
@ -1321,7 +1326,7 @@ QString OctreeServer::getFileLoadTime() {
if (hours > 0 || minutes > 0) {
result += QString(" ");
}
result += QString().sprintf("%.3f seconds", seconds);
result += QString().sprintf("%.3f seconds", (double)seconds);
}
} else {
result = "Not yet loaded...";
@ -1356,27 +1361,27 @@ void OctreeServer::sendStatsPacket() {
// 2) only send new data
// 3) automatically break up into multiple packets
static QJsonObject statsObject1;
QString baseName = getMyServerName() + QString("Server");
statsObject1[baseName + QString(".0.1.configuration")] = getConfiguration();
statsObject1[baseName + QString(".0.2.detailed_stats_url")] = getStatusLink();
statsObject1[baseName + QString(".0.3.uptime")] = getUptime();
statsObject1[baseName + QString(".0.4.persistFileLoadTime")] = getFileLoadTime();
statsObject1[baseName + QString(".0.5.clients")] = getCurrentClientCount();
quint64 oneSecondAgo = usecTimestampNow() - USECS_PER_SECOND;
statsObject1[baseName + QString(".0.6.threads.1.processing")] = (double)howManyThreadsDidProcess(oneSecondAgo);
statsObject1[baseName + QString(".0.6.threads.2.packetDistributor")] =
statsObject1[baseName + QString(".0.6.threads.2.packetDistributor")] =
(double)howManyThreadsDidPacketDistributor(oneSecondAgo);
statsObject1[baseName + QString(".0.6.threads.3.handlePacektSend")] =
statsObject1[baseName + QString(".0.6.threads.3.handlePacektSend")] =
(double)howManyThreadsDidHandlePacketSend(oneSecondAgo);
statsObject1[baseName + QString(".0.6.threads.4.writeDatagram")] =
(double)howManyThreadsDidCallWriteDatagram(oneSecondAgo);
statsObject1[baseName + QString(".0.6.threads.4.writeDatagram")] =
(double)howManyThreadsDidCallWriteDatagram(oneSecondAgo);
statsObject1[baseName + QString(".1.1.octree.elementCount")] = (double)OctreeElement::getNodeCount();
statsObject1[baseName + QString(".1.2.octree.internalElementCount")] = (double)OctreeElement::getInternalNodeCount();
statsObject1[baseName + QString(".1.3.octree.leafElementCount")] = (double)OctreeElement::getLeafNodeCount();
@ -1388,9 +1393,9 @@ void OctreeServer::sendStatsPacket() {
statsObject2[baseName + QString(".2.outbound.data.totalPackets")] = (double)OctreeSendThread::_totalPackets;
statsObject2[baseName + QString(".2.outbound.data.totalBytes")] = (double)OctreeSendThread::_totalBytes;
statsObject2[baseName + QString(".2.outbound.data.totalBytesWasted")] = (double)OctreeSendThread::_totalWastedBytes;
statsObject2[baseName + QString(".2.outbound.data.totalBytesOctalCodes")] =
statsObject2[baseName + QString(".2.outbound.data.totalBytesOctalCodes")] =
(double)OctreePacketData::getTotalBytesOfOctalCodes();
statsObject2[baseName + QString(".2.outbound.data.totalBytesBitMasks")] =
statsObject2[baseName + QString(".2.outbound.data.totalBytesBitMasks")] =
(double)OctreePacketData::getTotalBytesOfBitMasks();
statsObject2[baseName + QString(".2.outbound.data.totalBytesBitMasks")] = (double)OctreePacketData::getTotalBytesOfColor();
@ -1406,19 +1411,19 @@ void OctreeServer::sendStatsPacket() {
static QJsonObject statsObject3;
statsObject3[baseName + QString(".3.inbound.data.1.totalPackets")] =
statsObject3[baseName + QString(".3.inbound.data.1.totalPackets")] =
(double)_octreeInboundPacketProcessor->getTotalPacketsProcessed();
statsObject3[baseName + QString(".3.inbound.data.2.totalElements")] =
statsObject3[baseName + QString(".3.inbound.data.2.totalElements")] =
(double)_octreeInboundPacketProcessor->getTotalElementsProcessed();
statsObject3[baseName + QString(".3.inbound.timing.1.avgTransitTimePerPacket")] =
statsObject3[baseName + QString(".3.inbound.timing.1.avgTransitTimePerPacket")] =
(double)_octreeInboundPacketProcessor->getAverageTransitTimePerPacket();
statsObject3[baseName + QString(".3.inbound.timing.2.avgProcessTimePerPacket")] =
statsObject3[baseName + QString(".3.inbound.timing.2.avgProcessTimePerPacket")] =
(double)_octreeInboundPacketProcessor->getAverageProcessTimePerPacket();
statsObject3[baseName + QString(".3.inbound.timing.3.avgLockWaitTimePerPacket")] =
statsObject3[baseName + QString(".3.inbound.timing.3.avgLockWaitTimePerPacket")] =
(double)_octreeInboundPacketProcessor->getAverageLockWaitTimePerPacket();
statsObject3[baseName + QString(".3.inbound.timing.4.avgProcessTimePerElement")] =
statsObject3[baseName + QString(".3.inbound.timing.4.avgProcessTimePerElement")] =
(double)_octreeInboundPacketProcessor->getAverageProcessTimePerElement();
statsObject3[baseName + QString(".3.inbound.timing.5.avgLockWaitTimePerElement")] =
statsObject3[baseName + QString(".3.inbound.timing.5.avgLockWaitTimePerElement")] =
(double)_octreeInboundPacketProcessor->getAverageLockWaitTimePerElement();
DependencyManager::get<NodeList>()->sendStatsToDomainServer(statsObject3);

View file

@ -815,9 +815,9 @@ void Application::initializeGL() {
_idleLoopStdev.reset();
if (_justStarted) {
float startupTime = (float)_applicationStartupTime.elapsed() / 1000.0;
float startupTime = (float)_applicationStartupTime.elapsed() / 1000.0f;
_justStarted = false;
qCDebug(interfaceapp, "Startup time: %4.2f seconds.", startupTime);
qCDebug(interfaceapp, "Startup time: %4.2f seconds.", (double)startupTime);
}
// update before the first render
@ -1664,8 +1664,8 @@ void Application::touchUpdateEvent(QTouchEvent* event) {
int numTouches = tPoints.count();
if (numTouches > 1) {
for (int i = 0; i < numTouches; ++i) {
_touchAvgX += tPoints[i].pos().x();
_touchAvgY += tPoints[i].pos().y();
_touchAvgX += (float)tPoints[i].pos().x();
_touchAvgY += (float)tPoints[i].pos().y();
}
_touchAvgX /= (float)(numTouches);
_touchAvgY /= (float)(numTouches);
@ -3645,8 +3645,8 @@ glm::vec2 Application::getScaledScreenPoint(glm::vec2 projectedPoint) {
// +-----------------------+
// -1,-1 1,-1
glm::vec2 screenPoint((projectedPoint.x + 1.0) * horizontalScale,
((projectedPoint.y + 1.0) * -verticalScale) + _glWidget->getDeviceHeight());
glm::vec2 screenPoint((projectedPoint.x + 1.0f) * horizontalScale,
((projectedPoint.y + 1.0f) * -verticalScale) + _glWidget->getDeviceHeight());
return screenPoint;
}
@ -3699,7 +3699,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
} else {
// if not rendering the billboard, the region is in device independent coordinates; must convert to device
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
float ratio = QApplication::desktop()->windowHandle()->devicePixelRatio() * getRenderResolutionScale();
float ratio = (float)QApplication::desktop()->windowHandle()->devicePixelRatio() * getRenderResolutionScale();
int x = region.x() * ratio, y = region.y() * ratio, width = region.width() * ratio, height = region.height() * ratio;
glViewport(x, size.height() - y - height, width, height);
glScissor(x, size.height() - y - height, width, height);
@ -3857,7 +3857,7 @@ void Application::nodeKilled(SharedNodePointer node) {
_entityServerJurisdictions.unlock();
qCDebug(interfaceapp, "model server going away...... v[%f, %f, %f, %f]",
rootDetails.x, rootDetails.y, rootDetails.z, rootDetails.s);
(double)rootDetails.x, (double)rootDetails.y, (double)rootDetails.z, (double)rootDetails.s);
// Add the jurisditionDetails object to the list of "fade outs"
if (!Menu::getInstance()->isOptionChecked(MenuOption::DontFadeOnOctreeServerChanges)) {
@ -3943,7 +3943,8 @@ int Application::parseOctreeStats(const QByteArray& packet, const SharedNodePoin
jurisdiction->unlock();
qCDebug(interfaceapp, "stats from new %s server... [%f, %f, %f, %f]",
qPrintable(serverType), rootDetails.x, rootDetails.y, rootDetails.z, rootDetails.s);
qPrintable(serverType),
(double)rootDetails.x, (double)rootDetails.y, (double)rootDetails.z, (double)rootDetails.s);
// Add the jurisditionDetails object to the list of "fade outs"
if (!Menu::getInstance()->isOptionChecked(MenuOption::DontFadeOnOctreeServerChanges)) {
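The Application.cpp hunks above combine two recurring fixes: double literals such as 1.0 and 1000.0 become float literals (1.0f, 1000.0f) so mixed expressions stay in single precision, and float values handed to qCDebug's printf-style format strings get an explicit (double) cast. A small sketch of the same shape, with illustrative names rather than the repository's:

```cpp
#include <cstdio>

// Hypothetical helper mirroring getScaledScreenPoint(): map a normalized
// coordinate in [-1, 1] onto a pixel range.
float scaleToScreen(float projected, float horizontalScale) {
    // (projected + 1.0) would promote the whole expression to double and
    // then narrow it back on return; the 'f' suffix keeps it in float.
    return (projected + 1.0f) * horizontalScale;
}

int main() {
    float x = scaleToScreen(0.25f, 640.0f);
    // Explicit cast at the variadic call site, as in the qCDebug changes.
    printf("x = %4.2f\n", (double)x);
    return 0;
}
```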

View file

@ -39,11 +39,11 @@ bool GLCanvas::isThrottleRendering() const {
}
int GLCanvas::getDeviceWidth() const {
return width() * (windowHandle() ? windowHandle()->devicePixelRatio() : 1.0f);
return width() * (windowHandle() ? (float)windowHandle()->devicePixelRatio() : 1.0f);
}
int GLCanvas::getDeviceHeight() const {
return height() * (windowHandle() ? windowHandle()->devicePixelRatio() : 1.0f);
return height() * (windowHandle() ? (float)windowHandle()->devicePixelRatio() : 1.0f);
}
void GLCanvas::initializeGL() {
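In GLCanvas the offending expression is the ternary: devicePixelRatio() returns a double-precision qreal, so the 1.0f in the other branch is silently promoted to double. Casting the ratio down to float keeps the whole expression in single precision. A hedged, Qt-free sketch of the same shape:

```cpp
#include <cstdio>

// Stand-in for QWindow::devicePixelRatio(), which returns qreal (double).
double devicePixelRatio() { return 2.0; }

int deviceWidth(int logicalWidth, bool hasWindow) {
    // Before: hasWindow ? devicePixelRatio() : 1.0f
    //   -> the float branch is promoted to double, tripping the warning.
    // After: narrow the double explicitly so both branches are float.
    return logicalWidth * (hasWindow ? (float)devicePixelRatio() : 1.0f);
}

int main() {
    printf("%d\n", deviceWidth(800, true));
    return 0;
}
```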

View file

@ -205,11 +205,11 @@ QString LODManager::getLODFeedbackText() {
int relativeToTwentyTwenty = 20 / relativeToDefault;
QString result;
if (relativeToDefault > 1.01) {
if (relativeToDefault > 1.01f) {
result = QString("20:%1 or %2 times further than average vision%3").arg(relativeToTwentyTwenty).arg(relativeToDefault,0,'f',2).arg(granularityFeedback);
} else if (relativeToDefault > 0.99) {
} else if (relativeToDefault > 0.99f) {
result = QString("20:20 or the default distance for average vision%1").arg(granularityFeedback);
} else if (relativeToDefault > 0.01) {
} else if (relativeToDefault > 0.01f) {
result = QString("20:%1 or %2 of default distance for average vision%3").arg(relativeToTwentyTwenty).arg(relativeToDefault,0,'f',3).arg(granularityFeedback);
} else {
result = QString("%2 of default distance for average vision%3").arg(relativeToDefault,0,'f',3).arg(granularityFeedback);

View file

@ -33,19 +33,19 @@ _basePath(basePath),
_geometry(geometry)
{
setWindowTitle("Set Model Properties");
QFormLayout* form = new QFormLayout();
setLayout(form);
form->addRow("Name:", _name = new QLineEdit());
form->addRow("Texture Directory:", _textureDirectory = new QPushButton());
connect(_textureDirectory, SIGNAL(clicked(bool)), SLOT(chooseTextureDirectory()));
form->addRow("Scale:", _scale = new QDoubleSpinBox());
_scale->setMaximum(FLT_MAX);
_scale->setSingleStep(0.01);
if (_modelType != FSTReader::ENTITY_MODEL) {
if (_modelType == FSTReader::ATTACHMENT_MODEL) {
QHBoxLayout* translation = new QHBoxLayout();
@ -57,7 +57,7 @@ _geometry(geometry)
form->addRow("Pivot Joint:", _pivotJoint = createJointBox());
connect(_pivotAboutCenter, SIGNAL(toggled(bool)), SLOT(updatePivotJoint()));
_pivotAboutCenter->setChecked(true);
} else {
form->addRow("Left Eye Joint:", _leftEyeJoint = createJointBox());
form->addRow("Right Eye Joint:", _rightEyeJoint = createJointBox());
@ -69,22 +69,22 @@ _geometry(geometry)
form->addRow("Head Joint:", _headJoint = createJointBox());
form->addRow("Left Hand Joint:", _leftHandJoint = createJointBox());
form->addRow("Right Hand Joint:", _rightHandJoint = createJointBox());
form->addRow("Free Joints:", _freeJoints = new QVBoxLayout());
QPushButton* newFreeJoint = new QPushButton("New Free Joint");
_freeJoints->addWidget(newFreeJoint);
connect(newFreeJoint, SIGNAL(clicked(bool)), SLOT(createNewFreeJoint()));
}
}
QDialogButtonBox* buttons = new QDialogButtonBox(QDialogButtonBox::Ok |
QDialogButtonBox::Cancel | QDialogButtonBox::Reset);
connect(buttons, SIGNAL(accepted()), SLOT(accept()));
connect(buttons, SIGNAL(rejected()), SLOT(reject()));
connect(buttons->button(QDialogButtonBox::Reset), SIGNAL(clicked(bool)), SLOT(reset()));
form->addRow(buttons);
// reset to initialize the fields
reset();
}
@ -100,42 +100,42 @@ QVariantHash ModelPropertiesDialog::getMapping() const {
mapping.insert(NAME_FIELD, _name->text());
mapping.insert(TEXDIR_FIELD, _textureDirectory->text());
mapping.insert(SCALE_FIELD, QString::number(_scale->value()));
// update the joint indices
QVariantHash jointIndices;
for (int i = 0; i < _geometry.joints.size(); i++) {
jointIndices.insert(_geometry.joints.at(i).name, QString::number(i));
}
mapping.insert(JOINT_INDEX_FIELD, jointIndices);
if (_modelType != FSTReader::ENTITY_MODEL) {
QVariantHash joints = mapping.value(JOINT_FIELD).toHash();
if (_modelType == FSTReader::ATTACHMENT_MODEL) {
glm::vec3 pivot;
if (_pivotAboutCenter->isChecked()) {
pivot = (_geometry.meshExtents.minimum + _geometry.meshExtents.maximum) * 0.5f;
} else if (_pivotJoint->currentIndex() != 0) {
pivot = extractTranslation(_geometry.joints.at(_pivotJoint->currentIndex() - 1).transform);
}
mapping.insert(TRANSLATION_X_FIELD, -pivot.x * _scale->value() + _translationX->value());
mapping.insert(TRANSLATION_Y_FIELD, -pivot.y * _scale->value() + _translationY->value());
mapping.insert(TRANSLATION_Z_FIELD, -pivot.z * _scale->value() + _translationZ->value());
mapping.insert(TRANSLATION_X_FIELD, -pivot.x * (float)_scale->value() + (float)_translationX->value());
mapping.insert(TRANSLATION_Y_FIELD, -pivot.y * (float)_scale->value() + (float)_translationY->value());
mapping.insert(TRANSLATION_Z_FIELD, -pivot.z * (float)_scale->value() + (float)_translationZ->value());
} else {
insertJointMapping(joints, "jointEyeLeft", _leftEyeJoint->currentText());
insertJointMapping(joints, "jointEyeRight", _rightEyeJoint->currentText());
insertJointMapping(joints, "jointNeck", _neckJoint->currentText());
}
if (_modelType == FSTReader::BODY_ONLY_MODEL || _modelType == FSTReader::HEAD_AND_BODY_MODEL) {
insertJointMapping(joints, "jointRoot", _rootJoint->currentText());
insertJointMapping(joints, "jointLean", _leanJoint->currentText());
insertJointMapping(joints, "jointHead", _headJoint->currentText());
insertJointMapping(joints, "jointLeftHand", _leftHandJoint->currentText());
insertJointMapping(joints, "jointRightHand", _rightHandJoint->currentText());
mapping.remove(FREE_JOINT_FIELD);
for (int i = 0; i < _freeJoints->count() - 1; i++) {
QComboBox* box = static_cast<QComboBox*>(_freeJoints->itemAt(i)->widget()->layout()->itemAt(0)->widget());
@ -144,7 +144,7 @@ QVariantHash ModelPropertiesDialog::getMapping() const {
}
mapping.insert(JOINT_FIELD, joints);
}
return mapping;
}
@ -156,9 +156,9 @@ void ModelPropertiesDialog::reset() {
_name->setText(_originalMapping.value(NAME_FIELD).toString());
_textureDirectory->setText(_originalMapping.value(TEXDIR_FIELD).toString());
_scale->setValue(_originalMapping.value(SCALE_FIELD).toDouble());
QVariantHash jointHash = _originalMapping.value(JOINT_FIELD).toHash();
if (_modelType != FSTReader::ENTITY_MODEL) {
if (_modelType == FSTReader::ATTACHMENT_MODEL) {
_translationX->setValue(_originalMapping.value(TRANSLATION_X_FIELD).toDouble());
@ -166,20 +166,20 @@ void ModelPropertiesDialog::reset() {
_translationZ->setValue(_originalMapping.value(TRANSLATION_Z_FIELD).toDouble());
_pivotAboutCenter->setChecked(true);
_pivotJoint->setCurrentIndex(0);
} else {
setJointText(_leftEyeJoint, jointHash.value("jointEyeLeft").toString());
setJointText(_rightEyeJoint, jointHash.value("jointEyeRight").toString());
setJointText(_neckJoint, jointHash.value("jointNeck").toString());
}
if (_modelType == FSTReader::BODY_ONLY_MODEL || _modelType == FSTReader::HEAD_AND_BODY_MODEL) {
setJointText(_rootJoint, jointHash.value("jointRoot").toString());
setJointText(_leanJoint, jointHash.value("jointLean").toString());
setJointText(_headJoint, jointHash.value("jointHead").toString());
setJointText(_leftHandJoint, jointHash.value("jointLeftHand").toString());
setJointText(_rightHandJoint, jointHash.value("jointRightHand").toString());
while (_freeJoints->count() > 1) {
delete _freeJoints->itemAt(0)->widget();
}

View file

@ -16,13 +16,13 @@
#include "InterfaceConfig.h"
#include "starfield/Controller.h"
Stars::Stars() :
Stars::Stars() :
_controller(0l), _starsLoaded(false) {
_controller = new starfield::Controller;
}
Stars::~Stars() {
delete _controller;
Stars::~Stars() {
delete _controller;
}
bool Stars::generate(unsigned numStars, unsigned seed) {
@ -30,22 +30,20 @@ bool Stars::generate(unsigned numStars, unsigned seed) {
return _starsLoaded;
}
bool Stars::setResolution(unsigned k) {
return _controller->setResolution(k);
bool Stars::setResolution(unsigned k) {
return _controller->setResolution(k);
}
void Stars::render(float fovY, float aspect, float nearZ, float alpha) {
// determine length of screen diagonal from quadrant height and aspect ratio
float quadrantHeight = nearZ * tan(RADIANS_PER_DEGREE * fovY * 0.5f);
float quadrantHeight = nearZ * tanf(RADIANS_PER_DEGREE * fovY * 0.5f);
float halfDiagonal = sqrt(quadrantHeight * quadrantHeight * (1.0f + aspect * aspect));
// determine fov angle in respect to the diagonal
float fovDiagonal = atan(halfDiagonal / nearZ) * 2.0f;
float fovDiagonal = atanf(halfDiagonal / nearZ) * 2.0f;
// pull the modelview matrix off the GL stack
glm::mat4 view; glGetFloatv(GL_MODELVIEW_MATRIX, glm::value_ptr(view));
glm::mat4 view; glGetFloatv(GL_MODELVIEW_MATRIX, glm::value_ptr(view));
_controller->render(fovDiagonal, aspect, glm::affineInverse(view), alpha);
}
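Stars.cpp swaps the double-precision tan/atan for their float counterparts tanf/atanf so the field-of-view math never leaves single precision. A standalone sketch of the same computation (the constants and the sqrtf call are illustrative, not lifted from the repository):

```cpp
#include <cmath>
#include <cstdio>

// Roughly the shape of Stars::render()'s field-of-view setup.
float diagonalFov(float fovY, float aspect, float nearZ) {
    const float RADIANS_PER_DEGREE = 3.14159265f / 180.0f;
    // tanf/atanf take and return float; tan/atan would force the
    // intermediate values through double and back.
    float quadrantHeight = nearZ * tanf(RADIANS_PER_DEGREE * fovY * 0.5f);
    float halfDiagonal = sqrtf(quadrantHeight * quadrantHeight * (1.0f + aspect * aspect));
    return atanf(halfDiagonal / nearZ) * 2.0f;
}

int main() {
    printf("fov: %5.3f rad\n", (double)diagonalFov(45.0f, 16.0f / 9.0f, 0.1f));
    return 0;
}
```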

View file

@ -70,6 +70,6 @@ void UIUtil::internalScaleWidgetFontSizes(QWidget* widget, float scale) {
}
QFont font = widget->font();
font.setPointSizeF(font.pointSizeF() * scale);
font.setPointSizeF(font.pointSizeF() * (double)scale);
widget->setFont(font);
}
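The UIUtil change goes the other direction: QFont::pointSizeF() works in double (qreal), so instead of narrowing, the float scale factor is promoted explicitly, which silences the warning without changing the arithmetic. A Qt-free sketch:

```cpp
#include <cstdio>

// Stand-in for QFont: the point size is stored in double precision, as with
// QFont::pointSizeF()/setPointSizeF().
struct Font {
    double pointSize = 12.0;
};

void scaleFont(Font& font, float scale) {
    // font.pointSize * scale would promote 'scale' implicitly; the cast
    // states the same promotion explicitly, so -Wdouble-promotion stays
    // quiet and the result is unchanged.
    font.pointSize = font.pointSize * (double)scale;
}

int main() {
    Font f;
    scaleFont(f, 1.5f);
    printf("%.1f\n", f.pointSize);
    return 0;
}
```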

View file

@ -41,7 +41,7 @@ void renderWorldBox(gpu::Batch& batch) {
static const glm::vec3 green(0.0f, 1.0f, 0.0f);
static const glm::vec3 blue(0.0f, 0.0f, 1.0f);
static const glm::vec3 grey(0.5f, 0.5f, 0.5f);
auto transform = Transform{};
batch.setModelTransform(transform);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(TREE_SCALE, 0.0f, 0.0f), red);
@ -49,13 +49,13 @@ void renderWorldBox(gpu::Batch& batch) {
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, TREE_SCALE), blue);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, TREE_SCALE), glm::vec3(TREE_SCALE, 0.0f, TREE_SCALE), grey);
geometryCache->renderLine(batch, glm::vec3(TREE_SCALE, 0.0f, TREE_SCALE), glm::vec3(TREE_SCALE, 0.0f, 0.0f), grey);
// Draw meter markers along the 3 axis to help with measuring things
const float MARKER_DISTANCE = 1.0f;
const float MARKER_RADIUS = 0.05f;
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, red);
transform.setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, 0.0f));
batch.setModelTransform(transform);
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, red);
@ -79,10 +79,10 @@ const glm::vec3 randVector() {
}
static TextRenderer* textRenderer(int mono) {
static TextRenderer* monoRenderer = TextRenderer::getInstance(MONO_FONT_FAMILY);
static TextRenderer* monoRenderer = TextRenderer::getInstance(MONO_FONT_FAMILY);
static TextRenderer* proportionalRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY,
-1, -1, false, TextRenderer::SHADOW_EFFECT);
static TextRenderer* inconsolataRenderer = TextRenderer::getInstance(INCONSOLATA_FONT_FAMILY, -1, INCONSOLATA_FONT_WEIGHT,
static TextRenderer* inconsolataRenderer = TextRenderer::getInstance(INCONSOLATA_FONT_FAMILY, -1, INCONSOLATA_FONT_WEIGHT,
false);
switch (mono) {
case 1:
@ -133,46 +133,46 @@ void runTimingTests() {
QElapsedTimer startTime;
startTime.start();
float elapsedUsecs;
float NSEC_TO_USEC = 1.0f / 1000.0f;
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
qCDebug(interfaceapp, "QElapsedTimer::nsecElapsed() usecs: %f", elapsedUsecs);
qCDebug(interfaceapp, "QElapsedTimer::nsecElapsed() usecs: %f", (double)elapsedUsecs);
// Test sleep functions for accuracy
startTime.start();
QThread::msleep(1);
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
qCDebug(interfaceapp, "QThread::msleep(1) ms: %f", elapsedUsecs / 1000.0f);
qCDebug(interfaceapp, "QThread::msleep(1) ms: %f", (double)(elapsedUsecs / 1000.0f));
startTime.start();
QThread::sleep(1);
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
qCDebug(interfaceapp, "QThread::sleep(1) ms: %f", elapsedUsecs / 1000.0f);
qCDebug(interfaceapp, "QThread::sleep(1) ms: %f", (double)(elapsedUsecs / 1000.0f));
startTime.start();
usleep(1);
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
qCDebug(interfaceapp, "usleep(1) ms: %f", elapsedUsecs / 1000.0f);
qCDebug(interfaceapp, "usleep(1) ms: %f", (double)(elapsedUsecs / 1000.0f));
startTime.start();
usleep(10);
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
qCDebug(interfaceapp, "usleep(10) ms: %f", elapsedUsecs / 1000.0f);
qCDebug(interfaceapp, "usleep(10) ms: %f", (double)(elapsedUsecs / 1000.0f));
startTime.start();
usleep(100);
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
qCDebug(interfaceapp, "usleep(100) ms: %f", elapsedUsecs / 1000.0f);
qCDebug(interfaceapp, "usleep(100) ms: %f", (double)(elapsedUsecs / 1000.0f));
startTime.start();
usleep(1000);
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
qCDebug(interfaceapp, "usleep(1000) ms: %f", elapsedUsecs / 1000.0f);
qCDebug(interfaceapp, "usleep(1000) ms: %f", (double)(elapsedUsecs / 1000.0f));
startTime.start();
usleep(15000);
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
qCDebug(interfaceapp, "usleep(15000) ms: %f", elapsedUsecs / 1000.0f);
qCDebug(interfaceapp, "usleep(15000) ms: %f", (double)(elapsedUsecs / 1000.0f));
// Random number generation
startTime.start();
@ -180,7 +180,8 @@ void runTimingTests() {
iResults[i] = rand();
}
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
qCDebug(interfaceapp, "rand() stored in array usecs: %f, first result:%d", elapsedUsecs / (float) numTests, iResults[0]);
qCDebug(interfaceapp, "rand() stored in array usecs: %f, first result:%d",
(double)(elapsedUsecs / numTests), iResults[0]);
// Random number generation using randFloat()
startTime.start();
@ -188,7 +189,8 @@ void runTimingTests() {
fResults[i] = randFloat();
}
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
qCDebug(interfaceapp, "randFloat() stored in array usecs: %f, first result: %f", elapsedUsecs / (float) numTests, fResults[0]);
qCDebug(interfaceapp, "randFloat() stored in array usecs: %f, first result: %f",
(double)(elapsedUsecs / numTests), (double)(fResults[0]));
free(iResults);
free(fResults);
@ -200,7 +202,7 @@ void runTimingTests() {
fTest = powf(fTest, 0.5f);
}
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
qCDebug(interfaceapp, "powf(f, 0.5) usecs: %f", elapsedUsecs / (float) numTests);
qCDebug(interfaceapp, "powf(f, 0.5) usecs: %f", (double)(elapsedUsecs / (float) numTests));
// Vector Math
float distance;
@ -213,19 +215,20 @@ void runTimingTests() {
}
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
qCDebug(interfaceapp, "vector math usecs: %f [%f usecs total for %d tests], last result:%f",
elapsedUsecs / (float) numTests, elapsedUsecs, numTests, distance);
(double)(elapsedUsecs / (float) numTests), (double)elapsedUsecs, numTests, (double)distance);
// Vec3 test
glm::vec3 vecA(randVector()), vecB(randVector());
float result;
startTime.start();
for (int i = 0; i < numTests; i++) {
glm::vec3 temp = vecA-vecB;
result = glm::dot(temp,temp);
}
elapsedUsecs = (float)startTime.nsecsElapsed() * NSEC_TO_USEC;
qCDebug(interfaceapp, "vec3 assign and dot() usecs: %f, last result:%f", elapsedUsecs / (float) numTests, result);
qCDebug(interfaceapp, "vec3 assign and dot() usecs: %f, last result:%f",
(double)(elapsedUsecs / numTests), (double)result);
}
bool rayIntersectsSphere(const glm::vec3& rayStarting, const glm::vec3& rayNormalizedDirection,

View file

@ -41,32 +41,32 @@ void AudioIOStatsRenderer::render(const float* color, int width, int height) {
if (!_isEnabled) {
return;
}
const int linesWhenCentered = _shouldShowInjectedStreams ? 34 : 27;
const int CENTERED_BACKGROUND_HEIGHT = STATS_HEIGHT_PER_LINE * linesWhenCentered;
int lines = _shouldShowInjectedStreams ? _stats->getMixerInjectedStreamStatsMap().size() * 7 + 27 : 27;
int statsHeight = STATS_HEIGHT_PER_LINE * lines;
static const glm::vec4 backgroundColor = { 0.2f, 0.2f, 0.2f, 0.6f };
int x = std::max((width - (int)STATS_WIDTH) / 2, 0);
int y = std::max((height - CENTERED_BACKGROUND_HEIGHT) / 2, 0);
int w = STATS_WIDTH;
int h = statsHeight;
DependencyManager::get<GeometryCache>()->renderQuad(x, y, w, h, backgroundColor);
int horizontalOffset = x + 5;
int verticalOffset = y;
float scale = 0.10f;
float rotation = 0.0f;
int font = 2;
char latencyStatString[512];
float audioInputBufferLatency = 0.0f, inputRingBufferLatency = 0.0f, networkRoundtripLatency = 0.0f, mixerRingBufferLatency = 0.0f, outputRingBufferLatency = 0.0f, audioOutputBufferLatency = 0.0f;
AudioStreamStats downstreamAudioStreamStats = _stats->getMixerDownstreamStats();
SharedNodePointer audioMixerNodePointer = DependencyManager::get<NodeList>()->soloNodeOfType(NodeType::AudioMixer);
if (!audioMixerNodePointer.isNull()) {
@ -78,48 +78,62 @@ void AudioIOStatsRenderer::render(const float* color, int width, int height) {
audioOutputBufferLatency = _stats->getAudioOutputMsecsUnplayedStats().getWindowAverage();
}
float totalLatency = audioInputBufferLatency + inputRingBufferLatency + networkRoundtripLatency + mixerRingBufferLatency + outputRingBufferLatency + audioOutputBufferLatency;
sprintf(latencyStatString, " Audio input buffer: %7.2fms - avg msecs of samples read to the input ring buffer in last 10s", audioInputBufferLatency);
sprintf(latencyStatString,
" Audio input buffer: %7.2fms - avg msecs of samples read to the input ring buffer in last 10s",
(double)audioInputBufferLatency);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
sprintf(latencyStatString, " Input ring buffer: %7.2fms - avg msecs of samples in input ring buffer in last 10s", inputRingBufferLatency);
sprintf(latencyStatString,
" Input ring buffer: %7.2fms - avg msecs of samples in input ring buffer in last 10s",
(double)inputRingBufferLatency);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
sprintf(latencyStatString, " Network to mixer: %7.2fms - half of last ping value calculated by the node list", networkRoundtripLatency / 2.0f);
sprintf(latencyStatString,
" Network to mixer: %7.2fms - half of last ping value calculated by the node list",
(double)(networkRoundtripLatency / 2.0f));
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
sprintf(latencyStatString, " AudioMixer ring buffer: %7.2fms - avg msecs of samples in audio mixer's ring buffer in last 10s", mixerRingBufferLatency);
sprintf(latencyStatString,
" AudioMixer ring buffer: %7.2fms - avg msecs of samples in audio mixer's ring buffer in last 10s",
(double)mixerRingBufferLatency);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
sprintf(latencyStatString, " Network to client: %7.2fms - half of last ping value calculated by the node list", networkRoundtripLatency / 2.0f);
sprintf(latencyStatString,
" Network to client: %7.2fms - half of last ping value calculated by the node list",
(double)(networkRoundtripLatency / 2.0f));
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
sprintf(latencyStatString, " Output ring buffer: %7.2fms - avg msecs of samples in output ring buffer in last 10s", outputRingBufferLatency);
sprintf(latencyStatString,
" Output ring buffer: %7.2fms - avg msecs of samples in output ring buffer in last 10s",
(double)outputRingBufferLatency);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
sprintf(latencyStatString, " Audio output buffer: %7.2fms - avg msecs of samples in audio output buffer in last 10s", audioOutputBufferLatency);
sprintf(latencyStatString,
" Audio output buffer: %7.2fms - avg msecs of samples in audio output buffer in last 10s",
(double)audioOutputBufferLatency);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
sprintf(latencyStatString, " TOTAL: %7.2fms\n", totalLatency);
sprintf(latencyStatString, " TOTAL: %7.2fms\n", (double)totalLatency);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
char clientUpstreamMicLabelString[] = "Upstream Mic Audio Packets Sent Gaps (by client):";
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, clientUpstreamMicLabelString, color);
const MovingMinMaxAvg<quint64>& packetSentTimeGaps = _stats->getPacketSentTimeGaps();
char stringBuffer[512];
sprintf(stringBuffer, " Inter-packet timegaps (overall) | min: %9s, max: %9s, avg: %9s",
formatUsecTime(packetSentTimeGaps.getMin()).toLatin1().data(),
@ -127,47 +141,47 @@ void AudioIOStatsRenderer::render(const float* color, int width, int height) {
formatUsecTime(packetSentTimeGaps.getAverage()).toLatin1().data());
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
sprintf(stringBuffer, " Inter-packet timegaps (last 30s) | min: %9s, max: %9s, avg: %9s",
formatUsecTime(packetSentTimeGaps.getWindowMin()).toLatin1().data(),
formatUsecTime(packetSentTimeGaps.getWindowMax()).toLatin1().data(),
formatUsecTime(packetSentTimeGaps.getWindowAverage()).toLatin1().data());
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
char upstreamMicLabelString[] = "Upstream mic audio stats (received and reported by audio-mixer):";
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, upstreamMicLabelString, color);
renderAudioStreamStats(&_stats->getMixerAvatarStreamStats(), horizontalOffset, verticalOffset,
scale, rotation, font, color);
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
char downstreamLabelString[] = "Downstream mixed audio stats:";
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, downstreamLabelString, color);
AudioStreamStats downstreamStats = _stats->getMixerDownstreamStats();
renderAudioStreamStats(&downstreamStats, horizontalOffset, verticalOffset,
scale, rotation, font, color, true);
if (_shouldShowInjectedStreams) {
foreach(const AudioStreamStats& injectedStreamAudioStats, _stats->getMixerInjectedStreamStatsMap()) {
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
char upstreamInjectedLabelString[512];
sprintf(upstreamInjectedLabelString, "Upstream injected audio stats: stream ID: %s",
injectedStreamAudioStats._streamIdentifier.toString().toLatin1().data());
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, upstreamInjectedLabelString, color);
renderAudioStreamStats(&injectedStreamAudioStats, horizontalOffset, verticalOffset, scale, rotation, font, color);
}
}
@ -175,22 +189,22 @@ void AudioIOStatsRenderer::render(const float* color, int width, int height) {
void AudioIOStatsRenderer::renderAudioStreamStats(const AudioStreamStats* streamStats, int horizontalOffset, int& verticalOffset,
float scale, float rotation, int font, const float* color, bool isDownstreamStats) {
char stringBuffer[512];
sprintf(stringBuffer, " Packet loss | overall: %5.2f%% (%d lost), last_30s: %5.2f%% (%d lost)",
streamStats->_packetStreamStats.getLostRate() * 100.0f,
(double)(streamStats->_packetStreamStats.getLostRate() * 100.0f),
streamStats->_packetStreamStats._lost,
streamStats->_packetStreamWindowStats.getLostRate() * 100.0f,
(double)(streamStats->_packetStreamWindowStats.getLostRate() * 100.0f),
streamStats->_packetStreamWindowStats._lost);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
if (isDownstreamStats) {
sprintf(stringBuffer, " Ringbuffer frames | desired: %u, avg_available(10s): %u+%d, available: %u+%d",
streamStats->_desiredJitterBufferFrames,
streamStats->_framesAvailableAverage,
(int)(_stats->getAudioInputMsecsReadStats().getWindowAverage() / AudioConstants::NETWORK_FRAME_MSECS),
(int)((float)_stats->getAudioInputMsecsReadStats().getWindowAverage() / AudioConstants::NETWORK_FRAME_MSECS),
streamStats->_framesAvailable,
(int)(_stats->getAudioOutputMsecsUnplayedStats().getCurrentIntervalLastSample()
/ AudioConstants::NETWORK_FRAME_MSECS));
@ -200,10 +214,10 @@ void AudioIOStatsRenderer::renderAudioStreamStats(const AudioStreamStats* stream
streamStats->_framesAvailableAverage,
streamStats->_framesAvailable);
}
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
sprintf(stringBuffer, " Ringbuffer stats | starves: %u, prev_starve_lasted: %u, frames_dropped: %u, overflows: %u",
streamStats->_starveCount,
streamStats->_consecutiveNotMixedCount,
@ -211,18 +225,18 @@ void AudioIOStatsRenderer::renderAudioStreamStats(const AudioStreamStats* stream
streamStats->_overflowCount);
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
sprintf(stringBuffer, " Inter-packet timegaps (overall) | min: %9s, max: %9s, avg: %9s",
formatUsecTime(streamStats->_timeGapMin).toLatin1().data(),
formatUsecTime(streamStats->_timeGapMax).toLatin1().data(),
formatUsecTime(streamStats->_timeGapAverage).toLatin1().data());
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
sprintf(stringBuffer, " Inter-packet timegaps (last 30s) | min: %9s, max: %9s, avg: %9s",
formatUsecTime(streamStats->_timeGapWindowMin).toLatin1().data(),
formatUsecTime(streamStats->_timeGapWindowMax).toLatin1().data(),
formatUsecTime(streamStats->_timeGapWindowAverage).toLatin1().data());
verticalOffset += STATS_HEIGHT_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
}
}
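The (double) casts introduced above target the most common -Wdouble-promotion site in this commit: printf-style varargs. A float passed through "..." undergoes the default argument promotions and becomes a double, and once the warning is enabled GCC and Clang report that implicit promotion; casting the argument explicitly passes the same value to %f while silencing the diagnostic. A minimal standalone sketch of the pattern (not taken from the repository):

    #include <cstdio>

    int main() {
        float lostRate = 0.0375f;
        // printf("%5.2f%%\n", lostRate * 100.0f);           // warns: float implicitly promoted to double
        printf("%5.2f%%\n", (double)(lostRate * 100.0f));     // same value passed, promotion made explicit
        return 0;
    }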

View file

@ -691,16 +691,18 @@ float Avatar::calculateDisplayNameScaleFactor(const glm::vec3& textPosition, boo
glm::dvec4 p0 = modelViewMatrix * glm::dvec4(testPoint0, 1.0);
p0 = projectionMatrix * p0;
glm::dvec2 result0 = glm::vec2(windowSizeX * (p0.x / p0.w + 1.0f) * 0.5f, windowSizeY * (p0.y / p0.w + 1.0f) * 0.5f);
glm::dvec2 result0 = glm::vec2(windowSizeX * ((float)p0.x / (float)p0.w + 1.0f) * 0.5f,
windowSizeY * ((float)p0.y / (float)p0.w + 1.0f) * 0.5f);
glm::dvec4 p1 = modelViewMatrix * glm::dvec4(testPoint1, 1.0);
p1 = projectionMatrix * p1;
glm::vec2 result1 = glm::vec2(windowSizeX * (p1.x / p1.w + 1.0f) * 0.5f, windowSizeY * (p1.y / p1.w + 1.0f) * 0.5f);
textWindowHeight = abs(result1.y - result0.y);
glm::vec2 result1 = glm::vec2(windowSizeX * ((float)p1.x / (float)p1.w + 1.0f) * 0.5f,
windowSizeY * ((float)p1.y / (float)p1.w + 1.0f) * 0.5f);
textWindowHeight = fabs((double)result1.y - (double)result0.y);
// need to scale to compensate for the font resolution due to the device
float scaleFactor = QApplication::desktop()->windowHandle()->devicePixelRatio() *
((textWindowHeight > EPSILON) ? 1.0f / textWindowHeight : 1.0f);
float scaleFactor = (float)QApplication::desktop()->windowHandle()->devicePixelRatio() *
(((float)textWindowHeight > EPSILON) ? 1.0f / (float)textWindowHeight : 1.0f);
if (inHMD) {
const float HMDMODE_NAME_SCALE = 0.65f;
scaleFactor *= HMDMODE_NAME_SCALE;

View file

@ -13,8 +13,18 @@
#include <QScriptEngine>
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdouble-promotion"
#endif
#include <glm/gtx/string_cast.hpp>
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#include <GlowEffect.h>
#include <PerfStat.h>
#include <RegisteredMetaTypes.h>
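The push/ignored/pop wrapper above is there because glm/gtx/string_cast.hpp is a third-party header whose inline code would otherwise raise -Wdouble-promotion in every translation unit that includes it, and the project cannot patch glm itself. The pragmas turn the warning off for that one include and restore it immediately afterwards, so the project's own code stays checked. A generalized sketch of the same idea, with a hypothetical header name:

    #ifdef __GNUC__                                      // GCC and Clang both honour these pragmas
    #pragma GCC diagnostic push                          // save the current diagnostic state
    #pragma GCC diagnostic ignored "-Wdouble-promotion"
    #endif
    #include <thirdparty/noisy_header.hpp>               // hypothetical header we cannot modify
    #ifdef __GNUC__
    #pragma GCC diagnostic pop                           // re-enable the warning for our own code
    #endif

Marking the offending include directory as a system directory (-isystem) is another way to keep warnings from third-party headers out of the build output.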

View file

@ -77,7 +77,7 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
const float AUDIO_LONG_TERM_AVERAGING_SECS = 30.0f;
_averageLoudness = glm::mix(_averageLoudness, _audioLoudness, glm::min(deltaTime / AUDIO_AVERAGING_SECS, 1.0f));
if (_longTermAverageLoudness == -1.0) {
if (_longTermAverageLoudness == -1.0f) {
_longTermAverageLoudness = _averageLoudness;
} else {
_longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));

View file

@ -59,10 +59,10 @@ const float MAX_WALKING_SPEED = 2.5f; // human walking speed
const float MAX_BOOST_SPEED = 0.5f * MAX_WALKING_SPEED; // keyboard motor gets additive boost below this speed
const float MIN_AVATAR_SPEED = 0.05f; // speed is set to zero below this
// TODO: normalize avatar speed for standard avatar size, then scale all motion logic
// TODO: normalize avatar speed for standard avatar size, then scale all motion logic
// to properly follow avatar size.
float MAX_AVATAR_SPEED = 300.0f;
float MAX_KEYBOARD_MOTOR_SPEED = MAX_AVATAR_SPEED;
float MAX_KEYBOARD_MOTOR_SPEED = MAX_AVATAR_SPEED;
float DEFAULT_KEYBOARD_MOTOR_TIMESCALE = 0.25f;
float MIN_SCRIPTED_MOTOR_TIMESCALE = 0.005f;
float DEFAULT_SCRIPTED_MOTOR_TIMESCALE = 1.0e6f;
@ -142,7 +142,7 @@ void MyAvatar::update(float deltaTime) {
if (_referential) {
_referential->update();
}
Head* head = getHead();
head->relaxLean(deltaTime);
updateFromTrackers(deltaTime);
@ -159,12 +159,12 @@ void MyAvatar::update(float deltaTime) {
void MyAvatar::simulate(float deltaTime) {
PerformanceTimer perfTimer("simulate");
// Play back recording
if (_player && _player->isPlaying()) {
_player->play();
}
if (_scale != _targetScale) {
float scale = (1.0f - SMOOTHING_RATIO) * _scale + SMOOTHING_RATIO * _targetScale;
setScale(scale);
@ -175,7 +175,7 @@ void MyAvatar::simulate(float deltaTime) {
updateOrientation(deltaTime);
updatePosition(deltaTime);
}
{
PerformanceTimer perfTimer("hand");
// update avatar skeleton and simulate hand and head
@ -218,12 +218,12 @@ void MyAvatar::simulate(float deltaTime) {
head->setScale(_scale);
head->simulate(deltaTime, true);
}
// Record avatars movements.
if (_recorder && _recorder->isRecording()) {
_recorder->record();
}
// consider updating our billboard
maybeUpdateBillboard();
}
@ -231,18 +231,18 @@ void MyAvatar::simulate(float deltaTime) {
// Update avatar head rotation with sensor data
void MyAvatar::updateFromTrackers(float deltaTime) {
glm::vec3 estimatedPosition, estimatedRotation;
bool inHmd = qApp->isHMDMode();
if (isPlaying() && inHmd) {
return;
}
if (inHmd) {
estimatedPosition = qApp->getHeadPosition();
estimatedPosition = qApp->getHeadPosition();
estimatedPosition.x *= -1.0f;
_trackedHeadPosition = estimatedPosition;
const float OCULUS_LEAN_SCALE = 0.05f;
estimatedPosition /= OCULUS_LEAN_SCALE;
} else {
@ -253,7 +253,7 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
estimatedRotation = glm::degrees(safeEulerAngles(tracker->getHeadRotation()));
}
}
// Rotate the body if the head is turned beyond the screen
if (Menu::getInstance()->isOptionChecked(MenuOption::TurnWithHead)) {
const float TRACKER_YAW_TURN_SENSITIVITY = 0.5f;
@ -313,7 +313,7 @@ void MyAvatar::renderDebugBodyPoints() {
glm::vec3 headPosition(getHead()->getEyePosition());
float torsoToHead = glm::length(headPosition - torsoPosition);
glm::vec3 position;
qCDebug(interfaceapp, "head-above-torso %.2f, scale = %0.2f", torsoToHead, getScale());
qCDebug(interfaceapp, "head-above-torso %.2f, scale = %0.2f", (double)torsoToHead, (double)getScale());
// Torso Sphere
position = torsoPosition;
@ -429,7 +429,7 @@ void MyAvatar::startRecording() {
auto audioClient = DependencyManager::get<AudioClient>();
connect(audioClient.data(), &AudioClient::inputReceived, _recorder.data(),
&Recorder::recordAudio, Qt::BlockingQueuedConnection);
_recorder->startRecording();
}
@ -445,7 +445,7 @@ void MyAvatar::stopRecording() {
// stop grabbing audio from the AudioClient
auto audioClient = DependencyManager::get<AudioClient>();
disconnect(audioClient.data(), 0, _recorder.data(), 0);
_recorder->stopRecording();
}
}
@ -477,7 +477,7 @@ void MyAvatar::loadLastRecording() {
if (!_player) {
_player = PlayerPointer(new Player(this));
}
_player->loadRecording(_recorder->getRecording());
}
@ -569,7 +569,7 @@ AnimationDetails MyAvatar::getAnimationDetailsByRole(const QString& role) {
AnimationDetails result;
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "getAnimationDetailsByRole", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(AnimationDetails, result),
Q_RETURN_ARG(AnimationDetails, result),
Q_ARG(const QString&, role));
return result;
}
@ -586,7 +586,7 @@ AnimationDetails MyAvatar::getAnimationDetails(const QString& url) {
AnimationDetails result;
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "getAnimationDetails", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(AnimationDetails, result),
Q_RETURN_ARG(AnimationDetails, result),
Q_ARG(const QString&, url));
return result;
}
@ -634,7 +634,7 @@ void MyAvatar::saveData() {
settings.setValue("scale", attachment.scale);
}
settings.endArray();
settings.beginWriteArray("animationHandles");
for (int i = 0; i < _animationHandles.size(); i++) {
settings.setArrayIndex(i);
@ -651,7 +651,7 @@ void MyAvatar::saveData() {
settings.setValue("maskedJoints", pointer->getMaskedJoints());
}
settings.endArray();
settings.setValue("displayName", _displayName);
settings.endGroup();
@ -677,7 +677,7 @@ void MyAvatar::loadData() {
_targetScale = loadSetting(settings, "scale", 1.0f);
setScale(_scale);
// The old preferences only stored the face and skeleton URLs, we didn't track if the user wanted to use 1 or 2 urls
// The old preferences only stored the face and skeleton URLs, we didn't track if the user wanted to use 1 or 2 urls
// for their avatar, So we need to attempt to detect this old case and set our new preferences accordingly. If
// the head URL is empty, then we will assume they are using a full url...
bool isOldSettings = !(settings.contains("useFullAvatar") || settings.contains("fullAvatarURL"));
@ -689,7 +689,7 @@ void MyAvatar::loadData() {
_headModelName = settings.value("headModelName", DEFAULT_HEAD_MODEL_NAME).toString();
_bodyModelName = settings.value("bodyModelName", DEFAULT_BODY_MODEL_NAME).toString();
_fullAvatarModelName = settings.value("fullAvatarModelName", DEFAULT_FULL_AVATAR_MODEL_NAME).toString();
if (isOldSettings) {
bool assumeFullAvatar = _headURLFromPreferences.isEmpty();
_useFullAvatar = assumeFullAvatar;
@ -715,14 +715,14 @@ void MyAvatar::loadData() {
QVariantHash bodyFST = FSTReader::downloadMapping(_skeletonURLFromPreferences.toString());
_bodyModelName = bodyFST["name"].toString();
}
if (_headURLFromPreferences == DEFAULT_HEAD_MODEL_URL) {
_headModelName = DEFAULT_HEAD_MODEL_NAME;
} else {
QVariantHash headFST = FSTReader::downloadMapping(_headURLFromPreferences.toString());
_headModelName = headFST["name"].toString();
}
_fullAvatarModelName = "Default";
}
}
@ -732,7 +732,7 @@ void MyAvatar::loadData() {
} else {
useHeadAndBodyURLs(_headURLFromPreferences, _skeletonURLFromPreferences, _headModelName, _bodyModelName);
}
QVector<AttachmentData> attachmentData;
int attachmentCount = settings.beginReadArray("attachmentData");
for (int i = 0; i < attachmentCount; i++) {
@ -753,7 +753,7 @@ void MyAvatar::loadData() {
}
settings.endArray();
setAttachmentData(attachmentData);
int animationCount = settings.beginReadArray("animationHandles");
while (_animationHandles.size() > animationCount) {
_animationHandles.takeLast()->stop();
@ -776,7 +776,7 @@ void MyAvatar::loadData() {
handle->setStartAutomatically(settings.value("startAutomatically", true).toBool());
}
settings.endArray();
setDisplayName(settings.value("displayName").toString());
settings.endGroup();
@ -788,7 +788,7 @@ void MyAvatar::saveAttachmentData(const AttachmentData& attachment) const {
settings.beginGroup(_skeletonModel.getURL().toString());
settings.beginGroup(attachment.modelURL.toString());
settings.setValue("jointName", attachment.jointName);
settings.beginGroup(attachment.jointName);
settings.setValue("translation_x", attachment.translation.x);
settings.setValue("translation_y", attachment.translation.y);
@ -798,7 +798,7 @@ void MyAvatar::saveAttachmentData(const AttachmentData& attachment) const {
settings.setValue("rotation_y", eulers.y);
settings.setValue("rotation_z", eulers.z);
settings.setValue("scale", attachment.scale);
settings.endGroup();
settings.endGroup();
settings.endGroup();
@ -810,7 +810,7 @@ AttachmentData MyAvatar::loadAttachmentData(const QUrl& modelURL, const QString&
settings.beginGroup("savedAttachmentData");
settings.beginGroup(_skeletonModel.getURL().toString());
settings.beginGroup(modelURL.toString());
AttachmentData attachment;
attachment.modelURL = modelURL;
if (jointName.isEmpty()) {
@ -832,18 +832,18 @@ AttachmentData MyAvatar::loadAttachmentData(const QUrl& modelURL, const QString&
} else {
attachment = AttachmentData();
}
settings.endGroup();
settings.endGroup();
settings.endGroup();
settings.endGroup();
return attachment;
}
int MyAvatar::parseDataAtOffset(const QByteArray& packet, int offset) {
qCDebug(interfaceapp) << "Error: ignoring update packet for MyAvatar"
<< " packetLength = " << packet.size()
<< " packetLength = " << packet.size()
<< " offset = " << offset;
// this packet is just bad, so we pretend that we unpacked it ALL
return packet.size() - offset;
@ -861,14 +861,14 @@ void MyAvatar::updateLookAtTargetAvatar() {
//
_lookAtTargetAvatar.reset();
_targetAvatarPosition = glm::vec3(0.0f);
glm::vec3 lookForward = getHead()->getFinalOrientationInWorldFrame() * IDENTITY_FRONT;
glm::vec3 cameraPosition = Application::getInstance()->getCamera()->getPosition();
float smallestAngleTo = glm::radians(DEFAULT_FIELD_OF_VIEW_DEGREES) / 2.0f;
const float KEEP_LOOKING_AT_CURRENT_ANGLE_FACTOR = 1.3f;
const float GREATEST_LOOKING_AT_DISTANCE = 10.0f;
int howManyLookingAtMe = 0;
foreach (const AvatarSharedPointer& avatarPointer, DependencyManager::get<AvatarManager>()->getAvatarHash()) {
Avatar* avatar = static_cast<Avatar*>(avatarPointer.get());
@ -1000,13 +1000,13 @@ void MyAvatar::useFullAvatarURL(const QUrl& fullAvatarURL, const QString& modelN
}
_useFullAvatar = true;
if (_fullAvatarURLFromPreferences != fullAvatarURL) {
_fullAvatarURLFromPreferences = fullAvatarURL;
if (modelName.isEmpty()) {
QVariantHash fullAvatarFST = FSTReader::downloadMapping(_fullAvatarURLFromPreferences.toString());
_fullAvatarModelName = fullAvatarFST["name"].toString();
} else {
} else {
_fullAvatarModelName = modelName;
}
}
@ -1047,7 +1047,7 @@ void MyAvatar::useHeadAndBodyURLs(const QUrl& headURL, const QUrl& bodyURL, cons
if (headName.isEmpty()) {
QVariantHash headFST = FSTReader::downloadMapping(_headURLFromPreferences.toString());
_headModelName = headFST["name"].toString();
} else {
} else {
_headModelName = headName;
}
}
@ -1057,7 +1057,7 @@ void MyAvatar::useHeadAndBodyURLs(const QUrl& headURL, const QUrl& bodyURL, cons
if (bodyName.isEmpty()) {
QVariantHash bodyFST = FSTReader::downloadMapping(_skeletonURLFromPreferences.toString());
_bodyModelName = bodyFST["name"].toString();
} else {
} else {
_bodyModelName = bodyName;
}
}
@ -1088,7 +1088,7 @@ void MyAvatar::setAttachmentData(const QVector<AttachmentData>& attachmentData)
glm::vec3 MyAvatar::getSkeletonPosition() const {
CameraMode mode = Application::getInstance()->getCamera()->getMode();
if (mode == CAMERA_MODE_THIRD_PERSON || mode == CAMERA_MODE_INDEPENDENT) {
// The avatar is rotated PI about the yAxis, so we have to correct for it
// The avatar is rotated PI about the yAxis, so we have to correct for it
// to get the skeleton offset contribution in the world-frame.
const glm::quat FLIP = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
glm::vec3 skeletonOffset = _skeletonOffset;
@ -1131,9 +1131,9 @@ void MyAvatar::setScriptedMotorVelocity(const glm::vec3& velocity) {
}
void MyAvatar::setScriptedMotorTimescale(float timescale) {
// we clamp the timescale on the large side (instead of just the low side) to prevent
// we clamp the timescale on the large side (instead of just the low side) to prevent
// obnoxiously large values from introducing NaN into avatar's velocity
_scriptedMotorTimescale = glm::clamp(timescale, MIN_SCRIPTED_MOTOR_TIMESCALE,
_scriptedMotorTimescale = glm::clamp(timescale, MIN_SCRIPTED_MOTOR_TIMESCALE,
DEFAULT_SCRIPTED_MOTOR_TIMESCALE);
}
@ -1151,14 +1151,14 @@ void MyAvatar::clearScriptableSettings() {
clearJointAnimationPriorities();
_scriptedMotorVelocity = glm::vec3(0.0f);
_scriptedMotorTimescale = DEFAULT_SCRIPTED_MOTOR_TIMESCALE;
}
}
void MyAvatar::attach(const QString& modelURL, const QString& jointName, const glm::vec3& translation,
const glm::quat& rotation, float scale, bool allowDuplicates, bool useSaved) {
if (QThread::currentThread() != thread()) {
if (QThread::currentThread() != thread()) {
Avatar::attach(modelURL, jointName, translation, rotation, scale, allowDuplicates, useSaved);
return;
}
}
if (useSaved) {
AttachmentData attachment = loadAttachmentData(modelURL, jointName);
if (attachment.isValid()) {
@ -1195,7 +1195,7 @@ const float RENDER_HEAD_CUTOFF_DISTANCE = 0.50f;
bool MyAvatar::shouldRenderHead(const RenderArgs* renderArgs, const glm::vec3& cameraPosition) const {
const Head* head = getHead();
return (renderArgs->_renderMode != RenderArgs::NORMAL_RENDER_MODE) || (Application::getInstance()->getCamera()->getMode() != CAMERA_MODE_FIRST_PERSON) ||
return (renderArgs->_renderMode != RenderArgs::NORMAL_RENDER_MODE) || (Application::getInstance()->getCamera()->getMode() != CAMERA_MODE_FIRST_PERSON) ||
(glm::length(cameraPosition - head->getEyePosition()) > RENDER_HEAD_CUTOFF_DISTANCE * _scale);
}
@ -1245,10 +1245,10 @@ void MyAvatar::updateOrientation(float deltaTime) {
//Invert yaw and roll when in mirror mode
if (Application::getInstance()->getCamera()->getMode() == CAMERA_MODE_MIRROR) {
YAW(euler) *= -1.0;
ROLL(euler) *= -1.0;
YAW(euler) *= -1.0f;
ROLL(euler) *= -1.0f;
}
Head* head = getHead();
head->setBaseYaw(YAW(euler));
head->setBasePitch(PITCH(euler));
@ -1261,9 +1261,9 @@ glm::vec3 MyAvatar::applyKeyboardMotor(float deltaTime, const glm::vec3& localVe
return localVelocity;
}
// compute motor efficiency
// The timescale of the motor is the approximate time it takes for the motor to
// accomplish its intended localVelocity. A short timescale makes the motor strong,
// and a long timescale makes it weak. The value of timescale to use depends
// The timescale of the motor is the approximate time it takes for the motor to
// accomplish its intended localVelocity. A short timescale makes the motor strong,
// and a long timescale makes it weak. The value of timescale to use depends
// on what the motor is doing:
//
// (1) braking --> short timescale (aggressive motor assertion)
@ -1274,8 +1274,8 @@ glm::vec3 MyAvatar::applyKeyboardMotor(float deltaTime, const glm::vec3& localVe
float MIN_KEYBOARD_BRAKE_SPEED = 0.3f;
float timescale = MAX_KEYBOARD_MOTOR_TIMESCALE;
bool isThrust = (glm::length2(_thrust) > EPSILON);
if (_isPushing || isThrust ||
(_scriptedMotorTimescale < MAX_KEYBOARD_MOTOR_TIMESCALE &&
if (_isPushing || isThrust ||
(_scriptedMotorTimescale < MAX_KEYBOARD_MOTOR_TIMESCALE &&
_motionBehaviors | AVATAR_MOTION_SCRIPTED_MOTOR_ENABLED)) {
// we don't want to brake if something is pushing the avatar around
timescale = _keyboardMotorTimescale;
@ -1292,18 +1292,18 @@ glm::vec3 MyAvatar::applyKeyboardMotor(float deltaTime, const glm::vec3& localVe
float motorEfficiency = glm::clamp(deltaTime / timescale, 0.0f, 1.0f);
glm::vec3 newLocalVelocity = localVelocity;
float keyboardInput = fabsf(_driveKeys[FWD] - _driveKeys[BACK]) +
(fabsf(_driveKeys[RIGHT] - _driveKeys[LEFT])) +
float keyboardInput = fabsf(_driveKeys[FWD] - _driveKeys[BACK]) +
(fabsf(_driveKeys[RIGHT] - _driveKeys[LEFT])) +
fabsf(_driveKeys[UP] - _driveKeys[DOWN]);
if (keyboardInput) {
// Compute keyboard input
glm::vec3 front = (_driveKeys[FWD] - _driveKeys[BACK]) * IDENTITY_FRONT;
glm::vec3 right = (_driveKeys[RIGHT] - _driveKeys[LEFT]) * IDENTITY_RIGHT;
glm::vec3 up = (_driveKeys[UP] - _driveKeys[DOWN]) * IDENTITY_UP;
glm::vec3 direction = front + right + up;
float directionLength = glm::length(direction);
// Compute motor magnitude
if (directionLength > EPSILON) {
direction /= directionLength;
@ -1333,7 +1333,7 @@ glm::vec3 MyAvatar::applyKeyboardMotor(float deltaTime, const glm::vec3& localVe
motorEfficiency = glm::clamp(deltaTime / WALK_ACCELERATION_TIMESCALE, 0.0f, 1.0f);
}
_isPushing = true;
}
}
newLocalVelocity = localVelocity + motorEfficiency * (_keyboardMotorVelocity - localVelocity);
} else {
_keyboardMotorVelocity = glm::vec3(0.0f);
@ -1394,13 +1394,13 @@ void MyAvatar::updatePosition(float deltaTime) {
_targetVelocity *= MAX_AVATAR_SPEED / speed;
speed = MAX_AVATAR_SPEED;
}
if (speed > MIN_AVATAR_SPEED && !_characterController.isEnabled()) {
// update position ourselves
applyPositionDelta(deltaTime * _targetVelocity);
measureMotionDerivatives(deltaTime);
} // else physics will move avatar later
// update _moving flag based on speed
const float MOVING_SPEED_THRESHOLD = 0.01f;
_moving = speed > MOVING_SPEED_THRESHOLD;
@ -1417,7 +1417,7 @@ bool findAvatarAvatarPenetration(const glm::vec3 positionA, float radiusA, float
float xzDistance = sqrt(positionBA.x * positionBA.x + positionBA.z * positionBA.z);
if (xzDistance < (radiusA + radiusB)) {
float yDistance = fabs(positionBA.y);
float halfHeights = 0.5 * (heightA + heightB);
float halfHeights = 0.5f * (heightA + heightB);
if (yDistance < halfHeights) {
// cylinders collide
if (xzDistance > 0.0f) {
@ -1466,54 +1466,54 @@ void MyAvatar::maybeUpdateBillboard() {
buffer.open(QIODevice::WriteOnly);
image.save(&buffer, "PNG");
_billboardValid = true;
sendBillboardPacket();
}
void MyAvatar::increaseSize() {
if ((1.0f + SCALING_RATIO) * _targetScale < MAX_AVATAR_SCALE) {
_targetScale *= (1.0f + SCALING_RATIO);
qCDebug(interfaceapp, "Changed scale to %f", _targetScale);
qCDebug(interfaceapp, "Changed scale to %f", (double)_targetScale);
}
}
void MyAvatar::decreaseSize() {
if (MIN_AVATAR_SCALE < (1.0f - SCALING_RATIO) * _targetScale) {
_targetScale *= (1.0f - SCALING_RATIO);
qCDebug(interfaceapp, "Changed scale to %f", _targetScale);
qCDebug(interfaceapp, "Changed scale to %f", (double)_targetScale);
}
}
void MyAvatar::resetSize() {
_targetScale = 1.0f;
qCDebug(interfaceapp, "Reseted scale to %f", _targetScale);
qCDebug(interfaceapp, "Reseted scale to %f", (double)_targetScale);
}
void MyAvatar::goToLocation(const glm::vec3& newPosition,
bool hasOrientation, const glm::quat& newOrientation,
bool shouldFaceLocation) {
qCDebug(interfaceapp).nospace() << "MyAvatar goToLocation - moving to " << newPosition.x << ", "
<< newPosition.y << ", " << newPosition.z;
glm::vec3 shiftedPosition = newPosition;
if (hasOrientation) {
qCDebug(interfaceapp).nospace() << "MyAvatar goToLocation - new orientation is "
<< newOrientation.x << ", " << newOrientation.y << ", " << newOrientation.z << ", " << newOrientation.w;
// orient the user to face the target
glm::quat quatOrientation = newOrientation;
if (shouldFaceLocation) {
quatOrientation = newOrientation * glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
// move the user a couple units away
const float DISTANCE_TO_USER = 2.0f;
shiftedPosition = newPosition - quatOrientation * IDENTITY_FRONT * DISTANCE_TO_USER;
}
setOrientation(quatOrientation);
}
@ -1581,8 +1581,8 @@ void MyAvatar::clearDriveKeys() {
}
}
void MyAvatar::relayDriveKeysToCharacterController() {
void MyAvatar::relayDriveKeysToCharacterController() {
if (_driveKeys[UP] > 0.0f) {
_characterController.jump();
_characterController.jump();
}
}
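Most of the one-character changes in this file (-1.0 to -1.0f, 0.5 to 0.5f, and so on) follow a single rule: a double literal mixed into float arithmetic promotes the whole expression to double before the result is narrowed back, and -Wdouble-promotion flags the promotion. Suffixing the literal with f keeps the computation in single precision. A minimal sketch, with a made-up helper name, not taken from the commit:

    // halfHeights exists only to illustrate the literal-suffix rule.
    float halfHeights(float heightA, float heightB) {
        // return 0.5 * (heightA + heightB);    // 0.5 is a double: the sum is promoted, multiplied
        //                                      // in double, then narrowed back to float
        return 0.5f * (heightA + heightB);      // stays in float throughout
    }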

View file

@ -415,7 +415,7 @@ void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
float browUp = _coefficients[_browUpCenterIndex];
if (isFiltering) {
const float BROW_VELOCITY_FILTER_STRENGTH = 0.5f;
float velocity = fabs(browUp - _lastBrowUp) / _averageMessageTime;
float velocity = fabsf(browUp - _lastBrowUp) / _averageMessageTime;
float velocityFilter = glm::clamp(velocity * BROW_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f);
_filteredBrowUp = velocityFilter * browUp + (1.0f - velocityFilter) * _filteredBrowUp;
_lastBrowUp = browUp;
@ -438,11 +438,12 @@ void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
// Velocity filter EyeBlink values
const float DDE_EYEBLINK_SCALE = 3.0f;
float eyeBlinks[] = { DDE_EYEBLINK_SCALE * _coefficients[_leftBlinkIndex], DDE_EYEBLINK_SCALE * _coefficients[_rightBlinkIndex] };
float eyeBlinks[] = { DDE_EYEBLINK_SCALE * _coefficients[_leftBlinkIndex],
DDE_EYEBLINK_SCALE * _coefficients[_rightBlinkIndex] };
if (isFiltering) {
const float BLINK_VELOCITY_FILTER_STRENGTH = 0.3f;
for (int i = 0; i < 2; i++) {
float velocity = fabs(eyeBlinks[i] - _lastEyeBlinks[i]) / _averageMessageTime;
float velocity = fabsf(eyeBlinks[i] - _lastEyeBlinks[i]) / _averageMessageTime;
float velocityFilter = glm::clamp(velocity * BLINK_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f);
_filteredEyeBlinks[i] = velocityFilter * eyeBlinks[i] + (1.0f - velocityFilter) * _filteredEyeBlinks[i];
_lastEyeBlinks[i] = eyeBlinks[i];
@ -479,9 +480,9 @@ void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
if (_eyeStates[i] == EYE_CLOSING) {
// Close eyelid until it's fully closed
float closingValue = _lastEyeCoefficients[i] + EYELID_MOVEMENT_RATE * _averageMessageTime;
if (closingValue >= 1.0) {
if (closingValue >= 1.0f) {
_eyeStates[i] = EYE_CLOSED;
eyeCoefficients[i] = 1.0;
eyeCoefficients[i] = 1.0f;
} else {
eyeCoefficients[i] = closingValue;
}
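The blink-smoothing hunk above uses a velocity-sensitive low-pass filter: the faster the raw coefficient is changing, the more weight the new sample receives, so quick blinks come through with little latency while slow drift is smoothed away. A small standalone sketch of one filter step, with made-up names, following the same formula as the code above:

    #include <algorithm>
    #include <cmath>

    // 'strength' plays the role of BLINK_VELOCITY_FILTER_STRENGTH in the diff.
    float velocityFilterStep(float raw, float last, float filtered, float averageDt, float strength) {
        float velocity = std::fabs(raw - last) / averageDt;              // how fast the input is moving
        float k = std::min(std::max(velocity * strength, 0.0f), 1.0f);   // clamp to [0, 1], like glm::clamp
        return k * raw + (1.0f - k) * filtered;                          // blend new sample with history
    }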

View file

@ -17,7 +17,7 @@
#include <QDesktopWidget>
#include <QGuiApplication>
#include <QScreen>
#include <QScreen>
#include <QOpenGLTimerQuery>
#include <glm/glm.hpp>
@ -214,7 +214,7 @@ void OculusManager::connect() {
#endif
} else {
_isConnected = false;
// we're definitely not in "VR mode" so tell the menu that
Menu::getInstance()->getActionForOption(MenuOption::EnableVRMode)->setChecked(false);
}
@ -346,7 +346,7 @@ void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) {
break;
default:
break;
}
}
@ -395,7 +395,7 @@ void OculusManager::generateDistortionMesh() {
v->texB.y = ov->TanEyeAnglesB.y;
v->color.r = v->color.g = v->color.b = (GLubyte)(ov->VignetteFactor * 255.99f);
v->color.a = (GLubyte)(ov->TimeWarpFactor * 255.99f);
v++;
v++;
ov++;
}
@ -410,7 +410,7 @@ void OculusManager::generateDistortionMesh() {
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indices[eyeNum]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unsigned short) * meshData.IndexCount, meshData.pIndexData, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
//Now that we have the VBOs we can get rid of the mesh data
delete [] pVBVerts;
ovrHmd_DestroyDistortionMesh(&meshData);
@ -476,7 +476,7 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
// Every so often do some additional timing calculations and debug output
bool debugFrame = 0 == _frameIndex % 400;
#if 0
// Try to measure the amount of time taken to do the distortion
// (does not seem to work on OSX with SDK based distortion)
@ -487,14 +487,14 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
if (!timerQuery.isCreated()) {
timerQuery.create();
}
if (timerActive && timerQuery.isResultAvailable()) {
auto result = timerQuery.waitForResult();
if (result) { qCDebug(interfaceapp) << "Distortion took " << result << "ns"; };
timerActive = false;
}
#endif
#ifdef OVR_DIRECT_MODE
static bool attached = false;
if (!attached) {
@ -505,7 +505,7 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
}
}
#endif
#ifndef OVR_CLIENT_DISTORTION
// FIXME: we need a better way of responding to the HSW. In particular
// we need to ensure that it's only displayed once per session, rather than
@ -522,7 +522,7 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
}
}
#endif
//beginFrameTiming must be called before display
if (!_frameTimingActive) {
@ -545,14 +545,14 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glm::quat orientation;
glm::vec3 trackerPosition;
auto deviceSize = qApp->getDeviceSize();
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
ovrVector3f ovrHeadPosition = ts.HeadPose.ThePose.Position;
trackerPosition = glm::vec3(ovrHeadPosition.x, ovrHeadPosition.y, ovrHeadPosition.z);
if (_calibrationState != CALIBRATED) {
@ -628,7 +628,7 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
glViewport(0, 0, _renderTargetSize.w, _renderTargetSize.h);
finalFbo = DependencyManager::get<GlowEffect>()->render(renderArgs);
} else {
finalFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
finalFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
glBindFramebuffer(GL_FRAMEBUFFER, 0);
}
@ -645,7 +645,7 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
#endif
#ifdef OVR_CLIENT_DISTORTION
//Wait till time-warp to reduce latency
ovr_WaitTillTime(_hmdFrameTiming.TimewarpPointSeconds);
@ -656,7 +656,7 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
0, 0, fboSize.x, fboSize.y,
0, 0, deviceSize.width(), deviceSize.height(),
GL_COLOR_BUFFER_BIT, GL_NEAREST);
#else
#else
//Clear the color buffer to ensure that there isnt any residual color
//Left over from when OR was not connected.
glClear(GL_COLOR_BUFFER_BIT);
@ -672,7 +672,7 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
for_each_eye([&](ovrEyeType eye) {
ovrGLTexture & glEyeTexture = reinterpret_cast<ovrGLTexture&>(_eyeTextures[eye]);
glEyeTexture.OGL.TexId = finalFbo->texture();
});
ovrHmd_EndFrame(_ovrHmd, eyeRenderPose, _eyeTextures);
@ -699,12 +699,14 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
if (nonZero)
{
qCDebug(interfaceapp) << QString().sprintf("M2P Latency: Ren: %4.2fms TWrp: %4.2fms PostPresent: %4.2fms Err: %4.2fms %4.2fms",
latencies[0], latencies[1], latencies[2], latencies[3], latencies[4]);
qCDebug(interfaceapp)
<< QString().sprintf("M2P Latency: Ren: %4.2fms TWrp: %4.2fms PostPresent: %4.2fms Err: %4.2fms %4.2fms",
(double)latencies[0], (double)latencies[1], (double)latencies[2],
(double)latencies[3], (double)latencies[4]);
}
}
}
}
#ifdef OVR_CLIENT_DISTORTION
@ -728,7 +730,7 @@ void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) {
//Render the distortion meshes for each eye
for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
ovrHmd_GetRenderScaleAndOffset(_eyeRenderDesc[eyeNum].Fov, _renderTargetSize, _eyeTextures[eyeNum].Header.RenderViewport,
_UVScaleOffset[eyeNum]);
@ -844,7 +846,7 @@ int OculusManager::getHMDScreen() {
const int SIMILAR_NAMES = 10;
const int EXACT_LOCATION_MATCH = 50;
const int EXACT_RESOLUTION_MATCH = 25;
int bestMatchScore = 0;
// look at the display list and see if we can find the best match
@ -853,7 +855,7 @@ int OculusManager::getHMDScreen() {
foreach (QScreen* screen, QGuiApplication::screens()) {
QString screenName = screen->name();
QRect screenRect = desktop->screenGeometry(screenNumber);
int screenScore = 0;
if (screenName == productNameFromOVR) {
screenScore += EXACT_NAME_MATCH;

View file

@ -65,25 +65,25 @@ SixenseManager::SixenseManager() :
SixenseManager::~SixenseManager() {
#ifdef HAVE_SIXENSE_
if (_isInitialized) {
#ifdef __APPLE__
SixenseBaseFunction sixenseExit = (SixenseBaseFunction) _sixenseLibrary->resolve("sixenseExit");
#endif
sixenseExit();
}
#ifdef __APPLE__
delete _sixenseLibrary;
#endif
#endif
}
void SixenseManager::initialize() {
#ifdef HAVE_SIXENSE
if (!_isInitialized) {
_lowVelocityFilter = false;
_controllersAtBase = true;
@ -91,22 +91,22 @@ void SixenseManager::initialize() {
// By default we assume the _neckBase (in orb frame) is as high above the orb
// as the "torso" is below it.
_neckBase = glm::vec3(NECK_X, -NECK_Y, NECK_Z);
#ifdef __APPLE__
if (!_sixenseLibrary) {
#ifdef SIXENSE_LIB_FILENAME
_sixenseLibrary = new QLibrary(SIXENSE_LIB_FILENAME);
#else
const QString SIXENSE_LIBRARY_NAME = "libsixense_x64";
QString frameworkSixenseLibrary = QCoreApplication::applicationDirPath() + "/../Frameworks/"
+ SIXENSE_LIBRARY_NAME;
_sixenseLibrary = new QLibrary(frameworkSixenseLibrary);
#endif
}
if (_sixenseLibrary->load()){
qCDebug(interfaceapp) << "Loaded sixense library for hydra support -" << _sixenseLibrary->fileName();
} else {
@ -114,14 +114,14 @@ void SixenseManager::initialize() {
<< "Continuing without hydra support.";
return;
}
SixenseBaseFunction sixenseInit = (SixenseBaseFunction) _sixenseLibrary->resolve("sixenseInit");
#endif
sixenseInit();
_isInitialized = true;
}
#endif
}
@ -132,7 +132,7 @@ void SixenseManager::setFilter(bool filter) {
#ifdef __APPLE__
SixenseTakeIntFunction sixenseSetFilterEnabled = (SixenseTakeIntFunction) _sixenseLibrary->resolve("sixenseSetFilterEnabled");
#endif
if (filter) {
sixenseSetFilterEnabled(1);
} else {
@ -151,32 +151,32 @@ void SixenseManager::update(float deltaTime) {
SixenseBaseFunction sixenseGetNumActiveControllers =
(SixenseBaseFunction) _sixenseLibrary->resolve("sixenseGetNumActiveControllers");
#endif
if (sixenseGetNumActiveControllers() == 0) {
_hydrasConnected = false;
return;
}
PerformanceTimer perfTimer("sixense");
if (!_hydrasConnected) {
_hydrasConnected = true;
UserActivityLogger::getInstance().connectedDevice("spatial_controller", "hydra");
}
#ifdef __APPLE__
SixenseBaseFunction sixenseGetMaxControllers =
(SixenseBaseFunction) _sixenseLibrary->resolve("sixenseGetMaxControllers");
#endif
int maxControllers = sixenseGetMaxControllers();
// we only support two controllers
sixenseControllerData controllers[2];
#ifdef __APPLE__
SixenseTakeIntFunction sixenseIsControllerEnabled =
(SixenseTakeIntFunction) _sixenseLibrary->resolve("sixenseIsControllerEnabled");
SixenseTakeIntAndSixenseControllerData sixenseGetNewestData =
(SixenseTakeIntAndSixenseControllerData) _sixenseLibrary->resolve("sixenseGetNewestData");
#endif
@ -189,9 +189,9 @@ void SixenseManager::update(float deltaTime) {
sixenseControllerData* data = controllers + numActiveControllers;
++numActiveControllers;
sixenseGetNewestData(i, data);
// Set palm position and normal based on Hydra position/orientation
// Either find a palm matching the sixense controller, or make a new one
PalmData* palm;
bool foundHand = false;
@ -208,45 +208,45 @@ void SixenseManager::update(float deltaTime) {
palm->setSixenseID(data->controller_index);
qCDebug(interfaceapp, "Found new Sixense controller, ID %i", data->controller_index);
}
// Disable the hands (and return to default pose) if both controllers are at base station
if (foundHand) {
palm->setActive(!_controllersAtBase);
} else {
palm->setActive(false); // if this isn't a Sixsense ID palm, always make it inactive
}
// Read controller buttons and joystick into the hand
palm->setControllerButtons(data->buttons);
palm->setTrigger(data->trigger);
palm->setJoystick(data->joystick_x, data->joystick_y);
// Emulate the mouse so we can use scripts
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput) && !_controllersAtBase) {
emulateMouse(palm, numActiveControllers - 1);
}
// NOTE: Sixense API returns pos data in millimeters but we IMMEDIATELY convert to meters.
glm::vec3 position(data->pos[0], data->pos[1], data->pos[2]);
position *= METERS_PER_MILLIMETER;
// Check to see if this hand/controller is on the base
const float CONTROLLER_AT_BASE_DISTANCE = 0.075f;
if (glm::length(position) < CONTROLLER_AT_BASE_DISTANCE) {
numControllersAtBase++;
}
// Transform the measured position into body frame.
glm::vec3 neck = _neckBase;
// Zeroing y component of the "neck" effectively raises the measured position a little bit.
neck.y = 0.0f;
position = _orbRotation * (position - neck);
// Rotation of Palm
glm::quat rotation(data->rot_quat[3], -data->rot_quat[0], data->rot_quat[1], -data->rot_quat[2]);
rotation = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f)) * _orbRotation * rotation;
// Compute current velocity from position change
glm::vec3 rawVelocity;
if (deltaTime > 0.0f) {
@ -255,11 +255,11 @@ void SixenseManager::update(float deltaTime) {
rawVelocity = glm::vec3(0.0f);
}
palm->setRawVelocity(rawVelocity); // meters/sec
// adjustment for hydra controllers fit into hands
float sign = (i == 0) ? -1.0f : 1.0f;
rotation *= glm::angleAxis(sign * PI/4.0f, glm::vec3(0.0f, 0.0f, 1.0f));
// Angular Velocity of Palm
glm::quat deltaRotation = rotation * glm::inverse(palm->getRawRotation());
glm::vec3 angularVelocity(0.0f);
@ -271,7 +271,7 @@ void SixenseManager::update(float deltaTime) {
} else {
palm->setRawAngularVelocity(glm::vec3(0.0f));
}
if (_lowVelocityFilter) {
// Use a velocity sensitive filter to damp small motions and preserve large ones with
// no latency.
@ -284,7 +284,7 @@ void SixenseManager::update(float deltaTime) {
palm->setRawPosition(position);
palm->setRawRotation(rotation);
}
// Store the one fingertip in the palm structure so we can track velocity
const float FINGER_LENGTH = 0.3f; // meters
const glm::vec3 FINGER_VECTOR(0.0f, 0.0f, FINGER_LENGTH);
@ -297,7 +297,7 @@ void SixenseManager::update(float deltaTime) {
}
palm->setTipPosition(newTipPosition);
}
if (numActiveControllers == 2) {
updateCalibration(controllers);
}
@ -309,7 +309,7 @@ void SixenseManager::update(float deltaTime) {
//Constants for getCursorPixelRangeMultiplier()
const float MIN_PIXEL_RANGE_MULT = 0.4f;
const float MAX_PIXEL_RANGE_MULT = 2.0f;
const float RANGE_MULT = (MAX_PIXEL_RANGE_MULT - MIN_PIXEL_RANGE_MULT) * 0.01;
const float RANGE_MULT = (MAX_PIXEL_RANGE_MULT - MIN_PIXEL_RANGE_MULT) * 0.01f;
//Returns a multiplier to be applied to the cursor range for the controllers
float SixenseManager::getCursorPixelRangeMult() const {
@ -398,7 +398,7 @@ void SixenseManager::updateCalibration(const sixenseControllerData* controllers)
_calibrationState = CALIBRATION_STATE_X;
}
return;
}
}
quint64 now = usecTimestampNow() + LOCK_DURATION;
// these are weighted running averages
@ -408,7 +408,7 @@ void SixenseManager::updateCalibration(const sixenseControllerData* controllers)
if (_calibrationState == CALIBRATION_STATE_X) {
// compute new sliding average
float distance = glm::distance(_averageLeft, _averageRight);
if (fabs(distance - _lastDistance) > MAXIMUM_NOISE_LEVEL) {
if (fabsf(distance - _lastDistance) > MAXIMUM_NOISE_LEVEL) {
// distance is increasing so acquire the data and push the expiry out
_reachLeft = _averageLeft;
_reachRight = _averageRight;
@ -427,7 +427,7 @@ void SixenseManager::updateCalibration(const sixenseControllerData* controllers)
glm::vec3 torso = 0.5f * (_reachLeft + _reachRight);
glm::vec3 averagePosition = 0.5f * (_averageLeft + _averageRight);
float distance = (averagePosition - torso).y;
if (fabs(distance) > fabs(_lastDistance) + MAXIMUM_NOISE_LEVEL) {
if (fabsf(distance) > fabsf(_lastDistance) + MAXIMUM_NOISE_LEVEL) {
// distance is increasing so acquire the data and push the expiry out
_reachUp = averagePosition;
_lastDistance = distance;
@ -458,11 +458,11 @@ void SixenseManager::updateCalibration(const sixenseControllerData* controllers)
_lastDistance = distance;
_lockExpiry = now + LOCK_DURATION;
} else if (now > _lockExpiry) {
if (fabs(_lastDistance) > 0.05f * MINIMUM_ARM_REACH) {
if (fabsf(_lastDistance) > 0.05f * MINIMUM_ARM_REACH) {
// lock has expired so clamp the data and move on
_calibrationState = CALIBRATION_STATE_COMPLETE;
qCDebug(interfaceapp, "success: sixense calibration: forward");
// TODO: it is theoretically possible to detect that the controllers have been
// TODO: it is theoretically possible to detect that the controllers have been
// accidentally switched (left hand is holding right controller) and to swap the order.
}
}
@ -473,7 +473,7 @@ void SixenseManager::updateCalibration(const sixenseControllerData* controllers)
void SixenseManager::emulateMouse(PalmData* palm, int index) {
MyAvatar* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
QPoint pos;
Qt::MouseButton bumperButton;
Qt::MouseButton triggerButton;
@ -487,7 +487,7 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
triggerButton = Qt::LeftButton;
}
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseLasers)
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseLasers)
|| Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode)) {
pos = qApp->getApplicationOverlay().getPalmClickLocation(palm);
} else {
@ -496,7 +496,7 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
// Get the angles, scaled between (-0.5,0.5)
float xAngle = (atan2(direction.z, direction.x) + M_PI_2);
float yAngle = 0.5f - ((atan2(direction.z, direction.y) + M_PI_2));
float yAngle = 0.5f - ((atan2f(direction.z, direction.y) + (float)M_PI_2));
auto canvasSize = qApp->getCanvasSize();
// Get the pixel range over which the xAngle and yAngle are scaled
float cursorRange = canvasSize.x * getCursorPixelRangeMult();
@ -539,11 +539,11 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
if (!_bumperPressed[(int)(!index)]) {
qApp->mouseMoveEvent(&mouseEvent, deviceID);
}
}
}
}
_oldX[index] = pos.x();
_oldY[index] = pos.y();
//We need separate coordinates for clicks, since we need to check if
//a magnification window was clicked on
@ -557,7 +557,7 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
if (palm->getControllerButtons() & BUTTON_FWD) {
if (!_bumperPressed[index]) {
_bumperPressed[index] = true;
QMouseEvent mouseEvent(QEvent::MouseButtonPress, pos, bumperButton, bumperButton, 0);
qApp->mousePressEvent(&mouseEvent, deviceID);
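The fabs-to-fabsf and atan2-to-atan2f edits in this file come from the C math library: fabs, atan2, tan and sqrt take and return double, so calling them with float arguments promotes the operands, and constants such as M_PI_2 are doubles as well. The f-suffixed variants (fabsf, atan2f, tanf, sqrtf) plus an explicit (float) cast on the constant keep the whole expression in single precision. A standalone sketch of the pattern (the function name is invented for illustration):

    #include <cmath>   // M_PI_2 is provided by <cmath>/<math.h> on POSIX toolchains

    float yAngleFromDirection(float z, float y) {
        // return 0.5f - (atan2(z, y) + M_PI_2);          // atan2 returns double, M_PI_2 is a double
        return 0.5f - (atan2f(z, y) + (float)M_PI_2);     // float variant plus explicit cast
    }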

View file

@ -62,34 +62,34 @@ glm::vec3 getPoint(float yaw, float pitch) {
bool raySphereIntersect(const glm::vec3 &dir, const glm::vec3 &origin, float r, float* result)
{
//Source: http://wiki.cgsociety.org/index.php/Ray_Sphere_Intersection
//Compute A, B and C coefficients
float a = glm::dot(dir, dir);
float b = 2 * glm::dot(dir, origin);
float c = glm::dot(origin, origin) - (r * r);
//Find discriminant
float disc = b * b - 4 * a * c;
// if discriminant is negative there are no real roots, so return
// false as ray misses sphere
if (disc < 0) {
return false;
}
// compute q as described above
float distSqrt = sqrtf(disc);
float q;
if (b < 0) {
q = (-b - distSqrt) / 2.0;
q = (-b - distSqrt) / 2.0f;
} else {
q = (-b + distSqrt) / 2.0;
q = (-b + distSqrt) / 2.0f;
}
// compute t0 and t1
float t0 = q / a;
float t1 = c / q;
// make sure t0 is smaller than t1
if (t0 > t1) {
// if t0 is bigger than t1 swap them around
@ -97,13 +97,13 @@ bool raySphereIntersect(const glm::vec3 &dir, const glm::vec3 &origin, float r,
t0 = t1;
t1 = temp;
}
// if t1 is less than zero, the object is in the ray's negative direction
// and consequently the ray misses the sphere
if (t1 < 0) {
return false;
}
// if t0 is less than zero, the intersection point is at t1
if (t0 < 0) {
*result = t1;
@ -122,15 +122,15 @@ void ApplicationOverlay::renderReticle(glm::quat orientation, float alpha) {
glm::vec3 topRight = getPoint(-reticleSize / 2.0f, -reticleSize / 2.0f);
glm::vec3 bottomLeft = getPoint(reticleSize / 2.0f, reticleSize / 2.0f);
glm::vec3 bottomRight = getPoint(-reticleSize / 2.0f, reticleSize / 2.0f);
// TODO: this version of renderQuad() needs to take a color
glm::vec4 reticleColor = { RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], alpha };
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomLeft, bottomRight, topRight,
glm::vec2(0.0f, 0.0f), glm::vec2(1.0f, 0.0f),
glm::vec2(1.0f, 1.0f), glm::vec2(0.0f, 1.0f),
glm::vec2(1.0f, 1.0f), glm::vec2(0.0f, 1.0f),
reticleColor, _reticleQuad);
} glPopMatrix();
}
@ -155,7 +155,7 @@ ApplicationOverlay::ApplicationOverlay() :
memset(_magSizeMult, 0, sizeof(_magSizeMult));
auto geometryCache = DependencyManager::get<GeometryCache>();
_reticleQuad = geometryCache->allocateID();
_magnifierQuad = geometryCache->allocateID();
_audioRedQuad = geometryCache->allocateID();
@ -163,10 +163,10 @@ ApplicationOverlay::ApplicationOverlay() :
_audioBlueQuad = geometryCache->allocateID();
_domainStatusBorder = geometryCache->allocateID();
_magnifierBorder = geometryCache->allocateID();
// Once we move UI rendering and screen rendering to different
// threads, we need to use a sync object to deteremine when
// the current UI texture is no longer being read from, and only
// the current UI texture is no longer being read from, and only
// then release it back to the UI for re-use
auto offscreenUi = DependencyManager::get<OffscreenUi>();
connect(offscreenUi.data(), &OffscreenUi::textureUpdated, this, [&](GLuint textureId) {
@ -187,13 +187,13 @@ ApplicationOverlay::~ApplicationOverlay() {
void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "ApplicationOverlay::displayOverlay()");
Overlays& overlays = qApp->getOverlays();
_textureFov = glm::radians(_hmdUIAngularSize);
glm::vec2 size = qApp->getCanvasSize();
_textureAspectRatio = aspect(size);
//Handle fading and deactivation/activation of UI
// Render 2D overlay
glMatrixMode(GL_PROJECTION);
glDisable(GL_DEPTH_TEST);
@ -238,7 +238,7 @@ void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
_overlays.release();
}
// A quick and dirty solution for compositing the old overlay
// A quick and dirty solution for compositing the old overlay
// texture with the new one
template <typename F>
void with_each_texture(GLuint firstPassTexture, GLuint secondPassTexture, F f) {
@ -276,7 +276,7 @@ void ApplicationOverlay::displayOverlayTexture() {
static const glm::vec2 texCoordBottomRight(1.0f, 0.0f);
with_each_texture(_overlays.getTexture(), _newUiTexture, [&] {
DependencyManager::get<GeometryCache>()->renderQuad(
topLeft, bottomRight,
topLeft, bottomRight,
texCoordTopLeft, texCoordBottomRight,
glm::vec4(1.0f, 1.0f, 1.0f, _alpha));
});
@ -302,7 +302,7 @@ void ApplicationOverlay::displayOverlayTexture() {
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_crosshairTexture));
glm::vec4 reticleColor = { RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], 1.0f };
DependencyManager::get<GeometryCache>()->renderQuad(
mouseTopLeft + mousePosition, mouseBottomRight + mousePosition,
mouseTopLeft + mousePosition, mouseBottomRight + mousePosition,
texCoordTopLeft, texCoordBottomRight,
reticleColor);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
@ -328,13 +328,13 @@ void ApplicationOverlay::displayOverlayTextureHmd(Camera& whichCamera) {
//Update and draw the magnifiers
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
const glm::quat& orientation = myAvatar->getOrientation();
// Always display the HMD overlay relative to the camera position but
// remove the HMD pose offset. This results in an overlay that sticks with you
// Always display the HMD overlay relative to the camera position but
// remove the HMD pose offset. This results in an overlay that sticks with you
// even in third person mode, but isn't drawn at a fixed distance.
glm::vec3 position = whichCamera.getPosition();
position -= qApp->getCamera()->getHmdPosition();
const float scale = myAvatar->getScale() * _oculusUIRadius;
// glm::vec3 eyeOffset = setEyeOffsetPosition;
glMatrixMode(GL_MODELVIEW);
glPushMatrix(); {
@ -343,7 +343,7 @@ void ApplicationOverlay::displayOverlayTextureHmd(Camera& whichCamera) {
glMultMatrixf(&rotation[0][0]);
glScalef(scale, scale, scale);
for (int i = 0; i < NUMBER_OF_RETICLES; i++) {
if (_magActive[i]) {
_magSizeMult[i] += MAG_SPEED;
if (_magSizeMult[i] > 1.0f) {
@ -355,7 +355,7 @@ void ApplicationOverlay::displayOverlayTextureHmd(Camera& whichCamera) {
_magSizeMult[i] = 0.0f;
}
}
if (_magSizeMult[i] > 0.0f) {
//Render magnifier, but dont show border for mouse magnifier
glm::vec2 projection = screenToOverlay(glm::vec2(_reticlePosition[MOUSE].x(),
@ -365,16 +365,16 @@ void ApplicationOverlay::displayOverlayTextureHmd(Camera& whichCamera) {
});
}
}
glDepthMask(GL_FALSE);
glDisable(GL_ALPHA_TEST);
static float textureFOV = 0.0f, textureAspectRatio = 1.0f;
if (textureFOV != _textureFov ||
textureAspectRatio != _textureAspectRatio) {
textureFOV = _textureFov;
textureAspectRatio = _textureAspectRatio;
_overlays.buildVBO(_textureFov, _textureAspectRatio, 80, 80);
}
@ -387,7 +387,7 @@ void ApplicationOverlay::displayOverlayTextureHmd(Camera& whichCamera) {
}
glDepthMask(GL_TRUE);
glDisable(GL_TEXTURE_2D);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glEnable(GL_LIGHTING);
} glPopMatrix();
@ -398,17 +398,17 @@ void ApplicationOverlay::displayOverlayTextureStereo(Camera& whichCamera, float
if (_alpha == 0.0f) {
return;
}
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
const glm::vec3& viewMatrixTranslation = qApp->getViewMatrixTranslation();
glEnable(GL_BLEND);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glEnable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glLoadIdentity();
// Transform to world space
@ -416,25 +416,25 @@ void ApplicationOverlay::displayOverlayTextureStereo(Camera& whichCamera, float
glm::vec3 axis2 = glm::axis(rotation);
glRotatef(-glm::degrees(glm::angle(rotation)), axis2.x, axis2.y, axis2.z);
glTranslatef(viewMatrixTranslation.x, viewMatrixTranslation.y, viewMatrixTranslation.z);
// Translate to the front of the camera
glm::vec3 pos = whichCamera.getPosition();
glm::quat rot = myAvatar->getOrientation();
glm::vec3 axis = glm::axis(rot);
glTranslatef(pos.x, pos.y, pos.z);
glRotatef(glm::degrees(glm::angle(rot)), axis.x, axis.y, axis.z);
glm::vec4 overlayColor = {1.0f, 1.0f, 1.0f, _alpha};
//Render
const GLfloat distance = 1.0f;
const GLfloat halfQuadHeight = distance * tan(fov);
const GLfloat halfQuadHeight = distance * tanf(fov);
const GLfloat halfQuadWidth = halfQuadHeight * aspectRatio;
const GLfloat quadWidth = halfQuadWidth * 2.0f;
const GLfloat quadHeight = halfQuadHeight * 2.0f;
GLfloat x = -halfQuadWidth;
GLfloat y = -halfQuadHeight;
glDisable(GL_DEPTH_TEST);
@ -444,16 +444,16 @@ void ApplicationOverlay::displayOverlayTextureStereo(Camera& whichCamera, float
glm::vec3(x + quadWidth, y + quadHeight, -distance),
glm::vec3(x + quadWidth, y, -distance),
glm::vec3(x, y, -distance),
glm::vec2(0.0f, 1.0f), glm::vec2(1.0f, 1.0f),
glm::vec2(0.0f, 1.0f), glm::vec2(1.0f, 1.0f),
glm::vec2(1.0f, 0.0f), glm::vec2(0.0f, 0.0f),
overlayColor);
});
if (!_crosshairTexture) {
_crosshairTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() +
"images/sixense-reticle.png");
}
//draw the mouse pointer
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
@ -464,26 +464,26 @@ void ApplicationOverlay::displayOverlayTextureStereo(Camera& whichCamera, float
x -= reticleSize / 2.0f;
y += reticleSize / 2.0f;
const float mouseX = (qApp->getMouseX() / (float)canvasSize.x) * quadWidth;
const float mouseY = (1.0 - (qApp->getMouseY() / (float)canvasSize.y)) * quadHeight;
const float mouseY = (1.0f - ((float)qApp->getMouseY() / (float)canvasSize.y)) * quadHeight;
glm::vec4 reticleColor = { RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], 1.0f };
DependencyManager::get<GeometryCache>()->renderQuad(glm::vec3(x + mouseX, y + mouseY, -distance),
DependencyManager::get<GeometryCache>()->renderQuad(glm::vec3(x + mouseX, y + mouseY, -distance),
glm::vec3(x + mouseX + reticleSize, y + mouseY, -distance),
glm::vec3(x + mouseX + reticleSize, y + mouseY - reticleSize, -distance),
glm::vec3(x + mouseX, y + mouseY - reticleSize, -distance),
glm::vec2(0.0f, 0.0f), glm::vec2(1.0f, 0.0f),
glm::vec2(0.0f, 0.0f), glm::vec2(1.0f, 0.0f),
glm::vec2(1.0f, 1.0f), glm::vec2(0.0f, 1.0f),
reticleColor, _reticleQuad);
glEnable(GL_DEPTH_TEST);
glPopMatrix();
glDepthMask(GL_TRUE);
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glEnable(GL_LIGHTING);
}
@ -493,7 +493,7 @@ void ApplicationOverlay::computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& origi
const glm::vec2 projection = screenToSpherical(cursorPos);
// The overlay space orientation of the mouse coordinates
const glm::quat orientation(glm::vec3(-projection.y, projection.x, 0.0f));
// FIXME We now have the direction of the ray FROM THE DEFAULT HEAD POSE.
// FIXME We now have the direction of the ray FROM THE DEFAULT HEAD POSE.
// Now we need to account for the actual camera position relative to the overlay
glm::vec3 overlaySpaceDirection = glm::normalize(orientation * IDENTITY_FRONT);
@ -556,8 +556,8 @@ QPoint ApplicationOverlay::getPalmClickLocation(const PalmData *palm) const {
ndcSpacePos = glm::vec3(clipSpacePos) / clipSpacePos.w;
}
rv.setX(((ndcSpacePos.x + 1.0) / 2.0) * canvasSize.x);
rv.setY((1.0 - ((ndcSpacePos.y + 1.0) / 2.0)) * canvasSize.y);
rv.setX(((ndcSpacePos.x + 1.0f) / 2.0f) * canvasSize.x);
rv.setY((1.0f - ((ndcSpacePos.y + 1.0f) / 2.0f)) * canvasSize.y);
}
return rv;
}
@ -565,7 +565,7 @@ QPoint ApplicationOverlay::getPalmClickLocation(const PalmData *palm) const {
//Finds the collision point of a world space ray
bool ApplicationOverlay::calculateRayUICollisionPoint(const glm::vec3& position, const glm::vec3& direction, glm::vec3& result) const {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::quat inverseOrientation = glm::inverse(myAvatar->getOrientation());
glm::vec3 relativePosition = inverseOrientation * (position - myAvatar->getDefaultEyePosition());
@ -599,7 +599,7 @@ void ApplicationOverlay::renderPointers() {
_lastMouseMove = usecTimestampNow();
}
QPoint position = QPoint(qApp->getTrueMouseX(), qApp->getTrueMouseY());
static const int MAX_IDLE_TIME = 3;
if (_reticlePosition[MOUSE] != position) {
_lastMouseMove = usecTimestampNow();
@ -608,7 +608,7 @@ void ApplicationOverlay::renderPointers() {
//OculusManager::getEulerAngles(yaw, pitch, roll);
glm::quat orientation = qApp->getHeadOrientation(); // (glm::vec3(pitch, yaw, roll));
glm::vec3 result;
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
if (calculateRayUICollisionPoint(myAvatar->getEyePosition(),
myAvatar->getOrientation() * orientation * IDENTITY_FRONT,
@ -623,7 +623,7 @@ void ApplicationOverlay::renderPointers() {
qDebug() << "No collision point";
}
}
_reticlePosition[MOUSE] = position;
_reticleActive[MOUSE] = true;
_magActive[MOUSE] = _magnifier;
@ -716,7 +716,7 @@ void ApplicationOverlay::renderControllerPointers() {
// Get the angles, scaled between (-0.5,0.5)
float xAngle = (atan2(direction.z, direction.x) + M_PI_2);
float yAngle = 0.5f - ((atan2(direction.z, direction.y) + M_PI_2));
float yAngle = 0.5f - ((atan2f(direction.z, direction.y) + (float)M_PI_2));
// Get the pixel range over which the xAngle and yAngle are scaled
float cursorRange = canvasSize.x * SixenseManager::getInstance().getCursorPixelRangeMult();
@ -746,7 +746,7 @@ void ApplicationOverlay::renderControllerPointers() {
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight,
glm::vec4(RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], 1.0f));
}
}
@ -760,7 +760,7 @@ void ApplicationOverlay::renderPointersOculus() {
glDisable(GL_DEPTH_TEST);
glMatrixMode(GL_MODELVIEW);
//Controller Pointers
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
for (int i = 0; i < (int)myAvatar->getHand()->getNumPalms(); i++) {
@ -771,7 +771,7 @@ void ApplicationOverlay::renderPointersOculus() {
glm::quat orientation = glm::quat(glm::vec3(polar.y, -polar.x, 0.0f));
// Render reticle at location
renderReticle(orientation, _alpha);
}
}
}
//Mouse Pointer
@ -793,10 +793,10 @@ void ApplicationOverlay::renderMagnifier(glm::vec2 magPos, float sizeMult, bool
return;
}
auto canvasSize = qApp->getCanvasSize();
const int widgetWidth = canvasSize.x;
const int widgetHeight = canvasSize.y;
const float halfWidth = (MAGNIFY_WIDTH / _textureAspectRatio) * sizeMult / 2.0f;
const float halfHeight = MAGNIFY_HEIGHT * sizeMult / 2.0f;
// Magnification Texture Coordinates
@ -804,7 +804,7 @@ void ApplicationOverlay::renderMagnifier(glm::vec2 magPos, float sizeMult, bool
const float magnifyURight = (magPos.x + halfWidth) / (float)widgetWidth;
const float magnifyVTop = 1.0f - (magPos.y - halfHeight) / (float)widgetHeight;
const float magnifyVBottom = 1.0f - (magPos.y + halfHeight) / (float)widgetHeight;
const float newHalfWidth = halfWidth * MAGNIFY_MULT;
const float newHalfHeight = halfHeight * MAGNIFY_MULT;
//Get yaw / pitch value for the corners
@ -812,7 +812,7 @@ void ApplicationOverlay::renderMagnifier(glm::vec2 magPos, float sizeMult, bool
magPos.y - newHalfHeight));
const glm::vec2 bottomRightYawPitch = overlayToSpherical(glm::vec2(magPos.x + newHalfWidth,
magPos.y + newHalfHeight));
const glm::vec3 bottomLeft = getPoint(topLeftYawPitch.x, bottomRightYawPitch.y);
const glm::vec3 bottomRight = getPoint(bottomRightYawPitch.x, bottomRightYawPitch.y);
const glm::vec3 topLeft = getPoint(topLeftYawPitch.x, topLeftYawPitch.y);
@ -835,7 +835,7 @@ void ApplicationOverlay::renderMagnifier(glm::vec2 magPos, float sizeMult, bool
_previousMagnifierTopLeft = topLeft;
_previousMagnifierTopRight = topRight;
}
glPushMatrix(); {
if (showBorder) {
glDisable(GL_TEXTURE_2D);
@ -847,12 +847,12 @@ void ApplicationOverlay::renderMagnifier(glm::vec2 magPos, float sizeMult, bool
glm::vec4 magnifierColor = { 1.0f, 1.0f, 1.0f, _alpha };
DependencyManager::get<GeometryCache>()->renderQuad(bottomLeft, bottomRight, topRight, topLeft,
glm::vec2(magnifyULeft, magnifyVBottom),
glm::vec2(magnifyURight, magnifyVBottom),
glm::vec2(magnifyURight, magnifyVTop),
glm::vec2(magnifyULeft, magnifyVBottom),
glm::vec2(magnifyURight, magnifyVBottom),
glm::vec2(magnifyURight, magnifyVTop),
glm::vec2(magnifyULeft, magnifyVTop),
magnifierColor, _magnifierQuad);
} glPopMatrix();
}
@ -920,7 +920,7 @@ void ApplicationOverlay::renderAudioMeter() {
float loudness = audio->getLastInputLoudness() + 1.0f;
_trailingAudioLoudness = AUDIO_METER_AVERAGING * _trailingAudioLoudness + (1.0f - AUDIO_METER_AVERAGING) * loudness;
float log2loudness = log(_trailingAudioLoudness) / LOG2;
float log2loudness = logf(_trailingAudioLoudness) / LOG2;
if (log2loudness <= LOG2_LOUDNESS_FLOOR) {
audioLevel = (log2loudness / LOG2_LOUDNESS_FLOOR) * METER_LOUDNESS_SCALE * audioMeterScaleWidth;
@ -933,7 +933,7 @@ void ApplicationOverlay::renderAudioMeter() {
bool isClipping = ((audio->getTimeSinceLastClip() > 0.0f) && (audio->getTimeSinceLastClip() < CLIPPING_INDICATOR_TIME));
DependencyManager::get<AudioToolBox>()->render(MIRROR_VIEW_LEFT_PADDING + AUDIO_METER_GAP, audioMeterY, cameraSpace, boxed);
auto canvasSize = qApp->getCanvasSize();
DependencyManager::get<AudioScope>()->render(canvasSize.x, canvasSize.y);
DependencyManager::get<AudioIOStatsRenderer>()->render(WHITE_TEXT, canvasSize.x, canvasSize.y);
@ -953,11 +953,11 @@ void ApplicationOverlay::renderAudioMeter() {
}
// Draw Red Quad
DependencyManager::get<GeometryCache>()->renderQuad(audioMeterX + audioRedStart,
audioMeterY,
audioLevel - audioRedStart,
audioMeterY,
audioLevel - audioRedStart,
AUDIO_METER_HEIGHT, quadColor,
_audioRedQuad);
audioLevel = audioRedStart;
}
@ -970,8 +970,8 @@ void ApplicationOverlay::renderAudioMeter() {
}
// Draw Green Quad
DependencyManager::get<GeometryCache>()->renderQuad(audioMeterX + audioGreenStart,
audioMeterY,
audioLevel - audioGreenStart,
audioMeterY,
audioLevel - audioGreenStart,
AUDIO_METER_HEIGHT, quadColor,
_audioGreenQuad);
@ -996,7 +996,7 @@ void ApplicationOverlay::renderAudioMeter() {
void ApplicationOverlay::renderStatsAndLogs() {
Application* application = Application::getInstance();
QSharedPointer<BandwidthRecorder> bandwidthRecorder = DependencyManager::get<BandwidthRecorder>();
const OctreePacketProcessor& octreePacketProcessor = application->getOctreePacketProcessor();
NodeBounds& nodeBoundsDisplay = application->getNodeBoundsDisplay();
@ -1011,7 +1011,7 @@ void ApplicationOverlay::renderStatsAndLogs() {
if (shouldDisplayTimingDetail != PerformanceTimer::isActive()) {
PerformanceTimer::setActive(shouldDisplayTimingDetail);
}
if (Menu::getInstance()->isOptionChecked(MenuOption::Stats)) {
// let's set horizontal offset to give stats some margin to mirror
int horizontalOffset = MIRROR_VIEW_WIDTH + MIRROR_VIEW_LEFT_PADDING * 2;
@ -1098,12 +1098,12 @@ void ApplicationOverlay::TexturedHemisphere::buildVBO(const float fov,
}
// Cleanup old VBO if necessary
cleanupVBO();
//UV mapping source: http://www.mvps.org/directx/articles/spheremap.htm
// Compute number of vertices needed
_vertices = slices * stacks;
// Compute vertices positions and texture UV coordinate
TextureVertex* vertexData = new TextureVertex[_vertices];
TextureVertex* vertexPtr = &vertexData[0];
@ -1111,12 +1111,12 @@ void ApplicationOverlay::TexturedHemisphere::buildVBO(const float fov,
float stacksRatio = (float)i / (float)(stacks - 1); // First stack is 0.0f, last stack is 1.0f
// abs(theta) <= fov / 2.0f
float pitch = -fov * (stacksRatio - 0.5f);
for (int j = 0; j < slices; j++) {
float slicesRatio = (float)j / (float)(slices - 1); // First slice is 0.0f, last slice is 1.0f
// abs(phi) <= fov * aspectRatio / 2.0f
float yaw = -fov * aspectRatio * (slicesRatio - 0.5f);
vertexPtr->position = getPoint(yaw, pitch);
vertexPtr->uv.x = slicesRatio;
vertexPtr->uv.y = stacksRatio;
@ -1129,14 +1129,14 @@ void ApplicationOverlay::TexturedHemisphere::buildVBO(const float fov,
static const int BYTES_PER_VERTEX = sizeof(TextureVertex);
glBufferData(GL_ARRAY_BUFFER, _vertices * BYTES_PER_VERTEX, vertexData, GL_STATIC_DRAW);
delete[] vertexData;
// Compute number of indices needed
static const int VERTEX_PER_TRANGLE = 3;
static const int TRIANGLE_PER_RECTANGLE = 2;
int numberOfRectangles = (slices - 1) * (stacks - 1);
_indices = numberOfRectangles * TRIANGLE_PER_RECTANGLE * VERTEX_PER_TRANGLE;
// Compute indices order
GLushort* indexData = new GLushort[_indices];
GLushort* indexPtr = indexData;
@ -1146,11 +1146,11 @@ void ApplicationOverlay::TexturedHemisphere::buildVBO(const float fov,
GLushort bottomRightIndex = bottomLeftIndex + 1;
GLushort topLeftIndex = bottomLeftIndex + slices;
GLushort topRightIndex = topLeftIndex + 1;
*(indexPtr++) = topLeftIndex;
*(indexPtr++) = bottomLeftIndex;
*(indexPtr++) = topRightIndex;
*(indexPtr++) = topRightIndex;
*(indexPtr++) = bottomLeftIndex;
*(indexPtr++) = bottomRightIndex;
@ -1182,11 +1182,11 @@ void ApplicationOverlay::TexturedHemisphere::buildFramebufferObject() {
// Already built
return;
}
if (_framebufferObject != NULL) {
delete _framebufferObject;
}
_framebufferObject = new QOpenGLFramebufferObject(fboSize, QOpenGLFramebufferObject::Depth);
glBindTexture(GL_TEXTURE_2D, getTexture());
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
@ -1204,24 +1204,24 @@ void ApplicationOverlay::TexturedHemisphere::render() {
qDebug() << "TexturedHemisphere::render(): Incorrect initialisation";
return;
}
glBindBuffer(GL_ARRAY_BUFFER, _vbo.first);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _vbo.second);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
static const int STRIDE = sizeof(TextureVertex);
static const void* VERTEX_POINTER = 0;
static const void* TEX_COORD_POINTER = (void*)sizeof(glm::vec3);
glVertexPointer(3, GL_FLOAT, STRIDE, VERTEX_POINTER);
glTexCoordPointer(2, GL_FLOAT, STRIDE, TEX_COORD_POINTER);
glDrawRangeElements(GL_TRIANGLES, 0, _vertices - 1, _indices, GL_UNSIGNED_SHORT, 0);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}
@ -1242,7 +1242,7 @@ glm::vec2 ApplicationOverlay::directionToSpherical(const glm::vec3& direction) {
}
// Compute pitch
result.y = angleBetween(IDENTITY_UP, direction) - PI_OVER_TWO;
return result;
}
@ -1258,22 +1258,22 @@ glm::vec2 ApplicationOverlay::screenToSpherical(const glm::vec2& screenPos) {
result.y = (screenPos.y / screenSize.y - 0.5f);
result.x *= MOUSE_YAW_RANGE;
result.y *= MOUSE_PITCH_RANGE;
return result;
}
glm::vec2 ApplicationOverlay::sphericalToScreen(const glm::vec2& sphericalPos) {
glm::vec2 result = sphericalPos;
result.x *= -1.0;
result.x *= -1.0f;
result /= MOUSE_RANGE;
result += 0.5f;
result *= qApp->getCanvasSize();
return result;
return result;
}
glm::vec2 ApplicationOverlay::sphericalToOverlay(const glm::vec2& sphericalPos) const {
glm::vec2 result = sphericalPos;
result.x *= -1.0;
result.x *= -1.0f;
result /= _textureFov;
result.x /= _textureAspectRatio;
result += 0.5f;
@ -1285,7 +1285,7 @@ glm::vec2 ApplicationOverlay::overlayToSpherical(const glm::vec2& overlayPos) c
glm::vec2 result = overlayPos;
result /= qApp->getCanvasSize();
result -= 0.5f;
result *= _textureFov;
result *= _textureFov;
result.x *= _textureAspectRatio;
result.x *= -1.0f;
return result;

View file

@ -11,10 +11,19 @@
#include "InterfaceConfig.h"
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdouble-promotion"
#endif
#include <QDesktopWidget>
#include <QTextBlock>
#include <QtGui>
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#include <PathUtils.h>
#include <SharedUtil.h>
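
The push/ignored/pop pragmas above are how the commit keeps -Wdouble-promotion on for project code while muting warnings that originate inside Qt headers. A minimal, self-contained sketch of the same pattern follows; the included header and function are stand-ins, not from this commit.

// Minimal sketch of the suppression pattern (illustrative only).
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdouble-promotion"
#endif
#include <cmath>            // stands in for a third-party header whose inline code promotes floats
#ifdef __GNUC__
#pragma GCC diagnostic pop  // our own code below is still checked
#endif

float twice(float x) {
    return x * 2.0f;        // writing 2.0 here would now trigger -Wdouble-promotion
}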

View file

@ -75,20 +75,20 @@ void Circle3DOverlay::render(RenderArgs* args) {
float alpha = getAlpha();
if (alpha == 0.0) {
if (alpha == 0.0f) {
return; // do nothing if our alpha is 0, we're not visible
}
// Create the circle in the coordinates origin
float outerRadius = getOuterRadius();
float innerRadius = getInnerRadius(); // only used in solid case
float startAt = getStartAt();
float endAt = getEndAt();
bool geometryChanged = (startAt != _lastStartAt || endAt != _lastEndAt ||
innerRadius != _lastInnerRadius || outerRadius != _lastOuterRadius);
const float FULL_CIRCLE = 360.0f;
const float SLICES = 180.0f; // The number of segments used to create the circle
const float SLICE_ANGLE = FULL_CIRCLE / SLICES;
@ -102,7 +102,7 @@ void Circle3DOverlay::render(RenderArgs* args) {
_lastColor = colorX;
glDisable(GL_LIGHTING);
glm::vec3 position = getPosition();
glm::vec3 center = getCenter();
glm::vec2 dimensions = getDimensions();
@ -126,46 +126,46 @@ void Circle3DOverlay::render(RenderArgs* args) {
glLineWidth(_lineWidth);
auto geometryCache = DependencyManager::get<GeometryCache>();
// for our overlay, "is solid" means we draw a ring between the inner and outer radius of the circle; otherwise
// we just draw a line...
if (getIsSolid()) {
if (_quadVerticesID == GeometryCache::UNKNOWN_ID) {
_quadVerticesID = geometryCache->allocateID();
}
if (geometryChanged || colorChanged) {
QVector<glm::vec2> points;
float angle = startAt;
float angleInRadians = glm::radians(angle);
glm::vec2 firstInnerPoint(cos(angleInRadians) * innerRadius, sin(angleInRadians) * innerRadius);
glm::vec2 firstOuterPoint(cos(angleInRadians) * outerRadius, sin(angleInRadians) * outerRadius);
glm::vec2 firstInnerPoint(cosf(angleInRadians) * innerRadius, sinf(angleInRadians) * innerRadius);
glm::vec2 firstOuterPoint(cosf(angleInRadians) * outerRadius, sinf(angleInRadians) * outerRadius);
points << firstInnerPoint << firstOuterPoint;
while (angle < endAt) {
angleInRadians = glm::radians(angle);
glm::vec2 thisInnerPoint(cos(angleInRadians) * innerRadius, sin(angleInRadians) * innerRadius);
glm::vec2 thisOuterPoint(cos(angleInRadians) * outerRadius, sin(angleInRadians) * outerRadius);
glm::vec2 thisInnerPoint(cosf(angleInRadians) * innerRadius, sinf(angleInRadians) * innerRadius);
glm::vec2 thisOuterPoint(cosf(angleInRadians) * outerRadius, sinf(angleInRadians) * outerRadius);
points << thisOuterPoint << thisInnerPoint;
angle += SLICE_ANGLE;
}
// get the last slice portion....
angle = endAt;
angleInRadians = glm::radians(angle);
glm::vec2 lastInnerPoint(cos(angleInRadians) * innerRadius, sin(angleInRadians) * innerRadius);
glm::vec2 lastOuterPoint(cos(angleInRadians) * outerRadius, sin(angleInRadians) * outerRadius);
glm::vec2 lastInnerPoint(cosf(angleInRadians) * innerRadius, sinf(angleInRadians) * innerRadius);
glm::vec2 lastOuterPoint(cosf(angleInRadians) * outerRadius, sinf(angleInRadians) * outerRadius);
points << lastOuterPoint << lastInnerPoint;
geometryCache->updateVertices(_quadVerticesID, points, color);
}
geometryCache->renderVertices(gpu::QUAD_STRIP, _quadVerticesID);
} else {
@ -175,30 +175,30 @@ void Circle3DOverlay::render(RenderArgs* args) {
if (geometryChanged || colorChanged) {
QVector<glm::vec2> points;
float angle = startAt;
float angleInRadians = glm::radians(angle);
glm::vec2 firstPoint(cos(angleInRadians) * outerRadius, sin(angleInRadians) * outerRadius);
glm::vec2 firstPoint(cosf(angleInRadians) * outerRadius, sinf(angleInRadians) * outerRadius);
points << firstPoint;
while (angle < endAt) {
angle += SLICE_ANGLE;
angleInRadians = glm::radians(angle);
glm::vec2 thisPoint(cos(angleInRadians) * outerRadius, sin(angleInRadians) * outerRadius);
glm::vec2 thisPoint(cosf(angleInRadians) * outerRadius, sinf(angleInRadians) * outerRadius);
points << thisPoint;
if (getIsDashedLine()) {
angle += SLICE_ANGLE / 2.0f; // short gap
angleInRadians = glm::radians(angle);
glm::vec2 dashStartPoint(cos(angleInRadians) * outerRadius, sin(angleInRadians) * outerRadius);
glm::vec2 dashStartPoint(cosf(angleInRadians) * outerRadius, sinf(angleInRadians) * outerRadius);
points << dashStartPoint;
}
}
// get the last slice portion....
angle = endAt;
angleInRadians = glm::radians(angle);
glm::vec2 lastPoint(cos(angleInRadians) * outerRadius, sin(angleInRadians) * outerRadius);
glm::vec2 lastPoint(cosf(angleInRadians) * outerRadius, sinf(angleInRadians) * outerRadius);
points << lastPoint;
geometryCache->updateVertices(_lineVerticesID, points, color);
@ -210,7 +210,7 @@ void Circle3DOverlay::render(RenderArgs* args) {
geometryCache->renderVertices(gpu::LINE_STRIP, _lineVerticesID);
}
}
// draw our tick marks
// for our overlay, "is solid" means we draw a ring between the inner and outer radius of the circle; otherwise
// we just draw a line...
@ -229,9 +229,9 @@ void Circle3DOverlay::render(RenderArgs* args) {
// draw our major tick marks
if (getMajorTickMarksAngle() > 0.0f && getMajorTickMarksLength() != 0.0f) {
float tickMarkAngle = getMajorTickMarksAngle();
float angle = startAt - fmod(startAt, tickMarkAngle) + tickMarkAngle;
float angle = startAt - fmodf(startAt, tickMarkAngle) + tickMarkAngle;
float angleInRadians = glm::radians(angle);
float tickMarkLength = getMajorTickMarksLength();
float startRadius = (tickMarkLength > 0.0f) ? innerRadius : outerRadius;
@ -240,20 +240,20 @@ void Circle3DOverlay::render(RenderArgs* args) {
while (angle <= endAt) {
angleInRadians = glm::radians(angle);
glm::vec2 thisPointA(cos(angleInRadians) * startRadius, sin(angleInRadians) * startRadius);
glm::vec2 thisPointB(cos(angleInRadians) * endRadius, sin(angleInRadians) * endRadius);
glm::vec2 thisPointA(cosf(angleInRadians) * startRadius, sinf(angleInRadians) * startRadius);
glm::vec2 thisPointB(cosf(angleInRadians) * endRadius, sinf(angleInRadians) * endRadius);
majorPoints << thisPointA << thisPointB;
angle += tickMarkAngle;
}
}
// draw our minor tick marks
if (getMinorTickMarksAngle() > 0.0f && getMinorTickMarksLength() != 0.0f) {
float tickMarkAngle = getMinorTickMarksAngle();
float angle = startAt - fmod(startAt, tickMarkAngle) + tickMarkAngle;
float angle = startAt - fmodf(startAt, tickMarkAngle) + tickMarkAngle;
float angleInRadians = glm::radians(angle);
float tickMarkLength = getMinorTickMarksLength();
float startRadius = (tickMarkLength > 0.0f) ? innerRadius : outerRadius;
@ -262,11 +262,11 @@ void Circle3DOverlay::render(RenderArgs* args) {
while (angle <= endAt) {
angleInRadians = glm::radians(angle);
glm::vec2 thisPointA(cos(angleInRadians) * startRadius, sin(angleInRadians) * startRadius);
glm::vec2 thisPointB(cos(angleInRadians) * endRadius, sin(angleInRadians) * endRadius);
glm::vec2 thisPointA(cosf(angleInRadians) * startRadius, sinf(angleInRadians) * startRadius);
glm::vec2 thisPointB(cosf(angleInRadians) * endRadius, sinf(angleInRadians) * endRadius);
minorPoints << thisPointA << thisPointB;
angle += tickMarkAngle;
}
}
@ -286,18 +286,18 @@ void Circle3DOverlay::render(RenderArgs* args) {
geometryCache->renderVertices(gpu::LINES, _minorTicksVerticesID);
}
glPopMatrix();
glPopMatrix();
if (geometryChanged) {
_lastStartAt = startAt;
_lastEndAt = endAt;
_lastInnerRadius = innerRadius;
_lastOuterRadius = outerRadius;
}
if (glower) {
delete glower;
}
@ -305,7 +305,7 @@ void Circle3DOverlay::render(RenderArgs* args) {
void Circle3DOverlay::setProperties(const QScriptValue &properties) {
Planar3DOverlay::setProperties(properties);
QScriptValue startAt = properties.property("startAt");
if (startAt.isValid()) {
setStartAt(startAt.toVariant().toFloat());
@ -415,7 +415,7 @@ QScriptValue Circle3DOverlay::getProperty(const QString& property) {
}
bool Circle3DOverlay::findRayIntersection(const glm::vec3& origin,
bool Circle3DOverlay::findRayIntersection(const glm::vec3& origin,
const glm::vec3& direction, float& distance, BoxFace& face) {
bool intersects = Planar3DOverlay::findRayIntersection(origin, direction, distance, face);
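
Every Circle3DOverlay change above follows one rule: with float arguments, call the float-returning math functions (cosf, sinf, fmodf) instead of the double-returning ones. A hedged sketch of the same idea, using the std::cos/std::sin float overloads and names of our own:

#include <cmath>
#include <utility>

// Illustrative only: generate one point on a ring the way the overlay does, staying in float.
std::pair<float, float> ringPoint(float angleDegrees, float radius) {
    const float radians = angleDegrees * 3.14159265f / 180.0f;   // float-only degree conversion
    // std::cos/std::sin select the float overloads here; the C functions cosf/sinf used in the
    // commit achieve the same thing, while plain cos/sin would compute in double and promote.
    return { std::cos(radians) * radius, std::sin(radians) * radius };
}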

View file

@ -33,11 +33,20 @@
#include <QtMultimedia/QAudioInput>
#include <QtMultimedia/QAudioOutput>
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdouble-promotion"
#endif
extern "C" {
#include <gverb/gverb.h>
#include <gverb/gverbdsp.h>
}
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#include <soxr.h>
#include <NodeList.h>

View file

@ -167,7 +167,7 @@ public slots:
QString getDefaultDeviceName(QAudio::Mode mode);
QVector<QString> getDeviceNames(QAudio::Mode mode);
float getInputVolume() const { return (_audioInput) ? _audioInput->volume() : 0.0f; }
float getInputVolume() const { return (_audioInput) ? (float)_audioInput->volume() : 0.0f; }
void setInputVolume(float volume) { if (_audioInput) _audioInput->setVolume(volume); }
void setReverb(bool reverb);
void setReverbOptions(const AudioEffectOptions* options);
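
QAudioInput::volume() returns qreal, which is double on typical desktop builds, so returning it from a float function narrows implicitly; the (float) cast above makes that explicit. A small sketch under that assumption, with a stand-in struct instead of the Qt class:

// Minimal sketch, assuming qreal == double; FakeAudioInput stands in for QAudioInput.
struct FakeAudioInput {
    double volume() const { return 0.75; }   // mirrors QAudioInput::volume() returning qreal
};

float getInputVolume(const FakeAudioInput* audioInput) {
    // The explicit (float) cast documents the double-to-float narrowing instead of leaving it implicit.
    return audioInput ? (float)audioInput->volume() : 0.0f;
}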

View file

@ -211,7 +211,7 @@ float AudioRingBuffer::getFrameLoudness(const int16_t* frameStart) const {
const int16_t* _bufferLastAt = _buffer + _bufferLength - 1;
for (int i = 0; i < _numFrameSamples; ++i) {
loudness += std::abs(*sampleAt);
loudness += (float) std::abs(*sampleAt);
sampleAt = sampleAt == _bufferLastAt ? _buffer : sampleAt + 1;
}
loudness /= _numFrameSamples;

View file

@ -11,10 +11,18 @@
#include <QByteArray>
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdouble-promotion"
#endif
#include <glm/gtx/quaternion.hpp>
#include <glm/gtx/string_cast.hpp>
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#include <gpu/GPUConfig.h>
#include <DeferredLightingEffect.h>

View file

@ -639,8 +639,8 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
void EntityItem::debugDump() const {
auto position = getPosition();
qCDebug(entities) << "EntityItem id:" << getEntityItemID();
qCDebug(entities, " edited ago:%f", getEditedAgo());
qCDebug(entities, " position:%f,%f,%f", position.x, position.y, position.z);
qCDebug(entities, " edited ago:%f", (double)getEditedAgo());
qCDebug(entities, " position:%f,%f,%f", (double)position.x, (double)position.y, (double)position.z);
qCDebug(entities) << " dimensions:" << getDimensions();
}
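
qCDebug's printf-style form is variadic, so float arguments are promoted to double by default argument promotion before %f ever sees them; the (double) casts above simply make that promotion explicit. A plain printf sketch of the same thing (variable names are ours):

#include <cstdio>

void debugDumpPosition(float x, float y, float z, float editedAgo) {
    // %f consumes a double; floats passed through "..." are promoted anyway, so casting
    // explicitly silences -Wdouble-promotion without changing the output.
    printf(" edited ago:%f\n", (double)editedAgo);
    printf(" position:%f,%f,%f\n", (double)x, (double)y, (double)z);
}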

View file

@ -1251,7 +1251,7 @@ FBXLight extractLight(const FBXNode& object) {
if (property.name == "P") {
QString propname = property.properties.at(0).toString();
if (propname == "Intensity") {
light.intensity = 0.01f * property.properties.at(valIndex).value<double>();
light.intensity = 0.01f * property.properties.at(valIndex).value<float>();
} else if (propname == "Color") {
light.color = getVec3(property.properties, valIndex);
}

View file

@ -581,20 +581,20 @@ void sphericalHarmonicsScale(float * result, int order, const float * input, flo
void sphericalHarmonicsEvaluateDirection(float * result, int order, const glm::vec3 & dir) {
// calculate coefficients for first 3 bands of spherical harmonics
double P_0_0 = 0.282094791773878140;
double P_1_0 = 0.488602511902919920 * dir.z;
double P_1_0 = 0.488602511902919920 * (double)dir.z;
double P_1_1 = -0.488602511902919920;
double P_2_0 = 0.946174695757560080 * dir.z * dir.z - 0.315391565252520050;
double P_2_1 = -1.092548430592079200 * dir.z;
double P_2_0 = 0.946174695757560080 * (double)dir.z * (double)dir.z - 0.315391565252520050;
double P_2_1 = -1.092548430592079200 * (double)dir.z;
double P_2_2 = 0.546274215296039590;
result[0] = P_0_0;
result[1] = P_1_1 * dir.y;
result[1] = P_1_1 * (double)dir.y;
result[2] = P_1_0;
result[3] = P_1_1 * dir.x;
result[4] = P_2_2 * (dir.x * dir.y + dir.y * dir.x);
result[5] = P_2_1 * dir.y;
result[3] = P_1_1 * (double)dir.x;
result[4] = P_2_2 * ((double)dir.x * (double)dir.y + (double)dir.y * (double)dir.x);
result[5] = P_2_1 * (double)dir.y;
result[6] = P_2_0;
result[7] = P_2_1 * dir.x;
result[8] = P_2_2 * (dir.x * dir.x - dir.y * dir.y);
result[7] = P_2_1 * (double)dir.x;
result[8] = P_2_2 * ((double)dir.x * (double)dir.x - (double)dir.y * (double)dir.y);
}
bool sphericalHarmonicsFromTexture(const gpu::Texture& cubeTexture, std::vector<glm::vec3> & output, const uint order) {
@ -767,4 +767,4 @@ void SphericalHarmonics::evalFromTexture(const Texture& texture) {
L21 = coefs[7];
L22 = coefs[8];
}
}
}
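
In sphericalHarmonicsEvaluateDirection the coefficients are double while the direction is a float vec3, so every product mixes precisions; the casts above spell out which operand is widened before the result is narrowed into the float result[] array. A reduced sketch of one term (the function name is ours):

// Reduced sketch of the mixed-precision arithmetic above; the constant matches the band-1
// coefficient, everything else is illustrative.
float evalBand1Term(float dirY) {
    const double P_1_1 = -0.488602511902919920;
    // The float operand is widened to double for the multiply, then the product is
    // narrowed back into the caller's float storage, exactly as in result[1] above.
    return (float)(P_1_1 * (double)dirY);
}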

View file

@ -73,7 +73,7 @@ void Light::setMaximumRadius(float radius) {
radius = 1.0f;
}
float CutOffIntensityRatio = 0.05f;
float surfaceRadius = radius / (sqrt(1.0f / CutOffIntensityRatio) - 1.f);
float surfaceRadius = radius / (sqrtf(1.0f / CutOffIntensityRatio) - 1.0f);
editSchema()._attenuation = Vec4(surfaceRadius, 1.0f/surfaceRadius, CutOffIntensityRatio, radius);
}

View file

@ -1,325 +1,325 @@
//
// Stage.cpp
// libraries/model/src/model
//
// Created by Sam Gateau on 2/24/2015.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Stage.h"
#include <glm/gtx/transform.hpp>
#include <math.h>
#include <qcompilerdetection.h>
#include "SkyFromAtmosphere_vert.h"
#include "SkyFromAtmosphere_frag.h"
using namespace model;
void EarthSunModel::updateAll() const {
updateWorldToSurface();
updateSurfaceToEye();
updateSun();
}
Mat4d EarthSunModel::evalWorldToGeoLocationMat(double longitude, double latitude, double absAltitude, double scale) {
// Longitude is along Z axis but - from east to west
Mat4d rotLon = glm::rotate(glm::radians(longitude), Vec3d(0.0, 0.0, 1.0));
// latitude is along X axis + from south to north
Mat4d rotLat = glm::rotate(-glm::radians(latitude), Vec3d(1.0, 0.0, 0.0));
// translation is moving to the earth surface + altitude at the radius along the Y axis
Mat4d surfaceT = glm::translate(Vec3d(0.0, -absAltitude, 0.0));
// Mat4d worldScale = glm::scale(Vec3d(scale));
Mat4d worldToGeoLocMat = surfaceT * rotLat * rotLon;
return worldToGeoLocMat;
}
void EarthSunModel::updateWorldToSurface() const {
// Check if the final position is too close to the earth center ?
double absAltitude = _earthRadius + _altitude;
if ( absAltitude < 0.01) {
absAltitude = 0.01;
}
// Final world to local Frame
_worldToSurfaceMat = evalWorldToGeoLocationMat(_longitude, _latitude, absAltitude, _scale);
// and the inverse
_surfaceToWorldMat = glm::inverse(_worldToSurfaceMat);
_surfacePos = Vec3d(_surfaceToWorldMat * Vec4d(0.0, 0.0, 0.0, 1.0));
}
void EarthSunModel::updateSurfaceToEye() const {
_surfaceToEyeMat = glm::inverse(_eyeToSurfaceMat);
_worldToEyeMat = _surfaceToEyeMat * _worldToSurfaceMat;
_eyeToWorldMat = _surfaceToWorldMat * _eyeToSurfaceMat;
_eyePos = Vec3d(_eyeToWorldMat * Vec4d(0.0, 0.0, 0.0, 1.0) );
_eyeDir = Vec3d(_eyeToWorldMat * Vec4d(0.0, 0.0, -1.0, 0.0) );
}
void EarthSunModel::updateSun() const {
// Longitude is along Y axis but - from east to west
Mat4d rotSunLon;
Mat4d rotSun = evalWorldToGeoLocationMat(_sunLongitude, _sunLatitude, _earthRadius, _scale);
rotSun = glm::inverse(rotSun);
_sunDir = Vec3d(rotSun * Vec4d(0.0, 1.0, 0.0, 0.0));
// sun direction is looking up toward Y axis at the specified sun lat, long
Vec3d lssd = Vec3d(_worldToSurfaceMat * Vec4d(_sunDir.x, _sunDir.y, _sunDir.z, 0.0));
// apply surface rotation offset
glm::dquat dSurfOrient(_surfaceOrientation);
lssd = glm::rotate(dSurfOrient, lssd);
_surfaceSunDir = glm::normalize(Vec3(lssd.x, lssd.y, lssd.z));
}
void EarthSunModel::setSurfaceOrientation(const Quat& orientation) {
_surfaceOrientation = orientation;
invalidate();
}
double moduloRange(double val, double minVal, double maxVal) {
double range = maxVal - minVal;
double rval = (val - minVal) / range;
rval = rval - floor(rval);
return rval * range + minVal;
}
const float MAX_LONGITUDE = 180.0f;
const float MAX_LATITUDE = 90.0f;
float validateLongitude(float lon) {
return moduloRange(lon, -MAX_LONGITUDE, MAX_LONGITUDE);
}
float validateLatitude(float lat) {
return moduloRange(lat, -MAX_LATITUDE, MAX_LATITUDE);
}
float validateAltitude(float altitude) {
const float MIN_ALTITUDE = -1000.0f;
const float MAX_ALTITUDE = 100000.0f;
return std::min(std::max(altitude, MIN_ALTITUDE), MAX_ALTITUDE);
}
void EarthSunModel::setLatitude(float lat) {
_latitude = validateLatitude(lat);
invalidate();
}
void EarthSunModel::setLongitude(float lon) {
_longitude = validateLongitude(lon);
invalidate();
}
void EarthSunModel::setAltitude(float altitude) {
_altitude = validateAltitude(altitude);
invalidate();
}
void EarthSunModel::setSunLatitude(float lat) {
_sunLatitude = validateLatitude(lat);
invalidate();
}
void EarthSunModel::setSunLongitude(float lon) {
_sunLongitude = validateLongitude(lon);
invalidate();
}
Atmosphere::Atmosphere() {
// only if created from nothing shall we create the Buffer to store the properties
Data data;
_dataBuffer = gpu::BufferView(new gpu::Buffer(sizeof(Data), (const gpu::Byte*) &data));
setScatteringWavelength(_scatteringWavelength);
setRayleighScattering(_rayleighScattering);
setInnerOuterRadiuses(getInnerRadius(), getOuterRadius());
}
void Atmosphere::setScatteringWavelength(Vec3 wavelength) {
_scatteringWavelength = wavelength;
Data& data = editData();
data._invWaveLength = Vec4(1.0f / powf(wavelength.x, 4.0f), 1.0f / powf(wavelength.y, 4.0f), 1.0f / powf(wavelength.z, 4.0f), 0.0f);
}
void Atmosphere::setRayleighScattering(float scattering) {
_rayleighScattering = scattering;
updateScattering();
}
void Atmosphere::setMieScattering(float scattering) {
_mieScattering = scattering;
updateScattering();
}
void Atmosphere::setSunBrightness(float brightness) {
_sunBrightness = brightness;
updateScattering();
}
void Atmosphere::updateScattering() {
Data& data = editData();
data._scatterings.x = getRayleighScattering() * getSunBrightness();
data._scatterings.y = getMieScattering() * getSunBrightness();
data._scatterings.z = getRayleighScattering() * 4.0f * glm::pi<float>();
data._scatterings.w = getMieScattering() * 4.0f * glm::pi<float>();
}
void Atmosphere::setInnerOuterRadiuses(float inner, float outer) {
Data& data = editData();
data._radiuses.x = inner;
data._radiuses.y = outer;
data._scales.x = 1.0f / (outer - inner);
data._scales.z = data._scales.x / data._scales.y;
}
const int NUM_DAYS_PER_YEAR = 365;
const float NUM_HOURS_PER_DAY = 24.0f;
const float NUM_HOURS_PER_HALF_DAY = NUM_HOURS_PER_DAY * 0.5f;
SunSkyStage::SunSkyStage() :
_sunLight(new Light()),
_skybox(new Skybox())
{
_sunLight->setType(Light::SUN);
setSunIntensity(1.0f);
setSunAmbientIntensity(0.5f);
setSunColor(Vec3(1.0f, 1.0f, 1.0f));
// Default origin location is a special place in the world...
setOriginLocation(122.407f, 37.777f, 0.03f);
// Noon
setDayTime(12.0f);
// Beginning of March
setYearTime(60.0f);
auto skyFromAtmosphereVertex = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(SkyFromAtmosphere_vert)));
auto skyFromAtmosphereFragment = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(SkyFromAtmosphere_frag)));
auto skyShader = gpu::ShaderPointer(gpu::Shader::createProgram(skyFromAtmosphereVertex, skyFromAtmosphereFragment));
auto skyState = gpu::StatePointer(new gpu::State());
// skyState->setStencilEnable(false);
// skyState->setBlendEnable(false);
_skyPipeline = gpu::PipelinePointer(gpu::Pipeline::create(skyShader, skyState));
_skybox.reset(new Skybox());
_skybox->setColor(Color(1.0f, 0.0f, 0.0f));
}
SunSkyStage::~SunSkyStage() {
}
void SunSkyStage::setDayTime(float hour) {
_dayTime = moduloRange(hour, 0.f, NUM_HOURS_PER_DAY);
invalidate();
}
void SunSkyStage::setYearTime(unsigned int day) {
_yearTime = day % NUM_DAYS_PER_YEAR;
invalidate();
}
void SunSkyStage::setOriginOrientation(const Quat& orientation) {
_earthSunModel.setSurfaceOrientation(orientation);
invalidate();
}
void SunSkyStage::setOriginLocation(float longitude, float latitude, float altitude) {
_earthSunModel.setLongitude(longitude);
_earthSunModel.setLatitude(latitude);
_earthSunModel.setAltitude(altitude);
invalidate();
}
void SunSkyStage::setSunModelEnable(bool isEnabled) {
_sunModelEnable = isEnabled;
invalidate();
}
void SunSkyStage::setSunColor(const Vec3& color) {
_sunLight->setColor(color);
}
void SunSkyStage::setSunIntensity(float intensity) {
_sunLight->setIntensity(intensity);
}
void SunSkyStage::setSunAmbientIntensity(float intensity) {
_sunLight->setAmbientIntensity(intensity);
}
void SunSkyStage::setSunDirection(const Vec3& direction) {
if (!isSunModelEnabled()) {
_sunLight->setDirection(direction);
}
}
// The sun declination calculation is taken from https://en.wikipedia.org/wiki/Position_of_the_Sun
double evalSunDeclinaison(double dayNumber) {
return -(23.0 + 44.0/60.0)*cos(glm::radians((360.0/365.0)*(dayNumber + 10.0)));
}
void SunSkyStage::updateGraphicsObject() const {
// Always update the sunLongitude based on the current dayTime and the current origin
// The day time is supposed to be local at the origin
double signedNormalizedDayTime = (_dayTime - NUM_HOURS_PER_HALF_DAY) / NUM_HOURS_PER_HALF_DAY;
double sunLongitude = _earthSunModel.getLongitude() + (MAX_LONGITUDE * signedNormalizedDayTime);
_earthSunModel.setSunLongitude(sunLongitude);
// And update the sunLatitude with the declination depending on the time of year
_earthSunModel.setSunLatitude(evalSunDeclinaison(_yearTime));
if (isSunModelEnabled()) {
Vec3d sunLightDir = -_earthSunModel.getSurfaceSunDir();
_sunLight->setDirection(Vec3(sunLightDir.x, sunLightDir.y, sunLightDir.z));
double originAlt = _earthSunModel.getAltitude();
_sunLight->setPosition(Vec3(0.0f, originAlt, 0.0f));
}
// Background
switch (getBackgroundMode()) {
case NO_BACKGROUND: {
break;
}
case SKY_DOME: {
break;
}
case SKY_BOX: {
break;
}
case NUM_BACKGROUND_MODES:
Q_UNREACHABLE();
};
static int firstTime = 0;
if (firstTime == 0) {
firstTime++;
gpu::Shader::makeProgram(*(_skyPipeline->getProgram()));
}
}
void SunSkyStage::setBackgroundMode(BackgroundMode mode) {
_backgroundMode = mode;
invalidate();
}
void SunSkyStage::setSkybox(const SkyboxPointer& skybox) {
_skybox = skybox;
invalidate();
}
//
// Stage.cpp
// libraries/model/src/model
//
// Created by Sam Gateau on 2/24/2015.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Stage.h"
#include <glm/gtx/transform.hpp>
#include <math.h>
#include <qcompilerdetection.h>
#include "SkyFromAtmosphere_vert.h"
#include "SkyFromAtmosphere_frag.h"
using namespace model;
void EarthSunModel::updateAll() const {
updateWorldToSurface();
updateSurfaceToEye();
updateSun();
}
Mat4d EarthSunModel::evalWorldToGeoLocationMat(double longitude, double latitude, double absAltitude, double scale) {
// Longitude is along Z axis but - from east to west
Mat4d rotLon = glm::rotate(glm::radians(longitude), Vec3d(0.0, 0.0, 1.0));
// latitude is along X axis + from south to north
Mat4d rotLat = glm::rotate(-glm::radians(latitude), Vec3d(1.0, 0.0, 0.0));
// translation is moving to the earth surface + altitude at the radius along the Y axis
Mat4d surfaceT = glm::translate(Vec3d(0.0, -absAltitude, 0.0));
// Mat4d worldScale = glm::scale(Vec3d(scale));
Mat4d worldToGeoLocMat = surfaceT * rotLat * rotLon;
return worldToGeoLocMat;
}
void EarthSunModel::updateWorldToSurface() const {
// Check if the final position is too close to the earth center ?
float absAltitude = _earthRadius + (double)_altitude;
if (absAltitude < 0.01f) {
absAltitude = 0.01f;
}
// Final world to local Frame
_worldToSurfaceMat = evalWorldToGeoLocationMat(_longitude, _latitude, absAltitude, _scale);
// and the inverse
_surfaceToWorldMat = glm::inverse(_worldToSurfaceMat);
_surfacePos = Vec3d(_surfaceToWorldMat * Vec4d(0.0, 0.0, 0.0, 1.0));
}
void EarthSunModel::updateSurfaceToEye() const {
_surfaceToEyeMat = glm::inverse(_eyeToSurfaceMat);
_worldToEyeMat = _surfaceToEyeMat * _worldToSurfaceMat;
_eyeToWorldMat = _surfaceToWorldMat * _eyeToSurfaceMat;
_eyePos = Vec3d(_eyeToWorldMat * Vec4d(0.0, 0.0, 0.0, 1.0) );
_eyeDir = Vec3d(_eyeToWorldMat * Vec4d(0.0, 0.0, -1.0, 0.0) );
}
void EarthSunModel::updateSun() const {
// Longitude is along Y axis but - from east to west
Mat4d rotSunLon;
Mat4d rotSun = evalWorldToGeoLocationMat(_sunLongitude, _sunLatitude, _earthRadius, _scale);
rotSun = glm::inverse(rotSun);
_sunDir = Vec3d(rotSun * Vec4d(0.0, 1.0, 0.0, 0.0));
// sun direction is looking up toward Y axis at the specified sun lat, long
Vec3d lssd = Vec3d(_worldToSurfaceMat * Vec4d(_sunDir.x, _sunDir.y, _sunDir.z, 0.0));
// apply surface rotation offset
glm::dquat dSurfOrient(_surfaceOrientation);
lssd = glm::rotate(dSurfOrient, lssd);
_surfaceSunDir = glm::normalize(Vec3(lssd.x, lssd.y, lssd.z));
}
void EarthSunModel::setSurfaceOrientation(const Quat& orientation) {
_surfaceOrientation = orientation;
invalidate();
}
double moduloRange(double val, double minVal, double maxVal) {
double range = maxVal - minVal;
double rval = (val - minVal) / range;
rval = rval - floor(rval);
return rval * range + minVal;
}
const float MAX_LONGITUDE = 180.0f;
const float MAX_LATITUDE = 90.0f;
float validateLongitude(float lon) {
return moduloRange(lon, -MAX_LONGITUDE, MAX_LONGITUDE);
}
float validateLatitude(float lat) {
return moduloRange(lat, -MAX_LATITUDE, MAX_LATITUDE);
}
float validateAltitude(float altitude) {
const float MIN_ALTITUDE = -1000.0f;
const float MAX_ALTITUDE = 100000.0f;
return std::min(std::max(altitude, MIN_ALTITUDE), MAX_ALTITUDE);
}
void EarthSunModel::setLatitude(float lat) {
_latitude = validateLatitude(lat);
invalidate();
}
void EarthSunModel::setLongitude(float lon) {
_longitude = validateLongitude(lon);
invalidate();
}
void EarthSunModel::setAltitude(float altitude) {
_altitude = validateAltitude(altitude);
invalidate();
}
void EarthSunModel::setSunLatitude(float lat) {
_sunLatitude = validateLatitude(lat);
invalidate();
}
void EarthSunModel::setSunLongitude(float lon) {
_sunLongitude = validateLongitude(lon);
invalidate();
}
Atmosphere::Atmosphere() {
// only if created from nothing shall we create the Buffer to store the properties
Data data;
_dataBuffer = gpu::BufferView(new gpu::Buffer(sizeof(Data), (const gpu::Byte*) &data));
setScatteringWavelength(_scatteringWavelength);
setRayleighScattering(_rayleighScattering);
setInnerOuterRadiuses(getInnerRadius(), getOuterRadius());
}
void Atmosphere::setScatteringWavelength(Vec3 wavelength) {
_scatteringWavelength = wavelength;
Data& data = editData();
data._invWaveLength = Vec4(1.0f / powf(wavelength.x, 4.0f), 1.0f / powf(wavelength.y, 4.0f), 1.0f / powf(wavelength.z, 4.0f), 0.0f);
}
void Atmosphere::setRayleighScattering(float scattering) {
_rayleighScattering = scattering;
updateScattering();
}
void Atmosphere::setMieScattering(float scattering) {
_mieScattering = scattering;
updateScattering();
}
void Atmosphere::setSunBrightness(float brightness) {
_sunBrightness = brightness;
updateScattering();
}
void Atmosphere::updateScattering() {
Data& data = editData();
data._scatterings.x = getRayleighScattering() * getSunBrightness();
data._scatterings.y = getMieScattering() * getSunBrightness();
data._scatterings.z = getRayleighScattering() * 4.0f * glm::pi<float>();
data._scatterings.w = getMieScattering() * 4.0f * glm::pi<float>();
}
void Atmosphere::setInnerOuterRadiuses(float inner, float outer) {
Data& data = editData();
data._radiuses.x = inner;
data._radiuses.y = outer;
data._scales.x = 1.0f / (outer - inner);
data._scales.z = data._scales.x / data._scales.y;
}
const int NUM_DAYS_PER_YEAR = 365;
const float NUM_HOURS_PER_DAY = 24.0f;
const float NUM_HOURS_PER_HALF_DAY = NUM_HOURS_PER_DAY * 0.5f;
SunSkyStage::SunSkyStage() :
_sunLight(new Light()),
_skybox(new Skybox())
{
_sunLight->setType(Light::SUN);
setSunIntensity(1.0f);
setSunAmbientIntensity(0.5f);
setSunColor(Vec3(1.0f, 1.0f, 1.0f));
// Default origin location is a special place in the world...
setOriginLocation(122.407f, 37.777f, 0.03f);
// Noon
setDayTime(12.0f);
// Beginning of March
setYearTime(60.0f);
auto skyFromAtmosphereVertex = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(SkyFromAtmosphere_vert)));
auto skyFromAtmosphereFragment = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(SkyFromAtmosphere_frag)));
auto skyShader = gpu::ShaderPointer(gpu::Shader::createProgram(skyFromAtmosphereVertex, skyFromAtmosphereFragment));
auto skyState = gpu::StatePointer(new gpu::State());
// skyState->setStencilEnable(false);
// skyState->setBlendEnable(false);
_skyPipeline = gpu::PipelinePointer(gpu::Pipeline::create(skyShader, skyState));
_skybox.reset(new Skybox());
_skybox->setColor(Color(1.0f, 0.0f, 0.0f));
}
SunSkyStage::~SunSkyStage() {
}
void SunSkyStage::setDayTime(float hour) {
_dayTime = moduloRange(hour, 0.f, NUM_HOURS_PER_DAY);
invalidate();
}
void SunSkyStage::setYearTime(unsigned int day) {
_yearTime = day % NUM_DAYS_PER_YEAR;
invalidate();
}
void SunSkyStage::setOriginOrientation(const Quat& orientation) {
_earthSunModel.setSurfaceOrientation(orientation);
invalidate();
}
void SunSkyStage::setOriginLocation(float longitude, float latitude, float altitude) {
_earthSunModel.setLongitude(longitude);
_earthSunModel.setLatitude(latitude);
_earthSunModel.setAltitude(altitude);
invalidate();
}
void SunSkyStage::setSunModelEnable(bool isEnabled) {
_sunModelEnable = isEnabled;
invalidate();
}
void SunSkyStage::setSunColor(const Vec3& color) {
_sunLight->setColor(color);
}
void SunSkyStage::setSunIntensity(float intensity) {
_sunLight->setIntensity(intensity);
}
void SunSkyStage::setSunAmbientIntensity(float intensity) {
_sunLight->setAmbientIntensity(intensity);
}
void SunSkyStage::setSunDirection(const Vec3& direction) {
if (!isSunModelEnabled()) {
_sunLight->setDirection(direction);
}
}
// The sun declination calculation is taken from https://en.wikipedia.org/wiki/Position_of_the_Sun
double evalSunDeclinaison(double dayNumber) {
return -(23.0 + 44.0/60.0)*cos(glm::radians((360.0/365.0)*(dayNumber + 10.0)));
}
void SunSkyStage::updateGraphicsObject() const {
// Always update the sunLongitude based on the current dayTime and the current origin
// The day time is supposed to be local at the origin
float signedNormalizedDayTime = (_dayTime - NUM_HOURS_PER_HALF_DAY) / NUM_HOURS_PER_HALF_DAY;
float sunLongitude = _earthSunModel.getLongitude() + (MAX_LONGITUDE * signedNormalizedDayTime);
_earthSunModel.setSunLongitude(sunLongitude);
// And update the sunLatitude with the declination depending on the time of year
_earthSunModel.setSunLatitude(evalSunDeclinaison(_yearTime));
if (isSunModelEnabled()) {
Vec3d sunLightDir = -_earthSunModel.getSurfaceSunDir();
_sunLight->setDirection(Vec3(sunLightDir.x, sunLightDir.y, sunLightDir.z));
double originAlt = _earthSunModel.getAltitude();
_sunLight->setPosition(Vec3(0.0f, originAlt, 0.0f));
}
// Background
switch (getBackgroundMode()) {
case NO_BACKGROUND: {
break;
}
case SKY_DOME: {
break;
}
case SKY_BOX: {
break;
}
case NUM_BACKGROUND_MODES:
Q_UNREACHABLE();
};
static int firstTime = 0;
if (firstTime == 0) {
firstTime++;
gpu::Shader::makeProgram(*(_skyPipeline->getProgram()));
}
}
void SunSkyStage::setBackgroundMode(BackgroundMode mode) {
_backgroundMode = mode;
invalidate();
}
void SunSkyStage::setSkybox(const SkyboxPointer& skybox) {
_skybox = skybox;
invalidate();
}
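
The Stage.cpp changes above keep the geodetic math of EarthSunModel largely in double while the user-facing members stay float, with explicit promotions where the two meet. A hedged sketch of that split, using our own member names rather than the model's:

// Sketch only: double for the planetary math, float for the user-facing value, and one
// explicit promotion where they meet.
struct PlanetModelSketch {
    double earthRadiusKm = 6360.0;
    float  altitudeKm = 0.01f;

    double absoluteAltitudeKm() const {
        double absAltitude = earthRadiusKm + (double)altitudeKm;  // explicit float -> double
        return (absAltitude < 0.01) ? 0.01 : absAltitude;
    }
};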

View file

@ -1,249 +1,249 @@
//
// Stage.h
// libraries/model/src/model
//
// Created by Sam Gateau on 2/24/2015.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_model_Stage_h
#define hifi_model_Stage_h
#include "gpu/Pipeline.h"
#include "Light.h"
#include "Skybox.h"
namespace model {
typedef glm::dvec3 Vec3d;
typedef glm::dvec4 Vec4d;
typedef glm::dmat4 Mat4d;
typedef glm::mat4 Mat4;
class EarthSunModel {
public:
void setScale(float scale);
float getScale() const { return _scale; }
void setLatitude(float lat);
float getLatitude() const { return _latitude; }
void setLongitude(float lon);
float getLongitude() const { return _longitude; }
void setAltitude(float altitude);
float getAltitude() const { return _altitude; }
void setSurfaceOrientation(const Quat& orientation);
const Quat& getSurfaceOrientation() const { valid(); return _surfaceOrientation; }
const Vec3d& getSurfacePos() const { valid(); return _surfacePos; }
const Mat4d& getSurfaceToWorldMat() const { valid(); return _surfaceToWorldMat; }
const Mat4d& getWoldToSurfaceMat() const { valid(); return _worldToSurfaceMat; }
const Mat4d& getEyeToSurfaceMat() const { valid(); return _eyeToSurfaceMat; }
const Mat4d& getSurfaceToEyeMat() const { valid(); return _surfaceToEyeMat; }
const Mat4d& getEyeToWorldMat() const { valid(); return _eyeToWorldMat; }
const Mat4d& getWorldToEyeMat() const { valid(); return _worldToEyeMat; }
//or set the surfaceToEye mat directly
void setEyeToSurfaceMat( const Mat4d& e2s);
const Vec3d& getEyePos() const { valid(); return _eyePos; }
const Vec3d& getEyeDir() const { valid(); return _eyeDir; }
void setSunLongitude(float lon);
float getSunLongitude() const { return _sunLongitude; }
void setSunLatitude(float lat);
float getSunLatitude() const { return _sunLatitude; }
const Vec3d& getWorldSunDir() const { valid(); return _sunDir; }
const Vec3d& getSurfaceSunDir() const { valid(); return _surfaceSunDir; }
EarthSunModel() { valid(); }
protected:
float _scale = 1000.0f; //Km
double _earthRadius = 6360.0;
Quat _surfaceOrientation;
float _longitude = 0.0f;
float _latitude = 0.0f;
float _altitude = 0.01f;
mutable Vec3d _surfacePos;
mutable Mat4d _worldToSurfaceMat;
mutable Mat4d _surfaceToWorldMat;
void updateWorldToSurface() const;
mutable Mat4d _surfaceToEyeMat;
mutable Mat4d _eyeToSurfaceMat;
mutable Vec3d _eyeDir;
mutable Vec3d _eyePos;
void updateSurfaceToEye() const;
mutable Mat4d _worldToEyeMat;
mutable Mat4d _eyeToWorldMat;
float _sunLongitude = 0.0f;
float _sunLatitude = 0.0f;
mutable Vec3d _sunDir;
mutable Vec3d _surfaceSunDir;
void updateSun() const;
mutable bool _invalid = true;
void invalidate() const { _invalid = true; }
void valid() const { if (_invalid) { updateAll(); _invalid = false; } }
void updateAll() const;
static Mat4d evalWorldToGeoLocationMat(double longitude, double latitude, double altitude, double scale);
};
class Atmosphere {
public:
Atmosphere();
Atmosphere(const Atmosphere& atmosphere);
Atmosphere& operator= (const Atmosphere& atmosphere);
virtual ~Atmosphere() {};
void setScatteringWavelength(Vec3 wavelength);
const Vec3& getScatteringWavelength() const { return _scatteringWavelength; }
void setRayleighScattering(float scattering);
float getRayleighScattering() const { return _rayleighScattering; }
void setMieScattering(float scattering);
float getMieScattering() const { return _mieScattering; }
void setSunBrightness(float brightness);
float getSunBrightness() const { return _sunBrightness; }
void setInnerOuterRadiuses(float inner, float outer);
float getInnerRadius() const { return getData()._radiuses.x; }
float getOuterRadius() const { return getData()._radiuses.y; }
// Data to access the attribute values of the atmosphere
class Data {
public:
Vec4 _invWaveLength = Vec4(0.0f);
Vec4 _radiuses = Vec4(6000.0f, 6025.0f, 0.0f, 0.0f);
Vec4 _scales = Vec4(0.0f, 0.25f, 0.0f, 0.0f);
Vec4 _scatterings = Vec4(0.0f);
Vec4 _control = Vec4(2.0f, -0.990f, -0.990f*-0.990f, 0.f);
Data() {}
};
const UniformBufferView& getDataBuffer() const { return _dataBuffer; }
protected:
UniformBufferView _dataBuffer;
Vec3 _scatteringWavelength = Vec3(0.650f, 0.570f, 0.475f);
float _rayleighScattering = 0.0025f;
float _mieScattering = 0.0010f;
float _sunBrightness = 20.0f;
const Data& getData() const { return _dataBuffer.get<Data>(); }
Data& editData() { return _dataBuffer.edit<Data>(); }
void updateScattering();
};
typedef std::shared_ptr< Atmosphere > AtmospherePointer;
// Sun sky stage generates the rendering primitives to display a scene realistically
// at the specified location and time around earth
class SunSkyStage {
public:
SunSkyStage();
~SunSkyStage();
// time of the day (local to the position) expressed in decimal hour in the range [0.0, 24.0]
void setDayTime(float hour);
float getDayTime() const { return _dayTime; }
// time of the year expressed in day in the range [0, 365]
void setYearTime(unsigned int day);
unsigned int getYearTime() const { return _yearTime; }
// Origin orientation used to modify the cardinal axis alignment used.
// The default is north along the +Z axis and west along the +X axis. This orientation gets added
// to the transform stack producing the sun light direction.
void setOriginOrientation(const Quat& orientation);
const Quat& getOriginOrientation() const { return _earthSunModel.getSurfaceOrientation(); }
// Location used to define the sun & sky is a longitude and latitude [rad] and an earth surface altitude [km]
void setOriginLocation(float longitude, float latitude, float surfaceAltitude);
float getOriginLatitude() const { return _earthSunModel.getLatitude(); }
float getOriginLongitude() const { return _earthSunModel.getLongitude(); }
float getOriginSurfaceAltitude() const { return _earthSunModel.getAltitude(); }
// Enable / disable the effect of the time and location on the sun direction and color
void setSunModelEnable(bool isEnabled);
bool isSunModelEnabled() const { return _sunModelEnable; }
// Sun properties
void setSunColor(const Vec3& color);
const Vec3& getSunColor() const { return getSunLight()->getColor(); }
void setSunIntensity(float intensity);
float getSunIntensity() const { return getSunLight()->getIntensity(); }
void setSunAmbientIntensity(float intensity);
float getSunAmbientIntensity() const { return getSunLight()->getAmbientIntensity(); }
// The sun direction is expressed in the world space
void setSunDirection(const Vec3& direction);
const Vec3& getSunDirection() const { return getSunLight()->getDirection(); }
LightPointer getSunLight() const { valid(); return _sunLight; }
AtmospherePointer getAtmosphere() const { valid(); return _atmosphere; }
enum BackgroundMode {
NO_BACKGROUND = 0,
SKY_DOME,
SKY_BOX,
NUM_BACKGROUND_MODES,
};
void setBackgroundMode(BackgroundMode mode);
BackgroundMode getBackgroundMode() const { return _backgroundMode; }
// Skybox
void setSkybox(const SkyboxPointer& skybox);
const SkyboxPointer& getSkybox() const { valid(); return _skybox; }
protected:
BackgroundMode _backgroundMode = SKY_BOX;
LightPointer _sunLight;
AtmospherePointer _atmosphere;
mutable SkyboxPointer _skybox;
gpu::PipelinePointer _skyPipeline;
float _dayTime = 12.0f;
int _yearTime = 0;
mutable EarthSunModel _earthSunModel;
bool _sunModelEnable = true;
mutable bool _invalid = true;
void invalidate() const { _invalid = true; }
void valid() const { if (_invalid) { updateGraphicsObject(); _invalid = false; } }
void updateGraphicsObject() const;
};
typedef std::shared_ptr< SunSkyStage > SunSkyStagePointer;
};
#endif
//
// Stage.h
// libraries/model/src/model
//
// Created by Sam Gateau on 2/24/2015.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_model_Stage_h
#define hifi_model_Stage_h
#include "gpu/Pipeline.h"
#include "Light.h"
#include "Skybox.h"
namespace model {
typedef glm::dvec3 Vec3d;
typedef glm::dvec4 Vec4d;
typedef glm::dmat4 Mat4d;
typedef glm::mat4 Mat4;
class EarthSunModel {
public:
void setScale(float scale);
float getScale() const { return _scale; }
void setLatitude(float lat);
float getLatitude() const { return _latitude; }
void setLongitude(float lon);
float getLongitude() const { return _longitude; }
void setAltitude(float altitude);
float getAltitude() const { return _altitude; }
void setSurfaceOrientation(const Quat& orientation);
const Quat& getSurfaceOrientation() const { valid(); return _surfaceOrientation; }
const Vec3d& getSurfacePos() const { valid(); return _surfacePos; }
const Mat4d& getSurfaceToWorldMat() const { valid(); return _surfaceToWorldMat; }
const Mat4d& getWoldToSurfaceMat() const { valid(); return _worldToSurfaceMat; }
const Mat4d& getEyeToSurfaceMat() const { valid(); return _eyeToSurfaceMat; }
const Mat4d& getSurfaceToEyeMat() const { valid(); return _surfaceToEyeMat; }
const Mat4d& getEyeToWorldMat() const { valid(); return _eyeToWorldMat; }
const Mat4d& getWorldToEyeMat() const { valid(); return _worldToEyeMat; }
//or set the surfaceToEye mat directly
void setEyeToSurfaceMat( const Mat4d& e2s);
const Vec3d& getEyePos() const { valid(); return _eyePos; }
const Vec3d& getEyeDir() const { valid(); return _eyeDir; }
void setSunLongitude(float lon);
float getSunLongitude() const { return _sunLongitude; }
void setSunLatitude(float lat);
float getSunLatitude() const { return _sunLatitude; }
const Vec3d& getWorldSunDir() const { valid(); return _sunDir; }
const Vec3d& getSurfaceSunDir() const { valid(); return _surfaceSunDir; }
EarthSunModel() { valid(); }
protected:
float _scale = 1000.0f; //Km
double _earthRadius = 6360.0;
Quat _surfaceOrientation;
float _longitude = 0.0f;
float _latitude = 0.0f;
float _altitude = 0.01f;
mutable Vec3d _surfacePos;
mutable Mat4d _worldToSurfaceMat;
mutable Mat4d _surfaceToWorldMat;
void updateWorldToSurface() const;
mutable Mat4d _surfaceToEyeMat;
mutable Mat4d _eyeToSurfaceMat;
mutable Vec3d _eyeDir;
mutable Vec3d _eyePos;
void updateSurfaceToEye() const;
mutable Mat4d _worldToEyeMat;
mutable Mat4d _eyeToWorldMat;
float _sunLongitude = 0.0f;
float _sunLatitude = 0.0f;
mutable Vec3d _sunDir;
mutable Vec3d _surfaceSunDir;
void updateSun() const;
mutable bool _invalid = true;
void invalidate() const { _invalid = true; }
void valid() const { if (_invalid) { updateAll(); _invalid = false; } }
void updateAll() const;
static Mat4d evalWorldToGeoLocationMat(double longitude, double latitude, double altitude, double scale);
};
class Atmosphere {
public:
Atmosphere();
Atmosphere(const Atmosphere& atmosphere);
Atmosphere& operator= (const Atmosphere& atmosphere);
virtual ~Atmosphere() {};
void setScatteringWavelength(Vec3 wavelength);
const Vec3& getScatteringWavelength() const { return _scatteringWavelength; }
void setRayleighScattering(float scattering);
float getRayleighScattering() const { return _rayleighScattering; }
void setMieScattering(float scattering);
float getMieScattering() const { return _mieScattering; }
void setSunBrightness(float brightness);
float getSunBrightness() const { return _sunBrightness; }
void setInnerOuterRadiuses(float inner, float outer);
float getInnerRadius() const { return getData()._radiuses.x; }
float getOuterRadius() const { return getData()._radiuses.y; }
// Data to access the attribute values of the atmosphere
class Data {
public:
Vec4 _invWaveLength = Vec4(0.0f);
Vec4 _radiuses = Vec4(6000.0f, 6025.0f, 0.0f, 0.0f);
Vec4 _scales = Vec4(0.0f, 0.25f, 0.0f, 0.0f);
Vec4 _scatterings = Vec4(0.0f);
Vec4 _control = Vec4(2.0f, -0.990f, -0.990f*-0.990f, 0.f);
Data() {}
};
const UniformBufferView& getDataBuffer() const { return _dataBuffer; }
protected:
UniformBufferView _dataBuffer;
Vec3 _scatteringWavelength = Vec3(0.650f, 0.570f, 0.475f);
float _rayleighScattering = 0.0025f;
float _mieScattering = 0.0010f;
float _sunBrightness = 20.0f;
const Data& getData() const { return _dataBuffer.get<Data>(); }
Data& editData() { return _dataBuffer.edit<Data>(); }
void updateScattering();
};
typedef std::shared_ptr< Atmosphere > AtmospherePointer;
// Sun sky stage generates the rendering primitives to display a scene realistically
// at the specified location and time around earth
class SunSkyStage {
public:
SunSkyStage();
~SunSkyStage();
// time of the day (local to the position) expressed in decimal hour in the range [0.0, 24.0]
void setDayTime(float hour);
float getDayTime() const { return _dayTime; }
// time of the year expressed in day in the range [0, 365]
void setYearTime(unsigned int day);
unsigned int getYearTime() const { return _yearTime; }
// Origin orientation used to modify the cardinal axis alignment used.
// The default is north along the +Z axis and west along the +X axis. This orientation gets added
// to the transform stack producing the sun light direction.
void setOriginOrientation(const Quat& orientation);
const Quat& getOriginOrientation() const { return _earthSunModel.getSurfaceOrientation(); }
// Location used to define the sun & sky is a longitude and latitude [rad] and an earth surface altitude [km]
void setOriginLocation(float longitude, float latitude, float surfaceAltitude);
float getOriginLatitude() const { return _earthSunModel.getLatitude(); }
float getOriginLongitude() const { return _earthSunModel.getLongitude(); }
float getOriginSurfaceAltitude() const { return _earthSunModel.getAltitude(); }
// Enable / disable the effect of the time and location on the sun direction and color
void setSunModelEnable(bool isEnabled);
bool isSunModelEnabled() const { return _sunModelEnable; }
// Sun properties
void setSunColor(const Vec3& color);
const Vec3& getSunColor() const { return getSunLight()->getColor(); }
void setSunIntensity(float intensity);
float getSunIntensity() const { return getSunLight()->getIntensity(); }
void setSunAmbientIntensity(float intensity);
float getSunAmbientIntensity() const { return getSunLight()->getAmbientIntensity(); }
// The sun direction is expressed in the world space
void setSunDirection(const Vec3& direction);
const Vec3& getSunDirection() const { return getSunLight()->getDirection(); }
LightPointer getSunLight() const { valid(); return _sunLight; }
AtmospherePointer getAtmosphere() const { valid(); return _atmosphere; }
enum BackgroundMode {
NO_BACKGROUND = 0,
SKY_DOME,
SKY_BOX,
NUM_BACKGROUND_MODES,
};
void setBackgroundMode(BackgroundMode mode);
BackgroundMode getBackgroundMode() const { return _backgroundMode; }
// Skybox
void setSkybox(const SkyboxPointer& skybox);
const SkyboxPointer& getSkybox() const { valid(); return _skybox; }
protected:
BackgroundMode _backgroundMode = SKY_BOX;
LightPointer _sunLight;
AtmospherePointer _atmosphere;
mutable SkyboxPointer _skybox;
gpu::PipelinePointer _skyPipeline;
float _dayTime = 12.0f;
int _yearTime = 0;
mutable EarthSunModel _earthSunModel;
bool _sunModelEnable = true;
mutable bool _invalid = true;
void invalidate() const { _invalid = true; }
void valid() const { if (_invalid) { updateGraphicsObject(); _invalid = false; } }
void updateGraphicsObject() const;
};
typedef std::shared_ptr< SunSkyStage > SunSkyStagePointer;
};
#endif
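Putting the setters above together, a caller would typically configure the stage once and let the lazy valid()/updateGraphicsObject() path refresh the derived sun light. The following is a minimal usage sketch only, assuming the enclosing namespace is model, the header above is included, and the numeric values are arbitrary:

    auto stage = std::make_shared<model::SunSkyStage>();
    stage->setOriginLocation(0.12f, 0.87f, 0.01f);          // longitude, latitude [rad], surface altitude [km]
    stage->setDayTime(15.5f);                                // decimal hour in [0.0, 24.0]
    stage->setYearTime(172);                                 // day of the year in [0, 365]
    stage->setSunModelEnable(true);                          // let time/location drive the sun direction and color
    stage->setBackgroundMode(model::SunSkyStage::SKY_DOME);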

View file

@ -21,7 +21,7 @@ BandwidthRecorder::Channel::Channel() {
float BandwidthRecorder::Channel::getAverageInputPacketsPerSecond() {
float delt = _input.getEventDeltaAverage();
if (delt > 0.0f) {
return (1.0 / delt);
return (1.0f / delt);
}
return 0.0f;
}
@ -29,7 +29,7 @@ float BandwidthRecorder::Channel::getAverageInputPacketsPerSecond() {
float BandwidthRecorder::Channel::getAverageOutputPacketsPerSecond() {
float delt = _input.getEventDeltaAverage();
if (delt > 0.0f) {
return (1.0 / _output.getEventDeltaAverage());
return (1.0f / _output.getEventDeltaAverage());
}
return 0.0f;
}
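The change from 1.0 to 1.0f is the core of this commit: 1.0 is a double literal, so dividing it by a float operand implicitly promotes the float to double before narrowing the result back, and that promotion is exactly what -Wdouble-promotion reports. A minimal standalone sketch of the pattern (not taken from this codebase):

    int main() {
        float delt = 0.016f;                   // seconds between packets
        // float bad = 1.0 / delt;             // 1.0 is a double literal: 'delt' is promoted to double -> warning
        float packetsPerSecond = 1.0f / delt;  // all-float arithmetic, no promotion
        (void)packetsPerSecond;
        return 0;
    }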

View file

@ -75,7 +75,7 @@ CoverageMap::~CoverageMap() {
void CoverageMap::printStats() {
qCDebug(octree, "CoverageMap::printStats()...");
qCDebug(octree, "MINIMUM_POLYGON_AREA_TO_STORE=%f",MINIMUM_POLYGON_AREA_TO_STORE);
qCDebug(octree, "MINIMUM_POLYGON_AREA_TO_STORE=%f", (double)MINIMUM_POLYGON_AREA_TO_STORE);
qCDebug(octree, "_mapCount=%d",_mapCount);
qCDebug(octree, "_checkMapRootCalls=%d",_checkMapRootCalls);
qCDebug(octree, "_notAllInView=%d",_notAllInView);

View file

@ -82,7 +82,7 @@ void CoverageMapV2::erase() {
if (_isRoot && wantDebugging) {
qCDebug(octree, "CoverageMapV2 last to be deleted...");
qCDebug(octree, "MINIMUM_POLYGON_AREA_TO_STORE=%f",MINIMUM_POLYGON_AREA_TO_STORE);
qCDebug(octree, "MINIMUM_POLYGON_AREA_TO_STORE=%f", (double)MINIMUM_POLYGON_AREA_TO_STORE);
qCDebug(octree, "_mapCount=%d",_mapCount);
qCDebug(octree, "_checkMapRootCalls=%d",_checkMapRootCalls);
qCDebug(octree, "_notAllInView=%d",_notAllInView);

View file

@ -69,8 +69,8 @@ void OctreeElement::init(unsigned char * octalCode) {
// set up the _children union
_childBitmask = 0;
_childrenExternal = false;
#ifdef BLENDED_UNION_CHILDREN
_children.external = NULL;
_singleChildrenCount++;
@ -662,7 +662,7 @@ void OctreeElement::deleteAllChildren() {
delete childAt;
}
}
if (_childrenExternal) {
// if the children_t union represents _children.external we need to delete it here
delete[] _children.external;
@ -764,7 +764,7 @@ void OctreeElement::setChildAtIndex(int childIndex, OctreeElement* child) {
memset(_children.external, 0, sizeof(OctreeElement*) * NUMBER_OF_CHILDREN);
_children.external[firstIndex] = previousChild;
_children.external[childIndex] = child;
_childrenExternal = true;
_externalChildrenMemoryUsage += NUMBER_OF_CHILDREN * sizeof(OctreeElement*);
@ -776,7 +776,7 @@ void OctreeElement::setChildAtIndex(int childIndex, OctreeElement* child) {
delete[] _children.external;
_childrenExternal = false;
_externalChildrenMemoryUsage -= NUMBER_OF_CHILDREN * sizeof(OctreeElement*);
if (childIndex == firstIndex) {
_children.single = previousSecondChild;
@ -1203,12 +1203,13 @@ void OctreeElement::printDebugDetails(const char* label) const {
setAtBit(childBits,i);
}
}
QDebug elementDebug = qDebug().nospace();
QString resultString;
resultString.sprintf("%s - Voxel at corner=(%f,%f,%f) size=%f\n isLeaf=%s isDirty=%s shouldRender=%s\n children=", label,
_cube.getCorner().x, _cube.getCorner().y, _cube.getCorner().z, _cube.getScale(),
(double)_cube.getCorner().x, (double)_cube.getCorner().y, (double)_cube.getCorner().z,
(double)_cube.getScale(),
debug::valueOf(isLeaf()), debug::valueOf(isDirty()), debug::valueOf(getShouldRender()));
elementDebug << resultString;
@ -1236,7 +1237,7 @@ ViewFrustum::location OctreeElement::inFrustum(const ViewFrustum& viewFrustum) c
// By doing this, we don't need to test each child voxel's position vs the LOD boundary
bool OctreeElement::calculateShouldRender(const ViewFrustum* viewFrustum, float voxelScaleSize, int boundaryLevelAdjust) const {
bool shouldRender = false;
if (hasContent()) {
float furthestDistance = furthestDistanceToCamera(*viewFrustum);
float childBoundary = boundaryDistanceForRenderLevel(getLevel() + 1 + boundaryLevelAdjust, voxelScaleSize);
@ -1330,7 +1331,7 @@ void OctreeElement::notifyUpdateHooks() {
}
bool OctreeElement::findRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
bool& keepSearching, OctreeElement*& element, float& distance, BoxFace& face,
bool& keepSearching, OctreeElement*& element, float& distance, BoxFace& face,
void** intersectedObject, bool precisionPicking) {
keepSearching = true; // assume that we will continue searching after this.
@ -1354,7 +1355,7 @@ bool OctreeElement::findRayIntersection(const glm::vec3& origin, const glm::vec3
// for any details inside the cube to be closer so we don't need to consider them.
if (_cube.contains(origin) || distanceToElementCube < distance) {
if (findDetailedRayIntersection(origin, direction, keepSearching, element, distanceToElementDetails,
if (findDetailedRayIntersection(origin, direction, keepSearching, element, distanceToElementDetails,
face, intersectedObject, precisionPicking, distanceToElementCube)) {
if (distanceToElementDetails < distance) {
@ -1368,10 +1369,10 @@ bool OctreeElement::findRayIntersection(const glm::vec3& origin, const glm::vec3
}
bool OctreeElement::findDetailedRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
bool& keepSearching, OctreeElement*& element, float& distance, BoxFace& face,
bool& keepSearching, OctreeElement*& element, float& distance, BoxFace& face,
void** intersectedObject, bool precisionPicking, float distanceToElementCube) {
// we did hit this element, so calculate appropriate distances
// we did hit this element, so calculate appropriate distances
if (hasContent()) {
element = this;
distance = distanceToElementCube;
@ -1399,7 +1400,8 @@ OctreeElement* OctreeElement::getOrCreateChildElementAt(float x, float y, float
float halfOurScale = ourScale / 2.0f;
if(s > ourScale) {
qCDebug(octree, "UNEXPECTED -- OctreeElement::getOrCreateChildElementAt() s=[%f] > ourScale=[%f] ", s, ourScale);
qCDebug(octree, "UNEXPECTED -- OctreeElement::getOrCreateChildElementAt() s=[%f] > ourScale=[%f] ",
(double)s, (double)ourScale);
}
if (s > halfOurScale) {
@ -1466,21 +1468,21 @@ OctreeElement* OctreeElement::getOrCreateChildElementAt(float x, float y, float
OctreeElement* OctreeElement::getOrCreateChildElementContaining(const AACube& cube) {
OctreeElement* child = NULL;
int childIndex = getMyChildContaining(cube);
// If getMyChildContaining() returns CHILD_UNKNOWN then it means that our level
// is the correct level for this cube
if (childIndex == CHILD_UNKNOWN) {
return this;
}
// Now, check if we have a child at that location
child = getChildAtIndex(childIndex);
if (!child) {
child = addChildAtIndex(childIndex);
}
// if we've made a really small child, then go ahead and use that one.
if (child->getScale() <= SMALLEST_REASONABLE_OCTREE_ELEMENT_SCALE) {
return child;
@ -1492,21 +1494,21 @@ OctreeElement* OctreeElement::getOrCreateChildElementContaining(const AACube& cu
OctreeElement* OctreeElement::getOrCreateChildElementContaining(const AABox& box) {
OctreeElement* child = NULL;
int childIndex = getMyChildContaining(box);
// If getMyChildContaining() returns CHILD_UNKNOWN then it means that our level
// is the correct level for this cube
if (childIndex == CHILD_UNKNOWN) {
return this;
}
// Now, check if we have a child at that location
child = getChildAtIndex(childIndex);
if (!child) {
child = addChildAtIndex(childIndex);
}
// if we've made a really small child, then go ahead and use that one.
if (child->getScale() <= SMALLEST_REASONABLE_OCTREE_ELEMENT_SCALE) {
return child;
@ -1542,7 +1544,7 @@ int OctreeElement::getMyChildContaining(const AACube& cube) const {
if (childIndexCubeMinimum != childIndexCubeMaximum) {
return CHILD_UNKNOWN;
}
return childIndexCubeMinimum; // either would do, they are the same
}
return CHILD_UNKNOWN; // since cube is not contained in our element, it can't be in one of our children
@ -1555,7 +1557,7 @@ int OctreeElement::getMyChildContaining(const AABox& box) const {
// TODO: consider changing this to assert()
if(boxLargestScale > ourScale) {
qCDebug(octree, "UNEXPECTED -- OctreeElement::getMyChildContaining() "
"boxLargestScale=[%f] > ourScale=[%f] ", boxLargestScale, ourScale);
"boxLargestScale=[%f] > ourScale=[%f] ", (double)boxLargestScale, (double)ourScale);
}
// Determine which of our children the minimum and maximum corners of the cube live in...
@ -1566,7 +1568,7 @@ int OctreeElement::getMyChildContaining(const AABox& box) const {
int childIndexCubeMinimum = getMyChildContainingPoint(cubeCornerMinimum);
int childIndexCubeMaximum = getMyChildContainingPoint(cubeCornerMaximum);
// If the minimum and maximum corners of the cube are in two different children's cubes,
// If the minimum and maximum corners of the cube are in two different children's cubes,
// then we are the containing element
if (childIndexCubeMinimum != childIndexCubeMaximum) {
return CHILD_UNKNOWN;
@ -1579,12 +1581,12 @@ int OctreeElement::getMyChildContaining(const AABox& box) const {
int OctreeElement::getMyChildContainingPoint(const glm::vec3& point) const {
glm::vec3 ourCenter = _cube.calcCenter();
int childIndex = CHILD_UNKNOWN;
// since point is not contained in our element, it can't be in one of our children
if (!_cube.contains(point)) {
return CHILD_UNKNOWN;
}
// left half
if (point.x > ourCenter.x) {
if (point.y > ourCenter.y) {

View file

@ -96,8 +96,9 @@ void BoundingBox::explandToInclude(const BoundingBox& box) {
void BoundingBox::printDebugDetails(const char* label) const {
qCDebug(octree, "%s _set=%s\n corner=%f,%f size=%f,%f\n bounds=[(%f,%f) to (%f,%f)]",
(label ? label : "BoundingBox"),
debug::valueOf(_set), corner.x, corner.y, size.x, size.y, corner.x, corner.y, corner.x+size.x, corner.y+size.y);
(label ? label : "BoundingBox"),
debug::valueOf(_set), (double)corner.x, (double)corner.y, (double)size.x, (double)size.y,
(double)corner.x, (double)corner.y, (double)(corner.x+size.x), (double)(corner.y+size.y));
}
@ -107,7 +108,7 @@ long OctreeProjectedPolygon::intersects_calls = 0;
OctreeProjectedPolygon::OctreeProjectedPolygon(const BoundingBox& box) :
_vertexCount(4),
_vertexCount(4),
_maxX(-FLT_MAX), _maxY(-FLT_MAX), _minX(FLT_MAX), _minY(FLT_MAX),
_distance(0)
{
@ -117,9 +118,9 @@ OctreeProjectedPolygon::OctreeProjectedPolygon(const BoundingBox& box) :
}
void OctreeProjectedPolygon::setVertex(int vertex, const glm::vec2& point) {
void OctreeProjectedPolygon::setVertex(int vertex, const glm::vec2& point) {
_vertices[vertex] = point;
// keep track of our bounding box
if (point.x > _maxX) {
_maxX = point.x;
@ -133,14 +134,14 @@ void OctreeProjectedPolygon::setVertex(int vertex, const glm::vec2& point) {
if (point.y < _minY) {
_minY = point.y;
}
}
// can be optimized with new pointInside()
bool OctreeProjectedPolygon::occludes(const OctreeProjectedPolygon& occludee, bool checkAllInView) const {
OctreeProjectedPolygon::occludes_calls++;
// if we are completely out of view, then we definitely don't occlude!
// if the occludee is completely out of view, then we also don't occlude it
//
@ -158,7 +159,7 @@ bool OctreeProjectedPolygon::occludes(const OctreeProjectedPolygon& occludee, bo
(occludee.getMinY() < getMinY())) {
return false;
}
// we need to test for identity as well, because in the case of identity, none of the points
// will be "inside" but we don't want to bail early on the first non-inside point
bool potentialIdenity = false;
@ -171,7 +172,7 @@ bool OctreeProjectedPolygon::occludes(const OctreeProjectedPolygon& occludee, bo
for(int i = 0; i < occludee.getVertexCount(); i++) {
bool vertexMatched = false;
if (!pointInside(occludee.getVertex(i), &vertexMatched)) {
// so the point we just tested isn't inside, but it might have matched a vertex
// if it didn't match a vertex, then we bail because we can't be an identity
// or if we're not expecting identity, then we also bail early, no matter what
@ -182,7 +183,7 @@ bool OctreeProjectedPolygon::occludes(const OctreeProjectedPolygon& occludee, bo
pointsInside++;
}
}
// we're only here if all points are inside and/or matched, or we had a potentialIdentity we need to check
if (pointsInside == occludee.getVertexCount()) {
return true;
@ -192,7 +193,7 @@ bool OctreeProjectedPolygon::occludes(const OctreeProjectedPolygon& occludee, bo
if (potentialIdenity) {
return matches(occludee);
}
return false; // if we got this far, then we're not occluded
}
@ -211,7 +212,7 @@ bool OctreeProjectedPolygon::matches(const OctreeProjectedPolygon& testee) const
int originIndex = 0;
for(int i = 0; i < vertextCount; i++) {
glm::vec2 testeeVertex = testee.getVertex(i);
// if they match, we found our origin.
if (testeeVertex == polygonVertex) {
originIndex = i;
@ -224,10 +225,10 @@ bool OctreeProjectedPolygon::matches(const OctreeProjectedPolygon& testee) const
glm::vec2 testeeVertex = testee.getVertex((i + originIndex) % vertextCount);
glm::vec2 polygonVertex = getVertex(i);
if (testeeVertex != polygonVertex) {
return false; // we don't match, therefore we're not the same
}
}
return true; // all of our vertices match, therefore we're the same
return false; // we don't match, therefore we're not the same
}
}
return true; // all of our vertices match, therefore we're the same
}
bool OctreeProjectedPolygon::matches(const BoundingBox& box) const {
@ -259,17 +260,17 @@ bool OctreeProjectedPolygon::pointInside(const glm::vec2& point, bool* matchesVe
return false;
}
}
return true;
}
void OctreeProjectedPolygon::printDebugDetails() const {
qCDebug(octree, "OctreeProjectedPolygon..."
" minX=%f maxX=%f minY=%f maxY=%f", getMinX(), getMaxX(), getMinY(), getMaxY());
qCDebug(octree, " vertex count=%d distance=%f", getVertexCount(), getDistance());
" minX=%f maxX=%f minY=%f maxY=%f", (double)getMinX(), (double)getMaxX(), (double)getMinY(), (double)getMaxY());
qCDebug(octree, " vertex count=%d distance=%f", getVertexCount(), (double)getDistance());
for (int i = 0; i < getVertexCount(); i++) {
glm::vec2 point = getVertex(i);
qCDebug(octree, " vertex[%d] = %f, %f ", i, point.x, point.y);
qCDebug(octree, " vertex[%d] = %f, %f ", i, (double)point.x, (double)point.y);
}
}
@ -291,7 +292,7 @@ bool OctreeProjectedPolygon::intersects(const OctreeProjectedPolygon& testee) co
// intersect on all axes.
//
// Note: this only works on convex polygons
//
//
//
bool OctreeProjectedPolygon::intersectsOnAxes(const OctreeProjectedPolygon& testee) const {
@ -307,11 +308,11 @@ bool OctreeProjectedPolygon::intersectsOnAxes(const OctreeProjectedPolygon& test
// points that are ON the edge, are considered to be "outside"
for (int j = 0; j < testee.getVertexCount(); j++) {
glm::vec2 testeeVertex = testee.getVertex(j);
// in comparison below:
// >= will cause points on edge to be considered inside
// > will cause points on edge to be considered outside
float c2 = a * testeeVertex.x + b * testeeVertex.y;
if (c2 >= c) {
goto CONTINUE_OUTER;
@ -328,11 +329,11 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
// RIGHT/NEAR
// LEFT/NEAR
if (
(getProjectionType() == that.getProjectionType()) &&
(getProjectionType() == that.getProjectionType()) &&
(
getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR) ||
getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)
)
)
) {
if (getVertex(1) == that.getVertex(0) && getVertex(4) == that.getVertex(5)) {
return true;
@ -350,10 +351,10 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
// NEAR/BOTTOM
if (
(getProjectionType() == that.getProjectionType()) &&
(getProjectionType() == that.getProjectionType()) &&
(
getProjectionType() == (PROJECTION_NEAR | PROJECTION_BOTTOM)
)
)
) {
if (getVertex(0) == that.getVertex(5) && getVertex(3) == that.getVertex(4)) {
return true;
@ -371,10 +372,10 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
// NEAR/TOP
if (
(getProjectionType() == that.getProjectionType()) &&
(getProjectionType() == that.getProjectionType()) &&
(
getProjectionType() == (PROJECTION_NEAR | PROJECTION_TOP)
)
)
) {
if (getVertex(0) == that.getVertex(5) && getVertex(1) == that.getVertex(2)) {
return true;
@ -393,11 +394,11 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
// RIGHT/NEAR & NEAR/RIGHT/TOP
// LEFT/NEAR & NEAR/LEFT/TOP
if (
((getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
((getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
||
((getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
((getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
)
{
if (getVertex(5) == that.getVertex(0) && getVertex(3) == that.getVertex(2)) {
@ -407,12 +408,12 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
// RIGHT/NEAR & NEAR/RIGHT/TOP
// LEFT/NEAR & NEAR/LEFT/TOP
if (
((that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
((that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
||
((that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
((that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
)
{
if (getVertex(0) == that.getVertex(5) && getVertex(2) == that.getVertex(3)) {
@ -423,12 +424,12 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
// RIGHT/NEAR & NEAR/RIGHT/BOTTOM
// NEAR/LEFT & NEAR/LEFT/BOTTOM
if (
((that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
((that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
||
((that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
((that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
)
{
if (getVertex(5) == that.getVertex(0) && getVertex(3) == that.getVertex(2)) {
@ -438,11 +439,11 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
// RIGHT/NEAR & NEAR/RIGHT/BOTTOM
// NEAR/LEFT & NEAR/LEFT/BOTTOM
if (
((getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
((getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
||
((getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
((getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
)
{
if (getVertex(0) == that.getVertex(5) && getVertex(2) == that.getVertex(3)) {
@ -453,7 +454,7 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
// NEAR/TOP & NEAR
if (
(getProjectionType() == (PROJECTION_NEAR )) &&
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_TOP ))
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_TOP ))
)
{
if (getVertex(0) == that.getVertex(5) && getVertex(1) == that.getVertex(2)) {
@ -464,7 +465,7 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
// NEAR/TOP & NEAR
if (
(that.getProjectionType() == (PROJECTION_NEAR )) &&
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_TOP ))
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_TOP ))
)
{
if (getVertex(5) == that.getVertex(0) && getVertex(2) == that.getVertex(1)) {
@ -475,7 +476,7 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
// NEAR/BOTTOM & NEAR
if (
(getProjectionType() == (PROJECTION_NEAR )) &&
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_BOTTOM ))
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_BOTTOM ))
)
{
if (getVertex(2) == that.getVertex(3) && getVertex(3) == that.getVertex(0)) {
@ -486,7 +487,7 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
// NEAR/BOTTOM & NEAR
if (
(that.getProjectionType() == (PROJECTION_NEAR )) &&
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_BOTTOM ))
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_BOTTOM ))
)
{
if (getVertex(3) == that.getVertex(2) && getVertex(0) == that.getVertex(3)) {
@ -497,7 +498,7 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
// NEAR/RIGHT & NEAR
if (
(getProjectionType() == (PROJECTION_NEAR )) &&
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_RIGHT ))
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_RIGHT ))
)
{
if (getVertex(0) == that.getVertex(1) && getVertex(3) == that.getVertex(4)) {
@ -508,18 +509,18 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
// NEAR/RIGHT & NEAR
if (
(that.getProjectionType() == (PROJECTION_NEAR )) &&
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_RIGHT ))
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_RIGHT ))
)
{
if (getVertex(1) == that.getVertex(0) && getVertex(4) == that.getVertex(3)) {
return true;
}
}
// NEAR/LEFT & NEAR
if (
(getProjectionType() == (PROJECTION_NEAR )) &&
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_LEFT ))
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_LEFT ))
)
{
if (getVertex(1) == that.getVertex(1) && getVertex(2) == that.getVertex(4)) {
@ -530,7 +531,7 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
// NEAR/LEFT & NEAR
if (
(that.getProjectionType() == (PROJECTION_NEAR )) &&
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_LEFT ))
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_LEFT ))
)
{
if (getVertex(1) == that.getVertex(0) && getVertex(4) == that.getVertex(3)) {
@ -559,7 +560,7 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
return true;
}
}
// NEAR/RIGHT/BOTTOM & NEAR/BOTTOM
if (
@ -582,7 +583,7 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
return true;
}
}
// NEAR/LEFT/BOTTOM & NEAR/BOTTOM
if (
((getProjectionType() == (PROJECTION_BOTTOM | PROJECTION_NEAR )) &&
@ -592,12 +593,12 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
if (getVertex(2) == that.getVertex(0) && getVertex(4) == that.getVertex(4)) {
return true;
}
}
}
// NEAR/LEFT/BOTTOM & NEAR/BOTTOM
if (
((that.getProjectionType() == (PROJECTION_BOTTOM | PROJECTION_NEAR )) &&
(getProjectionType() == (PROJECTION_BOTTOM | PROJECTION_NEAR | PROJECTION_LEFT )))
(getProjectionType() == (PROJECTION_BOTTOM | PROJECTION_NEAR | PROJECTION_LEFT )))
)
{
if (getVertex(0) == that.getVertex(2) && getVertex(4) == that.getVertex(4)) {
@ -609,7 +610,7 @@ bool OctreeProjectedPolygon::canMerge(const OctreeProjectedPolygon& that) const
// LEFT/NEAR/BOTTOM
// LEFT/NEAR/TOP
if (
(getProjectionType() == that.getProjectionType()) &&
(getProjectionType() == that.getProjectionType()) &&
(
getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_BOTTOM ) ||
getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_TOP ) ||
@ -646,11 +647,11 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
// RIGHT/NEAR
// LEFT/NEAR
if (
(getProjectionType() == that.getProjectionType()) &&
(getProjectionType() == that.getProjectionType()) &&
(
getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR) ||
getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR) ||
getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)
)
)
) {
if (getVertex(1) == that.getVertex(0) && getVertex(4) == that.getVertex(5)) {
//setVertex(0, this.getVertex(0)); // no change
@ -689,13 +690,13 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
return; // done
}
}
// NEAR/BOTTOM
if (
(getProjectionType() == that.getProjectionType()) &&
(getProjectionType() == that.getProjectionType()) &&
(
getProjectionType() == (PROJECTION_NEAR | PROJECTION_BOTTOM)
)
)
) {
if (getVertex(0) == that.getVertex(5) && getVertex(3) == that.getVertex(4)) {
setVertex(0, that.getVertex(0));
@ -737,10 +738,10 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
// NEAR/TOP
if (
(getProjectionType() == that.getProjectionType()) &&
(getProjectionType() == that.getProjectionType()) &&
(
getProjectionType() == (PROJECTION_NEAR | PROJECTION_TOP)
)
)
) {
if (getVertex(0) == that.getVertex(5) && getVertex(1) == that.getVertex(2)) {
setVertex(0, that.getVertex(0));
@ -779,16 +780,16 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
return; // done
}
}
// RIGHT/NEAR & NEAR/RIGHT/TOP
// LEFT/NEAR & NEAR/LEFT/TOP
if (
((getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
((getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
||
((getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
((getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
)
{
if (getVertex(5) == that.getVertex(0) && getVertex(3) == that.getVertex(2)) {
@ -806,12 +807,12 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
// RIGHT/NEAR & NEAR/RIGHT/TOP
// LEFT/NEAR & NEAR/LEFT/TOP
if (
((that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
((that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
||
((that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
((that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_TOP)) &&
(getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
)
{
if (getVertex(0) == that.getVertex(5) && getVertex(2) == that.getVertex(3)) {
@ -829,12 +830,12 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
// RIGHT/NEAR & NEAR/RIGHT/BOTTOM
// NEAR/LEFT & NEAR/LEFT/BOTTOM
if (
((that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
((that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
||
((that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
((that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
)
{
if (getVertex(5) == that.getVertex(0) && getVertex(3) == that.getVertex(2)) {
@ -851,11 +852,11 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
// RIGHT/NEAR & NEAR/RIGHT/BOTTOM
// NEAR/LEFT & NEAR/LEFT/BOTTOM
if (
((getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
((getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(that.getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR)))
||
((getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
((getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR | PROJECTION_BOTTOM)) &&
(that.getProjectionType() == (PROJECTION_LEFT | PROJECTION_NEAR)))
)
{
if (getVertex(0) == that.getVertex(5) && getVertex(2) == that.getVertex(3)) {
@ -865,16 +866,16 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
//setVertex(3, this.getVertex(3)); // no change
//setVertex(4, this.getVertex(4)); // no change
//setVertex(5, this.getVertex(5)); // no change
setProjectionType((PROJECTION_RIGHT | PROJECTION_NEAR));
setProjectionType((PROJECTION_RIGHT | PROJECTION_NEAR));
return; // done
}
}
// NEAR/TOP & NEAR
if (
(getProjectionType() == (PROJECTION_NEAR )) &&
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_TOP ))
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_TOP ))
)
{
if (getVertex(0) == that.getVertex(5) && getVertex(1) == that.getVertex(2)) {
@ -891,7 +892,7 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
// NEAR/TOP & NEAR
if (
(that.getProjectionType() == (PROJECTION_NEAR )) &&
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_TOP ))
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_TOP ))
)
{
if (getVertex(5) == that.getVertex(0) && getVertex(2) == that.getVertex(1)) {
@ -904,11 +905,11 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
return; // done
}
}
// NEAR/BOTTOM & NEAR
if (
(getProjectionType() == (PROJECTION_NEAR )) &&
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_BOTTOM ))
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_BOTTOM ))
)
{
if (getVertex(2) == that.getVertex(3) && getVertex(3) == that.getVertex(0)) {
@ -924,7 +925,7 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
// NEAR/BOTTOM & NEAR
if (
(that.getProjectionType() == (PROJECTION_NEAR )) &&
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_BOTTOM ))
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_BOTTOM ))
)
{
if (getVertex(3) == that.getVertex(2) && getVertex(0) == that.getVertex(3)) {
@ -941,7 +942,7 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
// NEAR/RIGHT & NEAR
if (
(getProjectionType() == (PROJECTION_NEAR )) &&
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_RIGHT ))
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_RIGHT ))
)
{
if (getVertex(0) == that.getVertex(1) && getVertex(3) == that.getVertex(4)) {
@ -957,7 +958,7 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
// NEAR/RIGHT & NEAR
if (
(that.getProjectionType() == (PROJECTION_NEAR )) &&
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_RIGHT ))
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_RIGHT ))
)
{
if (getVertex(1) == that.getVertex(0) && getVertex(4) == that.getVertex(3)) {
@ -974,7 +975,7 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
// NEAR/LEFT & NEAR
if (
(getProjectionType() == (PROJECTION_NEAR )) &&
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_LEFT ))
(that.getProjectionType() == (PROJECTION_NEAR | PROJECTION_LEFT ))
)
{
if (getVertex(1) == that.getVertex(1) && getVertex(2) == that.getVertex(4)) {
@ -991,7 +992,7 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
// NEAR/LEFT & NEAR
if (
(that.getProjectionType() == (PROJECTION_NEAR )) &&
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_LEFT ))
(getProjectionType() == (PROJECTION_NEAR | PROJECTION_LEFT ))
)
{
if (getVertex(1) == that.getVertex(0) && getVertex(4) == that.getVertex(3)) {
@ -1096,7 +1097,7 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
// NEAR/LEFT/BOTTOM & NEAR/BOTTOM
if (
((that.getProjectionType() == (PROJECTION_BOTTOM | PROJECTION_NEAR )) &&
(getProjectionType() == (PROJECTION_BOTTOM | PROJECTION_NEAR | PROJECTION_LEFT )))
(getProjectionType() == (PROJECTION_BOTTOM | PROJECTION_NEAR | PROJECTION_LEFT )))
)
{
if (getVertex(0) == that.getVertex(2) && getVertex(4) == that.getVertex(4)) {
@ -1118,7 +1119,7 @@ void OctreeProjectedPolygon::merge(const OctreeProjectedPolygon& that) {
// LEFT/NEAR/BOTTOM
// LEFT/NEAR/TOP
if (
(getProjectionType() == that.getProjectionType()) &&
(getProjectionType() == that.getProjectionType()) &&
(
getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_BOTTOM ) ||
getProjectionType() == (PROJECTION_RIGHT | PROJECTION_NEAR | PROJECTION_TOP ) ||

View file

@ -707,8 +707,8 @@ const char* OctreeSceneStats::getItemValue(Item item) {
float elapsedAverage = _elapsedAverage.getAverage();
calcAverageFPS = (float)USECS_PER_SECOND / (float)elapsedAverage;
sprintf(_itemValueBuffer, "%llu usecs (%d fps) Average: %.0f usecs (%d fps)",
(long long unsigned int)_elapsed, calcFPS, elapsedAverage, calcAverageFPS);
sprintf(_itemValueBuffer, "%llu usecs (%d fps) Average: %.0f usecs (%d fps)",
(long long unsigned int)_elapsed, calcFPS, (double)elapsedAverage, calcAverageFPS);
break;
}
case ITEM_ENCODE:
@ -733,7 +733,7 @@ const char* OctreeSceneStats::getItemValue(Item item) {
float calculatedBPV = total == 0 ? 0 : (_bytes * 8) / total;
float averageBPV = _bitsPerOctreeAverage.getAverage();
sprintf(_itemValueBuffer, "%lu (%.2f bits/octree Average: %.2f bits/octree) %lu internal %lu leaves",
total, calculatedBPV, averageBPV,
total, (double)calculatedBPV, (double)averageBPV,
(long unsigned int)_existsInPacketBitsWritten,
(long unsigned int)_colorSent);
break;

View file

@ -66,5 +66,6 @@ float Plane::distance(const glm::vec3 &point) const {
void Plane::print() const {
qCDebug(octree, "Plane - point (x=%f y=%f z=%f) normal (x=%f y=%f z=%f) d=%f",
_point.x, _point.y, _point.z, _normal.x, _normal.y, _normal.z, _dCoefficient);
(double)_point.x, (double)_point.y, (double)_point.z,
(double)_normal.x, (double)_normal.y, (double)_normal.z, (double)_dCoefficient);
}

View file

@ -387,36 +387,36 @@ bool ViewFrustum::matches(const ViewFrustum& compareTo, bool debug) const {
if (!result && debug) {
qCDebug(octree, "ViewFrustum::matches()... result=%s", debug::valueOf(result));
qCDebug(octree, "%s -- compareTo._position=%f,%f,%f _position=%f,%f,%f",
(testMatches(compareTo._position,_position) ? "MATCHES " : "NO MATCH"),
compareTo._position.x, compareTo._position.y, compareTo._position.z,
_position.x, _position.y, _position.z );
(testMatches(compareTo._position,_position) ? "MATCHES " : "NO MATCH"),
(double)compareTo._position.x, (double)compareTo._position.y, (double)compareTo._position.z,
(double)_position.x, (double)_position.y, (double)_position.z);
qCDebug(octree, "%s -- compareTo._direction=%f,%f,%f _direction=%f,%f,%f",
(testMatches(compareTo._direction, _direction) ? "MATCHES " : "NO MATCH"),
compareTo._direction.x, compareTo._direction.y, compareTo._direction.z,
_direction.x, _direction.y, _direction.z );
(testMatches(compareTo._direction, _direction) ? "MATCHES " : "NO MATCH"),
(double)compareTo._direction.x, (double)compareTo._direction.y, (double)compareTo._direction.z,
(double)_direction.x, (double)_direction.y, (double)_direction.z );
qCDebug(octree, "%s -- compareTo._up=%f,%f,%f _up=%f,%f,%f",
(testMatches(compareTo._up, _up) ? "MATCHES " : "NO MATCH"),
compareTo._up.x, compareTo._up.y, compareTo._up.z,
_up.x, _up.y, _up.z );
(testMatches(compareTo._up, _up) ? "MATCHES " : "NO MATCH"),
(double)compareTo._up.x, (double)compareTo._up.y, (double)compareTo._up.z,
(double)_up.x, (double)_up.y, (double)_up.z );
qCDebug(octree, "%s -- compareTo._right=%f,%f,%f _right=%f,%f,%f",
(testMatches(compareTo._right, _right) ? "MATCHES " : "NO MATCH"),
compareTo._right.x, compareTo._right.y, compareTo._right.z,
_right.x, _right.y, _right.z );
(testMatches(compareTo._right, _right) ? "MATCHES " : "NO MATCH"),
(double)compareTo._right.x, (double)compareTo._right.y, (double)compareTo._right.z,
(double)_right.x, (double)_right.y, (double)_right.z );
qCDebug(octree, "%s -- compareTo._fieldOfView=%f _fieldOfView=%f",
(testMatches(compareTo._fieldOfView, _fieldOfView) ? "MATCHES " : "NO MATCH"),
compareTo._fieldOfView, _fieldOfView);
(testMatches(compareTo._fieldOfView, _fieldOfView) ? "MATCHES " : "NO MATCH"),
(double)compareTo._fieldOfView, (double)_fieldOfView);
qCDebug(octree, "%s -- compareTo._aspectRatio=%f _aspectRatio=%f",
(testMatches(compareTo._aspectRatio, _aspectRatio) ? "MATCHES " : "NO MATCH"),
compareTo._aspectRatio, _aspectRatio);
(testMatches(compareTo._aspectRatio, _aspectRatio) ? "MATCHES " : "NO MATCH"),
(double)compareTo._aspectRatio, (double)_aspectRatio);
qCDebug(octree, "%s -- compareTo._nearClip=%f _nearClip=%f",
(testMatches(compareTo._nearClip, _nearClip) ? "MATCHES " : "NO MATCH"),
compareTo._nearClip, _nearClip);
(testMatches(compareTo._nearClip, _nearClip) ? "MATCHES " : "NO MATCH"),
(double)compareTo._nearClip, (double)_nearClip);
qCDebug(octree, "%s -- compareTo._farClip=%f _farClip=%f",
(testMatches(compareTo._farClip, _farClip) ? "MATCHES " : "NO MATCH"),
compareTo._farClip, _farClip);
(testMatches(compareTo._farClip, _farClip) ? "MATCHES " : "NO MATCH"),
(double)compareTo._farClip, (double)_farClip);
qCDebug(octree, "%s -- compareTo._focalLength=%f _focalLength=%f",
(testMatches(compareTo._focalLength, _focalLength) ? "MATCHES " : "NO MATCH"),
compareTo._focalLength, _focalLength);
(testMatches(compareTo._focalLength, _focalLength) ? "MATCHES " : "NO MATCH"),
(double)compareTo._focalLength, (double)_focalLength);
}
return result;
}
@ -448,33 +448,34 @@ bool ViewFrustum::isVerySimilar(const ViewFrustum& compareTo, bool debug) const
if (!result && debug) {
qCDebug(octree, "ViewFrustum::isVerySimilar()... result=%s\n", debug::valueOf(result));
qCDebug(octree, "%s -- compareTo._position=%f,%f,%f _position=%f,%f,%f",
(testMatches(compareTo._position,_position, POSITION_SIMILAR_ENOUGH) ? "IS SIMILAR ENOUGH " : "IS NOT SIMILAR ENOUGH"),
compareTo._position.x, compareTo._position.y, compareTo._position.z,
_position.x, _position.y, _position.z );
(testMatches(compareTo._position,_position, POSITION_SIMILAR_ENOUGH) ?
"IS SIMILAR ENOUGH " : "IS NOT SIMILAR ENOUGH"),
(double)compareTo._position.x, (double)compareTo._position.y, (double)compareTo._position.z,
(double)_position.x, (double)_position.y, (double)_position.z );
qCDebug(octree, "%s -- positionDistance=%f",
(testMatches(0,positionDistance, POSITION_SIMILAR_ENOUGH) ? "IS SIMILAR ENOUGH " : "IS NOT SIMILAR ENOUGH"),
positionDistance);
(testMatches(0,positionDistance, POSITION_SIMILAR_ENOUGH) ? "IS SIMILAR ENOUGH " : "IS NOT SIMILAR ENOUGH"),
(double)positionDistance);
qCDebug(octree, "%s -- angleOrientation=%f",
(testMatches(0, angleOrientation, ORIENTATION_SIMILAR_ENOUGH) ? "IS SIMILAR ENOUGH " : "IS NOT SIMILAR ENOUGH"),
angleOrientation);
(testMatches(0, angleOrientation, ORIENTATION_SIMILAR_ENOUGH) ? "IS SIMILAR ENOUGH " : "IS NOT SIMILAR ENOUGH"),
(double)angleOrientation);
qCDebug(octree, "%s -- compareTo._fieldOfView=%f _fieldOfView=%f",
(testMatches(compareTo._fieldOfView, _fieldOfView) ? "MATCHES " : "NO MATCH"),
compareTo._fieldOfView, _fieldOfView);
(testMatches(compareTo._fieldOfView, _fieldOfView) ? "MATCHES " : "NO MATCH"),
(double)compareTo._fieldOfView, (double)_fieldOfView);
qCDebug(octree, "%s -- compareTo._aspectRatio=%f _aspectRatio=%f",
(testMatches(compareTo._aspectRatio, _aspectRatio) ? "MATCHES " : "NO MATCH"),
compareTo._aspectRatio, _aspectRatio);
(testMatches(compareTo._aspectRatio, _aspectRatio) ? "MATCHES " : "NO MATCH"),
(double)compareTo._aspectRatio, (double)_aspectRatio);
qCDebug(octree, "%s -- compareTo._nearClip=%f _nearClip=%f",
(testMatches(compareTo._nearClip, _nearClip) ? "MATCHES " : "NO MATCH"),
compareTo._nearClip, _nearClip);
(testMatches(compareTo._nearClip, _nearClip) ? "MATCHES " : "NO MATCH"),
(double)compareTo._nearClip, (double)_nearClip);
qCDebug(octree, "%s -- compareTo._farClip=%f _farClip=%f",
(testMatches(compareTo._farClip, _farClip) ? "MATCHES " : "NO MATCH"),
compareTo._farClip, _farClip);
(testMatches(compareTo._farClip, _farClip) ? "MATCHES " : "NO MATCH"),
(double)compareTo._farClip, (double)_farClip);
qCDebug(octree, "%s -- compareTo._focalLength=%f _focalLength=%f",
(testMatches(compareTo._focalLength, _focalLength) ? "MATCHES " : "NO MATCH"),
compareTo._focalLength, _focalLength);
(testMatches(compareTo._focalLength, _focalLength) ? "MATCHES " : "NO MATCH"),
(double)compareTo._focalLength, (double)_focalLength);
}
return result;
}
@ -531,16 +532,16 @@ void ViewFrustum::computeOffAxisFrustum(float& left, float& right, float& bottom
void ViewFrustum::printDebugDetails() const {
qCDebug(octree, "ViewFrustum::printDebugDetails()...");
qCDebug(octree, "_position=%f,%f,%f", _position.x, _position.y, _position.z );
qCDebug(octree, "_direction=%f,%f,%f", _direction.x, _direction.y, _direction.z );
qCDebug(octree, "_up=%f,%f,%f", _up.x, _up.y, _up.z );
qCDebug(octree, "_right=%f,%f,%f", _right.x, _right.y, _right.z );
qCDebug(octree, "_fieldOfView=%f", _fieldOfView);
qCDebug(octree, "_aspectRatio=%f", _aspectRatio);
qCDebug(octree, "_keyHoleRadius=%f", _keyholeRadius);
qCDebug(octree, "_nearClip=%f", _nearClip);
qCDebug(octree, "_farClip=%f", _farClip);
qCDebug(octree, "_focalLength=%f", _focalLength);
qCDebug(octree, "_position=%f,%f,%f", (double)_position.x, (double)_position.y, (double)_position.z );
qCDebug(octree, "_direction=%f,%f,%f", (double)_direction.x, (double)_direction.y, (double)_direction.z );
qCDebug(octree, "_up=%f,%f,%f", (double)_up.x, (double)_up.y, (double)_up.z );
qCDebug(octree, "_right=%f,%f,%f", (double)_right.x, (double)_right.y, (double)_right.z );
qCDebug(octree, "_fieldOfView=%f", (double)_fieldOfView);
qCDebug(octree, "_aspectRatio=%f", (double)_aspectRatio);
qCDebug(octree, "_keyHoleRadius=%f", (double)_keyholeRadius);
qCDebug(octree, "_nearClip=%f", (double)_nearClip);
qCDebug(octree, "_farClip=%f", (double)_farClip);
qCDebug(octree, "_focalLength=%f", (double)_focalLength);
}
glm::vec2 ViewFrustum::projectPoint(glm::vec3 point, bool& pointInView) const {

View file

@ -12,22 +12,31 @@
#ifndef hifi_ProgramObject_h
#define hifi_ProgramObject_h
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdouble-promotion"
#endif
#include <QGLShaderProgram>
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#include <glm/glm.hpp>
class ProgramObject : public QGLShaderProgram {
public:
ProgramObject(QObject* parent = 0);
void setUniform(int location, const glm::vec2& value);
void setUniform(const char* name, const glm::vec2& value);
void setUniform(int location, const glm::vec3& value);
void setUniform(const char* name, const glm::vec3& value);
void setUniform(int location, const glm::vec4& value);
void setUniform(const char* name, const glm::vec4& value);
void setUniformArray(const char* name, const glm::vec3* values, int count);
void setUniformArray(const char* name, const glm::vec3* values, int count);
};
#endif // hifi_ProgramObject_h

View file

@ -18,12 +18,21 @@
#include <QBuffer>
#include <QFile>
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdouble-promotion"
#endif
// FIXME, decouple from the GL headers
#include <QOpenGLShaderProgram>
#include <QOpenGLTexture>
#include <QOpenGLVertexArrayObject>
#include <QOpenGLBuffer>
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

View file

@ -35,7 +35,7 @@
const QString NO_SCRIPT("");
const unsigned int SCRIPT_DATA_CALLBACK_USECS = floor(((1.0 / 60.0f) * 1000 * 1000) + 0.5);
const unsigned int SCRIPT_DATA_CALLBACK_USECS = floor(((1.0f / 60.0f) * 1000 * 1000) + 0.5f);
typedef QHash<QString, QScriptValueList> RegisteredEventHandlers;

View file

@ -40,7 +40,7 @@ TouchEvent::TouchEvent() :
isRotating(false),
rotating("none")
{
}
TouchEvent::TouchEvent(const QTouchEvent& event) :
@ -77,9 +77,9 @@ void TouchEvent::initWithQTouchEvent(const QTouchEvent& event) {
touchPoints = tPoints.count();
if (touchPoints > 1) {
for (int i = 0; i < touchPoints; ++i) {
touchAvgX += tPoints[i].pos().x();
touchAvgY += tPoints[i].pos().y();
touchAvgX += (float)tPoints[i].pos().x();
touchAvgY += (float)tPoints[i].pos().y();
// add it to our points vector
glm::vec2 thisPoint(tPoints[i].pos().x(), tPoints[i].pos().y());
points << thisPoint;
@ -94,7 +94,7 @@ void TouchEvent::initWithQTouchEvent(const QTouchEvent& event) {
}
x = touchAvgX;
y = touchAvgY;
// after calculating the center point (average touch point), determine the maximum radius
// also calculate the rotation angle for each point
float maxRadius = 0.0f;
@ -105,25 +105,25 @@ void TouchEvent::initWithQTouchEvent(const QTouchEvent& event) {
if (thisRadius > maxRadius) {
maxRadius = thisRadius;
}
// calculate the angle for this point
float thisAngle = angleBetweenPoints(center,touchPoint);
angles << thisAngle;
}
radius = maxRadius;
// after calculating the angles for each touch point, determine the average angle
float totalAngle = 0.0f;
for (int i = 0; i < touchPoints; ++i) {
totalAngle += angles[i];
}
angle = totalAngle/(float)touchPoints;
isPressed = event.touchPointStates().testFlag(Qt::TouchPointPressed);
isMoved = event.touchPointStates().testFlag(Qt::TouchPointMoved);
isStationary = event.touchPointStates().testFlag(Qt::TouchPointStationary);
isReleased = event.touchPointStates().testFlag(Qt::TouchPointReleased);
// keyboard modifiers
isShifted = event.modifiers().testFlag(Qt::ShiftModifier);
isMeta = event.modifiers().testFlag(Qt::MetaModifier);
@ -143,7 +143,7 @@ void TouchEvent::calculateMetaAttributes(const TouchEvent& other) {
isPinching = other.isPinching;
isPinchOpening = other.isPinchOpening;
}
// determine if the points are rotating...
// note: if the number of touch points changes between events, then we don't consider ourselves to be rotating
if (touchPoints == other.touchPoints) {
@ -178,7 +178,7 @@ QScriptValue TouchEvent::toScriptValue(QScriptEngine* engine, const TouchEvent&
obj.setProperty("isControl", event.isControl);
obj.setProperty("isAlt", event.isAlt);
obj.setProperty("touchPoints", event.touchPoints);
QScriptValue pointsObj = engine->newArray();
int index = 0;
foreach (glm::vec2 point, event.points) {
@ -190,7 +190,7 @@ QScriptValue TouchEvent::toScriptValue(QScriptEngine* engine, const TouchEvent&
obj.setProperty("radius", event.radius);
obj.setProperty("isPinching", event.isPinching);
obj.setProperty("isPinchOpening", event.isPinchOpening);
obj.setProperty("angle", event.angle);
obj.setProperty("deltaAngle", event.deltaAngle);
QScriptValue anglesObj = engine->newArray();
@ -200,7 +200,7 @@ QScriptValue TouchEvent::toScriptValue(QScriptEngine* engine, const TouchEvent&
index++;
}
obj.setProperty("angles", anglesObj);
obj.setProperty("isRotating", event.isRotating);
obj.setProperty("rotating", event.rotating);
return obj;
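The (float) casts added in initWithQTouchEvent above exist because QPointF::pos().x() and .y() return qreal, which is a double on desktop builds: accumulating that into a float sum promotes the float accumulator to double before narrowing back, and -Wdouble-promotion reports the promotion. A minimal sketch of the pattern, with QPointF used purely for illustration:

    #include <QPointF>

    float averageX(const QPointF& a, const QPointF& b) {
        float sum = 0.0f;
        // sum += a.x();                // qreal (double) operand promotes 'sum' -> warning
        sum += (float)a.x();            // narrow explicitly, keep the arithmetic in float
        sum += (float)b.x();
        return sum / 2.0f;
    }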

View file

@ -16,20 +16,20 @@
#include "GeometryUtil.h"
#include "NumericalConstants.h"
AABox::AABox(const AACube& other) :
AABox::AABox(const AACube& other) :
_corner(other.getCorner()), _scale(other.getScale(), other.getScale(), other.getScale()) {
}
AABox::AABox(const Extents& other) :
AABox::AABox(const Extents& other) :
_corner(other.minimum),
_scale(other.maximum - other.minimum) {
}
AABox::AABox(const glm::vec3& corner, float size) :
AABox::AABox(const glm::vec3& corner, float size) :
_corner(corner), _scale(size, size, size) {
};
AABox::AABox(const glm::vec3& corner, const glm::vec3& dimensions) :
AABox::AABox(const glm::vec3& corner, const glm::vec3& dimensions) :
_corner(corner), _scale(dimensions) {
};
@ -39,7 +39,7 @@ AABox::AABox() : _corner(std::numeric_limits<float>::infinity()), _scale(0.0f) {
glm::vec3 AABox::calcCenter() const {
glm::vec3 center(_corner);
center += (_scale * 0.5f);
return center;
return center;
}
void AABox::scale(float scale) {
@ -134,12 +134,12 @@ bool AABox::contains(const AABox& otherBox) const {
bool AABox::touches(const AABox& otherBox) const {
glm::vec3 relativeCenter = _corner - otherBox._corner + ((_scale - otherBox._scale) * 0.5f);
glm::vec3 totalHalfScale = (_scale + otherBox._scale) * 0.5f;
return fabs(relativeCenter.x) <= totalHalfScale.x &&
fabs(relativeCenter.y) <= totalHalfScale.y &&
fabs(relativeCenter.z) <= totalHalfScale.z;
return fabsf(relativeCenter.x) <= totalHalfScale.x &&
fabsf(relativeCenter.y) <= totalHalfScale.y &&
fabsf(relativeCenter.z) <= totalHalfScale.z;
}
bool AABox::contains(const AACube& otherCube) const {
@ -154,12 +154,12 @@ bool AABox::contains(const AACube& otherCube) const {
bool AABox::touches(const AACube& otherCube) const {
glm::vec3 relativeCenter = _corner - otherCube.getCorner() + ((_scale - otherCube.getDimensions()) * 0.5f);
glm::vec3 totalHalfScale = (_scale + otherCube.getDimensions()) * 0.5f;
return fabs(relativeCenter.x) <= totalHalfScale.x &&
fabs(relativeCenter.y) <= totalHalfScale.y &&
fabs(relativeCenter.z) <= totalHalfScale.z;
return fabsf(relativeCenter.x) <= totalHalfScale.x &&
fabsf(relativeCenter.y) <= totalHalfScale.y &&
fabsf(relativeCenter.z) <= totalHalfScale.z;
}
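The fabs-to-fabsf changes above follow the same logic: the classic C fabs takes a double, so calling it with a float promotes the argument (and yields a double result), which this warning flag catches; fabsf keeps the whole comparison in float. A minimal sketch, independent of the AABox code:

    #include <cmath>

    bool withinHalfScale(float delta, float halfScale) {
        // return fabs(delta) <= halfScale;   // fabs(double): 'delta' promoted to double -> warning
        return fabsf(delta) <= halfScale;     // float-only variant, no promotion
    }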
// determines whether a value is within the expanded extents
@ -244,14 +244,14 @@ bool AABox::findRayIntersection(const glm::vec3& origin, const glm::vec3& direct
isWithin(origin.y + axisDistance*direction.y, _corner.y, _scale.y) &&
isWithin(origin.x + axisDistance*direction.x, _corner.x, _scale.x))) {
distance = axisDistance;
face = direction.z > 0 ? MAX_Z_FACE : MIN_Z_FACE;
face = direction.z > 0 ? MAX_Z_FACE : MIN_Z_FACE;
return true;
}
// This case is unexpected, but mimics the previous behavior for inside out intersections
distance = 0;
return true;
}
// check each axis
float axisDistance;
if ((findIntersection(origin.x, direction.x, _corner.x, _scale.x, axisDistance) && axisDistance >= 0 &&
@ -272,7 +272,7 @@ bool AABox::findRayIntersection(const glm::vec3& origin, const glm::vec3& direct
isWithin(origin.y + axisDistance*direction.y, _corner.y, _scale.y) &&
isWithin(origin.x + axisDistance*direction.x, _corner.x, _scale.x))) {
distance = axisDistance;
face = direction.z > 0 ? MIN_Z_FACE : MAX_Z_FACE;
face = direction.z > 0 ? MIN_Z_FACE : MAX_Z_FACE;
return true;
}
return false;
@ -280,7 +280,7 @@ bool AABox::findRayIntersection(const glm::vec3& origin, const glm::vec3& direct
bool AABox::findSpherePenetration(const glm::vec3& center, float radius, glm::vec3& penetration) const {
glm::vec4 center4 = glm::vec4(center, 1.0f);
float minPenetrationLength = FLT_MAX;
for (int i = 0; i < FACE_COUNT; i++) {
glm::vec4 facePlane = getPlane((BoxFace)i);
@ -297,7 +297,7 @@ bool AABox::findSpherePenetration(const glm::vec3& center, float radius, glm::ve
minPenetrationLength = vectorLength;
}
}
return true;
}
@ -305,7 +305,7 @@ bool AABox::findCapsulePenetration(const glm::vec3& start, const glm::vec3& end,
glm::vec4 start4 = glm::vec4(start, 1.0f);
glm::vec4 end4 = glm::vec4(end, 1.0f);
glm::vec4 startToEnd = glm::vec4(end - start, 0.0f);
float minPenetrationLength = FLT_MAX;
for (int i = 0; i < FACE_COUNT; i++) {
// find the vector from the segment to the closest point on the face (starting from deeper end)
@ -324,8 +324,8 @@ bool AABox::findCapsulePenetration(const glm::vec3& start, const glm::vec3& end,
vector * ((vectorLength + radius) / -vectorLength);
minPenetrationLength = vectorLength;
}
}
}
return true;
}
@ -334,23 +334,23 @@ glm::vec3 AABox::getClosestPointOnFace(const glm::vec3& point, BoxFace face) con
case MIN_X_FACE:
return glm::clamp(point, glm::vec3(_corner.x, _corner.y, _corner.z),
glm::vec3(_corner.x, _corner.y + _scale.y, _corner.z + _scale.z));
case MAX_X_FACE:
return glm::clamp(point, glm::vec3(_corner.x + _scale.x, _corner.y, _corner.z),
glm::vec3(_corner.x + _scale.x, _corner.y + _scale.y, _corner.z + _scale.z));
case MIN_Y_FACE:
return glm::clamp(point, glm::vec3(_corner.x, _corner.y, _corner.z),
glm::vec3(_corner.x + _scale.x, _corner.y, _corner.z + _scale.z));
case MAX_Y_FACE:
return glm::clamp(point, glm::vec3(_corner.x, _corner.y + _scale.y, _corner.z),
glm::vec3(_corner.x + _scale.x, _corner.y + _scale.y, _corner.z + _scale.z));
case MIN_Z_FACE:
return glm::clamp(point, glm::vec3(_corner.x, _corner.y, _corner.z),
glm::vec3(_corner.x + _scale.z, _corner.y + _scale.y, _corner.z));
default: //quiet windows warnings
case MAX_Z_FACE:
return glm::clamp(point, glm::vec3(_corner.x, _corner.y, _corner.z + _scale.z),
@ -373,7 +373,7 @@ glm::vec3 AABox::getClosestPointOnFace(const glm::vec4& origin, const glm::vec4&
}
anyOutside = true;
float divisor = glm::dot(direction, iPlane);
if (fabs(divisor) < EPSILON) {
if (fabsf(divisor) < EPSILON) {
continue; // segment is parallel to plane
}
// find intersection and see if it lies within face bounds
@ -389,29 +389,29 @@ glm::vec3 AABox::getClosestPointOnFace(const glm::vec4& origin, const glm::vec4&
}
}
return getClosestPointOnFace(glm::vec3(intersection), face);
outerContinue: ;
}
// if we were outside any of the sides, we must check against the diagonals
if (anyOutside) {
int faceAxis = face / 2;
int secondAxis = (faceAxis + 1) % 3;
int thirdAxis = (faceAxis + 2) % 3;
glm::vec4 secondAxisMinPlane = getPlane((BoxFace)(secondAxis * 2));
glm::vec4 secondAxisMaxPlane = getPlane((BoxFace)(secondAxis * 2 + 1));
glm::vec4 thirdAxisMaxPlane = getPlane((BoxFace)(thirdAxis * 2 + 1));
glm::vec4 offset = glm::vec4(0.0f, 0.0f, 0.0f,
glm::dot(glm::vec3(secondAxisMaxPlane + thirdAxisMaxPlane), _scale) * 0.5f);
glm::vec4 diagonals[] = { secondAxisMinPlane + thirdAxisMaxPlane + offset,
secondAxisMaxPlane + thirdAxisMaxPlane + offset };
float minDistance = FLT_MAX;
for (size_t i = 0; i < sizeof(diagonals) / sizeof(diagonals[0]); i++) {
float divisor = glm::dot(direction, diagonals[i]);
if (fabs(divisor) < EPSILON) {
if (fabsf(divisor) < EPSILON) {
continue; // segment is parallel to diagonal plane
}
minDistance = glm::min(-glm::dot(origin, diagonals[i]) / divisor, minDistance);
@ -420,7 +420,7 @@ glm::vec3 AABox::getClosestPointOnFace(const glm::vec4& origin, const glm::vec4&
return getClosestPointOnFace(glm::vec3(origin + direction * minDistance), face);
}
}
// last resort or all inside: clamp origin to face
return getClosestPointOnFace(glm::vec3(origin), face);
}
@ -453,7 +453,7 @@ AABox AABox::clamp(const glm::vec3& min, const glm::vec3& max) const {
glm::vec3 clampedCorner = glm::clamp(_corner, min, max);
glm::vec3 clampedTopFarLeft = glm::clamp(calcTopFarLeft(), min, max);
glm::vec3 clampedScale = clampedTopFarLeft - clampedCorner;
return AABox(clampedCorner, clampedScale);
}
@ -461,7 +461,7 @@ AABox AABox::clamp(float min, float max) const {
glm::vec3 clampedCorner = glm::clamp(_corner, min, max);
glm::vec3 clampedTopFarLeft = glm::clamp(calcTopFarLeft(), min, max);
glm::vec3 clampedScale = clampedTopFarLeft - clampedCorner;
return AABox(clampedCorner, clampedScale);
}

View file

@ -21,14 +21,14 @@ AACube::AACube(const AABox& other) :
_corner(other.getCorner()), _scale(other.getLargestDimension()) {
}
AACube::AACube(const Extents& other) :
_corner(other.minimum)
AACube::AACube(const Extents& other) :
_corner(other.minimum)
{
glm::vec3 dimensions = other.maximum - other.minimum;
_scale = glm::max(dimensions.x, dimensions.y, dimensions.z);
}
AACube::AACube(const glm::vec3& corner, float size) :
AACube::AACube(const glm::vec3& corner, float size) :
_corner(corner), _scale(size) {
};
@ -38,13 +38,13 @@ AACube::AACube() : _corner(0,0,0), _scale(0) {
glm::vec3 AACube::calcCenter() const {
glm::vec3 center(_corner);
center += (glm::vec3(_scale, _scale, _scale) * 0.5f);
return center;
return center;
}
glm::vec3 AACube::calcTopFarLeft() const {
glm::vec3 AACube::calcTopFarLeft() const {
glm::vec3 topFarLeft(_corner);
topFarLeft += glm::vec3(_scale, _scale, _scale);
return topFarLeft;
return topFarLeft;
};
void AACube::scale(float scale) {
@ -135,9 +135,9 @@ bool AACube::contains(const AACube& otherCube) const {
bool AACube::touches(const AACube& otherCube) const {
glm::vec3 relativeCenter = _corner - otherCube._corner + (glm::vec3(_scale - otherCube._scale) * 0.5f);
float totalHalfScale = 0.5f * (_scale + otherCube._scale);
return fabs(relativeCenter.x) <= totalHalfScale &&
fabs(relativeCenter.y) <= totalHalfScale &&
fabs(relativeCenter.z) <= totalHalfScale;
return fabsf(relativeCenter.x) <= totalHalfScale &&
fabsf(relativeCenter.y) <= totalHalfScale &&
fabsf(relativeCenter.z) <= totalHalfScale;
}
bool AACube::contains(const AABox& otherBox) const {
@ -154,10 +154,10 @@ bool AACube::touches(const AABox& otherBox) const {
glm::vec3 myDimensions = glm::vec3(_scale);
glm::vec3 relativeCenter = _corner - otherBox.getCorner() + ((myDimensions - otherBox.getScale()) * 0.5f);
glm::vec3 totalHalfScale = (myDimensions + otherBox.getDimensions()) * 0.5f;
return fabs(relativeCenter.x) <= totalHalfScale.x &&
fabs(relativeCenter.y) <= totalHalfScale.y &&
fabs(relativeCenter.z) <= totalHalfScale.z;
return fabsf(relativeCenter.x) <= totalHalfScale.x &&
fabsf(relativeCenter.y) <= totalHalfScale.y &&
fabsf(relativeCenter.z) <= totalHalfScale.z;
}
// determines whether a value is within the expanded extents
@ -222,7 +222,7 @@ bool AACube::expandedIntersectsSegment(const glm::vec3& start, const glm::vec3&
bool AACube::findRayIntersection(const glm::vec3& origin, const glm::vec3& direction, float& distance, BoxFace& face) const {
// handle the trivial case where the box contains the origin
if (contains(origin)) {
// We still want to calculate the distance from the origin to the inside out plane
float axisDistance;
if ((findInsideOutIntersection(origin.x, direction.x, _corner.x, _scale, axisDistance) && axisDistance >= 0 &&
@ -243,14 +243,14 @@ bool AACube::findRayIntersection(const glm::vec3& origin, const glm::vec3& direc
isWithin(origin.y + axisDistance*direction.y, _corner.y, _scale) &&
isWithin(origin.x + axisDistance*direction.x, _corner.x, _scale))) {
distance = axisDistance;
face = direction.z > 0 ? MAX_Z_FACE : MIN_Z_FACE;
face = direction.z > 0 ? MAX_Z_FACE : MIN_Z_FACE;
return true;
}
// This case is unexpected, but mimics the previous behavior for inside out intersections
distance = 0;
return true;
}
// check each axis
float axisDistance;
if ((findIntersection(origin.x, direction.x, _corner.x, _scale, axisDistance) && axisDistance >= 0 &&
@ -271,7 +271,7 @@ bool AACube::findRayIntersection(const glm::vec3& origin, const glm::vec3& direc
isWithin(origin.y + axisDistance*direction.y, _corner.y, _scale) &&
isWithin(origin.x + axisDistance*direction.x, _corner.x, _scale))) {
distance = axisDistance;
face = direction.z > 0 ? MIN_Z_FACE : MAX_Z_FACE;
face = direction.z > 0 ? MIN_Z_FACE : MAX_Z_FACE;
return true;
}
return false;
@ -279,7 +279,7 @@ bool AACube::findRayIntersection(const glm::vec3& origin, const glm::vec3& direc
bool AACube::findSpherePenetration(const glm::vec3& center, float radius, glm::vec3& penetration) const {
glm::vec4 center4 = glm::vec4(center, 1.0f);
float minPenetrationLength = FLT_MAX;
for (int i = 0; i < FACE_COUNT; i++) {
glm::vec4 facePlane = getPlane((BoxFace)i);
@ -296,7 +296,7 @@ bool AACube::findSpherePenetration(const glm::vec3& center, float radius, glm::v
minPenetrationLength = vectorLength;
}
}
return true;
}
@ -304,7 +304,7 @@ bool AACube::findCapsulePenetration(const glm::vec3& start, const glm::vec3& end
glm::vec4 start4 = glm::vec4(start, 1.0f);
glm::vec4 end4 = glm::vec4(end, 1.0f);
glm::vec4 startToEnd = glm::vec4(end - start, 0.0f);
float minPenetrationLength = FLT_MAX;
for (int i = 0; i < FACE_COUNT; i++) {
// find the vector from the segment to the closest point on the face (starting from deeper end)
@ -323,8 +323,8 @@ bool AACube::findCapsulePenetration(const glm::vec3& start, const glm::vec3& end
vector * ((vectorLength + radius) / -vectorLength);
minPenetrationLength = vectorLength;
}
}
}
return true;
}
@ -333,23 +333,23 @@ glm::vec3 AACube::getClosestPointOnFace(const glm::vec3& point, BoxFace face) co
case MIN_X_FACE:
return glm::clamp(point, glm::vec3(_corner.x, _corner.y, _corner.z),
glm::vec3(_corner.x, _corner.y + _scale, _corner.z + _scale));
case MAX_X_FACE:
return glm::clamp(point, glm::vec3(_corner.x + _scale, _corner.y, _corner.z),
glm::vec3(_corner.x + _scale, _corner.y + _scale, _corner.z + _scale));
case MIN_Y_FACE:
return glm::clamp(point, glm::vec3(_corner.x, _corner.y, _corner.z),
glm::vec3(_corner.x + _scale, _corner.y, _corner.z + _scale));
case MAX_Y_FACE:
return glm::clamp(point, glm::vec3(_corner.x, _corner.y + _scale, _corner.z),
glm::vec3(_corner.x + _scale, _corner.y + _scale, _corner.z + _scale));
case MIN_Z_FACE:
return glm::clamp(point, glm::vec3(_corner.x, _corner.y, _corner.z),
glm::vec3(_corner.x + _scale, _corner.y + _scale, _corner.z));
default: //quiet windows warnings
case MAX_Z_FACE:
return glm::clamp(point, glm::vec3(_corner.x, _corner.y, _corner.z + _scale),
@ -372,7 +372,7 @@ glm::vec3 AACube::getClosestPointOnFace(const glm::vec4& origin, const glm::vec4
}
anyOutside = true;
float divisor = glm::dot(direction, iPlane);
if (fabs(divisor) < EPSILON) {
if (fabsf(divisor) < EPSILON) {
continue; // segment is parallel to plane
}
// find intersection and see if it lies within face bounds
@ -388,29 +388,29 @@ glm::vec3 AACube::getClosestPointOnFace(const glm::vec4& origin, const glm::vec4
}
}
return getClosestPointOnFace(glm::vec3(intersection), face);
outerContinue: ;
}
// if we were outside any of the sides, we must check against the diagonals
if (anyOutside) {
int faceAxis = face / 2;
int secondAxis = (faceAxis + 1) % 3;
int thirdAxis = (faceAxis + 2) % 3;
glm::vec4 secondAxisMinPlane = getPlane((BoxFace)(secondAxis * 2));
glm::vec4 secondAxisMaxPlane = getPlane((BoxFace)(secondAxis * 2 + 1));
glm::vec4 thirdAxisMaxPlane = getPlane((BoxFace)(thirdAxis * 2 + 1));
glm::vec4 offset = glm::vec4(0.0f, 0.0f, 0.0f,
glm::dot(glm::vec3(secondAxisMaxPlane + thirdAxisMaxPlane), glm::vec3(_scale, _scale, _scale)) * 0.5f);
glm::vec4 diagonals[] = { secondAxisMinPlane + thirdAxisMaxPlane + offset,
secondAxisMaxPlane + thirdAxisMaxPlane + offset };
float minDistance = FLT_MAX;
for (size_t i = 0; i < sizeof(diagonals) / sizeof(diagonals[0]); i++) {
float divisor = glm::dot(direction, diagonals[i]);
if (fabs(divisor) < EPSILON) {
if (fabsf(divisor) < EPSILON) {
continue; // segment is parallel to diagonal plane
}
minDistance = glm::min(-glm::dot(origin, diagonals[i]) / divisor, minDistance);
@ -419,7 +419,7 @@ glm::vec3 AACube::getClosestPointOnFace(const glm::vec4& origin, const glm::vec4
return getClosestPointOnFace(glm::vec3(origin + direction * minDistance), face);
}
}
// last resort or all inside: clamp origin to face
return getClosestPointOnFace(glm::vec3(origin), face);
}
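
Both AABox::getClosestPointOnFace and AACube::getClosestPointOnFace compute the segment/plane hit the same way: points and directions are carried as homogeneous vec4s, and the parameter along the segment is -dot(origin, plane) / dot(direction, plane), skipped when the divisor is nearly zero. A hedged, self-contained sketch of that one step, assuming (as the code's usage suggests) that getPlane() packs the unit normal in xyz and the plane offset in w so glm::dot(point, plane) is a signed distance; the EPSILON value and names are illustrative:

    #include <glm/glm.hpp>
    #include <cmath>

    // Returns true and writes t when the segment origin + t * direction crosses the plane.
    bool segmentPlaneParameter(const glm::vec4& origin, const glm::vec4& direction,
                               const glm::vec4& plane, float& t) {
        const float EPSILON = 1.0e-6f;              // illustrative tolerance
        float divisor = glm::dot(direction, plane);
        if (fabsf(divisor) < EPSILON) {
            return false;                           // segment runs parallel to the plane
        }
        t = -glm::dot(origin, plane) / divisor;
        return true;
    }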

View file

@ -120,7 +120,7 @@ bool HingeConstraint::clamp(glm::quat& rotation) const {
forward /= length;
float sign = (glm::dot(glm::cross(_forwardAxis, forward), _rotationAxis) > 0.0f ? 1.0f : -1.0f);
//float angle = sign * acos(glm::dot(forward, _forwardAxis) / length);
float angle = sign * acos(glm::dot(forward, _forwardAxis));
float angle = sign * acosf(glm::dot(forward, _forwardAxis));
glm::quat newRotation = glm::angleAxis(clampAngle(angle, _minAngle, _maxAngle), _rotationAxis);
if (fabsf(1.0f - glm::dot(newRotation, rotation)) > EPSILON * EPSILON) {
rotation = newRotation;
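
The acos-to-acosf change above sits inside HingeConstraint's signed-angle computation: the sign comes from whether the cross product of the reference forward axis and the current forward vector points along or against the rotation axis. A hedged sketch of that computation (inputs assumed unit length; the glm::clamp is a defensive addition that the original line does not have):

    #include <glm/glm.hpp>
    #include <cmath>

    float signedHingeAngle(const glm::vec3& forward, const glm::vec3& referenceForward,
                           const glm::vec3& rotationAxis) {
        float sign = glm::dot(glm::cross(referenceForward, forward), rotationAxis) > 0.0f ? 1.0f : -1.0f;
        // keep acosf's argument inside [-1, 1] despite rounding error
        float cosine = glm::clamp(glm::dot(forward, referenceForward), -1.0f, 1.0f);
        return sign * acosf(cosine);
    }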

View file

@ -238,7 +238,7 @@ glm::quat extractRotation(const glm::mat4& matrix, bool assumeOrthogonal) {
float sd10 = previous[0][1] * previous[2][2] - previous[2][1] * previous[0][2];
float sd20 = previous[0][1] * previous[1][2] - previous[1][1] * previous[0][2];
float det = previous[0][0] * sd00 + previous[2][0] * sd20 - previous[1][0] * sd10;
if (fabs(det) == 0.0f) {
if (fabsf(det) == 0.0f) {
// determinant is zero; matrix is not invertible
break;
}

View file

@ -28,10 +28,19 @@ using glm::vec3;
using glm::vec4;
using glm::quat;
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdouble-promotion"
#endif
#include <QtCore/QByteArray>
#include <QtGui/QMatrix4x4>
#include <QtGui/QColor>
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#include "SharedUtil.h"
// this is where the coordinate system is represented

View file

@ -47,7 +47,7 @@ void printOctalCode(const unsigned char* octalCode) {
char sectionValue(const unsigned char* startByte, char startIndexInByte) {
char rightShift = 8 - startIndexInByte - 3;
if (rightShift < 0) {
return ((startByte[0] << -rightShift) & 7) + (startByte[1] >> (8 + rightShift));
} else {
@ -65,7 +65,7 @@ size_t bytesRequiredForCodeLength(unsigned char threeBitCodes) {
int branchIndexWithDescendant(const unsigned char* ancestorOctalCode, const unsigned char* descendantOctalCode) {
int parentSections = numberOfThreeBitSectionsInCode(ancestorOctalCode);
int branchStartBit = parentSections * 3;
// Note: this does not appear to be "multi-byte length code" safe. When octal codes are larger than 255 bytes
// long, the length code is stored in two bytes. The "1" below appears to assume that the length is always one
@ -74,45 +74,45 @@ int branchIndexWithDescendant(const unsigned char* ancestorOctalCode, const unsi
}
unsigned char* childOctalCode(const unsigned char* parentOctalCode, char childNumber) {
// find the length (in number of three bit code sequences)
// in the parent
int parentCodeSections = parentOctalCode
? numberOfThreeBitSectionsInCode(parentOctalCode)
: 0;
// get the number of bytes used by the parent octal code
size_t parentCodeBytes = bytesRequiredForCodeLength(parentCodeSections);
// child code will have one more section than the parent
size_t childCodeBytes = bytesRequiredForCodeLength(parentCodeSections + 1);
// create a new buffer to hold the new octal code
unsigned char* newCode = new unsigned char[childCodeBytes];
// copy the parent code to the child
if (parentOctalCode) {
memcpy(newCode, parentOctalCode, parentCodeBytes);
}
}
// the child octal code has one more set of three bits
*newCode = parentCodeSections + 1;
if (childCodeBytes > parentCodeBytes) {
// we have a new byte due to the addition of the child code
// so set it to zero for correct results when shifting later
newCode[childCodeBytes - 1] = 0;
}
// add the child code bits to newCode
// find the start bit index
int startBit = parentCodeSections * 3;
// calculate the amount of left shift required
// this will be -1 or -2 if there's wrap
char leftShift = 8 - (startBit % 8) - 3;
if (leftShift < 0) {
// we have a wrap-around to accommodate
// right shift for the end of first byte
@ -123,7 +123,7 @@ unsigned char* childOctalCode(const unsigned char* parentOctalCode, char childNu
// no wraparound, left shift and add
newCode[(startBit / 8) + 1] += (childNumber << leftShift);
}
return newCode;
}
@ -135,9 +135,9 @@ void voxelDetailsForCode(const unsigned char* octalCode, VoxelPositionSize& voxe
if (octalCode) {
for (int i = 0; i < numberOfThreeBitSectionsInCode(octalCode); i++) {
currentScale *= 0.5f;
int sectionIndex = sectionValue(octalCode + 1 + (BITS_IN_OCTAL * i / BITS_IN_BYTE),
int sectionIndex = sectionValue(octalCode + 1 + (BITS_IN_OCTAL * i / BITS_IN_BYTE),
(BITS_IN_OCTAL * i) % BITS_IN_BYTE);
for (int j = 0; j < BITS_IN_OCTAL; j++) {
output[j] += currentScale * (float)oneAtBit(sectionIndex, (BITS_IN_BYTE - BITS_IN_OCTAL) + j);
}
@ -151,17 +151,17 @@ void voxelDetailsForCode(const unsigned char* octalCode, VoxelPositionSize& voxe
void copyFirstVertexForCode(const unsigned char* octalCode, float* output) {
memset(output, 0, 3 * sizeof(float));
float currentScale = 0.5f;
for (int i = 0; i < numberOfThreeBitSectionsInCode(octalCode); i++) {
int sectionIndex = sectionValue(octalCode + 1 + (3 * i / 8), (3 * i) % 8);
for (int j = 0; j < 3; j++) {
output[j] += currentScale * (int)oneAtBit(sectionIndex, 5 + j);
}
currentScale *= 0.5;
currentScale *= 0.5f;
}
}
@ -171,7 +171,7 @@ OctalCodeComparison compareOctalCodes(const unsigned char* codeA, const unsigned
}
OctalCodeComparison result = LESS_THAN; // assume it's shallower
size_t numberOfBytes = std::min(bytesRequiredForCodeLength(*codeA), bytesRequiredForCodeLength(*codeB));
int compare = memcmp(codeA, codeB, numberOfBytes);
@ -205,7 +205,7 @@ char getOctalCodeSectionValue(const unsigned char* octalCode, int section) {
int startAtByte = 1 + (BITS_IN_OCTAL * section / BITS_IN_BYTE);
char startIndexInByte = (BITS_IN_OCTAL * section) % BITS_IN_BYTE;
const unsigned char* startByte = octalCode + startAtByte;
return sectionValue(startByte, startIndexInByte);
}
@ -227,7 +227,7 @@ void setOctalCodeSectionValue(unsigned char* octalCode, int section, char sectio
unsigned char oldValue = *byteAt & ~shiftedMask;
unsigned char newValue = oldValue | shiftedValue;
*byteAt = newValue;
// If the requested section is partially in the byte, then we
// need to also set the portion of the section value in the next byte
// there's only two cases where this happens, if the bit in byte is
@ -255,7 +255,7 @@ unsigned char* chopOctalCode(const unsigned char* originalOctalCode, int chopLev
int newLength = codeLength - chopLevels;
newCode = new unsigned char[newLength+1];
*newCode = newLength; // set the length byte
for (int section = chopLevels; section < codeLength; section++) {
char sectionValue = getOctalCodeSectionValue(originalOctalCode, section);
setOctalCodeSectionValue(newCode, section - chopLevels, sectionValue);
@ -264,9 +264,9 @@ unsigned char* chopOctalCode(const unsigned char* originalOctalCode, int chopLev
return newCode;
}
unsigned char* rebaseOctalCode(const unsigned char* originalOctalCode, const unsigned char* newParentOctalCode,
unsigned char* rebaseOctalCode(const unsigned char* originalOctalCode, const unsigned char* newParentOctalCode,
bool includeColorSpace) {
int oldCodeLength = numberOfThreeBitSectionsInCode(originalOctalCode);
int newParentCodeLength = numberOfThreeBitSectionsInCode(newParentOctalCode);
int newCodeLength = newParentCodeLength + oldCodeLength;
@ -298,12 +298,12 @@ bool isAncestorOf(const unsigned char* possibleAncestor, const unsigned char* po
}
int descendentCodeLength = numberOfThreeBitSectionsInCode(possibleDescendent);
// if the caller also includes a child, then our descendent length is actually one extra!
if (descendentsChild != CHECK_NODE_ONLY) {
descendentCodeLength++;
}
if (ancestorCodeLength > descendentCodeLength) {
return false; // if the descendent is shorter, it can't be a descendent
}
@ -322,7 +322,7 @@ bool isAncestorOf(const unsigned char* possibleAncestor, const unsigned char* po
return false; // first non-match, means they don't match
}
}
// they all match, so we are an ancestor
return true;
}
@ -335,7 +335,7 @@ unsigned char* hexStringToOctalCode(const QString& input) {
// allocate byte array based on half of string length
unsigned char* bytes = new unsigned char[(input.length()) / HEX_BYTE_SIZE];
// loop through the string - 2 bytes at a time converting
// it to decimal equivalent and store in byte array
bool ok = false;
@ -348,7 +348,7 @@ unsigned char* hexStringToOctalCode(const QString& input) {
stringIndex += HEX_BYTE_SIZE;
byteArrayIndex++;
}
// something went wrong
if (!ok) {
delete[] bytes;
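
copyFirstVertexForCode above accumulates the cell corner one three-bit section at a time, halving each level's contribution. A hedged sketch of the same accumulation, assuming the section values have already been unpacked into one byte each and that the oneAtBit(..., 5 + j) indexing maps bit 2 to x, bit 1 to y, and bit 0 to z; names are illustrative:

    #include <cstring>

    // octants holds one three-bit section per level in its low bits; corner receives x, y, z.
    void decodeCorner(const unsigned char* octants, int levels, float* corner) {
        memset(corner, 0, 3 * sizeof(float));
        float currentScale = 0.5f;
        for (int i = 0; i < levels; i++) {
            for (int j = 0; j < 3; j++) {
                if (octants[i] & (1 << (2 - j))) {
                    corner[j] += currentScale;
                }
            }
            currentScale *= 0.5f;   // float literal, as in the fixed line above
        }
    }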

View file

@ -881,7 +881,7 @@ bool capsuleVsAACube(const Shape* shapeA, const Shape* shapeB, CollisionList& co
faceNormal = glm::vec3(0.0f, 0.0f, signs.z);
}
if (fabs(glm::dot(faceNormal, capsuleAxis)) < EPSILON) {
if (fabsf(glm::dot(faceNormal, capsuleAxis)) < EPSILON) {
if (glm::dot(nearestApproach, faceNormal) > cubeB->getScale() + capsuleA->getRadius()) {
return false;
}

File diff suppressed because it is too large

View file

@ -10,6 +10,11 @@
#include "TextRenderer.h"
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdouble-promotion"
#endif
#include <QWindow>
#include <QFile>
#include <QTime>
@ -26,6 +31,10 @@
#include <QApplication>
#include <QOpenGLDebugLogger>
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#include <unordered_map>
#include <memory>
#include <glm/glm.hpp>

View file

@ -297,7 +297,7 @@ void AngularConstraintTests::testConeRollerConstraint() {
glm::vec3 maxAngles(maxAngleX, maxAngleY, maxAngleZ);
AngularConstraint* c = AngularConstraint::newAngularConstraint(minAngles, maxAngles);
float expectedConeAngle = 0.25 * (maxAngleX - minAngleX + maxAngleY - minAngleY);
float expectedConeAngle = 0.25f * (maxAngleX - minAngleX + maxAngleY - minAngleY);
glm::vec3 middleAngles = 0.5f * (maxAngles + minAngles);
glm::quat yaw = glm::angleAxis(middleAngles[1], glm::vec3(0.0f, 1.0f, 0.0f));
glm::quat pitch = glm::angleAxis(middleAngles[0], glm::vec3(1.0f, 0.0f, 0.0f));

View file

@ -62,7 +62,8 @@ void MovingMinMaxAvgTests::runAllTests() {
assert(stats.getMin() == min);
assert(stats.getMax() == max);
assert(fabs(stats.getAverage() / average - 1.0) < EPSILON || fabs(stats.getAverage() - average) < EPSILON);
assert(fabsf((float)stats.getAverage() / (float)average - 1.0f) < EPSILON ||
fabsf((float)stats.getAverage() - (float)average) < EPSILON);
if ((i + 1) % INTERVAL_LENGTH == 0) {
@ -81,7 +82,8 @@ void MovingMinMaxAvgTests::runAllTests() {
assert(stats.getWindowMin() == windowMin);
assert(stats.getWindowMax() == windowMax);
assert(fabs(stats.getAverage() / average - 1.0) < EPSILON || fabs(stats.getAverage() - average) < EPSILON);
assert(fabsf((float)stats.getAverage() / (float)average - 1.0f) < EPSILON ||
fabsf((float)stats.getAverage() - (float)average) < EPSILON);
} else {
assert(!stats.getNewStatsAvailableFlag());
@ -126,7 +128,7 @@ void MovingMinMaxAvgTests::runAllTests() {
assert(stats.getMin() == min);
assert(stats.getMax() == max);
assert(fabs(stats.getAverage() / average - 1.0) < EPSILON);
assert(fabsf((float)stats.getAverage() / (float)average - 1.0f) < EPSILON);
if ((i + 1) % INTERVAL_LENGTH == 0) {
@ -145,7 +147,7 @@ void MovingMinMaxAvgTests::runAllTests() {
assert(stats.getWindowMin() == windowMin);
assert(stats.getWindowMax() == windowMax);
assert(fabs(stats.getAverage() / average - 1.0) < EPSILON);
assert(fabsf((float)stats.getAverage() / (float)average - 1.0f) < EPSILON);
} else {
assert(!stats.getNewStatsAvailableFlag());
@ -185,12 +187,12 @@ void MovingMinMaxAvgTests::runAllTests() {
min = std::min(min, sample);
max = std::max(max, sample);
average = (average * totalSamples + sample) / (totalSamples + 1);
average = (average * totalSamples + (double)sample) / (totalSamples + 1);
totalSamples++;
assert(stats.getMin() == min);
assert(stats.getMax() == max);
assert(fabs(stats.getAverage() / average - 1.0) < EPSILON);
assert(fabsf((float)stats.getAverage() / (float)average - 1.0f) < EPSILON);
if ((i + 1) % INTERVAL_LENGTH == 0) {
@ -209,7 +211,7 @@ void MovingMinMaxAvgTests::runAllTests() {
assert(stats.getWindowMin() == windowMin);
assert(stats.getWindowMax() == windowMax);
assert(fabs(stats.getAverage() / average - 1.0) < EPSILON);
assert(fabsf((float)stats.getAverage() / (float)average - 1.0f) < EPSILON);
} else {
assert(!stats.getNewStatsAvailableFlag());
@ -218,4 +220,3 @@ void MovingMinMaxAvgTests::runAllTests() {
}
printf("moving min/max/avg test passed!\n");
}
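
The assertions rewritten above narrow both sides to float and then compare the running average against the expected value with a relative check first and an absolute check as a fallback for values near zero. A hedged standalone version of that predicate (the EPSILON value is illustrative):

    #include <cmath>

    bool averagesMatch(double measured, double expected) {
        const float EPSILON = 0.000001f;   // illustrative tolerance
        return fabsf((float)measured / (float)expected - 1.0f) < EPSILON
            || fabsf((float)measured - (float)expected) < EPSILON;
    }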

View file

@ -240,7 +240,7 @@ bool vhacd::VHACDUtil::computeVHACD(FBXGeometry& geometry,
continue;
}
if (maximumMeshSize > 0.0 && largestDimension > maximumMeshSize) {
if (maximumMeshSize > 0.0f && largestDimension > maximumMeshSize) {
qDebug() << " Skipping (too large)...";
count++;
continue;