Merge branch 'master' of https://github.com/worklist/hifi into 19507

Conflicts:
	interface/interface_en.ts
stojce 2014-03-25 17:47:05 +01:00
commit 5e350df128
117 changed files with 1212 additions and 350 deletions


@ -38,7 +38,7 @@ Any variables that need to be set for CMake to find dependencies can be set as E
For example, to pass the QT_CMAKE_PREFIX_PATH variable during build file generation:
cmake .. -DQT_CMAKE_PREFIX_PATH=/usr/local/qt/5.2.0/clang_64/lib/cmake
cmake .. -DQT_CMAKE_PREFIX_PATH=/usr/local/qt/5.2.0/lib/cmake
UNIX


@ -16,7 +16,11 @@ elseif (CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_GNUCXX)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall")
endif(WIN32)
set(CMAKE_PREFIX_PATH ${CMAKE_PREFIX_PATH} $ENV{QT_CMAKE_PREFIX_PATH})
if (NOT QT_CMAKE_PREFIX_PATH)
set(QT_CMAKE_PREFIX_PATH $ENV{QT_CMAKE_PREFIX_PATH})
endif ()
set(CMAKE_PREFIX_PATH ${CMAKE_PREFIX_PATH} ${QT_CMAKE_PREFIX_PATH})
# set our Base SDK to 10.8
set(CMAKE_OSX_SYSROOT /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.8.sdk)


@ -120,10 +120,12 @@ void Agent::readPendingDatagrams() {
}
}
const QString AGENT_LOGGING_NAME = "agent";
void Agent::run() {
NodeList* nodeList = NodeList::getInstance();
nodeList->setOwnerType(NodeType::Agent);
ThreadedAssignment::commonInit(AGENT_LOGGING_NAME, NodeType::Agent);
NodeList* nodeList = NodeList::getInstance();
nodeList->addSetOfNodeTypesToNodeInterestSet(NodeSet() << NodeType::AudioMixer << NodeType::AvatarMixer);
// figure out the URL for the script for this agent assignment
@ -148,17 +150,6 @@ void Agent::run() {
qDebug() << "Downloaded script:" << scriptContents;
timeval startTime;
gettimeofday(&startTime, NULL);
QTimer* domainServerTimer = new QTimer(this);
connect(domainServerTimer, SIGNAL(timeout()), this, SLOT(checkInWithDomainServerOrExit()));
domainServerTimer->start(DOMAIN_SERVER_CHECK_IN_USECS / 1000);
QTimer* silentNodeTimer = new QTimer(this);
connect(silentNodeTimer, SIGNAL(timeout()), nodeList, SLOT(removeSilentNodes()));
silentNodeTimer->start(NODE_SILENCE_THRESHOLD_USECS / 1000);
// setup an Avatar for the script to use
AvatarData scriptedAvatar;


@ -33,6 +33,7 @@
#include <glm/gtx/vector_angle.hpp>
#include <QtCore/QCoreApplication>
#include <QtCore/QJsonObject>
#include <QtCore/QTimer>
#include <Logging.h>
@ -66,7 +67,11 @@ void attachNewBufferToNode(Node *newNode) {
AudioMixer::AudioMixer(const QByteArray& packet) :
ThreadedAssignment(packet),
_trailingSleepRatio(1.0f),
_minAudibilityThreshold(LOUDNESS_TO_DISTANCE_RATIO / 2.0f)
_minAudibilityThreshold(LOUDNESS_TO_DISTANCE_RATIO / 2.0f),
_performanceThrottlingRatio(0.0f),
_numStatFrames(0),
_sumListeners(0),
_sumMixes(0)
{
}
@ -94,6 +99,8 @@ void AudioMixer::addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuf
return;
}
++_sumMixes;
glm::quat inverseOrientation = glm::inverse(listeningNodeBuffer->getOrientation());
float distanceSquareToSource = glm::dot(relativePosition, relativePosition);
@ -349,9 +356,29 @@ void AudioMixer::readPendingDatagrams() {
}
}
void AudioMixer::sendStatsPacket() {
static QJsonObject statsObject;
statsObject["trailing_sleep_percentage"] = _trailingSleepRatio * 100.0f;
statsObject["performance_throttling_ratio"] = _performanceThrottlingRatio;
statsObject["average_listeners_per_frame"] = (float) _sumListeners / (float) _numStatFrames;
if (_sumListeners > 0) {
statsObject["average_mixes_per_listener"] = (float) _sumMixes / (float) _sumListeners;
} else {
statsObject["average_mixes_per_listener"] = 0.0;
}
ThreadedAssignment::addPacketStatsAndSendStatsPacket(statsObject);
_sumListeners = 0;
_sumMixes = 0;
_numStatFrames = 0;
}
void AudioMixer::run() {
commonInit(AUDIO_MIXER_LOGGING_TARGET_NAME, NodeType::AudioMixer);
ThreadedAssignment::commonInit(AUDIO_MIXER_LOGGING_TARGET_NAME, NodeType::AudioMixer);
NodeList* nodeList = NodeList::getInstance();
@ -368,7 +395,6 @@ void AudioMixer::run() {
+ numBytesForPacketHeaderGivenPacketType(PacketTypeMixedAudio)];
int usecToSleep = BUFFER_SEND_INTERVAL_USECS;
float audabilityCutoffRatio = 0;
const int TRAILING_AVERAGE_FRAMES = 100;
int framesSinceCutoffEvent = TRAILING_AVERAGE_FRAMES;
@ -396,47 +422,39 @@ void AudioMixer::run() {
_trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio)
+ (usecToSleep * CURRENT_FRAME_RATIO / (float) BUFFER_SEND_INTERVAL_USECS);
float lastCutoffRatio = audabilityCutoffRatio;
float lastCutoffRatio = _performanceThrottlingRatio;
bool hasRatioChanged = false;
if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
if (framesSinceCutoffEvent % TRAILING_AVERAGE_FRAMES == 0) {
qDebug() << "Current trailing sleep ratio:" << _trailingSleepRatio;
}
if (_trailingSleepRatio <= STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD) {
// we're struggling - change our min required loudness to reduce some load
audabilityCutoffRatio = audabilityCutoffRatio + (0.5f * (1.0f - audabilityCutoffRatio));
_performanceThrottlingRatio = _performanceThrottlingRatio + (0.5f * (1.0f - _performanceThrottlingRatio));
qDebug() << "Mixer is struggling, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
<< lastCutoffRatio << "and is now" << audabilityCutoffRatio;
<< lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
hasRatioChanged = true;
} else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && audabilityCutoffRatio != 0) {
} else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && _performanceThrottlingRatio != 0) {
// we've recovered and can back off the required loudness
audabilityCutoffRatio = audabilityCutoffRatio - RATIO_BACK_OFF;
_performanceThrottlingRatio = _performanceThrottlingRatio - RATIO_BACK_OFF;
if (audabilityCutoffRatio < 0) {
audabilityCutoffRatio = 0;
if (_performanceThrottlingRatio < 0) {
_performanceThrottlingRatio = 0;
}
qDebug() << "Mixer is recovering, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
<< lastCutoffRatio << "and is now" << audabilityCutoffRatio;
<< lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
hasRatioChanged = true;
}
if (hasRatioChanged) {
// set out min audability threshold from the new ratio
_minAudibilityThreshold = LOUDNESS_TO_DISTANCE_RATIO / (2.0f * (1.0f - audabilityCutoffRatio));
_minAudibilityThreshold = LOUDNESS_TO_DISTANCE_RATIO / (2.0f * (1.0f - _performanceThrottlingRatio));
qDebug() << "Minimum audability required to be mixed is now" << _minAudibilityThreshold;
framesSinceCutoffEvent = 0;
}
}
if (!hasRatioChanged) {
++framesSinceCutoffEvent;
}
foreach (const SharedNodePointer& node, nodeList->getNodeHash()) {
if (node->getType() == NodeType::Agent && node->getActiveSocket() && node->getLinkedData()
&& ((AudioMixerClientData*) node->getLinkedData())->getAvatarAudioRingBuffer()) {
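As an aside on the throttling arithmetic in the hunk above: each struggle event halves the remaining headroom of _performanceThrottlingRatio, which in turn raises _minAudibilityThreshold. A minimal standalone sketch of that progression (the constant's value is a placeholder, not taken from the codebase):

// Illustration only (not from the commit): how _performanceThrottlingRatio and the
// derived minimum-audibility threshold evolve if the mixer keeps struggling.
#include <cstdio>

int main() {
    const float LOUDNESS_TO_DISTANCE_RATIO = 0.00001f; // placeholder value, assumed for illustration
    float ratio = 0.0f;
    for (int event = 1; event <= 3; ++event) {
        ratio = ratio + (0.5f * (1.0f - ratio)); // 0.5, then 0.75, then 0.875
        float threshold = LOUDNESS_TO_DISTANCE_RATIO / (2.0f * (1.0f - ratio));
        printf("after struggle event %d: ratio=%.3f threshold=%g\n", event, ratio, threshold);
    }
    // The threshold is 2x, 4x, then 8x its unthrottled value, so quieter or more
    // distant sources are dropped from the mix first as load increases.
    return 0;
}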
@ -446,6 +464,8 @@ void AudioMixer::run() {
memcpy(clientMixBuffer + numBytesPacketHeader, _clientSamples, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
nodeList->writeDatagram(clientMixBuffer, NETWORK_BUFFER_LENGTH_BYTES_STEREO + numBytesPacketHeader, node);
++_sumListeners;
}
}
@ -456,6 +476,8 @@ void AudioMixer::run() {
}
}
++_numStatFrames;
QCoreApplication::processEvents();
if (_isFinished) {


@ -28,6 +28,8 @@ public slots:
void run();
void readPendingDatagrams();
void sendStatsPacket();
private:
/// adds one buffer to the mix for a listening node
void addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuffer* bufferToAdd,
@ -42,6 +44,10 @@ private:
float _trailingSleepRatio;
float _minAudibilityThreshold;
float _performanceThrottlingRatio;
int _numStatFrames;
int _sumListeners;
int _sumMixes;
};
#endif /* defined(__hifi__AudioMixer__) */


@ -12,6 +12,7 @@
#include <QtCore/QCoreApplication>
#include <QtCore/QElapsedTimer>
#include <QtCore/QJsonObject>
#include <QtCore/QTimer>
#include <Logging.h>
@ -29,7 +30,11 @@ const QString AVATAR_MIXER_LOGGING_NAME = "avatar-mixer";
const unsigned int AVATAR_DATA_SEND_INTERVAL_USECS = (1 / 60.0) * 1000 * 1000;
AvatarMixer::AvatarMixer(const QByteArray& packet) :
ThreadedAssignment(packet)
ThreadedAssignment(packet),
_trailingSleepRatio(1.0f),
_performanceThrottlingRatio(0.0f),
_sumListeners(0),
_numStatFrames(0)
{
// make sure we hear about node kills so we can tell the other nodes
connect(NodeList::getInstance(), &NodeList::nodeKilled, this, &AvatarMixer::nodeKilled);
@ -48,7 +53,7 @@ void attachAvatarDataToNode(Node* newNode) {
// 3) if we need to rate limit the amount of data we send, we can use a distance weighted "semi-random" function to
// determine which avatars are included in the packet stream
// 4) we should optimize the avatar data format to be more compact (100 bytes is pretty wasteful).
void broadcastAvatarData() {
void AvatarMixer::broadcastAvatarData() {
static QByteArray mixedAvatarByteArray;
int numPacketHeaderBytes = populatePacketHeader(mixedAvatarByteArray, PacketTypeBulkAvatarData);
@ -57,6 +62,7 @@ void broadcastAvatarData() {
foreach (const SharedNodePointer& node, nodeList->getNodeHash()) {
if (node->getLinkedData() && node->getType() == NodeType::Agent && node->getActiveSocket()) {
++_sumListeners;
// reset packet pointers for this node
mixedAvatarByteArray.resize(numPacketHeaderBytes);
@ -78,7 +84,8 @@ void broadcastAvatarData() {
// at a distance of twice the full rate distance, there will be a 50% chance of sending this avatar's update
const float FULL_RATE_DISTANCE = 2.f;
// Decide whether to send this avatar's data based on it's distance from us
if ((distanceToAvatar == 0.f) || (randFloat() < FULL_RATE_DISTANCE / distanceToAvatar)) {
if ((distanceToAvatar == 0.f) || (randFloat() < FULL_RATE_DISTANCE / distanceToAvatar)
* (1 - _performanceThrottlingRatio)) {
QByteArray avatarByteArray;
avatarByteArray.append(otherNode->getUUID().toRfc4122());
avatarByteArray.append(otherAvatar.toByteArray());
@ -241,11 +248,24 @@ void AvatarMixer::readPendingDatagrams() {
}
}
void AvatarMixer::sendStatsPacket() {
QJsonObject statsObject;
statsObject["average_listeners_last_second"] = (float) _sumListeners / (float) _numStatFrames;
statsObject["trailing_sleep_percentage"] = _trailingSleepRatio * 100;
statsObject["performance_throttling_ratio"] = _performanceThrottlingRatio;
ThreadedAssignment::addPacketStatsAndSendStatsPacket(statsObject);
_sumListeners = 0;
_numStatFrames = 0;
}
const qint64 AVATAR_IDENTITY_KEYFRAME_MSECS = 5000;
const qint64 AVATAR_BILLBOARD_KEYFRAME_MSECS = 5000;
void AvatarMixer::run() {
commonInit(AVATAR_MIXER_LOGGING_NAME, NodeType::AvatarMixer);
ThreadedAssignment::commonInit(AVATAR_MIXER_LOGGING_NAME, NodeType::AvatarMixer);
NodeList* nodeList = NodeList::getInstance();
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
@ -263,12 +283,57 @@ void AvatarMixer::run() {
QElapsedTimer billboardTimer;
billboardTimer.start();
int usecToSleep = AVATAR_DATA_SEND_INTERVAL_USECS;
const int TRAILING_AVERAGE_FRAMES = 100;
int framesSinceCutoffEvent = TRAILING_AVERAGE_FRAMES;
while (!_isFinished) {
QCoreApplication::processEvents();
++_numStatFrames;
if (_isFinished) {
break;
const float STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.10f;
const float BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.20f;
const float RATIO_BACK_OFF = 0.02f;
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
if (usecToSleep < 0) {
usecToSleep = 0;
}
_trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio)
+ (usecToSleep * CURRENT_FRAME_RATIO / (float) AVATAR_DATA_SEND_INTERVAL_USECS);
float lastCutoffRatio = _performanceThrottlingRatio;
bool hasRatioChanged = false;
if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
if (_trailingSleepRatio <= STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD) {
// we're struggling - change our min required loudness to reduce some load
_performanceThrottlingRatio = _performanceThrottlingRatio + (0.5f * (1.0f - _performanceThrottlingRatio));
qDebug() << "Mixer is struggling, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
<< lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
hasRatioChanged = true;
} else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && _performanceThrottlingRatio != 0) {
// we've recovered and can back off the required loudness
_performanceThrottlingRatio = _performanceThrottlingRatio - RATIO_BACK_OFF;
if (_performanceThrottlingRatio < 0) {
_performanceThrottlingRatio = 0;
}
qDebug() << "Mixer is recovering, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
<< lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
hasRatioChanged = true;
}
if (hasRatioChanged) {
framesSinceCutoffEvent = 0;
}
}
broadcastAvatarData();
@ -286,7 +351,13 @@ void AvatarMixer::run() {
billboardTimer.restart();
}
int usecToSleep = usecTimestamp(&startTime) + (++nextFrame * AVATAR_DATA_SEND_INTERVAL_USECS) - usecTimestampNow();
QCoreApplication::processEvents();
if (_isFinished) {
break;
}
usecToSleep = usecTimestamp(&startTime) + (++nextFrame * AVATAR_DATA_SEND_INTERVAL_USECS) - usecTimestampNow();
if (usecToSleep > 0) {
usleep(usecToSleep);


@ -24,6 +24,17 @@ public slots:
void nodeKilled(SharedNodePointer killedNode);
void readPendingDatagrams();
void sendStatsPacket();
private:
void broadcastAvatarData();
float _trailingSleepRatio;
float _performanceThrottlingRatio;
int _sumListeners;
int _numStatFrames;
};
#endif /* defined(__hifi__AvatarMixer__) */


@ -35,7 +35,7 @@ public:
virtual void run();
virtual void readPendingDatagrams();
private slots:
void maybeAttachSession(const SharedNodePointer& node);


@ -236,7 +236,7 @@ void OctreeServer::initHTTPManager(int port) {
_httpManager = new HTTPManager(port, documentRoot, this, this);
}
bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QString& path) {
bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url) {
#ifdef FORCE_CRASH
if (connection->requestOperation() == QNetworkAccessManager::GetOperation
@ -259,9 +259,9 @@ bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QString&
bool showStats = false;
if (connection->requestOperation() == QNetworkAccessManager::GetOperation) {
if (path == "/") {
if (url.path() == "/") {
showStats = true;
} else if (path == "/resetStats") {
} else if (url.path() == "/resetStats") {
_octreeInboundPacketProcessor->resetStats();
resetSendingStats();
showStats = true;
@ -823,9 +823,9 @@ void OctreeServer::run() {
_safeServerName = getMyServerName();
// Before we do anything else, create our tree...
_tree = createTree();
// change the logging target name while this is running
Logging::setTargetName(getMyLoggingServerTargetName());
// use common init to setup common timers and logging
commonInit(getMyLoggingServerTargetName(), getMyNodeType());
// Now would be a good time to parse our arguments, if we got them as assignment
if (getPayload().size() > 0) {
@ -988,14 +988,6 @@ void OctreeServer::run() {
strftime(utcBuffer, MAX_TIME_LENGTH, " [%m/%d/%Y %X UTC]", gmtm);
}
qDebug() << "Now running... started at: " << localBuffer << utcBuffer;
QTimer* domainServerTimer = new QTimer(this);
connect(domainServerTimer, SIGNAL(timeout()), this, SLOT(checkInWithDomainServerOrExit()));
domainServerTimer->start(DOMAIN_SERVER_CHECK_IN_USECS / 1000);
QTimer* silentNodeTimer = new QTimer(this);
connect(silentNodeTimer, SIGNAL(timeout()), nodeList, SLOT(removeSilentNodes()));
silentNodeTimer->start(NODE_SILENCE_THRESHOLD_USECS / 1000);
}
void OctreeServer::nodeAdded(SharedNodePointer node) {


@ -97,7 +97,7 @@ public:
static void trackPacketSendingTime(float time);
static float getAveragePacketSendingTime() { return _averagePacketSendingTime.getAverage(); }
bool handleHTTPRequest(HTTPConnection* connection, const QString& path);
bool handleHTTPRequest(HTTPConnection* connection, const QUrl& url);
virtual void aboutToFinish();


@ -46,7 +46,6 @@ public:
virtual bool hasSpecialPacketToSend(const SharedNodePointer& node);
virtual int sendSpecialPacket(const SharedNodePointer& node);
private:
bool _sendEnvironments;
bool _sendMinimalEnvironment;


@ -38,4 +38,8 @@
span.port {
color: #666666;
}
.stale {
color: red;
}


@ -1,3 +1,3 @@
</div>
<script src='js/jquery-2.0.3.min.js'></script>
<script src="js/bootstrap.min.js"></script>
<script src='/js/jquery-2.0.3.min.js'></script>
<script src='/js/bootstrap.min.js'></script>


@ -4,8 +4,8 @@
<title>domain-server</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<!-- Bootstrap -->
<link href="css/bootstrap.min.css" rel="stylesheet" media="screen">
<link href="css/style.css" rel="stylesheet" media="screen">
<link href="/css/bootstrap.min.css" rel="stylesheet" media="screen">
<link href="/css/style.css" rel="stylesheet" media="screen">
</head>
<body>
<div class="container">


@ -7,7 +7,7 @@ $(document).ready(function(){
$.each(json.nodes, function (uuid, data) {
nodesTableBody += "<tr>";
nodesTableBody += "<td>" + data.type + "</td>";
nodesTableBody += "<td>" + uuid + "</td>";
nodesTableBody += "<td><a href='stats/?uuid=" + uuid + "'>" + uuid + "</a></td>";
nodesTableBody += "<td>" + (data.pool ? data.pool : "") + "</td>";
nodesTableBody += "<td>" + data.public.ip + "<span class='port'>:" + data.public.port + "</span></td>";
nodesTableBody += "<td>" + data.local.ip + "<span class='port'>:" + data.local.port + "</span></td>";
@ -42,7 +42,7 @@ $(document).ready(function(){
$(document.body).on('click', '.glyphicon-remove', function(){
// fire off a delete for this node
$.ajax({
url: "/node/" + $(this).data('uuid'),
url: "/nodes/" + $(this).data('uuid'),
type: 'DELETE',
success: function(result) {
console.log("Succesful request to delete node.");


@ -0,0 +1,6 @@
<!--#include virtual="header.html"-->
<div id="stats-lead" class="table-lead"><h3>Stats</h3><div class="lead-line"></div></div>
<table id="stats-table" class="table"><tbody></tbody></table>
<!--#include virtual="footer.html"-->
<script src='js/stats.js'></script>
<!--#include virtual="page-end.html"-->


@ -0,0 +1,41 @@
function qs(key) {
key = key.replace(/[*+?^$.\[\]{}()|\\\/]/g, "\\$&"); // escape RegEx meta chars
var match = location.search.match(new RegExp("[?&]"+key+"=([^&]+)(&|$)"));
return match && decodeURIComponent(match[1].replace(/\+/g, " "));
}
$(document).ready(function(){
// setup a function to grab the nodeStats
function getNodeStats() {
var uuid = qs("uuid");
var statsTableBody = "";
$.getJSON("/nodes/" + uuid + ".json", function(json){
// update the table header with the right node type
$('#stats-lead h3').html(json.node_type + " stats (" + uuid + ")");
delete json.node_type;
$.each(json, function(key, value) {
statsTableBody += "<tr>";
statsTableBody += "<td>" + key + "</td>";
statsTableBody += "<td>" + value + "</td>";
statsTableBody += "</tr>";
});
$('#stats-table tbody').html(statsTableBody);
}).fail(function(data) {
$('#stats-table td').each(function(){
$(this).addClass('stale');
});
});
}
// do the first GET on page load
getNodeStats();
// grab the new assignments JSON every second
var getNodeStatsInterval = setInterval(getNodeStats, 1000);
});


@ -603,6 +603,11 @@ void DomainServer::readAvailableDatagrams() {
if (noisyMessage) {
lastNoisyMessage = timeNow;
}
} else if (requestType == PacketTypeNodeJsonStats) {
SharedNodePointer matchingNode = nodeList->sendingNodeForPacket(receivedPacket);
if (matchingNode) {
reinterpret_cast<DomainServerNodeData*>(matchingNode->getLinkedData())->parseJSONStatsPacket(receivedPacket);
}
}
}
}
@ -646,14 +651,14 @@ QJsonObject DomainServer::jsonObjectForNode(const SharedNodePointer& node) {
return nodeJson;
}
bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QString& path) {
bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url) {
const QString JSON_MIME_TYPE = "application/json";
const QString URI_ASSIGNMENT = "/assignment";
const QString URI_NODE = "/node";
const QString URI_NODES = "/nodes";
if (connection->requestOperation() == QNetworkAccessManager::GetOperation) {
if (path == "/assignments.json") {
if (url.path() == "/assignments.json") {
// user is asking for json list of assignments
// setup the JSON
@ -697,7 +702,7 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QString&
// we've processed this request
return true;
} else if (path == "/nodes.json") {
} else if (url.path() == QString("%1.json").arg(URI_NODES)) {
// setup the JSON
QJsonObject rootJSON;
QJsonObject nodesJSON;
@ -718,14 +723,41 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QString&
// send the response
connection->respond(HTTPConnection::StatusCode200, nodesDocument.toJson(), qPrintable(JSON_MIME_TYPE));
return true;
} else {
const QString NODE_REGEX_STRING =
QString("\\%1\\/([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}).json\\/?$").arg(URI_NODES);
QRegExp nodeShowRegex(NODE_REGEX_STRING);
if (nodeShowRegex.indexIn(url.path()) != -1) {
QUuid matchingUUID = QUuid(nodeShowRegex.cap(1));
// see if we have a node that matches this ID
SharedNodePointer matchingNode = NodeList::getInstance()->nodeWithUUID(matchingUUID);
if (matchingNode) {
// create a QJsonDocument with the stats QJsonObject
QJsonObject statsObject =
reinterpret_cast<DomainServerNodeData*>(matchingNode->getLinkedData())->getStatsJSONObject();
// add the node type to the JSON data for output purposes
statsObject["node_type"] = NodeType::getNodeTypeName(matchingNode->getType()).toLower().replace(' ', '-');
QJsonDocument statsDocument(statsObject);
// send the response
connection->respond(HTTPConnection::StatusCode200, statsDocument.toJson(), qPrintable(JSON_MIME_TYPE));
// tell the caller we processed the request
return true;
}
}
}
} else if (connection->requestOperation() == QNetworkAccessManager::PostOperation) {
if (path == URI_ASSIGNMENT) {
if (url.path() == URI_ASSIGNMENT) {
// this is a script upload - ask the HTTPConnection to parse the form data
QList<FormData> formData = connection->parseFormData();
// check how many instances of this assignment the user wants by checking the ASSIGNMENT-INSTANCES header
const QString ASSIGNMENT_INSTANCES_HEADER = "ASSIGNMENT-INSTANCES";
@ -765,13 +797,15 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QString&
// respond with a 200 code for successful upload
connection->respond(HTTPConnection::StatusCode200);
return true;
}
} else if (connection->requestOperation() == QNetworkAccessManager::DeleteOperation) {
if (path.startsWith(URI_NODE)) {
if (url.path().startsWith(URI_NODES)) {
// this is a request to DELETE a node by UUID
// pull the UUID from the url
QUuid deleteUUID = QUuid(path.mid(URI_NODE.size() + sizeof('/')));
QUuid deleteUUID = QUuid(url.path().mid(URI_NODES.size() + sizeof('/')));
if (!deleteUUID.isNull()) {
SharedNodePointer nodeToKill = NodeList::getInstance()->nodeWithUUID(deleteUUID);


@ -30,7 +30,7 @@ public:
bool requiresAuthentication() const { return !_nodeAuthenticationURL.isEmpty(); }
bool handleHTTPRequest(HTTPConnection* connection, const QString& path);
bool handleHTTPRequest(HTTPConnection* connection, const QUrl& url);
void exit(int retCode = 0);


@ -6,11 +6,43 @@
// Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
//
#include <QtCore/QDataStream>
#include <QtCore/QJsonObject>
#include <QtCore/QVariant>
#include <PacketHeaders.h>
#include "DomainServerNodeData.h"
DomainServerNodeData::DomainServerNodeData() :
_sessionSecretHash(),
_staticAssignmentUUID()
_staticAssignmentUUID(),
_statsJSONObject()
{
}
void DomainServerNodeData::parseJSONStatsPacket(const QByteArray& statsPacket) {
// push past the packet header
QDataStream packetStream(statsPacket);
packetStream.skipRawData(numBytesForPacketHeader(statsPacket));
QVariantMap unpackedVariantMap;
packetStream >> unpackedVariantMap;
QJsonObject unpackedStatsJSON = QJsonObject::fromVariantMap(unpackedVariantMap);
_statsJSONObject = mergeJSONStatsFromNewObject(unpackedStatsJSON, _statsJSONObject);
}
QJsonObject DomainServerNodeData::mergeJSONStatsFromNewObject(const QJsonObject& newObject, QJsonObject destinationObject) {
foreach(const QString& key, newObject.keys()) {
if (newObject[key].isObject() && destinationObject.contains(key)) {
destinationObject[key] = mergeJSONStatsFromNewObject(newObject[key].toObject(), destinationObject[key].toObject());
} else {
destinationObject[key] = newObject[key];
}
}
return destinationObject;
}
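The recursive merge above is easiest to see with a concrete example. A minimal sketch under assumed keys and values (the nesting key is hypothetical; the per-mixer stat names echo the ones used earlier in this commit):

// Illustration only; the keys and values here are chosen to show the behaviour,
// not taken from an actual stats packet.
#include <QJsonObject>

int main() {
    QJsonObject existing, incoming, existingNested, incomingNested;
    existingNested["average_mixes_per_listener"] = 2;    // arrived in an earlier packet
    existing["mixer_stats"] = existingNested;             // "mixer_stats" is a hypothetical key
    incomingNested["average_listeners_per_frame"] = 3;    // arrives in a newer packet
    incoming["mixer_stats"] = incomingNested;
    // mergeJSONStatsFromNewObject(incoming, existing) would return
    // {"mixer_stats": {"average_mixes_per_listener": 2, "average_listeners_per_frame": 3}}
    // because nested objects are merged key by key instead of being overwritten.
    return 0;
}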


@ -19,13 +19,20 @@ public:
DomainServerNodeData();
int parseData(const QByteArray& packet) { return 0; }
const QJsonObject& getStatsJSONObject() const { return _statsJSONObject; }
void parseJSONStatsPacket(const QByteArray& statsPacket);
void setStaticAssignmentUUID(const QUuid& staticAssignmentUUID) { _staticAssignmentUUID = staticAssignmentUUID; }
const QUuid& getStaticAssignmentUUID() const { return _staticAssignmentUUID; }
QHash<QUuid, QUuid>& getSessionSecretHash() { return _sessionSecretHash; }
private:
QJsonObject mergeJSONStatsFromNewObject(const QJsonObject& newObject, QJsonObject destinationObject);
QHash<QUuid, QUuid> _sessionSecretHash;
QUuid _staticAssignmentUUID;
QJsonObject _statsJSONObject;
};
#endif /* defined(__hifi__DomainServerNodeData__) */


@ -0,0 +1,52 @@
//
// audioDeviceExample.js
// hifi
//
// Created by Brad Hefta-Gaub on 3/22/14
// Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
//
// This is an example script that demonstrates use of the Menu object
//
var outputDevices = AudioDevice.getOutputDevices();
var defaultOutputDevice = AudioDevice.getDefaultOutputDevice();
var selectOutputDevice = outputDevices[0];
print("Output Devices:");
for(var i = 0; i < outputDevices.length; i++) {
if (outputDevices[i] == defaultOutputDevice) {
print(" " + outputDevices[i] + " << default");
} else {
print(" " + outputDevices[i]);
}
}
print("Default Output Device:" + defaultOutputDevice);
print("Selected Output Device:" + selectOutputDevice);
print("Current Audio Output Device: " + AudioDevice.getOutputDevice());
AudioDevice.setOutputDevice(selectOutputDevice);
print("Audio Output Device: " + AudioDevice.getOutputDevice());
var inputDevices = AudioDevice.getInputDevices();
var selectInputDevice = inputDevices[0];
var defaultInputDevice = AudioDevice.getDefaultInputDevice();
print("Input Devices:");
for(var i = 0; i < inputDevices.length; i++) {
if (inputDevices[i] == defaultInputDevice) {
print(" " + inputDevices[i] + " << default");
} else {
print(" " + inputDevices[i]);
}
}
print("Default Input Device:" + defaultInputDevice);
print("Selected Input Device:" + selectInputDevice);
print("Current Audio Input Device: " + AudioDevice.getInputDevice());
AudioDevice.setInputDevice(selectInputDevice);
print("Audio Input Device: " + AudioDevice.getInputDevice());
print("Audio Input Device Level: " + AudioDevice.getInputVolume());
AudioDevice.setInputVolume(AudioDevice.getInputVolume() * 2); // twice as loud!
print("Audio Input Device Level: " + AudioDevice.getInputVolume());
Script.stop();


@ -0,0 +1,5 @@
// defaultScripts.js
// Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
Script.include("lookWithTouch.js");
Script.include("editVoxels.js");
Script.include("selectAudioDevice.js");


@ -32,6 +32,7 @@ function setupMenus() {
Menu.addSeparator("Foo","Removable Tools");
Menu.addMenuItem("Foo","Remove Foo item 4");
Menu.addMenuItem("Foo","Remove Foo");
Menu.addMenuItem("Foo","Remove Bar-Spam");
Menu.addMenu("Bar");
Menu.addMenuItem("Bar","Bar item 1", "b");
@ -91,6 +92,10 @@ function menuItemEvent(menuItem) {
if (menuItem == "Remove Foo") {
Menu.removeMenu("Foo");
}
if (menuItem == "Remove Bar-Spam") {
Menu.removeMenu("Bar > Spam");
}
if (menuItem == "Remove Spam item 2") {
Menu.removeMenuItem("Bar > Spam", "Spam item 2");
}


@ -0,0 +1,114 @@
//
// audioDeviceExample.js
// hifi
//
// Created by Brad Hefta-Gaub on 3/22/14
// Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
//
// This is an example script that demonstrates use of the Menu object
//
if (typeof String.prototype.startsWith != 'function') {
String.prototype.startsWith = function (str){
return this.slice(0, str.length) == str;
};
}
if (typeof String.prototype.endsWith != 'function') {
String.prototype.endsWith = function (str){
return this.slice(-str.length) == str;
};
}
if (typeof String.prototype.trimStartsWith != 'function') {
String.prototype.trimStartsWith = function (str){
if (this.startsWith(str)) {
return this.substr(str.length);
}
return this;
};
}
if (typeof String.prototype.trimEndsWith != 'function') {
String.prototype.trimEndsWith = function (str){
if (this.endsWith(str)) {
return this.substr(0,this.length - str.length);
}
return this;
};
}
var selectedInputMenu = "";
var selectedOutputMenu = "";
function setupAudioMenus() {
Menu.addMenu("Tools > Audio");
Menu.addSeparator("Tools > Audio","Output Audio Device");
var outputDevices = AudioDevice.getOutputDevices();
var selectedOutputDevice = AudioDevice.getOutputDevice();
for(var i = 0; i < outputDevices.length; i++) {
var thisDeviceSelected = (outputDevices[i] == selectedOutputDevice);
var menuItem = "Use " + outputDevices[i] + " for Output";
Menu.addMenuItem({
menuName: "Tools > Audio",
menuItemName: menuItem,
isCheckable: true,
isChecked: thisDeviceSelected
});
if (thisDeviceSelected) {
selectedOutputMenu = menuItem;
}
}
Menu.addSeparator("Tools > Audio","Input Audio Device");
var inputDevices = AudioDevice.getInputDevices();
var selectedInputDevice = AudioDevice.getInputDevice();
for(var i = 0; i < inputDevices.length; i++) {
var thisDeviceSelected = (inputDevices[i] == selectedInputDevice);
var menuItem = "Use " + inputDevices[i] + " for Input";
Menu.addMenuItem({
menuName: "Tools > Audio",
menuItemName: menuItem,
isCheckable: true,
isChecked: thisDeviceSelected
});
if (thisDeviceSelected) {
selectedInputMenu = menuItem;
}
}
}
setupAudioMenus();
function scriptEnding() {
Menu.removeMenu("Tools > Audio");
}
Script.scriptEnding.connect(scriptEnding);
function menuItemEvent(menuItem) {
if (menuItem.startsWith("Use ")) {
if (menuItem.endsWith(" for Output")) {
var selectedDevice = menuItem.trimStartsWith("Use ").trimEndsWith(" for Output");
print("output audio selection..." + selectedDevice);
Menu.setIsOptionChecked(selectedOutputMenu, false);
selectedOutputMenu = menuItem;
Menu.setIsOptionChecked(selectedOutputMenu, true);
AudioDevice.setOutputDevice(selectedDevice);
} else if (menuItem.endsWith(" for Input")) {
var selectedDevice = menuItem.trimStartsWith("Use ").trimEndsWith(" for Input");
print("input audio selection..." + selectedDevice);
Menu.setIsOptionChecked(selectedInputMenu, false);
selectedInputMenu = menuItem;
Menu.setIsOptionChecked(selectedInputMenu, true);
AudioDevice.setInputDevice(selectedDevice);
}
}
}
Menu.menuItemEvent.connect(menuItemEvent);


@ -0,0 +1,18 @@
//
// settingsExample.js
// hifi
//
// Created by Brad Hefta-Gaub on 3/22/14
// Copyright (c) 2013 HighFidelity, Inc. All rights reserved.
//
// This is an example script that demonstrates use of the Menu object
//
print("mySetting: " + Settings.getValue("mySetting"));
Settings.setValue("mySetting", "spam");
print("mySetting: " + Settings.getValue("mySetting"));
Script.stop();


@ -48,7 +48,7 @@ configure_file(InterfaceVersion.h.in "${PROJECT_BINARY_DIR}/includes/InterfaceVe
# grab the implementation and header files from src dirs
file(GLOB INTERFACE_SRCS src/*.cpp src/*.h)
foreach(SUBDIR avatar devices renderer ui starfield location)
foreach(SUBDIR avatar devices renderer ui starfield location scripting voxels)
file(GLOB_RECURSE SUBDIR_SRCS src/${SUBDIR}/*.cpp src/${SUBDIR}/*.h)
set(INTERFACE_SRCS ${INTERFACE_SRCS} "${SUBDIR_SRCS}")
endforeach(SUBDIR)


@ -4,22 +4,22 @@
<context>
<name>Application</name>
<message>
<location filename="src/Application.cpp" line="1364"/>
<location filename="src/Application.cpp" line="1368"/>
<source>Export Voxels</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/Application.cpp" line="1365"/>
<location filename="src/Application.cpp" line="1369"/>
<source>Sparse Voxel Octree Files (*.svo)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/Application.cpp" line="3580"/>
<location filename="src/Application.cpp" line="3596"/>
<source>Open Script</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/Application.cpp" line="3581"/>
<location filename="src/Application.cpp" line="3597"/>
<source>JavaScript Files (*.js)</source>
<translation type="unfinished"></translation>
</message>
@ -113,18 +113,18 @@
<context>
<name>Menu</name>
<message>
<location filename="src/Menu.cpp" line="438"/>
<location filename="src/Menu.cpp" line="449"/>
<source>Open .ini config file</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/Menu.cpp" line="440"/>
<location filename="src/Menu.cpp" line="452"/>
<location filename="src/Menu.cpp" line="451"/>
<location filename="src/Menu.cpp" line="463"/>
<source>Text files (*.ini)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/Menu.cpp" line="450"/>
<location filename="src/Menu.cpp" line="461"/>
<source>Save .ini config file</source>
<translation type="unfinished"></translation>
</message>
@ -132,51 +132,30 @@
<context>
<name>QObject</name>
<message>
<location filename="src/ImportDialog.cpp" line="22"/>
<location filename="src/ImportDialog.cpp" line="23"/>
<location filename="src/ui/ImportDialog.cpp" line="22"/>
<location filename="src/ui/ImportDialog.cpp" line="23"/>
<source>Import Voxels</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ImportDialog.cpp" line="24"/>
<location filename="src/ui/ImportDialog.cpp" line="24"/>
<source>Loading ...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ImportDialog.cpp" line="25"/>
<location filename="src/ui/ImportDialog.cpp" line="25"/>
<source>Place voxels</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ImportDialog.cpp" line="26"/>
<location filename="src/ui/ImportDialog.cpp" line="26"/>
<source>&lt;b&gt;Import&lt;/b&gt; %1 as voxels</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ImportDialog.cpp" line="27"/>
<location filename="src/ui/ImportDialog.cpp" line="27"/>
<source>Cancel</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>SnapshotShareDialog</name>
<message>
<location filename="ui/shareSnapshot.ui" line="38"/>
<location filename="../build/interface/ui_shareSnapshot.h" line="166"/>
<source>Share with community</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/shareSnapshot.ui" line="170"/>
<location filename="../build/interface/ui_shareSnapshot.h" line="167"/>
<source>Notes about this image</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/shareSnapshot.ui" line="240"/>
<location filename="../build/interface/ui_shareSnapshot.h" line="168"/>
<source>Share</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS>


@ -0,0 +1,14 @@
#version 120
//
// model_shadow.frag
// fragment shader
//
// Created by Andrzej Kapolka on 3/24/14.
// Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
//
void main(void) {
// fixed color for now (we may eventually want to use texture alpha)
gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);
}


@ -0,0 +1,14 @@
#version 120
//
// model_shadow.vert
// vertex shader
//
// Created by Andrzej Kapolka on 3/24/14.
// Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
//
void main(void) {
// just use standard pipeline transform
gl_Position = ftransform();
}


@ -0,0 +1,27 @@
#version 120
//
// skin_model_shadow.vert
// vertex shader
//
// Created by Andrzej Kapolka on 3/24/14.
// Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
//
const int MAX_CLUSTERS = 128;
const int INDICES_PER_VERTEX = 4;
uniform mat4 clusterMatrices[MAX_CLUSTERS];
attribute vec4 clusterIndices;
attribute vec4 clusterWeights;
void main(void) {
vec4 position = vec4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < INDICES_PER_VERTEX; i++) {
mat4 clusterMatrix = clusterMatrices[int(clusterIndices[i])];
float clusterWeight = clusterWeights[i];
position += clusterMatrix * gl_Vertex * clusterWeight;
}
gl_Position = gl_ModelViewProjectionMatrix * position;
}


@ -65,17 +65,21 @@
#include <FstReader.h>
#include "Application.h"
#include "ClipboardScriptingInterface.h"
#include "InterfaceVersion.h"
#include "Menu.h"
#include "MenuScriptingInterface.h"
#include "Util.h"
#include "devices/OculusManager.h"
#include "devices/TV3DManager.h"
#include "renderer/ProgramObject.h"
#include "ui/TextRenderer.h"
#include "InfoView.h"
#include "scripting/AudioDeviceScriptingInterface.h"
#include "scripting/ClipboardScriptingInterface.h"
#include "scripting/MenuScriptingInterface.h"
#include "scripting/SettingsScriptingInterface.h"
#include "ui/InfoView.h"
#include "ui/Snapshot.h"
#include "ui/TextRenderer.h"
using namespace std;
@ -2163,21 +2167,22 @@ void Application::updateShadowMap() {
glViewport(0, 0, fbo->width(), fbo->height());
glm::vec3 lightDirection = -getSunDirection();
glm::quat rotation = glm::inverse(rotationBetween(IDENTITY_FRONT, lightDirection));
glm::vec3 translation = glm::vec3();
glm::quat rotation = rotationBetween(IDENTITY_FRONT, lightDirection);
glm::quat inverseRotation = glm::inverse(rotation);
float nearScale = 0.0f;
const float MAX_SHADOW_DISTANCE = 2.0f;
float farScale = (MAX_SHADOW_DISTANCE - _viewFrustum.getNearClip()) / (_viewFrustum.getFarClip() - _viewFrustum.getNearClip());
float farScale = (MAX_SHADOW_DISTANCE - _viewFrustum.getNearClip()) /
(_viewFrustum.getFarClip() - _viewFrustum.getNearClip());
loadViewFrustum(_myCamera, _viewFrustum);
glm::vec3 points[] = {
rotation * (glm::mix(_viewFrustum.getNearTopLeft(), _viewFrustum.getFarTopLeft(), nearScale) + translation),
rotation * (glm::mix(_viewFrustum.getNearTopRight(), _viewFrustum.getFarTopRight(), nearScale) + translation),
rotation * (glm::mix(_viewFrustum.getNearBottomLeft(), _viewFrustum.getFarBottomLeft(), nearScale) + translation),
rotation * (glm::mix(_viewFrustum.getNearBottomRight(), _viewFrustum.getFarBottomRight(), nearScale) + translation),
rotation * (glm::mix(_viewFrustum.getNearTopLeft(), _viewFrustum.getFarTopLeft(), farScale) + translation),
rotation * (glm::mix(_viewFrustum.getNearTopRight(), _viewFrustum.getFarTopRight(), farScale) + translation),
rotation * (glm::mix(_viewFrustum.getNearBottomLeft(), _viewFrustum.getFarBottomLeft(), farScale) + translation),
rotation * (glm::mix(_viewFrustum.getNearBottomRight(), _viewFrustum.getFarBottomRight(), farScale) + translation) };
inverseRotation * (glm::mix(_viewFrustum.getNearTopLeft(), _viewFrustum.getFarTopLeft(), nearScale)),
inverseRotation * (glm::mix(_viewFrustum.getNearTopRight(), _viewFrustum.getFarTopRight(), nearScale)),
inverseRotation * (glm::mix(_viewFrustum.getNearBottomLeft(), _viewFrustum.getFarBottomLeft(), nearScale)),
inverseRotation * (glm::mix(_viewFrustum.getNearBottomRight(), _viewFrustum.getFarBottomRight(), nearScale)),
inverseRotation * (glm::mix(_viewFrustum.getNearTopLeft(), _viewFrustum.getFarTopLeft(), farScale)),
inverseRotation * (glm::mix(_viewFrustum.getNearTopRight(), _viewFrustum.getFarTopRight(), farScale)),
inverseRotation * (glm::mix(_viewFrustum.getNearBottomLeft(), _viewFrustum.getFarBottomLeft(), farScale)),
inverseRotation * (glm::mix(_viewFrustum.getNearBottomRight(), _viewFrustum.getFarBottomRight(), farScale)) };
glm::vec3 minima(FLT_MAX, FLT_MAX, FLT_MAX), maxima(-FLT_MAX, -FLT_MAX, -FLT_MAX);
for (size_t i = 0; i < sizeof(points) / sizeof(points[0]); i++) {
minima = glm::min(minima, points[i]);
@ -2190,9 +2195,20 @@ void Application::updateShadowMap() {
// save the combined matrix for rendering
_shadowMatrix = glm::transpose(glm::translate(glm::vec3(0.5f, 0.5f, 0.5f)) * glm::scale(glm::vec3(0.5f, 0.5f, 0.5f)) *
glm::ortho(minima.x, maxima.x, minima.y, maxima.y, -maxima.z, -minima.z) *
glm::mat4_cast(rotation) * glm::translate(translation));
glm::ortho(minima.x, maxima.x, minima.y, maxima.y, -maxima.z, -minima.z) * glm::mat4_cast(inverseRotation));
// update the shadow view frustum
_shadowViewFrustum.setPosition(rotation * ((minima + maxima) * 0.5f));
_shadowViewFrustum.setOrientation(rotation);
_shadowViewFrustum.setOrthographic(true);
_shadowViewFrustum.setWidth(maxima.x - minima.x);
_shadowViewFrustum.setHeight(maxima.y - minima.y);
_shadowViewFrustum.setNearClip(minima.z);
_shadowViewFrustum.setFarClip(maxima.z);
_shadowViewFrustum.setEyeOffsetPosition(glm::vec3());
_shadowViewFrustum.setEyeOffsetOrientation(glm::quat());
_shadowViewFrustum.calculate();
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadIdentity();
@ -2201,16 +2217,14 @@ void Application::updateShadowMap() {
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glLoadIdentity();
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glm::vec3 axis = glm::axis(inverseRotation);
glRotatef(glm::degrees(glm::angle(inverseRotation)), axis.x, axis.y, axis.z);
// store view matrix without translation, which we'll use for precision-sensitive objects
glGetFloatv(GL_MODELVIEW_MATRIX, (GLfloat*)&_untranslatedViewMatrix);
_viewMatrixTranslation = translation;
_viewMatrixTranslation = glm::vec3();
glTranslatef(translation.x, translation.y, translation.z);
_avatarManager.renderAvatars(true);
_avatarManager.renderAvatars(Avatar::SHADOW_RENDER_MODE);
_particles.render();
glPopMatrix();
@ -2388,7 +2402,7 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
}
bool mirrorMode = (whichCamera.getInterpolatedMode() == CAMERA_MODE_MIRROR);
_avatarManager.renderAvatars(mirrorMode, selfAvatarOnly);
_avatarManager.renderAvatars(mirrorMode ? Avatar::MIRROR_RENDER_MODE : Avatar::NORMAL_RENDER_MODE, selfAvatarOnly);
if (!selfAvatarOnly) {
// Render the world box
@ -3548,6 +3562,8 @@ void Application::loadScript(const QString& fileNameString) {
scriptEngine->registerGlobalObject("Overlays", &_overlays);
scriptEngine->registerGlobalObject("Menu", MenuScriptingInterface::getInstance());
scriptEngine->registerGlobalObject("Settings", SettingsScriptingInterface::getInstance());
scriptEngine->registerGlobalObject("AudioDevice", AudioDeviceScriptingInterface::getInstance());
QThread* workerThread = new QThread(this);


@ -29,12 +29,12 @@
#include <ParticleEditPacketSender.h>
#include <ScriptEngine.h>
#include <OctreeQuery.h>
#include <ViewFrustum.h>
#include <VoxelEditPacketSender.h>
#include "Audio.h"
#include "BandwidthMeter.h"
#include "BuckyBalls.h"
#include "Camera.h"
#include "ControllerScriptingInterface.h"
#include "DatagramProcessor.h"
#include "Environment.h"
#include "FileLogger.h"
@ -44,13 +44,6 @@
#include "PacketHeaders.h"
#include "ParticleTreeRenderer.h"
#include "Stars.h"
#include "ViewFrustum.h"
#include "VoxelFade.h"
#include "VoxelEditPacketSender.h"
#include "VoxelHideShowThread.h"
#include "VoxelPacketProcessor.h"
#include "VoxelSystem.h"
#include "VoxelImporter.h"
#include "avatar/Avatar.h"
#include "avatar/AvatarManager.h"
#include "avatar/MyAvatar.h"
@ -63,14 +56,21 @@
#include "renderer/PointShader.h"
#include "renderer/TextureCache.h"
#include "renderer/VoxelShader.h"
#include "scripting/ControllerScriptingInterface.h"
#include "ui/BandwidthDialog.h"
#include "ui/BandwidthMeter.h"
#include "ui/OctreeStatsDialog.h"
#include "ui/RearMirrorTools.h"
#include "ui/SnapshotShareDialog.h"
#include "ui/LodToolsDialog.h"
#include "ui/LogDialog.h"
#include "ui/UpdateDialog.h"
#include "ui/Overlays.h"
#include "ui/overlays/Overlays.h"
#include "voxels/VoxelFade.h"
#include "voxels/VoxelHideShowThread.h"
#include "voxels/VoxelImporter.h"
#include "voxels/VoxelPacketProcessor.h"
#include "voxels/VoxelSystem.h"
class QAction;
@ -156,6 +156,7 @@ public:
Audio* getAudio() { return &_audio; }
Camera* getCamera() { return &_myCamera; }
ViewFrustum* getViewFrustum() { return &_viewFrustum; }
ViewFrustum* getShadowViewFrustum() { return &_shadowViewFrustum; }
VoxelSystem* getVoxels() { return &_voxels; }
VoxelTree* getVoxelTree() { return _voxels.getTree(); }
ParticleTreeRenderer* getParticles() { return &_particles; }
@ -172,7 +173,11 @@ public:
Visage* getVisage() { return &_visage; }
SixenseManager* getSixenseManager() { return &_sixenseManager; }
BandwidthMeter* getBandwidthMeter() { return &_bandwidthMeter; }
QSettings* getSettings() { return _settings; }
/// if you need to access the application settings, use lockSettings()/unlockSettings()
QSettings* lockSettings() { _settingsMutex.lock(); return _settings; }
void unlockSettings() { _settingsMutex.unlock(); }
QMainWindow* getWindow() { return _window; }
NodeToOctreeSceneStats* getOcteeSceneStats() { return &_octreeServerSceneStats; }
void lockOctreeSceneStats() { _octreeSceneStatsLock.lockForRead(); }
@ -353,6 +358,7 @@ private:
DatagramProcessor _datagramProcessor;
QNetworkAccessManager* _networkAccessManager;
QMutex _settingsMutex;
QSettings* _settings;
glm::vec3 _gravity;
@ -386,6 +392,7 @@ private:
ViewFrustum _viewFrustum; // current state of view frustum, perspective, orientation, etc.
ViewFrustum _lastQueriedViewFrustum; /// last view frustum used to query octree servers (voxels, particles)
ViewFrustum _shadowViewFrustum;
quint64 _lastQueriedTime;
Oscilloscope _audioScope;


@ -92,6 +92,16 @@ void Audio::reset() {
_ringBuffer.reset();
}
QAudioDeviceInfo getNamedAudioDeviceForMode(QAudio::Mode mode, const QString& deviceName) {
QAudioDeviceInfo result;
foreach(QAudioDeviceInfo audioDevice, QAudioDeviceInfo::availableDevices(mode)) {
if (audioDevice.deviceName().trimmed() == deviceName.trimmed()) {
result = audioDevice;
}
}
return result;
}
QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
#ifdef __APPLE__
if (QAudioDeviceInfo::availableDevices(mode).size() > 1) {
@ -249,27 +259,105 @@ void Audio::start() {
_desiredOutputFormat.setChannelCount(2);
QAudioDeviceInfo inputDeviceInfo = defaultAudioDeviceForMode(QAudio::AudioInput);
qDebug() << "The audio input device is" << inputDeviceInfo.deviceName();
qDebug() << "The default audio input device is" << inputDeviceInfo.deviceName();
bool inputFormatSupported = switchInputToAudioDevice(inputDeviceInfo.deviceName());
QAudioDeviceInfo outputDeviceInfo = defaultAudioDeviceForMode(QAudio::AudioOutput);
qDebug() << "The default audio output device is" << outputDeviceInfo.deviceName();
bool outputFormatSupported = switchOutputToAudioDevice(outputDeviceInfo.deviceName());
if (adjustedFormatForAudioDevice(inputDeviceInfo, _desiredInputFormat, _inputFormat)) {
qDebug() << "The format to be used for audio input is" << _inputFormat;
if (!inputFormatSupported || !outputFormatSupported) {
qDebug() << "Unable to set up audio I/O because of a problem with input or output formats.";
}
}
QString Audio::getDefaultDeviceName(QAudio::Mode mode) {
QAudioDeviceInfo deviceInfo = defaultAudioDeviceForMode(mode);
return deviceInfo.deviceName();
}
QVector<QString> Audio::getDeviceNames(QAudio::Mode mode) {
QVector<QString> deviceNames;
foreach(QAudioDeviceInfo audioDevice, QAudioDeviceInfo::availableDevices(mode)) {
deviceNames << audioDevice.deviceName().trimmed();
}
return deviceNames;
}
bool Audio::switchInputToAudioDevice(const QString& inputDeviceName) {
bool supportedFormat = false;
// cleanup any previously initialized device
if (_audioInput) {
_audioInput->stop();
disconnect(_inputDevice, 0, 0, 0);
_inputDevice = NULL;
delete _audioInput;
_audioInput = NULL;
_numInputCallbackBytes = 0;
_inputAudioDeviceName = "";
}
QAudioDeviceInfo inputDeviceInfo = getNamedAudioDeviceForMode(QAudio::AudioInput, inputDeviceName);
if (!inputDeviceInfo.isNull()) {
qDebug() << "The audio input device " << inputDeviceInfo.deviceName() << "is available.";
_inputAudioDeviceName = inputDeviceInfo.deviceName().trimmed();
if (adjustedFormatForAudioDevice(inputDeviceInfo, _desiredInputFormat, _inputFormat)) {
qDebug() << "The format to be used for audio input is" << _inputFormat;
_audioInput = new QAudioInput(inputDeviceInfo, _inputFormat, this);
_numInputCallbackBytes = NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL * _inputFormat.channelCount()
* (_inputFormat.sampleRate() / SAMPLE_RATE)
/ CALLBACK_ACCELERATOR_RATIO;
_audioInput->setBufferSize(_numInputCallbackBytes);
_audioInput = new QAudioInput(inputDeviceInfo, _inputFormat, this);
_numInputCallbackBytes = NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL * _inputFormat.channelCount()
* (_inputFormat.sampleRate() / SAMPLE_RATE)
/ CALLBACK_ACCELERATOR_RATIO;
_audioInput->setBufferSize(_numInputCallbackBytes);
QAudioDeviceInfo outputDeviceInfo = defaultAudioDeviceForMode(QAudio::AudioOutput);
qDebug() << "The audio output device is" << outputDeviceInfo.deviceName();
if (adjustedFormatForAudioDevice(outputDeviceInfo, _desiredOutputFormat, _outputFormat)) {
qDebug() << "The format to be used for audio output is" << _outputFormat;
// how do we want to handle input working, but output not working?
_inputRingBuffer.resizeForFrameSize(_numInputCallbackBytes * CALLBACK_ACCELERATOR_RATIO / sizeof(int16_t));
_inputDevice = _audioInput->start();
connect(_inputDevice, SIGNAL(readyRead()), this, SLOT(handleAudioInput()));
supportedFormat = true;
}
}
return supportedFormat;
}
bool Audio::switchOutputToAudioDevice(const QString& outputDeviceName) {
bool supportedFormat = false;
// cleanup any previously initialized device
if (_audioOutput) {
_audioOutput->stop();
disconnect(_outputDevice, 0, 0, 0);
_outputDevice = NULL;
delete _audioOutput;
_audioOutput = NULL;
_numInputCallbackBytes = 0;
_loopbackOutputDevice = NULL;
delete _loopbackAudioOutput;
_loopbackAudioOutput = NULL;
_proceduralOutputDevice = NULL;
delete _proceduralAudioOutput;
_proceduralAudioOutput = NULL;
_outputAudioDeviceName = "";
}
QAudioDeviceInfo outputDeviceInfo = getNamedAudioDeviceForMode(QAudio::AudioOutput, outputDeviceName);
if (!outputDeviceInfo.isNull()) {
qDebug() << "The audio output device " << outputDeviceInfo.deviceName() << "is available.";
_outputAudioDeviceName = outputDeviceInfo.deviceName().trimmed();
if (adjustedFormatForAudioDevice(outputDeviceInfo, _desiredOutputFormat, _outputFormat)) {
qDebug() << "The format to be used for audio output is" << _outputFormat;
// setup our general output device for audio-mixer audio
_audioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
_audioOutput->setBufferSize(_ringBuffer.getSampleCapacity() * sizeof(int16_t));
@ -278,17 +366,15 @@ void Audio::start() {
// setup a loopback audio output device
_loopbackAudioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
// setup a procedural audio output device
_proceduralAudioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
gettimeofday(&_lastReceiveTime, NULL);
supportedFormat = true;
}
return;
}
qDebug() << "Unable to set up audio I/O because of a problem with input or output formats.";
return supportedFormat;
}
void Audio::handleAudioInput() {
@ -309,13 +395,15 @@ void Audio::handleAudioInput() {
if (Menu::getInstance()->isOptionChecked(MenuOption::EchoLocalAudio) && !_muted) {
// if this person wants local loopback add that to the locally injected audio
if (!_loopbackOutputDevice) {
if (!_loopbackOutputDevice && _loopbackAudioOutput) {
// we didn't have the loopback output device going so set that up now
_loopbackOutputDevice = _loopbackAudioOutput->start();
}
if (_inputFormat == _outputFormat) {
_loopbackOutputDevice->write(inputByteArray);
if (_loopbackOutputDevice) {
_loopbackOutputDevice->write(inputByteArray);
}
} else {
static float loopbackOutputToInputRatio = (_outputFormat.sampleRate() / (float) _inputFormat.sampleRate())
* (_outputFormat.channelCount() / _inputFormat.channelCount());
@ -326,7 +414,9 @@ void Audio::handleAudioInput() {
inputByteArray.size() / sizeof(int16_t),
loopBackByteArray.size() / sizeof(int16_t), _inputFormat, _outputFormat);
_loopbackOutputDevice->write(loopBackByteArray);
if (_loopbackOutputDevice) {
_loopbackOutputDevice->write(loopBackByteArray);
}
}
}
@ -455,7 +545,7 @@ void Audio::handleAudioInput() {
addProceduralSounds(monoAudioSamples,
NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
if (!_proceduralOutputDevice) {
if (!_proceduralOutputDevice && _proceduralAudioOutput) {
_proceduralOutputDevice = _proceduralAudioOutput->start();
}
@ -469,7 +559,9 @@ void Audio::handleAudioInput() {
NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL * 4,
_desiredInputFormat, _outputFormat);
_proceduralOutputDevice->write(proceduralOutput);
if (_proceduralOutputDevice) {
_proceduralOutputDevice->write(proceduralOutput);
}
NodeList* nodeList = NodeList::getInstance();
SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
@ -553,7 +645,7 @@ void Audio::addReceivedAudioToBuffer(const QByteArray& audioByteArray) {
static float networkOutputToOutputRatio = (_desiredOutputFormat.sampleRate() / (float) _outputFormat.sampleRate())
* (_desiredOutputFormat.channelCount() / (float) _outputFormat.channelCount());
if (!_ringBuffer.isStarved() && _audioOutput->bytesFree() == _audioOutput->bufferSize()) {
if (!_ringBuffer.isStarved() && _audioOutput && _audioOutput->bytesFree() == _audioOutput->bufferSize()) {
// we don't have any audio data left in the output buffer
// we just starved
//qDebug() << "Audio output just starved.";


@ -19,17 +19,20 @@
#include "InterfaceConfig.h"
#include <QAudio>
#include <QAudioInput>
#include <QGLWidget>
#include <QtCore/QObject>
#include <QtCore/QVector>
#include <QtMultimedia/QAudioFormat>
#include <QVector>
#include <AbstractAudioInterface.h>
#include <AudioRingBuffer.h>
#include <StdDev.h>
#include "Oscilloscope.h"
#include "ui/Oscilloscope.h"
#include <QGLWidget>
static const int NUM_AUDIO_CHANNELS = 2;
@ -72,7 +75,7 @@ public:
int getNetworkSampleRate() { return SAMPLE_RATE; }
int getNetworkBufferLengthSamplesPerChannel() { return NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL; }
public slots:
void start();
void addReceivedAudioToBuffer(const QByteArray& audioByteArray);
@ -83,10 +86,21 @@ public slots:
virtual void handleAudioByteArray(const QByteArray& audioByteArray);
bool switchInputToAudioDevice(const QString& inputDeviceName);
bool switchOutputToAudioDevice(const QString& outputDeviceName);
QString getDeviceName(QAudio::Mode mode) const { return (mode == QAudio::AudioInput) ?
_inputAudioDeviceName : _outputAudioDeviceName; }
QString getDefaultDeviceName(QAudio::Mode mode);
QVector<QString> getDeviceNames(QAudio::Mode mode);
float getInputVolume() const { return (_audioInput) ? _audioInput->volume() : 0.0f; }
void setInputVolume(float volume) { if (_audioInput) _audioInput->setVolume(volume); }
signals:
bool muteToggled();
private:
QByteArray firstInputFrame;
QAudioInput* _audioInput;
QAudioFormat _desiredInputFormat;
@ -105,6 +119,9 @@ private:
QIODevice* _proceduralOutputDevice;
AudioRingBuffer _inputRingBuffer;
AudioRingBuffer _ringBuffer;
QString _inputAudioDeviceName;
QString _outputAudioDeviceName;
Oscilloscope* _scope;
StDev _stdev;
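The new slots and accessors above are what the AudioDevice scripting interface added in this commit would call under the hood; that class is not shown in this excerpt, so the following is only a hedged usage sketch built from the declarations visible here:

// Hedged sketch, assuming the Application/Audio accessors shown in this commit;
// this is not the actual AudioDeviceScriptingInterface implementation.
Audio* audio = Application::getInstance()->getAudio();
QVector<QString> inputs = audio->getDeviceNames(QAudio::AudioInput);
if (!inputs.isEmpty() && audio->switchInputToAudioDevice(inputs.first())) {
    qDebug() << "Now capturing from" << audio->getDeviceName(QAudio::AudioInput);
    audio->setInputVolume(0.5f); // illustrative volume value
}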


@ -33,11 +33,11 @@
#include "Application.h"
#include "Menu.h"
#include "MenuScriptingInterface.h"
#include "scripting/MenuScriptingInterface.h"
#include "Util.h"
#include "InfoView.h"
#include "ui/InfoView.h"
#include "ui/MetavoxelEditor.h"
#include "ModelBrowser.h"
#include "ui/ModelBrowser.h"
Menu* Menu::_instance = NULL;
@ -374,8 +374,10 @@ Menu::~Menu() {
}
void Menu::loadSettings(QSettings* settings) {
bool lockedSettings = false;
if (!settings) {
settings = Application::getInstance()->getSettings();
settings = Application::getInstance()->lockSettings();
lockedSettings = true;
}
_audioJitterBufferSamples = loadSetting(settings, "audioJitterBufferSamples", 0);
@ -404,11 +406,17 @@ void Menu::loadSettings(QSettings* settings) {
// TODO: cache more settings in MyAvatar that are checked with very high frequency.
MyAvatar* myAvatar = Application::getInstance()->getAvatar();
myAvatar->updateCollisionFlags();
if (lockedSettings) {
Application::getInstance()->unlockSettings();
}
}
void Menu::saveSettings(QSettings* settings) {
bool lockedSettings = false;
if (!settings) {
settings = Application::getInstance()->getSettings();
settings = Application::getInstance()->lockSettings();
lockedSettings = true;
}
settings->setValue("audioJitterBufferSamples", _audioJitterBufferSamples);
@ -430,6 +438,9 @@ void Menu::saveSettings(QSettings* settings) {
Application::getInstance()->getAvatar()->saveData(settings);
NodeList::getInstance()->saveData(settings);
if (lockedSettings) {
Application::getInstance()->unlockSettings();
}
}
void Menu::importSettings() {
@ -1403,9 +1414,8 @@ void Menu::removeMenu(const QString& menuName) {
if (action) {
QString finalMenuPart;
QMenu* parent = getMenuParent(menuName, finalMenuPart);
if (parent) {
removeAction(parent, finalMenuPart);
parent->removeAction(action);
} else {
QMenuBar::removeAction(action);
}
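
The loadSettings/saveSettings changes above replace the bare getSettings() accessor with a lockSettings()/unlockSettings() pair so concurrent readers and writers (for example the script engine) are serialized. A minimal sketch of that discipline, using a key that appears in this diff; the surrounding helper function is illustrative only:

    // Sketch of the locked-settings pattern; only lockSettings()/unlockSettings()
    // and the key come from this diff.
    #include <QtCore/QSettings>
    #include "Application.h"

    void readJitterBufferSetting(int& samplesOut) {   // hypothetical helper
        QSettings* settings = Application::getInstance()->lockSettings();
        samplesOut = settings->value("audioJitterBufferSamples", 0).toInt();
        Application::getInstance()->unlockSettings();   // release before doing anything slow
    }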

View file

@ -189,10 +189,12 @@ static TextRenderer* textRenderer(TextRendererType type) {
return displayNameRenderer;
}
void Avatar::render(const glm::vec3& cameraPosition, bool forShadowMap) {
void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode) {
// simple frustum check
float boundingRadius = getBillboardSize();
if (Application::getInstance()->getViewFrustum()->sphereInFrustum(cameraPosition, boundingRadius) == ViewFrustum::OUTSIDE) {
ViewFrustum* frustum = (renderMode == Avatar::SHADOW_RENDER_MODE) ?
Application::getInstance()->getShadowViewFrustum() : Application::getInstance()->getViewFrustum();
if (frustum->sphereInFrustum(_position, boundingRadius) == ViewFrustum::OUTSIDE) {
return;
}
@ -202,11 +204,11 @@ void Avatar::render(const glm::vec3& cameraPosition, bool forShadowMap) {
{
// glow when moving far away
const float GLOW_DISTANCE = 20.0f;
Glower glower(_moving && distanceToTarget > GLOW_DISTANCE && !forShadowMap ? 1.0f : 0.0f);
Glower glower(_moving && distanceToTarget > GLOW_DISTANCE && renderMode == NORMAL_RENDER_MODE ? 1.0f : 0.0f);
// render body
if (Menu::getInstance()->isOptionChecked(MenuOption::Avatars)) {
renderBody(forShadowMap);
renderBody(renderMode);
}
if (Menu::getInstance()->isOptionChecked(MenuOption::RenderSkeletonCollisionProxies)) {
_skeletonModel.renderCollisionProxies(0.7f);
@ -230,7 +232,8 @@ void Avatar::render(const glm::vec3& cameraPosition, bool forShadowMap) {
float angle = abs(angleBetween(toTarget + delta, toTarget - delta));
float sphereRadius = getHead()->getAverageLoudness() * SPHERE_LOUDNESS_SCALING;
if (!forShadowMap && (sphereRadius > MIN_SPHERE_SIZE) && (angle < MAX_SPHERE_ANGLE) && (angle > MIN_SPHERE_ANGLE)) {
if (renderMode == NORMAL_RENDER_MODE && (sphereRadius > MIN_SPHERE_SIZE) &&
(angle < MAX_SPHERE_ANGLE) && (angle > MIN_SPHERE_ANGLE)) {
glColor4f(SPHERE_COLOR[0], SPHERE_COLOR[1], SPHERE_COLOR[2], 1.f - angle / MAX_SPHERE_ANGLE);
glPushMatrix();
glTranslatef(_position.x, _position.y, _position.z);
@ -242,8 +245,8 @@ void Avatar::render(const glm::vec3& cameraPosition, bool forShadowMap) {
}
const float DISPLAYNAME_DISTANCE = 10.0f;
setShowDisplayName(!forShadowMap && distanceToTarget < DISPLAYNAME_DISTANCE);
if (forShadowMap) {
setShowDisplayName(renderMode == NORMAL_RENDER_MODE && distanceToTarget < DISPLAYNAME_DISTANCE);
if (renderMode != NORMAL_RENDER_MODE) {
return;
}
renderDisplayName();
@ -306,17 +309,16 @@ glm::quat Avatar::computeRotationFromBodyToWorldUp(float proportion) const {
return glm::angleAxis(angle * proportion, axis);
}
void Avatar::renderBody(bool forShadowMap) {
void Avatar::renderBody(RenderMode renderMode) {
if (_shouldRenderBillboard || !(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
// render the billboard until both models are loaded
if (forShadowMap) {
return;
if (renderMode != SHADOW_RENDER_MODE) {
renderBillboard();
}
renderBillboard();
return;
}
_skeletonModel.render(1.0f);
getHead()->render(1.0f);
_skeletonModel.render(1.0f, renderMode == SHADOW_RENDER_MODE);
getHead()->render(1.0f, renderMode == SHADOW_RENDER_MODE);
getHand()->render(false);
}

View file

@ -74,7 +74,10 @@ public:
void init();
void simulate(float deltaTime);
virtual void render(const glm::vec3& cameraPosition, bool forShadowMap);
enum RenderMode { NORMAL_RENDER_MODE, SHADOW_RENDER_MODE, MIRROR_RENDER_MODE };
virtual void render(const glm::vec3& cameraPosition, RenderMode renderMode = NORMAL_RENDER_MODE);
//setters
void setDisplayingLookatVectors(bool displayingLookatVectors) { getHead()->setRenderLookatVectors(displayingLookatVectors); }
@ -181,7 +184,7 @@ protected:
float getPelvisToHeadLength() const;
void renderDisplayName();
virtual void renderBody(bool forShadowMap);
virtual void renderBody(RenderMode renderMode);
private:

View file

@ -72,7 +72,7 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
simulateAvatarFades(deltaTime);
}
void AvatarManager::renderAvatars(bool forShadowMapOrMirror, bool selfAvatarOnly) {
void AvatarManager::renderAvatars(Avatar::RenderMode renderMode, bool selfAvatarOnly) {
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::renderAvatars()");
bool renderLookAtVectors = Menu::getInstance()->isOptionChecked(MenuOption::LookAtVectors);
@ -85,13 +85,13 @@ void AvatarManager::renderAvatars(bool forShadowMapOrMirror, bool selfAvatarOnly
if (!avatar->isInitialized()) {
continue;
}
avatar->render(cameraPosition, forShadowMapOrMirror);
avatar->render(cameraPosition, renderMode);
avatar->setDisplayingLookatVectors(renderLookAtVectors);
}
renderAvatarFades(cameraPosition, forShadowMapOrMirror);
renderAvatarFades(cameraPosition, renderMode);
} else {
// just render myAvatar
_myAvatar->render(cameraPosition, forShadowMapOrMirror);
_myAvatar->render(cameraPosition, renderMode);
_myAvatar->setDisplayingLookatVectors(renderLookAtVectors);
}
}
@ -114,14 +114,14 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
}
}
void AvatarManager::renderAvatarFades(const glm::vec3& cameraPosition, bool forShadowMap) {
void AvatarManager::renderAvatarFades(const glm::vec3& cameraPosition, Avatar::RenderMode renderMode) {
// render avatar fades
Glower glower(forShadowMap ? 0.0f : 1.0f);
Glower glower(renderMode == Avatar::NORMAL_RENDER_MODE ? 1.0f : 0.0f);
foreach(const AvatarSharedPointer& fadingAvatar, _avatarFades) {
Avatar* avatar = static_cast<Avatar*>(fadingAvatar.data());
if (avatar != static_cast<Avatar*>(_myAvatar.data())) {
avatar->render(cameraPosition, forShadowMap);
avatar->render(cameraPosition, renderMode);
}
}
}

View file

@ -29,7 +29,7 @@ public:
MyAvatar* getMyAvatar() { return _myAvatar.data(); }
void updateOtherAvatars(float deltaTime);
void renderAvatars(bool forShadowMapOrMirror = false, bool selfAvatarOnly = false);
void renderAvatars(Avatar::RenderMode renderMode, bool selfAvatarOnly = false);
void clearOtherAvatars();
@ -45,7 +45,7 @@ private:
void processKillAvatar(const QByteArray& datagram);
void simulateAvatarFades(float deltaTime);
void renderAvatarFades(const glm::vec3& cameraPosition, bool forShadowMap);
void renderAvatarFades(const glm::vec3& cameraPosition, Avatar::RenderMode renderMode);
// virtual override
AvatarHash::iterator erase(const AvatarHash::iterator& iterator);
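
With the boolean forShadowMap/forceRenderHead flags replaced by Avatar::RenderMode, callers name the render pass explicitly instead of passing an ambiguous bool. A hedged sketch of how the application's passes might call into AvatarManager; the pass structure and the avatarManager reference are assumptions, only the signatures come from this diff:

    #include "avatar/AvatarManager.h"   // include path assumed

    void renderAvatarPasses(AvatarManager& avatarManager) {   // hypothetical
        // Shadow-map pass: shadow shaders, no glow, no display names.
        avatarManager.renderAvatars(Avatar::SHADOW_RENDER_MODE);

        // Mirror pass (illustrative combination); selfAvatarOnly limits it to the local avatar.
        avatarManager.renderAvatars(Avatar::MIRROR_RENDER_MODE, true);

        // Normal pass.
        avatarManager.renderAvatars(Avatar::NORMAL_RENDER_MODE);
    }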

View file

@ -45,13 +45,6 @@ void FaceModel::simulate(float deltaTime) {
Model::simulate(deltaTime, true, newJointStates);
}
bool FaceModel::render(float alpha) {
if (!Model::render(alpha)) {
return false;
}
return true;
}
void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
// get the rotation axes in joint space and use them to adjust the rotation
glm::mat3 axes = glm::mat3_cast(_rotation);

View file

@ -22,7 +22,6 @@ public:
FaceModel(Head* owningHead);
void simulate(float deltaTime);
bool render(float alpha);
protected:

View file

@ -27,10 +27,7 @@ Hand::Hand(Avatar* owningAvatar) :
HandData((AvatarData*)owningAvatar),
_owningAvatar(owningAvatar),
_renderAlpha(1.0),
_collisionCenter(0,0,0),
_collisionAge(0),
_collisionDuration(0)
_renderAlpha(1.0)
{
}
@ -42,10 +39,6 @@ void Hand::reset() {
void Hand::simulate(float deltaTime, bool isMine) {
if (_collisionAge > 0.f) {
_collisionAge += deltaTime;
}
calculateGeometry();
if (isMine) {
@ -222,26 +215,6 @@ void Hand::collideAgainstOurself() {
}
}
void Hand::handleVoxelCollision(PalmData* palm, const glm::vec3& fingerTipPosition, VoxelTreeElement* voxel, float deltaTime) {
// Collision between finger and a voxel plays sound
const float LOWEST_FREQUENCY = 100.f;
const float HERTZ_PER_RGB = 3.f;
const float DECAY_PER_SAMPLE = 0.0005f;
const float DURATION_MAX = 2.0f;
const float MIN_VOLUME = 0.1f;
float volume = MIN_VOLUME + glm::clamp(glm::length(palm->getRawVelocity()), 0.f, (1.f - MIN_VOLUME));
float duration = volume;
_collisionCenter = fingerTipPosition;
_collisionAge = deltaTime;
_collisionDuration = duration;
int voxelBrightness = voxel->getColor()[0] + voxel->getColor()[1] + voxel->getColor()[2];
float frequency = LOWEST_FREQUENCY + (voxelBrightness * HERTZ_PER_RGB);
Application::getInstance()->getAudio()->startDrumSound(volume,
frequency,
DURATION_MAX,
DECAY_PER_SAMPLE);
}
void Hand::calculateGeometry() {
// generate finger tip balls....
_leapFingerTipBalls.clear();
@ -312,21 +285,6 @@ void Hand::render(bool isMine) {
renderLeapHands(isMine);
}
if (isMine) {
// If hand/voxel collision has happened, render a little expanding sphere
if (_collisionAge > 0.f) {
float opacity = glm::clamp(1.f - (_collisionAge / _collisionDuration), 0.f, 1.f);
glColor4f(1, 0, 0, 0.5 * opacity);
glPushMatrix();
glTranslatef(_collisionCenter.x, _collisionCenter.y, _collisionCenter.z);
glutSolidSphere(_collisionAge * 0.25f, 20, 20);
glPopMatrix();
if (_collisionAge > _collisionDuration) {
_collisionAge = 0.f;
}
}
}
glEnable(GL_DEPTH_TEST);
glEnable(GL_RESCALE_NORMAL);

View file

@ -22,7 +22,6 @@
#include "InterfaceConfig.h"
#include "world.h"
#include "VoxelSystem.h"
class Avatar;
@ -72,13 +71,6 @@ private:
std::vector<HandBall> _leapFingerTipBalls;
std::vector<HandBall> _leapFingerRootBalls;
glm::vec3 _lastFingerAddVoxel, _lastFingerDeleteVoxel;
VoxelDetail _collidingVoxel;
glm::vec3 _collisionCenter;
float _collisionAge;
float _collisionDuration;
// private methods
void setLeapHands(const std::vector<glm::vec3>& handPositions,
const std::vector<glm::vec3>& handNormals);
@ -88,8 +80,6 @@ private:
void calculateGeometry();
void handleVoxelCollision(PalmData* palm, const glm::vec3& fingerTipPosition, VoxelTreeElement* voxel, float deltaTime);
void playSlaps(PalmData& palm, Avatar* avatar);
};

View file

@ -168,8 +168,8 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
_eyePosition = calculateAverageEyePosition();
}
void Head::render(float alpha) {
if (_faceModel.render(alpha) && _renderLookatVectors) {
void Head::render(float alpha, bool forShadowMap) {
if (_faceModel.render(alpha, forShadowMap) && _renderLookatVectors) {
renderLookatVectors(_leftEyePosition, _rightEyePosition, _lookAtPosition);
}
}

View file

@ -37,7 +37,7 @@ public:
void init();
void reset();
void simulate(float deltaTime, bool isMine, bool billboard = false);
void render(float alpha);
void render(float alpha, bool forShadowMap);
void setScale(float scale);
void setPosition(glm::vec3 position) { _position = position; }
void setGravity(glm::vec3 gravity) { _gravity = gravity; }

View file

@ -26,7 +26,6 @@
#include "Menu.h"
#include "MyAvatar.h"
#include "Physics.h"
#include "VoxelSystem.h"
#include "devices/Faceshift.h"
#include "devices/OculusManager.h"
#include "ui/TextRenderer.h"
@ -451,12 +450,12 @@ void MyAvatar::renderDebugBodyPoints() {
}
// virtual
void MyAvatar::render(const glm::vec3& cameraPosition, bool forShadowMapOrMirror) {
void MyAvatar::render(const glm::vec3& cameraPosition, RenderMode renderMode) {
// don't render if we've been asked to disable local rendering
if (!_shouldRender) {
return; // exit early
}
Avatar::render(cameraPosition, forShadowMapOrMirror);
Avatar::render(cameraPosition, renderMode);
}
void MyAvatar::renderHeadMouse() const {
@ -639,20 +638,20 @@ void MyAvatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
_billboardValid = false;
}
void MyAvatar::renderBody(bool forceRenderHead) {
void MyAvatar::renderBody(RenderMode renderMode) {
if (!(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
return; // wait until both models are loaded
}
// Render the body's voxels and head
_skeletonModel.render(1.0f);
_skeletonModel.render(1.0f, renderMode == SHADOW_RENDER_MODE);
// Render head so long as the camera isn't inside it
const float RENDER_HEAD_CUTOFF_DISTANCE = 0.40f;
Camera* myCamera = Application::getInstance()->getCamera();
if (forceRenderHead || (glm::length(myCamera->getPosition() - getHead()->calculateAverageEyePosition()) >
if (renderMode != NORMAL_RENDER_MODE || (glm::length(myCamera->getPosition() - getHead()->calculateAverageEyePosition()) >
RENDER_HEAD_CUTOFF_DISTANCE * _scale)) {
getHead()->render(1.0f);
getHead()->render(1.0f, renderMode == SHADOW_RENDER_MODE);
}
getHand()->render(true);
}

View file

@ -35,8 +35,8 @@ public:
void simulate(float deltaTime);
void updateFromGyros(float deltaTime);
void render(const glm::vec3& cameraPosition, bool forShadowMapOrMirror = false);
void renderBody(bool forceRenderHead);
void render(const glm::vec3& cameraPosition, RenderMode renderMode = NORMAL_RENDER_MODE);
void renderBody(RenderMode renderMode);
void renderDebugBodyPoints();
void renderHeadMouse() const;

View file

@ -62,17 +62,6 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
}
}
bool SkeletonModel::render(float alpha) {
if (_jointStates.isEmpty()) {
return false;
}
Model::render(alpha);
return true;
}
void SkeletonModel::getHandShapes(int jointIndex, QVector<const Shape*>& shapes) const {
if (jointIndex == -1) {
return;

View file

@ -23,7 +23,6 @@ public:
SkeletonModel(Avatar* owningAvatar);
void simulate(float deltaTime, bool fullUpdate = true);
bool render(float alpha);
/// \param jointIndex index of hand joint
/// \param shapes[out] list in which is stored pointers to hand shapes

View file

@ -43,11 +43,14 @@ Model::~Model() {
ProgramObject Model::_program;
ProgramObject Model::_normalMapProgram;
ProgramObject Model::_shadowProgram;
ProgramObject Model::_skinProgram;
ProgramObject Model::_skinNormalMapProgram;
ProgramObject Model::_skinShadowProgram;
int Model::_normalMapTangentLocation;
Model::SkinLocations Model::_skinLocations;
Model::SkinLocations Model::_skinNormalMapLocations;
Model::SkinLocations Model::_skinShadowLocations;
void Model::initSkinProgram(ProgramObject& program, Model::SkinLocations& locations) {
program.bind();
@ -93,6 +96,11 @@ void Model::init() {
_normalMapTangentLocation = _normalMapProgram.attributeLocation("tangent");
_normalMapProgram.release();
_shadowProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() + "shaders/model_shadow.vert");
_shadowProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() +
"shaders/model_shadow.frag");
_shadowProgram.link();
_skinProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath()
+ "shaders/skin_model.vert");
_skinProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath()
@ -108,6 +116,14 @@ void Model::init() {
_skinNormalMapProgram.link();
initSkinProgram(_skinNormalMapProgram, _skinNormalMapLocations);
_skinShadowProgram.addShaderFromSourceFile(QGLShader::Vertex,
Application::resourcesPath() + "shaders/skin_model_shadow.vert");
_skinShadowProgram.addShaderFromSourceFile(QGLShader::Fragment,
Application::resourcesPath() + "shaders/model_shadow.frag");
_skinShadowProgram.link();
initSkinProgram(_skinShadowProgram, _skinShadowLocations);
}
}
@ -167,7 +183,7 @@ void Model::simulate(float deltaTime, bool fullUpdate) {
simulate(deltaTime, fullUpdate, updateGeometry());
}
bool Model::render(float alpha) {
bool Model::render(float alpha, bool forShadowMap) {
// render the attachments
foreach (Model* attachment, _attachments) {
attachment->render(alpha);
@ -198,13 +214,13 @@ bool Model::render(float alpha) {
glEnable(GL_ALPHA_TEST);
glAlphaFunc(GL_GREATER, 0.5f * alpha);
renderMeshes(alpha, false);
renderMeshes(alpha, forShadowMap, false);
glDisable(GL_ALPHA_TEST);
// render translucent meshes afterwards, with back face culling
renderMeshes(alpha, true);
renderMeshes(alpha, forShadowMap, true);
glDisable(GL_CULL_FACE);
@ -960,7 +976,7 @@ void Model::deleteGeometry() {
}
}
void Model::renderMeshes(float alpha, bool translucent) {
void Model::renderMeshes(float alpha, bool forShadowMap, bool translucent) {
const FBXGeometry& geometry = _geometry->getFBXGeometry();
const QVector<NetworkMesh>& networkMeshes = _geometry->getMeshes();
@ -985,7 +1001,12 @@ void Model::renderMeshes(float alpha, bool translucent) {
ProgramObject* program = &_program;
ProgramObject* skinProgram = &_skinProgram;
SkinLocations* skinLocations = &_skinLocations;
if (!mesh.tangents.isEmpty()) {
if (forShadowMap) {
program = &_shadowProgram;
skinProgram = &_skinShadowProgram;
skinLocations = &_skinShadowLocations;
} else if (!mesh.tangents.isEmpty()) {
program = &_normalMapProgram;
skinProgram = &_skinNormalMapProgram;
skinLocations = &_skinNormalMapLocations;
@ -1018,7 +1039,7 @@ void Model::renderMeshes(float alpha, bool translucent) {
}
if (mesh.blendshapes.isEmpty()) {
if (!mesh.tangents.isEmpty()) {
if (!(mesh.tangents.isEmpty() || forShadowMap)) {
activeProgram->setAttributeBuffer(tangentLocation, GL_FLOAT, vertexCount * 2 * sizeof(glm::vec3), 3);
activeProgram->enableAttributeArray(tangentLocation);
}
@ -1028,7 +1049,7 @@ void Model::renderMeshes(float alpha, bool translucent) {
(mesh.tangents.size() + mesh.colors.size()) * sizeof(glm::vec3)));
} else {
if (!mesh.tangents.isEmpty()) {
if (!(mesh.tangents.isEmpty() || forShadowMap)) {
activeProgram->setAttributeBuffer(tangentLocation, GL_FLOAT, 0, 3);
activeProgram->enableAttributeArray(tangentLocation);
}
@ -1057,31 +1078,33 @@ void Model::renderMeshes(float alpha, bool translucent) {
continue;
}
// apply material properties
glm::vec4 diffuse = glm::vec4(part.diffuseColor, alpha);
glm::vec4 specular = glm::vec4(part.specularColor, alpha);
glMaterialfv(GL_FRONT, GL_AMBIENT, (const float*)&diffuse);
glMaterialfv(GL_FRONT, GL_DIFFUSE, (const float*)&diffuse);
glMaterialfv(GL_FRONT, GL_SPECULAR, (const float*)&specular);
glMaterialf(GL_FRONT, GL_SHININESS, part.shininess);
Texture* diffuseMap = networkPart.diffuseTexture.data();
if (mesh.isEye) {
if (diffuseMap) {
if (forShadowMap) {
glBindTexture(GL_TEXTURE_2D, 0);
} else {
glm::vec4 diffuse = glm::vec4(part.diffuseColor, alpha);
glm::vec4 specular = glm::vec4(part.specularColor, alpha);
glMaterialfv(GL_FRONT, GL_AMBIENT, (const float*)&diffuse);
glMaterialfv(GL_FRONT, GL_DIFFUSE, (const float*)&diffuse);
glMaterialfv(GL_FRONT, GL_SPECULAR, (const float*)&specular);
glMaterialf(GL_FRONT, GL_SHININESS, part.shininess);
Texture* diffuseMap = networkPart.diffuseTexture.data();
if (mesh.isEye && diffuseMap) {
diffuseMap = (_dilatedTextures[i][j] =
static_cast<DilatableNetworkTexture*>(diffuseMap)->getDilatedTexture(_pupilDilation)).data();
}
glBindTexture(GL_TEXTURE_2D, !diffuseMap ?
Application::getInstance()->getTextureCache()->getWhiteTextureID() : diffuseMap->getID());
if (!mesh.tangents.isEmpty()) {
glActiveTexture(GL_TEXTURE1);
Texture* normalMap = networkPart.normalTexture.data();
glBindTexture(GL_TEXTURE_2D, !normalMap ?
Application::getInstance()->getTextureCache()->getBlueTextureID() : normalMap->getID());
glActiveTexture(GL_TEXTURE0);
}
}
glBindTexture(GL_TEXTURE_2D, !diffuseMap ?
Application::getInstance()->getTextureCache()->getWhiteTextureID() : diffuseMap->getID());
if (!mesh.tangents.isEmpty()) {
glActiveTexture(GL_TEXTURE1);
Texture* normalMap = networkPart.normalTexture.data();
glBindTexture(GL_TEXTURE_2D, !normalMap ?
Application::getInstance()->getTextureCache()->getBlueTextureID() : normalMap->getID());
glActiveTexture(GL_TEXTURE0);
}
glDrawRangeElementsEXT(GL_QUADS, 0, vertexCount - 1, part.quadIndices.size(), GL_UNSIGNED_INT, (void*)offset);
offset += part.quadIndices.size() * sizeof(int);
glDrawRangeElementsEXT(GL_TRIANGLES, 0, vertexCount - 1, part.triangleIndices.size(),
@ -1096,7 +1119,7 @@ void Model::renderMeshes(float alpha, bool translucent) {
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}
if (!mesh.tangents.isEmpty()) {
if (!(mesh.tangents.isEmpty() || forShadowMap)) {
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, 0);
glActiveTexture(GL_TEXTURE0);

View file

@ -58,7 +58,7 @@ public:
void createCollisionShapes();
void updateShapePositions();
void simulate(float deltaTime, bool fullUpdate = true);
bool render(float alpha);
bool render(float alpha = 1.0f, bool forShadowMap = false);
/// Sets the URL of the model to render.
/// \param fallback the URL of a fallback model to render if the requested model fails to load
@ -261,7 +261,7 @@ private:
void applyNextGeometry();
void deleteGeometry();
void renderMeshes(float alpha, bool translucent);
void renderMeshes(float alpha, bool forShadowMap, bool translucent);
QSharedPointer<NetworkGeometry> _baseGeometry; ///< reference required to prevent collection of base
QSharedPointer<NetworkGeometry> _nextBaseGeometry;
@ -283,8 +283,10 @@ private:
static ProgramObject _program;
static ProgramObject _normalMapProgram;
static ProgramObject _shadowProgram;
static ProgramObject _skinProgram;
static ProgramObject _skinNormalMapProgram;
static ProgramObject _skinShadowProgram;
static int _normalMapTangentLocation;
@ -298,6 +300,7 @@ private:
static SkinLocations _skinLocations;
static SkinLocations _skinNormalMapLocations;
static SkinLocations _skinShadowLocations;
static void initSkinProgram(ProgramObject& program, SkinLocations& locations);
static QVector<JointState> createJointStates(const FBXGeometry& geometry);
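
The Model changes above add a dedicated shadow shader pair (_shadowProgram/_skinShadowProgram) and thread a forShadowMap flag through render()/renderMeshes(), so the shadow pass binds depth-only programs and skips materials, textures, and tangents. A minimal caller sketch; the surrounding pass setup is not part of this diff:

    #include "renderer/Model.h"   // include path assumed

    void renderModelTwice(Model& model) {   // hypothetical
        model.render(1.0f, true);   // shadow pass: depth-only shaders, no materials/textures
        model.render();             // normal pass: defaults are alpha = 1.0f, forShadowMap = false
    }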

View file

@ -0,0 +1,69 @@
//
// AudioDeviceScriptingInterface.cpp
// hifi
//
// Created by Brad Hefta-Gaub on 3/23/14
// Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
//
#include "Application.h"
#include "AudioDeviceScriptingInterface.h"
AudioDeviceScriptingInterface* AudioDeviceScriptingInterface::getInstance() {
static AudioDeviceScriptingInterface sharedInstance;
return &sharedInstance;
}
bool AudioDeviceScriptingInterface::setInputDevice(const QString& deviceName) {
bool result;
QMetaObject::invokeMethod(Application::getInstance()->getAudio(), "switchInputToAudioDevice",
Qt::BlockingQueuedConnection,
Q_RETURN_ARG(bool, result),
Q_ARG(const QString&, deviceName));
return result;
}
bool AudioDeviceScriptingInterface::setOutputDevice(const QString& deviceName) {
bool result;
QMetaObject::invokeMethod(Application::getInstance()->getAudio(), "switchOutputToAudioDevice",
Qt::BlockingQueuedConnection,
Q_RETURN_ARG(bool, result),
Q_ARG(const QString&, deviceName));
return result;
}
QString AudioDeviceScriptingInterface::getInputDevice() {
return Application::getInstance()->getAudio()->getDeviceName(QAudio::AudioInput);
}
QString AudioDeviceScriptingInterface::getOutputDevice() {
return Application::getInstance()->getAudio()->getDeviceName(QAudio::AudioOutput);
}
QString AudioDeviceScriptingInterface::getDefaultInputDevice() {
return Application::getInstance()->getAudio()->getDefaultDeviceName(QAudio::AudioInput);
}
QString AudioDeviceScriptingInterface::getDefaultOutputDevice() {
return Application::getInstance()->getAudio()->getDefaultDeviceName(QAudio::AudioOutput);
}
QVector<QString> AudioDeviceScriptingInterface::getInputDevices() {
return Application::getInstance()->getAudio()->getDeviceNames(QAudio::AudioInput);
}
QVector<QString> AudioDeviceScriptingInterface::getOutputDevices() {
return Application::getInstance()->getAudio()->getDeviceNames(QAudio::AudioOutput);
}
float AudioDeviceScriptingInterface::getInputVolume() {
return Application::getInstance()->getAudio()->getInputVolume();
}
void AudioDeviceScriptingInterface::setInputVolume(float volume) {
Application::getInstance()->getAudio()->setInputVolume(volume);
}

View file

@ -0,0 +1,41 @@
//
// AudioDeviceScriptingInterface.h
// hifi
//
// Created by Brad Hefta-Gaub on 3/22/14
// Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
//
#ifndef __hifi__AudioDeviceScriptingInterface__
#define __hifi__AudioDeviceScriptingInterface__
#include <QDebug>
#include <QObject>
#include <QString>
#include "Application.h"
class AudioDeviceScriptingInterface : public QObject {
Q_OBJECT
AudioDeviceScriptingInterface() { };
public:
static AudioDeviceScriptingInterface* getInstance();
public slots:
bool setInputDevice(const QString& deviceName);
bool setOutputDevice(const QString& deviceName);
QString getInputDevice();
QString getOutputDevice();
QString getDefaultInputDevice();
QString getDefaultOutputDevice();
QVector<QString> getInputDevices();
QVector<QString> getOutputDevices();
float getInputVolume();
void setInputVolume(float volume);
};
#endif /* defined(__hifi__AudioDeviceScriptingInterface__) */
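
AudioDeviceScriptingInterface is a thin, thread-safe wrapper: each setter is forwarded to the Audio object via QMetaObject::invokeMethod with Qt::BlockingQueuedConnection, so the caller blocks until the switch has actually been processed on the audio side. A hedged C++ usage sketch; the include path and helper function are assumptions, and the call must come from a thread other than the one Audio lives on:

    #include <QDebug>
    #include "scripting/AudioDeviceScriptingInterface.h"   // path mirrors the other scripting headers (assumed)

    void switchToFirstOutput() {   // hypothetical helper
        AudioDeviceScriptingInterface* audioDevice = AudioDeviceScriptingInterface::getInstance();
        QVector<QString> outputs = audioDevice->getOutputDevices();
        if (!outputs.isEmpty() && audioDevice->getOutputDevice() != outputs.first()) {
            bool switched = audioDevice->setOutputDevice(outputs.first());   // blocks until Audio has switched
            qDebug() << "output device switch" << (switched ? "succeeded" : "failed");
        }
    }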

View file

@ -0,0 +1,36 @@
//
// SettingsScriptingInterface.cpp
// hifi
//
// Created by Brad Hefta-Gaub on 2/25/14
// Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
//
#include "Application.h"
#include "SettingsScriptingInterface.h"
SettingsScriptingInterface* SettingsScriptingInterface::getInstance() {
static SettingsScriptingInterface sharedInstance;
return &sharedInstance;
}
QVariant SettingsScriptingInterface::getValue(const QString& setting) {
QSettings* settings = Application::getInstance()->lockSettings();
QVariant value = settings->value(setting);
Application::getInstance()->unlockSettings();
return value;
}
QVariant SettingsScriptingInterface::getValue(const QString& setting, const QVariant& defaultValue) {
QSettings* settings = Application::getInstance()->lockSettings();
QVariant value = settings->value(setting, defaultValue);
Application::getInstance()->unlockSettings();
return value;
}
void SettingsScriptingInterface::setValue(const QString& setting, const QVariant& value) {
QSettings* settings = Application::getInstance()->lockSettings();
settings->setValue(setting, value);
Application::getInstance()->unlockSettings();
}

View file

@ -0,0 +1,30 @@
//
// SettingsScriptingInterface.h
// hifi
//
// Created by Brad Hefta-Gaub on 3/22/14
// Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
//
#ifndef __hifi__SettingsScriptingInterface__
#define __hifi__SettingsScriptingInterface__
#include <QDebug>
#include <QObject>
#include <QString>
#include "Application.h"
class SettingsScriptingInterface : public QObject {
Q_OBJECT
SettingsScriptingInterface() { };
public:
static SettingsScriptingInterface* getInstance();
public slots:
QVariant getValue(const QString& setting);
QVariant getValue(const QString& setting, const QVariant& defaultValue);
void setValue(const QString& setting, const QVariant& value);
};
#endif /* defined(__hifi__SettingsScriptingInterface__) */
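
SettingsScriptingInterface follows the same singleton pattern and brackets every QSettings access with the lockSettings()/unlockSettings() pair introduced above, so scripts and the UI never race on the settings store. A short usage sketch; the include path, helper function, and setting key are made up for illustration:

    #include "scripting/SettingsScriptingInterface.h"   // include path assumed

    void bumpScriptRunCount() {   // hypothetical helper
        SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance();
        int runCount = settings->getValue("exampleScript/runCount", 0).toInt();
        settings->setValue("exampleScript/runCount", runCount + 1);
    }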

View file

@ -38,11 +38,12 @@ void InfoView::forcedShow() {
}
bool InfoView::shouldShow() {
bool shouldShow = false;
if (_forced) {
return true;
}
QSettings* settings = Application::getInstance()->getSettings();
QSettings* settings = Application::getInstance()->lockSettings();
QString lastVersion = settings->value(SETTINGS_VERSION_KEY).toString();
@ -51,10 +52,12 @@ bool InfoView::shouldShow() {
if (version != QString::null && (lastVersion == QString::null || lastVersion != version)) {
settings->setValue(SETTINGS_VERSION_KEY, version);
return true;
shouldShow = true;
} else {
return false;
}
shouldShow = false;
}
Application::getInstance()->unlockSettings();
return shouldShow;
}
void InfoView::loaded(bool ok) {

View file

@ -12,7 +12,6 @@
#include <SharedUtil.h>
#include "Base3DOverlay.h"
#include "TextRenderer.h"
const glm::vec3 DEFAULT_POSITION = glm::vec3(0.0f, 0.0f, 0.0f);
const float DEFAULT_LINE_WIDTH = 1.0f;

View file

@ -12,10 +12,10 @@
#include <QGLWidget>
#include <QScriptValue>
#include <VoxelSystem.h>
#include <Application.h>
#include "LocalVoxelsOverlay.h"
#include "voxels/VoxelSystem.h"
QMap<QString, WeakVoxelSystemPointer> LocalVoxelsOverlay::_voxelSystemMap;

View file

@ -12,7 +12,7 @@
#include <SharedUtil.h>
#include "TextOverlay.h"
#include "TextRenderer.h"
#include "ui/TextRenderer.h"
TextOverlay::TextOverlay() :
_leftMargin(DEFAULT_MARGIN),

Some files were not shown because too many files have changed in this diff.