Mirror of https://github.com/overte-org/overte.git (synced 2025-04-24 05:53:29 +02:00)
Merge branch 'master' of https://github.com/worklist/hifi

Commit 2b87296cde: 71 changed files with 1945 additions and 1758 deletions
@@ -37,8 +37,8 @@ int hifiSockAddrMeta = qRegisterMetaType<HifiSockAddr>("HifiSockAddr");
AssignmentClient::AssignmentClient(int &argc, char **argv) :
    QCoreApplication(argc, argv),
-   _assignmentServerHostname(DEFAULT_ASSIGNMENT_SERVER_HOSTNAME),
-   _shutdownEventListener(this)
+   _shutdownEventListener(this),
+   _assignmentServerHostname(DEFAULT_ASSIGNMENT_SERVER_HOSTNAME)
{
    LogUtils::init();
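The reordering above is not just cosmetic: C++ initializes members in the order they are declared in the class, not the order they appear in the initializer list, so compilers warn (for example -Wreorder in GCC/Clang) when the two disagree. A minimal sketch of the rule, using a hypothetical class rather than AssignmentClient:

#include <string>

class Worker {
public:
    // Members are initialized in declaration order (_listener, then _serverHostname),
    // so the initializer list is written in that same order to avoid -Wreorder.
    explicit Worker(const std::string& host) :
        _listener(0),
        _serverHostname(host) {
    }

private:
    int _listener;
    std::string _serverHostname;
};

int main() {
    Worker worker("assignment-server.example");   // hypothetical hostname
    (void)worker;
    return 0;
}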
@@ -327,7 +327,6 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream*
    const float FULL_POWER = 1.0f;
    const float SQUARE_ROOT_OF_TWO_OVER_TWO = 0.71f;
    const float HALF_POWER = SQUARE_ROOT_OF_TWO_OVER_TWO;
-   const float QUARTER_POWER = HALF_POWER * HALF_POWER;
-
    const float ONE_OVER_TWO_PI = 1.0f / TWO_PI;
+   const float FILTER_CUTOFF_FREQUENCY_HZ = 1000.0f;

@@ -344,8 +343,8 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream*
        ((-1.0 * ONE_OVER_TWO_PI * (bearingRelativeAngleToSource + PI_OVER_TWO)) + HALF_POWER) :
        ((-1.0 * ONE_OVER_TWO_PI * (bearingRelativeAngleToSource - PI)) + HALF_POWER);

+   float distanceBetween = glm::length(relativePosition);
#if 0
-   float distanceBetween = glm::length(relativePosition);
    qDebug() << "avatar="
             << listeningNodeStream
             << "gainL="
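For context on the constants above: HALF_POWER is roughly sqrt(2)/2, the gain at which each ear carries half the power, and the mixer derives the left/right gains from the bearing of the source relative to the listener. The snippet below is not the mixer's piecewise formula; it is a generic equal-power pan sketch that shows why 0.71 appears as the centered gain (the bearing value is an assumed example):

#include <cmath>
#include <cstdio>

// Equal-power stereo panning: at bearing 0 both ears sit near 0.71 (the -3 dB
// "half power" point); a source fully to one side drives that ear to 1.0.
int main() {
    const float PI_OVER_TWO = 1.5707963f;
    float bearing = 0.5f;                      // assumed example value, radians, positive = right
    float pan = bearing / PI_OVER_TWO;         // normalize to [-1, 1]
    float gainRight = std::cos((1.0f - pan) * PI_OVER_TWO * 0.5f);
    float gainLeft  = std::cos((1.0f + pan) * PI_OVER_TWO * 0.5f);
    std::printf("gainL=%.2f gainR=%.2f\n", gainLeft, gainRight);
    return 0;
}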
@@ -389,7 +389,7 @@ const QByteArray* OctreeQueryNode::getNextNackedPacket() {
    return NULL;
}

-void OctreeQueryNode::parseNackPacket(QByteArray& packet) {
+void OctreeQueryNode::parseNackPacket(const QByteArray& packet) {

    int numBytesPacketHeader = numBytesForPacketHeader(packet);
    const unsigned char* dataAt = reinterpret_cast<const unsigned char*>(packet.data()) + numBytesPacketHeader;
@@ -109,7 +109,7 @@ public:

    OCTREE_PACKET_SEQUENCE getSequenceNumber() const { return _sequenceNumber; }

-   void parseNackPacket(QByteArray& packet);
+   void parseNackPacket(const QByteArray& packet);
    bool hasNextNackedPacket() const;
    const QByteArray* getNextNackedPacket();
@@ -25,6 +25,7 @@

#include "OctreeServer.h"
#include "OctreeServerConsts.h"
+#include "OctreeServerDatagramProcessor.h"

OctreeServer* OctreeServer::_instance = NULL;
int OctreeServer::_clientCount = 0;
@@ -827,55 +828,83 @@ void OctreeServer::parsePayload() {
    }
}

-void OctreeServer::readPendingDatagrams() {
-   QByteArray receivedPacket;
-   HifiSockAddr senderSockAddr;
-
+void OctreeServer::readPendingDatagram(const QByteArray& receivedPacket, const HifiSockAddr& senderSockAddr) {
    NodeList* nodeList = NodeList::getInstance();

-   while (readAvailableDatagram(receivedPacket, senderSockAddr)) {
-       if (nodeList->packetVersionAndHashMatch(receivedPacket)) {
-           PacketType packetType = packetTypeForPacket(receivedPacket);
-           SharedNodePointer matchingNode = nodeList->sendingNodeForPacket(receivedPacket);
-           if (packetType == getMyQueryMessageType()) {
-               // If we got a query packet, then we're talking to an agent, and we
-               // need to make sure we have it in our nodeList.
-               if (matchingNode) {
-                   nodeList->updateNodeWithDataFromPacket(matchingNode, receivedPacket);
-                   OctreeQueryNode* nodeData = (OctreeQueryNode*)matchingNode->getLinkedData();
-                   if (nodeData && !nodeData->isOctreeSendThreadInitalized()) {
-
-                       // NOTE: this is an important aspect of the proper ref counting. The send threads/node data need to
-                       // know that the OctreeServer/Assignment will not get deleted on it while it's still active. The
-                       // solution is to get the shared pointer for the current assignment. We need to make sure this is the
-                       // same SharedAssignmentPointer that was ref counted by the assignment client.
-                       SharedAssignmentPointer sharedAssignment = AssignmentClient::getCurrentAssignment();
-                       nodeData->initializeOctreeSendThread(sharedAssignment, matchingNode);
-                   }
+   if (nodeList->packetVersionAndHashMatch(receivedPacket)) {
+       PacketType packetType = packetTypeForPacket(receivedPacket);
+       SharedNodePointer matchingNode = nodeList->sendingNodeForPacket(receivedPacket);
+       if (packetType == getMyQueryMessageType()) {
+           // If we got a query packet, then we're talking to an agent, and we
+           // need to make sure we have it in our nodeList.
+           if (matchingNode) {
+               nodeList->updateNodeWithDataFromPacket(matchingNode, receivedPacket);
+               OctreeQueryNode* nodeData = (OctreeQueryNode*)matchingNode->getLinkedData();
+               if (nodeData && !nodeData->isOctreeSendThreadInitalized()) {
+
+                   // NOTE: this is an important aspect of the proper ref counting. The send threads/node data need to
+                   // know that the OctreeServer/Assignment will not get deleted on it while it's still active. The
+                   // solution is to get the shared pointer for the current assignment. We need to make sure this is the
+                   // same SharedAssignmentPointer that was ref counted by the assignment client.
+                   SharedAssignmentPointer sharedAssignment = AssignmentClient::getCurrentAssignment();
+                   nodeData->initializeOctreeSendThread(sharedAssignment, matchingNode);
+               }
+           }
+       } else if (packetType == PacketTypeOctreeDataNack) {
+           // If we got a nack packet, then we're talking to an agent, and we
+           // need to make sure we have it in our nodeList.
+           if (matchingNode) {
+               OctreeQueryNode* nodeData = (OctreeQueryNode*)matchingNode->getLinkedData();
+               if (nodeData) {
+                   nodeData->parseNackPacket(receivedPacket);
+               }
+           }
+       } else if (packetType == PacketTypeJurisdictionRequest) {
+           _jurisdictionSender->queueReceivedPacket(matchingNode, receivedPacket);
+       } else if (packetType == PacketTypeSignedTransactionPayment) {
+           handleSignedTransactionPayment(packetType, receivedPacket);
+       } else if (_octreeInboundPacketProcessor && getOctree()->handlesEditPacketType(packetType)) {
+           _octreeInboundPacketProcessor->queueReceivedPacket(matchingNode, receivedPacket);
+       } else {
+           // let processNodeData handle it.
+           NodeList::getInstance()->processNodeData(senderSockAddr, receivedPacket);
+       }
-           } else if (packetType == PacketTypeOctreeDataNack) {
-               // If we got a nack packet, then we're talking to an agent, and we
-               // need to make sure we have it in our nodeList.
-               if (matchingNode) {
-                   OctreeQueryNode* nodeData = (OctreeQueryNode*)matchingNode->getLinkedData();
-                   if (nodeData) {
-                       nodeData->parseNackPacket(receivedPacket);
-                   }
-               }
-           } else if (packetType == PacketTypeJurisdictionRequest) {
-               _jurisdictionSender->queueReceivedPacket(matchingNode, receivedPacket);
-           } else if (packetType == PacketTypeSignedTransactionPayment) {
-               handleSignedTransactionPayment(packetType, receivedPacket);
-           } else if (_octreeInboundPacketProcessor && getOctree()->handlesEditPacketType(packetType)) {
-               _octreeInboundPacketProcessor->queueReceivedPacket(matchingNode, receivedPacket);
-           } else {
-               // let processNodeData handle it.
-               NodeList::getInstance()->processNodeData(senderSockAddr, receivedPacket);
-           }
-       }
    }
}

+void OctreeServer::setupDatagramProcessingThread() {
+   NodeList* nodeList = NodeList::getInstance();
+
+   // we do not want this event loop to be the handler for UDP datagrams, so disconnect
+   disconnect(&nodeList->getNodeSocket(), 0, this, 0);
+
+   // setup a QThread with us as parent that will house the AudioMixerDatagramProcessor
+   _datagramProcessingThread = new QThread(this);
+
+   // create an AudioMixerDatagramProcessor and move it to that thread
+   OctreeServerDatagramProcessor* datagramProcessor = new OctreeServerDatagramProcessor(nodeList->getNodeSocket(), thread());
+   datagramProcessor->moveToThread(_datagramProcessingThread);
+
+   // remove the NodeList as the parent of the node socket
+   nodeList->getNodeSocket().setParent(NULL);
+   nodeList->getNodeSocket().moveToThread(_datagramProcessingThread);
+
+   // let the datagram processor handle readyRead from node socket
+   connect(&nodeList->getNodeSocket(), &QUdpSocket::readyRead,
+           datagramProcessor, &OctreeServerDatagramProcessor::readPendingDatagrams);
+
+   // connect to the datagram processing thread signal that tells us we have to handle a packet
+   connect(datagramProcessor, &OctreeServerDatagramProcessor::packetRequiresProcessing, this, &OctreeServer::readPendingDatagram);
+
+   // delete the datagram processor and the associated thread when the QThread quits
+   connect(_datagramProcessingThread, &QThread::finished, datagramProcessor, &QObject::deleteLater);
+   connect(datagramProcessor, &QObject::destroyed, _datagramProcessingThread, &QThread::deleteLater);
+
+   // start the datagram processing thread
+   _datagramProcessingThread->start();
+}
+
void OctreeServer::run() {
    _safeServerName = getMyServerName();
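The setupDatagramProcessingThread() body above is the heart of this change: the QUdpSocket is unparented, moved onto a worker QThread together with a small processor object, and parsed packets come back to the server over a queued signal. A stripped-down sketch of the same pattern (class and slot names are illustrative, and like any QObject with signals it needs the usual moc build step):

#include <QCoreApplication>
#include <QThread>
#include <QUdpSocket>

class DatagramPump : public QObject {
    Q_OBJECT
public:
    explicit DatagramPump(QUdpSocket& socket) : _socket(socket) { }

public slots:
    void readPendingDatagrams() {
        while (_socket.hasPendingDatagrams()) {
            QByteArray packet;
            packet.resize(int(_socket.pendingDatagramSize()));
            _socket.readDatagram(packet.data(), packet.size());
            emit packetReady(packet);   // delivered to the receiver's thread as a queued call
        }
    }

signals:
    void packetReady(const QByteArray& packet);

private:
    QUdpSocket& _socket;
};

// Wiring, done once by the owning object on the main thread:
//   socket->setParent(nullptr);                      // a parented QObject cannot change threads
//   socket->moveToThread(workerThread);
//   pump->moveToThread(workerThread);
//   connect(socket, &QUdpSocket::readyRead, pump, &DatagramPump::readPendingDatagrams);
//   connect(pump, &DatagramPump::packetReady, owner, &Owner::handlePacket);   // queued, cross-thread
//   connect(workerThread, &QThread::finished, pump, &QObject::deleteLater);
//   workerThread->start();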
@@ -887,6 +916,8 @@ void OctreeServer::run() {
    // use common init to setup common timers and logging
    commonInit(getMyLoggingServerTargetName(), getMyNodeType());

+   setupDatagramProcessingThread();
+
    // Now would be a good time to parse our arguments, if we got them as assignment
    if (getPayload().size() > 0) {
        parsePayload();
@@ -123,13 +123,15 @@ public:
public slots:
    /// runs the voxel server assignment
    void run();
-   void readPendingDatagrams();
    void nodeAdded(SharedNodePointer node);
    void nodeKilled(SharedNodePointer node);
    void sendStatsPacket();

    void handleSignedTransactionPaymentResponse(const QJsonObject& jsonObject);

+   void readPendingDatagrams() { }; // this will not be called since our datagram processing thread will handle
+   void readPendingDatagram(const QByteArray& receivedPacket, const HifiSockAddr& senderSockAddr);
+
protected:
    void parsePayload();
    void initHTTPManager(int port);

@@ -140,6 +142,7 @@ protected:
    QString getStatusLink();

    void handleSignedTransactionPayment(PacketType packetType, const QByteArray& datagram);
+   void setupDatagramProcessingThread();

    int _argc;
    const char** _argv;
@@ -0,0 +1,55 @@
//
//  OctreeServerDatagramProcessor.cpp
//  assignment-client/src
//
//  Created by Brad Hefta-Gaub on 2014-09-05
//  Copyright 2014 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <QDebug>

#include <HifiSockAddr.h>
#include <NodeList.h>
#include <PacketHeaders.h>
#include <SharedUtil.h>

#include "OctreeServerDatagramProcessor.h"

OctreeServerDatagramProcessor::OctreeServerDatagramProcessor(QUdpSocket& nodeSocket, QThread* previousNodeSocketThread) :
    _nodeSocket(nodeSocket),
    _previousNodeSocketThread(previousNodeSocketThread)
{

}

OctreeServerDatagramProcessor::~OctreeServerDatagramProcessor() {
    // return the node socket to its previous thread
    _nodeSocket.moveToThread(_previousNodeSocketThread);
}

void OctreeServerDatagramProcessor::readPendingDatagrams() {

    HifiSockAddr senderSockAddr;
    static QByteArray incomingPacket;

    // read everything that is available
    while (_nodeSocket.hasPendingDatagrams()) {
        incomingPacket.resize(_nodeSocket.pendingDatagramSize());

        // just get this packet off the stack
        _nodeSocket.readDatagram(incomingPacket.data(), incomingPacket.size(),
                                 senderSockAddr.getAddressPointer(), senderSockAddr.getPortPointer());

        PacketType packetType = packetTypeForPacket(incomingPacket);
        if (packetType == PacketTypePing) {
            NodeList::getInstance()->processNodeData(senderSockAddr, incomingPacket);
            return; // don't emit
        }

        // emit the signal to tell AudioMixer it needs to process a packet
        emit packetRequiresProcessing(incomingPacket, senderSockAddr);
    }
}
assignment-client/src/octree/OctreeServerDatagramProcessor.h (new file, 32 lines)
@@ -0,0 +1,32 @@
//
//  OctreeServerDatagramProcessor.h
//  assignment-client/src
//
//  Created by Brad Hefta-Gaub on 2014-09-05
//  Copyright 2014 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_OctreeServerDatagramProcessor_h
#define hifi_OctreeServerDatagramProcessor_h

#include <qobject.h>
#include <qudpsocket.h>

class OctreeServerDatagramProcessor : public QObject {
    Q_OBJECT
public:
    OctreeServerDatagramProcessor(QUdpSocket& nodeSocket, QThread* previousNodeSocketThread);
    ~OctreeServerDatagramProcessor();
public slots:
    void readPendingDatagrams();
signals:
    void packetRequiresProcessing(const QByteArray& receivedPacket, const HifiSockAddr& senderSockAddr);
private:
    QUdpSocket& _nodeSocket;
    QThread* _previousNodeSocketThread;
};

#endif // hifi_OctreeServerDatagramProcessor_h
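One detail worth noting about the new packetRequiresProcessing(QByteArray, HifiSockAddr) signal: it crosses threads, so its arguments are copied by Qt's queued-connection machinery, which only handles types known to the meta-type system. QByteArray is built in; HifiSockAddr is why the earlier context line qRegisterMetaType<HifiSockAddr>("HifiSockAddr") exists. A tiny sketch of that registration pattern, with a hypothetical type standing in for HifiSockAddr:

#include <QCoreApplication>
#include <QMetaType>

// Hypothetical value type carried through a cross-thread (queued) signal.
struct SenderInfo {
    quint16 port = 0;
};
Q_DECLARE_METATYPE(SenderInfo)

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);
    // Must run before the first queued emission that carries a SenderInfo;
    // otherwise Qt warns "Cannot queue arguments of type 'SenderInfo'"
    // and the receiving slot is never invoked.
    qRegisterMetaType<SenderInfo>("SenderInfo");
    return 0;
}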
@@ -43,5 +43,7 @@
        </array>
    </dict>
</array>
+<key>NSHighResolutionCapable</key>
+<true/>
</dict>
</plist>
@@ -8,9 +8,9 @@
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

-Script.include("lookWithTouch.js");
-Script.include("editVoxels.js");
-Script.include("editModels.js");
-Script.include("selectAudioDevice.js");
-Script.include("hydraMove.js");
-Script.include("inspect.js");
+Script.load("lookWithTouch.js");
+Script.load("editVoxels.js");
+Script.load("editModels.js");
+Script.load("selectAudioDevice.js");
+Script.load("hydraMove.js");
+Script.load("inspect.js");
@@ -1147,10 +1147,10 @@ var toolBar = (function () {
    }, true, false);

    browseModelsButton = toolBar.addTool({
-       imageURL: toolIconUrl + "list-icon.png",
+       imageURL: toolIconUrl + "list-icon.svg",
        width: toolWidth,
        height: toolHeight,
-       alpha: 0.7,
+       alpha: 0.9,
        visible: true
    });
@@ -1,23 +0,0 @@ (stylesheet file removed)
QLabel#avatarLabel {
    background-image: url(styles/avatar.svg);
    background-repeat: no-repeat;
    background-position: left center;
}

QLabel#advancedTuningLabel {
    background-image: url(styles/wrench.svg);
    background-repeat: no-repeat;
    background-position: left center;
}

QPushButton#buttonBrowseHead,
QPushButton#buttonBrowseBody,
QPushButton#buttonBrowseLocation,
QPushButton#buttonBrowseScriptsLocation {
    background-image: url(styles/search.svg);
    background-repeat: no-repeat;
    background-position: center center;
    background-color: #fff;
    border-radius: 0;
    padding: 0;
}
@@ -382,6 +382,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
    _runningScriptsWidget->setRunningScripts(getRunningScripts());
    connect(_runningScriptsWidget, &RunningScriptsWidget::stopScriptName, this, &Application::stopScript);

+   connect(this, SIGNAL(aboutToQuit()), this, SLOT(saveScripts()));
+
    // check first run...
    QVariant firstRunValue = _settings->value("firstRun",QVariant(true));
    if (firstRunValue.isValid() && firstRunValue.toBool()) {

@@ -393,7 +395,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
        QMutexLocker locker(&_settingsMutex);
        _settings->setValue("firstRun",QVariant(false));
    } else {
-       // do this as late as possible so that all required subsystems are inialized
+       // do this as late as possible so that all required subsystems are initialized
        loadScripts();

        QMutexLocker locker(&_settingsMutex);

@@ -425,7 +427,6 @@ Application::~Application() {

    saveSettings();
    storeSizeAndPosition();
-   saveScripts();

    int DELAY_TIME = 1000;
    UserActivityLogger::getInstance().close(DELAY_TIME);
@@ -594,7 +595,7 @@ void Application::paintGL() {
    if (OculusManager::isConnected()) {
        _textureCache.setFrameBufferSize(OculusManager::getRenderTargetSize());
    } else {
-       _textureCache.setFrameBufferSize(_glWidget->size());
+       _textureCache.setFrameBufferSize(_glWidget->getDeviceSize());
    }

    glEnable(GL_LINE_SMOOTH);

@@ -975,7 +976,7 @@ void Application::keyPressEvent(QKeyEvent* event) {
    if (isShifted) {
        _viewFrustum.setFocalLength(_viewFrustum.getFocalLength() - 0.1f);
        if (TV3DManager::isConnected()) {
-           TV3DManager::configureCamera(_myCamera, _glWidget->width(),_glWidget->height());
+           TV3DManager::configureCamera(_myCamera, _glWidget->getDeviceWidth(), _glWidget->getDeviceHeight());
        }
    } else {
        _myCamera.setEyeOffsetPosition(_myCamera.getEyeOffsetPosition() + glm::vec3(-0.001, 0, 0));

@@ -987,7 +988,7 @@ void Application::keyPressEvent(QKeyEvent* event) {
    if (isShifted) {
        _viewFrustum.setFocalLength(_viewFrustum.getFocalLength() + 0.1f);
        if (TV3DManager::isConnected()) {
-           TV3DManager::configureCamera(_myCamera, _glWidget->width(),_glWidget->height());
+           TV3DManager::configureCamera(_myCamera, _glWidget->getDeviceWidth(), _glWidget->getDeviceHeight());
        }

    } else {

@@ -1151,7 +1152,8 @@ void Application::mouseMoveEvent(QMouseEvent* event, unsigned int deviceID) {
        showMouse = false;
    }

-   _controllerScriptingInterface.emitMouseMoveEvent(event, deviceID); // send events to any registered scripts
+   QMouseEvent deviceEvent = getDeviceEvent(event, deviceID);
+   _controllerScriptingInterface.emitMouseMoveEvent(&deviceEvent, deviceID); // send events to any registered scripts

    // if one of our scripts have asked to capture this event, then stop processing it
    if (_controllerScriptingInterface.isMouseCaptured()) {

@@ -1166,12 +1168,13 @@ void Application::mouseMoveEvent(QMouseEvent* event, unsigned int deviceID) {
        _seenMouseMove = true;
    }

-   _mouseX = event->x();
-   _mouseY = event->y();
+   _mouseX = deviceEvent.x();
+   _mouseY = deviceEvent.y();
}

void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
-   _controllerScriptingInterface.emitMousePressEvent(event); // send events to any registered scripts
+   QMouseEvent deviceEvent = getDeviceEvent(event, deviceID);
+   _controllerScriptingInterface.emitMousePressEvent(&deviceEvent); // send events to any registered scripts

    // if one of our scripts have asked to capture this event, then stop processing it
    if (_controllerScriptingInterface.isMouseCaptured()) {

@@ -1181,8 +1184,8 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {

    if (activeWindow() == _window) {
        if (event->button() == Qt::LeftButton) {
-           _mouseX = event->x();
-           _mouseY = event->y();
+           _mouseX = deviceEvent.x();
+           _mouseY = deviceEvent.y();
            _mouseDragStartedX = _mouseX;
            _mouseDragStartedY = _mouseY;
            _mousePressed = true;

@@ -1204,7 +1207,8 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
}

void Application::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {
-   _controllerScriptingInterface.emitMouseReleaseEvent(event); // send events to any registered scripts
+   QMouseEvent deviceEvent = getDeviceEvent(event, deviceID);
+   _controllerScriptingInterface.emitMouseReleaseEvent(&deviceEvent); // send events to any registered scripts

    // if one of our scripts have asked to capture this event, then stop processing it
    if (_controllerScriptingInterface.isMouseCaptured()) {

@@ -1213,8 +1217,8 @@ void Application::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {

    if (activeWindow() == _window) {
        if (event->button() == Qt::LeftButton) {
-           _mouseX = event->x();
-           _mouseY = event->y();
+           _mouseX = deviceEvent.x();
+           _mouseY = deviceEvent.y();
            _mousePressed = false;
            checkBandwidthMeterClick();
            if (Menu::getInstance()->isOptionChecked(MenuOption::Stats)) {

@@ -1413,7 +1417,7 @@ void Application::checkBandwidthMeterClick() {
    if (Menu::getInstance()->isOptionChecked(MenuOption::Bandwidth) &&
        glm::compMax(glm::abs(glm::ivec2(_mouseX - _mouseDragStartedX, _mouseY - _mouseDragStartedY)))
            <= BANDWIDTH_METER_CLICK_MAX_DRAG_LENGTH
-           && _bandwidthMeter.isWithinArea(_mouseX, _mouseY, _glWidget->width(), _glWidget->height())) {
+           && _bandwidthMeter.isWithinArea(_mouseX, _mouseY, _glWidget->getDeviceWidth(), _glWidget->getDeviceHeight())) {

        // The bandwidth meter is visible, the click didn't get dragged too far and
        // we actually hit the bandwidth meter

@@ -1427,7 +1431,7 @@ void Application::setFullscreen(bool fullscreen) {
}

void Application::setEnable3DTVMode(bool enable3DTVMode) {
-   resizeGL(_glWidget->width(),_glWidget->height());
+   resizeGL(_glWidget->getDeviceWidth(),_glWidget->getDeviceHeight());
}

void Application::setEnableVRMode(bool enableVRMode) {

@@ -1440,7 +1444,7 @@ void Application::setEnableVRMode(bool enableVRMode) {
        }
    }

-   resizeGL(_glWidget->width(), _glWidget->height());
+   resizeGL(_glWidget->getDeviceWidth(), _glWidget->getDeviceHeight());
}

void Application::setRenderVoxels(bool voxelRender) {

@@ -1731,8 +1735,8 @@ void Application::init() {
    _voxelShader.init();
    _pointShader.init();

-   _mouseX = _glWidget->width() / 2;
-   _mouseY = _glWidget->height() / 2;
+   _mouseX = _glWidget->getDeviceWidth() / 2;
+   _mouseY = _glWidget->getDeviceHeight() / 2;
    QCursor::setPos(_mouseX, _mouseY);

    // TODO: move _myAvatar out of Application. Move relevant code to MyAvataar or AvatarManager

@@ -1887,8 +1891,8 @@ void Application::updateMouseRay() {
    // if the mouse pointer isn't visible, act like it's at the center of the screen
    float x = 0.5f, y = 0.5f;
    if (!_mouseHidden) {
-       x = _mouseX / (float)_glWidget->width();
-       y = _mouseY / (float)_glWidget->height();
+       x = _mouseX / (float)_glWidget->getDeviceWidth();
+       y = _mouseY / (float)_glWidget->getDeviceHeight();
    }
    _viewFrustum.computePickRay(x, y, _mouseRayOrigin, _mouseRayDirection);

@@ -2328,6 +2332,14 @@ int Application::sendNackPackets() {
    return packetsSent;
}

+QMouseEvent Application::getDeviceEvent(QMouseEvent* event, unsigned int deviceID) {
+   if (deviceID > 0) {
+       return *event;
+   }
+   return QMouseEvent(event->type(), QPointF(_glWidget->getDeviceX(event->x()), _glWidget->getDeviceY(event->y())),
+       event->windowPos(), event->screenPos(), event->button(), event->buttons(), event->modifiers());
+}
+
void Application::queryOctree(NodeType_t serverType, PacketType packetType, NodeToJurisdictionMap& jurisdictions) {

    //qDebug() << ">>> inside... queryOctree()... _viewFrustum.getFieldOfView()=" << _viewFrustum.getFieldOfView();
@@ -2675,7 +2687,7 @@ void Application::updateShadowMap() {

    fbo->release();

-   glViewport(0, 0, _glWidget->width(), _glWidget->height());
+   glViewport(0, 0, _glWidget->getDeviceWidth(), _glWidget->getDeviceHeight());
}

const GLfloat WORLD_AMBIENT_COLOR[] = { 0.525f, 0.525f, 0.6f };

@@ -2705,7 +2717,7 @@ QImage Application::renderAvatarBillboard() {
    glDisable(GL_BLEND);

    const int BILLBOARD_SIZE = 64;
-   renderRearViewMirror(QRect(0, _glWidget->height() - BILLBOARD_SIZE, BILLBOARD_SIZE, BILLBOARD_SIZE), true);
+   renderRearViewMirror(QRect(0, _glWidget->getDeviceHeight() - BILLBOARD_SIZE, BILLBOARD_SIZE, BILLBOARD_SIZE), true);

    QImage image(BILLBOARD_SIZE, BILLBOARD_SIZE, QImage::Format_ARGB32);
    glReadPixels(0, 0, BILLBOARD_SIZE, BILLBOARD_SIZE, GL_BGRA, GL_UNSIGNED_BYTE, image.bits());

@@ -2973,8 +2985,8 @@ void Application::computeOffAxisFrustum(float& left, float& right, float& bottom
}

glm::vec2 Application::getScaledScreenPoint(glm::vec2 projectedPoint) {
-   float horizontalScale = _glWidget->width() / 2.0f;
-   float verticalScale = _glWidget->height() / 2.0f;
+   float horizontalScale = _glWidget->getDeviceWidth() / 2.0f;
+   float verticalScale = _glWidget->getDeviceHeight() / 2.0f;

    // -1,-1 is 0,windowHeight
    // 1,1 is windowWidth,0

@@ -2993,7 +3005,7 @@ glm::vec2 Application::getScaledScreenPoint(glm::vec2 projectedPoint) {
    // -1,-1    1,-1

    glm::vec2 screenPoint((projectedPoint.x + 1.0) * horizontalScale,
-       ((projectedPoint.y + 1.0) * -verticalScale) + _glWidget->height());
+       ((projectedPoint.y + 1.0) * -verticalScale) + _glWidget->getDeviceHeight());

    return screenPoint;
}

@@ -3029,8 +3041,8 @@ void Application::renderRearViewMirror(const QRect& region, bool billboard) {
    _mirrorCamera.update(1.0f/_fps);

    // set the bounds of rear mirror view
-   glViewport(region.x(), _glWidget->height() - region.y() - region.height(), region.width(), region.height());
-   glScissor(region.x(), _glWidget->height() - region.y() - region.height(), region.width(), region.height());
+   glViewport(region.x(), _glWidget->getDeviceHeight() - region.y() - region.height(), region.width(), region.height());
+   glScissor(region.x(), _glWidget->getDeviceHeight() - region.y() - region.height(), region.width(), region.height());
    bool updateViewFrustum = false;
    updateProjectionMatrix(_mirrorCamera, updateViewFrustum);
    glEnable(GL_SCISSOR_TEST);

@@ -3097,7 +3109,7 @@ void Application::renderRearViewMirror(const QRect& region, bool billboard) {
    }

    // reset Viewport and projection matrix
-   glViewport(0, 0, _glWidget->width(), _glWidget->height());
+   glViewport(0, 0, _glWidget->getDeviceWidth(), _glWidget->getDeviceHeight());
    glDisable(GL_SCISSOR_TEST);
    updateProjectionMatrix(_myCamera, updateViewFrustum);
}

@@ -3279,8 +3291,8 @@ void Application::deleteVoxelAt(const VoxelDetail& voxel) {


void Application::resetSensors() {
-   _mouseX = _glWidget->width() / 2;
-   _mouseY = _glWidget->height() / 2;
+   _mouseX = _glWidget->getDeviceWidth() / 2;
+   _mouseY = _glWidget->getDeviceHeight() / 2;

    _faceplus.reset();
    _faceshift.reset();
@@ -3672,18 +3684,26 @@ void Application::clearScriptsBeforeRunning() {
}

void Application::saveScripts() {
-   // saves all current running scripts
+   // Saves all currently running user-loaded scripts
    QMutexLocker locker(&_settingsMutex);
    _settings->beginWriteArray("Settings");
-   for (int i = 0; i < getRunningScripts().size(); ++i){
-       _settings->setArrayIndex(i);
-       _settings->setValue("script", getRunningScripts().at(i));
+
+   QStringList runningScripts = getRunningScripts();
+   int i = 0;
+   for (QStringList::const_iterator it = runningScripts.begin(); it != runningScripts.end(); it += 1) {
+       if (getScriptEngine(*it)->isUserLoaded()) {
+           _settings->setArrayIndex(i);
+           _settings->setValue("script", *it);
+           i += 1;
+       }
    }

    _settings->endArray();
}

-ScriptEngine* Application::loadScript(const QString& scriptName, bool loadScriptFromEditor, bool activateMainWindow) {
-   QUrl scriptUrl(scriptName);
+ScriptEngine* Application::loadScript(const QString& scriptFilename, bool isUserLoaded,
+       bool loadScriptFromEditor, bool activateMainWindow) {
+   QUrl scriptUrl(scriptFilename);
    const QString& scriptURLString = scriptUrl.toString();
    if (_scriptEnginesHash.contains(scriptURLString) && loadScriptFromEditor
        && !_scriptEnginesHash[scriptURLString]->isFinished()) {
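The reworked saveScripts() above walks the running scripts and persists only the user-loaded ones, keeping a separate counter so the QSettings array indices stay contiguous even when entries are skipped. A small standalone sketch of the same QSettings array idiom (organization, application, and key names are made up for the example):

#include <QCoreApplication>
#include <QSettings>
#include <QStringList>

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);
    QSettings settings("ExampleOrg", "ExampleApp");   // assumed org/app names

    QStringList scripts = { "a.js", "b.js", "c.js" };
    settings.beginWriteArray("RunningScripts");
    int index = 0;
    for (const QString& script : scripts) {
        settings.setArrayIndex(index++);              // indices must run contiguously from 0
        settings.setValue("script", script);
    }
    settings.endArray();                              // records the final array size
    return 0;
}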
@@ -3692,7 +3712,7 @@ ScriptEngine* Application::loadScript(const QString& scriptName, bool loadScript
    }

    ScriptEngine* scriptEngine;
-   if (scriptName.isNull()) {
+   if (scriptFilename.isNull()) {
        scriptEngine = new ScriptEngine(NO_SCRIPT, "", &_controllerScriptingInterface);
    } else {
        // start the script on a new thread...

@@ -3708,6 +3728,7 @@ ScriptEngine* Application::loadScript(const QString& scriptName, bool loadScript
        _runningScriptsWidget->setRunningScripts(getRunningScripts());
        UserActivityLogger::getInstance().loadedScript(scriptURLString);
    }
+   scriptEngine->setUserLoaded(isUserLoaded);

    // setup the packet senders and jurisdiction listeners of the script engine's scripting interfaces so
    // we can use the same ones from the application.
@@ -3740,7 +3761,7 @@ ScriptEngine* Application::loadScript(const QString& scriptName, bool loadScript

    connect(scriptEngine, SIGNAL(finished(const QString&)), this, SLOT(scriptFinished(const QString&)));

-   connect(scriptEngine, SIGNAL(loadScript(const QString&)), this, SLOT(loadScript(const QString&)));
+   connect(scriptEngine, SIGNAL(loadScript(const QString&, bool)), this, SLOT(loadScript(const QString&, bool)));

    scriptEngine->registerGlobalObject("Overlays", &_overlays);

@@ -3810,7 +3831,7 @@ void Application::stopAllScripts(bool restart) {
    // stops all current running scripts
    for (QHash<QString, ScriptEngine*>::const_iterator it = _scriptEnginesHash.constBegin();
            it != _scriptEnginesHash.constEnd(); it++) {
-       if (restart) {
+       if (restart && it.value()->isUserLoaded()) {
            connect(it.value(), SIGNAL(finished(const QString&)), SLOT(loadScript(const QString&)));
        }
        it.value()->stop();
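The connect() change from SIGNAL(loadScript(const QString&)) to SIGNAL(loadScript(const QString&, bool)) is forced by the new isUserLoaded parameter: string-based connections are matched by exact signature text at runtime, default arguments are not considered, and a mismatch only produces a console warning while the slot silently never fires. A minimal runnable illustration of that failure mode, using QTimer instead of ScriptEngine:

#include <QCoreApplication>
#include <QTimer>

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);
    QTimer timer;

    // String-based connects are resolved at runtime by exact signature text.
    // This one fails with a "No such signal QTimer::timeout(int)" warning:
    QObject::connect(&timer, SIGNAL(timeout(int)), &app, SLOT(quit()));

    // This one matches the real signature and works:
    QObject::connect(&timer, SIGNAL(timeout()), &app, SLOT(quit()));

    timer.start(10);
    return app.exec();
}

The pointer-to-member connect syntax used elsewhere in this diff (for example &QUdpSocket::readyRead) moves that check to compile time.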
@@ -150,7 +150,6 @@ public:
    void setPreviousScriptLocation(const QString& previousScriptLocation);
    void storeSizeAndPosition();
    void clearScriptsBeforeRunning();
-   void saveScripts();
    void initializeGL();
    void paintGL();
    void resizeGL(int width, int height);

@@ -186,7 +185,7 @@ public:

    glm::vec3 getMouseVoxelWorldCoordinates(const VoxelDetail& mouseVoxel);

-   QGLWidget* getGLWidget() { return _glWidget; }
+   GLCanvas* getGLWidget() { return _glWidget; }
    bool isThrottleRendering() const { return _glWidget->isThrottleRendering(); }
    MyAvatar* getAvatar() { return _myAvatar; }
    Audio* getAudio() { return &_audio; }

@@ -288,7 +287,7 @@ public:
    PointShader& getPointShader() { return _pointShader; }
    FileLogger* getLogger() { return _logger; }

-   glm::vec2 getViewportDimensions() const{ return glm::vec2(_glWidget->width(),_glWidget->height()); }
+   glm::vec2 getViewportDimensions() const { return glm::vec2(_glWidget->getDeviceWidth(), _glWidget->getDeviceHeight()); }
    NodeToJurisdictionMap& getVoxelServerJurisdictions() { return _voxelServerJurisdictions; }
    NodeToJurisdictionMap& getParticleServerJurisdictions() { return _particleServerJurisdictions; }
    NodeToJurisdictionMap& getEntityServerJurisdictions() { return _entityServerJurisdictions; }

@@ -340,13 +339,15 @@ public slots:
    void loadScriptURLDialog();
    void toggleLogDialog();
    void initAvatarAndViewFrustum();
-   ScriptEngine* loadScript(const QString& fileNameString = QString(), bool loadScriptFromEditor = false, bool activateMainWindow = false);
+   ScriptEngine* loadScript(const QString& scriptFilename = QString(), bool isUserLoaded = true,
+       bool loadScriptFromEditor = false, bool activateMainWindow = false);
    void scriptFinished(const QString& scriptName);
    void stopAllScripts(bool restart = false);
    void stopScript(const QString& scriptName);
    void reloadAllScripts();
    void loadDefaultScripts();
    void toggleRunningScriptsWidget();
+   void saveScripts();

    void uploadHead();
    void uploadSkeleton();

@@ -435,6 +436,8 @@ private:

    int sendNackPackets();

+   QMouseEvent getDeviceEvent(QMouseEvent* event, unsigned int deviceID);
+
    MainWindow* _window;
    GLCanvas* _glWidget; // our GLCanvas has a couple extra features
@@ -40,6 +40,7 @@
#include <glm/glm.hpp>

#include "Audio.h"

#include "Menu.h"
#include "Util.h"
#include "PositionalAudioStream.h"

@@ -82,7 +83,7 @@ Audio::Audio(QObject* parent) :
    _noiseGateSampleCounter(0),
    _noiseGateOpen(false),
    _noiseGateEnabled(true),
-   _toneInjectionEnabled(false),
+   _audioSourceInjectEnabled(false),
    _noiseGateFramesToClose(0),
    _totalInputAudioSamples(0),
    _collisionSoundMagnitude(0.0f),

@@ -102,6 +103,8 @@ Audio::Audio(QObject* parent) :
    _framesPerScope(DEFAULT_FRAMES_PER_SCOPE),
    _samplesPerScope(NETWORK_SAMPLES_PER_FRAME * _framesPerScope),
    _peqEnabled(false),
+   _noiseSourceEnabled(false),
+   _toneSourceEnabled(true),
    _scopeInput(0),
    _scopeOutputLeft(0),
    _scopeOutputRight(0),

@@ -137,6 +140,10 @@ void Audio::reset() {
    _receivedAudioStream.reset();
    resetStats();
    _peq.reset();
+   _noiseSource.reset();
+   _toneSource.reset();
+   _sourceGain.reset();
+   _inputGain.reset();
}

void Audio::resetStats() {
@@ -424,14 +431,25 @@ void Audio::start() {
        qDebug() << "Unable to set up audio output because of a problem with output format.";
    }

-   _peq.initialize( _inputFormat.sampleRate(), _audioInput->bufferSize() );
+   _inputFrameBuffer.initialize( _inputFormat.channelCount(), _audioInput->bufferSize() * 2 );
+   _peq.initialize( _inputFormat.sampleRate() );
+   _inputGain.initialize();
+   _sourceGain.initialize();
+   _noiseSource.initialize();
+   _toneSource.initialize();
+   _sourceGain.setParameters(0.25f,0.0f);
+   _inputGain.setParameters(1.0f,0.0f);
}

void Audio::stop() {

+   _inputFrameBuffer.finalize();
    _peq.finalize();

+   _inputGain.finalize();
+   _sourceGain.finalize();
+   _noiseSource.finalize();
+   _toneSource.finalize();

    // "switch" to invalid devices in order to shut down the state
    switchInputToAudioDevice(QAudioDeviceInfo());
    switchOutputToAudioDevice(QAudioDeviceInfo());
@@ -477,14 +495,30 @@ void Audio::handleAudioInput() {

    QByteArray inputByteArray = _inputDevice->readAll();

+   int16_t* inputFrameData = (int16_t*)inputByteArray.data();
+   const int inputFrameCount = inputByteArray.size() / sizeof(int16_t);
+
+   _inputFrameBuffer.copyFrames(1, inputFrameCount, inputFrameData, false /*copy in*/);
+
+   _inputGain.render(_inputFrameBuffer); // input/mic gain+mute
+
+   // Add audio source injection if enabled
+   if (_audioSourceInjectEnabled && !_muted) {
+
+       if (_toneSourceEnabled) { // sine generator
+           _toneSource.render(_inputFrameBuffer);
+       }
+       else if(_noiseSourceEnabled) { // pink noise generator
+           _noiseSource.render(_inputFrameBuffer);
+       }
+       _sourceGain.render(_inputFrameBuffer); // post gain
+   }
    if (_peqEnabled && !_muted) {
-       // we wish to pre-filter our captured input, prior to loopback
-
-       int16_t* ioBuffer = (int16_t*)inputByteArray.data();
-
-       _peq.render(ioBuffer, ioBuffer, inputByteArray.size() / sizeof(int16_t));
+       _peq.render(_inputFrameBuffer); // 3-band parametric eq
    }

+   _inputFrameBuffer.copyFrames(1, inputFrameCount, inputFrameData, true /*copy out*/);
+
    if (Menu::getInstance()->isOptionChecked(MenuOption::EchoLocalAudio) && !_muted && _audioOutput) {
        // if this person wants local loopback add that to the locally injected audio
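In the reworked handleAudioInput() above, the captured frames are copied into a float frame buffer, a tone or pink-noise source is optionally rendered into them, and the result is copied back before the rest of the pipeline runs. A toy sketch of the sine-injection step on a plain mono int16 buffer (sample rate, frequency, and amplitude are assumed values, not the ones used by Audio.cpp):

#include <cmath>
#include <cstdint>
#include <vector>

int main() {
    const float SAMPLE_RATE = 24000.0f;   // assumed
    const float FREQUENCY = 440.0f;       // assumed
    const float AMPLITUDE = 8192.0f;      // roughly a quarter of int16 full scale
    const float TWO_PI = 6.2831853f;

    std::vector<int16_t> frame(512);
    float phase = 0.0f;
    for (size_t i = 0; i < frame.size(); ++i) {
        frame[i] = static_cast<int16_t>(AMPLITUDE * std::sin(phase));
        phase += TWO_PI * FREQUENCY / SAMPLE_RATE;
        if (phase > TWO_PI) {
            phase -= TWO_PI;   // keep the oscillator phase bounded across frames
        }
    }
    return 0;
}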
@@ -522,7 +556,7 @@ void Audio::handleAudioInput() {

        int16_t* inputAudioSamples = new int16_t[inputSamplesRequired];
        _inputRingBuffer.readSamples(inputAudioSamples, inputSamplesRequired);

        const int numNetworkBytes = _isStereoInput ? NETWORK_BUFFER_LENGTH_BYTES_STEREO : NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL;
        const int numNetworkSamples = _isStereoInput ? NETWORK_BUFFER_LENGTH_SAMPLES_STEREO : NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL;

@@ -599,20 +633,8 @@ void Audio::handleAudioInput() {
            _dcOffset = DC_OFFSET_AVERAGING * _dcOffset + (1.0f - DC_OFFSET_AVERAGING) * measuredDcOffset;
        }

-       // Add tone injection if enabled
-       const float TONE_FREQ = 220.0f / SAMPLE_RATE * TWO_PI;
-       const float QUARTER_VOLUME = 8192.0f;
-       if (_toneInjectionEnabled) {
-           loudness = 0.0f;
-           for (int i = 0; i < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL; i++) {
-               networkAudioSamples[i] = QUARTER_VOLUME * sinf(TONE_FREQ * (float)(i + _proceduralEffectSample));
-               loudness += fabsf(networkAudioSamples[i]);
-           }
-       }
        _lastInputLoudness = fabs(loudness / NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);

        // If Noise Gate is enabled, check and turn the gate on and off
-       if (!_toneInjectionEnabled && _noiseGateEnabled) {
+       if (!_audioSourceInjectEnabled && _noiseGateEnabled) {
            float averageOfAllSampleFrames = 0.0f;
            _noiseSampleFrames[_noiseGateSampleCounter++] = _lastInputLoudness;
            if (_noiseGateSampleCounter == NUMBER_OF_NOISE_SAMPLE_FRAMES) {
@@ -1041,8 +1063,18 @@ void Audio::processProceduralAudio(int16_t* monoInput, int numSamples) {
    }
}

-void Audio::toggleToneInjection() {
-   _toneInjectionEnabled = !_toneInjectionEnabled;
+void Audio::toggleAudioSourceInject() {
+   _audioSourceInjectEnabled = !_audioSourceInjectEnabled;
}

+void Audio::selectAudioSourcePinkNoise() {
+   _noiseSourceEnabled = Menu::getInstance()->isOptionChecked(MenuOption::AudioSourcePinkNoise);
+   _toneSourceEnabled = !_noiseSourceEnabled;
+}
+
+void Audio::selectAudioSourceSine440() {
+   _toneSourceEnabled = Menu::getInstance()->isOptionChecked(MenuOption::AudioSourceSine440);
+   _noiseSourceEnabled = !_toneSourceEnabled;
+}
+
void Audio::toggleAudioSpatialProcessing() {
@@ -20,6 +20,12 @@
#include "Recorder.h"
#include "RingBufferHistory.h"
#include "MovingMinMaxAvg.h"
#include "AudioRingBuffer.h"
+#include "AudioFormat.h"
+#include "AudioBuffer.h"
+#include "AudioSourceTone.h"
+#include "AudioSourceNoise.h"
+#include "AudioGain.h"
#include "AudioFilter.h"
#include "AudioFilterBank.h"

@@ -116,7 +122,9 @@ public slots:
    void audioMixerKilled();
    void toggleMute();
    void toggleAudioNoiseReduction();
-   void toggleToneInjection();
+   void toggleAudioSourceInject();
+   void selectAudioSourcePinkNoise();
+   void selectAudioSourceSine440();
    void toggleScope();
    void toggleScopePause();
    void toggleStats();

@@ -199,7 +207,8 @@ private:
    int _noiseGateSampleCounter;
    bool _noiseGateOpen;
    bool _noiseGateEnabled;
-   bool _toneInjectionEnabled;
+   bool _audioSourceInjectEnabled;

    int _noiseGateFramesToClose;
    int _totalInputAudioSamples;

@@ -282,10 +291,27 @@ private:
    int _framesPerScope;
    int _samplesPerScope;

-   // Multi-band parametric EQ
-   bool _peqEnabled;
-   AudioFilterPEQ3m _peq;
+   // Input framebuffer
+   AudioBufferFloat32 _inputFrameBuffer;
+
+   // Input gain
+   AudioGain _inputGain;
+
+   // Post tone/pink noise generator gain
+   AudioGain _sourceGain;
+
+   // Pink noise source
+   bool _noiseSourceEnabled;
+   AudioSourcePinkNoise _noiseSource;
+
+   // Tone source
+   bool _toneSourceEnabled;
+   AudioSourceTone _toneSource;
+
+   // Multi-band parametric EQ
+   bool _peqEnabled;
+   AudioFilterPEQ3m _peq;

    QMutex _guard;
    QByteArray* _scopeInput;
    QByteArray* _scopeOutputLeft;
@@ -262,8 +262,8 @@ CameraScriptableObject::CameraScriptableObject(Camera* camera, ViewFrustum* view
}

PickRay CameraScriptableObject::computePickRay(float x, float y) {
-   float screenWidth = Application::getInstance()->getGLWidget()->width();
-   float screenHeight = Application::getInstance()->getGLWidget()->height();
+   float screenWidth = Application::getInstance()->getGLWidget()->getDeviceWidth();
+   float screenHeight = Application::getInstance()->getGLWidget()->getDeviceHeight();
    PickRay result;
    if (OculusManager::isConnected()) {
        result.origin = _camera->getPosition();
@@ -9,13 +9,14 @@
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

-#include "Application.h"
-
-#include "GLCanvas.h"
-#include "devices/OculusManager.h"
-#include <QMainWindow>
#include <QMimeData>
#include <QUrl>
+#include <QMainWindow>
+#include <QWindow>
+
+#include "Application.h"
+#include "GLCanvas.h"
+#include "devices/OculusManager.h"

const int MSECS_PER_FRAME_WHEN_THROTTLED = 66;

@@ -34,6 +35,22 @@ bool GLCanvas::isThrottleRendering() const {
    return _throttleRendering || Application::getInstance()->getWindow()->isMinimized();
}

+int GLCanvas::getDeviceWidth() const {
+   return width() * (windowHandle() ? windowHandle()->devicePixelRatio() : 1.0f);
+}
+
+int GLCanvas::getDeviceHeight() const {
+   return height() * (windowHandle() ? windowHandle()->devicePixelRatio() : 1.0f);
+}
+
+int GLCanvas::getDeviceX(int x) const {
+   return x * getDeviceWidth() / width();
+}
+
+int GLCanvas::getDeviceY(int y) const {
+   return y * getDeviceHeight() / height();
+}
+
void GLCanvas::initializeGL() {
    Application::getInstance()->initializeGL();
    setAttribute(Qt::WA_AcceptTouchEvents);
@@ -21,6 +21,14 @@ class GLCanvas : public QGLWidget {
public:
    GLCanvas();
    bool isThrottleRendering() const;

+   int getDeviceWidth() const;
+   int getDeviceHeight() const;
+   QSize getDeviceSize() const { return QSize(getDeviceWidth(), getDeviceHeight()); }
+
+   int getDeviceX(int x) const;
+   int getDeviceY(int y) const;
+
protected:

    QTimer _frameTimer;
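getDeviceWidth()/getDeviceHeight() above are the core of the high-DPI fixes running through this merge: QGLWidget::width()/height() report logical sizes, while the GL framebuffer and cursor math want device pixels, so on a Retina display anything computed from the logical size only covers part of the screen. A minimal sketch of the same devicePixelRatio conversion on a plain widget:

#include <QApplication>
#include <QDebug>
#include <QWidget>
#include <QWindow>

int main(int argc, char** argv) {
    QApplication app(argc, argv);
    QWidget widget;
    widget.show();   // creates the native window so windowHandle() is valid

    qreal ratio = widget.windowHandle() ? widget.windowHandle()->devicePixelRatio() : 1.0;
    int deviceWidth = int(widget.width() * ratio);    // logical points -> device pixels
    int deviceHeight = int(widget.height() * ratio);
    qDebug() << "logical" << widget.size() << "device" << deviceWidth << "x" << deviceHeight;
    return 0;
}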
@@ -545,11 +545,31 @@ Menu::Menu() :
                                           0,
                                           this,
                                           SLOT(muteEnvironment()));
-   addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioToneInjection,
+
+   addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioSourceInject,
                                           0,
                                           false,
                                           appInstance->getAudio(),
-                                          SLOT(toggleToneInjection()));
+                                          SLOT(toggleAudioSourceInject()));
+   QMenu* audioSourceMenu = audioDebugMenu->addMenu("Generated Audio Source");
+   {
+       QAction *pinkNoise = addCheckableActionToQMenuAndActionHash(audioSourceMenu, MenuOption::AudioSourcePinkNoise,
+                                                                   0,
+                                                                   false,
+                                                                   appInstance->getAudio(),
+                                                                   SLOT(selectAudioSourcePinkNoise()));
+
+       QAction *sine440 = addCheckableActionToQMenuAndActionHash(audioSourceMenu, MenuOption::AudioSourceSine440,
+                                                                 0,
+                                                                 true,
+                                                                 appInstance->getAudio(),
+                                                                 SLOT(selectAudioSourceSine440()));
+
+       QActionGroup* audioSourceGroup = new QActionGroup(audioSourceMenu);
+       audioSourceGroup->addAction(pinkNoise);
+       audioSourceGroup->addAction(sine440);
+   }
+
    addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioScope,
                                           Qt::CTRL | Qt::Key_P, false,
                                           appInstance->getAudio(),
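The new "Generated Audio Source" submenu uses a QActionGroup so that "Pink Noise" and "Sine 440hz" behave as mutually exclusive radio items. A minimal standalone sketch of that pattern (window and menu titles are illustrative):

#include <QApplication>
#include <QActionGroup>
#include <QMainWindow>
#include <QMenu>
#include <QMenuBar>

int main(int argc, char** argv) {
    QApplication app(argc, argv);
    QMainWindow window;

    QMenu* sourceMenu = window.menuBar()->addMenu("Generated Audio Source");
    QAction* pinkNoise = sourceMenu->addAction("Pink Noise");
    QAction* sine440 = sourceMenu->addAction("Sine 440hz");
    pinkNoise->setCheckable(true);
    sine440->setCheckable(true);
    sine440->setChecked(true);

    // An exclusive QActionGroup unchecks the other action automatically.
    QActionGroup* group = new QActionGroup(sourceMenu);
    group->addAction(pinkNoise);
    group->addAction(sine440);

    window.show();
    return app.exec();
}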
@@ -1085,7 +1105,7 @@ void Menu::showLoginForCurrentDomain() {

void Menu::editPreferences() {
    if (!_preferencesDialog) {
-       _preferencesDialog = new PreferencesDialog(Application::getInstance()->getWindow());
+       _preferencesDialog = new PreferencesDialog();
        _preferencesDialog->show();
    } else {
        _preferencesDialog->close();

@@ -1468,7 +1488,9 @@ void Menu::showChat() {
        if (_chatWindow->isHidden()) {
            _chatWindow->show();
        }
+       _chatWindow->raise();
+       _chatWindow->activateWindow();
        _chatWindow->setFocus();
    } else {
        Application::getInstance()->getTrayIcon()->showMessage("Interface", "You need to login to be able to chat with others on this domain.");
    }

@@ -1480,6 +1502,9 @@ void Menu::toggleChat() {
    if (!_chatAction->isEnabled() && _chatWindow && AccountManager::getInstance().isLoggedIn()) {
        if (_chatWindow->isHidden()) {
            _chatWindow->show();
+           _chatWindow->raise();
+           _chatWindow->activateWindow();
+           _chatWindow->setFocus();
        } else {
            _chatWindow->hide();
        }
@@ -347,7 +347,9 @@ namespace MenuOption {
    const QString AudioSpatialProcessingSlightlyRandomSurfaces = "Slightly Random Surfaces";
    const QString AudioSpatialProcessingStereoSource = "Stereo Source";
    const QString AudioSpatialProcessingWithDiffusions = "With Diffusions";
-   const QString AudioToneInjection = "Inject Test Tone";
+   const QString AudioSourceInject = "Generated Audio";
+   const QString AudioSourcePinkNoise = "Pink Noise";
+   const QString AudioSourceSine440 = "Sine 440hz";
    const QString Avatars = "Avatars";
    const QString AvatarsReceiveShadows = "Avatars Receive Shadows";
    const QString Bandwidth = "Bandwidth Display";
@@ -223,6 +223,20 @@ void Avatar::measureMotionDerivatives(float deltaTime) {
    _lastOrientation = getOrientation();
}

+void Avatar::setPosition(const glm::vec3 position, bool overideReferential) {
+   AvatarData::setPosition(position, overideReferential);
+   _lastPosition = position;
+   _velocity = glm::vec3(0.0f);
+   _lastVelocity = glm::vec3(0.0f);
+}
+
+void Avatar::slamPosition(const glm::vec3& newPosition) {
+   _position = newPosition;
+   _lastPosition = newPosition;
+   _velocity = glm::vec3(0.0f);
+   _lastVelocity = glm::vec3(0.0f);
+}
+
void Avatar::setMouseRay(const glm::vec3 &origin, const glm::vec3 &direction) {
    _mouseRayOrigin = origin;
    _mouseRayDirection = direction;

@@ -160,6 +160,9 @@ public:
    /// Scales a world space position vector relative to the avatar position and scale
    /// \param vector position to be scaled. Will store the result
    void scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const;

+   void setPosition(const glm::vec3 position, bool overideReferential = false);
+   void slamPosition(const glm::vec3& newPosition);
+
public slots:
    void updateCollisionGroups();
@@ -108,7 +108,7 @@ JointReferential::JointReferential(Referential* referential, EntityTree* tree, A

    const EntityItem* item = _tree->findEntityByID(_entityID);
    const Model* model = getModel(item);
-   if (!isValid() || model == NULL || _jointIndex >= model->getJointStateCount()) {
+   if (!isValid() || model == NULL || _jointIndex >= (uint32_t)(model->getJointStateCount())) {
        _refScale = item->getRadius();
        model->getJointRotationInWorldFrame(_jointIndex, _refRotation);
        model->getJointPositionInWorldFrame(_jointIndex, _refPosition);

@@ -123,7 +123,7 @@ JointReferential::JointReferential(uint32_t jointIndex, const QUuid& entityID, E
    _type = JOINT;
    const EntityItem* item = _tree->findEntityByID(_entityID);
    const Model* model = getModel(item);
-   if (!isValid() || model == NULL || _jointIndex >= model->getJointStateCount()) {
+   if (!isValid() || model == NULL || _jointIndex >= (uint32_t)(model->getJointStateCount())) {
        qDebug() << "JointReferential::constructor(): Not Valid";
        _isValid = false;
        return;

@@ -142,7 +142,7 @@ JointReferential::JointReferential(uint32_t jointIndex, const QUuid& entityID, E
void JointReferential::update() {
    const EntityItem* item = _tree->findEntityByID(_entityID);
    const Model* model = getModel(item);
-   if (!isValid() || model == NULL || _jointIndex >= model->getJointStateCount()) {
+   if (!isValid() || model == NULL || _jointIndex >= (uint32_t)(model->getJointStateCount())) {
        return;
    }

@@ -189,4 +189,4 @@ int JointReferential::unpackExtraData(const unsigned char *sourceBuffer, int siz
    sourceBuffer += sizeof(_jointIndex);

    return sourceBuffer - startPosition;
-}
+}
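The (uint32_t) casts above, like the (int) cast in DeviceTracker further down, exist to silence signed/unsigned comparison warnings: when a signed and an unsigned integer are compared, the signed side is converted to unsigned, so a negative value suddenly compares as a huge positive one. A tiny illustration:

#include <cstdint>
#include <cstdio>

int main() {
    int jointStateCount = -1;          // e.g. "model not loaded yet"
    uint32_t jointIndex = 3;

    // -1 converted to uint32_t becomes 4294967295, so this comparison is
    // true even though it "shouldn't" be.
    if (jointIndex < static_cast<uint32_t>(jointStateCount)) {
        std::printf("unsigned promotion made -1 look enormous\n");
    }

    // Validating the signed value first (or casting the unsigned side, as the
    // DeviceTracker change does) avoids the surprise.
    if (jointStateCount >= 0 && jointIndex < static_cast<uint32_t>(jointStateCount)) {
        std::printf("never reached for a negative count\n");
    }
    return 0;
}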
@@ -1767,7 +1767,7 @@ void MyAvatar::maybeUpdateBillboard() {

void MyAvatar::goHome() {
    qDebug("Going Home!");
-   setPosition(START_LOCATION);
+   slamPosition(START_LOCATION);
}

void MyAvatar::increaseSize() {

@@ -1827,7 +1827,7 @@ void MyAvatar::goToLocationFromAddress(const QJsonObject& locationObject) {
        const float DISTANCE_TO_USER = 2.0f;
        glm::vec3 newPosition = glm::vec3(coordinateItems[0].toFloat(), coordinateItems[1].toFloat(),
            coordinateItems[2].toFloat()) - newOrientation * IDENTITY_FRONT * DISTANCE_TO_USER;
-       setPosition(newPosition);
+       slamPosition(newPosition);
        emit transformChanged();
    }
@@ -38,7 +38,7 @@ DeviceTracker* DeviceTracker::getDevice(const Name& name) {
}

DeviceTracker* DeviceTracker::getDevice(DeviceTracker::ID deviceID) {
-   if ((deviceID >= 0) && (deviceID < Singleton::get()->_devicesVector.size())) {
+   if ((deviceID >= 0) && (deviceID < (int)(Singleton::get()->_devicesVector.size()))) {
        return Singleton::get()->_devicesVector[ deviceID ];
    } else {
        return NULL;
@@ -383,7 +383,8 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
    }

    // restore our normal viewport
-   glViewport(0, 0, Application::getInstance()->getGLWidget()->width(), Application::getInstance()->getGLWidget()->height());
+   glViewport(0, 0, Application::getInstance()->getGLWidget()->getDeviceWidth(),
+              Application::getInstance()->getGLWidget()->getDeviceHeight());

    glMatrixMode(GL_PROJECTION);
    glPopMatrix();

@@ -400,7 +401,8 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) {

    glLoadIdentity();
-   gluOrtho2D(0, Application::getInstance()->getGLWidget()->width(), 0, Application::getInstance()->getGLWidget()->height());
+   gluOrtho2D(0, Application::getInstance()->getGLWidget()->getDeviceWidth(), 0,
+              Application::getInstance()->getGLWidget()->getDeviceHeight());

    glDisable(GL_DEPTH_TEST);

@@ -525,4 +527,4 @@ QSize OculusManager::getRenderTargetSize() {
#else
    return QSize(100, 100);
#endif
-}
+}
@@ -209,8 +209,8 @@ void PrioVR::renderCalibrationCountdown() {
    }
    static TextRenderer textRenderer(MONO_FONT_FAMILY, 18, QFont::Bold, false, TextRenderer::OUTLINE_EFFECT, 2);
    QByteArray text = "Assume T-Pose in " + QByteArray::number(secondsRemaining) + "...";
-   textRenderer.draw((Application::getInstance()->getGLWidget()->width() - textRenderer.computeWidth(text.constData())) / 2,
-       Application::getInstance()->getGLWidget()->height() / 2,
-       text);
+   textRenderer.draw((Application::getInstance()->getGLWidget()->getDeviceWidth() -
+       textRenderer.computeWidth(text.constData())) / 2, Application::getInstance()->getGLWidget()->getDeviceHeight() / 2,
+       text);
#endif
}
@@ -369,7 +369,7 @@ void SixenseManager::updateCalibration(const sixenseControllerData* controllers)
void SixenseManager::emulateMouse(PalmData* palm, int index) {
    Application* application = Application::getInstance();
    MyAvatar* avatar = application->getAvatar();
-   QGLWidget* widget = application->getGLWidget();
+   GLCanvas* widget = application->getGLWidget();
    QPoint pos;

    Qt::MouseButton bumperButton;

@@ -396,10 +396,10 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
        float yAngle = 0.5f - ((atan2(direction.z, direction.y) + M_PI_2));

        // Get the pixel range over which the xAngle and yAngle are scaled
-       float cursorRange = widget->width() * getCursorPixelRangeMult();
+       float cursorRange = widget->getDeviceWidth() * getCursorPixelRangeMult();

-       pos.setX(widget->width() / 2.0f + cursorRange * xAngle);
-       pos.setY(widget->height() / 2.0f + cursorRange * yAngle);
+       pos.setX(widget->getDeviceWidth() / 2.0f + cursorRange * xAngle);
+       pos.setY(widget->getDeviceHeight() / 2.0f + cursorRange * yAngle);

    }
@@ -33,8 +33,8 @@ bool TV3DManager::isConnected() {

void TV3DManager::connect() {
    Application* app = Application::getInstance();
-   int width = app->getGLWidget()->width();
-   int height = app->getGLWidget()->height();
+   int width = app->getGLWidget()->getDeviceWidth();
+   int height = app->getGLWidget()->getDeviceHeight();
    Camera& camera = *app->getCamera();

    configureCamera(camera, width, height);

@@ -90,8 +90,8 @@ void TV3DManager::display(Camera& whichCamera) {
    // left eye portal
    int portalX = 0;
    int portalY = 0;
-   int portalW = Application::getInstance()->getGLWidget()->width() / 2;
-   int portalH = Application::getInstance()->getGLWidget()->height();
+   int portalW = Application::getInstance()->getGLWidget()->getDeviceWidth() / 2;
+   int portalH = Application::getInstance()->getGLWidget()->getDeviceHeight();

    const bool glowEnabled = Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect);

@@ -137,7 +137,7 @@ void TV3DManager::display(Camera& whichCamera) {
    glDisable(GL_SCISSOR_TEST);

    // render right side view
-   portalX = Application::getInstance()->getGLWidget()->width() / 2;
+   portalX = Application::getInstance()->getGLWidget()->getDeviceWidth() / 2;
    glEnable(GL_SCISSOR_TEST);
    // render left side view
    glViewport(portalX, portalY, portalW, portalH);

@@ -165,7 +165,8 @@ void TV3DManager::display(Camera& whichCamera) {
    glDisable(GL_SCISSOR_TEST);

    // reset the viewport to how we started
-   glViewport(0, 0, Application::getInstance()->getGLWidget()->width(), Application::getInstance()->getGLWidget()->height());
+   glViewport(0, 0, Application::getInstance()->getGLWidget()->getDeviceWidth(),
+              Application::getInstance()->getGLWidget()->getDeviceHeight());

    if (glowEnabled) {
        Application::getInstance()->getGlowEffect()->render();
@ -116,7 +116,7 @@ void AmbientOcclusionEffect::render() {
|
|||
glGetIntegerv(GL_VIEWPORT, viewport);
|
||||
const int VIEWPORT_X_INDEX = 0;
|
||||
const int VIEWPORT_WIDTH_INDEX = 2;
|
||||
QSize widgetSize = Application::getInstance()->getGLWidget()->size();
|
||||
QSize widgetSize = Application::getInstance()->getGLWidget()->getDeviceSize();
|
||||
float sMin = viewport[VIEWPORT_X_INDEX] / (float)widgetSize.width();
|
||||
float sWidth = viewport[VIEWPORT_WIDTH_INDEX] / (float)widgetSize.width();
|
||||
|
||||
|
|
|
@ -256,8 +256,8 @@ void ControllerScriptingInterface::releaseJoystick(int joystickIndex) {
|
|||
}
|
||||
|
||||
glm::vec2 ControllerScriptingInterface::getViewportDimensions() const {
|
||||
QGLWidget* widget = Application::getInstance()->getGLWidget();
|
||||
return glm::vec2(widget->width(), widget->height());
|
||||
GLCanvas* widget = Application::getInstance()->getGLWidget();
|
||||
return glm::vec2(widget->getDeviceWidth(), widget->getDeviceHeight());
|
||||
}
|
||||
|
||||
AbstractInputController* ControllerScriptingInterface::createInputController(const QString& deviceName, const QString& tracker) {
|
||||
|
|
|
@ -70,7 +70,7 @@ void ApplicationOverlay::renderOverlay(bool renderToTexture) {
|
|||
Application* application = Application::getInstance();
|
||||
|
||||
Overlays& overlays = application->getOverlays();
|
||||
QGLWidget* glWidget = application->getGLWidget();
|
||||
GLCanvas* glWidget = application->getGLWidget();
|
||||
MyAvatar* myAvatar = application->getAvatar();
|
||||
|
||||
//Handle fading and deactivation/activation of UI
|
||||
|
@ -99,14 +99,14 @@ void ApplicationOverlay::renderOverlay(bool renderToTexture) {
|
|||
glPushMatrix();
|
||||
|
||||
glLoadIdentity();
|
||||
gluOrtho2D(0, glWidget->width(), glWidget->height(), 0);
|
||||
gluOrtho2D(0, glWidget->getDeviceWidth(), glWidget->getDeviceHeight(), 0);
|
||||
glDisable(GL_DEPTH_TEST);
|
||||
glDisable(GL_LIGHTING);
|
||||
|
||||
renderAudioMeter();
|
||||
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::HeadMouse)) {
|
||||
myAvatar->renderHeadMouse(glWidget->width(), glWidget->height());
|
||||
myAvatar->renderHeadMouse(glWidget->getDeviceWidth(), glWidget->getDeviceHeight());
|
||||
}
|
||||
|
||||
renderStatsAndLogs();
|
||||
|
@ -141,7 +141,7 @@ void ApplicationOverlay::displayOverlayTexture() {
|
|||
}
|
||||
|
||||
Application* application = Application::getInstance();
|
||||
QGLWidget* glWidget = application->getGLWidget();
|
||||
GLCanvas* glWidget = application->getGLWidget();
|
||||
|
||||
|
||||
glEnable(GL_TEXTURE_2D);
|
||||
|
@ -152,16 +152,16 @@ void ApplicationOverlay::displayOverlayTexture() {
|
|||
glPushMatrix();
|
||||
|
||||
glLoadIdentity();
|
||||
gluOrtho2D(0, glWidget->width(), glWidget->height(), 0);
|
||||
gluOrtho2D(0, glWidget->getDeviceWidth(), glWidget->getDeviceHeight(), 0);
|
||||
glDisable(GL_DEPTH_TEST);
|
||||
glDisable(GL_LIGHTING);
|
||||
glEnable(GL_BLEND);
|
||||
|
||||
glBegin(GL_QUADS);
|
||||
glColor4f(1.0f, 1.0f, 1.0f, _alpha);
|
||||
glTexCoord2f(0, 0); glVertex2i(0, glWidget->height());
|
||||
glTexCoord2f(1, 0); glVertex2i(glWidget->width(), glWidget->height());
|
||||
glTexCoord2f(1, 1); glVertex2i(glWidget->width(), 0);
|
||||
glTexCoord2f(0, 0); glVertex2i(0, glWidget->getDeviceHeight());
|
||||
glTexCoord2f(1, 0); glVertex2i(glWidget->getDeviceWidth(), glWidget->getDeviceHeight());
|
||||
glTexCoord2f(1, 1); glVertex2i(glWidget->getDeviceWidth(), 0);
|
||||
glTexCoord2f(0, 1); glVertex2i(0, 0);
|
||||
glEnd();
|
||||
glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
|
||||
|
@ -275,7 +275,7 @@ bool raySphereIntersect(const glm::vec3 &dir, const glm::vec3 &origin, float r,
|
|||
QPoint ApplicationOverlay::getPalmClickLocation(const PalmData *palm) const {
|
||||
|
||||
Application* application = Application::getInstance();
|
||||
QGLWidget* glWidget = application->getGLWidget();
|
||||
GLCanvas* glWidget = application->getGLWidget();
|
||||
MyAvatar* myAvatar = application->getAvatar();
|
||||
|
||||
glm::vec3 tip = myAvatar->getLaserPointerTipPosition(palm);
|
||||
|
@ -305,8 +305,8 @@ QPoint ApplicationOverlay::getPalmClickLocation(const PalmData *palm) const {
|
|||
float u = asin(collisionPos.x) / (_textureFov)+0.5f;
|
||||
float v = 1.0 - (asin(collisionPos.y) / (_textureFov)+0.5f);
|
||||
|
||||
rv.setX(u * glWidget->width());
|
||||
rv.setY(v * glWidget->height());
|
||||
rv.setX(u * glWidget->getDeviceWidth());
|
||||
rv.setY(v * glWidget->getDeviceHeight());
|
||||
}
|
||||
} else {
|
||||
//if they did not click on the overlay, just set the coords to INT_MAX
|
||||
|
@ -323,8 +323,8 @@ QPoint ApplicationOverlay::getPalmClickLocation(const PalmData *palm) const {
|
|||
ndcSpacePos = glm::vec3(clipSpacePos) / clipSpacePos.w;
|
||||
}
|
||||
|
||||
rv.setX(((ndcSpacePos.x + 1.0) / 2.0) * glWidget->width());
|
||||
rv.setY((1.0 - ((ndcSpacePos.y + 1.0) / 2.0)) * glWidget->height());
|
||||
rv.setX(((ndcSpacePos.x + 1.0) / 2.0) * glWidget->getDeviceWidth());
|
||||
rv.setY((1.0 - ((ndcSpacePos.y + 1.0) / 2.0)) * glWidget->getDeviceHeight());
|
||||
}
|
||||
return rv;
|
||||
}
|
||||
|
@ -496,11 +496,11 @@ void ApplicationOverlay::displayOverlayTexture3DTV(Camera& whichCamera, float as
|
|||
//draw the mouse pointer
|
||||
glBindTexture(GL_TEXTURE_2D, _crosshairTexture);
|
||||
|
||||
const float reticleSize = 40.0f / application->getGLWidget()->width() * quadWidth;
|
||||
const float reticleSize = 40.0f / application->getGLWidget()->getDeviceWidth() * quadWidth;
|
||||
x -= reticleSize / 2.0f;
|
||||
y += reticleSize / 2.0f;
|
||||
const float mouseX = (application->getMouseX() / (float)application->getGLWidget()->width()) * quadWidth;
|
||||
const float mouseY = (1.0 - (application->getMouseY() / (float)application->getGLWidget()->height())) * quadHeight;
|
||||
const float mouseX = (application->getMouseX() / (float)application->getGLWidget()->getDeviceWidth()) * quadWidth;
|
||||
const float mouseY = (1.0 - (application->getMouseY() / (float)application->getGLWidget()->getDeviceHeight())) * quadHeight;
|
||||
|
||||
glBegin(GL_QUADS);
|
||||
|
||||
|
@ -564,7 +564,7 @@ void ApplicationOverlay::renderPointers() {
|
|||
|
||||
void ApplicationOverlay::renderControllerPointers() {
|
||||
Application* application = Application::getInstance();
|
||||
QGLWidget* glWidget = application->getGLWidget();
|
||||
GLCanvas* glWidget = application->getGLWidget();
|
||||
MyAvatar* myAvatar = application->getAvatar();
|
||||
|
||||
//Static variables used for storing controller state
|
||||
|
@ -671,14 +671,14 @@ void ApplicationOverlay::renderControllerPointers() {
|
|||
float yAngle = 0.5f - ((atan2(direction.z, direction.y) + M_PI_2));
|
||||
|
||||
// Get the pixel range over which the xAngle and yAngle are scaled
|
||||
float cursorRange = glWidget->width() * application->getSixenseManager()->getCursorPixelRangeMult();
|
||||
float cursorRange = glWidget->getDeviceWidth() * application->getSixenseManager()->getCursorPixelRangeMult();
|
||||
|
||||
mouseX = (glWidget->width() / 2.0f + cursorRange * xAngle);
|
||||
mouseY = (glWidget->height() / 2.0f + cursorRange * yAngle);
|
||||
mouseX = (glWidget->getDeviceWidth() / 2.0f + cursorRange * xAngle);
|
||||
mouseY = (glWidget->getDeviceHeight() / 2.0f + cursorRange * yAngle);
|
||||
}
|
||||
|
||||
//If the cursor is out of the screen then don't render it
|
||||
if (mouseX < 0 || mouseX >= glWidget->width() || mouseY < 0 || mouseY >= glWidget->height()) {
|
||||
if (mouseX < 0 || mouseX >= glWidget->getDeviceWidth() || mouseY < 0 || mouseY >= glWidget->getDeviceHeight()) {
|
||||
_reticleActive[index] = false;
|
||||
continue;
|
||||
}
|
||||
|
@ -706,11 +706,11 @@ void ApplicationOverlay::renderControllerPointers() {
|
|||
void ApplicationOverlay::renderPointersOculus(const glm::vec3& eyePos) {
|
||||
|
||||
Application* application = Application::getInstance();
|
||||
QGLWidget* glWidget = application->getGLWidget();
|
||||
GLCanvas* glWidget = application->getGLWidget();
|
||||
glm::vec3 cursorVerts[4];
|
||||
|
||||
const int widgetWidth = glWidget->width();
|
||||
const int widgetHeight = glWidget->height();
|
||||
const int widgetWidth = glWidget->getDeviceWidth();
|
||||
const int widgetHeight = glWidget->getDeviceHeight();
|
||||
|
||||
const float reticleSize = 50.0f;
|
||||
|
||||
|
@ -848,10 +848,10 @@ void ApplicationOverlay::renderPointersOculus(const glm::vec3& eyePos) {
|
|||
void ApplicationOverlay::renderMagnifier(int mouseX, int mouseY, float sizeMult, bool showBorder) const
|
||||
{
|
||||
Application* application = Application::getInstance();
|
||||
QGLWidget* glWidget = application->getGLWidget();
|
||||
GLCanvas* glWidget = application->getGLWidget();
|
||||
|
||||
const int widgetWidth = glWidget->width();
|
||||
const int widgetHeight = glWidget->height();
|
||||
const int widgetWidth = glWidget->getDeviceWidth();
|
||||
const int widgetHeight = glWidget->getDeviceHeight();
|
||||
|
||||
const float magnifyWidth = MAGNIFY_WIDTH * sizeMult;
|
||||
const float magnifyHeight = MAGNIFY_HEIGHT * sizeMult;
|
||||
|
@ -960,7 +960,7 @@ void ApplicationOverlay::renderAudioMeter() {
|
|||
|
||||
Application* application = Application::getInstance();
|
||||
|
||||
QGLWidget* glWidget = application->getGLWidget();
|
||||
GLCanvas* glWidget = application->getGLWidget();
|
||||
Audio* audio = application->getAudio();
|
||||
|
||||
// Display a single screen-size quad to create an alpha blended 'collision' flash
|
||||
|
@ -968,7 +968,8 @@ void ApplicationOverlay::renderAudioMeter() {
|
|||
float collisionSoundMagnitude = audio->getCollisionSoundMagnitude();
|
||||
const float VISIBLE_COLLISION_SOUND_MAGNITUDE = 0.5f;
|
||||
if (collisionSoundMagnitude > VISIBLE_COLLISION_SOUND_MAGNITUDE) {
|
||||
renderCollisionOverlay(glWidget->width(), glWidget->height(), audio->getCollisionSoundMagnitude());
|
||||
renderCollisionOverlay(glWidget->getDeviceWidth(), glWidget->getDeviceHeight(),
|
||||
audio->getCollisionSoundMagnitude());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1018,16 +1019,16 @@ void ApplicationOverlay::renderAudioMeter() {
|
|||
if ((audio->getTimeSinceLastClip() > 0.f) && (audio->getTimeSinceLastClip() < CLIPPING_INDICATOR_TIME)) {
|
||||
const float MAX_MAGNITUDE = 0.7f;
|
||||
float magnitude = MAX_MAGNITUDE * (1 - audio->getTimeSinceLastClip() / CLIPPING_INDICATOR_TIME);
|
||||
renderCollisionOverlay(glWidget->width(), glWidget->height(), magnitude, 1.0f);
|
||||
renderCollisionOverlay(glWidget->getDeviceWidth(), glWidget->getDeviceHeight(), magnitude, 1.0f);
|
||||
}
|
||||
|
||||
audio->renderToolBox(MIRROR_VIEW_LEFT_PADDING + AUDIO_METER_GAP,
|
||||
audioMeterY,
|
||||
Menu::getInstance()->isOptionChecked(MenuOption::Mirror));
|
||||
|
||||
audio->renderScope(glWidget->width(), glWidget->height());
|
||||
audio->renderScope(glWidget->getDeviceWidth(), glWidget->getDeviceHeight());
|
||||
|
||||
audio->renderStats(WHITE_TEXT, glWidget->width(), glWidget->height());
|
||||
audio->renderStats(WHITE_TEXT, glWidget->getDeviceWidth(), glWidget->getDeviceHeight());
|
||||
|
||||
glBegin(GL_QUADS);
|
||||
if (isClipping) {
|
||||
|
@ -1089,7 +1090,7 @@ void ApplicationOverlay::renderStatsAndLogs() {
|
|||
|
||||
Application* application = Application::getInstance();
|
||||
|
||||
QGLWidget* glWidget = application->getGLWidget();
|
||||
GLCanvas* glWidget = application->getGLWidget();
|
||||
const OctreePacketProcessor& octreePacketProcessor = application->getOctreePacketProcessor();
|
||||
BandwidthMeter* bandwidthMeter = application->getBandwidthMeter();
|
||||
NodeBounds& nodeBoundsDisplay = application->getNodeBoundsDisplay();
|
||||
|
@ -1103,11 +1104,12 @@ void ApplicationOverlay::renderStatsAndLogs() {
|
|||
int horizontalOffset = MIRROR_VIEW_WIDTH + MIRROR_VIEW_LEFT_PADDING * 2;
|
||||
int voxelPacketsToProcess = octreePacketProcessor.packetsToProcessCount();
|
||||
// Onscreen text about position, servers, etc
|
||||
Stats::getInstance()->display(WHITE_TEXT, horizontalOffset, application->getFps(), application->getPacketsPerSecond(), application->getBytesPerSecond(), voxelPacketsToProcess);
|
||||
Stats::getInstance()->display(WHITE_TEXT, horizontalOffset, application->getFps(),
|
||||
application->getPacketsPerSecond(), application->getBytesPerSecond(), voxelPacketsToProcess);
|
||||
// Bandwidth meter
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::Bandwidth)) {
|
||||
Stats::drawBackground(0x33333399, glWidget->width() - 296, glWidget->height() - 68, 296, 68);
|
||||
bandwidthMeter->render(glWidget->width(), glWidget->height());
|
||||
Stats::drawBackground(0x33333399, glWidget->getDeviceWidth() - 296, glWidget->getDeviceHeight() - 68, 296, 68);
|
||||
bandwidthMeter->render(glWidget->getDeviceWidth(), glWidget->getDeviceHeight());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1120,7 +1122,8 @@ void ApplicationOverlay::renderStatsAndLogs() {
|
|||
(Menu::getInstance()->isOptionChecked(MenuOption::Stats) &&
|
||||
Menu::getInstance()->isOptionChecked(MenuOption::Bandwidth))
|
||||
? 80 : 20;
|
||||
drawText(glWidget->width() - 100, glWidget->height() - timerBottom, 0.30f, 0.0f, 0, frameTimer, WHITE_TEXT);
|
||||
drawText(glWidget->getDeviceWidth() - 100, glWidget->getDeviceHeight() - timerBottom,
|
||||
0.30f, 0.0f, 0, frameTimer, WHITE_TEXT);
|
||||
}
|
||||
nodeBoundsDisplay.drawOverlay();
|
||||
}
|
||||
|
@ -1243,9 +1246,9 @@ void ApplicationOverlay::renderDomainConnectionStatusBorder() {
|
|||
NodeList* nodeList = NodeList::getInstance();
|
||||
|
||||
if (nodeList && !nodeList->getDomainHandler().isConnected()) {
|
||||
QGLWidget* glWidget = Application::getInstance()->getGLWidget();
|
||||
int right = glWidget->width();
|
||||
int bottom = glWidget->height();
|
||||
GLCanvas* glWidget = Application::getInstance()->getGLWidget();
|
||||
int right = glWidget->getDeviceWidth();
|
||||
int bottom = glWidget->getDeviceHeight();
|
||||
|
||||
glColor3f(CONNECTION_STATUS_BORDER_COLOR[0],
|
||||
CONNECTION_STATUS_BORDER_COLOR[1],
|
||||
|
@ -1264,7 +1267,7 @@ void ApplicationOverlay::renderDomainConnectionStatusBorder() {
|
|||
}
|
||||
|
||||
QOpenGLFramebufferObject* ApplicationOverlay::getFramebufferObject() {
|
||||
QSize size = Application::getInstance()->getGLWidget()->size();
|
||||
QSize size = Application::getInstance()->getGLWidget()->getDeviceSize();
|
||||
if (!_framebufferObject || _framebufferObject->size() != size) {
|
||||
|
||||
delete _framebufferObject;
|
||||
|
|
|
@ -18,28 +18,27 @@
|
|||
#include <QTimer>
|
||||
|
||||
#include "Application.h"
|
||||
#include "ChatMessageArea.h"
|
||||
#include "FlowLayout.h"
|
||||
#include "qtimespan.h"
|
||||
#include "ui_chatWindow.h"
|
||||
#include "UIUtil.h"
|
||||
#include "XmppClient.h"
|
||||
#include "ChatMessageArea.h"
|
||||
|
||||
#include "ui_chatWindow.h"
|
||||
#include "ChatWindow.h"
|
||||
|
||||
|
||||
|
||||
const int NUM_MESSAGES_TO_TIME_STAMP = 20;
|
||||
|
||||
const float OPACITY_ACTIVE = 1.0f;
|
||||
const float OPACITY_INACTIVE = 0.8f;
|
||||
|
||||
const QRegularExpression regexLinks("((?:(?:ftp)|(?:https?)|(?:hifi))://\\S+)");
|
||||
const QRegularExpression regexHifiLinks("([#@]\\S+)");
|
||||
const QString mentionSoundsPath("/mention-sounds/");
|
||||
const QString mentionRegex("@(\\b%1\\b)");
|
||||
|
||||
ChatWindow::ChatWindow(QWidget* parent) :
|
||||
FramelessDialog(parent, 0, POSITION_RIGHT),
|
||||
QWidget(parent, Qt::Window | Qt::CustomizeWindowHint | Qt::WindowTitleHint | Qt::WindowMinMaxButtonsHint |
|
||||
Qt::WindowCloseButtonHint),
|
||||
ui(new Ui::ChatWindow),
|
||||
numMessagesAfterLastTimeStamp(0),
|
||||
_mousePressed(false),
|
||||
|
@ -82,7 +81,6 @@ ChatWindow::ChatWindow(QWidget* parent) :
|
|||
startTimerForTimeStamps();
|
||||
} else {
|
||||
ui->numOnlineLabel->hide();
|
||||
ui->closeButton->hide();
|
||||
ui->usersArea->hide();
|
||||
ui->messagesScrollArea->hide();
|
||||
ui->messagePlainTextEdit->hide();
|
||||
|
@ -112,17 +110,25 @@ ChatWindow::~ChatWindow() {
|
|||
void ChatWindow::keyPressEvent(QKeyEvent* event) {
|
||||
if (event->key() == Qt::Key_Escape) {
|
||||
Application::getInstance()->getWindow()->activateWindow();
|
||||
hide();
|
||||
} else {
|
||||
FramelessDialog::keyPressEvent(event);
|
||||
QWidget::keyPressEvent(event);
|
||||
}
|
||||
}
|
||||
|
||||
void ChatWindow::showEvent(QShowEvent* event) {
|
||||
FramelessDialog::showEvent(event);
|
||||
QWidget::showEvent(event);
|
||||
|
||||
if (!event->spontaneous()) {
|
||||
ui->messagePlainTextEdit->setFocus();
|
||||
}
|
||||
const QRect parentGeometry = parentWidget()->geometry();
|
||||
int titleBarHeight = UIUtil::getWindowTitleBarHeight(this);
|
||||
int menuBarHeight = Menu::getInstance()->geometry().height();
|
||||
int topMargin = titleBarHeight + menuBarHeight;
|
||||
|
||||
setGeometry(parentGeometry.topRight().x() - size().width() + 1, parentGeometry.topRight().y() + topMargin,
|
||||
size().width(), parentWidget()->height() - topMargin);
|
||||
|
||||
Application::processEvents();
|
||||
|
||||
|
@ -167,7 +173,7 @@ bool ChatWindow::eventFilter(QObject* sender, QEvent* event) {
|
|||
return true;
|
||||
}
|
||||
}
|
||||
return FramelessDialog::eventFilter(sender, event);
|
||||
return QWidget::eventFilter(sender, event);
|
||||
}
|
||||
|
||||
void ChatWindow::addTimeStamp() {
|
||||
|
@ -214,7 +220,6 @@ void ChatWindow::startTimerForTimeStamps() {
|
|||
void ChatWindow::connected() {
|
||||
ui->connectingToXMPPLabel->hide();
|
||||
ui->numOnlineLabel->show();
|
||||
ui->closeButton->show();
|
||||
ui->usersArea->show();
|
||||
ui->messagesScrollArea->show();
|
||||
ui->messagePlainTextEdit->show();
|
||||
|
@ -393,9 +398,7 @@ void ChatWindow::scrollToBottom() {
|
|||
|
||||
bool ChatWindow::event(QEvent* event) {
|
||||
if (event->type() == QEvent::WindowActivate) {
|
||||
setWindowOpacity(OPACITY_ACTIVE);
|
||||
} else if (event->type() == QEvent::WindowDeactivate) {
|
||||
setWindowOpacity(OPACITY_INACTIVE);
|
||||
ui->messagePlainTextEdit->setFocus();
|
||||
}
|
||||
return FramelessDialog::event(event);
|
||||
return QWidget::event(event);
|
||||
}
|
||||
|
|
|
@ -38,7 +38,7 @@ const int AUTO_SCROLL_THRESHOLD = 20;
|
|||
class ChatWindow;
|
||||
}
|
||||
|
||||
class ChatWindow : public FramelessDialog {
|
||||
class ChatWindow : public QWidget {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
|
|
|
@ -52,7 +52,7 @@ JSConsole::JSConsole(QWidget* parent, ScriptEngine* scriptEngine) :
|
|||
|
||||
|
||||
if (_scriptEngine == NULL) {
|
||||
_scriptEngine = Application::getInstance()->loadScript();
|
||||
_scriptEngine = Application::getInstance()->loadScript(QString(), false);
|
||||
}
|
||||
|
||||
connect(_scriptEngine, SIGNAL(evaluationFinished(QScriptValue, bool)),
|
||||
|
|
|
@ -920,7 +920,8 @@ void SetSpannerTool::applyEdit(const AttributePointer& attribute, const SharedOb
|
|||
|
||||
Application::getInstance()->getTextureCache()->getPrimaryFramebufferObject()->release();
|
||||
|
||||
glViewport(0, 0, Application::getInstance()->getGLWidget()->width(), Application::getInstance()->getGLWidget()->height());
|
||||
glViewport(0, 0, Application::getInstance()->getGLWidget()->getDeviceWidth(),
|
||||
Application::getInstance()->getGLWidget()->getDeviceHeight());
|
||||
|
||||
// send the images off to the lab for processing
|
||||
QThreadPool::globalInstance()->start(new Voxelizer(size, cellBounds,
|
||||
|
|
|
@ -40,9 +40,9 @@ void NodeBounds::draw() {
|
|||
// Compute ray to find selected nodes later on. We can't use the pre-computed ray in Application because it centers
|
||||
// itself after the cursor disappears.
|
||||
Application* application = Application::getInstance();
|
||||
QGLWidget* glWidget = application->getGLWidget();
|
||||
float mouseX = application->getMouseX() / (float)glWidget->width();
|
||||
float mouseY = application->getMouseY() / (float)glWidget->height();
|
||||
GLCanvas* glWidget = application->getGLWidget();
|
||||
float mouseX = application->getMouseX() / (float)glWidget->getDeviceWidth();
|
||||
float mouseY = application->getMouseY() / (float)glWidget->getDeviceHeight();
|
||||
glm::vec3 mouseRayOrigin;
|
||||
glm::vec3 mouseRayDirection;
|
||||
application->getViewFrustum()->computePickRay(mouseX, mouseY, mouseRayOrigin, mouseRayDirection);
|
||||
|
|
|
@ -16,16 +16,15 @@
|
|||
#include "PreferencesDialog.h"
|
||||
#include "UserActivityLogger.h"
|
||||
|
||||
const int SCROLL_PANEL_BOTTOM_MARGIN = 30;
|
||||
const int OK_BUTTON_RIGHT_MARGIN = 30;
|
||||
const int BUTTONS_TOP_MARGIN = 24;
|
||||
const int PREFERENCES_HEIGHT_PADDING = 20;
|
||||
|
||||
PreferencesDialog::PreferencesDialog(QWidget* parent, Qt::WindowFlags flags) : FramelessDialog(parent, flags, POSITION_LEFT) {
|
||||
PreferencesDialog::PreferencesDialog() :
|
||||
QDialog(Application::getInstance()->getWindow()) {
|
||||
|
||||
setAttribute(Qt::WA_DeleteOnClose);
|
||||
|
||||
ui.setupUi(this);
|
||||
setStyleSheetFile("styles/preferences.qss");
|
||||
loadPreferences();
|
||||
connect(ui.closeButton, &QPushButton::clicked, this, &QDialog::close);
|
||||
|
||||
connect(ui.buttonBrowseHead, &QPushButton::clicked, this, &PreferencesDialog::openHeadModelBrowser);
|
||||
connect(ui.buttonBrowseBody, &QPushButton::clicked, this, &PreferencesDialog::openBodyModelBrowser);
|
||||
|
@ -33,6 +32,9 @@ PreferencesDialog::PreferencesDialog(QWidget* parent, Qt::WindowFlags flags) : F
|
|||
connect(ui.buttonBrowseScriptsLocation, &QPushButton::clicked, this, &PreferencesDialog::openScriptsLocationBrowser);
|
||||
connect(ui.buttonReloadDefaultScripts, &QPushButton::clicked,
|
||||
Application::getInstance(), &Application::loadDefaultScripts);
|
||||
// move dialog to left side
|
||||
move(parentWidget()->geometry().topLeft());
|
||||
setFixedHeight(parentWidget()->size().height() - PREFERENCES_HEIGHT_PADDING);
|
||||
}
|
||||
|
||||
void PreferencesDialog::accept() {
|
||||
|
@ -49,78 +51,48 @@ void PreferencesDialog::setSkeletonUrl(QString modelUrl) {
|
|||
}
|
||||
|
||||
void PreferencesDialog::openHeadModelBrowser() {
|
||||
setWindowFlags(windowFlags() & ~Qt::WindowStaysOnTopHint);
|
||||
show();
|
||||
|
||||
ModelsBrowser modelBrowser(HEAD_MODEL);
|
||||
connect(&modelBrowser, &ModelsBrowser::selected, this, &PreferencesDialog::setHeadUrl);
|
||||
modelBrowser.browse();
|
||||
|
||||
setWindowFlags(windowFlags() | Qt::WindowStaysOnTopHint);
|
||||
show();
|
||||
}
|
||||
|
||||
void PreferencesDialog::openBodyModelBrowser() {
|
||||
setWindowFlags(windowFlags() & ~Qt::WindowStaysOnTopHint);
|
||||
show();
|
||||
|
||||
ModelsBrowser modelBrowser(SKELETON_MODEL);
|
||||
connect(&modelBrowser, &ModelsBrowser::selected, this, &PreferencesDialog::setSkeletonUrl);
|
||||
modelBrowser.browse();
|
||||
|
||||
setWindowFlags(windowFlags() | Qt::WindowStaysOnTopHint);
|
||||
show();
|
||||
}
|
||||
|
||||
void PreferencesDialog::openSnapshotLocationBrowser() {
|
||||
setWindowFlags(windowFlags() & ~Qt::WindowStaysOnTopHint);
|
||||
show();
|
||||
|
||||
QString dir = QFileDialog::getExistingDirectory(this, tr("Snapshots Location"),
|
||||
QStandardPaths::writableLocation(QStandardPaths::DesktopLocation),
|
||||
QFileDialog::ShowDirsOnly | QFileDialog::DontResolveSymlinks);
|
||||
if (!dir.isNull() && !dir.isEmpty()) {
|
||||
ui.snapshotLocationEdit->setText(dir);
|
||||
}
|
||||
|
||||
setWindowFlags(windowFlags() | Qt::WindowStaysOnTopHint);
|
||||
show();
|
||||
}
|
||||
|
||||
void PreferencesDialog::openScriptsLocationBrowser() {
|
||||
setWindowFlags(windowFlags() & ~Qt::WindowStaysOnTopHint);
|
||||
show();
|
||||
|
||||
QString dir = QFileDialog::getExistingDirectory(this, tr("Scripts Location"),
|
||||
ui.scriptsLocationEdit->text(),
|
||||
QFileDialog::ShowDirsOnly | QFileDialog::DontResolveSymlinks);
|
||||
if (!dir.isNull() && !dir.isEmpty()) {
|
||||
ui.scriptsLocationEdit->setText(dir);
|
||||
}
|
||||
|
||||
setWindowFlags(windowFlags() | Qt::WindowStaysOnTopHint);
|
||||
show();
|
||||
}
|
||||
|
||||
void PreferencesDialog::resizeEvent(QResizeEvent *resizeEvent) {
|
||||
|
||||
|
||||
// keep buttons panel at the bottom
|
||||
ui.buttonsPanel->setGeometry(0, size().height() - ui.buttonsPanel->height(), size().width(), ui.buttonsPanel->height());
|
||||
|
||||
ui.buttonsPanel->setGeometry(0,
|
||||
size().height() - ui.buttonsPanel->height(),
|
||||
size().width(),
|
||||
ui.buttonsPanel->height());
|
||||
|
||||
// set width and height of srcollarea to match bottom panel and width
|
||||
ui.scrollArea->setGeometry(ui.scrollArea->geometry().x(), ui.scrollArea->geometry().y(),
|
||||
size().width(),
|
||||
size().height() - ui.buttonsPanel->height() -
|
||||
SCROLL_PANEL_BOTTOM_MARGIN - ui.scrollArea->geometry().y());
|
||||
|
||||
// move Save button to left position
|
||||
ui.defaultButton->move(size().width() - OK_BUTTON_RIGHT_MARGIN - ui.defaultButton->size().width(), BUTTONS_TOP_MARGIN);
|
||||
|
||||
// move Save button to left position
|
||||
ui.cancelButton->move(ui.defaultButton->pos().x() - ui.cancelButton->size().width(), BUTTONS_TOP_MARGIN);
|
||||
|
||||
// move close button
|
||||
ui.closeButton->move(size().width() - OK_BUTTON_RIGHT_MARGIN - ui.closeButton->size().width(), ui.closeButton->pos().y());
|
||||
size().height() - ui.buttonsPanel->height() - ui.scrollArea->geometry().y());
|
||||
|
||||
}
|
||||
|
||||
void PreferencesDialog::loadPreferences() {
|
||||
|
|
|
@ -12,20 +12,20 @@
|
|||
#ifndef hifi_PreferencesDialog_h
|
||||
#define hifi_PreferencesDialog_h
|
||||
|
||||
#include "FramelessDialog.h"
|
||||
#include "ui_preferencesDialog.h"
|
||||
|
||||
#include <QDialog>
|
||||
#include <QString>
|
||||
|
||||
class PreferencesDialog : public FramelessDialog {
|
||||
class PreferencesDialog : public QDialog {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
PreferencesDialog(QWidget* parent = 0, Qt::WindowFlags flags = 0);
|
||||
PreferencesDialog();
|
||||
|
||||
protected:
|
||||
void resizeEvent(QResizeEvent* resizeEvent);
|
||||
|
||||
|
||||
private:
|
||||
void loadPreferences();
|
||||
void savePreferences();
|
||||
|
|
|
@ -69,7 +69,7 @@ void RunningScriptsWidget::updateFileFilter(const QString& filter) {
|
|||
|
||||
void RunningScriptsWidget::loadScriptFromList(const QModelIndex& index) {
|
||||
QVariant scriptFile = _proxyModel.data(index, ScriptsModel::ScriptPath);
|
||||
Application::getInstance()->loadScript(scriptFile.toString(), false, false);
|
||||
Application::getInstance()->loadScript(scriptFile.toString());
|
||||
}
|
||||
|
||||
void RunningScriptsWidget::loadSelectedScript() {
|
||||
|
|
|
@ -93,7 +93,7 @@ bool ScriptEditorWidget::setRunning(bool run) {
|
|||
|
||||
if (run) {
|
||||
const QString& scriptURLString = QUrl(_currentScript).toString();
|
||||
_scriptEngine = Application::getInstance()->loadScript(scriptURLString, true);
|
||||
_scriptEngine = Application::getInstance()->loadScript(scriptURLString, true, true);
|
||||
connect(_scriptEngine, &ScriptEngine::runningStateChanged, this, &ScriptEditorWidget::runningStateChanged);
|
||||
connect(_scriptEngine, &ScriptEngine::errorMessage, this, &ScriptEditorWidget::onScriptError);
|
||||
connect(_scriptEngine, &ScriptEngine::printedMessage, this, &ScriptEditorWidget::onScriptPrint);
|
||||
|
|
|
@ -56,8 +56,8 @@ Stats::Stats():
|
|||
_metavoxelReceiveProgress(0),
|
||||
_metavoxelReceiveTotal(0)
|
||||
{
|
||||
QGLWidget* glWidget = Application::getInstance()->getGLWidget();
|
||||
resetWidth(glWidget->width(), 0);
|
||||
GLCanvas* glWidget = Application::getInstance()->getGLWidget();
|
||||
resetWidth(glWidget->getDeviceWidth(), 0);
|
||||
}
|
||||
|
||||
void Stats::toggleExpanded() {
|
||||
|
@ -67,7 +67,7 @@ void Stats::toggleExpanded() {
|
|||
// called on mouse click release
|
||||
// check for clicks over stats in order to expand or contract them
|
||||
void Stats::checkClick(int mouseX, int mouseY, int mouseDragStartedX, int mouseDragStartedY, int horizontalOffset) {
|
||||
QGLWidget* glWidget = Application::getInstance()->getGLWidget();
|
||||
GLCanvas* glWidget = Application::getInstance()->getGLWidget();
|
||||
|
||||
if (0 != glm::compMax(glm::abs(glm::ivec2(mouseX - mouseDragStartedX, mouseY - mouseDragStartedY)))) {
|
||||
// not worried about dragging on stats
|
||||
|
@ -114,7 +114,7 @@ void Stats::checkClick(int mouseX, int mouseY, int mouseDragStartedX, int mouseD
|
|||
// top-right stats click
|
||||
lines = _expanded ? 11 : 3;
|
||||
statsHeight = lines * STATS_PELS_PER_LINE + 10;
|
||||
statsWidth = glWidget->width() - statsX;
|
||||
statsWidth = glWidget->getDeviceWidth() - statsX;
|
||||
if (mouseX > statsX && mouseX < statsX + statsWidth && mouseY > statsY && mouseY < statsY + statsHeight) {
|
||||
toggleExpanded();
|
||||
return;
|
||||
|
@ -122,8 +122,8 @@ void Stats::checkClick(int mouseX, int mouseY, int mouseDragStartedX, int mouseD
|
|||
}
|
||||
|
||||
void Stats::resetWidth(int width, int horizontalOffset) {
|
||||
QGLWidget* glWidget = Application::getInstance()->getGLWidget();
|
||||
int extraSpace = glWidget->width() - horizontalOffset -2
|
||||
GLCanvas* glWidget = Application::getInstance()->getGLWidget();
|
||||
int extraSpace = glWidget->getDeviceWidth() - horizontalOffset -2
|
||||
- STATS_GENERAL_MIN_WIDTH
|
||||
- (Menu::getInstance()->isOptionChecked(MenuOption::TestPing) ? STATS_PING_MIN_WIDTH -1 : 0)
|
||||
- STATS_GEO_MIN_WIDTH
|
||||
|
@ -147,7 +147,7 @@ void Stats::resetWidth(int width, int horizontalOffset) {
|
|||
_pingStatsWidth += (int) extraSpace / panels;
|
||||
}
|
||||
_geoStatsWidth += (int) extraSpace / panels;
|
||||
_voxelStatsWidth += glWidget->width() - (_generalStatsWidth + _pingStatsWidth + _geoStatsWidth + 3);
|
||||
_voxelStatsWidth += glWidget->getDeviceWidth() - (_generalStatsWidth + _pingStatsWidth + _geoStatsWidth + 3);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -198,7 +198,7 @@ void Stats::display(
|
|||
int bytesPerSecond,
|
||||
int voxelPacketsToProcess)
|
||||
{
|
||||
QGLWidget* glWidget = Application::getInstance()->getGLWidget();
|
||||
GLCanvas* glWidget = Application::getInstance()->getGLWidget();
|
||||
|
||||
unsigned int backgroundColor = 0x33333399;
|
||||
int verticalOffset = 0, lines = 0;
|
||||
|
@ -210,7 +210,7 @@ void Stats::display(
|
|||
std::stringstream voxelStats;
|
||||
|
||||
if (_lastHorizontalOffset != horizontalOffset) {
|
||||
resetWidth(glWidget->width(), horizontalOffset);
|
||||
resetWidth(glWidget->getDeviceWidth(), horizontalOffset);
|
||||
_lastHorizontalOffset = horizontalOffset;
|
||||
}
|
||||
|
||||
|
@ -410,7 +410,8 @@ void Stats::display(
|
|||
}
|
||||
}
|
||||
|
||||
drawBackground(backgroundColor, horizontalOffset, 0, glWidget->width() - horizontalOffset, lines * STATS_PELS_PER_LINE + 10);
|
||||
drawBackground(backgroundColor, horizontalOffset, 0, glWidget->getDeviceWidth() - horizontalOffset,
|
||||
lines * STATS_PELS_PER_LINE + 10);
|
||||
horizontalOffset += 5;
|
||||
|
||||
if (_expanded) {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<ui version="4.0">
|
||||
<class>ChatWindow</class>
|
||||
<widget class="QDialog" name="ChatWindow">
|
||||
<widget class="QWidget" name="ChatWindow">
|
||||
<property name="geometry">
|
||||
<rect>
|
||||
<x>0</x>
|
||||
|
@ -86,45 +86,6 @@
|
|||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QPushButton" name="closeButton">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Fixed" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>16</width>
|
||||
<height>16</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="focusPolicy">
|
||||
<enum>Qt::NoFocus</enum>
|
||||
</property>
|
||||
<property name="styleSheet">
|
||||
<string notr="true">QPushButton {
|
||||
background-color: rgba( 0, 0, 0, 0% );
|
||||
border: none;
|
||||
image: url(../resources/images/close.svg)
|
||||
}
|
||||
|
||||
|
||||
QPushButton:pressed {
|
||||
background-color: rgba( 0, 0, 0, 0% );
|
||||
border: none;
|
||||
image: url(../resources/images/close_down.svg)
|
||||
}</string>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string/>
|
||||
</property>
|
||||
<property name="flat">
|
||||
<bool>true</bool>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
<item>
|
||||
|
@ -283,7 +244,7 @@ border-color: palette(dark); border-style: solid; border-left-width: 1px; borde
|
|||
<enum>QAbstractScrollArea::AdjustToContents</enum>
|
||||
</property>
|
||||
<property name="tabChangesFocus">
|
||||
<bool>true</bool>
|
||||
<bool>true</bool>
|
||||
</property>
|
||||
<property name="acceptRichText">
|
||||
<bool>false</bool>
|
||||
|
@ -308,22 +269,5 @@ border-color: palette(dark); border-style: solid; border-left-width: 1px; borde
|
|||
<tabstop>messagesScrollArea</tabstop>
|
||||
</tabstops>
|
||||
<resources/>
|
||||
<connections>
|
||||
<connection>
|
||||
<sender>closeButton</sender>
|
||||
<signal>clicked()</signal>
|
||||
<receiver>ChatWindow</receiver>
|
||||
<slot>hide()</slot>
|
||||
<hints>
|
||||
<hint type="sourcelabel">
|
||||
<x>390</x>
|
||||
<y>42</y>
|
||||
</hint>
|
||||
<hint type="destinationlabel">
|
||||
<x>550</x>
|
||||
<y>42</y>
|
||||
</hint>
|
||||
</hints>
|
||||
</connection>
|
||||
</connections>
|
||||
<connections/>
|
||||
</ui>
|
||||
|
|
File diff suppressed because it is too large

174  libraries/audio/src/AudioBuffer.h  (new file)
@ -0,0 +1,174 @@
//
// AudioBuffer.h
// hifi
//
// Created by Craig Hansen-Sturm on 8/29/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_AudioBuffer_h
#define hifi_AudioBuffer_h

#include <typeinfo>

template< typename T >
class AudioFrameBuffer {

uint16_t _channelCount;
uint16_t _frameCount;
uint16_t _frameCountMax;

T** _frameBuffer;

void allocateFrames() {
_frameBuffer = new T*[_channelCount];
if (_frameBuffer) {
for (uint16_t i = 0; i < _channelCount; ++i) {
_frameBuffer[i] = new T[_frameCountMax];
}
}
}

void deallocateFrames() {
if (_frameBuffer) {
for (uint16_t i = 0; i < _channelCount; ++i) {
delete _frameBuffer[i];
}
delete _frameBuffer;
}
_frameBuffer = NULL;
}

public:

AudioFrameBuffer() :
_channelCount(0),
_frameCount(0),
_frameCountMax(0),
_frameBuffer(NULL) {
}

AudioFrameBuffer(const uint16_t channelCount, const uint16_t frameCount) :
_channelCount(channelCount),
_frameCount(frameCount),
_frameCountMax(frameCount),
_frameBuffer(NULL) {
allocateFrames();
}

~AudioFrameBuffer() {
finalize();
}

void initialize(const uint16_t channelCount, const uint16_t frameCount) {
if (_frameBuffer) {
finalize();
}
_channelCount = channelCount;
_frameCount = frameCount;
_frameCountMax = frameCount;
allocateFrames();
}

void finalize() {
deallocateFrames();
_channelCount = 0;
_frameCount = 0;
}

T**& getFrameData() {
return _frameBuffer;
}

uint16_t getChannelCount() {
return _channelCount;
}

uint16_t getFrameCount() {
return _frameCount;
}

void zeroFrames() {
if (!_frameBuffer) {
return;
}
for (uint16_t i = 0; i < _channelCount; ++i) {
memset(_frameBuffer[i], 0, sizeof(T)*_frameCountMax);
}
}

template< typename S >
void copyFrames(uint16_t channelCount, const uint16_t frameCount, S* frames, const bool copyOut = false) {
if ( !_frameBuffer || !frames) {
return;
}
assert(channelCount == _channelCount);
assert(frameCount <= _frameCountMax);

_frameCount = frameCount; // we allow copying fewer frames than we've allocated

if (copyOut) {
S* dst = frames;

if(typeid(T) == typeid(S)) { // source and destination types are the same
for (int i = 0; i < _frameCount; ++i) {
for (int j = 0; j < _channelCount; ++j) {
*dst++ = _frameBuffer[j][i];
}
}
}
else {
if(typeid(T) == typeid(float32_t) &&
typeid(S) == typeid(int16_t)) {

const int scale = (2 << ((8 * sizeof(S)) - 1));

for (int i = 0; i < _frameCount; ++i) {
for (int j = 0; j < _channelCount; ++j) {
*dst++ = (S)(_frameBuffer[j][i] * scale);
}
}
}
else {
assert(0); // currently unsupported conversion
}
}
}
else { // copyIn
S* src = frames;

if(typeid(T) == typeid(S)) { // source and destination types are the same
for (int i = 0; i < _frameCount; ++i) {
for (int j = 0; j < _channelCount; ++j) {
_frameBuffer[j][i] = *src++;
}
}
}
else {
if(typeid(T) == typeid(float32_t) &&
typeid(S) == typeid(int16_t)) {

const int scale = (2 << ((8 * sizeof(S)) - 1));

for (int i = 0; i < _frameCount; ++i) {
for (int j = 0; j < _channelCount; ++j) {
_frameBuffer[j][i] = ((T)(*src++)) / scale;
}
}
}
else {
assert(0); // currently unsupported conversion
}
}
}
}
};

typedef AudioFrameBuffer< float32_t > AudioBufferFloat32;
typedef AudioFrameBuffer< int32_t > AudioBufferSInt32;

#endif // hifi_AudioBuffer_h

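A minimal usage sketch for the new AudioFrameBuffer template (not part of this commit). It assumes the same include order the new audio .cpp files in this commit use; the stereo layout and frame count are illustrative only.

// Hypothetical usage sketch: de-interleave a stereo int16_t chunk into planar
// float32_t frames, attenuate one channel, then convert back out in place.
#include <assert.h>
#include <stdint.h>
#include <SharedUtil.h>
#include "AudioFormat.h"      // defines float32_t used by AudioBuffer.h
#include "AudioBuffer.h"

static void processStereoChunk(int16_t* interleaved, uint16_t frameCount) {
    AudioBufferFloat32 frameBuffer(2, frameCount);             // planar storage: [channel][frame]
    frameBuffer.copyFrames(2, frameCount, interleaved);        // copyOut == false: int16 -> float32
    float32_t** samples = frameBuffer.getFrameData();
    for (uint16_t i = 0; i < frameBuffer.getFrameCount(); ++i) {
        samples[0][i] *= 0.5f;                                 // example edit on the left channel
    }
    frameBuffer.copyFrames(2, frameCount, interleaved, true);  // copyOut == true: float32 -> int16
}
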
@ -37,11 +37,11 @@ public:
|
|||
//
|
||||
// ctor/dtor
|
||||
//
|
||||
AudioBiquad()
|
||||
: _xm1(0.)
|
||||
, _xm2(0.)
|
||||
, _ym1(0.)
|
||||
, _ym2(0.) {
|
||||
AudioBiquad() :
|
||||
_xm1(0.),
|
||||
_xm2(0.),
|
||||
_ym1(0.),
|
||||
_ym2(0.) {
|
||||
setParameters(0.,0.,0.,0.,0.);
|
||||
}
|
||||
|
||||
|
|
|
@ -9,9 +9,12 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <assert.h>
|
||||
#include <math.h>
|
||||
#include <SharedUtil.h>
|
||||
#include "AudioRingBuffer.h"
|
||||
#include "AudioFormat.h"
|
||||
#include "AudioBuffer.h"
|
||||
#include "AudioFilter.h"
|
||||
#include "AudioFilterBank.h"
|
||||
|
||||
|
|
|
@ -49,9 +49,9 @@ public:
|
|||
//
|
||||
// ctor/dtor
|
||||
//
|
||||
AudioFilterBank()
|
||||
: _sampleRate(0.)
|
||||
, _frameCount(0) {
|
||||
AudioFilterBank() :
|
||||
_sampleRate(0.0f),
|
||||
_frameCount(0) {
|
||||
for (int i = 0; i < _channelCount; ++i) {
|
||||
_buffer[ i ] = NULL;
|
||||
}
|
||||
|
@ -64,7 +64,7 @@ public:
|
|||
//
|
||||
// public interface
|
||||
//
|
||||
void initialize(const float sampleRate, const int frameCount) {
|
||||
void initialize(const float sampleRate, const int frameCount = 0) {
|
||||
finalize();
|
||||
|
||||
for (int i = 0; i < _channelCount; ++i) {
|
||||
|
@ -141,6 +141,16 @@ public:
|
|||
}
|
||||
}
|
||||
|
||||
void render(AudioBufferFloat32& frameBuffer) {
|
||||
|
||||
float32_t** samples = frameBuffer.getFrameData();
|
||||
for (uint16_t j = 0; j < frameBuffer.getChannelCount(); ++j) {
|
||||
for (int i = 0; i < _filterCount; ++i) {
|
||||
_filters[i][j].render( samples[j], samples[j], frameBuffer.getFrameCount() );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void reset() {
|
||||
for (int i = 0; i < _filterCount; ++i) {
|
||||
for (int j = 0; j < _channelCount; ++j) {
|
||||
|
|

85  libraries/audio/src/AudioFormat.h  (new file)
@ -0,0 +1,85 @@
//
// AudioFormat.h
// hifi
//
// Created by Craig Hansen-Sturm on 8/28/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_AudioFormat_h
#define hifi_AudioFormat_h

#ifndef _FLOAT32_T
#define _FLOAT32_T
typedef float float32_t;
#endif

#ifndef _FLOAT64_T
#define _FLOAT64_T
typedef double float64_t;
#endif

//
// Audio format structure (currently for uncompressed streams only)
//

struct AudioFormat {

struct Flags {
uint32_t _isFloat : 1;
uint32_t _isSigned : 1;
uint32_t _isInterleaved : 1;
uint32_t _isBigEndian : 1;
uint32_t _isPacked : 1;
uint32_t _reserved : 27;
} _flags;

uint32_t _bytesPerFrame;
uint32_t _channelsPerFrame;
uint32_t _bitsPerChannel;
float64_t _sampleRate;

AudioFormat() {
memset(this, 0, sizeof(*this));
}
~AudioFormat() { }

AudioFormat& operator=(const AudioFormat& fmt) {
memcpy(this, &fmt, sizeof(*this));
return *this;
}

bool operator==(const AudioFormat& fmt) {
return memcmp(this, &fmt, sizeof(*this)) == 0;
}

bool operator!=(const AudioFormat& fmt) {
return memcmp(this, &fmt, sizeof(*this)) != 0;
}

void setCanonicalFloat32(uint32_t channels) {
assert(channels > 0 && channels <= 2);
_sampleRate = SAMPLE_RATE; // todo: create audio constants header
_bitsPerChannel = sizeof(float32_t) * 8;
_channelsPerFrame = channels;
_bytesPerFrame = _channelsPerFrame * _bitsPerChannel / 8;
_flags._isFloat = true;
_flags._isInterleaved = _channelsPerFrame > 1;
}

void setCanonicalInt16(uint32_t channels) {
assert(channels > 0 && channels <= 2);
_sampleRate = SAMPLE_RATE; // todo: create audio constants header
_bitsPerChannel = sizeof(int16_t) * 8;
_channelsPerFrame = channels;
_bytesPerFrame = _channelsPerFrame * _bitsPerChannel / 8;
_flags._isSigned = true;
_flags._isInterleaved = _channelsPerFrame > 1;
}
};

#endif // hifi_AudioFormat_h
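
A brief usage sketch for AudioFormat (not part of this commit); it assumes SAMPLE_RATE comes from SharedUtil.h, as the TODO comments above suggest.

// Hypothetical usage sketch: describe the two canonical stream formats and
// compare them to decide whether a conversion step is needed.
#include <assert.h>
#include <string.h>
#include <SharedUtil.h>       // assumed source of SAMPLE_RATE
#include "AudioFormat.h"

static bool needsConversion() {
    AudioFormat deviceFormat;
    deviceFormat.setCanonicalFloat32(2);   // stereo, 32-bit float, interleaved
    AudioFormat networkFormat;
    networkFormat.setCanonicalInt16(1);    // mono, signed 16-bit
    return deviceFormat != networkFormat;  // true here, so a converter/resampler is required
}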

75  libraries/audio/src/AudioGain.h  (new file)
@ -0,0 +1,75 @@
//
// AudioGain.h
// hifi
//
// Created by Craig Hansen-Sturm on 9/1/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_AudioGain_h
#define hifi_AudioGain_h

class AudioGain
{
float32_t _gain;
bool _mute;

public:
AudioGain() {
initialize();
}

~AudioGain() {
finalize();
}

void initialize() {
setParameters(1.0f,0.0f);
}

void finalize() {
}

void reset() {
initialize();
}

void setParameters(const float gain, const float mute) {
_gain = std::min(std::max(gain, 0.0f), 1.0f);
_mute = mute != 0.0f;

}

void getParameters(float& gain, float& mute) {
gain = _gain;
mute = _mute ? 1.0f : 0.0f;
}

void render(AudioBufferFloat32& frameBuffer) {
if (_mute) {
frameBuffer.zeroFrames();
return;
}

float32_t** samples = frameBuffer.getFrameData();
for (uint16_t j = 0; j < frameBuffer.getChannelCount(); ++j) {
for (uint16_t i = 0; i < frameBuffer.getFrameCount(); i += 8) {
samples[j][i + 0] *= _gain;
samples[j][i + 1] *= _gain;
samples[j][i + 2] *= _gain;
samples[j][i + 3] *= _gain;
samples[j][i + 4] *= _gain;
samples[j][i + 5] *= _gain;
samples[j][i + 6] *= _gain;
samples[j][i + 7] *= _gain;
}
}
}
};

#endif // AudioGain_h

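A short usage sketch for AudioGain (not part of this commit). Note that render() is unrolled by eight, so the buffer's frame count is assumed to be a multiple of 8; <algorithm> is included for the std::min/std::max calls inside the header.

// Hypothetical usage sketch: attenuate a frame buffer to half gain, or silence
// it entirely when muted.
#include <algorithm>
#include <SharedUtil.h>
#include "AudioFormat.h"
#include "AudioBuffer.h"
#include "AudioGain.h"

static void applyOutputGain(AudioBufferFloat32& frameBuffer, bool muted) {
    AudioGain gain;
    gain.setParameters(0.5f, muted ? 1.0f : 0.0f); // gain is clamped to [0, 1]; non-zero mute == muted
    gain.render(frameBuffer);                      // scales (or zeroes) every sample in place
}
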
@ -64,13 +64,13 @@ void AudioInjector::injectAudio() {
// pack some placeholder sequence number for now
int numPreSequenceNumberBytes = injectAudioPacket.size();
packetStream << (quint16)0;

// pack the stereo/mono type of the stream
packetStream << _options.isStereo();

// pack stream identifier (a generated UUID)
packetStream << QUuid::createUuid();

// pack the stereo/mono type of the stream
packetStream << _options.isStereo();

// pack the flag for loopback
uchar loopbackFlag = (uchar) (!_options.getLoopbackAudioInterface());
packetStream << loopbackFlag;

21  libraries/audio/src/AudioSourceNoise.cpp  (new file)
@ -0,0 +1,21 @@
//
// AudioSourceNoise.cpp
// hifi
//
// Created by Craig Hansen-Sturm on 8/10/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <assert.h>
#include <math.h>
#include <SharedUtil.h>
#include "AudioRingBuffer.h"
#include "AudioFormat.h"
#include "AudioBuffer.h"
#include "AudioSourceNoise.h"

template<>
uint32_t AudioSourcePinkNoise::_randomSeed = 1974; // a truly random number

103  libraries/audio/src/AudioSourceNoise.h  (new file)
@ -0,0 +1,103 @@
//
// AudioSourceNoise.h
// hifi
//
// Created by Craig Hansen-Sturm on 9/1/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Adapted from code by Phil Burk http://www.firstpr.com.au/dsp/pink-noise/
//

#ifndef hifi_AudioSourceNoise_h
#define hifi_AudioSourceNoise_h

template< const uint16_t N = 30>
class AudioSourceNoise
{
static const uint16_t _randomRows = N;
static const uint16_t _randomBits = 24;
static const uint16_t _randomShift = (sizeof(int32_t) * 8) - _randomBits;

static uint32_t _randomSeed;

int32_t _rows[_randomRows];
int32_t _runningSum; // used to optimize summing of generators.
uint16_t _index; // incremented each sample.
uint16_t _indexMask; // index wrapped by ANDing with this mask.
float32_t _scale; // used to scale within range of -1.0 to +1.0

static uint32_t generateRandomNumber() {
_randomSeed = (_randomSeed * 196314165) + 907633515;
return _randomSeed >> _randomShift;
}

public:
AudioSourceNoise() {
initialize();
}

~AudioSourceNoise() {
finalize();
}

void initialize() {
memset(_rows, 0, _randomRows * sizeof(int32_t));

_runningSum = 0;
_index = 0;
_indexMask = (1 << _randomRows) - 1;
_scale = 1.0f / ((_randomRows + 1) * (1 << (_randomBits - 1)));
}

void finalize() {
}

void reset() {
initialize();
}

void setParameters(void) {
}

void getParameters(void) {
}

void render(AudioBufferFloat32& frameBuffer) {

uint32_t randomNumber;

float32_t** samples = frameBuffer.getFrameData();
for (uint16_t i = 0; i < frameBuffer.getFrameCount(); ++i) {
for (uint16_t j = 0; j < frameBuffer.getChannelCount(); ++j) {

_index = (_index + 1) & _indexMask; // increment and mask index.
if (_index != 0) { // if index is zero, don't update any random values.

uint32_t numZeros = 0; // determine how many trailing zeros in _index
uint32_t tmp = _index;
while ((tmp & 1) == 0) {
tmp >>= 1;
numZeros++;
}
// replace the indexed _rows random value. subtract and add back to _runningSum instead
// of adding all the random values together. only one value changes each time.
_runningSum -= _rows[numZeros];
randomNumber = generateRandomNumber();
_runningSum += randomNumber;
_rows[numZeros] = randomNumber;
}

// add extra white noise value and scale between -1.0 and +1.0
samples[j][i] = (_runningSum + generateRandomNumber()) * _scale;
}
}
}
};

typedef AudioSourceNoise<> AudioSourcePinkNoise;

#endif // AudioSourceNoise_h
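
A brief usage sketch for the pink-noise generator (not part of this commit); the mono, 480-frame buffer is illustrative.

// Hypothetical usage sketch: fill a mono frame buffer with pink noise scaled
// to roughly the -1.0 .. +1.0 range.
#include <string.h>
#include <SharedUtil.h>
#include "AudioFormat.h"
#include "AudioBuffer.h"
#include "AudioSourceNoise.h"

static void fillWithPinkNoise() {
    AudioBufferFloat32 frameBuffer(1, 480);   // 1 channel, 480 frames (illustrative)
    AudioSourcePinkNoise noise;               // AudioSourceNoise<30>: 30 summed random rows
    noise.render(frameBuffer);                // overwrites every sample in the buffer
}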

20  libraries/audio/src/AudioSourceTone.cpp  (new file)
@ -0,0 +1,20 @@
//
// AudioSourceTone.cpp
// hifi
//
// Created by Craig Hansen-Sturm on 8/10/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <assert.h>
#include <math.h>
#include <SharedUtil.h>
#include "AudioRingBuffer.h"
#include "AudioFormat.h"
#include "AudioBuffer.h"
#include "AudioSourceTone.h"

uint32_t AudioSourceTone::_frameOffset = 0;

72  libraries/audio/src/AudioSourceTone.h  (new file)
@ -0,0 +1,72 @@
//
// AudioSourceTone.h
// hifi
//
// Created by Craig Hansen-Sturm on 9/1/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_AudioSourceTone_h
#define hifi_AudioSourceTone_h

class AudioSourceTone
{
static uint32_t _frameOffset;
float32_t _frequency;
float32_t _amplitude;
float32_t _sampleRate;
float32_t _omega;

public:
AudioSourceTone() {
initialize();
}

~AudioSourceTone() {
finalize();
}

void initialize() {
_frameOffset = 0;
setParameters(SAMPLE_RATE, 220.0f, 0.9f);
}

void finalize() {
}

void reset() {
_frameOffset = 0;
}

void setParameters(const float32_t sampleRate, const float32_t frequency, const float32_t amplitude) {
_sampleRate = std::max(sampleRate, 1.0f);
_frequency = std::max(frequency, 1.0f);
_amplitude = std::max(amplitude, 1.0f);
_omega = _frequency / _sampleRate * TWO_PI;
}

void getParameters(float32_t& sampleRate, float32_t& frequency, float32_t& amplitude) {
sampleRate = _sampleRate;
frequency = _frequency;
amplitude = _amplitude;
}

void render(AudioBufferFloat32& frameBuffer) {

// note: this is a placeholder implementation. final version will not include any transcendental ops in our render loop

float32_t** samples = frameBuffer.getFrameData();
for (uint16_t i = 0; i < frameBuffer.getFrameCount(); ++i) {
for (uint16_t j = 0; j < frameBuffer.getChannelCount(); ++j) {
samples[j][i] = sinf((i + _frameOffset) * _omega);
}
}
_frameOffset += frameBuffer.getFrameCount();
}
};

#endif
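
A short usage sketch for AudioSourceTone (not part of this commit); SAMPLE_RATE and TWO_PI are assumed to come from SharedUtil.h, and the 440 Hz value is illustrative. Note that the committed setter clamps amplitude upward with std::max and render() does not apply it yet.

// Hypothetical usage sketch: render a 440 Hz sine into a frame buffer.
#include <algorithm>
#include <math.h>
#include <SharedUtil.h>
#include "AudioFormat.h"
#include "AudioBuffer.h"
#include "AudioSourceTone.h"

static void fillWithTone(AudioBufferFloat32& frameBuffer) {
    AudioSourceTone tone;
    tone.setParameters(SAMPLE_RATE, 440.0f, 1.0f); // sample rate, frequency (Hz), amplitude
    tone.render(frameBuffer);                      // advances the static _frameOffset each call
}
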
|
||||
|
|
@ -16,6 +16,8 @@
|
|||
#include <AABox.h>
|
||||
|
||||
#include "InboundAudioStream.h"
|
||||
#include "AudioFormat.h"
|
||||
#include "AudioBuffer.h"
|
||||
#include "AudioFilter.h"
|
||||
#include "AudioFilterBank.h"
|
||||
|
||||
|
|
|
@ -145,7 +145,7 @@ public:
|
|||
const QUuid& getSessionUUID() { return _sessionUUID; }
|
||||
|
||||
const glm::vec3& getPosition();
|
||||
void setPosition(const glm::vec3 position, bool overideReferential = false);
|
||||
virtual void setPosition(const glm::vec3 position, bool overideReferential = false);
|
||||
|
||||
glm::vec3 getHandPosition() const;
|
||||
void setHandPosition(const glm::vec3& handPosition);
|
||||
|
|
|
@ -125,6 +125,12 @@ bool DeleteEntityOperator::postRecursion(OctreeElement* element) {
|
|||
if ((subTreeContainsSomeEntitiesToDelete(element))) {
|
||||
element->markWithChangedTime();
|
||||
}
|
||||
|
||||
// It should always be ok to prune children. Because we are only in this PostRecursion function if
|
||||
// we've already finished processing all of the children of this current element. If any of those
|
||||
// children are the containing element for any entity in our lists of entities to delete, then they
|
||||
// must have already deleted the entity, and they are safe to prune. Since this operation doesn't
|
||||
// ever add any elements we don't have to worry about memory being reused within this recursion pass.
|
||||
EntityTreeElement* entityTreeElement = static_cast<EntityTreeElement*>(element);
|
||||
entityTreeElement->pruneChildren(); // take this opportunity to prune any empty leaves
|
||||
return keepSearching; // if we haven't yet found it, keep looking
|
||||
|
|
|
@ -39,6 +39,9 @@ void EntityTree::eraseAllOctreeElements(bool createNewRoot) {
|
|||
}
|
||||
_entityToElementMap.clear();
|
||||
Octree::eraseAllOctreeElements(createNewRoot);
|
||||
_movingEntities.clear();
|
||||
_changingEntities.clear();
|
||||
_mortalEntities.clear();
|
||||
}
|
||||
|
||||
bool EntityTree::handlesEditPacketType(PacketType packetType) const {
|
||||
|
@ -492,7 +495,7 @@ int EntityTree::processEditPacketData(PacketType packetType, const unsigned char
|
|||
|
||||
void EntityTree::notifyNewlyCreatedEntity(const EntityItem& newEntity, const SharedNodePointer& senderNode) {
|
||||
_newlyCreatedHooksLock.lockForRead();
|
||||
for (size_t i = 0; i < _newlyCreatedHooks.size(); i++) {
|
||||
for (int i = 0; i < _newlyCreatedHooks.size(); i++) {
|
||||
_newlyCreatedHooks[i]->entityCreated(newEntity, senderNode);
|
||||
}
|
||||
_newlyCreatedHooksLock.unlock();
|
||||
|
@ -506,7 +509,7 @@ void EntityTree::addNewlyCreatedHook(NewlyCreatedEntityHook* hook) {
|
|||
|
||||
void EntityTree::removeNewlyCreatedHook(NewlyCreatedEntityHook* hook) {
|
||||
_newlyCreatedHooksLock.lockForWrite();
|
||||
for (size_t i = 0; i < _newlyCreatedHooks.size(); i++) {
|
||||
for (int i = 0; i < _newlyCreatedHooks.size(); i++) {
|
||||
if (_newlyCreatedHooks[i] == hook) {
|
||||
_newlyCreatedHooks.erase(_newlyCreatedHooks.begin() + i);
|
||||
break;
|
||||
|

@ -859,6 +862,7 @@ void EntityTree::forgetEntitiesDeletedBefore(quint64 sinceTime) {

// TODO: consider consolidating processEraseMessageDetails() and processEraseMessage()
int EntityTree::processEraseMessage(const QByteArray& dataByteArray, const SharedNodePointer& sourceNode) {
    lockForWrite();
    const unsigned char* packetData = (const unsigned char*)dataByteArray.constData();
    const unsigned char* dataAt = packetData;
    size_t packetLength = dataByteArray.size();

@ -901,10 +905,13 @@ int EntityTree::processEraseMessage(const QByteArray& dataByteArray, const Share
        }
        deleteEntities(entityItemIDsToDelete);
    }
    unlock();

    return processedBytes;
}

// This version skips over the header
// NOTE: Caller must lock the tree before calling this.
// TODO: consider consolidating processEraseMessageDetails() and processEraseMessage()
int EntityTree::processEraseMessageDetails(const QByteArray& dataByteArray, const SharedNodePointer& sourceNode) {
    const unsigned char* packetData = (const unsigned char*)dataByteArray.constData();

@ -938,7 +945,6 @@ int EntityTree::processEraseMessageDetails(const QByteArray& dataByteArray, cons
        }
        deleteEntities(entityItemIDsToDelete);
    }

    return processedBytes;
}
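
The split between processEraseMessage() and processEraseMessageDetails() follows a caller-locks convention: the public entry point takes the write lock, while the Details variant documents that the caller must already hold it. A hedged sketch of that pattern with simplified names (not the actual EntityTree members):

#include <QByteArray>
#include <QReadWriteLock>

class ExampleTree {
public:
    int processEraseMessage(const QByteArray& data) {
        _lock.lockForWrite();                            // public entry point owns the lock
        int processed = processEraseMessageDetails(data);
        _lock.unlock();
        return processed;
    }

    // NOTE: caller must hold the write lock, mirroring the comment in the hunk above
    int processEraseMessageDetails(const QByteArray& data) {
        // ... parse IDs and delete entities without re-taking the lock ...
        return data.size();
    }

private:
    QReadWriteLock _lock;
};
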

@ -723,7 +723,6 @@ int EntityTreeElement::readElementDataFromBuffer(const unsigned char* data, int
    int bytesForThisEntity = 0;
    EntityItemID entityItemID;
    EntityItem* entityItem = NULL;
    bool newEntity = false;

    // Old model files don't have UUIDs in them. So we don't want to try to read those IDs from the stream.
    // Since this can only happen on loading an old file, we can safely treat these as new entity cases,

@ -768,7 +767,6 @@ int EntityTreeElement::readElementDataFromBuffer(const unsigned char* data, int
        addEntityItem(entityItem); // add this new entity to this elements entities
        entityItemID = entityItem->getEntityItemID();
        _myTree->setContainingElement(entityItemID, this);
        newEntity = true;
        EntityItem::SimulationState newState = entityItem->getSimulationState();
        _myTree->changeEntityState(entityItem, EntityItem::Static, newState);
    }

@ -826,4 +824,4 @@ void EntityTreeElement::debugDump() {
            entity->debugDump();
        }
    }

@ -38,6 +38,7 @@ void MovingEntitiesOperator::addEntityToMoveList(EntityItem* entity, const AACub
    // check our tree, to determine if this entity is known
    EntityToMoveDetails details;
    details.oldContainingElement = oldContainingElement;
    details.oldContainingElementCube = oldContainingElement->getAACube();
    details.entity = entity;
    details.oldFound = false;
    details.newFound = false;

@ -123,8 +124,30 @@ bool MovingEntitiesOperator::postRecursion(OctreeElement* element) {
        element->markWithChangedTime();
    }

    EntityTreeElement* entityTreeElement = static_cast<EntityTreeElement*>(element);
    entityTreeElement->pruneChildren(); // take this opportunity to prune any empty leaves

    // It's not OK to prune if we have the potential of deleting the original containig element.
    // because if we prune the containing element then new might end up reallocating the same memory later
    // and that will confuse our logic.
    //
    // it's ok to prune if:
    // 2) this subtree doesn't contain any old elements
    // 3) this subtree contains an old element, but this element isn't a direct parent of any old containing element

    bool elementSubTreeContainsOldElements = false;
    bool elementIsDirectParentOfOldElment = false;
    foreach(const EntityToMoveDetails& details, _entitiesToMove) {
        if (element->getAACube().contains(details.oldContainingElementCube)) {
            elementSubTreeContainsOldElements = true;
        }
        if (element->isParentOf(details.oldContainingElement)) {
            elementIsDirectParentOfOldElment = true;
        }
    }
    if (!elementSubTreeContainsOldElements || !elementIsDirectParentOfOldElment) {
        EntityTreeElement* entityTreeElement = static_cast<EntityTreeElement*>(element);
        entityTreeElement->pruneChildren(); // take this opportunity to prune any empty leaves
    }

    return keepSearching; // if we haven't yet found it, keep looking
}
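
Unlike the delete operator above, the move operator keeps pointers to "old containing" elements around while the move completes, so the new comment's concern is pointer reuse after a prune. A self-contained sketch of that hazard, independent of the Octree types:

#include <cstdio>

// Sketch only: once an element is freed (pruned), a later allocation can
// legally return the same address, so a stale "old containing element"
// pointer would look valid again and confuse the old/new bookkeeping.
struct Element { int value = 0; };

int main() {
    Element* oldContainingElement = new Element();
    delete oldContainingElement;          // pruning the element frees it

    Element* reused = new Element();      // may reuse the exact same address
    if (reused == oldContainingElement) {
        std::printf("same address reused; comparisons against the stale pointer now lie\n");
    }
    delete reused;
    return 0;
}
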

@ -19,6 +19,7 @@ public:
    AACube newCube;
    AABox newBox;
    EntityTreeElement* oldContainingElement;
    AACube oldContainingElementCube;
    bool oldFound;
    bool newFound;
};

@ -242,7 +242,7 @@ int Octree::readElementData(OctreeElement* destinationElement, const unsigned ch
    // give this destination element the child mask from the packet
    const unsigned char ALL_CHILDREN_ASSUMED_TO_EXIST = 0xFF;

    if (bytesLeftToRead < sizeof(unsigned char)) {
    if ((size_t)bytesLeftToRead < sizeof(unsigned char)) {
        qDebug() << "UNEXPECTED: readElementData() only had " << bytesLeftToRead << " bytes. Not enough for meaningful data.";
        return bytesAvailable; // assume we read the entire buffer...
    }
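
The cast here makes both sides of the remaining-byte check unsigned, since sizeof() yields a size_t. A stand-alone version of the corrected check, with names local to this example and the byte count assumed non-negative:

#include <cstddef>
#include <cstdio>

bool haveEnoughBytes(int bytesLeftToRead) {
    // sizeof() yields size_t; casting the (assumed non-negative) signed count
    // keeps the comparison unsigned on both sides, which is what the hunk does
    if ((size_t)bytesLeftToRead < sizeof(unsigned char)) {
        std::printf("UNEXPECTED: only %d bytes left, not enough for meaningful data\n", bytesLeftToRead);
        return false;
    }
    return true;
}
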

@ -1865,7 +1865,6 @@ int Octree::encodeTreeBitstreamRecursion(OctreeElement* element,

bool Octree::readFromSVOFile(const char* fileName) {
    bool fileOk = false;
    bool hasBufferBreaks = false;

    PacketVersion gotVersion = 0;
    std::ifstream file(fileName, std::ios::in|std::ios::binary|std::ios::ate);

@ -275,7 +275,7 @@ void OctreeEditPacketSender::queueOctreeEditMessage(PacketType type, unsigned ch

    // If we're switching type, then we send the last one and start over
    if ((type != packetBuffer._currentType && packetBuffer._currentSize > 0) ||
        (packetBuffer._currentSize + length >= _maxPacketSize)) {
        (packetBuffer._currentSize + length >= (size_t)_maxPacketSize)) {
        releaseQueuedPacket(packetBuffer);
        initializePacket(packetBuffer, type);
    }

@ -102,6 +102,7 @@ ScriptEngine::ScriptEngine(const QString& scriptContents, const QString& fileNam
    _vec3Library(),
    _uuidLibrary(),
    _animationCache(this),
    _isUserLoaded(false),
    _arrayBufferClass(new ArrayBufferClass(this))
{
}

@ -127,6 +128,7 @@ ScriptEngine::ScriptEngine(const QUrl& scriptURL,
    _vec3Library(),
    _uuidLibrary(),
    _animationCache(this),
    _isUserLoaded(false),
    _arrayBufferClass(new ArrayBufferClass(this))
{
    QString scriptURLString = scriptURL.toString();

@ -708,7 +710,7 @@ void ScriptEngine::include(const QString& includeFile) {

void ScriptEngine::load(const QString& loadFile) {
    QUrl url = resolveInclude(loadFile);
    emit loadScript(url.toString());
    emit loadScript(url.toString(), false);
}

void ScriptEngine::nodeKilled(SharedNodePointer node) {

@ -93,6 +93,9 @@ public:
    bool isFinished() const { return _isFinished; }
    bool isRunning() const { return _isRunning; }

    void setUserLoaded(bool isUserLoaded) { _isUserLoaded = isUserLoaded; }
    bool isUserLoaded() const { return _isUserLoaded; }

public slots:
    void stop();

@ -116,7 +119,7 @@ signals:
    void errorMessage(const QString& message);
    void runningStateChanged();
    void evaluationFinished(QScriptValue result, bool isException);
    void loadScript(const QString& scriptName);
    void loadScript(const QString& scriptName, bool isUserLoaded);

protected:
    QString _scriptContents;

@ -152,7 +155,8 @@ private:
    Vec3 _vec3Library;
    ScriptUUID _uuidLibrary;
    AnimationCache _animationCache;

    bool _isUserLoaded;

    ArrayBufferClass* _arrayBufferClass;

    QHash<QUuid, quint16> _outgoingScriptAudioSequenceNumbers;
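
Taken together, the ScriptEngine changes thread a "user loaded" flag from Script.load() through to whoever listens for the loadScript signal: engines default to _isUserLoaded == false, and load() now emits the two-argument overload with false. A hedged sketch of a consumer of that flag; the handler below is hypothetical and not part of this commit, and it assumes the ScriptEngine header shown above:

#include <QString>

// Hypothetical receiver for the two-argument loadScript signal: scripts started
// from another script arrive with isUserLoaded == false, scripts the user opens
// directly would be marked true (e.g. so only user-loaded scripts are persisted).
void handleLoadScript(ScriptEngine* engine, const QString& scriptName, bool isUserLoaded) {
    engine->setUserLoaded(isUserLoaded);
    // ... start the engine for scriptName ...
}
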

@ -30,11 +30,11 @@ void TypedArrayPrototype::set(QScriptValue array, qint32 offset) {
            engine()->evaluate("throw \"ArgumentError: negative offset\"");
        }
        quint32 length = array.property("length").toInt32();
        if (offset + length > thisObject().data().property(typedArray->_lengthName).toInt32()) {
        if (offset + (qint32)length > thisObject().data().property(typedArray->_lengthName).toInt32()) {
            engine()->evaluate("throw \"ArgumentError: array does not fit\"");
            return;
        }
        for (int i = 0; i < length; ++i) {
        for (quint32 i = 0; i < length; ++i) {
            thisObject().setProperty(QString::number(offset + i), array.property(QString::number(i)));
        }
    } else {
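
Two warnings are silenced in this hunk: the bounds check added a signed offset to an unsigned length, and the copy loop compared a signed index against the unsigned length. A stand-alone illustration of the corrected arithmetic using plain fixed-width integers instead of the QtScript types:

#include <cstdint>

bool fitsInTarget(int32_t offset, uint32_t length, int32_t targetLength) {
    // cast the length so the addition and comparison are both done in int32_t,
    // mirroring the (qint32) cast above
    return offset + (int32_t)length <= targetLength;
}

void copyRange(uint32_t length) {
    for (uint32_t i = 0; i < length; ++i) {   // unsigned index matches the unsigned bound
        // ... copy element i ...
    }
}
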

@ -50,7 +50,7 @@ QScriptValue TypedArray::newInstance(QScriptValue array) {
    if (array.property(ARRAY_LENGTH_HANDLE).isValid()) {
        quint32 length = array.property(ARRAY_LENGTH_HANDLE).toInt32();
        QScriptValue newArray = newInstance(length);
        for (int i = 0; i < length; ++i) {
        for (quint32 i = 0; i < length; ++i) {
            QScriptValue value = array.property(QString::number(i));
            setProperty(newArray, engine()->toStringHandle(QString::number(i)),
                        i * _bytesPerElement, (value.isNumber()) ? value : QScriptValue(0));

@ -119,7 +119,7 @@ QScriptValue TypedArray::construct(QScriptContext* context, QScriptEngine* engin
        return QScriptValue();
    }
    if (lengthArg.toInt32() < 0 ||
        byteOffsetArg.toInt32() + lengthArg.toInt32() * cls->_bytesPerElement > arrayBuffer->size()) {
        byteOffsetArg.toInt32() + lengthArg.toInt32() * (qint32)(cls->_bytesPerElement) > arrayBuffer->size()) {
        engine->evaluate("throw \"RangeError: byteLength out of range\"");
        return QScriptValue();
    }

@ -155,10 +155,10 @@ QScriptClass::QueryFlags TypedArray::queryProperty(const QScriptValue& object,
        quint32 byteOffset = object.data().property(_byteOffsetName).toInt32();
        quint32 length = object.data().property(_lengthName).toInt32();
        bool ok = false;
        int pos = name.toArrayIndex(&ok);
        quint32 pos = name.toArrayIndex(&ok);

        // Check that name is a valid index and arrayBuffer exists
        if (ok && pos >= 0 && pos < length) {
        if (ok && pos < length) {
            *id = byteOffset + pos * _bytesPerElement; // save pos to avoid recomputation
            return HandlesReadAccess | HandlesWriteAccess; // Read/Write access
        }
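
Once pos is declared quint32 it can never be negative, so the pos >= 0 test is always true and only provokes a tautological-comparison warning; dropping it leaves the single meaningful bound. In miniature:

#include <cstdint>

bool isValidIndex(uint32_t pos, uint32_t length) {
    // 'pos >= 0' would be tautological for an unsigned value,
    // so the range check reduces to the upper bound alone
    return pos < length;
}
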

@ -71,6 +71,8 @@ QScriptValue XMLHttpRequestClass::getStatus() const {
            return QScriptValue(408);
        case QNetworkReply::ContentOperationNotPermittedError:
            return QScriptValue(501);
        default:
            break;
        }
    }
    return QScriptValue(0);

@ -92,6 +94,8 @@ QString XMLHttpRequestClass::getStatusText() const {
            return "Timeout";
        case QNetworkReply::ContentOperationNotPermittedError:
            return "Not Implemented";
        default:
            break;
        }
    }
    return "";
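
getStatus() and getStatusText() keep two parallel switches over QNetworkReply::NetworkError; this change adds ContentOperationNotPermittedError to both, mapping it to 501 / "Not Implemented". A hedged sketch that collapses the two into one helper, shown only to make the mapping explicit (this is not how the class is structured):

#include <QNetworkReply>
#include <QPair>
#include <QString>

// Sketch only: HTTP status code and reason phrase for the errors visible in this hunk.
static QPair<int, QString> statusForError(QNetworkReply::NetworkError error) {
    switch (error) {
        case QNetworkReply::TimeoutError:
            return qMakePair(408, QString("Timeout"));
        case QNetworkReply::ContentOperationNotPermittedError:
            return qMakePair(501, QString("Not Implemented"));
        default:
            return qMakePair(0, QString(""));
    }
}
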

@ -115,7 +115,7 @@ void OctreeTests::propertyFlagsTests(bool verbose) {
        outputBufferBits((const unsigned char*)encoded.constData(), encoded.size());
    }

    char expectedBytes[] = { 196, 15, 2 };
    char expectedBytes[] = { (char)196, (char)15, (char)2 };
    QByteArray expectedResult(expectedBytes, sizeof(expectedBytes)/sizeof(expectedBytes[0]));

    if (encoded == expectedResult) {
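
The repeated expectedBytes edits in this test file all address the same issue: on targets where char is signed, literals like 196 do not fit and the brace initialization triggers a narrowing-conversion warning; the explicit (char) casts state that the truncation is intentional. A minimal reproduction:

#include <QByteArray>

void narrowingExample() {
    // char expectedBytes[] = { 196, 15, 2 };                    // warns: 196 narrows to signed char
    char expectedBytes[] = { (char)196, (char)15, (char)2 };     // explicit cast, warning silenced
    QByteArray expectedResult(expectedBytes, sizeof(expectedBytes) / sizeof(expectedBytes[0]));
    (void)expectedResult;
}
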

@ -140,7 +140,7 @@ void OctreeTests::propertyFlagsTests(bool verbose) {
        outputBufferBits((const unsigned char*)encoded.constData(), encoded.size());
    }

    char expectedBytesB[] = { 136, 30 };
    char expectedBytesB[] = { (char)136, (char)30 };
    QByteArray expectedResultB(expectedBytesB, sizeof(expectedBytesB)/sizeof(expectedBytesB[0]));

    if (encoded == expectedResultB) {

@ -172,7 +172,7 @@ void OctreeTests::propertyFlagsTests(bool verbose) {
        qDebug() << "encoded=";
        outputBufferBits((const unsigned char*)encoded.constData(), encoded.size());
    }
    char expectedBytes[] = { 196, 15, 2 };
    char expectedBytes[] = { (char)196, (char)15, (char)2 };
    QByteArray expectedResult(expectedBytes, sizeof(expectedBytes)/sizeof(expectedBytes[0]));

    if (encoded == expectedResult) {

@ -197,7 +197,7 @@ void OctreeTests::propertyFlagsTests(bool verbose) {
        outputBufferBits((const unsigned char*)encoded.constData(), encoded.size());
    }

    char expectedBytesB[] = { 136, 30 };
    char expectedBytesB[] = { (char)136, (char)30 };
    QByteArray expectedResultB(expectedBytesB, sizeof(expectedBytesB)/sizeof(expectedBytesB[0]));

    if (encoded == expectedResultB) {

@ -231,7 +231,7 @@ void OctreeTests::propertyFlagsTests(bool verbose) {
        outputBufferBits((const unsigned char*)encoded.constData(), encoded.size());
    }

    char expectedBytes[] = { 196, 15, 2 };
    char expectedBytes[] = { (char)196, (char)15, (char)2 };
    QByteArray expectedResult(expectedBytes, sizeof(expectedBytes)/sizeof(expectedBytes[0]));

    if (encoded == expectedResult) {

@ -264,7 +264,7 @@ void OctreeTests::propertyFlagsTests(bool verbose) {
        outputBufferBits((const unsigned char*)encoded.constData(), encoded.size());
    }

    char expectedBytes[] = { 196, 15, 2 };
    char expectedBytes[] = { (char)196, (char)15, (char)2 };
    QByteArray expectedResult(expectedBytes, sizeof(expectedBytes)/sizeof(expectedBytes[0]));

    if (encoded == expectedResult) {

@ -296,7 +296,7 @@ void OctreeTests::propertyFlagsTests(bool verbose) {
        outputBufferBits((const unsigned char*)encoded.constData(), encoded.size());
    }

    char expectedBytes[] = { 196, 15, 2 };
    char expectedBytes[] = { (char)196, (char)15, (char)2 };
    QByteArray expectedResult(expectedBytes, sizeof(expectedBytes)/sizeof(expectedBytes[0]));

    if (encoded == expectedResult) {

@ -329,7 +329,7 @@ void OctreeTests::propertyFlagsTests(bool verbose) {
        outputBufferBits((const unsigned char*)encoded.constData(), encoded.size());
    }

    char expectedBytes[] = { 196, 15, 2 };
    char expectedBytes[] = { (char)196, (char)15, (char)2 };
    QByteArray expectedResult(expectedBytes, sizeof(expectedBytes)/sizeof(expectedBytes[0]));

    if (encoded == expectedResult) {

@ -362,7 +362,7 @@ void OctreeTests::propertyFlagsTests(bool verbose) {
        outputBufferBits((const unsigned char*)encoded.constData(), encoded.size());
    }

    char expectedBytes[] = { 196, 15, 2 };
    char expectedBytes[] = { (char)196, (char)15, (char)2 };
    QByteArray expectedResult(expectedBytes, sizeof(expectedBytes)/sizeof(expectedBytes[0]));

    if (encoded == expectedResult) {

@ -395,7 +395,7 @@ void OctreeTests::propertyFlagsTests(bool verbose) {
        outputBufferBits((const unsigned char*)encoded.constData(), encoded.size());
    }

    char expectedBytes[] = { 196, 15, 2 };
    char expectedBytes[] = { (char)196, (char)15, (char)2 };
    QByteArray expectedResult(expectedBytes, sizeof(expectedBytes)/sizeof(expectedBytes[0]));

    if (encoded == expectedResult) {

@ -432,7 +432,7 @@ void OctreeTests::propertyFlagsTests(bool verbose) {
        outputBufferBits((const unsigned char*)encoded.constData(), encoded.size());
    }

    char expectedBytes[] = { 196, 15, 2 };
    char expectedBytes[] = { (char)196, (char)15, (char)2 };
    QByteArray expectedResult(expectedBytes, sizeof(expectedBytes)/sizeof(expectedBytes[0]));

    if (encoded == expectedResult) {

@ -642,7 +642,7 @@ void OctreeTests::propertyFlagsTests(bool verbose) {
        qDebug() << "props.getHasProperty(PARTICLE_PROP_VISIBLE)" << (props.getHasProperty(PARTICLE_PROP_VISIBLE))
            << "{ expect true }";
    }
    char expectedBytesD[] = { 136, 16 };
    char expectedBytesD[] = { (char)136, (char)16 };
    QByteArray expectedResultD(expectedBytesD, sizeof(expectedBytesD)/sizeof(expectedBytesD[0]));

    testsTaken++;