Mirror of https://github.com/overte-org/overte.git

Merge branch 'master' into tony/vive-rendering-work

Commit de3144cde5
20 changed files with 167 additions and 90 deletions
@@ -8,14 +8,14 @@ like to get paid for your work, make sure you report the bug via a job on
[Worklist.net](https://worklist.net).

We're hiring! We're looking for skilled developers;
-send your resume to hiring@highfidelity.io
+send your resume to hiring@highfidelity.com

##### Chat with us
Come chat with us in [our Gitter](http://gitter.im/highfidelity/hifi) if you have any questions or just want to say hi!

Documentation
=========
-Documentation is available at [docs.highfidelity.io](http://docs.highfidelity.io), if something is missing, please suggest it via a new job on Worklist (add to the hifi-docs project).
+Documentation is available at [docs.highfidelity.com](http://docs.highfidelity.com), if something is missing, please suggest it via a new job on Worklist (add to the hifi-docs project).

Build Instructions
=========
@@ -77,6 +77,18 @@ void AssetServer::completeSetup() {

    auto assetServerObject = settingsObject[ASSET_SERVER_SETTINGS_KEY].toObject();

+    static const QString MAX_BANDWIDTH_OPTION = "max_bandwidth";
+    auto maxBandwidthValue = assetServerObject[MAX_BANDWIDTH_OPTION];
+    auto maxBandwidthFloat = maxBandwidthValue.toDouble(-1);
+
+    if (maxBandwidthFloat > 0.0) {
+        const int BYTES_PER_MEGABITS = (1024 * 1024) / 8;
+        int maxBandwidth = maxBandwidthFloat * BYTES_PER_MEGABITS;
+        nodeList->setConnectionMaxBandwidth(maxBandwidth);
+        qInfo() << "Set maximum bandwith per connection to" << maxBandwidthFloat << "Mb/s."
+            " (" << maxBandwidth << "bytes/sec)";
+    }
+
    // get the path to the asset folder from the domain server settings
    static const QString ASSETS_PATH_OPTION = "assets_path";
    auto assetsJSONValue = assetServerObject[ASSETS_PATH_OPTION];
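For context on the conversion above: max_bandwidth arrives from the settings in megabits per second and is handed to the node list in bytes per second. A standalone sketch of the same arithmetic (the 10.0 Mb/s figure is just the settings placeholder, not a required value):

```cpp
#include <iostream>

int main() {
    // Same conversion as the hunk above: (1024 * 1024) bits per megabit, divided by 8 bits per byte.
    const int BYTES_PER_MEGABITS = (1024 * 1024) / 8;   // 131072

    double maxBandwidthMbps = 10.0;                      // illustrative value (the settings placeholder)
    int maxBandwidth = maxBandwidthMbps * BYTES_PER_MEGABITS;

    std::cout << maxBandwidthMbps << " Mb/s -> " << maxBandwidth << " bytes/sec" << std::endl;  // 1310720
    return 0;
}
```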
@@ -186,6 +186,15 @@
      "help": "The path to the directory assets are stored in.<br/>If this path is relative, it will be relative to the application data directory.<br/>If you change this path you will need to manually copy any existing assets from the previous directory.",
      "default": "",
      "advanced": true
    },
+   {
+     "name": "max_bandwidth",
+     "type": "double",
+     "label": "Max Bandwidth Per User",
+     "help": "The maximum upstream bandwidth each user can use (in Mb/s).",
+     "placeholder": "10.0",
+     "default": "",
+     "advanced": true
+   }
  ]
},
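The new settings entry keeps "default": "" so an untouched setting stays unset. In the asset server code above, QJsonValue::toDouble(-1) then returns the fallback and the `maxBandwidthFloat > 0.0` guard skips the cap entirely. A small sketch of that behaviour (the QJsonObject values here are hypothetical, not the domain server's actual settings object):

```cpp
#include <QJsonObject>
#include <QJsonValue>
#include <QDebug>

int main() {
    // Hypothetical settings fragments: one left at the empty default, one set to 10 Mb/s.
    QJsonObject unset;
    unset.insert("max_bandwidth", QJsonValue());
    QJsonObject configured;
    configured.insert("max_bandwidth", 10.0);

    // toDouble(-1) yields the fallback when the value is missing or not numeric,
    // which is what lets the `maxBandwidthFloat > 0.0` guard skip the cap.
    qDebug() << unset.value("max_bandwidth").toDouble(-1);        // -1
    qDebug() << configured.value("max_bandwidth").toDouble(-1);   // 10
    return 0;
}
```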
@@ -3232,7 +3232,9 @@ void Application::update(float deltaTime) {
    controller::Pose leftHandPose = userInputMapper->getPoseState(controller::Action::LEFT_HAND);
    controller::Pose rightHandPose = userInputMapper->getPoseState(controller::Action::RIGHT_HAND);
    auto myAvatarMatrix = createMatFromQuatAndPos(myAvatar->getOrientation(), myAvatar->getPosition());
-    myAvatar->setHandControllerPosesInWorldFrame(leftHandPose.transform(myAvatarMatrix), rightHandPose.transform(myAvatarMatrix));
+    auto worldToSensorMatrix = glm::inverse(myAvatar->getSensorToWorldMatrix());
+    auto avatarToSensorMatrix = worldToSensorMatrix * myAvatarMatrix;
+    myAvatar->setHandControllerPosesInSensorFrame(leftHandPose.transform(avatarToSensorMatrix), rightHandPose.transform(avatarToSensorMatrix));

    updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...
    updateDialogs(deltaTime); // update various stats dialogs if present
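The replacement above re-expresses the hand poses in the sensor frame by composing avatar-to-world with world-to-sensor. A toy illustration of that composition with glm (the matrices are made up; only the multiplication order mirrors the hunk):

```cpp
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

int main() {
    // Made-up frames: the avatar stands at x = 10, the HMD sensor origin sits 1.5 above it.
    glm::mat4 avatarToWorld = glm::translate(glm::mat4(1.0f), glm::vec3(10.0f, 0.0f, 0.0f));
    glm::mat4 sensorToWorld = glm::translate(glm::mat4(1.0f), glm::vec3(10.0f, 1.5f, 0.0f));

    glm::mat4 worldToSensor = glm::inverse(sensorToWorld);
    glm::mat4 avatarToSensor = worldToSensor * avatarToWorld;   // same composition as in the hunk above

    glm::vec4 handInAvatar(0.0f, 1.0f, -0.5f, 1.0f);            // a point known in the avatar frame
    glm::vec4 handInSensor = avatarToSensor * handInAvatar;     // (0, -0.5, -0.5, 1) in the sensor frame

    return handInSensor.y < 0.0f ? 0 : 1;                       // trivial check so the result is used
}
```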
@@ -3328,6 +3330,9 @@ void Application::update(float deltaTime) {

    qApp->updateMyAvatarLookAtPosition();

+    // update sensorToWorldMatrix for camera and hand controllers
+    myAvatar->updateSensorToWorldMatrix();
+
    {
        PROFILE_RANGE_EX("MyAvatar", 0xffff00ff, (uint64_t)getActiveDisplayPlugin()->presentCount());
        avatarManager->updateMyAvatar(deltaTime);
@@ -3392,9 +3397,6 @@ void Application::update(float deltaTime) {
            QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "sendDownstreamAudioStatsPacket", Qt::QueuedConnection);
        }
    }

-    // update sensorToWorldMatrix for rendering camera.
-    myAvatar->updateSensorToWorldMatrix();
}
@@ -418,7 +418,7 @@ void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix) {
    _hmdSensorFacing = getFacingDir2D(_hmdSensorOrientation);
}

-// best called at end of main loop, just before rendering.
+// best called at end of main loop, after physics.
// update sensor to world matrix from current body position and hmd sensor.
// This is so the correct camera can be used for rendering.
void MyAvatar::updateSensorToWorldMatrix() {
@@ -1087,24 +1087,32 @@ static controller::Pose applyLowVelocityFilter(const controller::Pose& oldPose,
    return finalPose;
}

-void MyAvatar::setHandControllerPosesInWorldFrame(const controller::Pose& left, const controller::Pose& right) {
+void MyAvatar::setHandControllerPosesInSensorFrame(const controller::Pose& left, const controller::Pose& right) {
    if (controller::InputDevice::getLowVelocityFilter()) {
-        auto oldLeftPose = getLeftHandControllerPoseInWorldFrame();
-        auto oldRightPose = getRightHandControllerPoseInWorldFrame();
-        _leftHandControllerPoseInWorldFrameCache.set(applyLowVelocityFilter(oldLeftPose, left));
-        _rightHandControllerPoseInWorldFrameCache.set(applyLowVelocityFilter(oldRightPose, right));
+        auto oldLeftPose = getLeftHandControllerPoseInSensorFrame();
+        auto oldRightPose = getRightHandControllerPoseInSensorFrame();
+        _leftHandControllerPoseInSensorFrameCache.set(applyLowVelocityFilter(oldLeftPose, left));
+        _rightHandControllerPoseInSensorFrameCache.set(applyLowVelocityFilter(oldRightPose, right));
    } else {
-        _leftHandControllerPoseInWorldFrameCache.set(left);
-        _rightHandControllerPoseInWorldFrameCache.set(right);
+        _leftHandControllerPoseInSensorFrameCache.set(left);
+        _rightHandControllerPoseInSensorFrameCache.set(right);
    }
}

+controller::Pose MyAvatar::getLeftHandControllerPoseInSensorFrame() const {
+    return _leftHandControllerPoseInSensorFrameCache.get();
+}
+
+controller::Pose MyAvatar::getRightHandControllerPoseInSensorFrame() const {
+    return _rightHandControllerPoseInSensorFrameCache.get();
+}
+
controller::Pose MyAvatar::getLeftHandControllerPoseInWorldFrame() const {
-    return _leftHandControllerPoseInWorldFrameCache.get();
+    return _leftHandControllerPoseInSensorFrameCache.get().transform(getSensorToWorldMatrix());
}

controller::Pose MyAvatar::getRightHandControllerPoseInWorldFrame() const {
-    return _rightHandControllerPoseInWorldFrameCache.get();
+    return _rightHandControllerPoseInSensorFrameCache.get().transform(getSensorToWorldMatrix());
}

controller::Pose MyAvatar::getLeftHandControllerPoseInAvatarFrame() const {
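The cached poses move from world frame to sensor frame, and the world-frame getters now derive their result by transforming the cached sensor-frame pose through the current sensor-to-world matrix. ThreadSafeValueCache itself is not part of this diff; a minimal sketch of the idea behind it (a copy-in/copy-out value guarded by a mutex, not the hifi implementation) might look like this:

```cpp
#include <mutex>

// Not the hifi class, just the shape of a set()/get() cache that can be
// written from the input thread and read from the render thread.
template <typename T>
class ValueCacheSketch {
public:
    explicit ValueCacheSketch(const T& initial = T()) : _value(initial) {}

    void set(const T& value) {
        std::lock_guard<std::mutex> guard(_mutex);
        _value = value;
    }

    T get() const {
        std::lock_guard<std::mutex> guard(_mutex);
        return _value;   // returns a copy, so no caller ever holds the lock for long
    }

private:
    mutable std::mutex _mutex;
    T _value;
};
```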
@@ -247,7 +247,9 @@ public:

    virtual void rebuildCollisionShape() override;

-    void setHandControllerPosesInWorldFrame(const controller::Pose& left, const controller::Pose& right);
+    void setHandControllerPosesInSensorFrame(const controller::Pose& left, const controller::Pose& right);
+    controller::Pose getLeftHandControllerPoseInSensorFrame() const;
+    controller::Pose getRightHandControllerPoseInSensorFrame() const;
    controller::Pose getLeftHandControllerPoseInWorldFrame() const;
    controller::Pose getRightHandControllerPoseInWorldFrame() const;
    controller::Pose getLeftHandControllerPoseInAvatarFrame() const;
@@ -451,9 +453,9 @@ private:
    bool _hoverReferenceCameraFacingIsCaptured { false };
    glm::vec3 _hoverReferenceCameraFacing { 0.0f, 0.0f, -1.0f }; // hmd sensor space

-    // These are stored in WORLD frame
-    ThreadSafeValueCache<controller::Pose> _leftHandControllerPoseInWorldFrameCache { controller::Pose() };
-    ThreadSafeValueCache<controller::Pose> _rightHandControllerPoseInWorldFrameCache { controller::Pose() };
+    // These are stored in SENSOR frame
+    ThreadSafeValueCache<controller::Pose> _leftHandControllerPoseInSensorFrameCache { controller::Pose() };
+    ThreadSafeValueCache<controller::Pose> _rightHandControllerPoseInSensorFrameCache { controller::Pose() };

    float AVATAR_MOVEMENT_ENERGY_CONSTANT { 0.001f };
    float AUDIO_ENERGY_CONSTANT { 0.000001f };
@@ -23,6 +23,7 @@
#include <PerfStat.h>
#include <DependencyManager.h>
#include <NumericalConstants.h>
+#include <Finally.h>

#include "OffscreenGLCanvas.h"
#include "GLEscrow.h"
@@ -84,6 +85,7 @@ protected:
    Queue _queue;
    QMutex _mutex;
    QWaitCondition _waitCondition;
+    std::atomic<bool> _rendering { false };

private:
    // Event-driven methods
@@ -214,22 +216,19 @@ void OffscreenQmlRenderThread::init() {
    connect(_renderControl, &QQuickRenderControl::sceneChanged, _surface, &OffscreenQmlSurface::requestUpdate);

    if (!_canvas.makeCurrent()) {
-        qWarning("Failed to make context current on render thread");
+        // Failed to make GL context current, this OffscreenQmlSurface is basically dead
+        qWarning("Failed to make context current on QML Renderer Thread");
        return;
    }

    _renderControl->initialize(_canvas.getContext());
    setupFbo();
    _escrow.setRecycler([this](GLuint texture){
        _textures.recycleTexture(texture);
    });
    _canvas.doneCurrent();
}

void OffscreenQmlRenderThread::cleanup() {
    if (!_canvas.makeCurrent()) {
        qFatal("Failed to make context current on render thread");
        return;
    }
    _renderControl->invalidate();

    _fbo.reset();
@@ -237,7 +236,6 @@ void OffscreenQmlRenderThread::cleanup() {
    _textures.clear();

    _canvas.doneCurrent();

    _canvas.getContextObject()->moveToThread(QCoreApplication::instance()->thread());

    _quit = true;
@@ -245,57 +243,55 @@ void OffscreenQmlRenderThread::cleanup() {

void OffscreenQmlRenderThread::resize() {
    // Lock _newSize changes
    QMutexLocker locker(&_mutex);
    {
        QMutexLocker locker(&_mutex);

        // Update our members
        if (_quickWindow) {
            _quickWindow->setGeometry(QRect(QPoint(), _newSize));
            _quickWindow->contentItem()->setSize(_newSize);
    // Update our members
    if (_quickWindow) {
        _quickWindow->setGeometry(QRect(QPoint(), _newSize));
        _quickWindow->contentItem()->setSize(_newSize);
    }

    // Qt bug in 5.4 forces this check of pixel ratio,
    // even though we're rendering offscreen.
    qreal pixelRatio = 1.0;
    if (_renderControl && _renderControl->_renderWindow) {
        pixelRatio = _renderControl->_renderWindow->devicePixelRatio();
    }

    uvec2 newOffscreenSize = toGlm(_newSize * pixelRatio);
    if (newOffscreenSize == _size) {
        return;
    }

    qDebug() << "Offscreen UI resizing to " << _newSize.width() << "x" << _newSize.height() << " with pixel ratio " << pixelRatio;
    _size = newOffscreenSize;
    }

    // Qt bug in 5.4 forces this check of pixel ratio,
    // even though we're rendering offscreen.
    qreal pixelRatio = 1.0;
    if (_renderControl && _renderControl->_renderWindow) {
        pixelRatio = _renderControl->_renderWindow->devicePixelRatio();
    }

    uvec2 newOffscreenSize = toGlm(_newSize * pixelRatio);
    _textures.setSize(newOffscreenSize);
    if (newOffscreenSize == _size) {
        return;
    }
    _size = newOffscreenSize;

    // Clear out any fbos with the old size
    if (!_canvas.makeCurrent()) {
        qWarning("Failed to make context current on render thread");
        return;
    }

    qDebug() << "Offscreen UI resizing to " << _newSize.width() << "x" << _newSize.height() << " with pixel ratio " << pixelRatio;

    locker.unlock();

    _textures.setSize(_size);
    setupFbo();
    _canvas.doneCurrent();
}

void OffscreenQmlRenderThread::render() {
    if (_surface->_paused) {
    // Ensure we always release the main thread
    Finally releaseMainThread([this] {
        _waitCondition.wakeOne();
    });

    if (_surface->_paused) {
        return;
    }

    if (!_canvas.makeCurrent()) {
        qWarning("Failed to make context current on render thread");
        return;
    }
    _rendering = true;
    Finally unmarkRenderingFlag([this] {
        _rendering = false;
    });

    QMutexLocker locker(&_mutex);
    _renderControl->sync();
    _waitCondition.wakeOne();
    locker.unlock();
    {
        QMutexLocker locker(&_mutex);
        _renderControl->sync();
        releaseMainThread.trigger();
    }

    using namespace oglplus;
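In the render hunk above, the GUI thread (see updateQuick() further down) queues a RENDER request and then blocks on _waitCondition until the scene-graph sync has run on the render thread; the Finally guard makes sure the wake-up happens on every exit path, and trigger() fires it early right after sync(). A stripped-down sketch of that handshake with plain Qt primitives (the function names are illustrative, not hifi APIs):

```cpp
#include <QMutex>
#include <QMutexLocker>
#include <QWaitCondition>

QMutex mutex;
QWaitCondition syncDone;

// GUI side: publish a render request under the mutex, then sleep until the sync is done.
void requestRenderAndWait() {
    QMutexLocker locker(&mutex);
    // ...queue the RENDER event here, while still holding the mutex...
    syncDone.wait(&mutex);   // atomically releases the mutex while waiting
}

// Render side: runs after the request is visible; sync under the mutex, then wake the GUI thread.
void handleRenderRequest() {
    {
        QMutexLocker locker(&mutex);
        // ...sync the scene graph while the GUI thread is parked...
    }
    syncDone.wakeOne();      // the GUI thread resumes in requestRenderAndWait()
}
```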
@@ -308,6 +304,7 @@ void OffscreenQmlRenderThread::render() {
    _fbo->AttachTexture(Framebuffer::Target::Draw, FramebufferAttachment::Color, *texture, 0);
    _fbo->Complete(Framebuffer::Target::Draw);
    {
        PROFILE_RANGE("qml_render->rendercontrol")
        _renderControl->render();
        // FIXME The web browsers seem to be leaving GL in an error state.
        // Need a debug context with sync logging to figure out why.
@@ -338,10 +335,10 @@ OffscreenQmlSurface::~OffscreenQmlSurface() {
    QObject::disconnect(&_updateTimer);
    QObject::disconnect(qApp);

-    qDebug() << "Stopping QML render thread " << _renderer->currentThreadId();
+    qDebug() << "Stopping QML Renderer Thread " << _renderer->currentThreadId();
    _renderer->_queue.add(STOP);
    if (!_renderer->wait(MAX_SHUTDOWN_WAIT_SECS * USECS_PER_SECOND)) {
-        qWarning() << "Failed to shut down the QML render thread";
+        qWarning() << "Failed to shut down the QML Renderer Thread";
    }

    delete _rootItem;
@@ -396,8 +393,6 @@ void OffscreenQmlSurface::resize(const QSize& newSize_) {
        std::max(static_cast<int>(scale * newSize.height()), 10));
    }

    QSize currentSize = _renderer->_quickWindow->geometry().size();
    if (newSize == currentSize) {
        return;
@@ -508,7 +503,12 @@ QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QOb
}

void OffscreenQmlSurface::updateQuick() {
-    if (!_renderer || !_renderer->allowNewFrame(_maxFps)) {
+    // If we're
+    // a) not set up
+    // b) already rendering a frame
+    // c) rendering too fast
+    // then skip this
+    if (!_renderer || _renderer->_rendering || !_renderer->allowNewFrame(_maxFps)) {
        return;
    }
@@ -518,11 +518,11 @@ void OffscreenQmlSurface::updateQuick() {
    }

    if (_render) {
        PROFILE_RANGE(__FUNCTION__);
        // Lock the GUI size while syncing
        QMutexLocker locker(&(_renderer->_mutex));
        _renderer->_queue.add(RENDER);
        _renderer->_waitCondition.wait(&(_renderer->_mutex));

        _render = false;
    }
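The rewritten guard in updateQuick() also skips a frame while the previous one is still in flight (_renderer->_rendering). allowNewFrame() itself is not shown in this diff, but a throttle of that general shape could be sketched like this (purely illustrative, not the hifi implementation):

```cpp
#include <QElapsedTimer>
#include <QtGlobal>
#include <atomic>

// Illustrative frame gate: refuse a new frame if one is still being rendered, or if the
// last one started less than 1000 / maxFps milliseconds ago.
class FrameGate {
public:
    bool allowNewFrame(int maxFps) {
        if (_rendering.load()) {
            return false;                                   // a frame is already in flight
        }
        qint64 minIntervalMs = 1000 / qMax(1, maxFps);
        if (_sinceLastFrame.isValid() && _sinceLastFrame.elapsed() < minIntervalMs) {
            return false;                                   // rendering too fast
        }
        _sinceLastFrame.restart();
        return true;
    }

    std::atomic<bool> _rendering { false };                 // set by the render thread, as in the diff

private:
    QElapsedTimer _sinceLastFrame;
};
```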
@@ -219,6 +219,8 @@ public:

    udt::Socket::StatsVector sampleStatsForAllConnections() { return _nodeSocket.sampleStatsForAllConnections(); }

+    void setConnectionMaxBandwidth(int maxBandwidth) { _nodeSocket.setConnectionMaxBandwidth(maxBandwidth); }
+
public slots:
    void reset();
    void eraseAllNodes();
@@ -380,6 +380,7 @@ void Resource::finishedLoading(bool success) {
        _failedToLoad = true;
    }
    _loadPriorities.clear();
+    emit finished(success);
}

void Resource::reinsert() {
@@ -201,6 +201,9 @@ signals:
    /// This can be used instead of downloadFinished to access data before it is processed.
    void loaded(const QByteArray& request);

+    /// Fired when the resource has finished loading.
+    void finished(bool success);
+
    /// Fired when the resource failed to load.
    void failed(QNetworkReply::NetworkError error);
@@ -20,13 +20,19 @@ using namespace std::chrono;

static const double USECS_PER_SECOND = 1000000.0;

+void CongestionControl::setMaxBandwidth(int maxBandwidth) {
+    _maxBandwidth = maxBandwidth;
+    setPacketSendPeriod(_packetSendPeriod);
+}
+
void CongestionControl::setPacketSendPeriod(double newSendPeriod) {
    Q_ASSERT_X(newSendPeriod >= 0, "CongestionControl::setPacketPeriod", "Can not set a negative packet send period");

-    if (_maxBandwidth > 0) {
+    auto maxBandwidth = _maxBandwidth.load();
+    if (maxBandwidth > 0) {
        // anytime the packet send period is about to be increased, make sure it stays below the minimum period,
        // calculated based on the maximum desired bandwidth
-        double minPacketSendPeriod = USECS_PER_SECOND / (((double) _maxBandwidth) / _mss);
+        double minPacketSendPeriod = USECS_PER_SECOND / (((double) maxBandwidth) / _mss);
        _packetSendPeriod = std::max(newSendPeriod, minPacketSendPeriod);
    } else {
        _packetSendPeriod = newSendPeriod;
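setMaxBandwidth() turns the cap into a floor on the packet send period: with _maxBandwidth in bytes per second and _mss bytes per packet, at most _maxBandwidth / _mss packets may be sent per second. A worked example with illustrative numbers (10 Mb/s as configured above, and a 1500-byte MSS, which is an assumption, not a value taken from this diff):

```cpp
#include <algorithm>
#include <iostream>

int main() {
    const double USECS_PER_SECOND = 1000000.0;

    double maxBandwidth = 1310720.0;   // bytes per second (10 Mb/s, per the asset-server hunk)
    double mss = 1500.0;               // assumed bytes per packet, for illustration only

    double maxPacketsPerSecond = maxBandwidth / mss;                       // ~873.8
    double minPacketSendPeriod = USECS_PER_SECOND / maxPacketsPerSecond;   // ~1144.4 usecs between packets

    double requestedPeriod = 500.0;    // what the congestion controller would like to use
    double clamped = std::max(requestedPeriod, minPacketSendPeriod);       // stays at ~1144.4

    std::cout << "packet send period: " << clamped << " usecs" << std::endl;
    return 0;
}
```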
@@ -39,7 +45,7 @@ DefaultCC::DefaultCC() :
    _mss = udt::MAX_PACKET_SIZE_WITH_UDP_HEADER;

    _congestionWindowSize = 16.0;
-    _packetSendPeriod = 1.0;
+    setPacketSendPeriod(1.0);
}

void DefaultCC::onACK(SequenceNumber ackNum) {
@@ -73,10 +79,10 @@ void DefaultCC::onACK(SequenceNumber ackNum) {

        if (_receiveRate > 0) {
            // if we have a valid receive rate we set the send period to whatever the receive rate dictates
-            _packetSendPeriod = USECS_PER_SECOND / _receiveRate;
+            setPacketSendPeriod(USECS_PER_SECOND / _receiveRate);
        } else {
            // no valid receive rate, packet send period is dictated by estimated RTT and current congestion window size
-            _packetSendPeriod = (_rtt + synInterval()) / _congestionWindowSize;
+            setPacketSendPeriod((_rtt + synInterval()) / _congestionWindowSize);
        }
    }
    } else {
@@ -148,8 +154,8 @@ void DefaultCC::onLoss(SequenceNumber rangeStart, SequenceNumber rangeEnd) {
    if (rangeStart > _lastDecreaseMaxSeq) {

        _lastDecreasePeriod = _packetSendPeriod;

-        _packetSendPeriod = ceil(_packetSendPeriod * INTER_PACKET_ARRIVAL_INCREASE);
+        setPacketSendPeriod(ceil(_packetSendPeriod * INTER_PACKET_ARRIVAL_INCREASE));

        // use EWMA to update the average number of NAKs per congestion
        static const double NAK_EWMA_ALPHA = 0.125;
@@ -175,7 +181,7 @@ void DefaultCC::onLoss(SequenceNumber rangeStart, SequenceNumber rangeEnd) {
        // there have been fewer than MAX_DECREASES_PER_CONGESTION_EPOCH AND this NAK matches the random count at which we
        // decided we would decrease the packet send period

-        _packetSendPeriod = ceil(_packetSendPeriod * INTER_PACKET_ARRIVAL_INCREASE);
+        setPacketSendPeriod(ceil(_packetSendPeriod * INTER_PACKET_ARRIVAL_INCREASE));
        _lastDecreaseMaxSeq = _sendCurrSeqNum;
    }
}
@@ -198,12 +204,12 @@ void DefaultCC::stopSlowStart() {

    if (_receiveRate > 0) {
        // Set the sending rate to the receiving rate.
-        _packetSendPeriod = USECS_PER_SECOND / _receiveRate;
+        setPacketSendPeriod(USECS_PER_SECOND / _receiveRate);
    } else {
        // If no receiving rate is observed, we have to compute the sending
        // rate according to the current window size, and decrease it
        // using the method below.
-        _packetSendPeriod = _congestionWindowSize / (_rtt + synInterval());
+        setPacketSendPeriod(_congestionWindowSize / (_rtt + synInterval()));
    }
}
@@ -12,6 +12,7 @@
#ifndef hifi_CongestionControl_h
#define hifi_CongestionControl_h

+#include <atomic>
#include <memory>
#include <vector>
#include <memory>
@@ -37,6 +38,7 @@ public:
    virtual ~CongestionControl() {}

    int synInterval() const { return _synInterval; }
+    void setMaxBandwidth(int maxBandwidth);

    virtual void init() {}
    virtual void onACK(SequenceNumber ackNum) {}
@@ -49,7 +51,6 @@ protected:
    void setMSS(int mss) { _mss = mss; }
    void setMaxCongestionWindowSize(int window) { _maxCongestionWindowSize = window; }
    void setBandwidth(int bandwidth) { _bandwidth = bandwidth; }
-    void setMaxBandwidth(int maxBandwidth) { _maxBandwidth = maxBandwidth; }
    virtual void setInitialSendSequenceNumber(SequenceNumber seqNum) = 0;
    void setSendCurrentSequenceNumber(SequenceNumber seqNum) { _sendCurrSeqNum = seqNum; }
    void setReceiveRate(int rate) { _receiveRate = rate; }
@@ -60,7 +61,7 @@ protected:
    double _congestionWindowSize { 16.0 }; // Congestion window size, in packets

    int _bandwidth { 0 }; // estimated bandwidth, packets per second
-    int _maxBandwidth { -1 }; // Maximum desired bandwidth, packets per second
+    std::atomic<int> _maxBandwidth { -1 }; // Maximum desired bandwidth, bytes per second
    double _maxCongestionWindowSize { 0.0 }; // maximum cwnd size, in packets

    int _mss { 0 }; // Maximum Packet Size, including all packet headers
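_maxBandwidth becomes a std::atomic<int> because Socket::setConnectionMaxBandwidth() (further down) can update it from another thread while the congestion controller reads it inside setPacketSendPeriod(). A minimal illustration of that load/store pattern (none of this is hifi code):

```cpp
#include <atomic>
#include <iostream>
#include <thread>

int main() {
    std::atomic<int> maxBandwidth { -1 };

    // Writer thread: stores the new cap, e.g. 10 Mb/s expressed in bytes per second.
    std::thread writer([&maxBandwidth] {
        maxBandwidth.store(1310720);
    });

    // Reader: sees either the old value (-1) or the new one (1310720), never a torn value.
    int observed = maxBandwidth.load();
    writer.join();

    std::cout << "observed " << observed << ", final " << maxBandwidth.load() << std::endl;
    return 0;
}
```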
@@ -80,6 +80,10 @@ void Connection::resetRTT() {
    _rttVariance = _rtt / 2;
}

+void Connection::setMaxBandwidth(int maxBandwidth) {
+    _congestionControl->setMaxBandwidth(maxBandwidth);
+}
+
SendQueue& Connection::getSendQueue() {
    if (!_sendQueue) {
@@ -76,6 +76,8 @@ public:

    HifiSockAddr getDestination() const { return _destination; }

+    void setMaxBandwidth(int maxBandwidth);
+
signals:
    void packetSent();
    void connectionInactive(const HifiSockAddr& sockAddr);
@@ -176,7 +176,9 @@ Connection& Socket::findOrCreateConnection(const HifiSockAddr& sockAddr) {
    auto it = _connectionsHash.find(sockAddr);

    if (it == _connectionsHash.end()) {
-        auto connection = std::unique_ptr<Connection>(new Connection(this, sockAddr, _ccFactory->create()));
+        auto congestionControl = _ccFactory->create();
+        congestionControl->setMaxBandwidth(_maxBandwidth);
+        auto connection = std::unique_ptr<Connection>(new Connection(this, sockAddr, std::move(congestionControl)));

        // we queue the connection to cleanup connection in case it asks for it during its own rate control sync
        QObject::connect(connection.get(), &Connection::connectionInactive, this, &Socket::cleanupConnection);
@@ -350,6 +352,17 @@ void Socket::setCongestionControlFactory(std::unique_ptr<CongestionControlVirtua
    _synInterval = _ccFactory->synInterval();
}

+void Socket::setConnectionMaxBandwidth(int maxBandwidth) {
+    qInfo() << "Setting socket's maximum bandwith to" << maxBandwidth << ". ("
+        << _connectionsHash.size() << "live connections)";
+    _maxBandwidth = maxBandwidth;
+    for (auto& pair : _connectionsHash) {
+        auto& connection = pair.second;
+        connection->setMaxBandwidth(_maxBandwidth);
+    }
+}
+
ConnectionStats::Stats Socket::sampleStatsForConnection(const HifiSockAddr& destination) {
    auto it = _connectionsHash.find(destination);
    if (it != _connectionsHash.end()) {
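Taken together, the two Socket hunks apply the cap in both directions: setConnectionMaxBandwidth() pushes it to every existing connection and remembers it in _maxBandwidth, while findOrCreateConnection() applies the remembered value to connections created later. A generic sketch of that pattern (the Registry and Entry names are invented for illustration):

```cpp
#include <memory>
#include <string>
#include <unordered_map>

struct Entry {
    int maxBandwidth { -1 };
    void setMaxBandwidth(int value) { maxBandwidth = value; }
};

class Registry {
public:
    void setMaxBandwidthForAll(int value) {
        _maxBandwidth = value;                       // remembered for entries created later
        for (auto& pair : _entries) {
            pair.second->setMaxBandwidth(value);     // pushed to every existing entry
        }
    }

    Entry& findOrCreate(const std::string& key) {
        auto it = _entries.find(key);
        if (it == _entries.end()) {
            auto entry = std::make_unique<Entry>();
            entry->setMaxBandwidth(_maxBandwidth);   // new entries inherit the current cap
            it = _entries.emplace(key, std::move(entry)).first;
        }
        return *it->second;
    }

private:
    int _maxBandwidth { -1 };
    std::unordered_map<std::string, std::unique_ptr<Entry>> _entries;
};
```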
@@ -72,6 +72,7 @@ public:
        { _unfilteredHandlers[senderSockAddr] = handler; }

    void setCongestionControlFactory(std::unique_ptr<CongestionControlVirtualFactory> ccFactory);
+    void setConnectionMaxBandwidth(int maxBandwidth);

    void messageReceived(std::unique_ptr<Packet> packet);
    void messageFailed(Connection* connection, Packet::MessageNumber messageNumber);
@@ -109,8 +110,10 @@ private:
    std::unordered_map<HifiSockAddr, SequenceNumber> _unreliableSequenceNumbers;
    std::unordered_map<HifiSockAddr, std::unique_ptr<Connection>> _connectionsHash;

-    int _synInterval = 10; // 10ms
-    QTimer* _synTimer;
+    int _synInterval { 10 }; // 10ms
+    QTimer* _synTimer { nullptr };
+
+    int _maxBandwidth { -1 };

    std::unique_ptr<CongestionControlVirtualFactory> _ccFactory { new CongestionControlFactory<DefaultCC>() };
@@ -158,6 +158,13 @@ void ScriptEngine::disconnectNonEssentialSignals() {
}

void ScriptEngine::runInThread() {
+    Q_ASSERT_X(!_isThreaded, "ScriptEngine::runInThread()", "runInThread should not be called more than once");
+
+    if (_isThreaded) {
+        qCWarning(scriptengine) << "ScriptEngine already running in thread: " << getFilename();
+        return;
+    }
+
    _isThreaded = true;
    QThread* workerThread = new QThread(); // thread is not owned, so we need to manage the delete
    QString scriptEngineName = QString("Script Thread:") + getFilename();
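The added guard pairs a debug-build assertion with a release-build early return: calling runInThread() twice trips the assert for developers but only logs a warning and bails out in production. A generic sketch of the same idiom (the class and method names here are made up):

```cpp
#include <QDebug>
#include <QtGlobal>

// Hypothetical one-shot starter using the assert-then-bail idiom from the hunk above.
class OneShotWorker {
public:
    void start() {
        Q_ASSERT_X(!_started, "OneShotWorker::start", "start should not be called more than once");

        if (_started) {   // reached only in release builds, where Q_ASSERT_X compiles away
            qWarning() << "OneShotWorker::start called twice, ignoring";
            return;
        }
        _started = true;
        // ...launch the actual work here...
    }

private:
    bool _started { false };
};
```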
@@ -20,6 +20,10 @@
    template <typename F>
    Finally(F f) : _f(f) {}
    ~Finally() { _f(); }
+    void trigger() {
+        _f();
+        _f = [] {};
+    }
private:
    std::function<void()> _f;
};
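trigger() lets a Finally guard fire early and then become a no-op, which is how render() above releases the main thread right after the sync instead of at function exit. A small usage sketch built on the same class (the printed strings are only for illustration):

```cpp
#include <functional>
#include <iostream>

// Same shape as the Finally helper in the hunk above, reproduced here so the example is self-contained.
class Finally {
public:
    template <typename F>
    Finally(F f) : _f(f) {}
    ~Finally() { _f(); }
    void trigger() {
        _f();
        _f = [] {};   // the destructor now does nothing
    }
private:
    std::function<void()> _f;
};

int main() {
    Finally guard([] { std::cout << "released" << std::endl; });
    std::cout << "critical section" << std::endl;
    guard.trigger();                                   // "released" is printed here, exactly once
    std::cout << "rest of the work" << std::endl;      // nothing extra when guard is destroyed
    return 0;
}
```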
@@ -82,7 +82,6 @@ void LogHandler::flushRepeatedMessages() {
}

QString LogHandler::printMessage(LogMsgType type, const QMessageLogContext& context, const QString& message) {
-
    if (message.isEmpty()) {
        return QString();
    }
@@ -114,7 +114,6 @@ QScriptValue QmlWindowClass::internalConstructor(const QString& qmlSource,
        }
    } else {
        auto argumentObject = context->argument(0);
-        qDebug() << argumentObject.toString();
        if (!argumentObject.property(TITLE_PROPERTY).isUndefined()) {
            title = argumentObject.property(TITLE_PROPERTY).toString();
        }