diff --git a/cmake/macros/TargetWebRTC.cmake b/cmake/macros/TargetWebRTC.cmake
new file mode 100644
index 0000000000..d2821528df
--- /dev/null
+++ b/cmake/macros/TargetWebRTC.cmake
@@ -0,0 +1,24 @@
+#
+# Copyright 2019 High Fidelity, Inc.
+#
+# Distributed under the Apache License, Version 2.0.
+# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+#
+macro(TARGET_WEBRTC)
+ if (ANDROID)
+ # I don't yet have working libwebrtc for android
+ # include(SelectLibraryConfigurations)
+ # set(INSTALL_DIR ${HIFI_ANDROID_PRECOMPILED}/webrtc/webrtc)
+ # set(WEBRTC_INCLUDE_DIRS "${INSTALL_DIR}/include/webrtc")
+ # set(WEBRTC_LIBRARY_DEBUG ${INSTALL_DIR}/debug/lib/libwebrtc.a)
+ # set(WEBRTC_LIBRARY_RELEASE ${INSTALL_DIR}/lib/libwebrtc.a)
+ # select_library_configurations(WEBRTC)
+ else()
+ set(WEBRTC_INCLUDE_DIRS "${VCPKG_INSTALL_ROOT}/include/webrtc")
+ find_library(WEBRTC_LIBRARY NAMES webrtc PATHS ${VCPKG_INSTALL_ROOT}/lib/ NO_DEFAULT_PATH)
+ target_include_directories(${TARGET_NAME} SYSTEM PUBLIC ${WEBRTC_INCLUDE_DIRS})
+ target_link_libraries(${TARGET_NAME} ${WEBRTC_LIBRARY})
+ endif()
+
+
+endmacro()
diff --git a/cmake/ports/hifi-deps/CONTROL b/cmake/ports/hifi-deps/CONTROL
index 5f860a1620..2441de9002 100644
--- a/cmake/ports/hifi-deps/CONTROL
+++ b/cmake/ports/hifi-deps/CONTROL
@@ -1,4 +1,4 @@
Source: hifi-deps
Version: 0.1
Description: Collected dependencies for High Fidelity applications
-Build-Depends: bullet3, draco, etc2comp, glm, nvtt, openexr (!android), openssl (windows), tbb (!android&!osx), zlib
+Build-Depends: bullet3, draco, etc2comp, glm, nvtt, openexr (!android), openssl (windows), tbb (!android&!osx), zlib, webrtc (!android)
diff --git a/cmake/ports/webrtc/CONTROL b/cmake/ports/webrtc/CONTROL
new file mode 100644
index 0000000000..12a76920b9
--- /dev/null
+++ b/cmake/ports/webrtc/CONTROL
@@ -0,0 +1,3 @@
+Source: webrtc
+Version: 20190626
+Description: WebRTC
diff --git a/cmake/ports/webrtc/portfile.cmake b/cmake/ports/webrtc/portfile.cmake
new file mode 100644
index 0000000000..3f2fb7a6ab
--- /dev/null
+++ b/cmake/ports/webrtc/portfile.cmake
@@ -0,0 +1,36 @@
+include(vcpkg_common_functions)
+set(WEBRTC_VERSION 20190626)
+set(MASTER_COPY_SOURCE_PATH ${CURRENT_BUILDTREES_DIR}/src)
+
+if (ANDROID)
+ # this is handled by hifi_android.py
+elseif (WIN32)
+ vcpkg_download_distfile(
+ WEBRTC_SOURCE_ARCHIVE
+ URLS https://hifi-public.s3.amazonaws.com/seth/webrtc-20190626-windows.zip
+ SHA512 c0848eddb1579b3bb0496b8785e24f30470f3c477145035fd729264a326a467b9467ae9f426aa5d72d168ad9e9bf2c279150744832736bdf39064d24b04de1a3
+ FILENAME webrtc-20190626-windows.zip
+ )
+elseif (APPLE)
+ vcpkg_download_distfile(
+ WEBRTC_SOURCE_ARCHIVE
+ URLS https://hifi-public.s3.amazonaws.com/seth/webrtc-20190626-osx.tar.gz
+ SHA512 fc70cec1b5ee87395137b7090f424e2fc2300fc17d744d5ffa1cf7aa0e0f1a069a9d72ba1ad2fb4a640ebeb6c218bda24351ba0083e1ff96c4a4b5032648a9d2
+ FILENAME webrtc-20190626-osx.tar.gz
+ )
+else ()
+ # else Linux desktop
+ vcpkg_download_distfile(
+ WEBRTC_SOURCE_ARCHIVE
+ URLS https://hifi-public.s3.amazonaws.com/seth/webrtc-20190626-linux.tar.gz
+ SHA512 07d7776551aa78cb09a3ef088a8dee7762735c168c243053b262083d90a1d258cec66dc386f6903da5c4461921a3c2db157a1ee106a2b47e7756cb424b66cc43
+ FILENAME webrtc-20190626-linux.tar.gz
+ )
+endif ()
+
+vcpkg_extract_source_archive(${WEBRTC_SOURCE_ARCHIVE})
+
+file(COPY ${MASTER_COPY_SOURCE_PATH}/webrtc/include DESTINATION ${CURRENT_PACKAGES_DIR})
+file(COPY ${MASTER_COPY_SOURCE_PATH}/webrtc/lib DESTINATION ${CURRENT_PACKAGES_DIR})
+file(COPY ${MASTER_COPY_SOURCE_PATH}/webrtc/share DESTINATION ${CURRENT_PACKAGES_DIR})
+file(COPY ${MASTER_COPY_SOURCE_PATH}/webrtc/debug DESTINATION ${CURRENT_PACKAGES_DIR})
diff --git a/domain-server/resources/web/content/js/content.js b/domain-server/resources/web/content/js/content.js
index 9b5c807245..bf4cf07b26 100644
--- a/domain-server/resources/web/content/js/content.js
+++ b/domain-server/resources/web/content/js/content.js
@@ -130,12 +130,12 @@ $(document).ready(function(){
html += "
File Name | ";
html += "Created | ";
html += "Installed | ";
- //html += "Installed By | ";
+ html += "Installed By | ";
html += " | ";
html += " | ";
html += " | ";
html += " | ";
- //html += " |
";
+ html += " | ";
html += "";
$('#' + Settings.INSTALLED_CONTENT + ' .panel-body').html(html);
}
@@ -379,7 +379,7 @@ $(document).ready(function(){
$('#' + INSTALLED_CONTENT_FILENAME_ID).text(data.installed_content.filename);
$('#' + INSTALLED_CONTENT_CREATED_ID).text(data.installed_content.creation_time ? moment(data.installed_content.creation_time).format('lll') : "");
$('#' + INSTALLED_CONTENT_INSTALLED_ID).text(data.installed_content.install_time ? moment(data.installed_content.install_time).format('lll') : "");
- //$('#' + INSTALLED_CONTENT_INSTALLED_BY_ID).text(data.installed_content.installed_by);
+ $('#' + INSTALLED_CONTENT_INSTALLED_BY_ID).text(data.installed_content.installed_by);
// update the progress bars for current restore status
if (data.status.isRecovering) {
diff --git a/domain-server/src/AssetsBackupHandler.cpp b/domain-server/src/AssetsBackupHandler.cpp
index d978e4ea56..6ccfb2ad57 100644
--- a/domain-server/src/AssetsBackupHandler.cpp
+++ b/domain-server/src/AssetsBackupHandler.cpp
@@ -278,7 +278,7 @@ void AssetsBackupHandler::createBackup(const QString& backupName, QuaZip& zip) {
_backups.emplace_back(backupName, mappings, false);
}
-std::pair<bool, QString> AssetsBackupHandler::recoverBackup(const QString& backupName, QuaZip& zip, const QString& sourceFilename) {
+std::pair<bool, QString> AssetsBackupHandler::recoverBackup(const QString& backupName, QuaZip& zip, const QString& username, const QString& sourceFilename) {
Q_ASSERT(QThread::currentThread() == thread());
if (operationInProgress()) {
diff --git a/domain-server/src/AssetsBackupHandler.h b/domain-server/src/AssetsBackupHandler.h
index c8f20ab965..703b844afc 100644
--- a/domain-server/src/AssetsBackupHandler.h
+++ b/domain-server/src/AssetsBackupHandler.h
@@ -38,7 +38,7 @@ public:
void loadBackup(const QString& backupName, QuaZip& zip) override;
void loadingComplete() override;
void createBackup(const QString& backupName, QuaZip& zip) override;
- std::pair<bool, QString> recoverBackup(const QString& backupName, QuaZip& zip, const QString& sourceFilename) override;
+ std::pair<bool, QString> recoverBackup(const QString& backupName, QuaZip& zip, const QString& username, const QString& sourceFilename) override;
void deleteBackup(const QString& backupName) override;
void consolidateBackup(const QString& backupName, QuaZip& zip) override;
bool isCorruptedBackup(const QString& backupName) override;
diff --git a/domain-server/src/BackupHandler.h b/domain-server/src/BackupHandler.h
index 278d43ade3..8ef11b432a 100644
--- a/domain-server/src/BackupHandler.h
+++ b/domain-server/src/BackupHandler.h
@@ -30,7 +30,7 @@ public:
virtual void loadBackup(const QString& backupName, QuaZip& zip) = 0;
virtual void loadingComplete() = 0;
virtual void createBackup(const QString& backupName, QuaZip& zip) = 0;
- virtual std::pair<bool, QString> recoverBackup(const QString& backupName, QuaZip& zip, const QString& sourceFilename) = 0;
+ virtual std::pair<bool, QString> recoverBackup(const QString& backupName, QuaZip& zip, const QString& username, const QString& sourceFilename) = 0;
virtual void deleteBackup(const QString& backupName) = 0;
virtual void consolidateBackup(const QString& backupName, QuaZip& zip) = 0;
virtual bool isCorruptedBackup(const QString& backupName) = 0;
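
The widened recoverBackup() interface above keeps the existing convention of returning a (success, error string) pair while threading the authenticated username through to every handler. A minimal, self-contained sketch of that convention follows; the names are illustrative and std::string stands in for QString so the snippet builds without Qt:

    // Sketch only: mirrors the (success, error) pair + std::tie pattern used by the
    // real handlers and by DomainContentBackupManager further down in this patch.
    #include <iostream>
    #include <string>
    #include <tuple>
    #include <utility>

    static std::pair<bool, std::string> recoverBackupSketch(const std::string& backupName,
                                                            const std::string& username) {
        if (backupName.empty()) {
            return { false, "Backup name was empty" };   // failure carries a message
        }
        // ... unzip and restore here, recording `username` as the installer ...
        return { true, std::string() };                  // success carries an empty message
    }

    int main() {
        bool success;
        std::string errorStr;
        std::tie(success, errorStr) = recoverBackupSketch("backup-2019.zip", "alice");
        std::cout << (success ? std::string("recovered") : "failed: " + errorStr) << "\n";
        return success ? 0 : 1;
    }

DomainContentBackupManager.cpp consumes the real handlers the same way via std::tie, as its recoverFromBackupZip() hunk below shows.
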
diff --git a/domain-server/src/ContentSettingsBackupHandler.cpp b/domain-server/src/ContentSettingsBackupHandler.cpp
index b3748a66a3..97a10e81c3 100644
--- a/domain-server/src/ContentSettingsBackupHandler.cpp
+++ b/domain-server/src/ContentSettingsBackupHandler.cpp
@@ -84,7 +84,7 @@ void ContentSettingsBackupHandler::createBackup(const QString& backupName, QuaZi
}
}
-std::pair<bool, QString> ContentSettingsBackupHandler::recoverBackup(const QString& backupName, QuaZip& zip, const QString& sourceFilename) {
+std::pair<bool, QString> ContentSettingsBackupHandler::recoverBackup(const QString& backupName, QuaZip& zip, const QString& username, const QString& sourceFilename) {
if (!zip.setCurrentFile(CONTENT_SETTINGS_BACKUP_FILENAME)) {
QString errorStr("Failed to find " + CONTENT_SETTINGS_BACKUP_FILENAME + " while recovering backup");
qWarning() << errorStr;
@@ -117,7 +117,7 @@ std::pair<bool, QString> ContentSettingsBackupHandler::recoverBackup(const QStri
{ INSTALLED_CONTENT_NAME, archiveJson[INSTALLED_CONTENT_NAME].toString()},
{ INSTALLED_CONTENT_CREATION_TIME, archiveJson[INSTALLED_CONTENT_CREATION_TIME].toVariant().toLongLong() },
{ INSTALLED_CONTENT_INSTALL_TIME, QDateTime::currentDateTime().currentMSecsSinceEpoch() },
- { INSTALLED_CONTENT_INSTALLED_BY, "" }
+ { INSTALLED_CONTENT_INSTALLED_BY, username }
};
jsonObject.insert(INSTALLED_CONTENT, installed_content);
diff --git a/domain-server/src/ContentSettingsBackupHandler.h b/domain-server/src/ContentSettingsBackupHandler.h
index 0e44a18424..0872bce59a 100644
--- a/domain-server/src/ContentSettingsBackupHandler.h
+++ b/domain-server/src/ContentSettingsBackupHandler.h
@@ -28,7 +28,7 @@ public:
void createBackup(const QString& backupName, QuaZip& zip) override;
- std::pair<bool, QString> recoverBackup(const QString& backupName, QuaZip& zip, const QString& sourceFilename) override;
+ std::pair<bool, QString> recoverBackup(const QString& backupName, QuaZip& zip, const QString& username, const QString& sourceFilename) override;
void deleteBackup(const QString& backupName) override {}
diff --git a/domain-server/src/DomainContentBackupManager.cpp b/domain-server/src/DomainContentBackupManager.cpp
index 11930f0b49..3a7897ec61 100644
--- a/domain-server/src/DomainContentBackupManager.cpp
+++ b/domain-server/src/DomainContentBackupManager.cpp
@@ -279,7 +279,7 @@ void DomainContentBackupManager::deleteBackup(MiniPromise::Promise promise, cons
});
}
-bool DomainContentBackupManager::recoverFromBackupZip(const QString& backupName, QuaZip& zip, const QString& sourceFilename, bool rollingBack) {
+bool DomainContentBackupManager::recoverFromBackupZip(const QString& backupName, QuaZip& zip, const QString& username, const QString& sourceFilename, bool rollingBack) {
if (!zip.open(QuaZip::Mode::mdUnzip)) {
qWarning() << "Failed to unzip file: " << backupName;
return false;
@@ -290,7 +290,7 @@ bool DomainContentBackupManager::recoverFromBackupZip(const QString& backupName,
for (auto& handler : _backupHandlers) {
bool success;
QString errorStr;
- std::tie(success, errorStr) = handler->recoverBackup(backupName, zip, sourceFilename);
+ std::tie(success, errorStr) = handler->recoverBackup(backupName, zip, username, sourceFilename);
if (!success) {
if (!rollingBack) {
_recoveryError = errorStr;
@@ -304,7 +304,7 @@ bool DomainContentBackupManager::recoverFromBackupZip(const QString& backupName,
}
}
-void DomainContentBackupManager::recoverFromBackup(MiniPromise::Promise promise, const QString& backupName) {
+void DomainContentBackupManager::recoverFromBackup(MiniPromise::Promise promise, const QString& backupName, const QString& username) {
if (_isRecovering) {
promise->resolve({
{ "success", false }
@@ -314,7 +314,7 @@ void DomainContentBackupManager::recoverFromBackup(MiniPromise::Promise promise,
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "recoverFromBackup", Q_ARG(MiniPromise::Promise, promise),
- Q_ARG(const QString&, backupName));
+ Q_ARG(const QString&, backupName), Q_ARG(const QString&, username));
return;
}
@@ -327,7 +327,7 @@ void DomainContentBackupManager::recoverFromBackup(MiniPromise::Promise promise,
if (backupFile.open(QIODevice::ReadOnly)) {
QuaZip zip { &backupFile };
- success = recoverFromBackupZip(backupName, zip, backupName);
+ success = recoverFromBackupZip(backupName, zip, username, backupName);
backupFile.close();
} else {
@@ -340,11 +340,11 @@ void DomainContentBackupManager::recoverFromBackup(MiniPromise::Promise promise,
});
}
-void DomainContentBackupManager::recoverFromUploadedBackup(MiniPromise::Promise promise, QByteArray uploadedBackup) {
+void DomainContentBackupManager::recoverFromUploadedBackup(MiniPromise::Promise promise, QByteArray uploadedBackup, QString username) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "recoverFromUploadedBackup", Q_ARG(MiniPromise::Promise, promise),
- Q_ARG(QByteArray, uploadedBackup));
+ Q_ARG(QByteArray, uploadedBackup), Q_ARG(QString, username));
return;
}
@@ -355,17 +355,17 @@ void DomainContentBackupManager::recoverFromUploadedBackup(MiniPromise::Promise
QuaZip uploadedZip { &uploadedBackupBuffer };
QString backupName = MANUAL_BACKUP_PREFIX + "uploaded.zip";
- bool success = recoverFromBackupZip(backupName, uploadedZip, QString());
+ bool success = recoverFromBackupZip(backupName, uploadedZip, username, QString());
promise->resolve({
{ "success", success }
});
}
-void DomainContentBackupManager::recoverFromUploadedFile(MiniPromise::Promise promise, QString uploadedFilename, QString sourceFilename) {
+void DomainContentBackupManager::recoverFromUploadedFile(MiniPromise::Promise promise, QString uploadedFilename, const QString username, QString sourceFilename) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "recoverFromUploadedFile", Q_ARG(MiniPromise::Promise, promise),
- Q_ARG(QString, uploadedFilename), Q_ARG(QString, sourceFilename));
+ Q_ARG(QString, uploadedFilename), Q_ARG(QString, username), Q_ARG(QString, sourceFilename));
return;
}
@@ -382,7 +382,7 @@ void DomainContentBackupManager::recoverFromUploadedFile(MiniPromise::Promise pr
QString backupName = MANUAL_BACKUP_PREFIX + "uploaded.zip";
- bool success = recoverFromBackupZip(backupName, uploadedZip, sourceFilename);
+ bool success = recoverFromBackupZip(backupName, uploadedZip, username, sourceFilename);
if (!success) {
@@ -394,7 +394,7 @@ void DomainContentBackupManager::recoverFromUploadedFile(MiniPromise::Promise pr
QuaZip uploadedZip { &uploadedFile };
QString backupName = MANUAL_BACKUP_PREFIX + "uploaded.zip";
- recoverFromBackupZip(backupName, uploadedZip, sourceFilename, true);
+ recoverFromBackupZip(backupName, uploadedZip, username, sourceFilename, true);
}
}
diff --git a/domain-server/src/DomainContentBackupManager.h b/domain-server/src/DomainContentBackupManager.h
index f5957d74f5..f2ee71d498 100644
--- a/domain-server/src/DomainContentBackupManager.h
+++ b/domain-server/src/DomainContentBackupManager.h
@@ -95,9 +95,9 @@ public:
public slots:
void getAllBackupsAndStatus(MiniPromise::Promise promise);
void createManualBackup(MiniPromise::Promise promise, const QString& name);
- void recoverFromBackup(MiniPromise::Promise promise, const QString& backupName);
- void recoverFromUploadedBackup(MiniPromise::Promise promise, QByteArray uploadedBackup);
- void recoverFromUploadedFile(MiniPromise::Promise promise, QString uploadedFilename, QString sourceFilename);
+ void recoverFromBackup(MiniPromise::Promise promise, const QString& backupName, const QString& username);
+ void recoverFromUploadedBackup(MiniPromise::Promise promise, QByteArray uploadedBackup, QString username);
+ void recoverFromUploadedFile(MiniPromise::Promise promise, QString uploadedFilename, QString username, QString sourceFilename);
void deleteBackup(MiniPromise::Promise promise, const QString& backupName);
signals:
@@ -119,7 +119,7 @@ protected:
std::pair<bool, QString> createBackup(const QString& prefix, const QString& name);
- bool recoverFromBackupZip(const QString& backupName, QuaZip& backupZip, const QString& sourceFilename, bool rollingBack = false);
+ bool recoverFromBackupZip(const QString& backupName, QuaZip& backupZip, const QString& username, const QString& sourceFilename, bool rollingBack = false);
private slots:
void removeOldConsolidatedBackups();
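
Each of these slots re-invokes itself through QMetaObject::invokeMethod() when called from the wrong thread, so the new username parameter has to be added to the Q_ARG list as well as to the signature (as the .cpp hunks above do); leaving it out of the Q_ARG list would make the queued call fail to resolve at runtime. A self-contained sketch of that bounce-to-own-thread idiom, with hypothetical names and assuming Qt 5 with moc run on the file (save as main.cpp so the trailing include resolves):

    #include <QCoreApplication>
    #include <QDebug>
    #include <QMetaObject>
    #include <QObject>
    #include <QString>
    #include <QThread>

    class Worker : public QObject {
        Q_OBJECT
    public slots:
        void recover(const QString& backupName, const QString& username) {
            if (QThread::currentThread() != thread()) {
                // Not on the thread that owns this object: queue the same call there,
                // forwarding every argument explicitly.
                QMetaObject::invokeMethod(this, "recover",
                                          Q_ARG(QString, backupName), Q_ARG(QString, username));
                return;
            }
            qDebug() << "recovering" << backupName << "requested by" << username;
            QMetaObject::invokeMethod(QCoreApplication::instance(), "quit", Qt::QueuedConnection);
        }
    };

    int main(int argc, char** argv) {
        QCoreApplication app(argc, argv);
        QThread side;
        Worker worker;
        worker.moveToThread(&side);            // the worker's home thread is `side`
        side.start();
        worker.recover("backup.zip", "alice"); // direct call on the main thread: it bounces itself
        int rc = app.exec();
        side.quit();
        side.wait();
        return rc;
    }

    #include "main.moc"
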
diff --git a/domain-server/src/DomainServer.cpp b/domain-server/src/DomainServer.cpp
index fa4bf89ad6..09438b31bc 100644
--- a/domain-server/src/DomainServer.cpp
+++ b/domain-server/src/DomainServer.cpp
@@ -1957,6 +1957,7 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
QPointer<HTTPConnection> connectionPtr { connection };
auto nodeList = DependencyManager::get<LimitedNodeList>();
+ QString username;
auto getSetting = [this](QString keyPath, QVariant& value) -> bool {
@@ -2024,7 +2025,9 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
}
// all requests below require a cookie to prove authentication so check that first
- if (!isAuthenticatedRequest(connection, url)) {
+ bool isAuthenticated { false };
+ std::tie(isAuthenticated, username) = isAuthenticatedRequest(connection);
+ if (!isAuthenticated) {
// this is not an authenticated request
// return true from the handler since it was handled with a 401 or re-direct to auth
return true;
@@ -2361,7 +2364,7 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
connectionPtr->respond(success ? HTTPConnection::StatusCode200 : HTTPConnection::StatusCode400, docJSON.toJson(),
JSON_MIME_TYPE.toUtf8());
});
- _contentManager->recoverFromBackup(deferred, id);
+ _contentManager->recoverFromBackup(deferred, id, username);
return true;
}
} else if (connection->requestOperation() == QNetworkAccessManager::PutOperation) {
@@ -2557,6 +2560,9 @@ bool DomainServer::processPendingContent(HTTPConnection* connection, QString ite
int sessionId = sessionIdBytes.toInt();
bool newUpload = itemName == "restore-file" || itemName == "restore-file-chunk-initial" || itemName == "restore-file-chunk-only";
+ bool isAuthenticated;
+ QString username;
+ std::tie(isAuthenticated, username) = isAuthenticatedRequest(connection);
if (filename.endsWith(".zip", Qt::CaseInsensitive)) {
static const QString TEMPORARY_CONTENT_FILEPATH { QDir::tempPath() + "/hifiUploadContent_XXXXXX.zip" };
@@ -2591,7 +2597,7 @@ bool DomainServer::processPendingContent(HTTPConnection* connection, QString ite
_pendingContentFiles.erase(sessionId);
});
- _contentManager->recoverFromUploadedFile(deferred, _pendingFileContent.fileName(), filename);
+ _contentManager->recoverFromUploadedFile(deferred, _pendingFileContent.fileName(), username, filename);
}
} else if (filename.endsWith(".json", Qt::CaseInsensitive)
|| filename.endsWith(".json.gz", Qt::CaseInsensitive)) {
@@ -2604,7 +2610,7 @@ bool DomainServer::processPendingContent(HTTPConnection* connection, QString ite
if (itemName == "restore-file" || itemName == "restore-file-chunk-final" || itemName == "restore-file-chunk-only") {
// invoke our method to hand the new octree file off to the octree server
- if (!handleOctreeFileReplacement(_pendingUploadedContent, filename, QString())) {
+ if (!handleOctreeFileReplacement(_pendingUploadedContent, filename, QString(), username)) {
connection->respond(HTTPConnection::StatusCode400);
return false;
}
@@ -2680,7 +2686,7 @@ void DomainServer::profileRequestFinished() {
}
}
-bool DomainServer::isAuthenticatedRequest(HTTPConnection* connection, const QUrl& url) {
+std::pair<bool, QString> DomainServer::isAuthenticatedRequest(HTTPConnection* connection) {
static const QByteArray HTTP_COOKIE_HEADER_KEY = "Cookie";
static const QString ADMIN_USERS_CONFIG_KEY = "admin-users";
@@ -2717,7 +2723,7 @@ bool DomainServer::isAuthenticatedRequest(HTTPConnection* connection, const QUrl
if (_settingsManager.valueForKeyPath(ADMIN_USERS_CONFIG_KEY).toStringList().contains(profileUsername)) {
// this is an authenticated user
- return true;
+ return { true, profileUsername };
}
// loop the roles of this user and see if they are in the admin-roles array
@@ -2727,7 +2733,7 @@ bool DomainServer::isAuthenticatedRequest(HTTPConnection* connection, const QUrl
foreach(const QString& userRole, sessionData.getRoles()) {
if (adminRolesArray.contains(userRole)) {
// this user has a role that allows them to administer the domain-server
- return true;
+ return { true, profileUsername };
}
}
}
@@ -2735,7 +2741,7 @@ bool DomainServer::isAuthenticatedRequest(HTTPConnection* connection, const QUrl
connection->respond(HTTPConnection::StatusCode401, UNAUTHENTICATED_BODY);
// the user does not have allowed username or role, return 401
- return false;
+ return { false, QString() };
} else {
static const QByteArray REQUESTED_WITH_HEADER = "X-Requested-With";
static const QString XML_REQUESTED_WITH = "XMLHttpRequest";
@@ -2764,7 +2770,7 @@ bool DomainServer::isAuthenticatedRequest(HTTPConnection* connection, const QUrl
}
// we don't know about this user yet, so they are not yet authenticated
- return false;
+ return { false, QString() };
}
} else if (_settingsManager.valueForKeyPath(BASIC_AUTH_USERNAME_KEY_PATH).isValid()) {
// config file contains username and password combinations for basic auth
@@ -2793,7 +2799,7 @@ bool DomainServer::isAuthenticatedRequest(HTTPConnection* connection, const QUrl
"" : QCryptographicHash::hash(headerPassword.toUtf8(), QCryptographicHash::Sha256).toHex();
if (settingsUsername == headerUsername && hexHeaderPassword == settingsPassword) {
- return true;
+ return { true, headerUsername };
}
}
}
@@ -2815,11 +2821,11 @@ bool DomainServer::isAuthenticatedRequest(HTTPConnection* connection, const QUrl
HTTPConnection::DefaultContentType, basicAuthHeader);
// not authenticated, bubble up false
- return false;
+ return { false, QString() };
} else {
// we don't have an OAuth URL + admin roles/usernames, so all users are authenticated
- return true;
+ return { true, QString() };
}
}
@@ -3493,7 +3499,7 @@ void DomainServer::maybeHandleReplacementEntityFile() {
}
}
-bool DomainServer::handleOctreeFileReplacement(QByteArray octreeFile, QString sourceFilename, QString name) {
+bool DomainServer::handleOctreeFileReplacement(QByteArray octreeFile, QString sourceFilename, QString name, QString username) {
OctreeUtils::RawEntityData data;
if (data.readOctreeDataInfoFromData(octreeFile)) {
data.resetIdAndVersion();
@@ -3514,7 +3520,7 @@ bool DomainServer::handleOctreeFileReplacement(QByteArray octreeFile, QString so
{ INSTALLED_CONTENT_NAME, name },
{ INSTALLED_CONTENT_CREATION_TIME, 0 },
{ INSTALLED_CONTENT_INSTALL_TIME, QDateTime::currentDateTime().currentMSecsSinceEpoch() },
- { INSTALLED_CONTENT_INSTALLED_BY, "" }
+ { INSTALLED_CONTENT_INSTALLED_BY, username }
};
QJsonObject jsonObject { { INSTALLED_CONTENT, installed_content } };
@@ -3539,6 +3545,11 @@ void DomainServer::handleDomainContentReplacementFromURLRequest(QSharedPointer
auto node = DependencyManager::get<LimitedNodeList>()->findNodeWithAddr(message->getSenderSockAddr());
if (node && node->getCanReplaceContent()) {
+ DomainServerNodeData* nodeData = static_cast<DomainServerNodeData*>(node->getLinkedData());
+ QString username;
+ if (nodeData) {
+ username = nodeData->getUsername();
+ }
// Convert message data into our URL
QString url(message->getMessage());
QUrl modelsURL = QUrl(url, QUrl::StrictMode);
@@ -3548,17 +3559,17 @@ void DomainServer::handleDomainContentReplacementFromURLRequest(QSharedPointererror();
if (networkError == QNetworkReply::NoError) {
if (modelsURL.fileName().endsWith(".json.gz")) {
QUrlQuery urlQuery(modelsURL.query(QUrl::FullyEncoded));
QString itemName = urlQuery.queryItemValue(CONTENT_SET_NAME_QUERY_PARAM);
- handleOctreeFileReplacement(reply->readAll(), modelsURL.fileName(), itemName);
+ handleOctreeFileReplacement(reply->readAll(), modelsURL.fileName(), itemName, username);
} else if (modelsURL.fileName().endsWith(".zip")) {
auto deferred = makePromise("recoverFromUploadedBackup");
- _contentManager->recoverFromUploadedBackup(deferred, reply->readAll());
+ _contentManager->recoverFromUploadedBackup(deferred, reply->readAll(), username);
}
} else {
qDebug() << "Error downloading JSON from specified file: " << modelsURL;
@@ -3569,7 +3580,12 @@ void DomainServer::handleDomainContentReplacementFromURLRequest(QSharedPointer
void DomainServer::handleOctreeFileReplacementRequest(QSharedPointer<ReceivedMessage> message) {
auto node = DependencyManager::get<LimitedNodeList>()->nodeWithLocalID(message->getSourceID());
- if (node->getCanReplaceContent()) {
- handleOctreeFileReplacement(message->readAll(), QString(), QString());
+ if (node && node->getCanReplaceContent()) {
+ QString username;
+ DomainServerNodeData* nodeData = static_cast<DomainServerNodeData*>(node->getLinkedData());
+ if (nodeData) {
+ username = nodeData->getUsername();
+ }
+ handleOctreeFileReplacement(message->readAll(), QString(), QString(), username);
}
}
diff --git a/domain-server/src/DomainServer.h b/domain-server/src/DomainServer.h
index aef59a4e4a..02362abd7b 100644
--- a/domain-server/src/DomainServer.h
+++ b/domain-server/src/DomainServer.h
@@ -99,7 +99,7 @@ private slots:
void handleDomainContentReplacementFromURLRequest(QSharedPointer<ReceivedMessage> message);
void handleOctreeFileReplacementRequest(QSharedPointer<ReceivedMessage> message);
- bool handleOctreeFileReplacement(QByteArray octreeFile, QString sourceFilename, QString name);
+ bool handleOctreeFileReplacement(QByteArray octreeFile, QString sourceFilename, QString name, QString username);
void processOctreeDataRequestMessage(QSharedPointer<ReceivedMessage> message);
void processOctreeDataPersistMessage(QSharedPointer<ReceivedMessage> message);
@@ -194,7 +194,7 @@ private:
QUrl oauthRedirectURL();
QUrl oauthAuthorizationURL(const QUuid& stateUUID = QUuid::createUuid());
- bool isAuthenticatedRequest(HTTPConnection* connection, const QUrl& url);
+ std::pair<bool, QString> isAuthenticatedRequest(HTTPConnection* connection);
QNetworkReply* profileRequestGivenTokenReply(QNetworkReply* tokenReply);
Headers setupCookieHeadersFromProfileReply(QNetworkReply* profileReply);
diff --git a/domain-server/src/EntitiesBackupHandler.cpp b/domain-server/src/EntitiesBackupHandler.cpp
index 03baec9164..e7e8b5a90d 100644
--- a/domain-server/src/EntitiesBackupHandler.cpp
+++ b/domain-server/src/EntitiesBackupHandler.cpp
@@ -57,7 +57,7 @@ void EntitiesBackupHandler::createBackup(const QString& backupName, QuaZip& zip)
}
}
-std::pair<bool, QString> EntitiesBackupHandler::recoverBackup(const QString& backupName, QuaZip& zip, const QString& sourceFilename) {
+std::pair<bool, QString> EntitiesBackupHandler::recoverBackup(const QString& backupName, QuaZip& zip, const QString& username, const QString& sourceFilename) {
if (!zip.setCurrentFile(ENTITIES_BACKUP_FILENAME)) {
QString errorStr("Failed to find " + ENTITIES_BACKUP_FILENAME + " while recovering backup");
qWarning() << errorStr;
diff --git a/domain-server/src/EntitiesBackupHandler.h b/domain-server/src/EntitiesBackupHandler.h
index f8b6cba8a0..1cdfdd89ed 100644
--- a/domain-server/src/EntitiesBackupHandler.h
+++ b/domain-server/src/EntitiesBackupHandler.h
@@ -29,7 +29,7 @@ public:
void createBackup(const QString& backupName, QuaZip& zip) override;
// Recover from a full backup
- std::pair<bool, QString> recoverBackup(const QString& backupName, QuaZip& zip, const QString& sourceFilename) override;
+ std::pair<bool, QString> recoverBackup(const QString& backupName, QuaZip& zip, const QString& username, const QString& sourceFilename) override;
// Delete a skeleton backup
void deleteBackup(const QString& backupName) override {}
diff --git a/hifi_android.py b/hifi_android.py
index 42b472e960..0c2ea07cc7 100644
--- a/hifi_android.py
+++ b/hifi_android.py
@@ -94,6 +94,10 @@ ANDROID_PACKAGES = {
'checksum': 'ddcb23df336b08017042ba4786db1d9e',
'sharedLibFolder': 'lib',
'includeLibs': {'libbreakpad_client.a'}
+ },
+ 'webrtc': {
+ 'file': 'webrtc-20190626-android.tar.gz',
+ 'checksum': 'e2dccd3d8efdcba6d428c87ba7fb2a53'
}
}
diff --git a/interface/resources/qml/hifi/audio/Audio.qml b/interface/resources/qml/hifi/audio/Audio.qml
index af517be55d..9db19e72e2 100644
--- a/interface/resources/qml/hifi/audio/Audio.qml
+++ b/interface/resources/qml/hifi/audio/Audio.qml
@@ -166,16 +166,16 @@ Rectangle {
x: 2 * margins.paddings;
width: parent.width;
// switch heights + 2 * top margins
- height: (root.switchHeight) * 3 + 48;
+ height: (root.switchHeight) * 6 + 48;
anchors.top: firstSeparator.bottom;
anchors.topMargin: 10;
- // mute is in its own row
Item {
id: switchContainer;
x: margins.paddings;
width: parent.width / 2;
height: parent.height;
+ anchors.top: parent.top
anchors.left: parent.left;
HifiControlsUit.Switch {
id: muteMic;
@@ -222,12 +222,29 @@ Rectangle {
}
HifiControlsUit.Switch {
- id: pttSwitch
+ id: acousticEchoCancellationSwitch;
height: root.switchHeight;
switchWidth: root.switchWidth;
anchors.top: noiseReductionSwitch.bottom
anchors.topMargin: 24
anchors.left: parent.left
+ labelTextOn: "Echo Cancellation";
+ labelTextSize: 16;
+ backgroundOnColor: "#E3E3E3";
+ checked: AudioScriptingInterface.acousticEchoCancellation;
+ onCheckedChanged: {
+ AudioScriptingInterface.acousticEchoCancellation = checked;
+ checked = Qt.binding(function() { return AudioScriptingInterface.acousticEchoCancellation; });
+ }
+ }
+
+ HifiControlsUit.Switch {
+ id: pttSwitch
+ height: root.switchHeight;
+ switchWidth: root.switchWidth;
+ anchors.top: acousticEchoCancellationSwitch.bottom;
+ anchors.topMargin: 24
+ anchors.left: parent.left
labelTextOn: (bar.currentIndex === 0) ? qsTr("Push To Talk (T)") : qsTr("Push To Talk");
labelTextSize: 16;
backgroundOnColor: "#E3E3E3";
@@ -298,7 +315,6 @@ Rectangle {
checked = Qt.binding(function() { return AudioScriptingInterface.isStereoInput; }); // restore binding
}
}
-
}
}
diff --git a/interface/resources/qml/hifi/simplifiedUI/settingsApp/audio/Audio.qml b/interface/resources/qml/hifi/simplifiedUI/settingsApp/audio/Audio.qml
index 8827bb3834..bfc0bc5200 100644
--- a/interface/resources/qml/hifi/simplifiedUI/settingsApp/audio/Audio.qml
+++ b/interface/resources/qml/hifi/simplifiedUI/settingsApp/audio/Audio.qml
@@ -222,6 +222,17 @@ Flickable {
}
}
}
+
+ SimplifiedControls.Switch {
+ id: acousticEchoCancellationSwitch
+ Layout.preferredHeight: 18
+ Layout.preferredWidth: parent.width
+ labelTextOn: "Acoustic Echo Cancellation"
+ checked: AudioScriptingInterface.acousticEchoCancellation
+ onClicked: {
+ AudioScriptingInterface.acousticEchoCancellation = !AudioScriptingInterface.acousticEchoCancellation;
+ }
+ }
}
}
diff --git a/interface/src/scripting/Audio.cpp b/interface/src/scripting/Audio.cpp
index a0bea256ad..f674b533a7 100644
--- a/interface/src/scripting/Audio.cpp
+++ b/interface/src/scripting/Audio.cpp
@@ -26,6 +26,7 @@ QString Audio::HMD { "VR" };
Setting::Handle<bool> enableNoiseReductionSetting { QStringList { Audio::AUDIO, "NoiseReduction" }, true };
Setting::Handle<bool> enableWarnWhenMutedSetting { QStringList { Audio::AUDIO, "WarnWhenMuted" }, true };
+Setting::Handle<bool> enableAcousticEchoCancellationSetting { QStringList { Audio::AUDIO, "AcousticEchoCancellation" }, true };
float Audio::loudnessToLevel(float loudness) {
@@ -40,12 +41,14 @@ Audio::Audio() : _devices(_contextIsHMD) {
connect(client, &AudioClient::muteToggled, this, &Audio::setMuted);
connect(client, &AudioClient::noiseReductionChanged, this, &Audio::enableNoiseReduction);
connect(client, &AudioClient::warnWhenMutedChanged, this, &Audio::enableWarnWhenMuted);
+ connect(client, &AudioClient::acousticEchoCancellationChanged, this, &Audio::enableAcousticEchoCancellation);
connect(client, &AudioClient::inputLoudnessChanged, this, &Audio::onInputLoudnessChanged);
connect(client, &AudioClient::inputVolumeChanged, this, &Audio::setInputVolume);
connect(this, &Audio::contextChanged, &_devices, &AudioDevices::onContextChanged);
connect(this, &Audio::pushingToTalkChanged, this, &Audio::handlePushedToTalk);
enableNoiseReduction(enableNoiseReductionSetting.get());
enableWarnWhenMuted(enableWarnWhenMutedSetting.get());
+ enableAcousticEchoCancellation(enableAcousticEchoCancellationSetting.get());
onContextChanged();
}
@@ -277,6 +280,28 @@ void Audio::enableWarnWhenMuted(bool enable) {
}
}
+bool Audio::acousticEchoCancellationEnabled() const {
+ return resultWithReadLock([&] {
+ return _enableAcousticEchoCancellation;
+ });
+}
+
+void Audio::enableAcousticEchoCancellation(bool enable) {
+ bool changed = false;
+ withWriteLock([&] {
+ if (_enableAcousticEchoCancellation != enable) {
+ _enableAcousticEchoCancellation = enable;
+ auto client = DependencyManager::get<AudioClient>().data();
+ QMetaObject::invokeMethod(client, "setAcousticEchoCancellation", Q_ARG(bool, enable), Q_ARG(bool, false));
+ enableAcousticEchoCancellationSetting.set(enable);
+ changed = true;
+ }
+ });
+ if (changed) {
+ emit acousticEchoCancellationChanged(enable);
+ }
+}
+
float Audio::getInputVolume() const {
return resultWithReadLock([&] {
return _inputVolume;
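
enableAcousticEchoCancellation() above follows the same discipline as the other enable*() setters: flip the flag inside withWriteLock, record in a local changed flag whether anything actually happened, and emit the signal only after the lock has been released, so a connected handler can immediately read the property back without risking a deadlock on the same lock. A standalone sketch of that idiom, using std::mutex and a plain callback in place of ReadWriteLockable and the Qt signal (all names are illustrative):

    #include <functional>
    #include <iostream>
    #include <mutex>

    class EchoCancelSetting {
    public:
        std::function<void(bool)> onChanged;     // stands in for the Qt NOTIFY signal

        void enable(bool enable) {
            bool changed = false;
            {
                std::lock_guard<std::mutex> guard(_mutex);
                if (_enabled != enable) {
                    _enabled = enable;
                    changed = true;              // defer notification until the lock is dropped
                }
            }
            if (changed && onChanged) {
                onChanged(enable);               // notify outside the critical section
            }
        }

        bool enabled() const {
            std::lock_guard<std::mutex> guard(_mutex);
            return _enabled;
        }

    private:
        mutable std::mutex _mutex;
        bool _enabled { true };                  // mirrors the default of _enableAcousticEchoCancellation
    };

    int main() {
        EchoCancelSetting aec;
        aec.onChanged = [&aec](bool on) {
            std::cout << "AEC " << (on ? "on" : "off") << ", readback " << aec.enabled() << "\n";
        };
        aec.enable(false);   // notifies once; the readback inside the handler cannot deadlock
        aec.enable(false);   // no change, no notification
        return 0;
    }
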
diff --git a/interface/src/scripting/Audio.h b/interface/src/scripting/Audio.h
index aab1ade95b..d2d1ee36c9 100644
--- a/interface/src/scripting/Audio.h
+++ b/interface/src/scripting/Audio.h
@@ -72,6 +72,9 @@ class Audio : public AudioScriptingInterface, protected ReadWriteLockable {
* @property {number} systemInjectorGain - The gain (relative volume) that system sounds are played at.
* @property {number} pushingToTalkOutputGainDesktop - The gain (relative volume) that all sounds are played at when the user is holding
* the push-to-talk key in Desktop mode.
+ * @property {boolean} acousticEchoCancellation - <code>true</code> if acoustic echo cancellation is enabled, otherwise
+ * <code>false</code>. When enabled, sound from the audio output will be suppressed when it echoes back to the
+ * input audio signal.
*
* @comment The following properties are from AudioScriptingInterface.h.
* @property {boolean} isStereoInput - <code>true</code> if the input audio is being used in stereo, otherwise
@@ -85,6 +88,8 @@ class Audio : public AudioScriptingInterface, protected ReadWriteLockable {
Q_PROPERTY(bool muted READ isMuted WRITE setMuted NOTIFY mutedChanged)
Q_PROPERTY(bool noiseReduction READ noiseReductionEnabled WRITE enableNoiseReduction NOTIFY noiseReductionChanged)
Q_PROPERTY(bool warnWhenMuted READ warnWhenMutedEnabled WRITE enableWarnWhenMuted NOTIFY warnWhenMutedChanged)
+ Q_PROPERTY(bool acousticEchoCancellation
+ READ acousticEchoCancellationEnabled WRITE enableAcousticEchoCancellation NOTIFY acousticEchoCancellationChanged)
Q_PROPERTY(float inputVolume READ getInputVolume WRITE setInputVolume NOTIFY inputVolumeChanged)
Q_PROPERTY(float inputLevel READ getInputLevel NOTIFY inputLevelChanged)
Q_PROPERTY(bool clipping READ isClipping NOTIFY clippingChanged)
@@ -115,6 +120,7 @@ public:
bool isMuted() const;
bool noiseReductionEnabled() const;
bool warnWhenMutedEnabled() const;
+ bool acousticEchoCancellationEnabled() const;
float getInputVolume() const;
float getInputLevel() const;
bool isClipping() const;
@@ -396,6 +402,14 @@ signals:
*/
void warnWhenMutedChanged(bool isEnabled);
+ /**jsdoc
+ * Triggered when acoustic echo cancellation is enabled or disabled.
+ * @function Audio.acousticEchoCancellationChanged
+ * @param {boolean} isEnabled - <code>true</code> if acoustic echo cancellation is enabled, otherwise <code>false</code>.
+ * @returns {Signal}
+ */
+ void acousticEchoCancellationChanged(bool isEnabled);
+
/**jsdoc
* Triggered when the input audio volume changes.
* @function Audio.inputVolumeChanged
@@ -494,6 +508,7 @@ private slots:
void setMuted(bool muted);
void enableNoiseReduction(bool enable);
void enableWarnWhenMuted(bool enable);
+ void enableAcousticEchoCancellation(bool enable);
void setInputVolume(float volume);
void onInputLoudnessChanged(float loudness, bool isClipping);
@@ -512,6 +527,7 @@ private:
bool _isClipping { false };
bool _enableNoiseReduction { true }; // Match default value of AudioClient::_isNoiseGateEnabled.
bool _enableWarnWhenMuted { true };
+ bool _enableAcousticEchoCancellation { true }; // AudioClient::_isAECEnabled
bool _contextIsHMD { false };
AudioDevices* getDevices() { return &_devices; }
AudioDevices _devices;
diff --git a/interface/src/ui/Snapshot.cpp b/interface/src/ui/Snapshot.cpp
index 926588e4ca..bb9971e582 100644
--- a/interface/src/ui/Snapshot.cpp
+++ b/interface/src/ui/Snapshot.cpp
@@ -41,7 +41,6 @@
#include "MainWindow.h"
#include "Snapshot.h"
#include "SnapshotUploader.h"
-#include "ToneMappingEffect.h"
// filename format: hifi-snap-by-%username%-on-%date%_%time%_@-%location%.jpg
// %1 <= username, %2 <= date and time, %3 <= current location
diff --git a/libraries/audio-client/CMakeLists.txt b/libraries/audio-client/CMakeLists.txt
index 6ca7962c39..6b88292dd4 100644
--- a/libraries/audio-client/CMakeLists.txt
+++ b/libraries/audio-client/CMakeLists.txt
@@ -7,6 +7,11 @@ link_hifi_libraries(audio plugins)
include_hifi_library_headers(shared)
include_hifi_library_headers(networking)
+if (ANDROID)
+else ()
+ target_webrtc()
+endif ()
+
# append audio includes to our list of includes to bubble
target_include_directories(${TARGET_NAME} PUBLIC "${HIFI_LIBRARY_DIR}/audio/src")
diff --git a/libraries/audio-client/src/AudioClient.cpp b/libraries/audio-client/src/AudioClient.cpp
index 04ab0f7973..c16e297c28 100644
--- a/libraries/audio-client/src/AudioClient.cpp
+++ b/libraries/audio-client/src/AudioClient.cpp
@@ -24,7 +24,7 @@
#endif
#ifdef WIN32
-#define WIN32_LEAN_AND_MEAN
+#define WIN32_LEAN_AND_MEAN 1
#include
#include
#include
@@ -286,6 +286,7 @@ AudioClient::AudioClient() :
_shouldEchoLocally(false),
_shouldEchoToServer(false),
_isNoiseGateEnabled(true),
+ _isAECEnabled(true),
_reverb(false),
_reverbOptions(&_scriptReverbOptions),
_inputToNetworkResampler(NULL),
@@ -302,6 +303,7 @@ AudioClient::AudioClient() :
_isHeadsetPluggedIn(false),
#endif
_orientationGetter(DEFAULT_ORIENTATION_GETTER) {
+
// avoid putting a lock in the device callback
assert(_localSamplesAvailable.is_lock_free());
@@ -353,6 +355,10 @@ AudioClient::AudioClient() :
configureReverb();
+#if defined(WEBRTC_ENABLED)
+ configureWebrtc();
+#endif
+
auto nodeList = DependencyManager::get();
auto& packetReceiver = nodeList->getPacketReceiver();
packetReceiver.registerListener(PacketType::AudioStreamStats, &_stats, "processStreamStatsPacket");
@@ -1084,6 +1090,131 @@ void AudioClient::setReverbOptions(const AudioEffectOptions* options) {
}
}
+#if defined(WEBRTC_ENABLED)
+
+static void deinterleaveToFloat(const int16_t* src, float* const* dst, int numFrames, int numChannels) {
+ for (int i = 0; i < numFrames; i++) {
+ for (int ch = 0; ch < numChannels; ch++) {
+ float f = *src++;
+ f *= (1/32768.0f); // scale
+ dst[ch][i] = f; // deinterleave
+ }
+ }
+}
+
+static void interleaveToInt16(const float* const* src, int16_t* dst, int numFrames, int numChannels) {
+ for (int i = 0; i < numFrames; i++) {
+ for (int ch = 0; ch < numChannels; ch++) {
+ float f = src[ch][i];
+ f *= 32768.0f; // scale
+ f += (f < 0.0f) ? -0.5f : 0.5f; // round
+ f = std::max(std::min(f, 32767.0f), -32768.0f); // saturate
+ *dst++ = (int16_t)f; // interleave
+ }
+ }
+}
+
+void AudioClient::configureWebrtc() {
+ _apm = webrtc::AudioProcessingBuilder().Create();
+
+ webrtc::AudioProcessing::Config config;
+
+ config.pre_amplifier.enabled = false;
+ config.high_pass_filter.enabled = false;
+ config.echo_canceller.enabled = true;
+ config.echo_canceller.mobile_mode = false;
+ config.echo_canceller.use_legacy_aec = false;
+ config.noise_suppression.enabled = false;
+ config.noise_suppression.level = webrtc::AudioProcessing::Config::NoiseSuppression::kModerate;
+ config.voice_detection.enabled = false;
+ config.gain_controller1.enabled = false;
+ config.gain_controller2.enabled = false;
+ config.gain_controller2.fixed_digital.gain_db = 0.0f;
+ config.gain_controller2.adaptive_digital.enabled = false;
+ config.residual_echo_detector.enabled = true;
+ config.level_estimation.enabled = false;
+
+ _apm->ApplyConfig(config);
+}
+
+// rebuffer into 10ms chunks
+void AudioClient::processWebrtcFarEnd(const int16_t* samples, int numFrames, int numChannels, int sampleRate) {
+
+ const webrtc::StreamConfig streamConfig = webrtc::StreamConfig(sampleRate, numChannels);
+ const int numChunk = (int)streamConfig.num_frames();
+
+ if (sampleRate > WEBRTC_SAMPLE_RATE_MAX) {
+ qCWarning(audioclient) << "WebRTC does not support" << sampleRate << "output sample rate.";
+ return;
+ }
+ if (numChannels > WEBRTC_CHANNELS_MAX) {
+ qCWarning(audioclient) << "WebRTC does not support" << numChannels << "output channels.";
+ return;
+ }
+
+ while (numFrames > 0) {
+
+ // number of frames to fill
+ int numFill = std::min(numFrames, numChunk - _numFifoFarEnd);
+
+ // refill fifo
+ memcpy(&_fifoFarEnd[_numFifoFarEnd], samples, numFill * numChannels * sizeof(int16_t));
+ samples += numFill * numChannels;
+ numFrames -= numFill;
+ _numFifoFarEnd += numFill;
+
+ if (_numFifoFarEnd == numChunk) {
+
+ // convert audio format
+ float buffer[WEBRTC_CHANNELS_MAX][WEBRTC_FRAMES_MAX];
+ float* const buffers[WEBRTC_CHANNELS_MAX] = { buffer[0], buffer[1] };
+ deinterleaveToFloat(_fifoFarEnd, buffers, numChunk, numChannels);
+
+ // process one chunk
+ int error = _apm->ProcessReverseStream(buffers, streamConfig, streamConfig, buffers);
+ if (error != _apm->kNoError) {
+ qCWarning(audioclient) << "WebRTC ProcessReverseStream() returned ERROR:" << error;
+ }
+ _numFifoFarEnd = 0;
+ }
+ }
+}
+
+void AudioClient::processWebrtcNearEnd(int16_t* samples, int numFrames, int numChannels, int sampleRate) {
+
+ const webrtc::StreamConfig streamConfig = webrtc::StreamConfig(sampleRate, numChannels);
+ const int numChunk = (int)streamConfig.num_frames();
+
+ if (sampleRate > WEBRTC_SAMPLE_RATE_MAX) {
+ qCWarning(audioclient) << "WebRTC does not support" << sampleRate << "input sample rate.";
+ return;
+ }
+ if (numChannels > WEBRTC_CHANNELS_MAX) {
+ qCWarning(audioclient) << "WebRTC does not support" << numChannels << "input channels.";
+ return;
+ }
+ if (numFrames != numChunk) {
+ qCWarning(audioclient) << "WebRTC requires exactly 10ms of input.";
+ return;
+ }
+
+ // convert audio format
+ float buffer[WEBRTC_CHANNELS_MAX][WEBRTC_FRAMES_MAX];
+ float* const buffers[WEBRTC_CHANNELS_MAX] = { buffer[0], buffer[1] };
+ deinterleaveToFloat(samples, buffers, numFrames, numChannels);
+
+ // process one chunk
+ int error = _apm->ProcessStream(buffers, streamConfig, streamConfig, buffers);
+ if (error != _apm->kNoError) {
+ qCWarning(audioclient) << "WebRTC ProcessStream() returned ERROR:" << error;
+ } else {
+ // modify samples in-place
+ interleaveToInt16(buffers, samples, numFrames, numChannels);
+ }
+}
+
+#endif // WEBRTC_ENABLED
+
void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
// If there is server echo, reverb will be applied to the received audio stream so no need to have it here.
bool hasReverb = _reverb || _receivedAudioStream.hasReverb();
@@ -1262,6 +1393,13 @@ void AudioClient::handleMicAudioInput() {
_inputRingBuffer.readSamples(inputAudioSamples.get(), inputSamplesRequired);
+#if defined(WEBRTC_ENABLED)
+ if (_isAECEnabled) {
+ processWebrtcNearEnd(inputAudioSamples.get(), inputSamplesRequired / _inputFormat.channelCount(),
+ _inputFormat.channelCount(), _inputFormat.sampleRate());
+ }
+#endif
+
// detect loudness and clipping on the raw input
bool isClipping = false;
float loudness = computeLoudness(inputAudioSamples.get(), inputSamplesRequired, _inputFormat.channelCount(), isClipping);
@@ -1574,6 +1712,15 @@ void AudioClient::setWarnWhenMuted(bool enable, bool emitSignal) {
}
}
+void AudioClient::setAcousticEchoCancellation(bool enable, bool emitSignal) {
+ if (_isAECEnabled != enable) {
+ _isAECEnabled = enable;
+ if (emitSignal) {
+ emit acousticEchoCancellationChanged(_isAECEnabled);
+ }
+ }
+}
+
bool AudioClient::setIsStereoInput(bool isStereoInput) {
bool stereoInputChanged = false;
if (isStereoInput != _isStereoInput && _inputDeviceInfo.supportedChannelCounts().contains(2)) {
@@ -2107,15 +2254,16 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
return maxSize;
}
- // samples requested from OUTPUT_CHANNEL_COUNT
+ // max samples requested from OUTPUT_CHANNEL_COUNT
int deviceChannelCount = _audio->_outputFormat.channelCount();
- int samplesRequested = (int)(maxSize / AudioConstants::SAMPLE_SIZE) * OUTPUT_CHANNEL_COUNT / deviceChannelCount;
+ int maxSamplesRequested = (int)(maxSize / AudioConstants::SAMPLE_SIZE) * OUTPUT_CHANNEL_COUNT / deviceChannelCount;
// restrict samplesRequested to the size of our mix/scratch buffers
- samplesRequested = std::min(samplesRequested, _audio->_outputPeriod);
+ maxSamplesRequested = std::min(maxSamplesRequested, _audio->_outputPeriod);
int16_t* scratchBuffer = _audio->_outputScratchBuffer;
float* mixBuffer = _audio->_outputMixBuffer;
+ int samplesRequested = maxSamplesRequested;
int networkSamplesPopped;
if ((networkSamplesPopped = _receivedAudioStream.popSamples(samplesRequested, false)) > 0) {
qCDebug(audiostream, "Read %d samples from buffer (%d available, %d requested)", networkSamplesPopped, _receivedAudioStream.getSamplesAvailable(), samplesRequested);
@@ -2160,45 +2308,45 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
});
int samplesPopped = std::max(networkSamplesPopped, injectorSamplesPopped);
- int framesPopped = samplesPopped / AudioConstants::STEREO;
- int bytesWritten;
- if (samplesPopped > 0) {
-
- // apply output gain
- float newGain = _audio->_outputGain.load(std::memory_order_acquire);
- float oldGain = _audio->_lastOutputGain;
- _audio->_lastOutputGain = newGain;
-
- applyGainSmoothing(mixBuffer, framesPopped, oldGain, newGain);
-
- if (deviceChannelCount == OUTPUT_CHANNEL_COUNT) {
- // limit the audio
- _audio->_audioLimiter.render(mixBuffer, (int16_t*)data, framesPopped);
- } else {
- _audio->_audioLimiter.render(mixBuffer, scratchBuffer, framesPopped);
-
- // upmix or downmix to deviceChannelCount
- if (deviceChannelCount > OUTPUT_CHANNEL_COUNT) {
- int extraChannels = deviceChannelCount - OUTPUT_CHANNEL_COUNT;
- channelUpmix(scratchBuffer, (int16_t*)data, samplesPopped, extraChannels);
- } else {
- channelDownmix(scratchBuffer, (int16_t*)data, samplesPopped);
- }
- }
-
- bytesWritten = framesPopped * AudioConstants::SAMPLE_SIZE * deviceChannelCount;
- assert(bytesWritten <= maxSize);
-
- } else {
- // nothing on network, don't grab anything from injectors, and just return 0s
- memset(data, 0, maxSize);
- bytesWritten = maxSize;
+ if (samplesPopped == 0) {
+ // nothing on network, don't grab anything from injectors, and fill with silence
+ samplesPopped = maxSamplesRequested;
+ memset(mixBuffer, 0, samplesPopped * sizeof(float));
}
+ int framesPopped = samplesPopped / OUTPUT_CHANNEL_COUNT;
+
+ // apply output gain
+ float newGain = _audio->_outputGain.load(std::memory_order_acquire);
+ float oldGain = _audio->_lastOutputGain;
+ _audio->_lastOutputGain = newGain;
+
+ applyGainSmoothing(mixBuffer, framesPopped, oldGain, newGain);
+
+ // limit the audio
+ _audio->_audioLimiter.render(mixBuffer, scratchBuffer, framesPopped);
+
+#if defined(WEBRTC_ENABLED)
+ if (_audio->_isAECEnabled) {
+ _audio->processWebrtcFarEnd(scratchBuffer, framesPopped, OUTPUT_CHANNEL_COUNT, _audio->_outputFormat.sampleRate());
+ }
+#endif
+
+ // if required, upmix or downmix to deviceChannelCount
+ if (deviceChannelCount == OUTPUT_CHANNEL_COUNT) {
+ memcpy(data, scratchBuffer, samplesPopped * AudioConstants::SAMPLE_SIZE);
+ } else if (deviceChannelCount > OUTPUT_CHANNEL_COUNT) {
+ int extraChannels = deviceChannelCount - OUTPUT_CHANNEL_COUNT;
+ channelUpmix(scratchBuffer, (int16_t*)data, samplesPopped, extraChannels);
+ } else {
+ channelDownmix(scratchBuffer, (int16_t*)data, samplesPopped);
+ }
+ int bytesWritten = framesPopped * AudioConstants::SAMPLE_SIZE * deviceChannelCount;
+ assert(bytesWritten <= maxSize);
// send output buffer for recording
if (_audio->_isRecording) {
Lock lock(_recordMutex);
- _audio->_audioFileWav.addRawAudioChunk(reinterpret_cast(scratchBuffer), bytesWritten);
+ _audio->_audioFileWav.addRawAudioChunk(data, bytesWritten);
}
int bytesAudioOutputUnplayed = _audio->_audioOutput->bufferSize() - _audio->_audioOutput->bytesFree();
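
processWebrtcFarEnd() exists because the output device hands the client arbitrarily sized buffers, while WebRTC's AudioProcessing only accepts exact 10 ms chunks (StreamConfig::num_frames()). The far-end FIFO therefore accumulates interleaved frames and flushes one chunk at a time. A self-contained sketch of just that rebuffering step, with the actual APM call replaced by a counting stub and hard-coded 48 kHz stereo for illustration:

    #include <algorithm>
    #include <cstdint>
    #include <cstring>
    #include <iostream>
    #include <vector>

    static constexpr int SAMPLE_RATE = 48000;
    static constexpr int CHANNELS = 2;
    static constexpr int CHUNK_FRAMES = SAMPLE_RATE / 100;   // 10 ms = 480 frames at 48 kHz

    class FarEndFifo {
    public:
        void push(const int16_t* samples, int numFrames) {
            while (numFrames > 0) {
                // top up the fifo with as many frames as fit in the current chunk
                int fill = std::min(numFrames, CHUNK_FRAMES - _framesBuffered);
                std::memcpy(&_fifo[_framesBuffered * CHANNELS], samples,
                            fill * CHANNELS * sizeof(int16_t));
                samples += fill * CHANNELS;
                numFrames -= fill;
                _framesBuffered += fill;

                if (_framesBuffered == CHUNK_FRAMES) {
                    processChunk(_fifo.data());           // exactly 10 ms of audio per call
                    _framesBuffered = 0;
                }
            }
        }

        int chunksProcessed() const { return _chunksProcessed; }

    private:
        void processChunk(const int16_t* /*chunk*/) { ++_chunksProcessed; }   // stand-in for ProcessReverseStream()

        std::vector<int16_t> _fifo = std::vector<int16_t>(CHUNK_FRAMES * CHANNELS);
        int _framesBuffered { 0 };
        int _chunksProcessed { 0 };
    };

    int main() {
        FarEndFifo fifo;
        std::vector<int16_t> block(512 * CHANNELS, 0);   // pretend the device delivers 512-frame callbacks
        for (int i = 0; i < 15; ++i) {
            fifo.push(block.data(), 512);
        }
        // 15 * 512 = 7680 frames == 16 complete 10 ms chunks at 48 kHz
        std::cout << "chunks processed: " << fifo.chunksProcessed() << "\n";
        return 0;
    }

The near-end path needs no FIFO: processWebrtcNearEnd() assumes the mic capture loop already hands it one 10 ms chunk at a time, and warns and returns if that is not the case.
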
diff --git a/libraries/audio-client/src/AudioClient.h b/libraries/audio-client/src/AudioClient.h
index decf0f7751..ab12393ebf 100644
--- a/libraries/audio-client/src/AudioClient.h
+++ b/libraries/audio-client/src/AudioClient.h
@@ -29,6 +29,7 @@
#include
#include
#include
+#include
#include
#include
@@ -215,6 +216,9 @@ public slots:
void setWarnWhenMuted(bool isNoiseGateEnabled, bool emitSignal = true);
bool isWarnWhenMutedEnabled() const { return _warnWhenMuted; }
+ void setAcousticEchoCancellation(bool isAECEnabled, bool emitSignal = true);
+ bool isAcousticEchoCancellationEnabled() const { return _isAECEnabled; }
+
virtual bool getLocalEcho() override { return _shouldEchoLocally; }
virtual void setLocalEcho(bool localEcho) override { _shouldEchoLocally = localEcho; }
virtual void toggleLocalEcho() override { _shouldEchoLocally = !_shouldEchoLocally; }
@@ -256,6 +260,7 @@ signals:
void muteToggled(bool muted);
void noiseReductionChanged(bool noiseReductionEnabled);
void warnWhenMutedChanged(bool warnWhenMutedEnabled);
+ void acousticEchoCancellationChanged(bool acousticEchoCancellationEnabled);
void mutedByMixer();
void inputReceived(const QByteArray& inputSamples);
void inputLoudnessChanged(float loudness, bool isClipping);
@@ -377,6 +382,7 @@ private:
bool _shouldEchoToServer;
bool _isNoiseGateEnabled;
bool _warnWhenMuted;
+ bool _isAECEnabled;
bool _reverb;
AudioEffectOptions _scriptReverbOptions;
@@ -414,9 +420,23 @@ private:
// Adds Reverb
void configureReverb();
void updateReverbOptions();
-
void handleLocalEchoAndReverb(QByteArray& inputByteArray);
+#if defined(WEBRTC_ENABLED)
+ static const int WEBRTC_SAMPLE_RATE_MAX = 96000;
+ static const int WEBRTC_CHANNELS_MAX = 2;
+ static const int WEBRTC_FRAMES_MAX = webrtc::AudioProcessing::kChunkSizeMs * WEBRTC_SAMPLE_RATE_MAX / 1000;
+
+ webrtc::AudioProcessing* _apm { nullptr };
+
+ int16_t _fifoFarEnd[WEBRTC_CHANNELS_MAX * WEBRTC_FRAMES_MAX] {};
+ int _numFifoFarEnd = 0; // numFrames saved in fifo
+
+ void configureWebrtc();
+ void processWebrtcFarEnd(const int16_t* samples, int numFrames, int numChannels, int sampleRate);
+ void processWebrtcNearEnd(int16_t* samples, int numFrames, int numChannels, int sampleRate);
+#endif
+
bool switchInputToAudioDevice(const QAudioDeviceInfo inputDeviceInfo, bool isShutdownRequest = false);
bool switchOutputToAudioDevice(const QAudioDeviceInfo outputDeviceInfo, bool isShutdownRequest = false);
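
The WEBRTC_FRAMES_MAX sizing above bounds the per-chunk scratch buffers used by the int16/float conversion helpers in AudioClient.cpp. The scaling convention those helpers follow is worth spelling out: divide by 32768 on the way into floats, then multiply, round half away from zero, and saturate on the way back to int16. A standalone worked example of that round trip (nothing here is part of the codebase):

    #include <algorithm>
    #include <cstdint>
    #include <iostream>

    static float toFloat(int16_t s) {
        return s * (1.0f / 32768.0f);                      // [-32768, 32767] -> [-1.0, ~0.99997]
    }

    static int16_t toInt16(float f) {
        f *= 32768.0f;                                      // scale back up
        f += (f < 0.0f) ? -0.5f : 0.5f;                     // round half away from zero
        f = std::max(std::min(f, 32767.0f), -32768.0f);     // saturate
        return static_cast<int16_t>(f);
    }

    int main() {
        for (int16_t s : { int16_t(-32768), int16_t(-1), int16_t(0), int16_t(1), int16_t(32767) }) {
            std::cout << s << " -> " << toFloat(s) << " -> " << toInt16(toFloat(s)) << "\n";
        }
        return 0;
    }
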
diff --git a/libraries/platform/src/platform/backend/PlatformInstance.cpp b/libraries/platform/src/platform/backend/PlatformInstance.cpp
index d4cadba3b7..e02eaf2837 100644
--- a/libraries/platform/src/platform/backend/PlatformInstance.cpp
+++ b/libraries/platform/src/platform/backend/PlatformInstance.cpp
@@ -106,33 +106,33 @@ void Instance::enumerateNics() {
}
json Instance::getCPU(int index) {
- assert(index <(int) _cpus.size());
+ assert(index < (int)_cpus.size());
- if (index < 0 || (int) _cpus.size() <= index)
+ if (index < 0 || (int)_cpus.size() <= index)
return json();
return _cpus.at(index);
}
json Instance::getGPU(int index) {
- assert(index <(int) _gpus.size());
+ assert(index < (int)_gpus.size());
- if (index < 0 || (int) _gpus.size() <= index)
+ if (index < 0 || (int)_gpus.size() <= index)
return json();
-
+
return _gpus.at(index);
}
-
json Instance::getDisplay(int index) {
- assert(index <(int) _displays.size());
-
- if (index < 0 || (int) _displays.size() <= index)
+ assert(index < (int)_displays.size());
+
+ if (index < 0 || (int)_displays.size() <= index)
return json();
return _displays.at(index);
}
+
Instance::~Instance() {
if (_cpus.size() > 0) {
_cpus.clear();
@@ -147,7 +147,6 @@ Instance::~Instance() {
}
}
-
json Instance::listAllKeys() {
json allKeys;
allKeys.array({{
diff --git a/libraries/render-utils/src/BloomEffect.cpp b/libraries/render-utils/src/BloomEffect.cpp
index 414a1c3f91..e58d07ac33 100644
--- a/libraries/render-utils/src/BloomEffect.cpp
+++ b/libraries/render-utils/src/BloomEffect.cpp
@@ -15,7 +15,6 @@
#include
#include
-#include
#include "render-utils/ShaderConstants.h"
#define BLOOM_BLUR_LEVEL_COUNT 3
diff --git a/libraries/render-utils/src/RenderCommonTask.cpp b/libraries/render-utils/src/RenderCommonTask.cpp
index 9ea4ac9f3c..7cf7f1129f 100644
--- a/libraries/render-utils/src/RenderCommonTask.cpp
+++ b/libraries/render-utils/src/RenderCommonTask.cpp
@@ -148,6 +148,30 @@ void Blit::run(const RenderContextPointer& renderContext, const gpu::Framebuffer
});
}
+NewFramebuffer::NewFramebuffer(gpu::Element pixelFormat) {
+ _pixelFormat = pixelFormat;
+}
+
+void NewFramebuffer::run(const render::RenderContextPointer& renderContext, Output& output) {
+ RenderArgs* args = renderContext->args;
+ glm::uvec2 frameSize(args->_viewport.z, args->_viewport.w);
+ output.reset();
+
+ if (_outputFramebuffer && _outputFramebuffer->getSize() != frameSize) {
+ _outputFramebuffer.reset();
+ }
+
+ if (!_outputFramebuffer) {
+ _outputFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("newFramebuffer.out"));
+ auto colorFormat = _pixelFormat;
+ auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR);
+ auto colorTexture = gpu::Texture::createRenderBuffer(colorFormat, frameSize.x, frameSize.y, gpu::Texture::SINGLE_MIP, defaultSampler);
+ _outputFramebuffer->setRenderBuffer(0, colorTexture);
+ }
+
+ output = _outputFramebuffer;
+}
+
void NewOrDefaultFramebuffer::run(const render::RenderContextPointer& renderContext, const Input& input, Output& output) {
RenderArgs* args = renderContext->args;
// auto frameSize = input;
@@ -167,7 +191,7 @@ void NewOrDefaultFramebuffer::run(const render::RenderContextPointer& renderCont
}
if (!_outputFramebuffer) {
- _outputFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("newFramebuffer.out"));
+ _outputFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("newOrDefaultFramebuffer.out"));
auto colorFormat = gpu::Element::COLOR_SRGBA_32;
auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR);
auto colorTexture = gpu::Texture::createRenderBuffer(colorFormat, frameSize.x, frameSize.y, gpu::Texture::SINGLE_MIP, defaultSampler);
diff --git a/libraries/render-utils/src/RenderCommonTask.h b/libraries/render-utils/src/RenderCommonTask.h
index ec50fbf2cc..15d6ff9895 100644
--- a/libraries/render-utils/src/RenderCommonTask.h
+++ b/libraries/render-utils/src/RenderCommonTask.h
@@ -83,6 +83,20 @@ public:
void run(const render::RenderContextPointer& renderContext, const gpu::FramebufferPointer& srcFramebuffer);
};
+class NewFramebuffer {
+public:
+ using Output = gpu::FramebufferPointer;
+ using JobModel = render::Job::ModelO<NewFramebuffer, Output>;
+
+ NewFramebuffer(gpu::Element pixelFormat = gpu::Element::COLOR_SRGBA_32);
+
+ void run(const render::RenderContextPointer& renderContext, Output& output);
+protected:
+ gpu::Element _pixelFormat;
+private:
+ gpu::FramebufferPointer _outputFramebuffer;
+};
+
class NewOrDefaultFramebuffer {
public:
using Input = glm::uvec2;
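
The new NewFramebuffer job is parameterized by a pixel format (defaulting to COLOR_SRGBA_32) and caches its color target, reallocating only when the viewport size changes. The caching policy its run() method implements in RenderCommonTask.cpp above reduces to the sketch below, where Framebuffer and FramebufferPointer are stand-ins rather than the engine's gpu:: types:

    #include <iostream>
    #include <memory>

    struct Framebuffer {
        int width, height;
    };
    using FramebufferPointer = std::shared_ptr<Framebuffer>;

    class CachedFramebufferJob {
    public:
        FramebufferPointer run(int viewportWidth, int viewportHeight) {
            if (_framebuffer && (_framebuffer->width != viewportWidth ||
                                 _framebuffer->height != viewportHeight)) {
                _framebuffer.reset();                              // size changed: drop the old target
            }
            if (!_framebuffer) {
                _framebuffer = std::make_shared<Framebuffer>(Framebuffer{ viewportWidth, viewportHeight });
                std::cout << "allocated " << viewportWidth << "x" << viewportHeight << "\n";
            }
            return _framebuffer;                                   // reused on every other frame
        }

    private:
        FramebufferPointer _framebuffer;
    };

    int main() {
        CachedFramebufferJob job;
        job.run(1920, 1080);   // allocates
        job.run(1920, 1080);   // reuses
        job.run(1280, 720);    // reallocates after a resize
        return 0;
    }

The RenderForwardTask hunk at the end of this patch constructs the job with a gpu::Element describing the R11G11B10 float format instead of the default.
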
diff --git a/libraries/render-utils/src/RenderDeferredTask.cpp b/libraries/render-utils/src/RenderDeferredTask.cpp
index c26f3b613c..4561cf903d 100644
--- a/libraries/render-utils/src/RenderDeferredTask.cpp
+++ b/libraries/render-utils/src/RenderDeferredTask.cpp
@@ -29,7 +29,6 @@
#include
#include
#include
-#include
#include "RenderHifi.h"
#include "render-utils/ShaderConstants.h"
@@ -51,7 +50,7 @@
#include "AmbientOcclusionEffect.h"
#include "AntialiasingEffect.h"
-#include "ToneMappingEffect.h"
+#include "ToneMapAndResampleTask.h"
#include "SubsurfaceScattering.h"
#include "DrawHaze.h"
#include "BloomEffect.h"
@@ -96,7 +95,7 @@ RenderDeferredTask::RenderDeferredTask()
void RenderDeferredTask::configure(const Config& config) {
// Propagate resolution scale to sub jobs who need it
- auto preparePrimaryBufferConfig = config.getConfig("PreparePrimaryBuffer");
+ auto preparePrimaryBufferConfig = config.getConfig("PreparePrimaryBufferDeferred");
assert(preparePrimaryBufferConfig);
preparePrimaryBufferConfig->setResolutionScale(config.resolutionScale);
}
@@ -146,7 +145,7 @@ void RenderDeferredTask::build(JobModel& task, const render::Varying& input, ren
const auto jitter = task.addJob("JitterCam");
// GPU jobs: Start preparing the primary, deferred and lighting buffer
- const auto scaledPrimaryFramebuffer = task.addJob("PreparePrimaryBuffer");
+ const auto scaledPrimaryFramebuffer = task.addJob("PreparePrimaryBufferDeferred");
// Prepare deferred, generate the shared Deferred Frame Transform. Only valid with the scaled frame buffer
const auto deferredFrameTransform = task.addJob("DeferredFrameTransform", jitter);
@@ -238,23 +237,22 @@ void RenderDeferredTask::build(JobModel& task, const render::Varying& input, ren
const auto bloomInputs = BloomEffect::Inputs(deferredFrameTransform, lightingFramebuffer, bloomFrame).asVarying();
task.addJob("Bloom", bloomInputs);
+ const auto destFramebuffer = static_cast(nullptr);
+
// Lighting Buffer ready for tone mapping
- const auto toneMappingInputs = ToneMappingDeferred::Input(lightingFramebuffer, scaledPrimaryFramebuffer).asVarying();
- const auto toneMappedBuffer = task.addJob("ToneMapping", toneMappingInputs);
+ const auto toneMappingInputs = ToneMapAndResample::Input(lightingFramebuffer, destFramebuffer).asVarying();
+ const auto toneMappedBuffer = task.addJob("ToneMapping", toneMappingInputs);
// Debugging task is happening in the "over" layer after tone mapping and just before HUD
{ // Debug the bounds of the rendered items, still look at the zbuffer
const auto extraDebugBuffers = RenderDeferredTaskDebug::ExtraBuffers(linearDepthTarget, surfaceGeometryFramebuffer, ambientOcclusionFramebuffer, ambientOcclusionUniforms, scatteringResource, velocityBuffer);
const auto debugInputs = RenderDeferredTaskDebug::Input(fetchedItems, shadowTaskOutputs, lightingStageInputs, lightClusters, prepareDeferredOutputs, extraDebugBuffers,
- deferredFrameTransform, jitter, lightingModel).asVarying();
+ deferredFrameTransform, jitter, lightingModel).asVarying();
task.addJob("DebugRenderDeferredTask", debugInputs);
}
- // Upscale to finale resolution
- const auto primaryFramebuffer = task.addJob("PrimaryBufferUpscale", toneMappedBuffer);
-
// HUD Layer
- const auto renderHUDLayerInputs = RenderHUDLayerTask::Input(primaryFramebuffer, lightingModel, hudOpaque, hudTransparent, hazeFrame).asVarying();
+ const auto renderHUDLayerInputs = RenderHUDLayerTask::Input(toneMappedBuffer, lightingModel, hudOpaque, hudTransparent, hazeFrame).asVarying();
task.addJob("RenderHUDLayer", renderHUDLayerInputs);
}
@@ -415,7 +413,6 @@ void RenderDeferredTaskDebug::build(JobModel& task, const render::Varying& input
const auto debugZoneInputs = DebugZoneLighting::Inputs(deferredFrameTransform, lightFrame, backgroundFrame).asVarying();
task.addJob("DrawZoneStack", debugZoneInputs);
-
}
diff --git a/libraries/render-utils/src/RenderForwardTask.cpp b/libraries/render-utils/src/RenderForwardTask.cpp
index b6b17ee376..14f2e51697 100755
--- a/libraries/render-utils/src/RenderForwardTask.cpp
+++ b/libraries/render-utils/src/RenderForwardTask.cpp
@@ -19,7 +19,6 @@
#include
#include
#include
-#include
#include
@@ -28,7 +27,7 @@
#include "StencilMaskPass.h"
#include "ZoneRenderer.h"
#include "FadeEffect.h"
-#include "ToneMappingEffect.h"
+#include "ToneMapAndResampleTask.h"
#include "BackgroundStage.h"
#include "FramebufferCache.h"
#include "TextureCache.h"
@@ -51,7 +50,7 @@ extern void initForwardPipelines(ShapePlumber& plumber);
void RenderForwardTask::configure(const Config& config) {
// Propagate resolution scale to sub jobs who need it
- auto preparePrimaryBufferConfig = config.getConfig("PreparePrimaryBuffer");
+ auto preparePrimaryBufferConfig = config.getConfig("PreparePrimaryBufferForward");
assert(preparePrimaryBufferConfig);
preparePrimaryBufferConfig->setResolutionScale(config.resolutionScale);
}
@@ -99,7 +98,7 @@ void RenderForwardTask::build(JobModel& task, const render::Varying& input, rend
// GPU jobs: Start preparing the main framebuffer
- const auto scaledPrimaryFramebuffer = task.addJob("PreparePrimaryBuffer");
+ const auto scaledPrimaryFramebuffer = task.addJob("PreparePrimaryBufferForward");
// Prepare deferred, generate the shared Deferred Frame Transform. Only valid with the scaled frame buffer
const auto deferredFrameTransform = task.addJob("DeferredFrameTransform");
@@ -141,34 +140,17 @@ void RenderForwardTask::build(JobModel& task, const render::Varying& input, rend
task.addJob("DrawZoneStack", debugZoneInputs);
}
-#if defined(Q_OS_ANDROID)
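+ // Resolve the multisampled primary buffer into a single-sample HDR framebuffer before tone mapping.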
+ const auto newResolvedFramebuffer = task.addJob<NewFramebuffer>("MakeResolvingFramebuffer", gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::R11G11B10));
- // Just resolve the msaa
- const auto resolveInputs = ResolveFramebuffer::Inputs(scaledPrimaryFramebuffer, static_cast<gpu::FramebufferPointer>(nullptr)).asVarying();
- const auto resolvedFramebuffer = task.addJob<ResolveFramebuffer>("Resolve", resolveInputs);
-
- const auto toneMappedBuffer = resolvedFramebuffer;
-#else
- const auto newResolvedFramebuffer = task.addJob("MakeResolvingFramebuffer");
-
-
- // Just resolve the msaa
const auto resolveInputs = ResolveFramebuffer::Inputs(scaledPrimaryFramebuffer, newResolvedFramebuffer).asVarying();
 const auto resolvedFramebuffer = task.addJob<ResolveFramebuffer>("Resolve", resolveInputs);
- // Lighting Buffer ready for tone mapping
- // Forward rendering on GLES doesn't support tonemapping to and from the same FBO, so we specify
- // the output FBO as null, which causes the tonemapping to target the blit framebuffer
- const auto toneMappingInputs = ToneMappingDeferred::Input(resolvedFramebuffer, resolvedFramebuffer).asVarying();
- const auto toneMappedBuffer = task.addJob<ToneMappingDeferred>("ToneMapping", toneMappingInputs);
-
-#endif
-
- // Upscale to finale resolution
- const auto primaryFramebuffer = task.addJob("PrimaryBufferUpscale", toneMappedBuffer);
+ const auto destFramebuffer = static_cast<gpu::FramebufferPointer>(nullptr);
+ const auto toneMappingInputs = ToneMapAndResample::Input(resolvedFramebuffer, destFramebuffer).asVarying();
+ const auto toneMappedBuffer = task.addJob<ToneMapAndResample>("ToneMapping", toneMappingInputs);
// HUD Layer
- const auto renderHUDLayerInputs = RenderHUDLayerTask::Input(primaryFramebuffer, lightingModel, hudOpaque, hudTransparent, hazeFrame).asVarying();
+ const auto renderHUDLayerInputs = RenderHUDLayerTask::Input(toneMappedBuffer, lightingModel, hudOpaque, hudTransparent, hazeFrame).asVarying();
task.addJob("RenderHUDLayer", renderHUDLayerInputs);
}
@@ -176,8 +158,8 @@ gpu::FramebufferPointer PreparePrimaryFramebufferMSAA::createFramebuffer(const c
gpu::FramebufferPointer framebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create(name));
auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR);
-
- auto colorFormat = gpu::Element::COLOR_SRGBA_32;
+
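+ // Packed floating-point color format so the forward pass can store HDR values for tone mapping.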
+ auto colorFormat = gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::R11G11B10);
auto colorTexture =
gpu::Texture::createRenderBufferMultisample(colorFormat, frameSize.x, frameSize.y, numSamples, defaultSampler);
framebuffer->setRenderBuffer(0, colorTexture);
diff --git a/libraries/render-utils/src/RenderForwardTask.h b/libraries/render-utils/src/RenderForwardTask.h
index 2abf248692..dd0abbc9ab 100755
--- a/libraries/render-utils/src/RenderForwardTask.h
+++ b/libraries/render-utils/src/RenderForwardTask.h
@@ -50,11 +50,13 @@ public:
const float SCALE_RANGE_MIN = 0.1f;
const float SCALE_RANGE_MAX = 2.0f;
resolutionScale = std::max(SCALE_RANGE_MIN, std::min(SCALE_RANGE_MAX, scale));
+ //emit dirty();
}
int getNumSamples() const { return numSamples; }
void setNumSamples(int num) {
numSamples = std::max(1, std::min(32, num));
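+ // Notify listeners so the MSAA framebuffer can be rebuilt with the new sample count.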
+ emit dirty();
}
signals:
diff --git a/libraries/render-utils/src/ToneMapAndResampleTask.cpp b/libraries/render-utils/src/ToneMapAndResampleTask.cpp
new file mode 100644
index 0000000000..8d4a3c485d
--- /dev/null
+++ b/libraries/render-utils/src/ToneMapAndResampleTask.cpp
@@ -0,0 +1,110 @@
+//
+// ToneMapAndResampleTask.cpp
+// libraries/render-utils/src
+//
+// Created by Anna Brewer on 7/3/19.
+// Copyright 2019 High Fidelity, Inc.
+//
+// Distributed under the Apache License, Version 2.0.
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+//
+
+#include "ToneMapAndResampleTask.h"
+
+#include
+#include
+
+#include "render-utils/ShaderConstants.h"
+#include "StencilMaskPass.h"
+#include "FramebufferCache.h"
+
+using namespace render;
+using namespace shader::gpu::program;
+using namespace shader::render_utils::program;
+
+gpu::PipelinePointer ToneMapAndResample::_pipeline;
+gpu::PipelinePointer ToneMapAndResample::_mirrorPipeline;
+
+ToneMapAndResample::ToneMapAndResample() {
+ Parameters parameters;
+ _parametersBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(Parameters), (const gpu::Byte*) &parameters));
+}
+
+void ToneMapAndResample::init() {
+ // shared_ptr to gpu::State
+ gpu::StatePointer blitState = gpu::StatePointer(new gpu::State());
+
+ blitState->setDepthTest(gpu::State::DepthTest(false, false));
+ blitState->setColorWriteMask(true, true, true, true);
+
+ _pipeline = gpu::PipelinePointer(gpu::Pipeline::create(gpu::Shader::createProgram(toneMapping), blitState));
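+ // The mirrored variant samples the source with the X texture coordinate flipped (see toneMapping.slf).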
+ _mirrorPipeline = gpu::PipelinePointer(gpu::Pipeline::create(gpu::Shader::createProgram(toneMapping_mirrored), blitState));
+}
+
+void ToneMapAndResample::setExposure(float exposure) {
+ auto& params = _parametersBuffer.get();
+ if (params._exposure != exposure) {
+ _parametersBuffer.edit()._exposure = exposure;
+ _parametersBuffer.edit()._twoPowExposure = pow(2.0, exposure);
+ }
+}
+
+void ToneMapAndResample::setToneCurve(ToneCurve curve) {
+ auto& params = _parametersBuffer.get();
+ if (params._toneCurve != (int)curve) {
+ _parametersBuffer.edit()._toneCurve = (int)curve;
+ }
+}
+
+void ToneMapAndResample::configure(const Config& config) {
+ setExposure(config.exposure);
+ setToneCurve((ToneCurve)config.curve);
+}
+
+void ToneMapAndResample::run(const RenderContextPointer& renderContext, const Input& input, Output& output) {
+ assert(renderContext->args);
+ assert(renderContext->args->hasViewFrustum());
+
+ RenderArgs* args = renderContext->args;
+
+ auto lightingBuffer = input.get0()->getRenderBuffer(0);
+ auto destinationFramebuffer = input.get1();
+
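+ // No explicit destination: render into the blit framebuffer, as the old ToneMappingDeferred job did.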
+ if (!destinationFramebuffer) {
+ destinationFramebuffer = args->_blitFramebuffer;
+ }
+
+ if (!lightingBuffer || !destinationFramebuffer) {
+ return;
+ }
+
+ if (!_pipeline) {
+ init();
+ }
+
+ const auto bufferSize = destinationFramebuffer->getSize();
+
+ auto srcBufferSize = glm::ivec2(lightingBuffer->getDimensions());
+
+ glm::ivec4 destViewport{ 0, 0, bufferSize.x, bufferSize.y };
+
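+ // Tone map the lighting buffer and stretch it over the full destination viewport in a single draw,
+ // which also performs the upscale from the scaled render resolution.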
+ gpu::doInBatch("Resample::run", args->_context, [&](gpu::Batch& batch) {
+ batch.enableStereo(false);
+ batch.setFramebuffer(destinationFramebuffer);
+
+ batch.setViewportTransform(destViewport);
+ batch.setProjectionTransform(glm::mat4());
+ batch.resetViewTransform();
+ batch.setPipeline(args->_renderMode == RenderArgs::MIRROR_RENDER_MODE ? _mirrorPipeline : _pipeline);
+
+ batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(srcBufferSize, args->_viewport));
+ batch.setUniformBuffer(render_utils::slot::buffer::ToneMappingParams, _parametersBuffer);
+ batch.setResourceTexture(render_utils::slot::texture::ToneMappingColor, lightingBuffer);
+ batch.draw(gpu::TRIANGLE_STRIP, 4);
+ });
+
+ // Set full final viewport
+ args->_viewport = destViewport;
+
+ output = destinationFramebuffer;
+}
diff --git a/libraries/render-utils/src/ToneMappingEffect.h b/libraries/render-utils/src/ToneMapAndResampleTask.h
similarity index 65%
rename from libraries/render-utils/src/ToneMappingEffect.h
rename to libraries/render-utils/src/ToneMapAndResampleTask.h
index faf6e514e9..1c7ef2cf48 100644
--- a/libraries/render-utils/src/ToneMappingEffect.h
+++ b/libraries/render-utils/src/ToneMapAndResampleTask.h
@@ -1,16 +1,16 @@
//
-// ToneMappingEffect.h
+// ToneMapAndResample.h
// libraries/render-utils/src
//
-// Created by Sam Gateau on 12/7/2015.
-// Copyright 2015 High Fidelity, Inc.
+// Created by Anna Brewer on 7/3/19.
+// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
-#ifndef hifi_ToneMappingEffect_h
-#define hifi_ToneMappingEffect_h
+#ifndef hifi_ToneMapAndResample_h
+#define hifi_ToneMapAndResample_h
#include
#include
@@ -20,29 +20,66 @@
#include
#include
+enum class ToneCurve {
+ // Different tone curve available
+ None,
+ Gamma22,
+ Reinhard,
+ Filmic,
+};
+
+class ToneMappingConfig : public render::Job::Config {
+ Q_OBJECT
+ Q_PROPERTY(float exposure MEMBER exposure WRITE setExposure);
+ Q_PROPERTY(int curve MEMBER curve WRITE setCurve);
-class ToneMappingEffect {
public:
- ToneMappingEffect();
- virtual ~ToneMappingEffect() {}
+ ToneMappingConfig() : render::Job::Config(true) {}
- void render(RenderArgs* args, const gpu::TexturePointer& lightingBuffer, const gpu::FramebufferPointer& destinationBuffer);
+ void setExposure(float newExposure) { exposure = newExposure; emit dirty(); }
+ void setCurve(int newCurve) { curve = std::max((int)ToneCurve::None, std::min((int)ToneCurve::Filmic, newCurve)); emit dirty(); }
+
+
+ float exposure{ 0.0f };
+ int curve{ (int)ToneCurve::Gamma22 };
+
+signals:
+ void dirty();
+};
+
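+// Combines the former ToneMappingEffect with the final resample into one render job: applies exposure
+// and the selected tone curve to the lighting buffer and writes the result to the destination
+// framebuffer (or the blit framebuffer when none is provided).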
+class ToneMapAndResample {
+public:
+ ToneMapAndResample();
+ virtual ~ToneMapAndResample() {}
+
+ void render(RenderArgs* args, const gpu::TexturePointer& lightingBuffer, gpu::FramebufferPointer& destinationBuffer);
void setExposure(float exposure);
 float getExposure() const { return _parametersBuffer.get<Parameters>()._exposure; }
- // Different tone curve available
- enum ToneCurve {
- None = 0,
- Gamma22,
- Reinhard,
- Filmic,
- };
void setToneCurve(ToneCurve curve);
 ToneCurve getToneCurve() const { return (ToneCurve)_parametersBuffer.get<Parameters>()._toneCurve; }
-private:
+ // Inputs: lightingFramebuffer, destinationFramebuffer
+ using Input = render::VaryingSet2<gpu::FramebufferPointer, gpu::FramebufferPointer>;
+ using Output = gpu::FramebufferPointer;
+ using Config = ToneMappingConfig;
+ using JobModel = render::Job::ModelIO<ToneMapAndResample, Input, Output, Config>;
+ void configure(const Config& config);
+ void run(const render::RenderContextPointer& renderContext, const Input& input, Output& output);
+
+protected:
+ static gpu::PipelinePointer _pipeline;
+ static gpu::PipelinePointer _mirrorPipeline;
+
+ gpu::FramebufferPointer _destinationFrameBuffer;
+
+ float _factor{ 2.0f };
+
+ gpu::FramebufferPointer getResampledFrameBuffer(const gpu::FramebufferPointer& sourceFramebuffer);
+
+private:
gpu::PipelinePointer _blitLightBuffer;
// Class describing the uniform buffer with all the parameters common to the tone mapping shaders
@@ -51,46 +88,16 @@ private:
float _exposure = 0.0f;
float _twoPowExposure = 1.0f;
glm::vec2 spareA;
- int _toneCurve = Gamma22;
+ int _toneCurve = (int)ToneCurve::Gamma22;
glm::vec3 spareB;
Parameters() {}
};
+
typedef gpu::BufferView UniformBufferView;
gpu::BufferView _parametersBuffer;
- void init(RenderArgs* args);
+ void init();
};
-class ToneMappingConfig : public render::Job::Config {
- Q_OBJECT
- Q_PROPERTY(float exposure MEMBER exposure WRITE setExposure);
- Q_PROPERTY(int curve MEMBER curve WRITE setCurve);
-public:
- ToneMappingConfig() : render::Job::Config(true) {}
-
- void setExposure(float newExposure) { exposure = newExposure; emit dirty(); }
- void setCurve(int newCurve) { curve = std::max((int)ToneMappingEffect::None, std::min((int)ToneMappingEffect::Filmic, newCurve)); emit dirty(); }
-
-
- float exposure{ 0.0f };
- int curve{ ToneMappingEffect::Gamma22 };
-signals:
- void dirty();
-};
-
-class ToneMappingDeferred {
-public:
- // Inputs: lightingFramebuffer, destinationFramebuffer
- using Input = render::VaryingSet2<gpu::FramebufferPointer, gpu::FramebufferPointer>;
- using Output = gpu::FramebufferPointer;
- using Config = ToneMappingConfig;
- using JobModel = render::Job::ModelIO<ToneMappingDeferred, Input, Output, Config>;
-
- void configure(const Config& config);
- void run(const render::RenderContextPointer& renderContext, const Input& input, Output& output);
-
- ToneMappingEffect _toneMappingEffect;
-};
-
-#endif // hifi_ToneMappingEffect_h
+#endif // hifi_ToneMapAndResample_h
diff --git a/libraries/render-utils/src/ToneMappingEffect.cpp b/libraries/render-utils/src/ToneMappingEffect.cpp
deleted file mode 100644
index b7cc5d3d80..0000000000
--- a/libraries/render-utils/src/ToneMappingEffect.cpp
+++ /dev/null
@@ -1,96 +0,0 @@
-//
-// ToneMappingEffect.cpp
-// libraries/render-utils/src
-//
-// Created by Sam Gateau on 12/7/2015.
-// Copyright 2015 High Fidelity, Inc.
-//
-// Distributed under the Apache License, Version 2.0.
-// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
-//
-
-#include "ToneMappingEffect.h"
-
-#include
-#include
-
-#include "render-utils/ShaderConstants.h"
-#include "StencilMaskPass.h"
-#include "FramebufferCache.h"
-
-
-ToneMappingEffect::ToneMappingEffect() {
- Parameters parameters;
- _parametersBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(Parameters), (const gpu::Byte*) &parameters));
-}
-
-void ToneMappingEffect::init(RenderArgs* args) {
- auto blitProgram = gpu::Shader::createProgram(shader::render_utils::program::toneMapping);
-
- auto blitState = std::make_shared<gpu::State>();
- blitState->setColorWriteMask(true, true, true, true);
- _blitLightBuffer = gpu::PipelinePointer(gpu::Pipeline::create(blitProgram, blitState));
-}
-
-void ToneMappingEffect::setExposure(float exposure) {
- auto& params = _parametersBuffer.get();
- if (params._exposure != exposure) {
- _parametersBuffer.edit()._exposure = exposure;
- _parametersBuffer.edit()._twoPowExposure = pow(2.0, exposure);
- }
-}
-
-void ToneMappingEffect::setToneCurve(ToneCurve curve) {
- auto& params = _parametersBuffer.get();
- if (params._toneCurve != curve) {
- _parametersBuffer.edit()._toneCurve = curve;
- }
-}
-
-void ToneMappingEffect::render(RenderArgs* args, const gpu::TexturePointer& lightingBuffer, const gpu::FramebufferPointer& destinationFramebuffer) {
- if (!_blitLightBuffer) {
- init(args);
- }
-
- if (!lightingBuffer || !destinationFramebuffer) {
- return;
- }
-
- auto framebufferSize = glm::ivec2(lightingBuffer->getDimensions());
- gpu::doInBatch("ToneMappingEffect::render", args->_context, [&](gpu::Batch& batch) {
- batch.enableStereo(false);
- batch.setFramebuffer(destinationFramebuffer);
-
- // FIXME: Generate the Luminosity map
- //batch.generateTextureMips(lightingBuffer);
-
- batch.setViewportTransform(args->_viewport);
- batch.setProjectionTransform(glm::mat4());
- batch.resetViewTransform();
- batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(framebufferSize, args->_viewport));
- batch.setPipeline(_blitLightBuffer);
-
- batch.setUniformBuffer(render_utils::slot::buffer::ToneMappingParams, _parametersBuffer);
- batch.setResourceTexture(render_utils::slot::texture::ToneMappingColor, lightingBuffer);
- batch.draw(gpu::TRIANGLE_STRIP, 4);
- });
-}
-
-
-void ToneMappingDeferred::configure(const Config& config) {
- _toneMappingEffect.setExposure(config.exposure);
- _toneMappingEffect.setToneCurve((ToneMappingEffect::ToneCurve)config.curve);
-}
-
-void ToneMappingDeferred::run(const render::RenderContextPointer& renderContext, const Input& input, Output& output) {
-
- auto lightingBuffer = input.get0()->getRenderBuffer(0);
- auto destFbo = input.get1();
-
- if (!destFbo) {
- destFbo = renderContext->args->_blitFramebuffer;
- }
-
- _toneMappingEffect.render(renderContext->args, lightingBuffer, destFbo);
- output = destFbo;
-}
diff --git a/libraries/render-utils/src/render-utils/toneMapping.slp b/libraries/render-utils/src/render-utils/toneMapping.slp
index d4d8ec4b01..2bcb4497c4 100644
--- a/libraries/render-utils/src/render-utils/toneMapping.slp
+++ b/libraries/render-utils/src/render-utils/toneMapping.slp
@@ -1 +1,2 @@
VERTEX gpu::vertex::DrawViewportQuadTransformTexcoord
+DEFINES mirrored:f
diff --git a/libraries/render-utils/src/toneMapping.slf b/libraries/render-utils/src/toneMapping.slf
index 4f7ed6374d..32aa2b0788 100644
--- a/libraries/render-utils/src/toneMapping.slf
+++ b/libraries/render-utils/src/toneMapping.slf
@@ -43,7 +43,11 @@ layout(location=0) in vec2 varTexCoord0;
layout(location=0) out vec4 outFragColor;
void main(void) {
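+ // When compiled with the "mirrored" define, flip the image horizontally by inverting the X texcoord.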
+<@if HIFI_USE_MIRRORED@>
+ vec4 fragColorRaw = texture(colorMap, vec2(1.0 - varTexCoord0.x, varTexCoord0.y));
+<@else@>
vec4 fragColorRaw = texture(colorMap, varTexCoord0);
+<@endif@>
vec3 fragColor = fragColorRaw.xyz;
vec3 srcColor = fragColor * getTwoPowExposure();
diff --git a/libraries/shared/src/shared/WebRTC.h b/libraries/shared/src/shared/WebRTC.h
new file mode 100644
index 0000000000..2f0e444bff
--- /dev/null
+++ b/libraries/shared/src/shared/WebRTC.h
@@ -0,0 +1,36 @@
+//
+// WebRTC.h
+// libraries/shared/src/shared/
+//
+// Copyright 2019 High Fidelity, Inc.
+//
+// Distributed under the Apache License, Version 2.0.
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+//
+
+#ifndef hifi_WebRTC_h
+#define hifi_WebRTC_h
+
+#if defined(Q_OS_MAC)
+# define WEBRTC_ENABLED 1
+# define WEBRTC_POSIX 1
+#elif defined(Q_OS_WIN)
+# define WEBRTC_ENABLED 1
+# define WEBRTC_WIN 1
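+// Keep the Windows headers pulled in by WebRTC lean and stop them from defining min()/max() macros.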
+# define NOMINMAX 1
+# define WIN32_LEAN_AND_MEAN 1
+#elif defined(Q_OS_ANDROID)
+// I don't yet have a working libwebrtc for android
+// # define WEBRTC_ENABLED 1
+// # define WEBRTC_POSIX 1
+#elif defined(Q_OS_LINUX)
+# define WEBRTC_ENABLED 1
+# define WEBRTC_POSIX 1
+#endif
+
+#if defined(WEBRTC_ENABLED)
+# include
+# include "modules/audio_processing/audio_processing_impl.h"
+#endif
+
+#endif // hifi_WebRTC_h
diff --git a/scripts/developer/utilities/render/deferredLighting.qml b/scripts/developer/utilities/render/deferredLighting.qml
index 80ca8b09e1..f2891ddc55 100644
--- a/scripts/developer/utilities/render/deferredLighting.qml
+++ b/scripts/developer/utilities/render/deferredLighting.qml
@@ -45,7 +45,7 @@ Rectangle {
anchors.right: parent.right
spacing: 5
Repeater {
- model: [ "MSAA:PrepareFramebuffer:numSamples:4:1"
+ model: [ "MSAA:PreparePrimaryBufferForward:numSamples:4:1"
]
ConfigSlider {
label: qsTr(modelData.split(":")[0])
diff --git a/tools/oven/src/Oven.cpp b/tools/oven/src/Oven.cpp
index a680dd1f89..d7e0cec67b 100644
--- a/tools/oven/src/Oven.cpp
+++ b/tools/oven/src/Oven.cpp
@@ -21,6 +21,7 @@
#include
#include
#include
+#include
#include
#include
#include
@@ -42,6 +43,7 @@ Oven::Oven() {
DependencyManager::set();
DependencyManager::set();
DependencyManager::set();
+ DependencyManager::set();
MaterialBaker::setNextOvenWorkerThreadOperator([] {
return Oven::instance().getNextWorkerThread();