Merge branch 'master' of https://github.com/highfidelity/hifi into orange

samcake 2016-03-22 10:13:50 -07:00
commit 67abdab8b2
54 changed files with 694 additions and 996 deletions

View file

@ -222,7 +222,6 @@ void Agent::executeScript() {
scriptedAvatar->setForceFaceTrackerConnected(true);
// call model URL setters with empty URLs so our avatar, if user, will have the default models
scriptedAvatar->setFaceModelURL(QUrl());
scriptedAvatar->setSkeletonModelURL(QUrl());
// give this AvatarData object to the script engine

View file

@ -103,8 +103,6 @@ DomainServer::DomainServer(int argc, char* argv[]) :
qDebug() << "Setting up LimitedNodeList and assignments.";
setupNodeListAndAssignments();
loadExistingSessionsFromSettings();
// setup automatic networking settings with data server
setupAutomaticNetworking();
@ -681,7 +679,12 @@ unsigned int DomainServer::countConnectedUsers() {
}
QUrl DomainServer::oauthRedirectURL() {
return QString("https://%1:%2/oauth").arg(_hostname).arg(_httpsManager->serverPort());
if (_httpsManager) {
return QString("https://%1:%2/oauth").arg(_hostname).arg(_httpsManager->serverPort());
} else {
qWarning() << "Attempting to determine OAuth re-direct URL with no HTTPS server configured.";
return QUrl();
}
}
const QString OAUTH_CLIENT_ID_QUERY_KEY = "client_id";
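Note: the hunk above guards oauthRedirectURL() so a missing HTTPS manager yields an empty QUrl instead of a null-pointer dereference. A minimal standalone sketch of the same pattern, assuming a hypothetical HttpsManagerStub in place of the real HTTPS manager:

    #include <QUrl>
    #include <QString>
    #include <QDebug>
    #include <memory>

    // Hypothetical stand-in for the real HTTPS manager; only the port accessor matters here.
    struct HttpsManagerStub {
        quint16 serverPort() const { return 40101; }
    };

    // Return an empty QUrl when no HTTPS server is configured, mirroring the guarded pattern above.
    QUrl oauthRedirectURL(const QString& hostname, const std::unique_ptr<HttpsManagerStub>& https) {
        if (!https) {
            qWarning() << "No HTTPS server configured, cannot build an OAuth redirect URL.";
            return QUrl();
        }
        return QUrl(QString("https://%1:%2/oauth").arg(hostname).arg(https->serverPort()));
    }

    int main() {
        std::unique_ptr<HttpsManagerStub> https; // deliberately left unconfigured
        QUrl url = oauthRedirectURL("example.com", https);
        qDebug() << "redirect URL is valid:" << !url.isEmpty(); // prints false
        return 0;
    }

Callers can then treat an empty QUrl as "HTTPS not configured" rather than relying on the server never being misconfigured.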
@ -1642,10 +1645,11 @@ bool DomainServer::isAuthenticatedRequest(HTTPConnection* connection, const QUrl
// add it to the set so we can handle the callback from the OAuth provider
_webAuthenticationStateSet.insert(stateUUID);
QUrl oauthRedirectURL = oauthAuthorizationURL(stateUUID);
QUrl authURL = oauthAuthorizationURL(stateUUID);
Headers redirectHeaders;
redirectHeaders.insert("Location", oauthRedirectURL.toEncoded());
redirectHeaders.insert("Location", authURL.toEncoded());
connection->respond(HTTPConnection::StatusCode302,
QByteArray(), HTTPConnection::DefaultContentType, redirectHeaders);
@ -1723,7 +1727,6 @@ QNetworkReply* DomainServer::profileRequestGivenTokenReply(QNetworkReply* tokenR
return NetworkAccessManager::getInstance().get(profileRequest);
}
const QString DS_SETTINGS_SESSIONS_GROUP = "web-sessions";
Headers DomainServer::setupCookieHeadersFromProfileReply(QNetworkReply* profileReply) {
Headers cookieHeaders;
@ -1737,10 +1740,6 @@ Headers DomainServer::setupCookieHeadersFromProfileReply(QNetworkReply* profileR
DomainServerWebSessionData sessionData(userObject);
_cookieSessionHash.insert(cookieUUID, sessionData);
// persist the cookie to settings file so we can get it back on DS relaunch
QStringList path = QStringList() << DS_SETTINGS_SESSIONS_GROUP << cookieUUID.toString();
Setting::Handle<QVariant>(path).set(QVariant::fromValue(sessionData));
// setup expiry for cookie to 1 month from today
QDateTime cookieExpiry = QDateTime::currentDateTimeUtc().addMonths(1);
@ -1757,18 +1756,6 @@ Headers DomainServer::setupCookieHeadersFromProfileReply(QNetworkReply* profileR
return cookieHeaders;
}
void DomainServer::loadExistingSessionsFromSettings() {
// read data for existing web sessions into memory so existing sessions can be leveraged
Settings domainServerSettings;
domainServerSettings.beginGroup(DS_SETTINGS_SESSIONS_GROUP);
foreach(const QString& uuidKey, domainServerSettings.childKeys()) {
_cookieSessionHash.insert(QUuid(uuidKey),
domainServerSettings.value(uuidKey).value<DomainServerWebSessionData>());
qDebug() << "Pulled web session from settings - cookie UUID is" << uuidKey;
}
}
void DomainServer::refreshStaticAssignmentAndAddToQueue(SharedAssignmentPointer& assignment) {
QUuid oldUUID = assignment->getUUID();
assignment->resetUUID();
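Note: the removed loadExistingSessionsFromSettings() body (and the matching persistence write deleted further up) followed the usual Qt pattern of iterating the child keys of a settings group. A minimal sketch of that pattern for reference; the org/app names and group name here are hypothetical, not the domain-server's actual settings layout:

    #include <QSettings>
    #include <QStringList>
    #include <QHash>
    #include <QUuid>
    #include <QVariant>
    #include <QDebug>

    // Read every entry of a settings group into an in-memory hash keyed by UUID.
    QHash<QUuid, QVariant> loadGroup(const QString& groupName) {
        QHash<QUuid, QVariant> sessions;
        QSettings settings("HighFidelity", "domain-server-example"); // hypothetical org/app names
        settings.beginGroup(groupName);
        const QStringList keys = settings.childKeys();
        for (const QString& uuidKey : keys) {
            sessions.insert(QUuid(uuidKey), settings.value(uuidKey));
            qDebug() << "Loaded session for cookie UUID" << uuidKey;
        }
        settings.endGroup();
        return sessions;
    }

With the persistence removed, web sessions now live only in the in-memory _cookieSessionHash and do not survive a domain-server relaunch.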

View file

@ -373,6 +373,10 @@ Grabber.prototype.pressEvent = function(event) {
beacon.updatePosition(this.startPosition);
if(!entityIsGrabbedByOther(this.entityID)){
this.moveEvent(event);
}
// TODO: play sounds again when we aren't leaking AudioInjector threads
//Audio.playSound(grabSound, { position: entityProperties.position, volume: VOLUME });
}

View file

@ -17,7 +17,6 @@ Windows.Window {
visible: false
width: 384; height: 640;
title: "Tools"
property string newTabSource
property alias tabView: tabView
onParentChanged: {
if (parent) {
@ -32,26 +31,21 @@ Windows.Window {
property alias y: toolWindow.y
}
property var webTabCreator: Component {
Controls.WebView {
id: webView
property string originalUrl;
// Both toolWindow.newTabSource and url can change, so we need
// to store the original url here, without creating any bindings
Component.onCompleted: {
originalUrl = toolWindow.newTabSource;
url = originalUrl;
}
}
}
TabView {
anchors.fill: parent
id: tabView;
onCountChanged: {
if (0 == count) {
toolWindow.visible = false
Repeater {
model: 4
Tab {
active: true
enabled: false;
// we need to store the original url here for future identification
property string originalUrl: "";
onEnabledChanged: toolWindow.updateVisiblity();
Controls.WebView {
id: webView;
anchors.fill: parent
}
}
}
}
@ -68,8 +62,7 @@ Windows.Window {
function findIndexForUrl(source) {
for (var i = 0; i < tabView.count; ++i) {
var tab = tabView.getTab(i);
if (tab && tab.item && tab.item.originalUrl &&
tab.item.originalUrl === source) {
if (tab.originalUrl === source) {
return i;
}
}
@ -101,21 +94,32 @@ Windows.Window {
}
}
function findFreeTab() {
for (var i = 0; i < tabView.count; ++i) {
var tab = tabView.getTab(i);
if (tab && (!tab.originalUrl || tab.originalUrl === "")) {
return i;
}
}
console.warn("Could not find free tab");
return -1;
}
function removeTabForUrl(source) {
var index = findIndexForUrl(source);
if (index < 0) {
return;
}
var tab = tabView.getTab(index);
tab.enabledChanged.disconnect(updateVisiblity);
tabView.removeTab(index);
console.log("Updating visibility based on child tab removed");
updateVisiblity();
tab.title = "";
tab.originalUrl = "";
tab.enabled = false;
}
function addWebTab(properties) {
if (!properties.source) {
console.warn("Attempted to open Web Tool Pane without URl")
console.warn("Attempted to open Web Tool Pane without URL")
return;
}
@ -125,11 +129,17 @@ Windows.Window {
return tabView.getTab(existingTabIndex);
}
var title = properties.title || "Unknown";
newTabSource = properties.source;
var newTab = tabView.addTab(title, webTabCreator);
var freeTabIndex = findFreeTab();
if (freeTabIndex === -1) {
console.warn("Unable to add new tab");
return;
}
var newTab = tabView.getTab(freeTabIndex);
newTab.title = properties.title || "Unknown";
newTab.originalUrl = properties.source;
newTab.item.url = properties.source;
newTab.active = true;
newTab.enabled = false;
if (properties.width) {
tabView.width = Math.min(Math.max(tabView.width, properties.width),

View file

@ -757,7 +757,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
connect(&nodeList->getPacketReceiver(), &PacketReceiver::dataReceived,
bandwidthRecorder.data(), &BandwidthRecorder::updateInboundData);
connect(&getMyAvatar()->getSkeletonModel(), &SkeletonModel::skeletonLoaded,
// FIXME -- I'm a little concerned about this.
connect(getMyAvatar()->getSkeletonModel().get(), &SkeletonModel::skeletonLoaded,
this, &Application::checkSkeleton, Qt::QueuedConnection);
// Setup the userInputMapper with the actions
@ -875,7 +876,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
_applicationStateDevice = std::make_shared<controller::StateController>();
_applicationStateDevice->addInputVariant(QString("InHMD"), controller::StateController::ReadLambda([]() -> float {
return (float)qApp->getAvatarUpdater()->isHMDMode();
return (float)qApp->isHMDMode();
}));
_applicationStateDevice->addInputVariant(QString("SnapTurn"), controller::StateController::ReadLambda([]() -> float {
return (float)qApp->getMyAvatar()->getSnapTurn();
@ -1114,7 +1115,6 @@ void Application::cleanupBeforeQuit() {
// first stop all timers directly or by invokeMethod
// depending on what thread they run in
_avatarUpdate->terminate();
locationUpdateTimer.stop();
balanceUpdateTimer.stop();
identityPacketTimer.stop();
@ -1375,6 +1375,15 @@ void Application::initializeUi() {
void Application::paintGL() {
// Some plugins process message events, potentially leading to
// re-entering a paint event. don't allow further processing if this
// happens
if (_inPaint) {
return;
}
_inPaint = true;
Finally clearFlagLambda([this] { _inPaint = false; });
// paintGL uses a queued connection, so we can get messages from the queue even after we've quit
// and the plugins have shutdown
if (_aboutToQuit) {
@ -1407,15 +1416,6 @@ void Application::paintGL() {
return;
}
// Some plugins process message events, potentially leading to
// re-entering a paint event. don't allow further processing if this
// happens
if (_inPaint) {
return;
}
_inPaint = true;
Finally clearFlagLambda([this] { _inPaint = false; });
auto displayPlugin = getActiveDisplayPlugin();
// FIXME not needed anymore?
_offscreenContext->makeCurrent();
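Note: the guard moved to the top of paintGL() relies on Finally to clear _inPaint on every return path. A minimal sketch of that re-entrancy pattern, using a small hand-rolled scope guard rather than the repo's Finally helper:

    #include <cstdio>
    #include <functional>

    // Runs the stored callable when the enclosing scope exits, on any return path.
    class ScopeGuard {
    public:
        explicit ScopeGuard(std::function<void()> onExit) : _onExit(std::move(onExit)) {}
        ~ScopeGuard() { _onExit(); }
    private:
        std::function<void()> _onExit;
    };

    static bool inPaint = false;

    void paintOnce() {
        if (inPaint) {
            return; // re-entered while a paint was already in progress; bail immediately
        }
        inPaint = true;
        ScopeGuard clearFlag([] { inPaint = false; });

        // ... plugin event processing here could re-enter paintOnce(), which now returns early ...
        std::puts("painting");
    }   // clearFlag's destructor resets inPaint even on early returns

    int main() {
        paintOnce();
        paintOnce(); // not re-entered here, so this paints again
        return 0;
    }

The same flag is reused by the idle() change later in this file, which now bails out while a frame is mid-paint.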
@ -1479,7 +1479,6 @@ void Application::paintGL() {
auto myAvatar = getMyAvatar();
boomOffset = myAvatar->getScale() * myAvatar->getBoomLength() * -IDENTITY_FRONT;
myAvatar->startCapture();
if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON || _myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, myAvatar->getBoomLength() <= MyAvatar::ZOOM_MIN);
Menu::getInstance()->setIsOptionChecked(MenuOption::ThirdPerson, !(myAvatar->getBoomLength() <= MyAvatar::ZOOM_MIN));
@ -1567,7 +1566,6 @@ void Application::paintGL() {
if (!isHMDMode()) {
_myCamera.update(1.0f / _fps);
}
myAvatar->endCapture();
}
getApplicationCompositor().setFrameInfo(_frameCount, _myCamera.getTransform());
@ -2498,11 +2496,10 @@ static uint32_t _renderedFrameIndex { INVALID_FRAME };
void Application::idle(uint64_t now) {
if (_aboutToQuit) {
if (_aboutToQuit || _inPaint) {
return; // bail early, nothing to do here.
}
checkChangeCursor();
Stats::getInstance()->updateStats();
@ -2911,37 +2908,6 @@ void Application::init() {
// Make sure any new sounds are loaded as soon as we know about them.

connect(tree.get(), &EntityTree::newCollisionSoundURL, DependencyManager::get<SoundCache>().data(), &SoundCache::getSound);
connect(getMyAvatar(), &MyAvatar::newCollisionSoundURL, DependencyManager::get<SoundCache>().data(), &SoundCache::getSound);
setAvatarUpdateThreading();
}
const bool ENABLE_AVATAR_UPDATE_THREADING = false;
void Application::setAvatarUpdateThreading() {
setAvatarUpdateThreading(ENABLE_AVATAR_UPDATE_THREADING);
}
void Application::setRawAvatarUpdateThreading() {
setRawAvatarUpdateThreading(ENABLE_AVATAR_UPDATE_THREADING);
}
void Application::setRawAvatarUpdateThreading(bool isThreaded) {
if (_avatarUpdate) {
if (_avatarUpdate->isThreaded() == isThreaded) {
return;
}
_avatarUpdate->terminate();
}
_avatarUpdate = new AvatarUpdate();
_avatarUpdate->initialize(isThreaded);
}
void Application::setAvatarUpdateThreading(bool isThreaded) {
if (_avatarUpdate && (_avatarUpdate->isThreaded() == isThreaded)) {
return;
}
if (_avatarUpdate) {
_avatarUpdate->terminate(); // Must be before we shutdown anim graph.
}
_avatarUpdate = new AvatarUpdate();
_avatarUpdate->initialize(isThreaded);
}
void Application::updateLOD() {
@ -2969,7 +2935,7 @@ void Application::updateMyAvatarLookAtPosition() {
auto eyeTracker = DependencyManager::get<EyeTracker>();
bool isLookingAtSomeone = false;
bool isHMD = _avatarUpdate->isHMDMode();
bool isHMD = qApp->isHMDMode();
glm::vec3 lookAtSpot;
if (eyeTracker->isTracking() && (isHMD || eyeTracker->isSimulating())) {
// Look at the point that the user is looking at.
@ -3020,7 +2986,7 @@ void Application::updateMyAvatarLookAtPosition() {
} else {
// I am not looking at anyone else, so just look forward
if (isHMD) {
glm::mat4 headPose = _avatarUpdate->getHeadPose();
glm::mat4 headPose = myAvatar->getHMDSensorMatrix();
glm::quat headRotation = glm::quat_cast(headPose);
lookAtSpot = myAvatar->getPosition() +
myAvatar->getOrientation() * (headRotation * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
@ -3267,9 +3233,10 @@ void Application::update(float deltaTime) {
updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...
updateDialogs(deltaTime); // update various stats dialogs if present
QSharedPointer<AvatarManager> avatarManager = DependencyManager::get<AvatarManager>();
if (_physicsEnabled) {
PerformanceTimer perfTimer("physics");
AvatarManager* avatarManager = DependencyManager::get<AvatarManager>().data();
{
PerformanceTimer perfTimer("updateStates)");
@ -3338,7 +3305,18 @@ void Application::update(float deltaTime) {
}
}
_avatarUpdate->synchronousProcess();
// AvatarManager update
{
PerformanceTimer perfTimer("AvatarManger");
qApp->setAvatarSimrateSample(1.0f / deltaTime);
avatarManager->updateOtherAvatars(deltaTime);
qApp->updateMyAvatarLookAtPosition();
avatarManager->updateMyAvatar(deltaTime);
}
{
PerformanceTimer perfTimer("overlays");
@ -3835,9 +3813,7 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
// FIXME: This preRender call is temporary until we create a separate render::scene for the mirror rendering.
// Then we can move this logic into the Avatar::simulate call.
auto myAvatar = getMyAvatar();
myAvatar->startRender();
myAvatar->preRender(renderArgs);
myAvatar->endRender();
// Update animation debug draw renderer
AnimDebugDraw::getInstance().update();
@ -3919,9 +3895,7 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
_renderEngine->getRenderContext()->args = renderArgs;
// Before the deferred pass, let's try to use the render engine
myAvatar->startRenderRun();
_renderEngine->run();
myAvatar->endRenderRun();
}
activeRenderingThread = nullptr;
@ -4604,7 +4578,7 @@ void Application::notifyPacketVersionMismatch() {
}
void Application::checkSkeleton() {
if (getMyAvatar()->getSkeletonModel().isActive() && !getMyAvatar()->getSkeletonModel().hasSkeleton()) {
if (getMyAvatar()->getSkeletonModel()->isActive() && !getMyAvatar()->getSkeletonModel()->hasSkeleton()) {
qCDebug(interfaceapp) << "MyAvatar model has no skeleton";
QString message = "Your selected avatar body has no skeleton.\n\nThe default body will be loaded...";

View file

@ -40,7 +40,6 @@
#include <ViewFrustum.h>
#include <AbstractUriHandler.h>
#include "avatar/AvatarUpdate.h"
#include "avatar/MyAvatar.h"
#include "Bookmarks.h"
#include "Camera.h"
@ -215,7 +214,6 @@ public:
const QRect& getMirrorViewRect() const { return _mirrorViewRect; }
void updateMyAvatarLookAtPosition();
AvatarUpdate* getAvatarUpdater() { return _avatarUpdate; }
float getAvatarSimrate();
void setAvatarSimrateSample(float sample);
@ -253,11 +251,6 @@ public slots:
void openUrl(const QUrl& url);
void setAvatarUpdateThreading();
void setAvatarUpdateThreading(bool isThreaded);
void setRawAvatarUpdateThreading();
void setRawAvatarUpdateThreading(bool isThreaded);
void resetSensors(bool andReload = false);
void setActiveFaceTracker();
@ -420,7 +413,6 @@ private:
std::shared_ptr<controller::StateController> _applicationStateDevice; // Default ApplicationDevice reflecting the state of different properties of the session
std::shared_ptr<KeyboardMouseDevice> _keyboardMouseDevice; // Default input device, the good old keyboard mouse and maybe touchpad
AvatarUpdate* _avatarUpdate {nullptr};
SimpleMovingAverage _avatarSimsPerSecond {10};
int _avatarSimsPerSecondReport {0};
quint64 _lastAvatarSimsPerSecondUpdate {0};

View file

@ -85,7 +85,6 @@ namespace MenuOption {
const QString DontRenderEntitiesAsScene = "Don't Render Entities as Scene";
const QString EchoLocalAudio = "Echo Local Audio";
const QString EchoServerAudio = "Echo Server Audio";
const QString Enable3DTVMode = "Enable 3DTV Mode";
const QString EnableCharacterController = "Enable avatar collisions";
const QString EnableInverseKinematics = "Enable Inverse Kinematics";
const QString ExpandMyAvatarSimulateTiming = "Expand /myAvatar/simulation";

View file

@ -79,7 +79,6 @@ namespace render {
Avatar::Avatar(RigPointer rig) :
AvatarData(),
_skeletonModel(this, nullptr, rig),
_skeletonOffset(0.0f),
_bodyYawDelta(0.0f),
_positionDeltaAccumulator(0.0f),
@ -100,6 +99,8 @@ Avatar::Avatar(RigPointer rig) :
// give the pointer to our head to inherited _headData variable from AvatarData
_headData = static_cast<HeadData*>(new Head(this));
_skeletonModel = std::make_shared<SkeletonModel>(this, nullptr, rig);
}
Avatar::~Avatar() {
@ -112,19 +113,19 @@ Avatar::~Avatar() {
void Avatar::init() {
getHead()->init();
_skeletonModel.init();
_skeletonModel->init();
_initialized = true;
}
glm::vec3 Avatar::getChestPosition() const {
// for now, let's just assume that the "chest" is halfway between the root and the neck
glm::vec3 neckPosition;
return _skeletonModel.getNeckPosition(neckPosition) ? (getPosition() + neckPosition) * 0.5f : getPosition();
return _skeletonModel->getNeckPosition(neckPosition) ? (getPosition() + neckPosition) * 0.5f : getPosition();
}
glm::vec3 Avatar::getNeckPosition() const {
glm::vec3 neckPosition;
return _skeletonModel.getNeckPosition(neckPosition) ? neckPosition : getPosition();
return _skeletonModel->getNeckPosition(neckPosition) ? neckPosition : getPosition();
}
@ -137,10 +138,10 @@ AABox Avatar::getBounds() const {
// Except, that getPartBounds produces an infinite, uncentered bounding box when the model is not yet parsed,
// and we want a centered one. NOTE: There is code that may never try to render, and thus never load and get the
// real model bounds, if this is unrealistically small.
if (!_skeletonModel.isRenderable()) {
if (!_skeletonModel->isRenderable()) {
return AABox(getPosition(), getUniformScale()); // approximately 2m tall, scaled to user request.
}
return _skeletonModel.getPartBounds(0, 0, getPosition(), getOrientation());
return _skeletonModel->getPartBounds(0, 0, getPosition(), getOrientation());
}
void Avatar::animateScaleChanges(float deltaTime) {
@ -191,8 +192,8 @@ void Avatar::simulate(float deltaTime) {
if (_shouldAnimate && !_shouldSkipRender && inView) {
{
PerformanceTimer perfTimer("skeleton");
_skeletonModel.getRig()->copyJointsFromJointData(_jointData);
_skeletonModel.simulate(deltaTime, _hasNewJointRotations || _hasNewJointTranslations);
_skeletonModel->getRig()->copyJointsFromJointData(_jointData);
_skeletonModel->simulate(deltaTime, _hasNewJointRotations || _hasNewJointTranslations);
locationChanged(); // joints changed, so if there are any children, update them.
_hasNewJointRotations = false;
_hasNewJointTranslations = false;
@ -200,7 +201,7 @@ void Avatar::simulate(float deltaTime) {
{
PerformanceTimer perfTimer("head");
glm::vec3 headPosition = getPosition();
_skeletonModel.getHeadPosition(headPosition);
_skeletonModel->getHeadPosition(headPosition);
Head* head = getHead();
head->setPosition(headPosition);
head->setScale(getUniformScale());
@ -295,8 +296,7 @@ bool Avatar::addToScene(AvatarSharedPointer self, std::shared_ptr<render::Scene>
auto avatarPayloadPointer = Avatar::PayloadPointer(avatarPayload);
_renderItemID = scene->allocateID();
pendingChanges.resetItem(_renderItemID, avatarPayloadPointer);
_skeletonModel.addToScene(scene, pendingChanges);
getHead()->getFaceModel().addToScene(scene, pendingChanges);
_skeletonModel->addToScene(scene, pendingChanges);
for (auto& attachmentModel : _attachmentModels) {
attachmentModel->addToScene(scene, pendingChanges);
@ -308,8 +308,7 @@ bool Avatar::addToScene(AvatarSharedPointer self, std::shared_ptr<render::Scene>
void Avatar::removeFromScene(AvatarSharedPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
pendingChanges.removeItem(_renderItemID);
render::Item::clearID(_renderItemID);
_skeletonModel.removeFromScene(scene, pendingChanges);
getHead()->getFaceModel().removeFromScene(scene, pendingChanges);
_skeletonModel->removeFromScene(scene, pendingChanges);
for (auto& attachmentModel : _attachmentModels) {
attachmentModel->removeFromScene(scene, pendingChanges);
}
@ -340,12 +339,12 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
if (_handState & IS_FINGER_POINTING_FLAG) {
int leftIndexTip = getJointIndex("LeftHandIndex4");
int leftIndexTipJoint = getJointIndex("LeftHandIndex3");
havePosition = _skeletonModel.getJointPositionInWorldFrame(leftIndexTip, position);
haveRotation = _skeletonModel.getJointRotationInWorldFrame(leftIndexTipJoint, rotation);
havePosition = _skeletonModel->getJointPositionInWorldFrame(leftIndexTip, position);
haveRotation = _skeletonModel->getJointRotationInWorldFrame(leftIndexTipJoint, rotation);
} else {
int leftHand = _skeletonModel.getLeftHandJointIndex();
havePosition = _skeletonModel.getJointPositionInWorldFrame(leftHand, position);
haveRotation = _skeletonModel.getJointRotationInWorldFrame(leftHand, rotation);
int leftHand = _skeletonModel->getLeftHandJointIndex();
havePosition = _skeletonModel->getJointPositionInWorldFrame(leftHand, position);
haveRotation = _skeletonModel->getJointRotationInWorldFrame(leftHand, rotation);
}
if (havePosition && haveRotation) {
@ -364,12 +363,12 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
if (_handState & IS_FINGER_POINTING_FLAG) {
int rightIndexTip = getJointIndex("RightHandIndex4");
int rightIndexTipJoint = getJointIndex("RightHandIndex3");
havePosition = _skeletonModel.getJointPositionInWorldFrame(rightIndexTip, position);
haveRotation = _skeletonModel.getJointRotationInWorldFrame(rightIndexTipJoint, rotation);
havePosition = _skeletonModel->getJointPositionInWorldFrame(rightIndexTip, position);
haveRotation = _skeletonModel->getJointRotationInWorldFrame(rightIndexTipJoint, rotation);
} else {
int rightHand = _skeletonModel.getRightHandJointIndex();
havePosition = _skeletonModel.getJointPositionInWorldFrame(rightHand, position);
haveRotation = _skeletonModel.getJointRotationInWorldFrame(rightHand, rotation);
int rightHand = _skeletonModel->getRightHandJointIndex();
havePosition = _skeletonModel->getJointPositionInWorldFrame(rightHand, position);
haveRotation = _skeletonModel->getJointRotationInWorldFrame(rightHand, rotation);
}
if (havePosition && haveRotation) {
@ -394,7 +393,6 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
}
if (!frustum->sphereIntersectsFrustum(getPosition(), boundingRadius)) {
endRender();
return;
}
@ -426,7 +424,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
const float LIGHT_EXPONENT = 1.0f;
const float LIGHT_CUTOFF = glm::radians(80.0f);
float distance = BASE_LIGHT_DISTANCE * getUniformScale();
glm::vec3 position = glm::mix(_skeletonModel.getTranslation(), getHead()->getFaceModel().getTranslation(), 0.9f);
glm::vec3 position = _skeletonModel->getTranslation();
glm::quat orientation = getOrientation();
foreach (const AvatarManager::LocalLight& light, DependencyManager::get<AvatarManager>()->getLocalLights()) {
glm::vec3 direction = orientation * light.direction;
@ -436,10 +434,10 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
}
bool renderBounding = Menu::getInstance()->isOptionChecked(MenuOption::RenderBoundingCollisionShapes);
if (renderBounding && shouldRenderHead(renderArgs) && _skeletonModel.isRenderable()) {
if (renderBounding && shouldRenderHead(renderArgs) && _skeletonModel->isRenderable()) {
PROFILE_RANGE_BATCH(batch, __FUNCTION__":skeletonBoundingCollisionShapes");
const float BOUNDING_SHAPE_ALPHA = 0.7f;
_skeletonModel.renderBoundingCollisionShapes(*renderArgs->_batch, getUniformScale(), BOUNDING_SHAPE_ALPHA);
_skeletonModel->renderBoundingCollisionShapes(*renderArgs->_batch, getUniformScale(), BOUNDING_SHAPE_ALPHA);
}
// If this is the avatar being looked at, render a little ball above their head
@ -468,7 +466,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
* (1.0f - ((float)(now - getHead()->getLookingAtMeStarted()))
/ (LOOKING_AT_ME_DURATION * (float)USECS_PER_SECOND));
if (alpha > 0.0f) {
QSharedPointer<NetworkGeometry> geometry = _skeletonModel.getGeometry();
QSharedPointer<NetworkGeometry> geometry = _skeletonModel->getGeometry();
if (geometry && geometry->isLoaded()) {
const float DEFAULT_EYE_DIAMETER = 0.048f; // Typical human eye
const float RADIUS_INCREMENT = 0.005f;
@ -513,7 +511,6 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
renderDisplayName(batch, frustum, textPosition);
}
}
endRender();
}
glm::quat Avatar::computeRotationFromBodyToWorldUp(float proportion) const {
@ -539,14 +536,9 @@ void Avatar::fixupModelsInScene() {
// fix them up in the scene
render::ScenePointer scene = qApp->getMain3DScene();
render::PendingChanges pendingChanges;
if (_skeletonModel.isRenderable() && _skeletonModel.needsFixupInScene()) {
_skeletonModel.removeFromScene(scene, pendingChanges);
_skeletonModel.addToScene(scene, pendingChanges);
}
Model& faceModel = getHead()->getFaceModel();
if (faceModel.isRenderable() && faceModel.needsFixupInScene()) {
faceModel.removeFromScene(scene, pendingChanges);
faceModel.addToScene(scene, pendingChanges);
if (_skeletonModel->isRenderable() && _skeletonModel->needsFixupInScene()) {
_skeletonModel->removeFromScene(scene, pendingChanges);
_skeletonModel->addToScene(scene, pendingChanges);
}
for (auto& attachmentModel : _attachmentModels) {
if (attachmentModel->isRenderable() && attachmentModel->needsFixupInScene()) {
@ -564,14 +556,7 @@ void Avatar::fixupModelsInScene() {
}
void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, float glowLevel) {
fixupModelsInScene();
{
if (_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable()) {
getHead()->render(renderArgs, 1.0f, renderFrustum);
}
}
getHead()->renderLookAts(renderArgs);
}
@ -593,8 +578,8 @@ void Avatar::simulateAttachments(float deltaTime) {
model->setRotation(getOrientation() * Quaternions::Y_180);
model->simulate(deltaTime);
} else {
if (_skeletonModel.getJointPositionInWorldFrame(jointIndex, jointPosition) &&
_skeletonModel.getJointRotationInWorldFrame(jointIndex, jointRotation)) {
if (_skeletonModel->getJointPositionInWorldFrame(jointIndex, jointPosition) &&
_skeletonModel->getJointRotationInWorldFrame(jointIndex, jointRotation)) {
model->setTranslation(jointPosition + jointRotation * attachment.translation * getUniformScale());
model->setRotation(jointRotation * attachment.rotation);
model->setScaleToFit(true, getUniformScale() * attachment.scale, true); // hack to force rescale
@ -635,7 +620,7 @@ glm::vec3 Avatar::getDisplayNamePosition() const {
glm::vec3 bodyUpDirection = getBodyUpDirection();
DEBUG_VALUE("bodyUpDirection =", bodyUpDirection);
if (getSkeletonModel().getNeckPosition(namePosition)) {
if (getSkeletonModel()->getNeckPosition(namePosition)) {
float headHeight = getHeadHeight();
DEBUG_VALUE("namePosition =", namePosition);
DEBUG_VALUE("headHeight =", headHeight);
@ -780,46 +765,46 @@ QVector<glm::quat> Avatar::getJointRotations() const {
if (QThread::currentThread() != thread()) {
return AvatarData::getJointRotations();
}
QVector<glm::quat> jointRotations(_skeletonModel.getJointStateCount());
for (int i = 0; i < _skeletonModel.getJointStateCount(); ++i) {
_skeletonModel.getJointRotation(i, jointRotations[i]);
QVector<glm::quat> jointRotations(_skeletonModel->getJointStateCount());
for (int i = 0; i < _skeletonModel->getJointStateCount(); ++i) {
_skeletonModel->getJointRotation(i, jointRotations[i]);
}
return jointRotations;
}
glm::quat Avatar::getJointRotation(int index) const {
glm::quat rotation;
_skeletonModel.getJointRotation(index, rotation);
_skeletonModel->getJointRotation(index, rotation);
return rotation;
}
glm::vec3 Avatar::getJointTranslation(int index) const {
glm::vec3 translation;
_skeletonModel.getJointTranslation(index, translation);
_skeletonModel->getJointTranslation(index, translation);
return translation;
}
glm::quat Avatar::getDefaultJointRotation(int index) const {
glm::quat rotation;
_skeletonModel.getRelativeDefaultJointRotation(index, rotation);
_skeletonModel->getRelativeDefaultJointRotation(index, rotation);
return rotation;
}
glm::vec3 Avatar::getDefaultJointTranslation(int index) const {
glm::vec3 translation;
_skeletonModel.getRelativeDefaultJointTranslation(index, translation);
_skeletonModel->getRelativeDefaultJointTranslation(index, translation);
return translation;
}
glm::quat Avatar::getAbsoluteJointRotationInObjectFrame(int index) const {
glm::quat rotation;
_skeletonModel.getAbsoluteJointRotationInRigFrame(index, rotation);
_skeletonModel->getAbsoluteJointRotationInRigFrame(index, rotation);
return Quaternions::Y_180 * rotation;
}
glm::vec3 Avatar::getAbsoluteJointTranslationInObjectFrame(int index) const {
glm::vec3 translation;
_skeletonModel.getAbsoluteJointTranslationInRigFrame(index, translation);
_skeletonModel->getAbsoluteJointTranslationInRigFrame(index, translation);
return Quaternions::Y_180 * translation;
}
@ -830,7 +815,7 @@ int Avatar::getJointIndex(const QString& name) const {
Q_RETURN_ARG(int, result), Q_ARG(const QString&, name));
return result;
}
return _skeletonModel.isActive() ? _skeletonModel.getGeometry()->getFBXGeometry().getJointIndex(name) : -1;
return _skeletonModel->isActive() ? _skeletonModel->getGeometry()->getFBXGeometry().getJointIndex(name) : -1;
}
QStringList Avatar::getJointNames() const {
@ -840,7 +825,7 @@ QStringList Avatar::getJointNames() const {
Q_RETURN_ARG(QStringList, result));
return result;
}
return _skeletonModel.isActive() ? _skeletonModel.getGeometry()->getFBXGeometry().getJointNames() : QStringList();
return _skeletonModel->isActive() ? _skeletonModel->getGeometry()->getFBXGeometry().getJointNames() : QStringList();
}
glm::vec3 Avatar::getJointPosition(int index) const {
@ -851,7 +836,7 @@ glm::vec3 Avatar::getJointPosition(int index) const {
return position;
}
glm::vec3 position;
_skeletonModel.getJointPositionInWorldFrame(index, position);
_skeletonModel->getJointPositionInWorldFrame(index, position);
return position;
}
@ -863,7 +848,7 @@ glm::vec3 Avatar::getJointPosition(const QString& name) const {
return position;
}
glm::vec3 position;
_skeletonModel.getJointPositionInWorldFrame(getJointIndex(name), position);
_skeletonModel->getJointPositionInWorldFrame(getJointIndex(name), position);
return position;
}
@ -872,14 +857,9 @@ void Avatar::scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const {
positionToScale = getPosition() + getUniformScale() * (positionToScale - getPosition());
}
void Avatar::setFaceModelURL(const QUrl& faceModelURL) {
AvatarData::setFaceModelURL(faceModelURL);
getHead()->getFaceModel().setURL(_faceModelURL);
}
void Avatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
AvatarData::setSkeletonModelURL(skeletonModelURL);
_skeletonModel.setURL(_skeletonModelURL);
_skeletonModel->setURL(_skeletonModelURL);
}
// create new model, can return an instance of a SoftAttachmentModel rather than Model
@ -912,12 +892,12 @@ void Avatar::setAttachmentData(const QVector<AttachmentData>& attachmentData) {
for (int i = 0; i < attachmentData.size(); i++) {
if (i == (int)_attachmentModels.size()) {
// if number of attachments has been increased, we need to allocate a new model
_attachmentModels.push_back(allocateAttachmentModel(attachmentData[i].isSoft, _skeletonModel.getRig()));
_attachmentModels.push_back(allocateAttachmentModel(attachmentData[i].isSoft, _skeletonModel->getRig()));
}
else if (i < oldAttachmentData.size() && oldAttachmentData[i].isSoft != attachmentData[i].isSoft) {
// if the attachment has changed type, we need to re-allocate a new one.
_attachmentsToRemove.push_back(_attachmentModels[i]);
_attachmentModels[i] = allocateAttachmentModel(attachmentData[i].isSoft, _skeletonModel.getRig());
_attachmentModels[i] = allocateAttachmentModel(attachmentData[i].isSoft, _skeletonModel->getRig());
}
_attachmentModels[i]->setURL(attachmentData[i].modelURL);
}
@ -925,7 +905,6 @@ void Avatar::setAttachmentData(const QVector<AttachmentData>& attachmentData) {
int Avatar::parseDataFromBuffer(const QByteArray& buffer) {
startUpdate();
if (!_initialized) {
// now that we have data for this Avatar we are go for init
init();
@ -944,7 +923,6 @@ int Avatar::parseDataFromBuffer(const QByteArray& buffer) {
if (_moving || _hasNewJointRotations || _hasNewJointTranslations) {
locationChanged();
}
endUpdate();
return bytesRead;
}
@ -1004,24 +982,14 @@ void Avatar::renderJointConnectingCone(gpu::Batch& batch, glm::vec3 position1, g
}
float Avatar::getSkeletonHeight() const {
Extents extents = _skeletonModel.getBindExtents();
Extents extents = _skeletonModel->getBindExtents();
return extents.maximum.y - extents.minimum.y;
}
float Avatar::getHeadHeight() const {
Extents extents = getHead()->getFaceModel().getMeshExtents();
if (!extents.isEmpty() && extents.isValid()) {
// HACK: We have a really odd case when fading out for some models where this value explodes
float result = extents.maximum.y - extents.minimum.y;
if (result >= 0.0f && result < 100.0f * getUniformScale() ) {
return result;
}
}
extents = _skeletonModel.getMeshExtents();
Extents extents = _skeletonModel->getMeshExtents();
glm::vec3 neckPosition;
if (!extents.isEmpty() && extents.isValid() && _skeletonModel.getNeckPosition(neckPosition)) {
if (!extents.isEmpty() && extents.isValid() && _skeletonModel->getNeckPosition(neckPosition)) {
return extents.maximum.y / 2.0f - neckPosition.y + getPosition().y;
}
@ -1030,7 +998,7 @@ float Avatar::getHeadHeight() const {
}
float Avatar::getPelvisFloatingHeight() const {
return -_skeletonModel.getBindExtents().minimum.y;
return -_skeletonModel->getBindExtents().minimum.y;
}
void Avatar::setShowDisplayName(bool showDisplayName) {
@ -1058,9 +1026,9 @@ void Avatar::setShowDisplayName(bool showDisplayName) {
// virtual
void Avatar::computeShapeInfo(ShapeInfo& shapeInfo) {
float uniformScale = getUniformScale();
shapeInfo.setCapsuleY(uniformScale * _skeletonModel.getBoundingCapsuleRadius(),
0.5f * uniformScale * _skeletonModel.getBoundingCapsuleHeight());
shapeInfo.setOffset(uniformScale * _skeletonModel.getBoundingCapsuleOffset());
shapeInfo.setCapsuleY(uniformScale * _skeletonModel->getBoundingCapsuleRadius(),
0.5f * uniformScale * _skeletonModel->getBoundingCapsuleHeight());
shapeInfo.setOffset(uniformScale * _skeletonModel->getBoundingCapsuleOffset());
}
void Avatar::setMotionState(AvatarMotionState* motionState) {
@ -1098,12 +1066,12 @@ glm::vec3 Avatar::getUncachedLeftPalmPosition() const {
assert(QThread::currentThread() == thread()); // main thread access only
glm::quat leftPalmRotation;
glm::vec3 leftPalmPosition;
if (_skeletonModel.getLeftGrabPosition(leftPalmPosition)) {
if (_skeletonModel->getLeftGrabPosition(leftPalmPosition)) {
return leftPalmPosition;
}
// avatar didn't have a LeftHandMiddle1 joint, fall back on this:
getSkeletonModel().getJointRotationInWorldFrame(getSkeletonModel().getLeftHandJointIndex(), leftPalmRotation);
getSkeletonModel().getLeftHandPosition(leftPalmPosition);
getSkeletonModel()->getJointRotationInWorldFrame(getSkeletonModel()->getLeftHandJointIndex(), leftPalmRotation);
getSkeletonModel()->getLeftHandPosition(leftPalmPosition);
leftPalmPosition += HAND_TO_PALM_OFFSET * glm::inverse(leftPalmRotation);
return leftPalmPosition;
}
@ -1111,7 +1079,7 @@ glm::vec3 Avatar::getUncachedLeftPalmPosition() const {
glm::quat Avatar::getUncachedLeftPalmRotation() const {
assert(QThread::currentThread() == thread()); // main thread access only
glm::quat leftPalmRotation;
getSkeletonModel().getJointRotationInWorldFrame(getSkeletonModel().getLeftHandJointIndex(), leftPalmRotation);
getSkeletonModel()->getJointRotationInWorldFrame(getSkeletonModel()->getLeftHandJointIndex(), leftPalmRotation);
return leftPalmRotation;
}
@ -1119,12 +1087,12 @@ glm::vec3 Avatar::getUncachedRightPalmPosition() const {
assert(QThread::currentThread() == thread()); // main thread access only
glm::quat rightPalmRotation;
glm::vec3 rightPalmPosition;
if (_skeletonModel.getRightGrabPosition(rightPalmPosition)) {
if (_skeletonModel->getRightGrabPosition(rightPalmPosition)) {
return rightPalmPosition;
}
// avatar didn't have a RightHandMiddle1 joint, fall back on this:
getSkeletonModel().getJointRotationInWorldFrame(getSkeletonModel().getRightHandJointIndex(), rightPalmRotation);
getSkeletonModel().getRightHandPosition(rightPalmPosition);
getSkeletonModel()->getJointRotationInWorldFrame(getSkeletonModel()->getRightHandJointIndex(), rightPalmRotation);
getSkeletonModel()->getRightHandPosition(rightPalmPosition);
rightPalmPosition += HAND_TO_PALM_OFFSET * glm::inverse(rightPalmRotation);
return rightPalmPosition;
}
@ -1132,7 +1100,7 @@ glm::vec3 Avatar::getUncachedRightPalmPosition() const {
glm::quat Avatar::getUncachedRightPalmRotation() const {
assert(QThread::currentThread() == thread()); // main thread access only
glm::quat rightPalmRotation;
getSkeletonModel().getJointRotationInWorldFrame(getSkeletonModel().getRightHandJointIndex(), rightPalmRotation);
getSkeletonModel()->getJointRotationInWorldFrame(getSkeletonModel()->getRightHandJointIndex(), rightPalmRotation);
return rightPalmRotation;
}

View file

@ -84,8 +84,8 @@ public:
bool getIsLookAtTarget() const { return _isLookAtTarget; }
//getters
bool isInitialized() const { return _initialized; }
SkeletonModel& getSkeletonModel() { return _skeletonModel; }
const SkeletonModel& getSkeletonModel() const { return _skeletonModel; }
SkeletonModelPointer getSkeletonModel() { return _skeletonModel; }
const SkeletonModelPointer getSkeletonModel() const { return _skeletonModel; }
glm::vec3 getChestPosition() const;
float getUniformScale() const { return getScale().y; }
const Head* getHead() const { return static_cast<const Head*>(_headData); }
@ -114,7 +114,6 @@ public:
virtual bool setAbsoluteJointRotationInObjectFrame(int index, const glm::quat& rotation) override { return false; }
virtual bool setAbsoluteJointTranslationInObjectFrame(int index, const glm::vec3& translation) override { return false; }
virtual void setFaceModelURL(const QUrl& faceModelURL) override;
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL) override;
virtual void setAttachmentData(const QVector<AttachmentData>& attachmentData) override;
@ -144,7 +143,7 @@ public:
void scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const;
void slamPosition(const glm::vec3& position);
virtual void updateAttitude() override { _skeletonModel.updateAttitude(); }
virtual void updateAttitude() override { _skeletonModel->updateAttitude(); }
// Call this when updating Avatar position with a delta. This will allow us to
// _accurately_ measure position changes and compute the resulting velocity
@ -188,7 +187,7 @@ protected:
void setMotionState(AvatarMotionState* motionState);
SkeletonModel _skeletonModel;
SkeletonModelPointer _skeletonModel;
glm::vec3 _skeletonOffset;
std::vector<std::shared_ptr<Model>> _attachmentModels;
std::vector<std::shared_ptr<Model>> _attachmentsToRemove;
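Note: Avatar.h switches _skeletonModel from a by-value SkeletonModel member to a SkeletonModelPointer (a shared_ptr, judging from the std::make_shared call in the Avatar constructor), which is why every `_skeletonModel.` call site in the .cpp files becomes `_skeletonModel->`. A minimal sketch of that refactor with hypothetical types:

    #include <memory>
    #include <cstdio>

    // Hypothetical model type standing in for SkeletonModel.
    struct Model {
        void init() { std::puts("model init"); }
        bool isRenderable() const { return true; }
    };
    using ModelPointer = std::shared_ptr<Model>;

    class Owner {
    public:
        Owner() : _model(std::make_shared<Model>()) {}   // was: Model _model; constructed in place
        ModelPointer getModel() { return _model; }        // was: Model& getModel()
        const ModelPointer getModel() const { return _model; }  // mirrors the const overload above
        void init() { _model->init(); }                   // was: _model.init()
    private:
        ModelPointer _model;                              // was: Model _model;
    };

    int main() {
        Owner owner;
        owner.init();
        // Sharing the pointer lets consumers hold the model beyond a single call,
        // e.g. connect(owner.getModel().get(), ...) as in the Application constructor earlier.
        std::printf("renderable: %d\n", owner.getModel()->isRenderable() ? 1 : 0);
        return 0;
    }

The header's getters now hand out the shared pointer itself, matching the `getSkeletonModel()->...` call sites in Application and MyAvatar.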

View file

@ -145,9 +145,7 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
removeAvatar(avatarIterator.key());
++avatarIterator;
} else {
avatar->startUpdate();
avatar->simulate(deltaTime);
avatar->endUpdate();
++avatarIterator;
avatar->updateRenderItem(pendingChanges);
@ -169,7 +167,6 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
render::PendingChanges pendingChanges;
while (fadingIterator != _avatarFades.end()) {
auto avatar = std::static_pointer_cast<Avatar>(*fadingIterator);
avatar->startUpdate();
avatar->setTargetScale(avatar->getUniformScale() * SHRINK_RATE);
if (avatar->getTargetScale() <= MIN_FADE_SCALE) {
avatar->removeFromScene(*fadingIterator, scene, pendingChanges);
@ -183,7 +180,6 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
avatar->simulate(deltaTime);
++fadingIterator;
}
avatar->endUpdate();
}
scene->enqueuePendingChanges(pendingChanges);
}

View file

@ -1,84 +0,0 @@
//
// AvatarUpdate.cpp
// interface/src/avatar
//
// Created by Howard Stearns on 8/18/15.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
//
#include <DependencyManager.h>
#include "Application.h"
#include "AvatarManager.h"
#include "AvatarUpdate.h"
#include <display-plugins/DisplayPlugin.h>
#include "InterfaceLogging.h"
AvatarUpdate::AvatarUpdate() : GenericThread(), _lastAvatarUpdate(0), _isHMDMode(false) {
setObjectName("Avatar Update"); // GenericThread::initialize uses this to set the thread name.
Settings settings;
const int DEFAULT_TARGET_AVATAR_SIMRATE = 60;
_targetInterval = USECS_PER_SECOND / settings.value("AvatarUpdateTargetSimrate", DEFAULT_TARGET_AVATAR_SIMRATE).toInt();
}
// We could have the constructor call initialize(), but GenericThread::initialize can take parameters.
// Keeping it separately called by the client allows that client to pass those without our
// constructor needing to know about them.
void AvatarUpdate::synchronousProcess() {
// Keep our own updated value, so that our asynchronous code can consult it.
_isHMDMode = qApp->isHMDMode();
QSharedPointer<AvatarManager> manager = DependencyManager::get<AvatarManager>();
MyAvatar* myAvatar = manager->getMyAvatar();
assert(myAvatar);
// transform the head pose from the displayPlugin into avatar coordinates.
glm::mat4 invAvatarMat = glm::inverse(createMatFromQuatAndPos(myAvatar->getOrientation(), myAvatar->getPosition()));
_headPose = invAvatarMat * (myAvatar->getSensorToWorldMatrix() * qApp->getActiveDisplayPlugin()->getHeadPose());
if (!isThreaded()) {
process();
}
}
bool AvatarUpdate::process() {
PerformanceTimer perfTimer("AvatarUpdate");
quint64 start = usecTimestampNow();
quint64 deltaMicroseconds = 10000;
if (_lastAvatarUpdate > 0) {
deltaMicroseconds = start - _lastAvatarUpdate;
} else {
deltaMicroseconds = 10000; // 10 ms
}
float deltaSeconds = (float) deltaMicroseconds / (float) USECS_PER_SECOND;
assert(deltaSeconds > 0.0f);
_lastAvatarUpdate = start;
qApp->setAvatarSimrateSample(1.0f / deltaSeconds);
QSharedPointer<AvatarManager> manager = DependencyManager::get<AvatarManager>();
MyAvatar* myAvatar = manager->getMyAvatar();
//loop through all the other avatars and simulate them...
//gets current lookat data, removes missing avatars, etc.
manager->updateOtherAvatars(deltaSeconds);
myAvatar->startUpdate();
qApp->updateMyAvatarLookAtPosition();
// Sample hardware, update view frustum if needed, and send avatar data to mixer/nodes
manager->updateMyAvatar(deltaSeconds);
myAvatar->endUpdate();
if (!isThreaded()) {
return true;
}
int elapsed = (usecTimestampNow() - start);
int usecToSleep = _targetInterval - elapsed;
if (usecToSleep < 0) {
usecToSleep = 1; // always yield
}
usleep(usecToSleep);
return true;
}

View file

@ -1,41 +0,0 @@
//
// AvatarUpdate.h
// interface/src/avatar
//
// Created by Howard Stearns on 8/18/15.
///
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
//
#ifndef __hifi__AvatarUpdate__
#define __hifi__AvatarUpdate__
#include <QtCore/QObject>
#include <QTimer>
// Home for the avatarUpdate operations (e.g., whether on a separate thread, pipelined in various ways, etc.)
// This might get folded into AvatarManager.
class AvatarUpdate : public GenericThread {
Q_OBJECT
public:
AvatarUpdate();
void synchronousProcess();
private:
virtual bool process(); // No reason for other classes to invoke this.
quint64 _lastAvatarUpdate; // microseconds
quint64 _targetInterval; // microseconds
// Goes away if Application::getActiveDisplayPlugin() and friends are made thread safe:
public:
bool isHMDMode() { return _isHMDMode; }
glm::mat4 getHeadPose() { return _headPose; }
private:
bool _isHMDMode;
glm::mat4 _headPose;
};
#endif /* defined(__hifi__AvatarUpdate__) */

View file

@ -1,58 +0,0 @@
//
// FaceModel.cpp
// interface/src/avatar
//
// Created by Andrzej Kapolka on 9/16/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <glm/gtx/transform.hpp>
#include "Avatar.h"
#include "FaceModel.h"
#include "Head.h"
#include "Menu.h"
FaceModel::FaceModel(Head* owningHead, RigPointer rig) :
Model(rig, nullptr),
_owningHead(owningHead)
{
assert(_rig);
}
void FaceModel::simulate(float deltaTime, bool fullUpdate) {
updateGeometry();
Avatar* owningAvatar = static_cast<Avatar*>(_owningHead->_owningAvatar);
glm::vec3 neckPosition;
if (!owningAvatar->getSkeletonModel().getNeckPosition(neckPosition)) {
neckPosition = owningAvatar->getPosition();
}
setTranslation(neckPosition);
glm::quat neckParentRotation = owningAvatar->getOrientation();
setRotation(neckParentRotation);
setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningHead->getScale());
setPupilDilation(_owningHead->getPupilDilation());
setBlendshapeCoefficients(_owningHead->getBlendshapeCoefficients());
// FIXME - this is very expensive, we shouldn't do it if we don't have to
//invalidCalculatedMeshBoxes();
if (isActive()) {
setOffset(-_geometry->getFBXGeometry().neckPivot);
Model::simulateInternal(deltaTime);
}
}
bool FaceModel::getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const {
if (!isActive()) {
return false;
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
return getJointPositionInWorldFrame(geometry.leftEyeJointIndex, firstEyePosition) &&
getJointPositionInWorldFrame(geometry.rightEyeJointIndex, secondEyePosition);
}

View file

@ -1,38 +0,0 @@
//
// FaceModel.h
// interface/src/avatar
//
// Created by Andrzej Kapolka on 9/16/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_FaceModel_h
#define hifi_FaceModel_h
#include <Model.h>
class Head;
/// A face formed from a linear mix of blendshapes according to a set of coefficients.
class FaceModel : public Model {
Q_OBJECT
public:
FaceModel(Head* owningHead, RigPointer rig);
virtual void simulate(float deltaTime, bool fullUpdate = true);
/// Retrieve the positions of up to two eye meshes.
/// \return whether or not both eye meshes were found
bool getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
private:
Head* _owningHead;
};
#endif // hifi_FaceModel_h

View file

@ -62,20 +62,17 @@ Head::Head(Avatar* owningAvatar) :
_isLookingAtMe(false),
_lookingAtMeStarted(0),
_wasLastLookingAtMe(0),
_faceModel(this, std::make_shared<Rig>()),
_leftEyeLookAtID(DependencyManager::get<GeometryCache>()->allocateID()),
_rightEyeLookAtID(DependencyManager::get<GeometryCache>()->allocateID())
{
}
void Head::init() {
_faceModel.init();
}
void Head::reset() {
_baseYaw = _basePitch = _baseRoll = 0.0f;
_leanForward = _leanSideways = 0.0f;
_faceModel.reset();
}
void Head::simulate(float deltaTime, bool isMine, bool billboard) {
@ -233,12 +230,6 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
}
_leftEyePosition = _rightEyePosition = getPosition();
if (!billboard) {
_faceModel.simulate(deltaTime);
if (!_faceModel.getEyePositions(_leftEyePosition, _rightEyePosition)) {
static_cast<Avatar*>(_owningAvatar)->getSkeletonModel().getEyePositions(_leftEyePosition, _rightEyePosition);
}
}
_eyePosition = calculateAverageEyePosition();
}
@ -391,7 +382,7 @@ glm::quat Head::getCameraOrientation() const {
// to change the driving direction while in Oculus mode. It is used to support driving toward where your
// head is looking. Note that in Oculus mode, your actual camera view and where your head is looking are not
// always the same.
if (qApp->getAvatarUpdater()->isHMDMode()) {
if (qApp->isHMDMode()) {
MyAvatar* myAvatar = dynamic_cast<MyAvatar*>(_owningAvatar);
if (myAvatar) {
return glm::quat_cast(myAvatar->getSensorToWorldMatrix()) * myAvatar->getHMDSensorOrientation();
@ -411,7 +402,7 @@ glm::quat Head::getEyeRotation(const glm::vec3& eyePosition) const {
}
glm::vec3 Head::getScalePivot() const {
return _faceModel.isActive() ? _faceModel.getTranslation() : _position;
return _position;
}
void Head::setFinalPitch(float finalPitch) {

View file

@ -18,7 +18,6 @@
#include <HeadData.h>
#include "FaceModel.h"
#include "world.h"
@ -76,9 +75,6 @@ public:
glm::vec3 getLeftEarPosition() const { return _leftEyePosition + (getRightDirection() * -EYE_EAR_GAP) + (getFrontDirection() * -EYE_EAR_GAP); }
glm::vec3 getMouthPosition() const { return _eyePosition - getUpDirection() * glm::length(_rightEyePosition - _leftEyePosition); }
FaceModel& getFaceModel() { return _faceModel; }
const FaceModel& getFaceModel() const { return _faceModel; }
bool getReturnToCenter() const { return _returnHeadToCenter; } // Do you want head to try to return to center (depends on interface detected)
float getAverageLoudness() const { return _averageLoudness; }
/// \return the point about which scaling occurs.
@ -150,7 +146,6 @@ private:
bool _isLookingAtMe;
quint64 _lookingAtMeStarted;
quint64 _wasLastLookingAtMe;
FaceModel _faceModel;
glm::vec3 _correctedLookAtPosition;
@ -162,8 +157,6 @@ private:
void renderLookatTarget(RenderArgs* renderArgs, glm::vec3 lookatPosition);
void calculateMouthShapes();
void applyEyelidOffset(glm::quat headOrientation);
friend class FaceModel;
};
#endif // hifi_Head_h

View file

@ -167,11 +167,6 @@ MyAvatar::MyAvatar(RigPointer rig) :
}
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
if (recordingInterface->getPlayerUseHeadModel() && dummyAvatar.getFaceModelURL().isValid() &&
(dummyAvatar.getFaceModelURL() != getFaceModelURL())) {
// FIXME
//myAvatar->setFaceModelURL(_dummyAvatar.getFaceModelURL());
}
if (recordingInterface->getPlayerUseSkeletonModel() && dummyAvatar.getSkeletonModelURL().isValid() &&
(dummyAvatar.getSkeletonModelURL() != getSkeletonModelURL())) {
@ -230,14 +225,11 @@ QByteArray MyAvatar::toByteArray(bool cullSmallChanges, bool sendAll) {
}
void MyAvatar::reset(bool andReload) {
if (andReload) {
qApp->setRawAvatarUpdateThreading(false);
}
// Reset dynamic state.
_wasPushing = _isPushing = _isBraking = false;
_follow.deactivate();
_skeletonModel.reset();
_skeletonModel->reset();
getHead()->reset();
_targetVelocity = glm::vec3(0.0f);
setThrust(glm::vec3(0.0f));
@ -343,10 +335,10 @@ void MyAvatar::simulate(float deltaTime) {
{
PerformanceTimer perfTimer("skeleton");
_skeletonModel.simulate(deltaTime);
_skeletonModel->simulate(deltaTime);
}
if (!_skeletonModel.hasSkeleton()) {
if (!_skeletonModel->hasSkeleton()) {
// All the simulation that can be done has been done
return;
}
@ -361,7 +353,7 @@ void MyAvatar::simulate(float deltaTime) {
PerformanceTimer perfTimer("head");
Head* head = getHead();
glm::vec3 headPosition;
if (!_skeletonModel.getHeadPosition(headPosition)) {
if (!_skeletonModel->getHeadPosition(headPosition)) {
headPosition = getPosition();
}
head->setPosition(headPosition);
@ -449,7 +441,7 @@ void MyAvatar::updateSensorToWorldMatrix() {
void MyAvatar::updateFromTrackers(float deltaTime) {
glm::vec3 estimatedPosition, estimatedRotation;
bool inHmd = qApp->getAvatarUpdater()->isHMDMode();
bool inHmd = qApp->isHMDMode();
bool playing = DependencyManager::get<recording::Deck>()->isPlaying();
if (inHmd && playing) {
return;
@ -716,7 +708,7 @@ void MyAvatar::setEnableDebugDrawSensorToWorldMatrix(bool isEnabled) {
void MyAvatar::setEnableMeshVisible(bool isEnabled) {
render::ScenePointer scene = qApp->getMain3DScene();
_skeletonModel.setVisibleInScene(isEnabled, scene);
_skeletonModel->setVisibleInScene(isEnabled, scene);
}
void MyAvatar::setUseAnimPreAndPostRotations(bool isEnabled) {
@ -783,7 +775,7 @@ void MyAvatar::loadData() {
void MyAvatar::saveAttachmentData(const AttachmentData& attachment) const {
Settings settings;
settings.beginGroup("savedAttachmentData");
settings.beginGroup(_skeletonModel.getURL().toString());
settings.beginGroup(_skeletonModel->getURL().toString());
settings.beginGroup(attachment.modelURL.toString());
settings.setValue("jointName", attachment.jointName);
@ -806,7 +798,7 @@ void MyAvatar::saveAttachmentData(const AttachmentData& attachment) const {
AttachmentData MyAvatar::loadAttachmentData(const QUrl& modelURL, const QString& jointName) const {
Settings settings;
settings.beginGroup("savedAttachmentData");
settings.beginGroup(_skeletonModel.getURL().toString());
settings.beginGroup(_skeletonModel->getURL().toString());
settings.beginGroup(modelURL.toString());
AttachmentData attachment;
@ -951,17 +943,17 @@ eyeContactTarget MyAvatar::getEyeContactTarget() {
}
glm::vec3 MyAvatar::getDefaultEyePosition() const {
return getPosition() + getWorldAlignedOrientation() * Quaternions::Y_180 * _skeletonModel.getDefaultEyeModelPosition();
return getPosition() + getWorldAlignedOrientation() * Quaternions::Y_180 * _skeletonModel->getDefaultEyeModelPosition();
}
const float SCRIPT_PRIORITY = 1.0f + 1.0f;
const float RECORDER_PRIORITY = 1.0f + 1.0f;
void MyAvatar::setJointRotations(QVector<glm::quat> jointRotations) {
int numStates = glm::min(_skeletonModel.getJointStateCount(), jointRotations.size());
int numStates = glm::min(_skeletonModel->getJointStateCount(), jointRotations.size());
for (int i = 0; i < numStates; ++i) {
// HACK: ATM only Recorder calls setJointRotations() so we hardcode its priority here
_skeletonModel.setJointRotation(i, true, jointRotations[i], RECORDER_PRIORITY);
_skeletonModel->setJointRotation(i, true, jointRotations[i], RECORDER_PRIORITY);
}
}
@ -1009,18 +1001,11 @@ void MyAvatar::clearJointsData() {
_rig->clearJointStates();
}
void MyAvatar::setFaceModelURL(const QUrl& faceModelURL) {
Avatar::setFaceModelURL(faceModelURL);
render::ScenePointer scene = qApp->getMain3DScene();
getHead()->getFaceModel().setVisibleInScene(_prevShouldDrawHead, scene);
}
void MyAvatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
Avatar::setSkeletonModelURL(skeletonModelURL);
render::ScenePointer scene = qApp->getMain3DScene();
_skeletonModel.setVisibleInScene(true, scene);
_skeletonModel->setVisibleInScene(true, scene);
_headBoneSet.clear();
}
@ -1051,15 +1036,9 @@ void MyAvatar::useFullAvatarURL(const QUrl& fullAvatarURL, const QString& modelN
}
}
if (!getFaceModelURLString().isEmpty()) {
setFaceModelURL(QString());
}
const QString& urlString = fullAvatarURL.toString();
if (urlString.isEmpty() || (fullAvatarURL != getSkeletonModelURL())) {
qApp->setRawAvatarUpdateThreading(false);
setSkeletonModelURL(fullAvatarURL);
qApp->setRawAvatarUpdateThreading();
UserActivityLogger::getInstance().changedModel("skeleton", urlString);
}
sendIdentityPacket();
@ -1088,10 +1067,10 @@ glm::vec3 MyAvatar::getSkeletonPosition() const {
void MyAvatar::rebuildCollisionShape() {
// compute localAABox
float scale = getUniformScale();
float radius = scale * _skeletonModel.getBoundingCapsuleRadius();
float height = scale * _skeletonModel.getBoundingCapsuleHeight() + 2.0f * radius;
float radius = scale * _skeletonModel->getBoundingCapsuleRadius();
float height = scale * _skeletonModel->getBoundingCapsuleHeight() + 2.0f * radius;
glm::vec3 corner(-radius, -0.5f * height, -radius);
corner += scale * _skeletonModel.getBoundingCapsuleOffset();
corner += scale * _skeletonModel->getBoundingCapsuleOffset();
glm::vec3 diagonal(2.0f * radius, height, 2.0f * radius);
_characterController.setLocalBoundingBox(corner, diagonal);
}
@ -1243,7 +1222,7 @@ void MyAvatar::attach(const QString& modelURL, const QString& jointName,
void MyAvatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, float glowLevel) {
if (!_skeletonModel.isRenderable()) {
if (!_skeletonModel->isRenderable()) {
return; // wait until all models are loaded
}
@ -1283,8 +1262,8 @@ void MyAvatar::setVisibleInSceneIfReady(Model* model, render::ScenePointer scene
void MyAvatar::initHeadBones() {
int neckJointIndex = -1;
if (_skeletonModel.getGeometry()) {
neckJointIndex = _skeletonModel.getGeometry()->getFBXGeometry().neckJointIndex;
if (_skeletonModel->getGeometry()) {
neckJointIndex = _skeletonModel->getGeometry()->getFBXGeometry().neckJointIndex;
}
if (neckJointIndex == -1) {
return;
@ -1297,8 +1276,8 @@ void MyAvatar::initHeadBones() {
// fbxJoints only hold links to parents not children, so we have to do a bit of extra work here.
while (q.size() > 0) {
int jointIndex = q.front();
for (int i = 0; i < _skeletonModel.getJointStateCount(); i++) {
if (jointIndex == _skeletonModel.getParentJointIndex(i)) {
for (int i = 0; i < _skeletonModel->getJointStateCount(); i++) {
if (jointIndex == _skeletonModel->getParentJointIndex(i)) {
_headBoneSet.insert(i);
q.push(i);
}
@ -1312,7 +1291,7 @@ void MyAvatar::setAnimGraphUrl(const QUrl& url) {
return;
}
destroyAnimGraph();
_skeletonModel.reset(); // Why is this necessary? Without this, we crash in the next render.
_skeletonModel->reset(); // Why is this necessary? Without this, we crash in the next render.
_animGraphUrl = url;
initAnimGraph();
}
@ -1351,9 +1330,9 @@ void MyAvatar::preRender(RenderArgs* renderArgs) {
render::ScenePointer scene = qApp->getMain3DScene();
const bool shouldDrawHead = shouldRenderHead(renderArgs);
if (_skeletonModel.initWhenReady(scene)) {
if (_skeletonModel->initWhenReady(scene)) {
initHeadBones();
_skeletonModel.setCauterizeBoneSet(_headBoneSet);
_skeletonModel->setCauterizeBoneSet(_headBoneSet);
initAnimGraph();
}
@ -1362,7 +1341,7 @@ void MyAvatar::preRender(RenderArgs* renderArgs) {
auto animSkeleton = _rig->getAnimSkeleton();
// the rig is in the skeletonModel frame
AnimPose xform(glm::vec3(1), _skeletonModel.getRotation(), _skeletonModel.getTranslation());
AnimPose xform(glm::vec3(1), _skeletonModel->getRotation(), _skeletonModel->getTranslation());
if (_enableDebugDrawDefaultPose && animSkeleton) {
glm::vec4 gray(0.2f, 0.2f, 0.2f, 0.2f);
@ -1402,7 +1381,7 @@ void MyAvatar::preRender(RenderArgs* renderArgs) {
DebugDraw::getInstance().updateMyAvatarRot(getOrientation());
if (shouldDrawHead != _prevShouldDrawHead) {
_skeletonModel.setCauterizeBones(!shouldDrawHead);
_skeletonModel->setCauterizeBones(!shouldDrawHead);
}
_prevShouldDrawHead = shouldDrawHead;
}
@ -1493,7 +1472,7 @@ void MyAvatar::updateOrientation(float deltaTime) {
getHead()->setBasePitch(getHead()->getBasePitch() + _driveKeys[PITCH] * _pitchSpeed * deltaTime);
if (qApp->getAvatarUpdater()->isHMDMode()) {
if (qApp->isHMDMode()) {
glm::quat orientation = glm::quat_cast(getSensorToWorldMatrix()) * getHMDSensorOrientation();
glm::quat bodyOrientation = getWorldBodyOrientation();
glm::quat localOrientation = glm::inverse(bodyOrientation) * orientation;

View file

@ -178,8 +178,6 @@ public:
Q_INVOKABLE float getHeadFinalRoll() const { return getHead()->getFinalRoll(); }
Q_INVOKABLE float getHeadFinalPitch() const { return getHead()->getFinalPitch(); }
Q_INVOKABLE float getHeadDeltaPitch() const { return getHead()->getDeltaPitch(); }
Q_INVOKABLE int getFaceBlendCoefNum() const { return getHead()->getFaceModel().getBlendshapeCoefficientsNum(); }
Q_INVOKABLE float getFaceBlendCoef(int index) const { return getHead()->getFaceModel().getBlendshapeCoefficient(index); }
Q_INVOKABLE glm::vec3 getEyePosition() const { return getHead()->getEyePosition(); }
@ -279,7 +277,7 @@ public slots:
void setEnableDebugDrawPosition(bool isEnabled);
void setEnableDebugDrawHandControllers(bool isEnabled);
void setEnableDebugDrawSensorToWorldMatrix(bool isEnabled);
bool getEnableMeshVisible() const { return _skeletonModel.isVisible(); }
bool getEnableMeshVisible() const { return _skeletonModel->isVisible(); }
void setEnableMeshVisible(bool isEnabled);
void setUseAnimPreAndPostRotations(bool isEnabled);
void setEnableInverseKinematics(bool isEnabled);
@ -328,7 +326,6 @@ private:
bool cameraInsideHead() const;
// These are made private for MyAvatar so that you will use the "use" methods instead
virtual void setFaceModelURL(const QUrl& faceModelURL) override;
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL) override;
void setVisibleInSceneIfReady(Model* model, render::ScenePointer scene, bool visiblity);

View file

@ -93,12 +93,12 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
const FBXGeometry& geometry = _geometry->getFBXGeometry();
Rig::HeadParameters headParams;
headParams.enableLean = qApp->getAvatarUpdater()->isHMDMode();
headParams.enableLean = qApp->isHMDMode();
headParams.leanSideways = head->getFinalLeanSideways();
headParams.leanForward = head->getFinalLeanForward();
headParams.torsoTwist = head->getTorsoTwist();
if (qApp->getAvatarUpdater()->isHMDMode()) {
if (qApp->isHMDMode()) {
headParams.isInHMD = true;
// get HMD position from sensor space into world space, and back into rig space

View file

@ -18,6 +18,10 @@
class Avatar;
class MuscleConstraint;
class SkeletonModel;
using SkeletonModelPointer = std::shared_ptr<SkeletonModel>;
using SkeletonModelWeakPointer = std::weak_ptr<SkeletonModel>;
/// A skeleton loaded from a model.
class SkeletonModel : public Model {
Q_OBJECT

View file

@ -79,6 +79,6 @@ void SoftAttachmentModel::updateClusterMatrices(glm::vec3 modelPosition, glm::qu
// post the blender if we're not currently waiting for one to finish
if (geometry.hasBlendedMeshes() && _blendshapeCoefficients != _blendedBlendshapeCoefficients) {
_blendedBlendshapeCoefficients = _blendshapeCoefficients;
DependencyManager::get<ModelBlender>()->noteRequiresBlend(this);
DependencyManager::get<ModelBlender>()->noteRequiresBlend(getThisPointer());
}
}
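
Passing getThisPointer() instead of a raw this lets the blender hold the model weakly and notice if it is destroyed before the blend runs. A minimal sketch of that pattern, assuming getThisPointer() is a thin wrapper around shared_from_this(); the ModelBlender below is a stand-in, not the real class:

#include <cstdio>
#include <memory>
#include <vector>

struct Model : std::enable_shared_from_this<Model> {
    std::shared_ptr<Model> getThisPointer() { return shared_from_this(); }
};

struct ModelBlender {
    std::vector<std::weak_ptr<Model>> pending;
    void noteRequiresBlend(const std::shared_ptr<Model>& model) { pending.push_back(model); }
    void runBlends() {
        for (auto& weakModel : pending) {
            if (auto model = weakModel.lock()) {
                std::printf("blending a live model\n");   // safe: the model is still alive
            } else {
                std::printf("model already destroyed, skipping blend\n");
            }
        }
        pending.clear();
    }
};

int main() {
    ModelBlender blender;
    auto model = std::make_shared<Model>();   // must be shared-owned for shared_from_this to work
    blender.noteRequiresBlend(model->getThisPointer());
    model.reset();                            // the model goes away before the blend runs
    blender.runBlends();                      // the weak_ptr detects that instead of touching freed memory
    return 0;
}
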

View file

@ -56,7 +56,7 @@ void OctreePacketProcessor::processPacket(QSharedPointer<ReceivedMessage> messag
memcpy(buffer.get(), message->getRawMessage() + statsMessageLength, piggybackBytes);
auto newPacket = NLPacket::fromReceivedPacket(std::move(buffer), piggybackBytes, message->getSenderSockAddr());
message = QSharedPointer<ReceivedMessage>::create(*newPacket.release());
message = QSharedPointer<ReceivedMessage>::create(*newPacket);
} else {
// Note... stats packets don't have sequence numbers, so we don't want to send those to trackIncomingVoxelPacket()
return; // bail since no piggyback data
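
The change above (dereferencing the unique_ptr instead of the result of .release()) matters for ownership: releasing the pointer and never deleting it orphans the packet, while the still-owning unique_ptr frees it at scope exit. A simplified sketch of the difference; Packet and ReceivedMessage here are toy types, not the networking classes:

#include <cstdio>
#include <memory>

struct Packet {
    int payload = 7;
    ~Packet() { std::printf("packet freed\n"); }
};

struct ReceivedMessage {
    explicit ReceivedMessage(const Packet& packet) : payload(packet.payload) {}
    int payload;
};

int main() {
    {
        auto newPacket = std::make_unique<Packet>();
        ReceivedMessage leaky(*newPacket.release());  // ownership given up: this Packet is never freed
        (void)leaky;
    }   // nothing printed for this block -- the packet leaked

    {
        auto newPacket = std::make_unique<Packet>();
        ReceivedMessage safe(*newPacket);             // unique_ptr still owns the packet
        (void)safe;
    }   // "packet freed" prints here as newPacket goes out of scope
    return 0;
}
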

View file

@ -36,15 +36,22 @@ void blend(size_t numPoses, const AnimPose* a, const AnimPose* b, float alpha, A
float accumulateTime(float startFrame, float endFrame, float timeScale, float currentFrame, float dt, bool loopFlag,
const QString& id, AnimNode::Triggers& triggersOut) {
const float EPSILON = 0.0001f;
float frame = currentFrame;
const float clampedStartFrame = std::min(startFrame, endFrame);
if (fabsf(clampedStartFrame - endFrame) < 1.0f) {
if (fabsf(clampedStartFrame - endFrame) <= 1.0f) {
// An animation of a single frame should not send loop or done triggers.
frame = endFrame;
} else if (timeScale > 0.0f) {
} else if (timeScale > EPSILON && dt > EPSILON) {
// accumulate time, keeping track of loops and end of animation events.
const float FRAMES_PER_SECOND = 30.0f;
float framesRemaining = (dt * timeScale) * FRAMES_PER_SECOND;
while (framesRemaining > 0.0f) {
// prevent huge dt or timeScale values from causing many trigger events.
uint32_t triggerCount = 0;
const uint32_t MAX_TRIGGER_COUNT = 3;
while (framesRemaining > EPSILON && triggerCount < MAX_TRIGGER_COUNT) {
float framesTillEnd = endFrame - frame;
// when looping, add one frame between start and end.
if (loopFlag) {
@ -62,6 +69,7 @@ float accumulateTime(float startFrame, float endFrame, float timeScale, float cu
frame = endFrame;
framesRemaining = 0.0f;
}
triggerCount++;
} else {
frame += framesRemaining;
framesRemaining = 0.0f;
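
accumulateTime converts dt * timeScale into animation frames at 30 fps and now caps how many loop events one oversized dt can emit. A standalone sketch of that accumulation loop in the same spirit, with invented frame ranges and without the engine's trigger strings:

#include <cstdint>
#include <cstdio>

int main() {
    const float FRAMES_PER_SECOND = 30.0f;
    const float EPSILON = 0.0001f;
    const uint32_t MAX_TRIGGER_COUNT = 3;

    float startFrame = 0.0f, endFrame = 90.0f;   // invented clip range
    float frame = 85.0f;                         // current playback position
    float dt = 0.5f, timeScale = 1.0f;           // half a second elapsed
    bool loopFlag = true;

    float framesRemaining = dt * timeScale * FRAMES_PER_SECOND;  // 15 frames to consume
    uint32_t triggerCount = 0;

    while (framesRemaining > EPSILON && triggerCount < MAX_TRIGGER_COUNT) {
        float framesTillEnd = endFrame - frame;
        if (loopFlag) {
            framesTillEnd += 1.0f;            // one extra frame between end and start when looping
        }
        if (framesRemaining >= framesTillEnd) {
            framesRemaining -= framesTillEnd;
            frame = startFrame;               // wrapped: a "loop" trigger would fire here
            triggerCount++;                   // the cap keeps a huge dt from emitting endless triggers
        } else {
            frame += framesRemaining;
            framesRemaining = 0.0f;
        }
    }
    std::printf("frame = %g, loop triggers fired = %u\n", frame, triggerCount);
    return 0;
}
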

View file

@ -26,14 +26,12 @@ void AbstractAudioInterface::emitAudioPacket(const void* audioData, size_t bytes
SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
if (audioMixer && audioMixer->getActiveSocket()) {
Locker lock(_mutex);
static std::unique_ptr<NLPacket> audioPacket = NLPacket::create(PacketType::Unknown);
auto audioPacket = NLPacket::create(packetType);
quint8 isStereo = bytes == AudioConstants::NETWORK_FRAME_BYTES_STEREO ? 1 : 0;
audioPacket->setType(packetType);
// reset the audio packet so we can start writing
audioPacket->reset();
// write sequence number
audioPacket->writePrimitive(sequenceNumber++);
if (audioPacket->getType() == PacketType::SilentAudioFrame) {
if (packetType == PacketType::SilentAudioFrame) {
// pack num silent samples
quint16 numSilentSamples = isStereo ?
AudioConstants::NETWORK_FRAME_SAMPLES_STEREO :

View file

@ -57,7 +57,6 @@ AvatarData::AvatarData() :
_hasNewJointRotations(true),
_hasNewJointTranslations(true),
_headData(NULL),
_faceModelURL("http://invalid.com"),
_displayNameTargetAlpha(1.0f),
_displayNameAlpha(1.0f),
_billboard(),
@ -87,7 +86,6 @@ const QUrl& AvatarData::defaultFullAvatarModelUrl() {
// There are a number of possible strategies for this set of tools through endRender, below.
void AvatarData::nextAttitude(glm::vec3 position, glm::quat orientation) {
avatarLock.lock();
bool success;
Transform trans = getTransform(success);
if (!success) {
@ -100,33 +98,6 @@ void AvatarData::nextAttitude(glm::vec3 position, glm::quat orientation) {
if (!success) {
qDebug() << "Warning -- AvatarData::nextAttitude failed";
}
avatarLock.unlock();
updateAttitude();
}
void AvatarData::startCapture() {
avatarLock.lock();
}
void AvatarData::endCapture() {
avatarLock.unlock();
}
void AvatarData::startUpdate() {
avatarLock.lock();
}
void AvatarData::endUpdate() {
avatarLock.unlock();
}
void AvatarData::startRenderRun() {
// I'd like to get rid of this and just (un)lock at (end-)startRender.
// But somehow that causes judder in rotations.
avatarLock.lock();
}
void AvatarData::endRenderRun() {
avatarLock.unlock();
}
void AvatarData::startRender() {
updateAttitude();
}
void AvatarData::endRender() {
updateAttitude();
}
@ -989,18 +960,14 @@ bool AvatarData::hasIdentityChangedAfterParsing(const QByteArray& data) {
QDataStream packetStream(data);
QUuid avatarUUID;
QUrl faceModelURL, skeletonModelURL;
QUrl unusedModelURL; // legacy faceModel support
QUrl skeletonModelURL;
QVector<AttachmentData> attachmentData;
QString displayName;
packetStream >> avatarUUID >> faceModelURL >> skeletonModelURL >> attachmentData >> displayName;
packetStream >> avatarUUID >> unusedModelURL >> skeletonModelURL >> attachmentData >> displayName;
bool hasIdentityChanged = false;
if (faceModelURL != _faceModelURL) {
setFaceModelURL(faceModelURL);
hasIdentityChanged = true;
}
if (skeletonModelURL != _skeletonModelURL) {
setSkeletonModelURL(skeletonModelURL);
hasIdentityChanged = true;
@ -1025,7 +992,9 @@ QByteArray AvatarData::identityByteArray() {
QUrl emptyURL("");
const QUrl& urlToSend = (_skeletonModelURL == AvatarData::defaultFullAvatarModelUrl()) ? emptyURL : _skeletonModelURL;
identityStream << QUuid() << _faceModelURL << urlToSend << _attachmentData << _displayName;
QUrl unusedModelURL; // legacy faceModel support
identityStream << QUuid() << unusedModelURL << urlToSend << _attachmentData << _displayName;
return identityData;
}
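
Both sides of the identity packet keep a placeholder QUrl where the face model URL used to travel, so old and new clients still agree on the field order. A minimal QDataStream round trip showing that layout; the field values and example URL are assumptions, and the attachment data field is omitted for brevity:

#include <QtCore/QByteArray>
#include <QtCore/QDataStream>
#include <QtCore/QDebug>
#include <QtCore/QIODevice>
#include <QtCore/QString>
#include <QtCore/QUrl>
#include <QtCore/QUuid>

int main() {
    QByteArray identityData;

    {   // writer: the legacy face-model slot stays in the stream but is always empty now
        QDataStream identityStream(&identityData, QIODevice::WriteOnly);
        QUrl unusedModelURL;                                    // legacy faceModel support
        QUrl skeletonModelURL("http://example.com/avatar.fst"); // example URL, not a real asset
        identityStream << QUuid() << unusedModelURL << skeletonModelURL << QString("Editor");
    }

    {   // reader: older senders may still fill the legacy slot; it is read and ignored
        QDataStream packetStream(identityData);
        QUuid avatarUUID;
        QUrl unusedModelURL, skeletonModelURL;
        QString displayName;
        packetStream >> avatarUUID >> unusedModelURL >> skeletonModelURL >> displayName;
        qDebug() << "skeleton:" << skeletonModelURL << "display name:" << displayName;
    }
    return 0;
}
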
@ -1038,12 +1007,6 @@ bool AvatarData::hasBillboardChangedAfterParsing(const QByteArray& data) {
return true;
}
void AvatarData::setFaceModelURL(const QUrl& faceModelURL) {
_faceModelURL = faceModelURL;
qCDebug(avatars) << "Changing face model for avatar to" << _faceModelURL.toString();
}
void AvatarData::setSkeletonModelURL(const QUrl& skeletonModelURL) {
const QUrl& expanded = skeletonModelURL.isEmpty() ? AvatarData::defaultFullAvatarModelUrl() : skeletonModelURL;
if (expanded == _skeletonModelURL) {
@ -1162,7 +1125,7 @@ void AvatarData::setBillboardFromURL(const QString &billboardURL) {
}
void AvatarData::setBillboardFromNetworkReply() {
QNetworkReply* networkReply = reinterpret_cast<QNetworkReply*>(sender());
QNetworkReply* networkReply = static_cast<QNetworkReply*>(sender());
setBillboard(networkReply->readAll());
networkReply->deleteLater();
}
@ -1445,9 +1408,6 @@ JointData jointDataFromJsonValue(const QJsonValue& json) {
QJsonObject AvatarData::toJson() const {
QJsonObject root;
if (!getFaceModelURL().isEmpty()) {
root[JSON_AVATAR_HEAD_MODEL] = getFaceModelURL().toString();
}
if (!getSkeletonModelURL().isEmpty()) {
root[JSON_AVATAR_BODY_MODEL] = getSkeletonModelURL().toString();
}
@ -1514,15 +1474,6 @@ void AvatarData::fromJson(const QJsonObject& json) {
_headData->fromJson(json[JSON_AVATAR_HEAD].toObject());
}
if (json.contains(JSON_AVATAR_HEAD_MODEL)) {
auto faceModelURL = json[JSON_AVATAR_HEAD_MODEL].toString();
if (faceModelURL != getFaceModelURL().toString()) {
QUrl faceModel(faceModelURL);
if (faceModel.isValid()) {
setFaceModelURL(faceModel);
}
}
}
if (json.contains(JSON_AVATAR_BODY_MODEL)) {
auto bodyModelURL = json[JSON_AVATAR_BODY_MODEL].toString();
if (bodyModelURL != getSkeletonModelURL().toString()) {

View file

@ -158,7 +158,6 @@ class AvatarData : public QObject, public SpatiallyNestable {
Q_PROPERTY(float audioAverageLoudness READ getAudioAverageLoudness WRITE setAudioAverageLoudness)
Q_PROPERTY(QString displayName READ getDisplayName WRITE setDisplayName)
Q_PROPERTY(QString faceModelURL READ getFaceModelURLFromScript WRITE setFaceModelURLFromScript)
Q_PROPERTY(QString skeletonModelURL READ getSkeletonModelURLFromScript WRITE setSkeletonModelURLFromScript)
Q_PROPERTY(QVector<AttachmentData> attachmentData READ getAttachmentData WRITE setAttachmentData)
Q_PROPERTY(QString billboardURL READ getBillboardURL WRITE setBillboardFromURL)
@ -210,14 +209,6 @@ public:
virtual void setOrientation(const glm::quat& orientation) override;
void nextAttitude(glm::vec3 position, glm::quat orientation); // Can be safely called at any time.
void startCapture(); // start/end of the period in which the latest values are about to be captured for camera, etc.
void endCapture();
void startUpdate(); // start/end of update iteration
void endUpdate();
void startRender(); // start/end of rendering of this object
void startRenderRun(); // start/end of entire scene.
void endRenderRun();
void endRender();
virtual void updateAttitude() {} // Tell skeleton mesh about changes
glm::quat getHeadOrientation() const { return _headData->getOrientation(); }
@ -296,11 +287,8 @@ public:
bool hasBillboardChangedAfterParsing(const QByteArray& data);
const QUrl& getFaceModelURL() const { return _faceModelURL; }
QString getFaceModelURLString() const { return _faceModelURL.toString(); }
const QUrl& getSkeletonModelURL() const { return _skeletonModelURL; }
const QString& getDisplayName() const { return _displayName; }
virtual void setFaceModelURL(const QUrl& faceModelURL);
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL);
virtual void setDisplayName(const QString& displayName);
@ -322,9 +310,6 @@ public:
void setBillboardFromURL(const QString& billboardURL);
const QString& getBillboardURL() { return _billboardURL; }
QString getFaceModelURLFromScript() const { return _faceModelURL.toString(); }
void setFaceModelURLFromScript(const QString& faceModelString) { setFaceModelURL(faceModelString); }
QString getSkeletonModelURLFromScript() const { return _skeletonModelURL.toString(); }
void setSkeletonModelURLFromScript(const QString& skeletonModelString) { setSkeletonModelURL(QUrl(skeletonModelString)); }
@ -383,7 +368,6 @@ protected:
HeadData* _headData;
QUrl _faceModelURL; // These need to be empty so that on first time setting them they will not short circuit
QUrl _skeletonModelURL; // These need to be empty so that on first time setting them they will not short circuit
QUrl _skeletonFBXURL;
QVector<AttachmentData> _attachmentData;
@ -411,8 +395,6 @@ protected:
SimpleMovingAverage _averageBytesReceived;
QMutex avatarLock; // Name is redundant, but it aids searches.
// During recording, this holds the starting position, orientation & scale of the recorded avatar
// During playback, it holds the origin from which to play the relative positions in the clip
TransformPointer _recordingBasis;

View file

@ -122,10 +122,6 @@ void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<ReceivedMessage>
// mesh URL for a UUID, find avatar in our list
auto avatar = newOrExistingAvatar(sessionUUID, sendingNode);
if (avatar->getFaceModelURL() != faceMeshURL) {
avatar->setFaceModelURL(faceMeshURL);
}
if (avatar->getSkeletonModelURL().isEmpty() || (avatar->getSkeletonModelURL() != skeletonURL)) {
avatar->setSkeletonModelURL(skeletonURL); // Will expand "" to default and so will not continuously fire
}

View file

@ -420,7 +420,7 @@ const FBXGeometry* EntityTreeRenderer::getGeometryForEntity(EntityItemPointer en
std::shared_ptr<RenderableModelEntityItem> modelEntityItem =
std::dynamic_pointer_cast<RenderableModelEntityItem>(entityItem);
assert(modelEntityItem); // we need this!!!
Model* model = modelEntityItem->getModel(this);
ModelPointer model = modelEntityItem->getModel(this);
if (model) {
result = &model->getGeometry()->getFBXGeometry();
}
@ -428,8 +428,8 @@ const FBXGeometry* EntityTreeRenderer::getGeometryForEntity(EntityItemPointer en
return result;
}
const Model* EntityTreeRenderer::getModelForEntityItem(EntityItemPointer entityItem) {
const Model* result = NULL;
ModelPointer EntityTreeRenderer::getModelForEntityItem(EntityItemPointer entityItem) {
ModelPointer result = nullptr;
if (entityItem->getType() == EntityTypes::Model) {
std::shared_ptr<RenderableModelEntityItem> modelEntityItem =
std::dynamic_pointer_cast<RenderableModelEntityItem>(entityItem);
@ -445,7 +445,7 @@ const FBXGeometry* EntityTreeRenderer::getCollisionGeometryForEntity(EntityItemP
std::shared_ptr<RenderableModelEntityItem> modelEntityItem =
std::dynamic_pointer_cast<RenderableModelEntityItem>(entityItem);
if (modelEntityItem->hasCompoundShapeURL()) {
Model* model = modelEntityItem->getModel(this);
ModelPointer model = modelEntityItem->getModel(this);
if (model) {
const QSharedPointer<NetworkGeometry> collisionNetworkGeometry = model->getCollisionGeometry();
if (collisionNetworkGeometry && collisionNetworkGeometry->isLoaded()) {
@ -461,25 +461,25 @@ void EntityTreeRenderer::processEraseMessage(ReceivedMessage& message, const Sha
std::static_pointer_cast<EntityTree>(_tree)->processEraseMessage(message, sourceNode);
}
Model* EntityTreeRenderer::allocateModel(const QString& url, const QString& collisionUrl) {
Model* model = NULL;
ModelPointer EntityTreeRenderer::allocateModel(const QString& url, const QString& collisionUrl) {
ModelPointer model = nullptr;
// Make sure we only create and delete models on the thread that owns the EntityTreeRenderer
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "allocateModel", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(Model*, model),
Q_RETURN_ARG(ModelPointer, model),
Q_ARG(const QString&, url));
return model;
}
model = new Model(std::make_shared<Rig>());
model = std::make_shared<Model>(std::make_shared<Rig>());
model->init();
model->setURL(QUrl(url));
model->setCollisionModelURL(QUrl(collisionUrl));
return model;
}
Model* EntityTreeRenderer::updateModel(Model* original, const QString& newUrl, const QString& collisionUrl) {
Model* model = NULL;
ModelPointer EntityTreeRenderer::updateModel(ModelPointer original, const QString& newUrl, const QString& collisionUrl) {
ModelPointer model = nullptr;
// The caller shouldn't call us if the URL doesn't need to change. But if they
// do, we just return their original back to them.
@ -490,8 +490,8 @@ Model* EntityTreeRenderer::updateModel(Model* original, const QString& newUrl, c
// Before we do any creating or deleting, make sure we're on our renderer thread
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "updateModel", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(Model*, model),
Q_ARG(Model*, original),
Q_RETURN_ARG(ModelPointer, model),
Q_ARG(ModelPointer, original),
Q_ARG(const QString&, newUrl));
return model;
@ -500,11 +500,11 @@ Model* EntityTreeRenderer::updateModel(Model* original, const QString& newUrl, c
// at this point we know we need to replace the model, and we know we're on the
// correct thread, so we can do all our work.
if (original) {
delete original; // delete the old model...
original.reset(); // delete the old model...
}
// create the model and correctly initialize it with the new url
model = new Model(std::make_shared<Rig>());
model = std::make_shared<Model>(std::make_shared<Rig>());
model->init();
model->setURL(QUrl(newUrl));
model->setCollisionModelURL(QUrl(collisionUrl));
@ -512,19 +512,19 @@ Model* EntityTreeRenderer::updateModel(Model* original, const QString& newUrl, c
return model;
}
void EntityTreeRenderer::releaseModel(Model* model) {
void EntityTreeRenderer::releaseModel(ModelPointer model) {
// If we're not on the renderer's thread, then remember this model to be deleted later
if (QThread::currentThread() != thread()) {
_releasedModels << model;
} else { // otherwise just delete it right away
delete model;
model.reset();
}
}
void EntityTreeRenderer::deleteReleasedModels() {
if (_releasedModels.size() > 0) {
foreach(Model* model, _releasedModels) {
delete model;
foreach(ModelPointer model, _releasedModels) {
model.reset();
}
_releasedModels.clear();
}
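
With ModelPointer being std::shared_ptr<Model>, releaseModel's reset() no longer destroys the model outright; it only drops this reference, and the model survives until the last holder (for example a render item) lets go. A small sketch of that semantic difference versus delete:

#include <cstdio>
#include <memory>

struct Model {
    ~Model() { std::printf("Model destroyed\n"); }
};
using ModelPointer = std::shared_ptr<Model>;

int main() {
    ModelPointer entityCopy = std::make_shared<Model>();
    ModelPointer rendererCopy = entityCopy;       // e.g. still referenced elsewhere in the renderer

    entityCopy.reset();                           // "release" one holder: nothing is destroyed yet
    std::printf("use_count after reset = %ld\n", rendererCopy.use_count());

    rendererCopy.reset();                         // last reference gone: the destructor runs here
    return 0;
}

That is also why deleteReleasedModels can simply reset() each queued pointer without having to know who else still holds the model.
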

View file

@ -29,6 +29,9 @@ class Model;
class ScriptEngine;
class ZoneEntityItem;
class Model;
using ModelPointer = std::shared_ptr<Model>;
using ModelWeakPointer = std::weak_ptr<Model>;
// Generic client side Octree renderer class.
class EntityTreeRenderer : public OctreeRenderer, public EntityItemFBXService, public Dependency {
@ -53,7 +56,7 @@ public:
virtual void init();
virtual const FBXGeometry* getGeometryForEntity(EntityItemPointer entityItem);
virtual const Model* getModelForEntityItem(EntityItemPointer entityItem);
virtual ModelPointer getModelForEntityItem(EntityItemPointer entityItem);
virtual const FBXGeometry* getCollisionGeometryForEntity(EntityItemPointer entityItem);
/// clears the tree
@ -63,13 +66,13 @@ public:
void reloadEntityScripts();
/// if a renderable entity item needs a model, we will allocate it for them
Q_INVOKABLE Model* allocateModel(const QString& url, const QString& collisionUrl);
Q_INVOKABLE ModelPointer allocateModel(const QString& url, const QString& collisionUrl);
/// if a renderable entity item needs to update the URL of a model, we will handle that for the entity
Q_INVOKABLE Model* updateModel(Model* original, const QString& newUrl, const QString& collisionUrl);
Q_INVOKABLE ModelPointer updateModel(ModelPointer original, const QString& newUrl, const QString& collisionUrl);
/// if a renderable entity item is done with a model, it should return it to us
void releaseModel(Model* model);
void releaseModel(ModelPointer model);
void deleteReleasedModels();
@ -129,7 +132,7 @@ private:
void applyZonePropertiesToScene(std::shared_ptr<ZoneEntityItem> zone);
void checkAndCallPreload(const EntityItemID& entityID, const bool reload = false);
QList<Model*> _releasedModels;
QList<ModelPointer> _releasedModels;
RayToEntityIntersectionResult findRayIntersectionWorker(const PickRay& ray, Octree::lockType lockType,
bool precisionPicking, const QVector<EntityItemID>& entityIdsToInclude = QVector<EntityItemID>(),
const QVector<EntityItemID>& entityIdsToDiscard = QVector<EntityItemID>());

View file

@ -463,8 +463,8 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
}
}
Model* RenderableModelEntityItem::getModel(EntityTreeRenderer* renderer) {
Model* result = NULL;
ModelPointer RenderableModelEntityItem::getModel(EntityTreeRenderer* renderer) {
ModelPointer result = nullptr;
if (!renderer) {
return result;
@ -506,7 +506,7 @@ Model* RenderableModelEntityItem::getModel(EntityTreeRenderer* renderer) {
// release interest
_myRenderer->releaseModel(_model);
result = _model = NULL;
result = _model = nullptr;
_needsInitialSimulation = true;
}

View file

@ -57,7 +57,7 @@ public:
BoxFace& face, glm::vec3& surfaceNormal,
void** intersectedObject, bool precisionPicking) const override;
Model* getModel(EntityTreeRenderer* renderer);
ModelPointer getModel(EntityTreeRenderer* renderer);
virtual bool needsToCallUpdate() const override;
virtual void update(const quint64& now) override;
@ -87,7 +87,7 @@ private:
QVariantMap parseTexturesToMap(QString textures);
void remapTextures();
Model* _model = nullptr;
ModelPointer _model = nullptr;
bool _needsInitialSimulation = true;
bool _needsModelReload = true;
EntityTreeRenderer* _myRenderer = nullptr;

View file

@ -57,10 +57,13 @@ void EntityTree::eraseAllOctreeElements(bool createNewRoot) {
if (_simulation) {
_simulation->clearEntities();
}
foreach (EntityTreeElementPointer element, _entityToElementMap) {
element->cleanupEntities();
{
QWriteLocker locker(&_entityToElementLock);
foreach(EntityTreeElementPointer element, _entityToElementMap) {
element->cleanupEntities();
}
_entityToElementMap.clear();
}
_entityToElementMap.clear();
Octree::eraseAllOctreeElements(createNewRoot);
resetClientEditStats();
@ -1229,13 +1232,13 @@ int EntityTree::processEraseMessageDetails(const QByteArray& dataByteArray, cons
}
EntityTreeElementPointer EntityTree::getContainingElement(const EntityItemID& entityItemID) /*const*/ {
// TODO: do we need to make this thread safe? Or is it acceptable as is
QReadLocker locker(&_entityToElementLock);
EntityTreeElementPointer element = _entityToElementMap.value(entityItemID);
return element;
}
void EntityTree::setContainingElement(const EntityItemID& entityItemID, EntityTreeElementPointer element) {
// TODO: do we need to make this thread safe? Or is it acceptable as is
QWriteLocker locker(&_entityToElementLock);
if (element) {
_entityToElementMap[entityItemID] = element;
} else {
@ -1245,6 +1248,7 @@ void EntityTree::setContainingElement(const EntityItemID& entityItemID, EntityTr
void EntityTree::debugDumpMap() {
qCDebug(entities) << "EntityTree::debugDumpMap() --------------------------";
QReadLocker locker(&_entityToElementLock);
QHashIterator<EntityItemID, EntityTreeElementPointer> i(_entityToElementMap);
while (i.hasNext()) {
i.next();
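
The new _entityToElementLock answers the old "do we need to make this thread safe?" TODO: lookups take a shared QReadLocker while setContainingElement and the erase path take an exclusive QWriteLocker. A compact sketch of that reader/writer pattern with a simplified key and map type (not the real EntityItemID map):

#include <QtCore/QDebug>
#include <QtCore/QHash>
#include <QtCore/QReadWriteLock>
#include <QtCore/QString>

static QReadWriteLock entityToElementLock;
static QHash<int, QString> entityToElementMap;    // stand-in for the real ID -> element map

static void setContainingElement(int entityID, const QString& element) {
    QWriteLocker locker(&entityToElementLock);    // exclusive while mutating the map
    entityToElementMap[entityID] = element;
}

static QString getContainingElement(int entityID) {
    QReadLocker locker(&entityToElementLock);     // shared: many readers can hold this at once
    return entityToElementMap.value(entityID);
}

int main() {
    setContainingElement(42, "leaf-element");
    qDebug() << "entity 42 lives in" << getContainingElement(42);
    return 0;
}
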

View file

@ -25,6 +25,9 @@ typedef std::shared_ptr<EntityTree> EntityTreePointer;
#include "DeleteEntityOperator.h"
class Model;
using ModelPointer = std::shared_ptr<Model>;
using ModelWeakPointer = std::weak_ptr<Model>;
class EntitySimulation;
class NewlyCreatedEntityHook {
@ -35,7 +38,7 @@ public:
class EntityItemFBXService {
public:
virtual const FBXGeometry* getGeometryForEntity(EntityItemPointer entityItem) = 0;
virtual const Model* getModelForEntityItem(EntityItemPointer entityItem) = 0;
virtual ModelPointer getModelForEntityItem(EntityItemPointer entityItem) = 0;
virtual const FBXGeometry* getCollisionGeometryForEntity(EntityItemPointer entityItem) = 0;
};
@ -171,7 +174,7 @@ public:
const FBXGeometry* getGeometryForEntity(EntityItemPointer entityItem) {
return _fbxService ? _fbxService->getGeometryForEntity(entityItem) : NULL;
}
const Model* getModelForEntityItem(EntityItemPointer entityItem) {
ModelPointer getModelForEntityItem(EntityItemPointer entityItem) {
return _fbxService ? _fbxService->getModelForEntityItem(entityItem) : NULL;
}
@ -290,6 +293,7 @@ protected:
EntityItemFBXService* _fbxService;
mutable QReadWriteLock _entityToElementLock;
QHash<EntityItemID, EntityTreeElementPointer> _entityToElementMap;
EntitySimulation* _simulation;

View file

@ -48,9 +48,11 @@ QStringList FBXGeometry::getJointNames() const {
}
bool FBXGeometry::hasBlendedMeshes() const {
foreach (const FBXMesh& mesh, meshes) {
if (!mesh.blendshapes.isEmpty()) {
return true;
if (!meshes.isEmpty()) {
foreach (const FBXMesh& mesh, meshes) {
if (!mesh.blendshapes.isEmpty()) {
return true;
}
}
}
return false;

View file

@ -271,6 +271,7 @@ QNetworkReply* OBJReader::request(QUrl& url, bool isTest) {
QNetworkRequest netRequest(url);
QNetworkReply* netReply = isTest ? networkAccessManager.head(netRequest) : networkAccessManager.get(netRequest);
if (!qApp || aboutToQuit) {
netReply->deleteLater();
return nullptr;
}
QEventLoop loop; // Create an event loop that will quit when we get the finished signal

View file

@ -15,8 +15,9 @@
#include <QtQuick/QQuickItem>
#include <QtQuick/QQuickWindow>
#include <QtQuick/QQuickRenderControl>
#include <QtCore/QWaitCondition>
#include <QtCore/QThread>
#include <QtCore/QMutex>
#include <QtCore/QWaitCondition>
#include <shared/NsightHelpers.h>
#include <PerfStat.h>
@ -47,11 +48,10 @@ protected:
private:
QWindow* _renderWindow{ nullptr };
friend class OffscreenQmlRenderer;
friend class OffscreenQmlRenderThread;
friend class OffscreenQmlSurface;
};
Q_DECLARE_LOGGING_CATEGORY(offscreenFocus)
Q_LOGGING_CATEGORY(offscreenFocus, "hifi.offscreen.focus")
@ -60,268 +60,281 @@ static const QEvent::Type RENDER = QEvent::Type(QEvent::User + 2);
static const QEvent::Type RESIZE = QEvent::Type(QEvent::User + 3);
static const QEvent::Type STOP = QEvent::Type(QEvent::User + 4);
class OffscreenQmlRenderer : public OffscreenGLCanvas {
friend class OffscreenQmlSurface;
class OffscreenQmlRenderThread : public QThread {
public:
OffscreenQmlRenderThread(OffscreenQmlSurface* surface, QOpenGLContext* shareContext);
virtual ~OffscreenQmlRenderThread() = default;
OffscreenQmlRenderer(OffscreenQmlSurface* surface, QOpenGLContext* shareContext) : _surface(surface) {
if (!OffscreenGLCanvas::create(shareContext)) {
static const char* error = "Failed to create OffscreenGLCanvas";
qWarning() << error;
throw error;
};
virtual void run() override;
virtual bool event(QEvent *e) override;
_renderControl = new QMyQuickRenderControl();
protected:
class Queue : public QQueue<QEvent*> {
public:
void add(QEvent::Type type);
QEvent* take();
// Create a QQuickWindow that is associated with our render control. Note that this
// window never gets created or shown, meaning that it will never get an underlying
// native (platform) window.
QQuickWindow::setDefaultAlphaBuffer(true);
// Weirdness... QQuickWindow NEEDS to be created on the rendering thread, or it will refuse to render
// because it retains an internal 'context' object that retains the thread it was created on,
// regardless of whether you later move it to another thread.
_quickWindow = new QQuickWindow(_renderControl);
_quickWindow->setColor(QColor(255, 255, 255, 0));
_quickWindow->setFlags(_quickWindow->flags() | static_cast<Qt::WindowFlags>(Qt::WA_TranslucentBackground));
// Qt 5.5
_renderControl->prepareThread(&_thread);
getContextObject()->moveToThread(&_thread);
moveToThread(&_thread);
_thread.setObjectName("QML Thread");
_thread.start();
post(INIT);
}
bool event(QEvent *e) {
switch (int(e->type())) {
case INIT:
{
QMutexLocker lock(&_mutex);
init();
}
return true;
case RENDER:
{
QMutexLocker lock(&_mutex);
render(&lock);
}
return true;
case RESIZE:
{
QMutexLocker lock(&_mutex);
resize();
}
return true;
case STOP:
{
QMutexLocker lock(&_mutex);
cleanup();
}
return true;
default:
return QObject::event(e);
}
}
void post(const QEvent::Type& type) {
QCoreApplication::postEvent(this, new QEvent(type));
}
private:
QMutex _mutex;
QWaitCondition _waitCondition;
bool _isWaiting{ false };
};
friend class OffscreenQmlSurface;
Queue _queue;
QMutex _mutex;
QWaitCondition _waitCondition;
private:
// Event-driven methods
void init();
void render();
void resize();
void cleanup();
void setupFbo() {
using namespace oglplus;
_textures.setSize(_size);
_depthStencil.reset(new Renderbuffer());
Context::Bound(Renderbuffer::Target::Renderbuffer, *_depthStencil)
.Storage(
PixelDataInternalFormat::DepthComponent,
_size.x, _size.y);
_fbo.reset(new Framebuffer());
_fbo->Bind(Framebuffer::Target::Draw);
_fbo->AttachRenderbuffer(Framebuffer::Target::Draw,
FramebufferAttachment::Depth, *_depthStencil);
DefaultFramebuffer().Bind(Framebuffer::Target::Draw);
}
void init() {
connect(_renderControl, &QQuickRenderControl::renderRequested, _surface, &OffscreenQmlSurface::requestRender);
connect(_renderControl, &QQuickRenderControl::sceneChanged, _surface, &OffscreenQmlSurface::requestUpdate);
if (!makeCurrent()) {
qWarning("Failed to make context current on render thread");
return;
}
_renderControl->initialize(getContext());
setupFbo();
_escrow.setRecycler([this](GLuint texture){
_textures.recycleTexture(texture);
});
doneCurrent();
}
void cleanup() {
if (!makeCurrent()) {
qFatal("Failed to make context current on render thread");
return;
}
_renderControl->invalidate();
_fbo.reset();
_depthStencil.reset();
_textures.clear();
doneCurrent();
getContextObject()->moveToThread(QCoreApplication::instance()->thread());
_thread.quit();
_cond.wakeOne();
}
void resize() {
// Update our members
if (_quickWindow) {
_quickWindow->setGeometry(QRect(QPoint(), _newSize));
_quickWindow->contentItem()->setSize(_newSize);
}
// Qt bug in 5.4 forces this check of pixel ratio,
// even though we're rendering offscreen.
qreal pixelRatio = 1.0;
if (_renderControl && _renderControl->_renderWindow) {
pixelRatio = _renderControl->_renderWindow->devicePixelRatio();
}
uvec2 newOffscreenSize = toGlm(_newSize * pixelRatio);
_textures.setSize(newOffscreenSize);
if (newOffscreenSize == _size) {
return;
}
_size = newOffscreenSize;
// Clear out any fbos with the old size
if (!makeCurrent()) {
qWarning("Failed to make context current on render thread");
return;
}
qDebug() << "Offscreen UI resizing to " << _newSize.width() << "x" << _newSize.height() << " with pixel ratio " << pixelRatio;
setupFbo();
doneCurrent();
}
void render(QMutexLocker *lock) {
if (_surface->_paused) {
return;
}
if (!makeCurrent()) {
qWarning("Failed to make context current on render thread");
return;
}
_renderControl->sync();
_cond.wakeOne();
lock->unlock();
using namespace oglplus;
_quickWindow->setRenderTarget(GetName(*_fbo), QSize(_size.x, _size.y));
try {
PROFILE_RANGE("qml_render")
TexturePtr texture = _textures.getNextTexture();
_fbo->Bind(Framebuffer::Target::Draw);
_fbo->AttachTexture(Framebuffer::Target::Draw, FramebufferAttachment::Color, *texture, 0);
_fbo->Complete(Framebuffer::Target::Draw);
{
_renderControl->render();
// FIXME The web browsers seem to be leaving GL in an error state.
// Need a debug context with sync logging to figure out why.
// for now just clear the errors
glGetError();
}
// FIXME probably unnecessary
DefaultFramebuffer().Bind(Framebuffer::Target::Draw);
_quickWindow->resetOpenGLState();
_escrow.submit(GetName(*texture));
_lastRenderTime = usecTimestampNow();
} catch (std::runtime_error& error) {
qWarning() << "Failed to render QML " << error.what();
}
}
void aboutToQuit() {
QMutexLocker lock(&_quitMutex);
_quit = true;
}
static const uint64_t MAX_SHUTDOWN_WAIT_SECS = 2;
void stop() {
if (_thread.isRunning()) {
qDebug() << "Stopping QML render thread " << _thread.currentThreadId();
{
QMutexLocker lock(&_mutex);
post(STOP);
}
auto start = usecTimestampNow();
auto now = usecTimestampNow();
bool shutdownClean = false;
while (now - start < (MAX_SHUTDOWN_WAIT_SECS * USECS_PER_SECOND)) {
QMutexLocker lock(&_mutex);
if (_cond.wait(&_mutex, MSECS_PER_SECOND)) {
shutdownClean = true;
break;
}
now = usecTimestampNow();
}
if (!shutdownClean) {
qWarning() << "Failed to shut down the QML render thread";
}
} else {
qDebug() << "QML render thread already completed";
}
}
bool allowNewFrame(uint8_t fps) {
auto minRenderInterval = USECS_PER_SECOND / fps;
auto lastInterval = usecTimestampNow() - _lastRenderTime;
return (lastInterval > minRenderInterval);
}
// Helper methods
void setupFbo();
bool allowNewFrame(uint8_t fps);
// Rendering members
OffscreenGLCanvas _canvas;
OffscreenQmlSurface* _surface{ nullptr };
QQuickWindow* _quickWindow{ nullptr };
QMyQuickRenderControl* _renderControl{ nullptr };
QThread _thread;
QMutex _mutex;
QWaitCondition _cond;
QMutex _quitMutex;
QSize _newSize;
bool _quit;
FramebufferPtr _fbo;
RenderbufferPtr _depthStencil;
uvec2 _size{ 1920, 1080 };
uint64_t _lastRenderTime{ 0 };
TextureRecycler _textures;
GLTextureEscrow _escrow;
uint64_t _lastRenderTime{ 0 };
uvec2 _size{ 1920, 1080 };
QSize _newSize;
bool _quit{ false };
};
void OffscreenQmlRenderThread::Queue::add(QEvent::Type type) {
QMutexLocker locker(&_mutex);
enqueue(new QEvent(type));
if (_isWaiting) {
_waitCondition.wakeOne();
}
}
QEvent* OffscreenQmlRenderThread::Queue::take() {
QMutexLocker locker(&_mutex);
while (isEmpty()) {
_isWaiting = true;
_waitCondition.wait(&_mutex);
_isWaiting = false;
}
QEvent* e = dequeue();
return e;
}
OffscreenQmlRenderThread::OffscreenQmlRenderThread(OffscreenQmlSurface* surface, QOpenGLContext* shareContext) : _surface(surface) {
if (!_canvas.create(shareContext)) {
static const char* error = "Failed to create OffscreenGLCanvas";
qWarning() << error;
throw error;
};
_renderControl = new QMyQuickRenderControl();
QQuickWindow::setDefaultAlphaBuffer(true);
// Create a QQuickWindow that is associated with our render control.
// This window never gets created or shown, meaning that it will never get an underlying native (platform) window.
// NOTE: Must be created on the main thread so that OffscreenQmlSurface can send it events
// NOTE: Must be created on the rendering thread or it will refuse to render,
// so we wait until after its ctor to move object/context to this thread.
_quickWindow = new QQuickWindow(_renderControl);
_quickWindow->setColor(QColor(255, 255, 255, 0));
_quickWindow->setFlags(_quickWindow->flags() | static_cast<Qt::WindowFlags>(Qt::WA_TranslucentBackground));
// We can prepare, but we must wait to start() the thread until after the ctor
_renderControl->prepareThread(this);
_canvas.getContextObject()->moveToThread(this);
moveToThread(this);
_queue.add(INIT);
}
void OffscreenQmlRenderThread::run() {
while (!_quit) {
QEvent* e = _queue.take();
event(e);
delete e;
}
}
bool OffscreenQmlRenderThread::event(QEvent *e) {
switch (int(e->type())) {
case INIT:
init();
return true;
case RENDER:
render();
return true;
case RESIZE:
resize();
return true;
case STOP:
cleanup();
return true;
default:
return QObject::event(e);
}
}
void OffscreenQmlRenderThread::setupFbo() {
using namespace oglplus;
_textures.setSize(_size);
_depthStencil.reset(new Renderbuffer());
Context::Bound(Renderbuffer::Target::Renderbuffer, *_depthStencil)
.Storage(
PixelDataInternalFormat::DepthComponent,
_size.x, _size.y);
_fbo.reset(new Framebuffer());
_fbo->Bind(Framebuffer::Target::Draw);
_fbo->AttachRenderbuffer(Framebuffer::Target::Draw,
FramebufferAttachment::Depth, *_depthStencil);
DefaultFramebuffer().Bind(Framebuffer::Target::Draw);
}
void OffscreenQmlRenderThread::init() {
connect(_renderControl, &QQuickRenderControl::renderRequested, _surface, &OffscreenQmlSurface::requestRender);
connect(_renderControl, &QQuickRenderControl::sceneChanged, _surface, &OffscreenQmlSurface::requestUpdate);
if (!_canvas.makeCurrent()) {
qWarning("Failed to make context current on render thread");
return;
}
_renderControl->initialize(_canvas.getContext());
setupFbo();
_escrow.setRecycler([this](GLuint texture){
_textures.recycleTexture(texture);
});
_canvas.doneCurrent();
}
void OffscreenQmlRenderThread::cleanup() {
if (!_canvas.makeCurrent()) {
qFatal("Failed to make context current on render thread");
return;
}
_renderControl->invalidate();
_fbo.reset();
_depthStencil.reset();
_textures.clear();
_canvas.doneCurrent();
_canvas.getContextObject()->moveToThread(QCoreApplication::instance()->thread());
_quit = true;
}
void OffscreenQmlRenderThread::resize() {
// Lock _newSize changes
QMutexLocker locker(&_mutex);
// Update our members
if (_quickWindow) {
_quickWindow->setGeometry(QRect(QPoint(), _newSize));
_quickWindow->contentItem()->setSize(_newSize);
}
// Qt bug in 5.4 forces this check of pixel ratio,
// even though we're rendering offscreen.
qreal pixelRatio = 1.0;
if (_renderControl && _renderControl->_renderWindow) {
pixelRatio = _renderControl->_renderWindow->devicePixelRatio();
}
uvec2 newOffscreenSize = toGlm(_newSize * pixelRatio);
_textures.setSize(newOffscreenSize);
if (newOffscreenSize == _size) {
return;
}
_size = newOffscreenSize;
// Clear out any fbos with the old size
if (!_canvas.makeCurrent()) {
qWarning("Failed to make context current on render thread");
return;
}
qDebug() << "Offscreen UI resizing to " << _newSize.width() << "x" << _newSize.height() << " with pixel ratio " << pixelRatio;
locker.unlock();
setupFbo();
_canvas.doneCurrent();
}
void OffscreenQmlRenderThread::render() {
if (_surface->_paused) {
_waitCondition.wakeOne();
return;
}
if (!_canvas.makeCurrent()) {
qWarning("Failed to make context current on render thread");
return;
}
QMutexLocker locker(&_mutex);
_renderControl->sync();
_waitCondition.wakeOne();
locker.unlock();
using namespace oglplus;
_quickWindow->setRenderTarget(GetName(*_fbo), QSize(_size.x, _size.y));
try {
PROFILE_RANGE("qml_render")
TexturePtr texture = _textures.getNextTexture();
_fbo->Bind(Framebuffer::Target::Draw);
_fbo->AttachTexture(Framebuffer::Target::Draw, FramebufferAttachment::Color, *texture, 0);
_fbo->Complete(Framebuffer::Target::Draw);
{
_renderControl->render();
// FIXME The web browsers seem to be leaving GL in an error state.
// Need a debug context with sync logging to figure out why.
// for now just clear the errors
glGetError();
}
// FIXME probably unnecessary
DefaultFramebuffer().Bind(Framebuffer::Target::Draw);
_quickWindow->resetOpenGLState();
_escrow.submit(GetName(*texture));
_lastRenderTime = usecTimestampNow();
} catch (std::runtime_error& error) {
qWarning() << "Failed to render QML " << error.what();
}
}
bool OffscreenQmlRenderThread::allowNewFrame(uint8_t fps) {
auto minRenderInterval = USECS_PER_SECOND / fps;
auto lastInterval = usecTimestampNow() - _lastRenderTime;
return (lastInterval > minRenderInterval);
}
OffscreenQmlSurface::OffscreenQmlSurface() {
}
static const uint64_t MAX_SHUTDOWN_WAIT_SECS = 2;
OffscreenQmlSurface::~OffscreenQmlSurface() {
QObject::disconnect(&_updateTimer);
QObject::disconnect(qApp);
_renderer->stop();
qDebug() << "Stopping QML render thread " << _renderer->currentThreadId();
_renderer->_queue.add(STOP);
if (!_renderer->wait(MAX_SHUTDOWN_WAIT_SECS * USECS_PER_SECOND)) {
qWarning() << "Failed to shut down the QML render thread";
}
delete _rootItem;
delete _renderer;
delete _qmlComponent;
@ -330,15 +343,16 @@ OffscreenQmlSurface::~OffscreenQmlSurface() {
void OffscreenQmlSurface::onAboutToQuit() {
QObject::disconnect(&_updateTimer);
// Disconnecting the update timer is insufficient, since the renderer
// may be attempting to render already, so we need to explicitly tell the renderer
// to stop
_renderer->aboutToQuit();
}
void OffscreenQmlSurface::create(QOpenGLContext* shareContext) {
_renderer = new OffscreenQmlRenderer(this, shareContext);
_renderer = new OffscreenQmlRenderThread(this, shareContext);
_renderer->moveToThread(_renderer);
_renderer->setObjectName("QML Renderer Thread");
_renderer->start();
_renderer->_renderControl->_renderWindow = _proxyWindow;
// Create a QML engine.
_qmlEngine = new QQmlEngine;
if (!_qmlEngine->incubationController()) {
@ -387,11 +401,11 @@ void OffscreenQmlSurface::resize(const QSize& newSize_) {
}
{
QMutexLocker _locker(&(_renderer->_mutex));
QMutexLocker locker(&(_renderer->_mutex));
_renderer->_newSize = newSize;
}
_renderer->post(RESIZE);
_renderer->_queue.add(RESIZE);
}
QQuickItem* OffscreenQmlSurface::getRootItem() {
@ -491,14 +505,11 @@ void OffscreenQmlSurface::updateQuick() {
}
if (_render) {
QMutexLocker lock(&(_renderer->_mutex));
_renderer->post(RENDER);
while (!_renderer->_cond.wait(&(_renderer->_mutex), 100)) {
if (_renderer->_quit) {
return;
}
qApp->processEvents();
}
// Lock the GUI size while syncing
QMutexLocker locker(&(_renderer->_mutex));
_renderer->_queue.add(RENDER);
_renderer->_waitCondition.wait(&(_renderer->_mutex));
_render = false;
}
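
The render thread above is driven by a tiny blocking queue: add() enqueues an event and wakes the consumer, take() sleeps on a wait condition until work arrives, and run() loops over take() until STOP is handled. A minimal sketch of the same producer/consumer shape, using standard-library primitives in place of QMutex and QWaitCondition; the integer event codes are placeholders:

#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <queue>
#include <thread>

class EventQueue {
public:
    void add(int type) {
        std::lock_guard<std::mutex> lock(_mutex);
        _events.push(type);
        _condition.notify_one();                  // wake the consumer if it is waiting
    }
    int take() {
        std::unique_lock<std::mutex> lock(_mutex);
        _condition.wait(lock, [this] { return !_events.empty(); });
        int type = _events.front();
        _events.pop();
        return type;
    }
private:
    std::mutex _mutex;
    std::condition_variable _condition;
    std::queue<int> _events;
};

int main() {
    const int INIT = 1, RENDER = 2, STOP = 3;
    EventQueue queue;
    std::thread consumer([&] {
        for (;;) {
            int type = queue.take();              // blocks until an event is queued
            std::printf("handling event %d\n", type);
            if (type == STOP) { break; }
        }
    });
    queue.add(INIT);
    queue.add(RENDER);
    queue.add(STOP);
    consumer.join();
    return 0;
}
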

View file

@ -26,7 +26,7 @@ class QQmlComponent;
class QQuickWindow;
class QQuickItem;
class OffscreenQmlRenderer;
class OffscreenQmlRenderThread;
class OffscreenQmlSurface : public QObject {
Q_OBJECT
@ -84,8 +84,8 @@ private slots:
void updateQuick();
private:
friend class OffscreenQmlRenderer;
OffscreenQmlRenderer* _renderer{ nullptr };
friend class OffscreenQmlRenderThread;
OffscreenQmlRenderThread* _renderer{ nullptr };
QQmlEngine* _qmlEngine{ nullptr };
QQmlComponent* _qmlComponent{ nullptr };
QQuickItem* _rootItem{ nullptr };

View file

@ -173,7 +173,7 @@ public:
const SkyboxPointer& getSkybox() const { valid(); return _skybox; }
protected:
BackgroundMode _backgroundMode = SKY_BOX;
BackgroundMode _backgroundMode = SKY_DOME;
LightPointer _sunLight;
mutable SkyboxPointer _skybox;

View file

@ -22,13 +22,6 @@ std::unique_ptr<NLPacketList> NLPacketList::create(PacketType packetType, QByteA
return nlPacketList;
}
std::unique_ptr<NLPacketList> NLPacketList::fromPacketList(std::unique_ptr<PacketList> packetList) {
auto nlPacketList = std::unique_ptr<NLPacketList>(new NLPacketList(std::move(*packetList.release())));
nlPacketList->open(ReadOnly);
return nlPacketList;
}
NLPacketList::NLPacketList(PacketType packetType, QByteArray extendedHeader, bool isReliable, bool isOrdered) :
PacketList(packetType, extendedHeader, isReliable, isOrdered)
{

View file

@ -21,8 +21,6 @@ public:
static std::unique_ptr<NLPacketList> create(PacketType packetType, QByteArray extendedHeader = QByteArray(),
bool isReliable = false, bool isOrdered = false);
static std::unique_ptr<NLPacketList> fromPacketList(std::unique_ptr<udt::PacketList>);
PacketVersion getVersion() const { return _packetVersion; }
const QUuid& getSourceID() const { return _sourceID; }

View file

@ -76,7 +76,7 @@ QUrl ResourceManager::normalizeURL(const QUrl& originalUrl) {
}
void ResourceManager::init() {
_thread.setObjectName("Ressource Manager Thread");
_thread.setObjectName("Resource Manager Thread");
auto assetClient = DependencyManager::set<AssetClient>();
assetClient->moveToThread(&_thread);

View file

@ -62,10 +62,10 @@ void DefaultCC::onACK(SequenceNumber ackNum) {
if (_slowStart) {
// we are in slow start phase - increase the congestion window size by the number of packets just ACKed
_congestionWindowSize += seqlen(_slowStartLastACK, ackNum);
_congestionWindowSize += seqlen(_lastACK, ackNum);
// update the last ACK
_slowStartLastACK = ackNum;
_lastACK = ackNum;
// check if we can get out of slow start (is our new congestion window size bigger than the max)
if (_congestionWindowSize > _maxCongestionWindowSize) {
@ -137,19 +137,20 @@ void DefaultCC::onLoss(SequenceNumber rangeStart, SequenceNumber rangeEnd) {
return;
}
}
_loss = true;
static const double INTER_PACKET_ARRIVAL_INCREASE = 1.125;
static const int MAX_DECREASES_PER_CONGESTION_EPOCH = 5;
// check if this NAK starts a new congestion period - this will be the case if the
// NAK received occurred for a packet sent after the last decrease
if (rangeStart > _lastDecreaseMaxSeq) {
_lastDecreasePeriod = _packetSendPeriod;
_packetSendPeriod = ceil(_packetSendPeriod * INTER_PACKET_ARRIVAL_INCREASE);
// use EWMA to update the average number of NAKs per congestion
static const double NAK_EWMA_ALPHA = 0.125;
_avgNAKNum = (int)ceil(_avgNAKNum * (1 - NAK_EWMA_ALPHA) + _nakCount * NAK_EWMA_ALPHA);
@ -159,7 +160,7 @@ void DefaultCC::onLoss(SequenceNumber rangeStart, SequenceNumber rangeEnd) {
_decreaseCount = 1;
_lastDecreaseMaxSeq = _sendCurrSeqNum;
if (_avgNAKNum < 1) {
_randomDecreaseThreshold = 1;
} else {
@ -179,17 +180,16 @@ void DefaultCC::onLoss(SequenceNumber rangeStart, SequenceNumber rangeEnd) {
}
}
// Note: This isn't currently being called by anything since we, unlike UDT, don't have TTL on our packets
void DefaultCC::onTimeout() {
if (_slowStart) {
stopSlowStart();
} else {
// UDT used to do the following on timeout if not in slow start - we should check if it could be helpful
// _lastDecreasePeriod = _packetSendPeriod;
// _packetSendPeriod = ceil(_packetSendPeriod * 2);
_lastDecreasePeriod = _packetSendPeriod;
_packetSendPeriod = ceil(_packetSendPeriod * 2);
// this seems odd - the last ack they were setting _lastDecreaseMaxSeq to only applies to slow start
// _lastDecreaseMaxSeq = _slowStartLastAck;
_lastDecreaseMaxSeq = _lastACK;
}
}
@ -206,3 +206,7 @@ void DefaultCC::stopSlowStart() {
_packetSendPeriod = _congestionWindowSize / (_rtt + synInterval());
}
}
void DefaultCC::setInitialSendSequenceNumber(udt::SequenceNumber seqNum) {
_lastACK = _lastDecreaseMaxSeq = seqNum - 1;
}
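
Two small formulas drive the loss response above: at the start of a new congestion epoch the packet send period is stretched by 1.125x, and the average number of NAKs per epoch is tracked with an exponentially weighted moving average (alpha = 0.125). A worked sketch with invented inputs:

#include <cmath>
#include <cstdio>

int main() {
    const double INTER_PACKET_ARRIVAL_INCREASE = 1.125;
    const double NAK_EWMA_ALPHA = 0.125;

    double packetSendPeriod = 80.0;   // microseconds between packets (assumed)
    double avgNAKNum = 4.0;           // running average from previous epochs (assumed)
    int nakCount = 10;                // NAKs seen in the epoch that just ended (assumed)

    // a NAK for a packet sent after the last decrease starts a new congestion epoch
    packetSendPeriod = std::ceil(packetSendPeriod * INTER_PACKET_ARRIVAL_INCREASE);
    avgNAKNum = std::ceil(avgNAKNum * (1 - NAK_EWMA_ALPHA) + nakCount * NAK_EWMA_ALPHA);

    std::printf("packetSendPeriod = %g us, avgNAKNum = %g\n", packetSendPeriod, avgNAKNum);
    return 0;
}
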

View file

@ -41,7 +41,7 @@ public:
virtual void init() {}
virtual void onACK(SequenceNumber ackNum) {}
virtual void onLoss(SequenceNumber rangeStart, SequenceNumber rangeEnd) {}
virtual void onTimeout() {}
protected:
void setAckInterval(int ackInterval) { _ackInterval = ackInterval; }
void setRTO(int rto) { _userDefinedRTO = true; _rto = rto; }
@ -107,7 +107,7 @@ public:
virtual void onTimeout();
protected:
virtual void setInitialSendSequenceNumber(SequenceNumber seqNum) { _slowStartLastACK = seqNum; }
virtual void setInitialSendSequenceNumber(SequenceNumber seqNum);
private:
void stopSlowStart(); // stops the slow start on loss or timeout
@ -115,7 +115,7 @@ private:
p_high_resolution_clock::time_point _lastRCTime = p_high_resolution_clock::now(); // last rate increase time
bool _slowStart { true }; // if in slow start phase
SequenceNumber _slowStartLastACK; // last ACKed seq num from previous slow start check
SequenceNumber _lastACK; // last ACKed sequence number from previous
bool _loss { false }; // if loss happened since last rate increase
SequenceNumber _lastDecreaseMaxSeq; // max pkt seq num sent out when last decrease happened
double _lastDecreasePeriod { 1 }; // value of _packetSendPeriod when last decrease happened

View file

@ -98,6 +98,7 @@ SendQueue& Connection::getSendQueue() {
QObject::connect(_sendQueue.get(), &SendQueue::packetSent, this, &Connection::recordSentPackets);
QObject::connect(_sendQueue.get(), &SendQueue::packetRetransmitted, this, &Connection::recordRetransmission);
QObject::connect(_sendQueue.get(), &SendQueue::queueInactive, this, &Connection::queueInactive);
QObject::connect(_sendQueue.get(), &SendQueue::timeout, this, &Connection::queueTimeout);
// set defaults on the send queue from our congestion control object and estimatedTimeout()
_sendQueue->setPacketSendPeriod(_congestionControl->_packetSendPeriod);
@ -129,6 +130,12 @@ void Connection::queueInactive() {
}
}
void Connection::queueTimeout() {
updateCongestionControlAndSendQueue([this]{
_congestionControl->onTimeout();
});
}
void Connection::sendReliablePacket(std::unique_ptr<Packet> packet) {
Q_ASSERT_X(packet->isReliable(), "Connection::send", "Trying to send an unreliable packet reliably.");
getSendQueue().queuePacket(std::move(packet));
@ -411,7 +418,15 @@ SequenceNumber Connection::nextACK() const {
bool Connection::processReceivedSequenceNumber(SequenceNumber sequenceNumber, int packetSize, int payloadSize) {
if (!_hasReceivedHandshake) {
// refuse to process any packets until we've received the handshake
// Refuse to process any packets until we've received the handshake
// Send handshake request to re-request a handshake
auto handshakeRequestPacket = ControlPacket::create(ControlPacket::HandshakeRequest, 0);
_parentSocket->writeBasePacket(*handshakeRequestPacket, _destination);
#ifdef UDT_CONNECTION_DEBUG
qCDebug(networking) << "Received packet before receiving handshake, sending HandshakeRequest";
#endif
return false;
}
@ -537,6 +552,17 @@ void Connection::processControl(std::unique_ptr<ControlPacket> controlPacket) {
processProbeTail(move(controlPacket));
}
break;
case ControlPacket::HandshakeRequest:
if (_hasReceivedHandshakeACK) {
// We're already in a state where we've received a handshake ack, so we are likely in a state
// where the other end expired our connection. Let's reset.
#ifdef UDT_CONNECTION_DEBUG
qCDebug(networking) << "Got handshake request, stopping SendQueue";
#endif
stopSendQueue();
}
break;
}
}
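
The HandshakeRequest flow added here works in both directions: a receiver that sees data before any handshake asks the sender to re-handshake, and a sender that already holds a handshake ACK interprets the request as the peer having expired the connection and stops its send queue. A schematic sketch of that exchange with heavily simplified types; no real packets are constructed:

#include <cstdio>

enum class ControlType { Handshake, HandshakeACK, HandshakeRequest };

struct Receiver {
    bool hasReceivedHandshake = false;
    // returns true if a HandshakeRequest should be sent back to the sender
    bool onDataPacket() {
        if (!hasReceivedHandshake) {
            std::printf("receiver: data before handshake, requesting one\n");
            return true;
        }
        return false;
    }
};

struct Sender {
    bool hasReceivedHandshakeACK = true;   // we believe the connection is established
    void onControl(ControlType type) {
        if (type == ControlType::HandshakeRequest && hasReceivedHandshakeACK) {
            std::printf("sender: peer expired our connection, stopping send queue\n");
        }
    }
};

int main() {
    Receiver receiver;
    Sender sender;
    if (receiver.onDataPacket()) {
        sender.onControl(ControlType::HandshakeRequest);
    }
    return 0;
}
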

View file

@ -84,6 +84,7 @@ private slots:
void recordSentPackets(int payload, int total);
void recordRetransmission();
void queueInactive();
void queueTimeout();
private:
void sendACK(bool wasCausedBySyncTimeout = true);

View file

@ -100,7 +100,7 @@ void ControlPacket::readType() {
Q_ASSERT_X(bitAndType & CONTROL_BIT_MASK, "ControlPacket::readHeader()", "This should be a control packet");
uint16_t packetType = (bitAndType & ~CONTROL_BIT_MASK) >> (8 * sizeof(Type));
Q_ASSERT_X(packetType <= ControlPacket::Type::ProbeTail, "ControlPacket::readType()", "Received a control packet with wrong type");
Q_ASSERT_X(packetType <= ControlPacket::Type::HandshakeRequest, "ControlPacket::readType()", "Received a control packet with wrong type");
// read the type
_type = (Type) packetType;

View file

@ -34,7 +34,8 @@ public:
TimeoutNAK,
Handshake,
HandshakeACK,
ProbeTail
ProbeTail,
HandshakeRequest
};
static std::unique_ptr<ControlPacket> create(Type type, qint64 size = -1);

View file

@ -20,6 +20,7 @@
#include <QtCore/QThread>
#include <LogHandler.h>
#include <NumericalConstants.h>
#include <SharedUtil.h>
#include "../NetworkLogging.h"
@ -133,7 +134,6 @@ void SendQueue::sendPacket(const Packet& packet) {
void SendQueue::ack(SequenceNumber ack) {
// this is a response from the client, re-set our timeout expiry and our last response time
_timeoutExpiryCount = 0;
_lastReceiverResponse = uint64_t(QDateTime::currentMSecsSinceEpoch());
if (_lastACKSequenceNumber == (uint32_t) ack) {
@ -157,11 +157,13 @@ void SendQueue::ack(SequenceNumber ack) {
}
_lastACKSequenceNumber = (uint32_t) ack;
// call notify_one on the condition_variable_any in case the send thread is sleeping with a full congestion window
_emptyCondition.notify_one();
}
void SendQueue::nak(SequenceNumber start, SequenceNumber end) {
// this is a response from the client, re-set our timeout expiry
_timeoutExpiryCount = 0;
_lastReceiverResponse = uint64_t(QDateTime::currentMSecsSinceEpoch());
{
@ -175,7 +177,6 @@ void SendQueue::nak(SequenceNumber start, SequenceNumber end) {
void SendQueue::overrideNAKListFromPacket(ControlPacket& packet) {
// this is a response from the client, re-set our timeout expiry
_timeoutExpiryCount = 0;
_lastReceiverResponse = uint64_t(QDateTime::currentMSecsSinceEpoch());
{
@ -203,9 +204,7 @@ void SendQueue::sendHandshake() {
std::unique_lock<std::mutex> handshakeLock { _handshakeMutex };
if (!_hasReceivedHandshakeACK) {
// we haven't received a handshake ACK from the client, send another now
static const auto handshakePacket = ControlPacket::create(ControlPacket::Handshake, sizeof(SequenceNumber));
handshakePacket->seek(0);
auto handshakePacket = ControlPacket::create(ControlPacket::Handshake, sizeof(SequenceNumber));
handshakePacket->writePrimitive(_initialSequenceNumber);
_socket->writeBasePacket(*handshakePacket, _destination);
@ -316,10 +315,9 @@ void SendQueue::run() {
}
bool SendQueue::maybeSendNewPacket() {
if (seqlen(SequenceNumber { (uint32_t) _lastACKSequenceNumber }, _currentSequenceNumber) <= _flowWindowSize) {
if (!isFlowWindowFull()) {
// we didn't re-send a packet, so time to send a new one
if (!_packets.isEmpty()) {
SequenceNumber nextNumber = getNextSequenceNumber();
@ -440,28 +438,31 @@ bool SendQueue::maybeResendPacket() {
}
bool SendQueue::isInactive(bool sentAPacket) {
if (!sentAPacket) {
// check if it is time to break this connection
// that will be the case if we have had 16 timeouts since hearing back from the client, and it has been
// at least 5 seconds
static const int NUM_TIMEOUTS_BEFORE_INACTIVE = 16;
static const int MIN_SECONDS_BEFORE_INACTIVE_MS = 5 * 1000;
if (_timeoutExpiryCount >= NUM_TIMEOUTS_BEFORE_INACTIVE &&
_lastReceiverResponse > 0 &&
(QDateTime::currentMSecsSinceEpoch() - _lastReceiverResponse) > MIN_SECONDS_BEFORE_INACTIVE_MS) {
// If the flow window has been full for over CONSIDER_INACTIVE_AFTER,
// then signal the queue is inactive and return so it can be cleaned up
// check for connection timeout first
// that will be the case if we have had 16 timeouts since hearing back from the client, and it has been
// at least 5 seconds
static const int NUM_TIMEOUTS_BEFORE_INACTIVE = 16;
static const int MIN_MS_BEFORE_INACTIVE = 5 * 1000;
auto sinceLastResponse = (QDateTime::currentMSecsSinceEpoch() - _lastReceiverResponse);
if (sinceLastResponse >= quint64(NUM_TIMEOUTS_BEFORE_INACTIVE * (_estimatedTimeout / USECS_PER_MSEC)) &&
_lastReceiverResponse > 0 &&
sinceLastResponse > MIN_MS_BEFORE_INACTIVE) {
// If the flow window has been full for over CONSIDER_INACTIVE_AFTER,
// then signal the queue is inactive and return so it can be cleaned up
#ifdef UDT_CONNECTION_DEBUG
qCDebug(networking) << "SendQueue to" << _destination << "reached" << NUM_TIMEOUTS_BEFORE_INACTIVE << "timeouts"
<< "and 5s before receiving any ACK/NAK and is now inactive. Stopping.";
qCDebug(networking) << "SendQueue to" << _destination << "reached" << NUM_TIMEOUTS_BEFORE_INACTIVE << "timeouts"
<< "and" << MIN_MS_BEFORE_INACTIVE << "milliseconds before receiving any ACK/NAK and is now inactive. Stopping.";
#endif
deactivate();
return true;
}
deactivate();
return true;
}
if (!sentAPacket) {
// During our processing above we didn't send any packets
// If that is still the case we should use a condition_variable_any to sleep until we have data to handle.
@ -470,7 +471,7 @@ bool SendQueue::isInactive(bool sentAPacket) {
DoubleLock doubleLock(_packets.getLock(), _naksLock);
DoubleLock::Lock locker(doubleLock, std::try_to_lock);
if (locker.owns_lock() && _packets.isEmpty() && _naks.isEmpty()) {
if (locker.owns_lock() && (_packets.isEmpty() || isFlowWindowFull()) && _naks.isEmpty()) {
// The packets queue and loss list mutexes are now both locked and they're both empty
if (uint32_t(_lastACKSequenceNumber) == uint32_t(_currentSequenceNumber)) {
@ -481,21 +482,22 @@ bool SendQueue::isInactive(bool sentAPacket) {
// use our condition_variable_any to wait
auto cvStatus = _emptyCondition.wait_for(locker, EMPTY_QUEUES_INACTIVE_TIMEOUT);
// we have the lock again - Make sure to unlock it
locker.unlock();
if (cvStatus == std::cv_status::timeout) {
if (cvStatus == std::cv_status::timeout && (_packets.isEmpty() || isFlowWindowFull()) && _naks.isEmpty()) {
#ifdef UDT_CONNECTION_DEBUG
qCDebug(networking) << "SendQueue to" << _destination << "has been empty for"
<< EMPTY_QUEUES_INACTIVE_TIMEOUT.count()
<< "seconds and receiver has ACKed all packets."
<< "The queue is now inactive and will be stopped.";
#endif
// we have the lock again - Make sure to unlock it
locker.unlock();
// Deactivate queue
deactivate();
return true;
}
} else {
// We think the client is still waiting for data (based on the sequence number gap)
// Let's wait either for a response from the client or until the estimated timeout
@ -505,17 +507,18 @@ bool SendQueue::isInactive(bool sentAPacket) {
// use our condition_variable_any to wait
auto cvStatus = _emptyCondition.wait_for(locker, waitDuration);
if (cvStatus == std::cv_status::timeout) {
// increase the number of timeouts
++_timeoutExpiryCount;
if (cvStatus == std::cv_status::timeout && (_packets.isEmpty() || isFlowWindowFull()) && _naks.isEmpty()
&& SequenceNumber(_lastACKSequenceNumber) < _currentSequenceNumber) {
// after a timeout if we still have sent packets that the client hasn't ACKed we
// add them to the loss list
if (SequenceNumber(_lastACKSequenceNumber) < _currentSequenceNumber) {
// after a timeout if we still have sent packets that the client hasn't ACKed we
// add them to the loss list
// Note that thanks to the DoubleLock we have the _naksLock right now
_naks.append(SequenceNumber(_lastACKSequenceNumber) + 1, _currentSequenceNumber);
}
// Note that thanks to the DoubleLock we have the _naksLock right now
_naks.append(SequenceNumber(_lastACKSequenceNumber) + 1, _currentSequenceNumber);
// we have the lock again - time to unlock it
locker.unlock();
emit timeout();
}
}
}
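The inactivity test above folds the estimated timeout into an elapsed-time check: the connection is presumed dead once the silence from the receiver spans at least NUM_TIMEOUTS_BEFORE_INACTIVE estimated timeout periods and at least five seconds. A minimal sketch of that condition, with hypothetical parameter names standing in for the queue's _lastReceiverResponse and _estimatedTimeout atomics:

#include <QtCore/QDateTime>

// Sketch only: lastResponseMsecs and estimatedTimeoutUsecs are stand-ins for the
// SendQueue's atomics, not the actual members.
static bool connectionLooksInactive(quint64 lastResponseMsecs, int estimatedTimeoutUsecs) {
    static const int NUM_TIMEOUTS_BEFORE_INACTIVE = 16;
    static const quint64 MIN_MS_BEFORE_INACTIVE = 5 * 1000;
    static const int USECS_PER_MSEC = 1000;

    if (lastResponseMsecs == 0) {
        // we have never heard from the receiver, so don't declare the connection dead yet
        return false;
    }

    quint64 sinceLastResponse = QDateTime::currentMSecsSinceEpoch() - lastResponseMsecs;

    // inactive once the silence covers 16 estimated timeout periods and at least 5 seconds
    return sinceLastResponse >= quint64(NUM_TIMEOUTS_BEFORE_INACTIVE * (estimatedTimeoutUsecs / USECS_PER_MSEC))
        && sinceLastResponse > MIN_MS_BEFORE_INACTIVE;
}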
@ -530,3 +533,7 @@ void SendQueue::deactivate() {
_state = State::Stopped;
}
bool SendQueue::isFlowWindowFull() const {
return seqlen(SequenceNumber { (uint32_t) _lastACKSequenceNumber }, _currentSequenceNumber) > _flowWindowSize;
}
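isFlowWindowFull() treats every sequence number between the last ACK and the newest send as still on the wire and compares that span against the window granted by congestion control. A hedged sketch of the wrap-aware span calculation, analogous in spirit to udt's seqlen(); the 29-bit sequence space below is an assumption for illustration, not the library's actual constant:

#include <cstdint>

constexpr uint32_t SEQ_MAX = (1u << 29) - 1;

int packetsInFlight(uint32_t lastAckedSeq, uint32_t currentSeq) {
    // count of sequence numbers from the last ACK up to and including the newest send
    return (lastAckedSeq <= currentSeq)
        ? int(currentSeq - lastAckedSeq) + 1
        : int(SEQ_MAX - lastAckedSeq + currentSeq) + 2;   // span wrapped around the sequence space
}

// The flow window is "full" once that span exceeds the window size handed down by
// congestion control, which is exactly the comparison isFlowWindowFull() performs.
bool flowWindowFull(uint32_t lastAckedSeq, uint32_t currentSeq, int flowWindowSize) {
    return packetsInFlight(lastAckedSeq, currentSeq) > flowWindowSize;
}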

View file

@ -78,6 +78,8 @@ signals:
void packetRetransmitted();
void queueInactive();
void timeout();
private slots:
void run();
@ -97,6 +99,8 @@ private:
bool isInactive(bool sentAPacket);
void deactivate(); // makes the queue inactive and cleans it up
bool isFlowWindowFull() const;
// Increments the current sequence number and returns it
SequenceNumber getNextSequenceNumber();
@ -118,7 +122,6 @@ private:
std::atomic<int> _estimatedTimeout { 0 }; // Estimated timeout, set from CC
std::atomic<int> _syncInterval { udt::DEFAULT_SYN_INTERVAL_USECS }; // Sync interval, set from CC
std::atomic<int> _timeoutExpiryCount { 0 }; // The number of times the timeout has expired without response from client
std::atomic<uint64_t> _lastReceiverResponse { 0 }; // Timestamp for the last time we got new data from the receiver (ACK/NAK)
std::atomic<int> _flowWindowSize { 0 }; // Flow control window size (number of packets that can be on wire) - set from CC

View file

@ -1700,6 +1700,7 @@ bool Octree::readFromURL(const QString& urlString) {
QDataStream inputStream(reply);
readOk = readFromStream(resourceSize, inputStream);
}
delete reply;
}
return readOk;
}
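The added delete of the reply closes a leak once the synchronous read finishes. An alternative sketch that owns the reply with a smart pointer so every exit path, including early error returns, releases it; the function name and the blocking event-loop wait are illustrative, not the Octree code itself:

#include <memory>
#include <QtCore/QEventLoop>
#include <QtCore/QObject>
#include <QtNetwork/QNetworkAccessManager>
#include <QtNetwork/QNetworkReply>
#include <QtNetwork/QNetworkRequest>

// Illustrative only: a blocking fetch whose reply is freed on every path,
// mirroring the "delete reply" added above.
bool readUrlBlocking(QNetworkAccessManager& nam, const QUrl& url, QByteArray& dataOut) {
    QNetworkRequest request(url);
    std::unique_ptr<QNetworkReply> reply(nam.get(request));

    QEventLoop loop;
    QObject::connect(reply.get(), &QNetworkReply::finished, &loop, &QEventLoop::quit);
    loop.exec();  // block until the download has finished

    if (reply->error() != QNetworkReply::NoError) {
        return false;  // early return still frees the reply
    }
    dataOut = reply->readAll();
    return true;
}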

View file

@ -252,25 +252,18 @@ bool Model::findRayIntersectionAgainstSubMeshes(const glm::vec3& origin, const g
// If we hit the models box, then consider the submeshes...
_mutex.lock();
if (!_calculatedMeshBoxesValid) {
if (!_calculatedMeshBoxesValid || (pickAgainstTriangles && !_calculatedMeshTrianglesValid)) {
recalculateMeshBoxes(pickAgainstTriangles);
}
foreach (const AABox& subMeshBox, _calculatedMeshBoxes) {
for (const auto& subMeshBox : _calculatedMeshBoxes) {
if (subMeshBox.findRayIntersection(origin, direction, distanceToSubMesh, subMeshFace, subMeshSurfaceNormal)) {
if (distanceToSubMesh < bestDistance) {
if (pickAgainstTriangles) {
if (!_calculatedMeshTrianglesValid) {
recalculateMeshBoxes(pickAgainstTriangles);
}
// check our triangles here....
const QVector<Triangle>& meshTriangles = _calculatedMeshTriangles[subMeshIndex];
int t = 0;
foreach (const Triangle& triangle, meshTriangles) {
t++;
for (const auto& triangle : meshTriangles) {
float thisTriangleDistance;
if (findRayTriangleIntersection(origin, direction, triangle, thisTriangleDistance)) {
if (thisTriangleDistance < bestDistance) {
@ -1069,7 +1062,7 @@ void Model::updateClusterMatrices(glm::vec3 modelPosition, glm::quat modelOrient
// post the blender if we're not currently waiting for one to finish
if (geometry.hasBlendedMeshes() && _blendshapeCoefficients != _blendedBlendshapeCoefficients) {
_blendedBlendshapeCoefficients = _blendshapeCoefficients;
DependencyManager::get<ModelBlender>()->noteRequiresBlend(this);
DependencyManager::get<ModelBlender>()->noteRequiresBlend(getThisPointer());
}
}
@ -1277,29 +1270,37 @@ ModelBlender::ModelBlender() :
ModelBlender::~ModelBlender() {
}
void ModelBlender::noteRequiresBlend(Model* model) {
void ModelBlender::noteRequiresBlend(ModelPointer model) {
if (_pendingBlenders < QThread::idealThreadCount()) {
if (model->maybeStartBlender()) {
_pendingBlenders++;
}
return;
}
if (!_modelsRequiringBlends.contains(model)) {
_modelsRequiringBlends.append(model);
{
Lock lock(_mutex);
_modelsRequiringBlends.insert(model);
}
}
void ModelBlender::setBlendedVertices(const QPointer<Model>& model, int blendNumber,
const QWeakPointer<NetworkGeometry>& geometry, const QVector<glm::vec3>& vertices, const QVector<glm::vec3>& normals) {
if (!model.isNull()) {
model->setBlendedVertices(blendNumber, geometry, vertices, normals);
}
_pendingBlenders--;
while (!_modelsRequiringBlends.isEmpty()) {
Model* nextModel = _modelsRequiringBlends.takeFirst();
if (nextModel && nextModel->maybeStartBlender()) {
_pendingBlenders++;
return;
{
Lock lock(_mutex);
for (auto i = _modelsRequiringBlends.begin(); i != _modelsRequiringBlends.end();) {
auto weakPtr = *i;
_modelsRequiringBlends.erase(i++); // remove front of the set
ModelPointer nextModel = weakPtr.lock();
if (nextModel && nextModel->maybeStartBlender()) {
_pendingBlenders++;
return;
}
}
}
}
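ModelBlender now parks pending models as weak pointers in a std::set ordered with std::owner_less, so a model destroyed while queued simply fails to lock and is skipped rather than dangling. A self-contained sketch of that pattern with illustrative types:

#include <iostream>
#include <memory>
#include <set>

struct Widget { int id; };
using WidgetWeak = std::weak_ptr<Widget>;

// owner_less orders weak_ptrs by their control block, which stays valid
// even after the pointed-to object has been destroyed.
std::set<WidgetWeak, std::owner_less<WidgetWeak>> pending;

void drainOne() {
    for (auto it = pending.begin(); it != pending.end();) {
        WidgetWeak weak = *it;
        it = pending.erase(it);              // pop the front entry of the set
        if (auto strong = weak.lock()) {     // still alive? then act on it
            std::cout << "processing widget " << strong->id << "\n";
            return;
        }
        // expired entries are silently discarded, mirroring ModelBlender above
    }
}

int main() {
    auto a = std::make_shared<Widget>(Widget{1});
    auto b = std::make_shared<Widget>(Widget{2});
    pending.insert(a);
    pending.insert(b);
    b.reset();      // destroy one widget; its weak entry will be skipped
    drainOne();     // prints "processing widget 1"
}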

View file

@ -50,8 +50,13 @@ inline uint qHash(const std::shared_ptr<MeshPartPayload>& a, uint seed) {
return qHash(a.get(), seed);
}
class Model;
using ModelPointer = std::shared_ptr<Model>;
using ModelWeakPointer = std::weak_ptr<Model>;
/// A generic 3D model displaying geometry loaded from a URL.
class Model : public QObject {
class Model : public QObject, public std::enable_shared_from_this<Model> {
Q_OBJECT
public:
@ -63,6 +68,9 @@ public:
Model(RigPointer rig, QObject* parent = nullptr);
virtual ~Model();
inline ModelPointer getThisPointer() const {
return std::static_pointer_cast<Model>(std::const_pointer_cast<Model>(shared_from_this()));
}
/// Sets the URL of the model to render.
Q_INVOKABLE void setURL(const QUrl& url);
@ -387,18 +395,22 @@ class ModelBlender : public QObject, public Dependency {
public:
/// Adds the specified model to the list requiring vertex blends.
void noteRequiresBlend(Model* model);
void noteRequiresBlend(ModelPointer model);
public slots:
void setBlendedVertices(const QPointer<Model>& model, int blendNumber, const QWeakPointer<NetworkGeometry>& geometry,
const QVector<glm::vec3>& vertices, const QVector<glm::vec3>& normals);
private:
using Mutex = std::mutex;
using Lock = std::unique_lock<Mutex>;
ModelBlender();
virtual ~ModelBlender();
QList<QPointer<Model> > _modelsRequiringBlends;
std::set<ModelWeakPointer, std::owner_less<ModelWeakPointer>> _modelsRequiringBlends;
int _pendingBlenders;
Mutex _mutex;
};
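Deriving Model from std::enable_shared_from_this is what makes getThisPointer() and the weak-pointer blender queue possible: the object can mint a shared_ptr to itself, provided it is already owned by one. A minimal sketch with an illustrative type (not part of the hifi codebase):

#include <cassert>
#include <memory>

class Node : public std::enable_shared_from_this<Node> {
public:
    // Equivalent in spirit to Model::getThisPointer(): a const member that still
    // returns a non-const shared_ptr to *this.
    std::shared_ptr<Node> getThisPointer() const {
        return std::const_pointer_cast<Node>(shared_from_this());
    }
};

int main() {
    auto node = std::make_shared<Node>();   // must already be shared-owned...
    auto again = node->getThisPointer();    // ...otherwise shared_from_this() is undefined/throws
    assert(node == again);                  // both handles share one control block
    std::weak_ptr<Node> weak = node->getThisPointer(); // safe to hand to a blender-style queue
    (void)weak;
}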

View file

@ -172,7 +172,6 @@ QScriptValue QmlWindowClass::internalConstructor(const QString& qmlSource,
setupServer();
retVal = builder(newTab);
retVal->_toolWindow = true;
offscreenUi->getRootContext()->engine()->setObjectOwnership(retVal->_qmlWindow, QQmlEngine::CppOwnership);
registerObject(url.toLower(), retVal);
return QVariant();
});
@ -330,10 +329,8 @@ void QmlWindowClass::close() {
if (_qmlWindow) {
if (_toolWindow) {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto qmlWindow = _qmlWindow;
offscreenUi->executeOnUiThread([=] {
auto toolWindow = offscreenUi->getToolWindow();
offscreenUi->getRootContext()->engine()->setObjectOwnership(qmlWindow, QQmlEngine::JavaScriptOwnership);
auto invokeResult = QMetaObject::invokeMethod(toolWindow, "removeTabForUrl", Qt::DirectConnection,
Q_ARG(QVariant, _source));
Q_ASSERT(invokeResult);
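The removed calls switched who is responsible for deleting the QML window object: QQmlEngine::CppOwnership keeps it alive until C++ deletes it, while JavaScriptOwnership hands it back to the QML garbage collector. A hedged sketch of that API with a hypothetical window object:

#include <QtCore/QObject>
#include <QtQml/QQmlEngine>

// Illustrative only: 'qmlWindow' stands in for the window object created from QML.
void pinToCpp(QObject* qmlWindow) {
    // The QML engine will never garbage-collect this object; C++ must delete it.
    QQmlEngine::setObjectOwnership(qmlWindow, QQmlEngine::CppOwnership);
}

void releaseToQml(QObject* qmlWindow) {
    // Hand the object back to the QML garbage collector, as the old close() path did
    // before these calls were removed.
    QQmlEngine::setObjectOwnership(qmlWindow, QQmlEngine::JavaScriptOwnership);
}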

View file

@ -257,6 +257,29 @@ void AnimTests::testAccumulateTime() {
endFrame = 15.0f;
timeScale = 2.0f;
testAccumulateTimeWithParameters(startFrame, endFrame, timeScale);
startFrame = 0.0f;
endFrame = 1.0f;
timeScale = 1.0f;
float dt = 1.0f;
QString id = "testNode";
AnimNode::Triggers triggers;
bool loopFlag = true;
float resultFrame = accumulateTime(startFrame, endFrame, timeScale, startFrame, dt, loopFlag, id, triggers);
// a one-frame looping animation should NOT trigger onLoop events
QVERIFY(triggers.empty());
const uint32_t MAX_TRIGGER_COUNT = 3;
startFrame = 0.0f;
endFrame = 1.1f;
timeScale = 10.0f;
dt = 10.0f;
triggers.clear();
loopFlag = true;
resultFrame = accumulateTime(startFrame, endFrame, timeScale, startFrame, dt, loopFlag, id, triggers);
// a short animation with a large dt and a large timeScale should create at most MAX_TRIGGER_COUNT (3) loop events
QVERIFY(triggers.size() <= MAX_TRIGGER_COUNT);
}
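The new assertions pin down accumulateTime's looping behavior: a loop spanning a single frame must not fire onLoop triggers, and a huge dt over a short loop may emit only a bounded number of them. A sketch of how such a clamp could be derived; this is not hifi's AnimUtil implementation, and the trigger container and naming are illustrative:

#include <algorithm>
#include <cstdint>
#include <string>
#include <vector>

// Sketch only: advance a looping clock and record at most MAX_TRIGGER_COUNT
// "OnLoop" events no matter how large dt * timeScale is.
static const uint32_t MAX_TRIGGER_COUNT = 3;

float advanceLoopingClock(float startFrame, float endFrame, float timeScale, float currentFrame,
                          float dt, const std::string& id, std::vector<std::string>& triggersOut) {
    float range = endFrame - startFrame;
    if (range <= 1.0f) {
        // a one-frame loop has nothing to wrap over, so it never fires OnLoop
        return startFrame;
    }
    float advance = dt * timeScale;
    uint32_t wraps = uint32_t(advance / range);              // how many times endFrame was passed
    uint32_t fired = std::min(wraps, MAX_TRIGGER_COUNT);     // clamp the emitted events
    for (uint32_t i = 0; i < fired; ++i) {
        triggersOut.push_back(id + "OnLoop");
    }
    float frame = currentFrame + advance - float(wraps) * range;   // fold back into the loop range
    return std::max(startFrame, std::min(frame, endFrame));
}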
void AnimTests::testAccumulateTimeWithParameters(float startFrame, float endFrame, float timeScale) const {