From eedfc4fd38435bc361fea1a38823a2190feb80a9 Mon Sep 17 00:00:00 2001
From: Brad Hefta-Gaub
Date: Wed, 8 Feb 2017 12:19:36 -0800
Subject: [PATCH] fix various bot related bugs

---
 assignment-client/src/Agent.cpp        | 38 ++++++++++--
 interface/src/avatar/Avatar.cpp        |  1 -
 interface/src/avatar/AvatarManager.cpp |  2 +-
 interface/src/avatar/MyAvatar.cpp      |  1 -
 libraries/avatars/src/AvatarData.cpp   | 83 ++++++++++++++++++++++++--
 libraries/avatars/src/AvatarData.h     | 32 ++++++++--
 libraries/fbx/src/FBXReader.cpp        |  2 +
 libraries/fbx/src/FBXReader.h          |  1 +
 8 files changed, 142 insertions(+), 18 deletions(-)

diff --git a/assignment-client/src/Agent.cpp b/assignment-client/src/Agent.cpp
index 0ba83864c4..32076b60e3 100644
--- a/assignment-client/src/Agent.cpp
+++ b/assignment-client/src/Agent.cpp
@@ -138,7 +138,6 @@ void Agent::handleJurisdictionPacket(QSharedPointer<ReceivedMessage> message, Sh
 
 void Agent::handleAudioPacket(QSharedPointer<ReceivedMessage> message) {
     _receivedAudioStream.parseData(*message);
-    _lastReceivedAudioLoudness = _receivedAudioStream.getNextOutputFrameLoudness();
     _receivedAudioStream.clearBuffer();
 }
 
@@ -323,12 +322,14 @@ void Agent::scriptRequestFinished() {
     request->deleteLater();
 }
 
+
 void Agent::executeScript() {
     _scriptEngine = std::unique_ptr<ScriptEngine>(new ScriptEngine(ScriptEngine::AGENT_SCRIPT, _scriptContents, _payload));
     _scriptEngine->setParent(this); // be the parent of the script engine so it gets moved when we do
 
     // setup an Avatar for the script to use
     auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
+    connect(_scriptEngine.get(), SIGNAL(update(float)), scriptedAvatar.data(), SLOT(update(float)), Qt::ConnectionType::QueuedConnection);
 
     scriptedAvatar->setForceFaceTrackerConnected(true);
 
@@ -338,11 +339,33 @@ void Agent::executeScript() {
     // give this AvatarData object to the script engine
     _scriptEngine->registerGlobalObject("Avatar", scriptedAvatar.data());
 
+    auto player = DependencyManager::get<recording::Deck>();
+    connect(player.data(), &recording::Deck::playbackStateChanged, [=] {
+        if (player->isPlaying()) {
+            auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
+            if (recordingInterface->getPlayFromCurrentLocation()) {
+                scriptedAvatar->setRecordingBasis();
+            }
+        } else {
+            scriptedAvatar->clearRecordingBasis();
+        }
+    });
     using namespace recording;
     static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
-    // FIXME how to deal with driving multiple avatars locally?
     Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [this, scriptedAvatar](Frame::ConstPointer frame) {
+
+        auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
+        bool useFrameSkeleton = recordingInterface->getPlayerUseSkeletonModel();
+
+        // FIXME - the ability to switch the avatar URL is not actually supported when playing back from a recording
+        if (!useFrameSkeleton) {
+            static std::once_flag warning;
+            std::call_once(warning, [] {
+                qWarning() << "Recording.setPlayerUseSkeletonModel(false) is not currently supported.";
+            });
+        }
+
         AvatarData::fromFrame(frame->data, *scriptedAvatar);
     });
 
@@ -352,8 +375,12 @@ void Agent::executeScript() {
         const QByteArray& audio = frame->data;
         static quint16 audioSequenceNumber{ 0 };
         Transform audioTransform;
+
+        auto avatarOrientation = scriptedAvatar->getOrientation();
+        glm::quat headOrientation = scriptedAvatar->getHeadOrientation(); // FIXME - should we be using head orientation or avatar orientation?
         audioTransform.setTranslation(scriptedAvatar->getPosition());
-        audioTransform.setRotation(scriptedAvatar->getOrientation());
+        audioTransform.setRotation(avatarOrientation);
+
         QByteArray encodedBuffer;
         if (_encoder) {
             _encoder->encode(audio, encodedBuffer);
@@ -537,7 +564,10 @@ void Agent::encodeFrameOfZeros(QByteArray& encodedZeros) {
 }
 
 void Agent::processAgentAvatarAudio() {
-    if (_isAvatar && (_isListeningToAudioStream || _avatarSound)) {
+    auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
+    bool isPlayingRecording = recordingInterface->isPlaying();
+
+    if (_isAvatar && ((_isListeningToAudioStream && !isPlayingRecording) || _avatarSound)) {
         // if we have an avatar audio stream then send it out to our audio-mixer
         auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
         bool silentFrame = true;
diff --git a/interface/src/avatar/Avatar.cpp b/interface/src/avatar/Avatar.cpp
index 64e82f63da..b8dea58cf1 100644
--- a/interface/src/avatar/Avatar.cpp
+++ b/interface/src/avatar/Avatar.cpp
@@ -1010,7 +1010,6 @@ void Avatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
 
 void Avatar::setModelURLFinished(bool success) {
     if (!success && _skeletonModelURL != AvatarData::defaultFullAvatarModelUrl()) {
-        qDebug() << "Using default after failing to load Avatar model: " << _skeletonModelURL;
         // call _skeletonModel.setURL, but leave our copy of _skeletonModelURL alone. This is so that
         // we don't redo this every time we receive an identity packet from the avatar with the bad url.
         QMetaObject::invokeMethod(_skeletonModel.get(), "setURL",
diff --git a/interface/src/avatar/AvatarManager.cpp b/interface/src/avatar/AvatarManager.cpp
index df3164e6fc..7906b654c3 100644
--- a/interface/src/avatar/AvatarManager.cpp
+++ b/interface/src/avatar/AvatarManager.cpp
@@ -134,7 +134,7 @@ Q_LOGGING_CATEGORY(trace_simulation_avatar, "trace.simulation.avatar");
 
 float AvatarManager::getAvatarDataRate(const QUuid& sessionID, const QString& rateName) {
     auto avatar = getAvatarBySessionID(sessionID);
-    return avatar->getDataRate(rateName);
+    return avatar ? avatar->getDataRate(rateName) : 0.0f;
 }
 
 class AvatarPriority {
diff --git a/interface/src/avatar/MyAvatar.cpp b/interface/src/avatar/MyAvatar.cpp
index d4815b35c6..0174423ee9 100644
--- a/interface/src/avatar/MyAvatar.cpp
+++ b/interface/src/avatar/MyAvatar.cpp
@@ -1165,7 +1165,6 @@ void MyAvatar::clearJointsData() {
 }
 
 void MyAvatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
-    Avatar::setSkeletonModelURL(skeletonModelURL);
     render::ScenePointer scene = qApp->getMain3DScene();
     _skeletonModel->setVisibleInScene(true, scene);
diff --git a/libraries/avatars/src/AvatarData.cpp b/libraries/avatars/src/AvatarData.cpp
index b25140d0a8..ce495494a8 100644
--- a/libraries/avatars/src/AvatarData.cpp
+++ b/libraries/avatars/src/AvatarData.cpp
@@ -283,14 +283,20 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
     destinationBuffer += sizeof(packetStateFlags);
 
     if (hasAvatarGlobalPosition) {
+        auto startSection = destinationBuffer;
         auto data = reinterpret_cast<AvatarDataPacket::AvatarGlobalPosition*>(destinationBuffer);
         data->globalPosition[0] = _globalPosition.x;
         data->globalPosition[1] = _globalPosition.y;
         data->globalPosition[2] = _globalPosition.z;
         destinationBuffer += sizeof(AvatarDataPacket::AvatarGlobalPosition);
+
+        int numBytes = destinationBuffer - startSection;
+
+        _globalPositionRateOutbound.increment(numBytes);
     }
 
     if (hasAvatarBoundingBox) {
+        auto startSection = destinationBuffer;
         auto data = reinterpret_cast<AvatarDataPacket::AvatarBoundingBox*>(destinationBuffer);
         data->avatarDimensions[0] = _globalBoundingBoxDimensions.x;
@@ -302,36 +308,56 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
         data->boundOriginOffset[2] = _globalBoundingBoxOffset.z;
 
         destinationBuffer += sizeof(AvatarDataPacket::AvatarBoundingBox);
+
+        int numBytes = destinationBuffer - startSection;
+        _avatarBoundingBoxRateOutbound.increment(numBytes);
     }
 
     if (hasAvatarOrientation) {
+        auto startSection = destinationBuffer;
         auto localOrientation = getLocalOrientation();
         destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, localOrientation);
+
+        int numBytes = destinationBuffer - startSection;
+        _avatarOrientationRateOutbound.increment(numBytes);
     }
 
     if (hasAvatarScale) {
+        auto startSection = destinationBuffer;
         auto data = reinterpret_cast<AvatarDataPacket::AvatarScale*>(destinationBuffer);
         auto scale = getDomainLimitedScale();
         packFloatRatioToTwoByte((uint8_t*)(&data->scale), scale);
         destinationBuffer += sizeof(AvatarDataPacket::AvatarScale);
+
+        int numBytes = destinationBuffer - startSection;
+        _avatarScaleRateOutbound.increment(numBytes);
     }
 
     if (hasLookAtPosition) {
+        auto startSection = destinationBuffer;
         auto data = reinterpret_cast<AvatarDataPacket::LookAtPosition*>(destinationBuffer);
         auto lookAt = _headData->getLookAtPosition();
         data->lookAtPosition[0] = lookAt.x;
         data->lookAtPosition[1] = lookAt.y;
         data->lookAtPosition[2] = lookAt.z;
         destinationBuffer += sizeof(AvatarDataPacket::LookAtPosition);
+
+        int numBytes = destinationBuffer - startSection;
+        _lookAtPositionRateOutbound.increment(numBytes);
     }
 
     if (hasAudioLoudness) {
+        auto startSection = destinationBuffer;
         auto data = reinterpret_cast<AvatarDataPacket::AudioLoudness*>(destinationBuffer);
         data->audioLoudness = packFloatGainToByte(_headData->getAudioLoudness() / AUDIO_LOUDNESS_SCALE);
         destinationBuffer += sizeof(AvatarDataPacket::AudioLoudness);
+
+        int numBytes = destinationBuffer - startSection;
+        _audioLoudnessRateOutbound.increment(numBytes);
     }
 
     if (hasSensorToWorldMatrix) {
+        auto startSection = destinationBuffer;
         auto data = reinterpret_cast<AvatarDataPacket::SensorToWorldMatrix*>(destinationBuffer);
         glm::mat4 sensorToWorldMatrix = getSensorToWorldMatrix();
         packOrientationQuatToSixBytes(data->sensorToWorldQuat, glmExtractRotation(sensorToWorldMatrix));
@@ -341,9 +367,13 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
         data->sensorToWorldTrans[1] = sensorToWorldMatrix[3][1];
         data->sensorToWorldTrans[2] = sensorToWorldMatrix[3][2];
         destinationBuffer += sizeof(AvatarDataPacket::SensorToWorldMatrix);
+
+        int numBytes = destinationBuffer - startSection;
+        _sensorToWorldRateOutbound.increment(numBytes);
     }
 
     if (hasAdditionalFlags) {
+        auto startSection = destinationBuffer;
         auto data = reinterpret_cast<AvatarDataPacket::AdditionalFlags*>(destinationBuffer);
 
         uint8_t flags { 0 };
@@ -370,27 +400,39 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
         }
         data->flags = flags;
         destinationBuffer += sizeof(AvatarDataPacket::AdditionalFlags);
+
+        int numBytes = destinationBuffer - startSection;
+        _additionalFlagsRateOutbound.increment(numBytes);
     }
 
     if (hasAvatarLocalPosition) {
+        auto startSection = destinationBuffer;
         auto data = reinterpret_cast<AvatarDataPacket::AvatarLocalPosition*>(destinationBuffer);
         auto localPosition = getLocalPosition();
         data->localPosition[0] = localPosition.x;
         data->localPosition[1] = localPosition.y;
         data->localPosition[2] = localPosition.z;
         destinationBuffer += sizeof(AvatarDataPacket::AvatarLocalPosition);
+
+        int numBytes = destinationBuffer - startSection;
+        _localPositionRateOutbound.increment(numBytes);
     }
 
     if (hasParentInfo) {
+        auto startSection = destinationBuffer;
         auto parentInfo = reinterpret_cast<AvatarDataPacket::ParentInfo*>(destinationBuffer);
         QByteArray referentialAsBytes = parentID.toRfc4122();
         memcpy(parentInfo->parentUUID, referentialAsBytes.data(), referentialAsBytes.size());
         parentInfo->parentJointIndex = _parentJointIndex;
         destinationBuffer += sizeof(AvatarDataPacket::ParentInfo);
+
+        int numBytes = destinationBuffer - startSection;
+        _parentInfoRateOutbound.increment(numBytes);
     }
 
     // If it is connected, pack up the data
     if (hasFaceTrackerInfo) {
+        auto startSection = destinationBuffer;
         auto faceTrackerInfo = reinterpret_cast<AvatarDataPacket::FaceTrackerInfo*>(destinationBuffer);
 
         faceTrackerInfo->leftEyeBlink = _headData->_leftEyeBlink;
@@ -403,10 +445,14 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
         // followed by a variable number of float coefficients
         memcpy(destinationBuffer, _headData->_blendshapeCoefficients.data(), _headData->_blendshapeCoefficients.size() * sizeof(float));
         destinationBuffer += _headData->_blendshapeCoefficients.size() * sizeof(float);
+
+        int numBytes = destinationBuffer - startSection;
+        _faceTrackerRateOutbound.increment(numBytes);
     }
 
     // If it is connected, pack up the data
     if (hasJointData) {
+        auto startSection = destinationBuffer;
         QReadLocker readLock(&_jointDataLock);
 
         // joint rotation data
@@ -554,6 +600,9 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
                 << (destinationBuffer - startPosition);
         }
 #endif
+
+        int numBytes = destinationBuffer - startSection;
+        _jointDataRateOutbound.increment(numBytes);
     }
 
     int avatarDataSize = destinationBuffer - startPosition;
@@ -1028,6 +1077,30 @@ float AvatarData::getDataRate(const QString& rateName) {
         return _faceTrackerRate.rate() / BYTES_PER_KILOBIT;
     } else if (rateName == "jointData") {
         return _jointDataRate.rate() / BYTES_PER_KILOBIT;
+    } else if (rateName == "globalPositionOutbound") {
+        return _globalPositionRateOutbound.rate() / BYTES_PER_KILOBIT;
+    } else if (rateName == "localPositionOutbound") {
+        return _localPositionRateOutbound.rate() / BYTES_PER_KILOBIT;
+    } else if (rateName == "avatarBoundingBoxOutbound") {
+        return _avatarBoundingBoxRateOutbound.rate() / BYTES_PER_KILOBIT;
+    } else if (rateName == "avatarOrientationOutbound") {
+        return _avatarOrientationRateOutbound.rate() / BYTES_PER_KILOBIT;
+    } else if (rateName == "avatarScaleOutbound") {
+        return _avatarScaleRateOutbound.rate() / BYTES_PER_KILOBIT;
+    } else if (rateName == "lookAtPositionOutbound") {
+        return _lookAtPositionRateOutbound.rate() / BYTES_PER_KILOBIT;
+    } else if (rateName == "audioLoudnessOutbound") {
+        return _audioLoudnessRateOutbound.rate() / BYTES_PER_KILOBIT;
+    } else if (rateName == "sensorToWorkMatrixOutbound") {
+        return _sensorToWorldRateOutbound.rate() / BYTES_PER_KILOBIT;
+    } else if (rateName == "additionalFlagsOutbound") {
+        return _additionalFlagsRateOutbound.rate() / BYTES_PER_KILOBIT;
+    } else if (rateName == "parentInfoOutbound") {
+        return _parentInfoRateOutbound.rate() / BYTES_PER_KILOBIT;
+    } else if (rateName == "faceTrackerOutbound") {
+        return _faceTrackerRateOutbound.rate() / BYTES_PER_KILOBIT;
+    } else if (rateName == "jointDataOutbound") {
+        return _jointDataRateOutbound.rate() / BYTES_PER_KILOBIT;
     }
     return 0.0f;
 }
@@ -1842,8 +1915,7 @@ QJsonObject AvatarData::toJson() const {
     return root;
 }
 
-void AvatarData::fromJson(const QJsonObject& json) {
-
+void AvatarData::fromJson(const QJsonObject& json, bool useFrameSkeleton) {
     int version;
     if (json.contains(JSON_AVATAR_VERSION)) {
         version = json[JSON_AVATAR_VERSION].toInt();
@@ -1865,7 +1937,7 @@ void AvatarData::fromJson(const QJsonObject& json) {
 
     if (json.contains(JSON_AVATAR_BODY_MODEL)) {
         auto bodyModelURL = json[JSON_AVATAR_BODY_MODEL].toString();
-        if (bodyModelURL != getSkeletonModelURL().toString()) {
+        if (useFrameSkeleton && bodyModelURL != getSkeletonModelURL().toString()) {
             setSkeletonModelURL(bodyModelURL);
         }
     }
@@ -1958,8 +2030,9 @@ QByteArray AvatarData::toFrame(const AvatarData& avatar) {
 }
 
 
-void AvatarData::fromFrame(const QByteArray& frameData, AvatarData& result) {
+void AvatarData::fromFrame(const QByteArray& frameData, AvatarData& result, bool useFrameSkeleton) {
     QJsonDocument doc = QJsonDocument::fromBinaryData(frameData);
+
 #ifdef WANT_JSON_DEBUG
     {
         QJsonObject obj = doc.object();
@@ -1967,7 +2040,7 @@ void AvatarData::fromFrame(const QByteArray& frameData, AvatarData& result) {
         qCDebug(avatars).noquote() << QJsonDocument(obj).toJson(QJsonDocument::JsonFormat::Indented);
     }
 #endif
-    result.fromJson(doc.object());
+    result.fromJson(doc.object(), useFrameSkeleton);
 }
 
 float AvatarData::getBodyYaw() const {
diff --git a/libraries/avatars/src/AvatarData.h b/libraries/avatars/src/AvatarData.h
index 52cf81798e..cd97a726bf 100644
--- a/libraries/avatars/src/AvatarData.h
+++ b/libraries/avatars/src/AvatarData.h
@@ -329,7 +329,7 @@ public:
 
     static const QString FRAME_NAME;
 
-    static void fromFrame(const QByteArray& frameData, AvatarData& avatar);
+    static void fromFrame(const QByteArray& frameData, AvatarData& avatar, bool useFrameSkeleton = true);
     static QByteArray toFrame(const AvatarData& avatar);
 
     AvatarData();
@@ -380,8 +380,13 @@ public:
     void nextAttitude(glm::vec3 position, glm::quat orientation); // Can be safely called at any time.
     virtual void updateAttitude() {} // Tell skeleton mesh about changes
 
-    glm::quat getHeadOrientation() const { return _headData->getOrientation(); }
-    void setHeadOrientation(const glm::quat& orientation) { _headData->setOrientation(orientation); }
+    glm::quat getHeadOrientation() {
+        lazyInitHeadData();
+        return _headData->getOrientation();
+    }
+    void setHeadOrientation(const glm::quat& orientation) { if (_headData) _headData->setOrientation(orientation); }
+    void setLookAtPosition(const glm::vec3& lookAtPosition) { if (_headData) _headData->setLookAtPosition(lookAtPosition); }
+    void setBlendshapeCoefficients(const QVector<float>& blendshapeCoefficients) { if (_headData) _headData->setBlendshapeCoefficients(blendshapeCoefficients); }
 
     // access to Head().set/getMousePitch (degrees)
     float getHeadPitch() const { return _headData->getBasePitch(); }
@@ -513,7 +518,7 @@ public:
     TransformPointer getRecordingBasis() const;
     void setRecordingBasis(TransformPointer recordingBasis = TransformPointer());
     QJsonObject toJson() const;
-    void fromJson(const QJsonObject& json);
+    void fromJson(const QJsonObject& json, bool useFrameSkeleton = true);
 
     glm::vec3 getClientGlobalPosition() { return _globalPosition; }
     glm::vec3 getGlobalBoundingBoxCorner() { return _globalPosition + _globalBoundingBoxOffset - _globalBoundingBoxDimensions; }
@@ -528,7 +533,7 @@ public:
     Q_INVOKABLE glm::mat4 getControllerLeftHandMatrix() const;
    Q_INVOKABLE glm::mat4 getControllerRightHandMatrix() const;
 
-    float getDataRate(const QString& rateName = QString(""));
+    Q_INVOKABLE float getDataRate(const QString& rateName = QString(""));
 
     int getJointCount() { return _jointData.size(); }
@@ -596,7 +601,7 @@ protected:
     bool _forceFaceTrackerConnected;
     bool _hasNewJointData; // set in AvatarData, cleared in Avatar
 
-    HeadData* _headData;
+    HeadData* _headData { nullptr };
 
     QUrl _skeletonModelURL;
     bool _firstSkeletonCheck { true };
@@ -659,6 +664,21 @@ protected:
     RateCounter<> _faceTrackerRate;
     RateCounter<> _jointDataRate;
 
+    // Some rate data for outgoing data
+    RateCounter<> _globalPositionRateOutbound;
+    RateCounter<> _localPositionRateOutbound;
+    RateCounter<> _avatarBoundingBoxRateOutbound;
+    RateCounter<> _avatarOrientationRateOutbound;
+    RateCounter<> _avatarScaleRateOutbound;
+    RateCounter<> _lookAtPositionRateOutbound;
+    RateCounter<> _audioLoudnessRateOutbound;
+    RateCounter<> _sensorToWorldRateOutbound;
+    RateCounter<> _additionalFlagsRateOutbound;
+    RateCounter<> _parentInfoRateOutbound;
+    RateCounter<> _faceTrackerRateOutbound;
+    RateCounter<> _jointDataRateOutbound;
+
+
     glm::vec3 _globalBoundingBoxDimensions;
     glm::vec3 _globalBoundingBoxOffset;
diff --git a/libraries/fbx/src/FBXReader.cpp b/libraries/fbx/src/FBXReader.cpp
index 42922ce226..71a8419091 100644
--- a/libraries/fbx/src/FBXReader.cpp
+++ b/libraries/fbx/src/FBXReader.cpp
@@ -537,6 +537,8 @@ FBXGeometry* FBXReader::extractFBXGeometry(const QVariantHash& mapping, const QS
     FBXGeometry* geometryPtr = new FBXGeometry;
     FBXGeometry& geometry = *geometryPtr;
 
+    geometry.originalURL = url;
+
     float unitScaleFactor = 1.0f;
     glm::vec3 ambientColor;
     QString hifiGlobalNodeID;
diff --git a/libraries/fbx/src/FBXReader.h b/libraries/fbx/src/FBXReader.h
index cd1dbc5c4f..e2e6a8c004 100644
--- a/libraries/fbx/src/FBXReader.h
+++ b/libraries/fbx/src/FBXReader.h
@@ -281,6 +281,7 @@ class FBXGeometry {
 public:
     using Pointer = std::shared_ptr<FBXGeometry>;
 
+    QString originalURL;
     QString author;
     QString applicationName; ///< the name of the application that generated the model
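
The bookkeeping added to AvatarData::toByteArray() follows one pattern throughout: record the write cursor before a section is packed, compute the byte delta after it is packed, and feed that delta into a per-section outbound RateCounter. getDataRate(), now Q_INVOKABLE, reports those counters under the new "...Outbound" keys (divided by BYTES_PER_KILOBIT, so the values appear to be in kilobits per second). Below is a minimal, self-contained sketch of that bracket-and-increment pattern only; SimpleRateCounter and packGlobalPosition are illustrative stand-ins, not the RateCounter<> template or the packing code from the tree.

    // Illustrative sketch only -- mirrors the startSection / numBytes / increment()
    // accounting added around each optional section in AvatarData::toByteArray().
    #include <cstdint>
    #include <cstring>

    class SimpleRateCounter {                       // hypothetical stand-in for RateCounter<>
    public:
        void increment(uint64_t bytes) { _totalBytes += bytes; }
        uint64_t total() const { return _totalBytes; }
    private:
        uint64_t _totalBytes { 0 };
    };

    static SimpleRateCounter globalPositionRateOutbound;

    // Pack a three-float position into the destination buffer and account for
    // exactly the bytes this section wrote, returning the advanced cursor.
    uint8_t* packGlobalPosition(uint8_t* destinationBuffer, const float position[3]) {
        auto startSection = destinationBuffer;

        std::memcpy(destinationBuffer, position, 3 * sizeof(float));
        destinationBuffer += 3 * sizeof(float);

        int numBytes = destinationBuffer - startSection;
        globalPositionRateOutbound.increment(numBytes);

        return destinationBuffer;
    }

Because each section owns its counter, a caller can ask for a single stream's cost, for example getDataRate("jointDataOutbound") or getDataRate("globalPositionOutbound"), the same way the existing inbound keys are queried.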