Merge branch 'rig' of github.com:howard-stearns/hifi into rig

This commit is contained in:
Seth Alves 2015-07-23 09:46:48 -07:00
commit 5a1c1446cd
6 changed files with 24 additions and 3 deletions

View file

@@ -11,12 +11,14 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
/* TBD:
- What are the responsibilities of Animation/AnimationPointer/AnimationCache/AnimationDetails? Is there common/copied code (e.g., ScriptableAvatar::update)?
- What are the responsibilities of Animation/AnimationPointer/AnimationCache/AnimationDetails/AnimationObject/AnimationLoop?
Is there common/copied code (e.g., ScriptableAvatar::update)?
- How do attachments interact with the physics of the attached entity? E.g., do hand joints need to reflect held object physics?
- Is there any current need (i.e., for initial compatibility) to have multiple animations per role (e.g., idle) with the system choosing randomly?
- Distribute some doc from here to the right files if it turns out to be correct:
- AnimationDetails is a script-usable copy of animation state, analogous to EntityItemProperties, but without anything equivalent to editEntity.
But what's the intended difference vs AnimationObject? Maybe AnimationDetails is to Animation as AnimationObject is to AnimationPointer?
*/
#ifndef __hifi__Rig__

View file

@@ -1087,6 +1087,7 @@ void AvatarData::setJointMappingsFromNetworkReply() {
}
networkReply->deleteLater();
emit jointsLoaded();
}
void AvatarData::sendAvatarDataPacket() {

View file

@@ -312,6 +312,9 @@ public:
bool shouldDie() const { return _owningAvatarMixer.isNull() || getUsecsSinceLastUpdate() > AVATAR_SILENCE_THRESHOLD_USECS; }
signals:
void jointsLoaded(); // So that test cases or anyone waiting on asynchronous loading can be informed.
public slots:
void sendAvatarDataPacket();
void sendIdentityPacket();
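The jointsLoaded() signal declared above is what lets a test case (or any other caller) block until the asynchronous joint-mapping fetch finishes. As a minimal sketch of one way to wait on it from QtTest, here is a helper built on QSignalSpy; the helper name, the 30-second timeout, and the surrounding setup are illustrative assumptions, not part of this commit:

#include <QtTest/QtTest>
#include "AvatarData.h"

// Sketch: spin an event loop until AvatarData::jointsLoaded fires,
// failing the test if it has not arrived within timeoutMs.
static void waitForJointsLoaded(AvatarData& avatar, int timeoutMs = 30 * 1000) {
    QSignalSpy spy(&avatar, SIGNAL(jointsLoaded()));
    // QSignalSpy::wait() returns true as soon as the signal is emitted,
    // or false once the timeout expires.
    QVERIFY2(spy.wait(timeoutMs), "jointsLoaded() was never emitted");
}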

View file

@@ -1,7 +1,7 @@
# Declare dependencies
macro (setup_testcase_dependencies)
# link in the shared libraries
link_hifi_libraries(shared animation gpu fbx model)
link_hifi_libraries(shared animation gpu fbx model avatars networking audio)
copy_dlls_beside_windows_executable()
endmacro ()

View file

@@ -40,11 +40,26 @@
*/
#include <iostream>
//#include "FSTReader.h"
// There are two good ways we could organize this:
// 1. Create a MyAvatar the same way that Interface does, and poke at it.
// We can't do that because MyAvatar (and even Avatar) are in interface, not a library, and our build system won't allow that dependency.
// 2. Create just the minimum skeleton in the most direct way possible, using only very basic library APIs (such as fbx).
// I don't think we can do that because not everything we need is exposed directly from, e.g., the fst and fbx readers.
// So here we do neither. We use as much as we can from AvatarData (which is in the avatars library and in turn requires networking and audio), and
// duplicate whatever other code we need from (My)Avatar. Ugh. We may refactor that later, but right now, cleaning this up is not on our critical path.
#include "AvatarData.h"
#include "RigTests.h"
QTEST_MAIN(RigTests)
void RigTests::initTestCase() {
AvatarData avatar;
QEventLoop loop; // Create an event loop that will quit when we get the finished signal
QObject::connect(&avatar, &AvatarData::jointsLoaded, &loop, &QEventLoop::quit);
avatar.setSkeletonModelURL(QUrl("https://hifi-public.s3.amazonaws.com/marketplace/contents/4a690585-3fa3-499e-9f8b-fd1226e561b1/e47e6898027aa40f1beb6adecc6a7db5.fst")); // Zach
//std::cout << "sleep start" << std::endl;
loop.exec(); // Block this thread here until jointsLoaded fires and quits the loop
_rig = new Rig();
}
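One caveat with the exec() pattern above: if the skeleton download fails or the signal is never emitted, loop.exec() blocks forever and the whole test binary hangs. Below is a hedged sketch of the same wait with a timeout escape hatch, using only stock Qt pieces; the function name and the 60-second allowance are assumptions, not code from this commit:

#include <QEventLoop>
#include <QTimer>
#include "AvatarData.h"

// Sketch: wait for AvatarData::jointsLoaded, but give up after timeoutMs
// so a failed or stalled download cannot wedge the test run.
static bool waitForJointsOrTimeout(AvatarData& avatar, int timeoutMs = 60 * 1000) {
    QEventLoop loop;
    QTimer timer;
    timer.setSingleShot(true);
    bool loaded = false;
    QObject::connect(&avatar, &AvatarData::jointsLoaded, [&]() { loaded = true; loop.quit(); });
    QObject::connect(&timer, &QTimer::timeout, &loop, &QEventLoop::quit);
    timer.start(timeoutMs);
    loop.exec();  // returns when either the signal or the timer fires
    return loaded;
}

initTestCase() could then replace the bare loop.exec() with something like QVERIFY2(waitForJointsOrTimeout(avatar), "skeleton never loaded"), keeping the rest of the setup unchanged.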

View file

@@ -15,7 +15,7 @@
#include <QtTest/QtTest>
#include <Rig.h>
#include "../QTestExtensions.h"
//#include "../QTestExtensions.h"
// The QTest terminology is not consistent with itself or with industry: