Merge branch 'master' into ratsteer

This commit is contained in:
James B. Pollack 2015-11-30 11:25:50 -08:00
commit 1190f1b6cc
40 changed files with 571 additions and 193 deletions

View file

@ -7,11 +7,18 @@ Please read the [general build guide](BUILD.md) for information on dependencies
We no longer require installing Qt 5 via our [homebrew formulas repository](https://github.com/highfidelity/homebrew-formulas). Qt versions 5.5.x and above provide a mechanism to disable the wireless scanning we previously had a custom patch for.
###Qt
###OpenSSL and Qt
Assuming you've installed Qt 5 using the homebrew instructions above, you'll need to set QT_CMAKE_PREFIX_PATH so CMake can find your installation of Qt. For Qt 5.5.1 installed via homebrew, set QT_CMAKE_PREFIX_PATH as follows.
Assuming you've installed OpenSSL or Qt 5 using the homebrew instructions above, you'll need to set OPENSSL_ROOT_DIR and QT_CMAKE_PREFIX_PATH so CMake can find your installations.
For OpenSSL installed via homebrew, set OPENSSL_ROOT_DIR:
export QT_CMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.5.1/lib/cmake
export OPENSSL_ROOT_DIR=/usr/local/Cellar/openssl/1.0.2d_1
For Qt 5.5.1 installed via homebrew, set QT_CMAKE_PREFIX_PATH as follows.
export QT_CMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.5.1_2/lib/cmake
Note that these paths use the versions from the homebrew formulae at the time of this writing; the version in the path will likely change.
###Xcode
If Xcode is your editor of choice, you can ask CMake to generate Xcode project files instead of Unix Makefiles.
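As a rough sketch (assuming an out-of-source build directory alongside the checkout), the Xcode generator is selected when invoking CMake:
cmake .. -G Xcode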

View file

@ -67,7 +67,7 @@ Agent::Agent(NLPacket& packet) :
DependencyManager::set<RecordingScriptingInterface>();
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
packetReceiver.registerListenerForTypes(
{ PacketType::MixedAudio, PacketType::SilentAudioFrame },
this, "handleAudioPacket");
@ -86,7 +86,7 @@ void Agent::handleOctreePacket(QSharedPointer<NLPacket> packet, SharedNodePointe
if (packet->getPayloadSize() > statsMessageLength) {
// pull out the piggybacked packet and create a new QSharedPointer<NLPacket> for it
int piggyBackedSizeWithHeader = packet->getPayloadSize() - statsMessageLength;
auto buffer = std::unique_ptr<char[]>(new char[piggyBackedSizeWithHeader]);
memcpy(buffer.get(), packet->getPayload() + statsMessageLength, piggyBackedSizeWithHeader);
@ -126,11 +126,11 @@ void Agent::handleAudioPacket(QSharedPointer<NLPacket> packet) {
const QString AGENT_LOGGING_NAME = "agent";
void Agent::run() {
// make sure we request our script once the agent connects to the domain
auto nodeList = DependencyManager::get<NodeList>();
connect(&nodeList->getDomainHandler(), &DomainHandler::connectedToDomain, this, &Agent::requestScript);
ThreadedAssignment::commonInit(AGENT_LOGGING_NAME, NodeType::Agent);
// Setup MessagesClient
@ -140,7 +140,7 @@ void Agent::run() {
messagesClient->moveToThread(messagesThread);
connect(messagesThread, &QThread::started, messagesClient.data(), &MessagesClient::init);
messagesThread->start();
nodeList->addSetOfNodeTypesToNodeInterestSet({
NodeType::AudioMixer, NodeType::AvatarMixer, NodeType::EntityServer, NodeType::MessagesMixer, NodeType::AssetServer
});
@ -149,7 +149,7 @@ void Agent::run() {
void Agent::requestScript() {
auto nodeList = DependencyManager::get<NodeList>();
disconnect(&nodeList->getDomainHandler(), &DomainHandler::connectedToDomain, this, &Agent::requestScript);
// figure out the URL for the script for this agent assignment
QUrl scriptURL;
if (_payload.isEmpty()) {
@ -160,24 +160,24 @@ void Agent::requestScript() {
} else {
scriptURL = QUrl(_payload);
}
// setup a network access manager and disk cache for the script request
QNetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkDiskCache* cache = new QNetworkDiskCache();
QString cachePath = QStandardPaths::writableLocation(QStandardPaths::DataLocation);
cache->setCacheDirectory(!cachePath.isEmpty() ? cachePath : "agentCache");
networkAccessManager.setCache(cache);
QNetworkRequest networkRequest = QNetworkRequest(scriptURL);
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
// setup a timeout for script request
static const int SCRIPT_TIMEOUT_MS = 10000;
_scriptRequestTimeout = new QTimer(this);
connect(_scriptRequestTimeout, &QTimer::timeout, this, &Agent::scriptRequestFinished);
_scriptRequestTimeout->start(SCRIPT_TIMEOUT_MS);
qDebug() << "Downloading script at" << scriptURL.toString();
QNetworkReply* reply = networkAccessManager.get(networkRequest);
connect(reply, &QNetworkReply::finished, this, &Agent::scriptRequestFinished);
@ -187,11 +187,11 @@ void Agent::scriptRequestFinished() {
auto reply = qobject_cast<QNetworkReply*>(sender());
_scriptRequestTimeout->stop();
if (reply && reply->error() == QNetworkReply::NoError) {
_scriptContents = reply->readAll();
qDebug() << "Downloaded script:" << _scriptContents;
// we could just call executeScript directly - we use a QueuedConnection to allow scriptRequestFinished
// to return before calling executeScript
QMetaObject::invokeMethod(this, "executeScript", Qt::QueuedConnection);
@ -202,38 +202,38 @@ void Agent::scriptRequestFinished() {
} else {
qDebug() << "Failed to download script - request timed out. Bailing on assignment.";
}
setFinished(true);
}
reply->deleteLater();
}
void Agent::executeScript() {
_scriptEngine = std::unique_ptr<ScriptEngine>(new ScriptEngine(_scriptContents, _payload));
_scriptEngine->setParent(this); // be the parent of the script engine so it gets moved when we do
// setup an Avatar for the script to use
auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
connect(_scriptEngine.get(), SIGNAL(update(float)), scriptedAvatar.data(), SLOT(update(float)), Qt::ConnectionType::QueuedConnection);
scriptedAvatar->setForceFaceTrackerConnected(true);
// call model URL setters with empty URLs so our avatar, if user, will have the default models
scriptedAvatar->setFaceModelURL(QUrl());
scriptedAvatar->setSkeletonModelURL(QUrl());
// give this AvatarData object to the script engine
_scriptEngine->registerGlobalObject("Avatar", scriptedAvatar.data());
using namespace recording;
static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
// FIXME how to deal with driving multiple avatars locally?
Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [this, scriptedAvatar](Frame::ConstPointer frame) {
AvatarData::fromFrame(frame->data, *scriptedAvatar);
});
using namespace recording;
static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::AUDIO_FRAME_NAME);
static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::getAudioFrameName());
Frame::registerFrameHandler(AUDIO_FRAME_TYPE, [this, &scriptedAvatar](Frame::ConstPointer frame) {
const QByteArray& audio = frame->data;
static quint16 audioSequenceNumber{ 0 };
@ -242,49 +242,49 @@ void Agent::executeScript() {
audioTransform.setRotation(scriptedAvatar->getOrientation());
AbstractAudioInterface::emitAudioPacket(audio.data(), audio.size(), audioSequenceNumber, audioTransform, PacketType::MicrophoneAudioNoEcho);
});
auto avatarHashMap = DependencyManager::set<AvatarHashMap>();
_scriptEngine->registerGlobalObject("AvatarList", avatarHashMap.data());
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
packetReceiver.registerListener(PacketType::BulkAvatarData, avatarHashMap.data(), "processAvatarDataPacket");
packetReceiver.registerListener(PacketType::KillAvatar, avatarHashMap.data(), "processKillAvatar");
packetReceiver.registerListener(PacketType::AvatarIdentity, avatarHashMap.data(), "processAvatarIdentityPacket");
packetReceiver.registerListener(PacketType::AvatarBillboard, avatarHashMap.data(), "processAvatarBillboardPacket");
// register ourselves to the script engine
_scriptEngine->registerGlobalObject("Agent", this);
// FIXME -we shouldn't be calling this directly, it's normally called by run(), not sure why
// viewers would need this called.
//_scriptEngine->init(); // must be done before we set up the viewers
_scriptEngine->registerGlobalObject("SoundCache", DependencyManager::get<SoundCache>().data());
QScriptValue webSocketServerConstructorValue = _scriptEngine->newFunction(WebSocketServerClass::constructor);
_scriptEngine->globalObject().setProperty("WebSocketServer", webSocketServerConstructorValue);
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
_scriptEngine->registerGlobalObject("EntityViewer", &_entityViewer);
// we need to make sure that init has been called for our EntityScriptingInterface
// so that it actually has a jurisdiction listener when we ask it for it next
entityScriptingInterface->init();
_entityViewer.setJurisdictionListener(entityScriptingInterface->getJurisdictionListener());
_entityViewer.init();
entityScriptingInterface->setEntityTree(_entityViewer.getTree());
// wire up our additional agent related processing to the update signal
QObject::connect(_scriptEngine.get(), &ScriptEngine::update, this, &Agent::processAgentAvatarAndAudio);
_scriptEngine->run();
Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
Frame::clearFrameHandler(AVATAR_FRAME_TYPE);
setFinished(true);
}
@ -317,7 +317,7 @@ void Agent::setIsAvatar(bool isAvatar) {
delete _avatarIdentityTimer;
_avatarIdentityTimer = nullptr;
}
if (_avatarBillboardTimer) {
_avatarBillboardTimer->stop();
delete _avatarBillboardTimer;

View file

@ -31,7 +31,7 @@
class Agent : public ThreadedAssignment {
Q_OBJECT
Q_PROPERTY(bool isAvatar READ isAvatar WRITE setIsAvatar)
Q_PROPERTY(bool isPlayingAvatarSound READ isPlayingAvatarSound)
Q_PROPERTY(bool isListeningToAudioStream READ isListeningToAudioStream WRITE setIsListeningToAudioStream)
@ -40,7 +40,7 @@ class Agent : public ThreadedAssignment {
public:
Agent(NLPacket& packet);
void setIsAvatar(bool isAvatar);
bool isAvatar() const { return _isAvatar; }
@ -53,7 +53,7 @@ public:
QUuid getSessionUUID() const;
virtual void aboutToFinish();
public slots:
void run();
void playAvatarSound(Sound* avatarSound) { setAvatarSound(avatarSound); }
@ -62,7 +62,7 @@ private slots:
void requestScript();
void scriptRequestFinished();
void executeScript();
void handleAudioPacket(QSharedPointer<NLPacket> packet);
void handleOctreePacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void handleJurisdictionPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
@ -72,7 +72,7 @@ private:
std::unique_ptr<ScriptEngine> _scriptEngine;
EntityEditPacketSender _entityEditSender;
EntityTreeHeadlessViewer _entityViewer;
MixedAudioStream _receivedAudioStream;
float _lastReceivedAudioLoudness;

View file

@ -18,8 +18,8 @@ if (WIN32)
ExternalProject_Add(
${EXTERNAL_NAME}
# URL https://bullet.googlecode.com/files/bullet-2.82-r2704.zip
URL http://hifi-public.s3.amazonaws.com/dependencies/bullet-2.82-r2704.zip
URL_MD5 f5e8914fc9064ad32e0d62d19d33d977
URL http://hifi-public.s3.amazonaws.com/dependencies/bullet-2.82-ccd-fix.zip
URL_MD5 d95b07eb120de7dd7786361c0b5a8d9f
CMAKE_ARGS ${PLATFORM_CMAKE_ARGS} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DBUILD_EXTRAS=0 -DINSTALL_LIBS=1 -DBUILD_DEMOS=0 -DUSE_GLUT=0 -DUSE_DX11=0
LOG_DOWNLOAD 1
LOG_CONFIGURE 1
@ -30,8 +30,8 @@ else ()
ExternalProject_Add(
${EXTERNAL_NAME}
#URL http://bullet.googlecode.com/files/bullet-2.82-r2704.tgz
URL http://hifi-public.s3.amazonaws.com/dependencies/bullet-2.82-r2704.tgz
URL_MD5 70b3c8d202dee91a0854b4cbc88173e8
URL http://hifi-public.s3.amazonaws.com/dependencies/bullet-2.82-ccd-fix.tgz
URL_MD5 fb140a4983b4109aa1c825a162aa8d64
CMAKE_ARGS ${PLATFORM_CMAKE_ARGS} -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DBUILD_EXTRAS=0 -DINSTALL_LIBS=1 -DBUILD_DEMOS=0 -DUSE_GLUT=0
LOG_DOWNLOAD 1
LOG_CONFIGURE 1
@ -80,4 +80,4 @@ endif ()
if (DEFINED ${EXTERNAL_NAME_UPPER}_DYNAMICS_LIBRARY_RELEASE)
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIR ${INSTALL_DIR}/include/bullet CACHE PATH "Path to bullet include directory")
endif ()
endif ()

View file

@ -3,12 +3,14 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
cmake_policy(SET CMP0046 OLD)
include(ExternalProject)
string(REPLACE \\ / QT_CMAKE_PREFIX_PATH $ENV{QT_CMAKE_PREFIX_PATH})
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://s3-us-west-1.amazonaws.com/hifi-production/dependencies/quazip-0.6.2.zip
URL_MD5 514851970f1a14d815bdc3ad6267af4d
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DCMAKE_PREFIX_PATH=$ENV{QT_CMAKE_PREFIX_PATH} -DCMAKE_INSTALL_NAME_DIR:PATH=<INSTALL_DIR>/lib -DZLIB_ROOT=${ZLIB_ROOT}
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DCMAKE_PREFIX_PATH=${QT_CMAKE_PREFIX_PATH} -DCMAKE_INSTALL_NAME_DIR:PATH=<INSTALL_DIR>/lib -DZLIB_ROOT=${ZLIB_ROOT}
LOG_DOWNLOAD 1
LOG_CONFIGURE 1
LOG_BUILD 1

View file

@ -21,7 +21,7 @@ macro(LINK_HIFI_LIBRARIES)
include_directories("${HIFI_LIBRARY_DIR}/${HIFI_LIBRARY}/src")
add_dependencies(${TARGET_NAME} ${HIFI_LIBRARY})
# link the actual library - it is static so don't bubble it up
target_link_libraries(${TARGET_NAME} ${HIFI_LIBRARY})

View file

@ -34,6 +34,13 @@ macro(SETUP_HIFI_PROJECT)
# find these Qt modules and link them to our own target
find_package(Qt5 COMPONENTS ${${TARGET_NAME}_DEPENDENCY_QT_MODULES} REQUIRED)
# disable /OPT:REF and /OPT:ICF for the Debug builds
# This will prevent the following linker warnings
# LINK : warning LNK4075: ignoring '/INCREMENTAL' due to '/OPT:ICF' specification
if (WIN32)
set_property(TARGET ${TARGET_NAME} APPEND_STRING PROPERTY LINK_FLAGS_DEBUG "/OPT:NOREF /OPT:NOICF")
endif()
foreach(QT_MODULE ${${TARGET_NAME}_DEPENDENCY_QT_MODULES})
target_link_libraries(${TARGET_NAME} Qt5::${QT_MODULE})
endforeach()

View file

@ -14,16 +14,17 @@
// An assignment client script that animates one avatar at a random location within 'spread' meters of 'origin'.
// In Domain Server Settings, go to scripts and give the URL of this script. Press '+', and then 'Save and restart'.
var origin = {x: 500, y: 502, z: 500};
var spread = 10; // meters
var origin = {x: 500, y: 500, z: 500};
var spread = 20; // meters
var animationData = {url: "https://hifi-public.s3.amazonaws.com/ozan/anim/standard_anims/walk_fwd.fbx", lastFrame: 35};
Avatar.skeletonModelURL = "https://hifi-public.s3.amazonaws.com/marketplace/contents/dd03b8e3-52fb-4ab3-9ac9-3b17e00cd85d/98baa90b3b66803c5d7bd4537fca6993.fst"; //lovejoy
Avatar.displayName = "'Bot";
var millisecondsToWaitBeforeStarting = 10 * 1000; // To give the various servers a chance to start.
Agent.isAvatar = true;
function coord() { return (Math.random() * spread) - (spread / 2); } // randomly distribute a coordinate, zero ± spread/2.
Script.setTimeout(function () {
Avatar.position = Vec3.sum(origin, {x: Math.random() * spread, y: 0, z: Math.random() * spread});
Avatar.position = Vec3.sum(origin, {x: coord(), y: 0, z: coord()});
print("Starting at", JSON.stringify(Avatar.position));
Avatar.startAnimation(animationData.url, animationData.fps || 30, 1, true, false, animationData.firstFrame || 0, animationData.lastFrame);
}, millisecondsToWaitBeforeStarting);

View file

@ -37,6 +37,7 @@ var BUMPER_ON_VALUE = 0.5;
var DISTANCE_HOLDING_RADIUS_FACTOR = 5; // multiplied by distance between hand and object
var DISTANCE_HOLDING_ACTION_TIMEFRAME = 0.1; // how quickly objects move to their new position
var DISTANCE_HOLDING_ROTATION_EXAGGERATION_FACTOR = 2.0; // object rotates this much more than hand did
var NO_INTERSECT_COLOR = {
red: 10,
green: 10,
@ -86,6 +87,7 @@ var ZERO_VEC = {
y: 0,
z: 0
};
var NULL_ACTION_ID = "{00000000-0000-0000-000000000000}";
var MSEC_PER_SEC = 1000.0;
@ -95,7 +97,8 @@ var ACTION_TTL = 15; // seconds
var ACTION_TTL_REFRESH = 5;
var PICKS_PER_SECOND_PER_HAND = 5;
var MSECS_PER_SEC = 1000.0;
var GRABBABLE_PROPERTIES = ["position",
var GRABBABLE_PROPERTIES = [
"position",
"rotation",
"gravity",
"ignoreForCollisions",
@ -104,7 +107,6 @@ var GRABBABLE_PROPERTIES = ["position",
"name"
];
var GRABBABLE_DATA_KEY = "grabbableKey"; // shared with grab.js
var GRAB_USER_DATA_KEY = "grabKey"; // shared with grab.js
@ -113,8 +115,6 @@ var DEFAULT_GRABBABLE_DATA = {
invertSolidWhileHeld: false
};
var disabledHand = 'none';
// states for the state machine
var STATE_OFF = 0;
@ -307,7 +307,14 @@ function MyController(hand) {
position: closePoint,
linePoints: [ZERO_VEC, farPoint],
color: color,
lifetime: 0.1
lifetime: 0.1,
collisionsWillMove: false,
ignoreForCollisions: true,
userData: JSON.stringify({
grabbableKey: {
grabbable: false
}
})
});
}
@ -322,7 +329,14 @@ function MyController(hand) {
position: closePoint,
linePoints: [ZERO_VEC, farPoint],
color: color,
lifetime: LIFETIME
lifetime: LIFETIME,
collisionsWillMove: false,
ignoreForCollisions: true,
userData: JSON.stringify({
grabbableKey: {
grabbable: false
}
})
});
} else {
var age = Entities.getEntityProperties(this.pointer, "age").age;
@ -396,11 +410,6 @@ function MyController(hand) {
this.search = function() {
this.grabbedEntity = null;
// if this hand is the one that's disabled, we don't want to search for anything at all
if (this.hand === disabledHand) {
return;
}
if (this.state == STATE_SEARCHING ? this.triggerSmoothedReleased() : this.bumperReleased()) {
this.setState(STATE_RELEASE);
return;
@ -445,17 +454,7 @@ function MyController(hand) {
// the ray is intersecting something we can move.
var intersectionDistance = Vec3.distance(pickRay.origin, intersection.intersection);
//this code will disabled the beam for the opposite hand of the one that grabbed it if the entity says so
var grabbableData = getEntityCustomData(GRABBABLE_DATA_KEY, intersection.entityID, DEFAULT_GRABBABLE_DATA);
if (grabbableData["turnOffOppositeBeam"]) {
if (this.hand === RIGHT_HAND) {
disabledHand = LEFT_HAND;
} else {
disabledHand = RIGHT_HAND;
}
} else {
disabledHand = 'none';
}
if (intersection.properties.name == "Grab Debug Entity") {
continue;
@ -526,7 +525,14 @@ function MyController(hand) {
green: 255,
blue: 0
},
lifetime: 0.1
lifetime: 0.1,
collisionsWillMove: false,
ignoreForCollisions: true,
userData: JSON.stringify({
grabbableKey: {
grabbable: false
}
})
});
}
@ -540,8 +546,7 @@ function MyController(hand) {
if (typeof grabbableDataForCandidate.grabbable !== 'undefined' && !grabbableDataForCandidate.grabbable) {
continue;
}
var propsForCandidate =
Entities.getEntityProperties(nearbyEntities[i], GRABBABLE_PROPERTIES);
var propsForCandidate = Entities.getEntityProperties(nearbyEntities[i], GRABBABLE_PROPERTIES);
if (propsForCandidate.type == 'Unknown') {
continue;
@ -737,15 +742,8 @@ function MyController(hand) {
this.nearGrabbing = function() {
var now = Date.now();
var grabbableData = getEntityCustomData(GRABBABLE_DATA_KEY, this.grabbedEntity, DEFAULT_GRABBABLE_DATA);
var turnOffOtherHand = grabbableData["turnOffOtherHand"];
if (turnOffOtherHand) {
//don't activate the second hand grab because the script is handling the second hand logic
return;
}
if (this.state == STATE_NEAR_GRABBING && this.triggerSmoothedReleased()) {
this.setState(STATE_RELEASE);
Entities.callEntityMethod(this.grabbedEntity, "releaseGrab");
@ -1094,10 +1092,6 @@ function MyController(hand) {
this.release = function() {
if (this.hand !== disabledHand) {
//release the disabled hand when we let go with the main one
disabledHand = 'none';
}
this.lineOff();
if (this.grabbedEntity !== null) {
@ -1218,12 +1212,35 @@ mapping.from([Controller.Standard.LB]).peek().to(leftController.bumperPress);
Controller.enableMapping(MAPPING_NAME);
var handToDisable = 'none';
function update() {
rightController.update();
leftController.update();
if (handToDisable !== LEFT_HAND) {
leftController.update();
}
if (handToDisable !== RIGHT_HAND) {
rightController.update();
}
}
Messages.subscribe('Hifi-Hand-Disabler');
handleHandDisablerMessages = function(channel, message, sender) {
if (sender === MyAvatar.sessionUUID) {
handToDisable = message;
if (message === 'left') {
handToDisable = LEFT_HAND;
}
if (message === 'right') {
handToDisable = RIGHT_HAND;
}
}
}
Messages.messageReceived.connect(handleHandDisablerMessages);
function cleanup() {
rightController.cleanup();
leftController.cleanup();

View file

@ -25,8 +25,6 @@ var LAST_FRAME = 15.0; // What is the number of the last frame we want to us
var SMOOTH_FACTOR = 0.75;
var MAX_FRAMES = 30.0;
var LEFT_HAND_CLICK = Controller.findAction("LEFT_HAND_CLICK");
var RIGHT_HAND_CLICK = Controller.findAction("RIGHT_HAND_CLICK");
var CONTROLLER_DEAD_SPOT = 0.25;
@ -45,8 +43,8 @@ function normalizeControllerValue(val) {
}
Script.update.connect(function(deltaTime) {
var leftTrigger = normalizeControllerValue(Controller.getActionValue(LEFT_HAND_CLICK));
var rightTrigger = normalizeControllerValue(Controller.getActionValue(RIGHT_HAND_CLICK));
var leftTrigger = normalizeControllerValue(Controller.getValue(Controller.Standard.LT));
var rightTrigger = normalizeControllerValue(Controller.getValue(Controller.Standard.RT));
// Average last few trigger values together for a bit of smoothing
var smoothLeftTrigger = leftTrigger * (1.0 - SMOOTH_FACTOR) + lastLeftTrigger * SMOOTH_FACTOR;

View file

@ -84,7 +84,7 @@
overlay = null;
},
startRecording: function (entityID) {
startRecording: function () {
if (!isAvatarRecording) {
print("RECORDING STARTED");
Messages.sendMessage(CLIENTS_TO_MASTER_CHANNEL, PARTICIPATING_MESSAGE); //tell to master that I'm participating
@ -94,7 +94,7 @@
}
},
stopRecording: function (entityID) {
stopRecording: function () {
if (isAvatarRecording) {
print("RECORDING ENDED");
Recording.stopRecording();
@ -109,7 +109,7 @@
_this.stopRecording();
Messages.unsubscribe(MASTER_TO_CLIENTS_CHANNEL);
Messages.messageReceived.disconnect(receivingMessage);
if(overlay !== null){
if (overlay !== null) {
Overlays.deleteOverlay(overlay);
overlay = null;
}

View file

@ -29,11 +29,14 @@ var STOP_MESSAGE = "recordingEnded";
var PARTICIPATING_MESSAGE = "participatingToRecording";
var TIMEOUT = 20;
var toolBar = null;
var recordIcon;
var isRecording = false;
var performanceJSON = { "avatarClips" : [] };
var responsesExpected = 0;
var readyToPrintInfo = false;
var performanceFileURL = null;
var waitingForPerformanceFile = true;
var totalWaitingTime = 0;
var extension = "txt";
@ -71,9 +74,9 @@ function mousePressEvent(event) {
print("I'm the master. I want to start recording");
Messages.sendMessage(MASTER_TO_CLIENTS_CHANNEL, START_MESSAGE);
isRecording = true;
waitingForPerformanceFile = true;
} else {
print("I want to stop recording");
waitingForPerformanceFile = true;
Script.update.connect(update);
Messages.sendMessage(MASTER_TO_CLIENTS_CHANNEL, STOP_MESSAGE);
isRecording = false;
@ -108,29 +111,38 @@ function update(deltaTime) {
}
// clean things up after uploading the performance file to the asset server
waitingForPerformanceFile = false;
responsesExpected = 0;
totalWaitingTime = 0;
Script.update.disconnect(update);
performanceJSON = { "avatarClips" : [] };
}
} else if (readyToPrintInfo == true){
Window.prompt("Performance file and clips: ", getUtilityString());
responsesExpected = 0;
performanceJSON = { "avatarClips" : [] };
Script.update.disconnect(update);
}
}
function getUtilityString() {
var resultString = "JSON:\n" + performanceFileURL + "\n" + responsesExpected + " avatar clips:\n";
var avatarClips = performanceJSON.avatarClips;
avatarClips.forEach(function(param) {
resultString += param + "\n";
});
return resultString;
}
function uploadFinished(url){
//need to print somehow the url here this way the master can copy the url
print("PERFORMANCE FILE URL: " + url);
Assets.downloadData(url, function (data) {
printPerformanceJSON(JSON.parse(data));
});
}
function printPerformanceJSON(obj) {
print("some info:");
print("downloaded performance file from asset and examinating its content...");
var avatarClips = obj.avatarClips;
performanceFileURL = url;
print("PERFORMANCE FILE URL: " + performanceFileURL);
print("number of clips obtained:" + responsesExpected);
var avatarClips = performanceJSON.avatarClips;
avatarClips.forEach(function(param) {
print("clip url obtained: " + param);
});
readyToPrintInfo = true;
Script.update.connect(update);
}
function cleanup() {

View file

@ -361,8 +361,8 @@ function update() {
}
function updateControllerState() {
rightTriggerValue = Controller.getActionValue(rightHandClick);
leftTriggerValue = Controller.getActionValue(leftHandClick);
rightTriggerValue = Controller.getValue(Controller.Standard.RT);
leftTriggerValue = Controller.getValue(Controller.Standard.LT);
if (rightTriggerValue > TRIGGER_THRESHOLD && !swordHeld) {
grabSword("right")

View file

@ -159,7 +159,8 @@ function MyController(hand, triggerAction) {
}
this.updateControllerState = function() {
this.triggerValue = Controller.getActionValue(this.triggerAction);
this.triggerValue = Controller.getValue(this.triggerAction);
if (this.triggerValue > TRIGGER_ON_VALUE && this.prevTriggerValue <= TRIGGER_ON_VALUE) {
this.squeeze();
} else if (this.triggerValue < TRIGGER_ON_VALUE && this.prevTriggerValue >= TRIGGER_ON_VALUE) {
@ -256,8 +257,8 @@ function MyController(hand, triggerAction) {
}
}
var rightController = new MyController(RIGHT_HAND, Controller.findAction("RIGHT_HAND_CLICK"));
var leftController = new MyController(LEFT_HAND, Controller.findAction("LEFT_HAND_CLICK"));
var rightController = new MyController(RIGHT_HAND, Controller.Standard.RT);
var leftController = new MyController(LEFT_HAND, Controller.Standard.LT);
Controller.actionEvent.connect(function(action, state) {
if (state === 0) {

View file

@ -247,4 +247,4 @@ function cleanup() {
// Uncomment this line to delete the whiteboard and all associated entities on script close
// Script.scriptEnding.connect(cleanup);
//Script.scriptEnding.connect(cleanup);

View file

@ -12,10 +12,9 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
/*global MyAvatar, Entities, AnimationCache, SoundCache, Scene, Camera, Overlays, HMD, AvatarList, AvatarManager, Controller, UndoStack, Window, Account, GlobalServices, Script, ScriptDiscoveryService, LODManager, Menu, Vec3, Quat, AudioDevice, Paths, Clipboard, Settings, XMLHttpRequest, randFloat, randInt */
Script.include("https://hifi-public.s3.amazonaws.com/scripts/utilities.js");
Script.include("../../libraries/utils.js");
var scriptURL = Script.resolvePath('flashlight.js?123123');
var scriptURL = Script.resolvePath('flashlight.js');
var modelURL = "https://hifi-public.s3.amazonaws.com/models/props/flashlight.fbx";

View file

@ -183,11 +183,14 @@
},
changeLightWithTriggerPressure: function(flashLightHand) {
var handClickString = flashLightHand + "_HAND_CLICK";
var handClick = Controller.findAction(handClickString);
if (flashLightHand === 'LEFT') {
this.triggerValue = Controller.getValue(Controller.Standard.LT);
}
if (flashLightHand === 'RIGHT') {
this.triggerValue = Controller.getValue(Controller.Standard.RT);
this.triggerValue = Controller.getActionValue(handClick);
}
if (this.triggerValue < DISABLE_LIGHT_THRESHOLD && this.lightOn === true) {
this.turnLightOff();
@ -266,4 +269,4 @@
// entity scripts always need to return a newly constructed object of our type
return new Flashlight();
});
});

View file

@ -100,6 +100,13 @@ else()
add_executable(${TARGET_NAME} ${INTERFACE_SRCS} ${QM})
endif()
# disable /OPT:REF and /OPT:ICF for the Debug builds
# This will prevent the following linker warnings
# LINK : warning LNK4075: ignoring '/INCREMENTAL' due to '/OPT:ICF' specification
if (WIN32)
set_property(TARGET ${TARGET_NAME} APPEND_STRING PROPERTY LINK_FLAGS_DEBUG "/OPT:NOREF /OPT:NOICF")
endif()
# link required hifi libraries
link_hifi_libraries(shared octree environment gpu gl procedural model render
recording fbx networking model-networking entities avatars

View file

@ -256,6 +256,12 @@ Item {
visible: root.expanded
text: "LOD: " + root.lodStatus;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded
text: "Renderable avatars: " + root.avatarRenderableCount + " w/in " + root.avatarRenderDistance + "m";
}
}
}
}

View file

@ -454,14 +454,14 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
audioIO->setOrientationGetter([this]{ return getMyAvatar()->getOrientationForAudio(); });
audioIO->moveToThread(audioThread);
recording::Frame::registerFrameHandler(AudioConstants::AUDIO_FRAME_NAME, [=](recording::Frame::ConstPointer frame) {
recording::Frame::registerFrameHandler(AudioConstants::getAudioFrameName(), [=](recording::Frame::ConstPointer frame) {
audioIO->handleRecordedAudioInput(frame->data);
});
connect(audioIO.data(), &AudioClient::inputReceived, [](const QByteArray& audio){
static auto recorder = DependencyManager::get<recording::Recorder>();
if (recorder->isRecording()) {
static const recording::FrameType AUDIO_FRAME_TYPE = recording::Frame::registerFrameType(AudioConstants::AUDIO_FRAME_NAME);
static const recording::FrameType AUDIO_FRAME_TYPE = recording::Frame::registerFrameType(AudioConstants::getAudioFrameName());
recorder->recordFrame(AUDIO_FRAME_TYPE, audio);
}
});
@ -1077,8 +1077,10 @@ void Application::paintGL() {
uint64_t now = usecTimestampNow();
static uint64_t lastPaintBegin{ now };
uint64_t diff = now - lastPaintBegin;
float instantaneousFps = 0.0f;
if (diff != 0) {
_framesPerSecond.updateAverage((float)USECS_PER_SECOND / (float)diff);
instantaneousFps = (float)USECS_PER_SECOND / (float)diff;
_framesPerSecond.updateAverage(_lastInstantaneousFps);
}
lastPaintBegin = now;
@ -1109,6 +1111,29 @@ void Application::paintGL() {
_inPaint = true;
Finally clearFlagLambda([this] { _inPaint = false; });
// Some LOD-like controls need to know a smoothly varying "potential" frame rate that doesn't
// include time waiting for vsync, and which can report a number above target if we've got the headroom.
// For example, if we're shooting for 75fps and paintWait is 3.3333ms (= 25% * 13.33ms), our deducedNonVSyncFps
// would be 100fps. In principle, a paintWait of zero would have deducedNonVSyncFps=75.
// Here we make a guess for deducedNonVSyncFps = 1 / deducedNonVSyncPeriod.
//
// Time between previous paintGL call and this one, which can vary not only with vSync misses, but also with QT timing.
// We're using this as a proxy for the time between vsync and displayEnd, below. (Not exact, but tends to be the same over time.)
// This is not the same as update(deltaTime), because the latter attempts to throttle to 60hz and also clamps to 1/4 second.
const float actualPeriod = diff / (float)USECS_PER_SECOND; // same as 1/instantaneousFps but easier for compiler to optimize
// Note that _lastPaintWait (stored at end of last call) is for the same paint cycle.
float deducedNonVSyncPeriod = actualPeriod - _lastPaintWait + _marginForDeducedFramePeriod; // plus some non-zero time for machinery we can't measure
// We don't know how much time to allow for that, but if we went over the target period, we know it's at least the portion
// of paintWait up to the next vSync. This gives us enough of a penalty so that when actualPeriod crosses two cycles,
// the key part (and not an exaggerated part) of _lastPaintWait is accounted for.
const float targetPeriod = getTargetFramePeriod();
if (_lastPaintWait > EPSILON && actualPeriod > targetPeriod) {
// Don't use C++ remainder(). Its authors are mathematically insane.
deducedNonVSyncPeriod += fmod(actualPeriod, _lastPaintWait);
}
_lastDeducedNonVSyncFps = 1.0f / deducedNonVSyncPeriod;
_lastInstantaneousFps = instantaneousFps;
auto displayPlugin = getActiveDisplayPlugin();
displayPlugin->preRender();
_offscreenContext->makeCurrent();
@ -1355,6 +1380,7 @@ void Application::paintGL() {
// Ensure all operations from the previous context are complete before we try to read the fbo
glWaitSync(sync, 0, GL_TIMEOUT_IGNORED);
glDeleteSync(sync);
uint64_t displayStart = usecTimestampNow();
{
PROFILE_RANGE(__FUNCTION__ "/pluginDisplay");
@ -1367,6 +1393,10 @@ void Application::paintGL() {
PerformanceTimer perfTimer("bufferSwap");
displayPlugin->finishFrame();
}
uint64_t displayEnd = usecTimestampNow();
const float displayPeriodUsec = (float)(displayEnd - displayStart); // usecs
_lastPaintWait = displayPeriodUsec / (float)USECS_PER_SECOND;
}
{

View file

@ -159,6 +159,14 @@ public:
bool isForeground() const { return _isForeground; }
float getFps() const { return _fps; }
float const HMD_TARGET_FRAME_RATE = 75.0f;
float const DESKTOP_TARGET_FRAME_RATE = 60.0f;
float getTargetFrameRate() { return isHMDMode() ? HMD_TARGET_FRAME_RATE : DESKTOP_TARGET_FRAME_RATE; }
float getTargetFramePeriod() { return isHMDMode() ? 1.0f / HMD_TARGET_FRAME_RATE : 1.0f / DESKTOP_TARGET_FRAME_RATE; } // same as 1/getTargetFrameRate, but w/compile-time division
float getLastInstanteousFps() const { return _lastInstantaneousFps; }
float getLastPaintWait() const { return _lastPaintWait; };
float getLastDeducedNonVSyncFps() const { return _lastDeducedNonVSyncFps; }
void setMarginForDeducedFramePeriod(float newValue) { _marginForDeducedFramePeriod = newValue; }
float getFieldOfView() { return _fieldOfView.get(); }
void setFieldOfView(float fov);
@ -429,6 +437,10 @@ private:
float _fps;
QElapsedTimer _timerStart;
QElapsedTimer _lastTimeUpdated;
float _lastInstantaneousFps { 0.0f };
float _lastPaintWait { 0.0f };
float _lastDeducedNonVSyncFps { 0.0f };
float _marginForDeducedFramePeriod{ 0.002f }; // 2ms, adjustable
ShapeManager _shapeManager;
PhysicalEntitySimulation _entitySimulation;

View file

@ -183,9 +183,31 @@ void Avatar::simulate(float deltaTime) {
if (_shouldRenderBillboard) {
if (getLODDistance() < BILLBOARD_LOD_DISTANCE * (1.0f - BILLBOARD_HYSTERESIS_PROPORTION)) {
_shouldRenderBillboard = false;
qCDebug(interfaceapp) << "Unbillboarding" << (isMyAvatar() ? "myself" : getSessionUUID()) << "for LOD" << getLODDistance();
}
} else if (getLODDistance() > BILLBOARD_LOD_DISTANCE * (1.0f + BILLBOARD_HYSTERESIS_PROPORTION)) {
_shouldRenderBillboard = true;
qCDebug(interfaceapp) << "Billboarding" << (isMyAvatar() ? "myself" : getSessionUUID()) << "for LOD" << getLODDistance();
}
const bool isControllerLogging = DependencyManager::get<AvatarManager>()->getRenderDistanceControllerIsLogging();
float renderDistance = DependencyManager::get<AvatarManager>()->getRenderDistance();
const float SKIP_HYSTERESIS_PROPORTION = isControllerLogging ? 0.0f : BILLBOARD_HYSTERESIS_PROPORTION;
float distance = glm::distance(qApp->getCamera()->getPosition(), _position);
if (_shouldSkipRender) {
if (distance < renderDistance * (1.0f - SKIP_HYSTERESIS_PROPORTION)) {
_shouldSkipRender = false;
_skeletonModel.setVisibleInScene(true, qApp->getMain3DScene());
if (!isControllerLogging) { // Test for isMyAvatar is prophylactic. Never occurs in current code.
qCDebug(interfaceapp) << "Rerendering" << (isMyAvatar() ? "myself" : getSessionUUID()) << "for distance" << renderDistance;
}
}
} else if (distance > renderDistance * (1.0f + SKIP_HYSTERESIS_PROPORTION)) {
_shouldSkipRender = true;
_skeletonModel.setVisibleInScene(false, qApp->getMain3DScene());
if (!isControllerLogging) {
qCDebug(interfaceapp) << "Unrendering" << (isMyAvatar() ? "myself" : getSessionUUID()) << "for distance" << renderDistance;
}
}
// simple frustum check
@ -198,7 +220,7 @@ void Avatar::simulate(float deltaTime) {
getHand()->simulate(deltaTime, false);
}
if (!_shouldRenderBillboard && inViewFrustum) {
if (!_shouldRenderBillboard && !_shouldSkipRender && inViewFrustum) {
{
PerformanceTimer perfTimer("skeleton");
for (int i = 0; i < _jointData.size(); i++) {

View file

@ -140,6 +140,8 @@ public:
Q_INVOKABLE glm::vec3 getAngularVelocity() const { return _angularVelocity; }
Q_INVOKABLE glm::vec3 getAngularAcceleration() const { return _angularAcceleration; }
Q_INVOKABLE bool getShouldRender() const { return !_shouldSkipRender; }
/// Scales a world space position vector relative to the avatar position and scale
/// \param vector position to be scaled. Will store the result
void scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const;
@ -226,6 +228,7 @@ private:
bool _initialized;
NetworkTexturePointer _billboardTexture;
bool _shouldRenderBillboard;
bool _shouldSkipRender { false };
bool _isLookAtTarget;
void renderBillboard(RenderArgs* renderArgs);

View file

@ -90,6 +90,21 @@ void AvatarManager::init() {
_myAvatar->addToScene(_myAvatar, scene, pendingChanges);
}
scene->enqueuePendingChanges(pendingChanges);
const float target_fps = qApp->getTargetFrameRate();
_renderDistanceController.setMeasuredValueSetpoint(target_fps);
const float SMALLEST_REASONABLE_HORIZON = 5.0f; // meters
_renderDistanceController.setControlledValueHighLimit(1.0f / SMALLEST_REASONABLE_HORIZON);
_renderDistanceController.setControlledValueLowLimit(1.0f / (float) TREE_SCALE);
// Advice for tuning parameters:
// See PIDController.h. There's a section on tuning in the reference.
// Turn on logging with the following (or from js with AvatarList.setRenderDistanceControllerHistory("avatar render", 300))
//_renderDistanceController.setHistorySize("avatar render", target_fps * 4);
// Note that extra logging/hysteresis is turned off in Avatar.cpp when the above logging is on.
_renderDistanceController.setKP(0.0008f); // Usually about 0.6 of largest that doesn't oscillate when other parameters 0.
_renderDistanceController.setKI(0.0006f); // Big enough to bring us to target with the above KP.
_renderDistanceController.setKD(0.000001f); // A touch of kd increases the speed by which we get there.
}
void AvatarManager::updateMyAvatar(float deltaTime) {
@ -123,6 +138,17 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
PerformanceWarning warn(showWarnings, "Application::updateAvatars()");
PerformanceTimer perfTimer("otherAvatars");
_renderDistanceController.setMeasuredValueSetpoint(qApp->getTargetFrameRate()); // No problem updating in flight.
// The PID controller raises the controlled value when the measured value goes up.
// The measured value is frame rate. When the controlled value (1 / render cutoff distance)
// goes up, the render cutoff distance gets closer, the number of rendered avatars is less, and frame rate
// goes up.
const float deduced = qApp->getLastDeducedNonVSyncFps();
const float distance = 1.0f / _renderDistanceController.update(deduced, deltaTime);
_renderDistanceAverage.updateAverage(distance);
_renderDistance = _renderDistanceAverage.getAverage();
int renderableCount = 0;
// simulate avatars
auto hashCopy = getHashCopy();
@ -141,10 +167,14 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
} else {
avatar->startUpdate();
avatar->simulate(deltaTime);
if (avatar->getShouldRender()) {
renderableCount++;
}
avatar->endUpdate();
++avatarIterator;
}
}
_renderedAvatarCount = renderableCount;
// simulate avatar fades
simulateAvatarFades(deltaTime);

View file

@ -18,6 +18,8 @@
#include <AvatarHashMap.h>
#include <PhysicsEngine.h>
#include <PIDController.h>
#include <SimpleMovingAverage.h>
#include "Avatar.h"
#include "AvatarMotionState.h"
@ -43,6 +45,7 @@ public:
void clearOtherAvatars();
bool shouldShowReceiveStats() const { return _shouldShowReceiveStats; }
PIDController& getRenderDistanceController() { return _renderDistanceController; }
class LocalLight {
public:
@ -64,6 +67,17 @@ public:
void handleCollisionEvents(const CollisionEvents& collisionEvents);
void updateAvatarPhysicsShape(Avatar* avatar);
// Expose results and parameter-tuning operations to other systems, such as stats and javascript.
Q_INVOKABLE float getRenderDistance() { return _renderDistance; }
Q_INVOKABLE int getNumberInRenderRange() { return _renderedAvatarCount; }
Q_INVOKABLE bool getRenderDistanceControllerIsLogging() { return _renderDistanceController.getIsLogging(); }
Q_INVOKABLE void setRenderDistanceControllerHistory(QString label, int size) { return _renderDistanceController.setHistorySize(label, size); }
Q_INVOKABLE void setRenderDistanceKP(float newValue) { _renderDistanceController.setKP(newValue); }
Q_INVOKABLE void setRenderDistanceKI(float newValue) { _renderDistanceController.setKI(newValue); }
Q_INVOKABLE void setRenderDistanceKD(float newValue) { _renderDistanceController.setKD(newValue); }
Q_INVOKABLE void setRenderDistanceLowLimit(float newValue) { _renderDistanceController.setControlledValueLowLimit(newValue); }
Q_INVOKABLE void setRenderDistanceHighLimit(float newValue) { _renderDistanceController.setControlledValueHighLimit(newValue); }
public slots:
void setShouldShowReceiveStats(bool shouldShowReceiveStats) { _shouldShowReceiveStats = shouldShowReceiveStats; }
@ -90,6 +104,10 @@ private:
QVector<AvatarManager::LocalLight> _localLights;
bool _shouldShowReceiveStats = false;
float _renderDistance { (float) TREE_SCALE };
int _renderedAvatarCount { 0 };
PIDController _renderDistanceController { };
SimpleMovingAverage _renderDistanceAverage { 10 };
SetOfAvatarMotionStates _avatarMotionStates;
SetOfMotionStates _motionStatesToAdd;

View file

@ -196,5 +196,9 @@ QScriptValue WebWindowClass::constructor(QScriptContext* context, QScriptEngine*
}
void WebWindowClass::setTitle(const QString& title) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "setTitle", Qt::AutoConnection, Q_ARG(QString, title));
return;
}
_windowWidget->setWindowTitle(title);
}

View file

@ -115,6 +115,8 @@ void Stats::updateStats(bool force) {
auto avatarManager = DependencyManager::get<AvatarManager>();
// we need to take one avatar out so we don't include ourselves
STAT_UPDATE(avatarCount, avatarManager->size() - 1);
STAT_UPDATE(avatarRenderableCount, avatarManager->getNumberInRenderRange());
STAT_UPDATE(avatarRenderDistance, (int) round(avatarManager->getRenderDistance())); // deliberately truncating
STAT_UPDATE(serverCount, nodeList->size());
STAT_UPDATE(framerate, (int)qApp->getFps());
STAT_UPDATE(simrate, (int)qApp->getAverageSimsPerSecond());

View file

@ -36,6 +36,8 @@ class Stats : public QQuickItem {
STATS_PROPERTY(int, simrate, 0)
STATS_PROPERTY(int, avatarSimrate, 0)
STATS_PROPERTY(int, avatarCount, 0)
STATS_PROPERTY(int, avatarRenderableCount, 0)
STATS_PROPERTY(int, avatarRenderDistance, 0)
STATS_PROPERTY(int, packetInCount, 0)
STATS_PROPERTY(int, packetOutCount, 0)
STATS_PROPERTY(float, mbpsIn, 0)
@ -117,6 +119,8 @@ signals:
void simrateChanged();
void avatarSimrateChanged();
void avatarCountChanged();
void avatarRenderableCountChanged();
void avatarRenderDistanceChanged();
void packetInCountChanged();
void packetOutCountChanged();
void mbpsInChanged();

View file

@ -49,23 +49,6 @@ const AnimPose& AnimSkeleton::getAbsoluteBindPose(int jointIndex) const {
return _absoluteBindPoses[jointIndex];
}
AnimPose AnimSkeleton::getRootAbsoluteBindPoseByChildName(const QString& childName) const {
AnimPose pose = AnimPose::identity;
int jointIndex = nameToJointIndex(childName);
if (jointIndex >= 0) {
int numJoints = (int)(_absoluteBindPoses.size());
if (jointIndex < numJoints) {
int parentIndex = getParentIndex(jointIndex);
while (parentIndex != -1 && parentIndex < numJoints) {
jointIndex = parentIndex;
parentIndex = getParentIndex(jointIndex);
}
pose = _absoluteBindPoses[jointIndex];
}
}
return pose;
}
const AnimPose& AnimSkeleton::getRelativeBindPose(int jointIndex) const {
return _relativeBindPoses[jointIndex];
}

View file

@ -31,7 +31,6 @@ public:
// absolute pose, not relative to parent
const AnimPose& getAbsoluteBindPose(int jointIndex) const;
AnimPose getRootAbsoluteBindPoseByChildName(const QString& childName) const;
// relative to parent pose
const AnimPose& getRelativeBindPose(int jointIndex) const;

View file

@ -1306,10 +1306,10 @@ void Rig::updateFromHandParameters(const HandParameters& params, float dt) {
// TODO: figure out how to obtain the yFlip from where it is actually stored
glm::quat yFlipHACK = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
AnimPose rootBindPose = _animSkeleton->getRootAbsoluteBindPoseByChildName("LeftHand");
AnimPose hipsBindPose = _animSkeleton->getAbsoluteBindPose(_animSkeleton->nameToJointIndex("Hips"));
if (params.isLeftEnabled) {
_animVars.set("leftHandPosition", rootBindPose.trans + rootBindPose.rot * yFlipHACK * params.leftPosition);
_animVars.set("leftHandRotation", rootBindPose.rot * yFlipHACK * params.leftOrientation);
_animVars.set("leftHandPosition", hipsBindPose.trans + hipsBindPose.rot * yFlipHACK * params.leftPosition);
_animVars.set("leftHandRotation", hipsBindPose.rot * yFlipHACK * params.leftOrientation);
_animVars.set("leftHandType", (int)IKTarget::Type::RotationAndPosition);
} else {
_animVars.unset("leftHandPosition");
@ -1317,8 +1317,8 @@ void Rig::updateFromHandParameters(const HandParameters& params, float dt) {
_animVars.set("leftHandType", (int)IKTarget::Type::HipsRelativeRotationAndPosition);
}
if (params.isRightEnabled) {
_animVars.set("rightHandPosition", rootBindPose.trans + rootBindPose.rot * yFlipHACK * params.rightPosition);
_animVars.set("rightHandRotation", rootBindPose.rot * yFlipHACK * params.rightOrientation);
_animVars.set("rightHandPosition", hipsBindPose.trans + hipsBindPose.rot * yFlipHACK * params.rightPosition);
_animVars.set("rightHandRotation", hipsBindPose.rot * yFlipHACK * params.rightOrientation);
_animVars.set("rightHandType", (int)IKTarget::Type::RotationAndPosition);
} else {
_animVars.unset("rightHandPosition");

View file

@ -18,10 +18,10 @@
namespace AudioConstants {
const int SAMPLE_RATE = 24000;
typedef int16_t AudioSample;
static const char* AUDIO_FRAME_NAME = "com.highfidelity.recording.Audio";
inline const char* getAudioFrameName() { return "com.highfidelity.recording.Audio"; }
const int NETWORK_FRAME_BYTES_STEREO = 1024;
const int NETWORK_FRAME_SAMPLES_STEREO = NETWORK_FRAME_BYTES_STEREO / sizeof(AudioSample);

View file

@ -31,6 +31,7 @@ EntityItemPointer RenderablePolyLineEntityItem::factory(const EntityItemID& enti
RenderablePolyLineEntityItem::RenderablePolyLineEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties) :
PolyLineEntityItem(entityItemID, properties) {
_numVertices = 0;
_vertices = QVector<glm::vec3>(0.0f);
}
@ -114,13 +115,56 @@ void RenderablePolyLineEntityItem::updateGeometry() {
_numVertices += 2;
}
_pointsChanged = false;
_normalsChanged = false;
_strokeWidthsChanged = false;
}
void RenderablePolyLineEntityItem::updateVertices() {
// Calculate the minimum vector size out of normals, points, and stroke widths
int minVectorSize = _normals.size();
if (_points.size() < minVectorSize) {
minVectorSize = _points.size();
}
if (_strokeWidths.size() < minVectorSize) {
minVectorSize = _strokeWidths.size();
}
_vertices.clear();
glm::vec3 v1, v2, tangent, binormal, point;
int finalIndex = minVectorSize - 1;
for (int i = 0; i < finalIndex; i++) {
float width = _strokeWidths.at(i);
point = _points.at(i);
tangent = _points.at(i + 1) - point;
glm::vec3 normal = _normals.at(i);
binormal = glm::normalize(glm::cross(tangent, normal)) * width;
// Check to make sure binormal is not a NAN. If it is, don't add to vertices vector
if (binormal.x != binormal.x) {
continue;
}
v1 = point + binormal;
v2 = point - binormal;
_vertices << v1 << v2;
}
// For last point we can assume binormals are the same since it represents the last two vertices of quad
point = _points.at(finalIndex);
v1 = point + binormal;
v2 = point - binormal;
_vertices << v1 << v2;
}
void RenderablePolyLineEntityItem::render(RenderArgs* args) {
QWriteLocker lock(&_quadReadWriteLock);
if (_points.size() < 2 || _normals.size () < 2 || _vertices.size() < 2) {
if (_points.size() < 2 || _normals.size() < 2 || _strokeWidths.size() < 2) {
return;
}
@ -139,7 +183,8 @@ void RenderablePolyLineEntityItem::render(RenderArgs* args) {
Q_ASSERT(getType() == EntityTypes::PolyLine);
Q_ASSERT(args->_batch);
if (_pointsChanged) {
if (_pointsChanged || _strokeWidthsChanged || _normalsChanged) {
updateVertices();
updateGeometry();
}

View file

@ -40,8 +40,10 @@ public:
protected:
void updateGeometry();
void updateVertices();
gpu::BufferPointer _verticesBuffer;
unsigned int _numVertices;
QVector<glm::vec3> _vertices;
};

View file

@ -34,8 +34,9 @@ PolyLineEntityItem::PolyLineEntityItem(const EntityItemID& entityItemID, const E
EntityItem(entityItemID),
_lineWidth(DEFAULT_LINE_WIDTH),
_pointsChanged(true),
_normalsChanged(true),
_strokeWidthsChanged(true),
_points(QVector<glm::vec3>(0.0f)),
_vertices(QVector<glm::vec3>(0.0f)),
_normals(QVector<glm::vec3>(0.0f)),
_strokeWidths(QVector<float>(0.0f)),
_textures("")
@ -106,47 +107,13 @@ bool PolyLineEntityItem::appendPoint(const glm::vec3& point) {
bool PolyLineEntityItem::setStrokeWidths(const QVector<float>& strokeWidths) {
_strokeWidths = strokeWidths;
_strokeWidthsChanged = true;
return true;
}
bool PolyLineEntityItem::setNormals(const QVector<glm::vec3>& normals) {
_normals = normals;
if (_points.size() < 2 || _normals.size() < 2 || _strokeWidths.size() < 2) {
return false;
}
int minVectorSize = _normals.size();
if (_points.size() < minVectorSize) {
minVectorSize = _points.size();
}
if (_strokeWidths.size() < minVectorSize) {
minVectorSize = _strokeWidths.size();
}
_vertices.clear();
glm::vec3 v1, v2, tangent, binormal, point;
int finalIndex = minVectorSize -1;
for (int i = 0; i < finalIndex; i++) {
float width = _strokeWidths.at(i);
point = _points.at(i);
tangent = _points.at(i + 1) - point;
glm::vec3 normal = normals.at(i);
binormal = glm::normalize(glm::cross(tangent, normal)) * width;
//This checks to make sure binormal is not a NAN
assert(binormal.x == binormal.x);
v1 = point + binormal;
v2 = point - binormal;
_vertices << v1 << v2;
}
//for last point we can just assume binormals are same since it represents last two vertices of quad
point = _points.at(finalIndex);
v1 = point + binormal;
v2 = point - binormal;
_vertices << v1 << v2;
_normalsChanged = true;
return true;
}

View file

@ -93,8 +93,9 @@ class PolyLineEntityItem : public EntityItem {
rgbColor _color;
float _lineWidth;
bool _pointsChanged;
bool _normalsChanged;
bool _strokeWidthsChanged;
QVector<glm::vec3> _points;
QVector<glm::vec3> _vertices;
QVector<glm::vec3> _normals;
QVector<float> _strokeWidths;
QString _textures;

View file

@ -123,6 +123,31 @@ void ObjectMotionState::setMotionType(MotionType motionType) {
_motionType = motionType;
}
// Update the Continuous Collision Detection (CCD) configuration settings of our RigidBody so that
// CCD will be enabled automatically when its speed surpasses a certain threshold.
void ObjectMotionState::updateCCDConfiguration() {
if (_body) {
if (_shape) {
// If this object moves faster than its bounding radius * RADIUS_MOTION_THRESHOLD_MULTIPLIER,
// CCD will be enabled for this object.
const auto RADIUS_MOTION_THRESHOLD_MULTIPLIER = 0.5f;
btVector3 center;
btScalar radius;
_shape->getBoundingSphere(center, radius);
_body->setCcdMotionThreshold(radius * RADIUS_MOTION_THRESHOLD_MULTIPLIER);
// TODO: Ideally the swept sphere radius would be contained by the object. Using the bounding sphere
// radius works well for spherical objects, but may cause issues with other shapes. For arbitrary
// objects we may want to consider a different approach, such as grouping rigid bodies together.
_body->setCcdSweptSphereRadius(radius);
} else {
// Disable CCD
_body->setCcdMotionThreshold(0);
}
}
}
void ObjectMotionState::setRigidBody(btRigidBody* body) {
// give the body a (void*) back-pointer to this ObjectMotionState
if (_body != body) {
@ -133,6 +158,7 @@ void ObjectMotionState::setRigidBody(btRigidBody* body) {
if (_body) {
_body->setUserPointer(this);
}
updateCCDConfiguration();
}
}
@ -232,6 +258,8 @@ bool ObjectMotionState::handleHardAndEasyChanges(uint32_t& flags, PhysicsEngine*
if (_shape != newShape) {
_shape = newShape;
_body->setCollisionShape(_shape);
updateCCDConfiguration();
} else {
// huh... the shape didn't actually change, so we clear the DIRTY_SHAPE flag
flags &= ~Simulation::DIRTY_SHAPE;

View file

@ -151,6 +151,7 @@ protected:
virtual bool isReadyToComputeShape() = 0;
virtual btCollisionShape* computeNewShape() = 0;
void setMotionType(MotionType motionType);
void updateCCDConfiguration();
// clearObjectBackPointer() overrrides should call the base method, then actually clear the object back pointer.
virtual void clearObjectBackPointer() { _type = MOTIONSTATE_TYPE_INVALID; }

View file

@ -0,0 +1,78 @@
//
// PIDController.cpp
// libraries/shared/src
//
// Created by Howard Stearns 11/13/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <glm/glm.hpp>
#include <QDebug>
#include "SharedLogging.h"
#include "PIDController.h"
float PIDController::update(float measuredValue, float dt, bool resetAccumulator) {
const float error = getMeasuredValueSetpoint() - measuredValue; // Sign is the direction we want measuredValue to go. Positive means go higher.
const float p = getKP() * error; // term is Proportional to error
const float accumulatedError = glm::clamp(error * dt + (resetAccumulator ? 0 : _lastAccumulation), // integrate error
getAccumulatedValueLowLimit(), // but clamp by anti-windup limits
getAccumulatedValueHighLimit());
const float i = getKI() * accumulatedError; // term is Integral of error
const float changeInError = (error - _lastError) / dt; // positive value denotes increasing deficit
const float d = getKD() * changeInError; // term is Derivative of Error
const float computedValue = glm::clamp(p + i + d,
getControlledValueLowLimit(),
getControlledValueHighLimit());
if (getIsLogging()) { // if logging/reporting
updateHistory(measuredValue, dt, error, accumulatedError, changeInError, p, i, d, computedValue);
}
Q_ASSERT(!isnan(computedValue));
// update state for next time
_lastError = error;
_lastAccumulation = accumulatedError;
return computedValue;
}
// Just for logging/reporting. Used when picking/verifying the operational parameters.
void PIDController::updateHistory(float measuredValue, float dt, float error, float accumulatedError, float changeInError, float p, float i, float d, float computedValue) {
// Don't report each update(), as the I/O messes with the results a lot.
// Instead, add to history, and then dump out at once when full.
// Typically, the first few values reported in each batch should be ignored.
const int n = _history.size();
_history.resize(n + 1);
Row& next = _history[n];
next.measured = measuredValue;
next.dt = dt;
next.error = error;
next.accumulated = accumulatedError;
next.changed = changeInError;
next.p = p;
next.i = i;
next.d = d;
next.computed = computedValue;
if (_history.size() == _history.capacity()) { // report when buffer is full
reportHistory();
_history.resize(0);
}
}
void PIDController::reportHistory() {
qCDebug(shared) << _label << "measured dt FIXME || error accumulated changed || p i d controlled";
for (int i = 0; i < _history.size(); i++) {
Row& row = _history[i];
qCDebug(shared) << row.measured << row.dt <<
"||" << row.error << row.accumulated << row.changed <<
"||" << row.p << row.i << row.d << row.computed << 1.0f/row.computed;
}
qCDebug(shared) << "Limits: setpoint" << getMeasuredValueSetpoint() << "accumulate" << getAccumulatedValueLowLimit() << getAccumulatedValueHighLimit() <<
"controlled" << getControlledValueLowLimit() << getControlledValueHighLimit() <<
"kp/ki/kd" << getKP() << getKI() << getKD();
}

View file

@ -0,0 +1,89 @@
//
// PIDController.h
// libraries/shared/src
//
// Given a measure of system performance (such as frame rate, where bigger denotes more system work),
// compute a value that the system can take as input to control the amount of work done (such as an 1/LOD-distance,
// where bigger tends to give a higher measured system performance value). The controller's job is to compute a
// controlled value such that the measured value stays near the specified setpoint, even as system load changes.
// See http://www.wetmachine.com/inventing-the-future/mostly-reliable-performance-of-software-processes-by-dynamic-control-of-quality-parameters/
//
// Created by Howard Stearns 11/13/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_PIDController_h
#define hifi_PIDController_h
#include <limits>
#include <QVector>
// Although our coding standard shuns abbreviations, the control systems literature uniformly uses p, i, d, and dt rather than
// proportionalTerm, integralTerm, derivativeTerm, and deltaTime. Here we will be consistent with the literature.
class PIDController {
public:
// These are the main interfaces:
void setMeasuredValueSetpoint(float newValue) { _measuredValueSetpoint = newValue; }
float update(float measuredValue, float dt, bool resetAccumulator = false); // returns the new computedValue
void setHistorySize(QString label = QString(""), int size = 0) { _history.reserve(size); _history.resize(0); _label = label; } // non-empty does logging
bool getIsLogging() { return _history.capacity(); }
float getMeasuredValueSetpoint() const { return _measuredValueSetpoint; }
// In normal operation (where we can easily reach setpoint), controlledValue is typically pinned at max.
// Defaults to [0, max float], but for 1/LODdistance, it might be, say, [0, 0.2 or 0.1]
float getControlledValueLowLimit() const { return _controlledValueLowLimit; }
float getControlledValueHighLimit() const { return _controlledValueHighLimit; }
float getAntiWindupFactor() const { return _antiWindupFactor; } // default 10
float getKP() const { return _kp; } // proportional to error. See comment above class.
float getKI() const { return _ki; } // to time integral of error
float getKD() const { return _kd; } // to time derivative of error
float getAccumulatedValueHighLimit() const { return getAntiWindupFactor() * getMeasuredValueSetpoint(); }
float getAccumulatedValueLowLimit() const { return -getAntiWindupFactor() * getMeasuredValueSetpoint(); }
// There are several values that rarely change and might be thought of as "constants", but which do change during tuning, debugging, or other
// special-but-expected circumstances. Thus the instance vars are not const.
void setControlledValueLowLimit(float newValue) { _controlledValueLowLimit = newValue; }
void setControlledValueHighLimit(float newValue) { _controlledValueHighLimit = newValue; }
void setAntiWindupFactor(float newValue) { _antiWindupFactor = newValue; }
void setKP(float newValue) { _kp = newValue; }
void setKI(float newValue) { _ki = newValue; }
void setKD(float newValue) { _kd = newValue; }
class Row { // one row of accumulated history, used only for logging (if at all)
public:
float measured;
float dt;
float error;
float accumulated;
float changed;
float p;
float i;
float d;
float computed;
};
protected:
void reportHistory();
void updateHistory(float measured, float dt, float error, float accumulatedError, float changeInError, float p, float i, float d, float computedValue);
float _measuredValueSetpoint { 0.0f };
float _controlledValueLowLimit { 0.0f };
float _controlledValueHighLimit { std::numeric_limits<float>::max() };
float _antiWindupFactor { 10.0f };
float _kp { 0.0f };
float _ki { 0.0f };
float _kd { 0.0f };
// Controller operating state
float _lastError{ 0.0f };
float _lastAccumulation{ 0.0f };
// reporting
QVector<Row> _history{};
QString _label{ "" };
};
#endif // hifi_PIDController_h
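To show how the API above is typically wired together, here is a minimal C++ usage sketch. The setpoint, limits, and gains are illustrative assumptions only, loosely mirroring the values used in AvatarManager::init() earlier in this commit; they are not prescribed by the class.
#include "PIDController.h"
// Minimal sketch: drive a controlled value (e.g. 1 / render-cutoff-distance)
// toward a frame-rate setpoint. All numeric values here are assumptions for illustration.
static PIDController renderController;
void setupRenderController() {
    renderController.setMeasuredValueSetpoint(60.0f);             // assumed target frame rate
    renderController.setControlledValueLowLimit(1.0f / 16384.0f); // assumed far cutoff (1/distance)
    renderController.setControlledValueHighLimit(1.0f / 5.0f);    // assumed near cutoff (1/distance)
    renderController.setKP(0.0008f);
    renderController.setKI(0.0006f);
    renderController.setKD(0.000001f);
    // renderController.setHistorySize("render", 240);            // optional: non-empty label turns on logging
}
// Call once per frame with the measured frame rate and elapsed time in seconds.
float updateRenderCutoff(float measuredFps, float dt) {
    // update() returns the new controlled value, clamped to the limits set above;
    // the caller would invert it back into a cutoff distance (1 / controlledValue).
    return renderController.update(measuredFps, dt);
}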