Merge branch 'master' of https://github.com/highfidelity/hifi into baseball

Atlante45 2015-11-09 13:23:31 -08:00
commit 6a53021818
78 changed files with 2746 additions and 580 deletions

View file

@ -73,24 +73,20 @@ void ScriptableAvatar::update(float deltatime) {
const FBXAnimationFrame& ceilFrame = _animation->getFrames().at((int)glm::ceil(currentFrame) % frameCount);
const float frameFraction = glm::fract(currentFrame);
for (int i = 0; i < modelJoints.size(); i++) {
int mapping = animationJoints.indexOf(modelJoints[i]);
if (mapping != -1 && !_maskedJoints.contains(modelJoints[i])) {
JointData& data = _jointData[i];
for (int i = 0; i < animationJoints.size(); i++) {
const QString& name = animationJoints[i];
int mapping = getJointIndex(name);
if (mapping != -1 && !_maskedJoints.contains(name)) {
JointData& data = _jointData[mapping];
auto newRotation = safeMix(floorFrame.rotations.at(i), ceilFrame.rotations.at(i), frameFraction);
auto newTranslation = floorFrame.translations.at(i) * (1.0f - frameFraction) +
ceilFrame.translations.at(i) * frameFraction;
// We could probably do the translation interpolation in model space (rather than the parent space that each frame is in),
// but we don't do so for MyAvatar yet, so let's not be different here.
if (data.rotation != newRotation) {
data.rotation = newRotation;
data.rotationSet = true;
}
if (data.translation != newTranslation) {
data.translation = newTranslation;
data.translationSet = true;
}
}
}
}
} else {
_animation.clear();

View file

@ -18,12 +18,19 @@ macro(COPY_DLLS_BESIDE_WINDOWS_EXECUTABLE)
@ONLY
)
if (APPLE)
set(PLUGIN_PATH "interface.app/Contents/MacOS/plugins")
else()
set(PLUGIN_PATH "plugins")
endif()
# add a post-build command to copy DLLs beside the executable
add_custom_command(
TARGET ${TARGET_NAME}
POST_BUILD
COMMAND ${CMAKE_COMMAND}
-DBUNDLE_EXECUTABLE=$<TARGET_FILE:${TARGET_NAME}>
-DBUNDLE_PLUGIN_DIR=$<TARGET_FILE_DIR:${TARGET_NAME}>/${PLUGIN_PATH}
-P ${CMAKE_CURRENT_BINARY_DIR}/FixupBundlePostBuild.cmake
)

View file

@ -41,4 +41,16 @@ function(copy_resolved_item_into_bundle resolved_item resolved_embedded_item)
endfunction()
message(STATUS "FIXUP_LIBS for fixup_bundle called for bundle ${BUNDLE_EXECUTABLE} are @FIXUP_LIBS@")
fixup_bundle("${BUNDLE_EXECUTABLE}" "" "@FIXUP_LIBS@")
message(STATUS "Scanning for plugins from ${BUNDLE_PLUGIN_DIR}")
if (APPLE)
set(PLUGIN_EXTENSION "dylib")
elseif (WIN32)
set(PLUGIN_EXTENSION "dll")
else()
set(PLUGIN_EXTENSION "so")
endif()
file(GLOB RUNTIME_PLUGINS "${BUNDLE_PLUGIN_DIR}/*.${PLUGIN_EXTENSION}")
fixup_bundle("${BUNDLE_EXECUTABLE}" "${RUNTIME_PLUGINS}" "@FIXUP_LIBS@")

View file

@ -0,0 +1,29 @@
"use strict";
/*jslint vars: true, plusplus: true*/
/*global Agent, Avatar, Script, Entities, Vec3, print*/
//
// animatedAvatar.js
// examples/acScripts
//
// Created by Howard Stearns 11/6/15
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// An assignment client script that animates one avatar at a random location within 'spread' meters of 'origin'.
// In Domain Server Settings, go to scripts and give the URL of this script. Press '+', and then 'Save and restart'.
var origin = {x: 500, y: 502, z: 500};
var spread = 10; // meters
var animationData = {url: "https://hifi-public.s3.amazonaws.com/ozan/anim/standard_anims/walk_fwd.fbx", lastFrame: 35};
Avatar.skeletonModelURL = "https://hifi-public.s3.amazonaws.com/marketplace/contents/dd03b8e3-52fb-4ab3-9ac9-3b17e00cd85d/98baa90b3b66803c5d7bd4537fca6993.fst"; //lovejoy
Avatar.displayName = "'Bot";
var millisecondsToWaitBeforeStarting = 10 * 1000; // To give the various servers a chance to start.
Agent.isAvatar = true;
Script.setTimeout(function () {
Avatar.position = Vec3.sum(origin, {x: Math.random() * spread, y: 0, z: Math.random() * spread});
print("Starting at", JSON.stringify(Avatar.position));
Avatar.startAnimation(animationData.url, animationData.fps || 30, 1, true, false, animationData.firstFrame || 0, animationData.lastFrame);
}, millisecondsToWaitBeforeStarting);
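For reference, the Avatar.startAnimation() call above already falls back to fps 30 and firstFrame 0 when those fields are missing from animationData. A hedged sketch of a fully spelled-out variant (the explicit values are illustrative, not part of the original script):
var animationData = {
    url: "https://hifi-public.s3.amazonaws.com/ozan/anim/standard_anims/walk_fwd.fbx",
    fps: 30,        // assumed default; the call above uses animationData.fps || 30
    firstFrame: 0,  // assumed default; the call above uses animationData.firstFrame || 0
    lastFrame: 35   // same end frame as the original script
};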

View file

@ -0,0 +1,188 @@
(function() {
this.defaultRange = 5;
this.acceleration = {
x: 0,
y: 0,
z: 0
};
this.onColor = {
red: 77,
green: 11,
blue: 111
};
this.offColor = {
red: 200,
green: 0,
blue: 0
};
var self = this;
//Default forward direction of mover object
this.forward = {
x: 0,
y: 0,
z: -1
};
this.isMoving = false;
this.velocity = {
x: 0,
y: 0,
z: 0
};
this.defaultThrust = 500;
this.maxRotMixVal = 0.01;
this.minRotMixVal = this.maxRotMixVal * 0.5;
this.minThrustPercentage = 0.2;
this.userData = {};
this.getUserData = function() {
if (this.properties.userData) {
this.userData = JSON.parse(this.properties.userData);
}
}
this.updateUserData = function() {
Entities.editEntity(this.entityId, {
userData: JSON.stringify(this.userData)
});
}
this.toggleMover = function() {
if (!this.userData.active) {
this.activate();
} else if (this.userData.active) {
this.deactivate();
}
}
this.clickReleaseOnEntity = function(entityId, mouseEvent) {
this.entityId = entityId
if (mouseEvent.isLeftButton) {
this.toggleMover();
}
}
this.activate = function() {
//activate a light at the mover's position
this.properties = Entities.getEntityProperties(this.entityId);
this.getUserData();
this.userData.active = true;
this.initUserData();
var lightPos = this.properties.position;
lightPos.y += .1;
this.light = Entities.addEntity({
type: "Light",
position: lightPos,
isSpotlight: false,
dimensions: {
x: 2,
y: 2,
z: 2
},
color: this.onColor,
intensity: 10
// rotation: {x : 0, y: Math.PI/2, z: 0}
});
this.field = Overlays.addOverlay("sphere", {
position: this.properties.position,
size: this.userData.range,
solid: false,
color: {
red: 250,
green: 10,
blue: 10
},
})
//change color
Entities.editEntity(this.entityId, {
color: this.onColor,
});
}
this.initUserData = function() {
this.userData.range = this.userData.range || this.defaultRange;
this.userData.thrust = this.userData.thrust || this.defaultThrust;
this.updateUserData();
}
this.updateOverlays = function() {
if (this.field) {
Overlays.editOverlay(this.field, {
size: this.userData.range
});
}
}
this.deactivate = function() {
this.userData.active = false;
this.updateUserData();
Entities.editEntity(this.entityId, {
color: this.offColor
});
this.cleanUp();
}
this.scriptEnding = function() {
this.cleanUp();
}
this.update = function(deltaTime) {
self.properties = Entities.getEntityProperties(self.entityId);
self.getUserData();
self.updateOverlays();
if (!self.userData.active) {
return;
}
self.distance = Vec3.distance(MyAvatar.position, self.properties.position);
if (self.distance < self.userData.range) {
self.rotationMixVal = map(self.distance, 0, self.userData.range, self.maxRotMixVal, self.minRotMixVal);
//We want to extract yaw from the rotated object so avatars do not pitch or roll, as they would be stuck that way.
self.sanitizedRotation = Quat.fromPitchYawRollDegrees(0, Quat.safeEulerAngles(self.properties.rotation).y, 0);
self.newOrientation = Quat.mix(MyAvatar.orientation, self.sanitizedRotation, self.rotationMixVal);
MyAvatar.orientation = self.newOrientation;
self.rotatedDir = {
x: self.forward.x,
y: self.forward.y,
z: self.forward.z
};
self.rotatedDir = Vec3.multiplyQbyV(self.properties.rotation, self.rotatedDir);
self.thrust = map(self.distance, 0, self.userData.range, self.userData.thrust, self.userData.thrust * self.minThrustPercentage);
self.direction = Vec3.normalize(self.rotatedDir);
self.velocity = Vec3.multiply(self.direction, self.thrust);
MyAvatar.addThrust(Vec3.multiply(self.velocity, deltaTime));
}
}
this.preload = function(entityId) {
this.entityId = entityId;
}
this.unload = function() {
Script.update.disconnect(this.update);
this.cleanUp();
}
this.cleanUp = function() {
Entities.deleteEntity(this.light);
Overlays.deleteOverlay(this.field);
}
function map(value, min1, max1, min2, max2) {
return min2 + (max2 - min2) * ((value - min1) / (max1 - min1));
}
Script.scriptEnding.connect(this.scriptEnding);
Script.update.connect(this.update);
});
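As a quick sanity check on the map() helper above (a plain linear remap), here is a hedged, self-contained example using the script's defaults of range 5 and thrust 500; the arithmetic is worked out here, not taken from the original file:
function map(value, min1, max1, min2, max2) {
    // same linear remap as in the entity script above
    return min2 + (max2 - min2) * ((value - min1) / (max1 - min1));
}
// Halfway into a 5 m range, thrust falls halfway from 500 down to 500 * 0.2 = 100:
print(map(2.5, 0, 5, 500, 100)); // 300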

View file

@ -0,0 +1,18 @@
var modelURL = "https://s3.amazonaws.com/hifi-public/eric/models/arrow.fbx";
var scriptURL = Script.resolvePath('avatarMover.js');
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(Camera.getOrientation())));
var avatarMover = Entities.addEntity({
type: "Model",
modelURL: modelURL,
position: center,
userData: JSON.stringify({range: 5}),
script: scriptURL
});
function cleanup() {
Entities.deleteEntity(avatarMover);
}
Script.scriptEnding.connect(cleanup);

View file

@ -0,0 +1,10 @@
var MAPPING_NAME = "com.highfidelity.rightClickExample";
var mapping = Controller.newMapping(MAPPING_NAME);
mapping.from(Controller.Hardware.Keyboard.RightMouseClicked).to(function (value) {
print("Keyboard.RightMouseClicked");
});
Controller.enableMapping(MAPPING_NAME);
Script.scriptEnding.connect(function () {
Controller.disableMapping(MAPPING_NAME);
});

View file

@ -0,0 +1,222 @@
// Copyright (c) 2014 Taylor Hakes
// Copyright (c) 2014 Forbes Lindesay
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
function promiseMaker() {
// Use polyfill for setImmediate for performance gains
var asap = (typeof setImmediate === 'function' && setImmediate) ||
function(fn) {
Script.setTimeout(fn, 1);
};
// Polyfill for Function.prototype.bind
function bind(fn, thisArg) {
return function() {
fn.apply(thisArg, arguments);
}
}
var isArray = Array.isArray || function(value) {
return Object.prototype.toString.call(value) === "[object Array]"
};
function Promise(fn) {
if (typeof this !== 'object') throw new TypeError('Promises must be constructed via new');
if (typeof fn !== 'function') throw new TypeError('not a function');
this._state = null;
this._value = null;
this._deferreds = []
doResolve(fn, bind(resolve, this), bind(reject, this))
}
function handle(deferred) {
var me = this;
if (this._state === null) {
this._deferreds.push(deferred);
return
}
asap(function() {
var cb = me._state ? deferred.onFulfilled : deferred.onRejected
if (cb === null) {
(me._state ? deferred.resolve : deferred.reject)(me._value);
return;
}
var ret;
try {
ret = cb(me._value);
} catch (e) {
deferred.reject(e);
return;
}
deferred.resolve(ret);
})
}
function resolve(newValue) {
try { //Promise Resolution Procedure: https://github.com/promises-aplus/promises-spec#the-promise-resolution-procedure
if (newValue === this) throw new TypeError('A promise cannot be resolved with itself.');
if (newValue && (typeof newValue === 'object' || typeof newValue === 'function')) {
var then = newValue.then;
if (typeof then === 'function') {
doResolve(bind(then, newValue), bind(resolve, this), bind(reject, this));
return;
}
}
this._state = true;
this._value = newValue;
finale.call(this);
} catch (e) {
reject.call(this, e);
}
}
function reject(newValue) {
this._state = false;
this._value = newValue;
finale.call(this);
}
function finale() {
for (var i = 0, len = this._deferreds.length; i < len; i++) {
handle.call(this, this._deferreds[i]);
}
this._deferreds = null;
}
function Handler(onFulfilled, onRejected, resolve, reject) {
this.onFulfilled = typeof onFulfilled === 'function' ? onFulfilled : null;
this.onRejected = typeof onRejected === 'function' ? onRejected : null;
this.resolve = resolve;
this.reject = reject;
}
/**
* Take a potentially misbehaving resolver function and make sure
* onFulfilled and onRejected are only called once.
*
* Makes no guarantees about asynchrony.
*/
function doResolve(fn, onFulfilled, onRejected) {
var done = false;
try {
fn(function(value) {
if (done) return;
done = true;
onFulfilled(value);
}, function(reason) {
if (done) return;
done = true;
onRejected(reason);
})
} catch (ex) {
if (done) return;
done = true;
onRejected(ex);
}
}
Promise.prototype['catch'] = function(onRejected) {
return this.then(null, onRejected);
};
Promise.prototype.then = function(onFulfilled, onRejected) {
var me = this;
return new Promise(function(resolve, reject) {
handle.call(me, new Handler(onFulfilled, onRejected, resolve, reject));
})
};
Promise.all = function() {
var args = Array.prototype.slice.call(arguments.length === 1 && isArray(arguments[0]) ? arguments[0] : arguments);
return new Promise(function(resolve, reject) {
if (args.length === 0) return resolve([]);
var remaining = args.length;
function res(i, val) {
try {
if (val && (typeof val === 'object' || typeof val === 'function')) {
var then = val.then;
if (typeof then === 'function') {
then.call(val, function(val) {
res(i, val)
}, reject);
return;
}
}
args[i] = val;
if (--remaining === 0) {
resolve(args);
}
} catch (ex) {
reject(ex);
}
}
for (var i = 0; i < args.length; i++) {
res(i, args[i]);
}
});
};
Promise.resolve = function(value) {
if (value && typeof value === 'object' && value.constructor === Promise) {
return value;
}
return new Promise(function(resolve) {
resolve(value);
});
};
Promise.reject = function(value) {
return new Promise(function(resolve, reject) {
reject(value);
});
};
Promise.race = function(values) {
return new Promise(function(resolve, reject) {
for (var i = 0, len = values.length; i < len; i++) {
values[i].then(resolve, reject);
}
});
};
/**
* Set the immediate function to execute callbacks
* @param fn {function} Function to execute
* @private
*/
Promise._setImmediateFn = function _setImmediateFn(fn) {
asap = fn;
};
return Promise
}
loadPromise = function() {
return promiseMaker();
}

View file

@ -0,0 +1,18 @@
Script.include('promise.js');
var Promise = loadPromise();
var prom = new Promise(function(resolve, reject) {
print('making a promise')
// do a thing, possibly async, then…
var thing = true;
if (thing) {
resolve("Stuff worked!");
} else {
print('ERROR')
reject(new Error("It broke"));
}
});
// Do something when async done
prom.then(function(result) {
print('result ' + result);
});
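The polyfill also provides catch() (sugar for then(null, onRejected)) and Promise.all(); a hedged, stand-alone sketch along the same lines, using the same loadPromise() helper (the error message is illustrative):
Script.include('promise.js');
var Promise = loadPromise();
new Promise(function(resolve, reject) {
    reject(new Error("It broke"));
}).catch(function(err) {
    print('caught: ' + err.message);
});
Promise.all([Promise.resolve(1), Promise.resolve(2)]).then(function(values) {
    print('all resolved: ' + JSON.stringify(values)); // [1,2]
});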

View file

@ -1,39 +1,36 @@
if (typeof String.prototype.fileName !== "function") {
String.prototype.fileName = function () {
String.prototype.fileName = function() {
return this.replace(/^(.*[\/\\])*/, "");
};
}
if (typeof String.prototype.fileBase !== "function") {
String.prototype.fileBase = function () {
String.prototype.fileBase = function() {
var filename = this.fileName();
return filename.slice(0, filename.indexOf("."));
};
}
if (typeof String.prototype.fileType !== "function") {
String.prototype.fileType = function () {
String.prototype.fileType = function() {
return this.slice(this.lastIndexOf(".") + 1);
};
}
if (typeof String.prototype.path !== "function") {
String.prototype.path = function () {
String.prototype.path = function() {
return this.replace(/[\\\/][^\\\/]*$/, "");
};
}
if (typeof String.prototype.regExpEscape !== "function") {
String.prototype.regExpEscape = function () {
String.prototype.regExpEscape = function() {
return this.replace(/([$\^.+*?|\\\/{}()\[\]])/g, '\\$1');
};
}
if (typeof String.prototype.toArrayBuffer !== "function") {
String.prototype.toArrayBuffer = function () {
String.prototype.toArrayBuffer = function() {
var length,
buffer,
view,
@ -64,3 +61,416 @@ if (typeof String.prototype.toArrayBuffer !== "function") {
return buffer;
};
}
// Copyright Mathias Bynens <https://mathiasbynens.be/>
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
/*! https://mths.be/includes v1.0.0 by @mathias */
if (!String.prototype.includes) {
(function() {
'use strict'; // needed to support `apply`/`call` with `undefined`/`null`
var toString = {}.toString;
var defineProperty = (function() {
// IE 8 only supports `Object.defineProperty` on DOM elements
try {
var object = {};
var $defineProperty = Object.defineProperty;
var result = $defineProperty(object, object, object) && $defineProperty;
} catch (error) {}
return result;
}());
var indexOf = ''.indexOf;
var includes = function(search) {
if (this == null) {
throw TypeError();
}
var string = String(this);
if (search && toString.call(search) == '[object RegExp]') {
throw TypeError();
}
var stringLength = string.length;
var searchString = String(search);
var searchLength = searchString.length;
var position = arguments.length > 1 ? arguments[1] : undefined;
// `ToInteger`
var pos = position ? Number(position) : 0;
if (pos != pos) { // better `isNaN`
pos = 0;
}
var start = Math.min(Math.max(pos, 0), stringLength);
// Avoid the `indexOf` call if no match is possible
if (searchLength + start > stringLength) {
return false;
}
return indexOf.call(string, searchString, pos) != -1;
};
if (defineProperty) {
defineProperty(String.prototype, 'includes', {
'value': includes,
'configurable': true,
'writable': true
});
} else {
String.prototype.includes = includes;
}
}());
}
/*! https://mths.be/startswith v0.2.0 by @mathias */
if (!String.prototype.startsWith) {
(function() {
'use strict'; // needed to support `apply`/`call` with `undefined`/`null`
var defineProperty = (function() {
// IE 8 only supports `Object.defineProperty` on DOM elements
try {
var object = {};
var $defineProperty = Object.defineProperty;
var result = $defineProperty(object, object, object) && $defineProperty;
} catch (error) {}
return result;
}());
var toString = {}.toString;
var startsWith = function(search) {
if (this == null) {
throw TypeError();
}
var string = String(this);
if (search && toString.call(search) == '[object RegExp]') {
throw TypeError();
}
var stringLength = string.length;
var searchString = String(search);
var searchLength = searchString.length;
var position = arguments.length > 1 ? arguments[1] : undefined;
// `ToInteger`
var pos = position ? Number(position) : 0;
if (pos != pos) { // better `isNaN`
pos = 0;
}
var start = Math.min(Math.max(pos, 0), stringLength);
// Avoid the `indexOf` call if no match is possible
if (searchLength + start > stringLength) {
return false;
}
var index = -1;
while (++index < searchLength) {
if (string.charCodeAt(start + index) != searchString.charCodeAt(index)) {
return false;
}
}
return true;
};
if (defineProperty) {
defineProperty(String.prototype, 'startsWith', {
'value': startsWith,
'configurable': true,
'writable': true
});
} else {
String.prototype.startsWith = startsWith;
}
}());
}
if (!String.prototype.endsWith) {
(function() {
'use strict'; // needed to support `apply`/`call` with `undefined`/`null`
var defineProperty = (function() {
// IE 8 only supports `Object.defineProperty` on DOM elements
try {
var object = {};
var $defineProperty = Object.defineProperty;
var result = $defineProperty(object, object, object) && $defineProperty;
} catch (error) {}
return result;
}());
var toString = {}.toString;
var endsWith = function(search) {
if (this == null) {
throw TypeError();
}
var string = String(this);
if (search && toString.call(search) == '[object RegExp]') {
throw TypeError();
}
var stringLength = string.length;
var searchString = String(search);
var searchLength = searchString.length;
var pos = stringLength;
if (arguments.length > 1) {
var position = arguments[1];
if (position !== undefined) {
// `ToInteger`
pos = position ? Number(position) : 0;
if (pos != pos) { // better `isNaN`
pos = 0;
}
}
}
var end = Math.min(Math.max(pos, 0), stringLength);
var start = end - searchLength;
if (start < 0) {
return false;
}
var index = -1;
while (++index < searchLength) {
if (string.charCodeAt(start + index) != searchString.charCodeAt(index)) {
return false;
}
}
return true;
};
if (defineProperty) {
defineProperty(String.prototype, 'endsWith', {
'value': endsWith,
'configurable': true,
'writable': true
});
} else {
String.prototype.endsWith = endsWith;
}
}());
}
/*! https://mths.be/repeat v0.2.0 by @mathias */
if (!String.prototype.repeat) {
(function() {
'use strict'; // needed to support `apply`/`call` with `undefined`/`null`
var defineProperty = (function() {
// IE 8 only supports `Object.defineProperty` on DOM elements
try {
var object = {};
var $defineProperty = Object.defineProperty;
var result = $defineProperty(object, object, object) && $defineProperty;
} catch (error) {}
return result;
}());
var repeat = function(count) {
if (this == null) {
throw TypeError();
}
var string = String(this);
// `ToInteger`
var n = count ? Number(count) : 0;
if (n != n) { // better `isNaN`
n = 0;
}
// Account for out-of-bounds indices
if (n < 0 || n == Infinity) {
throw RangeError();
}
var result = '';
while (n) {
if (n % 2 == 1) {
result += string;
}
if (n > 1) {
string += string;
}
n >>= 1;
}
return result;
};
if (defineProperty) {
defineProperty(String.prototype, 'repeat', {
'value': repeat,
'configurable': true,
'writable': true
});
} else {
String.prototype.repeat = repeat;
}
}());
}
if (!String.prototype.at) {
(function() {
'use strict'; // needed to support `apply`/`call` with `undefined`/`null`
var defineProperty = (function() {
// IE 8 only supports `Object.defineProperty` on DOM elements.
try {
var object = {};
var $defineProperty = Object.defineProperty;
var result = $defineProperty(object, object, object) && $defineProperty;
} catch (exception) {}
return result;
}());
var at = function(position) {
if (this == null) {
throw TypeError();
}
var string = String(this);
var size = string.length;
// `ToInteger`
var index = position ? Number(position) : 0;
if (index != index) { // better `isNaN`
index = 0;
}
// Account for out-of-bounds indices
// The odd lower bound is because the ToInteger operation is
// going to round `n` to `0` for `-1 < n <= 0`.
if (index <= -1 || index >= size) {
return '';
}
// Second half of `ToInteger`
index = index | 0;
// Get the first code unit and code unit value
var cuFirst = string.charCodeAt(index);
var cuSecond;
var nextIndex = index + 1;
var len = 1;
if ( // Check if it's the start of a surrogate pair.
cuFirst >= 0xD800 && cuFirst <= 0xDBFF && // high surrogate
size > nextIndex // there is a next code unit
) {
cuSecond = string.charCodeAt(nextIndex);
if (cuSecond >= 0xDC00 && cuSecond <= 0xDFFF) { // low surrogate
len = 2;
}
}
return string.slice(index, index + len);
};
if (defineProperty) {
defineProperty(String.prototype, 'at', {
'value': at,
'configurable': true,
'writable': true
});
} else {
String.prototype.at = at;
}
}());
}
/*! https://mths.be/codepointat v0.2.0 by @mathias */
if (!String.prototype.codePointAt) {
(function() {
'use strict'; // needed to support `apply`/`call` with `undefined`/`null`
var defineProperty = (function() {
// IE 8 only supports `Object.defineProperty` on DOM elements
try {
var object = {};
var $defineProperty = Object.defineProperty;
var result = $defineProperty(object, object, object) && $defineProperty;
} catch (error) {}
return result;
}());
var codePointAt = function(position) {
if (this == null) {
throw TypeError();
}
var string = String(this);
var size = string.length;
// `ToInteger`
var index = position ? Number(position) : 0;
if (index != index) { // better `isNaN`
index = 0;
}
// Account for out-of-bounds indices:
if (index < 0 || index >= size) {
return undefined;
}
// Get the first code unit
var first = string.charCodeAt(index);
var second;
if ( // check if it's the start of a surrogate pair
first >= 0xD800 && first <= 0xDBFF && // high surrogate
size > index + 1 // there is a next code unit
) {
second = string.charCodeAt(index + 1);
if (second >= 0xDC00 && second <= 0xDFFF) { // low surrogate
// https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
return (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000;
}
}
return first;
};
if (defineProperty) {
defineProperty(String.prototype, 'codePointAt', {
'value': codePointAt,
'configurable': true,
'writable': true
});
} else {
String.prototype.codePointAt = codePointAt;
}
}());
}
/*! https://mths.be/fromcodepoint v0.2.1 by @mathias */
if (!String.fromCodePoint) {
(function() {
var defineProperty = (function() {
// IE 8 only supports `Object.defineProperty` on DOM elements
try {
var object = {};
var $defineProperty = Object.defineProperty;
var result = $defineProperty(object, object, object) && $defineProperty;
} catch (error) {}
return result;
}());
var stringFromCharCode = String.fromCharCode;
var floor = Math.floor;
var fromCodePoint = function(_) {
var MAX_SIZE = 0x4000;
var codeUnits = [];
var highSurrogate;
var lowSurrogate;
var index = -1;
var length = arguments.length;
if (!length) {
return '';
}
var result = '';
while (++index < length) {
var codePoint = Number(arguments[index]);
if (!isFinite(codePoint) || // `NaN`, `+Infinity`, or `-Infinity`
codePoint < 0 || // not a valid Unicode code point
codePoint > 0x10FFFF || // not a valid Unicode code point
floor(codePoint) != codePoint // not an integer
) {
throw RangeError('Invalid code point: ' + codePoint);
}
if (codePoint <= 0xFFFF) { // BMP code point
codeUnits.push(codePoint);
} else { // Astral code point; split in surrogate halves
// https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
codePoint -= 0x10000;
highSurrogate = (codePoint >> 10) + 0xD800;
lowSurrogate = (codePoint % 0x400) + 0xDC00;
codeUnits.push(highSurrogate, lowSurrogate);
}
if (index + 1 == length || codeUnits.length > MAX_SIZE) {
result += stringFromCharCode.apply(null, codeUnits);
codeUnits.length = 0;
}
}
return result;
};
if (defineProperty) {
defineProperty(String, 'fromCodePoint', {
'value': fromCodePoint,
'configurable': true,
'writable': true
});
} else {
String.fromCodePoint = fromCodePoint;
}
}());
}
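A hedged usage sketch of a few of the helpers and polyfills defined above, assuming this file has been pulled in via Script.include(); the results in the comments follow the standard ECMAScript semantics these shims implement:
var path = "/foo/bar/baz.js";
print(path.fileName());                // "baz.js"
print(path.fileBase());                // "baz"
print(path.fileType());                // "js"
print(path.path());                    // "/foo/bar"
print("baz.js".startsWith("baz"));     // true
print("baz.js".endsWith(".js"));       // true
print("baz.js".includes("z.j"));       // true
print("ab".repeat(3));                 // "ababab"
print(String.fromCodePoint(0x1F600));  // a single astral code point (grinning face emoji)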

View file

@ -0,0 +1,547 @@
//
// usertiming.js
//
// A polyfill for UserTiming (http://www.w3.org/TR/user-timing/)
//
// Copyright 2013 Nic Jansma
// http://nicj.net
//
// https://github.com/nicjansma/usertiming.js
//
// Licensed under the MIT license
//
// Adapted for High Fidelity by James B. Pollack @imgntn on 11/6/2015
function userTiming() {
"use strict";
// allow running in Node.js environment
if (typeof window === "undefined") {
window = {};
}
// prepare base perf object
if (typeof window.performance === "undefined") {
window.performance = {};
}
// We need to keep a global reference to the window.performance object to
// prevent any added properties from being garbage-collected in Safari 8.
// https://bugs.webkit.org/show_bug.cgi?id=137407
window._perfRefForUserTimingPolyfill = window.performance;
//
// Note what we shimmed
//
window.performance.userTimingJsNow = false;
window.performance.userTimingJsNowPrefixed = false;
window.performance.userTimingJsUserTiming = false;
window.performance.userTimingJsUserTimingPrefixed = false;
window.performance.userTimingJsPerformanceTimeline = false;
window.performance.userTimingJsPerformanceTimelinePrefixed = false;
// for prefixed support
var prefixes = [];
var methods = [];
var methodTest = null;
var i, j;
//
// window.performance.now() shim
// http://www.w3.org/TR/hr-time/
//
if (typeof window.performance.now !== "function") {
window.performance.userTimingJsNow = true;
// copy prefixed version over if it exists
methods = ["webkitNow", "msNow", "mozNow"];
for (i = 0; i < methods.length; i++) {
if (typeof window.performance[methods[i]] === "function") {
window.performance.now = window.performance[methods[i]];
window.performance.userTimingJsNowPrefixed = true;
break;
}
}
//
// now() should be a DOMHighResTimeStamp, which is defined as being a time relative
// to navigationStart of the PerformanceTiming (PT) interface. If this browser supports
// PT, use that as our relative start. Otherwise, use "now" as the start and all other
// now() calls will be relative to our initialization.
//
var nowOffset = +(new Date());
if (window.performance.timing && window.performance.timing.navigationStart) {
nowOffset = window.performance.timing.navigationStart;
}
if (typeof window.performance.now !== "function") {
// No browser support, fall back to Date.now
if (Date.now) {
window.performance.now = function() {
return Date.now() - nowOffset;
};
} else {
// no Date.now support, get the time from new Date()
window.performance.now = function() {
return +(new Date()) - nowOffset;
};
}
}
}
//
// PerformanceTimeline (PT) shims
// http://www.w3.org/TR/performance-timeline/
//
/**
* Adds an object to our internal Performance Timeline array.
*
* Will be blank if the environment supports PT.
*/
var addToPerformanceTimeline = function() {};
/**
* Clears the specified entry types from our timeline array.
*
* Will be blank if the environment supports PT.
*/
var clearEntriesFromPerformanceTimeline = function() {};
// performance timeline array
var performanceTimeline = [];
// whether or not the timeline will require sort on getEntries()
var performanceTimelineRequiresSort = false;
// whether or not ResourceTiming is natively supported but UserTiming is
// not (eg Firefox 35)
var hasNativeGetEntriesButNotUserTiming = false;
//
// If getEntries() and mark() aren't defined, we'll assume
// we have to shim at least some PT functions.
//
if (typeof window.performance.getEntries !== "function" ||
typeof window.performance.mark !== "function") {
if (typeof window.performance.getEntries === "function" &&
typeof window.performance.mark !== "function") {
hasNativeGetEntriesButNotUserTiming = true;
}
window.performance.userTimingJsPerformanceTimeline = true;
// copy prefixed version over if it exists
prefixes = ["webkit", "moz"];
methods = ["getEntries", "getEntriesByName", "getEntriesByType"];
for (i = 0; i < methods.length; i++) {
for (j = 0; j < prefixes.length; j++) {
// prefixed method will likely have an upper-case first letter
methodTest = prefixes[j] + methods[i].substr(0, 1).toUpperCase() + methods[i].substr(1);
if (typeof window.performance[methodTest] === "function") {
window.performance[methods[i]] = window.performance[methodTest];
window.performance.userTimingJsPerformanceTimelinePrefixed = true;
}
}
}
/**
* Adds an object to our internal Performance Timeline array.
*
* @param {Object} obj PerformanceEntry
*/
addToPerformanceTimeline = function(obj) {
performanceTimeline.push(obj);
//
// If we insert a measure, its startTime may be out of order
// from the rest of the entries because the user can use any
// mark as the start time. If so, note we have to sort it before
// returning getEntries();
//
if (obj.entryType === "measure") {
performanceTimelineRequiresSort = true;
}
};
/**
* Ensures our PT array is in the correct sorted order (by startTime)
*/
var ensurePerformanceTimelineOrder = function() {
if (!performanceTimelineRequiresSort) {
return;
}
//
// Measures, which may be in this list, may enter the list in
// an unsorted order. For example:
//
// 1. measure("a")
// 2. mark("start_mark")
// 3. measure("b", "start_mark")
// 4. measure("c")
// 5. getEntries()
//
// When calling #5, we should return [a,c,b] because technically the start time
// of c is "0" (navigationStart), which will occur before b's start time due to the mark.
//
performanceTimeline.sort(function(a, b) {
return a.startTime - b.startTime;
});
performanceTimelineRequiresSort = false;
};
/**
* Clears the specified entry types from our timeline array.
*
* @param {string} entryType Entry type (eg "mark" or "measure")
* @param {string} [name] Entry name (optional)
*/
clearEntriesFromPerformanceTimeline = function(entryType, name) {
// clear all entries from the perf timeline
i = 0;
while (i < performanceTimeline.length) {
if (performanceTimeline[i].entryType !== entryType) {
// unmatched entry type
i++;
continue;
}
if (typeof name !== "undefined" && performanceTimeline[i].name !== name) {
// unmatched name
i++;
continue;
}
// this entry matches our criteria, remove just it
performanceTimeline.splice(i, 1);
}
};
if (typeof window.performance.getEntries !== "function" || hasNativeGetEntriesButNotUserTiming) {
var origGetEntries = window.performance.getEntries;
/**
* Gets all entries from the Performance Timeline.
* http://www.w3.org/TR/performance-timeline/#dom-performance-getentries
*
* NOTE: This will only ever return marks and measures.
*
* @returns {PerformanceEntry[]} Array of PerformanceEntrys
*/
window.performance.getEntries = function() {
ensurePerformanceTimelineOrder();
// get a copy of all of our entries
var entries = performanceTimeline.slice(0);
// if there was a native version of getEntries, add that
if (hasNativeGetEntriesButNotUserTiming && origGetEntries) {
// merge in native
Array.prototype.push.apply(entries, origGetEntries.call(window.performance));
// sort by startTime
entries.sort(function(a, b) {
return a.startTime - b.startTime;
});
}
return entries;
};
}
if (typeof window.performance.getEntriesByType !== "function" || hasNativeGetEntriesButNotUserTiming) {
var origGetEntriesByType = window.performance.getEntriesByType;
/**
* Gets all entries from the Performance Timeline of the specified type.
* http://www.w3.org/TR/performance-timeline/#dom-performance-getentriesbytype
*
* NOTE: This will only work for marks and measures.
*
* @param {string} entryType Entry type (eg "mark" or "measure")
*
* @returns {PerformanceEntry[]} Array of PerformanceEntrys
*/
window.performance.getEntriesByType = function(entryType) {
// we only support marks/measures
if (typeof entryType === "undefined" ||
(entryType !== "mark" && entryType !== "measure")) {
if (hasNativeGetEntriesButNotUserTiming && origGetEntriesByType) {
// native version exists, forward
return origGetEntriesByType.call(window.performance, entryType);
}
return [];
}
// see note in ensurePerformanceTimelineOrder() on why this is required
if (entryType === "measure") {
ensurePerformanceTimelineOrder();
}
// find all entries of entryType
var entries = [];
for (i = 0; i < performanceTimeline.length; i++) {
if (performanceTimeline[i].entryType === entryType) {
entries.push(performanceTimeline[i]);
}
}
return entries;
};
}
if (typeof window.performance.getEntriesByName !== "function" || hasNativeGetEntriesButNotUserTiming) {
var origGetEntriesByName = window.performance.getEntriesByName;
/**
* Gets all entries from the Performance Timeline of the specified
* name, and optionally, type.
* http://www.w3.org/TR/performance-timeline/#dom-performance-getentriesbyname
*
* NOTE: This will only work for marks and measures.
*
* @param {string} name Entry name
* @param {string} [entryType] Entry type (eg "mark" or "measure")
*
* @returns {PerformanceEntry[]} Array of PerformanceEntrys
*/
window.performance.getEntriesByName = function(name, entryType) {
if (entryType && entryType !== "mark" && entryType !== "measure") {
if (hasNativeGetEntriesButNotUserTiming && origGetEntriesByName) {
// native version exists, forward
return origGetEntriesByName.call(window.performance, name, entryType);
}
return [];
}
// see note in ensurePerformanceTimelineOrder() on why this is required
if (typeof entryType !== "undefined" && entryType === "measure") {
ensurePerformanceTimelineOrder();
}
// find all entries of the name and (optionally) type
var entries = [];
for (i = 0; i < performanceTimeline.length; i++) {
if (typeof entryType !== "undefined" &&
performanceTimeline[i].entryType !== entryType) {
continue;
}
if (performanceTimeline[i].name === name) {
entries.push(performanceTimeline[i]);
}
}
if (hasNativeGetEntriesButNotUserTiming && origGetEntriesByName) {
// merge in native
Array.prototype.push.apply(entries, origGetEntriesByName.call(window.performance, name, entryType));
// sort by startTime
entries.sort(function(a, b) {
return a.startTime - b.startTime;
});
}
return entries;
};
}
}
//
// UserTiming support
//
if (typeof window.performance.mark !== "function") {
window.performance.userTimingJsUserTiming = true;
// copy prefixed version over if it exists
prefixes = ["webkit", "moz", "ms"];
methods = ["mark", "measure", "clearMarks", "clearMeasures"];
for (i = 0; i < methods.length; i++) {
for (j = 0; j < prefixes.length; j++) {
// prefixed method will likely have an upper-case first letter
methodTest = prefixes[j] + methods[i].substr(0, 1).toUpperCase() + methods[i].substr(1);
if (typeof window.performance[methodTest] === "function") {
window.performance[methods[i]] = window.performance[methodTest];
window.performance.userTimingJsUserTimingPrefixed = true;
}
}
}
// only used for measure(), to quickly see the latest timestamp of a mark
var marks = {};
if (typeof window.performance.mark !== "function") {
/**
* UserTiming mark
* http://www.w3.org/TR/user-timing/#dom-performance-mark
*
* @param {string} markName Mark name
*/
window.performance.mark = function(markName) {
var now = window.performance.now();
// mark name is required
if (typeof markName === "undefined") {
throw new SyntaxError("Mark name must be specified");
}
// mark name can't be a NT timestamp
if (window.performance.timing && markName in window.performance.timing) {
throw new SyntaxError("Mark name is not allowed");
}
if (!marks[markName]) {
marks[markName] = [];
}
marks[markName].push(now);
// add to perf timeline as well
addToPerformanceTimeline({
entryType: "mark",
name: markName,
startTime: now,
duration: 0
});
};
}
if (typeof window.performance.clearMarks !== "function") {
/**
* UserTiming clear marks
* http://www.w3.org/TR/user-timing/#dom-performance-clearmarks
*
* @param {string} markName Mark name
*/
window.performance.clearMarks = function(markName) {
if (!markName) {
// clear all marks
marks = {};
} else {
marks[markName] = [];
}
clearEntriesFromPerformanceTimeline("mark", markName);
};
}
if (typeof window.performance.measure !== "function") {
/**
* UserTiming measure
* http://www.w3.org/TR/user-timing/#dom-performance-measure
*
* @param {string} measureName Measure name
* @param {string} [startMark] Start mark name
* @param {string} [endMark] End mark name
*/
window.performance.measure = function(measureName, startMark, endMark) {
var now = window.performance.now();
if (typeof measureName === "undefined") {
throw new SyntaxError("Measure must be specified");
}
// if there isn't a startMark, we measure from navigationStart to now
if (!startMark) {
// add to perf timeline as well
addToPerformanceTimeline({
entryType: "measure",
name: measureName,
startTime: 0,
duration: now
});
return;
}
//
// If there is a startMark, check for it first in the NavigationTiming interface,
// then check our own marks.
//
var startMarkTime = 0;
if (window.performance.timing && startMark in window.performance.timing) {
// mark cannot have a timing of 0
if (startMark !== "navigationStart" && window.performance.timing[startMark] === 0) {
throw new Error(startMark + " has a timing of 0");
}
// time is the offset of this mark to navigationStart's time
startMarkTime = window.performance.timing[startMark] - window.performance.timing.navigationStart;
} else if (startMark in marks) {
startMarkTime = marks[startMark][marks[startMark].length - 1];
} else {
throw new Error(startMark + " mark not found");
}
//
// If there is a endMark, check for it first in the NavigationTiming interface,
// then check our own marks.
//
var endMarkTime = now;
if (endMark) {
endMarkTime = 0;
if (window.performance.timing && endMark in window.performance.timing) {
// mark cannot have a timing of 0
if (endMark !== "navigationStart" && window.performance.timing[endMark] === 0) {
throw new Error(endMark + " has a timing of 0");
}
// time is the offset of this mark to navigationStart's time
endMarkTime = window.performance.timing[endMark] - window.performance.timing.navigationStart;
} else if (endMark in marks) {
endMarkTime = marks[endMark][marks[endMark].length - 1];
} else {
throw new Error(endMark + " mark not found");
}
}
// add to our measure array
var duration = endMarkTime - startMarkTime;
// add to perf timeline as well
addToPerformanceTimeline({
entryType: "measure",
name: measureName,
startTime: startMarkTime,
duration: duration
});
};
}
if (typeof window.performance.clearMeasures !== "function") {
/**
* UserTiming clear measures
* http://www.w3.org/TR/user-timing/#dom-performance-clearmeasures
*
* @param {string} measureName Measure name
*/
window.performance.clearMeasures = function(measureName) {
clearEntriesFromPerformanceTimeline("measure", measureName);
};
}
}
return window
}
loadUserTiming = function() {
return userTiming();
}

View file

@ -0,0 +1,18 @@
Script.include('usertiming.js');
var timing = loadUserTiming();
//set a mark
timing.performance.mark('firstMark');
//do something that takes time -- we're just going to set a timeout here as an example
Script.setTimeout(function() {
//and set another mark
timing.performance.mark('secondMark');
//measure time between marks (first parameter is a name for the measurement)
timing.performance.measure('howlong', 'firstMark', 'secondMark');
//you can also get the marks by changing the type
var measures = timing.performance.getEntriesByType('measure');
print('measures:::' + JSON.stringify(measures))
}, 1000)
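The polyfill also supports fetching entries by name and clearing them; a hedged continuation that would sit inside the same timeout callback, after the measure has been recorded:
var byName = timing.performance.getEntriesByName('howlong', 'measure');
print('howlong:::' + JSON.stringify(byName));
// tidy up once the values have been read
timing.performance.clearMarks('firstMark');
timing.performance.clearMarks('secondMark');
timing.performance.clearMeasures('howlong');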

View file

@ -14,8 +14,8 @@ Script.include("../../utilities.js");
var scriptURL = Script.resolvePath('pingPongGun.js');
var MODEL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/ping_pong_gun.fbx'
var COLLISION_HULL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/ping_pong_gun_collision_hull.obj';
var COLLISION_HULL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/ping_pong_gun_convex.obj';
var COLLISION_SOUND_URL = 'http://hifi-public.s3.amazonaws.com/sounds/Collisions-otherorganic/plastic_impact.L.wav';
var center = Vec3.sum(Vec3.sum(MyAvatar.position, {
x: 0,
y: 0.5,
@ -25,9 +25,8 @@ var center = Vec3.sum(Vec3.sum(MyAvatar.position, {
var pingPongGun = Entities.addEntity({
type: "Model",
modelURL: MODEL_URL,
shapeType:'box',
// shapeType: 'compound',
// compoundShapeURL: COLLISION_HULL_URL,
shapeType: 'compound',
compoundShapeURL: COLLISION_HULL_URL,
script: scriptURL,
position: center,
dimensions: {
@ -36,6 +35,7 @@ var pingPongGun = Entities.addEntity({
z: 0.47
},
collisionsWillMove: true,
collisionSoundURL: COLLISION_SOUND_URL
});
function cleanUp() {

View file

@ -14,8 +14,17 @@
]
},
{ "from": "Standard.RX", "to": "Actions.Yaw" },
{ "from": "Standard.RY", "when": "!Application.InHMD", "to": "Actions.Pitch" },
{ "from": "Standard.RY",
"when": "Application.Grounded",
"to": "Actions.Up",
"filters":
[
{ "type": "deadZone", "min": 0.95 },
"invert"
]
},
{ "from": "Standard.RY", "to": "Actions.Up", "filters": "invert"},
{ "from": [ "Standard.DU", "Standard.DL", "Standard.DR", "Standard.DD" ], "to": "Standard.LeftPrimaryThumb" },
{ "from": "Standard.Back", "to": "Standard.LeftSecondaryThumb" },

View file

@ -646,11 +646,14 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
_applicationStateDevice->addInputVariant(QString("ComfortMode"), controller::StateController::ReadLambda([]() -> float {
return (float)Menu::getInstance()->isOptionChecked(MenuOption::ComfortMode);
}));
_applicationStateDevice->addInputVariant(QString("Grounded"), controller::StateController::ReadLambda([]() -> float {
return (float)qApp->getMyAvatar()->getCharacterController()->onGround();
}));
userInputMapper->registerDevice(_applicationStateDevice);
// Setup the keyboardMouseDevice and the user input mapper with the default bindings
userInputMapper->registerDevice(_keyboardMouseDevice);
userInputMapper->registerDevice(_keyboardMouseDevice->getInputDevice());
userInputMapper->loadDefaultMapping(userInputMapper->getStandardDeviceID());
@ -726,8 +729,11 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
// Now that menu is initialized we can sync myAvatar with its state.
getMyAvatar()->updateMotionBehaviorFromMenu();
// FIXME spacemouse code still needs cleanup
#if 0
// the 3Dconnexion device wants to be initialized after a window is displayed.
SpacemouseManager::getInstance().init();
#endif
auto& packetReceiver = nodeList->getPacketReceiver();
packetReceiver.registerListener(PacketType::DomainConnectionDenied, this, "handleDomainConnectionDeniedPacket");
@ -1847,9 +1853,12 @@ void Application::focusOutEvent(QFocusEvent* event) {
}
}
// FIXME spacemouse code still needs cleanup
#if 0
//SpacemouseDevice::getInstance().focusOutEvent();
//SpacemouseManager::getInstance().getDevice()->focusOutEvent();
SpacemouseManager::getInstance().ManagerFocusOutEvent();
#endif
// synthesize events for keys currently pressed, since we may not get their release events
foreach (int key, _keysPressed) {

View file

@ -44,22 +44,11 @@
#include "Menu.h"
Menu* Menu::_instance = NULL;
static const char* const MENU_PROPERTY_NAME = "com.highfidelity.Menu";
Menu* Menu::getInstance() {
static QMutex menuInstanceMutex;
// lock the menu instance mutex to make sure we don't race and create two menus and crash
menuInstanceMutex.lock();
if (!_instance) {
qCDebug(interfaceapp, "First call to Menu::getInstance() - initing menu.");
_instance = new Menu();
}
menuInstanceMutex.unlock();
return _instance;
static Menu* instance = globalInstance<Menu>(MENU_PROPERTY_NAME);
return instance;
}
Menu::Menu() {
@ -465,8 +454,6 @@ Menu::Menu() {
avatar, SLOT(setEnableMeshVisible(bool)));
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::DisableEyelidAdjustment, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::Connexion, 0, false, &SpacemouseManager::getInstance(), SLOT(toggleSpacemouse(bool)));
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ComfortMode, 0, true);
MenuWrapper* handOptionsMenu = developerMenu->addMenu("Hands");

View file

@ -57,6 +57,7 @@ private:
class Menu : public QMenuBar {
Q_OBJECT
public:
Menu();
static Menu* getInstance();
void loadSettings();
@ -103,9 +104,6 @@ public slots:
void setIsOptionChecked(const QString& menuOption, bool isChecked);
private:
static Menu* _instance;
Menu();
typedef void(*settingsAction)(Settings&, QAction&);
static void loadAction(Settings& settings, QAction& action);
static void saveAction(Settings& settings, QAction& action);

View file

@ -30,7 +30,7 @@ public:
QByteArray serialize() const;
virtual void deserialize(QByteArray serializedArguments);
virtual bool shouldSuppressLocationEdits() { return true; }
virtual bool shouldSuppressLocationEdits() { return _active && !_ownerEntity.expired(); }
private:
static const uint16_t holdVersion;

View file

@ -473,18 +473,7 @@ void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {
continue;
}
int controllerButtons = 0;
//Check whether we should toggle or drag the magnification window
if (controllerButtons & BUTTON_3) {
if (isPressed[index] == false) {
//We are now dragging the window
isPressed[index] = true;
//set the pressed time in us
pressedTime[index] = usecTimestampNow();
stateWhenPressed[index] = _magActive[index];
}
} else if (isPressed[index]) {
if (isPressed[index]) {
isPressed[index] = false;
//If the button was only pressed for < 250 ms
//then disable it.

View file

@ -156,11 +156,11 @@ void AnimInverseKinematics::solveWithCyclicCoordinateDescent(const std::vector<I
// cache tip absolute transform
int tipIndex = target.getIndex();
int pivotIndex = _skeleton->getParentIndex(tipIndex);
if (pivotIndex == -1) {
if (pivotIndex == -1 || pivotIndex == _hipsIndex) {
continue;
}
int pivotsParentIndex = _skeleton->getParentIndex(pivotIndex);
if (pivotsParentIndex == -1) {
if (pivotsParentIndex == -1 || pivotIndex == _hipsIndex) {
// TODO?: handle case where tip's parent is root?
continue;
}
@ -173,7 +173,7 @@ void AnimInverseKinematics::solveWithCyclicCoordinateDescent(const std::vector<I
glm::quat tipParentRotation = absolutePoses[pivotIndex].rot;
// descend toward root, pivoting each joint to get tip closer to target
while (pivotsParentIndex != -1) {
while (pivotsParentIndex != _hipsIndex && pivotsParentIndex != -1) {
// compute the two lines that should be aligned
glm::vec3 jointPosition = absolutePoses[pivotIndex].trans;
glm::vec3 leverArm = tipPosition - jointPosition;
@ -285,7 +285,7 @@ void AnimInverseKinematics::solveWithCyclicCoordinateDescent(const std::vector<I
// only update the absolutePoses that need it: those between lowestMovedIndex and _maxTargetIndex
for (int i = lowestMovedIndex; i <= _maxTargetIndex; ++i) {
int parentIndex = _skeleton->getParentIndex(i);
if (parentIndex != -1) {
if (parentIndex != -1 && parentIndex != _hipsIndex) {
absolutePoses[i] = absolutePoses[parentIndex] * _relativePoses[i];
}
}
@ -295,7 +295,7 @@ void AnimInverseKinematics::solveWithCyclicCoordinateDescent(const std::vector<I
for (auto& target: targets) {
int tipIndex = target.getIndex();
int parentIndex = _skeleton->getParentIndex(tipIndex);
if (parentIndex != -1) {
if (parentIndex != -1 && parentIndex != _hipsIndex) {
const glm::quat& targetRotation = target.getRotation();
// compute tip's new parent-relative rotation
// Q = Qp * q --> q' = Qp^ * Q

View file

@ -300,6 +300,8 @@ void Rig::setJointAnimatinoPriority(int index, float newPriority) {
}
}
// Deprecated.
// WARNING: this is not symmetric with getJointRotation. It's historical. Use the appropriate specific variation.
void Rig::setJointRotation(int index, bool valid, const glm::quat& rotation, float priority) {
if (index != -1 && index < _jointStates.size()) {
JointState& state = _jointStates[index];
@ -350,6 +352,8 @@ bool Rig::getJointRotationInWorldFrame(int jointIndex, glm::quat& result, const
return true;
}
// Deprecated.
// WARNING: this is not symmetric with setJointRotation. It's historical. Use the appropriate specific variation.
bool Rig::getJointRotation(int jointIndex, glm::quat& rotation) const {
if (jointIndex == -1 || jointIndex >= _jointStates.size()) {
return false;

View file

@ -240,7 +240,8 @@ public:
Q_INVOKABLE void setHandState(char s) { _handState = s; }
Q_INVOKABLE char getHandState() const { return _handState; }
const QVector<JointData>& getJointData() const { return _jointData; }
const QVector<JointData>& getRawJointData() const { return _jointData; }
void setRawJointData(QVector<JointData> data) { _jointData = data; }
Q_INVOKABLE virtual void setJointData(int index, const glm::quat& rotation, const glm::vec3& translation);
Q_INVOKABLE virtual void setJointRotation(int index, const glm::quat& rotation);

View file

@ -0,0 +1,11 @@
//
// Created by Bradley Austin Davis 2015/10/11
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Logging.h"
Q_LOGGING_CATEGORY(displayPlugins, "hifi.plugins.display")

View file

@ -0,0 +1,16 @@
//
// Created by Bradley Austin Davis 2015/10/11
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_DisplayPlugins_Logging_h
#define hifi_DisplayPlugins_Logging_h
#include <QLoggingCategory>
Q_DECLARE_LOGGING_CATEGORY(displayPlugins)
#endif

View file

@ -37,8 +37,6 @@ const QString & OpenVrDisplayPlugin::getName() const {
return NAME;
}
vr::IVRSystem* _hmd{ nullptr };
int hmdRefCount = 0;
static vr::IVRCompositor* _compositor{ nullptr };
vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
@ -78,24 +76,17 @@ mat4 toGlm(const vr::HmdMatrix34_t& m) {
}
bool OpenVrDisplayPlugin::isSupported() const {
bool success = vr::VR_IsHmdPresent();
if (success) {
vr::HmdError eError = vr::HmdError_None;
auto hmd = vr::VR_Init(&eError);
success = (hmd != nullptr);
vr::VR_Shutdown();
}
auto hmd = acquireOpenVrSystem();
bool success = nullptr != hmd;
releaseOpenVrSystem();
return success;
}
void OpenVrDisplayPlugin::activate() {
_container->setIsOptionChecked(StandingHMDSensorMode, true);
hmdRefCount++;
vr::HmdError eError = vr::HmdError_None;
if (!_hmd) {
_hmd = vr::VR_Init(&eError);
Q_ASSERT(eError == vr::HmdError_None);
_hmd = acquireOpenVrSystem();
}
Q_ASSERT(_hmd);
@ -114,6 +105,7 @@ void OpenVrDisplayPlugin::activate() {
});
vr::HmdError eError = vr::HmdError_None;
_compositor = (vr::IVRCompositor*)vr::VR_GetGenericInterface(vr::IVRCompositor_Version, &eError);
Q_ASSERT(eError == vr::HmdError_None);
Q_ASSERT(_compositor);
@ -133,11 +125,8 @@ void OpenVrDisplayPlugin::activate() {
void OpenVrDisplayPlugin::deactivate() {
_container->setIsOptionChecked(StandingHMDSensorMode, false);
hmdRefCount--;
if (hmdRefCount == 0 && _hmd) {
vr::VR_Shutdown();
if (_hmd) {
releaseOpenVrSystem();
_hmd = nullptr;
}
_compositor = nullptr;

View file

@ -10,6 +10,7 @@
#include <QtGlobal>
#if defined(Q_OS_WIN)
#include <openvr.h>
#include "../WindowOpenGLDisplayPlugin.h"
@ -39,6 +40,7 @@ protected:
virtual void finishFrame() override;
private:
vr::IVRSystem* _hmd { nullptr };
static const QString NAME;
};

View file

@ -0,0 +1,75 @@
//
// Created by Bradley Austin Davis on 2015/11/01
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "OpenVrHelpers.h"
#if defined(Q_OS_WIN)
#include <QtCore/QTimer>
#include <atomic>
#include <mutex>
#include "../Logging.h"
using Mutex = std::mutex;
using Lock = std::unique_lock<Mutex>;
static int refCount { 0 };
static Mutex mutex;
static vr::IVRSystem* activeHmd { nullptr };
static bool hmdPresent = vr::VR_IsHmdPresent();
static const uint32_t RELEASE_OPENVR_HMD_DELAY_MS = 5000;
vr::IVRSystem* acquireOpenVrSystem() {
if (hmdPresent) {
Lock lock(mutex);
if (!activeHmd) {
qCDebug(displayPlugins) << "openvr: No vr::IVRSystem instance active, building";
vr::HmdError eError = vr::HmdError_None;
activeHmd = vr::VR_Init(&eError);
qCDebug(displayPlugins) << "openvr display: HMD is " << activeHmd << " error is " << eError;
}
if (activeHmd) {
qCDebug(displayPlugins) << "openvr: incrementing refcount";
++refCount;
}
}
return activeHmd;
}
void releaseOpenVrSystem() {
if (activeHmd) {
Lock lock(mutex);
qDebug() << "openvr: decrementing refcount";
--refCount;
if (0 == refCount) {
qDebug() << "openvr: zero refcount, deallocate VR system";
// Avoid spamming the VR system with activate/deactivate calls at system startup by
// putting in a delay before we shut down the VR subsystem
// FIXME releasing the VR system at all seems to trigger an exception deep inside the Oculus DLL.
// disabling for now.
//QTimer* releaseTimer = new QTimer();
//releaseTimer->singleShot(RELEASE_OPENVR_HMD_DELAY_MS, [releaseTimer] {
// Lock lock(mutex);
// qDebug() << "Delayed openvr destroy activated";
// if (0 == refCount && nullptr != activeHmd) {
// qDebug() << "Delayed openvr destroy: releasing resources";
// activeHmd = nullptr;
// vr::VR_Shutdown();
// } else {
// qDebug() << "Delayed openvr destroy: HMD still in use";
// }
// releaseTimer->deleteLater();
//});
}
}
}
#endif

View file

@ -7,10 +7,16 @@
//
#pragma once
#include <QtGlobal>
#if defined(Q_OS_WIN)
#include <openvr.h>
#include <GLMHelpers.h>
#include <glm/gtc/type_ptr.hpp>
#include <glm/gtc/matrix_transform.hpp>
vr::IVRSystem* acquireOpenVrSystem();
void releaseOpenVrSystem();
#endif

View file

@ -23,6 +23,7 @@
#include "untextured_particle_frag.h"
#include "textured_particle_vert.h"
#include "textured_particle_frag.h"
#include "textured_particle_alpha_discard_frag.h"
class ParticlePayload {
public:
@ -114,8 +115,7 @@ namespace render {
}
}
gpu::PipelinePointer RenderableParticleEffectEntityItem::_texturedPipeline;
gpu::PipelinePointer RenderableParticleEffectEntityItem::_untexturedPipeline;
EntityItemPointer RenderableParticleEffectEntityItem::factory(const EntityItemID& entityID, const EntityItemProperties& properties) {
return std::make_shared<RenderableParticleEffectEntityItem>(entityID, properties);
@ -203,19 +203,26 @@ void RenderableParticleEffectEntityItem::updateRenderItem() {
// sort particles back to front
// NOTE: this view frustum might be one frame out of date.
auto frustum = AbstractViewStateInterface::instance()->getCurrentViewFrustum();
::zSortAxis = frustum->getDirection();
qSort(particleDetails.begin(), particleDetails.end(), zSort);
// No need to sort if we're doing additive blending
if (!_additiveBlending) {
::zSortAxis = frustum->getDirection();
qSort(particleDetails.begin(), particleDetails.end(), zSort);
}
// allocate vertices
_vertices.clear();
// build vertices from particle positions and radii
glm::vec3 frustumPosition = frustum->getPosition();
glm::vec3 dir = frustum->getDirection();
for (auto&& particle : particleDetails) {
glm::vec3 particleDirection = particle.position - frustumPosition;
glm::vec3 right = glm::normalize(glm::cross(glm::vec3(0.0f, 1.0f, 0.0f), particleDirection));
glm::vec3 up = glm::normalize(glm::cross(right, particleDirection));
glm::vec3 right = glm::normalize(glm::cross(glm::vec3(0.0f, 1.0f, 0.0f), dir));
glm::vec3 up = glm::normalize(glm::cross(right, dir));
glm::vec3 upOffset = up * particle.radius;
glm::vec3 rightOffset = right * particle.radius;
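A self-contained restatement of the billboard basis above, which now uses the single frustum direction for every particle instead of a per-particle direction. The function name and the corner ordering are illustrative, since the actual vertex layout lives outside this hunk:

#include <array>
#include <glm/glm.hpp>

// Sketch: the four camera-facing quad corners implied by the up/right offsets above.
std::array<glm::vec3, 4> particleQuadCorners(const glm::vec3& center, float radius, const glm::vec3& viewDir) {
    glm::vec3 right = glm::normalize(glm::cross(glm::vec3(0.0f, 1.0f, 0.0f), viewDir));
    glm::vec3 up = glm::normalize(glm::cross(right, viewDir));
    glm::vec3 upOffset = up * radius;
    glm::vec3 rightOffset = right * radius;
    return {{ center - rightOffset + upOffset,     // top-left
              center + rightOffset + upOffset,     // top-right
              center + rightOffset - upOffset,     // bottom-right
              center - rightOffset - upOffset }};  // bottom-left
}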
@ -309,12 +316,21 @@ void RenderableParticleEffectEntityItem::updateRenderItem() {
}
void RenderableParticleEffectEntityItem::createPipelines() {
bool writeToDepthBuffer = false;
gpu::State::BlendArg destinationColorBlendArg;
if (_additiveBlending) {
destinationColorBlendArg = gpu::State::ONE;
} else {
destinationColorBlendArg = gpu::State::INV_SRC_ALPHA;
writeToDepthBuffer = true;
}
if (!_untexturedPipeline) {
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, true, gpu::LESS_EQUAL);
state->setDepthTest(true, writeToDepthBuffer, gpu::LESS_EQUAL);
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD,
gpu::State::ONE, gpu::State::FACTOR_ALPHA,
destinationColorBlendArg, gpu::State::FACTOR_ALPHA,
gpu::State::BLEND_OP_ADD, gpu::State::ONE);
auto vertShader = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(untextured_particle_vert)));
auto fragShader = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(untextured_particle_frag)));
@ -324,13 +340,24 @@ void RenderableParticleEffectEntityItem::createPipelines() {
if (!_texturedPipeline) {
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, true, gpu::LESS_EQUAL);
bool writeToDepthBuffer = !_additiveBlending;
state->setDepthTest(true, writeToDepthBuffer, gpu::LESS_EQUAL);
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD,
gpu::State::INV_SRC_ALPHA, gpu::State::FACTOR_ALPHA,
destinationColorBlendArg, gpu::State::FACTOR_ALPHA,
gpu::State::BLEND_OP_ADD, gpu::State::ONE);
auto vertShader = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(textured_particle_vert)));
auto fragShader = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(textured_particle_frag)));
gpu::ShaderPointer fragShader;
if (_additiveBlending) {
fragShader = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(textured_particle_frag)));
} else {
// If we are sorting and have no additive blending, we want to discard pixels with low alpha to avoid inter-particle entity artifacts
fragShader = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(textured_particle_alpha_discard_frag)));
}
auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vertShader, fragShader));
_texturedPipeline = gpu::PipelinePointer(gpu::Pipeline::create(program, state));
}
}
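For reference, a sketch of what the two setBlendFunction configurations above reduce to per color channel; the helper names are illustrative. Addition is order-independent, which is why updateRenderItem() skips the back-to-front sort when additive blending is enabled:

#include <glm/glm.hpp>

// dst' for the additive path: (SRC_ALPHA, BLEND_OP_ADD, ONE)
glm::vec3 blendAdditive(const glm::vec3& dst, const glm::vec3& src, float srcAlpha) {
    return src * srcAlpha + dst;
}

// dst' for the sorted alpha path: (SRC_ALPHA, BLEND_OP_ADD, INV_SRC_ALPHA)
glm::vec3 blendAlpha(const glm::vec3& dst, const glm::vec3& src, float srcAlpha) {
    return src * srcAlpha + dst * (1.0f - srcAlpha);
}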

View file

@ -38,11 +38,11 @@ protected:
uint32_t rgba;
};
static void createPipelines();
void createPipelines();
std::vector<Vertex> _vertices;
static gpu::PipelinePointer _untexturedPipeline;
static gpu::PipelinePointer _texturedPipeline;
gpu::PipelinePointer _untexturedPipeline;
gpu::PipelinePointer _texturedPipeline;
render::ScenePointer _scene;
NetworkTexturePointer _texture;

View file

@ -0,0 +1,25 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// fragment shader
//
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
uniform sampler2D colorMap;
in vec4 _color;
in vec2 _texCoord0;
out vec4 outFragColor;
void main(void) {
vec4 color = texture(colorMap, _texCoord0);
if (color.a < 0.1) {
discard;
}
outFragColor = color * _color;
}

View file

@ -195,6 +195,7 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
CHECK_PROPERTY_CHANGE(PROP_ALPHA_SPREAD, alphaSpread);
CHECK_PROPERTY_CHANGE(PROP_ALPHA_START, alphaStart);
CHECK_PROPERTY_CHANGE(PROP_ALPHA_FINISH, alphaFinish);
CHECK_PROPERTY_CHANGE(PROP_ADDITIVE_BLENDING, additiveBlending);
CHECK_PROPERTY_CHANGE(PROP_MODEL_URL, modelURL);
CHECK_PROPERTY_CHANGE(PROP_COMPOUND_SHAPE_URL, compoundShapeURL);
CHECK_PROPERTY_CHANGE(PROP_VISIBLE, visible);
@ -351,6 +352,8 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_ALPHA_SPREAD, alphaSpread);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_ALPHA_START, alphaStart);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_ALPHA_FINISH, alphaFinish);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_ADDITIVE_BLENDING, additiveBlending);
}
// Models only
@ -502,6 +505,7 @@ void EntityItemProperties::copyFromScriptValue(const QScriptValue& object, bool
COPY_PROPERTY_FROM_QSCRIPTVALUE(alphaSpread, float, setAlphaSpread);
COPY_PROPERTY_FROM_QSCRIPTVALUE(alphaStart, float, setAlphaStart);
COPY_PROPERTY_FROM_QSCRIPTVALUE(alphaFinish, float, setAlphaFinish);
COPY_PROPERTY_FROM_QSCRIPTVALUE(additiveBlending, bool, setAdditiveBlending);
COPY_PROPERTY_FROM_QSCRIPTVALUE(modelURL, QString, setModelURL);
COPY_PROPERTY_FROM_QSCRIPTVALUE(compoundShapeURL, QString, setCompoundShapeURL);
COPY_PROPERTY_FROM_QSCRIPTVALUE(glowLevel, float, setGlowLevel);
@ -650,6 +654,7 @@ void EntityItemProperties::entityPropertyFlagsFromScriptValue(const QScriptValue
ADD_PROPERTY_TO_MAP(PROP_ALPHA_SPREAD, AlphaSpread, alphaSpread, float);
ADD_PROPERTY_TO_MAP(PROP_ALPHA_START, AlphaStart, alphaStart, float);
ADD_PROPERTY_TO_MAP(PROP_ALPHA_FINISH, AlphaFinish, alphaFinish, float);
ADD_PROPERTY_TO_MAP(PROP_ADDITIVE_BLENDING, AdditiveBlending, additiveBlending, bool);
ADD_PROPERTY_TO_MAP(PROP_MODEL_URL, ModelURL, modelURL, QString);
ADD_PROPERTY_TO_MAP(PROP_COMPOUND_SHAPE_URL, CompoundShapeURL, compoundShapeURL, QString);
ADD_PROPERTY_TO_MAP(PROP_REGISTRATION_POINT, RegistrationPoint, registrationPoint, glm::vec3);
@ -959,6 +964,7 @@ bool EntityItemProperties::encodeEntityEditPacket(PacketType command, EntityItem
APPEND_ENTITY_PROPERTY(PROP_ALPHA_SPREAD, properties.getAlphaSpread());
APPEND_ENTITY_PROPERTY(PROP_ALPHA_START, properties.getAlphaStart());
APPEND_ENTITY_PROPERTY(PROP_ALPHA_FINISH, properties.getAlphaFinish());
APPEND_ENTITY_PROPERTY(PROP_ADDITIVE_BLENDING, properties.getAdditiveBlending());
}
if (properties.getType() == EntityTypes::Zone) {
@ -1241,6 +1247,7 @@ bool EntityItemProperties::decodeEntityEditPacket(const unsigned char* data, int
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_ALPHA_SPREAD, float, setAlphaSpread);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_ALPHA_START, float, setAlphaStart);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_ALPHA_FINISH, float, setAlphaFinish);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_ADDITIVE_BLENDING, bool, setAdditiveBlending);
}
if (properties.getType() == EntityTypes::Zone) {
@ -1579,6 +1586,9 @@ QList<QString> EntityItemProperties::listChangedProperties() {
if (alphaFinishChanged()) {
out += "alphaFinish";
}
if (additiveBlendingChanged()) {
out += "additiveBlending";
}
if (modelURLChanged()) {
out += "modelURL";
}

View file

@ -159,6 +159,7 @@ public:
DEFINE_PROPERTY(PROP_RADIUS_SPREAD, RadiusSpread, radiusSpread, float, ParticleEffectEntityItem::DEFAULT_RADIUS_SPREAD);
DEFINE_PROPERTY(PROP_RADIUS_START, RadiusStart, radiusStart, float, ParticleEffectEntityItem::DEFAULT_RADIUS_START);
DEFINE_PROPERTY(PROP_RADIUS_FINISH, RadiusFinish, radiusFinish, float, ParticleEffectEntityItem::DEFAULT_RADIUS_FINISH);
DEFINE_PROPERTY(PROP_ADDITIVE_BLENDING, AdditiveBlending, additiveBlending, bool, ParticleEffectEntityItem::DEFAULT_ADDITIVE_BLENDING);
DEFINE_PROPERTY_REF(PROP_MARKETPLACE_ID, MarketplaceID, marketplaceID, QString, ENTITY_ITEM_DEFAULT_MARKETPLACE_ID);
DEFINE_PROPERTY_GROUP(KeyLight, keyLight, KeyLightPropertyGroup);
DEFINE_PROPERTY_REF(PROP_VOXEL_VOLUME_SIZE, VoxelVolumeSize, voxelVolumeSize, glm::vec3, PolyVoxEntityItem::DEFAULT_VOXEL_VOLUME_SIZE);
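The DEFINE_PROPERTY line above is what generates the accessors this change relies on elsewhere (getAdditiveBlending, setAdditiveBlending, additiveBlendingChanged). A minimal usage sketch; the wrapper function is illustrative only:

#include <QtGlobal>
#include "EntityItemProperties.h"

void exampleAdditiveBlendingProperty() {
    EntityItemProperties properties;
    properties.setAdditiveBlending(true);                  // generated setter
    bool enabled = properties.getAdditiveBlending();       // generated getter, true here
    bool changed = properties.additiveBlendingChanged();   // picked up by getChangedProperties()/listChangedProperties()
    Q_UNUSED(enabled);
    Q_UNUSED(changed);
}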

View file

@ -149,6 +149,8 @@ enum EntityPropertyList {
PROP_ANIMATION_HOLD,
PROP_ANIMATION_START_AUTOMATICALLY,
PROP_ADDITIVE_BLENDING,
////////////////////////////////////////////////////////////////////////////////////////////////////
// ATTENTION: add new properties to end of list just ABOVE this line
PROP_AFTER_LAST_ITEM,

View file

@ -611,6 +611,16 @@ EntityItemPointer EntityTree::findEntityByEntityItemID(const EntityItemID& entit
return foundEntity;
}
void EntityTree::fixupTerseEditLogging(EntityItemProperties& properties, QList<QString>& changedProperties) {
if (properties.simulationOwnerChanged()) {
int simIndex = changedProperties.indexOf("simulationOwner");
if (simIndex >= 0) {
SimulationOwner simOwner = properties.getSimulationOwner();
changedProperties[simIndex] = QString("simulationOwner:") + QString::number((int)simOwner.getPriority());
}
}
}
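A small, self-contained sketch of the list rewrite fixupTerseEditLogging() performs; the priority value 128 is made up for illustration:

#include <QtCore/QList>
#include <QtCore/QString>

void exampleTerseLogFixup() {
    QList<QString> changedProperties { "simulationOwner", "velocity" };
    int simIndex = changedProperties.indexOf("simulationOwner");
    int priority = 128;   // stand-in for (int)simOwner.getPriority()
    if (simIndex >= 0) {
        changedProperties[simIndex] = QString("simulationOwner:") + QString::number(priority);
    }
    // changedProperties is now { "simulationOwner:128", "velocity" }
}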
int EntityTree::processEditPacketData(NLPacket& packet, const unsigned char* editData, int maxLength,
const SharedNodePointer& senderNode) {
@ -661,7 +671,9 @@ int EntityTree::processEditPacketData(NLPacket& packet, const unsigned char* edi
qCDebug(entities) << " properties:" << properties;
}
if (wantTerseEditLogging()) {
qCDebug(entities) << "edit" << entityItemID.toString() << properties.listChangedProperties();
QList<QString> changedProperties = properties.listChangedProperties();
fixupTerseEditLogging(properties, changedProperties);
qCDebug(entities) << "edit" << entityItemID.toString() << changedProperties;
}
endLogging = usecTimestampNow();
@ -689,7 +701,9 @@ int EntityTree::processEditPacketData(NLPacket& packet, const unsigned char* edi
qCDebug(entities) << " properties:" << properties;
}
if (wantTerseEditLogging()) {
qCDebug(entities) << "add" << entityItemID.toString() << properties.listChangedProperties();
QList<QString> changedProperties = properties.listChangedProperties();
fixupTerseEditLogging(properties, changedProperties);
qCDebug(entities) << "add" << entityItemID.toString() << changedProperties;
}
endLogging = usecTimestampNow();

View file

@ -77,6 +77,7 @@ public:
virtual bool canProcessVersion(PacketVersion thisVersion) const
{ return thisVersion >= VERSION_ENTITIES_USE_METERS_AND_RADIANS; }
virtual bool handlesEditPacketType(PacketType packetType) const;
void fixupTerseEditLogging(EntityItemProperties& properties, QList<QString>& changedProperties);
virtual int processEditPacketData(NLPacket& packet, const unsigned char* editData, int maxLength,
const SharedNodePointer& senderNode);

View file

@ -18,6 +18,7 @@
#include "EntityItemProperties.h"
#include "EntityTree.h"
#include "EntityTreeElement.h"
#include "EntityTypes.h"
EntityTreeElement::EntityTreeElement(unsigned char* octalCode) : OctreeElement() {
init(octalCode);
@ -591,7 +592,8 @@ bool EntityTreeElement::findDetailedRayIntersection(const glm::vec3& origin, con
}
} else {
// if the entity type doesn't support a detailed intersection, then just return the non-AABox results
if (localDistance < distance) {
// Never intersect with particle effect entities
if (localDistance < distance && EntityTypes::getEntityTypeName(entity->getType()) != "ParticleEffect") {
distance = localDistance;
face = localFace;
surfaceNormal = localSurfaceNormal;
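Particle entities reach this non-detailed branch because supportsDetailedRayIntersection() now returns false for them (see ParticleEffectEntityItem.h below). As a side note, an equivalent check against the type enum would avoid the name lookup; the helper below is a sketch only, not the committed code, and the includes are assumed:

#include "EntityItem.h"
#include "EntityTypes.h"

// Sketch only: enum-based equivalent of the string comparison above.
static bool acceptNonDetailedHit(const EntityItemPointer& entity, float localDistance, float distance) {
    return localDistance < distance && entity->getType() != EntityTypes::ParticleEffect;
}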

View file

@ -94,6 +94,7 @@ const float ParticleEffectEntityItem::DEFAULT_RADIUS_SPREAD = 0.0f;
const float ParticleEffectEntityItem::DEFAULT_RADIUS_START = DEFAULT_PARTICLE_RADIUS;
const float ParticleEffectEntityItem::DEFAULT_RADIUS_FINISH = DEFAULT_PARTICLE_RADIUS;
const QString ParticleEffectEntityItem::DEFAULT_TEXTURES = "";
const bool ParticleEffectEntityItem::DEFAULT_ADDITIVE_BLENDING = false;
EntityItemPointer ParticleEffectEntityItem::factory(const EntityItemID& entityID, const EntityItemProperties& properties) {
@ -121,7 +122,8 @@ ParticleEffectEntityItem::ParticleEffectEntityItem(const EntityItemID& entityIte
_alphaMiddles(DEFAULT_MAX_PARTICLES, DEFAULT_ALPHA),
_alphaFinishes(DEFAULT_MAX_PARTICLES, DEFAULT_ALPHA),
_particleMaxBound(glm::vec3(1.0f, 1.0f, 1.0f)),
_particleMinBound(glm::vec3(-1.0f, -1.0f, -1.0f))
_particleMinBound(glm::vec3(-1.0f, -1.0f, -1.0f)),
_additiveBlending(DEFAULT_ADDITIVE_BLENDING)
{
_type = EntityTypes::ParticleEffect;
@ -355,6 +357,8 @@ EntityItemProperties ParticleEffectEntityItem::getProperties(EntityPropertyFlags
COPY_ENTITY_PROPERTY_TO_PROPERTIES(alphaStart, getAlphaStart);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(alphaFinish, getAlphaFinish);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(textures, getTextures);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(additiveBlending, getAdditiveBlending);
return properties;
}
@ -392,6 +396,7 @@ bool ParticleEffectEntityItem::setProperties(const EntityItemProperties& propert
SET_ENTITY_PROPERTY_FROM_PROPERTIES(alphaStart, setAlphaStart);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(alphaFinish, setAlphaFinish);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(textures, setTextures);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(additiveBlending, setAdditiveBlending);
if (somethingChanged) {
bool wantDebug = false;
@ -483,6 +488,10 @@ int ParticleEffectEntityItem::readEntitySubclassDataFromBuffer(const unsigned ch
READ_ENTITY_PROPERTY(PROP_AZIMUTH_FINISH, float, setAzimuthFinish);
}
if (args.bitstreamVersion >= VERSION_ENTITIES_PARTICLES_ADDITIVE_BLENDING) {
READ_ENTITY_PROPERTY(PROP_ADDITIVE_BLENDING, bool, setAdditiveBlending);
}
return bytesRead;
}
@ -520,6 +529,7 @@ EntityPropertyFlags ParticleEffectEntityItem::getEntityProperties(EncodeBitstrea
requestedProperties += PROP_POLAR_FINISH;
requestedProperties += PROP_AZIMUTH_START;
requestedProperties += PROP_AZIMUTH_FINISH;
requestedProperties += PROP_ADDITIVE_BLENDING;
return requestedProperties;
}
@ -562,6 +572,7 @@ void ParticleEffectEntityItem::appendSubclassData(OctreePacketData* packetData,
APPEND_ENTITY_PROPERTY(PROP_POLAR_FINISH, getPolarFinish());
APPEND_ENTITY_PROPERTY(PROP_AZIMUTH_START, getAzimuthStart());
APPEND_ENTITY_PROPERTY(PROP_AZIMUTH_FINISH, getAzimuthFinish());
APPEND_ENTITY_PROPERTY(PROP_ADDITIVE_BLENDING, getAdditiveBlending());
}
bool ParticleEffectEntityItem::isEmittingParticles() const {

View file

@ -209,6 +209,14 @@ public:
}
}
static const bool DEFAULT_ADDITIVE_BLENDING;
bool getAdditiveBlending() const { return _additiveBlending; }
void setAdditiveBlending(bool additiveBlending) {
_additiveBlending = additiveBlending;
}
virtual bool supportsDetailedRayIntersection() const { return false; }
protected:
bool isAnimatingSomething() const;
@ -219,7 +227,6 @@ protected:
void extendBounds(const glm::vec3& point);
void integrateParticle(quint32 index, float deltaTime);
quint32 getLivingParticleCount() const;
// the properties of this entity
rgbColor _color;
xColor _colorStart = DEFAULT_COLOR;
@ -284,6 +291,8 @@ protected:
// bounding volume
glm::vec3 _particleMaxBound;
glm::vec3 _particleMinBound;
bool _additiveBlending;
};
#endif // hifi_ParticleEffectEntityItem_h

View file

@ -0,0 +1,14 @@
//
// InputPluginsLogging.cpp
// libraries/input-plugins/src/input-plugins
//
// Created by Clement on 11/6/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "InputPluginsLogging.h"
Q_LOGGING_CATEGORY(inputplugins, "hifi.inputplugins")

View file

@ -0,0 +1,18 @@
//
// InputPluginsLogging.h
// libraries/input-plugins/src/input-plugins
//
// Created by Clement on 11/6/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_InputPluginsLogging_h
#define hifi_InputPluginsLogging_h
#include <QLoggingCategory>
Q_DECLARE_LOGGING_CATEGORY(inputplugins)
#endif // hifi_InputPluginsLogging_h

View file

@ -19,8 +19,8 @@
const QString KeyboardMouseDevice::NAME = "Keyboard/Mouse";
void KeyboardMouseDevice::update(float deltaTime, bool jointsCaptured) {
_axisStateMap.clear();
void KeyboardMouseDevice::pluginUpdate(float deltaTime, bool jointsCaptured) {
_inputDevice->update(deltaTime, jointsCaptured);
// For touch event, we need to check that the last event is not too long ago
// Maybe it's a Qt issue, but the touch event sequence (begin, update, end) is not always called properly
@ -35,26 +35,30 @@ void KeyboardMouseDevice::update(float deltaTime, bool jointsCaptured) {
}
}
void KeyboardMouseDevice::focusOutEvent() {
void KeyboardMouseDevice::InputDevice::update(float deltaTime, bool jointsCaptured) {
_axisStateMap.clear();
}
void KeyboardMouseDevice::InputDevice::focusOutEvent() {
_buttonPressedMap.clear();
};
}
void KeyboardMouseDevice::keyPressEvent(QKeyEvent* event) {
auto input = makeInput((Qt::Key) event->key());
auto result = _buttonPressedMap.insert(input.getChannel());
auto input = _inputDevice->makeInput((Qt::Key) event->key());
auto result = _inputDevice->_buttonPressedMap.insert(input.getChannel());
if (!result.second) {
// key pressed again? without catching the release event?
}
}
void KeyboardMouseDevice::keyReleaseEvent(QKeyEvent* event) {
auto input = makeInput((Qt::Key) event->key());
_buttonPressedMap.erase(input.getChannel());
auto input = _inputDevice->makeInput((Qt::Key) event->key());
_inputDevice->_buttonPressedMap.erase(input.getChannel());
}
void KeyboardMouseDevice::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
auto input = makeInput((Qt::MouseButton) event->button());
auto result = _buttonPressedMap.insert(input.getChannel());
auto input = _inputDevice->makeInput((Qt::MouseButton) event->button());
auto result = _inputDevice->_buttonPressedMap.insert(input.getChannel());
if (!result.second) {
// key pressed again? without catching the release event?
}
@ -65,32 +69,32 @@ void KeyboardMouseDevice::mousePressEvent(QMouseEvent* event, unsigned int devic
}
void KeyboardMouseDevice::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {
auto input = makeInput((Qt::MouseButton) event->button());
_buttonPressedMap.erase(input.getChannel());
auto input = _inputDevice->makeInput((Qt::MouseButton) event->button());
_inputDevice->_buttonPressedMap.erase(input.getChannel());
// if we pressed and released at the same location, then create a "_CLICKED" input for this button
// we might want to add some small tolerance to this so if you do a small drag it still counts as
// a click.
if (_mousePressAt == event->pos()) {
_buttonPressedMap.insert(makeInput((Qt::MouseButton) event->button(), true).getChannel());
_inputDevice->_buttonPressedMap.insert(_inputDevice->makeInput((Qt::MouseButton) event->button(), true).getChannel());
}
}
void KeyboardMouseDevice::eraseMouseClicked() {
_buttonPressedMap.erase(makeInput(Qt::LeftButton, true).getChannel());
_buttonPressedMap.erase(makeInput(Qt::MiddleButton, true).getChannel());
_buttonPressedMap.erase(makeInput(Qt::RightButton, true).getChannel());
_inputDevice->_buttonPressedMap.erase(_inputDevice->makeInput(Qt::LeftButton, true).getChannel());
_inputDevice->_buttonPressedMap.erase(_inputDevice->makeInput(Qt::MiddleButton, true).getChannel());
_inputDevice->_buttonPressedMap.erase(_inputDevice->makeInput(Qt::RightButton, true).getChannel());
}
void KeyboardMouseDevice::mouseMoveEvent(QMouseEvent* event, unsigned int deviceID) {
QPoint currentPos = event->pos();
QPoint currentMove = currentPos - _lastCursor;
_axisStateMap[makeInput(MOUSE_AXIS_X_POS).getChannel()] = (currentMove.x() > 0 ? currentMove.x() : 0.0f);
_axisStateMap[makeInput(MOUSE_AXIS_X_NEG).getChannel()] = (currentMove.x() < 0 ? -currentMove.x() : 0.0f);
_inputDevice->_axisStateMap[MOUSE_AXIS_X_POS] = (currentMove.x() > 0 ? currentMove.x() : 0.0f);
_inputDevice->_axisStateMap[MOUSE_AXIS_X_NEG] = (currentMove.x() < 0 ? -currentMove.x() : 0.0f);
// Y mouse is inverted: positive is pointing up the screen
_axisStateMap[makeInput(MOUSE_AXIS_Y_POS).getChannel()] = (currentMove.y() < 0 ? -currentMove.y() : 0.0f);
_axisStateMap[makeInput(MOUSE_AXIS_Y_NEG).getChannel()] = (currentMove.y() > 0 ? currentMove.y() : 0.0f);
_inputDevice->_axisStateMap[MOUSE_AXIS_Y_POS] = (currentMove.y() < 0 ? -currentMove.y() : 0.0f);
_inputDevice->_axisStateMap[MOUSE_AXIS_Y_NEG] = (currentMove.y() > 0 ? currentMove.y() : 0.0f);
_lastCursor = currentPos;
@ -100,10 +104,10 @@ void KeyboardMouseDevice::mouseMoveEvent(QMouseEvent* event, unsigned int device
void KeyboardMouseDevice::wheelEvent(QWheelEvent* event) {
auto currentMove = event->angleDelta() / 120.0f;
_axisStateMap[makeInput(MOUSE_AXIS_WHEEL_X_POS).getChannel()] = (currentMove.x() > 0 ? currentMove.x() : 0.0f);
_axisStateMap[makeInput(MOUSE_AXIS_WHEEL_X_NEG).getChannel()] = (currentMove.x() < 0 ? -currentMove.x() : 0.0f);
_axisStateMap[makeInput(MOUSE_AXIS_WHEEL_Y_POS).getChannel()] = (currentMove.y() > 0 ? currentMove.y() : 0.0f);
_axisStateMap[makeInput(MOUSE_AXIS_WHEEL_Y_NEG).getChannel()] = (currentMove.y() < 0 ? -currentMove.y() : 0.0f);
_inputDevice->_axisStateMap[_inputDevice->makeInput(MOUSE_AXIS_WHEEL_X_POS).getChannel()] = (currentMove.x() > 0 ? currentMove.x() : 0.0f);
_inputDevice->_axisStateMap[_inputDevice->makeInput(MOUSE_AXIS_WHEEL_X_NEG).getChannel()] = (currentMove.x() < 0 ? -currentMove.x() : 0.0f);
_inputDevice->_axisStateMap[_inputDevice->makeInput(MOUSE_AXIS_WHEEL_Y_POS).getChannel()] = (currentMove.y() > 0 ? currentMove.y() : 0.0f);
_inputDevice->_axisStateMap[_inputDevice->makeInput(MOUSE_AXIS_WHEEL_Y_NEG).getChannel()] = (currentMove.y() < 0 ? -currentMove.y() : 0.0f);
}
glm::vec2 evalAverageTouchPoints(const QList<QTouchEvent::TouchPoint>& points) {
@ -138,17 +142,17 @@ void KeyboardMouseDevice::touchUpdateEvent(const QTouchEvent* event) {
} else {
auto currentMove = currentPos - _lastTouch;
_axisStateMap[makeInput(TOUCH_AXIS_X_POS).getChannel()] = (currentMove.x > 0 ? currentMove.x : 0.0f);
_axisStateMap[makeInput(TOUCH_AXIS_X_NEG).getChannel()] = (currentMove.x < 0 ? -currentMove.x : 0.0f);
_inputDevice->_axisStateMap[_inputDevice->makeInput(TOUCH_AXIS_X_POS).getChannel()] = (currentMove.x > 0 ? currentMove.x : 0.0f);
_inputDevice->_axisStateMap[_inputDevice->makeInput(TOUCH_AXIS_X_NEG).getChannel()] = (currentMove.x < 0 ? -currentMove.x : 0.0f);
// Y mouse is inverted: positive is pointing up the screen
_axisStateMap[makeInput(TOUCH_AXIS_Y_POS).getChannel()] = (currentMove.y < 0 ? -currentMove.y : 0.0f);
_axisStateMap[makeInput(TOUCH_AXIS_Y_NEG).getChannel()] = (currentMove.y > 0 ? currentMove.y : 0.0f);
_inputDevice->_axisStateMap[_inputDevice->makeInput(TOUCH_AXIS_Y_POS).getChannel()] = (currentMove.y < 0 ? -currentMove.y : 0.0f);
_inputDevice->_axisStateMap[_inputDevice->makeInput(TOUCH_AXIS_Y_NEG).getChannel()] = (currentMove.y > 0 ? currentMove.y : 0.0f);
}
_lastTouch = currentPos;
}
controller::Input KeyboardMouseDevice::makeInput(Qt::Key code) const {
controller::Input KeyboardMouseDevice::InputDevice::makeInput(Qt::Key code) const {
auto shortCode = (uint16_t)(code & KEYBOARD_MASK);
if (shortCode != code) {
shortCode |= 0x0800; // add this bit instead of the way Qt::Key add a bit on the 3rd byte for some keys
@ -156,7 +160,7 @@ controller::Input KeyboardMouseDevice::makeInput(Qt::Key code) const {
return controller::Input(_deviceID, shortCode, controller::ChannelType::BUTTON);
}
controller::Input KeyboardMouseDevice::makeInput(Qt::MouseButton code, bool clicked) const {
controller::Input KeyboardMouseDevice::InputDevice::makeInput(Qt::MouseButton code, bool clicked) const {
switch (code) {
case Qt::LeftButton:
return controller::Input(_deviceID, clicked ? MOUSE_BUTTON_LEFT_CLICKED :
@ -172,19 +176,19 @@ controller::Input KeyboardMouseDevice::makeInput(Qt::MouseButton code, bool clic
};
}
controller::Input KeyboardMouseDevice::makeInput(KeyboardMouseDevice::MouseAxisChannel axis) const {
controller::Input KeyboardMouseDevice::InputDevice::makeInput(KeyboardMouseDevice::MouseAxisChannel axis) const {
return controller::Input(_deviceID, axis, controller::ChannelType::AXIS);
}
controller::Input KeyboardMouseDevice::makeInput(KeyboardMouseDevice::TouchAxisChannel axis) const {
controller::Input KeyboardMouseDevice::InputDevice::makeInput(KeyboardMouseDevice::TouchAxisChannel axis) const {
return controller::Input(_deviceID, axis, controller::ChannelType::AXIS);
}
controller::Input KeyboardMouseDevice::makeInput(KeyboardMouseDevice::TouchButtonChannel button) const {
controller::Input KeyboardMouseDevice::InputDevice::makeInput(KeyboardMouseDevice::TouchButtonChannel button) const {
return controller::Input(_deviceID, button, controller::ChannelType::BUTTON);
}
controller::Input::NamedVector KeyboardMouseDevice::getAvailableInputs() const {
controller::Input::NamedVector KeyboardMouseDevice::InputDevice::getAvailableInputs() const {
using namespace controller;
static QVector<Input::NamedPair> availableInputs;
static std::once_flag once;
@ -229,7 +233,7 @@ controller::Input::NamedVector KeyboardMouseDevice::getAvailableInputs() const {
return availableInputs;
}
QString KeyboardMouseDevice::getDefaultMappingConfig() const {
QString KeyboardMouseDevice::InputDevice::getDefaultMappingConfig() const {
static const QString MAPPING_JSON = PathUtils::resourcesPath() + "/controllers/keyboardMouse.json";
return MAPPING_JSON;
}

View file

@ -24,7 +24,7 @@ class QKeyEvent;
class QMouseEvent;
class QWheelEvent;
class KeyboardMouseDevice : public InputPlugin, public controller::InputDevice {
class KeyboardMouseDevice : public InputPlugin {
Q_OBJECT
public:
enum KeyboardChannel {
@ -64,22 +64,14 @@ public:
TOUCH_BUTTON_PRESS = TOUCH_AXIS_Y_NEG + 1,
};
KeyboardMouseDevice() : InputDevice("Keyboard") {}
// Plugin functions
virtual bool isSupported() const override { return true; }
virtual bool isJointController() const override { return false; }
const QString& getName() const override { return NAME; }
virtual void pluginFocusOutEvent() override { focusOutEvent(); }
virtual void pluginUpdate(float deltaTime, bool jointsCaptured) override { update(deltaTime, jointsCaptured); }
virtual void pluginFocusOutEvent() override { _inputDevice->focusOutEvent(); }
virtual void pluginUpdate(float deltaTime, bool jointsCaptured) override;
// Device functions
virtual controller::Input::NamedVector getAvailableInputs() const override;
virtual QString getDefaultMappingConfig() const override;
virtual void update(float deltaTime, bool jointsCaptured) override;
virtual void focusOutEvent() override;
void keyPressEvent(QKeyEvent* event);
void keyReleaseEvent(QKeyEvent* event);
@ -94,21 +86,40 @@ public:
void wheelEvent(QWheelEvent* event);
// Let's make it easy for Qt because we assume we love Qt forever
controller::Input makeInput(Qt::Key code) const;
controller::Input makeInput(Qt::MouseButton code, bool clicked = false) const;
controller::Input makeInput(MouseAxisChannel axis) const;
controller::Input makeInput(TouchAxisChannel axis) const;
controller::Input makeInput(TouchButtonChannel button) const;
static const QString NAME;
protected:
class InputDevice : public controller::InputDevice {
public:
InputDevice() : controller::InputDevice("Keyboard") {}
private:
// Device functions
virtual controller::Input::NamedVector getAvailableInputs() const override;
virtual QString getDefaultMappingConfig() const override;
virtual void update(float deltaTime, bool jointsCaptured) override;
virtual void focusOutEvent() override;
// Let's make it easy for Qt because we assume we love Qt forever
controller::Input makeInput(Qt::Key code) const;
controller::Input makeInput(Qt::MouseButton code, bool clicked = false) const;
controller::Input makeInput(MouseAxisChannel axis) const;
controller::Input makeInput(TouchAxisChannel axis) const;
controller::Input makeInput(TouchButtonChannel button) const;
friend class KeyboardMouseDevice;
};
public:
const std::shared_ptr<InputDevice>& getInputDevice() const { return _inputDevice; }
protected:
QPoint _lastCursor;
QPoint _mousePressAt;
glm::vec2 _lastTouch;
std::shared_ptr<InputDevice> _inputDevice { std::make_shared<InputDevice>() };
bool _isTouching = false;
std::chrono::high_resolution_clock _clock;
std::chrono::high_resolution_clock::time_point _lastTouchTime;
};
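The same plugin-owns-a-nested-InputDevice refactor is applied to SixenseManager and ViveControllerManager below. Here is a framework-free sketch of the ownership pattern; DeviceBase and ExamplePlugin are stand-ins for controller::InputDevice and an InputPlugin, not real classes:

#include <map>
#include <memory>

struct DeviceBase {                      // stand-in for controller::InputDevice
    virtual ~DeviceBase() = default;
    virtual void update(float deltaTime) = 0;
    virtual void focusOutEvent() = 0;
    std::map<int, float> _axisStateMap;
    std::map<int, bool> _buttonPressedMap;
};

class ExamplePlugin {                    // stand-in for the InputPlugin side
public:
    // The plugin only forwards; all per-frame state lives in the shared device.
    void pluginUpdate(float deltaTime) { _inputDevice->update(deltaTime); }
    void pluginFocusOutEvent() { _inputDevice->focusOutEvent(); }
    std::shared_ptr<DeviceBase> getInputDevice() const { return _inputDevice; }

private:
    class InputDevice : public DeviceBase {
        void update(float) override { _axisStateMap.clear(); }
        void focusOutEvent() override { _buttonPressedMap.clear(); }
    };
    // shared_ptr so a UserInputMapper-like registry can co-own the device.
    std::shared_ptr<InputDevice> _inputDevice { std::make_shared<InputDevice>() };
};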

View file

@ -9,53 +9,37 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <vector>
#include "SixenseManager.h"
#ifdef HAVE_SIXENSE
#include <sixense.h>
#endif
#include <QCoreApplication>
#include <QtCore/QSysInfo>
#include <QtGlobal>
#include <controllers/UserInputMapper.h>
#include <GLMHelpers.h>
#include <NumericalConstants.h>
#include <PerfStat.h>
#include <SettingHandle.h>
#include <plugins/PluginContainer.h>
#include <PathUtils.h>
#include <NumericalConstants.h>
#include <PerfStat.h>
#include <plugins/PluginContainer.h>
#include <SettingHandle.h>
#include <UserActivityLogger.h>
#include <controllers/UserInputMapper.h>
#include "SixenseManager.h"
#include "InputPluginsLogging.h"
static const unsigned int BUTTON_0 = 1U << 0; // the skinny button between 1 and 2
static const unsigned int BUTTON_1 = 1U << 5;
static const unsigned int BUTTON_2 = 1U << 6;
static const unsigned int BUTTON_3 = 1U << 3;
static const unsigned int BUTTON_4 = 1U << 4;
static const unsigned int BUTTON_FWD = 1U << 7;
static const unsigned int BUTTON_TRIGGER = 1U << 8;
#ifdef HAVE_SIXENSE
#include "sixense.h"
#endif
// TODO: This should not be here
#include <QLoggingCategory>
Q_DECLARE_LOGGING_CATEGORY(inputplugins)
Q_LOGGING_CATEGORY(inputplugins, "hifi.inputplugins")
#ifdef HAVE_SIXENSE
const int CALIBRATION_STATE_IDLE = 0;
const int CALIBRATION_STATE_IN_PROGRESS = 1;
const int CALIBRATION_STATE_COMPLETE = 2;
const glm::vec3 DEFAULT_AVATAR_POSITION(-0.25f, -0.35f, -0.3f); // in hydra frame
const float CONTROLLER_THRESHOLD = 0.35f;
#endif
#ifdef __APPLE__
typedef int (*SixenseBaseFunction)();
typedef int (*SixenseTakeIntFunction)(int);
#ifdef HAVE_SIXENSE
typedef int (*SixenseTakeIntAndSixenseControllerData)(int, sixenseControllerData*);
#endif
#endif
const glm::vec3 SixenseManager::DEFAULT_AVATAR_POSITION { -0.25f, -0.35f, -0.3f }; // in hydra frame
const float SixenseManager::CONTROLLER_THRESHOLD { 0.35f };
const QString SixenseManager::NAME = "Sixense";
const QString SixenseManager::HYDRA_ID_STRING = "Razer Hydra";
@ -64,15 +48,6 @@ const QString MENU_PARENT = "Avatar";
const QString MENU_NAME = "Sixense";
const QString MENU_PATH = MENU_PARENT + ">" + MENU_NAME;
const QString TOGGLE_SMOOTH = "Smooth Sixense Movement";
const float DEFAULT_REACH_LENGTH = 1.5f;
static std::shared_ptr<SixenseManager> instance;
SixenseManager::SixenseManager() :
InputDevice("Hydra"),
_reachLength(DEFAULT_REACH_LENGTH)
{
instance = std::shared_ptr<SixenseManager>(this);
}
bool SixenseManager::isSupported() const {
#ifdef HAVE_SIXENSE
@ -90,43 +65,16 @@ bool SixenseManager::isSupported() const {
void SixenseManager::activate() {
InputPlugin::activate();
#ifdef HAVE_SIXENSE
_calibrationState = CALIBRATION_STATE_IDLE;
_avatarPosition = DEFAULT_AVATAR_POSITION;
_container->addMenu(MENU_PATH);
_container->addMenuItem(MENU_PATH, TOGGLE_SMOOTH,
[this] (bool clicked) { this->setSixenseFilter(clicked); },
[this] (bool clicked) { setSixenseFilter(clicked); },
true, true);
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
userInputMapper->registerDevice(instance);
userInputMapper->registerDevice(_inputDevice);
#ifdef __APPLE__
if (!_sixenseLibrary) {
#ifdef SIXENSE_LIB_FILENAME
_sixenseLibrary = new QLibrary(SIXENSE_LIB_FILENAME);
#else
const QString SIXENSE_LIBRARY_NAME = "libsixense_x64";
QString frameworkSixenseLibrary = QCoreApplication::applicationDirPath() + "/../Frameworks/"
+ SIXENSE_LIBRARY_NAME;
_sixenseLibrary = new QLibrary(frameworkSixenseLibrary);
#endif
}
if (_sixenseLibrary->load()){
qCDebug(inputplugins) << "Loaded sixense library for hydra support -" << _sixenseLibrary->fileName();
} else {
qCDebug(inputplugins) << "Sixense library at" << _sixenseLibrary->fileName() << "failed to load."
<< "Continuing without hydra support.";
return;
}
SixenseBaseFunction sixenseInit = (SixenseBaseFunction) _sixenseLibrary->resolve("sixenseInit");
#endif
loadSettings();
sixenseInit();
#endif
@ -139,54 +87,37 @@ void SixenseManager::deactivate() {
_container->removeMenuItem(MENU_NAME, TOGGLE_SMOOTH);
_container->removeMenu(MENU_PATH);
_poseStateMap.clear();
_collectedSamples.clear();
_inputDevice->_poseStateMap.clear();
_inputDevice->_collectedSamples.clear();
if (_deviceID != controller::Input::INVALID_DEVICE) {
if (_inputDevice->_deviceID != controller::Input::INVALID_DEVICE) {
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
userInputMapper->removeDevice(_deviceID);
userInputMapper->removeDevice(_inputDevice->_deviceID);
}
#ifdef __APPLE__
SixenseBaseFunction sixenseExit = (SixenseBaseFunction)_sixenseLibrary->resolve("sixenseExit");
#endif
sixenseExit();
#ifdef __APPLE__
delete _sixenseLibrary;
#endif
saveSettings();
#endif
}
void SixenseManager::setSixenseFilter(bool filter) {
#ifdef HAVE_SIXENSE
#ifdef __APPLE__
SixenseTakeIntFunction sixenseSetFilterEnabled = (SixenseTakeIntFunction) _sixenseLibrary->resolve("sixenseSetFilterEnabled");
#endif
int newFilter = filter ? 1 : 0;
sixenseSetFilterEnabled(newFilter);
sixenseSetFilterEnabled(filter ? 1 : 0);
#endif
}
void SixenseManager::update(float deltaTime, bool jointsCaptured) {
// FIXME - Some of the code in update() will crash if you haven't actually activated the
// plugin. But we want to register with the UserInputMapper if we don't call this.
// We need to clean this up.
//if (!_activated) {
// return;
//}
void SixenseManager::pluginUpdate(float deltaTime, bool jointsCaptured) {
_inputDevice->update(deltaTime, jointsCaptured);
if (_inputDevice->_requestReset) {
_container->requestReset();
_inputDevice->_requestReset = false;
}
}
void SixenseManager::InputDevice::update(float deltaTime, bool jointsCaptured) {
#ifdef HAVE_SIXENSE
_buttonPressedMap.clear();
#ifdef __APPLE__
SixenseBaseFunction sixenseGetNumActiveControllers =
(SixenseBaseFunction) _sixenseLibrary->resolve("sixenseGetNumActiveControllers");
#endif
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
static const float MAX_DISCONNECTED_TIME = 2.0f;
static bool disconnected { false };
static float disconnectedInterval { 0.0f };
@ -213,30 +144,17 @@ void SixenseManager::update(float deltaTime, bool jointsCaptured) {
// FIXME send this message once when we've positively identified hydra hardware
//UserActivityLogger::getInstance().connectedDevice("spatial_controller", "hydra");
#ifdef __APPLE__
SixenseBaseFunction sixenseGetMaxControllers =
(SixenseBaseFunction) _sixenseLibrary->resolve("sixenseGetMaxControllers");
#endif
int maxControllers = sixenseGetMaxControllers();
// we only support two controllers
sixenseControllerData controllers[2];
#ifdef __APPLE__
SixenseTakeIntFunction sixenseIsControllerEnabled =
(SixenseTakeIntFunction) _sixenseLibrary->resolve("sixenseIsControllerEnabled");
SixenseTakeIntAndSixenseControllerData sixenseGetNewestData =
(SixenseTakeIntAndSixenseControllerData) _sixenseLibrary->resolve("sixenseGetNewestData");
#endif
SixenseControllerData controllers[2];
int numActiveControllers = 0;
for (int i = 0; i < maxControllers && numActiveControllers < 2; i++) {
if (!sixenseIsControllerEnabled(i)) {
continue;
}
sixenseControllerData* data = controllers + numActiveControllers;
SixenseControllerData* data = controllers + numActiveControllers;
++numActiveControllers;
sixenseGetNewestData(i, data);
@ -293,36 +211,37 @@ void SixenseManager::update(float deltaTime, bool jointsCaptured) {
// (4) assume that the orb is on a flat surface (yAxis is UP)
// (5) compute the forward direction (zAxis = xAxis cross yAxis)
const float MINIMUM_ARM_REACH = 0.3f; // meters
const float MAXIMUM_NOISE_LEVEL = 0.05f; // meters
const quint64 LOCK_DURATION = USECS_PER_SECOND / 4; // time for lock to be acquired
static const float MINIMUM_ARM_REACH = 0.3f; // meters
static const float MAXIMUM_NOISE_LEVEL = 0.05f; // meters
static const quint64 LOCK_DURATION = USECS_PER_SECOND / 4; // time for lock to be acquired
void SixenseManager::updateCalibration(void* controllersX) {
auto controllers = reinterpret_cast<sixenseControllerData*>(controllersX);
const sixenseControllerData* dataLeft = controllers;
const sixenseControllerData* dataRight = controllers + 1;
static bool calibrationRequested(SixenseControllerData* controllers) {
return (controllers[0].buttons == BUTTON_FWD && controllers[1].buttons == BUTTON_FWD);
}
// calibration only happens while both hands are holding BUTTON_FORWARD
if (dataLeft->buttons != BUTTON_FWD || dataRight->buttons != BUTTON_FWD) {
if (_calibrationState == CALIBRATION_STATE_IDLE) {
return;
}
void SixenseManager::InputDevice::updateCalibration(SixenseControllerData* controllers) {
const SixenseControllerData* dataLeft = controllers;
const SixenseControllerData* dataRight = controllers + 1;
// Calibration buttons aren't set, so check the state, and request a reset if necessary.
if (!calibrationRequested(controllers)) {
switch (_calibrationState) {
case CALIBRATION_STATE_COMPLETE:
{
// compute calibration results
_avatarPosition = - 0.5f * (_reachLeft + _reachRight); // neck is midway between right and left hands
glm::vec3 xAxis = glm::normalize(_reachRight - _reachLeft);
glm::vec3 zAxis = glm::normalize(glm::cross(xAxis, Vectors::UNIT_Y));
xAxis = glm::normalize(glm::cross(Vectors::UNIT_Y, zAxis));
_reachLength = glm::dot(xAxis, _reachRight - _reachLeft);
_avatarRotation = glm::inverse(glm::quat_cast(glm::mat3(xAxis, Vectors::UNIT_Y, zAxis)));
const float Y_OFFSET_CALIBRATED_HANDS_TO_AVATAR = -0.3f;
_avatarPosition.y += Y_OFFSET_CALIBRATED_HANDS_TO_AVATAR;
_container->requestReset();
qCDebug(inputplugins, "success: sixense calibration");
}
break;
case CALIBRATION_STATE_IDLE:
return;
case CALIBRATION_STATE_COMPLETE: {
// compute calibration results
_avatarPosition = -0.5f * (_reachLeft + _reachRight); // neck is midway between right and left hands
glm::vec3 xAxis = glm::normalize(_reachRight - _reachLeft);
glm::vec3 zAxis = glm::normalize(glm::cross(xAxis, Vectors::UNIT_Y));
xAxis = glm::normalize(glm::cross(Vectors::UNIT_Y, zAxis));
_avatarRotation = glm::inverse(glm::quat_cast(glm::mat3(xAxis, Vectors::UNIT_Y, zAxis)));
const float Y_OFFSET_CALIBRATED_HANDS_TO_AVATAR = -0.3f;
_avatarPosition.y += Y_OFFSET_CALIBRATED_HANDS_TO_AVATAR;
qCDebug(inputplugins, "success: sixense calibration");
_requestReset = true;
}
break;
default:
qCDebug(inputplugins, "failed: sixense calibration");
break;
@ -332,6 +251,7 @@ void SixenseManager::updateCalibration(void* controllersX) {
return;
}
// Calibration buttons are set, continue calibration work
// NOTE: Sixense API returns pos data in millimeters but we IMMEDIATELY convert to meters.
const float* pos = dataLeft->pos;
glm::vec3 positionLeft(pos[0], pos[1], pos[2]);
@ -340,6 +260,7 @@ void SixenseManager::updateCalibration(void* controllersX) {
glm::vec3 positionRight(pos[0], pos[1], pos[2]);
positionRight *= METERS_PER_MILLIMETER;
// Gather initial calibration data
if (_calibrationState == CALIBRATION_STATE_IDLE) {
float reach = glm::distance(positionLeft, positionRight);
if (reach > 2.0f * MINIMUM_ARM_REACH) {
@ -382,15 +303,12 @@ void SixenseManager::updateCalibration(void* controllersX) {
#endif // HAVE_SIXENSE
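A self-contained restatement of the math in the CALIBRATION_STATE_COMPLETE branch above; the function and parameter names are illustrative, and the local UNIT_Y stands in for Vectors::UNIT_Y:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Given the two reach vectors captured while both hands hold BUTTON_FWD,
// derive the hydra-frame avatar position and rotation.
void computeCalibration(const glm::vec3& reachLeft, const glm::vec3& reachRight,
                        glm::vec3& avatarPosition, glm::quat& avatarRotation) {
    const glm::vec3 UNIT_Y(0.0f, 1.0f, 0.0f);
    avatarPosition = -0.5f * (reachLeft + reachRight);         // neck is midway between the hands
    glm::vec3 xAxis = glm::normalize(reachRight - reachLeft);  // left-to-right across the shoulders
    glm::vec3 zAxis = glm::normalize(glm::cross(xAxis, UNIT_Y));
    xAxis = glm::normalize(glm::cross(UNIT_Y, zAxis));         // re-orthogonalize
    avatarRotation = glm::inverse(glm::quat_cast(glm::mat3(xAxis, UNIT_Y, zAxis)));
    const float Y_OFFSET_CALIBRATED_HANDS_TO_AVATAR = -0.3f;
    avatarPosition.y += Y_OFFSET_CALIBRATED_HANDS_TO_AVATAR;
}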
void SixenseManager::focusOutEvent() {
void SixenseManager::InputDevice::focusOutEvent() {
_axisStateMap.clear();
_buttonPressedMap.clear();
};
void SixenseManager::handleAxisEvent(float stickX, float stickY, float trigger, bool left) {
}
void SixenseManager::handleButtonEvent(unsigned int buttons, bool left) {
void SixenseManager::InputDevice::handleButtonEvent(unsigned int buttons, bool left) {
using namespace controller;
if (buttons & BUTTON_0) {
_buttonPressedMap.insert(left ? BACK : START);
@ -415,7 +333,7 @@ void SixenseManager::handleButtonEvent(unsigned int buttons, bool left) {
}
}
void SixenseManager::handlePoseEvent(float deltaTime, glm::vec3 position, glm::quat rotation, bool left) {
void SixenseManager::InputDevice::handlePoseEvent(float deltaTime, glm::vec3 position, glm::quat rotation, bool left) {
#ifdef HAVE_SIXENSE
auto hand = left ? controller::StandardPoseChannel::LEFT_HAND : controller::StandardPoseChannel::RIGHT_HAND;
@ -480,8 +398,6 @@ void SixenseManager::handlePoseEvent(float deltaTime, glm::vec3 position, glm::q
glm::vec3 velocity(0.0f);
glm::quat angularVelocity;
if (prevPose.isValid() && deltaTime > std::numeric_limits<float>::epsilon()) {
velocity = (position - prevPose.getTranslation()) / deltaTime;
@ -519,9 +435,7 @@ static const auto R2 = controller::A;
static const auto R3 = controller::B;
static const auto R4 = controller::Y;
using namespace controller;
controller::Input::NamedVector SixenseManager::getAvailableInputs() const {
controller::Input::NamedVector SixenseManager::InputDevice::getAvailableInputs() const {
using namespace controller;
static const Input::NamedVector availableInputs {
makePair(L0, "L0"),
@ -551,7 +465,7 @@ controller::Input::NamedVector SixenseManager::getAvailableInputs() const {
};
QString SixenseManager::getDefaultMappingConfig() const {
QString SixenseManager::InputDevice::getDefaultMappingConfig() const {
static const QString MAPPING_JSON = PathUtils::resourcesPath() + "/controllers/hydra.json";
return MAPPING_JSON;
}
@ -562,9 +476,8 @@ void SixenseManager::saveSettings() const {
QString idString = getID();
settings.beginGroup(idString);
{
settings.setVec3Value(QString("avatarPosition"), _avatarPosition);
settings.setQuatValue(QString("avatarRotation"), _avatarRotation);
settings.setValue(QString("reachLength"), QVariant(_reachLength));
settings.setVec3Value(QString("avatarPosition"), _inputDevice->_avatarPosition);
settings.setQuatValue(QString("avatarRotation"), _inputDevice->_avatarRotation);
}
settings.endGroup();
}
@ -574,9 +487,8 @@ void SixenseManager::loadSettings() {
QString idString = getID();
settings.beginGroup(idString);
{
settings.getVec3ValueIfValid(QString("avatarPosition"), _avatarPosition);
settings.getQuatValueIfValid(QString("avatarRotation"), _avatarRotation);
settings.getFloatValueIfValid(QString("reachLength"), _reachLength);
settings.getVec3ValueIfValid(QString("avatarPosition"), _inputDevice->_avatarPosition);
settings.getQuatValueIfValid(QString("avatarRotation"), _inputDevice->_avatarRotation);
}
settings.endGroup();
}

View file

@ -12,18 +12,6 @@
#ifndef hifi_SixenseManager_h
#define hifi_SixenseManager_h
#ifdef HAVE_SIXENSE
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include "sixense.h"
#ifdef __APPLE__
#include <QCoreApplication>
#include <qlibrary.h>
#endif
#endif
#include <SimpleMovingAverage.h>
#include <controllers/InputDevice.h>
@ -31,24 +19,13 @@
#include "InputPlugin.h"
class QLibrary;
const unsigned int BUTTON_0 = 1U << 0; // the skinny button between 1 and 2
const unsigned int BUTTON_1 = 1U << 5;
const unsigned int BUTTON_2 = 1U << 6;
const unsigned int BUTTON_3 = 1U << 3;
const unsigned int BUTTON_4 = 1U << 4;
const unsigned int BUTTON_FWD = 1U << 7;
const unsigned int BUTTON_TRIGGER = 1U << 8;
const bool DEFAULT_INVERT_SIXENSE_MOUSE_BUTTONS = false;
struct _sixenseControllerData;
using SixenseControllerData = _sixenseControllerData;
// Handles interaction with the Sixense SDK (e.g., Razer Hydra).
class SixenseManager : public InputPlugin, public controller::InputDevice {
class SixenseManager : public InputPlugin {
Q_OBJECT
public:
SixenseManager();
// Plugin functions
virtual bool isSupported() const override;
virtual bool isJointController() const override { return true; }
@ -58,15 +35,8 @@ public:
virtual void activate() override;
virtual void deactivate() override;
virtual void pluginFocusOutEvent() override { focusOutEvent(); }
virtual void pluginUpdate(float deltaTime, bool jointsCaptured) override { update(deltaTime, jointsCaptured); }
// Device functions
virtual controller::Input::NamedVector getAvailableInputs() const override;
virtual QString getDefaultMappingConfig() const override;
virtual void update(float deltaTime, bool jointsCaptured) override;
virtual void focusOutEvent() override;
virtual void pluginFocusOutEvent() override { _inputDevice->focusOutEvent(); }
virtual void pluginUpdate(float deltaTime, bool jointsCaptured) override;
virtual void saveSettings() const override;
virtual void loadSettings() override;
@ -74,39 +44,54 @@ public:
public slots:
void setSixenseFilter(bool filter);
private:
void handleButtonEvent(unsigned int buttons, bool left);
void handleAxisEvent(float x, float y, float trigger, bool left);
void handlePoseEvent(float deltaTime, glm::vec3 position, glm::quat rotation, bool left);
void updateCalibration(void* controllers);
int _calibrationState;
// these are calibration results
glm::vec3 _avatarPosition; // in hydra-frame
glm::quat _avatarRotation; // in hydra-frame
float _reachLength;
// these are measured values used to compute the calibration results
quint64 _lockExpiry;
glm::vec3 _averageLeft;
glm::vec3 _averageRight;
glm::vec3 _reachLeft;
glm::vec3 _reachRight;
float _lastDistance;
bool _useSixenseFilter = true;
private:
static const int MAX_NUM_AVERAGING_SAMPLES = 50; // At ~100 updates per seconds this means averaging over ~.5s
using Samples = std::pair< MovingAverage< glm::vec3, MAX_NUM_AVERAGING_SAMPLES>, MovingAverage< glm::vec4, MAX_NUM_AVERAGING_SAMPLES> >;
using MovingAverageMap = std::map< int, Samples >;
MovingAverageMap _collectedSamples;
static const int CALIBRATION_STATE_IDLE = 0;
static const int CALIBRATION_STATE_IN_PROGRESS = 1;
static const int CALIBRATION_STATE_COMPLETE = 2;
static const glm::vec3 DEFAULT_AVATAR_POSITION;
static const float CONTROLLER_THRESHOLD;
#ifdef __APPLE__
QLibrary* _sixenseLibrary { nullptr };
#endif
template<typename T>
using SampleAverage = MovingAverage<T, MAX_NUM_AVERAGING_SAMPLES>;
using Samples = std::pair<SampleAverage<glm::vec3>, SampleAverage<glm::vec4>>;
using MovingAverageMap = std::map<int, Samples>;
class InputDevice : public controller::InputDevice {
public:
InputDevice() : controller::InputDevice("Hydra") {}
private:
// Device functions
virtual controller::Input::NamedVector getAvailableInputs() const override;
virtual QString getDefaultMappingConfig() const override;
virtual void update(float deltaTime, bool jointsCaptured) override;
virtual void focusOutEvent() override;
void handleButtonEvent(unsigned int buttons, bool left);
void handlePoseEvent(float deltaTime, glm::vec3 position, glm::quat rotation, bool left);
void updateCalibration(SixenseControllerData* controllers);
friend class SixenseManager;
MovingAverageMap _collectedSamples;
int _calibrationState { CALIBRATION_STATE_IDLE };
// these are calibration results
glm::vec3 _avatarPosition { DEFAULT_AVATAR_POSITION }; // in hydra-frame
glm::quat _avatarRotation; // in hydra-frame
float _lastDistance;
bool _requestReset { false };
// these are measured values used to compute the calibration results
quint64 _lockExpiry;
glm::vec3 _averageLeft;
glm::vec3 _averageRight;
glm::vec3 _reachLeft;
glm::vec3 _reachRight;
};
std::shared_ptr<InputDevice> _inputDevice { std::make_shared<InputDevice>() };
static const QString NAME;
static const QString HYDRA_ID_STRING;
};

View file

@ -0,0 +1,153 @@
//
// SixenseSupportOSX.cpp
// libraries/input-plugins/src/input-plugins
//
// Created by Clement on 10/20/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Mock implementation of sixense.h to hide dynamic linking on OS X
#if defined(__APPLE__) && defined(HAVE_SIXENSE)
#include <type_traits>
#include <sixense.h>
#include <QtCore/QCoreApplication>
#include <QtCore/QLibrary>
#include "InputPluginsLogging.h"
#ifndef SIXENSE_LIB_FILENAME
#define SIXENSE_LIB_FILENAME QCoreApplication::applicationDirPath() + "/../Frameworks/libsixense_x64"
#endif
using Library = std::unique_ptr<QLibrary>;
static Library SIXENSE;
struct Callable {
template<typename... Args>
int operator() (Args&&... args){
return reinterpret_cast<int(*)(Args...)>(function)(std::forward<Args>(args)...);
}
QFunctionPointer function;
};
Callable resolve(const Library& library, const char* name) {
Q_ASSERT_X(library && library->isLoaded(), __FUNCTION__, "Sixense library not loaded");
auto function = library->resolve(name);
Q_ASSERT_X(function, __FUNCTION__, std::string("Could not resolve ").append(name).c_str());
return Callable { function };
}
#define FORWARD resolve(SIXENSE, __FUNCTION__)
void loadSixense() {
Q_ASSERT_X(!(SIXENSE && SIXENSE->isLoaded()), __FUNCTION__, "Sixense library already loaded");
SIXENSE.reset(new QLibrary(SIXENSE_LIB_FILENAME));
Q_CHECK_PTR(SIXENSE);
if (SIXENSE->load()){
qDebug() << "Loaded sixense library for hydra support -" << SIXENSE->fileName();
} else {
qDebug() << "Sixense library at" << SIXENSE->fileName() << "failed to load:" << SIXENSE->errorString();
qDebug() << "Continuing without hydra support.";
}
}
void unloadSixense() {
SIXENSE->unload();
}
// sixense.h wrapper for OSX dynamic linking
int sixenseInit() {
loadSixense();
return FORWARD();
}
int sixenseExit() {
auto returnCode = FORWARD();
unloadSixense();
return returnCode;
}
int sixenseGetMaxBases() {
return FORWARD();
}
int sixenseSetActiveBase(int i) {
return FORWARD(i);
}
int sixenseIsBaseConnected(int i) {
return FORWARD(i);
}
int sixenseGetMaxControllers() {
return FORWARD();
}
int sixenseIsControllerEnabled(int which) {
return FORWARD(which);
}
int sixenseGetNumActiveControllers() {
return FORWARD();
}
int sixenseGetHistorySize() {
return FORWARD();
}
int sixenseGetData(int which, int index_back, sixenseControllerData* data) {
return FORWARD(which, index_back, data);
}
int sixenseGetAllData(int index_back, sixenseAllControllerData* data) {
return FORWARD(index_back, data);
}
int sixenseGetNewestData(int which, sixenseControllerData* data) {
return FORWARD(which, data);
}
int sixenseGetAllNewestData(sixenseAllControllerData* data) {
return FORWARD(data);
}
int sixenseSetHemisphereTrackingMode(int which_controller, int state) {
return FORWARD(which_controller, state);
}
int sixenseGetHemisphereTrackingMode(int which_controller, int* state) {
return FORWARD(which_controller, state);
}
int sixenseAutoEnableHemisphereTracking(int which_controller) {
return FORWARD(which_controller);
}
int sixenseSetHighPriorityBindingEnabled(int on_or_off) {
return FORWARD(on_or_off);
}
int sixenseGetHighPriorityBindingEnabled(int* on_or_off) {
return FORWARD(on_or_off);
}
int sixenseTriggerVibration(int controller_id, int duration_100ms, int pattern_id) {
return FORWARD(controller_id, duration_100ms, pattern_id);
}
int sixenseSetFilterEnabled(int on_or_off) {
return FORWARD(on_or_off);
}
int sixenseGetFilterEnabled(int* on_or_off) {
return FORWARD(on_or_off);
}
int sixenseSetFilterParams(float near_range, float near_val, float far_range, float far_val) {
return FORWARD(near_range, near_val, far_range, far_val);
}
int sixenseGetFilterParams(float* near_range, float* near_val, float* far_range, float* far_val) {
return FORWARD(near_range, near_val, far_range, far_val);
}
int sixenseSetBaseColor(unsigned char red, unsigned char green, unsigned char blue) {
return FORWARD(red, green, blue);
}
int sixenseGetBaseColor(unsigned char* red, unsigned char* green, unsigned char* blue) {
return FORWARD(red, green, blue);
}
#endif
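A stand-alone sketch of the Callable/FORWARD trick used in the wrapper above: a type-erased QFunctionPointer (void(*)()) is cast back to the expected signature at the call site, keyed by __FUNCTION__. The add() example is illustrative only; the real code resolves symbols out of libsixense via QLibrary::resolve():

#include <utility>

using GenericFunction = void (*)();   // same shape as QFunctionPointer

struct Callable {
    template<typename... Args>
    int operator()(Args&&... args) {
        return reinterpret_cast<int (*)(Args...)>(function)(std::forward<Args>(args)...);
    }
    GenericFunction function;
};

static int add(int a, int b) { return a + b; }

int main() {
    Callable forwarded { reinterpret_cast<GenericFunction>(&add) };
    return forwarded(2, 3) == 5 ? 0 : 1;   // mirrors: int sixenseSetFilterEnabled(int x) { return FORWARD(x); }
}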

View file

@ -21,11 +21,10 @@
const float MAX_AXIS = 75.0f; // max forward = 2x speed
static std::shared_ptr<SpacemouseDevice> instance;
SpacemouseDevice::SpacemouseDevice() :
InputDevice("Spacemouse")
static std::shared_ptr<SpacemouseDevice> instance = std::make_shared<SpacemouseDevice>();
SpacemouseDevice::SpacemouseDevice() : InputDevice("Spacemouse")
{
instance = std::shared_ptr<SpacemouseDevice>(this);
}
void SpacemouseDevice::focusOutEvent() {
@ -118,14 +117,6 @@ void SpacemouseDevice::update(float deltaTime, bool jointsCaptured) {
// for osx the api will call DeviceAddedHandler or DeviceRemoveHandler when a 3Dconnexion device is attached or detached
}
SpacemouseManager& SpacemouseManager::getInstance() {
static SpacemouseManager sharedInstance;
if (instance == nullptr) {
new SpacemouseDevice();
}
return sharedInstance;
}
void SpacemouseManager::ManagerFocusOutEvent() {
instance->focusOutEvent();
}

View file

@ -23,7 +23,6 @@
class SpacemouseManager : public QObject {
Q_OBJECT
public:
static SpacemouseManager& getInstance();
void ManagerFocusOutEvent();
void init();
void destroy() {};
@ -92,7 +91,6 @@ class SpacemouseManager : public QObject, public QAbstractNativeEventFilter {
public:
SpacemouseManager() {};
static SpacemouseManager& getInstance();
void init();
void destroy();
bool Is3dmouseAttached();
@ -169,7 +167,6 @@ private:
class SpacemouseManager : public QObject {
Q_OBJECT
public:
static SpacemouseManager& getInstance();
void init();
void destroy();
bool Is3dmouseAttached();

View file

@ -27,12 +27,13 @@
#include <controllers/StandardControls.h>
#ifdef Q_OS_WIN
extern vr::IVRSystem* _hmd;
extern int hmdRefCount;
extern vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
extern mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
#endif
vr::IVRSystem* acquireOpenVrSystem();
void releaseOpenVrSystem();
const float CONTROLLER_LENGTH_OFFSET = 0.0762f; // three inches
const glm::vec3 CONTROLLER_OFFSET = glm::vec3(CONTROLLER_LENGTH_OFFSET / 2.0f, CONTROLLER_LENGTH_OFFSET / 2.0f, 2.0f * CONTROLLER_LENGTH_OFFSET); // derived from the three-inch controller length
@ -45,28 +46,11 @@ const QString MENU_NAME = "Vive Controllers";
const QString MENU_PATH = MENU_PARENT + ">" + MENU_NAME;
const QString RENDER_CONTROLLERS = "Render Hand Controllers";
static std::shared_ptr<ViveControllerManager> instance;
ViveControllerManager::ViveControllerManager() :
InputDevice("Vive"),
_trackedControllers(0),
_modelLoaded(false),
_leftHandRenderID(0),
_rightHandRenderID(0),
_renderControllers(false)
{
instance = std::shared_ptr<ViveControllerManager>(this);
}
bool ViveControllerManager::isSupported() const {
#ifdef Q_OS_WIN
bool success = vr::VR_IsHmdPresent();
if (success) {
vr::HmdError eError = vr::HmdError_None;
auto hmd = vr::VR_Init(&eError);
success = (hmd != nullptr);
vr::VR_Shutdown();
}
auto hmd = acquireOpenVrSystem();
bool success = hmd != nullptr;
releaseOpenVrSystem();
return success;
#else
return false;
@ -81,11 +65,8 @@ void ViveControllerManager::activate() {
[this] (bool clicked) { this->setRenderControllers(clicked); },
true, true);
hmdRefCount++;
if (!_hmd) {
vr::HmdError eError = vr::HmdError_None;
_hmd = vr::VR_Init(&eError);
Q_ASSERT(eError == vr::HmdError_None);
_hmd = acquireOpenVrSystem();
}
Q_ASSERT(_hmd);
@ -139,7 +120,7 @@ void ViveControllerManager::activate() {
// unregister with UserInputMapper
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
userInputMapper->registerDevice(instance);
userInputMapper->registerDevice(_inputDevice);
_registeredWithInputMapper = true;
}
@ -150,18 +131,17 @@ void ViveControllerManager::deactivate() {
_container->removeMenuItem(MENU_NAME, RENDER_CONTROLLERS);
_container->removeMenu(MENU_PATH);
hmdRefCount--;
if (hmdRefCount == 0 && _hmd) {
vr::VR_Shutdown();
if (_hmd) {
releaseOpenVrSystem();
_hmd = nullptr;
}
_poseStateMap.clear();
_inputDevice->_poseStateMap.clear();
#endif
// unregister with UserInputMapper
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
userInputMapper->removeDevice(_deviceID);
userInputMapper->removeDevice(_inputDevice->_deviceID);
_registeredWithInputMapper = false;
}
@ -178,8 +158,8 @@ void ViveControllerManager::updateRendering(RenderArgs* args, render::ScenePoint
//pendingChanges.updateItem(_leftHandRenderID, );
controller::Pose leftHand = _poseStateMap[controller::StandardPoseChannel::LEFT_HAND];
controller::Pose rightHand = _poseStateMap[controller::StandardPoseChannel::RIGHT_HAND];
controller::Pose leftHand = _inputDevice->_poseStateMap[controller::StandardPoseChannel::LEFT_HAND];
controller::Pose rightHand = _inputDevice->_poseStateMap[controller::StandardPoseChannel::RIGHT_HAND];
gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
auto geometryCache = DependencyManager::get<GeometryCache>();
@ -235,15 +215,27 @@ glm::quat ViveControllerManager::getRotation(int hand) const {
}
#endif
void ViveControllerManager::update(float deltaTime, bool jointsCaptured) {
void ViveControllerManager::pluginUpdate(float deltaTime, bool jointsCaptured) {
_inputDevice->update(deltaTime, jointsCaptured);
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
if (_inputDevice->_trackedControllers == 0 && _registeredWithInputMapper) {
userInputMapper->removeDevice(_inputDevice->_deviceID);
_registeredWithInputMapper = false;
_inputDevice->_poseStateMap.clear();
}
if (!_registeredWithInputMapper && _inputDevice->_trackedControllers > 0) {
userInputMapper->registerDevice(_inputDevice);
_registeredWithInputMapper = true;
UserActivityLogger::getInstance().connectedDevice("spatial_controller", "steamVR");
}
}
void ViveControllerManager::InputDevice::update(float deltaTime, bool jointsCaptured) {
#ifdef Q_OS_WIN
_poseStateMap.clear();
// TODO: This shouldn't be necessary
if (!_hmd) {
return;
}
_buttonPressedMap.clear();
PerformanceTimer perfTimer("ViveControllerManager::update");
@ -290,34 +282,18 @@ void ViveControllerManager::update(float deltaTime, bool jointsCaptured) {
}
}
}
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
if (numTrackedControllers == 0) {
if (_registeredWithInputMapper) {
userInputMapper->removeDevice(_deviceID);
_registeredWithInputMapper = false;
_poseStateMap.clear();
}
}
if (_trackedControllers == 0 && numTrackedControllers > 0) {
userInputMapper->registerDevice(instance);
_registeredWithInputMapper = true;
UserActivityLogger::getInstance().connectedDevice("spatial_controller", "steamVR");
}
_trackedControllers = numTrackedControllers;
#endif
}
void ViveControllerManager::focusOutEvent() {
void ViveControllerManager::InputDevice::focusOutEvent() {
_axisStateMap.clear();
_buttonPressedMap.clear();
};
// These functions do translation from the Steam IDs to the standard controller IDs
void ViveControllerManager::handleAxisEvent(uint32_t axis, float x, float y, bool left) {
void ViveControllerManager::InputDevice::handleAxisEvent(uint32_t axis, float x, float y, bool left) {
#ifdef Q_OS_WIN
// FIXME: this runs every frame; we probably only want to handle it when an axis event actually occurs
axis += vr::k_EButton_Axis0;
@ -332,7 +308,7 @@ void ViveControllerManager::handleAxisEvent(uint32_t axis, float x, float y, boo
}
// These functions do translation from the Steam IDs to the standard controller IDs
void ViveControllerManager::handleButtonEvent(uint32_t button, bool pressed, bool left) {
void ViveControllerManager::InputDevice::handleButtonEvent(uint32_t button, bool pressed, bool left) {
#ifdef Q_OS_WIN
if (!pressed) {
return;
@ -354,7 +330,7 @@ void ViveControllerManager::handleButtonEvent(uint32_t button, bool pressed, boo
#endif
}
void ViveControllerManager::handlePoseEvent(const mat4& mat, bool left) {
void ViveControllerManager::InputDevice::handlePoseEvent(const mat4& mat, bool left) {
// When the sensor-to-world rotation is identity the coordinate axes look like this:
//
// user
@ -428,7 +404,7 @@ void ViveControllerManager::handlePoseEvent(const mat4& mat, bool left) {
_poseStateMap[left ? controller::LEFT_HAND : controller::RIGHT_HAND] = controller::Pose(position, rotation);
}
controller::Input::NamedVector ViveControllerManager::getAvailableInputs() const {
controller::Input::NamedVector ViveControllerManager::InputDevice::getAvailableInputs() const {
using namespace controller;
QVector<Input::NamedPair> availableInputs{
// Trackpad analogs
@ -469,7 +445,7 @@ controller::Input::NamedVector ViveControllerManager::getAvailableInputs() const
return availableInputs;
}
QString ViveControllerManager::getDefaultMappingConfig() const {
QString ViveControllerManager::InputDevice::getDefaultMappingConfig() const {
static const QString MAPPING_JSON = PathUtils::resourcesPath() + "/controllers/vive.json";
return MAPPING_JSON;
}

View file

@ -24,12 +24,14 @@
#include <RenderArgs.h>
#include <render/Scene.h>
class ViveControllerManager : public InputPlugin, public controller::InputDevice {
namespace vr {
class IVRSystem;
}
class ViveControllerManager : public InputPlugin {
Q_OBJECT
public:
static const QString NAME;
ViveControllerManager();
// Plugin functions
virtual bool isSupported() const override;
@ -39,14 +41,8 @@ public:
virtual void activate() override;
virtual void deactivate() override;
virtual void pluginFocusOutEvent() override { focusOutEvent(); }
virtual void pluginUpdate(float deltaTime, bool jointsCaptured) override { update(deltaTime, jointsCaptured); }
// Device functions
virtual controller::Input::NamedVector getAvailableInputs() const override;
virtual QString getDefaultMappingConfig() const override;
virtual void update(float deltaTime, bool jointsCaptured) override;
virtual void focusOutEvent() override;
virtual void pluginFocusOutEvent() override { _inputDevice->focusOutEvent(); }
virtual void pluginUpdate(float deltaTime, bool jointsCaptured) override;
void updateRendering(RenderArgs* args, render::ScenePointer scene, render::PendingChanges pendingChanges);
@ -57,25 +53,42 @@ public:
glm::vec3 getPosition(int device) const;
glm::quat getRotation(int device) const;
#endif
private:
class InputDevice : public controller::InputDevice {
public:
InputDevice(vr::IVRSystem*& hmd) : controller::InputDevice("Vive"), _hmd(hmd) {}
private:
// Device functions
virtual controller::Input::NamedVector getAvailableInputs() const override;
virtual QString getDefaultMappingConfig() const override;
virtual void update(float deltaTime, bool jointsCaptured) override;
virtual void focusOutEvent() override;
void handleButtonEvent(uint32_t button, bool pressed, bool left);
void handleAxisEvent(uint32_t axis, float x, float y, bool left);
void handlePoseEvent(const mat4& mat, bool left);
int _trackedControllers { 0 };
vr::IVRSystem*& _hmd;
friend class ViveControllerManager;
};
void renderHand(const controller::Pose& pose, gpu::Batch& batch, int sign);
void handleButtonEvent(uint32_t button, bool pressed, bool left);
void handleAxisEvent(uint32_t axis, float x, float y, bool left);
void handlePoseEvent(const mat4& mat, bool left);
int _trackedControllers;
bool _modelLoaded;
bool _registeredWithInputMapper { false };
bool _modelLoaded { false };
model::Geometry _modelGeometry;
gpu::TexturePointer _texture;
int _leftHandRenderID;
int _rightHandRenderID;
int _leftHandRenderID { 0 };
int _rightHandRenderID { 0 };
bool _renderControllers;
bool _registeredWithInputMapper { false };
bool _renderControllers { false };
vr::IVRSystem* _hmd { nullptr };
std::shared_ptr<InputDevice> _inputDevice { std::make_shared<InputDevice>(_hmd) };
};
#endif // hifi__ViveControllerManager
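For context on the refactor above: ViveControllerManager no longer inherits from controller::InputDevice. It now owns a nested InputDevice (sharing the vr::IVRSystem pointer by reference) and registers or removes that device from the UserInputMapper as controllers appear and disappear, so the plugin's lifetime (menus, rendering, OpenVR refcounting) is decoupled from device registration. A minimal, self-contained sketch of that owner/device split; all names here are hypothetical stand-ins, not the hifi APIs:

#include <algorithm>
#include <memory>
#include <vector>

struct Device {
    int trackedControllers { 0 };
};

struct Mapper {
    std::vector<std::shared_ptr<Device>> devices;
    void registerDevice(const std::shared_ptr<Device>& device) { devices.push_back(device); }
    void removeDevice(const std::shared_ptr<Device>& device) {
        devices.erase(std::remove(devices.begin(), devices.end(), device), devices.end());
    }
};

class Plugin {
public:
    // Called once per frame; the real plugin polls the hardware here, which updates
    // trackedControllers, then reconciles registration with the mapper.
    void pluginUpdate(Mapper& mapper) {
        if (_device->trackedControllers == 0 && _registered) {
            mapper.removeDevice(_device);
            _registered = false;
        } else if (_device->trackedControllers > 0 && !_registered) {
            mapper.registerDevice(_device);
            _registered = true;
        }
    }
    std::shared_ptr<Device> device() { return _device; }
private:
    std::shared_ptr<Device> _device { std::make_shared<Device>() };
    bool _registered { false };
};

int main() {
    Mapper mapper;
    Plugin plugin;
    plugin.pluginUpdate(mapper);             // no controllers yet: nothing registered
    plugin.device()->trackedControllers = 2; // controllers appear
    plugin.pluginUpdate(mapper);             // device is now registered with the mapper
    return 0;
}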

View file

@ -38,7 +38,7 @@ PacketVersion versionForPacketType(PacketType packetType) {
case PacketType::EntityAdd:
case PacketType::EntityEdit:
case PacketType::EntityData:
return VERSION_ENTITIES_KEYLIGHT_PROPERTIES_GROUP_BIS;
return VERSION_ENTITIES_PARTICLES_ADDITIVE_BLENDING;
case PacketType::AvatarData:
case PacketType::BulkAvatarData:
default:

View file

@ -145,5 +145,6 @@ const PacketVersion VERSION_ENTITIES_PROTOCOL_CHANNELS = 45;
const PacketVersion VERSION_ENTITIES_ANIMATION_PROPERTIES_GROUP = 46;
const PacketVersion VERSION_ENTITIES_KEYLIGHT_PROPERTIES_GROUP = 47;
const PacketVersion VERSION_ENTITIES_KEYLIGHT_PROPERTIES_GROUP_BIS = 48;
const PacketVersion VERSION_ENTITIES_PARTICLES_ADDITIVE_BLENDING = 49;
#endif // hifi_PacketHeaders_h

View file

@ -79,8 +79,13 @@ EntityMotionState::~EntityMotionState() {
assert(!_entity);
}
void EntityMotionState::updateServerPhysicsVariables() {
void EntityMotionState::updateServerPhysicsVariables(const QUuid& sessionID) {
assert(entityTreeIsLocked());
if (_entity->getSimulatorID() == sessionID) {
// don't slam these values if we are the simulation owner
return;
}
_serverPosition = _entity->getPosition();
_serverRotation = _entity->getRotation();
_serverVelocity = _entity->getVelocity();
@ -92,7 +97,7 @@ void EntityMotionState::updateServerPhysicsVariables() {
// virtual
bool EntityMotionState::handleEasyChanges(uint32_t flags, PhysicsEngine* engine) {
assert(entityTreeIsLocked());
updateServerPhysicsVariables();
updateServerPhysicsVariables(engine->getSessionID());
ObjectMotionState::handleEasyChanges(flags, engine);
if (flags & Simulation::DIRTY_SIMULATOR_ID) {
@ -129,7 +134,7 @@ bool EntityMotionState::handleEasyChanges(uint32_t flags, PhysicsEngine* engine)
// virtual
bool EntityMotionState::handleHardAndEasyChanges(uint32_t flags, PhysicsEngine* engine) {
updateServerPhysicsVariables();
updateServerPhysicsVariables(engine->getSessionID());
return ObjectMotionState::handleHardAndEasyChanges(flags, engine);
}

View file

@ -28,7 +28,7 @@ public:
EntityMotionState(btCollisionShape* shape, EntityItemPointer item);
virtual ~EntityMotionState();
void updateServerPhysicsVariables();
void updateServerPhysicsVariables(const QUuid& sessionID);
virtual bool handleEasyChanges(uint32_t flags, PhysicsEngine* engine);
virtual bool handleHardAndEasyChanges(uint32_t flags, PhysicsEngine* engine);

View file

@ -16,33 +16,34 @@
using namespace recording;
Clip::Pointer Clip::fromFile(const QString& filePath) {
return std::make_shared<FileClip>(filePath);
auto result = std::make_shared<FileClip>(filePath);
if (result->frameCount() == 0) {
return Clip::Pointer();
}
return result;
}
void Clip::toFile(Clip::Pointer clip, const QString& filePath) {
// FIXME
void Clip::toFile(const QString& filePath, Clip::Pointer clip) {
FileClip::write(filePath, clip->duplicate());
}
Clip::Pointer Clip::newClip() {
return std::make_shared<BufferClip>();
}
Clip::Pointer Clip::duplicate() {
Clip::Pointer result = std::make_shared<BufferClip>();
Locker lock(_mutex);
float currentPosition = position();
seek(0);
Frame::Pointer frame = nextFrame();
while (frame) {
result->appendFrame(frame);
result->addFrame(frame);
frame = nextFrame();
}
seek(currentPosition);
return result;
}
#if 0
Clip::Pointer Clip::fromIODevice(QIODevice * device) {
return std::make_shared<IOClip>(device);
}
void Clip::fromIODevice(Clip::Pointer clip, QIODevice * device) {
}
#endif

View file

@ -12,35 +12,44 @@
#include "Forward.h"
#include <mutex>
#include <QtCore/QObject>
class QIODevice;
namespace recording {
class Clip : public QObject {
class Clip {
public:
using Pointer = std::shared_ptr<Clip>;
Clip(QObject* parent = nullptr) : QObject(parent) {}
virtual ~Clip() {}
Pointer duplicate();
virtual float duration() const = 0;
virtual size_t frameCount() const = 0;
virtual void seek(float offset) = 0;
virtual float position() const = 0;
virtual FramePointer peekFrame() const = 0;
virtual FramePointer nextFrame() = 0;
virtual void skipFrame() = 0;
virtual void appendFrame(FramePointer) = 0;
virtual void addFrame(FramePointer) = 0;
static Pointer fromFile(const QString& filePath);
static void toFile(Pointer clip, const QString& filePath);
static void toFile(const QString& filePath, Pointer clip);
static Pointer newClip();
protected:
using Mutex = std::recursive_mutex;
using Locker = std::unique_lock<Mutex>;
virtual void reset() = 0;
mutable Mutex _mutex;
};
}
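A short usage sketch of the Clip interface as it reads after this change. The frame type is assumed to come from Frame::registerFrameType (unregistered types are dropped on load, as the FileClip changes below show), and the file path is a placeholder; the recording-test ClipTests near the end of this diff exercises the same calls:

#include <memory>

#include <recording/Clip.h>
#include <recording/Frame.h>

void roundTrip(recording::FrameType registeredType) {
    auto clip = recording::Clip::newClip();
    clip->addFrame(std::make_shared<recording::Frame>(registeredType, 1.0f, QByteArray("payload")));
    recording::Clip::toFile("/tmp/example.clip", clip); // placeholder path

    auto loaded = recording::Clip::fromFile("/tmp/example.clip");
    if (!loaded) {
        return; // fromFile() yields a null Pointer for empty or unreadable files
    }
    loaded->seek(0);
    for (auto frame = loaded->nextFrame(); frame; frame = loaded->nextFrame()) {
        // consume frame->type, frame->timeOffset, frame->data
    }
}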

View file

@ -29,6 +29,10 @@ public:
float timeOffset { 0 };
QByteArray data;
Frame() {}
Frame(FrameType type, float timeOffset, const QByteArray& data)
: type(type), timeOffset(timeOffset), data(data) {}
static FrameType registerFrameType(const QString& frameTypeName);
static QMap<QString, FrameType> getFrameTypes();
static QMap<FrameType, QString> getFrameTypeNames();

View file

@ -0,0 +1,11 @@
//
// Created by Bradley Austin Davis 2015/10/11
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Logging.h"
Q_LOGGING_CATEGORY(recordingLog, "hifi.recording")

View file

@ -0,0 +1,16 @@
//
// Created by Bradley Austin Davis 2015/10/11
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_Recording_Logging_h
#define hifi_Recording_Logging_h
#include <QLoggingCategory>
Q_DECLARE_LOGGING_CATEGORY(recordingLog)
#endif

View file

@ -51,7 +51,7 @@ void Recorder::recordFrame(FrameType type, QByteArray frameData) {
frame->type = type;
frame->data = frameData;
frame->timeOffset = (float)(_elapsed + _timer.elapsed()) / MSECS_PER_SECOND;
_clip->appendFrame(frame);
_clip->addFrame(frame);
}
ClipPointer Recorder::getClip() {

View file

@ -51,11 +51,15 @@ FramePointer BufferClip::nextFrame() {
return result;
}
void BufferClip::appendFrame(FramePointer newFrame) {
void BufferClip::addFrame(FramePointer newFrame) {
if (newFrame->timeOffset < 0.0f) {
throw std::runtime_error("Frames may not have negative time offsets");
}
auto currentPosition = position();
seek(newFrame->timeOffset);
{
Locker lock(_mutex);
_frames.insert(_frames.begin() + _frameIndex, newFrame);
}
seek(currentPosition);
@ -72,3 +76,15 @@ void BufferClip::reset() {
Locker lock(_mutex);
_frameIndex = 0;
}
float BufferClip::duration() const {
if (_frames.empty()) {
return 0;
}
return (*_frames.rbegin())->timeOffset;
}
size_t BufferClip::frameCount() const {
return _frames.size();
}
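BufferClip::addFrame above keeps _frames ordered by timeOffset (it seeks to the new frame's offset and inserts at the resulting index), which is what lets duration() simply return the last frame's offset. The same invariant sketched with std::upper_bound, illustrative only, not the hifi implementation:

#include <algorithm>
#include <memory>
#include <stdexcept>
#include <vector>

struct Frame {
    float timeOffset { 0.0f };
};
using FramePointer = std::shared_ptr<Frame>;

// Insert newFrame so that the vector stays sorted by timeOffset.
void addFrameSorted(std::vector<FramePointer>& frames, const FramePointer& newFrame) {
    if (newFrame->timeOffset < 0.0f) {
        throw std::runtime_error("Frames may not have negative time offsets");
    }
    auto insertPoint = std::upper_bound(frames.begin(), frames.end(), newFrame,
        [](const FramePointer& a, const FramePointer& b) { return a->timeOffset < b->timeOffset; });
    frames.insert(insertPoint, newFrame);
}

int main() {
    std::vector<FramePointer> frames;
    auto late = std::make_shared<Frame>();
    late->timeOffset = 10.0f;
    auto early = std::make_shared<Frame>();
    early->timeOffset = 5.0f;
    addFrameSorted(frames, late);
    addFrameSorted(frames, early);
    // frames is now ordered by timeOffset: 5.0f, then 10.0f
    return 0;
}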

View file

@ -20,25 +20,23 @@ class BufferClip : public Clip {
public:
using Pointer = std::shared_ptr<BufferClip>;
BufferClip(QObject* parent = nullptr) : Clip(parent) {}
virtual ~BufferClip() {}
virtual float duration() const override;
virtual size_t frameCount() const override;
virtual void seek(float offset) override;
virtual float position() const override;
virtual FramePointer peekFrame() const override;
virtual FramePointer nextFrame() override;
virtual void skipFrame() override;
virtual void appendFrame(FramePointer) override;
virtual void addFrame(FramePointer) override;
private:
using Mutex = std::mutex;
using Locker = std::unique_lock<Mutex>;
virtual void reset() override;
std::vector<FramePointer> _frames;
mutable Mutex _mutex;
mutable size_t _frameIndex { 0 };
};

View file

@ -8,42 +8,197 @@
#include "FileClip.h"
#include "../Frame.h"
#include <algorithm>
#include <QtCore/QDebug>
#include <QtCore/QJsonDocument>
#include <QtCore/QJsonObject>
#include <Finally.h>
#include "../Frame.h"
#include "../Logging.h"
using namespace recording;
static const qint64 MINIMUM_FRAME_SIZE = sizeof(FrameType) + sizeof(float) + sizeof(uint16_t) + 1;
static const qint64 MINIMUM_FRAME_SIZE = sizeof(FrameType) + sizeof(float) + sizeof(uint16_t);
FileClip::FileClip(const QString& fileName, QObject* parent) : Clip(parent), _file(fileName) {
auto size = _file.size();
_map = _file.map(0, size, QFile::MapPrivateOption);
static const QString FRAME_TYPE_MAP = QStringLiteral("frameTypes");
auto current = _map;
using FrameHeaderList = std::list<FileClip::FrameHeader>;
using FrameTranslationMap = QMap<FrameType, FrameType>;
FrameTranslationMap parseTranslationMap(const QJsonDocument& doc) {
FrameTranslationMap results;
auto headerObj = doc.object();
if (headerObj.contains(FRAME_TYPE_MAP)) {
auto frameTypeObj = headerObj[FRAME_TYPE_MAP].toObject();
auto currentFrameTypes = Frame::getFrameTypes();
for (auto frameTypeName : frameTypeObj.keys()) {
qDebug() << frameTypeName;
if (!currentFrameTypes.contains(frameTypeName)) {
continue;
}
FrameType currentTypeEnum = currentFrameTypes[frameTypeName];
FrameType storedTypeEnum = static_cast<FrameType>(frameTypeObj[frameTypeName].toInt());
results[storedTypeEnum] = currentTypeEnum;
}
}
return results;
}
FrameHeaderList parseFrameHeaders(uchar* const start, const qint64& size) {
using FrameHeader = FileClip::FrameHeader;
FrameHeaderList results;
auto current = start;
auto end = current + size;
// Read all the frame headers
while (end - current < MINIMUM_FRAME_SIZE) {
// FIXME move to Frame::readHeader?
while (end - current >= MINIMUM_FRAME_SIZE) {
FrameHeader header;
memcpy(&(header.type), current, sizeof(FrameType));
current += sizeof(FrameType);
memcpy(&(header.timeOffset), current, sizeof(FrameType));
memcpy(&(header.timeOffset), current, sizeof(float));
current += sizeof(float);
memcpy(&(header.size), current, sizeof(uint16_t));
current += sizeof(uint16_t);
header.fileOffset = current - _map;
header.fileOffset = current - start;
if (end - current < header.size) {
current = end;
break;
}
_frameHeaders.push_back(header);
current += header.size;
results.push_back(header);
}
return results;
}
FileClip::FileClip(const QString& fileName) : _file(fileName) {
auto size = _file.size();
bool opened = _file.open(QIODevice::ReadOnly);
if (!opened) {
qCWarning(recordingLog) << "Unable to open file " << fileName;
return;
}
_map = _file.map(0, size, QFile::MapPrivateOption);
if (!_map) {
qCWarning(recordingLog) << "Unable to map file " << fileName;
return;
}
FrameHeaderList parsedFrameHeaders = parseFrameHeaders(_map, size);
// Verify that at least one frame exists and that the first frame is a header
if (0 == parsedFrameHeaders.size()) {
qWarning() << "No frames found, invalid file";
return;
}
// Grab the file header
{
auto fileHeaderFrameHeader = *parsedFrameHeaders.begin();
parsedFrameHeaders.pop_front();
if (fileHeaderFrameHeader.type != Frame::TYPE_HEADER) {
qWarning() << "Missing header frame, invalid file";
return;
}
QByteArray fileHeaderData((char*)_map + fileHeaderFrameHeader.fileOffset, fileHeaderFrameHeader.size);
_fileHeader = QJsonDocument::fromBinaryData(fileHeaderData);
}
// Find the type enum translation map and fix up the frame headers
{
FrameTranslationMap translationMap = parseTranslationMap(_fileHeader);
if (translationMap.empty()) {
qWarning() << "Header missing frame type map, invalid file";
return;
}
// Update the loaded headers with the frame data
_frameHeaders.reserve(parsedFrameHeaders.size());
for (auto& frameHeader : parsedFrameHeaders) {
if (!translationMap.contains(frameHeader.type)) {
continue;
}
frameHeader.type = translationMap[frameHeader.type];
_frameHeaders.push_back(frameHeader);
}
}
}
// FIXME move to frame?
bool writeFrame(QIODevice& output, const Frame& frame) {
auto written = output.write((char*)&(frame.type), sizeof(FrameType));
if (written != sizeof(FrameType)) {
return false;
}
written = output.write((char*)&(frame.timeOffset), sizeof(float));
if (written != sizeof(float)) {
return false;
}
uint16_t dataSize = frame.data.size();
written = output.write((char*)&dataSize, sizeof(uint16_t));
if (written != sizeof(uint16_t)) {
return false;
}
if (dataSize != 0) {
written = output.write(frame.data);
if (written != dataSize) {
return false;
}
}
return true;
}
bool FileClip::write(const QString& fileName, Clip::Pointer clip) {
qCDebug(recordingLog) << "Writing clip to file " << fileName;
if (0 == clip->frameCount()) {
return false;
}
QFile outputFile(fileName);
if (!outputFile.open(QFile::Truncate | QFile::WriteOnly)) {
return false;
}
Finally closer([&] { outputFile.close(); });
{
auto frameTypes = Frame::getFrameTypes();
QJsonObject frameTypeObj;
for (const auto& frameTypeName : frameTypes.keys()) {
frameTypeObj[frameTypeName] = frameTypes[frameTypeName];
}
QJsonObject rootObject;
rootObject.insert(FRAME_TYPE_MAP, frameTypeObj);
QByteArray headerFrameData = QJsonDocument(rootObject).toBinaryData();
if (!writeFrame(outputFile, Frame({ Frame::TYPE_HEADER, 0, headerFrameData }))) {
return false;
}
}
clip->seek(0);
for (auto frame = clip->nextFrame(); frame; frame = clip->nextFrame()) {
if (!writeFrame(outputFile, *frame)) {
return false;
}
}
outputFile.close();
return true;
}
FileClip::~FileClip() {
Locker lock(_mutex);
_file.unmap(_map);
_map = nullptr;
if (_file.isOpen()) {
_file.close();
}
}
void FileClip::seek(float offset) {
@ -72,7 +227,9 @@ FramePointer FileClip::readFrame(uint32_t frameIndex) const {
const FrameHeader& header = _frameHeaders[frameIndex];
result->type = header.type;
result->timeOffset = header.timeOffset;
result->data.insert(0, reinterpret_cast<char*>(_map)+header.fileOffset, header.size);
if (header.size) {
result->data.insert(0, reinterpret_cast<char*>(_map)+header.fileOffset, header.size);
}
}
return result;
}
@ -99,7 +256,18 @@ void FileClip::reset() {
_frameIndex = 0;
}
void FileClip::appendFrame(FramePointer) {
void FileClip::addFrame(FramePointer) {
throw std::runtime_error("File clips are read only");
}
float FileClip::duration() const {
if (_frameHeaders.empty()) {
return 0;
}
return _frameHeaders.rbegin()->timeOffset;
}
size_t FileClip::frameCount() const {
return _frameHeaders.size();
}
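For reference, the clip file handled above is a flat run of frames: a FrameType, a float timeOffset, a uint16_t payload size, then the payload bytes, with the very first frame being a TYPE_HEADER whose binary-JSON payload carries the frameTypes name-to-id map used to remap stored type ids on load. A standalone sketch of the same header walk over a raw byte buffer (the width of FrameType here is an assumption; FileClip itself reads from a memory-mapped QFile):

#include <cstdint>
#include <cstring>
#include <vector>

using SketchFrameType = uint16_t; // assumption: stands in for recording::FrameType

struct ParsedHeader {
    SketchFrameType type;
    float timeOffset;
    uint16_t size;
    size_t payloadOffset; // offset of the payload within the buffer
};

std::vector<ParsedHeader> parseHeaders(const uint8_t* start, size_t length) {
    const size_t MINIMUM_FRAME_SIZE = sizeof(SketchFrameType) + sizeof(float) + sizeof(uint16_t);
    std::vector<ParsedHeader> results;
    const uint8_t* current = start;
    const uint8_t* end = start + length;
    while (static_cast<size_t>(end - current) >= MINIMUM_FRAME_SIZE) {
        ParsedHeader header;
        std::memcpy(&header.type, current, sizeof(SketchFrameType));
        current += sizeof(SketchFrameType);
        std::memcpy(&header.timeOffset, current, sizeof(float));
        current += sizeof(float);
        std::memcpy(&header.size, current, sizeof(uint16_t));
        current += sizeof(uint16_t);
        header.payloadOffset = static_cast<size_t>(current - start);
        if (static_cast<size_t>(end - current) < header.size) {
            break; // truncated payload: stop, as the parser above does
        }
        current += header.size;
        results.push_back(header);
    }
    return results;
}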

View file

@ -13,6 +13,7 @@
#include "../Clip.h"
#include <QtCore/QFile>
#include <QtCore/QJsonDocument>
#include <mutex>
@ -22,22 +23,25 @@ class FileClip : public Clip {
public:
using Pointer = std::shared_ptr<FileClip>;
FileClip(const QString& file, QObject* parent = nullptr);
FileClip(const QString& file);
virtual ~FileClip();
virtual float duration() const override;
virtual size_t frameCount() const override;
virtual void seek(float offset) override;
virtual float position() const override;
virtual FramePointer peekFrame() const override;
virtual FramePointer nextFrame() override;
virtual void appendFrame(FramePointer) override;
virtual void skipFrame() override;
virtual void addFrame(FramePointer) override;
private:
using Mutex = std::mutex;
using Locker = std::unique_lock<Mutex>;
const QJsonDocument& getHeader() {
return _fileHeader;
}
virtual void reset() override;
static bool write(const QString& filePath, Clip::Pointer clip);
struct FrameHeader {
FrameType type;
@ -46,15 +50,20 @@ private:
quint64 fileOffset;
};
using FrameHeaders = std::vector<FrameHeader>;
private:
virtual void reset() override;
using FrameHeaderVector = std::vector<FrameHeader>;
FramePointer readFrame(uint32_t frameIndex) const;
mutable Mutex _mutex;
QJsonDocument _fileHeader;
QFile _file;
uint32_t _frameIndex { 0 };
uchar* _map;
FrameHeaders _frameHeaders;
uchar* _map { nullptr };
FrameHeaderVector _frameHeaders;
};
}

View file

@ -11,8 +11,16 @@
#include "DependencyManager.h"
DependencyManager DependencyManager::_manager;
#include "SharedUtil.h"
#include "Finally.h"
static const char* const DEPENDENCY_PROPERTY_NAME = "com.highfidelity.DependencyMananger";
DependencyManager& DependencyManager::manager() {
static DependencyManager* instance = globalInstance<DependencyManager>(DEPENDENCY_PROPERTY_NAME);
return *instance;
}
QSharedPointer<Dependency>& DependencyManager::safeGet(size_t hashCode) {
return _instanceHash[hashCode];
}
}

View file

@ -62,8 +62,8 @@ public:
static void registerInheritance();
private:
static DependencyManager _manager;
static DependencyManager& manager();
template<typename T>
size_t getHashCode();
@ -75,11 +75,11 @@ private:
template <typename T>
QSharedPointer<T> DependencyManager::get() {
static size_t hashCode = _manager.getHashCode<T>();
static size_t hashCode = manager().getHashCode<T>();
static QWeakPointer<T> instance;
if (instance.isNull()) {
instance = qSharedPointerCast<T>(_manager.safeGet(hashCode));
instance = qSharedPointerCast<T>(manager().safeGet(hashCode));
if (instance.isNull()) {
qWarning() << "DependencyManager::get(): No instance available for" << typeid(T).name();
@ -91,9 +91,9 @@ QSharedPointer<T> DependencyManager::get() {
template <typename T, typename ...Args>
QSharedPointer<T> DependencyManager::set(Args&&... args) {
static size_t hashCode = _manager.getHashCode<T>();
static size_t hashCode = manager().getHashCode<T>();
QSharedPointer<Dependency>& instance = _manager.safeGet(hashCode);
QSharedPointer<Dependency>& instance = manager().safeGet(hashCode);
instance.clear(); // Clear instance before creation of new one to avoid edge cases
QSharedPointer<T> newInstance(new T(args...), &T::customDeleter);
QSharedPointer<Dependency> storedInstance = qSharedPointerCast<Dependency>(newInstance);
@ -104,9 +104,9 @@ QSharedPointer<T> DependencyManager::set(Args&&... args) {
template <typename T, typename I, typename ...Args>
QSharedPointer<T> DependencyManager::set(Args&&... args) {
static size_t hashCode = _manager.getHashCode<T>();
static size_t hashCode = manager().getHashCode<T>();
QSharedPointer<Dependency>& instance = _manager.safeGet(hashCode);
QSharedPointer<Dependency>& instance = manager().safeGet(hashCode);
instance.clear(); // Clear instance before creation of new one to avoid edge cases
QSharedPointer<T> newInstance(new I(args...), &I::customDeleter);
QSharedPointer<Dependency> storedInstance = qSharedPointerCast<Dependency>(newInstance);
@ -117,15 +117,15 @@ QSharedPointer<T> DependencyManager::set(Args&&... args) {
template <typename T>
void DependencyManager::destroy() {
static size_t hashCode = _manager.getHashCode<T>();
_manager.safeGet(hashCode).clear();
static size_t hashCode = manager().getHashCode<T>();
manager().safeGet(hashCode).clear();
}
template<typename Base, typename Derived>
void DependencyManager::registerInheritance() {
size_t baseHashCode = typeid(Base).hash_code();
size_t derivedHashCode = typeid(Derived).hash_code();
_manager._inheritanceHash.insert(baseHashCode, derivedHashCode);
manager()._inheritanceHash.insert(baseHashCode, derivedHashCode);
}
template<typename T>

View file

@ -13,6 +13,7 @@
#define hifi_SharedUtil_h
#include <memory>
#include <mutex>
#include <math.h>
#include <stdint.h>
@ -20,7 +21,36 @@
#include <unistd.h> // not available on Windows, and not needed on Mac or Windows
#endif
#include <QDebug>
#include <QtCore/QDebug>
#include <QtCore/QCoreApplication>
// Provides efficient access to a named global type. By storing the value
// as a named property on the QApplication we can implement the singleton pattern
// and have the single instance work correctly across DLL boundaries.
template <typename T, typename... Args>
T* globalInstance(const char* propertyName, Args&&... args) {
static std::unique_ptr<T> instancePtr;
static T* resultInstance { nullptr };
static std::mutex mutex;
if (!resultInstance) {
std::unique_lock<std::mutex> lock(mutex);
if (!resultInstance) {
auto variant = qApp->property(propertyName);
if (variant.isNull()) {
// Since we're building the object, store it in a unique_ptr so it's
// destroyed by the destructor of the static instancePtr
instancePtr = std::unique_ptr<T>(new T(std::forward<Args>(args)...));
void* voidInstance = &(*instancePtr);
variant = QVariant::fromValue(voidInstance);
qApp->setProperty(propertyName, variant);
}
void* returnedVoidInstance = variant.value<void*>();
resultInstance = static_cast<T*>(returnedVoidInstance);
}
}
return resultInstance;
}
const int BYTES_PER_COLOR = 3;
const int BYTES_PER_FLAGS = 1;
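A hypothetical usage sketch of globalInstance() above: a class exposes a process-wide singleton the same way DependencyManager::manager() now does, by publishing itself as a named property on the QCoreApplication so every DLL resolves to the same object. FooRegistry and the property name are illustrative, not part of the hifi codebase, and a QCoreApplication is assumed to exist:

#include <cstddef>
#include <vector>
// assumes SharedUtil.h (above) is included for globalInstance()

class FooRegistry {
public:
    static FooRegistry& instance() {
        static const char* const PROPERTY_NAME = "com.example.FooRegistry"; // illustrative name
        // Finds the existing instance via the qApp property, or constructs and publishes one.
        static FooRegistry* registry = globalInstance<FooRegistry>(PROPERTY_NAME);
        return *registry;
    }
    void add(int value) { _values.push_back(value); }
    std::size_t size() const { return _values.size(); }
private:
    std::vector<int> _values;
};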

View file

@ -132,7 +132,7 @@ int main(int argc, char** argv) {
inputPlugin->activate();
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
if (name == KeyboardMouseDevice::NAME) {
userInputMapper->registerDevice(std::dynamic_pointer_cast<KeyboardMouseDevice>(inputPlugin));
userInputMapper->registerDevice(std::dynamic_pointer_cast<KeyboardMouseDevice>(inputPlugin)->getInputDevice());
}
inputPlugin->pluginUpdate(0, false);
}

View file

@ -1,10 +1,16 @@
set(TARGET_NAME recording-test)
# This is not a testcase -- just set it up as a regular hifi project
setup_hifi_project(Test)
set_target_properties(${TARGET_NAME} PROPERTIES FOLDER "Tests/manual-tests/")
link_hifi_libraries(shared recording)
copy_dlls_beside_windows_executable()
# FIXME convert to unit tests
# Declare dependencies
macro (setup_testcase_dependencies)
# link in the shared libraries
link_hifi_libraries(shared recording)
copy_dlls_beside_windows_executable()
endmacro ()
setup_hifi_testcase()
#macro (setup_testcase_dependencies)
# # link in the shared libraries
# link_hifi_libraries(shared recording)
#
# copy_dlls_beside_windows_executable()
#endmacro ()
#setup_hifi_testcase()

View file

@ -11,6 +11,8 @@
#ifndef hifi_Constants_h
#define hifi_Constants_h
#include <QtCore/QString>
static const QString HEADER_NAME = "com.highfidelity.recording.Header";
static const QString TEST_NAME = "com.highfidelity.recording.Test";

View file

@ -8,6 +8,9 @@
#include "FrameTests.h"
#include "Constants.h"
#if 0
#include "../QTestExtensions.h"
#include <recording/Frame.h>
@ -27,3 +30,4 @@ void FrameTests::registerFrameTypeTest() {
QCOMPARE(backMap[recording::Frame::TYPE_HEADER], HEADER_NAME);
}
#endif

View file

@ -10,6 +10,7 @@
#ifndef hifi_FrameTests_h
#define hifi_FrameTests_h
#if 0
#include <QtTest/QtTest>
class FrameTests : public QObject {
@ -18,4 +19,6 @@ private slots:
void registerFrameTypeTest();
};
#endif
#endif // hifi_FrameTests_h

View file

@ -8,6 +8,9 @@
#include "RecorderTests.h"
#include "Constants.h"
#if 0
#include "../QTestExtensions.h"
#include <recording/Recorder.h>
@ -23,3 +26,4 @@ void RecorderTests::recorderTest() {
//QCOMPARE(recoreder.isRecording(), false);
}
#endif

View file

@ -10,6 +10,8 @@
#ifndef hifi_RecorderTests_h
#define hifi_RecorderTests_h
#if 0
#include <QtTest/QtTest>
class RecorderTests : public QObject {
@ -19,3 +21,5 @@ private slots:
};
#endif
#endif

View file

@ -0,0 +1,114 @@
#include <QtGlobal>
#include <QtTest/QtTest>
#include <QtCore/QTemporaryFile>
#include <QtCore/QString>
#include <limits>
#ifdef Q_OS_WIN32
#include <Windows.h>
#endif
#include <recording/Clip.h>
#include <recording/Frame.h>
#include "Constants.h"
#undef QVERIFY // <QtTest/QtTest> already defines QVERIFY; avoid a redefinition warning
#define QVERIFY Q_ASSERT
using namespace recording;
FrameType TEST_FRAME_TYPE { Frame::TYPE_INVALID };
void testFrameTypeRegistration() {
TEST_FRAME_TYPE = Frame::registerFrameType(TEST_NAME);
QVERIFY(TEST_FRAME_TYPE != Frame::TYPE_INVALID);
QVERIFY(TEST_FRAME_TYPE != Frame::TYPE_HEADER);
auto forwardMap = recording::Frame::getFrameTypes();
QVERIFY(forwardMap.count(TEST_NAME) == 1);
QVERIFY(forwardMap[TEST_NAME] == TEST_FRAME_TYPE);
QVERIFY(forwardMap[HEADER_NAME] == recording::Frame::TYPE_HEADER);
auto backMap = recording::Frame::getFrameTypeNames();
QVERIFY(backMap.count(TEST_FRAME_TYPE) == 1);
QVERIFY(backMap[TEST_FRAME_TYPE] == TEST_NAME);
QVERIFY(backMap[recording::Frame::TYPE_HEADER] == HEADER_NAME);
}
void testFilePersist() {
QTemporaryFile file;
QString fileName;
if (file.open()) {
fileName = file.fileName();
file.close();
}
auto readClip = Clip::fromFile(fileName);
QVERIFY(Clip::Pointer() == readClip);
auto writeClip = Clip::newClip();
writeClip->addFrame(std::make_shared<Frame>(TEST_FRAME_TYPE, 5.0f, QByteArray()));
QVERIFY(writeClip->frameCount() == 1);
QVERIFY(writeClip->duration() == 5.0f);
Clip::toFile(fileName, writeClip);
readClip = Clip::fromFile(fileName);
QVERIFY(readClip != Clip::Pointer());
QVERIFY(readClip->frameCount() == 1);
QVERIFY(readClip->duration() == 5.0f);
readClip->seek(0);
writeClip->seek(0);
size_t count = 0;
for (auto readFrame = readClip->nextFrame(), writeFrame = writeClip->nextFrame(); readFrame && writeFrame;
readFrame = readClip->nextFrame(), writeFrame = writeClip->nextFrame(), ++count) {
QVERIFY(readFrame->type == writeFrame->type);
QVERIFY(readFrame->timeOffset == writeFrame->timeOffset);
QVERIFY(readFrame->data == writeFrame->data);
}
QVERIFY(readClip->frameCount() == count);
writeClip = Clip::newClip();
writeClip->addFrame(std::make_shared<Frame>(TEST_FRAME_TYPE, 5.0f, QByteArray()));
// Simulate an unknown frame type
writeClip->addFrame(std::make_shared<Frame>(Frame::TYPE_INVALID - 1, 10.0f, QByteArray()));
QVERIFY(writeClip->frameCount() == 2);
QVERIFY(writeClip->duration() == 10.0f);
Clip::toFile(fileName, writeClip);
// Verify that the read version of the clip ignores the unknown frame type
readClip = Clip::fromFile(fileName);
QVERIFY(readClip != Clip::Pointer());
QVERIFY(readClip->frameCount() == 1);
QVERIFY(readClip->duration() == 5.0f);
}
void testClipOrdering() {
auto writeClip = Clip::newClip();
// simulate out-of-order addition of frames
writeClip->addFrame(std::make_shared<Frame>(TEST_FRAME_TYPE, 10.0f, QByteArray()));
writeClip->addFrame(std::make_shared<Frame>(TEST_FRAME_TYPE, 5.0f, QByteArray()));
QVERIFY(writeClip->frameCount() == 2);
QVERIFY(writeClip->duration() == 10.0f);
QVERIFY(std::numeric_limits<float>::max() == writeClip->position());
writeClip->seek(0);
QVERIFY(5.0f == writeClip->position());
float lastFrameTimeOffset { 0 };
for (auto writeFrame = writeClip->nextFrame(); writeFrame; writeFrame = writeClip->nextFrame()) {
    QVERIFY(writeFrame->timeOffset >= lastFrameTimeOffset);
    lastFrameTimeOffset = writeFrame->timeOffset;
}
}
#ifdef Q_OS_WIN32
void myMessageHandler(QtMsgType type, const QMessageLogContext & context, const QString & msg) {
OutputDebugStringA(msg.toLocal8Bit().toStdString().c_str());
OutputDebugStringA("\n");
}
#endif
int main(int, const char**) {
#ifdef Q_OS_WIN32
qInstallMessageHandler(myMessageHandler);
#endif
testFrameTypeRegistration();
testFilePersist();
testClipOrdering();
}

View file

@ -82,7 +82,7 @@ var HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
z: 0
},
collisionsWillMove: true,
collisionsSoundURL: basketballCollisionSoundURL,
collisionSoundURL: basketballCollisionSoundURL,
ignoreForCollisions: false,
modelURL: basketballURL,
userData: JSON.stringify({

View file

@ -308,6 +308,7 @@
z: 0
},
collisionsWillMove: true,
collisionSoundURL: 'http://hifi-public.s3.amazonaws.com/sounds/basketball/basketball.wav',
ignoreForCollisions: false,
modelURL: basketballURL,
userData: JSON.stringify({
@ -886,8 +887,8 @@
function createPingPongBallGun() {
var MODEL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/ping_pong_gun.fbx';
var COLLISION_HULL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/ping_pong_gun_collision_hull.obj';
var COLLISION_HULL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/ping_pong_gun_convex.obj';
var COLLISION_SOUND_URL = 'http://hifi-public.s3.amazonaws.com/sounds/Collisions-otherorganic/plastic_impact.L.wav';
var position = {
x: 548.6,
y: 495.4,
@ -899,7 +900,8 @@
var pingPongGun = Entities.addEntity({
type: "Model",
modelURL: MODEL_URL,
shapeType: 'box',
shapeType: 'compound',
compoundShapeURL: COLLISION_HULL_URL,
script: pingPongScriptURL,
position: position,
rotation: rotation,
@ -914,6 +916,7 @@
z: 0.47
},
collisionsWillMove: true,
collisionSoundURL: COLLISION_SOUND_URL,
userData: JSON.stringify({
resetMe: {
resetMe: true
@ -1258,4 +1261,4 @@
};
// entity scripts always need to return a newly constructed object of our type
return new ResetSwitch();
});
});

View file

@ -248,7 +248,7 @@ MasterReset = function() {
},
grabbableKey: {
grabbable: false,
wantsTrigger:true
wantsTrigger: true
}
})
});
@ -289,6 +289,7 @@ MasterReset = function() {
z: 0
},
collisionsWillMove: true,
collisionSoundURL: 'http://hifi-public.s3.amazonaws.com/sounds/basketball/basketball.wav',
ignoreForCollisions: false,
modelURL: basketballURL,
userData: JSON.stringify({
@ -334,7 +335,7 @@ MasterReset = function() {
name: "Basketball Resetter",
script: basketballResetterScriptURL,
dimensions: dimensions,
visible:false,
visible: false,
userData: JSON.stringify({
resetMe: {
resetMe: true
@ -367,7 +368,7 @@ MasterReset = function() {
name: "Target Resetter",
script: targetsResetterScriptURL,
dimensions: dimensions,
visible:false,
visible: false,
userData: JSON.stringify({
resetMe: {
resetMe: true
@ -868,8 +869,8 @@ MasterReset = function() {
function createPingPongBallGun() {
var MODEL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/ping_pong_gun.fbx';
var COLLISION_HULL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/ping_pong_gun_collision_hull.obj';
var COLLISION_HULL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/ping_pong_gun_convex.obj';
var COLLISION_SOUND_URL = 'http://hifi-public.s3.amazonaws.com/sounds/Collisions-otherorganic/plastic_impact.L.wav';
var position = {
x: 548.6,
y: 495.4,
@ -881,7 +882,8 @@ MasterReset = function() {
var pingPongGun = Entities.addEntity({
type: "Model",
modelURL: MODEL_URL,
shapeType: 'box',
shapeType: 'compound',
compoundShapeURL: COLLISION_HULL_URL,
script: pingPongScriptURL,
position: position,
rotation: rotation,
@ -896,6 +898,7 @@ MasterReset = function() {
z: 0.47
},
collisionsWillMove: true,
collisionSoundURL: COLLISION_SOUND_URL,
userData: JSON.stringify({
resetMe: {
resetMe: true
@ -1238,4 +1241,4 @@ MasterReset = function() {
Script.scriptEnding.connect(cleanup);
}
};
};