Merge branch 'master' of github.com:highfidelity/hifi into motor-action

This commit is contained in:
Seth Alves 2017-05-11 09:29:30 -07:00
commit 4042e2bceb
42 changed files with 1089 additions and 223 deletions

View file

@ -941,10 +941,12 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// sessionRunTime will be reset soon by loadSettings. Grab it now to get previous session value.
// The value will be 0 if the user blew away settings this session, which is both a feature and a bug.
static const QString TESTER = "HIFI_TESTER";
auto gpuIdent = GPUIdent::getInstance();
auto glContextData = getGLContextData();
QJsonObject properties = {
{ "version", applicationVersion() },
{ "tester", QProcessEnvironment::systemEnvironment().contains(TESTER) },
{ "previousSessionCrashed", _previousSessionCrashed },
{ "previousSessionRuntime", sessionRunTime.get() },
{ "cpu_architecture", QSysInfo::currentCpuArchitecture() },

View file

@ -225,10 +225,6 @@ void Web3DOverlay::setMaxFPS(uint8_t maxFPS) {
}
void Web3DOverlay::render(RenderArgs* args) {
if (!_visible || !getParentVisible()) {
return;
}
QOpenGLContext * currentContext = QOpenGLContext::currentContext();
QSurface * currentSurface = currentContext->surface();
if (!_webSurface) {
@ -282,6 +278,10 @@ void Web3DOverlay::render(RenderArgs* args) {
_webSurface->resize(QSize(_resolution.x, _resolution.y));
}
if (!_visible || !getParentVisible()) {
return;
}
vec2 halfSize = getSize() / 2.0f;
vec4 color(toGlm(getColor()), getAlpha());

View file

@ -202,6 +202,13 @@ void Head::calculateMouthShapes(float deltaTime) {
float trailingAudioJawOpenRatio = (100.0f - deltaTime * NORMAL_HZ) / 100.0f; // --> 0.99 at 60 Hz
_trailingAudioJawOpen = glm::mix(_trailingAudioJawOpen, _audioJawOpen, trailingAudioJawOpenRatio);
// truncate _mouthTime when mouth goes quiet to prevent floating point error on increment
const float SILENT_TRAILING_JAW_OPEN = 0.0002f;
const float MAX_SILENT_MOUTH_TIME = 10.0f;
if (_trailingAudioJawOpen < SILENT_TRAILING_JAW_OPEN && _mouthTime > MAX_SILENT_MOUTH_TIME) {
_mouthTime = 0.0f;
}
// Advance time at a rate proportional to loudness, and move the mouth shapes through
// a cycle at differing speeds to create a continuous random blend of shapes.
_mouthTime += sqrtf(_averageLoudness) * TIMESTEP_CONSTANT * deltaTimeRatio;

View file

@ -22,20 +22,20 @@
#include <BuildInfo.h>
#include <GLMHelpers.h>
QString SAVE_DIRECTORY = QStandardPaths::writableLocation(QStandardPaths::AppLocalDataLocation) + "/" + BuildInfo::MODIFIED_ORGANIZATION + "/" + BuildInfo::INTERFACE_NAME + "/hifi-input-recordings/";
QString FILE_PREFIX_NAME = "input-recording-";
QString COMPRESS_EXTENSION = ".tar.gz";
namespace controller {
QJsonObject poseToJsonObject(const Pose pose) {
QJsonObject newPose;
QJsonArray translation;
translation.append(pose.translation.x);
translation.append(pose.translation.y);
translation.append(pose.translation.z);
QJsonArray rotation;
rotation.append(pose.rotation.x);
rotation.append(pose.rotation.y);
@ -69,7 +69,7 @@ namespace controller {
QJsonArray angularVelocity = object["angularVelocity"].toArray();
pose.valid = object["valid"].toBool();
pose.translation.x = translation[0].toDouble();
pose.translation.y = translation[1].toDouble();
pose.translation.z = translation[2].toDouble();
@ -89,13 +89,13 @@ namespace controller {
return pose;
}
void exportToFile(QJsonObject& object) {
if (!QDir(SAVE_DIRECTORY).exists()) {
QDir().mkdir(SAVE_DIRECTORY);
}
QString timeStamp = QDateTime::currentDateTime().toString(Qt::ISODate);
timeStamp.replace(":", "-");
QString fileName = SAVE_DIRECTORY + FILE_PREFIX_NAME + timeStamp + COMPRESS_EXTENSION;
@ -124,7 +124,7 @@ namespace controller {
status = true;
return object;
}
InputRecorder::InputRecorder() {}
InputRecorder::~InputRecorder() {}
@ -195,16 +195,16 @@ namespace controller {
_framesRecorded = data["frameCount"].toInt();
QJsonArray actionArrayList = data["actionList"].toArray();
QJsonArray poseArrayList = data["poseList"].toArray();
for (int actionIndex = 0; actionIndex < actionArrayList.size(); actionIndex++) {
QJsonArray actionState = actionArrayList[actionIndex].toArray();
for (int index = 0; index < actionState.size(); index++) {
_currentFrameActions[index] = actionState[index].toInt();
_currentFrameActions[index] = actionState[index].toDouble();
}
_actionStateList.push_back(_currentFrameActions);
_currentFrameActions = ActionStates(toInt(Action::NUM_ACTIONS));
}
for (int poseIndex = 0; poseIndex < poseArrayList.size(); poseIndex++) {
QJsonArray poseState = poseArrayList[poseIndex].toArray();
for (int index = 0; index < poseState.size(); index++) {
@ -250,13 +250,13 @@ namespace controller {
for(auto& channel : _currentFramePoses) {
channel = Pose();
}
for(auto& channel : _currentFrameActions) {
channel = 0.0f;
}
}
}
float InputRecorder::getActionState(controller::Action action) {
if (_actionStateList.size() > 0 ) {
return _actionStateList[_playCount][toInt(action)];

View file

@ -1,5 +1,5 @@
set(TARGET_NAME gpu-gl)
setup_hifi_library()
setup_hifi_library(Concurrent)
link_hifi_libraries(shared gl gpu)
if (UNIX)
target_link_libraries(${TARGET_NAME} pthread)

View file

@ -160,8 +160,6 @@ const uvec3 GLVariableAllocationSupport::INITIAL_MIP_TRANSFER_DIMENSIONS { 64, 6
WorkQueue GLVariableAllocationSupport::_transferQueue;
WorkQueue GLVariableAllocationSupport::_promoteQueue;
WorkQueue GLVariableAllocationSupport::_demoteQueue;
TexturePointer GLVariableAllocationSupport::_currentTransferTexture;
TransferJobPointer GLVariableAllocationSupport::_currentTransferJob;
size_t GLVariableAllocationSupport::_frameTexturesCreated { 0 };
#define OVERSUBSCRIBED_PRESSURE_VALUE 0.95f
@ -176,30 +174,19 @@ const uvec3 GLVariableAllocationSupport::MAX_TRANSFER_DIMENSIONS { 1024, 1024, 1
const size_t GLVariableAllocationSupport::MAX_TRANSFER_SIZE = GLVariableAllocationSupport::MAX_TRANSFER_DIMENSIONS.x * GLVariableAllocationSupport::MAX_TRANSFER_DIMENSIONS.y * 4;
#if THREADED_TEXTURE_BUFFERING
std::shared_ptr<std::thread> TransferJob::_bufferThread { nullptr };
std::atomic<bool> TransferJob::_shutdownBufferingThread { false };
Mutex TransferJob::_mutex;
TransferJob::VoidLambdaQueue TransferJob::_bufferLambdaQueue;
void TransferJob::startTransferLoop() {
if (_bufferThread) {
return;
}
_shutdownBufferingThread = false;
_bufferThread = std::make_shared<std::thread>([] {
TransferJob::bufferLoop();
TexturePointer GLVariableAllocationSupport::_currentTransferTexture;
TransferJobPointer GLVariableAllocationSupport::_currentTransferJob;
QThreadPool* TransferJob::_bufferThreadPool { nullptr };
// Lazily create the shared thread pool used to run texture buffering jobs.
// std::call_once guarantees exactly one initialization even if several
// threads race into this function.
void TransferJob::startBufferingThread() {
    static std::once_flag once;
    std::call_once(once, [&] {
        // Parented to qApp so Qt tears the pool down with the application.
        _bufferThreadPool = new QThreadPool(qApp);
        // A single worker serializes buffering jobs; buffering can invoke
        // disk IO, so we deliberately avoid parallel readers here.
        _bufferThreadPool->setMaxThreadCount(1);
    });
}
// Shut down the background buffering thread and block until it has exited.
// No-op when the thread was never started.
void TransferJob::stopTransferLoop() {
    if (!_bufferThread) {
        return;
    }
    // Signal the loop to exit, then wait for it to finish its current pass.
    _shutdownBufferingThread = true;
    _bufferThread->join();
    _bufferThread.reset();
    // Clear the flag so a subsequent startTransferLoop() can restart cleanly.
    _shutdownBufferingThread = false;
}
#endif
TransferJob::TransferJob(const GLTexture& parent, uint16_t sourceMip, uint16_t targetMip, uint8_t face, uint32_t lines, uint32_t lineOffset)
@ -233,7 +220,6 @@ TransferJob::TransferJob(const GLTexture& parent, uint16_t sourceMip, uint16_t t
// Buffering can invoke disk IO, so it should be off of the main and render threads
_bufferingLambda = [=] {
_mipData = _parent._gpuObject.accessStoredMipFace(sourceMip, face)->createView(_transferSize, _transferOffset);
_bufferingCompleted = true;
};
_transferLambda = [=] {
@ -243,65 +229,66 @@ TransferJob::TransferJob(const GLTexture& parent, uint16_t sourceMip, uint16_t t
}
TransferJob::TransferJob(const GLTexture& parent, std::function<void()> transferLambda)
: _parent(parent), _bufferingCompleted(true), _transferLambda(transferLambda) {
: _parent(parent), _bufferingRequired(false), _transferLambda(transferLambda) {
}
// On destruction, report this job's _transferSize bytes as no longer pending
// so the backend's texture-transfer accounting stays balanced.
TransferJob::~TransferJob() {
    Backend::updateTextureTransferPendingSize(_transferSize, 0);
}
bool TransferJob::tryTransfer() {
// Disable threaded texture transfer for now
#if THREADED_TEXTURE_BUFFERING
// Are we ready to transfer
if (_bufferingCompleted) {
_transferLambda();
if (!bufferingCompleted()) {
startBuffering();
return false;
}
#else
if (_bufferingRequired) {
_bufferingLambda();
}
#endif
_transferLambda();
return true;
}
#if THREADED_TEXTURE_BUFFERING
bool TransferJob::bufferingRequired() const {
if (!_bufferingRequired) {
return false;
}
// The default state of a QFuture is with status Canceled | Started | Finished,
// so we have to check isCancelled before we check the actual state
if (_bufferingStatus.isCanceled()) {
return true;
}
startBuffering();
return false;
#else
if (!_bufferingCompleted) {
_bufferingLambda();
_bufferingCompleted = true;
}
_transferLambda();
return true;
#endif
return !_bufferingStatus.isStarted();
}
#if THREADED_TEXTURE_BUFFERING
// Reports whether this job's asynchronous buffering work has finished.
// Jobs that never required buffering are trivially complete.
bool TransferJob::bufferingCompleted() const {
    if (!_bufferingRequired) {
        return true;
    }
    // A default-constructed QFuture reports Canceled | Started | Finished,
    // so the cancelled test must come before the finished test: a cancelled
    // (i.e. never-launched) future is NOT complete.
    return !_bufferingStatus.isCanceled() && _bufferingStatus.isFinished();
}
// Queue this job's buffering lambda for the background buffering thread.
// Idempotent: a job whose buffering has already started is not queued twice.
void TransferJob::startBuffering() {
    if (_bufferingStarted) {
        return;
    }
    _bufferingStarted = true;
    {
        // Hold the lock only long enough to push; the buffering thread swaps
        // the whole queue out under the same mutex.
        Lock lock(_mutex);
        _bufferLambdaQueue.push(_bufferingLambda);
    }
}
void TransferJob::bufferLoop() {
while (!_shutdownBufferingThread) {
VoidLambdaQueue workingQueue;
{
Lock lock(_mutex);
_bufferLambdaQueue.swap(workingQueue);
}
if (workingQueue.empty()) {
QThread::msleep(5);
continue;
}
while (!workingQueue.empty()) {
workingQueue.front()();
workingQueue.pop();
}
if (bufferingRequired()) {
assert(_bufferingStatus.isCanceled());
_bufferingStatus = QtConcurrent::run(_bufferThreadPool, [=] {
_bufferingLambda();
});
assert(!_bufferingStatus.isCanceled());
assert(_bufferingStatus.isStarted());
}
}
#endif
@ -316,7 +303,9 @@ GLVariableAllocationSupport::~GLVariableAllocationSupport() {
void GLVariableAllocationSupport::addMemoryManagedTexture(const TexturePointer& texturePointer) {
_memoryManagedTextures.push_back(texturePointer);
addToWorkQueue(texturePointer);
if (MemoryPressureState::Idle != _memoryPressureState) {
addToWorkQueue(texturePointer);
}
}
void GLVariableAllocationSupport::addToWorkQueue(const TexturePointer& texturePointer) {
@ -345,10 +334,8 @@ void GLVariableAllocationSupport::addToWorkQueue(const TexturePointer& texturePo
break;
case MemoryPressureState::Idle:
break;
default:
Q_UNREACHABLE();
break;
}
}
@ -364,10 +351,10 @@ WorkQueue& GLVariableAllocationSupport::getActiveWorkQueue() {
case MemoryPressureState::Transfer:
return _transferQueue;
default:
case MemoryPressureState::Idle:
Q_UNREACHABLE();
break;
}
Q_UNREACHABLE();
return empty;
}
@ -460,16 +447,11 @@ void GLVariableAllocationSupport::updateMemoryPressure() {
}
if (newState != _memoryPressureState) {
_memoryPressureState = newState;
#if THREADED_TEXTURE_BUFFERING
if (MemoryPressureState::Transfer == _memoryPressureState) {
TransferJob::stopTransferLoop();
TransferJob::startBufferingThread();
}
_memoryPressureState = newState;
if (MemoryPressureState::Transfer == _memoryPressureState) {
TransferJob::startTransferLoop();
}
#else
_memoryPressureState = newState;
#endif
// Clear the existing queue
_transferQueue = WorkQueue();
@ -487,49 +469,111 @@ void GLVariableAllocationSupport::updateMemoryPressure() {
}
}
// Scans the queue from the top, discarding entries whose texture has been
// destroyed or that cannot make progress in the current memory-pressure
// state, and returns the first texture that still has applicable work.
// Returns a null TexturePointer when the queue is exhausted.
TexturePointer GLVariableAllocationSupport::getNextWorkQueueItem(WorkQueue& workQueue) {
    while (!workQueue.empty()) {
        auto candidate = workQueue.top().first.lock();
        if (!candidate) {
            // The weak pointer expired; drop the stale entry and keep looking.
            workQueue.pop();
            continue;
        }

        // Check whether the resulting texture can actually have work performed
        auto* glObject = Backend::getGPUObject<GLTexture>(*candidate);
        auto* varObject = dynamic_cast<GLVariableAllocationSupport*>(glObject);
        bool actionable = false;
        if (MemoryPressureState::Oversubscribed == _memoryPressureState) {
            actionable = varObject->canDemote();
        } else if (MemoryPressureState::Undersubscribed == _memoryPressureState) {
            actionable = varObject->canPromote();
        } else if (MemoryPressureState::Transfer == _memoryPressureState) {
            actionable = varObject->hasPendingTransfers();
        } else {
            // Idle never reaches this function.
            Q_UNREACHABLE();
        }
        if (actionable) {
            return candidate;
        }

        // No work to do for this texture in the current state; discard it.
        workQueue.pop();
    }
    return TexturePointer();
}
// Performs one unit of work (demote / promote / transfer) on the most urgent
// actionable texture in the given queue, according to the current
// memory-pressure state. Textures are popped and re-queued via
// addToWorkQueue so remaining work is revisited on later passes.
void GLVariableAllocationSupport::processWorkQueue(WorkQueue& workQueue) {
    if (workQueue.empty()) {
        return;
    }

    // Get the front of the work queue to perform work
    auto texture = getNextWorkQueueItem(workQueue);
    if (!texture) {
        return;
    }

    // Grab the first item off the demote queue
    PROFILE_RANGE(render_gpu_gl, __FUNCTION__);
    GLTexture* gltexture = Backend::getGPUObject<GLTexture>(*texture);
    GLVariableAllocationSupport* vartexture = dynamic_cast<GLVariableAllocationSupport*>(gltexture);
    switch (_memoryPressureState) {
        case MemoryPressureState::Oversubscribed:
            // Over budget: shrink this texture's allocation one step.
            vartexture->demote();
            workQueue.pop();
            addToWorkQueue(texture);
            break;

        case MemoryPressureState::Undersubscribed:
            // Headroom available: grow this texture's allocation one step.
            vartexture->promote();
            workQueue.pop();
            addToWorkQueue(texture);
            break;

        case MemoryPressureState::Transfer:
            // executeNextTransfer returns true once a transfer actually ran
            // (asynchronous buffering may still be in flight otherwise).
            if (vartexture->executeNextTransfer(texture)) {
                workQueue.pop();
                addToWorkQueue(texture);
#if THREADED_TEXTURE_BUFFERING
                // Eagerly start the next buffering job if possible
                texture = getNextWorkQueueItem(workQueue);
                if (texture) {
                    gltexture = Backend::getGPUObject<GLTexture>(*texture);
                    vartexture = dynamic_cast<GLVariableAllocationSupport*>(gltexture);
                    vartexture->executeNextBuffer(texture);
                }
#endif
            }
            break;

        case MemoryPressureState::Idle:
            // Callers never process queues while Idle.
            Q_UNREACHABLE();
            break;
    }
}
void GLVariableAllocationSupport::processWorkQueues() {
if (MemoryPressureState::Idle == _memoryPressureState) {
return;
}
auto& workQueue = getActiveWorkQueue();
PROFILE_RANGE(render_gpu_gl, __FUNCTION__);
while (!workQueue.empty()) {
auto workTarget = workQueue.top();
workQueue.pop();
auto texture = workTarget.first.lock();
if (!texture) {
continue;
}
// Grab the first item off the demote queue
GLTexture* gltexture = Backend::getGPUObject<GLTexture>(*texture);
GLVariableAllocationSupport* vartexture = dynamic_cast<GLVariableAllocationSupport*>(gltexture);
if (MemoryPressureState::Oversubscribed == _memoryPressureState) {
if (!vartexture->canDemote()) {
continue;
}
vartexture->demote();
_memoryPressureStateStale = true;
} else if (MemoryPressureState::Undersubscribed == _memoryPressureState) {
if (!vartexture->canPromote()) {
continue;
}
vartexture->promote();
_memoryPressureStateStale = true;
} else if (MemoryPressureState::Transfer == _memoryPressureState) {
if (!vartexture->hasPendingTransfers()) {
continue;
}
vartexture->executeNextTransfer(texture);
} else {
Q_UNREACHABLE();
}
// Reinject into the queue if more work to be done
addToWorkQueue(texture);
break;
}
// Do work on the front of the queue
processWorkQueue(workQueue);
if (workQueue.empty()) {
_memoryPressureState = MemoryPressureState::Idle;
@ -543,28 +587,83 @@ void GLVariableAllocationSupport::manageMemory() {
processWorkQueues();
}
bool GLVariableAllocationSupport::executeNextTransfer(const TexturePointer& currentTexture) {
#if THREADED_TEXTURE_BUFFERING
// If a transfer job is active on the buffering thread, but has not completed its buffering lambda,
// then we need to exit early, since we don't want to have the transfer job leave scope while it's
// being used in another thread -- See https://highfidelity.fogbugz.com/f/cases/4626
if (_currentTransferJob && !_currentTransferJob->bufferingCompleted()) {
return false;
}
#endif
void GLVariableAllocationSupport::executeNextTransfer(const TexturePointer& currentTexture) {
if (_populatedMip <= _allocatedMip) {
#if THREADED_TEXTURE_BUFFERING
_currentTransferJob.reset();
_currentTransferTexture.reset();
#endif
return true;
}
// If the transfer queue is empty, rebuild it
if (_pendingTransfers.empty()) {
populateTransferQueue();
}
bool result = false;
if (!_pendingTransfers.empty()) {
#if THREADED_TEXTURE_BUFFERING
// If there is a current transfer, but it's not the top of the pending transfer queue, then it's an orphan, so we want to abandon it.
if (_currentTransferJob && _currentTransferJob != _pendingTransfers.front()) {
_currentTransferJob.reset();
}
if (!_currentTransferJob) {
// Keeping hold of a strong pointer to the transfer job ensures that if the pending transfer queue is rebuilt, the transfer job
// doesn't leave scope, causing a crash in the buffering thread
_currentTransferJob = _pendingTransfers.front();
// Keeping hold of a strong pointer during the transfer ensures that the transfer thread cannot try to access a destroyed texture
_currentTransferTexture = currentTexture;
}
// transfer jobs use asynchronous buffering of the texture data because it may involve disk IO, so we execute a try here to determine if the buffering
// is complete
if (_currentTransferJob->tryTransfer()) {
_pendingTransfers.pop();
// Once a given job is finished, release the shared pointers keeping them alive
_currentTransferTexture.reset();
_currentTransferJob.reset();
result = true;
}
#else
if (_pendingTransfers.front()->tryTransfer()) {
_pendingTransfers.pop();
result = true;
}
#endif
}
return result;
}
#if THREADED_TEXTURE_BUFFERING
void GLVariableAllocationSupport::executeNextBuffer(const TexturePointer& currentTexture) {
if (_currentTransferJob && !_currentTransferJob->bufferingCompleted()) {
return;
}
// If the transfer queue is empty, rebuild it
if (_pendingTransfers.empty()) {
populateTransferQueue();
}
if (!_pendingTransfers.empty()) {
// Keeping hold of a strong pointer during the transfer ensures that the transfer thread cannot try to access a destroyed texture
_currentTransferTexture = currentTexture;
// Keeping hold of a strong pointer to the transfer job ensures that if the pending transfer queue is rebuilt, the transfer job
// doesn't leave scope, causing a crash in the buffering thread
_currentTransferJob = _pendingTransfers.front();
// transfer jobs use asynchronous buffering of the texture data because it may involve disk IO, so we execute a try here to determine if the buffering
// is complete
if (_currentTransferJob->tryTransfer()) {
_pendingTransfers.pop();
_currentTransferTexture.reset();
_currentTransferJob.reset();
if (!_currentTransferJob) {
_currentTransferJob = _pendingTransfers.front();
_currentTransferTexture = currentTexture;
}
_currentTransferJob->startBuffering();
}
}
#endif

View file

@ -8,6 +8,9 @@
#ifndef hifi_gpu_gl_GLTexture_h
#define hifi_gpu_gl_GLTexture_h
#include <QtCore/QThreadPool>
#include <QtConcurrent>
#include "GLShared.h"
#include "GLBackend.h"
#include "GLTexelFormat.h"
@ -47,24 +50,19 @@ public:
class TransferJob {
using VoidLambda = std::function<void()>;
using VoidLambdaQueue = std::queue<VoidLambda>;
using ThreadPointer = std::shared_ptr<std::thread>;
const GLTexture& _parent;
Texture::PixelsPointer _mipData;
size_t _transferOffset { 0 };
size_t _transferSize { 0 };
// Indicates if a transfer from backing storage to internal storage has started
bool _bufferingStarted { false };
bool _bufferingCompleted { false };
bool _bufferingRequired { true };
VoidLambda _transferLambda;
VoidLambda _bufferingLambda;
#if THREADED_TEXTURE_BUFFERING
static Mutex _mutex;
static VoidLambdaQueue _bufferLambdaQueue;
static ThreadPointer _bufferThread;
static std::atomic<bool> _shutdownBufferingThread;
static void bufferLoop();
// Indicates if a transfer from backing storage to internal storage has started
QFuture<void> _bufferingStatus;
static QThreadPool* _bufferThreadPool;
#endif
public:
@ -75,14 +73,13 @@ public:
bool tryTransfer();
#if THREADED_TEXTURE_BUFFERING
static void startTransferLoop();
static void stopTransferLoop();
void startBuffering();
bool bufferingRequired() const;
bool bufferingCompleted() const;
static void startBufferingThread();
#endif
private:
#if THREADED_TEXTURE_BUFFERING
void startBuffering();
#endif
void transfer();
};
@ -100,8 +97,10 @@ protected:
static WorkQueue _transferQueue;
static WorkQueue _promoteQueue;
static WorkQueue _demoteQueue;
#if THREADED_TEXTURE_BUFFERING
static TexturePointer _currentTransferTexture;
static TransferJobPointer _currentTransferJob;
#endif
static const uvec3 INITIAL_MIP_TRANSFER_DIMENSIONS;
static const uvec3 MAX_TRANSFER_DIMENSIONS;
static const size_t MAX_TRANSFER_SIZE;
@ -109,6 +108,8 @@ protected:
static void updateMemoryPressure();
static void processWorkQueues();
static void processWorkQueue(WorkQueue& workQueue);
static TexturePointer getNextWorkQueueItem(WorkQueue& workQueue);
static void addToWorkQueue(const TexturePointer& texture);
static WorkQueue& getActiveWorkQueue();
@ -118,7 +119,10 @@ protected:
bool canPromote() const { return _allocatedMip > _minAllocatedMip; }
bool canDemote() const { return _allocatedMip < _maxAllocatedMip; }
bool hasPendingTransfers() const { return _populatedMip > _allocatedMip; }
void executeNextTransfer(const TexturePointer& currentTexture);
#if THREADED_TEXTURE_BUFFERING
void executeNextBuffer(const TexturePointer& currentTexture);
#endif
bool executeNextTransfer(const TexturePointer& currentTexture);
virtual void populateTransferQueue() = 0;
virtual void promote() = 0;
virtual void demote() = 0;

View file

@ -17,7 +17,6 @@
#include <thread>
#define INCREMENTAL_TRANSFER 0
#define THREADED_TEXTURE_BUFFERING 1
#define GPU_SSBO_TRANSFORM_OBJECT 1
namespace gpu { namespace gl45 {

View file

@ -17,6 +17,7 @@
#include <unordered_set>
#include <QDir>
#include <QSaveFile>
#include <PathUtils.h>
@ -110,13 +111,14 @@ FilePointer FileCache::writeFile(const char* data, File::Metadata&& metadata) {
return file;
}
// write the new file
FILE* saveFile = fopen(filepath.c_str(), "wb");
if (saveFile != nullptr && fwrite(data, metadata.length, 1, saveFile) && fclose(saveFile) == 0) {
QSaveFile saveFile(QString::fromStdString(filepath));
if (saveFile.open(QIODevice::WriteOnly)
&& saveFile.write(data, metadata.length) == static_cast<qint64>(metadata.length)
&& saveFile.commit()) {
file = addFile(std::move(metadata), filepath);
} else {
qCWarning(file_cache, "[%s] Failed to write %s (%s)", _dirname.c_str(), metadata.key.c_str(), strerror(errno));
errno = 0;
qCWarning(file_cache, "[%s] Failed to write %s", _dirname.c_str(), metadata.key.c_str());
}
return file;

View file

@ -1376,7 +1376,9 @@ function MyController(hand) {
visible: true,
alpha: 1,
parentID: AVATAR_SELF_ID,
parentJointIndex: this.controllerJointIndex,
parentJointIndex: MyAvatar.getJointIndex(this.hand === RIGHT_HAND ?
"_CAMERA_RELATIVE_CONTROLLER_RIGHTHAND" :
"_CAMERA_RELATIVE_CONTROLLER_LEFTHAND"),
endParentID: farParentID
};
this.overlayLine = Overlays.addOverlay("line3d", lineProperties);

View file

@ -96,7 +96,7 @@ function calcSpawnInfo(hand, height) {
* @param hand [number] -1 indicates no hand, Controller.Standard.RightHand or Controller.Standard.LeftHand
* @param clientOnly [bool] true indicates tablet model is only visible to client.
*/
WebTablet = function (url, width, dpi, hand, clientOnly, location) {
WebTablet = function (url, width, dpi, hand, clientOnly, location, visible) {
var _this = this;
@ -107,6 +107,8 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location) {
this.depth = TABLET_NATURAL_DIMENSIONS.z * tabletScaleFactor;
this.landscape = false;
visible = visible === true;
if (dpi) {
this.dpi = dpi;
} else {
@ -125,7 +127,8 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location) {
"grabbableKey": {"grabbable": true}
}),
dimensions: this.getDimensions(),
parentID: AVATAR_SELF_ID
parentID: AVATAR_SELF_ID,
visible: visible
};
// compute position, rotation & parentJointIndex of the tablet
@ -158,7 +161,8 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location) {
parentID: this.tabletEntityID,
parentJointIndex: -1,
showKeyboardFocusHighlight: false,
isAA: HMD.active
isAA: HMD.active,
visible: visible
});
var HOME_BUTTON_Y_OFFSET = (this.height / 2) - (this.height / 20);
@ -168,7 +172,7 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location) {
localRotation: {x: 0, y: 1, z: 0, w: 0},
dimensions: { x: 4 * tabletScaleFactor, y: 4 * tabletScaleFactor, z: 4 * tabletScaleFactor},
alpha: 0.0,
visible: true,
visible: visible,
drawInFront: false,
parentID: this.tabletEntityID,
parentJointIndex: -1

View file

@ -3,6 +3,7 @@
// examples
//
// Created by Brad hefta-Gaub on 10/1/14.
// Modified by Daniela Fontes @DanielaFifo and Tiago Andrade @TagoWill on 4/7/2017
// Copyright 2014 High Fidelity, Inc.
//
// This script implements a class useful for building tools for editing entities.
@ -2592,6 +2593,16 @@ SelectionDisplay = (function() {
// pivot - point to use as a pivot
// offset - the position of the overlay tool relative to the selections center position
var makeStretchTool = function(stretchMode, direction, pivot, offset, customOnMove) {
// directionFor3DStretch - direction and pivot for 3D stretch
// distanceFor3DStretch - distance from the intersection point and the handController
// used to increase the scale taking into account the distance to the object
// DISTANCE_INFLUENCE_THRESHOLD - constant that holds the minimum distance where the
// distance to the object will influence the stretch/resize/scale
var directionFor3DStretch = getDirectionsFor3DStretch(stretchMode);
var distanceFor3DStretch = 0;
var DISTANCE_INFLUENCE_THRESHOLD = 1.2;
var signs = {
x: direction.x < 0 ? -1 : (direction.x > 0 ? 1 : 0),
y: direction.y < 0 ? -1 : (direction.y > 0 ? 1 : 0),
@ -2603,18 +2614,23 @@ SelectionDisplay = (function() {
y: Math.abs(direction.y) > 0 ? 1 : 0,
z: Math.abs(direction.z) > 0 ? 1 : 0,
};
var numDimensions = mask.x + mask.y + mask.z;
var planeNormal = null;
var lastPick = null;
var lastPick3D = null;
var initialPosition = null;
var initialDimensions = null;
var initialIntersection = null;
var initialProperties = null;
var registrationPoint = null;
var deltaPivot = null;
var deltaPivot3D = null;
var pickRayPosition = null;
var pickRayPosition3D = null;
var rotation = null;
var onBegin = function(event) {
@ -2652,8 +2668,20 @@ SelectionDisplay = (function() {
// Scaled offset in world coordinates
var scaledOffsetWorld = vec3Mult(initialDimensions, offsetRP);
pickRayPosition = Vec3.sum(initialPosition, Vec3.multiplyQbyV(rotation, scaledOffsetWorld));
if (directionFor3DStretch) {
// pivot, offset and pickPlanePosition for 3D manipulation
var scaledPivot3D = Vec3.multiply(0.5, Vec3.multiply(1.0, directionFor3DStretch));
deltaPivot3D = Vec3.subtract(centeredRP, scaledPivot3D);
var scaledOffsetWorld3D = vec3Mult(initialDimensions,
Vec3.subtract(Vec3.multiply(0.5, Vec3.multiply(-1.0, directionFor3DStretch)),
centeredRP));
pickRayPosition3D = Vec3.sum(initialPosition, Vec3.multiplyQbyV(rotation, scaledOffsetWorld));
}
var start = null;
var end = null;
if (numDimensions == 1 && mask.x) {
@ -2754,12 +2782,25 @@ SelectionDisplay = (function() {
};
}
}
planeNormal = Vec3.multiplyQbyV(rotation, planeNormal);
var pickRay = generalComputePickRay(event.x, event.y);
lastPick = rayPlaneIntersection(pickRay,
pickRayPosition,
planeNormal);
var planeNormal3D = {
x: 0,
y: 0,
z: 0
};
if (directionFor3DStretch) {
lastPick3D = rayPlaneIntersection(pickRay,
pickRayPosition3D,
planeNormal3D);
distanceFor3DStretch = Vec3.length(Vec3.subtract(pickRayPosition3D, pickRay.origin));
}
SelectionManager.saveProperties();
};
@ -2790,24 +2831,50 @@ SelectionDisplay = (function() {
dimensions = SelectionManager.worldDimensions;
rotation = SelectionManager.worldRotation;
}
var localDeltaPivot = deltaPivot;
var localSigns = signs;
var pickRay = generalComputePickRay(event.x, event.y);
newPick = rayPlaneIntersection(pickRay,
// Are we using handControllers or Mouse - only relevant for 3D tools
var controllerPose = getControllerWorldLocation(activeHand, true);
if (HMD.isHMDAvailable()
&& HMD.isHandControllerAvailable() && controllerPose.valid && that.triggered && directionFor3DStretch) {
localDeltaPivot = deltaPivot3D;
newPick = pickRay.origin;
var vector = Vec3.subtract(newPick, lastPick3D);
vector = Vec3.multiplyQbyV(Quat.inverse(rotation), vector);
if (distanceFor3DStretch > DISTANCE_INFLUENCE_THRESHOLD) {
// Range of Motion
vector = Vec3.multiply(distanceFor3DStretch , vector);
}
localSigns = directionFor3DStretch;
} else {
newPick = rayPlaneIntersection(pickRay,
pickRayPosition,
planeNormal);
var vector = Vec3.subtract(newPick, lastPick);
var vector = Vec3.subtract(newPick, lastPick);
vector = Vec3.multiplyQbyV(Quat.inverse(rotation), vector);
vector = vec3Mult(mask, vector);
vector = Vec3.multiplyQbyV(Quat.inverse(rotation), vector);
vector = vec3Mult(mask, vector);
}
if (customOnMove) {
var change = Vec3.multiply(-1, vec3Mult(signs, vector));
var change = Vec3.multiply(-1, vec3Mult(localSigns, vector));
customOnMove(vector, change);
} else {
vector = grid.snapToSpacing(vector);
var changeInDimensions = Vec3.multiply(-1, vec3Mult(signs, vector));
var changeInDimensions = Vec3.multiply(-1, vec3Mult(localSigns, vector));
var newDimensions;
if (proportional) {
var absX = Math.abs(changeInDimensions.x);
@ -2829,37 +2896,39 @@ SelectionDisplay = (function() {
} else {
newDimensions = Vec3.sum(initialDimensions, changeInDimensions);
}
newDimensions.x = Math.max(newDimensions.x, MINIMUM_DIMENSION);
newDimensions.y = Math.max(newDimensions.y, MINIMUM_DIMENSION);
newDimensions.z = Math.max(newDimensions.z, MINIMUM_DIMENSION);
var changeInPosition = Vec3.multiplyQbyV(rotation, vec3Mult(deltaPivot, changeInDimensions));
var newPosition = Vec3.sum(initialPosition, changeInPosition);
for (var i = 0; i < SelectionManager.selections.length; i++) {
Entities.editEntity(SelectionManager.selections[i], {
position: newPosition,
dimensions: newDimensions,
});
}
var wantDebug = false;
if (wantDebug) {
print(stretchMode);
//Vec3.print(" newIntersection:", newIntersection);
Vec3.print(" vector:", vector);
//Vec3.print(" oldPOS:", oldPOS);
//Vec3.print(" newPOS:", newPOS);
Vec3.print(" changeInDimensions:", changeInDimensions);
Vec3.print(" newDimensions:", newDimensions);
Vec3.print(" changeInPosition:", changeInPosition);
Vec3.print(" newPosition:", newPosition);
}
SelectionManager._update();
}
newDimensions.x = Math.max(newDimensions.x, MINIMUM_DIMENSION);
newDimensions.y = Math.max(newDimensions.y, MINIMUM_DIMENSION);
newDimensions.z = Math.max(newDimensions.z, MINIMUM_DIMENSION);
var changeInPosition = Vec3.multiplyQbyV(rotation, vec3Mult(localDeltaPivot, changeInDimensions));
var newPosition = Vec3.sum(initialPosition, changeInPosition);
for (var i = 0; i < SelectionManager.selections.length; i++) {
Entities.editEntity(SelectionManager.selections[i], {
position: newPosition,
dimensions: newDimensions,
});
}
var wantDebug = false;
if (wantDebug) {
print(stretchMode);
//Vec3.print(" newIntersection:", newIntersection);
Vec3.print(" vector:", vector);
//Vec3.print(" oldPOS:", oldPOS);
//Vec3.print(" newPOS:", newPOS);
Vec3.print(" changeInDimensions:", changeInDimensions);
Vec3.print(" newDimensions:", newDimensions);
Vec3.print(" changeInPosition:", changeInPosition);
Vec3.print(" newPosition:", newPosition);
}
SelectionManager._update();
};
@ -2870,6 +2939,75 @@ SelectionDisplay = (function() {
onEnd: onEnd
};
};
// Direction for the stretch tool when using hand controller.
// Each key names a stretch-handle corner (L/R = left/right, B/T = bottom/top,
// N/F = near/far) and maps it to the sign of motion along each axis.
var directionsFor3DGrab = {
    LBN: { x: 1, y: 1, z: 1 },
    RBN: { x: -1, y: 1, z: 1 },
    LBF: { x: 1, y: 1, z: -1 },
    RBF: { x: -1, y: 1, z: -1 },
    LTN: { x: 1, y: -1, z: 1 },
    RTN: { x: -1, y: -1, z: 1 },
    LTF: { x: 1, y: -1, z: -1 },
    RTF: { x: -1, y: -1, z: -1 }
};

// Returns a vector with directions for the stretch tool in 3D using hand
// controllers, or null when mode is not one of the eight corner stretch modes.
function getDirectionsFor3DStretch(mode) {
    var STRETCH_PREFIX = "STRETCH_";
    if (typeof mode !== "string" || mode.indexOf(STRETCH_PREFIX) !== 0) {
        return null;
    }
    var corner = mode.substring(STRETCH_PREFIX.length);
    // Only the table's own corner keys are valid; anything else is null.
    return directionsFor3DGrab.hasOwnProperty(corner) ? directionsFor3DGrab[corner] : null;
}
function addStretchTool(overlay, mode, pivot, direction, offset, handleMove) {
if (!pivot) {

View file

@ -366,6 +366,8 @@
return nearestAvatar;
}
// Broadcast a handshake message on the shared channel. The sender's login
// state is stamped onto every message (logged in or not) so receivers can
// reject connections from anonymous users.
function messageSend(message) {
    var loggedIn = Account.isLoggedIn();
    message.isLoggedIn = loggedIn;
    var payload = JSON.stringify(message);
    Messages.sendMessage(MESSAGE_CHANNEL, payload);
}
function handStringMessageSend(message) {
@ -463,7 +465,9 @@
endHandshakeAnimation();
// No-op if we were successful, but this way we ensure that failures and abandoned handshakes don't leave us
// in a weird state.
request({ uri: requestUrl, method: 'DELETE' }, debug);
if (Account.isLoggedIn()) {
request({ uri: requestUrl, method: 'DELETE' }, debug);
}
}
function updateTriggers(value, fromKeyboard, hand) {
@ -590,7 +594,7 @@
}
}
function makeConnection(id) {
function makeConnection(id, isLoggedIn) {
// send done to let the connection know you have made connection.
messageSend({
key: "done",
@ -606,7 +610,10 @@
// It would be "simpler" to skip this and just look at the response, but:
// 1. We don't want to bother the metaverse with request that we know will fail.
// 2. We don't want our code here to be dependent on precisely how the metaverse responds (400, 401, etc.)
if (!Account.isLoggedIn()) {
// 3. We also don't want to connect to someone who is anonymous _now_, but was not earlier and still has
// the same node id. Since the messaging doesn't say _who_ isn't logged in, fail the same as if we are
// not logged in.
if (!Account.isLoggedIn() || isLoggedIn === false) {
handleConnectionResponseAndMaybeRepeat("401:Unauthorized", {statusCode: 401});
return;
}
@ -628,8 +635,12 @@
// we change states, start the connectionInterval where we check
// to be sure the hand is still close enough. If not, we terminate
// the interval, go back to the waiting state. If we make it
// the entire CONNECTING_TIME, we make the connection.
function startConnecting(id, jointIndex) {
// the entire CONNECTING_TIME, we make the connection. We pass in
// whether or not the connecting id is actually logged in, as now we
// will allow to start the connection process but have it stop with a
// fail message before trying to call the backend if the other guy isn't
// logged in.
function startConnecting(id, jointIndex, isLoggedIn) {
var count = 0;
debug("connecting", id, "hand", jointIndex);
// do we need to do this?
@ -671,7 +682,7 @@
startHandshake();
} else if (count > CONNECTING_TIME / CONNECTING_INTERVAL) {
debug("made connection with " + id);
makeConnection(id);
makeConnection(id, isLoggedIn);
stopConnecting();
}
}, CONNECTING_INTERVAL);
@ -736,7 +747,7 @@
if (state === STATES.WAITING && (!connectingId || connectingId === senderID)) {
if (message.id === MyAvatar.sessionUUID) {
stopWaiting();
startConnecting(senderID, exisitingOrSearchedJointIndex());
startConnecting(senderID, exisitingOrSearchedJointIndex(), message.isLoggedIn);
} else if (connectingId) {
// this is for someone else (we lost race in connectionRequest),
// so lets start over
@ -755,7 +766,7 @@
startHandshake();
break;
}
startConnecting(senderID, connectingHandJointIndex);
startConnecting(senderID, connectingHandJointIndex, message.isLoggedIn);
}
break;
case "done":
@ -775,7 +786,7 @@
} else {
// they just created a connection request to us, and we are connecting to
// them, so lets just stop connecting and make connection..
makeConnection(connectingId);
makeConnection(connectingId, message.isLoggedIn);
stopConnecting();
}
} else {

View file

@ -111,7 +111,7 @@ function onMessage(message) {
case 'openSettings':
if ((HMD.active && Settings.getValue("hmdTabletBecomesToolbar", false))
|| (!HMD.active && Settings.getValue("desktopTabletBecomesToolbar", true))) {
Desktop.show("hifi/dialogs/GeneralPreferencesDialog.qml", "General Preferences");
Desktop.show("hifi/dialogs/GeneralPreferencesDialog.qml", "GeneralPreferencesDialog");
} else {
tablet.loadQMLOnTop("TabletGeneralPreferences.qml");
}

View file

@ -92,7 +92,7 @@
tabletScalePercentage = getTabletScalePercentageFromSettings();
UIWebTablet = new WebTablet("qml/hifi/tablet/TabletRoot.qml",
DEFAULT_WIDTH * (tabletScalePercentage / 100),
null, activeHand, true);
null, activeHand, true, null, false);
UIWebTablet.register();
HMD.tabletID = UIWebTablet.tabletEntityID;
HMD.homeButtonID = UIWebTablet.homeButtonID;

View file

@ -1,4 +1,5 @@
"use strict";
/*jslint nomen: true, plusplus: true, vars: true*/
/*global AvatarList, Entities, EntityViewer, Script, SoundCache, Audio, print, randFloat*/
//
@ -38,19 +39,27 @@ var DEFAULT_SOUND_DATA = {
playbackGapRange: 0 // in ms
};
//var AGENT_AVATAR_POSITION = { x: -1.5327, y: 0.672515, z: 5.91573 };
var AGENT_AVATAR_POSITION = { x: -2.83785, y: 1.45243, z: -13.6042 };
//var isACScript = this.EntityViewer !== undefined;
var isACScript = true;
Script.include("http://hifi-content.s3.amazonaws.com/ryan/development/utils_ryan.js");
if (isACScript) {
Agent.isAvatar = true; // This puts a robot at 0,0,0, but is currently necessary in order to use AvatarList.
Avatar.skeletonModelURL = "http://hifi-content.s3.amazonaws.com/ozan/dev/avatars/invisible_avatar/invisible_avatar.fst";
Avatar.position = AGENT_AVATAR_POSITION;
Agent.isListeningToAudioStream = true;
}
// No-op sink for intentionally-unused arguments (see the jslint directive
// above); callers pass values here to document that they are deliberately
// not used yet.
function ignore() {}
// Debug logging hook; currently disabled. Uncomment the print line to dump
// all arguments as JSON rather than "[Object object]".
function debug() { // Display the arguments not just [Object object].
    //print.apply(null, [].map.call(arguments, JSON.stringify));
}
// Uniform random float in the half-open interval [low, high).
function randFloat(low, high) {
    var span = high - low;
    return Math.random() * span + low;
}
if (isACScript) {
EntityViewer.setCenterRadius(QUERY_RADIUS);
}
@ -93,6 +102,7 @@ function EntityDatum(entityIdentifier) { // Just the data of an entity that we n
return;
}
var properties, soundData; // Latest data, pulled from local octree.
// getEntityProperties locks the tree, which competes with the asynchronous processing of queryOctree results.
// Most entity updates are fast and only a very few do getEntityProperties.
function ensureSoundData() { // We only getEntityProperities when we need to.
@ -115,43 +125,54 @@ function EntityDatum(entityIdentifier) { // Just the data of an entity that we n
}
}
}
// Stumbling on big new pile of entities will do a lot of getEntityProperties. Once.
if (that.lastUserDataUpdate < userDataCutoff) { // NO DATA => SOUND DATA
ensureSoundData();
}
if (!that.url) { // NO DATA => NO DATA
return that.stop();
}
if (!that.sound) { // SOUND DATA => DOWNLOADING
that.sound = SoundCache.getSound(soundData.url); // SoundCache can manage duplicates better than we can.
}
if (!that.sound.downloaded) { // DOWNLOADING => DOWNLOADING
return;
}
if (that.playAfter > now) { // DOWNLOADING | WAITING => WAITING
return;
}
ensureSoundData(); // We'll try to play/setOptions and will need position, so we might as well get soundData, too.
if (soundData.url !== that.url) { // WAITING => NO DATA (update next time around)
return that.stop();
}
var options = {
position: properties.position,
loop: soundData.loop || DEFAULT_SOUND_DATA.loop,
volume: soundData.volume || DEFAULT_SOUND_DATA.volume
};
function repeat() {
return !options.loop && (soundData.playbackGap >= 0);
}
function randomizedNextPlay() { // time of next play or recheck, randomized to distribute the work
var range = soundData.playbackGapRange || DEFAULT_SOUND_DATA.playbackGapRange,
base = repeat() ? ((that.sound.duration * MSEC_PER_SEC) + (soundData.playbackGap || DEFAULT_SOUND_DATA.playbackGap)) : RECHECK_TIME;
return now + base + randFloat(-Math.min(base, range), range);
}
if (that.injector && soundData.playing === false) {
that.injector.stop();
that.injector = null;
}
if (!that.injector) {
if (soundData.playing === false) { // WAITING => PLAYING | WAITING
return;
@ -165,6 +186,7 @@ function EntityDatum(entityIdentifier) { // Just the data of an entity that we n
}
return;
}
that.injector.setOptions(options); // PLAYING => UPDATE POSITION ETC
if (!that.injector.playing) { // Subtle: a looping sound will not check playbackGap.
if (repeat()) { // WAITING => PLAYING
@ -178,6 +200,7 @@ function EntityDatum(entityIdentifier) { // Just the data of an entity that we n
}
};
}
function internEntityDatum(entityIdentifier, timestamp, avatarPosition, avatar) {
ignore(avatarPosition, avatar); // We could use avatars and/or avatarPositions to prioritize which ones to play.
var entitySound = entityCache[entityIdentifier];
@ -186,7 +209,9 @@ function internEntityDatum(entityIdentifier, timestamp, avatarPosition, avatar)
}
entitySound.timestamp = timestamp; // Might be updated for multiple avatars. That's fine.
}
var nUpdates = UPDATES_PER_STATS_LOG, lastStats = Date.now();
function updateAllEntityData() { // A fast update of all entities we know about. A few make sounds.
var now = Date.now(),
expirationCutoff = now - EXPIRATION_TIME,

View file

@ -0,0 +1,70 @@
//
// Created by Alan-Michael Moody on 5/2/2017
//
(function () {
    // Applause-o-meter entity script: every SCAN_RATE ms, sample the audio
    // loudness of all avatars and swing this model's joint 0 (the needle) to
    // show the recent average loudness.
    var thisEntityID;

    this.preload = function (entityID) {
        thisEntityID = entityID;
    };

    var SCAN_RATE = 100; // ms between loudness samples
    var REFERENCE_FRAME_COUNT = 30; // size bound for the sliding sample window
    var MAX_AUDIO_THRESHOLD = 16000; // loudness ceiling; also the normalization scale

    var framePool = []; // sliding window of per-scan average loudness values

    // Rounded arithmetic mean; returns 0 for an empty array so an empty
    // avatar list cannot propagate NaN into the needle rotation.
    // (Helpers are hoisted to script scope instead of being re-created on
    // every scan tick.)
    function average(a) {
        if (a.length === 0) {
            return 0;
        }
        var sum = 0;
        for (var i = 0; i < a.length; i++) {
            sum += a[i];
        }
        return Math.round(sum / a.length);
    }

    // Clamp a raw loudness sample to MAX_AUDIO_THRESHOLD.
    function audioClamp(input) {
        return (input > MAX_AUDIO_THRESHOLD) ? MAX_AUDIO_THRESHOLD : input;
    }

    // Mean of `a` after rescaling each sample from [0, MAX_AUDIO_THRESHOLD]
    // to a 0-100 percentage.
    function normalizedAverage(a) {
        return average(a.map(function (v) {
            return Math.round((100 / MAX_AUDIO_THRESHOLD) * v);
        }));
    }

    function scanEngine() {
        var avatarLoudnessPool = [];
        AvatarList.getAvatarIdentifiers().forEach(function (id) {
            var avatar = AvatarList.getAvatar(id);
            avatarLoudnessPool.push(audioClamp(Math.round(avatar.audioLoudness)));
        });

        framePool.push(average(avatarLoudnessPool));
        if (framePool.length >= REFERENCE_FRAME_COUNT) {
            framePool.shift(); // keep the window bounded
        }

        var norm = normalizedAverage(framePool);

        // we have a range of 55 to -53 degrees for the needle
        var scaledDegrees = (norm / -.94) + 54.5; // shifting scale from 100 to 55 to -53 ish its more like -51 ;
        Entities.setAbsoluteJointRotationInObjectFrame(thisEntityID, 0, Quat.fromPitchYawRollDegrees(0, 0, scaledDegrees));
    }

    Script.setInterval(function () {
        scanEngine();
    }, SCAN_RATE);
});

View file

@ -0,0 +1,79 @@
//
// Created by Alan-Michael Moody on 4/17/2017
//
(function () {
    // Plastic loudness-meter entity script: every SCAN_RATE ms, sample all
    // avatars' audio loudness and stretch/recolor the child "bar" entity to
    // show the recent average loudness.
    var barID;

    this.preload = function (entityID) {
        // The bar is expected to be this meter's first child entity.
        var children = Entities.getChildrenIDs(entityID);
        var childZero = Entities.getEntityProperties(children[0]);
        barID = childZero.id;
    };

    var SCAN_RATE = 100; // ms between loudness samples
    var REFERENCE_FRAME_COUNT = 30; // size bound for the sliding sample window
    var MAX_AUDIO_THRESHOLD = 16000; // loudness ceiling; also the normalization scale

    var framePool = []; // sliding window of per-scan average loudness values

    // Rounded arithmetic mean; returns 0 for an empty array so an empty
    // avatar list cannot propagate NaN into the bar's size/color.
    // (Helpers are hoisted to script scope instead of being re-created on
    // every scan tick.)
    function average(a) {
        if (a.length === 0) {
            return 0;
        }
        var sum = 0;
        for (var i = 0; i < a.length; i++) {
            sum += a[i];
        }
        return Math.round(sum / a.length);
    }

    // Clamp a raw loudness sample to MAX_AUDIO_THRESHOLD.
    function audioClamp(input) {
        return (input > MAX_AUDIO_THRESHOLD) ? MAX_AUDIO_THRESHOLD : input;
    }

    // Mean of `a` after rescaling each sample from [0, MAX_AUDIO_THRESHOLD]
    // to a 0-100 percentage.
    function normalizedAverage(a) {
        return average(a.map(function (v) {
            return Math.round((100 / MAX_AUDIO_THRESHOLD) * v);
        }));
    }

    function scanEngine() {
        var avatarLoudnessPool = [];
        AvatarList.getAvatarIdentifiers().forEach(function (id) {
            var avatar = AvatarList.getAvatar(id);
            avatarLoudnessPool.push(audioClamp(Math.round(avatar.audioLoudness)));
        });

        framePool.push(average(avatarLoudnessPool));
        if (framePool.length >= REFERENCE_FRAME_COUNT) {
            framePool.shift(); // keep the window bounded
        }

        var norm = normalizedAverage(framePool);

        var barProperties = Entities.getEntityProperties(barID);
        var colorShift = 2.55 * norm; // shifting the scale to 0 - 255
        var xShift = norm / 52; // changing scale from 0-100 to 0-1.9 ish
        var normShift = xShift - 0.88; // shifting local displacement (-0.88)
        var halfShift = xShift / 2;
        Entities.editEntity(barID, {
            dimensions: {x: xShift, y: barProperties.dimensions.y, z: barProperties.dimensions.z},
            localPosition: {x: normShift - (halfShift), y: -0.0625, z: -0.015},
            color: {red: colorShift, green: barProperties.color.green, blue: barProperties.color.blue}
        });
    }

    Script.setInterval(function () {
        scanEngine();
    }, SCAN_RATE);
});

View file

@ -0,0 +1,92 @@
//
// Created by Alan-Michael Moody on 4/17/2017
//
(function () {
    // Text loudness-meter entity script: every SCAN_RATE ms, sample all
    // avatars' audio loudness, update the child text readout, and
    // stretch/recolor the child "bar" entity.
    var barID, textID;

    this.preload = function (entityID) {
        // Two children are expected: the bar and the text readout. Their
        // userData "name" field tells them apart (order is not guaranteed).
        var children = Entities.getChildrenIDs(entityID);
        var childZero = Entities.getEntityProperties(children[0]);
        var childOne = Entities.getEntityProperties(children[1]);
        // Reuse the properties already fetched instead of a third
        // getEntityProperties call on children[0].
        var childZeroUserData = JSON.parse(childZero.userData);
        if (childZeroUserData.name === "bar") {
            barID = childZero.id;
            textID = childOne.id;
        } else {
            barID = childOne.id;
            textID = childZero.id;
        }
    };

    var SCAN_RATE = 100; // ms between loudness samples
    var REFERENCE_FRAME_COUNT = 30; // size bound for the sliding sample window
    var MAX_AUDIO_THRESHOLD = 16000; // loudness ceiling; also the normalization scale

    var framePool = []; // sliding window of per-scan average loudness values

    // Rounded arithmetic mean; returns 0 for an empty array so an empty
    // avatar list cannot propagate NaN into the bar/text updates.
    // (Helpers are hoisted to script scope instead of being re-created on
    // every scan tick.)
    function average(a) {
        if (a.length === 0) {
            return 0;
        }
        var sum = 0;
        for (var i = 0; i < a.length; i++) {
            sum += a[i];
        }
        return Math.round(sum / a.length);
    }

    // Clamp a raw loudness sample to MAX_AUDIO_THRESHOLD.
    function audioClamp(input) {
        return (input > MAX_AUDIO_THRESHOLD) ? MAX_AUDIO_THRESHOLD : input;
    }

    // Mean of `a` after rescaling each sample from [0, MAX_AUDIO_THRESHOLD]
    // to a 0-100 percentage.
    function normalizedAverage(a) {
        return average(a.map(function (v) {
            return Math.round((100 / MAX_AUDIO_THRESHOLD) * v);
        }));
    }

    function scanEngine() {
        var avatarLoudnessPool = [];
        AvatarList.getAvatarIdentifiers().forEach(function (id) {
            var avatar = AvatarList.getAvatar(id);
            avatarLoudnessPool.push(audioClamp(Math.round(avatar.audioLoudness)));
        });

        framePool.push(average(avatarLoudnessPool));
        if (framePool.length >= REFERENCE_FRAME_COUNT) {
            framePool.shift(); // keep the window bounded
        }

        var norm = normalizedAverage(framePool);

        Entities.editEntity(textID, {text: "Loudness: % " + norm});

        var barProperties = Entities.getEntityProperties(barID);
        var colorShift = 2.55 * norm; // shifting the scale to 0 - 255
        var xShift = norm / 100; // changing scale from 0-100 to 0-1
        var normShift = xShift - .5; // shifting scale form 0-1 to -.5 to .5
        var halfShift = xShift / 2;
        Entities.editEntity(barID, {
            dimensions: {x: xShift, y: barProperties.dimensions.y, z: barProperties.dimensions.z},
            localPosition: {x: normShift - (halfShift), y: 0, z: 0.1},
            color: {red: colorShift, green: barProperties.color.green, blue: barProperties.color.blue}
        });
    }

    Script.setInterval(function () {
        scanEngine();
    }, SCAN_RATE);
});

View file

@ -0,0 +1,24 @@
//
// Created by Alan-Michael Moody on 5/2/2017
//
'use strict';
(function () {
    // Spawn the applause-o-meter model one unit in front of the avatar,
    // raised 2 m, with its needle-driving entity script attached.
    var inFront = Vec3.sum(MyAvatar.position, Quat.getFront(MyAvatar.orientation));
    var standProperties = {
        type: 'Model',
        modelURL: 'https://binaryrelay.com/files/public-docs/hifi/meter/applauseOmeter.fbx',
        lifetime: '3600',
        script: 'https://binaryrelay.com/files/public-docs/hifi/meter/applauseOmeter.js',
        position: Vec3.sum(inFront, {x: 0, y: 2.0, z: 0})
    };
    Entities.addEntity(standProperties);
})();

View file

@ -0,0 +1,67 @@
//
// Created by Alan-Michael Moody on 4/17/2017
//
"use strict";
(function () { // BEGIN LOCAL_SCOPE
    // Spawn a basic loudness meter one unit in front of the avatar: a gray
    // background box carrying the meter script, with a black "bar" and a
    // text readout parented to it.
    var pos = Vec3.sum(MyAvatar.position, Quat.getFront(MyAvatar.orientation));
    var graph = {
        background: {
            type: "Box",
            dimensions: {x: 1, y: 1, z: .1},
            color: {
                red: 128,
                green: 128,
                blue: 128
            },
            lifetime: "3600",
            script: "https://binaryrelay.com/files/public-docs/hifi/meter/basic/meter.js",
            position: pos
        },
        bar: {
            type: "Box",
            parentID: "", // filled in below once the background exists
            userData: '{"name":"bar"}',
            dimensions: {x: .05, y: .25, z: .1},
            color: {
                red: 0,
                green: 0,
                blue: 0
            },
            lifetime: "3600",
            position: Vec3.sum(pos, {x: -0.495, y: 0, z: 0.1})
        },
        displayText: {
            type: "Text",
            parentID: "", // filled in below once the background exists
            userData: '{"name":"displayText"}',
            text: "Loudness: % ",
            textColor: {
                red: 0,
                green: 0,
                blue: 0
            },
            backgroundColor: {
                red: 128,
                green: 128,
                blue: 128
            },
            // NOTE(review): `visible` is a boolean property; 0.5 is truthy so
            // the text renders — confirm whether translucency was intended.
            visible: 0.5,
            dimensions: {x: 0.70, y: 0.15, z: 0.1},
            lifetime: "3600",
            position: Vec3.sum(pos, {x: 0, y: 0.4, z: 0.06})
        }
    };
    var background = Entities.addEntity(graph.background);
    graph.bar.parentID = background;
    graph.displayText.parentID = background;
    // Child IDs aren't needed afterwards (entities expire via lifetime), so
    // the previously-unused locals were dropped.
    Entities.addEntity(graph.bar);
    Entities.addEntity(graph.displayText);
})(); // END LOCAL_SCOPE

View file

@ -0,0 +1,43 @@
//
// Created by Alan-Michael Moody on 4/17/2017
//
"use strict";
(function () {
    // Create the plastic meter model in front of the avatar and parent its
    // black bar entity to it; the attached script animates the bar.
    var spawnPosition = Vec3.sum(MyAvatar.position, Quat.getFront(MyAvatar.orientation));
    var backgroundProperties = {
        type: "Model",
        modelURL: "https://binaryrelay.com/files/public-docs/hifi/meter/plastic/meter-plastic.fbx",
        color: {
            red: 128,
            green: 128,
            blue: 128
        },
        lifetime: "3600",
        script: "https://binaryrelay.com/files/public-docs/hifi/meter/plastic/meter.js",
        position: spawnPosition
    };
    var barProperties = {
        type: "Box",
        parentID: "",
        userData: '{"name":"bar"}',
        dimensions: {x: .05, y: .245, z: .07},
        color: {
            red: 0,
            green: 0,
            blue: 0
        },
        lifetime: "3600",
        position: Vec3.sum(spawnPosition, {x: -0.90, y: 0, z: -0.15})
    };
    barProperties.parentID = Entities.addEntity(backgroundProperties);
    Entities.addEntity(barProperties);
})();

View file

@ -0,0 +1,67 @@
//
// Created by Alan-Michael Moody on 4/17/2017
//
"use strict";
(function () {
var pos = Vec3.sum(MyAvatar.position, Quat.getFront(MyAvatar.orientation));
var graph = {
background: {
type: "Model",
modelURL: "https://binaryrelay.com/files/public-docs/hifi/meter/text-entity/meter-text-entity.fbx",
color: {
red: 128,
green: 128,
blue: 128
},
lifetime: "3600",
script: "https://binaryrelay.com/files/public-docs/hifi/meter/text-entity/meter.js",
position: pos
},
bar: {
type: "Box",
parentID: "",
userData: '{"name":"bar"}',
dimensions: {x: .05, y: .245, z: .07},
color: {
red: 0,
green: 0,
blue: 0
},
lifetime: "3600",
position: Vec3.sum(pos, {x: -0.88, y: 0, z: -0.15})
},
displayText: {
type: "Text",
parentID: "",
userData: '{"name":"displayText"}',
text: "Make Some Noise:",
textColor: {
red: 0,
green: 0,
blue: 0
},
backgroundColor: {
red: 255,
green: 255,
blue: 255
},
dimensions: {x: .82, y: 0.115, z: 0.15},
lifetime: "3600",
lineHeight: .08,
position: Vec3.sum(pos, {x: -0.2, y: 0.175, z: -0.035})
}
};
var background = Entities.addEntity(graph.background);
graph.bar.parentID = background;
graph.displayText.parentID = background;
var bar = Entities.addEntity(graph.bar);
var displayText = Entities.addEntity(graph.displayText);
})();

View file

@ -0,0 +1,42 @@
//
// Created by Alan-Michael Moody on 4/17/2017
//
"use strict";
(function () {
    // Create the wood meter model in front of the avatar and parent its
    // black bar entity to it; the attached script animates the bar.
    var spawnPosition = Vec3.sum(MyAvatar.position, Quat.getFront(MyAvatar.orientation));
    var backgroundProperties = {
        type: "Model",
        modelURL: "https://binaryrelay.com/files/public-docs/hifi/meter/wood/meter-wood.fbx",
        color: {
            red: 128,
            green: 128,
            blue: 128
        },
        lifetime: "3600",
        script: "https://binaryrelay.com/files/public-docs/hifi/meter/wood/meter.js",
        position: spawnPosition
    };
    var barProperties = {
        type: "Box",
        parentID: "",
        userData: '{"name":"bar"}',
        dimensions: {x: .05, y: .245, z: .07},
        color: {
            red: 0,
            green: 0,
            blue: 0
        },
        lifetime: "3600",
        position: Vec3.sum(spawnPosition, {x: -0.88, y: 0, z: -0.15})
    };
    barProperties.parentID = Entities.addEntity(backgroundProperties);
    Entities.addEntity(barProperties);
})();

View file

@ -0,0 +1,89 @@
//
// Created by Alan-Michael Moody on 4/17/2017
//
(function () {
    // Labeled loudness-meter entity script: every SCAN_RATE ms, sample all
    // avatars' audio loudness, append the percentage to the child text
    // entity's original label, and stretch/recolor the child "bar" entity.
    var barID, textID, originalText;

    this.preload = function (entityID) {
        // Two children are expected: the bar and the text label. Their
        // userData "name" field tells them apart (order is not guaranteed).
        var children = Entities.getChildrenIDs(entityID);
        var childZero = Entities.getEntityProperties(children[0]);
        var childOne = Entities.getEntityProperties(children[1]);
        // Reuse the properties already fetched instead of a third
        // getEntityProperties call on children[0].
        var childZeroUserData = JSON.parse(childZero.userData);
        if (childZeroUserData.name === "bar") {
            barID = childZero.id;
            textID = childOne.id;
            originalText = childOne.text; // fixed missing semicolon
        } else {
            barID = childOne.id;
            textID = childZero.id;
            originalText = childZero.text;
        }
    };

    var SCAN_RATE = 100; // ms between loudness samples
    var REFERENCE_FRAME_COUNT = 30; // size bound for the sliding sample window
    var MAX_AUDIO_THRESHOLD = 16000; // loudness ceiling; also the normalization scale

    var framePool = []; // sliding window of per-scan average loudness values

    // Rounded arithmetic mean; returns 0 for an empty array so an empty
    // avatar list cannot propagate NaN into the bar/text updates.
    // (Helpers are hoisted to script scope instead of being re-created on
    // every scan tick.)
    function average(a) {
        if (a.length === 0) {
            return 0;
        }
        var sum = 0;
        for (var i = 0; i < a.length; i++) {
            sum += a[i];
        }
        return Math.round(sum / a.length);
    }

    // Clamp a raw loudness sample to MAX_AUDIO_THRESHOLD.
    function audioClamp(input) {
        return (input > MAX_AUDIO_THRESHOLD) ? MAX_AUDIO_THRESHOLD : input;
    }

    // Mean of `a` after rescaling each sample from [0, MAX_AUDIO_THRESHOLD]
    // to a 0-100 percentage.
    function normalizedAverage(a) {
        return average(a.map(function (v) {
            return Math.round((100 / MAX_AUDIO_THRESHOLD) * v);
        }));
    }

    function scanEngine() {
        var avatarLoudnessPool = [];
        AvatarList.getAvatarIdentifiers().forEach(function (id) {
            var avatar = AvatarList.getAvatar(id);
            avatarLoudnessPool.push(audioClamp(Math.round(avatar.audioLoudness)));
        });

        framePool.push(average(avatarLoudnessPool));
        if (framePool.length >= REFERENCE_FRAME_COUNT) {
            framePool.shift(); // keep the window bounded
        }

        var norm = normalizedAverage(framePool);

        Entities.editEntity(textID, {text: originalText + " % " + norm});

        var barProperties = Entities.getEntityProperties(barID);
        var colorShift = 2.55 * norm; // shifting the scale to 0 - 255
        var xShift = norm / 52; // changing scale from 0-100 to 0-1.9 ish
        var normShift = xShift - 0.88; // shifting local displacement (-0.88)
        var halfShift = xShift / 2;
        Entities.editEntity(barID, {
            dimensions: {x: xShift, y: barProperties.dimensions.y, z: barProperties.dimensions.z},
            localPosition: {x: normShift - (halfShift), y: -0.0625, z: -0.015},
            color: {red: colorShift, green: barProperties.color.green, blue: barProperties.color.blue}
        });
    }

    Script.setInterval(function () {
        scanEngine();
    }, SCAN_RATE);
});

Binary file not shown.

After

Width:  |  Height:  |  Size: 399 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 410 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 651 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 528 KiB