Merge branch 'temp0' of https://github.com/samcake/hifi into temp1

Sam Gateau 2014-10-23 15:20:20 -07:00
commit 9f6454f570
56 changed files with 1295 additions and 131 deletions


@ -428,8 +428,8 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
}
int AudioMixer::prepareMixForListeningNode(Node* node) {
AvatarAudioStream* nodeAudioStream = ((AudioMixerClientData*) node->getLinkedData())->getAvatarAudioStream();
AudioMixerClientData* listenerNodeData = (AudioMixerClientData*)node->getLinkedData();
AvatarAudioStream* nodeAudioStream = static_cast<AudioMixerClientData*>(node->getLinkedData())->getAvatarAudioStream();
AudioMixerClientData* listenerNodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
// zero out the client mix for this node
memset(_preMixSamples, 0, sizeof(_preMixSamples));
@ -730,6 +730,33 @@ void AudioMixer::run() {
memcpy(dataAt, &sequence, sizeof(quint16));
dataAt += sizeof(quint16);
// Pack stream properties
bool inAZone = false;
for (int i = 0; i < _zoneReverbSettings.size(); ++i) {
AudioMixerClientData* data = static_cast<AudioMixerClientData*>(node->getLinkedData());
glm::vec3 streamPosition = data->getAvatarAudioStream()->getPosition();
if (_audioZones[_zoneReverbSettings[i].zone].contains(streamPosition)) {
bool hasReverb = true;
float reverbTime = _zoneReverbSettings[i].reverbTime;
float wetLevel = _zoneReverbSettings[i].wetLevel;
memcpy(dataAt, &hasReverb, sizeof(bool));
dataAt += sizeof(bool);
memcpy(dataAt, &reverbTime, sizeof(float));
dataAt += sizeof(float);
memcpy(dataAt, &wetLevel, sizeof(float));
dataAt += sizeof(float);
inAZone = true;
break;
}
}
if (!inAZone) {
bool hasReverb = false;
memcpy(dataAt, &hasReverb, sizeof(bool));
dataAt += sizeof(bool);
}
// pack mixed audio samples
memcpy(dataAt, _mixSamples, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
dataAt += NETWORK_BUFFER_LENGTH_BYTES_STEREO;
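
The mixed-audio packet written above now carries a small reverb header between the sequence number and the samples. A rough sketch of the resulting payload layout, using a hypothetical helper struct (the struct and field names are illustrative, not part of the commit; the byte order simply follows the memcpy calls above):

    // Payload of a mixed-audio packet as packed above:
    //   quint16 sequence
    //   bool    hasReverb
    //   float   reverbTime   (present on the wire only when hasReverb is true)
    //   float   wetLevel     (present on the wire only when hasReverb is true)
    //   int16_t samples[NETWORK_BUFFER_LENGTH_BYTES_STEREO / sizeof(int16_t)]
    struct MixedAudioReverbHeader {   // hypothetical, for illustration only
        quint16 sequence;
        bool hasReverb;
        float reverbTime;             // seconds
        float wetLevel;               // dB
    };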
@ -1033,6 +1060,38 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
}
}
}
const QString REVERB = "reverb";
if (audioEnvGroupObject[REVERB].isArray()) {
const QJsonArray& reverb = audioEnvGroupObject[REVERB].toArray();
const QString ZONE = "zone";
const QString REVERB_TIME = "reverb_time";
const QString WET_LEVEL = "wet_level";
for (int i = 0; i < reverb.count(); ++i) {
QJsonObject reverbObject = reverb[i].toObject();
if (reverbObject.contains(ZONE) &&
reverbObject.contains(REVERB_TIME) &&
reverbObject.contains(WET_LEVEL)) {
bool okReverbTime, okWetLevel;
QString zone = reverbObject.value(ZONE).toString();
float reverbTime = reverbObject.value(REVERB_TIME).toString().toFloat(&okReverbTime);
float wetLevel = reverbObject.value(WET_LEVEL).toString().toFloat(&okWetLevel);
if (okReverbTime && okWetLevel && _audioZones.contains(zone)) {
ReverbSettings settings;
settings.zone = zone;
settings.reverbTime = reverbTime;
settings.wetLevel = wetLevel;
_zoneReverbSettings.push_back(settings);
qDebug() << "Added Reverb:" << zone << reverbTime << wetLevel;
}
}
}
}
}
}
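
For reference, a rough sketch of the settings fragment this parser expects, built programmatically (the zone name and values are illustrative; note that reverb_time and wet_level arrive as strings and are converted with toFloat()):

    #include <QJsonArray>
    #include <QJsonObject>

    // Hypothetical input for parseSettingsObject(); "Room1" must match a zone
    // already present in _audioZones for the entry to be accepted.
    QJsonObject reverbEntry;
    reverbEntry["zone"] = QStringLiteral("Room1");
    reverbEntry["reverb_time"] = QStringLiteral("4.0");   // seconds, as a string
    reverbEntry["wet_level"] = QStringLiteral("-6.0");    // dB, as a string
    QJsonArray reverbArray;
    reverbArray.append(reverbEntry);
    QJsonObject audioEnvGroupObject;
    audioEnvGroupObject["reverb"] = reverbArray;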


@ -82,7 +82,13 @@ private:
float coefficient;
};
QVector<ZonesSettings> _zonesSettings;
struct ReverbSettings {
QString zone;
float reverbTime;
float wetLevel;
};
QVector<ReverbSettings> _zoneReverbSettings;
static InboundAudioStream::Settings _streamSettings;
static bool _printStreamStats;


@ -237,8 +237,8 @@ void MetavoxelSession::update() {
// go back to the beginning with the current packet and note that there's a delta pending
_sequencer.getOutputStream().getUnderlying().device()->seek(start);
MetavoxelDeltaPendingMessage msg = { ++_reliableDeltaID };
out << QVariant::fromValue(msg);
MetavoxelDeltaPendingMessage msg = { ++_reliableDeltaID, sendRecord->getPacketNumber(), _lodPacketNumber };
out << (_reliableDeltaMessage = QVariant::fromValue(msg));
_sequencer.endPacket();
} else {
@ -254,8 +254,9 @@ void MetavoxelSession::handleMessage(const QVariant& message, Bitstream& in) {
}
PacketRecord* MetavoxelSession::maybeCreateSendRecord() const {
return _reliableDeltaChannel ? new PacketRecord(_reliableDeltaLOD, _reliableDeltaData) :
new PacketRecord(_lod, _sender->getData());
return _reliableDeltaChannel ? new PacketRecord(_sequencer.getOutgoingPacketNumber(),
_reliableDeltaLOD, _reliableDeltaData) : new PacketRecord(_sequencer.getOutgoingPacketNumber(),
_lod, _sender->getData());
}
void MetavoxelSession::handleMessage(const QVariant& message) {
@ -263,7 +264,8 @@ void MetavoxelSession::handleMessage(const QVariant& message) {
if (userType == ClientStateMessage::Type) {
ClientStateMessage state = message.value<ClientStateMessage>();
_lod = state.lod;
_lodPacketNumber = _sequencer.getIncomingPacketNumber();
} else if (userType == MetavoxelEditMessage::Type) {
QMetaObject::invokeMethod(_sender->getServer(), "applyEdit", Q_ARG(const MetavoxelEditMessage&,
message.value<MetavoxelEditMessage>()));
@ -290,8 +292,7 @@ void MetavoxelSession::sendPacketGroup(int alreadySent) {
for (int i = 0; i < additionalPackets; i++) {
Bitstream& out = _sequencer.startPacket();
if (_reliableDeltaChannel) {
MetavoxelDeltaPendingMessage msg = { _reliableDeltaID };
out << QVariant::fromValue(msg);
out << _reliableDeltaMessage;
} else {
out << QVariant();
}


@ -127,6 +127,7 @@ private:
MetavoxelSender* _sender;
MetavoxelLOD _lod;
int _lodPacketNumber;
ReliableChannel* _reliableDeltaChannel;
int _reliableDeltaReceivedOffset;
@ -134,6 +135,7 @@ private:
MetavoxelLOD _reliableDeltaLOD;
Bitstream::WriteMappings _reliableDeltaWriteMappings;
int _reliableDeltaID;
QVariant _reliableDeltaMessage;
};
/// Handles persistence in a separate thread.


@ -0,0 +1,39 @@
# FindGVerb.cmake
#
# Try to find the Gverb library.
#
# You must provide a GVERB_ROOT_DIR which contains src and include directories
#
# Once done this will define
#
# GVERB_FOUND - system found Gverb
# GVERB_INCLUDE_DIRS - the Gverb include directory
#
# Copyright 2014 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
if (GVERB_INCLUDE_DIRS)
# in cache already
set(GVERB_FOUND TRUE)
else (GVERB_INCLUDE_DIRS)
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("gverb")
find_path(GVERB_INCLUDE_DIRS gverb.h PATH_SUFFIXES include HINTS ${GVERB_SEARCH_DIRS})
find_path(GVERB_SRC_DIRS gverb.c PATH_SUFFIXES src HINTS ${GVERB_SEARCH_DIRS})
if (GVERB_INCLUDE_DIRS)
set(GVERB_FOUND TRUE)
endif (GVERB_INCLUDE_DIRS)
if (GVERB_FOUND)
message(STATUS "Found Gverb: ${GVERB_INCLUDE_DIRS}")
else (GVERB_FOUND)
message(FATAL_ERROR "Could NOT find Gverb. Read ./interface/external/gverb/readme.txt")
endif (GVERB_FOUND)
endif(GVERB_INCLUDE_DIRS)


@ -154,6 +154,33 @@
"placeholder": "0.18"
}
]
},
{
"name": "reverb",
"type": "table",
"label": "Reverb Settings",
"help": "In this table you can set custom reverb values for each audio zones",
"numbered": true,
"columns": [
{
"name": "zone",
"label": "Zone",
"can_set": true,
"placeholder": "Audio_Zone"
},
{
"name": "reverb_time",
"label": "Reverb Decay Time",
"can_set": true,
"placeholder": "(in sec)"
},
{
"name": "wet_level",
"label": "Wet Level",
"can_set": true,
"placeholder": "(in db)"
}
]
}
]
},


@ -529,7 +529,7 @@ function deleteTableRow(delete_glyphicon) {
row.html("<input type='hidden' class='form-control' name='"
+ row.attr('name') + "' data-changed='true' value=''>");
} else {
if (table.find('.' + Settings.DATA_ROW_CLASS).length) {
if (table.find('.' + Settings.DATA_ROW_CLASS).length > 1) {
updateDataChangedForSiblingRows(row)
// this isn't the last row - we can just remove it

examples/audioReverbOn.js (new file, 39 lines)

@ -0,0 +1,39 @@
//
// audioReverbOn.js
// examples
//
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// http://wiki.audacityteam.org/wiki/GVerb#Instant_reverb_settings
var audioOptions = new AudioEffectOptions({
// Square Meters
maxRoomSize: 50,
roomSize: 50,
// Seconds
reverbTime: 4,
// Between 0 - 1
damping: 0.50,
inputBandwidth: 0.75,
// dB
earlyLevel: -22,
tailLevel: -28,
dryLevel: 0,
wetLevel: 6
});
AudioDevice.setReverbOptions(audioOptions);
AudioDevice.setReverb(true);
print("Reverb is now on with the updated options.");
function scriptEnding() {
AudioDevice.setReverb(false);
print("Reberb is now off.");
}
Script.scriptEnding.connect(scriptEnding);


@ -64,7 +64,6 @@ function activateWarp() {
var TRIGGER_PULLBACK_DISTANCE = 0.04;
var WATCH_AVATAR_DISTANCE = 1.5;
var MAX_WARP_YAW = 40.0;
var MAX_PULLBACK_YAW = 5.0;
var sound = new Sound("http://public.highfidelity.io/sounds/Footsteps/FootstepW2Right-12db.wav");
@ -72,7 +71,7 @@ function playSound() {
var options = new AudioInjectionOptions();
var position = MyAvatar.position;
options.position = position;
options.volume = 0.5;
options.volume = 1.0;
Audio.playSound(sound, options);
}
@ -89,7 +88,7 @@ function updateWarp() {
var deltaPitch = MyAvatar.getHeadFinalPitch() - headStartFinalPitch;
deltaYaw = MyAvatar.getHeadFinalYaw() - headStartYaw;
willMove = (!watchAvatar && (Math.abs(deltaYaw) < MAX_WARP_YAW) && (keyDownTime > WARP_START_TIME));
willMove = (!watchAvatar && (keyDownTime > WARP_START_TIME));
if (willMove) {
//var distance = Math.pow((deltaPitch - WARP_PITCH_DEAD_ZONE) * WARP_SENSITIVITY, 2.0);


@ -78,6 +78,8 @@ SelectionManager = (function() {
that.worldDimensions = null;
that.worldPosition = null;
} else if (that.selections.length == 1) {
SelectionDisplay.setSpaceMode(SPACE_LOCAL);
var properties = Entities.getEntityProperties(that.selections[0]);
that.localDimensions = properties.dimensions;
that.localPosition = properties.position;
@ -622,8 +624,8 @@ SelectionDisplay = (function () {
}
var diagonal = (Vec3.length(properties.dimensions) / 2) * 1.1;
var halfDimensions = Vec3.multiply(properties.dimensions, 0.5);
var diagonal = (Vec3.length(selectionManager.worldDimensions) / 2) * 1.1;
var halfDimensions = Vec3.multiply(selectionManager.worldDimensions, 0.5);
innerRadius = diagonal;
outerRadius = diagonal * 1.15;
var innerActive = false;
@ -843,7 +845,7 @@ SelectionDisplay = (function () {
Overlays.editOverlay(grabberMoveUp, { visible: translateHandlesVisible, position: { x: boundsCenter.x, y: top + grabberMoveUpOffset, z: boundsCenter.z } });
that.updateHandles(entityID);
that.updateHandles();
Overlays.editOverlay(baseOfEntityProjectionOverlay,
@ -924,18 +926,17 @@ SelectionDisplay = (function () {
entitySelected = false;
};
that.updateHandles = function(entityID) {
if (!entitySelected) {
that.updateHandles = function() {
// print("Updating handles");
if (SelectionManager.selections.length == 0) {
that.setOverlaysVisible(false);
return;
}
var properties = Entities.getEntityProperties(entityID);
var rotation, dimensions, position;
if (spaceMode == SPACE_LOCAL) {
rotation = properties.rotation;
rotation = SelectionManager.localRotation;
dimensions = SelectionManager.localDimensions;
position = SelectionManager.localPosition;
} else {
@ -1095,6 +1096,44 @@ SelectionDisplay = (function () {
entitySelected = false;
};
function applyEntityProperties(data) {
for (var i = 0; i < data.length; i++) {
var entityID = data[i].entityID;
var properties = data[i].properties;
Entities.editEntity(entityID, properties);
}
selectionManager._update();
};
// For currently selected entities, push a command to the UndoStack that uses the current entity properties for the
// redo command, and the saved properties for the undo command.
function pushCommandForSelections() {
var undoData = [];
var redoData = [];
for (var i = 0; i < SelectionManager.selections.length; i++) {
var entityID = SelectionManager.selections[i];
var initialProperties = SelectionManager.savedProperties[entityID.id];
var currentProperties = Entities.getEntityProperties(entityID);
undoData.push({
entityID: entityID,
properties: {
position: initialProperties.position,
rotation: initialProperties.rotation,
dimensions: initialProperties.dimensions,
},
});
redoData.push({
entityID: entityID,
properties: {
position: currentProperties.position,
rotation: currentProperties.rotation,
dimensions: currentProperties.dimensions,
},
});
}
UndoStack.pushCommand(applyEntityProperties, undoData, applyEntityProperties, redoData);
}
var lastXZPick = null;
var translateXZTool = {
mode: 'TRANSLATE_XZ',
@ -1114,6 +1153,8 @@ SelectionDisplay = (function () {
var initialProperties = SelectionManager.savedProperties[entityID.id];
Entities.editEntity(entityID, initialProperties);
}
} else {
pushCommandForSelections();
}
},
onMove: function(event) {
@ -1172,6 +1213,8 @@ SelectionDisplay = (function () {
var initialProperties = SelectionManager.savedProperties[entityID.id];
Entities.editEntity(entityID, initialProperties);
}
} else {
pushCommandForSelections();
}
},
onMove: function(event) {
@ -1334,6 +1377,8 @@ SelectionDisplay = (function () {
var initialProperties = SelectionManager.savedProperties[entityID.id];
Entities.editEntity(entityID, initialProperties);
}
} else {
pushCommandForSelections();
}
};
@ -1496,6 +1541,8 @@ SelectionDisplay = (function () {
var initialProperties = SelectionManager.savedProperties[entityID.id];
Entities.editEntity(entityID, initialProperties);
}
} else {
pushCommandForSelections();
}
},
onMove: function(event) {
@ -1602,6 +1649,8 @@ SelectionDisplay = (function () {
var initialProperties = SelectionManager.savedProperties[entityID.id];
Entities.editEntity(entityID, initialProperties);
}
} else {
pushCommandForSelections();
}
},
onMove: function(event) {
@ -1706,6 +1755,8 @@ SelectionDisplay = (function () {
var initialProperties = SelectionManager.savedProperties[entityID.id];
Entities.editEntity(entityID, initialProperties);
}
} else {
pushCommandForSelections();
}
},
onMove: function(event) {


@ -8,4 +8,4 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
HIFI_PUBLIC_BUCKET = "https://s3.amazonaws.com/hifi-public/";
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";


@ -35,7 +35,7 @@ var entityPropertyDialogBox = EntityPropertyDialogBox;
Script.include("libraries/entityCameraTool.js");
var entityCameraTool = new EntityCameraTool();
selectionManager.setEventListener(selectionDisplay.updateHandles());
selectionManager.setEventListener(selectionDisplay.updateHandles);
var windowDimensions = Controller.getViewportDimensions();
var toolIconUrl = HIFI_PUBLIC_BUCKET + "images/tools/";
@ -608,9 +608,12 @@ function handeMenuEvent(menuItem) {
} else if (menuItem == "Delete") {
if (entitySelected) {
print(" Delete Entity.... selectedEntityID="+ selectedEntityID);
Entities.deleteEntity(selectedEntityID);
for (var i = 0; i < selectionManager.selections.length; i++) {
Entities.deleteEntity(selectionManager.selections[i]);
}
selectionDisplay.unselect(selectedEntityID);
entitySelected = false;
selectionManager.clearSelections();
} else {
print(" Delete Entity.... not holding...");
}
@ -618,7 +621,7 @@ function handeMenuEvent(menuItem) {
// good place to put the properties dialog
editModelID = -1;
if (entitySelected) {
if (selectionManager.selections.length == 1) {
print(" Edit Properties.... selectedEntityID="+ selectedEntityID);
editModelID = selectedEntityID;
} else {
@ -653,7 +656,7 @@ Controller.keyReleaseEvent.connect(function (event) {
if (event.text == "`") {
handeMenuEvent("Edit Properties...");
}
if (event.text == "BACKSPACE") {
if (event.text == "BACKSPACE" || event.text == "DELETE") {
handeMenuEvent("Delete");
} else if (event.text == "TAB") {
selectionDisplay.toggleSpaceMode();


@ -2,7 +2,7 @@ set(TARGET_NAME interface)
project(${TARGET_NAME})
# set a default root dir for each of our optional externals if it was not passed
set(OPTIONAL_EXTERNALS "Faceshift" "LibOVR" "PrioVR" "Sixense" "Visage" "LeapMotion" "RtMidi" "Qxmpp" "SDL2")
set(OPTIONAL_EXTERNALS "Faceshift" "LibOVR" "PrioVR" "Sixense" "Visage" "LeapMotion" "RtMidi" "Qxmpp" "SDL2" "Gverb")
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
string(TOUPPER ${EXTERNAL} ${EXTERNAL}_UPPERCASE)
if (NOT ${${EXTERNAL}_UPPERCASE}_ROOT_DIR)
@ -14,6 +14,10 @@ endforeach()
find_package(Qt5LinguistTools REQUIRED)
find_package(Qt5LinguistToolsMacros)
# As Gverb is currently the only reverb library, it's required.
find_package(Gverb REQUIRED)
if (DEFINED ENV{JOB_ID})
set(BUILD_SEQ $ENV{JOB_ID})
else ()
@ -166,6 +170,13 @@ if (QXMPP_FOUND AND NOT DISABLE_QXMPP AND WIN32)
add_definitions(-DQXMPP_STATIC)
endif ()
if (GVERB_FOUND)
file(GLOB GVERB_SRCS ${GVERB_SRC_DIRS}/*.c)
include_directories(${GVERB_INCLUDE_DIRS})
add_library(gverb STATIC ${GVERB_SRCS})
target_link_libraries(${TARGET_NAME} gverb)
endif (GVERB_FOUND)
# include headers for interface and InterfaceConfig.
include_directories("${PROJECT_SOURCE_DIR}/src" "${PROJECT_BINARY_DIR}/includes")

interface/external/gverb/readme.txt (new vendored file, 15 lines)

@ -0,0 +1,15 @@
Instructions for adding the Gverb library to Interface
(This is a required library)
Clément Brisset, October 22nd, 2014
1. Go to https://github.com/highfidelity/gverb
Or download the sources directly via this link:
https://github.com/highfidelity/gverb/archive/master.zip
2. Extract the archive
3. Place the directories “include” and “src” in interface/external/gverb
(Normally next to this readme)
4. Clear your build directory, run cmake, build and you should be all set.


@ -176,6 +176,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_nodeBoundsDisplay(this),
_previousScriptLocation(),
_applicationOverlay(),
_undoStack(),
_undoStackScriptingInterface(&_undoStack),
_runningScriptsWidget(NULL),
_runningScriptsWidgetWasVisible(false),
_trayIcon(new QSystemTrayIcon(_window)),
@ -3791,8 +3793,9 @@ ScriptEngine* Application::loadScript(const QString& scriptFilename, bool isUser
// AvatarManager has some custom types
AvatarManager::registerMetaTypes(scriptEngine);
// hook our avatar object into this script engine
// hook our avatar and avatar hash map object into this script engine
scriptEngine->setAvatarData(_myAvatar, "MyAvatar"); // leave it as a MyAvatar class to expose thrust features
scriptEngine->setAvatarHashMap(&_avatarManager, "AvatarList");
CameraScriptableObject* cameraScriptable = new CameraScriptableObject(&_myCamera, &_viewFrustum);
scriptEngine->registerGlobalObject("Camera", cameraScriptable);
@ -3835,6 +3838,8 @@ ScriptEngine* Application::loadScript(const QString& scriptFilename, bool isUser
scriptEngine->registerGlobalObject("Joysticks", &JoystickScriptingInterface::getInstance());
qScriptRegisterMetaType(scriptEngine, joystickToScriptValue, joystickFromScriptValue);
scriptEngine->registerGlobalObject("UndoStack", &_undoStackScriptingInterface);
#ifdef HAVE_RTMIDI
scriptEngine->registerGlobalObject("MIDI", &MIDIManager::getInstance());
#endif


@ -91,6 +91,9 @@
#include "voxels/VoxelSystem.h"
#include "UndoStackScriptingInterface.h"
class QAction;
class QActionGroup;
class QGLWidget;
@ -451,6 +454,7 @@ private:
int _numChangedSettings;
QUndoStack _undoStack;
UndoStackScriptingInterface _undoStackScriptingInterface;
glm::vec3 _gravity;


@ -92,6 +92,8 @@ Audio::Audio(QObject* parent) :
_collisionSoundDuration(0.0f),
_proceduralEffectSample(0),
_muted(false),
_reverb(false),
_reverbOptions(&_scriptReverbOptions),
_processSpatialAudio(false),
_spatialAudioStart(0),
_spatialAudioFinish(0),
@ -123,11 +125,14 @@ Audio::Audio(QObject* parent) :
memset(_localProceduralSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL);
// Create the noise sample array
_noiseSampleFrames = new float[NUMBER_OF_NOISE_SAMPLE_FRAMES];
connect(&_receivedAudioStream, &MixedProcessedAudioStream::addedSilence, this, &Audio::addStereoSilenceToScope, Qt::DirectConnection);
connect(&_receivedAudioStream, &MixedProcessedAudioStream::addedLastFrameRepeatedWithFade, this, &Audio::addLastFrameRepeatedWithFadeToScope, Qt::DirectConnection);
connect(&_receivedAudioStream, &MixedProcessedAudioStream::addedStereoSamples, this, &Audio::addStereoSamplesToScope, Qt::DirectConnection);
connect(&_receivedAudioStream, &MixedProcessedAudioStream::processSamples, this, &Audio::processReceivedSamples, Qt::DirectConnection);
// Initialize GVerb
initGverb();
}
void Audio::init(QGLWidget *parent) {
@ -489,6 +494,69 @@ bool Audio::switchOutputToAudioDevice(const QString& outputDeviceName) {
return switchOutputToAudioDevice(getNamedAudioDeviceForMode(QAudio::AudioOutput, outputDeviceName));
}
void Audio::initGverb() {
// Initialize a new gverb instance
_gverb = gverb_new(_outputFormat.sampleRate(), _reverbOptions->getMaxRoomSize(), _reverbOptions->getRoomSize(),
_reverbOptions->getReverbTime(), _reverbOptions->getDamping(), _reverbOptions->getSpread(),
_reverbOptions->getInputBandwidth(), _reverbOptions->getEarlyLevel(),
_reverbOptions->getTailLevel());
// Configure the instance (these functions are not super well named - they actually set several internal variables)
gverb_set_roomsize(_gverb, _reverbOptions->getRoomSize());
gverb_set_revtime(_gverb, _reverbOptions->getReverbTime());
gverb_set_damping(_gverb, _reverbOptions->getDamping());
gverb_set_inputbandwidth(_gverb, _reverbOptions->getInputBandwidth());
gverb_set_earlylevel(_gverb, DB_CO(_reverbOptions->getEarlyLevel()));
gverb_set_taillevel(_gverb, DB_CO(_reverbOptions->getTailLevel()));
}
void Audio::setReverbOptions(const AudioEffectOptions* options) {
// Save the new options
_scriptReverbOptions.setMaxRoomSize(options->getMaxRoomSize());
_scriptReverbOptions.setRoomSize(options->getRoomSize());
_scriptReverbOptions.setReverbTime(options->getReverbTime());
_scriptReverbOptions.setDamping(options->getDamping());
_scriptReverbOptions.setSpread(options->getSpread());
_scriptReverbOptions.setInputBandwidth(options->getInputBandwidth());
_scriptReverbOptions.setEarlyLevel(options->getEarlyLevel());
_scriptReverbOptions.setTailLevel(options->getTailLevel());
_scriptReverbOptions.setDryLevel(options->getDryLevel());
_scriptReverbOptions.setWetLevel(options->getWetLevel());
if (_reverbOptions == &_scriptReverbOptions) {
// Apply them to the reverb instance(s)
initGverb();
}
}
void Audio::addReverb(int16_t* samplesData, int numSamples, QAudioFormat& audioFormat) {
float dryFraction = DB_CO(_reverbOptions->getDryLevel());
float wetFraction = DB_CO(_reverbOptions->getWetLevel());
float lValue,rValue;
for (int sample = 0; sample < numSamples; sample += audioFormat.channelCount()) {
// Run GVerb
float value = (float)samplesData[sample];
gverb_do(_gverb, value, &lValue, &rValue);
// Mix, accounting for clipping, the left and right channels. Ignore the rest.
for (unsigned int j = sample; j < sample + audioFormat.channelCount(); j++) {
if (j == sample) {
// left channel
int lResult = glm::clamp((int)(samplesData[j] * dryFraction + lValue * wetFraction), -32768, 32767);
samplesData[j] = (int16_t)lResult;
} else if (j == (sample + 1)) {
// right channel
int rResult = glm::clamp((int)(samplesData[j] * dryFraction + rValue * wetFraction), -32768, 32767);
samplesData[j] = (int16_t)rResult;
} else {
// ignore channels above 2
}
}
}
}
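
DB_CO above converts a level in decibels into a linear gain before the dry/wet mix. A sketch of that conversion, assuming the conventional gverb macro definition (the macro itself is not shown in this diff):

    #include <cmath>

    // Assumed equivalent of gverb's DB_CO(x); the real header may clamp very low
    // levels to zero, so treat this as illustrative only.
    static inline float dbToCoefficient(float db) {
        return powf(10.0f, db * 0.05f);   // 0 dB -> 1.0, -6 dB -> ~0.5, +6 dB -> ~2.0
    }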
void Audio::handleAudioInput() {
static char audioDataPacket[MAX_PACKET_SIZE];
@ -720,7 +788,6 @@ void Audio::handleAudioInput() {
NodeList* nodeList = NodeList::getInstance();
SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
if (_recorder && _recorder.data()->isRecording()) {
_recorder.data()->record(reinterpret_cast<char*>(networkAudioSamples), numNetworkBytes);
}
@ -840,12 +907,10 @@ void Audio::addLastFrameRepeatedWithFadeToScope(int samplesPerChannel) {
}
void Audio::processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer) {
const int numNetworkOutputSamples = inputBuffer.size() / sizeof(int16_t);
const int numDeviceOutputSamples = numNetworkOutputSamples * (_outputFormat.sampleRate() * _outputFormat.channelCount())
/ (_desiredOutputFormat.sampleRate() * _desiredOutputFormat.channelCount());
outputBuffer.resize(numDeviceOutputSamples * sizeof(int16_t));
const int16_t* receivedSamples;
@ -884,10 +949,37 @@ void Audio::processReceivedSamples(const QByteArray& inputBuffer, QByteArray& ou
numNetworkOutputSamples,
numDeviceOutputSamples,
_desiredOutputFormat, _outputFormat);
if(_reverb || _receivedAudioStream.hasReverb()) {
bool reverbChanged = false;
if (_receivedAudioStream.hasReverb()) {
if (_zoneReverbOptions.getReverbTime() != _receivedAudioStream.getRevebTime()) {
_zoneReverbOptions.setReverbTime(_receivedAudioStream.getRevebTime());
reverbChanged = true;
}
if (_zoneReverbOptions.getWetLevel() != _receivedAudioStream.getWetLevel()) {
_zoneReverbOptions.setWetLevel(_receivedAudioStream.getWetLevel());
reverbChanged = true;
}
if (_reverbOptions != &_zoneReverbOptions) {
_reverbOptions = &_zoneReverbOptions;
reverbChanged = true;
}
} else if (_reverbOptions != &_scriptReverbOptions) {
_reverbOptions = &_scriptReverbOptions;
reverbChanged = true;
}
if (reverbChanged) {
initGverb();
}
addReverb((int16_t*)outputBuffer.data(), numDeviceOutputSamples, _outputFormat);
}
}
void Audio::addReceivedAudioToStream(const QByteArray& audioByteArray) {
if (_audioOutput) {
// Audio output must exist and be correctly set up if we're going to process received audio
_receivedAudioStream.parseData(audioByteArray);


@ -43,6 +43,14 @@
#include <StdDev.h>
#include "MixedProcessedAudioStream.h"
#include "AudioEffectOptions.h"
#include <AudioRingBuffer.h>
#include <StdDev.h>
extern "C" {
#include <gverb.h>
#include <gverbdsp.h>
}
static const int NUM_AUDIO_CHANNELS = 2;
@ -159,6 +167,8 @@ public slots:
float getInputVolume() const { return (_audioInput) ? _audioInput->volume() : 0.0f; }
void setInputVolume(float volume) { if (_audioInput) _audioInput->setVolume(volume); }
void setReverb(bool reverb) { _reverb = reverb; }
void setReverbOptions(const AudioEffectOptions* options);
const AudioStreamStats& getAudioMixerAvatarStreamAudioStats() const { return _audioMixerAvatarStreamAudioStats; }
const QHash<QUuid, AudioStreamStats>& getAudioMixerInjectedStreamAudioStatsMap() const { return _audioMixerInjectedStreamAudioStatsMap; }
@ -230,6 +240,11 @@ private:
int _proceduralEffectSample;
bool _muted;
bool _localEcho;
bool _reverb;
AudioEffectOptions _scriptReverbOptions;
AudioEffectOptions _zoneReverbOptions;
AudioEffectOptions* _reverbOptions;
ty_gverb *_gverb;
GLuint _micTextureId;
GLuint _muteTextureId;
GLuint _boxTextureId;
@ -249,6 +264,10 @@ private:
// 2. Mix with the audio input
void processProceduralAudio(int16_t* monoInput, int numSamples);
// Adds Reverb
void initGverb();
void addReverb(int16_t* samples, int numSamples, QAudioFormat& format);
// Add sounds that we want the user to not hear themselves, by adding on top of mic input signal
void addProceduralSounds(int16_t* monoInput, int numSamples);


@ -92,7 +92,7 @@ Hair::Hair(int strands,
}
}
const float SOUND_THRESHOLD = 50.0f;
const float SOUND_THRESHOLD = 40.0f;
void Hair::simulate(float deltaTime) {
deltaTime = glm::clamp(deltaTime, 0.0f, 1.0f / 30.0f);
@ -121,13 +121,13 @@ void Hair::simulate(float deltaTime) {
(_radius - glm::length(_hairPosition[vertexIndex]));
}
// Add random thing driven by loudness
float loudnessFactor = (_loudness > SOUND_THRESHOLD) ? logf(_loudness - SOUND_THRESHOLD) / 8000.0f : 0.0f;
float loudnessFactor = (_loudness > SOUND_THRESHOLD) ? logf(_loudness - SOUND_THRESHOLD) / 2000.0f : 0.0f;
const float QUIESCENT_LOUDNESS = 0.0f;
_hairPosition[vertexIndex] += randVector() * (QUIESCENT_LOUDNESS + loudnessFactor) * ((float)link / (float)_links);
// Add gravity
const float SCALE_GRAVITY = 0.10f;
const float SCALE_GRAVITY = 0.13f;
_hairPosition[vertexIndex] += _gravity * deltaTime * SCALE_GRAVITY;
// Add linear acceleration


@ -26,7 +26,7 @@ const int HAIR_CONSTRAINTS = 2;
const int DEFAULT_HAIR_STRANDS = 20;
const int DEFAULT_HAIR_LINKS = 10;
const float DEFAULT_HAIR_RADIUS = 0.15f;
const float DEFAULT_HAIR_LINK_LENGTH = 0.07f;
const float DEFAULT_HAIR_LINK_LENGTH = 0.06f;
const float DEFAULT_HAIR_THICKNESS = 0.025f;
class Hair {


@ -45,6 +45,7 @@
#include "ui/AttachmentsDialog.h"
#include "ui/InfoView.h"
#include "ui/MetavoxelEditor.h"
#include "ui/MetavoxelNetworkSimulator.h"
#include "ui/ModelsBrowser.h"
#include "ui/LoginDialog.h"
#include "ui/NodeBounds.h"
@ -433,6 +434,8 @@ Menu::Menu() :
QMenu* metavoxelOptionsMenu = developerMenu->addMenu("Metavoxels");
addCheckableActionToQMenuAndActionHash(metavoxelOptionsMenu, MenuOption::DisplayHermiteData, 0, false,
Application::getInstance()->getMetavoxels(), SLOT(refreshVoxelData()));
addActionToQMenuAndActionHash(metavoxelOptionsMenu, MenuOption::NetworkSimulator, 0, this,
SLOT(showMetavoxelNetworkSimulator()));
QMenu* handOptionsMenu = developerMenu->addMenu("Hands");
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlignForearmsWithWrists, 0, false);
@ -1388,6 +1391,13 @@ void Menu::showMetavoxelEditor() {
_MetavoxelEditor->raise();
}
void Menu::showMetavoxelNetworkSimulator() {
if (!_metavoxelNetworkSimulator) {
_metavoxelNetworkSimulator = new MetavoxelNetworkSimulator();
}
_metavoxelNetworkSimulator->raise();
}
void Menu::showScriptEditor() {
if(!_ScriptEditor || !_ScriptEditor->isVisible()) {
_ScriptEditor = new ScriptEditorWindow();

View file

@ -78,6 +78,7 @@ class AttachmentsDialog;
class BandwidthDialog;
class LodToolsDialog;
class MetavoxelEditor;
class MetavoxelNetworkSimulator;
class ChatWindow;
class OctreeStatsDialog;
class MenuItemProperties;
@ -218,6 +219,7 @@ private slots:
void cycleFrustumRenderMode();
void runTests();
void showMetavoxelEditor();
void showMetavoxelNetworkSimulator();
void showScriptEditor();
void showChat();
void toggleConsole();
@ -275,6 +277,7 @@ private:
FrustumDrawMode _frustumDrawMode;
ViewFrustumOffset _viewFrustumOffset;
QPointer<MetavoxelEditor> _MetavoxelEditor;
QPointer<MetavoxelNetworkSimulator> _metavoxelNetworkSimulator;
QPointer<ScriptEditorWindow> _ScriptEditor;
QPointer<ChatWindow> _chatWindow;
QDialog* _jsConsole;
@ -431,6 +434,7 @@ namespace MenuOption {
const QString MuteEnvironment = "Mute Environment";
const QString MyLocations = "My Locations...";
const QString NameLocation = "Name this location";
const QString NetworkSimulator = "Network Simulator...";
const QString NewVoxelCullingMode = "New Voxel Culling Mode";
const QString ObeyEnvironmentalGravity = "Obey Environmental Gravity";
const QString OctreeStats = "Voxel and Entity Statistics";


@ -39,6 +39,15 @@ REGISTER_META_OBJECT(StaticModelRenderer)
static int bufferPointVectorMetaTypeId = qRegisterMetaType<BufferPointVector>();
MetavoxelSystem::NetworkSimulation::NetworkSimulation(float dropRate, float repeatRate,
int minimumDelay, int maximumDelay, int bandwidthLimit) :
dropRate(dropRate),
repeatRate(repeatRate),
minimumDelay(minimumDelay),
maximumDelay(maximumDelay),
bandwidthLimit(bandwidthLimit) {
}
void MetavoxelSystem::init() {
MetavoxelClientManager::init();
DefaultMetavoxelRendererImplementation::init();
@ -61,6 +70,16 @@ MetavoxelLOD MetavoxelSystem::getLOD() {
return _lod;
}
void MetavoxelSystem::setNetworkSimulation(const NetworkSimulation& simulation) {
QWriteLocker locker(&_networkSimulationLock);
_networkSimulation = simulation;
}
MetavoxelSystem::NetworkSimulation MetavoxelSystem::getNetworkSimulation() {
QReadLocker locker(&_networkSimulationLock);
return _networkSimulation;
}
class SimulateVisitor : public MetavoxelVisitor {
public:
@ -678,6 +697,28 @@ void MetavoxelSystem::guideToAugmented(MetavoxelVisitor& visitor, bool render) {
}
}
Throttle::Throttle() :
_limit(INT_MAX),
_total(0) {
}
bool Throttle::shouldThrottle(int bytes) {
// clear expired buckets
qint64 now = QDateTime::currentMSecsSinceEpoch();
while (!_buckets.isEmpty() && now >= _buckets.first().first) {
_total -= _buckets.takeFirst().second;
}
// if possible, add the new bucket
if (_total + bytes > _limit) {
return true;
}
const int BUCKET_DURATION = 1000;
_buckets.append(Bucket(now + BUCKET_DURATION, bytes));
_total += bytes;
return false;
}
MetavoxelSystemClient::MetavoxelSystemClient(const SharedNodePointer& node, MetavoxelUpdater* updater) :
MetavoxelClient(node, updater) {
}
@ -692,10 +733,59 @@ MetavoxelData MetavoxelSystemClient::getAugmentedData() {
return _augmentedData;
}
class ReceiveDelayer : public QObject {
public:
ReceiveDelayer(const SharedNodePointer& node, const QByteArray& packet);
protected:
virtual void timerEvent(QTimerEvent* event);
private:
SharedNodePointer _node;
QByteArray _packet;
};
ReceiveDelayer::ReceiveDelayer(const SharedNodePointer& node, const QByteArray& packet) :
_node(node),
_packet(packet) {
}
void ReceiveDelayer::timerEvent(QTimerEvent* event) {
QMutexLocker locker(&_node->getMutex());
MetavoxelClient* client = static_cast<MetavoxelClient*>(_node->getLinkedData());
if (client) {
QMetaObject::invokeMethod(&client->getSequencer(), "receivedDatagram", Q_ARG(const QByteArray&, _packet));
}
deleteLater();
}
int MetavoxelSystemClient::parseData(const QByteArray& packet) {
// process through sequencer
QMetaObject::invokeMethod(&_sequencer, "receivedDatagram", Q_ARG(const QByteArray&, packet));
Application::getInstance()->getBandwidthMeter()->inputStream(BandwidthMeter::METAVOXELS).updateValue(packet.size());
MetavoxelSystem::NetworkSimulation simulation = Application::getInstance()->getMetavoxels()->getNetworkSimulation();
if (randFloat() < simulation.dropRate) {
return packet.size();
}
int count = (randFloat() < simulation.repeatRate) ? 2 : 1;
for (int i = 0; i < count; i++) {
if (simulation.bandwidthLimit > 0) {
_receiveThrottle.setLimit(simulation.bandwidthLimit);
if (_receiveThrottle.shouldThrottle(packet.size())) {
continue;
}
}
int delay = randIntInRange(simulation.minimumDelay, simulation.maximumDelay);
if (delay > 0) {
ReceiveDelayer* delayer = new ReceiveDelayer(_node, packet);
delayer->startTimer(delay);
} else {
QMetaObject::invokeMethod(&_sequencer, "receivedDatagram", Q_ARG(const QByteArray&, packet));
}
Application::getInstance()->getBandwidthMeter()->inputStream(BandwidthMeter::METAVOXELS).updateValue(packet.size());
}
return packet.size();
}
@ -774,9 +864,52 @@ void MetavoxelSystemClient::dataChanged(const MetavoxelData& oldData) {
QThreadPool::globalInstance()->start(new Augmenter(_node, _data, getAugmentedData(), _remoteDataLOD));
}
class SendDelayer : public QObject {
public:
SendDelayer(const SharedNodePointer& node, const QByteArray& data);
virtual void timerEvent(QTimerEvent* event);
private:
SharedNodePointer _node;
QByteArray _data;
};
SendDelayer::SendDelayer(const SharedNodePointer& node, const QByteArray& data) :
_node(node),
_data(data.constData(), data.size()) {
}
void SendDelayer::timerEvent(QTimerEvent* event) {
NodeList::getInstance()->writeDatagram(_data, _node);
deleteLater();
}
void MetavoxelSystemClient::sendDatagram(const QByteArray& data) {
NodeList::getInstance()->writeDatagram(data, _node);
Application::getInstance()->getBandwidthMeter()->outputStream(BandwidthMeter::METAVOXELS).updateValue(data.size());
MetavoxelSystem::NetworkSimulation simulation = Application::getInstance()->getMetavoxels()->getNetworkSimulation();
if (randFloat() < simulation.dropRate) {
return;
}
int count = (randFloat() < simulation.repeatRate) ? 2 : 1;
for (int i = 0; i < count; i++) {
if (simulation.bandwidthLimit > 0) {
_sendThrottle.setLimit(simulation.bandwidthLimit);
if (_sendThrottle.shouldThrottle(data.size())) {
continue;
}
}
int delay = randIntInRange(simulation.minimumDelay, simulation.maximumDelay);
if (delay > 0) {
SendDelayer* delayer = new SendDelayer(_node, data);
delayer->startTimer(delay);
} else {
NodeList::getInstance()->writeDatagram(data, _node);
}
Application::getInstance()->getBandwidthMeter()->outputStream(BandwidthMeter::METAVOXELS).updateValue(data.size());
}
}
BufferData::~BufferData() {


@ -31,12 +31,27 @@ class MetavoxelSystem : public MetavoxelClientManager {
public:
class NetworkSimulation {
public:
float dropRate;
float repeatRate;
int minimumDelay;
int maximumDelay;
int bandwidthLimit;
NetworkSimulation(float dropRate = 0.0f, float repeatRate = 0.0f, int minimumDelay = 0,
int maximumDelay = 0, int bandwidthLimit = 0);
};
virtual void init();
virtual MetavoxelLOD getLOD();
const Frustum& getFrustum() const { return _frustum; }
void setNetworkSimulation(const NetworkSimulation& simulation);
NetworkSimulation getNetworkSimulation();
const AttributePointer& getPointBufferAttribute() { return _pointBufferAttribute; }
const AttributePointer& getHeightfieldBufferAttribute() { return _heightfieldBufferAttribute; }
const AttributePointer& getVoxelBufferAttribute() { return _voxelBufferAttribute; }
@ -93,6 +108,9 @@ private:
MetavoxelLOD _lod;
QReadWriteLock _lodLock;
Frustum _frustum;
NetworkSimulation _networkSimulation;
QReadWriteLock _networkSimulationLock;
};
/// Generic abstract base class for objects that handle a signal.
@ -116,6 +134,28 @@ typedef QVector<BufferPoint> BufferPointVector;
Q_DECLARE_METATYPE(BufferPointVector)
/// Simple throttle for limiting bandwidth on a per-second basis.
class Throttle {
public:
Throttle();
/// Sets the per-second limit.
void setLimit(int limit) { _limit = limit; }
/// Determines whether the message with the given size should be throttled (discarded). If not, registers the message
/// as having been processed (i.e., contributing to later throttling).
bool shouldThrottle(int bytes);
private:
int _limit;
int _total;
typedef QPair<qint64, int> Bucket;
QList<Bucket> _buckets;
};
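
As the MetavoxelSystemClient send and receive paths below show, the throttle is fed the size of each datagram and rejects anything that would push the rolling one-second total over the limit. A minimal usage sketch (the limit value and sendPacket() call are illustrative):

    Throttle throttle;
    throttle.setLimit(64 * 1024);                 // allow roughly 64 KB per second
    if (!throttle.shouldThrottle(packet.size())) {
        // under budget: the packet's size now counts toward later throttling
        sendPacket(packet);                       // hypothetical send call
    } else {
        // over budget for this second: drop the packet, as the simulation does
    }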
/// A client session associated with a single server.
class MetavoxelSystemClient : public MetavoxelClient {
Q_OBJECT
@ -145,6 +185,9 @@ private:
MetavoxelData _augmentedData;
MetavoxelData _renderedAugmentedData;
QReadWriteLock _augmentedDataLock;
Throttle _sendThrottle;
Throttle _receiveThrottle;
};
/// Base class for cached static buffers.


@ -21,7 +21,7 @@
#include "ScriptsModel.h"
static const QString S3_URL = "https://s3.amazonaws.com/hifi-public";
static const QString S3_URL = "http://s3.amazonaws.com/hifi-public";
static const QString PUBLIC_URL = "http://public.highfidelity.io";
static const QString MODELS_LOCATION = "scripts/";


@ -66,8 +66,6 @@ glm::vec3 OculusManager::_calibrationPosition;
glm::quat OculusManager::_calibrationOrientation;
quint64 OculusManager::_calibrationStartTime;
int OculusManager::_calibrationMessage = NULL;
QString OculusManager::CALIBRATION_BILLBOARD_URL = "http://hifi-public.s3.amazonaws.com/images/hold-to-calibrate.svg";
float OculusManager::CALIBRATION_BILLBOARD_SCALE = 2.f;
#endif
@ -191,7 +189,7 @@ void OculusManager::disconnect() {
}
#ifdef HAVE_LIBOVR
void OculusManager::positionCalibrationBillboard(BillboardOverlay* billboard) {
void OculusManager::positionCalibrationBillboard(Text3DOverlay* billboard) {
glm::quat headOrientation = Application::getInstance()->getAvatar()->getHeadOrientation();
headOrientation.x = 0;
headOrientation.z = 0;
@ -204,8 +202,9 @@ void OculusManager::positionCalibrationBillboard(BillboardOverlay* billboard) {
#ifdef HAVE_LIBOVR
void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) {
static QString instructionMessage = "Hold still to calibrate";
static QString progressMessage;
static BillboardOverlay* billboard;
static Text3DOverlay* billboard;
switch (_calibrationState) {
@ -235,9 +234,13 @@ void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) {
if (!_calibrationMessage) {
qDebug() << "Hold still to calibrate HMD";
billboard = new BillboardOverlay();
billboard->setURL(CALIBRATION_BILLBOARD_URL);
billboard->setScale(CALIBRATION_BILLBOARD_SCALE);
billboard = new Text3DOverlay();
billboard->setDimensions(glm::vec2(2.0f, 1.25f));
billboard->setTopMargin(0.35f);
billboard->setLeftMargin(0.28f);
billboard->setText(instructionMessage);
billboard->setAlpha(0.5f);
billboard->setLineHeight(0.1f);
billboard->setIsFacingAvatar(false);
positionCalibrationBillboard(billboard);
@ -275,7 +278,7 @@ void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) {
} else {
progressMessage += ".";
}
//qDebug() << progressMessage; // Progress message ready for 3D text overlays.
billboard->setText(instructionMessage + "\n\n" + progressMessage);
}
}
} else {


@ -18,7 +18,7 @@
#endif
#include "renderer/ProgramObject.h"
#include "ui/overlays/BillboardOverlay.h"
#include "ui/overlays/Text3DOverlay.h"
const float DEFAULT_OCULUS_UI_ANGULAR_SIZE = 72.0f;
@ -111,7 +111,7 @@ private:
WAITING_FOR_ZERO_HELD,
CALIBRATED
};
static void positionCalibrationBillboard(BillboardOverlay* billboard);
static void positionCalibrationBillboard(Text3DOverlay* message);
static float CALIBRATION_DELTA_MINIMUM_LENGTH;
static float CALIBRATION_DELTA_MINIMUM_ANGLE;
static float CALIBRATION_ZERO_MAXIMUM_LENGTH;
@ -123,8 +123,6 @@ private:
static glm::quat _calibrationOrientation;
static quint64 _calibrationStartTime;
static int _calibrationMessage;
static QString CALIBRATION_BILLBOARD_URL;
static float CALIBRATION_BILLBOARD_SCALE;
#endif


@ -1649,20 +1649,11 @@ int Model::renderMeshes(gpu::Batch& batch, RenderMode mode, bool translucent, fl
ProgramObject* activeProgram = program;
Locations* activeLocations = locations;
// Try to use the Batch
//gpu::Batch batch;
/*if (isSkinned) {
skinProgram->bind();
activeProgram = skinProgram;
activeLocations = skinLocations;
} else {
program->bind();
}*/
if (isSkinned) {
activeProgram = skinProgram;
activeLocations = skinLocations;
}
// This code replace the "bind()" on the QGLProgram
if (!activeProgram->isLinked()) {
activeProgram->link();
}


@ -70,3 +70,11 @@ float AudioDeviceScriptingInterface::getInputVolume() {
void AudioDeviceScriptingInterface::setInputVolume(float volume) {
Application::getInstance()->getAudio()->setInputVolume(volume);
}
void AudioDeviceScriptingInterface::setReverb(bool reverb) {
Application::getInstance()->getAudio()->setReverb(reverb);
}
void AudioDeviceScriptingInterface::setReverbOptions(const AudioEffectOptions* options) {
Application::getInstance()->getAudio()->setReverbOptions(options);
}


@ -39,6 +39,8 @@ public slots:
float getInputVolume();
void setInputVolume(float volume);
void setReverb(bool reverb);
void setReverbOptions(const AudioEffectOptions* options);
};
#endif // hifi_AudioDeviceScriptingInterface_h


@ -0,0 +1,87 @@
//
// MetavoxelNetworkSimulator.cpp
// interface/src/ui
//
// Created by Andrzej Kapolka on 10/20/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QDialogButtonBox>
#include <QDoubleSpinBox>
#include <QFormLayout>
#include <QSpinBox>
#include <QVBoxLayout>
#include "Application.h"
#include "MetavoxelNetworkSimulator.h"
const int BYTES_PER_KILOBYTE = 1024;
MetavoxelNetworkSimulator::MetavoxelNetworkSimulator() :
QWidget(Application::getInstance()->getGLWidget(), Qt::Dialog) {
setWindowTitle("Metavoxel Network Simulator");
setAttribute(Qt::WA_DeleteOnClose);
QVBoxLayout* topLayout = new QVBoxLayout();
setLayout(topLayout);
QFormLayout* form = new QFormLayout();
topLayout->addLayout(form);
MetavoxelSystem::NetworkSimulation simulation = Application::getInstance()->getMetavoxels()->getNetworkSimulation();
form->addRow("Drop Rate:", _dropRate = new QDoubleSpinBox());
_dropRate->setSuffix("%");
_dropRate->setValue(simulation.dropRate * 100.0);
connect(_dropRate, static_cast<void (QDoubleSpinBox::*)(double)>(&QDoubleSpinBox::valueChanged), this,
&MetavoxelNetworkSimulator::updateMetavoxelSystem);
form->addRow("Repeat Rate:", _repeatRate = new QDoubleSpinBox());
_repeatRate->setSuffix("%");
_repeatRate->setValue(simulation.repeatRate * 100.0);
connect(_repeatRate, static_cast<void (QDoubleSpinBox::*)(double)>(&QDoubleSpinBox::valueChanged), this,
&MetavoxelNetworkSimulator::updateMetavoxelSystem);
form->addRow("Minimum Delay:", _minimumDelay = new QSpinBox());
_minimumDelay->setMaximum(1000);
_minimumDelay->setSuffix("ms");
_minimumDelay->setValue(simulation.minimumDelay);
connect(_minimumDelay, static_cast<void (QSpinBox::*)(int)>(&QSpinBox::valueChanged), this,
&MetavoxelNetworkSimulator::updateMetavoxelSystem);
form->addRow("Maximum Delay:", _maximumDelay = new QSpinBox());
_maximumDelay->setMaximum(1000);
_maximumDelay->setSuffix("ms");
_maximumDelay->setValue(simulation.maximumDelay);
connect(_maximumDelay, static_cast<void (QSpinBox::*)(int)>(&QSpinBox::valueChanged), this,
&MetavoxelNetworkSimulator::updateMetavoxelSystem);
form->addRow("Bandwidth Limit:", _bandwidthLimit = new QSpinBox());
_bandwidthLimit->setMaximum(1024 * 1024);
_bandwidthLimit->setSuffix("KB/s");
_bandwidthLimit->setValue(simulation.bandwidthLimit / BYTES_PER_KILOBYTE);
connect(_bandwidthLimit, static_cast<void (QSpinBox::*)(int)>(&QSpinBox::valueChanged), this,
&MetavoxelNetworkSimulator::updateMetavoxelSystem);
QDialogButtonBox* buttons = new QDialogButtonBox(QDialogButtonBox::Ok, this);
topLayout->addWidget(buttons);
connect(buttons, &QDialogButtonBox::accepted, this, &QWidget::close);
show();
}
void MetavoxelNetworkSimulator::updateMetavoxelSystem() {
int bandwidthLimit = _bandwidthLimit->value() * BYTES_PER_KILOBYTE;
if (bandwidthLimit > 0) {
// make sure the limit is enough to let at least one packet through
const int MINIMUM_BANDWIDTH_LIMIT = 2048;
bandwidthLimit = qMax(bandwidthLimit, MINIMUM_BANDWIDTH_LIMIT);
}
Application::getInstance()->getMetavoxels()->setNetworkSimulation(MetavoxelSystem::NetworkSimulation(
_dropRate->value() / 100.0, _repeatRate->value() / 100.0, qMin(_minimumDelay->value(), _maximumDelay->value()),
qMax(_minimumDelay->value(), _maximumDelay->value()), bandwidthLimit));
}


@ -0,0 +1,41 @@
//
// MetavoxelNetworkSimulator.h
// interface/src/ui
//
// Created by Andrzej Kapolka on 10/20/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_MetavoxelNetworkSimulator_h
#define hifi_MetavoxelNetworkSimulator_h
#include <QWidget>
class QDoubleSpinBox;
class QSpinBox;
/// Allows tweaking network simulation (packet drop percentage, etc.) settings for metavoxels.
class MetavoxelNetworkSimulator : public QWidget {
Q_OBJECT
public:
MetavoxelNetworkSimulator();
private slots:
void updateMetavoxelSystem();
private:
QDoubleSpinBox* _dropRate;
QDoubleSpinBox* _repeatRate;
QSpinBox* _minimumDelay;
QSpinBox* _maximumDelay;
QSpinBox* _bandwidthLimit;
};
#endif // hifi_MetavoxelNetworkSimulator_h


@ -26,7 +26,7 @@
const char* MODEL_TYPE_NAMES[] = { "entities", "heads", "skeletons", "attachments" };
static const QString S3_URL = "https://s3.amazonaws.com/hifi-public";
static const QString S3_URL = "http://s3.amazonaws.com/hifi-public";
static const QString PUBLIC_URL = "http://public.highfidelity.io";
static const QString MODELS_LOCATION = "models/";


@ -32,6 +32,7 @@ public:
float getTopMargin() const { return _topMargin; }
float getRightMargin() const { return _rightMargin; }
float getBottomMargin() const { return _bottomMargin; }
bool getIsFacingAvatar() const { return _isFacingAvatar; }
xColor getBackgroundColor();
// setters
@ -41,6 +42,7 @@ public:
void setTopMargin(float margin) { _topMargin = margin; }
void setRightMargin(float margin) { _rightMargin = margin; }
void setBottomMargin(float margin) { _bottomMargin = margin; }
void setIsFacingAvatar(bool isFacingAvatar) { _isFacingAvatar = isFacingAvatar; }
virtual void setProperties(const QScriptValue& properties);


@ -0,0 +1,88 @@
//
// AudioEffectOptions.cpp
// libraries/audio/src
//
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AudioEffectOptions.h"
static const QString MAX_ROOM_SIZE_HANDLE = "maxRoomSize";
static const QString ROOM_SIZE_HANDLE = "roomSize";
static const QString REVERB_TIME_HANDLE = "reverbTime";
static const QString DAMPIMG_HANDLE = "damping";
static const QString SPREAD_HANDLE = "spread";
static const QString INPUT_BANDWIDTH_HANDLE = "inputBandwidth";
static const QString EARLY_LEVEL_HANDLE = "earlyLevel";
static const QString TAIL_LEVEL_HANDLE = "tailLevel";
static const QString DRY_LEVEL_HANDLE = "dryLevel";
static const QString WET_LEVEL_HANDLE = "wetLevel";
AudioEffectOptions::AudioEffectOptions(QScriptValue arguments) :
_maxRoomSize(50.0f),
_roomSize(50.0f),
_reverbTime(4.0f),
_damping(0.5f),
_spread(15.0f),
_inputBandwidth(0.75f),
_earlyLevel(-22.0f),
_tailLevel(-28.0f),
_dryLevel(0.0f),
_wetLevel(6.0f) {
if (arguments.property(MAX_ROOM_SIZE_HANDLE).isNumber()) {
_maxRoomSize = arguments.property(MAX_ROOM_SIZE_HANDLE).toNumber();
}
if (arguments.property(ROOM_SIZE_HANDLE).isNumber()) {
_roomSize = arguments.property(ROOM_SIZE_HANDLE).toNumber();
}
if (arguments.property(REVERB_TIME_HANDLE).isNumber()) {
_reverbTime = arguments.property(REVERB_TIME_HANDLE).toNumber();
}
if (arguments.property(DAMPIMG_HANDLE).isNumber()) {
_damping = arguments.property(DAMPIMG_HANDLE).toNumber();
}
if (arguments.property(SPREAD_HANDLE).isNumber()) {
_spread = arguments.property(SPREAD_HANDLE).toNumber();
}
if (arguments.property(INPUT_BANDWIDTH_HANDLE).isNumber()) {
_inputBandwidth = arguments.property(INPUT_BANDWIDTH_HANDLE).toNumber();
}
if (arguments.property(EARLY_LEVEL_HANDLE).isNumber()) {
_earlyLevel = arguments.property(EARLY_LEVEL_HANDLE).toNumber();
}
if (arguments.property(TAIL_LEVEL_HANDLE).isNumber()) {
_tailLevel = arguments.property(TAIL_LEVEL_HANDLE).toNumber();
}
if (arguments.property(DRY_LEVEL_HANDLE).isNumber()) {
_dryLevel = arguments.property(DRY_LEVEL_HANDLE).toNumber();
}
if (arguments.property(WET_LEVEL_HANDLE).isNumber()) {
_wetLevel = arguments.property(WET_LEVEL_HANDLE).toNumber();
}
}
AudioEffectOptions::AudioEffectOptions(const AudioEffectOptions &other) {
*this = other;
}
AudioEffectOptions& AudioEffectOptions::operator=(const AudioEffectOptions &other) {
_maxRoomSize = other._maxRoomSize;
_roomSize = other._roomSize;
_reverbTime = other._reverbTime;
_damping = other._damping;
_spread = other._spread;
_inputBandwidth = other._inputBandwidth;
_earlyLevel = other._earlyLevel;
_tailLevel = other._tailLevel;
_dryLevel = other._dryLevel;
_wetLevel = other._wetLevel;
return *this;
}
QScriptValue AudioEffectOptions::constructor(QScriptContext* context, QScriptEngine* engine) {
return engine->newQObject(new AudioEffectOptions(context->argument(0)));
}
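
The static constructor above is what lets scripts call new AudioEffectOptions({...}), as in examples/audioReverbOn.js. The registration itself is not part of this diff; a hedged sketch of the usual QtScript wiring:

    // Hypothetical registration following the standard QScriptEngine::newFunction()
    // pattern; the actual call site lives elsewhere in the codebase.
    QScriptValue ctor = engine->newFunction(AudioEffectOptions::constructor);
    engine->globalObject().setProperty("AudioEffectOptions", ctor);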


@ -0,0 +1,106 @@
//
// AudioEffectOptions.h
// libraries/audio/src
//
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AudioEffectOptions_h
#define hifi_AudioEffectOptions_h
#include <QObject>
#include <QtScript/QScriptContext>
#include <QtScript/QScriptEngine>
class AudioEffectOptions : public QObject {
Q_OBJECT
// Meters Square
Q_PROPERTY(float maxRoomSize READ getMaxRoomSize WRITE setMaxRoomSize)
Q_PROPERTY(float roomSize READ getRoomSize WRITE setRoomSize)
// Seconds
Q_PROPERTY(float reverbTime READ getReverbTime WRITE setReverbTime)
// Ratio between 0 and 1
Q_PROPERTY(float damping READ getDamping WRITE setDamping)
// (?) Does not appear to be set externally very often
Q_PROPERTY(float spread READ getSpread WRITE setSpread)
// Ratio between 0 and 1
Q_PROPERTY(float inputBandwidth READ getInputBandwidth WRITE setInputBandwidth)
// in dB
Q_PROPERTY(float earlyLevel READ getEarlyLevel WRITE setEarlyLevel)
Q_PROPERTY(float tailLevel READ getTailLevel WRITE setTailLevel)
Q_PROPERTY(float dryLevel READ getDryLevel WRITE setDryLevel)
Q_PROPERTY(float wetLevel READ getWetLevel WRITE setWetLevel)
public:
AudioEffectOptions(QScriptValue arguments = QScriptValue());
AudioEffectOptions(const AudioEffectOptions &other);
AudioEffectOptions& operator=(const AudioEffectOptions &other);
static QScriptValue constructor(QScriptContext* context, QScriptEngine* engine);
float getRoomSize() const { return _roomSize; }
void setRoomSize(float roomSize ) { _roomSize = roomSize; }
float getMaxRoomSize() const { return _maxRoomSize; }
void setMaxRoomSize(float maxRoomSize ) { _maxRoomSize = maxRoomSize; }
float getReverbTime() const { return _reverbTime; }
void setReverbTime(float reverbTime ) { _reverbTime = reverbTime; }
float getDamping() const { return _damping; }
void setDamping(float damping ) { _damping = damping; }
float getSpread() const { return _spread; }
void setSpread(float spread ) { _spread = spread; }
float getInputBandwidth() const { return _inputBandwidth; }
void setInputBandwidth(float inputBandwidth ) { _inputBandwidth = inputBandwidth; }
float getEarlyLevel() const { return _earlyLevel; }
void setEarlyLevel(float earlyLevel ) { _earlyLevel = earlyLevel; }
float getTailLevel() const { return _tailLevel; }
void setTailLevel(float tailLevel ) { _tailLevel = tailLevel; }
float getDryLevel() const { return _dryLevel; }
void setDryLevel(float dryLevel) { _dryLevel = dryLevel; }
float getWetLevel() const { return _wetLevel; }
void setWetLevel(float wetLevel) { _wetLevel = wetLevel; }
private:
// http://wiki.audacityteam.org/wiki/GVerb#Instant_Reverberb_settings
// Meters Square
float _maxRoomSize;
float _roomSize;
// Seconds
float _reverbTime;
// Ratio between 0 and 1
float _damping;
// ? (Does not appear to be set externally very often)
float _spread;
// Ratio between 0 and 1
float _inputBandwidth;
// dB
float _earlyLevel;
float _tailLevel;
float _dryLevel;
float _wetLevel;
};
#endif // hifi_AudioEffectOptions_h


@ -44,7 +44,8 @@ InboundAudioStream::InboundAudioStream(int numFrameSamples, int numFramesCapacit
_framesAvailableStat(),
_currentJitterBufferFrames(0),
_timeGapStatsForStatsPacket(0, STATS_FOR_STATS_PACKET_WINDOW_SECONDS),
_repetitionWithFade(settings._repetitionWithFade)
_repetitionWithFade(settings._repetitionWithFade),
_hasReverb(false)
{
}
@ -162,9 +163,22 @@ int InboundAudioStream::parseData(const QByteArray& packet) {
}
int InboundAudioStream::parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) {
int read = 0;
if (type == PacketTypeMixedAudio) {
memcpy(&_hasReverb, packetAfterSeqNum.data() + read, sizeof(bool));
read += sizeof(bool);
if (_hasReverb) {
memcpy(&_reverbTime, packetAfterSeqNum.data() + read, sizeof(float));
read += sizeof(float);
memcpy(&_wetLevel, packetAfterSeqNum.data() + read, sizeof(float));
read += sizeof(float);
}
}
// mixed audio packets do not have any info between the seq num and the audio data.
numAudioSamples = packetAfterSeqNum.size() / sizeof(int16_t);
return 0;
numAudioSamples = (packetAfterSeqNum.size() - read) / sizeof(int16_t);
return read;
}
int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples) {


@ -154,6 +154,10 @@ public:
int getOverflowCount() const { return _ringBuffer.getOverflowCount(); }
int getPacketsReceived() const { return _incomingSequenceNumberStats.getReceived(); }
bool hasReverb() const { return _hasReverb; }
float getRevebTime() const { return _reverbTime; }
float getWetLevel() const { return _wetLevel; }
public slots:
/// This function should be called every second for all the stats to function properly. If dynamic jitter buffers
@ -243,6 +247,11 @@ protected:
MovingMinMaxAvg<quint64> _timeGapStatsForStatsPacket;
bool _repetitionWithFade;
// Reverb properties
bool _hasReverb;
float _reverbTime;
float _wetLevel;
};
float calculateRepeatedFrameFadeFactor(int indexOfRepeat);


@ -381,6 +381,8 @@ private:
AvatarData& operator= (const AvatarData&);
};
Q_DECLARE_METATYPE(AvatarData*)
class JointData {
public:
bool valid;


@ -21,6 +21,7 @@ AvatarHashMap::AvatarHashMap() :
connect(NodeList::getInstance(), &NodeList::uuidChanged, this, &AvatarHashMap::sessionUUIDChanged);
}
AvatarHash::iterator AvatarHashMap::erase(const AvatarHash::iterator& iterator) {
qDebug() << "Removing Avatar with UUID" << iterator.key() << "from AvatarHashMap.";
return _avatarHash.erase(iterator);
@ -53,29 +54,26 @@ void AvatarHashMap::processAvatarMixerDatagram(const QByteArray& datagram, const
}
bool AvatarHashMap::containsAvatarWithDisplayName(const QString& displayName) {
AvatarHash::iterator avatarIterator = _avatarHash.begin();
while (avatarIterator != _avatarHash.end()) {
AvatarSharedPointer sharedAvatar = avatarIterator.value();
if (avatarIterator.value()->getDisplayName() == displayName) {
return avatarWithDisplayName(displayName) == NULL ? false : true;
}
AvatarData* AvatarHashMap::avatarWithDisplayName(const QString& displayName) {
foreach(const AvatarSharedPointer& sharedAvatar, _avatarHash) {
if (sharedAvatar->getDisplayName() == displayName) {
// this is a match
// check if this avatar should still be around
if (!shouldKillAvatar(sharedAvatar)) {
// we have a match, return true
return true;
// we have a match, return the AvatarData
return sharedAvatar.data();
} else {
// we should remove this avatar, do that now
erase(avatarIterator);
// we should remove this avatar, but we might not be on a thread that is allowed
// so we just return NULL to the caller
return NULL;
}
break;
} else {
++avatarIterator;
}
}
// return false, no match
return false;
return NULL;
}
AvatarSharedPointer AvatarHashMap::newSharedAvatar() {

View file

@ -21,6 +21,7 @@
#include "AvatarData.h"
typedef QSharedPointer<AvatarData> AvatarSharedPointer;
typedef QWeakPointer<AvatarData> AvatarWeakPointer;
typedef QHash<QUuid, AvatarSharedPointer> AvatarHash;
class AvatarHashMap : public QObject {
@ -34,6 +35,7 @@ public:
public slots:
void processAvatarMixerDatagram(const QByteArray& datagram, const QWeakPointer<Node>& mixerWeakPointer);
bool containsAvatarWithDisplayName(const QString& displayName);
    AvatarData* avatarWithDisplayName(const QString& displayName);
private slots:
void sessionUUIDChanged(const QUuid& sessionUUID, const QUuid& oldUUID);
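A short usage sketch for the new lookup; avatarHashMap is an assumed pointer to the shared instance, and the important detail is that NULL means no live match:

    AvatarData* avatar = avatarHashMap->avatarWithDisplayName("SomeDisplayName");
    if (avatar) {
        qDebug() << "Found avatar:" << avatar->getDisplayName();
    } else {
        // no avatar with that display name, or it is pending removal on another thread
    }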

View file

@ -301,10 +301,10 @@ void DatagramSequencer::clearReliableChannel(QObject* object) {
void DatagramSequencer::sendRecordAcknowledged(const SendRecord& record) {
// stop acknowledging the recorded packets
while (!_receiveRecords.isEmpty() && _receiveRecords.first().packetNumber <= record.lastReceivedPacketNumber) {
emit receiveAcknowledged(0);
const ReceiveRecord& received = _receiveRecords.first();
_inputStream.persistReadMappings(received.mappings);
_receivedHighPriorityMessages -= received.newHighPriorityMessages;
emit receiveAcknowledged(0);
_receiveRecords.removeFirst();
}
_outputStream.persistWriteMappings(record.mappings);

View file

@ -108,6 +108,9 @@ public:
    /// Returns the input channel at the specified index, creating it if necessary.
ReliableChannel* getReliableInputChannel(int index = 0);
/// Returns a reference to the stored receive mappings at the specified index.
const Bitstream::ReadMappings& getReadMappings(int index) const { return _receiveRecords.at(index).mappings; }
/// Adds stats for all reliable channels to the referenced variables.
void addReliableChannelStats(int& sendProgress, int& sendTotal, int& receiveProgress, int& receiveTotal) const;

View file

@ -107,7 +107,8 @@ PacketRecord* Endpoint::maybeCreateReceiveRecord() const {
return NULL;
}
PacketRecord::PacketRecord(const MetavoxelLOD& lod, const MetavoxelData& data) :
PacketRecord::PacketRecord(int packetNumber, const MetavoxelLOD& lod, const MetavoxelData& data) :
_packetNumber(packetNumber),
_lod(lod),
_data(data) {
}

View file

@ -32,7 +32,7 @@ public:
PacketRecord* baselineReceiveRecord = NULL);
virtual ~Endpoint();
const DatagramSequencer& getSequencer() const { return _sequencer; }
DatagramSequencer& getSequencer() { return _sequencer; }
virtual void update();
@ -45,10 +45,10 @@ protected slots:
virtual void handleMessage(const QVariant& message, Bitstream& in);
void recordSend();
void recordReceive();
virtual void recordReceive();
void clearSendRecordsBefore(int index);
void clearReceiveRecordsBefore(int index);
virtual void clearSendRecordsBefore(int index);
virtual void clearReceiveRecordsBefore(int index);
protected:
@ -71,14 +71,16 @@ protected:
class PacketRecord {
public:
PacketRecord(const MetavoxelLOD& lod = MetavoxelLOD(), const MetavoxelData& data = MetavoxelData());
PacketRecord(int packetNumber = 0, const MetavoxelLOD& lod = MetavoxelLOD(), const MetavoxelData& data = MetavoxelData());
virtual ~PacketRecord();
int getPacketNumber() const { return _packetNumber; }
const MetavoxelLOD& getLOD() const { return _lod; }
const MetavoxelData& getData() const { return _data; }
private:
int _packetNumber;
MetavoxelLOD _lod;
MetavoxelData _data;
};

View file

@ -188,7 +188,9 @@ MetavoxelClient::MetavoxelClient(const SharedNodePointer& node, MetavoxelUpdater
Endpoint(node, new PacketRecord(), new PacketRecord()),
_updater(updater),
_reliableDeltaChannel(NULL),
_reliableDeltaID(0) {
_reliableDeltaID(0),
_dummyInputStream(_dummyDataStream),
_dummyPacketNumber(0) {
connect(_sequencer.getReliableInputChannel(RELIABLE_DELTA_CHANNEL_INDEX),
SIGNAL(receivedMessage(const QVariant&, Bitstream&)), SLOT(handleMessage(const QVariant&, Bitstream&)));
@ -216,12 +218,77 @@ void MetavoxelClient::applyEdit(const MetavoxelEditMessage& edit, bool reliable)
}
}
PacketRecord* MetavoxelClient::getAcknowledgedSendRecord(int packetNumber) const {
PacketRecord* lastAcknowledged = getLastAcknowledgedSendRecord();
if (lastAcknowledged->getPacketNumber() == packetNumber) {
return lastAcknowledged;
}
foreach (PacketRecord* record, _clearedSendRecords) {
if (record->getPacketNumber() == packetNumber) {
return record;
}
}
return NULL;
}
PacketRecord* MetavoxelClient::getAcknowledgedReceiveRecord(int packetNumber) const {
PacketRecord* lastAcknowledged = getLastAcknowledgedReceiveRecord();
if (lastAcknowledged->getPacketNumber() == packetNumber) {
return lastAcknowledged;
}
foreach (const ClearedReceiveRecord& record, _clearedReceiveRecords) {
if (record.first->getPacketNumber() == packetNumber) {
return record.first;
}
}
return NULL;
}
void MetavoxelClient::dataChanged(const MetavoxelData& oldData) {
// make thread-safe copy
QWriteLocker locker(&_dataCopyLock);
_dataCopy = _data;
}
void MetavoxelClient::recordReceive() {
Endpoint::recordReceive();
// clear the cleared lists
foreach (PacketRecord* record, _clearedSendRecords) {
delete record;
}
_clearedSendRecords.clear();
foreach (const ClearedReceiveRecord& record, _clearedReceiveRecords) {
delete record.first;
}
_clearedReceiveRecords.clear();
}
void MetavoxelClient::clearSendRecordsBefore(int index) {
// move to cleared list
QList<PacketRecord*>::iterator end = _sendRecords.begin() + index + 1;
for (QList<PacketRecord*>::const_iterator it = _sendRecords.begin(); it != end; it++) {
_clearedSendRecords.append(*it);
}
_sendRecords.erase(_sendRecords.begin(), end);
}
void MetavoxelClient::clearReceiveRecordsBefore(int index) {
// copy the mappings on first call per packet
if (_sequencer.getIncomingPacketNumber() > _dummyPacketNumber) {
_dummyPacketNumber = _sequencer.getIncomingPacketNumber();
_dummyInputStream.copyPersistentMappings(_sequencer.getInputStream());
}
// move to cleared list
QList<PacketRecord*>::iterator end = _receiveRecords.begin() + index + 1;
for (QList<PacketRecord*>::const_iterator it = _receiveRecords.begin(); it != end; it++) {
_clearedReceiveRecords.append(ClearedReceiveRecord(*it, _sequencer.getReadMappings(index)));
}
_receiveRecords.erase(_receiveRecords.begin(), end);
}
void MetavoxelClient::writeUpdateMessage(Bitstream& out) {
ClientStateMessage state = { _updater->getLOD() };
out << QVariant::fromValue(state);
@ -230,14 +297,16 @@ void MetavoxelClient::writeUpdateMessage(Bitstream& out) {
void MetavoxelClient::handleMessage(const QVariant& message, Bitstream& in) {
int userType = message.userType();
if (userType == MetavoxelDeltaMessage::Type) {
PacketRecord* receiveRecord = getLastAcknowledgedReceiveRecord();
if (_reliableDeltaChannel) {
_remoteData.readDelta(receiveRecord->getData(), receiveRecord->getLOD(), in, _remoteDataLOD = _reliableDeltaLOD);
MetavoxelData reference = _remoteData;
MetavoxelLOD referenceLOD = _remoteDataLOD;
_remoteData.readDelta(reference, referenceLOD, in, _remoteDataLOD = _reliableDeltaLOD);
_sequencer.getInputStream().persistReadMappings(in.getAndResetReadMappings());
in.clearPersistentMappings();
_reliableDeltaChannel = NULL;
} else {
PacketRecord* receiveRecord = getLastAcknowledgedReceiveRecord();
_remoteData.readDelta(receiveRecord->getData(), receiveRecord->getLOD(), in,
_remoteDataLOD = getLastAcknowledgedSendRecord()->getLOD());
in.reset();
@ -255,15 +324,36 @@ void MetavoxelClient::handleMessage(const QVariant& message, Bitstream& in) {
}
} else if (userType == MetavoxelDeltaPendingMessage::Type) {
// check the id to make sure this is not a delta we've already processed
int id = message.value<MetavoxelDeltaPendingMessage>().id;
if (id > _reliableDeltaID) {
_reliableDeltaID = id;
_reliableDeltaChannel = _sequencer.getReliableInputChannel(RELIABLE_DELTA_CHANNEL_INDEX);
_reliableDeltaChannel->getBitstream().copyPersistentMappings(_sequencer.getInputStream());
_reliableDeltaLOD = getLastAcknowledgedSendRecord()->getLOD();
PacketRecord* receiveRecord = getLastAcknowledgedReceiveRecord();
MetavoxelDeltaPendingMessage pending = message.value<MetavoxelDeltaPendingMessage>();
if (pending.id > _reliableDeltaID) {
_reliableDeltaID = pending.id;
PacketRecord* sendRecord = getAcknowledgedSendRecord(pending.receivedPacketNumber);
if (!sendRecord) {
qWarning() << "Missing send record for delta" << pending.receivedPacketNumber;
return;
}
_reliableDeltaLOD = sendRecord->getLOD();
PacketRecord* receiveRecord = getAcknowledgedReceiveRecord(pending.sentPacketNumber);
if (!receiveRecord) {
qWarning() << "Missing receive record for delta" << pending.sentPacketNumber;
return;
}
_remoteDataLOD = receiveRecord->getLOD();
_remoteData = receiveRecord->getData();
_reliableDeltaChannel = _sequencer.getReliableInputChannel(RELIABLE_DELTA_CHANNEL_INDEX);
if (receiveRecord == getLastAcknowledgedReceiveRecord()) {
_reliableDeltaChannel->getBitstream().copyPersistentMappings(_sequencer.getInputStream());
} else {
_reliableDeltaChannel->getBitstream().copyPersistentMappings(_dummyInputStream);
foreach (const ClearedReceiveRecord& record, _clearedReceiveRecords) {
_reliableDeltaChannel->getBitstream().persistReadMappings(record.second);
if (record.first == receiveRecord) {
break;
}
}
}
}
} else {
Endpoint::handleMessage(message, in);
@ -271,10 +361,11 @@ void MetavoxelClient::handleMessage(const QVariant& message, Bitstream& in) {
}
PacketRecord* MetavoxelClient::maybeCreateSendRecord() const {
return new PacketRecord(_reliableDeltaChannel ? _reliableDeltaLOD : _updater->getLOD());
return new PacketRecord(_sequencer.getOutgoingPacketNumber(),
_reliableDeltaChannel ? _reliableDeltaLOD : _updater->getLOD());
}
PacketRecord* MetavoxelClient::maybeCreateReceiveRecord() const {
return new PacketRecord(_remoteDataLOD, _remoteData);
return new PacketRecord(_sequencer.getIncomingPacketNumber(), _remoteDataLOD, _remoteData);
}

View file

@ -116,8 +116,16 @@ public:
protected:
PacketRecord* getAcknowledgedSendRecord(int packetNumber) const;
PacketRecord* getAcknowledgedReceiveRecord(int packetNumber) const;
virtual void dataChanged(const MetavoxelData& oldData);
virtual void recordReceive();
virtual void clearSendRecordsBefore(int index);
virtual void clearReceiveRecordsBefore(int index);
virtual void writeUpdateMessage(Bitstream& out);
virtual void handleMessage(const QVariant& message, Bitstream& in);
@ -132,9 +140,18 @@ protected:
ReliableChannel* _reliableDeltaChannel;
MetavoxelLOD _reliableDeltaLOD;
int _reliableDeltaID;
QVariant _reliableDeltaMessage;
MetavoxelData _dataCopy;
QReadWriteLock _dataCopyLock;
QDataStream _dummyDataStream;
Bitstream _dummyInputStream;
int _dummyPacketNumber;
QList<PacketRecord*> _clearedSendRecords;
typedef QPair<PacketRecord*, Bitstream::ReadMappings> ClearedReceiveRecord;
QList<ClearedReceiveRecord> _clearedReceiveRecords;
};
#endif // hifi_MetavoxelClientManager_h

View file

@ -701,8 +701,7 @@ int VoxelMaterialSpannerEditVisitor::visit(MetavoxelInfo& info) {
int sizeY = (int)overlap.maximum.y - minY + 1;
int sizeZ = (int)overlap.maximum.z - minZ + 1;
QRgb rgb = _color.rgba();
bool flipped = (qAlpha(rgb) == 0);
bool flipped = false;
float step = 1.0f / scale;
glm::vec3 position(0.0f, 0.0f, info.minimum.z + minZ * step);
if (_spanner->hasOwnColors()) {
@ -720,6 +719,8 @@ int VoxelMaterialSpannerEditVisitor::visit(MetavoxelInfo& info) {
}
}
} else {
QRgb rgb = _color.rgba();
flipped = (qAlpha(rgb) == 0);
for (QRgb* destZ = colorContents.data() + minZ * VOXEL_BLOCK_AREA + minY * VOXEL_BLOCK_SAMPLES + minX,
*endZ = destZ + sizeZ * VOXEL_BLOCK_AREA; destZ != endZ; destZ += VOXEL_BLOCK_AREA, position.z += step) {
position.y = info.minimum.y + minY * step;
@ -734,6 +735,22 @@ int VoxelMaterialSpannerEditVisitor::visit(MetavoxelInfo& info) {
}
}
}
// if there are no visible colors, we can clear everything
bool foundOpaque = false;
for (const QRgb* src = colorContents.constData(), *end = src + colorContents.size(); src != end; src++) {
if (qAlpha(*src) != 0) {
foundOpaque = true;
break;
}
}
if (!foundOpaque) {
info.outputValues[0] = AttributeValue(_outputs.at(0));
info.outputValues[1] = AttributeValue(_outputs.at(1));
info.outputValues[2] = AttributeValue(_outputs.at(2));
return STOP_RECURSION;
}
VoxelColorDataPointer newColorPointer(new VoxelColorData(colorContents, VOXEL_BLOCK_SAMPLES));
info.outputValues[0] = AttributeValue(info.inputValues.at(0).getAttribute(),
encodeInline<VoxelColorDataPointer>(newColorPointer));
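The opacity scan above could also live in a small helper; this is a sketch of that refactor, and hasAnyOpaqueVoxel is a name introduced here, not part of the codebase:

    static bool hasAnyOpaqueVoxel(const QVector<QRgb>& colors) {
        for (const QRgb* src = colors.constData(), *end = src + colors.size(); src != end; src++) {
            if (qAlpha(*src) != 0) {
                return true;    // at least one visible voxel, keep the color block
            }
        }
        return false;           // fully transparent, the caller can clear its outputs
    }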

View file

@ -68,6 +68,8 @@ class MetavoxelDeltaPendingMessage {
public:
STREAM int id;
STREAM int sentPacketNumber;
STREAM int receivedPacketNumber;
};
DECLARE_STREAMABLE_METATYPE(MetavoxelDeltaPendingMessage)
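A hedged server-side sketch of filling in the new fields; the locals here are assumptions, and the write mirrors how other messages in this change go out with out << QVariant::fromValue(...):

    MetavoxelDeltaPendingMessage pending;
    pending.id = ++_reliableDeltaID;                     // assumed per-session counter
    pending.sentPacketNumber = basisSentPacket;          // assumed: server packet whose receive record the client should restore
    pending.receivedPacketNumber = basisReceivedPacket;  // assumed: client packet whose send record supplies the LOD
    out << QVariant::fromValue(pending);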

View file

@ -53,7 +53,7 @@ PacketVersion versionForPacketType(PacketType type) {
case PacketTypeSilentAudioFrame:
return 4;
case PacketTypeMixedAudio:
return 1;
return 2;
case PacketTypeAvatarData:
return 3;
case PacketTypeAvatarIdentity:
@ -81,7 +81,7 @@ PacketVersion versionForPacketType(PacketType type) {
case PacketTypeAudioStreamStats:
return 1;
case PacketTypeMetavoxelData:
return 7;
return 8;
case PacketTypeVoxelData:
return VERSION_VOXELS_HAS_FILE_BREAKS;
default:

View file

@ -5,7 +5,7 @@ setup_hifi_library(Gui Network Script Widgets)
include_glm()
link_hifi_libraries(shared octree voxels fbx entities animation)
link_hifi_libraries(shared octree voxels fbx entities animation audio)
# call macro to link our dependencies and bubble them up via a property on our target
link_shared_dependencies()

View file

@ -17,6 +17,7 @@
#include <QtNetwork/QNetworkReply>
#include <QScriptEngine>
#include <AudioEffectOptions.h>
#include <AudioInjector.h>
#include <AudioRingBuffer.h>
#include <AvatarData.h>
@ -63,7 +64,15 @@ static QScriptValue debugPrint(QScriptContext* context, QScriptEngine* engine){
return QScriptValue();
}
QScriptValue injectorToScriptValue(QScriptEngine *engine, AudioInjector* const &in) {
QScriptValue avatarDataToScriptValue(QScriptEngine* engine, AvatarData* const &in) {
return engine->newQObject(in);
}
void avatarDataFromScriptValue(const QScriptValue &object, AvatarData* &out) {
out = qobject_cast<AvatarData*>(object.toQObject());
}
QScriptValue injectorToScriptValue(QScriptEngine* engine, AudioInjector* const &in) {
return engine->newQObject(in);
}
@ -269,10 +278,13 @@ void ScriptEngine::init() {
QScriptValue localVoxelsValue = scriptValueFromQMetaObject<LocalVoxels>();
globalObject().setProperty("LocalVoxels", localVoxelsValue);
QScriptValue audioEffectOptionsConstructorValue = newFunction(AudioEffectOptions::constructor);
globalObject().setProperty("AudioEffectOptions", audioEffectOptionsConstructorValue);
qScriptRegisterMetaType(this, injectorToScriptValue, injectorFromScriptValue);
qScriptRegisterMetaType(this, inputControllerToScriptValue, inputControllerFromScriptValue);
qScriptRegisterMetaType(this, avatarDataToScriptValue, avatarDataFromScriptValue);
qScriptRegisterMetaType(this, animationDetailsToScriptValue, animationDetailsFromScriptValue);
registerGlobalObject("Script", this);

View file

@ -0,0 +1,58 @@
//
// UndoStackScriptingInterface.cpp
// libraries/script-engine/src
//
// Created by Ryan Huffman on 10/22/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QDebug>
#include <QScriptValue>
#include <QScriptValueList>
#include <QScriptEngine>
#include "UndoStackScriptingInterface.h"
UndoStackScriptingInterface::UndoStackScriptingInterface(QUndoStack* undoStack) : _undoStack(undoStack) {
}
void UndoStackScriptingInterface::pushCommand(QScriptValue undoFunction, QScriptValue undoData,
QScriptValue redoFunction, QScriptValue redoData) {
if (undoFunction.engine()) {
ScriptUndoCommand* undoCommand = new ScriptUndoCommand(undoFunction, undoData, redoFunction, redoData);
undoCommand->moveToThread(undoFunction.engine()->thread());
_undoStack->push(undoCommand);
}
}
ScriptUndoCommand::ScriptUndoCommand(QScriptValue undoFunction, QScriptValue undoData,
QScriptValue redoFunction, QScriptValue redoData) :
_undoFunction(undoFunction),
_undoData(undoData),
_redoFunction(redoFunction),
_redoData(redoData) {
}
void ScriptUndoCommand::undo() {
QMetaObject::invokeMethod(this, "doUndo");
}
void ScriptUndoCommand::redo() {
QMetaObject::invokeMethod(this, "doRedo");
}
void ScriptUndoCommand::doUndo() {
QScriptValueList args;
args << _undoData;
_undoFunction.call(QScriptValue(), args);
}
void ScriptUndoCommand::doRedo() {
QScriptValueList args;
args << _redoData;
_redoFunction.call(QScriptValue(), args);
}

View file

@ -0,0 +1,52 @@
//
// UndoStackScriptingInterface.h
// libraries/script-engine/src
//
// Created by Ryan Huffman on 10/22/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_UndoStackScriptingInterface_h
#define hifi_UndoStackScriptingInterface_h
#include <QUndoCommand>
#include <QUndoStack>
#include <QScriptValue>
class UndoStackScriptingInterface : public QObject {
Q_OBJECT
public:
UndoStackScriptingInterface(QUndoStack* undoStack);
public slots:
void pushCommand(QScriptValue undoFunction, QScriptValue undoData, QScriptValue redoFunction, QScriptValue redoData);
private:
QUndoStack* _undoStack;
};
class ScriptUndoCommand : public QObject, public QUndoCommand {
Q_OBJECT
public:
ScriptUndoCommand(QScriptValue undoFunction, QScriptValue undoData, QScriptValue redoFunction, QScriptValue redoData);
virtual void undo();
virtual void redo();
virtual bool mergeWith(const QUndoCommand* command) { return false; }
virtual int id() const { return -1; }
public slots:
void doUndo();
void doRedo();
private:
QScriptValue _undoFunction;
QScriptValue _undoData;
QScriptValue _redoFunction;
QScriptValue _redoData;
};
#endif // hifi_UndoStackScriptingInterface_h
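A hedged C++ usage sketch; the script functions are trivial placeholders and exampleUndoRedo is a hypothetical helper, assuming a Qt application is already running:

    void exampleUndoRedo() {
        QUndoStack undoStack;
        UndoStackScriptingInterface undoInterface(&undoStack);

        QScriptEngine engine;
        QScriptValue undoFn = engine.evaluate("(function(data) { /* restore prior state from data */ })");
        QScriptValue redoFn = engine.evaluate("(function(data) { /* reapply the change from data */ })");

        // QUndoStack::push() immediately calls redo() on the newly pushed command
        undoInterface.pushCommand(undoFn, QScriptValue(1), redoFn, QScriptValue(1));
        undoStack.undo();   // ScriptUndoCommand::undo() -> doUndo() -> the script undo function
    }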

View file

@ -603,31 +603,27 @@ int RandomVisitor::visit(MetavoxelInfo& info) {
class TestSendRecord : public PacketRecord {
public:
TestSendRecord(const MetavoxelLOD& lod = MetavoxelLOD(), const MetavoxelData& data = MetavoxelData(),
const SharedObjectPointer& localState = SharedObjectPointer(), int packetNumber = 0);
TestSendRecord(int packetNumber = 0, const MetavoxelLOD& lod = MetavoxelLOD(), const MetavoxelData& data = MetavoxelData(),
const SharedObjectPointer& localState = SharedObjectPointer());
const SharedObjectPointer& getLocalState() const { return _localState; }
int getPacketNumber() const { return _packetNumber; }
private:
SharedObjectPointer _localState;
int _packetNumber;
};
TestSendRecord::TestSendRecord(const MetavoxelLOD& lod, const MetavoxelData& data,
const SharedObjectPointer& localState, int packetNumber) :
PacketRecord(lod, data),
_localState(localState),
_packetNumber(packetNumber) {
TestSendRecord::TestSendRecord(int packetNumber, const MetavoxelLOD& lod, const MetavoxelData& data,
const SharedObjectPointer& localState) :
PacketRecord(packetNumber, lod, data),
_localState(localState) {
}
class TestReceiveRecord : public PacketRecord {
public:
TestReceiveRecord(const MetavoxelLOD& lod = MetavoxelLOD(), const MetavoxelData& data = MetavoxelData(),
const SharedObjectPointer& remoteState = SharedObjectPointer());
TestReceiveRecord(int packetNumber = 0, const MetavoxelLOD& lod = MetavoxelLOD(),
const MetavoxelData& data = MetavoxelData(), const SharedObjectPointer& remoteState = SharedObjectPointer());
const SharedObjectPointer& getRemoteState() const { return _remoteState; }
@ -636,9 +632,9 @@ private:
SharedObjectPointer _remoteState;
};
TestReceiveRecord::TestReceiveRecord(const MetavoxelLOD& lod,
TestReceiveRecord::TestReceiveRecord(int packetNumber, const MetavoxelLOD& lod,
const MetavoxelData& data, const SharedObjectPointer& remoteState) :
PacketRecord(lod, data),
PacketRecord(packetNumber, lod, data),
_remoteState(remoteState) {
}
@ -1110,14 +1106,14 @@ void TestEndpoint::handleMessage(const QVariant& message, Bitstream& in) {
PacketRecord* TestEndpoint::maybeCreateSendRecord() const {
if (_reliableDeltaChannel) {
return new TestSendRecord(_reliableDeltaLOD, _reliableDeltaData, _localState, _sequencer.getOutgoingPacketNumber());
return new TestSendRecord(_sequencer.getOutgoingPacketNumber(), _reliableDeltaLOD, _reliableDeltaData, _localState);
}
return new TestSendRecord(_lod, (_mode == METAVOXEL_CLIENT_MODE) ? MetavoxelData() : _data,
_localState, _sequencer.getOutgoingPacketNumber());
return new TestSendRecord(_sequencer.getOutgoingPacketNumber(), _lod,
(_mode == METAVOXEL_CLIENT_MODE) ? MetavoxelData() : _data, _localState);
}
PacketRecord* TestEndpoint::maybeCreateReceiveRecord() const {
return new TestReceiveRecord(_remoteDataLOD, _remoteData, _remoteState);
return new TestReceiveRecord(_sequencer.getIncomingPacketNumber(), _remoteDataLOD, _remoteData, _remoteState);
}
void TestEndpoint::handleHighPriorityMessage(const QVariant& message) {

View file

@ -62,7 +62,7 @@ void EntityTests::entityTreeTests(bool verbose) {
properties.setPosition(positionAtCenterInMeters);
// TODO: Fix these unit tests.
//properties.setRadius(halfMeter);
//properties.setModelURL("https://s3.amazonaws.com/hifi-public/ozan/theater.fbx");
//properties.setModelURL("http://s3.amazonaws.com/hifi-public/ozan/theater.fbx");
tree.addEntity(entityID, properties);
@ -269,7 +269,7 @@ void EntityTests::entityTreeTests(bool verbose) {
// TODO: fix these unit tests
//properties.setRadius(halfMeter);
//properties.setModelURL("https://s3.amazonaws.com/hifi-public/ozan/theater.fbx");
//properties.setModelURL("http://s3.amazonaws.com/hifi-public/ozan/theater.fbx");
if (extraVerbose) {
qDebug() << "iteration:" << i