Merge branch 'master' of https://github.com/highfidelity/hifi into fadeInWhenOpeningNoiseGate

Conflicts:
	libraries/audio-client/src/AudioNoiseGate.cpp
Author: Brad Hefta-Gaub
Date:   2017-03-10 11:43:30 -08:00
Commit: a4eac3cbc0

21 changed files with 71 additions and 1320 deletions

View file

@@ -241,6 +241,7 @@ void AudioMixer::sendStatsPacket() {
     statsObject["avg_streams_per_frame"] = (float)_stats.sumStreams / (float)_numStatFrames;
     statsObject["avg_listeners_per_frame"] = (float)_stats.sumListeners / (float)_numStatFrames;
+    statsObject["avg_listeners_(silent)_per_frame"] = (float)_stats.sumListenersSilent / (float)_numStatFrames;
     statsObject["silent_packets_per_frame"] = (float)_numSilentPackets / (float)_numStatFrames;

View file

@@ -106,6 +106,7 @@ void AudioMixerSlave::mix(const SharedNodePointer& node) {
         sendMixPacket(node, *data, encodedBuffer);
     } else {
+        ++stats.sumListenersSilent;
         sendSilentPacket(node, *data);
     }
@@ -221,17 +222,19 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
     stats.mixTime += mixTime.count();
 #endif

-    // use the per listener AudioLimiter to render the mixed data...
-    listenerData->audioLimiter.render(_mixSamples, _bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
-    // check for silent audio after the peak limiter has converted the samples
+    // check for silent audio before limiting
+    // limiting uses a dither and can only guarantee abs(sample) <= 1
     bool hasAudio = false;
     for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; ++i) {
-        if (_bufferSamples[i] != 0) {
+        if (_mixSamples[i] != 0.0f) {
             hasAudio = true;
             break;
         }
     }
+    // use the per listener AudioLimiter to render the mixed data
+    listenerData->audioLimiter.render(_mixSamples, _bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
     return hasAudio;
 }
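
The reordering in prepareMix() is the substance of this hunk: AudioLimiter::render() applies dither while converting the float mix down to int16_t, so a mathematically silent mix leaves the limiter as small nonzero samples and would never test as silent afterwards. The check therefore has to run on the float accumulation buffer before rendering. A minimal sketch of that pre-limiter test (function and parameter names are illustrative, not the hifi API):

    // Detect silence on the float mix *before* a dithering limiter runs;
    // after the limiter, dither noise makes every frame look non-silent.
    static bool mixHasAudio(const float* mixSamples, int numSamples) {
        for (int i = 0; i < numSamples; ++i) {
            if (mixSamples[i] != 0.0f) {
                return true;    // any nonzero sample: the mix is audible
            }
        }
        return false;           // all samples exactly 0.0f: a truly silent mix
    }

The boolean feeds the caller's choice between sendMixPacket() and sendSilentPacket(), which is also where the new sumListenersSilent counter from the first hunk is incremented.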

View file

@@ -14,6 +14,7 @@
 void AudioMixerStats::reset() {
     sumStreams = 0;
     sumListeners = 0;
+    sumListenersSilent = 0;
     totalMixes = 0;
     hrtfRenders = 0;
     hrtfSilentRenders = 0;
@@ -28,6 +29,7 @@ void AudioMixerStats::reset() {
 void AudioMixerStats::accumulate(const AudioMixerStats& otherStats) {
     sumStreams += otherStats.sumStreams;
     sumListeners += otherStats.sumListeners;
+    sumListenersSilent += otherStats.sumListenersSilent;
     totalMixes += otherStats.totalMixes;
     hrtfRenders += otherStats.hrtfRenders;
     hrtfSilentRenders += otherStats.hrtfSilentRenders;

View file

@@ -19,6 +19,7 @@
 struct AudioMixerStats {
     int sumStreams { 0 };
     int sumListeners { 0 };
+    int sumListenersSilent { 0 };
     int totalMixes { 0 };
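
Taken together, the four audio-mixer hunks wire the new counter through the whole stats pipeline: each slave increments sumListenersSilent when it sends a silent packet, per-slave stats are merged via accumulate(), reset() clears them between reporting periods, and sendStatsPacket() divides by the frame count to report a per-frame average. A condensed sketch of that lifecycle, collapsed into one file for illustration (the real code spans the four files above):

    #include <iostream>

    // Condensed illustration of the stats lifecycle from the hunks above.
    struct Stats {
        int sumListeners { 0 };
        int sumListenersSilent { 0 };
        void reset() { sumListeners = 0; sumListenersSilent = 0; }
        void accumulate(const Stats& other) {
            sumListeners += other.sumListeners;
            sumListenersSilent += other.sumListenersSilent;
        }
    };

    int main() {
        Stats slaveA, slaveB, total;
        slaveA.sumListeners = 4; slaveA.sumListenersSilent = 1;  // counted per mix
        slaveB.sumListeners = 3; slaveB.sumListenersSilent = 2;
        total.accumulate(slaveA);
        total.accumulate(slaveB);
        int numStatFrames = 7;  // frames since the last stats packet
        std::cout << "avg_listeners_(silent)_per_frame: "
                  << (float)total.sumListenersSilent / (float)numStatFrames << "\n";
        return 0;
    }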

View file

@@ -81,6 +81,10 @@ void HMDScriptingInterface::closeTablet() {
     _showTablet = false;
 }

+void HMDScriptingInterface::openTablet() {
+    _showTablet = true;
+}
+
 QScriptValue HMDScriptingInterface::getHUDLookAtPosition2D(QScriptContext* context, QScriptEngine* engine) {
     glm::vec3 hudIntersection;
     auto instance = DependencyManager::get<HMDScriptingInterface>();

View file

@@ -76,6 +76,8 @@ public:
     Q_INVOKABLE void closeTablet();
+    Q_INVOKABLE void openTablet();
+
 signals:
     bool shouldShowHandControllersChanged();

View file

@@ -258,8 +258,3 @@ QVariant Line3DOverlay::getProperty(const QString& property) {
 Line3DOverlay* Line3DOverlay::createClone() const {
     return new Line3DOverlay(this);
 }
-
-void Line3DOverlay::locationChanged(bool tellPhysics) {
-    // do nothing
-}
-

View file

@@ -48,8 +48,6 @@ public:
     virtual Line3DOverlay* createClone() const override;

-    virtual void locationChanged(bool tellPhysics = true) override;
-
     glm::vec3 getDirection() const { return _direction; }
     float getLength() const { return _length; }
     glm::vec3 getLocalStart() const { return getLocalPosition(); }

View file

@@ -77,8 +77,6 @@ void AudioNoiseGate::gateSamples(int16_t* samples, int numSamples) {
     // More means better rejection but also can reject continuous things like singing.
     // NUMBER_OF_NOISE_SAMPLE_FRAMES: How often should we re-evaluate the noise floor?

-    _closedInLastFrame = false;
-
     float loudness = 0;
     int thisSample = 0;
     int samplesOverNoiseGate = 0;
@@ -151,9 +149,7 @@ void AudioNoiseGate::gateSamples(int16_t* samples, int numSamples) {
         _framesToClose = NOISE_GATE_CLOSE_FRAME_DELAY;
     } else {
         if (--_framesToClose == 0) {
-            if (_isOpen) {
-                _closedInLastFrame = true;
-            }
+            _closedInLastFrame = !_isOpen;
             _isOpen = false;
         }
     }
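
With the per-block reset removed, _closedInLastFrame is now written only on the frame where the close countdown actually expires, so a consumer can react to the gate transition exactly once. Given the branch name (fadeInWhenOpeningNoiseGate), the likely consumer is a fade; a hedged sketch of what reacting to the latched flag might look like (the function and the linear ramp are assumptions for illustration, not code from this commit):

    #include <cstdint>

    // Hypothetical consumer: when the gate closed during this block, ramp
    // the block down to silence instead of cutting abruptly.
    void fadeBlockOnClose(int16_t* samples, int numSamples, bool closedInLastFrame) {
        if (closedInLastFrame && numSamples > 0) {
            for (int i = 0; i < numSamples; ++i) {
                float gain = 1.0f - (float)i / (float)numSamples;  // linear 1.0 -> ~0.0
                samples[i] = (int16_t)(samples[i] * gain);
            }
        }
    }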

View file

@@ -136,9 +136,9 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
             break;
         }
         case SequenceNumberStats::Early: {
-            // Packet is early treat the packets as if all the packets between the last
-            // OnTime packet and this packet was lost. If we're using a codec this will
-            // also result in allowing the codec to flush its internal state. Then
+            // Packet is early. Treat the packets as if all the packets between the last
+            // OnTime packet and this packet were lost. If we're using a codec this will
+            // also result in allowing the codec to interpolate lost data. Then
             // fall through to the "on time" logic to actually handle this packet
             int packetsDropped = arrivalInfo._seqDiffFromExpected;
             lostAudioData(packetsDropped);
@@ -150,8 +150,6 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
             if (message.getType() == PacketType::SilentAudioFrame) {
                 // If we recieved a SilentAudioFrame from our sender, we might want to drop
                 // some of the samples in order to catch up to our desired jitter buffer size.
-                // NOTE: If we're using a codec we will be calling the codec's lostFrame()
-                // method to allow the codec to flush its internal state.
                 writeDroppableSilentFrames(networkFrames);
             } else {
                 // note: PCM and no codec are identical
@@ -163,9 +161,9 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
             } else {
                 qDebug(audio) << "Codec mismatch: expected" << _selectedCodecName << "got" << codecInPacket << "writing silence";
-                // Since the data in the stream is using a codec that we're not prepared for,
-                // we need to let the codec know that we don't have data for it, this will
-                // flush any internal codec state and produce fade to silence.
+                // Since the data in the stream is using a codec that we aren't prepared for,
+                // we need to let the codec know that we don't have data for it, this will
+                // allow the codec to interpolate missing data and produce a fade to silence.
                 lostAudioData(1);
                 // inform others of the mismatch
@@ -249,17 +247,21 @@ int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packet
 int InboundAudioStream::writeDroppableSilentFrames(int silentFrames) {
-    // if we have a decoder, we still want to tell the decoder about our
-    // lost frame. this will flush the internal state of the decoder
-    // we can safely ignore the output of the codec in this case, because
-    // we've enforced that on the sending side, the encoder ran at least
-    // one frame of truly silent audio before we sent the "droppable" silent
-    // frame. Technically we could leave this out, if we know for certain
-    // that the sender has really sent us an encoded packet of zeros, but
-    // since we can trust all encoders to always encode at least one silent
-    // frame (open source, someone code modify it), we will go ahead and
-    // tell our decoder about the lost frame.
+    // We can't guarentee that all clients have faded the stream down
+    // to silence and encoded that silence before sending us a
+    // SilentAudioFrame. If the encoder has truncated the stream it will
+    // leave the decoder holding some unknown loud state. To handle this
+    // case we will call the decoder's lostFrame() method, which indicates
+    // that it should interpolate from its last known state down toward
+    // silence.
     if (_decoder) {
+        // FIXME - We could potentially use the output from the codec, in which
+        // case we might get a cleaner fade toward silence. NOTE: The below logic
+        // attempts to catch up in the event that the jitter buffers have grown.
+        // The better long term fix is to use the output from the decode, detect
+        // when it actually reaches silence, and then delete the silent portions
+        // of the jitter buffers. Or petentially do a cross fade from the decode
+        // output to silence.
        QByteArray decodedBuffer;
        _decoder->lostFrame(decodedBuffer);
     }
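
All three paths above (early packets, droppable silent frames, and codec mismatch) now funnel missing data through the decoder's lostFrame() call, which asks the codec to synthesize concealment output that decays from its last internal state toward silence. A rough sketch of that contract, assuming a simplified interface (hifi's actual Decoder plugin API may differ):

    #include <QByteArray>

    // Simplified decoder contract: lostFrame() performs packet-loss
    // concealment, updating internal state and producing output that
    // fades toward silence. Interface is illustrative.
    class Decoder {
    public:
        virtual ~Decoder() = default;
        virtual void decode(const QByteArray& encoded, QByteArray& decoded) = 0;
        virtual void lostFrame(QByteArray& decoded) = 0;
    };

    // Hypothetical helper echoing writeDroppableSilentFrames(): the decoder
    // is told about each lost frame, and the concealment output is discarded
    // because silent frames are written to the ring buffer instead.
    void concealLostFrames(Decoder* decoder, int framesLost) {
        for (int i = 0; i < framesLost; ++i) {
            QByteArray decodedBuffer;
            decoder->lostFrame(decodedBuffer);  // state update only; output unused
        }
    }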

View file

@@ -72,6 +72,9 @@ tablet.screenChanged.connect(onScreenChanged);
 AudioDevice.muteToggled.connect(onMuteToggled);

 Script.scriptEnding.connect(function () {
+    if (onAudioScreen) {
+        tablet.gotoHomeScreen();
+    }
     button.clicked.disconnect(onClicked);
     tablet.screenChanged.disconnect(onScreenChanged);
     AudioDevice.muteToggled.disconnect(onMuteToggled);

View file

@@ -18,13 +18,14 @@ var button;
 var buttonName = "GOTO";
 var toolBar = null;
 var tablet = null;
+var onGotoScreen = false;

 function onAddressBarShown(visible) {
     button.editProperties({isActive: visible});
 }

 function onClicked(){
     DialogsManager.toggleAddressBar();
+    onGotoScreen = !onGotoScreen;
 }

 if (Settings.getValue("HUDUIEnabled")) {
@@ -49,6 +50,9 @@ button.clicked.connect(onClicked);
 DialogsManager.addressBarShown.connect(onAddressBarShown);

 Script.scriptEnding.connect(function () {
+    if (onGotoScreen) {
+        DialogsManager.toggleAddressBar();
+    }
     button.clicked.disconnect(onClicked);
     if (tablet) {
         tablet.removeButton(button);

View file

@@ -48,6 +48,9 @@
     }, POLL_RATE);

 Script.scriptEnding.connect(function () {
+    if (enabled) {
+        Menu.closeInfoView('InfoView_html/help.html');
+    }
     button.clicked.disconnect(onClicked);
     Script.clearInterval(interval);
     if (tablet) {

View file

@@ -45,6 +45,7 @@ var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
 var desktopOnlyViews = ['Mirror', 'Independent Mode', 'Entity Mode'];

 function onHmdChanged(isHmd) {
+    HMD.closeTablet();
     if (isHmd) {
         button.editProperties({
             icon: "icons/tablet-icons/switch-desk-i.svg",

View file

@@ -121,6 +121,7 @@ function onClick() {
     if (onMarketplaceScreen) {
         // for toolbar-mode: go back to home screen, this will close the window.
         tablet.gotoHomeScreen();
+        onMarketplaceScreen = false;
     } else {
         var entity = HMD.tabletID;
         Entities.editEntity(entity, {textures: JSON.stringify({"tex.close": HOME_BUTTON_TEXTURE})});
@@ -140,6 +141,9 @@ tablet.screenChanged.connect(onScreenChanged);
 Entities.canWriteAssetsChanged.connect(onCanWriteAssetsChanged);

 Script.scriptEnding.connect(function () {
+    if (onMarketplaceScreen) {
+        tablet.gotoHomeScreen();
+    }
     tablet.removeButton(marketplaceButton);
     tablet.screenChanged.disconnect(onScreenChanged);
     Entities.canWriteAssetsChanged.disconnect(onCanWriteAssetsChanged);

View file

@@ -48,6 +48,9 @@ var HOME_BUTTON_TEXTURE = "http://hifi-content.s3.amazonaws.com/alan/dev/tablet-
 tablet.screenChanged.connect(onScreenChanged);

 Script.scriptEnding.connect(function () {
+    if (onMenuScreen) {
+        tablet.gotoHomeScreen();
+    }
     button.clicked.disconnect(onClicked);
     tablet.removeButton(button);
     tablet.screenChanged.disconnect(onScreenChanged);

View file

@@ -696,6 +696,9 @@ function clearLocalQMLDataAndClosePAL() {
 }

 function shutdown() {
+    if (onPalScreen) {
+        tablet.gotoHomeScreen();
+    }
     button.clicked.disconnect(onTabletButtonClicked);
     tablet.removeButton(button);
     tablet.screenChanged.disconnect(onTabletScreenChanged);

View file

@@ -191,12 +191,12 @@ function resetButtons(pathStillSnapshot, pathAnimatedSnapshot, notify) {
     if (clearOverlayWhenMoving) {
         MyAvatar.setClearOverlayWhenMoving(true); // not until after the share dialog
     }
+    HMD.openTablet();
 }

 function processingGif() {
     // show hud
     Reticle.visible = reticleVisible;
     button.clicked.disconnect(onClicked);
     buttonConnected = false;
     // show overlays if they were on
@@ -211,8 +211,10 @@ Window.snapshotShared.connect(snapshotShared);
 Window.processingGif.connect(processingGif);

 Script.scriptEnding.connect(function () {
-    button.clicked.disconnect(onClicked);
-    buttonConnected = false;
+    if (buttonConnected) {
+        button.clicked.disconnect(onClicked);
+        buttonConnected = false;
+    }
     if (tablet) {
         tablet.removeButton(button);
     }

View file

@@ -131,7 +131,9 @@
     }

 Script.scriptEnding.connect(function () {
-    Entities.deleteEntity(HMD.tabletID);
+    var tabletID = HMD.tabletID;
+    Entities.deleteEntity(tabletID);
+    Overlays.deleteOverlay(tabletID)
     HMD.tabletID = null;
     HMD.homeButtonID = null;
     HMD.tabletScreenID = null;

View file

@@ -115,6 +115,9 @@
 tablet.screenChanged.connect(onScreenChanged);

 function cleanup() {
+    if (onUsersScreen) {
+        tablet.gotoHomeScreen();
+    }
     button.clicked.disconnect(onClicked);
     tablet.removeButton(button);
 }

File diff suppressed because it is too large.