added record wav files

This commit is contained in:
luiscuenca 2017-12-03 11:54:37 -07:00
parent ad02118588
commit 4030688e7a
9 changed files with 312 additions and 35 deletions

View file

@ -7,6 +7,7 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import "styles-uit"
@ -32,7 +33,7 @@ Item {
property var _triggered: false
property var _steps
property var _refreshMs: 10
property var _refreshMs: 32
property var _framesPerSecond: AudioScope.getFramesPerSecond()
property var _isFrameUnits: true
@ -46,9 +47,17 @@ Item {
property int y: 0
}
property var _timeBeforeHold: 100;
property var _pressedTime: 0;
property var _isPressed: false;
property var _timeBeforeHold: 300
property var _pressedTime: 0
property var _isPressed: false
property var _recOpacity : 0.0
property var _recSign : 0.05
property var _outputLeftState: false
property var _outputRightState: false
property var _wavFilePath: ""
function isHolding() {
return (_pressedTime > _timeBeforeHold);
@ -84,13 +93,29 @@ Item {
_triggerOutputRightData = AudioScope.triggerOutputRight;
}
}
// Set the blinking REC indicator (circle + text) to the given opacity.
function setRecordingLabelOpacity(opacity) {
    // Keep the cached value and both indicator items in sync.
    _recOpacity = opacity;
    recText.opacity = opacity;
    recCircle.opacity = opacity;
}
// Advance the blinking REC label by one animation step, bouncing
// the opacity back and forth between 0.0 and 1.0.
function updateRecordingLabel() {
    var next = _recOpacity + _recSign;
    if (next > 1.0) {
        next = 1.0;
        _recSign = -_recSign;
    } else if (next < 0.0) {
        next = 0.0;
        _recSign = -_recSign;
    }
    _recOpacity = next;
    setRecordingLabelOpacity(_recOpacity);
}
function pullFreshValues() {
if (!AudioScope.getPause()){
if (AudioScope.getTriggered()) {
_triggered = true;
collectTriggerData();
} else {
if (Audio.getRecording()) {
updateRecordingLabel();
}
if (!AudioScope.getPause()) {
if (!_triggered) {
collectScopeData();
}
}
@ -99,8 +124,49 @@ Item {
}
}
// Begin capturing the mixed audio output into a timestamped .wav file
// placed in the default controller-recording directory. On failure,
// notifies the user and reverts the recording UI.
function startRecording() {
_wavFilePath = (new Date()).toISOString(); // yyyy-mm-ddThh:mm:ss.sssZ
// Strip '-', ':' and the fractional-seconds/'Z' suffix, producing a
// name like "20171203-115437.wav".
_wavFilePath = _wavFilePath.replace(/[\-:]|\.\d*Z$/g, "").replace("T", "-") + ".wav";
// Using controller recording default directory
// NOTE(review): assumes the returned directory string ends with a path
// separator — confirm against Recording.getDefaultRecordingSaveDirectory().
_wavFilePath = Recording.getDefaultRecordingSaveDirectory() + _wavFilePath;
if (!Audio.startRecording(_wavFilePath)) {
Messages.sendMessage("Hifi-Notifications", JSON.stringify({message:"Error creating: "+_wavFilePath}));
updateRecordingUI(false);
}
}
// Stop the recording in progress: finalize the wav file, hide the
// blinking REC indicator, and tell the user where the file was saved.
function stopRecording() {
Audio.stopRecording();
setRecordingLabelOpacity(0.0); // hide the REC indicator immediately
Messages.sendMessage("Hifi-Notifications", JSON.stringify({message:"Saved: "+_wavFilePath}));
}
// Switch the scope controls between recording and idle appearance.
// While recording, both output-channel checkboxes are forced on; the
// user's previous selection is saved and restored when recording ends.
function updateRecordingUI(isRecording) {
    if (isRecording) {
        recordButton.text = "Stop";
        recordButton.color = hifi.buttons.red;
        // Remember the user's channel selection, then force both on.
        _outputLeftState = outputLeftCh.checked;
        _outputRightState = outputRightCh.checked;
        outputLeftCh.checked = true;
        outputRightCh.checked = true;
    } else {
        recordButton.text = "Record";
        recordButton.color = hifi.buttons.black;
        outputLeftCh.checked = _outputLeftState;
        outputRightCh.checked = _outputRightState;
    }
}
// Flip between recording and idle based on the current Audio recording
// state, updating the UI before starting/stopping the capture.
function toggleRecording() {
    var isRecording = Audio.getRecording();
    updateRecordingUI(!isRecording);
    if (isRecording) {
        stopRecording();
    } else {
        startRecording();
    }
}
Timer {
interval: _refreshMs; running: true; repeat: true
onTriggered: pullFreshValues()
@ -306,7 +372,7 @@ Item {
boxSize: 20
anchors.top: parent.top;
anchors.left: parent.left;
anchors.topMargin: 20;
anchors.topMargin: 8;
anchors.leftMargin: 20;
checked: AudioScope.getVisible();
onCheckedChanged: {
@ -333,6 +399,7 @@ Item {
AudioScope.setServerEcho(outputLeftCh.checked || outputRightCh.checked);
}
}
HifiControlsUit.Label {
text: "Channels";
anchors.horizontalCenter: outputLeftCh.horizontalCenter;
@ -346,9 +413,9 @@ Item {
text: "Input Mono"
anchors.bottom: outputLeftCh.bottom;
anchors.right: outputLeftCh.left;
anchors.rightMargin: 80;
checked: true;
anchors.rightMargin: 40;
onCheckedChanged: {
AudioScope.setLocalEcho(checked);
}
}
@ -358,21 +425,38 @@ Item {
text: "Output R"
anchors.bottom: outputLeftCh.bottom;
anchors.left: outputLeftCh.right;
anchors.leftMargin: 80;
anchors.leftMargin: 40;
onCheckedChanged: {
AudioScope.setServerEcho(outputLeftCh.checked || outputRightCh.checked);
}
}
// Record/Stop toggle button; its label and color are swapped by
// updateRecordingUI() while a recording is in progress.
HifiControlsUit.Button {
id: recordButton;
text: "Record";
color: hifi.buttons.black;
colorScheme: hifi.colorSchemes.dark;
anchors.right: parent.right;
anchors.bottom: parent.bottom;
anchors.rightMargin: 30;
anchors.bottomMargin: 8;
width: 95;
height: 55;
onClicked: {
toggleRecording();
}
}
HifiControlsUit.Button {
id: pauseButton;
color: hifi.buttons.black;
colorScheme: hifi.colorSchemes.dark;
anchors.right: parent.right;
anchors.right: recordButton.left;
anchors.bottom: parent.bottom;
anchors.rightMargin: 30;
anchors.bottomMargin: 8;
height: 26;
height: 55;
width: 95;
text: " Pause ";
onClicked: {
AudioScope.togglePause();
@ -391,8 +475,7 @@ Item {
fiveFrames.checked = false;
AudioScope.selectAudioScopeTwentyFrames();
_steps = 20;
_triggered = false;
AudioScope.setTriggered(false);
AudioScope.setPause(false);
}
}
}
@ -432,8 +515,7 @@ Item {
twentyFrames.checked = false;
AudioScope.selectAudioScopeFiveFrames();
_steps = 5;
_triggered = false;
AudioScope.setTriggered(false);
AudioScope.setPause(false);
}
}
}
@ -457,8 +539,7 @@ Item {
fiveFrames.checked = false;
AudioScope.selectAudioScopeFiftyFrames();
_steps = 50;
_triggered = false;
AudioScope.setTriggered(false);
AudioScope.setPause(false);
}
}
}
@ -480,9 +561,9 @@ Item {
labelTextOn: "On";
onCheckedChanged: {
if (!checked) AudioScope.setPause(false);
_triggered = false;
AudioScope.setTriggered(false);
AudioScope.setPause(false);
AudioScope.setAutoTrigger(checked);
AudioScope.setTriggerValues(_triggerValues.x, _triggerValues.y-root.height/2);
}
}
@ -493,21 +574,68 @@ Item {
anchors.bottom: triggerSwitch.top;
}
Rectangle {
id: recordIcon;
width:110;
height:40;
anchors.right: parent.right;
anchors.top: parent.top;
anchors.topMargin: 8;
color: "transparent"
Text {
id: recText
text: "REC"
color: "red"
font.pixelSize: 30;
anchors.left: recCircle.right;
anchors.leftMargin: 10;
opacity: _recOpacity;
y: -8;
}
Rectangle {
id: recCircle;
width: 25;
height: 25;
radius: width*0.5
opacity: _recOpacity;
color: "red";
}
}
// One-time setup: pull the current scope configuration from the
// AudioScope backend and enable the default controls.
Component.onCompleted: {
_steps = AudioScope.getFramesPerScope();
AudioScope.setTriggerValues(_triggerValues.x, _triggerValues.y-root.height/2);
activated.checked = true;
inputCh.checked = true;
updateMeasureUnits();
}
// Teardown: finalize any in-progress recording (so the wav header size
// fields get back-filled) and hide the scope.
Component.onDestruction: {
if (Audio.getRecording()) {
stopRecording();
}
AudioScope.setVisible(false);
}
Connections {
target: AudioScope
onPauseChanged: {
pauseButton.text = AudioScope.getPause() ? "Continue" : " Pause ";
if (!AudioScope.getPause()) {
pauseButton.text = "Pause";
pauseButton.color = hifi.buttons.black;
AudioScope.setTriggered(false);
_triggered = false;
} else {
pauseButton.text = "Continue";
pauseButton.color = hifi.buttons.blue;
}
}
onTriggered: {
_triggered = true;
collectTriggerData();
AudioScope.setPause(true);
}
}
}

View file

@ -78,6 +78,10 @@ void AudioScope::selectAudioScopeFiftyFrames() {
reallocateScope(50);
}
// Forward the local-echo toggle to the AudioClient singleton
// (sets its _shouldEchoLocally flag).
void AudioScope::setLocalEcho(bool localEcho) {
DependencyManager::get<AudioClient>()->setLocalEcho(localEcho);
}
// Forward the server-echo toggle to the AudioClient singleton.
void AudioScope::setServerEcho(bool serverEcho) {
DependencyManager::get<AudioClient>()->setServerEcho(serverEcho);
}
@ -191,6 +195,7 @@ void AudioScope::storeTriggerValues() {
_triggerOutputLeftData = _scopeOutputLeftData;
_triggerOutputRightData = _scopeOutputRightData;
_isTriggered = true;
emit triggered();
}
void AudioScope::computeInputData() {

View file

@ -71,10 +71,12 @@ public slots:
QVector<int> getTriggerOutputLeft() { return _triggerOutputLeftData; };
QVector<int> getTriggerOutputRight() { return _triggerOutputRightData; };
void setLocalEcho(bool serverEcho);
void setServerEcho(bool serverEcho);
signals:
void pauseChanged();
void triggered();
protected:
AudioScope();

View file

@ -58,6 +58,32 @@ Audio::Audio() : _devices(_contextIsHMD) {
enableNoiseReduction(enableNoiseReductionSetting.get());
}
// Slot invoked for each mixed output buffer emitted by AudioClient;
// while a recording is active the raw bytes are appended to the wav file.
// NOTE(review): parameter is passed by const value rather than const
// reference — a per-buffer copy; confirm whether this is intentional
// for cross-thread queued delivery.
void Audio::onOutputBufferReceived(const QByteArray outputBuffer) {
if (_isRecording) {
_audioFileWav.addRawAudioChunk((char*)outputBuffer.data(), outputBuffer.size());
}
}
// Start capturing the mixed audio output into a wav file at 'filepath'.
// Creates the file with the client's current output format, then hooks
// onOutputBufferReceived up to the output stream.
// Returns false (and records nothing) if the file cannot be created.
bool Audio::startRecording(const QString& filepath) {
auto client = DependencyManager::get<AudioClient>().data();
if (!_audioFileWav.create(client->getOutputFormat(), filepath)) {
qDebug() << "Error creating audio file: "+filepath;
return false;
}
connect(client, &AudioClient::outputBufferReceived, this, &Audio::onOutputBufferReceived);
_isRecording = true;
return true;
}
// Stop a recording in progress: detach from the output stream, then
// close the wav file (which back-fills its header size fields).
// Safe to call when no recording is active.
void Audio::stopRecording() {
auto client = DependencyManager::get<AudioClient>().data();
// Null member pointer disconnects every outputBufferReceived
// connection from 'client' to this object.
disconnect(client, &AudioClient::outputBufferReceived, this, 0);
if (_isRecording) {
_isRecording = false;
_audioFileWav.close();
}
}
void Audio::setMuted(bool isMuted) {
if (_isMuted != isMuted) {
auto client = DependencyManager::get<AudioClient>().data();

View file

@ -16,6 +16,7 @@
#include "AudioDevices.h"
#include "AudioEffectOptions.h"
#include "SettingHandle.h"
#include "AudioFileWav.h"
namespace scripting {
@ -55,6 +56,10 @@ public:
Q_INVOKABLE void setReverb(bool enable);
Q_INVOKABLE void setReverbOptions(const AudioEffectOptions* options);
Q_INVOKABLE bool startRecording(const QString& filename);
Q_INVOKABLE void stopRecording();
Q_INVOKABLE bool getRecording() { return _isRecording; };
signals:
void nop();
void mutedChanged(bool isMuted);
@ -71,6 +76,7 @@ private slots:
void onNoiseReductionChanged();
void onInputVolumeChanged(float volume);
void onInputLoudnessChanged(float loudness);
void onOutputBufferReceived(const QByteArray outputBuffer);
protected:
// Audio must live on a separate thread from AudioClient to avoid deadlocks
@ -83,9 +89,10 @@ private:
bool _isMuted { false };
bool _enableNoiseReduction { true }; // Match default value of AudioClient::_isNoiseGateEnabled.
bool _contextIsHMD { false };
bool _isRecording { false };
AudioDevices* getDevices() { return &_devices; }
AudioDevices _devices;
AudioFileWav _audioFileWav;
};
};

View file

@ -222,8 +222,7 @@ AudioClient::AudioClient() :
// initialize wasapi; if getAvailableDevices is called from the CheckDevicesThread before this, it will crash
getAvailableDevices(QAudio::AudioInput);
getAvailableDevices(QAudio::AudioOutput);
// start a thread to detect any device changes
_checkDevicesTimer = new QTimer(this);
connect(_checkDevicesTimer, &QTimer::timeout, [this] {
@ -1845,11 +1844,9 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
qCDebug(audiostream, "Read %d samples from buffer (%d available, %d requested)", networkSamplesPopped, _receivedAudioStream.getSamplesAvailable(), samplesRequested);
AudioRingBuffer::ConstIterator lastPopOutput = _receivedAudioStream.getLastPopOutput();
lastPopOutput.readSamples(scratchBuffer, networkSamplesPopped);
for (int i = 0; i < networkSamplesPopped; i++) {
mixBuffer[i] = convertToFloat(scratchBuffer[i]);
}
samplesRequested = networkSamplesPopped;
}
@ -1911,6 +1908,10 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
bytesWritten = maxSize;
}
// send output buffer for recording
QByteArray outputBuffer(reinterpret_cast<char*>(scratchBuffer), bytesWritten);
emit _audio->outputBufferReceived(outputBuffer);
int bytesAudioOutputUnplayed = _audio->_audioOutput->bufferSize() - _audio->_audioOutput->bytesFree();
float msecsAudioOutputUnplayed = bytesAudioOutputUnplayed / (float)_audio->_outputFormat.bytesForDuration(USECS_PER_MSEC);
_audio->_stats.updateOutputMsUnplayed(msecsAudioOutputUnplayed);

View file

@ -47,11 +47,13 @@
#include <AudioConstants.h>
#include <AudioGate.h>
#include <shared/RateCounter.h>
#include <plugins/CodecPlugin.h>
#include "AudioIOStats.h"
#include "AudioFileWav.h"
#ifdef _WIN32
#pragma warning( push )
@ -67,7 +69,6 @@ class QAudioInput;
class QAudioOutput;
class QIODevice;
class Transform;
class NLPacket;
@ -118,6 +119,8 @@ public:
const MixedProcessedAudioStream& getReceivedAudioStream() const { return _receivedAudioStream; }
MixedProcessedAudioStream& getReceivedAudioStream() { return _receivedAudioStream; }
const QAudioFormat& getOutputFormat() const { return _outputFormat; }
float getLastInputLoudness() const { return _lastInputLoudness; } // TODO: relative to noise floor?
float getTimeSinceLastClip() const { return _timeSinceLastClip; }
@ -142,7 +145,7 @@ public:
void setIsPlayingBackRecording(bool isPlayingBackRecording) { _isPlayingBackRecording = isPlayingBackRecording; }
Q_INVOKABLE void setAvatarBoundingBoxParameters(glm::vec3 corner, glm::vec3 scale);
bool outputLocalInjector(const AudioInjectorPointer& injector) override;
QAudioDeviceInfo getActiveAudioDevice(QAudio::Mode mode) const;
@ -184,12 +187,13 @@ public slots:
void toggleMute();
bool isMuted() { return _muted; }
virtual void setIsStereoInput(bool stereo) override;
void setNoiseReduction(bool isNoiseGateEnabled);
bool isNoiseReductionEnabled() const { return _isNoiseGateEnabled; }
bool getLocalEcho() { return _shouldEchoLocally; }
void setLocalEcho(bool localEcho) { _shouldEchoLocally = localEcho; }
void toggleLocalEcho() { _shouldEchoLocally = !_shouldEchoLocally; }
bool getServerEcho() { return _shouldEchoToServer; }
@ -242,6 +246,8 @@ signals:
void muteEnvironmentRequested(glm::vec3 position, float radius);
void outputBufferReceived(const QByteArray _outputBuffer);
protected:
AudioClient();
~AudioClient();
@ -357,9 +363,8 @@ private:
int16_t _localScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];
float* _localOutputMixBuffer { NULL };
Mutex _localAudioMutex;
AudioLimiter _audioLimiter;
// Adds Reverb
void configureReverb();
void updateReverbOptions();

View file

@ -0,0 +1,69 @@
//
// AudioFileWav.cpp
// libraries/audio-client/src
//
// Created by Luis Cuenca on 12/1/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AudioFileWav.h"
// Open (or re-open) 'filepath' for writing and emit a wav header
// describing 'audioFormat'. Returns false if the file cannot be opened.
bool AudioFileWav::create(const QAudioFormat& audioFormat, const QString& filepath) {
    // A previous recording may still be open; drop it first.
    if (_file.isOpen()) {
        _file.close();
    }
    _file.setFileName(filepath);
    bool opened = _file.open(QIODevice::WriteOnly);
    if (opened) {
        addHeader(audioFormat);
    }
    return opened;
}
// Append 'size' raw audio bytes to the open file.
// Returns false when no file is currently open.
bool AudioFileWav::addRawAudioChunk(char* chunk, int size) {
    if (!_file.isOpen()) {
        return false;
    }
    QDataStream stream(&_file);
    stream.writeRawData(chunk, size);
    return true;
}
// Finalize the wav file: back-fill the RIFF chunk size and the data
// chunk size (both written as 0 by addHeader), then close the file.
void AudioFileWav::close() {
    if (!_file.isOpen()) {
        // Nothing to finalize: create() failed or close() already ran.
        // Previously this wrote through a QDataStream bound to a closed
        // QFile, which fails silently.
        return;
    }
    QDataStream stream(&_file);
    stream.setByteOrder(QDataStream::LittleEndian);
    // fill RIFF and size data on header
    // RIFF chunk size = total file size minus the 8-byte "RIFF"+size prefix.
    _file.seek(4);
    stream << quint32(_file.size() - 8);
    // data chunk size = total file size minus the 44-byte header.
    _file.seek(40);
    stream << quint32(_file.size() - 44);
    _file.close();
}
// Write the canonical 44-byte RIFF/WAVE header for 'audioFormat'.
// The RIFF size and data size fields are written as 0 placeholders and
// patched by close() once the total length is known.
// NOTE(review): the format tag is hard-coded to 1 (integer PCM); a
// float sample format would be mislabeled — confirm the output format
// is always integer PCM.
void AudioFileWav::addHeader(const QAudioFormat& audioFormat) {
QDataStream stream(&_file);
stream.setByteOrder(QDataStream::LittleEndian); // wav headers are little-endian
// RIFF
stream.writeRawData("RIFF", 4);
stream << quint32(0); // placeholder: file size - 8, patched in close()
stream.writeRawData("WAVE", 4);
// Format description PCM = 16
stream.writeRawData("fmt ", 4);
stream << quint32(16); // fmt chunk payload size
stream << quint16(1); // format tag: 1 = integer PCM
stream << quint16(audioFormat.channelCount());
stream << quint32(audioFormat.sampleRate());
stream << quint32(audioFormat.sampleRate() * audioFormat.channelCount() * audioFormat.sampleSize() / 8); // bytes per second
stream << quint16(audioFormat.channelCount() * audioFormat.sampleSize() / 8); // block align
stream << quint16(audioFormat.sampleSize()); // bits Per Sample
// Init data chunk; size is a placeholder patched in close()
stream.writeRawData("data", 4);
stream << quint32(0);
}

View file

@ -0,0 +1,34 @@
//
// AudioFileWav.h
// libraries/audio-client/src
//
// Created by Luis Cuenca on 12/1/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AudioFileWav_h
#define hifi_AudioFileWav_h
#include <QObject>
#include <QFile>
#include <QDataStream>
#include <QVector>
#include <QAudioFormat>
// Minimal writer for canonical 44-byte-header wav files: create() opens
// the file and writes the header, addRawAudioChunk() appends raw audio
// bytes, and close() back-fills the size fields before closing.
class AudioFileWav : public QObject {
Q_OBJECT
public:
AudioFileWav() {}
// Open 'filepath' and write a header for 'audioFormat'; false on failure.
bool create(const QAudioFormat& audioFormat, const QString& filepath);
// Append raw audio bytes; false if no file is open.
bool addRawAudioChunk(char* chunk, int size);
// Patch the header size fields and close the file.
void close();
private:
// Write the 44-byte RIFF/WAVE header with zeroed size placeholders.
void addHeader(const QAudioFormat& audioFormat);
QFile _file;
};
#endif // hifi_AudioFileWav_h