Merge branch 'master' of https://github.com/highfidelity/hifi into hdr

samcake 2016-11-23 10:35:32 -08:00
commit 9478f242d8
3 changed files with 49 additions and 33 deletions


@@ -375,41 +375,53 @@ QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
    return (mode == QAudio::AudioInput) ? QAudioDeviceInfo::defaultInputDevice() : QAudioDeviceInfo::defaultOutputDevice();
}
// attempt to use the native sample rate and channel count
bool nativeFormatForAudioDevice(const QAudioDeviceInfo& audioDevice,
                                QAudioFormat& audioFormat) {
    audioFormat = audioDevice.preferredFormat();
    audioFormat.setCodec("audio/pcm");
    audioFormat.setSampleSize(16);
    audioFormat.setSampleType(QAudioFormat::SignedInt);
    audioFormat.setByteOrder(QAudioFormat::LittleEndian);
    if (!audioDevice.isFormatSupported(audioFormat)) {
        qCDebug(audioclient) << "WARNING: The native format is" << audioFormat << "but isFormatSupported() failed.";
        return false;
    }
    // converting to/from this rate must produce an integral number of samples
    if (audioFormat.sampleRate() * AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL % AudioConstants::SAMPLE_RATE != 0) {
        qCDebug(audioclient) << "WARNING: The native sample rate [" << audioFormat.sampleRate() << "] is not supported.";
        return false;
    }
    return true;
}
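
The divisibility check above accepts a device rate only if one network frame resamples to a whole number of device samples. A minimal standalone sketch of that test; the 24000 Hz network rate and 240-sample frame length are assumed stand-ins for AudioConstants, not values taken from this diff:

// Sketch only: constants below are assumptions standing in for AudioConstants.
#include <iostream>

static const int NETWORK_SAMPLE_RATE = 24000;              // assumed network mixer rate
static const int NETWORK_FRAME_SAMPLES_PER_CHANNEL = 240;  // assumed 10 ms network frame

static bool producesIntegralFrames(int deviceRate) {
    // one network frame must convert to a whole number of device samples
    return (deviceRate * NETWORK_FRAME_SAMPLES_PER_CHANNEL) % NETWORK_SAMPLE_RATE == 0;
}

int main() {
    std::cout << producesIntegralFrames(48000) << "\n";   // 1 -> 480 device samples per frame
    std::cout << producesIntegralFrames(44100) << "\n";   // 1 -> 441 device samples per frame
    std::cout << producesIntegralFrames(11025) << "\n";   // 0 -> 110.25 samples, rejected
    return 0;
}
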
bool adjustedFormatForAudioDevice(const QAudioDeviceInfo& audioDevice,
                                  const QAudioFormat& desiredAudioFormat,
                                  QAudioFormat& adjustedAudioFormat) {
    qCDebug(audioclient) << "The desired format for audio I/O is" << desiredAudioFormat;
    adjustedAudioFormat = desiredAudioFormat;
#if defined(Q_OS_ANDROID) || defined(Q_OS_OSX)
    // As of Qt5.6, Android returns the native OpenSLES sample rate when possible, else 48000
    // Mac OSX returns the preferred CoreAudio format
    if (nativeFormatForAudioDevice(audioDevice, adjustedAudioFormat)) {
        return true;
    }
#endif
#if defined(Q_OS_WIN)
    // On Windows, using WASAPI shared mode, the sample rate and channel count must
    // exactly match the internal mix format. Any other format will fail to open.
    adjustedAudioFormat = audioDevice.preferredFormat(); // returns mixFormat
    adjustedAudioFormat.setCodec("audio/pcm");
    adjustedAudioFormat.setSampleSize(16);
    adjustedAudioFormat.setSampleType(QAudioFormat::SignedInt);
    adjustedAudioFormat.setByteOrder(QAudioFormat::LittleEndian);
    if (!audioDevice.isFormatSupported(adjustedAudioFormat)) {
        qCDebug(audioclient) << "WARNING: The mix format is" << adjustedAudioFormat << "but isFormatSupported() failed.";
        return false;
    if (IsWindows8OrGreater()) {
        // On Windows using WASAPI shared-mode, returns the internal mix format
        if (nativeFormatForAudioDevice(audioDevice, adjustedAudioFormat)) {
            return true;
        }
    }
    // converting to/from this rate must produce an integral number of samples
    if (adjustedAudioFormat.sampleRate() * AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL % AudioConstants::SAMPLE_RATE != 0) {
        qCDebug(audioclient) << "WARNING: The current sample rate [" << adjustedAudioFormat.sampleRate() << "] is not supported.";
        return false;
    }
    return true;
#endif
#elif defined(Q_OS_ANDROID)
    // FIXME: query the native sample rate of the device?
    adjustedAudioFormat.setSampleRate(48000);
#else
    adjustedAudioFormat = desiredAudioFormat;
    //
    // Attempt the device sample rate in decreasing order of preference.
@@ -433,7 +445,6 @@ bool adjustedFormatForAudioDevice(const QAudioDeviceInfo& audioDevice,
    } else if (audioDevice.supportedSampleRates().contains(176400)) {
        adjustedAudioFormat.setSampleRate(176400);
    }
#endif
    if (adjustedAudioFormat != desiredAudioFormat) {
        // return the nearest in case it needs 2 channels
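
For orientation, a hedged sketch of how a caller might drive adjustedFormatForAudioDevice; the helper name openAudioInput, the 24 kHz desired rate, and the mono channel count are assumptions for illustration, not taken from this commit:

// Illustrative caller only; values and the helper name are assumptions.
#include <QAudioDeviceInfo>
#include <QAudioInput>

static QAudioInput* openAudioInput() {
    QAudioDeviceInfo inputDevice = defaultAudioDeviceForMode(QAudio::AudioInput);

    QAudioFormat desiredFormat;
    desiredFormat.setSampleRate(24000);          // assumed network sample rate
    desiredFormat.setChannelCount(1);            // assumed mono capture
    desiredFormat.setCodec("audio/pcm");
    desiredFormat.setSampleSize(16);
    desiredFormat.setSampleType(QAudioFormat::SignedInt);
    desiredFormat.setByteOrder(QAudioFormat::LittleEndian);

    QAudioFormat adjustedFormat;
    if (!adjustedFormatForAudioDevice(inputDevice, desiredFormat, adjustedFormat)) {
        return nullptr;
    }
    // adjustedFormat may differ from desiredFormat in rate or channel count;
    // the client is expected to resample/remix between the two.
    return new QAudioInput(inputDevice, adjustedFormat);
}
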


@@ -46,7 +46,8 @@ _numVertices(0)
gpu::PipelinePointer RenderablePolyLineEntityItem::_pipeline;
gpu::Stream::FormatPointer RenderablePolyLineEntityItem::_format;
int32_t RenderablePolyLineEntityItem::PAINTSTROKE_GPU_SLOT;
const int32_t RenderablePolyLineEntityItem::PAINTSTROKE_TEXTURE_SLOT;
const int32_t RenderablePolyLineEntityItem::PAINTSTROKE_UNIFORM_SLOT;
void RenderablePolyLineEntityItem::createPipeline() {
    static const int NORMAL_OFFSET = 12;
@@ -62,8 +63,8 @@ void RenderablePolyLineEntityItem::createPipeline() {
    gpu::ShaderPointer program = gpu::Shader::createProgram(VS, PS);
    gpu::Shader::BindingSet slotBindings;
    PAINTSTROKE_GPU_SLOT = 0;
    slotBindings.insert(gpu::Shader::Binding(std::string("paintStrokeTextureBinding"), PAINTSTROKE_GPU_SLOT));
    slotBindings.insert(gpu::Shader::Binding(std::string("originalTexture"), PAINTSTROKE_TEXTURE_SLOT));
    slotBindings.insert(gpu::Shader::Binding(std::string("polyLineBuffer"), PAINTSTROKE_UNIFORM_SLOT));
    gpu::Shader::makeProgram(*program, slotBindings);
    gpu::StatePointer state = gpu::StatePointer(new gpu::State());
@@ -193,14 +194,14 @@ void RenderablePolyLineEntityItem::render(RenderArgs* args) {
    Transform transform = Transform();
    transform.setTranslation(getPosition());
    transform.setRotation(getRotation());
    batch.setUniformBuffer(0, _uniformBuffer);
    batch.setUniformBuffer(PAINTSTROKE_UNIFORM_SLOT, _uniformBuffer);
    batch.setModelTransform(transform);
    batch.setPipeline(_pipeline);
    if (_texture->isLoaded()) {
        batch.setResourceTexture(PAINTSTROKE_GPU_SLOT, _texture->getGPUTexture());
        batch.setResourceTexture(PAINTSTROKE_TEXTURE_SLOT, _texture->getGPUTexture());
    } else {
        batch.setResourceTexture(PAINTSTROKE_GPU_SLOT, args->_whiteTexture);
        batch.setResourceTexture(PAINTSTROKE_TEXTURE_SLOT, args->_whiteTexture);
    }
    batch.setInputFormat(_format);
@@ -208,6 +209,8 @@ void RenderablePolyLineEntityItem::render(RenderArgs* args) {
    if (_isFading) {
        batch._glColor4f(1.0f, 1.0f, 1.0f, Interpolate::calculateFadeRatio(_fadeStartTime));
    } else {
        batch._glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
    }
    batch.draw(gpu::TRIANGLE_STRIP, _numVertices, 0);
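
The change in this file splits the single PAINTSTROKE_GPU_SLOT into separate texture and uniform-buffer slots; since resource textures and uniform buffers are bound through different batch calls, the two slots live in different namespaces and can both legitimately be 0. A condensed sketch of the resulting pattern, with the slot names taken from the diff and the surrounding objects (program, batch, _uniformBuffer, _texture) assumed to exist as in the renderer above:

// Condensed illustration of the split bindings; not a complete renderer.
const int32_t PAINTSTROKE_TEXTURE_SLOT { 0 };   // resource-texture unit
const int32_t PAINTSTROKE_UNIFORM_SLOT { 0 };   // uniform-buffer slot (separate namespace)

gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("originalTexture"), PAINTSTROKE_TEXTURE_SLOT));
slotBindings.insert(gpu::Shader::Binding(std::string("polyLineBuffer"), PAINTSTROKE_UNIFORM_SLOT));
gpu::Shader::makeProgram(*program, slotBindings);

// At draw time each slot is addressed through its own batch entry point:
batch.setUniformBuffer(PAINTSTROKE_UNIFORM_SLOT, _uniformBuffer);
batch.setResourceTexture(PAINTSTROKE_TEXTURE_SLOT, _texture->getGPUTexture());
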


@@ -40,7 +40,9 @@ public:
    static gpu::PipelinePointer _pipeline;
    static gpu::Stream::FormatPointer _format;
    static int32_t PAINTSTROKE_GPU_SLOT;
    static const int32_t PAINTSTROKE_TEXTURE_SLOT { 0 };
    static const int32_t PAINTSTROKE_UNIFORM_SLOT { 0 };
protected:
    void updateGeometry();