Merge branch 'master' of https://github.com/highfidelity/hifi into team-teaching
commit 24c8e7d0e6
11 changed files with 340 additions and 296 deletions
examples/dialTone.js (new file, 23 lines)
@@ -0,0 +1,23 @@
//
// dialTone.js
// examples
//
// Created by Stephen Birarda on 06/08/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// setup the local sound we're going to use
var connectSound = SoundCache.getSound("file://" + Paths.resources + "sounds/short1.wav");

// setup the options needed for that sound
var connectSoundOptions = {
    localOnly: true
}

// play the sound locally once we get the first audio packet from a mixer
Audio.receivedFirstPacket.connect(function(){
    Audio.playSound(connectSound, connectSoundOptions);
});
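dialTone.js is the whole feature from a script's point of view: fetch a bundled sound through SoundCache, then play it with localOnly set whenever the new Audio.receivedFirstPacket signal fires. The sketch below is a variation on that pattern, not part of the commit; the volume option and the Script.scriptEnding cleanup hook are assumptions based on how other Interface example scripts are typically written.

// Sketch: same pattern as dialTone.js, with an explicit volume and a handler
// that is disconnected when the script stops. Not part of this commit.
var chime = SoundCache.getSound("file://" + Paths.resources + "sounds/short1.wav");

function playConnectChime() {
    // localOnly keeps the injector on this client instead of sending it to the mixer
    Audio.playSound(chime, { localOnly: true, volume: 0.5 });
}

Audio.receivedFirstPacket.connect(playConnectChime);

Script.scriptEnding.connect(function() {
    Audio.receivedFirstPacket.disconnect(playConnectChime);
});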
interface/resources/sounds/short1.wav (new binary file; contents not shown)
Application.cpp

@@ -293,6 +293,7 @@ bool setupEssentials(int& argc, char** argv) {
    auto discoverabilityManager = DependencyManager::set<DiscoverabilityManager>();
    auto sceneScriptingInterface = DependencyManager::set<SceneScriptingInterface>();
    auto offscreenUi = DependencyManager::set<OffscreenUi>();
    auto pathUtils = DependencyManager::set<PathUtils>();

    return true;
}

@@ -373,8 +374,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
    _bookmarks = new Bookmarks(); // Before setting up the menu

    _runningScriptsWidget = new RunningScriptsWidget(_window);

    _renderEngine->addTask(render::TaskPointer(new RenderDeferredTask()));
    _renderEngine->registerScene(_main3DScene);

@@ -420,6 +419,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
    connect(audioIO.data(), &AudioClient::destroyed, audioThread, &QThread::quit);
    connect(audioThread, &QThread::finished, audioThread, &QThread::deleteLater);
    connect(audioIO.data(), &AudioClient::muteToggled, this, &Application::audioMuteToggled);
    connect(audioIO.data(), &AudioClient::receivedFirstPacket,
            &AudioScriptingInterface::getInstance(), &AudioScriptingInterface::receivedFirstPacket);

    audioThread->start();

@@ -927,9 +928,7 @@ void Application::paintGL() {
            OculusManager::display(_glWidget, &renderArgs, _myAvatar->getWorldAlignedOrientation(), _myAvatar->getDefaultEyePosition(), _myCamera);
        }
    } else if (TV3DManager::isConnected()) {
        TV3DManager::display(&renderArgs, _myCamera);
    } else {
        DependencyManager::get<GlowEffect>()->prepare(&renderArgs);

@@ -2293,7 +2292,6 @@ void Application::updateMyAvatarLookAtPosition() {
    } else {
        AvatarSharedPointer lookingAt = _myAvatar->getLookAtTargetAvatar().lock();
        if (lookingAt && _myAvatar != lookingAt.get()) {
            isLookingAtSomeone = true;
            // If I am looking at someone else, look directly at one of their eyes
            if (tracker && !tracker->isMuted()) {

@@ -3121,6 +3119,7 @@ QImage Application::renderAvatarBillboard(RenderArgs* renderArgs) {
    // the "glow" here causes an alpha of one
    Glower glower(renderArgs);

    const int BILLBOARD_SIZE = 64;
    // TODO: Pass a RenderArgs to renderAvatarBillboard
    renderRearViewMirror(renderArgs, QRect(0, _glWidget->getDeviceHeight() - BILLBOARD_SIZE,

@@ -3382,16 +3381,14 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
        glTexGenfv(GL_R, GL_EYE_PLANE, (const GLfloat*)&_shadowMatrices[i][2]);
    }

-   // THe pending changes collecting the changes here
+   // The pending changes collecting the changes here
    render::PendingChanges pendingChanges;

    // Background rendering decision
    if (BackgroundRenderData::_item == 0) {
        auto backgroundRenderData = BackgroundRenderData::Pointer(new BackgroundRenderData(&_environment));
        auto backgroundRenderPayload = render::PayloadPointer(new BackgroundRenderData::Payload(backgroundRenderData));

        BackgroundRenderData::_item = _main3DScene->allocateID();

        pendingChanges.resetItem(WorldBoxRenderData::_item, backgroundRenderPayload);
    } else {

@@ -3448,7 +3445,6 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
        }
    }

    // Make sure the WorldBox is in the scene
    if (WorldBoxRenderData::_item == 0) {
        auto worldBoxRenderData = WorldBoxRenderData::Pointer(new WorldBoxRenderData());

@@ -3484,7 +3480,6 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
        PROFILE_RANGE("DeferredLighting");
        PerformanceTimer perfTimer("lighting");
        DependencyManager::get<DeferredLightingEffect>()->render();*/
    }

    {

@@ -3533,8 +3528,6 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
        sceneInterface->setEngineDrawnTransparentItems(engineRC->_numDrawnTransparentItems);
    }

    //Render the sixense lasers
    if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseLasers)) {
        _myAvatar->renderLaserPointers();

@@ -4081,6 +4074,8 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
    scriptEngine->registerGlobalObject("LODManager", DependencyManager::get<LODManager>().data());

    scriptEngine->registerGlobalObject("Paths", DependencyManager::get<PathUtils>().data());

    QScriptValue hmdInterface = scriptEngine->registerGlobalObject("HMD", &HMDScriptingInterface::getInstance());
    scriptEngine->registerFunction(hmdInterface, "getHUDLookAtPosition2D", HMDScriptingInterface::getHUDLookAtPosition2D, 0);
    scriptEngine->registerFunction(hmdInterface, "getHUDLookAtPosition3D", HMDScriptingInterface::getHUDLookAtPosition3D, 0);
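Two of the Application.cpp changes above are what scripts actually see: setupEssentials() now creates the PathUtils dependency that registerScriptEngineWithApplicationServices() registers as the Paths global, and AudioClient::receivedFirstPacket is forwarded to the AudioScriptingInterface singleton, which scripts reach through the Audio object. A minimal script-side check (a sketch, assuming print() is available as in other Interface scripts):

// Sketch: confirm both registrations from a running script.
print("interface resources folder: " + Paths.resources);

Audio.receivedFirstPacket.connect(function() {
    // reached via AudioClient -> AudioScriptingInterface -> the script's Audio object
    print("first audio packet received from the mixer");
});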
AudioClient.cpp

@@ -153,6 +153,7 @@ void AudioClient::reset() {
}

void AudioClient::audioMixerKilled() {
    _hasReceivedFirstPacket = false;
    _outgoingAvatarAudioSequenceNumber = 0;
    _stats.reset();
}

@@ -481,6 +482,7 @@ void AudioClient::start() {
        qCDebug(audioclient) << "Unable to set up audio input because of a problem with input format.";
        qCDebug(audioclient) << "The closest format available is" << inputDeviceInfo.nearestFormat(_desiredInputFormat);
    }

    if (!outputFormatSupported) {
        qCDebug(audioclient) << "Unable to set up audio output because of a problem with output format.";
        qCDebug(audioclient) << "The closest format available is" << outputDeviceInfo.nearestFormat(_desiredOutputFormat);

@@ -489,6 +491,7 @@ void AudioClient::start() {
    if (_audioInput) {
        _inputFrameBuffer.initialize( _inputFormat.channelCount(), _audioInput->bufferSize() * 8 );
    }

    _inputGain.initialize();
    _sourceGain.initialize();
    _noiseSource.initialize();

@@ -926,6 +929,14 @@ void AudioClient::addReceivedAudioToStream(const QByteArray& audioByteArray) {
    DependencyManager::get<NodeList>()->flagTimeForConnectionStep(LimitedNodeList::ConnectionStep::ReceiveFirstAudioPacket);

    if (_audioOutput) {

        if (!_hasReceivedFirstPacket) {
            _hasReceivedFirstPacket = true;

            // have the audio scripting interface emit a signal to say we just connected to mixer
            emit receivedFirstPacket();
        }

        // Audio output must exist and be correctly set up if we're going to process received audio
        _receivedAudioStream.parseData(audioByteArray);
    }
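The AudioClient changes gate the new signal behind a _hasReceivedFirstPacket latch that audioMixerKilled() clears, so receivedFirstPacket is emitted once per mixer connection rather than once per packet, and again if the mixer drops and comes back. A script can observe that behaviour with a simple counter (sketch; the variable name and log text are illustrative):

// Sketch: count mixer (re)connections as seen from a script.
var mixerConnections = 0;

Audio.receivedFirstPacket.connect(function() {
    mixerConnections++;
    print("audio mixer connection #" + mixerConnections + " this session");
});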
AudioClient.h

@@ -37,6 +37,7 @@
#include <MixedProcessedAudioStream.h>
#include <RingBufferHistory.h>
#include <SettingHandle.h>
#include <Sound.h>
#include <StDev.h>

#include "AudioIOStats.h"

@@ -184,6 +185,8 @@ signals:
    void deviceChanged();

    void receivedFirstPacket();

protected:
    AudioClient();
    ~AudioClient();

@@ -297,6 +300,8 @@ private:
    QVector<QString> _inputDevices;
    QVector<QString> _outputDevices;
    void checkDevices();

    bool _hasReceivedFirstPacket = false;
};
Sound.cpp

@@ -59,15 +59,19 @@ Sound::Sound(const QUrl& url, bool isStereo) :
void Sound::downloadFinished(QNetworkReply* reply) {
    // replace our byte array with the downloaded data
    QByteArray rawAudioByteArray = reply->readAll();
+   QString fileName = reply->url().fileName();

-   if (reply->hasRawHeader("Content-Type")) {
+   const QString WAV_EXTENSION = ".wav";
+
+   if (reply->hasRawHeader("Content-Type") || fileName.endsWith(WAV_EXTENSION)) {

        QByteArray headerContentType = reply->rawHeader("Content-Type");

        // WAV audio file encountered
        if (headerContentType == "audio/x-wav"
            || headerContentType == "audio/wav"
-           || headerContentType == "audio/wave") {
+           || headerContentType == "audio/wave"
+           || fileName.endsWith(WAV_EXTENSION)) {

            QByteArray outputAudioByteArray;
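The Sound change adds a filename fallback: a reply is decoded as WAV either when its Content-Type header says so or when the requested URL's filename ends in .wav, so sounds still load from sources that do not supply a recognised audio content type. The call site in a script does not change (sketch; the second URL is hypothetical):

// Both should decode as WAV: by Content-Type when one is sent, by the .wav filename otherwise.
var bundledTone = SoundCache.getSound("file://" + Paths.resources + "sounds/short1.wav");
var remoteTone = SoundCache.getSound("http://example.com/sounds/tone.wav"); // hypothetical URL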
AudioScriptingInterface.h

@@ -38,6 +38,7 @@ protected:
signals:
    void mutedByMixer();
    void environmentMuted();
    void receivedFirstPacket();

private:
    AudioScriptingInterface();
PathUtils.cpp

@@ -18,7 +18,7 @@
#include "PathUtils.h"


-QString& PathUtils::resourcesPath() {
+const QString& PathUtils::resourcesPath() {
#ifdef Q_OS_MAC
    static QString staticResourcePath = QCoreApplication::applicationDirPath() + "/../Resources/";
#else
PathUtils.h

@@ -12,12 +12,17 @@
#ifndef hifi_PathUtils_h
#define hifi_PathUtils_h

#include <QtCore/QObject>

#include <QString>
#include "DependencyManager.h"

-namespace PathUtils {
-    QString& resourcesPath();
-}
+class PathUtils : public QObject, public Dependency {
+    Q_OBJECT
+    SINGLETON_DEPENDENCY
+    Q_PROPERTY(QString resources READ resourcesPath)
+public:
+    static const QString& resourcesPath();
+};

QString fileNameWithoutExtension(const QString& fileName, const QVector<QString> possibleExtensions);
QString findMostRecentFileExtension(const QString& originalFileName, QVector<QString> possibleExtensions);
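PathUtils turns from a free function in a namespace into a QObject Dependency whose resourcesPath() is published through Q_PROPERTY(QString resources READ resourcesPath). Combined with the "Paths" registration in Application.cpp, that property is the read-only string dialTone.js concatenates into its file:// URL. A short sketch of the same idea, with a hypothetical asset name:

// Sketch: build a URL into the interface resources folder; the asset name is illustrative.
var logoUrl = "file://" + Paths.resources + "images/hifi-logo.svg";
print(logoUrl);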