Mirror of https://github.com/overte-org/overte.git
Rename the "Audio" context object exposed to QML to "AudioScriptingInterface" to avoid a naming conflict
Note: I'm not sure what exactly causes the naming conflict - importing QtMultimedia in ForceLoad.qml, or the mere existence of Audio.qml - but starting with Qt 5.10.0 'Audio' already exists in the global scope.
This commit is contained in:
parent 15fcf66d0e
commit bd3a056f4e
7 changed files with 41 additions and 36 deletions
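
For context, the registration pattern the commit moves to looks roughly like the sketch below. This is a minimal, illustrative stand-in, not the project's code: AudioStub takes the place of the real AudioScriptingInterface (which the diff obtains via DependencyManager::get<AudioScriptingInterface>()), and the inline QML exists only to show a binding reading the renamed context property.

    // main.cpp -- illustrative stand-in only; the real interface comes from
    // DependencyManager::get<AudioScriptingInterface>() as shown in the diff below.
    #include <QGuiApplication>
    #include <QQmlApplicationEngine>
    #include <QQmlContext>
    #include <QObject>

    class AudioStub : public QObject {
        Q_OBJECT
        Q_PROPERTY(bool muted READ muted WRITE setMuted NOTIFY mutedChanged)
    public:
        bool muted() const { return _muted; }
        void setMuted(bool muted) {
            if (_muted != muted) {
                _muted = muted;
                emit mutedChanged();
            }
        }
    signals:
        void mutedChanged();
    private:
        bool _muted { false };
    };

    int main(int argc, char* argv[]) {
        QGuiApplication app(argc, argv);
        AudioStub audio;

        QQmlApplicationEngine engine;
        // Expose the object under a name that cannot collide with a QML type.
        // With Qt 5.10+ an unqualified "Audio" can already resolve to the
        // QtMultimedia Audio element, shadowing a context property of that name.
        engine.rootContext()->setContextProperty("AudioScriptingInterface", &audio);

        // Tiny QML consumer, mirroring bindings like the ones updated in the diff.
        engine.loadData(
            "import QtQuick 2.5\n"
            "import QtQuick.Window 2.2\n"
            "Window {\n"
            "    visible: true\n"
            "    Text { text: AudioScriptingInterface.muted ? \"MUTED\" : \"LIVE\"; anchors.centerIn: parent }\n"
            "}\n");
        return app.exec();
    }

    #include "main.moc" // single-file example; builds with CMake AUTOMOC or qmake

Registering the object under a name that is not also a QML type keeps unqualified lookups unambiguous regardless of which modules a given QML file imports.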
@@ -110,7 +110,7 @@ Item {
     }

     function pullFreshValues() {
-        if (Audio.getRecording()) {
+        if (AudioScriptingInterface.getRecording()) {
             updateRecordingLabel();
         }


@@ -129,14 +129,14 @@ Item {
         _wavFilePath = _wavFilePath.replace(/[\-:]|\.\d*Z$/g, "").replace("T", "-") + ".wav";
         // Using controller recording default directory
         _wavFilePath = Recording.getDefaultRecordingSaveDirectory() + _wavFilePath;
-        if (!Audio.startRecording(_wavFilePath)) {
+        if (!AudioScriptingInterface.startRecording(_wavFilePath)) {
             Messages.sendMessage("Hifi-Notifications", JSON.stringify({message:"Error creating: "+_wavFilePath}));
             updateRecordingUI(false);
         }
     }

     function stopRecording() {
-        Audio.stopRecording();
+        AudioScriptingInterface.stopRecording();
         setRecordingLabelOpacity(0.0);
         Messages.sendMessage("Hifi-Notifications", JSON.stringify({message:"Saved: "+_wavFilePath}));
     }

@@ -158,7 +158,7 @@ Item {
     }

     function toggleRecording() {
-        if (Audio.getRecording()) {
+        if (AudioScriptingInterface.getRecording()) {
             updateRecordingUI(false);
             stopRecording();
         } else {
@@ -26,7 +26,7 @@ Rectangle {
     HifiConstants { id: hifi; }

     property var eventBridge;
-    property string title: "Audio Settings - " + Audio.context;
+    property string title: "Audio Settings - " + AudioScriptingInterface.context;
     signal sendToScript(var message);

     color: hifi.colors.baseGray;

@@ -37,7 +37,7 @@ Rectangle {
     }


-    property bool isVR: Audio.context === "VR"
+    property bool isVR: AudioScriptingInterface.context === "VR"
     property real rightMostInputLevelPos: 0
     //placeholder for control sizes and paddings
     //recalculates dynamically in case of UI size is changed

@@ -72,17 +72,17 @@ Rectangle {
     property bool showPeaks: true;

     function enablePeakValues() {
-        Audio.devices.input.peakValuesEnabled = true;
-        Audio.devices.input.peakValuesEnabledChanged.connect(function(enabled) {
+        AudioScriptingInterface.devices.input.peakValuesEnabled = true;
+        AudioScriptingInterface.devices.input.peakValuesEnabledChanged.connect(function(enabled) {
             if (!enabled && root.showPeaks) {
-                Audio.devices.input.peakValuesEnabled = true;
+                AudioScriptingInterface.devices.input.peakValuesEnabled = true;
             }
         });
     }

     function disablePeakValues() {
         root.showPeaks = false;
-        Audio.devices.input.peakValuesEnabled = false;
+        AudioScriptingInterface.devices.input.peakValuesEnabled = false;
     }

     Component.onCompleted: enablePeakValues();

@@ -117,10 +117,10 @@ Rectangle {
                 text: qsTr("Mute microphone");
                 spacing: margins.sizeCheckBox - boxSize
                 isRedCheck: true;
-                checked: Audio.muted;
+                checked: AudioScriptingInterface.muted;
                 onClicked: {
-                    Audio.muted = checked;
-                    checked = Qt.binding(function() { return Audio.muted; }); // restore binding
+                    AudioScriptingInterface.muted = checked;
+                    checked = Qt.binding(function() { return AudioScriptingInterface.muted; }); // restore binding
                 }
             }
         }

@@ -130,10 +130,10 @@ Rectangle {
             AudioControls.CheckBox {
                 spacing: muteMic.spacing
                 text: qsTr("Enable noise reduction");
-                checked: Audio.noiseReduction;
+                checked: AudioScriptingInterface.noiseReduction;
                 onClicked: {
-                    Audio.noiseReduction = checked;
-                    checked = Qt.binding(function() { return Audio.noiseReduction; }); // restore binding
+                    AudioScriptingInterface.noiseReduction = checked;
+                    checked = Qt.binding(function() { return AudioScriptingInterface.noiseReduction; }); // restore binding
                 }
             }
             AudioControls.CheckBox {

@@ -184,7 +184,7 @@ Rectangle {
         spacing: 4;
         snapMode: ListView.SnapToItem;
         clip: true;
-        model: Audio.devices.input;
+        model: AudioScriptingInterface.devices.input;
         delegate: Item {
             width: rightMostInputLevelPos
             height: margins.sizeCheckBox > checkBoxInput.implicitHeight ?

@@ -204,7 +204,7 @@ Rectangle {
                 text: devicename
                 onPressed: {
                     if (!checked) {
-                        Audio.setInputDevice(info, bar.currentIndex === 1);
+                        AudioScriptingInterface.setInputDevice(info, bar.currentIndex === 1);
                     }
                 }
             }

@@ -215,7 +215,7 @@ Rectangle {
                 anchors.verticalCenter: parent.verticalCenter
                 visible: ((bar.currentIndex === 1 && isVR) ||
                          (bar.currentIndex === 0 && !isVR)) &&
-                         Audio.devices.input.peakValuesAvailable;
+                         AudioScriptingInterface.devices.input.peakValuesAvailable;
             }
         }
     }

@@ -256,7 +256,7 @@ Rectangle {
         spacing: 4;
         snapMode: ListView.SnapToItem;
         clip: true;
-        model: Audio.devices.output;
+        model: AudioScriptingInterface.devices.output;
         delegate: Item {
             width: rightMostInputLevelPos
             height: margins.sizeCheckBox > checkBoxOutput.implicitHeight ?

@@ -273,7 +273,7 @@ Rectangle {
                 text: devicename
                 onPressed: {
                     if (!checked) {
-                        Audio.setOutputDevice(info, bar.currentIndex === 1);
+                        AudioScriptingInterface.setOutputDevice(info, bar.currentIndex === 1);
                     }
                 }
             }
@@ -40,7 +40,7 @@ Rectangle {
             verticalCenter: parent.verticalCenter;
         }

-        visible: Audio.muted;
+        visible: AudioScriptingInterface.muted;
         color: colors.muted;

         text: "MUTED";
@@ -17,7 +17,7 @@ import QtGraphicalEffects 1.0
 import TabletScriptingInterface 1.0

 Rectangle {
-    readonly property var level: Audio.inputLevel;
+    readonly property var level: AudioScriptingInterface.inputLevel;

     property bool standalone: false;
     property var dragTarget: null;

@@ -60,7 +60,7 @@ Rectangle {
         hoverEnabled: true;
         scrollGestureEnabled: false;
         onClicked: {
-            Audio.muted = !Audio.muted;
+            AudioScriptingInterface.muted = !AudioScriptingInterface.muted;
             Tablet.playSound(TabletEnums.ButtonClick);
         }
         drag.target: dragTarget;

@@ -82,7 +82,7 @@ Rectangle {
         readonly property string red: colors.muted;
         readonly property string fill: "#55000000";
         readonly property string border: standalone ? "#80FFFFFF" : "#55FFFFFF";
-        readonly property string icon: Audio.muted ? muted : unmuted;
+        readonly property string icon: AudioScriptingInterface.muted ? muted : unmuted;
     }

     Item {

@@ -103,7 +103,7 @@ Rectangle {
         readonly property string mutedIcon: "../../../icons/tablet-icons/mic-mute-i.svg";

         id: image;
-        source: Audio.muted ? mutedIcon : unmutedIcon;
+        source: AudioScriptingInterface.muted ? mutedIcon : unmutedIcon;

         width: 30;
         height: 30;

@@ -126,9 +126,9 @@ Rectangle {
     Item {
         id: status;

-        readonly property string color: Audio.muted ? colors.muted : colors.unmuted;
+        readonly property string color: AudioScriptingInterface.muted ? colors.muted : colors.unmuted;

-        visible: Audio.muted;
+        visible: AudioScriptingInterface.muted;

         anchors {
             left: parent.left;

@@ -147,7 +147,7 @@ Rectangle {

         color: parent.color;

-        text: Audio.muted ? "MUTED" : "MUTE";
+        text: AudioScriptingInterface.muted ? "MUTED" : "MUTE";
         font.pointSize: 12;
     }

@@ -27,9 +27,9 @@ RowLayout {
     }
     function playSound() {
         // FIXME: MyAvatar is not properly exposed to QML; MyAvatar.qmlPosition is a stopgap
-        // FIXME: Audio.playSystemSound should not require position
+        // FIXME: AudioScriptingInterface.playSystemSound should not require position
         if (sample === null && !isPlaying) {
-            sample = Audio.playSystemSound(sound, MyAvatar.qmlPosition);
+            sample = AudioScriptingInterface.playSystemSound(sound, MyAvatar.qmlPosition);
             isPlaying = true;
             sample.finished.connect(reset);
         }
@@ -2320,9 +2320,10 @@ void Application::initializeUi() {

     setupPreferences();

     // For some reason there is already an "Application" object in the QML context,
     // though I can't find it. Hence, "ApplicationInterface"
-    surfaceContext->setContextProperty("Audio", DependencyManager::get<AudioScriptingInterface>().data());
+    // in Qt 5.10.0 there is already an "Audio" object in the QML context
+    // though I failed to find it (from QtMultimedia??). So.. let it be "AudioScriptingInterface"
+    surfaceContext->setContextProperty("AudioScriptingInterface", DependencyManager::get<AudioScriptingInterface>().data());

     surfaceContext->setContextProperty("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
     surfaceContext->setContextProperty("AudioScope", DependencyManager::get<AudioScope>().data());
@@ -194,7 +194,11 @@ void Web3DOverlay::setupQmlSurface() {
     _webSurface->getSurfaceContext()->setContextProperty("offscreenFlags", flags);
     _webSurface->getSurfaceContext()->setContextProperty("AddressManager", DependencyManager::get<AddressManager>().data());
     _webSurface->getSurfaceContext()->setContextProperty("Account", AccountScriptingInterface::getInstance());
-    _webSurface->getSurfaceContext()->setContextProperty("Audio", DependencyManager::get<AudioScriptingInterface>().data());
+
+    // in Qt 5.10.0 there is already an "Audio" object in the QML context
+    // though I failed to find it (from QtMultimedia??). So.. let it be "AudioScriptingInterface"
+    _webSurface->getSurfaceContext()->setContextProperty("AudioScriptingInterface", DependencyManager::get<AudioScriptingInterface>().data());
+
     _webSurface->getSurfaceContext()->setContextProperty("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
     _webSurface->getSurfaceContext()->setContextProperty("HMD", DependencyManager::get<HMDScriptingInterface>().data());
     _webSurface->getSurfaceContext()->setContextProperty("fileDialogHelper", new FileDialogHelper());
@@ -605,4 +609,4 @@ Web3DOverlay* Web3DOverlay::createClone() const {

 void Web3DOverlay::emitScriptEvent(const QVariant& message) {
     QMetaObject::invokeMethod(this, "scriptEventReceived", Q_ARG(QVariant, message));
 }
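
The same registration appears twice above, in Application::initializeUi() and Web3DOverlay::setupQmlSurface(). A hypothetical helper - not part of this commit, and with illustrative names - could keep the QML-facing name defined in one place if further offscreen surfaces ever need the interface:

    // Hypothetical consolidation; not in this commit.
    #include <QQmlContext>
    #include <QObject>

    void exposeAudioInterface(QQmlContext* context, QObject* audioInterface) {
        // One canonical, non-colliding name: Qt 5.10+ can already put an
        // "Audio" type into QML scope, so the context property avoids that name.
        context->setContextProperty("AudioScriptingInterface", audioInterface);
    }

Each call site would then pass its surface context together with DependencyManager::get<AudioScriptingInterface>().data().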