Merge branch 'master' of github.com:highfidelity/hifi into no-url-logging
@@ -210,11 +210,13 @@ JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeOnDestroy
JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeGotoUrl(JNIEnv* env, jobject obj, jstring url) {
    QAndroidJniObject jniUrl("java/lang/String", "(Ljava/lang/String;)V", url);
    DependencyManager::get<AddressManager>()->loadSettings(jniUrl.toString());
    AndroidHelper::instance().muteMic();
}

JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeGoToUser(JNIEnv* env, jobject obj, jstring username) {
    QAndroidJniObject jniUsername("java/lang/String", "(Ljava/lang/String;)V", username);
    DependencyManager::get<AddressManager>()->goToUser(jniUsername.toString(), false);
    AndroidHelper::instance().muteMic();
}

JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeOnPause(JNIEnv* env, jobject obj) {
@@ -3,8 +3,8 @@ package io.highfidelity.hifiinterface.fragment;
import android.content.SharedPreferences;
import android.media.audiofx.AcousticEchoCanceler;
import android.os.Bundle;
import android.preference.Preference;
import android.preference.PreferenceFragment;
import android.preference.PreferenceManager;
import android.support.annotation.Nullable;

import io.highfidelity.hifiinterface.R;
@@ -18,17 +18,23 @@ public class SettingsFragment extends PreferenceFragment implements SharedPrefer
    private final String HIFI_SETTINGS_AEC_KEY = "aec";
    private final String PREFERENCE_KEY_AEC = "aec";

    private final boolean DEFAULT_AEC_ENABLED = true;

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        addPreferencesFromResource(R.xml.settings);
        boolean aecAvailable = AcousticEchoCanceler.isAvailable();
        PreferenceManager.setDefaultValues(getContext(), R.xml.settings, false);

        if (!AcousticEchoCanceler.isAvailable()) {
            getPreferenceScreen().getPreferenceManager().findPreference("aec").setEnabled(false);
        if (!aecAvailable) {
            findPreference(PREFERENCE_KEY_AEC).setEnabled(false);
            updateHifiSetting(HIFI_SETTINGS_ANDROID_GROUP, HIFI_SETTINGS_AEC_KEY, false);
        }

        getPreferenceScreen().getSharedPreferences().edit().putBoolean(PREFERENCE_KEY_AEC,
            getHifiSettingBoolean(HIFI_SETTINGS_ANDROID_GROUP, HIFI_SETTINGS_AEC_KEY, false));
            aecAvailable && getHifiSettingBoolean(HIFI_SETTINGS_ANDROID_GROUP, HIFI_SETTINGS_AEC_KEY, DEFAULT_AEC_ENABLED)).commit();
    }

    public static SettingsFragment newInstance() {

@@ -46,15 +52,13 @@ public class SettingsFragment extends PreferenceFragment implements SharedPrefer
    public void onPause() {
        super.onPause();
        getPreferenceScreen().getSharedPreferences().unregisterOnSharedPreferenceChangeListener(this);
    }

    @Override
    public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
        Preference pref = findPreference(key);
        switch (key) {
            case "aec":
                updateHifiSetting(HIFI_SETTINGS_ANDROID_GROUP, HIFI_SETTINGS_AEC_KEY, sharedPreferences.getBoolean(key, false));
            case PREFERENCE_KEY_AEC:
                updateHifiSetting(HIFI_SETTINGS_ANDROID_GROUP, HIFI_SETTINGS_AEC_KEY, sharedPreferences.getBoolean(key, DEFAULT_AEC_ENABLED));
                break;
            default:
                break;
@@ -6,6 +6,7 @@
    <SwitchPreference
        android:key="aec"
        android:title="@string/AEC"
        android:summary="@string/acoustic_echo_cancellation" />
        android:summary="@string/acoustic_echo_cancellation"
        android:defaultValue="true" />
    </PreferenceCategory>
</PreferenceScreen>
@@ -89,7 +89,8 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
        PacketType::NodeIgnoreRequest,
        PacketType::RadiusIgnoreRequest,
        PacketType::RequestsDomainListData,
        PacketType::PerAvatarGainSet },
        PacketType::PerAvatarGainSet,
        PacketType::AudioSoloRequest },
        this, "queueAudioPacket");

    // packets whose consequences are global should be processed on the main thread
@@ -98,6 +98,9 @@ int AudioMixerClientData::processPackets(ConcurrentAddedStreams& addedStreams) {
        case PacketType::RadiusIgnoreRequest:
            parseRadiusIgnoreRequest(packet, node);
            break;
        case PacketType::AudioSoloRequest:
            parseSoloRequest(packet, node);
            break;
        default:
            Q_UNREACHABLE();
    }

@@ -295,6 +298,25 @@ void AudioMixerClientData::parseRadiusIgnoreRequest(QSharedPointer<ReceivedMessa
    }
}

void AudioMixerClientData::parseSoloRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node) {
    uint8_t addToSolo;
    message->readPrimitive(&addToSolo);

    while (message->getBytesLeftToRead()) {
        // parse out the UUID being soloed from the packet
        QUuid soloedUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));

        if (addToSolo) {
            _soloedNodes.push_back(soloedUUID);
        } else {
            auto it = std::remove(std::begin(_soloedNodes), std::end(_soloedNodes), soloedUUID);
            _soloedNodes.erase(it, std::end(_soloedNodes));
        }
    }
}

AvatarAudioStream* AudioMixerClientData::getAvatarAudioStream() {
    auto it = std::find_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
        return stream->getStreamIdentifier().isNull();
@@ -65,6 +65,7 @@ public:
    void parsePerAvatarGainSet(ReceivedMessage& message, const SharedNodePointer& node);
    void parseNodeIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
    void parseRadiusIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
    void parseSoloRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);

    // attempt to pop a frame from each audio stream, and return the number of streams from this client
    int checkBuffersBeforeFrameSend();

@@ -150,6 +151,9 @@ public:
    const Node::IgnoredNodeIDs& getIgnoringNodeIDs() const { return _ignoringNodeIDs; }

    const std::vector<QUuid>& getSoloedNodes() const { return _soloedNodes; }

    bool getHasReceivedFirstMix() const { return _hasReceivedFirstMix; }
    void setHasReceivedFirstMix(bool hasReceivedFirstMix) { _hasReceivedFirstMix = hasReceivedFirstMix; }

@@ -209,6 +213,8 @@ private:
    std::atomic_bool _isIgnoreRadiusEnabled { false };

    std::vector<QUuid> _soloedNodes;

    bool _hasReceivedFirstMix { false };
};
@@ -272,6 +272,10 @@ bool shouldBeSkipped(MixableStream& stream, const Node& listener,
        return true;
    }

    if (!listenerData.getSoloedNodes().empty()) {
        return !contains(listenerData.getSoloedNodes(), stream.nodeStreamID.nodeID);
    }

    bool shouldCheckIgnoreBox = (listenerAudioStream.isIgnoreBoxEnabled() ||
                                 stream.positionalStream->isIgnoreBoxEnabled());
    if (shouldCheckIgnoreBox &&

@@ -310,6 +314,7 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
    memset(_mixSamples, 0, sizeof(_mixSamples));

    bool isThrottling = _numToRetain != -1;
    bool isSoloing = !listenerData->getSoloedNodes().empty();

    auto& streams = listenerData->getStreams();

@@ -376,13 +381,14 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
            stream.approximateVolume = approximateVolume(stream, listenerAudioStream);
        } else {
            if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
                addStream(stream, *listenerAudioStream, 0.0f);
                addStream(stream, *listenerAudioStream, 0.0f, isSoloing);
                streams.skipped.push_back(move(stream));
                ++stats.activeToSkipped;
                return true;
            }

            addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain());
            addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(),
                      isSoloing);

            if (shouldBeInactive(stream)) {
                // To reduce artifacts we still call render to flush the HRTF for every silent

@@ -417,7 +423,8 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
            return true;
        }

        addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain());
        addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(),
                  isSoloing);

        if (shouldBeInactive(stream)) {
            // To reduce artifacts we still call render to flush the HRTF for every silent

@@ -484,7 +491,7 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
void AudioMixerSlave::addStream(AudioMixerClientData::MixableStream& mixableStream,
                                AvatarAudioStream& listeningNodeStream,
                                float masterListenerGain) {
                                float masterListenerGain, bool isSoloing) {
    ++stats.totalMixes;

    auto streamToAdd = mixableStream.positionalStream;

@@ -495,9 +502,13 @@ void AudioMixerSlave::addStream(AudioMixerClientData::MixableStream& mixableStre
    glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream.getPosition();

    float distance = glm::max(glm::length(relativePosition), EPSILON);
    float gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
    float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);

    float gain = 1.0f;
    if (!isSoloing) {
        gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
    }

    const int HRTF_DATASET_INDEX = 1;

    if (!streamToAdd->lastPopSucceeded()) {
@@ -57,7 +57,7 @@ private:
    bool prepareMix(const SharedNodePointer& listener);
    void addStream(AudioMixerClientData::MixableStream& mixableStream,
                   AvatarAudioStream& listeningNodeStream,
                   float masterListenerGain);
                   float masterListenerGain, bool isSoloing);
    void updateHRTFParameters(AudioMixerClientData::MixableStream& mixableStream,
                              AvatarAudioStream& listeningNodeStream,
                              float masterListenerGain);
@@ -18,6 +18,11 @@
    window.isKeyboardRaised = false;
    window.isNumericKeyboard = false;
    window.isPasswordField = false;
    window.lastActiveElement = null;

    function getActiveElement() {
        return document.activeElement;
    }

    function shouldSetPasswordField() {
        var nodeType = document.activeElement.type;

@@ -65,10 +70,11 @@
        var keyboardRaised = shouldRaiseKeyboard();
        var numericKeyboard = shouldSetNumeric();
        var passwordField = shouldSetPasswordField();
        var activeElement = getActiveElement();

        if (isWindowFocused &&
            (keyboardRaised !== window.isKeyboardRaised || numericKeyboard !== window.isNumericKeyboard
            || passwordField !== window.isPasswordField)) {
            || passwordField !== window.isPasswordField || activeElement !== window.lastActiveElement)) {

            if (typeof EventBridge !== "undefined" && EventBridge !== null) {
                EventBridge.emitWebEvent(

@@ -90,6 +96,7 @@
            window.isKeyboardRaised = keyboardRaised;
            window.isNumericKeyboard = numericKeyboard;
            window.isPasswordField = passwordField;
            window.lastActiveElement = activeElement;
        }
    }, POLL_FREQUENCY);
@@ -1,4 +1,20 @@
<svg width="22" height="26" fill="none" version="1.1" viewBox="0 0 22 26" xmlns="http://www.w3.org/2000/svg">
<path d="M1 7L11 1L21 7M1 7L11 13M1 7V19L11 25M11 13L21 7M11 13V25M21 7V19L11 25" stroke="#000" stroke-linejoin="round" stroke-width="2"/>
<circle class="st1" cx="19.407" cy="2.5881" r="2.5846" fill="#ef3b4e" stroke-width=".24043"/>
</svg>
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 22.0.1, SVG Export Plug-In . SVG Version: 6.00 Build 0)  -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
     viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
<style type="text/css">
    .st0{fill:#EF3B4E;}
</style>
<g>
    <circle cx="27.8" cy="13.3" r="2.4"/>
    <circle cx="19.7" cy="7.2" r="2.3"/>
    <circle cx="9.4" cy="6.9" r="2.2"/>
    <path d="M41.8,17.5l-8.9-5.2c0.1,0.3,0.1,0.7,0.1,1c0,1-0.3,1.8-0.8,2.6l5.1,2.9L25,26l-12.3-7.1l3.1-1.8c-0.4-0.7-0.7-1.6-0.7-2.5
        c0-0.4,0.1-0.8,0.2-1.2l-7.1,4.1c-0.5,0.3-0.9,0.9-0.9,1.5v16.5c0,0.6,0.3,1.2,0.9,1.5l16,9.2c0.3,0.2,0.6,0.2,0.9,0.2
        s0.6-0.1,0.9-0.2l16-9.2c0.5-0.3,0.9-0.9,0.9-1.5V19C42.7,18.4,42.3,17.8,41.8,17.5z M10.7,21.7L23.3,29v12.8l-12.5-7.2V21.7z
        M39.2,34.5l-12.5,7.2V28.9l12.5-7.2V34.5z"/>
    <circle cx="25" cy="20.3" r="2.8"/>
    <circle cx="20" cy="14.6" r="2.4"/>
</g>
<circle class="st0" cx="44.1" cy="6" r="5.6"/>
</svg>
@@ -1,3 +1,16 @@
<svg width="22" height="26" viewBox="0 0 22 26" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M1 7L11 1L21 7M1 7L11 13M1 7V19L11 25M11 13L21 7M11 13V25M21 7V19L11 25" stroke="black" stroke-width="2" stroke-linejoin="round"/>
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 22.0.1, SVG Export Plug-In . SVG Version: 6.00 Build 0)  -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
     viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
<g>
    <circle cx="27.8" cy="13.3" r="2.4"/>
    <circle cx="19.7" cy="7.2" r="2.3"/>
    <circle cx="9.4" cy="6.9" r="2.2"/>
    <path d="M41.8,17.5l-8.9-5.2c0.1,0.3,0.1,0.7,0.1,1c0,1-0.3,1.8-0.8,2.6l5.1,2.9L25,26l-12.3-7.1l3.1-1.8c-0.4-0.7-0.7-1.6-0.7-2.5
        c0-0.4,0.1-0.8,0.2-1.2l-7.1,4.1c-0.5,0.3-0.9,0.9-0.9,1.5v16.5c0,0.6,0.3,1.2,0.9,1.5l16,9.2c0.3,0.2,0.6,0.2,0.9,0.2
        s0.6-0.1,0.9-0.2l16-9.2c0.5-0.3,0.9-0.9,0.9-1.5V19C42.7,18.4,42.3,17.8,41.8,17.5z M10.7,21.7L23.3,29v12.8l-12.5-7.2V21.7z
        M39.2,34.5l-12.5,7.2V28.9l12.5-7.2V34.5z"/>
    <circle cx="25" cy="20.3" r="2.8"/>
    <circle cx="20" cy="14.6" r="2.4"/>
</g>
</svg>
@@ -1,4 +1,21 @@
<svg width="22" height="26" fill="none" version="1.1" viewBox="0 0 22 26" xmlns="http://www.w3.org/2000/svg">
<path d="M1 7L11 1L21 7M1 7L11 13M1 7V19L11 25M11 13L21 7M11 13V25M21 7V19L11 25" stroke="#fff" stroke-linejoin="round" stroke-width="2"/>
<circle class="st1" cx="19.41" cy="2.5828" r="2.5846" fill="#ef3b4e" stroke-width=".24043"/>
</svg>
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 22.0.1, SVG Export Plug-In . SVG Version: 6.00 Build 0)  -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
     viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
<style type="text/css">
    .st0{fill:#FFFFFF;}
    .st1{fill:#EF3B4E;}
</style>
<g>
    <circle class="st0" cx="27.8" cy="13.3" r="2.4"/>
    <circle class="st0" cx="19.7" cy="7.2" r="2.3"/>
    <circle class="st0" cx="9.4" cy="6.9" r="2.2"/>
    <path class="st0" d="M41.8,17.5l-8.9-5.2c0.1,0.3,0.1,0.7,0.1,1c0,1-0.3,1.8-0.8,2.6l5.1,2.9L25,26l-12.3-7.1l3.1-1.8
        c-0.4-0.7-0.7-1.6-0.7-2.5c0-0.4,0.1-0.8,0.2-1.2l-7.1,4.1c-0.5,0.3-0.9,0.9-0.9,1.5v16.5c0,0.6,0.3,1.2,0.9,1.5l16,9.2
        c0.3,0.2,0.6,0.2,0.9,0.2s0.6-0.1,0.9-0.2l16-9.2c0.5-0.3,0.9-0.9,0.9-1.5V19C42.7,18.4,42.3,17.8,41.8,17.5z M10.7,21.7L23.3,29
        v12.8l-12.5-7.2V21.7z M39.2,34.5l-12.5,7.2V28.9l12.5-7.2V34.5z"/>
    <circle class="st0" cx="25" cy="20.3" r="2.8"/>
    <circle class="st0" cx="20" cy="14.6" r="2.4"/>
</g>
<circle class="st1" cx="44.1" cy="6" r="5.6"/>
</svg>
@@ -1,3 +1,19 @@
<svg width="22" height="26" viewBox="0 0 22 26" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M1 7L11 1L21 7M1 7L11 13M1 7V19L11 25M11 13L21 7M11 13V25M21 7V19L11 25" stroke="white" stroke-width="2" stroke-linejoin="round"/>
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 22.0.1, SVG Export Plug-In . SVG Version: 6.00 Build 0)  -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
     viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
<style type="text/css">
    .st0{fill:#FFFFFF;}
</style>
<g>
    <circle class="st0" cx="27.8" cy="13.3" r="2.4"/>
    <circle class="st0" cx="19.7" cy="7.2" r="2.3"/>
    <circle class="st0" cx="9.4" cy="6.9" r="2.2"/>
    <path class="st0" d="M41.8,17.5l-8.9-5.2c0.1,0.3,0.1,0.7,0.1,1c0,1-0.3,1.8-0.8,2.6l5.1,2.9L25,26l-12.3-7.1l3.1-1.8
        c-0.4-0.7-0.7-1.6-0.7-2.5c0-0.4,0.1-0.8,0.2-1.2l-7.1,4.1c-0.5,0.3-0.9,0.9-0.9,1.5v16.5c0,0.6,0.3,1.2,0.9,1.5l16,9.2
        c0.3,0.2,0.6,0.2,0.9,0.2s0.6-0.1,0.9-0.2l16-9.2c0.5-0.3,0.9-0.9,0.9-1.5V19C42.7,18.4,42.3,17.8,41.8,17.5z M10.7,21.7L23.3,29
        v12.8l-12.5-7.2V21.7z M39.2,34.5l-12.5,7.2V28.9l12.5-7.2V34.5z"/>
    <circle class="st0" cx="25" cy="20.3" r="2.8"/>
    <circle class="st0" cx="20" cy="14.6" r="2.4"/>
</g>
</svg>
@@ -34,10 +34,34 @@ Item {
        webViewCore.stop();
    }

    Timer {
        id: delayedUnfocuser
        repeat: false
        interval: 200
        onTriggered: {

            // The idea behind this is to delay unfocusing, so that fast lower/raise will not result actual unfocusing.
            // Fast lower/raise happens every time keyboard is being re-raised (see the code below in OffscreenQmlSurface::setKeyboardRaised)
            //
            // if (raised) {
            //     item->setProperty("keyboardRaised", QVariant(!raised));
            // }
            //
            // item->setProperty("keyboardRaised", QVariant(raised));
            //

            webViewCore.runJavaScript("if (document.activeElement) document.activeElement.blur();", function(result) {
                console.log('unfocus completed: ', result);
            });
        }
    }

    function unfocus() {
        webViewCore.runJavaScript("if (document.activeElement) document.activeElement.blur();", function(result) {
            console.log('unfocus completed: ', result);
        });
        delayedUnfocuser.start();
    }

    function stopUnfocus() {
        delayedUnfocuser.stop();
    }

    function onLoadingChanged(loadRequest) {
@@ -13,6 +13,8 @@ Item {
    onKeyboardRaisedChanged: {
        if(!keyboardRaised) {
            webroot.unfocus();
        } else {
            webroot.stopUnfocus();
        }
    }
    property bool punctuationMode: false

@@ -17,6 +17,8 @@ Item {
    onKeyboardRaisedChanged: {
        if(!keyboardRaised) {
            webroot.unfocus();
        } else {
            webroot.stopUnfocus();
        }
    }
    property bool punctuationMode: false

@@ -15,6 +15,8 @@ Item {
    onKeyboardRaisedChanged: {
        if(!keyboardRaised) {
            webroot.unfocus();
        } else {
            webroot.stopUnfocus();
        }
    }
    property bool punctuationMode: false
@@ -49,6 +49,7 @@ Item {
    property string defaultThumbnail: Qt.resolvedUrl("../../images/default-domain.gif");
    property int shadowHeight: 10;
    property bool hovered: false
    property bool scrolling: false

    HifiConstants { id: hifi }
@@ -236,11 +237,12 @@ Item {
    property var hoverThunk: function () { };
    property var unhoverThunk: function () { };
    Rectangle {
        anchors.fill: parent;
        visible: root.hovered
        color: "transparent";
        border.width: 4; border.color: hifiStyleConstants.colors.primaryHighlight;
        z: 1;
        anchors.fill: parent
        visible: root.hovered && !root.scrolling
        color: "transparent"
        border.width: 4
        border.color: hifiStyleConstants.colors.primaryHighlight
        z: 1
    }
    MouseArea {
        anchors.fill: parent;

@@ -255,6 +257,12 @@ Item {
            hoverThunk();
        }
        onExited: unhoverThunk();
        onCanceled: unhoverThunk();
    }
    MouseArea {
        // This second mouse area causes onEntered to fire on the first if you scroll just a little and the cursor stays on
        // the original card. I.e., the original card is re-highlighted if the cursor is on it after scrolling finishes.
        anchors.fill: parent
    }
    StateImage {
        id: actionIcon;
@@ -141,6 +141,8 @@ Column {
        textSizeSmall: root.textSizeSmall;
        stackShadowNarrowing: root.stackShadowNarrowing;
        shadowHeight: root.stackedCardShadowHeight;
        scrolling: scroll.moving

        hoverThunk: function () {
            hovered = true;
            if(root.autoScrollTimerEnabled) {
@@ -536,8 +536,8 @@ Rectangle {
    Rectangle {
        id: exchangeMoneyMessagesWaitingLight;
        visible: parent.messagesWaiting;
        anchors.right: exchangeMoneyTabIcon.left;
        anchors.rightMargin: 9;
        anchors.left: parent.left;
        anchors.leftMargin: 16;
        anchors.top: exchangeMoneyTabIcon.top;
        anchors.topMargin: 4;
        height: 10;
@@ -251,17 +251,29 @@ Item {
        height: 15

        Rectangle {
            property bool isHovered: false
            anchors.centerIn: parent
            opacity: index === pageIndicator.currentIndex ? 0.95 : 0.45
            implicitWidth: index === pageIndicator.currentIndex ? 15 : 10
            opacity: index === pageIndicator.currentIndex || isHovered ? 0.95 : 0.45
            implicitWidth: index === pageIndicator.currentIndex || isHovered ? 15 : 10
            implicitHeight: implicitWidth
            radius: width/2
            color: "white"
            color: isHovered && index !== pageIndicator.currentIndex ? "#1fc6a6" : "white"
            Behavior on opacity {
                OpacityAnimator {
                    duration: 100
                }
            }

            MouseArea {
                anchors.centerIn: parent
                width: 20
                height: 30 // Make it easier to target with laser.
                hoverEnabled: true
                enabled: true
                onEntered: parent.isHovered = true;
                onExited: parent.isHovered = false;
                onClicked: swipeView.currentIndex = index;
            }
        }
    }
@@ -73,6 +73,14 @@ void AndroidHelper::notifyHeadsetOn(bool pluggedIn) {
#endif
}

void AndroidHelper::muteMic() {
    auto audioClient = DependencyManager::get<AudioClient>();
    if (audioClient) {
        QMetaObject::invokeMethod(audioClient.data(), "setMuted", Q_ARG(bool, true), Q_ARG(bool, true));
    }
}

void AndroidHelper::signup(QString email, QString username, QString password) {
    JSONCallbackParameters callbackParams;
    callbackParams.callbackReceiver = this;
@@ -35,6 +35,7 @@ public:
    void performHapticFeedback(int duration);
    void processURL(const QString &url);
    void notifyHeadsetOn(bool pluggedIn);
    void muteMic();

    AndroidHelper(AndroidHelper const&) = delete;
    void operator=(AndroidHelper const&) = delete;
@@ -3537,7 +3537,17 @@ void Application::handleSandboxStatus(QNetworkReply* reply) {
    } else {
#if !defined(Q_OS_ANDROID)
        qCDebug(interfaceapp) << "Not first run... going to" << qPrintable(addressLookupString.isEmpty() ? QString("previous location") : addressLookupString);
        QString goingTo = "";
        if (addressLookupString.isEmpty()) {
            if (Menu::getInstance()->isOptionChecked(MenuOption::HomeLocation)) {
                auto locationBookmarks = DependencyManager::get<LocationBookmarks>();
                addressLookupString = locationBookmarks->addressForBookmark(LocationBookmarks::HOME_BOOKMARK);
                goingTo = "home location";
            } else {
                goingTo = "previous location";
            }
        }
        qCDebug(interfaceapp) << "Not first run... going to" << qPrintable(!goingTo.isEmpty() ? goingTo : addressLookupString);
        DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
        sentTo = SENT_TO_PREVIOUS_LOCATION;
#endif
@@ -226,6 +226,14 @@ Menu::Menu() {
    addActionToQMenuAndActionHash(navigateMenu, MenuOption::CopyPath, 0,
        addressManager.data(), SLOT(copyPath()));

    // Navigate > Start-up Location
    MenuWrapper* startupLocationMenu = navigateMenu->addMenu(MenuOption::StartUpLocation);
    QActionGroup* startupLocatiopnGroup = new QActionGroup(startupLocationMenu);
    startupLocatiopnGroup->setExclusive(true);
    startupLocatiopnGroup->addAction(addCheckableActionToQMenuAndActionHash(startupLocationMenu, MenuOption::HomeLocation, 0,
        false));
    startupLocatiopnGroup->addAction(addCheckableActionToQMenuAndActionHash(startupLocationMenu, MenuOption::LastLocation, 0,
        true));

    // Settings menu ----------------------------------
    MenuWrapper* settingsMenu = addMenu("Settings");
@@ -117,9 +117,11 @@ namespace MenuOption {
    const QString FrameTimer = "Show Timer";
    const QString FullscreenMirror = "Mirror";
    const QString Help = "Help...";
    const QString HomeLocation = "Home";
    const QString IncreaseAvatarSize = "Increase Avatar Size";
    const QString IndependentMode = "Independent Mode";
    const QString ActionMotorControl = "Enable Default Motor Control";
    const QString LastLocation = "Last Location";
    const QString LoadScript = "Open and Run Script File...";
    const QString LoadScriptURL = "Open and Run Script from URL...";
    const QString LodTools = "LOD Tools";

@@ -197,6 +199,7 @@ namespace MenuOption {
    const QString SimulateEyeTracking = "Simulate";
    const QString SMIEyeTracking = "SMI Eye Tracking";
    const QString SparseTextureManagement = "Enable Sparse Texture Management";
    const QString StartUpLocation = "Start-Up Location";
    const QString Stats = "Show Statistics";
    const QString AnimStats = "Show Animation Stats";
    const QString StopAllScripts = "Stop All Scripts";
@@ -538,17 +538,25 @@ void MyAvatar::update(float deltaTime) {

    // put the average hand azimuth into sensor space.
    // then mix it with head facing direction to determine rotation recenter
    if (getControllerPoseInAvatarFrame(controller::Action::LEFT_HAND).isValid() && getControllerPoseInAvatarFrame(controller::Action::RIGHT_HAND).isValid()) {
        glm::vec3 handHipAzimuthWorldSpace = transformVectorFast(getTransform().getMatrix(), glm::vec3(_hipToHandController.x, 0.0f, _hipToHandController.y));
    int spine2Index = _skeletonModel->getRig().indexOfJoint("Spine2");
    if (getControllerPoseInAvatarFrame(controller::Action::LEFT_HAND).isValid() && getControllerPoseInAvatarFrame(controller::Action::RIGHT_HAND).isValid() && !(spine2Index < 0)) {

        // use the spine for the azimuth origin.
        glm::quat spine2Rot = getAbsoluteJointRotationInObjectFrame(spine2Index);
        glm::vec3 handHipAzimuthAvatarSpace = spine2Rot * glm::vec3(_hipToHandController.x, 0.0f, _hipToHandController.y);
        glm::vec3 handHipAzimuthWorldSpace = transformVectorFast(getTransform().getMatrix(), handHipAzimuthAvatarSpace);
        glm::mat4 sensorToWorldMat = getSensorToWorldMatrix();
        glm::mat4 worldToSensorMat = glm::inverse(sensorToWorldMat);
        glm::vec3 handHipAzimuthSensorSpace = transformVectorFast(worldToSensorMat, handHipAzimuthWorldSpace);
        glm::vec2 normedHandHipAzimuthSensorSpace(0.0f, 1.0f);
        if (glm::length(glm::vec2(handHipAzimuthSensorSpace.x, handHipAzimuthSensorSpace.z)) > 0.0f) {
            normedHandHipAzimuthSensorSpace = glm::normalize(glm::vec2(handHipAzimuthSensorSpace.x, handHipAzimuthSensorSpace.z));
            glm::vec2 headFacingPlusHandHipAzimuthMix = lerp(normedHandHipAzimuthSensorSpace, _headControllerFacing, PERCENTAGE_WEIGHT_HEAD_VS_SHOULDERS_AZIMUTH);
            _headControllerFacingMovingAverage = lerp(_headControllerFacingMovingAverage, headFacingPlusHandHipAzimuthMix, tau);
        } else {
            // use head facing if the chest arms vector is up or down.
            _headControllerFacingMovingAverage = lerp(_headControllerFacingMovingAverage, _headControllerFacing, tau);
        }
        glm::vec2 headFacingPlusHandHipAzimuthMix = lerp(normedHandHipAzimuthSensorSpace, _headControllerFacing, PERCENTAGE_WEIGHT_HEAD_VS_SHOULDERS_AZIMUTH);
        _headControllerFacingMovingAverage = lerp(_headControllerFacingMovingAverage, headFacingPlusHandHipAzimuthMix, tau);
    } else {
        _headControllerFacingMovingAverage = lerp(_headControllerFacingMovingAverage, _headControllerFacing, tau);
    }
@@ -979,35 +987,48 @@ void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix) {
}

// Find the vector halfway between the hip to hand azimuth vectors
// This midpoint hand azimuth is in Avatar space
// This midpoint hand azimuth is in Spine2 space
glm::vec2 MyAvatar::computeHandAzimuth() const {
    controller::Pose leftHandPoseAvatarSpace = getLeftHandPose();
    controller::Pose rightHandPoseAvatarSpace = getRightHandPose();
    controller::Pose headPoseAvatarSpace = getControllerPoseInAvatarFrame(controller::Action::HEAD);
    const float HALFWAY = 0.50f;

    glm::vec2 latestHipToHandController = _hipToHandController;

    if (leftHandPoseAvatarSpace.isValid() && rightHandPoseAvatarSpace.isValid() && headPoseAvatarSpace.isValid()) {
    int spine2Index = _skeletonModel->getRig().indexOfJoint("Spine2");
    if (leftHandPoseAvatarSpace.isValid() && rightHandPoseAvatarSpace.isValid() && headPoseAvatarSpace.isValid() && !(spine2Index < 0)) {

        glm::vec3 spine2Position = getAbsoluteJointTranslationInObjectFrame(spine2Index);
        glm::quat spine2Rotation = getAbsoluteJointRotationInObjectFrame(spine2Index);

        glm::vec3 rightHandOffset = rightHandPoseAvatarSpace.translation - spine2Position;
        glm::vec3 leftHandOffset = leftHandPoseAvatarSpace.translation - spine2Position;
        glm::vec3 rightHandSpine2Space = glm::inverse(spine2Rotation) * rightHandOffset;
        glm::vec3 leftHandSpine2Space = glm::inverse(spine2Rotation) * leftHandOffset;

        // we need the old azimuth reading to prevent flipping the facing direction 180
        // in the case where the hands go from being slightly less than 180 apart to slightly more than 180 apart.
        glm::vec2 oldAzimuthReading = _hipToHandController;
        if ((glm::length(glm::vec2(rightHandPoseAvatarSpace.translation.x, rightHandPoseAvatarSpace.translation.z)) > 0.0f) && (glm::length(glm::vec2(leftHandPoseAvatarSpace.translation.x, leftHandPoseAvatarSpace.translation.z)) > 0.0f)) {
            latestHipToHandController = lerp(glm::normalize(glm::vec2(rightHandPoseAvatarSpace.translation.x, rightHandPoseAvatarSpace.translation.z)), glm::normalize(glm::vec2(leftHandPoseAvatarSpace.translation.x, leftHandPoseAvatarSpace.translation.z)), HALFWAY);
        if ((glm::length(glm::vec2(rightHandSpine2Space.x, rightHandSpine2Space.z)) > 0.0f) && (glm::length(glm::vec2(leftHandSpine2Space.x, leftHandSpine2Space.z)) > 0.0f)) {
            latestHipToHandController = lerp(glm::normalize(glm::vec2(rightHandSpine2Space.x, rightHandSpine2Space.z)), glm::normalize(glm::vec2(leftHandSpine2Space.x, leftHandSpine2Space.z)), HALFWAY);
        } else {
            latestHipToHandController = glm::vec2(0.0f, -1.0f);
            latestHipToHandController = glm::vec2(0.0f, 1.0f);
        }

        glm::vec3 headLookAtAvatarSpace = transformVectorFast(headPoseAvatarSpace.getMatrix(), glm::vec3(0.0f, 0.0f, 1.0f));
        glm::vec2 headAzimuthAvatarSpace = glm::vec2(headLookAtAvatarSpace.x, headLookAtAvatarSpace.z);
        if (glm::length(headAzimuthAvatarSpace) > 0.0f) {
            headAzimuthAvatarSpace = glm::normalize(headAzimuthAvatarSpace);
        glm::vec3 headLookAtSpine2Space = glm::inverse(spine2Rotation) * headLookAtAvatarSpace;

        glm::vec2 headAzimuthSpine2Space = glm::vec2(headLookAtSpine2Space.x, headLookAtSpine2Space.z);
        if (glm::length(headAzimuthSpine2Space) > 0.0f) {
            headAzimuthSpine2Space = glm::normalize(headAzimuthSpine2Space);
        } else {
            headAzimuthAvatarSpace = -latestHipToHandController;
            headAzimuthSpine2Space = -latestHipToHandController;
        }

        // check the angular distance from forward and back
        float cosForwardAngle = glm::dot(latestHipToHandController, oldAzimuthReading);
        float cosHeadShoulder = glm::dot(-latestHipToHandController, headAzimuthAvatarSpace);
        float cosHeadShoulder = glm::dot(-latestHipToHandController, headAzimuthSpine2Space);
        // if we are now closer to the 180 flip of the previous chest forward
        // then we negate our computed latestHipToHandController to keep the chest from flipping.
        // also check the head to shoulder azimuth difference if we negate.
@@ -3518,19 +3539,33 @@ glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
}

glm::mat4 MyAvatar::getSpine2RotationRigSpace() const {
    int spine2Index = _skeletonModel->getRig().indexOfJoint("Spine2");
    glm::quat spine2Rot = Quaternions::IDENTITY;
    if (!(spine2Index < 0)) {
        // use the spine for the azimuth origin.
        spine2Rot = getAbsoluteJointRotationInObjectFrame(spine2Index);
    }
    glm::vec3 spine2UpAvatarSpace = spine2Rot * glm::vec3(0.0f, 1.0f, 0.0f);
    glm::vec3 spine2FwdAvatarSpace = spine2Rot * glm::vec3(_hipToHandController.x, 0.0f, _hipToHandController.y);

    // static const glm::quat RIG_CHANGE_OF_BASIS = Quaternions::Y_180;
    // RIG_CHANGE_OF_BASIS * AVATAR_TO_RIG_ROTATION * inverse(RIG_CHANGE_OF_BASIS) = Quaternions::Y_180; //avatar Space;
    const glm::quat AVATAR_TO_RIG_ROTATION = Quaternions::Y_180;
    glm::vec3 hipToHandRigSpace = AVATAR_TO_RIG_ROTATION * glm::vec3(_hipToHandController.x, 0.0f, _hipToHandController.y);
    glm::vec3 spine2UpRigSpace = AVATAR_TO_RIG_ROTATION * spine2UpAvatarSpace;
    glm::vec3 spine2FwdRigSpace = AVATAR_TO_RIG_ROTATION * spine2FwdAvatarSpace;

    glm::vec3 u, v, w;
    if (glm::length(hipToHandRigSpace) > 0.0f) {
        hipToHandRigSpace = glm::normalize(hipToHandRigSpace);
    if (glm::length(spine2FwdRigSpace) > 0.0f) {
        spine2FwdRigSpace = glm::normalize(spine2FwdRigSpace);
    } else {
        hipToHandRigSpace = glm::vec3(0.0f, 0.0f, 1.0f);
        spine2FwdRigSpace = glm::vec3(0.0f, 0.0f, 1.0f);
    }
    generateBasisVectors(glm::vec3(0.0f,1.0f,0.0f), hipToHandRigSpace, u, v, w);
    if (glm::length(spine2UpRigSpace) > 0.0f) {
        spine2UpRigSpace = glm::normalize(spine2UpRigSpace);
    } else {
        spine2UpRigSpace = glm::vec3(0.0f, 1.0f, 0.0f);
    }
    generateBasisVectors(spine2UpRigSpace, spine2FwdRigSpace, u, v, w);
    glm::mat4 spine2RigSpace(glm::vec4(w, 0.0f), glm::vec4(u, 0.0f), glm::vec4(v, 0.0f), glm::vec4(glm::vec3(0.0f, 0.0f, 0.0f), 1.0f));
    return spine2RigSpace;
}
@@ -1731,7 +1731,7 @@ private:
    glm::vec2 _headControllerFacingMovingAverage { 0.0f, 0.0f };   // facing vector in xz plane (sensor space)
    glm::quat _averageHeadRotation { 0.0f, 0.0f, 0.0f, 1.0f };

    glm::vec2 _hipToHandController { 0.0f, -1.0f };  // spine2 facing vector in xz plane (avatar space)
    glm::vec2 _hipToHandController { 0.0f, 1.0f };  // spine2 facing vector in xz plane (spine2 space)

    float _currentStandingHeight { 0.0f };
    bool _resetMode { true };
@@ -233,6 +233,7 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
        myAvatar->getControllerPoseInAvatarFrame(controller::Action::LEFT_HAND).isValid() &&
        !(params.primaryControllerFlags[Rig::PrimaryControllerType_Spine2] & (uint8_t)Rig::ControllerFlags::Enabled)) {

        const float SPINE2_ROTATION_FILTER = 0.5f;
        AnimPose currentSpine2Pose;
        AnimPose currentHeadPose;
        AnimPose currentHipsPose;

@@ -252,7 +253,7 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
        }
        generateBasisVectors(up, fwd, u, v, w);
        AnimPose newSpinePose(glm::mat4(glm::vec4(w, 0.0f), glm::vec4(u, 0.0f), glm::vec4(v, 0.0f), glm::vec4(glm::vec3(0.0f, 0.0f, 0.0f), 1.0f)));
        currentSpine2Pose.rot() = newSpinePose.rot();
        currentSpine2Pose.rot() = safeLerp(currentSpine2Pose.rot(), newSpinePose.rot(), SPINE2_ROTATION_FILTER);
        params.primaryControllerPoses[Rig::PrimaryControllerType_Spine2] = currentSpine2Pose;
        params.primaryControllerFlags[Rig::PrimaryControllerType_Spine2] = (uint8_t)Rig::ControllerFlags::Enabled | (uint8_t)Rig::ControllerFlags::Estimated;
    }
@@ -50,6 +50,8 @@ class Audio : public AudioScriptingInterface, protected ReadWriteLockable {
 *     <em>Read-only.</em>
 * @property {object} devices <em>Read-only.</em> <strong>Deprecated:</strong> This property is deprecated and will be
 *     removed.
 * @property {boolean} isSoloing <em>Read-only.</em> <code>true</code> if any nodes are soloed.
 * @property {Uuid[]} soloList <em>Read-only.</em> Get the list of currently soloed node UUIDs.
 */

    Q_PROPERTY(bool muted READ isMuted WRITE setMuted NOTIFY mutedChanged)
@@ -270,7 +270,8 @@ AudioClient::AudioClient() :

    configureReverb();

    auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
    auto nodeList = DependencyManager::get<NodeList>();
    auto& packetReceiver = nodeList->getPacketReceiver();
    packetReceiver.registerListener(PacketType::AudioStreamStats, &_stats, "processStreamStatsPacket");
    packetReceiver.registerListener(PacketType::AudioEnvironment, this, "handleAudioEnvironmentDataPacket");
    packetReceiver.registerListener(PacketType::SilentAudioFrame, this, "handleAudioDataPacket");

@@ -278,6 +279,16 @@ AudioClient::AudioClient() :
    packetReceiver.registerListener(PacketType::NoisyMute, this, "handleNoisyMutePacket");
    packetReceiver.registerListener(PacketType::MuteEnvironment, this, "handleMuteEnvironmentPacket");
    packetReceiver.registerListener(PacketType::SelectedAudioFormat, this, "handleSelectedAudioFormat");

    auto& domainHandler = nodeList->getDomainHandler();
    connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, [this] {
        _solo.reset();
    });
    connect(nodeList.data(), &NodeList::nodeActivated, this, [this](SharedNodePointer node) {
        if (node->getType() == NodeType::AudioMixer) {
            _solo.resend();
        }
    });
}

AudioClient::~AudioClient() {

@@ -472,7 +483,7 @@ QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {

#if defined (Q_OS_ANDROID)
    if (mode == QAudio::AudioInput) {
        Setting::Handle<bool> enableAEC(SETTING_AEC_KEY, false);
        Setting::Handle<bool> enableAEC(SETTING_AEC_KEY, DEFAULT_AEC_ENABLED);
        bool aecEnabled = enableAEC.get();
        auto audioClient = DependencyManager::get<AudioClient>();
        bool headsetOn = audioClient? audioClient->isHeadsetPluggedIn() : false;

@@ -1680,7 +1691,7 @@ void AudioClient::setHeadsetPluggedIn(bool pluggedIn) {
        QThread::msleep(200);
    }

    Setting::Handle<bool> enableAEC(SETTING_AEC_KEY, false);
    Setting::Handle<bool> enableAEC(SETTING_AEC_KEY, DEFAULT_AEC_ENABLED);
    bool aecEnabled = enableAEC.get();

    if ((pluggedIn || !aecEnabled) && _inputDeviceInfo.deviceName() != VOICE_RECOGNITION) {
@@ -46,7 +46,6 @@
#include <AudioConstants.h>
#include <AudioGate.h>

#include <shared/RateCounter.h>

#include <plugins/CodecPlugin.h>

@@ -69,6 +68,7 @@
#define VOICE_COMMUNICATION "voicecommunication"

#define SETTING_AEC_KEY "Android/aec"
#define DEFAULT_AEC_ENABLED true
#endif

class QAudioInput;
@@ -171,6 +171,7 @@ public:
    void stopRecording();
    void setAudioPaused(bool pause);

    AudioSolo& getAudioSolo() override { return _solo; }

#ifdef Q_OS_WIN
    static QString getWinDeviceName(wchar_t* guid);

@@ -446,6 +447,8 @@ private:
#if defined(Q_OS_ANDROID)
    bool _shouldRestartInputSetup { true }; // Should we restart the input device because of an unintended stop?
#endif

    AudioSolo _solo;

    Mutex _checkDevicesMutex;
    QTimer* _checkDevicesTimer { nullptr };
@@ -19,6 +19,7 @@

#include "AudioInjectorOptions.h"
#include "AudioInjector.h"
#include "AudioSolo.h"

class AudioInjector;
class AudioInjectorLocalBuffer;

@@ -38,6 +39,8 @@ public:
    // take care to delete it when ~AudioInjector, as parenting Qt semantics will not work
    virtual bool outputLocalInjector(const AudioInjectorPointer& injector) = 0;

    virtual AudioSolo& getAudioSolo() = 0;

public slots:
    virtual bool shouldLoopbackInjectors() { return false; }
libraries/audio/src/AudioSolo.cpp (new file, 88 lines)
@@ -0,0 +1,88 @@
//
//  AudioSolo.cpp
//  libraries/audio/src
//
//  Created by Clement Brisset on 11/5/18.
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "AudioSolo.h"

#include <NodeList.h>

bool AudioSolo::isSoloing() const {
    Lock lock(_mutex);
    return !_nodesSoloed.empty();
}

QVector<QUuid> AudioSolo::getUUIDs() const {
    Lock lock(_mutex);
    return _nodesSoloed.values().toVector();
}

void AudioSolo::addUUIDs(QVector<QUuid> uuidList) {
    // create a reliable NLPacket with space for the solo UUIDs
    auto soloPacket = NLPacket::create(PacketType::AudioSoloRequest,
                                       uuidList.size() * NUM_BYTES_RFC4122_UUID + sizeof(uint8_t), true);
    uint8_t addToSoloList = (uint8_t)true;
    soloPacket->writePrimitive(addToSoloList);

    {
        Lock lock(_mutex);
        for (auto uuid : uuidList) {
            if (_nodesSoloed.contains(uuid)) {
                qWarning() << "Uuid already in solo list:" << uuid;
            } else {
                // write the node ID to the packet
                soloPacket->write(uuid.toRfc4122());
                _nodesSoloed.insert(uuid);
            }
        }
    }

    // send off this solo packet reliably to the matching node
    auto nodeList = DependencyManager::get<NodeList>();
    nodeList->broadcastToNodes(std::move(soloPacket), { NodeType::AudioMixer });
}

void AudioSolo::removeUUIDs(QVector<QUuid> uuidList) {
    // create a reliable NLPacket with space for the solo UUIDs
    auto soloPacket = NLPacket::create(PacketType::AudioSoloRequest,
                                       uuidList.size() * NUM_BYTES_RFC4122_UUID + sizeof(uint8_t), true);
    uint8_t addToSoloList = (uint8_t)false;
    soloPacket->writePrimitive(addToSoloList);

    {
        Lock lock(_mutex);
        for (auto uuid : uuidList) {
            if (!_nodesSoloed.contains(uuid)) {
                qWarning() << "Uuid not in solo list:" << uuid;
            } else {
                // write the node ID to the packet
                soloPacket->write(uuid.toRfc4122());
                _nodesSoloed.remove(uuid);
            }
        }
    }

    // send off this solo packet reliably to the matching node
    auto nodeList = DependencyManager::get<NodeList>();
    nodeList->broadcastToNodes(std::move(soloPacket), { NodeType::AudioMixer });
}

void AudioSolo::reset() {
    Lock lock(_mutex);
    removeUUIDs(getUUIDs());
}

void AudioSolo::resend() {
    Lock lock(_mutex);
    auto uuids = getUUIDs();
    _nodesSoloed.clear();
    addUUIDs(uuids);
}
libraries/audio/src/AudioSolo.h (new file, 40 lines)
@@ -0,0 +1,40 @@
//
//  AudioSolo.h
//  libraries/audio/src
//
//  Created by Clement Brisset on 11/5/18.
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#pragma once

#ifndef hifi_AudioSolo_h
#define hifi_AudioSolo_h

#include <mutex>

#include <QSet>
#include <QUuid>

class AudioSolo {
    using Mutex = std::recursive_mutex;
    using Lock = std::unique_lock<Mutex>;

public:
    bool isSoloing() const;
    QVector<QUuid> getUUIDs() const;
    void addUUIDs(QVector<QUuid> uuidList);
    void removeUUIDs(QVector<QUuid> uuidList);
    void reset();

    void resend();

private:
    mutable Mutex _mutex;
    QSet<QUuid> _nodesSoloed;
};

#endif // hifi_AudioSolo_h
@@ -1724,22 +1724,23 @@ void Avatar::computeShapeInfo(ShapeInfo& shapeInfo) {
}

void Avatar::getCapsule(glm::vec3& start, glm::vec3& end, float& radius) {
    // FIXME: this doesn't take into account Avatar rotation
    ShapeInfo shapeInfo;
    computeShapeInfo(shapeInfo);
    glm::vec3 halfExtents = shapeInfo.getHalfExtents(); // x = radius, y = halfHeight
    start = getWorldPosition() - glm::vec3(0, halfExtents.y, 0) + shapeInfo.getOffset();
    end = getWorldPosition() + glm::vec3(0, halfExtents.y, 0) + shapeInfo.getOffset();
    glm::vec3 halfExtents = shapeInfo.getHalfExtents(); // x = radius, y = cylinderHalfHeight + radius
    radius = halfExtents.x;
    glm::vec3 halfCylinderAxis(0.0f, halfExtents.y - radius, 0.0f);
    Transform transform = getTransform();
    start = transform.getTranslation() + transform.getRotation() * (shapeInfo.getOffset() - halfCylinderAxis);
    end = transform.getTranslation() + transform.getRotation() * (shapeInfo.getOffset() + halfCylinderAxis);
}

glm::vec3 Avatar::getWorldFeetPosition() {
    ShapeInfo shapeInfo;

    computeShapeInfo(shapeInfo);
    glm::vec3 halfExtents = shapeInfo.getHalfExtents(); // x = radius, y = halfHeight
    glm::vec3 localFeet(0.0f, shapeInfo.getOffset().y - halfExtents.y - halfExtents.x, 0.0f);
    return getWorldOrientation() * localFeet + getWorldPosition();
    glm::vec3 halfExtents = shapeInfo.getHalfExtents(); // x = radius, y = cylinderHalfHeight + radius
    glm::vec3 localFeet(0.0f, shapeInfo.getOffset().y - halfExtents.y, 0.0f);
    Transform transform = getTransform();
    return transform.getTranslation() + transform.getRotation() * localFeet;
}

float Avatar::computeMass() {
@@ -126,14 +126,13 @@ public:
    EntityScriptCallMethod,
    ChallengeOwnershipRequest,
    ChallengeOwnershipReply,

    OctreeDataFileRequest,
    OctreeDataFileReply,
    OctreeDataPersist,

    EntityClone,
    EntityQueryInitialResultsComplete,
    BulkAvatarTraits,
    AudioSoloRequest,

    NUM_PACKET_TYPE
};
@@ -307,21 +307,21 @@ const btCollisionShape* ShapeFactory::createShapeFromInfo(const ShapeInfo& info)
        case SHAPE_TYPE_CAPSULE_Y: {
            glm::vec3 halfExtents = info.getHalfExtents();
            float radius = halfExtents.x;
            float height = 2.0f * halfExtents.y;
            float height = 2.0f * (halfExtents.y - radius);
            shape = new btCapsuleShape(radius, height);
        }
        break;
        case SHAPE_TYPE_CAPSULE_X: {
            glm::vec3 halfExtents = info.getHalfExtents();
            float radius = halfExtents.y;
            float height = 2.0f * halfExtents.x;
            float height = 2.0f * (halfExtents.x - radius);
            shape = new btCapsuleShapeX(radius, height);
        }
        break;
        case SHAPE_TYPE_CAPSULE_Z: {
            glm::vec3 halfExtents = info.getHalfExtents();
            float radius = halfExtents.x;
            float height = 2.0f * halfExtents.z;
            float height = 2.0f * (halfExtents.z - radius);
            shape = new btCapsuleShapeZ(radius, height);
        }
        break;
@@ -25,13 +25,49 @@ class AudioScriptingInterface : public QObject, public Dependency {

    // JSDoc for property is in Audio.h.
    Q_PROPERTY(bool isStereoInput READ isStereoInput WRITE setStereoInput NOTIFY isStereoInputChanged)
    Q_PROPERTY(bool isSoloing READ isSoloing)
    Q_PROPERTY(QVector<QUuid> soloList READ getSoloList)

public:
    virtual ~AudioScriptingInterface() {}
    virtual ~AudioScriptingInterface() = default;
    void setLocalAudioInterface(AbstractAudioInterface* audioInterface);

    bool isSoloing() const {
        return _localAudioInterface->getAudioSolo().isSoloing();
    }

    QVector<QUuid> getSoloList() const {
        return _localAudioInterface->getAudioSolo().getUUIDs();
    }

    /**jsdoc
     * Add nodes to the audio solo list
     * @function Audio.addToSoloList
     * @param {Uuid[]} uuidList - List of node UUIDs to add to the solo list.
     */
    Q_INVOKABLE void addToSoloList(QVector<QUuid> uuidList) {
        _localAudioInterface->getAudioSolo().addUUIDs(uuidList);
    }

    /**jsdoc
     * Remove nodes from the audio solo list
     * @function Audio.removeFromSoloList
     * @param {Uuid[]} uuidList - List of node UUIDs to remove from the solo list.
     */
    Q_INVOKABLE void removeFromSoloList(QVector<QUuid> uuidList) {
        _localAudioInterface->getAudioSolo().removeUUIDs(uuidList);
    }

    /**jsdoc
     * Reset the list of soloed nodes.
     * @function Audio.resetSoloList
     */
    Q_INVOKABLE void resetSoloList() {
        _localAudioInterface->getAudioSolo().reset();
    }

protected:
    AudioScriptingInterface() {}
    AudioScriptingInterface() = default;

    // these methods are protected to stop C++ callers from calling, but invokable from script
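Reviewer note: a minimal Interface-script sketch of how the solo API introduced above could be exercised. It is not part of this change, and it assumes the pre-existing AvatarList.getAvatarIdentifiers(), MyAvatar.sessionUUID, Script.setTimeout and print scripting calls.

    // Hypothetical usage sketch (not part of this PR): solo the first other avatar
    // for ten seconds, then restore the normal mix.
    var others = AvatarList.getAvatarIdentifiers().filter(function (id) {
        return id && id !== MyAvatar.sessionUUID;   // skip our own session UUID
    });
    if (others.length > 0 && !Audio.isSoloing) {
        Audio.addToSoloList([others[0]]);           // only this node is mixed for us now
        print("Soloed " + others[0] + "; solo list: " + JSON.stringify(Audio.soloList));
        Script.setTimeout(function () {
            Audio.resetSoloList();                  // clear the solo list again
        }, 10000);
    }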
@@ -148,12 +148,12 @@ void ShapeInfo::setPointCollection(const ShapeInfo::PointCollection& pointCollec
    _hashKey.clear();
}

void ShapeInfo::setCapsuleY(float radius, float halfHeight) {
void ShapeInfo::setCapsuleY(float radius, float cylinderHalfHeight) {
    _url = "";
    _type = SHAPE_TYPE_CAPSULE_Y;
    radius = glm::max(radius, MIN_HALF_EXTENT);
    halfHeight = glm::max(halfHeight, 0.0f);
    _halfExtents = glm::vec3(radius, halfHeight, radius);
    cylinderHalfHeight = glm::max(cylinderHalfHeight, 0.0f);
    _halfExtents = glm::vec3(radius, cylinderHalfHeight + radius, radius);
    _hashKey.clear();
}
@@ -261,27 +261,27 @@ bool ShapeInfo::contains(const glm::vec3& point) const {
        case SHAPE_TYPE_CYLINDER_Z:
            return glm::length(glm::vec2(point.x, point.y)) <= _halfExtents.y;
        case SHAPE_TYPE_CAPSULE_X: {
            if (glm::abs(point.x) <= _halfExtents.x) {
                return glm::length(glm::vec2(point.y, point.z)) <= _halfExtents.z;
            if (glm::abs(point.x) <= _halfExtents.x - _halfExtents.y) {
                return glm::length(glm::vec2(point.y, point.z)) <= _halfExtents.y;
            } else {
                glm::vec3 absPoint = glm::abs(point) - _halfExtents.x;
                return glm::length(absPoint) <= _halfExtents.z;
                glm::vec3 absPoint = glm::abs(point) - glm::vec3(_halfExtents.x, 0.0f, 0.0f);
                return glm::length(absPoint) <= _halfExtents.y;
            }
        }
        case SHAPE_TYPE_CAPSULE_Y: {
            if (glm::abs(point.y) <= _halfExtents.y) {
                return glm::length(glm::vec2(point.x, point.z)) <= _halfExtents.x;
            if (glm::abs(point.y) <= _halfExtents.y - _halfExtents.z) {
                return glm::length(glm::vec2(point.x, point.z)) <= _halfExtents.z;
            } else {
                glm::vec3 absPoint = glm::abs(point) - _halfExtents.y;
                return glm::length(absPoint) <= _halfExtents.x;
                glm::vec3 absPoint = glm::abs(point) - glm::vec3(0.0f, _halfExtents.y, 0.0f);
                return glm::length(absPoint) <= _halfExtents.z;
            }
        }
        case SHAPE_TYPE_CAPSULE_Z: {
            if (glm::abs(point.z) <= _halfExtents.z) {
                return glm::length(glm::vec2(point.x, point.y)) <= _halfExtents.y;
            if (glm::abs(point.z) <= _halfExtents.z - _halfExtents.x) {
                return glm::length(glm::vec2(point.x, point.y)) <= _halfExtents.x;
            } else {
                glm::vec3 absPoint = glm::abs(point) - _halfExtents.z;
                return glm::length(absPoint) <= _halfExtents.y;
                glm::vec3 absPoint = glm::abs(point) - glm::vec3(0.0f, 0.0f, _halfExtents.z);
                return glm::length(absPoint) <= _halfExtents.x;
            }
        }
        case SHAPE_TYPE_BOX:
@ -67,7 +67,7 @@ public:
|
|||
void setBox(const glm::vec3& halfExtents);
|
||||
void setSphere(float radius);
|
||||
void setPointCollection(const PointCollection& pointCollection);
|
||||
void setCapsuleY(float radius, float halfHeight);
|
||||
void setCapsuleY(float radius, float cylinderHalfHeight);
|
||||
void setOffset(const glm::vec3& offset);
|
||||
|
||||
ShapeType getType() const { return _type; }
|
||||
|
|
|
@@ -448,7 +448,7 @@ Script.include("/~/system/libraries/controllerDispatcherUtils.js");

    this.leftPointer = this.pointerManager.createPointer(false, PickType.Ray, {
        joint: "_CAMERA_RELATIVE_CONTROLLER_LEFTHAND",
        filter: Picks.PICK_OVERLAYS | Picks.PICK_ENTITIES,
        filter: Picks.PICK_OVERLAYS | Picks.PICK_ENTITIES | Picks.PICK_INCLUDE_NONCOLLIDABLE,
        triggers: [{action: Controller.Standard.LTClick, button: "Focus"}, {action: Controller.Standard.LTClick, button: "Primary"}],
        posOffset: getGrabPointSphereOffset(Controller.Standard.LeftHand, true),
        hover: true,

@@ -458,7 +458,7 @@ Script.include("/~/system/libraries/controllerDispatcherUtils.js");
    });
    this.rightPointer = this.pointerManager.createPointer(false, PickType.Ray, {
        joint: "_CAMERA_RELATIVE_CONTROLLER_RIGHTHAND",
        filter: Picks.PICK_OVERLAYS | Picks.PICK_ENTITIES,
        filter: Picks.PICK_OVERLAYS | Picks.PICK_ENTITIES | Picks.PICK_INCLUDE_NONCOLLIDABLE,
        triggers: [{action: Controller.Standard.RTClick, button: "Focus"}, {action: Controller.Standard.RTClick, button: "Primary"}],
        posOffset: getGrabPointSphereOffset(Controller.Standard.RightHand, true),
        hover: true,

@@ -490,7 +490,7 @@ Script.include("/~/system/libraries/controllerDispatcherUtils.js");
    });
    this.mouseRayPick = Pointers.createPointer(PickType.Ray, {
        joint: "Mouse",
        filter: Picks.PICK_ENTITIES | Picks.PICK_OVERLAYS,
        filter: Picks.PICK_OVERLAYS | Picks.PICK_ENTITIES | Picks.PICK_INCLUDE_NONCOLLIDABLE,
        enabled: true
    });
    this.handleHandMessage = function(channel, data, sender) {
@@ -83,7 +83,6 @@ Script.include("/~/system/libraries/controllers.js");
    this.potentialEntityWithContextOverlay = false;
    this.entityWithContextOverlay = false;
    this.contextOverlayTimer = false;
    this.previousCollisionStatus = false;
    this.locked = false;
    this.highlightedEntity = null;
    this.reticleMinX = MARGIN;
@ -358,9 +358,9 @@ Script.include("/~/system/libraries/controllers.js");

        var sensorToWorldScale = MyAvatar.getSensorToWorldScale();

        var radius = capsuleData.radius / sensorToWorldScale;
        var height = (Vec3.distance(capsuleData.start, capsuleData.end) + (capsuleData.radius * 2.0)) / sensorToWorldScale;
        var capsuleRatio = 10.0 * radius / height;
        var diameter = 2.0 * capsuleData.radius / sensorToWorldScale;
        var height = (Vec3.distance(capsuleData.start, capsuleData.end) + diameter) / sensorToWorldScale;
        var capsuleRatio = 5.0 * diameter / height;
        var offset = _this.pickHeightOffset * capsuleRatio;

        _this.teleportHandCollisionPick = Picks.createPick(PickType.Collision, {

@ -370,9 +370,9 @@ Script.include("/~/system/libraries/controllers.js");
            shape: {
                shapeType: "capsule-y",
                dimensions: {
                    x: radius * 2.0,
                    y: height - (radius * 2.0),
                    z: radius * 2.0
                    x: diameter,
                    y: height,
                    z: diameter
                }
            },
            position: { x: 0, y: offset + height * 0.5, z: 0 },

@ -386,9 +386,9 @@ Script.include("/~/system/libraries/controllers.js");
            shape: {
                shapeType: "capsule-y",
                dimensions: {
                    x: radius * 2.0,
                    y: height - (radius * 2.0),
                    z: radius * 2.0
                    x: diameter,
                    y: height,
                    z: diameter
                }
            },
            position: { x: 0, y: offset + height * 0.5, z: 0 },

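The two hunks above resize the teleport collision picks, consistent with the capsule changes earlier in this diff: the "capsule-y" dimensions now pass the capsule's full height in y (end caps apparently included) instead of only the cylindrical section. A small worked sketch of that sizing arithmetic in plain C++, with illustrative values that are not taken from the engine; only the formulas mirror the script:

    #include <cstdio>

    int main() {
        // Assumed sample values for the avatar capsule (hypothetical, for illustration only).
        float capsuleRadius = 0.3f;      // corresponds to capsuleData.radius
        float capsuleAxisLength = 1.2f;  // corresponds to Vec3.distance(capsuleData.start, capsuleData.end)
        float sensorToWorldScale = 1.0f;

        // New formulation used by the script: full diameter and full height (caps included).
        float diameter = 2.0f * capsuleRadius / sensorToWorldScale;
        float height = (capsuleAxisLength + diameter) / sensorToWorldScale;
        // Replaces the old 10.0 * radius / height; gives the same value here since sensorToWorldScale is 1.
        float capsuleRatio = 5.0f * diameter / height;

        // The pick's "capsule-y" dimensions become (diameter, height, diameter);
        // previously y was height - diameter, i.e. only the cylindrical section.
        std::printf("dimensions: x=%.2f y=%.2f z=%.2f, capsuleRatio=%.2f\n",
                    diameter, height, diameter, capsuleRatio);
        return 0;
    }
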
@ -337,7 +337,17 @@ var notificationState = NotificationState.UNNOTIFIED;

function setNotificationState (notificationType, pending = undefined) {
    if (pending !== undefined) {
        pendingNotifications[notificationType] = pending;
        if ((notificationType === HifiNotificationType.TRANSACTIONS ||
             notificationType === HifiNotificationType.ITEMS)) {
            // special case, because we want to clear the indicator light
            // on INVENTORY when either Transactions or Items are
            // clicked on in the notification popup, we detect that case
            // here and force both to be unnotified.
            pendingNotifications[HifiNotificationType.TRANSACTIONS] = pending;
            pendingNotifications[HifiNotificationType.ITEMS] = pending;
        } else {
            pendingNotifications[notificationType] = pending;
        }
        notificationState = NotificationState.UNNOTIFIED;
        for (var key in pendingNotifications) {
            if (pendingNotifications[key]) {

@ -428,18 +438,12 @@ var labels = {
                setNotificationState(HifiNotificationType.PEOPLE, false);
            }
        },
        wallet: {
            label: 'Wallet',
        inventory: {
            label: 'Inventory',
            click: function () {
                StartInterface("hifiapp:WALLET");
                setNotificationState(HifiNotificationType.WALLET, false);
            }
        },
        marketplace: {
            label: 'Market',
            click: function () {
                StartInterface("hifiapp:MARKET");
                setNotificationState(HifiNotificationType.MARKETPLACE, false);
                StartInterface("hifiapp:INVENTORY");
                setNotificationState(HifiNotificationType.ITEMS, false);
                setNotificationState(HifiNotificationType.TRANSACTIONS, false);
            }
        },
        restart: {

@ -528,8 +532,7 @@ function buildMenuArray(serverState) {
        if (trayNotifications.enabled()) {
            menuArray.push(labels.goto);
            menuArray.push(labels.people);
            menuArray.push(labels.wallet);
            menuArray.push(labels.marketplace);
            menuArray.push(labels.inventory);
            menuArray.push(separator);
        }
        menuArray.push(labels.showNotifications);

@ -565,8 +568,7 @@ function updateLabels(serverState) {
    labels.showNotifications.checked = trayNotifications.enabled();
    labels.goto.icon = pendingNotifications[HifiNotificationType.GOTO] ? menuNotificationIcon : null;
    labels.people.icon = pendingNotifications[HifiNotificationType.PEOPLE] ? menuNotificationIcon : null;
    labels.wallet.icon = pendingNotifications[HifiNotificationType.WALLET] ? menuNotificationIcon : null;
    labels.marketplace.icon = pendingNotifications[HifiNotificationType.MARKETPLACE] ? menuNotificationIcon : null;
    labels.inventory.icon = pendingNotifications[HifiNotificationType.ITEMS] || pendingNotifications[HifiNotificationType.TRANSACTIONS] ? menuNotificationIcon : null;
    var onlineUsers = trayNotifications.getOnlineUsers();
    delete labels.people.submenu;
    if (onlineUsers) {

@ -32,10 +32,10 @@ const StartInterface=hfApp.startInterface;
const IsInterfaceRunning=hfApp.isInterfaceRunning;

const NotificationType = {
    GOTO: 'goto',
    PEOPLE: 'people',
    WALLET: 'wallet',
    MARKETPLACE: 'marketplace'
    GOTO: 'goto',
    PEOPLE: 'people',
    ITEMS: 'items',
    TRANSACTIONS: 'transactions'
};

@ -89,34 +89,34 @@ HifiNotification.prototype = {
            }
            break;

        case NotificationType.WALLET:
        case NotificationType.TRANSACTIONS:
            if (typeof(this.data) === "number") {
                if (this.data === 1) {
                    text = "You have " + this.data + " unread Wallet transaction.";
                    text = "You have " + this.data + " unread transaction.";
                } else {
                    text = "You have " + this.data + " unread Wallet transactions.";
                    text = "You have " + this.data + " unread transactions.";
                }
                message = "Click to open WALLET."
                url = "hifiapp:hifi/commerce/wallet/Wallet.qml";
                message = "Click to open INVENTORY."
                url = "hifiapp:INVENTORY";
                break;
            }
            text = this.data.message.replace(/<\/?[^>]+(>|$)/g, "");
            message = "Click to open WALLET.";
            url = "hifiapp:WALLET";
            message = "Click to open INVENTORY.";
            url = "hifiapp:INVENTORY";
            break;

        case NotificationType.MARKETPLACE:
        case NotificationType.ITEMS:
            if (typeof(this.data) === "number") {
                if (this.data === 1) {
                    text = this.data + " of your purchased items has an update available.";
                    text = this.data + " of your items has an update available.";
                } else {
                    text = this.data + " of your purchased items have updates available.";
                    text = this.data + " of your items have updates available.";
                }
            } else {
                text = "Update available for " + this.data.base_item_title + ".";
            }
            message = "Click to open MARKET.";
            url = "hifiapp:MARKET";
            message = "Click to open INVENTORY.";
            url = "hifiapp:INVENTORY";
            break;
        }
        notifier.notify({

@ -235,7 +235,6 @@ HifiNotifications.prototype = {
    },
    _showNotification: function () {
        var _this = this;

        if (osType === 'Darwin') {
            this.pendingNotifications[0].show(function () {
                // For OSX

@ -325,10 +324,10 @@ HifiNotifications.prototype = {
            case NotificationType.PEOPLE:
                notifyData = content.data.users;
                break;
            case NotificationType.WALLET:
            case NotificationType.TRANSACTIONS:
                notifyData = content.data.history;
                break;
            case NotificationType.MARKETPLACE:
            case NotificationType.ITEMS:
                notifyData = content.data.updates;
                break;
        }

@ -376,19 +375,16 @@ HifiNotifications.prototype = {
            }
        }, function (error, data) {
            if (error || !data.body) {
                console.log("Error: unable to get " + url);
                finished(false);
                console.log("Error: " + error + ": unable to get " + url);
                return;
            }
            var content = JSON.parse(data.body);
            if (!content || content.status != 'success') {
                console.log("Error: unable to get " + url);
                finished(false);
                return;
            }

            if (!content.total_entries) {
                finished(true, token);
                return;
            }
            if (!content.total_entries) {

@ -487,7 +483,7 @@ HifiNotifications.prototype = {
        console.log("Polling for economic activity");
        var url = METAVERSE_SERVER_URL + ECONOMIC_ACTIVITY_URL + '?' + options.join('&');
        console.log(url);
        _this._pollCommon(NotificationType.WALLET, url, since, function () {});
        _this._pollCommon(NotificationType.TRANSACTIONS, url, since, function () {});
    },
    pollForMarketplaceUpdates: function (since) {
        var _this = this;

@ -499,7 +495,7 @@ HifiNotifications.prototype = {
        console.log("Polling for marketplace update");
        var url = METAVERSE_SERVER_URL + UPDATES_URL + '?' + options.join('&');
        console.log(url);
        _this._pollCommon(NotificationType.MARKETPLACE, url, since, function (success, token) {
        _this._pollCommon(NotificationType.ITEMS, url, since, function (success, token) {
            if (success) {
                var options = [
                    'page=1',

@ -512,7 +508,7 @@ HifiNotifications.prototype = {
                        'bearer': token
                    }
                }, function (error, data) {
                    _this._pollToDisableHighlight(NotificationType.MARKETPLACE, error, data);
                    _this._pollToDisableHighlight(NotificationType.ITEMS, error, data);
                });
            }
        });
