Merge branch 'master' of https://github.com/highfidelity/hifi into sysTraySubmenus
Commit 0302fee23d: 418 changed files with 9737 additions and 5276 deletions
@@ -7,6 +7,8 @@ target_bullet()
set(INTERFACE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../../interface")
add_subdirectory("${INTERFACE_DIR}" "libraries/interface")
include_directories("${INTERFACE_DIR}/src")
set(HIFI_CODEC_PLUGIN_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../../plugins/hifiCodec")
add_subdirectory("${HIFI_CODEC_PLUGIN_DIR}" "libraries/hifiCodecPlugin")

target_link_libraries(native-lib android log m interface)

@@ -80,8 +80,10 @@ android {
if (Os.isFamily(Os.FAMILY_UNIX)) {
def uploadDumpSymsTask = rootProject.getTasksByName("uploadBreakpadDumpSyms${variant.name.capitalize()}", false).first()
def runDumpSymsTask = rootProject.getTasksByName("runBreakpadDumpSyms${variant.name.capitalize()}", false).first()
def renameHifiACTask = rootProject.getTasksByName("renameHifiACTask${variant.name.capitalize()}", false).first()
runDumpSymsTask.dependsOn(task)
variant.assemble.dependsOn(uploadDumpSymsTask)
variant.mergeResources.dependsOn(renameHifiACTask)
}
}

@@ -133,6 +135,10 @@ dependencies {
implementation 'com.android.support.constraint:constraint-layout:1.0.2'
implementation 'com.android.support:design:26.1.0'
compile 'com.android.support:support-v4:26.1.0'
compile 'com.android.support:appcompat-v7:26.1.0'
compile 'com.android.support:support-vector-drawable:26.1.0'

implementation 'com.android.support:appcompat-v7:26.1.0'
compile 'com.android.support:recyclerview-v7:26.1.0'
compile 'com.android.support:cardview-v7:26.1.0'
@@ -26,6 +26,7 @@

QAndroidJniObject __interfaceActivity;
QAndroidJniObject __loginCompletedListener;
QAndroidJniObject __signupCompletedListener;
QAndroidJniObject __loadCompleteListener;
QAndroidJniObject __usernameChangedListener;
void tempMessageHandler(QtMsgType type, const QMessageLogContext& context, const QString& message) {

@@ -156,7 +157,7 @@ JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeOnCrea
JavaVM* jvm;
env->GetJavaVM(&jvm);

QObject::connect(&AndroidHelper::instance(), &AndroidHelper::androidActivityRequested, [jvm](const QString& a, const bool backToScene, QList<QString> args) {
QObject::connect(&AndroidHelper::instance(), &AndroidHelper::androidActivityRequested, [jvm](const QString& a, const bool backToScene, QMap<QString, QString> args) {
JNIEnv* myNewEnv;
JavaVMAttachArgs jvmArgs;
jvmArgs.version = JNI_VERSION_1_6; // choose your JNI version

@@ -182,9 +183,11 @@ JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeOnCrea
jmethodID mapClassConstructor = myNewEnv->GetMethodID(hashMapClass, "<init>", "()V");
jobject hashmap = myNewEnv->NewObject(hashMapClass, mapClassConstructor);
jmethodID mapClassPut = myNewEnv->GetMethodID(hashMapClass, "put", "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;");
for (const QString& arg: args) {
QAndroidJniObject jArg = QAndroidJniObject::fromString(arg);
myNewEnv->CallObjectMethod(hashmap, mapClassPut, QAndroidJniObject::fromString("url").object<jstring>(), jArg.object<jstring>());
QMap<QString, QString>::iterator i;
for (i = args.begin(); i != args.end(); ++i) {
QAndroidJniObject jKey = QAndroidJniObject::fromString(i.key());
QAndroidJniObject jValue = QAndroidJniObject::fromString(i.value());
myNewEnv->CallObjectMethod(hashmap, mapClassPut, jKey.object<jstring>(), jValue.object<jstring>());
}
__interfaceActivity.callMethod<void>("openAndroidActivity", "(Ljava/lang/String;ZLjava/util/HashMap;)V", string.object<jstring>(), jBackToScene, hashmap);
if (attachedHere) {
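For orientation, the hunk above converts the QMap into a java.util.HashMap and then invokes openAndroidActivity(String, boolean, HashMap) on the stored activity reference. A minimal, hypothetical sketch of what the Java receiver looks like is below; the real handler is InterfaceActivity (diffed further down), and the "url" key name is taken from that hunk, but this class and method body are illustrative only.

import java.util.HashMap;

public class OpenActivitySketch {
    public static final String DOMAIN_URL = "url"; // same key the native side puts into the map

    // Called from C++ via callMethod("openAndroidActivity", "(Ljava/lang/String;ZLjava/util/HashMap;)V", ...)
    public void openAndroidActivity(String activityName, boolean backToScene, HashMap args) {
        String domainUrl = null;
        if (args != null && args.containsKey(DOMAIN_URL)) {
            domainUrl = (String) args.get(DOMAIN_URL);
        }
        // ...start the matching Activity/Fragment, passing domainUrl along as an extra...
    }
}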
@@ -255,6 +258,24 @@ JNIEXPORT jstring JNICALL Java_io_highfidelity_hifiinterface_fragment_HomeFragme
return env->NewStringUTF(lastLocation.toString().toLatin1().data());
}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeCancelLogin(JNIEnv *env, jobject instance) {

auto accountManager = DependencyManager::get<AccountManager>();

QObject::disconnect(accountManager.data(), &AccountManager::loginComplete, nullptr, nullptr);
QObject::disconnect(accountManager.data(), &AccountManager::loginFailed, nullptr, nullptr);

}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_nativeCancelLogin(JNIEnv *env,
jobject instance) {

Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeCancelLogin(env, instance);
}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeLogin(JNIEnv *env, jobject instance,
jstring username_, jstring password_,

@@ -273,23 +294,90 @@ Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeLogin(JNIEnv *en

QObject::connect(accountManager.data(), &AccountManager::loginComplete, [](const QUrl& authURL) {
jboolean jSuccess = (jboolean) true;
__loginCompletedListener.callMethod<void>("handleLoginCompleted", "(Z)V", jSuccess);
if (__loginCompletedListener.isValid()) {
__loginCompletedListener.callMethod<void>("handleLoginCompleted", "(Z)V", jSuccess);
}
});

QObject::connect(accountManager.data(), &AccountManager::loginFailed, []() {
jboolean jSuccess = (jboolean) false;
__loginCompletedListener.callMethod<void>("handleLoginCompleted", "(Z)V", jSuccess);
if (__loginCompletedListener.isValid()) {
__loginCompletedListener.callMethod<void>("handleLoginCompleted", "(Z)V", jSuccess);
}
});

QObject::connect(accountManager.data(), &AccountManager::usernameChanged, [](const QString& username) {
QAndroidJniObject string = QAndroidJniObject::fromString(username);
__usernameChangedListener.callMethod<void>("handleUsernameChanged", "(Ljava/lang/String;)V", string.object<jstring>());
if (__usernameChangedListener.isValid()) {
__usernameChangedListener.callMethod<void>("handleUsernameChanged", "(Ljava/lang/String;)V", string.object<jstring>());
}
});

QMetaObject::invokeMethod(accountManager.data(), "requestAccessToken",
Q_ARG(const QString&, username), Q_ARG(const QString&, password));
}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_nativeLogin(JNIEnv *env,
jobject instance,
jstring username_,
jstring password_,
jobject usernameChangedListener) {
Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeLogin(env, instance, username_, password_, usernameChangedListener);
}

JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeInitAfterAppLoaded(JNIEnv* env, jobject obj) {
AndroidHelper::instance().moveToThread(qApp->thread());
}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_nativeSignup(JNIEnv *env, jobject instance,
jstring email_, jstring username_,
jstring password_) {

const char *c_email = env->GetStringUTFChars(email_, 0);
const char *c_username = env->GetStringUTFChars(username_, 0);
const char *c_password = env->GetStringUTFChars(password_, 0);
QString email = QString(c_email);
QString username = QString(c_username);
QString password = QString(c_password);
env->ReleaseStringUTFChars(email_, c_email);
env->ReleaseStringUTFChars(username_, c_username);
env->ReleaseStringUTFChars(password_, c_password);

__signupCompletedListener = QAndroidJniObject(instance);

// disconnect any previous callback
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupCompleted, nullptr, nullptr);
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupFailed, nullptr, nullptr);

QObject::connect(&AndroidHelper::instance(), &AndroidHelper::handleSignupCompleted, []() {
jboolean jSuccess = (jboolean) true;
if (__signupCompletedListener.isValid()) {
__signupCompletedListener.callMethod<void>("handleSignupCompleted", "()V", jSuccess);
}
});

QObject::connect(&AndroidHelper::instance(), &AndroidHelper::handleSignupFailed, [](QString errorString) {
jboolean jSuccess = (jboolean) false;
jstring jError = QAndroidJniObject::fromString(errorString).object<jstring>();
if (__signupCompletedListener.isValid()) {
QAndroidJniObject string = QAndroidJniObject::fromString(errorString);
__signupCompletedListener.callMethod<void>("handleSignupFailed", "(Ljava/lang/String;)V", string.object<jstring>());
}
});

AndroidHelper::instance().signup(email, username, password);
}

JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_nativeCancelSignup(JNIEnv *env, jobject instance) {
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupCompleted, nullptr, nullptr);
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupFailed, nullptr, nullptr);

__signupCompletedListener = nullptr;
}

JNIEXPORT jboolean JNICALL
Java_io_highfidelity_hifiinterface_fragment_FriendsFragment_nativeIsLoggedIn(JNIEnv *env, jobject instance) {
auto accountManager = DependencyManager::get<AccountManager>();
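The native code above calls back into the Java fragments through the stored QAndroidJniObject listeners. A condensed sketch of the implied Java-side contract is shown below; this interface does not exist in the codebase (the concrete methods live in LoginFragment and SignupFragment, diffed further down), it only restates the method names and JNI signatures used by the callMethod calls above.

// Illustrative only: the callback shape the native side expects.
public interface NativeAuthCallbacks {
    void handleLoginCompleted(boolean success);   // matches "(Z)V"
    void handleUsernameChanged(String username);  // matches "(Ljava/lang/String;)V"
    void handleSignupCompleted();                 // matches "()V"
    void handleSignupFailed(String error);        // matches "(Ljava/lang/String;)V"
}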
@@ -39,6 +39,7 @@ import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import io.highfidelity.hifiinterface.fragment.WebViewFragment;
import io.highfidelity.hifiinterface.receiver.HeadsetStateReceiver;

@@ -68,6 +69,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
private native void nativeEnterBackground();
private native void nativeEnterForeground();
private native long nativeOnExitVr();
private native void nativeInitAfterAppLoaded();

private AssetManager assetManager;

@@ -303,14 +305,22 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
switch (activityName) {
case "Home":
case "Privacy Policy":
case "Login": {
nativeBeforeEnterBackground();
Intent intent = new Intent(this, MainActivity.class);
intent.putExtra(MainActivity.EXTRA_FRAGMENT, activityName);
intent.putExtra(MainActivity.EXTRA_BACK_TO_SCENE, backToScene);
startActivity(intent);
break;
}
case "Login":
nativeBeforeEnterBackground();
Intent loginIntent = new Intent(this, MainActivity.class);
loginIntent.putExtra(MainActivity.EXTRA_FRAGMENT, activityName);
loginIntent.putExtra(MainActivity.EXTRA_BACK_TO_SCENE, backToScene);
if (args != null && args.containsKey(DOMAIN_URL)) {
loginIntent.putExtra(DOMAIN_URL, (String) args.get(DOMAIN_URL));
}
startActivity(loginIntent);
break;
case "WebView":
runOnUiThread(() -> {
webSlidingDrawer.setVisibility(View.VISIBLE);

@@ -342,6 +352,9 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
if (nativeEnterBackgroundCallEnqueued) {
nativeEnterBackground();
}
runOnUiThread(() -> {
nativeInitAfterAppLoaded();
});
}

public void performHapticFeedback(int duration) {
@@ -29,22 +29,29 @@ import android.widget.TextView;
import com.squareup.picasso.Callback;
import com.squareup.picasso.Picasso;

import java.util.HashMap;
import java.util.Map;

import io.highfidelity.hifiinterface.fragment.FriendsFragment;
import io.highfidelity.hifiinterface.fragment.HomeFragment;
import io.highfidelity.hifiinterface.fragment.LoginFragment;
import io.highfidelity.hifiinterface.fragment.PolicyFragment;
import io.highfidelity.hifiinterface.fragment.SettingsFragment;
import io.highfidelity.hifiinterface.task.DownloadProfileImageTask;
import io.highfidelity.hifiinterface.fragment.SignedInFragment;
import io.highfidelity.hifiinterface.fragment.SignupFragment;import io.highfidelity.hifiinterface.task.DownloadProfileImageTask;

public class MainActivity extends AppCompatActivity implements NavigationView.OnNavigationItemSelectedListener,
LoginFragment.OnLoginInteractionListener,
HomeFragment.OnHomeInteractionListener,
FriendsFragment.OnHomeInteractionListener {
FriendsFragment.OnHomeInteractionListener,
SignupFragment.OnSignupInteractionListener,
SignedInFragment.OnSignedInInteractionListener {

private static final int PROFILE_PICTURE_PLACEHOLDER = R.drawable.default_profile_avatar;
public static final String DEFAULT_FRAGMENT = "Home";
public static final String EXTRA_FRAGMENT = "fragment";
public static final String EXTRA_BACK_TO_SCENE = "backToScene";
public static final String EXTRA_BACK_TO_URL = "url";

private String TAG = "HighFidelity";

@@ -62,6 +69,7 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
private MenuItem mPeopleMenuItem;

private boolean backToScene;
private String backToUrl;

@Override
protected void onCreate(Bundle savedInstanceState) {

@@ -105,9 +113,8 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
loadFragment(DEFAULT_FRAGMENT);
}

if (getIntent().hasExtra(EXTRA_BACK_TO_SCENE)) {
backToScene = getIntent().getBooleanExtra(EXTRA_BACK_TO_SCENE, false);
}
backToScene = getIntent().getBooleanExtra(EXTRA_BACK_TO_SCENE, false);
backToUrl = getIntent().getStringExtra(EXTRA_BACK_TO_URL);
}
}

@@ -143,35 +150,44 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On

private void loadHomeFragment(boolean addToBackStack) {
Fragment fragment = HomeFragment.newInstance();
loadFragment(fragment, getString(R.string.home), getString(R.string.tagFragmentHome), addToBackStack);
loadFragment(fragment, getString(R.string.home), getString(R.string.tagFragmentHome), addToBackStack, true);
}

private void loadLoginFragment() {
Fragment fragment = LoginFragment.newInstance();
loadFragment(fragment, getString(R.string.login), getString(R.string.tagFragmentLogin), true, true);
}

loadFragment(fragment, getString(R.string.login), getString(R.string.tagFragmentLogin), true);
private void loadSignedInFragment() {
Fragment fragment = SignedInFragment.newInstance();
loadFragment(fragment, getString(R.string.welcome), getString(R.string.tagFragmentSignedIn), true, true);
}

private void loadSignupFragment() {
Fragment fragment = SignupFragment.newInstance();
loadFragment(fragment, getString(R.string.signup), getString(R.string.tagFragmentSignup), true, false);
}

private void loadPrivacyPolicyFragment() {
Fragment fragment = PolicyFragment.newInstance();

loadFragment(fragment, getString(R.string.privacyPolicy), getString(R.string.tagFragmentPolicy), true);
loadFragment(fragment, getString(R.string.privacyPolicy), getString(R.string.tagFragmentPolicy), true, true);
}

private void loadPeopleFragment() {
Fragment fragment = FriendsFragment.newInstance();

loadFragment(fragment, getString(R.string.people), getString(R.string.tagFragmentPeople), true);
loadFragment(fragment, getString(R.string.people), getString(R.string.tagFragmentPeople), true, true);
}

private void loadSettingsFragment() {
SettingsFragment fragment = SettingsFragment.newInstance();

loadFragment(fragment, getString(R.string.settings), getString(R.string.tagSettings), true);
loadFragment(fragment, getString(R.string.settings), getString(R.string.tagSettings), true, true);
}

private void loadFragment(Fragment fragment, String title, String tag, boolean addToBackStack) {
private void loadFragment(Fragment newFragment, String title, String tag, boolean addToBackStack, boolean goBackUntilHome) {
FragmentManager fragmentManager = getFragmentManager();

// check if it's the same fragment

@@ -183,17 +199,19 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
return; // cancel as we are already in that fragment
}

// go back until first transaction
int backStackEntryCount = fragmentManager.getBackStackEntryCount();
for (int i = 0; i < backStackEntryCount - 1; i++) {
fragmentManager.popBackStackImmediate();
if (goBackUntilHome) {
// go back until first transaction
int backStackEntryCount = fragmentManager.getBackStackEntryCount();
for (int i = 0; i < backStackEntryCount - 1; i++) {
fragmentManager.popBackStackImmediate();
}
}

// this case is when we wanted to go home.. rollback already did that!
// But asking for a new Home fragment makes it easier to have an updated list so we let it to continue

FragmentTransaction ft = fragmentManager.beginTransaction();
ft.replace(R.id.content_frame, fragment, tag);
ft.replace(R.id.content_frame, newFragment, tag);

if (addToBackStack) {
ft.addToBackStack(title);

@@ -301,7 +319,7 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
}

private void goToLastLocation() {
goToDomain("");
goToDomain(backToUrl != null? backToUrl : "");
}

private void goToDomain(String domainUrl) {

@@ -330,6 +348,32 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
}
}

@Override
public void onGettingStarted() {
loadHomeFragment(false);
if (backToScene) {
backToScene = false;
goToLastLocation();
}
}

@Override
public void onLoginRequested() {
// go back from signup to login
onBackPressed();
}

@Override
public void onSignupRequested() {
loadSignupFragment();
}

@Override
public void onSignupCompleted() {
loadSignedInFragment();
updateLoginMenu();
}

public void handleUsernameChanged(String username) {
runOnUiThread(() -> updateProfileHeader(username));
}
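A note on the loadFragment refactor above: the new goBackUntilHome flag decides whether the back stack is unwound to the first transaction before the replacement. The sketch below is illustrative only (MainActivity is not structured this way and the helper class does not exist); it just shows how the two values of the flag are meant to be used, matching the calls in the diff.

// Illustrative sketch of the goBackUntilHome flag; not part of the codebase.
public abstract class FragmentNavigationSketch {
    // Same shape as the refactored MainActivity.loadFragment(...) above.
    abstract void loadFragment(android.app.Fragment f, String title, String tag,
                               boolean addToBackStack, boolean goBackUntilHome);

    void openSignup(android.app.Fragment signupFragment) {
        // false: keep Login underneath, so onLoginRequested() can simply pop back to it.
        loadFragment(signupFragment, "Sign Up", "tagFragmentSignup", true, false);
    }

    void openPeople(android.app.Fragment friendsFragment) {
        // true: unwind the back stack to the first transaction before replacing,
        // so Home stays the single root of the stack.
        loadFragment(friendsFragment, "People", "tagFragmentPeople", true, true);
    }
}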
@@ -4,12 +4,10 @@ import android.app.Activity;
import android.app.Fragment;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;

@@ -19,19 +17,26 @@ import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;

import org.qtproject.qt5.android.QtNative;

import io.highfidelity.hifiinterface.R;

import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationActive;
import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationInactive;

public class LoginFragment extends Fragment {

private EditText mUsername;
private EditText mPassword;
private TextView mError;
private TextView mForgotPassword;
private TextView mSignup;
private Button mLoginButton;

private ProgressDialog mDialog;

public native void nativeLogin(String username, String password, Activity usernameChangedListener);
public native void nativeCancelLogin();

private LoginFragment.OnLoginInteractionListener mListener;

@@ -54,48 +59,12 @@ public class LoginFragment extends Fragment {
mError = rootView.findViewById(R.id.error);
mLoginButton = rootView.findViewById(R.id.loginButton);
mForgotPassword = rootView.findViewById(R.id.forgotPassword);

mUsername.addTextChangedListener(new TextWatcher() {
boolean ignoreNextChange = false;
boolean hadBlankSpace = false;
@Override
public void beforeTextChanged(CharSequence charSequence, int start, int count, int after) {
hadBlankSpace = charSequence.length() > 0 && charSequence.charAt(charSequence.length()-1) == ' ';
}

@Override
public void onTextChanged(CharSequence charSequence, int start, int count, int after) {

}

@Override
public void afterTextChanged(Editable editable) {
if (!ignoreNextChange) {
ignoreNextChange = true;
boolean spaceFound = false;
for (int i = 0; i < editable.length(); i++) {
if (editable.charAt(i) == ' ') {
spaceFound=true;
editable.delete(i, i + 1);
i--;
}
}

if (hadBlankSpace && !spaceFound && editable.length() > 0) {
editable.delete(editable.length()-1, editable.length());
}

editable.append(' ');
ignoreNextChange = false;
}

}
});

mSignup = rootView.findViewById(R.id.signupButton);

mLoginButton.setOnClickListener(view -> login());

mForgotPassword.setOnClickListener(view -> forgotPassword());
mSignup.setOnClickListener(view -> signup());

mPassword.setOnEditorActionListener(
(textView, actionId, keyEvent) -> {

@@ -125,10 +94,19 @@ public class LoginFragment extends Fragment {
mListener = null;
}

@Override
public void onResume() {
super.onResume();
// This hack intends to keep Qt threads running even after the app comes from background
QtNative.setApplicationState(ApplicationActive);
}

@Override
public void onStop() {
super.onStop();
cancelActivityIndicator();
// Leave the Qt app paused
QtNative.setApplicationState(ApplicationInactive);
hideKeyboard();
}

@@ -146,6 +124,12 @@ public class LoginFragment extends Fragment {
}
}

public void signup() {
if (mListener != null) {
mListener.onSignupRequested();
}
}

private void hideKeyboard() {
View view = getActivity().getCurrentFocus();
if (view != null) {

@@ -164,7 +148,15 @@ public class LoginFragment extends Fragment {
mDialog = new ProgressDialog(getContext());
}
mDialog.setMessage(getString(R.string.logging_in));
mDialog.setCancelable(false);
mDialog.setCancelable(true);
mDialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialogInterface) {
nativeCancelLogin();
cancelActivityIndicator();
mLoginButton.setEnabled(true);
}
});
mDialog.show();
}

@@ -184,7 +176,6 @@ public class LoginFragment extends Fragment {
}

public void handleLoginCompleted(boolean success) {
Log.d("[LOGIN]", "handleLoginCompleted " + success);
getActivity().runOnUiThread(() -> {
mLoginButton.setEnabled(true);
cancelActivityIndicator();

@@ -200,6 +191,7 @@ public class LoginFragment extends Fragment {

public interface OnLoginInteractionListener {
void onLoginCompleted();
void onSignupRequested();
}

}
@@ -0,0 +1,73 @@
package io.highfidelity.hifiinterface.fragment;

import android.app.Fragment;
import android.content.Context;
import android.os.Bundle;
import android.text.Html;
import android.text.Spanned;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;

import java.io.IOException;
import java.io.InputStream;

import io.highfidelity.hifiinterface.R;

public class SignedInFragment extends Fragment {

private Button mGetStartedButton;
private OnSignedInInteractionListener mListener;

public SignedInFragment() {
// Required empty public constructor
}

public static SignedInFragment newInstance() {
SignedInFragment fragment = new SignedInFragment();
return fragment;
}

@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_signedin, container, false);
mGetStartedButton = rootView.findViewById(R.id.getStarted);

mGetStartedButton.setOnClickListener(view -> {
getStarted();
});

return rootView;
}

@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof SignedInFragment.OnSignedInInteractionListener) {
mListener = (SignedInFragment.OnSignedInInteractionListener) context;
} else {
throw new RuntimeException(context.toString()
+ " must implement OnSignedInInteractionListener");
}
}

@Override
public void onDetach() {
super.onDetach();
mListener = null;
}

public void getStarted() {
if (mListener != null) {
mListener.onGettingStarted();
}
}

public interface OnSignedInInteractionListener {
void onGettingStarted();
}

}
@@ -0,0 +1,217 @@
package io.highfidelity.hifiinterface.fragment;

import android.app.Activity;
import android.app.Fragment;
import android.app.ProgressDialog;
import android.content.Context;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;

import org.qtproject.qt5.android.QtNative;

import io.highfidelity.hifiinterface.R;

import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationActive;
import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationInactive;

public class SignupFragment extends Fragment {

private EditText mEmail;
private EditText mUsername;
private EditText mPassword;
private TextView mError;
private TextView mCancelButton;

private Button mSignupButton;

private ProgressDialog mDialog;

public native void nativeSignup(String email, String username, String password); // move to SignupFragment
public native void nativeCancelSignup();
public native void nativeLogin(String username, String password, Activity usernameChangedListener);
public native void nativeCancelLogin();

private SignupFragment.OnSignupInteractionListener mListener;

public SignupFragment() {
// Required empty public constructor
}

public static SignupFragment newInstance() {
SignupFragment fragment = new SignupFragment();
return fragment;
}

@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_signup, container, false);

mEmail = rootView.findViewById(R.id.email);
mUsername = rootView.findViewById(R.id.username);
mPassword = rootView.findViewById(R.id.password);
mError = rootView.findViewById(R.id.error);
mSignupButton = rootView.findViewById(R.id.signupButton);
mCancelButton = rootView.findViewById(R.id.cancelButton);

mSignupButton.setOnClickListener(view -> signup());
mCancelButton.setOnClickListener(view -> login());
mPassword.setOnEditorActionListener(
(textView, actionId, keyEvent) -> {
if (actionId == EditorInfo.IME_ACTION_DONE) {
mSignupButton.performClick();
return true;
}
return false;
});
return rootView;
}

@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof OnSignupInteractionListener) {
mListener = (OnSignupInteractionListener) context;
} else {
throw new RuntimeException(context.toString()
+ " must implement OnSignupInteractionListener");
}
}

@Override
public void onDetach() {
super.onDetach();
mListener = null;
}

@Override
public void onResume() {
super.onResume();
// This hack intends to keep Qt threads running even after the app comes from background
QtNative.setApplicationState(ApplicationActive);
}

@Override
public void onStop() {
super.onStop();
cancelActivityIndicator();
// Leave the Qt app paused
QtNative.setApplicationState(ApplicationInactive);
hideKeyboard();
}

private void login() {
if (mListener != null) {
mListener.onLoginRequested();
}
}

public void signup() {
String email = mEmail.getText().toString().trim();
String username = mUsername.getText().toString().trim();
String password = mPassword.getText().toString();
hideKeyboard();
if (email.isEmpty() || username.isEmpty() || password.isEmpty()) {
showError(getString(R.string.signup_email_username_or_password_incorrect));
} else {
mSignupButton.setEnabled(false);
hideError();
showActivityIndicator();
nativeSignup(email, username, password);
}
}

private void hideKeyboard() {
View view = getActivity().getCurrentFocus();
if (view != null) {
InputMethodManager imm = (InputMethodManager) getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
}
}

private void showActivityIndicator() {
if (mDialog == null) {
mDialog = new ProgressDialog(getContext());
}
mDialog.setMessage(getString(R.string.creating_account));
mDialog.setCancelable(true);
mDialog.setOnCancelListener(dialogInterface -> {
nativeCancelSignup();
cancelActivityIndicator();
mSignupButton.setEnabled(true);
});
mDialog.show();
}

private void cancelActivityIndicator() {
if (mDialog != null) {
mDialog.cancel();
}
}
private void showError(String error) {
mError.setText(error);
mError.setVisibility(View.VISIBLE);
}

private void hideError() {
mError.setText("");
mError.setVisibility(View.INVISIBLE);
}

public interface OnSignupInteractionListener {
void onSignupCompleted();
void onLoginRequested();
}

public void handleSignupCompleted() {
String username = mUsername.getText().toString().trim();
String password = mPassword.getText().toString();
mDialog.setMessage(getString(R.string.logging_in));
mDialog.setCancelable(true);
mDialog.setOnCancelListener(dialogInterface -> {
nativeCancelLogin();
cancelActivityIndicator();
if (mListener != null) {
mListener.onLoginRequested();
}
});
mDialog.show();
nativeLogin(username, password, getActivity());
}

public void handleSignupFailed(String error) {
getActivity().runOnUiThread(() -> {
mSignupButton.setEnabled(true);
cancelActivityIndicator();
mError.setText(error);
mError.setVisibility(View.VISIBLE);
});
}

public void handleLoginCompleted(boolean success) {
getActivity().runOnUiThread(() -> {
mSignupButton.setEnabled(true);
cancelActivityIndicator();

if (success) {
if (mListener != null) {
mListener.onSignupCompleted();
}
} else {
// Registration was successful but login failed.
// Let the user to login manually
mListener.onLoginRequested();
}
});
}

}
android/app/src/main/res/drawable/ic_eye_noshow.xml (new file, 27 lines)
@@ -0,0 +1,27 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="36dp"
android:height="22dp"
android:viewportWidth="36"
android:viewportHeight="22">

<path
android:fillColor="#3D3D3D"
android:fillType="evenOdd"
android:pathData="M3.59534,11.0156 C6.16042,13.4128,9.65987,15.5898,13.6042,16.1774 C17.686,16.7856,22.4164,15.7196,27.3057,11.0659 C22.0721,6.07309,17.0642,5.14115,12.9153,5.90073 C8.99427,6.61859,5.69298,8.87688,3.59534,11.0156 Z M12.455,3.27591 C17.7727,2.30235,23.9836,3.74895,30.1053,10.1333 L31,11.0664 L30.1053,11.9994 C24.3636,17.9875,18.4774,19.5983,13.2276,18.8161 C8.06048,18.0463,3.70384,14.9892,0.837069,11.9994 L0,11.1265 L0.778477,10.1986 C3.05338,7.48717,7.2318,4.23217,12.455,3.27591 Z" />
<path
android:fillColor="#3D3D3D"
android:pathData="M15.6539,7.11119 C17.6719,7.11119,19.3078,8.81726,19.3078,10.9218 C19.3078,13.0263,17.6719,14.7324,15.6539,14.7324 C13.6359,14.7324,12,13.0263,12,10.9218 C12,8.81726,13.6359,7.11119,15.6539,7.11119 Z" />
<!--path
android:fillColor="#000000"
android:strokeColor="#ffffff"
android:strokeWidth="2.7"
android:strokeLineCap="round"
android:pathData="M27,2.90919 L8.90919,21" /-->
<path
android:fillColor="#000000"
android:strokeColor="#3D3D3D"
android:strokeWidth="3"
android:strokeLineCap="round"
android:pathData="M25,2.12132 L7.12132,20" />
</vector>
android/app/src/main/res/drawable/ic_eye_show.xml (new file, 15 lines)
@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="36dp"
android:height="16dp"
android:viewportWidth="36"
android:viewportHeight="16">

<path
android:fillColor="#3D3D3D"
android:fillType="evenOdd"
android:pathData="M3.59534,8.01564 C6.16042,10.4128,9.65987,12.5898,13.6042,13.1774 C17.686,13.7856,22.4164,12.7196,27.3057,8.06585 C22.0721,3.07309,17.0642,2.14115,12.9153,2.90073 C8.99427,3.61859,5.69298,5.87688,3.59534,8.01564 Z M12.455,0.275915 C17.7727,-0.697651,23.9836,0.748949,30.1053,7.13329 L31,8.06636 L30.1053,8.99944 C24.3636,14.9875,18.4774,16.5983,13.2276,15.8161 C8.06048,15.0463,3.70384,11.9892,0.837069,8.99944 L0,8.12646 L0.778477,7.1986 C3.05338,4.48717,7.2318,1.23217,12.455,0.275915 Z" />
<path
android:fillColor="#3D3D3D"
android:pathData="M15.6441,4.11118 C17.6621,4.11118,19.298,5.81725,19.298,7.92179 C19.298,10.0263,17.6621,11.7324,15.6441,11.7324 C13.6261,11.7324,11.9902,10.0263,11.9902,7.92179 C11.9902,5.81725,13.6261,4.11118,15.6441,4.11118 Z" />
</vector>
@@ -0,0 +1,24 @@
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android" >
<item android:state_pressed="true" >
<shape android:shape="rectangle" >
<corners android:radius="4dip" />
<stroke android:width="1dip" android:color="@color/colorButton2" />
<solid android:color="@color/colorButton2"/>
</shape>
</item>
<item android:state_focused="true">
<shape android:shape="rectangle" >
<corners android:radius="4dip" />
<stroke android:width="1dip" android:color="@color/colorButton2" />
<solid android:color="@color/colorButton2"/>
</shape>
</item>
<item>
<shape android:shape="rectangle" >
<corners android:radius="4dip" />
<stroke android:width="1dip" android:color="@color/colorButton2" />
<solid android:color="@color/colorButton2"/>
</shape>
</item>
</selector>
@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:state_checked="true" android:drawable="@drawable/ic_eye_noshow"/>
<item android:drawable="@drawable/ic_eye_show" />
</selector>
@@ -21,10 +21,12 @@
android:id="@+id/error"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginBottom="16dp"
android:layout_marginBottom="25dp"
android:layout_marginLeft="9dp"
android:layout_marginRight="9dp"
android:fontFamily="@font/raleway"
android:textColor="@color/colorLoginError"
android:textSize="12sp"
android:textSize="14sp"
app:layout_constraintBottom_toTopOf="@id/username"
app:layout_constraintLeft_toLeftOf="@id/username"
android:visibility="invisible"/>

@@ -41,72 +43,100 @@
android:paddingTop="14dp"
android:ems="10"
android:fontFamily="@font/raleway"
android:textSize="14sp"
android:textSize="17sp"
android:inputType="textEmailAddress"
android:textStyle="italic"
android:textColor="@color/editTextColor"
android:textColorHint="@color/editTextColor"
android:gravity="right|center_vertical"
android:gravity="left|center_vertical"
app:layout_constraintTop_toBottomOf="@id/header"
android:layout_marginTop="70dp"
android:hint="@string/username_or_email" />

<EditText

<android.support.design.widget.TextInputLayout
android:id="@+id/passwordLayout"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginLeft="46dp"
android:layout_marginRight="46dp"
app:passwordToggleTint="@color/showPasswordColor"
app:passwordToggleEnabled="true"
app:hintAnimationEnabled="false"
app:passwordToggleDrawable="@drawable/selector_show_password"
app:hintEnabled="false"
app:layout_constraintTop_toBottomOf="@id/username"
android:layout_marginTop="13dp"
>
<android.support.design.widget.TextInputEditText
android:id="@+id/password"
android:layout_width="match_parent"
android:layout_height="35dp"
android:layout_marginLeft="46dp"
android:layout_marginRight="46dp"
android:background="@drawable/rounded_edit"
android:padding="7dp"
android:paddingRight="12dp"
android:drawablePadding="55dp"
android:paddingTop="14dp"
android:drawableEnd="@drawable/ic_eye_noshow"
android:ems="10"
android:fontFamily="@font/raleway"
android:textSize="14sp"
android:inputType="textPassword"
android:textSize="17sp"
android:textStyle="italic"
android:textColor="@color/editTextColor"
android:textColorHint="@color/editTextColor"
android:gravity="right|center_vertical"
app:layout_constraintTop_toBottomOf="@id/username"
android:gravity="left|center_vertical"
android:imeOptions="actionDone"
android:hint="@string/password"
android:layout_marginTop="13dp"
android:imeOptions="actionDone"/>
android:inputType="textPassword" />
</android.support.design.widget.TextInputLayout>

<Button
android:id="@+id/loginButton"
android:layout_width="154dp"
android:layout_height="38dp"
android:layout_marginTop="16dp"
android:background="@drawable/rounded_button"
android:fontFamily="@font/raleway_semibold"
android:paddingBottom="0dp"
android:paddingLeft="55dp"
android:paddingRight="55dp"
android:paddingTop="0dp"
android:text="@string/login"
android:textColor="@color/white_opaque"
android:textAllCaps="false"
android:textSize="15sp"
android:textSize="18sp"
app:layout_constraintRight_toRightOf="@id/username"
app:layout_constraintTop_toBottomOf="@id/password"
app:layout_constraintTop_toBottomOf="@id/forgotPassword"
app:layout_goneMarginTop="4dp"/>

<TextView
android:id="@+id/forgotPassword"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:paddingTop="9dp"
android:paddingBottom="16dp"
android:fontFamily="@font/raleway_semibold"
android:textSize="14dp"
android:text="@string/forgot_password"
android:textStyle="italic"
android:paddingRight="10dp"
app:layout_constraintLeft_toLeftOf="@id/password"
app:layout_constraintTop_toTopOf="@id/loginButton"
app:layout_constraintRight_toLeftOf="@id/loginButton"
app:layout_constraintRight_toRightOf="@id/passwordLayout"
app:layout_constraintTop_toBottomOf="@id/passwordLayout"
android:textColor="@color/colorButton1"/>

<Button
android:id="@+id/signupButton"
android:layout_width="0dp"
app:layout_constraintWidth_default="spread"
android:layout_height="38dp"
android:background="@drawable/rounded_secondary_button"
android:fontFamily="@font/raleway_semibold"
android:paddingBottom="0dp"
android:paddingTop="0dp"
android:layout_marginRight="15dp"
android:text="@string/signup"
android:textColor="@color/white_opaque"
android:textAllCaps="false"
android:textSize="18sp"
app:layout_constraintLeft_toLeftOf="@id/passwordLayout"
app:layout_constraintTop_toTopOf="@id/loginButton"
app:layout_constraintRight_toLeftOf="@id/loginButton"
app:layout_goneMarginTop="4dp"/>
android/app/src/main/res/layout/fragment_signedin.xml (new file, 63 lines)
@@ -0,0 +1,63 @@
<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/backgroundLight">

<ImageView
android:id="@+id/header"
android:layout_width="@dimen/header_hifi_width"
android:layout_height="@dimen/header_hifi_height"
android:layout_marginTop="@dimen/header_hifi_margin_top"
android:contentDescription="HighFidelity"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:srcCompat="@drawable/hifi_header" />

<TextView
android:id="@+id/welcome"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="50dp"
android:paddingLeft="86dp"
android:paddingRight="86dp"
android:fontFamily="@font/raleway"
android:textColor="@color/clearText"
android:textSize="24sp"
android:text="@string/signedin_welcome"
app:layout_constraintTop_toBottomOf="@id/header"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
android:gravity="center"
/>

<Button
android:id="@+id/getStarted"
android:layout_width="217dp"
android:layout_height="38dp"
android:layout_marginTop="30dp"
android:background="@drawable/rounded_button"
android:fontFamily="@font/raleway_semibold"
android:paddingBottom="0dp"
android:paddingLeft="25dp"
android:paddingRight="25dp"
android:paddingTop="0dp"
android:text="@string/get_started"
android:textColor="@color/white_opaque"
android:textAllCaps="false"
android:textSize="18sp"
app:layout_constraintTop_toBottomOf="@id/welcome"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_goneMarginTop="4dp"/>

</android.support.constraint.ConstraintLayout>
android/app/src/main/res/layout/fragment_signup.xml (new file, 151 lines)
@@ -0,0 +1,151 @@
<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/backgroundLight">

<ImageView
android:id="@+id/header"
android:layout_width="@dimen/header_hifi_width"
android:layout_height="@dimen/header_hifi_height"
android:layout_marginTop="@dimen/header_hifi_margin_top"
android:contentDescription="HighFidelity"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:srcCompat="@drawable/hifi_header" />

<TextView
android:id="@+id/error"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_marginBottom="16dp"
android:layout_marginLeft="9dp"
android:layout_marginRight="9dp"
android:fontFamily="@font/raleway"
android:textColor="@color/colorLoginError"
android:textSize="14sp"
app:layout_constraintBottom_toTopOf="@id/email"
app:layout_constraintLeft_toLeftOf="@id/email"
app:layout_constraintRight_toRightOf="@id/email"
android:visibility="invisible"/>

<EditText
android:id="@+id/email"
android:layout_width="match_parent"
android:layout_height="35dp"
android:layout_marginLeft="46dp"
android:layout_marginRight="46dp"
android:background="@drawable/rounded_edit"
android:padding="7dp"
android:paddingRight="12dp"
android:paddingTop="14dp"
android:ems="10"
android:fontFamily="@font/raleway"
android:textSize="17sp"
android:inputType="textEmailAddress"
android:textStyle="italic"
android:textColor="@color/editTextColor"
android:textColorHint="@color/editTextColor"
android:gravity="left|center_vertical"
app:layout_constraintTop_toBottomOf="@id/header"
android:layout_marginTop="70dp"
android:hint="@string/email" />

<EditText
android:id="@+id/username"
android:layout_width="match_parent"
android:layout_height="35dp"
android:layout_marginLeft="46dp"
android:layout_marginRight="46dp"
android:background="@drawable/rounded_edit"
android:padding="7dp"
android:paddingRight="12dp"
android:paddingTop="14dp"
android:ems="10"
android:fontFamily="@font/raleway"
android:textSize="17sp"
android:inputType="text"
android:textStyle="italic"
android:textColor="@color/editTextColor"
android:textColorHint="@color/editTextColor"
android:gravity="left|center_vertical"
app:layout_constraintTop_toBottomOf="@id/email"
android:layout_marginTop="7dp"
android:hint="@string/username" />

<android.support.design.widget.TextInputLayout
android:id="@+id/passwordLayout"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginLeft="46dp"
android:layout_marginRight="46dp"
app:passwordToggleTint="@color/showPasswordColor"
app:passwordToggleEnabled="true"
app:hintAnimationEnabled="false"
app:passwordToggleDrawable="@drawable/selector_show_password"
app:hintEnabled="false"
app:layout_constraintTop_toBottomOf="@id/username"
android:layout_marginTop="7dp"
>
<android.support.design.widget.TextInputEditText
android:id="@+id/password"
android:layout_width="match_parent"
android:layout_height="35dp"
android:background="@drawable/rounded_edit"
android:padding="7dp"
android:drawablePadding="55dp"
android:paddingTop="14dp"
android:drawableEnd="@drawable/ic_eye_noshow"
android:ems="10"
android:fontFamily="@font/raleway"
android:textSize="17sp"
android:textStyle="italic"
android:textColor="@color/editTextColor"
android:textColorHint="@color/editTextColor"
android:gravity="left|center_vertical"
android:imeOptions="actionDone"
android:hint="@string/password"
android:inputType="textPassword" />
</android.support.design.widget.TextInputLayout>

<Button
android:id="@+id/signupButton"
android:layout_width="154dp"
android:layout_height="38dp"
android:layout_marginTop="44dp"
android:background="@drawable/rounded_button"
android:fontFamily="@font/raleway_semibold"
android:paddingBottom="0dp"
android:paddingTop="0dp"
android:text="@string/signup"
android:textColor="@color/white_opaque"
android:textAllCaps="false"
android:textSize="18sp"
app:layout_constraintRight_toRightOf="@id/username"
app:layout_constraintTop_toBottomOf="@id/passwordLayout"
app:layout_goneMarginTop="4dp"/>

<Button
android:id="@+id/cancelButton"
android:layout_width="0dp"
app:layout_constraintWidth_default="spread"
android:layout_height="38dp"
android:background="@drawable/rounded_secondary_button"
android:fontFamily="@font/raleway_semibold"
android:paddingBottom="0dp"
android:paddingTop="0dp"
android:layout_marginRight="15dp"
android:text="@string/cancel"
android:textColor="@color/white_opaque"
android:textAllCaps="false"
android:textSize="18sp"
app:layout_constraintLeft_toLeftOf="@id/passwordLayout"
app:layout_constraintTop_toTopOf="@id/signupButton"
app:layout_constraintRight_toLeftOf="@id/signupButton"
app:layout_goneMarginTop="4dp"/>

</android.support.constraint.ConstraintLayout>
@@ -6,8 +6,10 @@
<color name="colorAccent">#54D7FD</color>
<color name="backgroundEditText">#E3E3E3</color>
<color name="editTextColor">#575757</color>
<color name="showPasswordColor">#3D3D3D</color>
<color name="tabs">#1EB5EC</color>
<color name="colorButton1">#00B4EF</color>
<color name="colorButton2">#828282</color>
<color name="backgroundDark">#333333</color>
<color name="backgroundLight">#4F4F4F</color>
<color name="backgroundSearch">#33999999</color>

@@ -22,4 +24,6 @@
<color name="starSelectedTint">#FBD92A</color>
<color name="starUnselectedTint">#8A8A8A</color>
<color name="slidingUpPanelFadeColor">#40000000</color>
<color name="clearText">#F2F2F2</color>

</resources>
@@ -10,11 +10,13 @@
<string name="popular">POPULAR</string>
<string name="bookmarks">BOOKMARKS</string>
<string name="goto_url_hint">Type a domain url</string>
<string name="username_or_email">Username or email\u00A0</string>
<string name="password">Password\u00A0</string>
<string name="email">Email</string>
<string name="username">Username</string>
<string name="username_or_email">Username or email</string>
<string name="password">Password</string>
<string name="login">Login</string>
<string name="logout">Logout</string>
<string name="forgot_password">Forgot password?\u00A0</string>
<string name="forgot_password"><u>Forgot password?</u>\u00A0</string>
<string name="login_username_or_password_incorrect">Username or password incorrect.</string>
<string name="logging_in">Logging into High Fidelity</string>
<string name="search_hint"><i>Search for a place by name</i>\u00A0</string>

@@ -23,13 +25,22 @@
<string name="privacyPolicy">Privacy Policy</string>
<string name="your_last_location">Your Last Location</string>
<string name="online">Online</string>
<string name="signup">Sign Up</string>
<string name="creating_account">Creating your High Fidelity account</string>
<string name="signup_email_username_or_password_incorrect">Email, username or password incorrect.</string>
<string name="signedin_welcome">You are now signed into High Fidelity</string>
<string name="welcome">Welcome</string>
<string name="cancel">Cancel</string>
<string name="get_started">Get Started</string>

<!-- tags -->
<string name="tagFragmentHome">tagFragmentHome</string>
<string name="tagFragmentLogin">tagFragmentLogin</string>
<string name="tagFragmentSignup">tagFragmentSignup</string>
<string name="tagFragmentPolicy">tagFragmentPolicy</string>
<string name="tagFragmentPeople">tagFragmentPeople</string>
<string name="tagSettings">tagSettings</string>
<string name="tagFragmentSignedIn">tagFragmentSignedIn</string>
<string name="settings">Settings</string>
<string name="AEC">AEC</string>
<string name="acoustic_echo_cancellation">Acoustic Echo Cancellation</string>
@@ -143,11 +143,9 @@ def packages = [
includeLibs: ['libtbb.so', 'libtbbmalloc.so'],
],
hifiAC: [
file: 'libplugins_libhifiCodec.zip',
versionId: 'i31pW.qNbvFOXRxbyiJUxg3sphaFNmZU',
checksum: '9412a8e12c88a4096c1fc843bb9fe52d',
sharedLibFolder: '',
includeLibs: ['libplugins_libhifiCodec.so']
baseUrl: 'http://s3.amazonaws.com/hifi-public/dependencies/',
file: 'codecSDK-android_armv8-2.0.zip',
checksum: '1cbef929675818fc64c4101b72f84a6a'
],
etc2comp: [
file: 'etc2comp-patched-armv8-libcpp.tgz',

@@ -367,7 +365,8 @@ task downloadDependencies {
doLast {
packages.each { entry ->
def filename = entry.value['file'];
def url = baseUrl + filename;
def dependencyBaseUrl = entry.value['baseUrl']
def url = (dependencyBaseUrl?.trim() ? dependencyBaseUrl : baseUrl) + filename;
if (entry.value.containsKey('versionId')) {
url = url + '?versionId=' + entry.value['versionId']
}

@@ -668,6 +667,21 @@ task uploadBreakpadDumpSymsRelease(type:io.github.httpbuilderng.http.HttpTask, d
}
}

task renameHifiACTaskDebug() {
doLast {
def sourceFile = new File("${appDir}/build/intermediates/cmake/debug/obj/arm64-v8a/","libhifiCodec.so")
def destinationFile = new File("${appDir}/src/main/jniLibs/arm64-v8a", "libplugins_libhifiCodec.so")
copy { from sourceFile; into destinationFile.parent; rename(sourceFile.name, destinationFile.name) }
}
}
task renameHifiACTaskRelease(type: Copy) {
doLast {
def sourceFile = new File("${appDir}/build/intermediates/cmake/release/obj/arm64-v8a/","libhifiCodec.so")
def destinationFile = new File("${appDir}/src/main/jniLibs/arm64-v8a", "libplugins_libhifiCodec.so")
copy { from sourceFile; into destinationFile.parent; rename(sourceFile.name, destinationFile.name) }
}
}

// FIXME this code is prototyping the desired functionality for doing build time binary dependency resolution.
// See the comment on the qtBundle task above
/*
@@ -38,6 +38,8 @@
#include "AvatarAudioStream.h"
#include "InjectedAudioStream.h"

using namespace std;

static const float DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE = 0.5f; // attenuation = -6dB * log2(distance)
static const int DISABLE_STATIC_JITTER_FRAMES = -1;
static const float DEFAULT_NOISE_MUTING_THRESHOLD = 1.0f;

@@ -49,11 +51,11 @@ static const QString AUDIO_THREADING_GROUP_KEY = "audio_threading";
int AudioMixer::_numStaticJitterFrames{ DISABLE_STATIC_JITTER_FRAMES };
float AudioMixer::_noiseMutingThreshold{ DEFAULT_NOISE_MUTING_THRESHOLD };
float AudioMixer::_attenuationPerDoublingInDistance{ DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE };
std::map<QString, std::shared_ptr<CodecPlugin>> AudioMixer::_availableCodecs{ };
map<QString, shared_ptr<CodecPlugin>> AudioMixer::_availableCodecs{ };
QStringList AudioMixer::_codecPreferenceOrder{};
QHash<QString, AABox> AudioMixer::_audioZones;
QVector<AudioMixer::ZoneSettings> AudioMixer::_zoneSettings;
QVector<AudioMixer::ReverbSettings> AudioMixer::_zoneReverbSettings;
vector<AudioMixer::ZoneDescription> AudioMixer::_audioZones;
vector<AudioMixer::ZoneSettings> AudioMixer::_zoneSettings;
vector<AudioMixer::ReverbSettings> AudioMixer::_zoneReverbSettings;

AudioMixer::AudioMixer(ReceivedMessage& message) :
ThreadedAssignment(message)

@@ -67,7 +69,7 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
_availableCodecs.clear(); // Make sure struct is clean
auto pluginManager = DependencyManager::set<PluginManager>();
auto codecPlugins = pluginManager->getCodecPlugins();
std::for_each(codecPlugins.cbegin(), codecPlugins.cend(),
for_each(codecPlugins.cbegin(), codecPlugins.cend(),
[&](const CodecPluginPointer& codec) {
_availableCodecs[codec->getName()] = codec;
});

@@ -122,7 +124,7 @@ void AudioMixer::queueAudioPacket(QSharedPointer<ReceivedMessage> message, Share
void AudioMixer::queueReplicatedAudioPacket(QSharedPointer<ReceivedMessage> message) {
// make sure we have a replicated node for the original sender of the packet
auto nodeList = DependencyManager::get<NodeList>();

// Node ID is now part of user data, since replicated audio packets are non-sourced.
QUuid nodeID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));

@@ -173,12 +175,12 @@ void AudioMixer::handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> mes
}
}

const std::pair<QString, CodecPluginPointer> AudioMixer::negotiateCodec(std::vector<QString> codecs) {
const pair<QString, CodecPluginPointer> AudioMixer::negotiateCodec(vector<QString> codecs) {
QString selectedCodecName;
CodecPluginPointer selectedCodec;

// read the codecs requested (by the client)
int minPreference = std::numeric_limits<int>::max();
int minPreference = numeric_limits<int>::max();
for (auto& codec : codecs) {
if (_availableCodecs.count(codec) > 0) {
int preference = _codecPreferenceOrder.indexOf(codec);

@@ -191,20 +193,9 @@ const std::pair<QString, CodecPluginPointer> AudioMixer::negotiateCodec(std::vec
}
}

return std::make_pair(selectedCodecName, _availableCodecs[selectedCodecName]);
return make_pair(selectedCodecName, _availableCodecs[selectedCodecName]);
}

void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
// enumerate the connected listeners to remove HRTF objects for the disconnected node
auto nodeList = DependencyManager::get<NodeList>();

nodeList->eachNode([&killedNode](const SharedNodePointer& node) {
auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (clientData) {
clientData->removeNode(killedNode->getUUID());
}
});
}

void AudioMixer::handleNodeMuteRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
auto nodeList = DependencyManager::get<NodeList>();
@@ -223,32 +214,31 @@ void AudioMixer::handleNodeMuteRequestPacket(QSharedPointer<ReceivedMessage> pac
}
}

void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
auto clientData = dynamic_cast<AudioMixerClientData*>(killedNode->getLinkedData());
if (clientData) {
// stage the removal of all streams from this node, workers handle when preparing mixes for listeners
_workerSharedData.removedNodes.emplace_back(killedNode->getLocalID());
}
}

void AudioMixer::handleKillAvatarPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
auto clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());
if (clientData) {
clientData->removeAgentAvatarAudioStream();
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachNode([sendingNode](const SharedNodePointer& node){
auto listenerClientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (listenerClientData) {
listenerClientData->removeHRTFForStream(sendingNode->getUUID());
}
});

// stage a removal of the avatar audio stream from this Agent, workers handle when preparing mixes for listeners
_workerSharedData.removedStreams.emplace_back(sendingNode->getUUID(), sendingNode->getLocalID(), QUuid());
}
}

void AudioMixer::removeHRTFsForFinishedInjector(const QUuid& streamID) {
auto injectorClientData = qobject_cast<AudioMixerClientData*>(sender());
if (injectorClientData) {
// enumerate the connected listeners to remove HRTF objects for the disconnected injector
auto nodeList = DependencyManager::get<NodeList>();

nodeList->eachNode([injectorClientData, &streamID](const SharedNodePointer& node){
auto listenerClientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (listenerClientData) {
listenerClientData->removeHRTFForStream(injectorClientData->getNodeID(), streamID);
}
});
if (injectorClientData) {
// stage the removal of this stream, workers handle when preparing mixes for listeners
_workerSharedData.removedStreams.emplace_back(injectorClientData->getNodeID(), injectorClientData->getNodeLocalID(),
streamID);
}
}

@@ -285,7 +275,7 @@ void AudioMixer::sendStatsPacket() {
// timing stats
QJsonObject timingStats;

auto addTiming = [&](Timer& timer, std::string name) {
auto addTiming = [&](Timer& timer, string name) {
uint64_t timing, trailing;
timer.get(timing, trailing);
timingStats[("us_per_" + name).c_str()] = (qint64)(timing / _numStatFrames);

@@ -293,12 +283,12 @@ void AudioMixer::sendStatsPacket() {
};

addTiming(_ticTiming, "tic");
addTiming(_checkTimeTiming, "check_time");
addTiming(_sleepTiming, "sleep");
addTiming(_frameTiming, "frame");
addTiming(_prepareTiming, "prepare");
addTiming(_packetsTiming, "packets");
addTiming(_mixTiming, "mix");
addTiming(_eventsTiming, "events");
addTiming(_packetsTiming, "packets");

#ifdef HIFI_AUDIO_MIXER_DEBUG
timingStats["ns_per_mix"] = (_stats.totalMixes > 0) ? (float)(_stats.mixTime / _stats.totalMixes) : 0;

@@ -311,11 +301,24 @@ void AudioMixer::sendStatsPacket() {
QJsonObject mixStats;

mixStats["%_hrtf_mixes"] = percentageForMixStats(_stats.hrtfRenders);
mixStats["%_hrtf_silent_mixes"] = percentageForMixStats(_stats.hrtfSilentRenders);
mixStats["%_hrtf_throttle_mixes"] = percentageForMixStats(_stats.hrtfThrottleRenders);
mixStats["%_manual_stereo_mixes"] = percentageForMixStats(_stats.manualStereoMixes);
mixStats["%_manual_echo_mixes"] = percentageForMixStats(_stats.manualEchoMixes);

mixStats["1_hrtf_renders"] = (int)(_stats.hrtfRenders / (float)_numStatFrames);
mixStats["1_hrtf_resets"] = (int)(_stats.hrtfResets / (float)_numStatFrames);
mixStats["1_hrtf_updates"] = (int)(_stats.hrtfUpdates / (float)_numStatFrames);

mixStats["2_skipped_streams"] = (int)(_stats.skipped / (float)_numStatFrames);
mixStats["2_inactive_streams"] = (int)(_stats.inactive / (float)_numStatFrames);
mixStats["2_active_streams"] = (int)(_stats.active / (float)_numStatFrames);

mixStats["3_skippped_to_active"] = (int)(_stats.skippedToActive / (float)_numStatFrames);
mixStats["3_skippped_to_inactive"] = (int)(_stats.skippedToInactive / (float)_numStatFrames);
mixStats["3_inactive_to_skippped"] = (int)(_stats.inactiveToSkipped / (float)_numStatFrames);
mixStats["3_inactive_to_active"] = (int)(_stats.inactiveToActive / (float)_numStatFrames);
mixStats["3_active_to_skippped"] = (int)(_stats.activeToSkipped / (float)_numStatFrames);
mixStats["3_active_to_inactive"] = (int)(_stats.activeToInactive / (float)_numStatFrames);

mixStats["total_mixes"] = _stats.totalMixes;
mixStats["avg_mixes_per_block"] = _stats.totalMixes / _numStatFrames;

@@ -366,7 +369,7 @@ AudioMixerClientData* AudioMixer::getOrCreateClientData(Node* node) {
auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());

if (!clientData) {
node->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID(), node->getLocalID()) });
node->setLinkedData(unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID(), node->getLocalID()) });
clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
}
@@ -393,33 +396,49 @@ void AudioMixer::start() {

// mix state
unsigned int frame = 1;
auto frameTimestamp = p_high_resolution_clock::now();

while (!_isFinished) {
auto ticTimer = _ticTiming.timer();

{
auto timer = _sleepTiming.timer();
auto frameDuration = timeFrame(frameTimestamp);
if (_startFrameTimestamp.time_since_epoch().count() == 0) {
_startFrameTimestamp = _idealFrameTimestamp = p_high_resolution_clock::now();
} else {
auto timer = _checkTimeTiming.timer();
auto frameDuration = timeFrame();
throttle(frameDuration, frame);
}

auto frameTimer = _frameTiming.timer();

nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
// prepare frames; pop off any new audio from their streams
{
auto prepareTimer = _prepareTiming.timer();
std::for_each(cbegin, cend, [&](const SharedNodePointer& node) {
_stats.sumStreams += prepareFrame(node, frame);
});
}
// process (node-isolated) audio packets across slave threads
{
auto packetsTimer = _packetsTiming.timer();

// first clear the concurrent vector of added streams that the slaves will add to when they process packets
_workerSharedData.addedStreams.clear();

nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
_slavePool.processPackets(cbegin, cend);
});
}

// process queued events (networking, global audio packets, &c.)
{
auto eventsTimer = _eventsTiming.timer();

// clear removed nodes and removed streams before we process events that will setup the new set
_workerSharedData.removedNodes.clear();
_workerSharedData.removedStreams.clear();

// since we're a while loop we need to yield to qt's event processing
QCoreApplication::processEvents();
}

int numToRetain = nodeList->size() * (1 - _throttlingRatio);
nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
// mix across slave threads
{
auto mixTimer = _mixTiming.timer();
_slavePool.mix(cbegin, cend, frame, _throttlingRatio);
}
auto mixTimer = _mixTiming.timer();
_slavePool.mix(cbegin, cend, frame, numToRetain);
});

// gather stats
@@ -431,21 +450,6 @@ void AudioMixer::start() {
++frame;
++_numStatFrames;

// process queued events (networking, global audio packets, &c.)
{
auto eventsTimer = _eventsTiming.timer();

// since we're a while loop we need to yield to qt's event processing
QCoreApplication::processEvents();

// process (node-isolated) audio packets across slave threads
{
nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
auto packetsTimer = _packetsTiming.timer();
_slavePool.processPackets(cbegin, cend);
});
}
}

if (_isFinished) {
// alert qt eventing that this is finished
@@ -455,26 +459,26 @@ void AudioMixer::start() {
}
}

std::chrono::microseconds AudioMixer::timeFrame(p_high_resolution_clock::time_point& timestamp) {
chrono::microseconds AudioMixer::timeFrame() {
// advance the next frame
auto nextTimestamp = timestamp + std::chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
auto now = p_high_resolution_clock::now();

// compute how long the last frame took
auto duration = std::chrono::duration_cast<std::chrono::microseconds>(now - timestamp);
auto duration = chrono::duration_cast<chrono::microseconds>(now - _startFrameTimestamp);

// set the new frame timestamp
timestamp = std::max(now, nextTimestamp);
_idealFrameTimestamp += chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);

// sleep until the next frame should start
// WIN32 sleep_until is broken until VS2015 Update 2
// instead, std::max (above) guarantees that timestamp >= now, so we can sleep_for
std::this_thread::sleep_for(timestamp - now);
{
auto timer = _sleepTiming.timer();
this_thread::sleep_until(_idealFrameTimestamp);
}

_startFrameTimestamp = p_high_resolution_clock::now();

return duration;
}

void AudioMixer::throttle(std::chrono::microseconds duration, int frame) {
void AudioMixer::throttle(chrono::microseconds duration, int frame) {
// throttle using a modified proportional-integral controller
const float FRAME_TIME = 10000.0f;
float mixRatio = duration.count() / FRAME_TIME;
@@ -508,28 +512,19 @@ void AudioMixer::throttle(std::chrono::microseconds duration, int frame) {
if (_trailingMixRatio > TARGET) {
int proportionalTerm = 1 + (_trailingMixRatio - TARGET) / 0.1f;
_throttlingRatio += THROTTLE_RATE * proportionalTerm;
_throttlingRatio = std::min(_throttlingRatio, 1.0f);
_throttlingRatio = min(_throttlingRatio, 1.0f);
qCDebug(audio) << "audio-mixer is struggling (" << _trailingMixRatio << "mix/sleep) - throttling"
<< _throttlingRatio << "of streams";
} else if (_throttlingRatio > 0.0f && _trailingMixRatio <= BACKOFF_TARGET) {
int proportionalTerm = 1 + (TARGET - _trailingMixRatio) / 0.2f;
_throttlingRatio -= BACKOFF_RATE * proportionalTerm;
_throttlingRatio = std::max(_throttlingRatio, 0.0f);
_throttlingRatio = max(_throttlingRatio, 0.0f);
qCDebug(audio) << "audio-mixer is recovering (" << _trailingMixRatio << "mix/sleep) - throttling"
<< _throttlingRatio << "of streams";
}
}
}

int AudioMixer::prepareFrame(const SharedNodePointer& node, unsigned int frame) {
AudioMixerClientData* data = (AudioMixerClientData*)node->getLinkedData();
if (data == nullptr) {
return 0;
}

return data->checkBuffersBeforeFrameSend();
}

void AudioMixer::clearDomainSettings() {
_numStaticJitterFrames = DISABLE_STATIC_JITTER_FRAMES;
_attenuationPerDoublingInDistance = DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE;
@@ -661,8 +656,11 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
const QString Y_MAX = "y_max";
const QString Z_MIN = "z_min";
const QString Z_MAX = "z_max";
foreach (const QString& zone, zones.keys()) {
QJsonObject zoneObject = zones[zone].toObject();

auto zoneNames = zones.keys();
_audioZones.reserve(zoneNames.length());
foreach (const QString& zoneName, zoneNames) {
QJsonObject zoneObject = zones[zoneName].toObject();

if (zoneObject.contains(X_MIN) && zoneObject.contains(X_MAX) && zoneObject.contains(Y_MIN) &&
zoneObject.contains(Y_MAX) && zoneObject.contains(Z_MIN) && zoneObject.contains(Z_MAX)) {

@@ -686,8 +684,8 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
glm::vec3 corner(xMin, yMin, zMin);
glm::vec3 dimensions(xMax - xMin, yMax - yMin, zMax - zMin);
AABox zoneAABox(corner, dimensions);
_audioZones.insert(zone, zoneAABox);
qCDebug(audio) << "Added zone:" << zone << "(corner:" << corner << ", dimensions:" << dimensions << ")";
_audioZones.push_back({ zoneName, zoneAABox });
qCDebug(audio) << "Added zone:" << zoneName << "(corner:" << corner << ", dimensions:" << dimensions << ")";
}
}
}
@@ -707,18 +705,28 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
coefficientObject.contains(LISTENER) &&
coefficientObject.contains(COEFFICIENT)) {

ZoneSettings settings;
auto itSource = find_if(begin(_audioZones), end(_audioZones), [&](const ZoneDescription& description) {
return description.name == coefficientObject.value(SOURCE).toString();
});
auto itListener = find_if(begin(_audioZones), end(_audioZones), [&](const ZoneDescription& description) {
return description.name == coefficientObject.value(LISTENER).toString();
});

bool ok;
settings.source = coefficientObject.value(SOURCE).toString();
settings.listener = coefficientObject.value(LISTENER).toString();
settings.coefficient = coefficientObject.value(COEFFICIENT).toString().toFloat(&ok);
float coefficient = coefficientObject.value(COEFFICIENT).toString().toFloat(&ok);

if (ok && settings.coefficient >= 0.0f && settings.coefficient <= 1.0f &&
_audioZones.contains(settings.source) && _audioZones.contains(settings.listener)) {

if (ok && coefficient >= 0.0f && coefficient <= 1.0f &&
itSource != end(_audioZones) &&
itListener != end(_audioZones)) {

ZoneSettings settings;
settings.source = itSource - begin(_audioZones);
settings.listener = itListener - begin(_audioZones);
settings.coefficient = coefficient;

_zoneSettings.push_back(settings);
qCDebug(audio) << "Added Coefficient:" << settings.source << settings.listener << settings.coefficient;
qCDebug(audio) << "Added Coefficient:" << itSource->name << itListener->name << settings.coefficient;
}
}
}
@@ -739,19 +747,21 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
reverbObject.contains(WET_LEVEL)) {

bool okReverbTime, okWetLevel;
QString zone = reverbObject.value(ZONE).toString();
auto itZone = find_if(begin(_audioZones), end(_audioZones), [&](const ZoneDescription& description) {
return description.name == reverbObject.value(ZONE).toString();
});
float reverbTime = reverbObject.value(REVERB_TIME).toString().toFloat(&okReverbTime);
float wetLevel = reverbObject.value(WET_LEVEL).toString().toFloat(&okWetLevel);

if (okReverbTime && okWetLevel && _audioZones.contains(zone)) {
if (okReverbTime && okWetLevel && itZone != end(_audioZones)) {
ReverbSettings settings;
settings.zone = zone;
settings.zone = itZone - begin(_audioZones);
settings.reverbTime = reverbTime;
settings.wetLevel = wetLevel;

_zoneReverbSettings.push_back(settings);

qCDebug(audio) << "Added Reverb:" << zone << reverbTime << wetLevel;
qCDebug(audio) << "Added Reverb:" << itZone->name << reverbTime << wetLevel;
}
}
}

@@ -764,7 +774,7 @@ AudioMixer::Timer::Timing::Timing(uint64_t& sum) : _sum(sum) {
}

AudioMixer::Timer::Timing::~Timing() {
_sum += std::chrono::duration_cast<std::chrono::microseconds>(p_high_resolution_clock::now() - _timing).count();
_sum += chrono::duration_cast<chrono::microseconds>(p_high_resolution_clock::now() - _timing).count();
}

void AudioMixer::Timer::get(uint64_t& timing, uint64_t& trailing) {

@@ -34,13 +34,18 @@ class AudioMixer : public ThreadedAssignment {
public:
AudioMixer(ReceivedMessage& message);

struct ZoneDescription {
QString name;
AABox area;
};
struct ZoneSettings {
QString source;
QString listener;
int source;
int listener;
float coefficient;
};
struct ReverbSettings {
QString zone;
int zone;
float reverbTime;
float wetLevel;
};

@@ -48,9 +53,9 @@ public:
static int getStaticJitterFrames() { return _numStaticJitterFrames; }
static bool shouldMute(float quietestFrame) { return quietestFrame > _noiseMutingThreshold; }
static float getAttenuationPerDoublingInDistance() { return _attenuationPerDoublingInDistance; }
static const QHash<QString, AABox>& getAudioZones() { return _audioZones; }
static const QVector<ZoneSettings>& getZoneSettings() { return _zoneSettings; }
static const QVector<ReverbSettings>& getReverbSettings() { return _zoneReverbSettings; }
static const std::vector<ZoneDescription>& getAudioZones() { return _audioZones; }
static const std::vector<ZoneSettings>& getZoneSettings() { return _zoneSettings; }
static const std::vector<ReverbSettings>& getReverbSettings() { return _zoneReverbSettings; }
static const std::pair<QString, CodecPluginPointer> negotiateCodec(std::vector<QString> codecs);

static bool shouldReplicateTo(const Node& from, const Node& to) {

@@ -79,11 +84,8 @@ private slots:

private:
// mixing helpers
std::chrono::microseconds timeFrame(p_high_resolution_clock::time_point& timestamp);
std::chrono::microseconds timeFrame();
void throttle(std::chrono::microseconds frameDuration, int frame);
// pop a frame from any streams on the node
// returns the number of available streams
int prepareFrame(const SharedNodePointer& node, unsigned int frame);

AudioMixerClientData* getOrCreateClientData(Node* node);

@@ -92,6 +94,9 @@ private:
void parseSettingsObject(const QJsonObject& settingsObject);
void clearDomainSettings();

p_high_resolution_clock::time_point _idealFrameTimestamp;
p_high_resolution_clock::time_point _startFrameTimestamp;

float _trailingMixRatio { 0.0f };
float _throttlingRatio { 0.0f };

@@ -100,7 +105,7 @@ private:
int _numStatFrames { 0 };
AudioMixerStats _stats;

AudioMixerSlavePool _slavePool;
AudioMixerSlavePool _slavePool { _workerSharedData };

class Timer {
public:

@@ -123,7 +128,9 @@ private:
uint64_t _history[TIMER_TRAILING_SECONDS] {};
int _index { 0 };
};

Timer _ticTiming;
Timer _checkTimeTiming;
Timer _sleepTiming;
Timer _frameTiming;
Timer _prepareTiming;

@@ -136,10 +143,13 @@ private:
static float _attenuationPerDoublingInDistance;
static std::map<QString, CodecPluginPointer> _availableCodecs;
static QStringList _codecPreferenceOrder;
static QHash<QString, AABox> _audioZones;
static QVector<ZoneSettings> _zoneSettings;
static QVector<ReverbSettings> _zoneReverbSettings;

static std::vector<ZoneDescription> _audioZones;
static std::vector<ZoneSettings> _zoneSettings;
static std::vector<ReverbSettings> _zoneReverbSettings;

AudioMixerSlave::SharedData _workerSharedData;
};

#endif // hifi_AudioMixer_h

@ -13,6 +13,8 @@
|
|||
|
||||
#include <random>
|
||||
|
||||
#include <glm/detail/func_common.hpp>
|
||||
|
||||
#include <QtCore/QDebug>
|
||||
#include <QtCore/QJsonArray>
|
||||
|
||||
|
@ -28,7 +30,6 @@
|
|||
AudioMixerClientData::AudioMixerClientData(const QUuid& nodeID, Node::LocalID nodeLocalID) :
|
||||
NodeData(nodeID, nodeLocalID),
|
||||
audioLimiter(AudioConstants::SAMPLE_RATE, AudioConstants::STEREO),
|
||||
_ignoreZone(*this),
|
||||
_outgoingMixedAudioSequenceNumber(0),
|
||||
_downstreamAudioStreamStats()
|
||||
{
|
||||
|
@ -56,7 +57,7 @@ void AudioMixerClientData::queuePacket(QSharedPointer<ReceivedMessage> message,
|
|||
_packetQueue.push(message);
|
||||
}
|
||||
|
||||
void AudioMixerClientData::processPackets() {
|
||||
int AudioMixerClientData::processPackets(ConcurrentAddedStreams& addedStreams) {
|
||||
SharedNodePointer node = _packetQueue.node;
|
||||
assert(_packetQueue.empty() || node);
|
||||
_packetQueue.node.clear();
|
||||
|
@ -69,22 +70,17 @@ void AudioMixerClientData::processPackets() {
|
|||
case PacketType::MicrophoneAudioWithEcho:
|
||||
case PacketType::InjectAudio:
|
||||
case PacketType::SilentAudioFrame: {
|
||||
|
||||
if (node->isUpstream()) {
|
||||
setupCodecForReplicatedAgent(packet);
|
||||
}
|
||||
|
||||
QMutexLocker lock(&getMutex());
|
||||
parseData(*packet);
|
||||
processStreamPacket(*packet, addedStreams);
|
||||
|
||||
optionallyReplicatePacket(*packet, *node);
|
||||
|
||||
break;
|
||||
}
|
||||
case PacketType::AudioStreamStats: {
|
||||
QMutexLocker lock(&getMutex());
|
||||
parseData(*packet);
|
||||
|
||||
break;
|
||||
}
|
||||
case PacketType::NegotiateAudioFormat:
|
||||
|
@ -109,6 +105,10 @@ void AudioMixerClientData::processPackets() {
|
|||
_packetQueue.pop();
|
||||
}
|
||||
assert(_packetQueue.empty());
|
||||
|
||||
// now that we have processed all packets for this frame
|
||||
// we can prepare the sources from this client to be ready for mixing
|
||||
return checkBuffersBeforeFrameSend();
|
||||
}
|
||||
|
||||
bool isReplicatedPacket(PacketType packetType) {
|
||||
|
@ -186,63 +186,136 @@ void AudioMixerClientData::parseRequestsDomainListData(ReceivedMessage& message)
|
|||
void AudioMixerClientData::parsePerAvatarGainSet(ReceivedMessage& message, const SharedNodePointer& node) {
|
||||
QUuid uuid = node->getUUID();
|
||||
// parse the UUID from the packet
|
||||
QUuid avatarUuid = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
|
||||
QUuid avatarUUID = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
|
||||
uint8_t packedGain;
|
||||
message.readPrimitive(&packedGain);
|
||||
float gain = unpackFloatGainFromByte(packedGain);
|
||||
|
||||
if (avatarUuid.isNull()) {
|
||||
if (avatarUUID.isNull()) {
|
||||
// set the MASTER avatar gain
|
||||
setMasterAvatarGain(gain);
|
||||
qCDebug(audio) << "Setting MASTER avatar gain for " << uuid << " to " << gain;
|
||||
} else {
|
||||
// set the per-source avatar gain
|
||||
hrtfForStream(avatarUuid, QUuid()).setGainAdjustment(gain);
|
||||
qCDebug(audio) << "Setting avatar gain adjustment for hrtf[" << uuid << "][" << avatarUuid << "] to " << gain;
|
||||
setGainForAvatar(avatarUUID, gain);
|
||||
qCDebug(audio) << "Setting avatar gain adjustment for hrtf[" << uuid << "][" << avatarUUID << "] to " << gain;
|
||||
}
|
||||
}
|
||||
|
||||
void AudioMixerClientData::setGainForAvatar(QUuid nodeID, uint8_t gain) {
|
||||
auto it = std::find_if(_streams.active.cbegin(), _streams.active.cend(), [nodeID](const MixableStream& mixableStream){
|
||||
return mixableStream.nodeStreamID.nodeID == nodeID && mixableStream.nodeStreamID.streamID.isNull();
|
||||
});
|
||||
|
||||
if (it != _streams.active.cend()) {
|
||||
it->hrtf->setGainAdjustment(gain);
|
||||
}
|
||||
}
|
||||
|
||||
void AudioMixerClientData::parseNodeIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node) {
|
||||
node->parseIgnoreRequestMessage(message);
|
||||
auto ignoredNodesPair = node->parseIgnoreRequestMessage(message);
|
||||
|
||||
// we have a vector of ignored or unignored node UUIDs - update our internal data structures so that
|
||||
// streams can be included or excluded next time a mix is being created
|
||||
if (ignoredNodesPair.second) {
|
||||
// we have newly ignored nodes, add them to our vector
|
||||
_newIgnoredNodeIDs.insert(std::end(_newIgnoredNodeIDs),
|
||||
std::begin(ignoredNodesPair.first), std::end(ignoredNodesPair.first));
|
||||
} else {
|
||||
// we have newly unignored nodes, add them to our vector
|
||||
_newUnignoredNodeIDs.insert(std::end(_newUnignoredNodeIDs),
|
||||
std::begin(ignoredNodesPair.first), std::end(ignoredNodesPair.first));
|
||||
}
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
for (auto& nodeID : ignoredNodesPair.first) {
|
||||
auto otherNode = nodeList->nodeWithUUID(nodeID);
|
||||
if (otherNode) {
|
||||
auto otherNodeMixerClientData = static_cast<AudioMixerClientData*>(otherNode->getLinkedData());
|
||||
if (otherNodeMixerClientData) {
|
||||
if (ignoredNodesPair.second) {
|
||||
otherNodeMixerClientData->ignoredByNode(getNodeID());
|
||||
} else {
|
||||
otherNodeMixerClientData->unignoredByNode(getNodeID());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AudioMixerClientData::ignoredByNode(QUuid nodeID) {
|
||||
// first add this ID to the concurrent vector for newly ignoring nodes
|
||||
_newIgnoringNodeIDs.push_back(nodeID);
|
||||
|
||||
// now take a lock and on the consistent vector of ignoring nodes and make sure this node is in it
|
||||
std::lock_guard<std::mutex> lock(_ignoringNodeIDsMutex);
|
||||
if (std::find(_ignoringNodeIDs.begin(), _ignoringNodeIDs.end(), nodeID) == _ignoringNodeIDs.end()) {
|
||||
_ignoringNodeIDs.push_back(nodeID);
|
||||
}
|
||||
}
|
||||
|
||||
void AudioMixerClientData::unignoredByNode(QUuid nodeID) {
|
||||
// first add this ID to the concurrent vector for newly unignoring nodes
|
||||
_newUnignoringNodeIDs.push_back(nodeID);
|
||||
|
||||
// now take a lock on the consistent vector of ignoring nodes and make sure this node isn't in it
|
||||
std::lock_guard<std::mutex> lock(_ignoringNodeIDsMutex);
|
||||
auto it = _ignoringNodeIDs.begin();
|
||||
while (it != _ignoringNodeIDs.end()) {
|
||||
if (*it == nodeID) {
|
||||
it = _ignoringNodeIDs.erase(it);
|
||||
} else {
|
||||
++it;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AudioMixerClientData::clearStagedIgnoreChanges() {
|
||||
_newIgnoredNodeIDs.clear();
|
||||
_newUnignoredNodeIDs.clear();
|
||||
_newIgnoringNodeIDs.clear();
|
||||
_newUnignoringNodeIDs.clear();
|
||||
}
|
||||
|
||||
void AudioMixerClientData::parseRadiusIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node) {
|
||||
node->parseIgnoreRadiusRequestMessage(message);
|
||||
bool enabled;
|
||||
message->readPrimitive(&enabled);
|
||||
|
||||
_isIgnoreRadiusEnabled = enabled;
|
||||
|
||||
auto avatarAudioStream = getAvatarAudioStream();
|
||||
|
||||
// if we have an avatar audio stream, tell it wether its ignore box should be enabled or disabled
|
||||
if (avatarAudioStream) {
|
||||
if (_isIgnoreRadiusEnabled) {
|
||||
avatarAudioStream->enableIgnoreBox();
|
||||
} else {
|
||||
avatarAudioStream->disableIgnoreBox();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
AvatarAudioStream* AudioMixerClientData::getAvatarAudioStream() {
|
||||
QReadLocker readLocker { &_streamsLock };
|
||||
auto it = std::find_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
|
||||
return stream->getStreamIdentifier().isNull();
|
||||
});
|
||||
|
||||
auto it = _audioStreams.find(QUuid());
|
||||
if (it != _audioStreams.end()) {
|
||||
return dynamic_cast<AvatarAudioStream*>(it->second.get());
|
||||
return dynamic_cast<AvatarAudioStream*>(it->get());
|
||||
}
|
||||
|
||||
// no mic stream found - return NULL
|
||||
return NULL;
|
||||
}
|
||||
|
||||
void AudioMixerClientData::removeHRTFForStream(const QUuid& nodeID, const QUuid& streamID) {
|
||||
auto it = _nodeSourcesHRTFMap.find(nodeID);
|
||||
if (it != _nodeSourcesHRTFMap.end()) {
|
||||
// erase the stream with the given ID from the given node
|
||||
it->second.erase(streamID);
|
||||
|
||||
// is the map for this node now empty?
|
||||
// if so we can remove it
|
||||
if (it->second.size() == 0) {
|
||||
_nodeSourcesHRTFMap.erase(it);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AudioMixerClientData::removeAgentAvatarAudioStream() {
|
||||
QWriteLocker writeLocker { &_streamsLock };
|
||||
auto it = _audioStreams.find(QUuid());
|
||||
auto it = std::remove_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
|
||||
return stream->getStreamIdentifier().isNull();
|
||||
});
|
||||
|
||||
if (it != _audioStreams.end()) {
|
||||
_audioStreams.erase(it);
|
||||
}
|
||||
writeLocker.unlock();
|
||||
}
|
||||
|
||||
int AudioMixerClientData::parseData(ReceivedMessage& message) {
|
||||
|
@ -252,128 +325,186 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
|
|||
// skip over header, appendFlag, and num stats packed
|
||||
message.seek(sizeof(quint8) + sizeof(quint16));
|
||||
|
||||
if (message.getBytesLeftToRead() != sizeof(AudioStreamStats)) {
|
||||
qWarning() << "Received AudioStreamStats of wrong size" << message.getBytesLeftToRead()
|
||||
<< "instead of" << sizeof(AudioStreamStats) << "from"
|
||||
<< message.getSourceID() << "at" << message.getSenderSockAddr();
|
||||
|
||||
return message.getPosition();
|
||||
}
|
||||
|
||||
// read the downstream audio stream stats
|
||||
message.readPrimitive(&_downstreamAudioStreamStats);
|
||||
|
||||
return message.getPosition();
|
||||
|
||||
} else {
|
||||
SharedStreamPointer matchingStream;
|
||||
|
||||
bool isMicStream = false;
|
||||
|
||||
if (packetType == PacketType::MicrophoneAudioWithEcho
|
||||
|| packetType == PacketType::ReplicatedMicrophoneAudioWithEcho
|
||||
|| packetType == PacketType::MicrophoneAudioNoEcho
|
||||
|| packetType == PacketType::ReplicatedMicrophoneAudioNoEcho
|
||||
|| packetType == PacketType::SilentAudioFrame
|
||||
|| packetType == PacketType::ReplicatedSilentAudioFrame) {
|
||||
|
||||
QWriteLocker writeLocker { &_streamsLock };
|
||||
|
||||
auto micStreamIt = _audioStreams.find(QUuid());
|
||||
if (micStreamIt == _audioStreams.end()) {
|
||||
// we don't have a mic stream yet, so add it
|
||||
|
||||
// hop past the sequence number that leads the packet
|
||||
message.seek(sizeof(quint16));
|
||||
|
||||
// pull the codec string from the packet
|
||||
auto codecString = message.readString();
|
||||
|
||||
// determine if the stream is stereo or not
|
||||
bool isStereo;
|
||||
if (packetType == PacketType::SilentAudioFrame
|
||||
|| packetType == PacketType::ReplicatedSilentAudioFrame) {
|
||||
quint16 numSilentSamples;
|
||||
message.readPrimitive(&numSilentSamples);
|
||||
isStereo = numSilentSamples == AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
|
||||
} else {
|
||||
quint8 channelFlag;
|
||||
message.readPrimitive(&channelFlag);
|
||||
isStereo = channelFlag == 1;
|
||||
}
|
||||
|
||||
auto avatarAudioStream = new AvatarAudioStream(isStereo, AudioMixer::getStaticJitterFrames());
|
||||
avatarAudioStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
|
||||
qCDebug(audio) << "creating new AvatarAudioStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
|
||||
|
||||
connect(avatarAudioStream, &InboundAudioStream::mismatchedAudioCodec,
|
||||
this, &AudioMixerClientData::handleMismatchAudioFormat);
|
||||
|
||||
auto emplaced = _audioStreams.emplace(
|
||||
QUuid(),
|
||||
std::unique_ptr<PositionalAudioStream> { avatarAudioStream }
|
||||
);
|
||||
|
||||
micStreamIt = emplaced.first;
|
||||
}
|
||||
|
||||
matchingStream = micStreamIt->second;
|
||||
|
||||
writeLocker.unlock();
|
||||
|
||||
isMicStream = true;
|
||||
} else if (packetType == PacketType::InjectAudio
|
||||
|| packetType == PacketType::ReplicatedInjectAudio) {
|
||||
// this is injected audio
|
||||
// grab the stream identifier for this injected audio
|
||||
message.seek(sizeof(quint16));
|
||||
|
||||
QUuid streamIdentifier = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
|
||||
|
||||
bool isStereo;
|
||||
message.readPrimitive(&isStereo);
|
||||
|
||||
QWriteLocker writeLock { &_streamsLock };
|
||||
|
||||
auto streamIt = _audioStreams.find(streamIdentifier);
|
||||
|
||||
if (streamIt == _audioStreams.end()) {
|
||||
// we don't have this injected stream yet, so add it
|
||||
auto injectorStream = new InjectedAudioStream(streamIdentifier, isStereo, AudioMixer::getStaticJitterFrames());
|
||||
|
||||
#if INJECTORS_SUPPORT_CODECS
|
||||
injectorStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
|
||||
qCDebug(audio) << "creating new injectorStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
|
||||
#endif
|
||||
|
||||
auto emplaced = _audioStreams.emplace(
|
||||
streamIdentifier,
|
||||
std::unique_ptr<InjectedAudioStream> { injectorStream }
|
||||
);
|
||||
|
||||
streamIt = emplaced.first;
|
||||
}
|
||||
|
||||
matchingStream = streamIt->second;
|
||||
|
||||
writeLock.unlock();
|
||||
}
|
||||
|
||||
// seek to the beginning of the packet so that the next reader is in the right spot
|
||||
message.seek(0);
|
||||
|
||||
// check the overflow count before we parse data
|
||||
auto overflowBefore = matchingStream->getOverflowCount();
|
||||
auto parseResult = matchingStream->parseData(message);
|
||||
|
||||
if (matchingStream->getOverflowCount() > overflowBefore) {
|
||||
qCDebug(audio) << "Just overflowed on stream from" << message.getSourceID() << "at" << message.getSenderSockAddr();
|
||||
qCDebug(audio) << "This stream is for" << (isMicStream ? "microphone audio" : "injected audio");
|
||||
}
|
||||
|
||||
return parseResult;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
int AudioMixerClientData::checkBuffersBeforeFrameSend() {
|
||||
QWriteLocker writeLocker { &_streamsLock };
|
||||
bool AudioMixerClientData::containsValidPosition(ReceivedMessage& message) const {
|
||||
static const int SEQUENCE_NUMBER_BYTES = sizeof(quint16);
|
||||
|
||||
auto posBefore = message.getPosition();
|
||||
|
||||
message.seek(SEQUENCE_NUMBER_BYTES);
|
||||
|
||||
// skip over the codec string
|
||||
message.readString();
|
||||
|
||||
switch (message.getType()) {
|
||||
case PacketType::MicrophoneAudioNoEcho:
|
||||
case PacketType::MicrophoneAudioWithEcho: {
|
||||
// skip over the stereo flag
|
||||
message.seek(message.getPosition() + sizeof(ChannelFlag));
|
||||
break;
|
||||
}
|
||||
case PacketType::SilentAudioFrame: {
|
||||
// skip the number of silent samples
|
||||
message.seek(message.getPosition() + sizeof(SilentSamplesBytes));
|
||||
break;
|
||||
}
|
||||
case PacketType::InjectAudio: {
|
||||
// skip the stream ID, stereo flag, and loopback flag
|
||||
message.seek(message.getPosition() + NUM_STREAM_ID_BYTES + sizeof(ChannelFlag) + sizeof(LoopbackFlag));
|
||||
break;
|
||||
}
|
||||
default:
|
||||
Q_UNREACHABLE();
|
||||
break;
|
||||
}
|
||||
|
||||
glm::vec3 peekPosition;
|
||||
message.readPrimitive(&peekPosition);
|
||||
|
||||
// reset the position the message was at before we were called
|
||||
message.seek(posBefore);
|
||||
|
||||
if (glm::any(glm::isnan(peekPosition))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void AudioMixerClientData::processStreamPacket(ReceivedMessage& message, ConcurrentAddedStreams &addedStreams) {
|
||||
|
||||
if (!containsValidPosition(message)) {
|
||||
qDebug() << "Refusing to process audio stream from" << message.getSourceID() << "with invalid position";
|
||||
return;
|
||||
}
|
||||
|
||||
SharedStreamPointer matchingStream;
|
||||
|
||||
auto packetType = message.getType();
|
||||
bool newStream = false;
|
||||
|
||||
if (packetType == PacketType::MicrophoneAudioWithEcho
|
||||
|| packetType == PacketType::MicrophoneAudioNoEcho
|
||||
|| packetType == PacketType::SilentAudioFrame) {
|
||||
|
||||
auto micStreamIt = std::find_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
|
||||
return stream->getStreamIdentifier().isNull();
|
||||
});
|
||||
|
||||
if (micStreamIt == _audioStreams.end()) {
|
||||
// we don't have a mic stream yet, so add it
|
||||
|
||||
// hop past the sequence number that leads the packet
|
||||
message.seek(sizeof(StreamSequenceNumber));
|
||||
|
||||
// pull the codec string from the packet
|
||||
auto codecString = message.readString();
|
||||
|
||||
// determine if the stream is stereo or not
|
||||
bool isStereo;
|
||||
if (packetType == PacketType::SilentAudioFrame || packetType == PacketType::ReplicatedSilentAudioFrame) {
|
||||
SilentSamplesBytes numSilentSamples;
|
||||
message.readPrimitive(&numSilentSamples);
|
||||
isStereo = numSilentSamples == AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
|
||||
} else {
|
||||
ChannelFlag channelFlag;
|
||||
message.readPrimitive(&channelFlag);
|
||||
isStereo = channelFlag == 1;
|
||||
}
|
||||
|
||||
auto avatarAudioStream = new AvatarAudioStream(isStereo, AudioMixer::getStaticJitterFrames());
|
||||
avatarAudioStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
|
||||
|
||||
if (_isIgnoreRadiusEnabled) {
|
||||
avatarAudioStream->enableIgnoreBox();
|
||||
} else {
|
||||
avatarAudioStream->disableIgnoreBox();
|
||||
}
|
||||
|
||||
qCDebug(audio) << "creating new AvatarAudioStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
|
||||
|
||||
connect(avatarAudioStream, &InboundAudioStream::mismatchedAudioCodec,
|
||||
this, &AudioMixerClientData::handleMismatchAudioFormat);
|
||||
|
||||
matchingStream = SharedStreamPointer(avatarAudioStream);
|
||||
_audioStreams.push_back(matchingStream);
|
||||
|
||||
newStream = true;
|
||||
} else {
|
||||
matchingStream = *micStreamIt;
|
||||
}
|
||||
} else if (packetType == PacketType::InjectAudio) {
|
||||
|
||||
// this is injected audio
|
||||
// skip the sequence number and codec string and grab the stream identifier for this injected audio
|
||||
message.seek(sizeof(StreamSequenceNumber));
|
||||
message.readString();
|
||||
|
||||
QUuid streamIdentifier = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
|
||||
|
||||
auto streamIt = std::find_if(_audioStreams.begin(), _audioStreams.end(), [&streamIdentifier](const SharedStreamPointer& stream) {
|
||||
return stream->getStreamIdentifier() == streamIdentifier;
|
||||
});
|
||||
|
||||
if (streamIt == _audioStreams.end()) {
|
||||
bool isStereo;
|
||||
message.readPrimitive(&isStereo);
|
||||
|
||||
// we don't have this injected stream yet, so add it
|
||||
auto injectorStream = new InjectedAudioStream(streamIdentifier, isStereo, AudioMixer::getStaticJitterFrames());
|
||||
|
||||
#if INJECTORS_SUPPORT_CODECS
|
||||
injectorStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
|
||||
qCDebug(audio) << "creating new injectorStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
|
||||
#endif
|
||||
|
||||
matchingStream = SharedStreamPointer(injectorStream);
|
||||
_audioStreams.push_back(matchingStream);
|
||||
|
||||
newStream = true;
|
||||
} else {
|
||||
matchingStream = *streamIt;
|
||||
}
|
||||
}
|
||||
|
||||
// seek to the beginning of the packet so that the next reader is in the right spot
|
||||
message.seek(0);
|
||||
|
||||
// check the overflow count before we parse data
|
||||
auto overflowBefore = matchingStream->getOverflowCount();
|
||||
matchingStream->parseData(message);
|
||||
|
||||
if (matchingStream->getOverflowCount() > overflowBefore) {
|
||||
qCDebug(audio) << "Just overflowed on stream" << matchingStream->getStreamIdentifier()
|
||||
<< "from" << message.getSourceID();
|
||||
}
|
||||
|
||||
if (newStream) {
|
||||
// whenever a stream is added, push it to the concurrent vector of streams added this frame
|
||||
addedStreams.emplace_back(getNodeID(), getNodeLocalID(), matchingStream->getStreamIdentifier(), matchingStream.get());
|
||||
}
|
||||
}
|
||||
|
||||
int AudioMixerClientData::checkBuffersBeforeFrameSend() {
|
||||
auto it = _audioStreams.begin();
|
||||
while (it != _audioStreams.end()) {
|
||||
SharedStreamPointer stream = it->second;
|
||||
SharedStreamPointer stream = *it;
|
||||
|
||||
if (stream->popFrames(1, true) > 0) {
|
||||
stream->updateLastPopOutputLoudnessAndTrailingLoudness();
|
||||
|
@ -388,7 +519,7 @@ int AudioMixerClientData::checkBuffersBeforeFrameSend() {
|
|||
// this is an inactive injector, pull it from our streams
|
||||
|
||||
// first emit that it is finished so that the HRTF objects for this source can be cleaned up
|
||||
emit injectorStreamFinished(it->second->getStreamIdentifier());
|
||||
emit injectorStreamFinished(stream->getStreamIdentifier());
|
||||
|
||||
// erase the stream to drop our ref to the shared pointer and remove it
|
||||
it = _audioStreams.erase(it);
|
||||
|
@ -441,7 +572,7 @@ void AudioMixerClientData::sendAudioStreamStatsPackets(const SharedNodePointer&
|
|||
|
||||
// pack the calculated number of stream stats
|
||||
for (int i = 0; i < numStreamStatsToPack; i++) {
|
||||
PositionalAudioStream* stream = it->second.get();
|
||||
PositionalAudioStream* stream = it->get();
|
||||
|
||||
stream->perSecondCallbackForUpdatingStats();
|
||||
|
||||
|
@ -513,12 +644,12 @@ QJsonObject AudioMixerClientData::getAudioStreamStats() {
|
|||
QJsonArray injectorArray;
|
||||
auto streamsCopy = getAudioStreams();
|
||||
for (auto& injectorPair : streamsCopy) {
|
||||
if (injectorPair.second->getType() == PositionalAudioStream::Injector) {
|
||||
if (injectorPair->getType() == PositionalAudioStream::Injector) {
|
||||
QJsonObject upstreamStats;
|
||||
|
||||
AudioStreamStats streamStats = injectorPair.second->getAudioStreamStats();
|
||||
AudioStreamStats streamStats = injectorPair->getAudioStreamStats();
|
||||
upstreamStats["inj.desired"] = streamStats._desiredJitterBufferFrames;
|
||||
upstreamStats["desired_calc"] = injectorPair.second->getCalculatedJitterBufferFrames();
|
||||
upstreamStats["desired_calc"] = injectorPair->getCalculatedJitterBufferFrames();
|
||||
upstreamStats["available_avg_10s"] = streamStats._framesAvailableAverage;
|
||||
upstreamStats["available"] = (double) streamStats._framesAvailable;
|
||||
upstreamStats["unplayed"] = (double) streamStats._unplayedMs;
|
||||
|
@ -609,99 +740,6 @@ void AudioMixerClientData::cleanupCodec() {
|
|||
}
|
||||
}
|
||||
|
||||
AudioMixerClientData::IgnoreZone& AudioMixerClientData::IgnoreZoneMemo::get(unsigned int frame) {
|
||||
// check for a memoized zone
|
||||
if (frame != _frame.load(std::memory_order_acquire)) {
|
||||
AvatarAudioStream* stream = _data.getAvatarAudioStream();
|
||||
|
||||
// get the initial dimensions from the stream
|
||||
glm::vec3 corner = stream ? stream->getAvatarBoundingBoxCorner() : glm::vec3(0);
|
||||
glm::vec3 scale = stream ? stream->getAvatarBoundingBoxScale() : glm::vec3(0);
|
||||
|
||||
// enforce a minimum scale
|
||||
static const glm::vec3 MIN_IGNORE_BOX_SCALE = glm::vec3(0.3f, 1.3f, 0.3f);
|
||||
if (glm::any(glm::lessThan(scale, MIN_IGNORE_BOX_SCALE))) {
|
||||
scale = MIN_IGNORE_BOX_SCALE;
|
||||
}
|
||||
|
||||
// (this is arbitrary number determined empirically for comfort)
|
||||
const float IGNORE_BOX_SCALE_FACTOR = 2.4f;
|
||||
scale *= IGNORE_BOX_SCALE_FACTOR;
|
||||
|
||||
// create the box (we use a box for the zone for convenience)
|
||||
AABox box(corner, scale);
|
||||
|
||||
// update the memoized zone
|
||||
// This may be called by multiple threads concurrently,
|
||||
// so take a lock and only update the memo if this call is first.
|
||||
// This prevents concurrent updates from invalidating the returned reference
|
||||
// (contingent on the preconditions listed in the header).
|
||||
std::lock_guard<std::mutex> lock(_mutex);
|
||||
if (frame != _frame.load(std::memory_order_acquire)) {
|
||||
_zone = box;
|
||||
unsigned int oldFrame = _frame.exchange(frame, std::memory_order_release);
|
||||
Q_UNUSED(oldFrame);
|
||||
}
|
||||
}
|
||||
|
||||
return _zone;
|
||||
}
|
||||
|
||||
void AudioMixerClientData::IgnoreNodeCache::cache(bool shouldIgnore) {
|
||||
if (!_isCached) {
|
||||
_shouldIgnore = shouldIgnore;
|
||||
_isCached = true;
|
||||
}
|
||||
}
|
||||
|
||||
bool AudioMixerClientData::IgnoreNodeCache::isCached() {
|
||||
return _isCached;
|
||||
}
|
||||
|
||||
bool AudioMixerClientData::IgnoreNodeCache::shouldIgnore() {
|
||||
bool ignore = _shouldIgnore;
|
||||
_isCached = false;
|
||||
return ignore;
|
||||
}
|
||||
|
||||
bool AudioMixerClientData::shouldIgnore(const SharedNodePointer self, const SharedNodePointer node, unsigned int frame) {
|
||||
// this is symmetric over self / node; if computed, it is cached in the other
|
||||
|
||||
// check the cache to avoid computation
|
||||
auto& cache = _nodeSourcesIgnoreMap[node->getUUID()];
|
||||
if (cache.isCached()) {
|
||||
return cache.shouldIgnore();
|
||||
}
|
||||
|
||||
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
if (!nodeData) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// compute shouldIgnore
|
||||
bool shouldIgnore = true;
|
||||
if ( // the nodes are not ignoring each other explicitly (or are but get data regardless)
|
||||
(!self->isIgnoringNodeWithID(node->getUUID()) ||
|
||||
(nodeData->getRequestsDomainListData() && node->getCanKick())) &&
|
||||
(!node->isIgnoringNodeWithID(self->getUUID()) ||
|
||||
(getRequestsDomainListData() && self->getCanKick()))) {
|
||||
|
||||
// if either node is enabling an ignore radius, check their proximity
|
||||
if ((self->isIgnoreRadiusEnabled() || node->isIgnoreRadiusEnabled())) {
|
||||
auto& zone = _ignoreZone.get(frame);
|
||||
auto& nodeZone = nodeData->_ignoreZone.get(frame);
|
||||
shouldIgnore = zone.touches(nodeZone);
|
||||
} else {
|
||||
shouldIgnore = false;
|
||||
}
|
||||
}
|
||||
|
||||
// cache in node
|
||||
nodeData->_nodeSourcesIgnoreMap[self->getUUID()].cache(shouldIgnore);
|
||||
|
||||
return shouldIgnore;
|
||||
}
|
||||
|
||||
void AudioMixerClientData::setupCodecForReplicatedAgent(QSharedPointer<ReceivedMessage> message) {
|
||||
// hop past the sequence number that leads the packet
|
||||
message->seek(sizeof(quint16));
|
||||
|
|
|
@ -14,6 +14,8 @@
|
|||
|
||||
#include <queue>
|
||||
|
||||
#include <tbb/concurrent_vector.h>
|
||||
|
||||
#include <QtCore/QJsonObject>
|
||||
|
||||
#include <AABox.h>
|
||||
|
@ -30,39 +32,34 @@
|
|||
class AudioMixerClientData : public NodeData {
|
||||
Q_OBJECT
|
||||
public:
|
||||
struct AddedStream {
|
||||
NodeIDStreamID nodeIDStreamID;
|
||||
PositionalAudioStream* positionalStream;
|
||||
|
||||
AddedStream(QUuid nodeID, Node::LocalID localNodeID,
|
||||
StreamID streamID, PositionalAudioStream* positionalStream) :
|
||||
nodeIDStreamID(nodeID, localNodeID, streamID), positionalStream(positionalStream) {};
|
||||
};
|
||||
|
||||
using ConcurrentAddedStreams = tbb::concurrent_vector<AddedStream>;
|
||||
|
||||
AudioMixerClientData(const QUuid& nodeID, Node::LocalID nodeLocalID);
|
||||
~AudioMixerClientData();
|
||||
|
||||
using SharedStreamPointer = std::shared_ptr<PositionalAudioStream>;
|
||||
using AudioStreamMap = std::unordered_map<QUuid, SharedStreamPointer>;
|
||||
using AudioStreamVector = std::vector<SharedStreamPointer>;
|
||||
|
||||
void queuePacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer node);
|
||||
void processPackets();
|
||||
int processPackets(ConcurrentAddedStreams& addedStreams); // returns the number of available streams this frame
|
||||
|
||||
// locks the mutex to make a copy
|
||||
AudioStreamMap getAudioStreams() { QReadLocker readLock { &_streamsLock }; return _audioStreams; }
|
||||
AudioStreamVector& getAudioStreams() { return _audioStreams; }
|
||||
AvatarAudioStream* getAvatarAudioStream();
|
||||
|
||||
// returns whether self (this data's node) should ignore node, memoized by frame
|
||||
// precondition: frame is increasing after first call (including overflow wrap)
|
||||
bool shouldIgnore(SharedNodePointer self, SharedNodePointer node, unsigned int frame);
|
||||
|
||||
// the following methods should be called from the AudioMixer assignment thread ONLY
|
||||
// they are not thread-safe
|
||||
|
||||
// returns a new or existing HRTF object for the given stream from the given node
|
||||
AudioHRTF& hrtfForStream(const QUuid& nodeID, const QUuid& streamID = QUuid()) { return _nodeSourcesHRTFMap[nodeID][streamID]; }
|
||||
|
||||
// removes an AudioHRTF object for a given stream
|
||||
void removeHRTFForStream(const QUuid& nodeID, const QUuid& streamID = QUuid());
|
||||
|
||||
// remove all sources and data from this node
|
||||
void removeNode(const QUuid& nodeID) { _nodeSourcesIgnoreMap.unsafe_erase(nodeID); _nodeSourcesHRTFMap.erase(nodeID); }
|
||||
|
||||
void removeAgentAvatarAudioStream();
|
||||
|
||||
// packet parsers
|
||||
int parseData(ReceivedMessage& message) override;
|
||||
void processStreamPacket(ReceivedMessage& message, ConcurrentAddedStreams& addedStreams);
|
||||
void negotiateAudioFormat(ReceivedMessage& message, const SharedNodePointer& node);
|
||||
void parseRequestsDomainListData(ReceivedMessage& message);
|
||||
void parsePerAvatarGainSet(ReceivedMessage& message, const SharedNodePointer& node);
|
||||
|
@@ -108,11 +105,56 @@ public:
    bool shouldMuteClient() { return _shouldMuteClient; }
    void setShouldMuteClient(bool shouldMuteClient) { _shouldMuteClient = shouldMuteClient; }
    glm::vec3 getPosition() { return getAvatarAudioStream() ? getAvatarAudioStream()->getPosition() : glm::vec3(0); }
    bool getRequestsDomainListData() { return _requestsDomainListData; }
    bool getRequestsDomainListData() const { return _requestsDomainListData; }
    void setRequestsDomainListData(bool requesting) { _requestsDomainListData = requesting; }

    void setupCodecForReplicatedAgent(QSharedPointer<ReceivedMessage> message);

    struct MixableStream {
        float approximateVolume { 0.0f };
        NodeIDStreamID nodeStreamID;
        std::unique_ptr<AudioHRTF> hrtf;
        PositionalAudioStream* positionalStream;
        bool ignoredByListener { false };
        bool ignoringListener { false };

        MixableStream(NodeIDStreamID nodeIDStreamID, PositionalAudioStream* positionalStream) :
            nodeStreamID(nodeIDStreamID), hrtf(new AudioHRTF), positionalStream(positionalStream) {};
        MixableStream(QUuid nodeID, Node::LocalID localNodeID, StreamID streamID, PositionalAudioStream* positionalStream) :
            nodeStreamID(nodeID, localNodeID, streamID), hrtf(new AudioHRTF), positionalStream(positionalStream) {};
    };

    using MixableStreamsVector = std::vector<MixableStream>;
    struct Streams {
        MixableStreamsVector active;
        MixableStreamsVector inactive;
        MixableStreamsVector skipped;
    };

    Streams& getStreams() { return _streams; }

    // thread-safe, called from AudioMixerSlave(s) while processing ignore packets for other nodes
    void ignoredByNode(QUuid nodeID);
    void unignoredByNode(QUuid nodeID);

    // start of methods called non-concurrently from single AudioMixerSlave mixing for the owning node

    const Node::IgnoredNodeIDs& getNewIgnoredNodeIDs() const { return _newIgnoredNodeIDs; }
    const Node::IgnoredNodeIDs& getNewUnignoredNodeIDs() const { return _newUnignoredNodeIDs; }

    using ConcurrentIgnoreNodeIDs = tbb::concurrent_vector<QUuid>;
    const ConcurrentIgnoreNodeIDs& getNewIgnoringNodeIDs() const { return _newIgnoringNodeIDs; }
    const ConcurrentIgnoreNodeIDs& getNewUnignoringNodeIDs() const { return _newUnignoringNodeIDs; }

    void clearStagedIgnoreChanges();

    const Node::IgnoredNodeIDs& getIgnoringNodeIDs() const { return _ignoringNodeIDs; }

    bool getHasReceivedFirstMix() const { return _hasReceivedFirstMix; }
    void setHasReceivedFirstMix(bool hasReceivedFirstMix) { _hasReceivedFirstMix = hasReceivedFirstMix; }

    // end of methods called non-concurrently from single AudioMixerSlave

signals:
    void injectorStreamFinished(const QUuid& streamIdentifier);

@@ -126,52 +168,15 @@ private:
    };
    PacketQueue _packetQueue;

    QReadWriteLock _streamsLock;
    AudioStreamMap _audioStreams; // microphone stream from avatar is stored under key of null UUID
    AudioStreamVector _audioStreams; // microphone stream from avatar has a null stream ID

    void optionallyReplicatePacket(ReceivedMessage& packet, const Node& node);

    using IgnoreZone = AABox;
    class IgnoreZoneMemo {
    public:
        IgnoreZoneMemo(AudioMixerClientData& data) : _data(data) {}
        void setGainForAvatar(QUuid nodeID, uint8_t gain);

        // returns an ignore zone, memoized by frame (lockless if the zone is already memoized)
        // preconditions:
        // - frame is increasing after first call (including overflow wrap)
        // - there are no references left from calls to getIgnoreZone(frame - 1)
        IgnoreZone& get(unsigned int frame);
        bool containsValidPosition(ReceivedMessage& message) const;

    private:
        AudioMixerClientData& _data;
        IgnoreZone _zone;
        std::atomic<unsigned int> _frame { 0 };
        std::mutex _mutex;
    };
    IgnoreZoneMemo _ignoreZone;

    class IgnoreNodeCache {
    public:
        // std::atomic is not copyable - always initialize uncached
        IgnoreNodeCache() {}
        IgnoreNodeCache(const IgnoreNodeCache& other) {}

        void cache(bool shouldIgnore);
        bool isCached();
        bool shouldIgnore();

    private:
        std::atomic<bool> _isCached { false };
        bool _shouldIgnore { false };
    };
    struct IgnoreNodeCacheHasher { std::size_t operator()(const QUuid& key) const { return qHash(key); } };

    using NodeSourcesIgnoreMap = tbb::concurrent_unordered_map<QUuid, IgnoreNodeCache, IgnoreNodeCacheHasher>;
    NodeSourcesIgnoreMap _nodeSourcesIgnoreMap;

    using HRTFMap = std::unordered_map<QUuid, AudioHRTF>;
    using NodeSourcesHRTFMap = std::unordered_map<QUuid, HRTFMap>;
    NodeSourcesHRTFMap _nodeSourcesHRTFMap;
    Streams _streams;

    quint16 _outgoingMixedAudioSequenceNumber;

@@ -190,6 +195,21 @@ private:

    bool _shouldMuteClient { false };
    bool _requestsDomainListData { false };

    std::vector<AddedStream> _newAddedStreams;

    Node::IgnoredNodeIDs _newIgnoredNodeIDs;
    Node::IgnoredNodeIDs _newUnignoredNodeIDs;

    tbb::concurrent_vector<QUuid> _newIgnoringNodeIDs;
    tbb::concurrent_vector<QUuid> _newUnignoringNodeIDs;

    std::mutex _ignoringNodeIDsMutex;
    Node::IgnoredNodeIDs _ignoringNodeIDs;

    std::atomic_bool _isIgnoreRadiusEnabled { false };

    bool _hasReceivedFirstMix { false };
};

#endif // hifi_AudioMixerClientData_h
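
A brief aside on the ConcurrentAddedStreams alias above: tbb::concurrent_vector lets the packet-processing threads record newly created streams without a shared lock, while a later single-threaded pass drains the collected entries. The following standalone sketch is illustrative only and is not part of this change; FakeStream, the thread count, and the loop sizes are made up.

// sketch: concurrent appends from several workers, then a single-threaded read
#include <tbb/concurrent_vector.h>
#include <thread>
#include <vector>

struct FakeStream { int id; };   // hypothetical stand-in for AddedStream

int main() {
    tbb::concurrent_vector<FakeStream> addedStreams;

    // several worker threads appending concurrently, as the mixer slaves do per frame
    std::vector<std::thread> workers;
    for (int t = 0; t < 4; ++t) {
        workers.emplace_back([&addedStreams, t] {
            for (int i = 0; i < 100; ++i) {
                addedStreams.push_back(FakeStream{ t * 100 + i });   // grows without locking
            }
        });
    }
    for (auto& w : workers) { w.join(); }

    // single-threaded consumption afterwards, mirroring how the added streams are later folded in
    int count = 0;
    for (const auto& stream : addedStreams) { ++count; }
    return count == 400 ? 0 : 1;
}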
@@ -36,7 +36,10 @@
#include "InjectedAudioStream.h"
#include "AudioHelpers.h"

using AudioStreamMap = AudioMixerClientData::AudioStreamMap;
using namespace std;
using AudioStreamVector = AudioMixerClientData::AudioStreamVector;
using MixableStream = AudioMixerClientData::MixableStream;
using MixableStreamsVector = AudioMixerClientData::MixableStreamsVector;

// packet helpers
std::unique_ptr<NLPacket> createAudioPacket(PacketType type, int size, quint16 sequence, QString codec);

@@ -46,9 +49,8 @@ void sendMutePacket(const SharedNodePointer& node, AudioMixerClientData&);
void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData& data);

// mix helpers
inline float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
        const glm::vec3& relativePosition);
inline float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudioStream& listeningNodeStream,
inline float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd);
inline float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNodeStream,
        const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance, bool isEcho);
inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
        const glm::vec3& relativePosition);

@@ -56,15 +58,16 @@ inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const
void AudioMixerSlave::processPackets(const SharedNodePointer& node) {
    AudioMixerClientData* data = (AudioMixerClientData*)node->getLinkedData();
    if (data) {
        data->processPackets();
        // process packets and collect the number of streams available for this frame
        stats.sumStreams += data->processPackets(_sharedData.addedStreams);
    }
}

void AudioMixerSlave::configureMix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) {
void AudioMixerSlave::configureMix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain) {
    _begin = begin;
    _end = end;
    _frame = frame;
    _throttlingRatio = throttlingRatio;
    _numToRetain = numToRetain;
}

void AudioMixerSlave::mix(const SharedNodePointer& node) {

@@ -125,105 +128,338 @@ void AudioMixerSlave::mix(const SharedNodePointer& node) {
    }
}


template <class Container, class Predicate>
void erase_if(Container& cont, Predicate&& pred) {
    auto it = remove_if(begin(cont), end(cont), std::forward<Predicate>(pred));
    cont.erase(it, end(cont));
}

template <class Container>
bool contains(const Container& cont, typename Container::value_type value) {
    return std::any_of(begin(cont), end(cont), [&value](const auto& element) {
        return value == element;
    });
}

// This class lets you do an erase_if in several segments
// that use different predicates
template <class Container>
class SegmentedEraseIf {
public:
    using iterator = typename Container::iterator;

    SegmentedEraseIf(Container& cont) : _cont(cont) {
        _first = begin(_cont);
        _it = _first;
    }
    ~SegmentedEraseIf() {
        assert(_it == end(_cont));
        _cont.erase(_first, _it);
    }

    template <class Predicate>
    void iterateTo(iterator last, Predicate pred) {
        while (_it != last) {
            if (!pred(*_it)) {
                if (_first != _it) {
                    *_first = move(*_it);
                }
                ++_first;
            }
            ++_it;
        }
    }

private:
    iterator _first;
    iterator _it;
    Container& _cont;
};
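
For readers unfamiliar with the helper above, here is a small usage sketch (illustrative only, not code from this change): one vector is swept in two segments, each segment with its own removal predicate, and the destructor erases the compacted tail in a single operation. This mirrors how the slave later partitions streams.active around a throttle point and applies different rules to the retained and throttled segments.

// sketch: two segments, two predicates, one erase at the end
std::vector<int> values { 1, 2, 3, 4, 5, 6, 7, 8 };
{
    SegmentedEraseIf<std::vector<int>> erase(values);
    auto middle = values.begin() + 4;
    erase.iterateTo(middle, [](int v) { return v % 2 == 0; });    // first segment: drop even values
    erase.iterateTo(values.end(), [](int v) { return v > 6; });   // second segment: drop values over 6
}   // destructor erases the compacted-away tail; values is now { 1, 3, 5, 6 }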
|
||||
|
||||
|
||||
void AudioMixerSlave::addStreams(Node& listener, AudioMixerClientData& listenerData) {
|
||||
auto& ignoredNodeIDs = listener.getIgnoredNodeIDs();
|
||||
auto& ignoringNodeIDs = listenerData.getIgnoringNodeIDs();
|
||||
|
||||
auto& streams = listenerData.getStreams();
|
||||
|
||||
// add data for newly created streams to our vector
|
||||
if (!listenerData.getHasReceivedFirstMix()) {
|
||||
// when this listener is new, we need to fill its added streams object with all available streams
|
||||
std::for_each(_begin, _end, [&](const SharedNodePointer& node) {
|
||||
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
if (nodeData) {
|
||||
for (auto& stream : nodeData->getAudioStreams()) {
|
||||
bool ignoredByListener = contains(ignoredNodeIDs, node->getUUID());
|
||||
bool ignoringListener = contains(ignoringNodeIDs, node->getUUID());
|
||||
|
||||
if (ignoredByListener || ignoringListener) {
|
||||
streams.skipped.emplace_back(node->getUUID(), node->getLocalID(),
|
||||
stream->getStreamIdentifier(), stream.get());
|
||||
|
||||
// pre-populate ignored and ignoring flags for this stream
|
||||
streams.skipped.back().ignoredByListener = ignoredByListener;
|
||||
streams.skipped.back().ignoringListener = ignoringListener;
|
||||
} else {
|
||||
streams.active.emplace_back(node->getUUID(), node->getLocalID(),
|
||||
stream->getStreamIdentifier(), stream.get());
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// flag this listener as having received their first mix so we know we don't need to enumerate all nodes again
|
||||
listenerData.setHasReceivedFirstMix(true);
|
||||
} else {
|
||||
for (const auto& newStream : _sharedData.addedStreams) {
|
||||
bool ignoredByListener = contains(ignoredNodeIDs, newStream.nodeIDStreamID.nodeID);
|
||||
bool ignoringListener = contains(ignoringNodeIDs, newStream.nodeIDStreamID.nodeID);
|
||||
|
||||
if (ignoredByListener || ignoringListener) {
|
||||
streams.skipped.emplace_back(newStream.nodeIDStreamID, newStream.positionalStream);
|
||||
|
||||
// pre-populate ignored and ignoring flags for this stream
|
||||
streams.skipped.back().ignoredByListener = ignoredByListener;
|
||||
streams.skipped.back().ignoringListener = ignoringListener;
|
||||
} else {
|
||||
streams.active.emplace_back(newStream.nodeIDStreamID, newStream.positionalStream);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool shouldBeRemoved(const MixableStream& stream, const AudioMixerSlave::SharedData& sharedData) {
|
||||
return (contains(sharedData.removedNodes, stream.nodeStreamID.nodeLocalID) ||
|
||||
contains(sharedData.removedStreams, stream.nodeStreamID));
|
||||
};
|
||||
|
||||
bool shouldBeInactive(MixableStream& stream) {
|
||||
return (!stream.positionalStream->lastPopSucceeded() ||
|
||||
stream.positionalStream->getLastPopOutputLoudness() == 0.0f);
|
||||
};
|
||||
|
||||
bool shouldBeSkipped(MixableStream& stream, const Node& listener,
|
||||
const AvatarAudioStream& listenerAudioStream,
|
||||
const AudioMixerClientData& listenerData) {
|
||||
|
||||
if (stream.nodeStreamID.nodeLocalID == listener.getLocalID()) {
|
||||
return !stream.positionalStream->shouldLoopbackForNode();
|
||||
}
|
||||
|
||||
// grab the unprocessed ignores and unignores from and for this listener
|
||||
const auto& nodesIgnoredByListener = listenerData.getNewIgnoredNodeIDs();
|
||||
const auto& nodesUnignoredByListener = listenerData.getNewUnignoredNodeIDs();
|
||||
const auto& nodesIgnoringListener = listenerData.getNewIgnoringNodeIDs();
|
||||
const auto& nodesUnignoringListener = listenerData.getNewUnignoringNodeIDs();
|
||||
|
||||
// this stream was previously not ignored by the listener and we have some newly ignored streams
|
||||
// check now if it is one of the ignored streams and flag it as such
|
||||
if (stream.ignoredByListener) {
|
||||
stream.ignoredByListener = !contains(nodesUnignoredByListener, stream.nodeStreamID.nodeID);
|
||||
} else {
|
||||
stream.ignoredByListener = contains(nodesIgnoredByListener, stream.nodeStreamID.nodeID);
|
||||
}
|
||||
|
||||
if (stream.ignoringListener) {
|
||||
stream.ignoringListener = !contains(nodesUnignoringListener, stream.nodeStreamID.nodeID);
|
||||
} else {
|
||||
stream.ignoringListener = contains(nodesIgnoringListener, stream.nodeStreamID.nodeID);
|
||||
}
|
||||
|
||||
bool listenerIsAdmin = listenerData.getRequestsDomainListData() && listener.getCanKick();
|
||||
if (stream.ignoredByListener || (stream.ignoringListener && !listenerIsAdmin)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
bool shouldCheckIgnoreBox = (listenerAudioStream.isIgnoreBoxEnabled() ||
|
||||
stream.positionalStream->isIgnoreBoxEnabled());
|
||||
if (shouldCheckIgnoreBox &&
|
||||
listenerAudioStream.getIgnoreBox().touches(stream.positionalStream->getIgnoreBox())) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
float approximateVolume(const MixableStream& stream, const AvatarAudioStream* listenerAudioStream) {
|
||||
if (stream.positionalStream->getLastPopOutputTrailingLoudness() == 0.0f) {
|
||||
return 0.0f;
|
||||
}
|
||||
|
||||
if (stream.positionalStream == listenerAudioStream) {
|
||||
return 1.0f;
|
||||
}
|
||||
|
||||
// approximate the gain
|
||||
float gain = approximateGain(*listenerAudioStream, *(stream.positionalStream));
|
||||
|
||||
// for avatar streams, modify by the set gain adjustment
|
||||
if (stream.nodeStreamID.streamID.isNull()) {
|
||||
gain *= stream.hrtf->getGainAdjustment();
|
||||
}
|
||||
|
||||
return stream.positionalStream->getLastPopOutputTrailingLoudness() * gain;
|
||||
};
|
||||
|
||||
bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
|
||||
AvatarAudioStream* listenerAudioStream = static_cast<AudioMixerClientData*>(listener->getLinkedData())->getAvatarAudioStream();
|
||||
AudioMixerClientData* listenerData = static_cast<AudioMixerClientData*>(listener->getLinkedData());
|
||||
|
||||
// if we received an invalid position from this listener, then refuse to make them a mix
|
||||
// because we don't know how to do it properly
|
||||
if (!listenerAudioStream->hasValidPosition()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// zero out the mix for this listener
|
||||
memset(_mixSamples, 0, sizeof(_mixSamples));
|
||||
|
||||
bool isThrottling = _throttlingRatio > 0.0f;
|
||||
std::vector<std::pair<float, SharedNodePointer>> throttledNodes;
|
||||
bool isThrottling = _numToRetain != -1;
|
||||
|
||||
typedef void (AudioMixerSlave::*MixFunctor)(
|
||||
AudioMixerClientData&, const QUuid&, const AvatarAudioStream&, const PositionalAudioStream&);
|
||||
auto forAllStreams = [&](const SharedNodePointer& node, AudioMixerClientData* nodeData, MixFunctor mixFunctor) {
|
||||
auto nodeID = node->getUUID();
|
||||
for (auto& streamPair : nodeData->getAudioStreams()) {
|
||||
auto nodeStream = streamPair.second;
|
||||
(this->*mixFunctor)(*listenerData, nodeID, *listenerAudioStream, *nodeStream);
|
||||
}
|
||||
};
|
||||
auto& streams = listenerData->getStreams();
|
||||
|
||||
#ifdef HIFI_AUDIO_MIXER_DEBUG
|
||||
auto mixStart = p_high_resolution_clock::now();
|
||||
#endif
|
||||
addStreams(*listener, *listenerData);
|
||||
|
||||
std::for_each(_begin, _end, [&](const SharedNodePointer& node) {
|
||||
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
if (!nodeData) {
|
||||
return;
|
||||
// Process skipped streams
|
||||
erase_if(streams.skipped, [&](MixableStream& stream) {
|
||||
if (shouldBeRemoved(stream, _sharedData)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (*node == *listener) {
|
||||
// only mix the echo, if requested
|
||||
for (auto& streamPair : nodeData->getAudioStreams()) {
|
||||
auto nodeStream = streamPair.second;
|
||||
if (nodeStream->shouldLoopbackForNode()) {
|
||||
mixStream(*listenerData, node->getUUID(), *listenerAudioStream, *nodeStream);
|
||||
}
|
||||
}
|
||||
} else if (!listenerData->shouldIgnore(listener, node, _frame)) {
|
||||
if (!isThrottling) {
|
||||
forAllStreams(node, nodeData, &AudioMixerSlave::mixStream);
|
||||
if (!shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
|
||||
if (shouldBeInactive(stream)) {
|
||||
streams.inactive.push_back(move(stream));
|
||||
++stats.skippedToInactive;
|
||||
} else {
|
||||
auto nodeID = node->getUUID();
|
||||
streams.active.push_back(move(stream));
|
||||
++stats.skippedToActive;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// compute the node's max relative volume
|
||||
float nodeVolume = 0.0f;
|
||||
for (auto& streamPair : nodeData->getAudioStreams()) {
|
||||
auto nodeStream = streamPair.second;
|
||||
if (!isThrottling) {
|
||||
updateHRTFParameters(stream, *listenerAudioStream,
|
||||
listenerData->getMasterAvatarGain());
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
// approximate the gain
|
||||
glm::vec3 relativePosition = nodeStream->getPosition() - listenerAudioStream->getPosition();
|
||||
float gain = approximateGain(*listenerAudioStream, *nodeStream, relativePosition);
|
||||
// Process inactive streams
|
||||
erase_if(streams.inactive, [&](MixableStream& stream) {
|
||||
if (shouldBeRemoved(stream, _sharedData)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// modify by hrtf gain adjustment
|
||||
auto& hrtf = listenerData->hrtfForStream(nodeID, nodeStream->getStreamIdentifier());
|
||||
gain *= hrtf.getGainAdjustment();
|
||||
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
|
||||
streams.skipped.push_back(move(stream));
|
||||
++stats.inactiveToSkipped;
|
||||
return true;
|
||||
}
|
||||
|
||||
auto streamVolume = nodeStream->getLastPopOutputTrailingLoudness() * gain;
|
||||
nodeVolume = std::max(streamVolume, nodeVolume);
|
||||
}
|
||||
if (!shouldBeInactive(stream)) {
|
||||
streams.active.push_back(move(stream));
|
||||
++stats.inactiveToActive;
|
||||
return true;
|
||||
}
|
||||
|
||||
// max-heapify the nodes by relative volume
|
||||
throttledNodes.push_back({ nodeVolume, node });
|
||||
std::push_heap(throttledNodes.begin(), throttledNodes.end());
|
||||
if (!isThrottling) {
|
||||
updateHRTFParameters(stream, *listenerAudioStream,
|
||||
listenerData->getMasterAvatarGain());
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
// Process active streams
|
||||
erase_if(streams.active, [&](MixableStream& stream) {
|
||||
if (shouldBeRemoved(stream, _sharedData)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (isThrottling) {
|
||||
// we're throttling, so we need to update the approximate volume for any un-skipped streams
|
||||
// unless this is simply for an echo (in which case the approx volume is 1.0)
|
||||
stream.approximateVolume = approximateVolume(stream, listenerAudioStream);
|
||||
} else {
|
||||
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
|
||||
addStream(stream, *listenerAudioStream, 0.0f);
|
||||
streams.skipped.push_back(move(stream));
|
||||
++stats.activeToSkipped;
|
||||
return true;
|
||||
}
|
||||
|
||||
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain());
|
||||
|
||||
if (shouldBeInactive(stream)) {
|
||||
// To reduce artifacts we still call render to flush the HRTF for every silent
|
||||
// sources on the first frame where the source becomes silent
|
||||
// this ensures the correct tail from last mixed block
|
||||
streams.inactive.push_back(move(stream));
|
||||
++stats.activeToInactive;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
});
|
||||
|
||||
if (isThrottling) {
|
||||
// pop the loudest nodes off the heap and mix their streams
|
||||
int numToRetain = (int)(std::distance(_begin, _end) * (1 - _throttlingRatio));
|
||||
for (int i = 0; i < numToRetain; i++) {
|
||||
if (throttledNodes.empty()) {
|
||||
break;
|
||||
// since we're throttling, we need to partition the mixable into throttled and unthrottled streams
|
||||
int numToRetain = min(_numToRetain, (int)streams.active.size()); // Make sure we don't overflow
|
||||
auto throttlePoint = begin(streams.active) + numToRetain;
|
||||
|
||||
std::nth_element(streams.active.begin(), throttlePoint, streams.active.end(),
|
||||
[](const auto& a, const auto& b)
|
||||
{
|
||||
return a.approximateVolume > b.approximateVolume;
|
||||
});
|
||||
|
||||
SegmentedEraseIf<MixableStreamsVector> erase(streams.active);
|
||||
erase.iterateTo(throttlePoint, [&](MixableStream& stream) {
|
||||
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
|
||||
resetHRTFState(stream);
|
||||
streams.skipped.push_back(move(stream));
|
||||
++stats.activeToSkipped;
|
||||
return true;
|
||||
}
|
||||
|
||||
std::pop_heap(throttledNodes.begin(), throttledNodes.end());
|
||||
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain());
|
||||
|
||||
auto& node = throttledNodes.back().second;
|
||||
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
forAllStreams(node, nodeData, &AudioMixerSlave::mixStream);
|
||||
if (shouldBeInactive(stream)) {
|
||||
// To reduce artifacts we still call render to flush the HRTF for every silent
|
||||
// sources on the first frame where the source becomes silent
|
||||
// this ensures the correct tail from last mixed block
|
||||
streams.inactive.push_back(move(stream));
|
||||
++stats.activeToInactive;
|
||||
return true;
|
||||
}
|
||||
|
||||
throttledNodes.pop_back();
|
||||
}
|
||||
return false;
|
||||
});
|
||||
erase.iterateTo(end(streams.active), [&](MixableStream& stream) {
|
||||
// To reduce artifacts we reset the HRTF state for every throttled
|
||||
// sources on the first frame where the source becomes throttled
|
||||
// this ensures at least remove the tail from last mixed block
|
||||
// preventing excessive artifacts on the next first block
|
||||
resetHRTFState(stream);
|
||||
|
||||
// throttle the remaining nodes' streams
|
||||
for (const std::pair<float, SharedNodePointer>& nodePair : throttledNodes) {
|
||||
auto& node = nodePair.second;
|
||||
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
forAllStreams(node, nodeData, &AudioMixerSlave::throttleStream);
|
||||
}
|
||||
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
|
||||
streams.skipped.push_back(move(stream));
|
||||
++stats.activeToSkipped;
|
||||
return true;
|
||||
}
|
||||
|
||||
if (shouldBeInactive(stream)) {
|
||||
streams.inactive.push_back(move(stream));
|
||||
++stats.activeToInactive;
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
});
|
||||
}
|
||||
|
||||
stats.skipped += (int)streams.skipped.size();
|
||||
stats.inactive += (int)streams.inactive.size();
|
||||
stats.active += (int)streams.active.size();
|
||||
|
||||
// clear the newly ignored, un-ignored, ignoring, and un-ignoring streams now that we've processed them
|
||||
listenerData->clearStagedIgnoreChanges();
|
||||
|
||||
#ifdef HIFI_AUDIO_MIXER_DEBUG
|
||||
auto mixEnd = p_high_resolution_clock::now();
|
||||
auto mixTime = std::chrono::duration_cast<std::chrono::nanoseconds>(mixEnd - mixStart);
|
||||
|
@ -246,51 +482,35 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
|
|||
return hasAudio;
|
||||
}
|
||||
|
||||
void AudioMixerSlave::throttleStream(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
|
||||
const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
|
||||
// only throttle this stream to the mix if it has a valid position, we won't know how to mix it otherwise
|
||||
if (streamToAdd.hasValidPosition()) {
|
||||
addStream(listenerNodeData, sourceNodeID, listeningNodeStream, streamToAdd, true);
|
||||
}
|
||||
}
|
||||
|
||||
void AudioMixerSlave::mixStream(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
|
||||
const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
|
||||
// only add the stream to the mix if it has a valid position, we won't know how to mix it otherwise
|
||||
if (streamToAdd.hasValidPosition()) {
|
||||
addStream(listenerNodeData, sourceNodeID, listeningNodeStream, streamToAdd, false);
|
||||
}
|
||||
}
|
||||
|
||||
void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
|
||||
const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
|
||||
bool throttle) {
|
||||
void AudioMixerSlave::addStream(AudioMixerClientData::MixableStream& mixableStream,
|
||||
AvatarAudioStream& listeningNodeStream,
|
||||
float masterListenerGain) {
|
||||
++stats.totalMixes;
|
||||
|
||||
// to reduce artifacts we call the HRTF functor for every source, even if throttled or silent
|
||||
// this ensures the correct tail from last mixed block and the correct spatialization of next first block
|
||||
auto streamToAdd = mixableStream.positionalStream;
|
||||
|
||||
// check if this is a server echo of a source back to itself
|
||||
bool isEcho = (&streamToAdd == &listeningNodeStream);
|
||||
bool isEcho = (streamToAdd == &listeningNodeStream);
|
||||
|
||||
glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition();
|
||||
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream.getPosition();
|
||||
|
||||
float distance = glm::max(glm::length(relativePosition), EPSILON);
|
||||
float gain = computeGain(listenerNodeData, listeningNodeStream, streamToAdd, relativePosition, distance, isEcho);
|
||||
float gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
|
||||
float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);
|
||||
|
||||
const int HRTF_DATASET_INDEX = 1;
|
||||
|
||||
if (!streamToAdd.lastPopSucceeded()) {
|
||||
if (!streamToAdd->lastPopSucceeded()) {
|
||||
bool forceSilentBlock = true;
|
||||
|
||||
if (!streamToAdd.getLastPopOutput().isNull()) {
|
||||
bool isInjector = dynamic_cast<const InjectedAudioStream*>(&streamToAdd);
|
||||
if (!streamToAdd->getLastPopOutput().isNull()) {
|
||||
bool isInjector = dynamic_cast<const InjectedAudioStream*>(streamToAdd);
|
||||
|
||||
// in an injector, just go silent - the injector has likely ended
|
||||
// in other inputs (microphone, &c.), repeat with fade to avoid the harsh jump to silence
|
||||
if (!isInjector) {
|
||||
// calculate its fade factor, which depends on how many times it's already been repeated.
|
||||
float fadeFactor = calculateRepeatedFrameFadeFactor(streamToAdd.getConsecutiveNotMixedCount() - 1);
|
||||
float fadeFactor = calculateRepeatedFrameFadeFactor(streamToAdd->getConsecutiveNotMixedCount() - 1);
|
||||
if (fadeFactor > 0.0f) {
|
||||
// apply the fadeFactor to the gain
|
||||
gain *= fadeFactor;
|
||||
|
@ -302,15 +522,12 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
|
|||
if (forceSilentBlock) {
|
||||
// call renderSilent with a forced silent block to reduce artifacts
|
||||
// (this is not done for stereo streams since they do not go through the HRTF)
|
||||
if (!streamToAdd.isStereo() && !isEcho) {
|
||||
// get the existing listener-source HRTF object, or create a new one
|
||||
auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());
|
||||
|
||||
if (!streamToAdd->isStereo() && !isEcho) {
|
||||
static int16_t silentMonoBlock[AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL] = {};
|
||||
hrtf.renderSilent(silentMonoBlock, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
|
||||
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
mixableStream.hrtf->render(silentMonoBlock, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
|
||||
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
++stats.hrtfSilentRenders;
|
||||
++stats.hrtfRenders;
|
||||
}
|
||||
|
||||
return;
|
||||
|
@ -318,16 +535,15 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
|
|||
}
|
||||
|
||||
// grab the stream from the ring buffer
|
||||
AudioRingBuffer::ConstIterator streamPopOutput = streamToAdd.getLastPopOutput();
|
||||
AudioRingBuffer::ConstIterator streamPopOutput = streamToAdd->getLastPopOutput();
|
||||
|
||||
// stereo sources are not passed through HRTF
|
||||
if (streamToAdd.isStereo()) {
|
||||
if (streamToAdd->isStereo()) {
|
||||
|
||||
// apply the avatar gain adjustment
|
||||
auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());
|
||||
gain *= hrtf.getGainAdjustment();
|
||||
gain *= mixableStream.hrtf->getGainAdjustment();
|
||||
|
||||
const float scale = 1/32768.0f; // int16_t to float
|
||||
const float scale = 1 / 32768.0f; // int16_t to float
|
||||
|
||||
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL; i++) {
|
||||
_mixSamples[2*i+0] += (float)streamPopOutput[2*i+0] * gain * scale;
|
||||
|
@ -335,11 +551,8 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
|
|||
}
|
||||
|
||||
++stats.manualStereoMixes;
|
||||
return;
|
||||
}
|
||||
|
||||
// echo sources are not passed through HRTF
|
||||
if (isEcho) {
|
||||
} else if (isEcho) {
|
||||
// echo sources are not passed through HRTF
|
||||
|
||||
const float scale = 1/32768.0f; // int16_t to float
|
||||
|
||||
|
@ -350,41 +563,38 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
|
|||
}
|
||||
|
||||
++stats.manualEchoMixes;
|
||||
return;
|
||||
} else {
|
||||
streamPopOutput.readSamples(_bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
mixableStream.hrtf->render(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
|
||||
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
++stats.hrtfRenders;
|
||||
}
|
||||
}
|
||||
|
||||
// get the existing listener-source HRTF object, or create a new one
|
||||
auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());
|
||||
void AudioMixerSlave::updateHRTFParameters(AudioMixerClientData::MixableStream& mixableStream,
|
||||
AvatarAudioStream& listeningNodeStream,
|
||||
float masterListenerGain) {
|
||||
auto streamToAdd = mixableStream.positionalStream;
|
||||
|
||||
streamPopOutput.readSamples(_bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
// check if this is a server echo of a source back to itself
|
||||
bool isEcho = (streamToAdd == &listeningNodeStream);
|
||||
|
||||
if (streamToAdd.getLastPopOutputLoudness() == 0.0f) {
|
||||
// call renderSilent to reduce artifacts
|
||||
hrtf.renderSilent(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
|
||||
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream.getPosition();
|
||||
|
||||
++stats.hrtfSilentRenders;
|
||||
return;
|
||||
}
|
||||
float distance = glm::max(glm::length(relativePosition), EPSILON);
|
||||
float gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
|
||||
float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);
|
||||
|
||||
if (throttle) {
|
||||
// call renderSilent with actual frame data and a gain of 0.0f to reduce artifacts
|
||||
hrtf.renderSilent(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, 0.0f,
|
||||
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
mixableStream.hrtf->setParameterHistory(azimuth, distance, gain);
|
||||
|
||||
++stats.hrtfThrottleRenders;
|
||||
return;
|
||||
}
|
||||
++stats.hrtfUpdates;
|
||||
}
|
||||
|
||||
if (streamToAdd.getType() == PositionalAudioStream::Injector) {
|
||||
// apply per-avatar gain to positional audio injectors, which wouldn't otherwise be affected by PAL sliders
|
||||
hrtf.setGainAdjustment(listenerNodeData.hrtfForStream(sourceNodeID, QUuid()).getGainAdjustment());
|
||||
}
|
||||
|
||||
hrtf.render(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
|
||||
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
|
||||
++stats.hrtfRenders;
|
||||
void AudioMixerSlave::resetHRTFState(AudioMixerClientData::MixableStream& mixableStream) {
|
||||
mixableStream.hrtf->reset();
|
||||
++stats.hrtfResets;
|
||||
}
|
||||
|
||||
std::unique_ptr<NLPacket> createAudioPacket(PacketType type, int size, quint16 sequence, QString codec) {
|
||||
|
@@ -443,12 +653,12 @@ void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData&
        glm::vec3 streamPosition = stream->getPosition();

        // find reverb properties
        for (int i = 0; i < reverbSettings.size(); ++i) {
            AABox box = audioZones[reverbSettings[i].zone];
        for (const auto& settings : reverbSettings) {
            AABox box = audioZones[settings.zone].area;
            if (box.contains(streamPosition)) {
                hasReverb = true;
                reverbTime = reverbSettings[i].reverbTime;
                wetLevel = reverbSettings[i].wetLevel;
                reverbTime = settings.reverbTime;
                wetLevel = settings.wetLevel;
                break;
            }
        }

@@ -493,8 +703,7 @@ void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData&
    }
}

float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
        const glm::vec3& relativePosition) {
float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
    float gain = 1.0f;

    // injector: apply attenuation

@@ -505,13 +714,14 @@ float approximateGain(const AvatarAudioStream& listeningNodeStream, const Positi
    // avatar: skip attenuation - it is too costly to approximate

    // distance attenuation: approximate, ignore zone-specific attenuations
    glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition();
    float distance = glm::length(relativePosition);
    return gain / distance;

    // avatar: skip master gain - it is constant for all streams
}

float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudioStream& listeningNodeStream,
float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNodeStream,
        const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance, bool isEcho) {
    float gain = 1.0f;

@@ -534,7 +744,7 @@ float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudi
        gain *= offAxisCoefficient;

        // apply master gain, only to avatars
        gain *= listenerNodeData.getMasterAvatarGain();
        gain *= masterListenerGain;
    }

    auto& audioZones = AudioMixer::getAudioZones();

@@ -542,10 +752,10 @@ float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudi

    // find distance attenuation coefficient
    float attenuationPerDoublingInDistance = AudioMixer::getAttenuationPerDoublingInDistance();
    for (int i = 0; i < zoneSettings.length(); ++i) {
        if (audioZones[zoneSettings[i].source].contains(streamToAdd.getPosition()) &&
            audioZones[zoneSettings[i].listener].contains(listeningNodeStream.getPosition())) {
            attenuationPerDoublingInDistance = zoneSettings[i].coefficient;
    for (const auto& settings : zoneSettings) {
        if (audioZones[settings.source].area.contains(streamToAdd.getPosition()) &&
            audioZones[settings.listener].area.contains(listeningNodeStream.getPosition())) {
            attenuationPerDoublingInDistance = settings.coefficient;
            break;
        }
    }
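
As a side note on the coefficient selected above: "attenuation per doubling in distance" means the gain loses a fixed fraction every time the source-to-listener distance doubles. The snippet below is a generic illustration of that idea only; it is an assumption about the intent, not the mixer's exact attenuation curve, and distanceGain and referenceDistance are made-up names.

// generic illustration of a per-doubling attenuation coefficient
#include <cmath>

float distanceGain(float distance, float attenuationPerDoubling, float referenceDistance = 1.0f) {
    if (distance <= referenceDistance) {
        return 1.0f;                                   // no attenuation inside the reference distance
    }
    float doublings = std::log2(distance / referenceDistance);
    // e.g. a coefficient of 0.5 halves the gain for each doubling of distance
    return std::pow(1.0f - attenuationPerDoubling, doublings);
}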
@@ -12,29 +12,39 @@
#ifndef hifi_AudioMixerSlave_h
#define hifi_AudioMixerSlave_h

#include <tbb/concurrent_vector.h>

#include <AABox.h>
#include <AudioHRTF.h>
#include <AudioRingBuffer.h>
#include <ThreadedAssignment.h>
#include <UUIDHasher.h>
#include <NodeList.h>
#include <PositionalAudioStream.h>

#include "AudioMixerClientData.h"
#include "AudioMixerStats.h"

class PositionalAudioStream;
class AvatarAudioStream;
class AudioHRTF;
class AudioMixerClientData;

class AudioMixerSlave {
public:
    using ConstIter = NodeList::const_iterator;

    struct SharedData {
        AudioMixerClientData::ConcurrentAddedStreams addedStreams;
        std::vector<Node::LocalID> removedNodes;
        std::vector<NodeIDStreamID> removedStreams;
    };

    AudioMixerSlave(SharedData& sharedData) : _sharedData(sharedData) {};

    // process packets for a given node (requires no configuration)
    void processPackets(const SharedNodePointer& node);

    // configure a round of mixing
    void configureMix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio);
    void configureMix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain);

    // mix and broadcast non-ignored streams to the node (requires configuration using configureMix, above)
    // returns true if a mixed packet was sent to the node

@@ -45,13 +55,15 @@ public:
private:
    // create mix, returns true if mix has audio
    bool prepareMix(const SharedNodePointer& listener);
    void throttleStream(AudioMixerClientData& listenerData, const QUuid& streamerID,
            const AvatarAudioStream& listenerStream, const PositionalAudioStream& streamer);
    void mixStream(AudioMixerClientData& listenerData, const QUuid& streamerID,
            const AvatarAudioStream& listenerStream, const PositionalAudioStream& streamer);
    void addStream(AudioMixerClientData& listenerData, const QUuid& streamerID,
            const AvatarAudioStream& listenerStream, const PositionalAudioStream& streamer,
            bool throttle);
    void addStream(AudioMixerClientData::MixableStream& mixableStream,
            AvatarAudioStream& listeningNodeStream,
            float masterListenerGain);
    void updateHRTFParameters(AudioMixerClientData::MixableStream& mixableStream,
            AvatarAudioStream& listeningNodeStream,
            float masterListenerGain);
    void resetHRTFState(AudioMixerClientData::MixableStream& mixableStream);

    void addStreams(Node& listener, AudioMixerClientData& listenerData);

    // mixing buffers
    float _mixSamples[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];

@@ -61,7 +73,9 @@ private:
    ConstIter _begin;
    ConstIter _end;
    unsigned int _frame { 0 };
    float _throttlingRatio { 0.0f };
    int _numToRetain { -1 };

    SharedData& _sharedData;
};

#endif // hifi_AudioMixerSlave_h

@@ -74,13 +74,11 @@ void AudioMixerSlavePool::processPackets(ConstIter begin, ConstIter end) {
    run(begin, end);
}

void AudioMixerSlavePool::mix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) {
void AudioMixerSlavePool::mix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain) {
    _function = &AudioMixerSlave::mix;
    _configure = [=](AudioMixerSlave& slave) {
        slave.configureMix(_begin, _end, _frame, _throttlingRatio);
        slave.configureMix(_begin, _end, frame, numToRetain);
    };
    _frame = frame;
    _throttlingRatio = throttlingRatio;

    run(begin, end);
}

@@ -167,7 +165,7 @@ void AudioMixerSlavePool::resize(int numThreads) {
    if (numThreads > _numThreads) {
        // start new slaves
        for (int i = 0; i < numThreads - _numThreads; ++i) {
            auto slave = new AudioMixerSlaveThread(*this);
            auto slave = new AudioMixerSlaveThread(*this, _workerSharedData);
            slave->start();
            _slaves.emplace_back(slave);
        }

@@ -31,7 +31,8 @@ class AudioMixerSlaveThread : public QThread, public AudioMixerSlave {
    using Lock = std::unique_lock<Mutex>;

public:
    AudioMixerSlaveThread(AudioMixerSlavePool& pool) : _pool(pool) {}
    AudioMixerSlaveThread(AudioMixerSlavePool& pool, AudioMixerSlave::SharedData& sharedData)
        : AudioMixerSlave(sharedData), _pool(pool) {}

    void run() override final;

@@ -58,14 +59,15 @@ class AudioMixerSlavePool {
public:
    using ConstIter = NodeList::const_iterator;

    AudioMixerSlavePool(int numThreads = QThread::idealThreadCount()) { setNumThreads(numThreads); }
    AudioMixerSlavePool(AudioMixerSlave::SharedData& sharedData, int numThreads = QThread::idealThreadCount())
        : _workerSharedData(sharedData) { setNumThreads(numThreads); }
    ~AudioMixerSlavePool() { resize(0); }

    // process packets on slave threads
    void processPackets(ConstIter begin, ConstIter end);

    // mix on slave threads
    void mix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio);
    void mix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain);

    // iterate over all slaves
    void each(std::function<void(AudioMixerSlave& slave)> functor);

@@ -96,10 +98,10 @@ private:

    // frame state
    Queue _queue;
    unsigned int _frame { 0 };
    float _throttlingRatio { 0.0f };
    ConstIter _begin;
    ConstIter _end;

    AudioMixerSlave::SharedData& _workerSharedData;
};

#endif // hifi_AudioMixerSlavePool_h
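
To see how the reworked pool interface fits together, here is a hypothetical caller; this is a sketch only, and nodeList, frame, and the literal 40 are placeholders rather than values from this change. SharedData is constructed once, handed to the pool, and mix() now takes the number of loudest streams to retain instead of a throttling ratio.

// sketch of driving the pool once per mix frame
AudioMixerSlave::SharedData sharedData;            // addedStreams / removedNodes / removedStreams staging
AudioMixerSlavePool slavePool { sharedData };      // every worker thread shares the same staging data

slavePool.processPackets(nodeList.cbegin(), nodeList.cend());
int numToRetain = 40;                              // -1 appears to mean "retain everything / no throttling"
slavePool.mix(nodeList.cbegin(), nodeList.cend(), frame, numToRetain);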
@@ -15,12 +15,27 @@ void AudioMixerStats::reset() {
    sumStreams = 0;
    sumListeners = 0;
    sumListenersSilent = 0;

    totalMixes = 0;

    hrtfRenders = 0;
    hrtfSilentRenders = 0;
    hrtfThrottleRenders = 0;
    hrtfResets = 0;
    hrtfUpdates = 0;

    manualStereoMixes = 0;
    manualEchoMixes = 0;

    skippedToActive = 0;
    skippedToInactive = 0;
    inactiveToSkipped = 0;
    inactiveToActive = 0;
    activeToSkipped = 0;
    activeToInactive = 0;

    skipped = 0;
    inactive = 0;
    active = 0;

#ifdef HIFI_AUDIO_MIXER_DEBUG
    mixTime = 0;
#endif

@@ -30,12 +45,27 @@ void AudioMixerStats::accumulate(const AudioMixerStats& otherStats) {
    sumStreams += otherStats.sumStreams;
    sumListeners += otherStats.sumListeners;
    sumListenersSilent += otherStats.sumListenersSilent;

    totalMixes += otherStats.totalMixes;

    hrtfRenders += otherStats.hrtfRenders;
    hrtfSilentRenders += otherStats.hrtfSilentRenders;
    hrtfThrottleRenders += otherStats.hrtfThrottleRenders;
    hrtfResets += otherStats.hrtfResets;
    hrtfUpdates += otherStats.hrtfUpdates;

    manualStereoMixes += otherStats.manualStereoMixes;
    manualEchoMixes += otherStats.manualEchoMixes;

    skippedToActive += otherStats.skippedToActive;
    skippedToInactive += otherStats.skippedToInactive;
    inactiveToSkipped += otherStats.inactiveToSkipped;
    inactiveToActive += otherStats.inactiveToActive;
    activeToSkipped += otherStats.activeToSkipped;
    activeToInactive += otherStats.activeToInactive;

    skipped += otherStats.skipped;
    inactive += otherStats.inactive;
    active += otherStats.active;

#ifdef HIFI_AUDIO_MIXER_DEBUG
    mixTime += otherStats.mixTime;
#endif

@@ -24,12 +24,23 @@ struct AudioMixerStats {
    int totalMixes { 0 };

    int hrtfRenders { 0 };
    int hrtfSilentRenders { 0 };
    int hrtfThrottleRenders { 0 };
    int hrtfResets { 0 };
    int hrtfUpdates { 0 };

    int manualStereoMixes { 0 };
    int manualEchoMixes { 0 };

    int skippedToActive { 0 };
    int skippedToInactive { 0 };
    int inactiveToSkipped { 0 };
    int inactiveToActive { 0 };
    int activeToSkipped { 0 };
    int activeToInactive { 0 };

    int skipped { 0 };
    int inactive { 0 };
    int active { 0 };

#ifdef HIFI_AUDIO_MIXER_DEBUG
    uint64_t mixTime { 0 };
#endif

@@ -23,9 +23,9 @@ int AvatarAudioStream::parseStreamProperties(PacketType type, const QByteArray&

    if (type == PacketType::SilentAudioFrame) {
        const char* dataAt = packetAfterSeqNum.constData();
        quint16 numSilentSamples = *(reinterpret_cast<const quint16*>(dataAt));
        readBytes += sizeof(quint16);
        numAudioSamples = (int)numSilentSamples;
        SilentSamplesBytes numSilentSamples = *(reinterpret_cast<const quint16*>(dataAt));
        readBytes += sizeof(SilentSamplesBytes);
        numAudioSamples = (int) numSilentSamples;

        // read the positional data
        readBytes += parsePositionalData(packetAfterSeqNum.mid(readBytes));

@@ -34,9 +34,9 @@ int AvatarAudioStream::parseStreamProperties(PacketType type, const QByteArray&
        _shouldLoopbackForNode = (type == PacketType::MicrophoneAudioWithEcho);

        // read the channel flag
        quint8 channelFlag = packetAfterSeqNum.at(readBytes);
        ChannelFlag channelFlag = packetAfterSeqNum.at(readBytes);
        bool isStereo = channelFlag == 1;
        readBytes += sizeof(quint8);
        readBytes += sizeof(ChannelFlag);

        // if isStereo value has changed, restart the ring buffer with new frame size
        if (isStereo != _isStereo) {

@@ -16,6 +16,8 @@

#include "PositionalAudioStream.h"

using SilentSamplesBytes = quint16;

class AvatarAudioStream : public PositionalAudioStream {
public:
    AvatarAudioStream(bool isStereo, int numStaticJitterFrames = -1);
|
||||
|
|
|
@ -673,7 +673,13 @@ void AvatarMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage>
|
|||
|
||||
void AvatarMixer::handleRadiusIgnoreRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
|
||||
auto start = usecTimestampNow();
|
||||
sendingNode->parseIgnoreRadiusRequestMessage(packet);
|
||||
|
||||
bool enabled;
|
||||
packet->readPrimitive(&enabled);
|
||||
|
||||
auto avatarData = getOrCreateClientData(sendingNode);
|
||||
avatarData->setIsIgnoreRadiusEnabled(enabled);
|
||||
|
||||
auto end = usecTimestampNow();
|
||||
_handleRadiusIgnoreRequestPacketElapsedTime += (end - start);
|
||||
}
|
||||
|
|
|
@ -112,6 +112,11 @@ void AvatarMixerClientData::processSetTraitsMessage(ReceivedMessage& message,
|
|||
AvatarTraits::TraitWireSize traitSize;
|
||||
message.readPrimitive(&traitSize);
|
||||
|
||||
if (traitSize < -1 || traitSize > message.getBytesLeftToRead()) {
|
||||
qWarning() << "Refusing to process simple trait of size" << traitSize << "from" << message.getSenderSockAddr();
|
||||
break;
|
||||
}
|
||||
|
||||
if (packetTraitVersion > _lastReceivedTraitVersions[traitType]) {
|
||||
_avatar->processTrait(traitType, message.read(traitSize));
|
||||
_lastReceivedTraitVersions[traitType] = packetTraitVersion;
|
||||
|
@ -128,26 +133,41 @@ void AvatarMixerClientData::processSetTraitsMessage(ReceivedMessage& message,
|
|||
} else {
|
||||
AvatarTraits::TraitInstanceID instanceID = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
|
||||
|
||||
if (message.getBytesLeftToRead() == 0) {
|
||||
qWarning () << "Received an instanced trait with no size from" << message.getSenderSockAddr();
|
||||
break;
|
||||
}
|
||||
|
||||
AvatarTraits::TraitWireSize traitSize;
|
||||
message.readPrimitive(&traitSize);
|
||||
|
||||
auto& instanceVersionRef = _lastReceivedTraitVersions.getInstanceValueRef(traitType, instanceID);
|
||||
if (traitSize < -1 || traitSize > message.getBytesLeftToRead()) {
|
||||
qWarning() << "Refusing to process instanced trait of size" << traitSize << "from" << message.getSenderSockAddr();
|
||||
break;
|
||||
}
|
||||
|
||||
if (packetTraitVersion > instanceVersionRef) {
|
||||
if (traitSize == AvatarTraits::DELETED_TRAIT_SIZE) {
|
||||
_avatar->processDeletedTraitInstance(traitType, instanceID);
|
||||
if (traitType == AvatarTraits::AvatarEntity) {
|
||||
auto& instanceVersionRef = _lastReceivedTraitVersions.getInstanceValueRef(traitType, instanceID);
|
||||
|
||||
// to track a deleted instance but keep version information
|
||||
// the avatar mixer uses the negative value of the sent version
|
||||
instanceVersionRef = -packetTraitVersion;
|
||||
if (packetTraitVersion > instanceVersionRef) {
|
||||
if (traitSize == AvatarTraits::DELETED_TRAIT_SIZE) {
|
||||
_avatar->processDeletedTraitInstance(traitType, instanceID);
|
||||
|
||||
// to track a deleted instance but keep version information
|
||||
// the avatar mixer uses the negative value of the sent version
|
||||
instanceVersionRef = -packetTraitVersion;
|
||||
} else {
|
||||
_avatar->processTraitInstance(traitType, instanceID, message.read(traitSize));
|
||||
instanceVersionRef = packetTraitVersion;
|
||||
}
|
||||
|
||||
anyTraitsChanged = true;
|
||||
} else {
|
||||
_avatar->processTraitInstance(traitType, instanceID, message.read(traitSize));
|
||||
instanceVersionRef = packetTraitVersion;
|
||||
message.seek(message.getPosition() + traitSize);
|
||||
}
|
||||
|
||||
anyTraitsChanged = true;
|
||||
} else {
|
||||
message.seek(message.getPosition() + traitSize);
|
||||
qWarning() << "Refusing to process traits packet with instanced trait of unprocessable type from" << message.getSenderSockAddr();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -227,7 +247,7 @@ void AvatarMixerClientData::ignoreOther(const Node* self, const Node* other) {
|
|||
addToRadiusIgnoringSet(other->getUUID());
|
||||
auto killPacket = NLPacket::create(PacketType::KillAvatar, NUM_BYTES_RFC4122_UUID + sizeof(KillAvatarReason), true);
|
||||
killPacket->write(other->getUUID().toRfc4122());
|
||||
if (self->isIgnoreRadiusEnabled()) {
|
||||
if (_isIgnoreRadiusEnabled) {
|
||||
killPacket->writePrimitive(KillAvatarReason::TheirAvatarEnteredYourBubble);
|
||||
} else {
|
||||
killPacket->writePrimitive(KillAvatarReason::YourAvatarEnteredTheirBubble);
|
||||
|
|
|
@ -49,6 +49,9 @@ public:
|
|||
const AvatarData* getConstAvatarData() const { return _avatar.get(); }
|
||||
AvatarSharedPointer getAvatarSharedPointer() const { return _avatar; }
|
||||
|
||||
bool isIgnoreRadiusEnabled() const { return _isIgnoreRadiusEnabled; }
|
||||
void setIsIgnoreRadiusEnabled(bool enabled) { _isIgnoreRadiusEnabled = enabled; }
|
||||
|
||||
uint16_t getLastBroadcastSequenceNumber(const QUuid& nodeUUID) const;
|
||||
void setLastBroadcastSequenceNumber(const QUuid& nodeUUID, uint16_t sequenceNumber)
|
||||
{ _lastBroadcastSequenceNumbers[nodeUUID] = sequenceNumber; }
|
||||
|
@ -180,6 +183,8 @@ private:
|
|||
|
||||
std::unordered_map<Node::LocalID, TraitsCheckTimestamp> _lastSentTraitsTimestamps;
|
||||
std::unordered_map<Node::LocalID, AvatarTraits::TraitVersions> _sentTraitVersions;
|
||||
|
||||
std::atomic_bool _isIgnoreRadiusEnabled { false };
|
||||
};
|
||||
|
||||
#endif // hifi_AvatarMixerClientData_h
|
||||
|
|
|
@ -152,6 +152,7 @@ qint64 AvatarMixerSlave::addChangedTraitsToBulkPacket(AvatarMixerClientData* lis
|
|||
});
|
||||
|
||||
if (!isDeleted && (sentInstanceIt == sentIDValuePairs.end() || receivedVersion > sentInstanceIt->value)) {
|
||||
|
||||
// this instance version exists and has never been sent or is newer so we need to send it
|
||||
bytesWritten += sendingAvatar->packTraitInstance(traitType, instanceID, traitsPacketList, receivedVersion);
|
||||
|
||||
|
@ -161,6 +162,7 @@ qint64 AvatarMixerSlave::addChangedTraitsToBulkPacket(AvatarMixerClientData* lis
|
|||
sentIDValuePairs.emplace_back(instanceID, receivedVersion);
|
||||
}
|
||||
} else if (isDeleted && sentInstanceIt != sentIDValuePairs.end() && absoluteReceivedVersion > sentInstanceIt->value) {
|
||||
|
||||
// this instance version was deleted and we haven't sent the delete to this client yet
|
||||
bytesWritten += AvatarTraits::packInstancedTraitDelete(traitType, instanceID, traitsPacketList, absoluteReceivedVersion);
|
||||
|
||||
|
@ -180,6 +182,7 @@ qint64 AvatarMixerSlave::addChangedTraitsToBulkPacket(AvatarMixerClientData* lis
|
|||
listeningNodeData->setLastOtherAvatarTraitsSendPoint(otherNodeLocalID, timeOfLastTraitsChange);
|
||||
}
|
||||
|
||||
|
||||
return bytesWritten;
|
||||
}
|
||||
|
||||
|
@ -345,7 +348,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
|
|||
} else {
|
||||
// Check to see if the space bubble is enabled
|
||||
// Don't bother with these checks if the other avatar has their bubble enabled and we're gettingAnyIgnored
|
||||
if (destinationNode->isIgnoreRadiusEnabled() || (avatarNode->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
|
||||
if (nodeData->isIgnoreRadiusEnabled() || (avatarClientNodeData->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
|
||||
// Perform the collision check between the two bounding boxes
|
||||
const float OTHER_AVATAR_BUBBLE_EXPANSION_FACTOR = 2.4f; // magic number determined empirically
|
||||
AABox otherNodeBox = computeBubbleBox(avatarClientNodeData->getAvatar(), OTHER_AVATAR_BUBBLE_EXPANSION_FACTOR);
|
||||
|
|
|
@@ -18,7 +18,7 @@ macro(SET_PACKAGING_PARAMETERS)
  set(BUILD_GLOBAL_SERVICES "DEVELOPMENT")
  set(USE_STABLE_GLOBAL_SERVICES 0)
  set(BUILD_NUMBER 0)
  set(APP_USER_MODEL_ID "com.highfidelity.console")
  set(APP_USER_MODEL_ID "com.highfidelity.console-dev")

  set_from_env(RELEASE_TYPE RELEASE_TYPE "DEV")
  set_from_env(RELEASE_NUMBER RELEASE_NUMBER "")
|
||||
|
|
cmake/macros/TargetHifiAudioCodec.cmake (new file, 22 lines)

@@ -0,0 +1,22 @@
#
# Copyright 2018 High Fidelity, Inc.
# Created by Gabriel Calero and Cristian Duarte on 2018/10/05
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
macro(TARGET_HIFIAUDIOCODEC)
    if (ANDROID)
        set(HIFIAC_INSTALL_DIR ${HIFI_ANDROID_PRECOMPILED}/hifiAC/codecSDK)
        set(HIFIAC_LIB_DIR "${HIFIAC_INSTALL_DIR}/Release")
        set(HIFIAC_INCLUDE_DIRS "${HIFIAC_INSTALL_DIR}/include" CACHE TYPE INTERNAL)
        list(APPEND HIFIAC_LIBS "${HIFIAC_LIB_DIR}/libaudio.a")
        set(HIFIAC_LIBRARIES ${HIFIAC_LIBS} CACHE TYPE INTERNAL)
    else()
        add_dependency_external_projects(hifiAudioCodec)
        target_include_directories(${TARGET_NAME} PRIVATE ${HIFIAUDIOCODEC_INCLUDE_DIRS})
        target_link_libraries(${TARGET_NAME} ${HIFIAUDIOCODEC_LIBRARIES})
    endif()
    target_include_directories(${TARGET_NAME} PRIVATE ${HIFIAC_INCLUDE_DIRS})
    target_link_libraries(${TARGET_NAME} ${HIFIAC_LIBRARIES})
endmacro()
|
|
@ -412,7 +412,7 @@ Section "-Previous Install Cleanup"
|
|||
Delete "$INSTDIR\@AC_EXEC_NAME@"
|
||||
|
||||
; delete interface so it's not there for server-only installs
|
||||
Delete "$INSTDIR\@INTERFACE_WIN_EXEC_NAME@"
|
||||
Delete "$INSTDIR\@INTERFACE_WIN_EXEC_NAME@"
|
||||
SectionEnd
|
||||
|
||||
@CPACK_NSIS_INSTALLATION_TYPES@
|
||||
|
@ -1004,31 +1004,38 @@ Function HandlePostInstallOptions
|
|||
${EndIf}
|
||||
${EndIf}
|
||||
|
||||
${If} $LaunchConsoleNowState == ${BST_CHECKED}
|
||||
!insertmacro WriteInstallOption @SERVER_LAUNCH_NOW_REG_KEY@ YES
|
||||
|
||||
; both launches use the explorer trick in case the user has elevated permissions for the installer
|
||||
${If} $LaunchClientNowState == ${BST_CHECKED}
|
||||
${If} $LaunchClientNowState == ${BST_CHECKED}
|
||||
!insertmacro WriteInstallOption @CLIENT_LAUNCH_NOW_REG_KEY@ YES
|
||||
${Else}
|
||||
!insertmacro WriteInstallOption @CLIENT_LAUNCH_NOW_REG_KEY@ NO
|
||||
${EndIf}
|
||||
|
||||
${If} $LaunchConsoleNowState == ${BST_CHECKED}
|
||||
!insertmacro WriteInstallOption @SERVER_LAUNCH_NOW_REG_KEY@ YES
|
||||
${Else}
|
||||
!insertmacro WriteInstallOption @SERVER_LAUNCH_NOW_REG_KEY@ NO
|
||||
${EndIf}
|
||||
|
||||
${If} $LaunchConsoleNowState == ${BST_CHECKED}
|
||||
${If} @SERVER_COMPONENT_CONDITIONAL@
|
||||
${AndIf} $LaunchClientNowState == ${BST_CHECKED}
|
||||
${AndIf} @CLIENT_COMPONENT_CONDITIONAL@
|
||||
; both launches use the explorer trick in case the user has elevated permissions for the installer
|
||||
; create shortcut with ARGUMENTS
|
||||
CreateShortCut "$TEMP\ConsoleShortcut.lnk" "$INSTDIR\@CONSOLE_INSTALL_SUBDIR@\@CONSOLE_WIN_EXEC_NAME@" "-- --launchInterface"
|
||||
Exec '"$WINDIR\explorer.exe" "$TEMP\ConsoleShortcut.lnk"'
|
||||
${Else}
|
||||
!insertmacro WriteInstallOption @CLIENT_LAUNCH_NOW_REG_KEY@ NO
|
||||
Exec '"$WINDIR\explorer.exe" "$INSTDIR\@CONSOLE_INSTALL_SUBDIR@\@CONSOLE_WIN_EXEC_NAME@"'
|
||||
${EndIf}
|
||||
${EndIf}
|
||||
|
||||
${ElseIf} @CLIENT_COMPONENT_CONDITIONAL@
|
||||
!insertmacro WriteInstallOption @SERVER_LAUNCH_NOW_REG_KEY@ NO
|
||||
|
||||
; launch uses the explorer trick in case the user has elevated permissions for the installer
|
||||
${If} $LaunchClientNowState == ${BST_CHECKED}
|
||||
!insertmacro WriteInstallOption @CLIENT_LAUNCH_NOW_REG_KEY@ YES
|
||||
${If} $LaunchClientNowState == ${BST_CHECKED}
|
||||
${AndIf} @CLIENT_COMPONENT_CONDITIONAL@
|
||||
${Unless} $LaunchConsoleNowState == ${BST_CHECKED}
|
||||
${OrUnless} @SERVER_COMPONENT_CONDITIONAL@
|
||||
; launch uses the explorer trick in case the user has elevated permissions for the installer
|
||||
Exec '"$WINDIR\explorer.exe" "$INSTDIR\@INTERFACE_WIN_EXEC_NAME@"'
|
||||
${Else}
|
||||
!insertmacro WriteInstallOption @CLIENT_LAUNCH_NOW_REG_KEY@ NO
|
||||
${EndIf}
|
||||
|
||||
${EndIf}
|
||||
FunctionEnd
|
||||
|
||||
|
|
24
docs/LICENSE_highlight.js.txt
Normal file
|
@ -0,0 +1,24 @@
|
|||
Copyright (c) 2006, Ivan Sagalaev
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification, are
|
||||
permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this list of
|
||||
conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright notice, this list
|
||||
of conditions and the following disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
* Neither the name of highlight.js nor the names of its contributors may be used to
|
||||
endorse or promote products derived from this software without specific prior written
|
||||
permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR
|
||||
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
|
||||
THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
||||
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
|
||||
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
23
docs/LICENSE_markdeep.txt
Normal file
|
@ -0,0 +1,23 @@
|
|||
Copyright 2015-2017, Morgan McGuire
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or other
|
||||
materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
|
||||
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
|
||||
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
|
||||
THE POSSIBILITY OF SUCH DAMAGE.
|
280
docs/interface/raypick/pick-parenting.md.html
Normal file
|
@ -0,0 +1,280 @@
|
|||
<meta charset="utf-8">
|
||||
**Pick Parenting**
|
||||
|
||||
# What is a pick?
|
||||
|
||||
A pick calculates spatial information about the world on the client.
|
||||
Picks are immutable to allow efficient use by scripts. There are currently four types of picks:
|
||||
|
||||
- Ray pick - Finds the first intersected object along a straight path
|
||||
- Parabola pick - Finds the first intersected object along a parabolic path
|
||||
- Stylus pick - Finds the distance between a point and the XZ planes of a list of whitelisted objects
|
||||
- Collision pick - Uses a volume to check for collisions in the physics engine
|
||||
|
||||
With the exception of collision picks, all picks use the visible appearance of the object to check for collisions.
|
||||
|
||||
# What is parenting?
|
||||
|
||||
Parenting allows an object's position, orientation, and scale (where applicable) to be calculated relative to another object. This collection of state defines the transform of the object.
|
||||
|
||||
Pick parenting allows the pick's transform to be calculated relative to another object, without creating a new pick.
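For concreteness, here is a minimal C++ sketch of the idea, following the pattern this same commit uses in Application.cpp for the mouse ray pick (RayPick, MouseTransformNode, PickFilter, PickManager and the filter flags are taken from that change; the wrapper function and anything not shown there is illustrative only):

```cpp
// Sketch only: create a ray pick and give it a parent transform, mirroring the
// mouse ray pick setup in Application.cpp from this commit. Not a drop-in snippet.
#include <raypick/RayPick.h>
#include <raypick/MouseTransformNode.h>
#include <raypick/PickScriptingInterface.h>
// PickManager / DependencyManager headers are assumed available, as in Application.cpp.

unsigned int createMouseParentedRayPick() {
    // Origin/direction are placeholders; the parent transform re-positions the pick each update.
    auto rayPick = std::make_shared<RayPick>(Vectors::ZERO, Vectors::UP,
        PickFilter(PickScriptingInterface::PICK_ENTITIES() | PickScriptingInterface::PICK_INCLUDE_NONCOLLIDABLE()),
        0.0f /* max distance: unlimited */, true /* enabled */);

    // Parenting: the pick's transform is re-evaluated against a parent TransformNode,
    // so the pick does not have to be destroyed and recreated when the parent moves.
    rayPick->parentTransform = std::make_shared<MouseTransformNode>();
    rayPick->setJointState(PickQuery::JOINT_STATE_MOUSE);

    // Register the pick with the PickManager, which owns and updates all picks.
    return DependencyManager::get<PickManager>()->addPick(PickQuery::Ray, rayPick);
}
```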
|
||||
|
||||
# What can be parented to what?
|
||||
|
||||
This object... | | Entity | My Avatar | 3D Overlay | Pick^1 | Pointer^1
|
||||
---------------------|----------------|--------|-----------|------------|--------|-----------
|
||||
Can be parented to...| Entity | yes | yes | yes | yes | yes
|
||||
| My Avatar | yes | no | yes | yes | yes
|
||||
| Other Avatar | yes | yes | yes | yes | yes
|
||||
| 3D Overlay | yes | yes | yes | yes | yes
|
||||
| Pick | no | no | no | yes | yes
|
||||
| Pointer | no | no | no | no | no
|
||||
| The Mouse | no | no | no | yes | yes
|
||||
|
||||
- ^1 Excluding stylus, which can only be parented to My Avatar's hands for now
|
||||
|
||||
# How pick parenting works
|
||||
|
||||
This section describes what happens when picks are parented to things.
|
||||
Since pointers use picks to calculate their results, the rules below
|
||||
also apply when pointers are parented to things.
|
||||
|
||||
## Parent transform exposed
|
||||
|
||||
The way a pick is moved/rescaled depends on its parent. In particular,
|
||||
parenting to a pick actually parents to the pick's result, which
|
||||
has no notion of scale. A pick can also be transformed relative to
|
||||
a joint on a model, such as an avatar's hand.
|
||||
|
||||
Parent type: | Entity/Avatar | 3D Overlay | Pick
|
||||
--------------------------|-----------------|--------------|--------
|
||||
Has position | yes | yes | yes
|
||||
Has orientation | yes | yes | no
|
||||
Has scale | yes | yes | no
|
||||
Can have joints | yes | no | no
|
||||
|
||||
## Entity or overlay parent
|
||||
|
||||
Parenting a pick to an entity or overlay works similarly to
|
||||
parenting entities to other things. However, unlike entities,
|
||||
picks with scale will rescale their dimensions when
|
||||
their parent rescales.
|
||||
|
||||
************************************************************************
|
||||
* May have a joint, otherwise parent to the parent's origin *
|
||||
* | *
|
||||
* scale ^ +--+------+ ^ --+ orientation *
|
||||
* determined / / | /| |orientation \ offset *
|
||||
* from / / v / | | v *
|
||||
* dimensions / / o /..|...................... ^ *
|
||||
* +-> +---------+ | ------------. . / *
|
||||
* | | | / position +----> */ <----+ *
|
||||
* | | | / offset child *
|
||||
* | | |/ ^ transform*
|
||||
* v +---------+ / <---+ *
|
||||
* . +-> dimensions *
|
||||
* . . . . . . . . . . . . . . | relative *
|
||||
* . . . v to parent *
|
||||
************************************************************************
|
||||
|
||||
!!! WARNING
|
||||
Skew is not supported for collision picks. Scaling a parent entity or
|
||||
overlay non-uniformly can lead to inaccurate shapes for the child
|
||||
collision pick.
|
||||
|
||||
## Avatar parent
|
||||
|
||||
A pick parented to an avatar behaves like a wearable. It will maintain
|
||||
its position relative to some point on the avatar's body. If the pick
|
||||
has scale (currently only collision picks), then the pick will rescale
|
||||
when the avatar rescales.
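As a small illustration of this wearable-like behaviour (pure glm math, not code taken from the implementation): the pick keeps a fixed offset in the joint's local frame, and its world position is recomputed from the joint's transform and the avatar's scale factor.

```cpp
// Illustrative glm math only: a pick parented to an avatar hand joint follows the joint.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

glm::vec3 pickWorldPosition(const glm::vec3& jointWorldPosition, const glm::quat& jointWorldRotation,
                            float avatarScaleFactor, const glm::vec3& localPositionOffset) {
    // The local offset is expressed in the joint's frame and scaled with the avatar,
    // so the pick moves and rescales with the hand like a wearable would.
    return jointWorldPosition + jointWorldRotation * (avatarScaleFactor * localPositionOffset);
}
```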
|
||||
|
||||
****************************************************************
|
||||
* Avatar .---. hand ^ -. *
|
||||
* | | | joint / | *
|
||||
* '-----> | | \ /orientation | *
|
||||
* '-+-' +-> o | *
|
||||
* | / ^ position |relative *
|
||||
* +---------' +----- |avatar *
|
||||
* | |scale *
|
||||
* | if not a joint, |factor *
|
||||
* | parent to the |(default: 1) *
|
||||
* | avatar's origin | *
|
||||
* |\ | |uniform scale *
|
||||
* | \ | | ^ *
|
||||
* | \ | | +-> *
|
||||
* | o \ <-+ -' v *
|
||||
****************************************************************
|
||||
|
||||
***************************************************************
|
||||
* orientation offset *
|
||||
* <-----+ *
|
||||
* ^ \ *
|
||||
* \ *
|
||||
* \ child *
|
||||
* Avatar .---. hand .......... * <-- transform *
|
||||
* | | | joint . ^ | *
|
||||
* '-----> | | \ . / v *
|
||||
* '-+-' +-> o ---------+ ^ *
|
||||
* | / position +-> scale *
|
||||
* +---------' offset v relative *
|
||||
* | to avatar *
|
||||
***************************************************************
|
||||
|
||||
## Pick parent
|
||||
|
||||
Picks can also be parented to other picks.
|
||||
When this is done, the child pick is actually parented to
|
||||
the parent pick's result, not the pick itself.
|
||||
|
||||
A pick parented to another pick will have its position changed,
|
||||
but not its orientation or scale.
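Put another way (illustrative glm math, not the implementation): only the origin follows the parent pick's result; the child keeps its own orientation and scale.

```cpp
// Illustrative only: a child pick inherits position from the parent pick's result.
#include <glm/glm.hpp>

glm::vec3 childPickOrigin(const glm::vec3& parentResultIntersection, const glm::vec3& positionOffset) {
    // Orientation and scale of the child pick are deliberately left untouched.
    return parentResultIntersection + positionOffset;
}
```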
|
||||
|
||||
### Ray pick parent
|
||||
|
||||
**********************************
|
||||
* position *
|
||||
* o *
|
||||
* \ \ orientation *
|
||||
* \ \ *
|
||||
* \ v . . . . *
|
||||
* \ . *
|
||||
* \ . result transform *
|
||||
* * <-- no scale *
|
||||
* . no orientation *
|
||||
* . *
|
||||
**********************************
|
||||
|
||||
### Parabola pick parent
|
||||
|
||||
**************************************************************
|
||||
* .------. acceleration *
|
||||
* + + | ^ *
|
||||
* speed x ^ / \ . | | *
|
||||
* orientation / / \ . v | *
|
||||
* / / * y axis to rotate *
|
||||
* | . ^ acceleration with *
|
||||
* o . | *
|
||||
* position result transform *
|
||||
* no scale *
|
||||
* no orientation *
|
||||
**************************************************************
|
||||
|
||||
### Stylus pick parent
|
||||
|
||||
********************************************
|
||||
* *
|
||||
* *
|
||||
* . *
|
||||
* . *
|
||||
* . *
|
||||
* .---. avatar * *
|
||||
* | | hand / ^. *
|
||||
* | | \ / \ . *
|
||||
* '-+-' +-> o \ *
|
||||
* | / result transform *
|
||||
* +---------' no scale *
|
||||
* | no orientation *
|
||||
********************************************
|
||||
|
||||
### Collision pick parent
|
||||
|
||||
********************************************
|
||||
* *
|
||||
* .---. *
|
||||
* | | <-- collision pick *
|
||||
* | | *
|
||||
* | *<-+--- result transform at *
|
||||
* | | collision pick position *
|
||||
* | | no scale *
|
||||
* . . . '---' . . no orientation *
|
||||
* *
|
||||
********************************************
|
||||
|
||||
# Effect of scale on picks
|
||||
|
||||
Scale affects the position and shape of picks, which in turn affects the pick result.
|
||||
Scale currently does not affect the max distance of a pick.
|
||||
|
||||
## Ray and stylus pick scaling
|
||||
|
||||
Rescaling the parent of a ray pick or stylus pick can result in a translation of the pick.
|
||||
|
||||
***************************************
|
||||
* before after *
|
||||
* *
|
||||
* pick *
|
||||
* pick +--------------+ ^ *
|
||||
* ^ | |/ *
|
||||
* +-----+/ | o *
|
||||
* | o | | *
|
||||
* | | | | *
|
||||
* +-----+ | | *
|
||||
* parent | | *
|
||||
* +--------------+ *
|
||||
* parent *
|
||||
* *
|
||||
***************************************
|
||||
|
||||
## Parabola pick scaling
|
||||
|
||||
***************************************************************************
|
||||
* before after after *
|
||||
* (scaleWithParent (scaleWithParent *
|
||||
* is false) is true) *
|
||||
* *
|
||||
* .----. *
|
||||
* + + *
|
||||
* .--. / \ *
|
||||
* + + / \ *
|
||||
* .--. / \ / \ *
|
||||
* + + /pick \ /pick \ *
|
||||
* / \ +-------o +-------o *
|
||||
* /pick \ | | | | *
|
||||
* +---o | | | | *
|
||||
* | | | | | | *
|
||||
* +---+ +-------+ +-------+ *
|
||||
* parent parent parent *
|
||||
***************************************************************************
|
||||
|
||||
## Collision pick scaling
|
||||
|
||||
Collision picks use the full transform of their parent (position, orientation, and scale/dimensions).
|
||||
When first created, a collision pick's transform is defined in world space.
|
||||
As the parent rescales, the collision pick rescales proportionally.
|
||||
|
||||
The collision pick's threshold also rescales. The change is proportional to the largest
|
||||
dimension of the parent. So, if the largest dimension of the parent was 3.0 and is now 6.0,
|
||||
the threshold doubles.
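Stated as a formula (variable names are illustrative, not taken from the implementation):

```cpp
// Illustrative only: the collision pick threshold scales with the parent's largest dimension.
float rescaledThreshold(float threshold, float oldMaxParentDimension, float newMaxParentDimension) {
    // e.g. largest dimension going from 3.0 to 6.0 doubles the threshold
    return threshold * (newMaxParentDimension / oldMaxParentDimension);
}
```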
|
||||
|
||||
**************************************************************************************
|
||||
* *
|
||||
* before after after *
|
||||
* (scaleWithParent (scaleWithParent *
|
||||
* is false) is true) *
|
||||
* *
|
||||
* pick *
|
||||
* +------------+ *
|
||||
* | | *
|
||||
* pick | ........ | *
|
||||
* +-----+ | . . | *
|
||||
* |.....| | . . | *
|
||||
* |. .| theshold | . . | *
|
||||
* pick |.....|___ | | . . | *
|
||||
* +-----+ +-----+--- <-+ | ........ +___ *
|
||||
* |.....| | | theshold *
|
||||
* |. .| theshold +------------+--- *
|
||||
* |.....|___ | *
|
||||
* +-----+--- <-+ *
|
||||
* +----------+ +----------+ *
|
||||
* +---+ / \ / \ *
|
||||
* +-----+ +--------------+ +--------------+ *
|
||||
* parent parent parent *
|
||||
* *
|
||||
**************************************************************************************
|
||||
|
||||
|
||||
|
||||
<style class="fallback">body{visibility:hidden}</style><script>markdeepOptions={tocStyle:'medium'};</script>
|
||||
<link rel="stylesheet" href="../../markdeep_apidoc.css?">
|
||||
<!-- Markdeep: --><style class="fallback">body{visibility:hidden;white-space:pre;font-family:monospace}</style><script src="../../markdeep.min.js"></script><script>window.alreadyProcessedMarkdeep||(document.body.style.visibility="visible")</script>
|
7
docs/markdeep.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
170
docs/markdeep_apidoc.css
Normal file
|
@ -0,0 +1,170 @@
|
|||
/* Custom stylesheet for API documentation by Aras Pranckevičius, http://aras-p.info/
|
||||
and tweaked by Morgan McGuire.
|
||||
Licensed as public domain or BSD 2-clause, whichever is more convenient for you.
|
||||
Originally from https://github.com/aras-p/markdeep-docs-style */
|
||||
body {
|
||||
max-width: 50em;
|
||||
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
|
||||
text-align: left;
|
||||
margin: 1.5em;
|
||||
padding: 0 1em;
|
||||
}
|
||||
|
||||
/* if screen is wide enough, put table of contents on the right side */
|
||||
@media screen and (min-width: 64em) {
|
||||
.md .longTOC, .md .mediumTOC, .md .shortTOC {
|
||||
max-width: 20em;
|
||||
left: 54em;
|
||||
display:block;
|
||||
position: fixed;
|
||||
top:0;
|
||||
bottom:0;
|
||||
overflow-y:scroll;
|
||||
margin-top:0;
|
||||
margin-bottom:0;
|
||||
padding-top:1em;
|
||||
}
|
||||
}
|
||||
|
||||
/* for narrow screens or print, hide table of contents */
|
||||
@media screen and (max-width: 64em) {
|
||||
.md .longTOC, .md .mediumTOC, .md .shortTOC { display: none; }
|
||||
}
|
||||
|
||||
@media print {
|
||||
.md .longTOC, .md .mediumTOC, .md .shortTOC { display: none; }
|
||||
body { max-width: 100%; }
|
||||
}
|
||||
|
||||
/* reset heading/link fonts to that of body */
|
||||
.md a,
|
||||
.md div.title, contents, .md .tocHeader,
|
||||
.md h1, .md h2, .md h3, .md h4, .md h5, .md h6,
|
||||
.md .nonumberh1, .md .nonumberh2, .md .nonumberh3, .md .nonumberh4, .md .nonumberh5, .md .nonumberh6,
|
||||
.md .shortTOC, .md .mediumTOC, .md .longTOC {
|
||||
font-family: inherit;
|
||||
}
|
||||
|
||||
.md div.title {
|
||||
margin: 0.4em 0 0 0;
|
||||
padding: 0;
|
||||
text-align: inherit;
|
||||
}
|
||||
|
||||
.md div.subtitle {
|
||||
text-align: inherit;
|
||||
}
|
||||
|
||||
/* faint border below headings */
|
||||
.md h1, .md h2, .md h3, .md h4,
|
||||
.md .nonumberh1, .md .nonumberh2, .md .nonumberh3, .md .nonumberh4 {
|
||||
border-bottom: 1px solid rgba(0,0,0,.1);
|
||||
}
|
||||
/* heading font styles */
|
||||
.md h1, .md .nonumberh1, .md div.title {
|
||||
font-size: 150%;
|
||||
font-weight: 600;
|
||||
color: rgba(0,0,0,.7);
|
||||
}
|
||||
.md h2, .md .nonumberh2 {
|
||||
font-size: 120%;
|
||||
font-weight: 400;
|
||||
color: rgba(0,0,0,.9);
|
||||
}
|
||||
.md h3, .md .nonumberh3 {
|
||||
font-size: 110%;
|
||||
font-weight: 400;
|
||||
color: rgba(0,0,0,.7);
|
||||
}
|
||||
/* no numbering of headings */
|
||||
.md h1:before, .md h2:before, .md h3:before, .md h4:before { content: none; }
|
||||
|
||||
/* link styling */
|
||||
.md a:link, .md a:visited {
|
||||
color: #3f51b5;
|
||||
}
|
||||
|
||||
/* inline and block code */
|
||||
.md code, .md pre.listing {
|
||||
background-color: rgba(0,0,0,.05);
|
||||
padding: 0.1em 0.2em;
|
||||
border-radius: 0.15em;
|
||||
}
|
||||
.md pre.listing code {
|
||||
background-color: transparent;
|
||||
padding: 0;
|
||||
border: none;
|
||||
}
|
||||
|
||||
/* table of contents styling; make all 3 forms of it look the same */
|
||||
.md .longTOC, .md .mediumTOC, .md .shortTOC {
|
||||
font-size: inherit;
|
||||
line-height: 120%;
|
||||
margin: 1em 0;
|
||||
padding: .4rem;
|
||||
border-left: .1rem solid #3f51b5;
|
||||
}
|
||||
|
||||
.md .tocHeader {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
border: none;
|
||||
font-size: inherit;
|
||||
}
|
||||
|
||||
.md .tocNumber {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.md .longTOC .level1, .md .mediumTOC .level1, .md .shortTOC .level1 {
|
||||
font-weight: inherit;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.md .longTOC p, .md .mediumTOC p, .md .shortTOC p {
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
|
||||
.md .longTOC center, .md .mediumTOC center, .md .shortTOC center, .md .tocHeader {
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.md .longTOC b, .md .mediumTOC b, .md .shortTOC b {
|
||||
font-weight: 400;
|
||||
}
|
||||
|
||||
.md .longTOC center b, .md .mediumTOC center b, .md .shortTOC center b {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.md .longTOC a, .md .mediumTOC a, .md .shortTOC a {
|
||||
color: black;
|
||||
}
|
||||
|
||||
.md .longTOC .level1, .md .mediumTOC .level1, .md .shortTOC .level1,
|
||||
.md .longTOC .level2, .md .mediumTOC .level2, .md .shortTOC .level2,
|
||||
.md .longTOC .level3, .md .mediumTOC .level3, .md .shortTOC .level3 {
|
||||
white-space: nowrap;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
font-size: 90%;
|
||||
}
|
||||
|
||||
/* tables; use fainter colors than regular markdeep style */
|
||||
.md table.table {
|
||||
font-size: 90%;
|
||||
}
|
||||
.md table.table th {
|
||||
border: none;
|
||||
background-color: #ccc;
|
||||
color: rgba(0,0,0,.6);
|
||||
}
|
||||
.md table.table tr, .md table.table td {
|
||||
border-color: #eee;
|
||||
}
|
||||
.md table.table tr:nth-child(even) {
|
||||
background-color: #f4f4f4;
|
||||
}
|
||||
|
|
@ -1110,7 +1110,36 @@ function moveTableRow(row, move_up) {
|
|||
}
|
||||
|
||||
// we need to fire a change event on one of the remaining inputs so that the sidebar badge is updated
|
||||
badgeForDifferences($(table))
|
||||
badgeForDifferences($(table));
|
||||
|
||||
// figure out which group this row is in
|
||||
var panelParentID = row.closest('.panel').attr('id');
|
||||
|
||||
// get the short name for the setting from the table
|
||||
var tableShortName = row.closest('table').data('short-name');
|
||||
|
||||
var changed = tableHasChanged(panelParentID, tableShortName);
|
||||
$(table).find('.' + Settings.DATA_ROW_CLASS).each(function(){
|
||||
var hiddenInput = $(this).find('td.' + Settings.DATA_COL_CLASS + ' input');
|
||||
if (changed) {
|
||||
hiddenInput.attr('data-changed', true);
|
||||
} else {
|
||||
hiddenInput.removeAttr('data-changed');
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
function tableHasChanged(panelParentID, tableShortName) {
|
||||
// get a JSON representation of that section
|
||||
var panelSettingJSON = form2js(panelParentID, ".", false, cleanupFormValues, true)[panelParentID][tableShortName]
|
||||
if (Settings.initialValues[panelParentID]) {
|
||||
var initialPanelSettingJSON = Settings.initialValues[panelParentID][tableShortName]
|
||||
} else {
|
||||
var initialPanelSettingJSON = {};
|
||||
}
|
||||
|
||||
return !_.isEqual(panelSettingJSON, initialPanelSettingJSON);
|
||||
}
|
||||
|
||||
function updateDataChangedForSiblingRows(row, forceTrue) {
|
||||
|
@ -1123,16 +1152,8 @@ function updateDataChangedForSiblingRows(row, forceTrue) {
|
|||
// get the short name for the setting from the table
|
||||
var tableShortName = row.closest('table').data('short-name')
|
||||
|
||||
// get a JSON representation of that section
|
||||
var panelSettingJSON = form2js(panelParentID, ".", false, cleanupFormValues, true)[panelParentID][tableShortName]
|
||||
if (Settings.initialValues[panelParentID]) {
|
||||
var initialPanelSettingJSON = Settings.initialValues[panelParentID][tableShortName]
|
||||
} else {
|
||||
var initialPanelSettingJSON = {};
|
||||
}
|
||||
|
||||
// if they are equal, we don't need data-changed
|
||||
isTrue = !_.isEqual(panelSettingJSON, initialPanelSettingJSON)
|
||||
isTrue = tableHasChanged(panelParentID, tableShortName);
|
||||
} else {
|
||||
isTrue = true
|
||||
}
|
||||
|
@ -1140,9 +1161,9 @@ function updateDataChangedForSiblingRows(row, forceTrue) {
|
|||
row.siblings('.' + Settings.DATA_ROW_CLASS).each(function(){
|
||||
var hiddenInput = $(this).find('td.' + Settings.DATA_COL_CLASS + ' input')
|
||||
if (isTrue) {
|
||||
hiddenInput.attr('data-changed', isTrue)
|
||||
hiddenInput.attr('data-changed', isTrue);
|
||||
} else {
|
||||
hiddenInput.removeAttr('data-changed')
|
||||
hiddenInput.removeAttr('data-changed');
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
|
@ -2906,7 +2906,7 @@ void DomainServer::updateReplicationNodes(ReplicationServerDirection direction)
|
|||
// collect them in a vector to separately remove them with handleKillNode (since eachNode has a read lock and
|
||||
// we cannot recursively take the write lock required by handleKillNode)
|
||||
std::vector<SharedNodePointer> nodesToKill;
|
||||
nodeList->eachNode([this, direction, replicationNodesInSettings, replicationDirection, &nodesToKill](const SharedNodePointer& otherNode) {
|
||||
nodeList->eachNode([direction, replicationNodesInSettings, replicationDirection, &nodesToKill](const SharedNodePointer& otherNode) {
|
||||
if ((direction == Upstream && NodeType::isUpstream(otherNode->getType()))
|
||||
|| (direction == Downstream && NodeType::isDownstream(otherNode->getType()))) {
|
||||
bool nodeInSettings = find(replicationNodesInSettings.cbegin(), replicationNodesInSettings.cend(),
|
||||
|
|
BIN
interface/resources/images/interstitialPage/button.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 2.8 KiB |
BIN
interface/resources/images/interstitialPage/button_back.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 3.8 KiB |
BIN
interface/resources/images/interstitialPage/button_hover.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 2.1 KiB |
BIN
interface/resources/images/interstitialPage/button_tryAgain.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 4.4 KiB |
|
@ -23,6 +23,7 @@ Item {
|
|||
property bool failAfterSignUp: false
|
||||
|
||||
function login() {
|
||||
flavorText.visible = false
|
||||
mainTextContainer.visible = false
|
||||
toggleLoading(true)
|
||||
loginDialog.login(usernameField.text, passwordField.text)
|
||||
|
@ -43,7 +44,7 @@ Item {
|
|||
|
||||
function resize() {
|
||||
var targetWidth = Math.max(titleWidth, form.contentWidth);
|
||||
var targetHeight = hifi.dimensions.contentSpacing.y + mainTextContainer.height +
|
||||
var targetHeight = hifi.dimensions.contentSpacing.y + flavorText.height + mainTextContainer.height +
|
||||
4 * hifi.dimensions.contentSpacing.y + form.height;
|
||||
|
||||
if (additionalInformation.visible) {
|
||||
|
@ -106,14 +107,15 @@ Item {
|
|||
ShortcutText {
|
||||
id: mainTextContainer
|
||||
anchors {
|
||||
top: parent.top
|
||||
top: flavorText.bottom
|
||||
left: parent.left
|
||||
margins: 0
|
||||
topMargin: hifi.dimensions.contentSpacing.y
|
||||
topMargin: 1.5 * hifi.dimensions.contentSpacing.y
|
||||
}
|
||||
|
||||
visible: false
|
||||
text: qsTr("Username or password incorrect.")
|
||||
height: flavorText.height - 20
|
||||
wrapMode: Text.WordWrap
|
||||
color: hifi.colors.redAccent
|
||||
lineHeight: 1
|
||||
|
@ -128,7 +130,7 @@ Item {
|
|||
|
||||
anchors {
|
||||
top: mainTextContainer.bottom
|
||||
topMargin: 2 * hifi.dimensions.contentSpacing.y
|
||||
topMargin: 1.5 * hifi.dimensions.contentSpacing.y
|
||||
}
|
||||
spacing: 2 * hifi.dimensions.contentSpacing.y
|
||||
|
||||
|
@ -139,6 +141,7 @@ Item {
|
|||
focus: true
|
||||
placeholderText: "Username or Email"
|
||||
activeFocusOnPress: true
|
||||
onHeightChanged: d.resize(); onWidthChanged: d.resize();
|
||||
|
||||
ShortcutText {
|
||||
z: 10
|
||||
|
@ -172,7 +175,7 @@ Item {
|
|||
width: parent.width
|
||||
placeholderText: "Password"
|
||||
activeFocusOnPress: true
|
||||
echoMode: TextInput.Password
|
||||
echoMode: passwordFieldMouseArea.showPassword ? TextInput.Normal : TextInput.Password
|
||||
onHeightChanged: d.resize(); onWidthChanged: d.resize();
|
||||
|
||||
ShortcutText {
|
||||
|
@ -212,29 +215,28 @@ Item {
|
|||
|
||||
Image {
|
||||
id: showPasswordImage
|
||||
y: (passwordField.height - (passwordField.height * 16 / 23)) / 2
|
||||
width: passwordField.width - (passwordField.width - (((passwordField.height) * 31/23)))
|
||||
width: passwordField.height * 16 / 23
|
||||
height: passwordField.height * 16 / 23
|
||||
anchors {
|
||||
right: parent.right
|
||||
rightMargin: 3
|
||||
rightMargin: 8
|
||||
top: parent.top
|
||||
topMargin: passwordFieldMouseArea.showPassword ? 6 : 8
|
||||
bottom: parent.bottom
|
||||
bottomMargin: passwordFieldMouseArea.showPassword ? 5 : 8
|
||||
}
|
||||
source: passwordFieldMouseArea.showPassword ? "../../images/eyeClosed.svg" : "../../images/eyeOpen.svg"
|
||||
MouseArea {
|
||||
id: passwordFieldMouseArea
|
||||
anchors.fill: parent
|
||||
acceptedButtons: Qt.LeftButton
|
||||
property bool showPassword: false
|
||||
onClicked: {
|
||||
showPassword = !showPassword;
|
||||
}
|
||||
}
|
||||
source: "../../images/eyeOpen.svg"
|
||||
}
|
||||
|
||||
MouseArea {
|
||||
id: passwordFieldMouseArea
|
||||
anchors.fill: parent
|
||||
acceptedButtons: Qt.LeftButton
|
||||
property bool showPassword: false
|
||||
onClicked: {
|
||||
showPassword = !showPassword;
|
||||
passwordField.echoMode = showPassword ? TextInput.Normal : TextInput.Password;
|
||||
showPasswordImage.source = showPassword ? "../../images/eyeClosed.svg" : "../../images/eyeOpen.svg";
|
||||
showPasswordImage.height = showPassword ? passwordField.height : passwordField.height * 16 / 23;
|
||||
showPasswordImage.y = showPassword ? 0 : (passwordField.height - showPasswordImage.height) / 2;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Keys.onReturnPressed: linkAccountBody.login()
|
||||
|
@ -284,7 +286,7 @@ Item {
|
|||
anchors.verticalCenter: parent.verticalCenter
|
||||
width: 200
|
||||
|
||||
text: qsTr(loginDialog.isSteamRunning() ? "Link Account" : "Login")
|
||||
text: qsTr(loginDialog.isSteamRunning() ? "Link Account" : "Log in")
|
||||
color: hifi.buttons.blue
|
||||
|
||||
onClicked: linkAccountBody.login()
|
||||
|
@ -336,6 +338,7 @@ Item {
|
|||
|
||||
if (failAfterSignUp) {
|
||||
mainTextContainer.text = "Account created successfully."
|
||||
flavorText.visible = true
|
||||
mainTextContainer.visible = true
|
||||
}
|
||||
|
||||
|
@ -374,6 +377,7 @@ Item {
|
|||
UserActivityLogger.logAction("encourageLoginDialog", data);
|
||||
Settings.setValue("loginDialogPoppedUp", false);
|
||||
}
|
||||
flavorText.visible = true
|
||||
mainTextContainer.visible = true
|
||||
toggleLoading(false)
|
||||
}
|
||||
|
|
|
@ -34,7 +34,7 @@ Preference {
|
|||
left: parent.left
|
||||
right: parent.right
|
||||
}
|
||||
height: isFirstCheckBox ? hifi.dimensions.controlInterlineHeight : 0
|
||||
height: isFirstCheckBox && !preference.indented ? 16 : 2
|
||||
}
|
||||
|
||||
CheckBox {
|
||||
|
@ -54,6 +54,7 @@ Preference {
|
|||
left: parent.left
|
||||
right: parent.right
|
||||
bottom: parent.bottom
|
||||
leftMargin: preference.indented ? 20 : 0
|
||||
}
|
||||
text: root.label
|
||||
colorScheme: hifi.colorSchemes.dark
|
||||
|
|
|
@ -11,14 +11,27 @@
|
|||
import QtQuick 2.5
|
||||
|
||||
import "../../controls-uit"
|
||||
import "../../styles-uit"
|
||||
|
||||
Preference {
|
||||
id: root
|
||||
|
||||
|
||||
height: control.height + hifi.dimensions.controlInterlineHeight
|
||||
|
||||
property int value: 0
|
||||
|
||||
Component.onCompleted: {
|
||||
repeater.itemAt(preference.value).checked = true
|
||||
value = preference.value;
|
||||
repeater.itemAt(preference.value).checked = true;
|
||||
}
|
||||
|
||||
function updateValue() {
|
||||
for (var i = 0; i < repeater.count; i++) {
|
||||
if (repeater.itemAt(i).checked) {
|
||||
value = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function save() {
|
||||
|
@ -33,24 +46,36 @@ Preference {
|
|||
preference.save();
|
||||
}
|
||||
|
||||
Row {
|
||||
Column {
|
||||
id: control
|
||||
anchors {
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
bottom: parent.bottom
|
||||
}
|
||||
spacing: 5
|
||||
spacing: 3
|
||||
|
||||
RalewaySemiBold {
|
||||
id: heading
|
||||
size: hifi.fontSizes.inputLabel
|
||||
text: preference.heading
|
||||
color: hifi.colors.lightGrayText
|
||||
visible: text !== ""
|
||||
bottomPadding: 3
|
||||
}
|
||||
|
||||
Repeater {
|
||||
id: repeater
|
||||
model: preference.items.length
|
||||
delegate: RadioButton {
|
||||
text: preference.items[index]
|
||||
letterSpacing: 0
|
||||
anchors {
|
||||
verticalCenter: parent.verticalCenter
|
||||
left: parent.left
|
||||
}
|
||||
leftPadding: 0
|
||||
colorScheme: hifi.colorSchemes.dark
|
||||
onClicked: updateValue();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -138,11 +138,12 @@ Preference {
|
|||
break;
|
||||
|
||||
case Preference.PrimaryHand:
|
||||
checkBoxCount++;
|
||||
checkBoxCount = 0;
|
||||
builder = primaryHandBuilder;
|
||||
break;
|
||||
|
||||
case Preference.RadioButtons:
|
||||
checkBoxCount++;
|
||||
checkBoxCount = 0;
|
||||
builder = radioButtonsBuilder;
|
||||
break;
|
||||
};
|
||||
|
|
|
@ -48,7 +48,7 @@ Rectangle {
|
|||
HifiModels.PSFListModel {
|
||||
id: connectionsUserModel;
|
||||
http: http;
|
||||
endpoint: "/api/v1/users?filter=connections";
|
||||
endpoint: "/api/v1/users/connections";
|
||||
property var sortColumn: connectionsTable.getColumn(connectionsTable.sortIndicatorColumn);
|
||||
sortProperty: switch (sortColumn && sortColumn.role) {
|
||||
case 'placeName':
|
||||
|
|
|
@ -0,0 +1,43 @@
|
|||
import QtQuick 2.0
|
||||
|
||||
Item {
|
||||
property alias source: sourceImage.sourceItem
|
||||
property alias maskSource: sourceMask.sourceItem
|
||||
|
||||
anchors.fill: parent
|
||||
ShaderEffectSource {
|
||||
id: sourceMask
|
||||
smooth: true
|
||||
hideSource: true
|
||||
}
|
||||
ShaderEffectSource {
|
||||
id: sourceImage
|
||||
hideSource: true
|
||||
}
|
||||
|
||||
ShaderEffect {
|
||||
id: maskEffect
|
||||
anchors.fill: parent
|
||||
|
||||
property variant source: sourceImage
|
||||
property variant mask: sourceMask
|
||||
|
||||
fragmentShader: {
|
||||
"
|
||||
varying highp vec2 qt_TexCoord0;
|
||||
uniform lowp sampler2D source;
|
||||
uniform lowp sampler2D mask;
|
||||
void main() {
|
||||
|
||||
highp vec4 maskColor = texture2D(mask, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
|
||||
highp vec4 sourceColor = texture2D(source, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
|
||||
|
||||
if (maskColor.a > 0.0)
|
||||
gl_FragColor = sourceColor;
|
||||
else
|
||||
gl_FragColor = maskColor;
|
||||
}
|
||||
"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -100,6 +100,25 @@ Rectangle {
|
|||
wearablesModel.setProperty(wearableIndex, 'properties', wearableModelItemProperties);
|
||||
}
|
||||
|
||||
function entityHasAvatarJoints(entityID) {
|
||||
var hasAvatarJoint = false;
|
||||
|
||||
var props = Entities.getEntityProperties(entityID);
|
||||
var avatarJointsCount = MyAvatar.getJointNames().length;
|
||||
if (props && avatarJointsCount >= 0 ) {
|
||||
var entityJointNames = Entities.getJointNames(entityID);
|
||||
for (var index = 0; index < entityJointNames.length; index++) {
|
||||
var avatarJointIndex = MyAvatar.getJointIndex(entityJointNames[index]);
|
||||
if (avatarJointIndex >= 0) {
|
||||
hasAvatarJoint = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return hasAvatarJoint;
|
||||
}
|
||||
|
||||
function getCurrentWearable() {
|
||||
return wearablesCombobox.currentIndex !== -1 ? wearablesCombobox.model.get(wearablesCombobox.currentIndex) : null;
|
||||
}
|
||||
|
@ -109,6 +128,7 @@ Rectangle {
|
|||
var wearable = wearablesCombobox.model.get(i);
|
||||
if (wearable.id === entityID) {
|
||||
wearablesCombobox.currentIndex = i;
|
||||
softWearableTimer.restart();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -118,6 +138,7 @@ Rectangle {
|
|||
visible = false;
|
||||
adjustWearablesClosed(status, avatarName);
|
||||
}
|
||||
|
||||
|
||||
HifiConstants { id: hifi }
|
||||
|
||||
|
@ -130,6 +151,20 @@ Rectangle {
|
|||
hoverEnabled: true;
|
||||
}
|
||||
|
||||
Timer {
|
||||
id: softWearableTimer
|
||||
interval: 1000
|
||||
running: false
|
||||
repeat: true
|
||||
onTriggered: {
|
||||
var currentWearable = getCurrentWearable();
|
||||
var soft = currentWearable ? currentWearable.relayParentJoints : false;
|
||||
var softEnabled = currentWearable ? entityHasAvatarJoints(currentWearable.id) : false;
|
||||
isSoft.set(soft);
|
||||
isSoft.enabled = softEnabled;
|
||||
}
|
||||
}
|
||||
|
||||
Column {
|
||||
anchors.top: parent.top
|
||||
anchors.topMargin: 12
|
||||
|
@ -247,13 +282,13 @@ Rectangle {
|
|||
var rotation = currentWearable ? currentWearable.localRotationAngles : { x : 0, y : 0, z : 0 };
|
||||
var scale = currentWearable ? currentWearable.dimensions.x / currentWearable.naturalDimensions.x : 1.0;
|
||||
var joint = currentWearable ? currentWearable.parentJointIndex : -1;
|
||||
var soft = currentWearable ? currentWearable.relayParentJoints : false;
|
||||
softWearableTimer.restart();
|
||||
|
||||
positionVector.set(position);
|
||||
rotationVector.set(rotation);
|
||||
scalespinner.set(scale);
|
||||
jointsCombobox.set(joint);
|
||||
isSoft.set(soft);
|
||||
|
||||
|
||||
if (currentWearable) {
|
||||
wearableSelected(currentWearable.id);
|
||||
|
@ -314,7 +349,7 @@ Rectangle {
|
|||
}
|
||||
};
|
||||
|
||||
wearableUpdated(getCurrentWearable().id, jointIndex, properties);
|
||||
wearableUpdated(getCurrentWearable().id, wearablesCombobox.currentIndex, properties);
|
||||
}
|
||||
|
||||
onCurrentIndexChanged: {
|
||||
|
|
|
@ -24,19 +24,20 @@ Item {
|
|||
|
||||
fragmentShader: {
|
||||
"
|
||||
varying highp vec2 qt_TexCoord0;
|
||||
uniform lowp sampler2D source;
|
||||
uniform lowp sampler2D mask;
|
||||
void main() {
|
||||
|
||||
highp vec4 maskColor = texture2D(mask, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
|
||||
highp vec4 sourceColor = texture2D(source, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
|
||||
|
||||
if (maskColor.a > 0.0)
|
||||
gl_FragColor = sourceColor;
|
||||
else
|
||||
gl_FragColor = maskColor;
|
||||
}
|
||||
#version 410
|
||||
in vec2 qt_TexCoord0;
|
||||
out vec4 color;
|
||||
uniform sampler2D source;
|
||||
uniform sampler2D mask;
|
||||
void main()
|
||||
{
|
||||
vec4 maskColor = texture(mask, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
|
||||
vec4 sourceColor = texture(source, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
|
||||
if (maskColor.a > 0.0)
|
||||
color = sourceColor;
|
||||
else
|
||||
color = maskColor;
|
||||
}
|
||||
"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -552,6 +552,10 @@ Rectangle {
|
|||
// Alignment
|
||||
horizontalAlignment: Text.AlignLeft;
|
||||
verticalAlignment: Text.AlignVCenter;
|
||||
onLinkActivated: {
|
||||
// Only case is to go to the bank.
|
||||
sendToScript({method: 'gotoBank'});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1107,25 +1111,32 @@ Rectangle {
|
|||
}
|
||||
|
||||
function handleBuyAgainLogic() {
|
||||
// If you can buy this item again...
|
||||
if (canBuyAgain()) {
|
||||
// If you can't afford another copy of the item...
|
||||
if (root.balanceAfterPurchase < 0) {
|
||||
// If you already own the item...
|
||||
if (root.alreadyOwned) {
|
||||
buyText.text = "<b>Your Wallet does not have sufficient funds to purchase this item again.</b>";
|
||||
// Else if you don't already own the item...
|
||||
} else {
|
||||
buyText.text = "<b>Your Wallet does not have sufficient funds to purchase this item.</b>";
|
||||
}
|
||||
buyTextContainer.color = "#FFC3CD";
|
||||
buyTextContainer.border.color = "#F3808F";
|
||||
buyGlyph.text = hifi.glyphs.alert;
|
||||
buyGlyph.size = 54;
|
||||
// If you CAN afford another copy of the item...
|
||||
// General rules, implemented in various scattered places in this file:
|
||||
// 1. If you already own the item, a viewInMyPurchasesButton is visible,
|
||||
// and the buyButton is visible (and says "Buy it again") ONLY if it is a type you canBuyAgain.
|
||||
// 2. Separately,
|
||||
// a. If you don't have enough money to buy, the buyText becomes visible and tells you, and the buyButton is disabled.
|
||||
// b. Otherwise, if the item is a content set and you don't have rez permission, the buyText becomes visible and tells you so.
|
||||
|
||||
// If you can't afford another copy of the item...
|
||||
if (root.balanceAfterPurchase < 0) {
|
||||
// If you already own the item...
|
||||
if (!root.alreadyOwned) {
|
||||
buyText.text = "<b>Your Wallet does not have sufficient funds to purchase this item.</b>";
|
||||
// Else if you don't already own the item...
|
||||
} else if (canBuyAgain()) {
|
||||
buyText.text = "<b>Your Wallet does not have sufficient funds to purchase this item again.</b>";
|
||||
} else {
|
||||
handleContentSets();
|
||||
buyText.text = "<b>While you do not have sufficient funds to buy this, you already have this item.</b>"
|
||||
}
|
||||
buyText.text += " Visit <a href='#'>Bank of High Fidelity</a> to get more HFC."
|
||||
buyTextContainer.color = "#FFC3CD";
|
||||
buyTextContainer.border.color = "#F3808F";
|
||||
buyGlyph.text = hifi.glyphs.alert;
|
||||
buyGlyph.size = 54;
|
||||
// If you CAN afford another copy of the item...
|
||||
} else {
|
||||
handleContentSets();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -829,6 +829,7 @@ Rectangle {
|
|||
Commerce.getWalletAuthenticatedStatus(); // before writing security image, ensures that salt/account password is set.
|
||||
Commerce.chooseSecurityImage(securityImagePath);
|
||||
Commerce.generateKeyPair();
|
||||
followReferrer({ referrer: walletSetup.referrer });
|
||||
}
|
||||
|
||||
function addLeadingZero(n) {
|
||||
|
@ -836,7 +837,7 @@ Rectangle {
|
|||
}
|
||||
|
||||
function followReferrer(msg) {
|
||||
if (msg.referrer === '' || msg.referrer === 'marketplace cta') {
|
||||
if (msg.referrer === '') {
|
||||
root.activeView = "initialize";
|
||||
Commerce.getWalletStatus();
|
||||
} else if (msg.referrer === 'purchases') {
|
||||
|
|
|
@ -28,7 +28,7 @@ Item {
|
|||
property string activeView: "step_1";
|
||||
property string lastPage;
|
||||
property bool hasShownSecurityImageTip: false;
|
||||
property string referrer;
|
||||
property string referrer: '';
|
||||
property string keyFilePath;
|
||||
property date startingTimestamp;
|
||||
property string setupAttemptID;
|
||||
|
|
|
@ -122,6 +122,22 @@ Item {
|
|||
}
|
||||
}
|
||||
|
||||
// Runtime customization of preferences.
|
||||
var locomotionPreference = findPreference("VR Movement", "Teleporting only / Walking and teleporting");
|
||||
var flyingPreference = findPreference("VR Movement", "Jumping and flying");
|
||||
if (locomotionPreference && flyingPreference) {
|
||||
flyingPreference.visible = (locomotionPreference.value === 1);
|
||||
locomotionPreference.valueChanged.connect(function () {
|
||||
flyingPreference.visible = (locomotionPreference.value === 1);
|
||||
});
|
||||
}
|
||||
if (HMD.isHeadControllerAvailable("Oculus")) {
|
||||
var boundariesPreference = findPreference("VR Movement", "Show room boundaries while teleporting");
|
||||
if (boundariesPreference) {
|
||||
boundariesPreference.label = "Show room boundaries and sensors while teleporting";
|
||||
}
|
||||
}
|
||||
|
||||
if (sections.length) {
|
||||
// Default sections to expanded/collapsed as appropriate for dialog.
|
||||
if (sections.length === 1) {
|
||||
|
@ -234,4 +250,32 @@ Item {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
function findPreference(category, name) {
|
||||
var section = null;
|
||||
var preference = null;
|
||||
var i;
|
||||
|
||||
// Find category section.
|
||||
i = 0;
|
||||
while (!section && i < sections.length) {
|
||||
if (sections[i].name === category) {
|
||||
section = sections[i];
|
||||
}
|
||||
i++;
|
||||
}
|
||||
|
||||
// Find named preference.
|
||||
if (section) {
|
||||
i = 0;
|
||||
while (!preference && i < section.preferences.length) {
|
||||
if (section.preferences[i].preference && section.preferences[i].preference.name === name) {
|
||||
preference = section.preferences[i];
|
||||
}
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
return preference;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -153,11 +153,12 @@ Preference {
|
|||
break;
|
||||
|
||||
case Preference.PrimaryHand:
|
||||
checkBoxCount++;
|
||||
checkBoxCount = 0;
|
||||
builder = primaryHandBuilder;
|
||||
break;
|
||||
|
||||
case Preference.RadioButtons:
|
||||
checkBoxCount++;
|
||||
checkBoxCount = 0;
|
||||
builder = radioButtonsBuilder;
|
||||
break;
|
||||
};
|
||||
|
|
File diff suppressed because it is too large
|
@ -10,8 +10,11 @@
|
|||
//
|
||||
#include "AndroidHelper.h"
|
||||
#include <QDebug>
|
||||
#include <AccountManager.h>
|
||||
#include <AudioClient.h>
|
||||
#include <src/ui/LoginDialog.h>
|
||||
#include "Application.h"
|
||||
#include "Constants.h"
|
||||
|
||||
#if defined(qApp)
|
||||
#undef qApp
|
||||
|
@ -25,7 +28,7 @@ AndroidHelper::AndroidHelper() {
|
|||
AndroidHelper::~AndroidHelper() {
|
||||
}
|
||||
|
||||
void AndroidHelper::requestActivity(const QString &activityName, const bool backToScene, QList<QString> args) {
|
||||
void AndroidHelper::requestActivity(const QString &activityName, const bool backToScene, QMap<QString, QString> args) {
|
||||
emit androidActivityRequested(activityName, backToScene, args);
|
||||
}
|
||||
|
||||
|
@ -49,8 +52,10 @@ void AndroidHelper::performHapticFeedback(int duration) {
|
|||
emit hapticFeedbackRequested(duration);
|
||||
}
|
||||
|
||||
void AndroidHelper::showLoginDialog() {
|
||||
emit androidActivityRequested("Login", true);
|
||||
void AndroidHelper::showLoginDialog(QUrl url) {
|
||||
QMap<QString, QString> args;
|
||||
args["url"] = url.toString();
|
||||
emit androidActivityRequested("Login", true, args);
|
||||
}
|
||||
|
||||
void AndroidHelper::processURL(const QString &url) {
|
||||
|
@ -67,3 +72,75 @@ void AndroidHelper::notifyHeadsetOn(bool pluggedIn) {
|
|||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void AndroidHelper::signup(QString email, QString username, QString password) {
|
||||
JSONCallbackParameters callbackParams;
|
||||
callbackParams.callbackReceiver = this;
|
||||
callbackParams.jsonCallbackMethod = "signupCompleted";
|
||||
callbackParams.errorCallbackMethod = "signupFailed";
|
||||
|
||||
QJsonObject payload;
|
||||
|
||||
QJsonObject userObject;
|
||||
userObject.insert("email", email);
|
||||
userObject.insert("username", username);
|
||||
userObject.insert("password", password);
|
||||
|
||||
payload.insert("user", userObject);
|
||||
|
||||
auto accountManager = DependencyManager::get<AccountManager>();
|
||||
|
||||
accountManager->sendRequest(API_SIGNUP_PATH, AccountManagerAuth::None,
|
||||
QNetworkAccessManager::PostOperation, callbackParams,
|
||||
QJsonDocument(payload).toJson());
|
||||
}
|
||||
|
||||
void AndroidHelper::signupCompleted(QNetworkReply* reply) {
|
||||
emit handleSignupCompleted();
|
||||
}
|
||||
|
||||
QString AndroidHelper::errorStringFromAPIObject(const QJsonValue& apiObject) {
|
||||
if (apiObject.isArray()) {
|
||||
return apiObject.toArray()[0].toString();
|
||||
} else if (apiObject.isString()) {
|
||||
return apiObject.toString();
|
||||
} else {
|
||||
return "is invalid";
|
||||
}
|
||||
}
|
||||
|
||||
void AndroidHelper::signupFailed(QNetworkReply* reply) {
|
||||
// parse the returned JSON to see what the problem was
|
||||
auto jsonResponse = QJsonDocument::fromJson(reply->readAll());
|
||||
|
||||
static const QString RESPONSE_DATA_KEY = "data";
|
||||
|
||||
auto dataJsonValue = jsonResponse.object()[RESPONSE_DATA_KEY];
|
||||
|
||||
if (dataJsonValue.isObject()) {
|
||||
auto dataObject = dataJsonValue.toObject();
|
||||
|
||||
static const QString EMAIL_DATA_KEY = "email";
|
||||
static const QString USERNAME_DATA_KEY = "username";
|
||||
static const QString PASSWORD_DATA_KEY = "password";
|
||||
|
||||
QStringList errorStringList;
|
||||
|
||||
if (dataObject.contains(EMAIL_DATA_KEY)) {
|
||||
errorStringList.append(QString("Email %1.").arg(errorStringFromAPIObject(dataObject[EMAIL_DATA_KEY])));
|
||||
}
|
||||
|
||||
if (dataObject.contains(USERNAME_DATA_KEY)) {
|
||||
errorStringList.append(QString("Username %1.").arg(errorStringFromAPIObject(dataObject[USERNAME_DATA_KEY])));
|
||||
}
|
||||
|
||||
if (dataObject.contains(PASSWORD_DATA_KEY)) {
|
||||
errorStringList.append(QString("Password %1.").arg(errorStringFromAPIObject(dataObject[PASSWORD_DATA_KEY])));
|
||||
}
|
||||
|
||||
emit handleSignupFailed(errorStringList.join('\n'));
|
||||
} else {
|
||||
static const QString DEFAULT_SIGN_UP_FAILURE_MESSAGE = "There was an unknown error while creating your account. Please try again later.";
|
||||
emit handleSignupFailed(DEFAULT_SIGN_UP_FAILURE_MESSAGE);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -13,6 +13,11 @@
|
|||
#define hifi_Android_Helper_h
|
||||
|
||||
#include <QObject>
|
||||
#include <QMap>
|
||||
#include <QUrl>
|
||||
|
||||
#include <QNetworkReply>
|
||||
#include <QtCore/QEventLoop>
|
||||
|
||||
class AndroidHelper : public QObject {
|
||||
Q_OBJECT
|
||||
|
@ -21,7 +26,7 @@ public:
|
|||
static AndroidHelper instance;
|
||||
return instance;
|
||||
}
|
||||
void requestActivity(const QString &activityName, const bool backToScene, QList<QString> args = QList<QString>());
|
||||
void requestActivity(const QString &activityName, const bool backToScene, QMap<QString, QString> args = QMap<QString, QString>());
|
||||
void notifyLoadComplete();
|
||||
void notifyEnterForeground();
|
||||
void notifyBeforeEnterBackground();
|
||||
|
@ -34,11 +39,14 @@ public:
|
|||
AndroidHelper(AndroidHelper const&) = delete;
|
||||
void operator=(AndroidHelper const&) = delete;
|
||||
|
||||
public slots:
|
||||
void showLoginDialog();
|
||||
void signup(QString email, QString username, QString password);
|
||||
|
||||
public slots:
|
||||
void showLoginDialog(QUrl url);
|
||||
void signupCompleted(QNetworkReply* reply);
|
||||
void signupFailed(QNetworkReply* reply);
|
||||
signals:
|
||||
void androidActivityRequested(const QString &activityName, const bool backToScene, QList<QString> args = QList<QString>());
|
||||
void androidActivityRequested(const QString &activityName, const bool backToScene, QMap<QString, QString> args = QMap<QString, QString>());
|
||||
void qtAppLoadComplete();
|
||||
void enterForeground();
|
||||
void beforeEnterBackground();
|
||||
|
@ -46,9 +54,14 @@ signals:
|
|||
|
||||
void hapticFeedbackRequested(int duration);
|
||||
|
||||
void handleSignupCompleted();
|
||||
void handleSignupFailed(QString errorString);
|
||||
|
||||
private:
|
||||
AndroidHelper();
|
||||
~AndroidHelper();
|
||||
|
||||
QString errorStringFromAPIObject(const QJsonValue& apiObject);
|
||||
};
|
||||
|
||||
#endif
|
||||
|
|
|
@ -122,6 +122,7 @@
|
|||
#include <RecordingScriptingInterface.h>
|
||||
#include <UpdateSceneTask.h>
|
||||
#include <RenderViewTask.h>
|
||||
#include <render/EngineStats.h>
|
||||
#include <SecondaryCamera.h>
|
||||
#include <ResourceCache.h>
|
||||
#include <ResourceRequest.h>
|
||||
|
@ -215,7 +216,8 @@
|
|||
#include <raypick/LaserPointerScriptingInterface.h>
|
||||
#include <raypick/PickScriptingInterface.h>
|
||||
#include <raypick/PointerScriptingInterface.h>
|
||||
#include <raypick/MouseRayPick.h>
|
||||
#include <raypick/RayPick.h>
|
||||
#include <raypick/MouseTransformNode.h>
|
||||
|
||||
#include <FadeEffect.h>
|
||||
|
||||
|
@ -269,7 +271,7 @@ class RenderEventHandler : public QObject {
|
|||
public:
|
||||
RenderEventHandler() {
|
||||
// Transfer to a new thread
|
||||
moveToNewNamedThread(this, "RenderThread", [this](QThread* renderThread) {
|
||||
moveToNewNamedThread(this, "RenderThread", [](QThread* renderThread) {
|
||||
hifi::qt::addBlockingForbiddenThread("Render", renderThread);
|
||||
qApp->_lastTimeRendered.start();
|
||||
}, std::bind(&RenderEventHandler::initialize, this), QThread::HighestPriority);
|
||||
|
@ -364,7 +366,6 @@ static const int THROTTLED_SIM_FRAME_PERIOD_MS = MSECS_PER_SECOND / THROTTLED_SI
|
|||
|
||||
static const uint32_t INVALID_FRAME = UINT32_MAX;
|
||||
|
||||
static const float PHYSICS_READY_RANGE = 3.0f; // how far from avatar to check for entities that aren't ready for simulation
|
||||
static const float INITIAL_QUERY_RADIUS = 10.0f; // priority radius for entities before physics enabled
|
||||
|
||||
static const QString DESKTOP_LOCATION = QStandardPaths::writableLocation(QStandardPaths::DesktopLocation);
|
||||
|
@ -410,6 +411,10 @@ public:
|
|||
});
|
||||
}
|
||||
|
||||
void setMainThreadID(Qt::HANDLE threadID) {
|
||||
_mainThreadID = threadID;
|
||||
}
|
||||
|
||||
static void updateHeartbeat() {
|
||||
auto now = usecTimestampNow();
|
||||
auto elapsed = now - _heartbeat;
|
||||
|
@ -417,7 +422,9 @@ public:
|
|||
_heartbeat = now;
|
||||
}
|
||||
|
||||
static void deadlockDetectionCrash() {
|
||||
void deadlockDetectionCrash() {
|
||||
setCrashAnnotation("_mod_faulting_tid", std::to_string((uint64_t)_mainThreadID));
|
||||
setCrashAnnotation("deadlock", "1");
|
||||
uint32_t* crashTrigger = nullptr;
|
||||
*crashTrigger = 0xDEAD10CC;
|
||||
}
|
||||
|
@ -504,6 +511,8 @@ public:
|
|||
static ThreadSafeMovingAverage<int, HEARTBEAT_SAMPLES> _movingAverage;
|
||||
|
||||
bool _quit { false };
|
||||
|
||||
Qt::HANDLE _mainThreadID = nullptr;
|
||||
};
|
||||
|
||||
std::atomic<bool> DeadlockWatchdogThread::_paused;
|
||||
|
@ -963,7 +972,7 @@ Q_GUI_EXPORT void qt_gl_set_global_share_context(QOpenGLContext *context);
|
|||
|
||||
Setting::Handle<int> sessionRunTime{ "sessionRunTime", 0 };
|
||||
|
||||
const float DEFAULT_HMD_TABLET_SCALE_PERCENT = 70.0f;
|
||||
const float DEFAULT_HMD_TABLET_SCALE_PERCENT = 60.0f;
|
||||
const float DEFAULT_DESKTOP_TABLET_SCALE_PERCENT = 75.0f;
|
||||
const bool DEFAULT_DESKTOP_TABLET_BECOMES_TOOLBAR = true;
|
||||
const bool DEFAULT_HMD_TABLET_BECOMES_TOOLBAR = false;
|
||||
|
@ -976,7 +985,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
_window(new MainWindow(desktop())),
|
||||
_sessionRunTimer(startupTimer),
|
||||
_previousSessionCrashed(setupEssentials(argc, argv, runningMarkerExisted)),
|
||||
_undoStackScriptingInterface(&_undoStack),
|
||||
_entitySimulation(new PhysicalEntitySimulation()),
|
||||
_physicsEngine(new PhysicsEngine(Vectors::ZERO)),
|
||||
_entityClipboard(new EntityTree()),
|
||||
|
@ -996,7 +1004,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
_enableProcessOctreeThread(true),
|
||||
_lastNackTime(usecTimestampNow()),
|
||||
_lastSendDownstreamAudioStats(usecTimestampNow()),
|
||||
_aboutToQuit(false),
|
||||
_notifiedPacketVersionMismatchThisDomain(false),
|
||||
_maxOctreePPS(maxOctreePacketsPerSecond.get()),
|
||||
_lastFaceTrackerUpdate(0),
|
||||
|
@ -1055,6 +1062,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
|
||||
auto controllerScriptingInterface = DependencyManager::get<controller::ScriptingInterface>().data();
|
||||
_controllerScriptingInterface = dynamic_cast<ControllerScriptingInterface*>(controllerScriptingInterface);
|
||||
connect(PluginManager::getInstance().data(), &PluginManager::inputDeviceRunningChanged,
|
||||
controllerScriptingInterface, &controller::ScriptingInterface::updateRunningInputDevices);
|
||||
|
||||
_entityClipboard->createRootElement();
|
||||
|
||||
|
@ -1094,7 +1103,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
}
|
||||
// Set up a watchdog thread to intentionally crash the application on deadlocks
|
||||
if (!DISABLE_WATCHDOG) {
|
||||
(new DeadlockWatchdogThread())->start();
|
||||
auto deadlockWatchdogThread = new DeadlockWatchdogThread();
|
||||
deadlockWatchdogThread->setMainThreadID(QThread::currentThreadId());
|
||||
deadlockWatchdogThread->start();
|
||||
}
|
||||
|
||||
// Set File Logger Session UUID
|
||||
|
@ -1236,7 +1247,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
auto dialogsManager = DependencyManager::get<DialogsManager>();
|
||||
#if defined(Q_OS_ANDROID)
|
||||
connect(accountManager.data(), &AccountManager::authRequired, this, []() {
|
||||
AndroidHelper::instance().showLoginDialog();
|
||||
auto addressManager = DependencyManager::get<AddressManager>();
|
||||
AndroidHelper::instance().showLoginDialog(addressManager->currentAddress());
|
||||
});
|
||||
#else
|
||||
connect(accountManager.data(), &AccountManager::authRequired, dialogsManager.data(), &DialogsManager::showLoginDialog);
|
||||
|
@ -1755,7 +1767,13 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
// we can unlock the desktop repositioning code, since all the positions will be
|
||||
// relative to the desktop size for this plugin
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
offscreenUi->getDesktop()->setProperty("repositionLocked", false);
|
||||
connect(offscreenUi.data(), &OffscreenUi::desktopReady, []() {
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
auto desktop = offscreenUi->getDesktop();
|
||||
if (desktop) {
|
||||
desktop->setProperty("repositionLocked", false);
|
||||
}
|
||||
});
|
||||
|
||||
// Make sure we don't time out during slow operations at startup
|
||||
updateHeartbeat();
|
||||
|
@ -2269,8 +2287,13 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
});
|
||||
|
||||
// Setup the mouse ray pick and related operators
|
||||
DependencyManager::get<EntityTreeRenderer>()->setMouseRayPickID(DependencyManager::get<PickManager>()->addPick(PickQuery::Ray, std::make_shared<MouseRayPick>(
|
||||
PickFilter(PickScriptingInterface::PICK_ENTITIES() | PickScriptingInterface::PICK_INCLUDE_NONCOLLIDABLE()), 0.0f, true)));
|
||||
{
|
||||
auto mouseRayPick = std::make_shared<RayPick>(Vectors::ZERO, Vectors::UP, PickFilter(PickScriptingInterface::PICK_ENTITIES() | PickScriptingInterface::PICK_INCLUDE_NONCOLLIDABLE()), 0.0f, true);
|
||||
mouseRayPick->parentTransform = std::make_shared<MouseTransformNode>();
|
||||
mouseRayPick->setJointState(PickQuery::JOINT_STATE_MOUSE);
|
||||
auto mouseRayPickID = DependencyManager::get<PickManager>()->addPick(PickQuery::Ray, mouseRayPick);
|
||||
DependencyManager::get<EntityTreeRenderer>()->setMouseRayPickID(mouseRayPickID);
|
||||
}
|
||||
DependencyManager::get<EntityTreeRenderer>()->setMouseRayPickResultOperator([](unsigned int rayPickID) {
|
||||
RayToEntityIntersectionResult entityResult;
|
||||
entityResult.intersects = false;
|
||||
|
@ -2309,7 +2332,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
QTimer* checkLoginTimer = new QTimer(this);
|
||||
checkLoginTimer->setInterval(CHECK_LOGIN_TIMER);
|
||||
checkLoginTimer->setSingleShot(true);
|
||||
connect(checkLoginTimer, &QTimer::timeout, this, [this]() {
|
||||
connect(checkLoginTimer, &QTimer::timeout, this, []() {
|
||||
auto accountManager = DependencyManager::get<AccountManager>();
|
||||
auto dialogsManager = DependencyManager::get<DialogsManager>();
|
||||
if (!accountManager->isLoggedIn()) {
|
||||
|
@ -3089,7 +3112,6 @@ void Application::onDesktopRootContextCreated(QQmlContext* surfaceContext) {
|
|||
surfaceContext->setContextProperty("DialogsManager", _dialogsManagerScriptingInterface);
|
||||
surfaceContext->setContextProperty("FaceTracker", DependencyManager::get<DdeFaceTracker>().data());
|
||||
surfaceContext->setContextProperty("AvatarManager", DependencyManager::get<AvatarManager>().data());
|
||||
surfaceContext->setContextProperty("UndoStack", &_undoStackScriptingInterface);
|
||||
surfaceContext->setContextProperty("LODManager", DependencyManager::get<LODManager>().data());
|
||||
surfaceContext->setContextProperty("HMD", DependencyManager::get<HMDScriptingInterface>().data());
|
||||
surfaceContext->setContextProperty("Scene", DependencyManager::get<SceneScriptingInterface>().data());
|
||||
|
@ -4673,8 +4695,14 @@ void Application::idle() {
|
|||
|
||||
checkChangeCursor();
|
||||
|
||||
Stats::getInstance()->updateStats();
|
||||
AnimStats::getInstance()->updateStats();
|
||||
auto stats = Stats::getInstance();
|
||||
if (stats) {
|
||||
stats->updateStats();
|
||||
}
|
||||
auto animStats = AnimStats::getInstance();
|
||||
if (animStats) {
|
||||
animStats->updateStats();
|
||||
}
|
||||
|
||||
// Normally we check PipelineWarnings, but since idle will often take more than 10ms we only show these idle timing
|
||||
// details if we're in ExtraDebugging mode. However, the ::update() and its subcomponents will show their timing
|
||||
|
@ -5355,8 +5383,8 @@ void Application::resetPhysicsReadyInformation() {
|
|||
// collision information of nearby entities to make running bullet be safe.
|
||||
_fullSceneReceivedCounter = 0;
|
||||
_fullSceneCounterAtLastPhysicsCheck = 0;
|
||||
_nearbyEntitiesCountAtLastPhysicsCheck = 0;
|
||||
_nearbyEntitiesStabilityCount = 0;
|
||||
_gpuTextureMemSizeStabilityCount = 0;
|
||||
_gpuTextureMemSizeAtLastCheck = 0;
|
||||
_physicsEnabled = false;
|
||||
_octreeProcessor.startEntitySequence();
|
||||
}
|
||||
|
@ -5595,18 +5623,21 @@ void Application::update(float deltaTime) {
|
|||
// for nearby entities before starting bullet up.
|
||||
quint64 now = usecTimestampNow();
|
||||
if (isServerlessMode() || _octreeProcessor.isLoadSequenceComplete()) {
|
||||
// we've received a new full-scene octree stats packet, or it's been long enough to try again anyway
|
||||
_lastPhysicsCheckTime = now;
|
||||
_fullSceneCounterAtLastPhysicsCheck = _fullSceneReceivedCounter;
|
||||
_lastQueriedViews.clear(); // Force new view.
|
||||
bool enableInterstitial = DependencyManager::get<NodeList>()->getDomainHandler().getInterstitialModeEnabled();
|
||||
if (gpuTextureMemSizeStable() || !enableInterstitial) {
|
||||
// we've received a new full-scene octree stats packet, or it's been long enough to try again anyway
|
||||
_lastPhysicsCheckTime = now;
|
||||
_fullSceneCounterAtLastPhysicsCheck = _fullSceneReceivedCounter;
|
||||
_lastQueriedViews.clear(); // Force new view.
|
||||
|
||||
// process octree stats packets are sent in between full sends of a scene (this isn't currently true).
|
||||
// We keep physics disabled until we've received a full scene and everything near the avatar in that
|
||||
// scene is ready to compute its collision shape.
|
||||
if (getMyAvatar()->isReadyForPhysics()) {
|
||||
_physicsEnabled = true;
|
||||
setIsInterstitialMode(false);
|
||||
getMyAvatar()->updateMotionBehaviorFromMenu();
|
||||
// process octree stats packets are sent in between full sends of a scene (this isn't currently true).
|
||||
// We keep physics disabled until we've received a full scene and everything near the avatar in that
|
||||
// scene is ready to compute its collision shape.
|
||||
if (getMyAvatar()->isReadyForPhysics()) {
|
||||
_physicsEnabled = true;
|
||||
setIsInterstitialMode(false);
|
||||
getMyAvatar()->updateMotionBehaviorFromMenu();
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (domainLoadingInProgress) {
|
||||
|
@ -5893,6 +5924,10 @@ void Application::update(float deltaTime) {
|
|||
// update the rendering without any simulation
|
||||
getEntities()->update(false);
|
||||
}
|
||||
// remove recently dead avatarEntities
|
||||
SetOfEntities deadAvatarEntities;
|
||||
_entitySimulation->takeDeadAvatarEntities(deadAvatarEntities);
|
||||
avatarManager->removeDeadAvatarEntities(deadAvatarEntities);
|
||||
}
|
||||
|
||||
// AvatarManager update
|
||||
|
@ -6217,6 +6252,8 @@ int Application::sendNackPackets() {
|
|||
missingSequenceNumbers = sequenceNumberStats.getMissingSet();
|
||||
});
|
||||
|
||||
_isMissingSequenceNumbers = (missingSequenceNumbers.size() != 0);
|
||||
|
||||
// construct nack packet(s) for this node
|
||||
foreach(const OCTREE_PACKET_SEQUENCE& missingNumber, missingSequenceNumbers) {
|
||||
nackPacketList->writePrimitive(missingNumber);
|
||||
|
@ -6243,9 +6280,19 @@ void Application::queryOctree(NodeType_t serverType, PacketType packetType) {
|
|||
const bool isModifiedQuery = !_physicsEnabled;
|
||||
if (isModifiedQuery) {
|
||||
// Create modified view that is a simple sphere.
|
||||
bool interstitialModeEnabled = DependencyManager::get<NodeList>()->getDomainHandler().getInterstitialModeEnabled();
|
||||
|
||||
ConicalViewFrustum sphericalView;
|
||||
sphericalView.setSimpleRadius(INITIAL_QUERY_RADIUS);
|
||||
_octreeQuery.setConicalViews({ sphericalView });
|
||||
|
||||
if (interstitialModeEnabled) {
|
||||
ConicalViewFrustum farView;
|
||||
farView.set(_viewFrustum);
|
||||
_octreeQuery.setConicalViews({ sphericalView, farView });
|
||||
} else {
|
||||
_octreeQuery.setConicalViews({ sphericalView });
|
||||
}
|
||||
|
||||
_octreeQuery.setOctreeSizeScale(DEFAULT_OCTREE_SIZE_SCALE);
|
||||
static constexpr float MIN_LOD_ADJUST = -20.0f;
|
||||
_octreeQuery.setBoundaryLevelAdjust(MIN_LOD_ADJUST);
|
||||
|
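Editor's note: while physics is still disabled, the hunk above swaps the normal view for a small query sphere, and keeps the full frustum as a second view only when interstitial mode is on. A hedged sketch of that selection (plain structs, not the real ConicalViewFrustum/OctreeQuery types):

    #include <vector>

    struct ViewSketch { float simpleRadius = 0.0f; bool isFullFrustum = false; };

    // Sketch of the modified-query branch in Application::queryOctree().
    // initialQueryRadius stands in for INITIAL_QUERY_RADIUS in the real code.
    std::vector<ViewSketch> buildModifiedViews(bool interstitialEnabled, float initialQueryRadius) {
        ViewSketch sphericalView;
        sphericalView.simpleRadius = initialQueryRadius;   // small sphere around the avatar

        std::vector<ViewSketch> views { sphericalView };
        if (interstitialEnabled) {
            ViewSketch farView;
            farView.isFullFrustum = true;   // keep streaming the normal frustum behind the interstitial
            views.push_back(farView);
        }
        return views;
    }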
@ -6557,69 +6604,23 @@ void Application::trackIncomingOctreePacket(ReceivedMessage& message, SharedNode
|
|||
}
|
||||
}
|
||||
|
||||
bool Application::nearbyEntitiesAreReadyForPhysics() {
|
||||
// this is used to avoid the following scenario:
|
||||
// A table has some items sitting on top of it. The items are at rest, meaning they aren't active in bullet.
|
||||
// Someone logs in close to the table. They receive information about the items on the table before they
|
||||
// receive information about the table. The items are very close to the avatar's capsule, so they become
|
||||
// activated in bullet. This causes them to fall to the floor, because the table's shape isn't yet in bullet.
|
||||
EntityTreePointer entityTree = getEntities()->getTree();
|
||||
if (!entityTree) {
|
||||
return false;
|
||||
}
|
||||
bool Application::gpuTextureMemSizeStable() {
|
||||
auto renderConfig = qApp->getRenderEngine()->getConfiguration();
|
||||
auto renderStats = renderConfig->getConfig<render::EngineStats>("Stats");
|
||||
|
||||
// We don't want to use EntityTree::findEntities(AABox, ...) method because that scan will snarf parented entities
|
||||
// whose bounding boxes cannot be computed (it is too loose for our purposes here). Instead we manufacture
|
||||
// custom filters and use the general-purpose EntityTree::findEntities(filter, ...)
|
||||
QVector<EntityItemPointer> entities;
|
||||
AABox avatarBox(getMyAvatar()->getWorldPosition() - glm::vec3(PHYSICS_READY_RANGE), glm::vec3(2 * PHYSICS_READY_RANGE));
|
||||
// create two functions that use avatarBox (entityScan and elementScan), the second calls the first
|
||||
std::function<bool (EntityItemPointer&)> entityScan = [=](EntityItemPointer& entity) {
|
||||
if (entity->shouldBePhysical()) {
|
||||
bool success = false;
|
||||
AABox entityBox = entity->getAABox(success);
|
||||
// important: bail for entities that cannot supply a valid AABox
|
||||
return success && avatarBox.touches(entityBox);
|
||||
}
|
||||
return false;
|
||||
};
|
||||
std::function<bool(const OctreeElementPointer&, void*)> elementScan = [&](const OctreeElementPointer& element, void* unused) {
|
||||
if (element->getAACube().touches(avatarBox)) {
|
||||
EntityTreeElementPointer entityTreeElement = std::static_pointer_cast<EntityTreeElement>(element);
|
||||
entityTreeElement->getEntities(entityScan, entities);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
qint64 textureResourceGPUMemSize = renderStats->textureResourceGPUMemSize;
|
||||
qint64 texturePopulatedGPUMemSize = renderStats->textureResourcePopulatedGPUMemSize;
|
||||
qint64 textureTransferSize = renderStats->texturePendingGPUTransferSize;
|
||||
|
||||
entityTree->withReadLock([&] {
|
||||
// Pass the second function to the general-purpose EntityTree::findEntities()
|
||||
// which will traverse the tree, apply the two filter functions (to element, then to entities)
|
||||
// as it traverses. The end result will be a list of entities that match.
|
||||
entityTree->findEntities(elementScan, entities);
|
||||
});
|
||||
|
||||
uint32_t nearbyCount = entities.size();
|
||||
if (nearbyCount == _nearbyEntitiesCountAtLastPhysicsCheck) {
|
||||
_nearbyEntitiesStabilityCount++;
|
||||
if (_gpuTextureMemSizeAtLastCheck == textureResourceGPUMemSize) {
|
||||
_gpuTextureMemSizeStabilityCount++;
|
||||
} else {
|
||||
_nearbyEntitiesStabilityCount = 0;
|
||||
_gpuTextureMemSizeStabilityCount = 0;
|
||||
}
|
||||
_nearbyEntitiesCountAtLastPhysicsCheck = nearbyCount;
|
||||
_gpuTextureMemSizeAtLastCheck = textureResourceGPUMemSize;
|
||||
|
||||
const uint32_t MINIMUM_NEARBY_ENTITIES_STABILITY_COUNT = 3;
|
||||
if (_nearbyEntitiesStabilityCount >= MINIMUM_NEARBY_ENTITIES_STABILITY_COUNT) {
|
||||
// We've seen the same number of nearby entities for several stats packets in a row. assume we've got all
|
||||
// the local entities.
|
||||
bool result = true;
|
||||
foreach (EntityItemPointer entity, entities) {
|
||||
if (entity->shouldBePhysical() && !entity->isReadyToComputeShape()) {
|
||||
HIFI_FCDEBUG(interfaceapp(), "Physics disabled until entity loads: " << entity->getID() << entity->getName());
|
||||
// don't break here because we want all the relevant entities to start their downloads
|
||||
result = false;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
if (_gpuTextureMemSizeStabilityCount >= _minimumGPUTextureMemSizeStabilityCount) {
|
||||
return (textureResourceGPUMemSize == texturePopulatedGPUMemSize) && (textureTransferSize == 0);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
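Editor's note: nearbyEntitiesAreReadyForPhysics() is replaced above by a GPU-texture stability check. A sketch of the new condition with assumed field names (the real values come from the render engine's "Stats" config):

    #include <cstdint>

    struct TextureStatsSample {
        int64_t resourceGPUMemSize;    // bytes allocated for textures on the GPU
        int64_t populatedGPUMemSize;   // bytes actually uploaded so far
        int64_t pendingTransferSize;   // bytes still queued for upload
    };

    // Sketch of Application::gpuTextureMemSizeStable(): the allocated size must
    // stop changing for a minimum number of consecutive checks (30 by default,
    // per _minimumGPUTextureMemSizeStabilityCount), every resident texture must
    // be fully populated, and nothing may still be pending transfer.
    struct TextureStabilityTracker {
        int64_t lastSize = 0;
        int stabilityCount = 0;
        int minimumStabilityCount = 30;

        bool isStable(const TextureStatsSample& s) {
            stabilityCount = (s.resourceGPUMemSize == lastSize) ? stabilityCount + 1 : 0;
            lastSize = s.resourceGPUMemSize;
            if (stabilityCount < minimumStabilityCount) {
                return false;
            }
            return s.resourceGPUMemSize == s.populatedGPUMemSize && s.pendingTransferSize == 0;
        }
    };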
@ -6755,8 +6756,6 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEnginePointe
|
|||
|
||||
scriptEngine->registerGlobalObject("AvatarManager", DependencyManager::get<AvatarManager>().data());
|
||||
|
||||
scriptEngine->registerGlobalObject("UndoStack", &_undoStackScriptingInterface);
|
||||
|
||||
scriptEngine->registerGlobalObject("LODManager", DependencyManager::get<LODManager>().data());
|
||||
|
||||
scriptEngine->registerGlobalObject("Paths", DependencyManager::get<PathUtils>().data());
|
||||
|
|
|
@ -23,7 +23,6 @@
|
|||
#include <QtGui/QImage>
|
||||
|
||||
#include <QtWidgets/QApplication>
|
||||
#include <QtWidgets/QUndoStack>
|
||||
|
||||
#include <ThreadHelpers.h>
|
||||
#include <AbstractScriptingServicesInterface.h>
|
||||
|
@ -70,7 +69,6 @@
|
|||
#include "ui/OctreeStatsDialog.h"
|
||||
#include "ui/OverlayConductor.h"
|
||||
#include "ui/overlays/Overlays.h"
|
||||
#include "UndoStackScriptingInterface.h"
|
||||
|
||||
#include "workload/GameWorkload.h"
|
||||
|
||||
|
@ -185,11 +183,12 @@ public:
|
|||
// passes, mirror window passes, etc
|
||||
void copyDisplayViewFrustum(ViewFrustum& viewOut) const;
|
||||
|
||||
bool isMissingSequenceNumbers() { return _isMissingSequenceNumbers; }
|
||||
|
||||
const ConicalViewFrustums& getConicalViews() const override { return _conicalViews; }
|
||||
|
||||
const OctreePacketProcessor& getOctreePacketProcessor() const { return _octreeProcessor; }
|
||||
QSharedPointer<EntityTreeRenderer> getEntities() const { return DependencyManager::get<EntityTreeRenderer>(); }
|
||||
QUndoStack* getUndoStack() { return &_undoStack; }
|
||||
MainWindow* getWindow() const { return _window; }
|
||||
EntityTreePointer getEntityClipboard() const { return _entityClipboard; }
|
||||
EntityEditPacketSender* getEntityEditPacketSender() { return &_entityEditSender; }
|
||||
|
@ -233,6 +232,8 @@ public:
|
|||
float getSettingConstrainToolbarPosition() { return _constrainToolbarPosition.get(); }
|
||||
void setSettingConstrainToolbarPosition(bool setting);
|
||||
|
||||
Q_INVOKABLE void setMinimumGPUTextureMemStabilityCount(int stabilityCount) { _minimumGPUTextureMemSizeStabilityCount = stabilityCount; }
|
||||
|
||||
NodeToOctreeSceneStats* getOcteeSceneStats() { return &_octreeServerSceneStats; }
|
||||
|
||||
virtual controller::ScriptingInterface* getControllerScriptingInterface() { return _controllerScriptingInterface; }
|
||||
|
@ -528,7 +529,7 @@ private:
|
|||
bool importFromZIP(const QString& filePath);
|
||||
bool importImage(const QString& urlString);
|
||||
|
||||
bool nearbyEntitiesAreReadyForPhysics();
|
||||
bool gpuTextureMemSizeStable();
|
||||
int processOctreeStats(ReceivedMessage& message, SharedNodePointer sendingNode);
|
||||
void trackIncomingOctreePacket(ReceivedMessage& message, SharedNodePointer sendingNode, bool wasStatsPacket);
|
||||
|
||||
|
@ -560,6 +561,8 @@ private:
|
|||
MainWindow* _window;
|
||||
QElapsedTimer& _sessionRunTimer;
|
||||
|
||||
bool _aboutToQuit { false };
|
||||
|
||||
bool _previousSessionCrashed;
|
||||
|
||||
DisplayPluginPointer _displayPlugin;
|
||||
|
@ -569,9 +572,6 @@ private:
|
|||
|
||||
bool _activatingDisplayPlugin { false };
|
||||
|
||||
QUndoStack _undoStack;
|
||||
UndoStackScriptingInterface _undoStackScriptingInterface;
|
||||
|
||||
uint32_t _renderFrameCount { 0 };
|
||||
|
||||
// Frame Rate Measurement
|
||||
|
@ -585,6 +585,8 @@ private:
|
|||
QElapsedTimer _lastTimeUpdated;
|
||||
QElapsedTimer _lastTimeRendered;
|
||||
|
||||
int _minimumGPUTextureMemSizeStabilityCount { 30 };
|
||||
|
||||
ShapeManager _shapeManager;
|
||||
PhysicalEntitySimulationPointer _entitySimulation;
|
||||
PhysicsEnginePointer _physicsEngine;
|
||||
|
@ -651,8 +653,6 @@ private:
|
|||
quint64 _lastNackTime;
|
||||
quint64 _lastSendDownstreamAudioStats;
|
||||
|
||||
bool _aboutToQuit;
|
||||
|
||||
bool _notifiedPacketVersionMismatchThisDomain;
|
||||
|
||||
ConditionalGuard _settingsGuard;
|
||||
|
@ -715,6 +715,8 @@ private:
|
|||
|
||||
bool _fakedMouseEvent { false };
|
||||
|
||||
bool _isMissingSequenceNumbers { false };
|
||||
|
||||
void checkChangeCursor();
|
||||
mutable QMutex _changeCursorLock { QMutex::Recursive };
|
||||
Qt::CursorShape _desiredCursor{ Qt::BlankCursor };
|
||||
|
@ -725,8 +727,10 @@ private:
|
|||
|
||||
std::atomic<uint32_t> _fullSceneReceivedCounter { 0 }; // how many times have we received a full-scene octree stats packet
|
||||
uint32_t _fullSceneCounterAtLastPhysicsCheck { 0 }; // _fullSceneReceivedCounter last time we checked physics ready
|
||||
uint32_t _nearbyEntitiesCountAtLastPhysicsCheck { 0 }; // how many in-range entities last time we checked physics ready
|
||||
uint32_t _nearbyEntitiesStabilityCount { 0 }; // how many times has _nearbyEntitiesCountAtLastPhysicsCheck been the same
|
||||
|
||||
qint64 _gpuTextureMemSizeStabilityCount { 0 };
|
||||
qint64 _gpuTextureMemSizeAtLastCheck { 0 };
|
||||
|
||||
quint64 _lastPhysicsCheckTime { usecTimestampNow() }; // when did we last check to see if physics was ready
|
||||
|
||||
bool _keyboardDeviceHasFocus { true };
|
||||
|
|
|
@ -157,7 +157,10 @@ void Application::paintGL() {
|
|||
renderArgs._context->enableStereo(false);
|
||||
|
||||
{
|
||||
Stats::getInstance()->setRenderDetails(renderArgs._details);
|
||||
auto stats = Stats::getInstance();
|
||||
if (stats) {
|
||||
stats->setRenderDetails(renderArgs._details);
|
||||
}
|
||||
}
|
||||
|
||||
uint64_t lastPaintDuration = usecTimestampNow() - lastPaintBegin;
|
||||
|
|
15
interface/src/Constants.h
Normal file
@ -0,0 +1,15 @@
//
// Constants.h
// interface
//
// Created by Gabriel Calero on 9/28/18.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#pragma once
#include <QString>

static const QString API_SIGNUP_PATH = "api/v1/users";
|
|
@ -90,19 +90,6 @@ Menu::Menu() {
|
|||
// Edit menu ----------------------------------
|
||||
MenuWrapper* editMenu = addMenu("Edit");
|
||||
|
||||
// Edit > Undo
|
||||
QUndoStack* undoStack = qApp->getUndoStack();
|
||||
QAction* undoAction = undoStack->createUndoAction(editMenu);
|
||||
undoAction->setShortcut(Qt::CTRL | Qt::Key_Z);
|
||||
addActionToQMenuAndActionHash(editMenu, undoAction);
|
||||
|
||||
// Edit > Redo
|
||||
QAction* redoAction = undoStack->createRedoAction(editMenu);
|
||||
redoAction->setShortcut(Qt::CTRL | Qt::SHIFT | Qt::Key_Z);
|
||||
addActionToQMenuAndActionHash(editMenu, redoAction);
|
||||
|
||||
editMenu->addSeparator();
|
||||
|
||||
// Edit > Cut
|
||||
auto cutAction = addActionToQMenuAndActionHash(editMenu, "Cut", QKeySequence::Cut);
|
||||
connect(cutAction, &QAction::triggered, [] {
|
||||
|
|
|
@ -48,6 +48,10 @@ AvatarActionHold::~AvatarActionHold() {
|
|||
myAvatar->removeHoldAction(this);
|
||||
}
|
||||
}
|
||||
auto ownerEntity = _ownerEntity.lock();
|
||||
if (ownerEntity) {
|
||||
ownerEntity->setTransitingWithAvatar(false);
|
||||
}
|
||||
|
||||
#if WANT_DEBUG
|
||||
qDebug() << "AvatarActionHold::~AvatarActionHold" << (void*)this;
|
||||
|
@ -131,6 +135,15 @@ bool AvatarActionHold::getTarget(float deltaTimeStep, glm::quat& rotation, glm::
|
|||
glm::vec3 palmPosition;
|
||||
glm::quat palmRotation;
|
||||
|
||||
bool isTransitingWithAvatar = holdingAvatar->getTransit()->isTransiting();
|
||||
if (isTransitingWithAvatar != _isTransitingWithAvatar) {
|
||||
_isTransitingWithAvatar = isTransitingWithAvatar;
|
||||
auto ownerEntity = _ownerEntity.lock();
|
||||
if (ownerEntity) {
|
||||
ownerEntity->setTransitingWithAvatar(_isTransitingWithAvatar);
|
||||
}
|
||||
}
|
||||
|
||||
if (holdingAvatar->isMyAvatar()) {
|
||||
std::shared_ptr<MyAvatar> myAvatar = avatarManager->getMyAvatar();
|
||||
|
||||
|
@ -404,11 +417,14 @@ bool AvatarActionHold::updateArguments(QVariantMap arguments) {
|
|||
_kinematicSetVelocity = kinematicSetVelocity;
|
||||
_ignoreIK = ignoreIK;
|
||||
_active = true;
|
||||
|
||||
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
|
||||
auto ownerEntity = _ownerEntity.lock();
|
||||
if (ownerEntity) {
|
||||
ownerEntity->setDynamicDataDirty(true);
|
||||
ownerEntity->setDynamicDataNeedsTransmit(true);
|
||||
ownerEntity->setDynamicDataNeedsTransmit(true);
|
||||
ownerEntity->setTransitingWithAvatar(myAvatar->getTransit()->isTransiting());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
|
|
@ -59,6 +59,8 @@ private:
|
|||
bool _kinematicSetVelocity { false };
|
||||
bool _previousSet { false };
|
||||
bool _ignoreIK { false };
|
||||
bool _isTransitingWithAvatar { false };
|
||||
|
||||
glm::vec3 _previousPositionalTarget;
|
||||
glm::quat _previousRotationalTarget;
|
||||
|
||||
|
|
|
@ -78,6 +78,15 @@ AvatarManager::AvatarManager(QObject* parent) :
|
|||
removeAvatar(nodeID, KillAvatarReason::AvatarIgnored);
|
||||
}
|
||||
});
|
||||
|
||||
const float AVATAR_TRANSIT_TRIGGER_DISTANCE = 1.0f;
|
||||
const int AVATAR_TRANSIT_FRAME_COUNT = 11; // Based on testing
|
||||
const int AVATAR_TRANSIT_FRAMES_PER_METER = 1; // Based on testing
|
||||
|
||||
_transitConfig._totalFrames = AVATAR_TRANSIT_FRAME_COUNT;
|
||||
_transitConfig._triggerDistance = AVATAR_TRANSIT_TRIGGER_DISTANCE;
|
||||
_transitConfig._framesPerMeter = AVATAR_TRANSIT_FRAMES_PER_METER;
|
||||
_transitConfig._isDistanceBased = true;
|
||||
}
|
||||
|
||||
AvatarSharedPointer AvatarManager::addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer) {
|
||||
|
@ -129,6 +138,10 @@ void AvatarManager::updateMyAvatar(float deltaTime) {
|
|||
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
|
||||
PerformanceWarning warn(showWarnings, "AvatarManager::updateMyAvatar()");
|
||||
|
||||
AvatarTransit::Status status = _myAvatar->updateTransit(deltaTime, _myAvatar->getNextPosition(), _transitConfig);
|
||||
bool sendFirstTransitPackage = (status == AvatarTransit::Status::START_TRANSIT);
|
||||
bool blockTransitData = (status == AvatarTransit::Status::TRANSITING);
|
||||
|
||||
_myAvatar->update(deltaTime);
|
||||
render::Transaction transaction;
|
||||
_myAvatar->updateRenderItem(transaction);
|
||||
|
@ -137,9 +150,13 @@ void AvatarManager::updateMyAvatar(float deltaTime) {
|
|||
quint64 now = usecTimestampNow();
|
||||
quint64 dt = now - _lastSendAvatarDataTime;
|
||||
|
||||
if (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS && !_myAvatarDataPacketsPaused) {
|
||||
|
||||
if (sendFirstTransitPackage || (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS && !_myAvatarDataPacketsPaused && !blockTransitData)) {
|
||||
// send head/hand data to the avatar mixer and voxel server
|
||||
PerformanceTimer perfTimer("send");
|
||||
if (sendFirstTransitPackage) {
|
||||
_myAvatar->overrideNextPackagePositionData(_myAvatar->getTransit()->getEndPosition());
|
||||
}
|
||||
_myAvatar->sendAvatarDataPacket();
|
||||
_lastSendAvatarDataTime = now;
|
||||
_myAvatarSendRate.increment();
|
||||
|
@ -258,6 +275,11 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
|
|||
if (inView && avatar->hasNewJointData()) {
|
||||
numAvatarsUpdated++;
|
||||
}
|
||||
auto transitStatus = avatar->_transit.update(deltaTime, avatar->_globalPosition, _transitConfig);
|
||||
if (avatar->getIsNewAvatar() && (transitStatus == AvatarTransit::Status::START_TRANSIT || transitStatus == AvatarTransit::Status::ABORT_TRANSIT)) {
|
||||
avatar->_transit.reset();
|
||||
avatar->setIsNewAvatar(false);
|
||||
}
|
||||
avatar->simulate(deltaTime, inView);
|
||||
avatar->updateRenderItem(renderTransaction);
|
||||
avatar->updateSpaceProxy(workloadTransaction);
|
||||
|
@ -430,6 +452,17 @@ void AvatarManager::handleProcessedPhysicsTransaction(PhysicsEngine::Transaction
|
|||
transaction.clear();
|
||||
}
|
||||
|
||||
void AvatarManager::removeDeadAvatarEntities(const SetOfEntities& deadEntities) {
|
||||
for (auto entity : deadEntities) {
|
||||
QUuid sessionID = entity->getOwningAvatarID();
|
||||
AvatarSharedPointer avatar = getAvatarBySessionID(sessionID);
|
||||
if (avatar) {
|
||||
const bool REQUIRES_REMOVAL_FROM_TREE = false;
|
||||
avatar->clearAvatarEntity(entity->getID(), REQUIRES_REMOVAL_FROM_TREE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar, KillAvatarReason removalReason) {
|
||||
auto avatar = std::static_pointer_cast<OtherAvatar>(removedAvatar);
|
||||
{
|
||||
|
@ -487,14 +520,16 @@ void AvatarManager::clearOtherAvatars() {
|
|||
|
||||
void AvatarManager::deleteAllAvatars() {
|
||||
assert(_avatarsToChangeInPhysics.empty());
|
||||
|
||||
QWriteLocker locker(&_hashLock);
|
||||
AvatarHash::iterator avatarIterator = _avatarHash.begin();
|
||||
QReadLocker locker(&_hashLock);
|
||||
AvatarHash::iterator avatarIterator = _avatarHash.begin();
|
||||
while (avatarIterator != _avatarHash.end()) {
|
||||
auto avatar = std::static_pointer_cast<OtherAvatar>(avatarIterator.value());
|
||||
auto avatar = std::static_pointer_cast<Avatar>(avatarIterator.value());
|
||||
avatarIterator = _avatarHash.erase(avatarIterator);
|
||||
avatar->die();
|
||||
assert(!avatar->_motionState);
|
||||
if (avatar != _myAvatar) {
|
||||
auto otherAvatar = std::static_pointer_cast<OtherAvatar>(avatar);
|
||||
assert(!otherAvatar->_motionState);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -811,7 +846,7 @@ void AvatarManager::setAvatarSortCoefficient(const QString& name, const QScriptV
|
|||
}
|
||||
}
|
||||
|
||||
QVariantMap AvatarManager::getPalData(const QList<QString> specificAvatarIdentifiers) {
|
||||
QVariantMap AvatarManager::getPalData(const QList<QString> specificAvatarIdentifiers) {
|
||||
QJsonArray palData;
|
||||
|
||||
auto avatarMap = getHashCopy();
|
||||
|
|
|
@ -26,11 +26,13 @@
|
|||
#include <avatars-renderer/ScriptAvatar.h>
|
||||
#include <AudioInjector.h>
|
||||
#include <workload/Space.h>
|
||||
#include <EntitySimulation.h> // for SetOfEntities
|
||||
|
||||
#include "AvatarMotionState.h"
|
||||
#include "MyAvatar.h"
|
||||
#include "OtherAvatar.h"
|
||||
|
||||
|
||||
using SortedAvatar = std::pair<float, std::shared_ptr<Avatar>>;
|
||||
|
||||
/**jsdoc
|
||||
|
@ -186,6 +188,7 @@ public:
|
|||
void queuePhysicsChange(const OtherAvatarPointer& avatar);
|
||||
void buildPhysicsTransaction(PhysicsEngine::Transaction& transaction);
|
||||
void handleProcessedPhysicsTransaction(PhysicsEngine::Transaction& transaction);
|
||||
void removeDeadAvatarEntities(const SetOfEntities& deadEntities);
|
||||
|
||||
public slots:
|
||||
/**jsdoc
|
||||
|
@ -232,6 +235,8 @@ private:
|
|||
mutable std::mutex _spaceLock;
|
||||
workload::SpacePointer _space;
|
||||
std::vector<int32_t> _spaceProxiesToDelete;
|
||||
|
||||
AvatarTransit::TransitConfig _transitConfig;
|
||||
};
|
||||
|
||||
#endif // hifi_AvatarManager_h
|
||||
|
|
|
@ -106,6 +106,7 @@ MyAvatar::MyAvatar(QThread* thread) :
|
|||
_realWorldFieldOfView("realWorldFieldOfView",
|
||||
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
|
||||
_useAdvancedMovementControls("advancedMovementForHandControllersIsChecked", true),
|
||||
_showPlayArea("showPlayArea", true),
|
||||
_smoothOrientationTimer(std::numeric_limits<float>::max()),
|
||||
_smoothOrientationInitial(),
|
||||
_smoothOrientationTarget(),
|
||||
|
@ -136,7 +137,7 @@ MyAvatar::MyAvatar(QThread* thread) :
|
|||
_useSnapTurnSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "useSnapTurn", _useSnapTurn),
|
||||
_userHeightSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "userHeight", DEFAULT_AVATAR_HEIGHT),
|
||||
_flyingHMDSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "flyingHMD", _flyingPrefHMD),
|
||||
_avatarEntityCountSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "avatarEntityData" << "size", _flyingPrefHMD)
|
||||
_avatarEntityCountSetting(QStringList() << AVATAR_SETTINGS_GROUP_NAME << "avatarEntityData" << "size", 0)
|
||||
{
|
||||
_clientTraitsHandler = std::unique_ptr<ClientTraitsHandler>(new ClientTraitsHandler(this));
|
||||
|
||||
|
@ -529,7 +530,8 @@ void MyAvatar::update(float deltaTime) {
|
|||
}
|
||||
if (_goToFeetAjustment && _skeletonModelLoaded) {
|
||||
auto feetAjustment = getWorldPosition() - getWorldFeetPosition();
|
||||
goToLocation(getWorldPosition() + feetAjustment);
|
||||
_goToPosition = getWorldPosition() + feetAjustment;
|
||||
setWorldPosition(_goToPosition);
|
||||
_goToFeetAjustment = false;
|
||||
}
|
||||
if (_physicsSafetyPending && qApp->isPhysicsEnabled() && _characterController.isEnabledAndReady()) {
|
||||
|
@ -638,9 +640,8 @@ void MyAvatar::updateChildCauterization(SpatiallyNestablePointer object, bool ca
|
|||
|
||||
void MyAvatar::simulate(float deltaTime) {
|
||||
PerformanceTimer perfTimer("simulate");
|
||||
|
||||
animateScaleChanges(deltaTime);
|
||||
|
||||
|
||||
setFlyingEnabled(getFlyingEnabled());
|
||||
|
||||
if (_cauterizationNeedsUpdate) {
|
||||
|
@ -928,6 +929,7 @@ void MyAvatar::updateSensorToWorldMatrix() {
|
|||
updateJointFromController(controller::Action::RIGHT_HAND, _controllerRightHandMatrixCache);
|
||||
|
||||
if (hasSensorToWorldScaleChanged) {
|
||||
setTransitScale(sensorToWorldScale);
|
||||
emit sensorToWorldScaleChanged(sensorToWorldScale);
|
||||
}
|
||||
|
||||
|
@ -3306,7 +3308,7 @@ float MyAvatar::getRawDriveKey(DriveKeys key) const {
|
|||
}
|
||||
|
||||
void MyAvatar::relayDriveKeysToCharacterController() {
|
||||
if (getDriveKey(TRANSLATE_Y) > 0.0f) {
|
||||
if (getDriveKey(TRANSLATE_Y) > 0.0f && (!qApp->isHMDMode() || (useAdvancedMovementControls() && getFlyingHMDPref()))) {
|
||||
_characterController.jump();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -122,8 +122,10 @@ class MyAvatar : public Avatar {
|
|||
* zone may disallow collisionless avatars.
|
||||
* @property {boolean} characterControllerEnabled - Synonym of <code>collisionsEnabled</code>.
|
||||
* <strong>Deprecated:</strong> Use <code>collisionsEnabled</code> instead.
|
||||
* @property {boolean} useAdvancedMovementControls - Returns the value of the Interface setting, Settings > Advanced
|
||||
* Movement for Hand Controller. Note: Setting the value has no effect unless Interface is restarted.
|
||||
* @property {boolean} useAdvancedMovementControls - Returns and sets the value of the Interface setting, Settings >
|
||||
* Walking and teleporting. Note: Setting the value has no effect unless Interface is restarted.
|
||||
* @property {boolean} showPlayArea - Returns and sets the value of the Interface setting, Settings > Show room boundaries
|
||||
* while teleporting. Note: Setting the value has no effect unless Interface is restarted.
|
||||
* @property {number} yawSpeed=75
|
||||
* @property {number} pitchSpeed=50
|
||||
* @property {boolean} hmdRollControlEnabled=true - If <code>true</code>, the roll angle of your HMD turns your avatar
|
||||
|
@ -223,6 +225,7 @@ class MyAvatar : public Avatar {
|
|||
Q_PROPERTY(bool collisionsEnabled READ getCollisionsEnabled WRITE setCollisionsEnabled)
|
||||
Q_PROPERTY(bool characterControllerEnabled READ getCharacterControllerEnabled WRITE setCharacterControllerEnabled)
|
||||
Q_PROPERTY(bool useAdvancedMovementControls READ useAdvancedMovementControls WRITE setUseAdvancedMovementControls)
|
||||
Q_PROPERTY(bool showPlayArea READ getShowPlayArea WRITE setShowPlayArea)
|
||||
|
||||
Q_PROPERTY(float yawSpeed MEMBER _yawSpeed)
|
||||
Q_PROPERTY(float pitchSpeed MEMBER _pitchSpeed)
|
||||
|
@ -542,6 +545,9 @@ public:
|
|||
void setUseAdvancedMovementControls(bool useAdvancedMovementControls)
|
||||
{ _useAdvancedMovementControls.set(useAdvancedMovementControls); }
|
||||
|
||||
bool getShowPlayArea() const { return _showPlayArea.get(); }
|
||||
void setShowPlayArea(bool showPlayArea) { _showPlayArea.set(showPlayArea); }
|
||||
|
||||
void setHMDRollControlEnabled(bool value) { _hmdRollControlEnabled = value; }
|
||||
bool getHMDRollControlEnabled() const { return _hmdRollControlEnabled; }
|
||||
void setHMDRollControlDeadZone(float value) { _hmdRollControlDeadZone = value; }
|
||||
|
@ -1115,6 +1121,8 @@ public:
|
|||
virtual QVariantList getAttachmentsVariant() const override;
|
||||
virtual void setAttachmentsVariant(const QVariantList& variant) override;
|
||||
|
||||
glm::vec3 getNextPosition() { return _goToPending ? _goToPosition : getWorldPosition(); };
|
||||
|
||||
public slots:
|
||||
|
||||
/**jsdoc
|
||||
|
@ -1629,6 +1637,7 @@ private:
|
|||
|
||||
Setting::Handle<float> _realWorldFieldOfView;
|
||||
Setting::Handle<bool> _useAdvancedMovementControls;
|
||||
Setting::Handle<bool> _showPlayArea;
|
||||
|
||||
// Smoothing.
|
||||
const float SMOOTH_TIME_ORIENTATION = 0.5f;
|
||||
|
|
|
@ -10,8 +10,11 @@
|
|||
//
|
||||
|
||||
#include "SafeLanding.h"
|
||||
|
||||
#include <SharedUtil.h>
|
||||
|
||||
#include "EntityTreeRenderer.h"
|
||||
#include "ModelEntityItem.h"
|
||||
#include "RenderableModelEntityItem.h"
|
||||
#include "InterfaceLogging.h"
|
||||
#include "Application.h"
|
||||
|
||||
|
@ -39,6 +42,7 @@ void SafeLanding::startEntitySequence(QSharedPointer<EntityTreeRenderer> entityT
|
|||
_entityTree = entityTree;
|
||||
_trackedEntities.clear();
|
||||
_trackingEntities = true;
|
||||
_maxTrackedEntityCount = 0;
|
||||
connect(std::const_pointer_cast<EntityTree>(_entityTree).get(),
|
||||
&EntityTree::addingEntity, this, &SafeLanding::addTrackedEntity);
|
||||
connect(std::const_pointer_cast<EntityTree>(_entityTree).get(),
|
||||
|
@ -47,6 +51,7 @@ void SafeLanding::startEntitySequence(QSharedPointer<EntityTreeRenderer> entityT
|
|||
_sequenceNumbers.clear();
|
||||
_initialStart = INVALID_SEQUENCE;
|
||||
_initialEnd = INVALID_SEQUENCE;
|
||||
_startTime = usecTimestampNow();
|
||||
EntityTreeRenderer::setEntityLoadingPriorityFunction(&ElevatedPriority);
|
||||
}
|
||||
}
|
||||
|
@ -55,6 +60,7 @@ void SafeLanding::stopEntitySequence() {
|
|||
Locker lock(_lock);
|
||||
_trackingEntities = false;
|
||||
_maxTrackedEntityCount = 0;
|
||||
_trackedEntityStabilityCount = 0;
|
||||
_initialStart = INVALID_SEQUENCE;
|
||||
_initialEnd = INVALID_SEQUENCE;
|
||||
_trackedEntities.clear();
|
||||
|
@ -66,18 +72,17 @@ void SafeLanding::addTrackedEntity(const EntityItemID& entityID) {
|
|||
Locker lock(_lock);
|
||||
EntityItemPointer entity = _entityTree->findEntityByID(entityID);
|
||||
|
||||
if (entity) {
|
||||
if (entity && entity->getCreated() < _startTime) {
|
||||
|
||||
_trackedEntities.emplace(entityID, entity);
|
||||
int trackedEntityCount = (int)_trackedEntities.size();
|
||||
|
||||
if (trackedEntityCount > _maxTrackedEntityCount) {
|
||||
_maxTrackedEntityCount = trackedEntityCount;
|
||||
_trackedEntityStabilityCount = 0;
|
||||
}
|
||||
qCDebug(interfaceapp) << "Safe Landing: Tracking entity " << entity->getItemName();
|
||||
}
|
||||
} else {
|
||||
qCDebug(interfaceapp) << "Safe Landing: Null Entity: " << entityID;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -104,10 +109,10 @@ void SafeLanding::noteReceivedsequenceNumber(int sequenceNumber) {
|
|||
bool SafeLanding::isLoadSequenceComplete() {
|
||||
if (isEntityLoadingComplete() && isSequenceNumbersComplete()) {
|
||||
Locker lock(_lock);
|
||||
_trackedEntities.clear();
|
||||
_initialStart = INVALID_SEQUENCE;
|
||||
_initialEnd = INVALID_SEQUENCE;
|
||||
_entityTree = nullptr;
|
||||
_trackingEntities = false; // Don't track anything else that comes in.
|
||||
EntityTreeRenderer::setEntityLoadingPriorityFunction(StandardPriority);
|
||||
}
|
||||
|
||||
|
@ -116,11 +121,18 @@ bool SafeLanding::isLoadSequenceComplete() {
|
|||
|
||||
float SafeLanding::loadingProgressPercentage() {
|
||||
Locker lock(_lock);
|
||||
static const int MINIMUM_TRACKED_ENTITY_STABILITY_COUNT = 15;
|
||||
|
||||
float entityReadyPercentage = 0.0f;
|
||||
if (_maxTrackedEntityCount > 0) {
|
||||
return ((_maxTrackedEntityCount - _trackedEntities.size()) / (float)_maxTrackedEntityCount);
|
||||
entityReadyPercentage = ((_maxTrackedEntityCount - _trackedEntities.size()) / (float)_maxTrackedEntityCount);
|
||||
}
|
||||
|
||||
return 0.0f;
|
||||
if (_trackedEntityStabilityCount < MINIMUM_TRACKED_ENTITY_STABILITY_COUNT) {
|
||||
entityReadyPercentage *= 0.20f;
|
||||
}
|
||||
|
||||
return entityReadyPercentage;
|
||||
}
|
||||
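Editor's note: restated, the new progress formula above is the fraction of tracked entities that have finished, scaled down to 20% of itself until the tracked-entity count has been stable for 15 checks. A small sketch plus a worked example:

    // Sketch of SafeLanding::loadingProgressPercentage() with plain ints.
    // Example: 100 entities seen at peak, 20 still tracked, 10 stable checks
    // -> (100 - 20) / 100 = 0.8, then 0.8 * 0.20 = 0.16 (16%), because entity
    // discovery is not yet considered settled.
    float loadingProgressSketch(int maxTracked, int stillTracked, int stabilityCount) {
        const int MINIMUM_TRACKED_ENTITY_STABILITY_COUNT = 15;
        float entityReadyPercentage = 0.0f;
        if (maxTracked > 0) {
            entityReadyPercentage = float(maxTracked - stillTracked) / float(maxTracked);
        }
        if (stabilityCount < MINIMUM_TRACKED_ENTITY_STABILITY_COUNT) {
            entityReadyPercentage *= 0.20f;
        }
        return entityReadyPercentage;
    }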
|
||||
bool SafeLanding::isSequenceNumbersComplete() {
|
||||
|
@ -130,11 +142,16 @@ bool SafeLanding::isSequenceNumbersComplete() {
|
|||
_initialEnd + SEQUENCE_MODULO - _initialStart;
|
||||
auto startIter = _sequenceNumbers.find(_initialStart);
|
||||
auto endIter = _sequenceNumbers.find(_initialEnd - 1);
|
||||
|
||||
bool missingSequenceNumbers = qApp->isMissingSequenceNumbers();
|
||||
if (sequenceSize == 0 ||
|
||||
(startIter != _sequenceNumbers.end()
|
||||
&& endIter != _sequenceNumbers.end()
|
||||
&& distance(startIter, endIter) == sequenceSize - 1) ) {
|
||||
_trackingEntities = false; // Don't track anything else that comes in.
|
||||
&& ((distance(startIter, endIter) == sequenceSize - 1) || !missingSequenceNumbers))) {
|
||||
bool enableInterstitial = DependencyManager::get<NodeList>()->getDomainHandler().getInterstitialModeEnabled();
|
||||
if (!enableInterstitial) {
|
||||
_trackingEntities = false; // Don't track anything else that comes in.
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
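Editor's note: a sketch of the completeness test above using a std::set (SEQUENCE_MODULO is assumed to be the wrap-around period of the octree sequence numbers; "missing" mirrors qApp->isMissingSequenceNumbers()):

    #include <iterator>
    #include <set>

    // Complete when the initial window is empty, or when both ends of the window
    // were received and either every number in between arrived or the client is
    // no longer reporting missing sequence numbers.
    bool sequenceCompleteSketch(const std::set<int>& received, int initialStart, int initialEnd,
                                int sequenceModulo, bool missing) {
        int sequenceSize = (initialEnd >= initialStart)
            ? initialEnd - initialStart
            : initialEnd + sequenceModulo - initialStart;
        if (sequenceSize == 0) {
            return true;
        }
        auto startIter = received.find(initialStart);
        auto endIter = received.find(initialEnd - 1);
        if (startIter == received.end() || endIter == received.end()) {
            return false;
        }
        return (int)std::distance(startIter, endIter) == sequenceSize - 1 || !missing;
    }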
@ -145,13 +162,13 @@ bool isEntityPhysicsReady(const EntityItemPointer& entity) {
|
|||
if (entity && !entity->getCollisionless()) {
|
||||
const auto& entityType = entity->getType();
|
||||
if (entityType == EntityTypes::Model) {
|
||||
ModelEntityItem * modelEntity = std::dynamic_pointer_cast<ModelEntityItem>(entity).get();
|
||||
RenderableModelEntityItem * modelEntity = std::dynamic_pointer_cast<RenderableModelEntityItem>(entity).get();
|
||||
static const std::set<ShapeType> downloadedCollisionTypes
|
||||
{ SHAPE_TYPE_COMPOUND, SHAPE_TYPE_SIMPLE_COMPOUND, SHAPE_TYPE_STATIC_MESH, SHAPE_TYPE_SIMPLE_HULL };
|
||||
bool hasAABox;
|
||||
entity->getAABox(hasAABox);
|
||||
if (hasAABox && downloadedCollisionTypes.count(modelEntity->getShapeType()) != 0) {
|
||||
return (!entity->shouldBePhysical() || entity->isReadyToComputeShape());
|
||||
return (!entity->shouldBePhysical() || entity->isReadyToComputeShape() || modelEntity->computeShapeFailedToLoad());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
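Editor's note: the change above lets a model entity whose collision shape failed to download count as ready. A sketch of the predicate for that branch (simplified types; the fall-through behaviour for other entity types is assumed from context):

    #include <set>

    enum class ShapeTypeSketch { Compound, SimpleCompound, StaticMesh, SimpleHull, Other };

    // Sketch of isEntityPhysicsReady() for non-collisionless model entities.
    bool modelEntityPhysicsReadySketch(bool hasAABox, ShapeTypeSketch shapeType,
                                       bool shouldBePhysical, bool readyToComputeShape,
                                       bool shapeFailedToLoad) {
        static const std::set<ShapeTypeSketch> downloadedCollisionTypes {
            ShapeTypeSketch::Compound, ShapeTypeSketch::SimpleCompound,
            ShapeTypeSketch::StaticMesh, ShapeTypeSketch::SimpleHull };
        if (hasAABox && downloadedCollisionTypes.count(shapeType) != 0) {
            // New in this change: a failed collision-shape load no longer blocks readiness.
            return !shouldBePhysical || readyToComputeShape || shapeFailedToLoad;
        }
        return true;   // assumed: other shapes don't gate readiness in this branch
    }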
@ -166,16 +183,20 @@ bool SafeLanding::isEntityLoadingComplete() {
|
|||
auto entityTree = qApp->getEntities();
|
||||
auto entityMapIter = _trackedEntities.begin();
|
||||
|
||||
bool enableInterstitial = DependencyManager::get<NodeList>()->getDomainHandler().getInterstitialModeEnabled();
|
||||
|
||||
while (entityMapIter != _trackedEntities.end()) {
|
||||
auto entity = entityMapIter->second;
|
||||
|
||||
bool isVisuallyReady = true;
|
||||
|
||||
Settings settings;
|
||||
bool enableInterstitial = settings.value("enableIntersitialMode", false).toBool();
|
||||
|
||||
if (enableInterstitial) {
|
||||
isVisuallyReady = (entity->isVisuallyReady() || !entityTree->renderableForEntityId(entityMapIter->first));
|
||||
auto entityRenderable = entityTree->renderableForEntityId(entityMapIter->first);
|
||||
if (!entityRenderable) {
|
||||
entityTree->addingEntity(entityMapIter->first);
|
||||
}
|
||||
|
||||
isVisuallyReady = entity->isVisuallyReady() || (!entityRenderable && !entity->isParentPathComplete());
|
||||
}
|
||||
|
||||
if (isEntityPhysicsReady(entity) && isVisuallyReady) {
|
||||
|
@ -188,6 +209,12 @@ bool SafeLanding::isEntityLoadingComplete() {
|
|||
entityMapIter++;
|
||||
}
|
||||
}
|
||||
|
||||
if (enableInterstitial) {
|
||||
_trackedEntityStabilityCount++;
|
||||
}
|
||||
|
||||
|
||||
return _trackedEntities.empty();
|
||||
}
|
||||
|
||||
|
|
|
@ -52,6 +52,9 @@ private:
|
|||
int _initialStart { INVALID_SEQUENCE };
|
||||
int _initialEnd { INVALID_SEQUENCE };
|
||||
int _maxTrackedEntityCount { 0 };
|
||||
int _trackedEntityStabilityCount { 0 };
|
||||
|
||||
quint64 _startTime { 0 };
|
||||
|
||||
struct SequenceLessThan {
|
||||
bool operator()(const int& a, const int& b) const;
|
||||
|
|
|
@ -345,9 +345,9 @@ void CollisionPick::computeShapeInfo(const CollisionRegion& pick, ShapeInfo& sha
|
|||
}
|
||||
}
|
||||
|
||||
CollisionPick::CollisionPick(const PickFilter& filter, float maxDistance, bool enabled, CollisionRegion collisionRegion, PhysicsEnginePointer physicsEngine) :
|
||||
Pick(filter, maxDistance, enabled),
|
||||
_mathPick(collisionRegion),
|
||||
CollisionPick::CollisionPick(const PickFilter& filter, float maxDistance, bool enabled, bool scaleWithParent, CollisionRegion collisionRegion, PhysicsEnginePointer physicsEngine) :
|
||||
Pick(collisionRegion, filter, maxDistance, enabled),
|
||||
_scaleWithParent(scaleWithParent),
|
||||
_physicsEngine(physicsEngine) {
|
||||
if (collisionRegion.shouldComputeShapeInfo()) {
|
||||
_cachedResource = DependencyManager::get<ModelCache>()->getCollisionGeometryResource(collisionRegion.modelURL);
|
||||
|
@ -361,9 +361,15 @@ CollisionRegion CollisionPick::getMathematicalPick() const {
|
|||
if (parentTransform) {
|
||||
Transform parentTransformValue = parentTransform->getTransform();
|
||||
mathPick.transform = parentTransformValue.worldTransform(mathPick.transform);
|
||||
glm::vec3 scale = parentTransformValue.getScale();
|
||||
float largestDimension = glm::max(glm::max(scale.x, scale.y), scale.z);
|
||||
mathPick.threshold *= largestDimension;
|
||||
|
||||
if (_scaleWithParent) {
|
||||
glm::vec3 scale = parentTransformValue.getScale();
|
||||
float largestDimension = glm::max(glm::max(scale.x, scale.y), scale.z);
|
||||
mathPick.threshold *= largestDimension;
|
||||
} else {
|
||||
// We need to undo parent scaling after-the-fact because the parent's scale was needed to calculate this mathPick's position
|
||||
mathPick.transform.setScale(_mathPick.transform.getScale());
|
||||
}
|
||||
}
|
||||
return mathPick;
|
||||
}
|
||||
|
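Editor's note: the new scaleWithParent flag above decides whether the parent transform is allowed to scale the pick. A rough sketch of the threshold handling (illustrative types, not the real Transform API; the transform-scale restore is only summarized in the comment):

    #include <algorithm>

    struct Vec3Sketch { float x, y, z; };

    // Sketch of the branch added to CollisionPick::getMathematicalPick(): when
    // scaling with the parent, the contact threshold grows with the parent's
    // largest dimension; otherwise the pick keeps its own threshold and its
    // original scale is restored on the transform afterwards.
    float adjustThresholdSketch(float threshold, const Vec3Sketch& parentScale, bool scaleWithParent) {
        if (scaleWithParent) {
            float largestDimension = std::max({ parentScale.x, parentScale.y, parentScale.z });
            return threshold * largestDimension;
        }
        return threshold;
    }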
@ -424,5 +430,7 @@ PickResultPointer CollisionPick::getHUDIntersection(const CollisionRegion& pick)
|
|||
}
|
||||
|
||||
Transform CollisionPick::getResultTransform() const {
|
||||
return Transform(getMathematicalPick().transform);
|
||||
Transform transform;
|
||||
transform.setTranslation(_mathPick.transform.getTranslation());
|
||||
return transform;
|
||||
}
|
|
@ -47,7 +47,7 @@ public:
|
|||
|
||||
class CollisionPick : public Pick<CollisionRegion> {
|
||||
public:
|
||||
CollisionPick(const PickFilter& filter, float maxDistance, bool enabled, CollisionRegion collisionRegion, PhysicsEnginePointer physicsEngine);
|
||||
CollisionPick(const PickFilter& filter, float maxDistance, bool enabled, bool scaleWithParent, CollisionRegion collisionRegion, PhysicsEnginePointer physicsEngine);
|
||||
|
||||
CollisionRegion getMathematicalPick() const override;
|
||||
PickResultPointer getDefaultResult(const QVariantMap& pickVariant) const override {
|
||||
|
@ -67,7 +67,8 @@ protected:
|
|||
void computeShapeInfoDimensionsOnly(const CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource);
|
||||
void filterIntersections(std::vector<ContactTestResult>& intersections) const;
|
||||
|
||||
CollisionRegion _mathPick;
|
||||
bool _scaleWithParent;
|
||||
|
||||
PhysicsEnginePointer _physicsEngine;
|
||||
QSharedPointer<GeometryResource> _cachedResource;
|
||||
|
||||
|
|
|
@ -1,43 +0,0 @@
|
|||
//
|
||||
// Created by Sam Gondelman 7/2/2018
|
||||
// Copyright 2018 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "JointParabolaPick.h"
|
||||
|
||||
#include "avatar/AvatarManager.h"
|
||||
|
||||
JointParabolaPick::JointParabolaPick(const std::string& jointName, const glm::vec3& posOffset, const glm::vec3& dirOffset,
|
||||
float speed, const glm::vec3& accelerationAxis, bool rotateAccelerationWithAvatar, bool scaleWithAvatar, PickFilter& filter, float maxDistance, bool enabled) :
|
||||
ParabolaPick(speed, accelerationAxis, rotateAccelerationWithAvatar, scaleWithAvatar, filter, maxDistance, enabled),
|
||||
_jointName(jointName),
|
||||
_posOffset(posOffset),
|
||||
_dirOffset(dirOffset)
|
||||
{
|
||||
}
|
||||
|
||||
PickParabola JointParabolaPick::getMathematicalPick() const {
|
||||
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
int jointIndex = myAvatar->getJointIndex(QString::fromStdString(_jointName));
|
||||
bool useAvatarHead = _jointName == "Avatar";
|
||||
const int INVALID_JOINT = -1;
|
||||
if (jointIndex != INVALID_JOINT || useAvatarHead) {
|
||||
glm::vec3 jointPos = useAvatarHead ? myAvatar->getHeadPosition() : myAvatar->getAbsoluteJointTranslationInObjectFrame(jointIndex);
|
||||
glm::quat jointRot = useAvatarHead ? myAvatar->getHeadOrientation() : myAvatar->getAbsoluteJointRotationInObjectFrame(jointIndex);
|
||||
glm::vec3 avatarPos = myAvatar->getWorldPosition();
|
||||
glm::quat avatarRot = myAvatar->getWorldOrientation();
|
||||
|
||||
glm::vec3 pos = useAvatarHead ? jointPos : avatarPos + (avatarRot * jointPos);
|
||||
glm::quat rot = useAvatarHead ? jointRot * glm::angleAxis(-PI / 2.0f, Vectors::RIGHT) : avatarRot * jointRot;
|
||||
|
||||
// Apply offset
|
||||
pos = pos + (rot * (myAvatar->getSensorToWorldScale() * _posOffset));
|
||||
glm::vec3 dir = glm::normalize(rot * glm::normalize(_dirOffset));
|
||||
|
||||
return PickParabola(pos, getSpeed() * dir, getAcceleration());
|
||||
}
|
||||
|
||||
return PickParabola();
|
||||
}
|
|
@ -1,32 +0,0 @@
|
|||
//
|
||||
// Created by Sam Gondelman 7/2/2018
|
||||
// Copyright 2018 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#ifndef hifi_JointParabolaPick_h
|
||||
#define hifi_JointParabolaPick_h
|
||||
|
||||
#include "ParabolaPick.h"
|
||||
|
||||
class JointParabolaPick : public ParabolaPick {
|
||||
|
||||
public:
|
||||
JointParabolaPick(const std::string& jointName, const glm::vec3& posOffset, const glm::vec3& dirOffset,
|
||||
float speed, const glm::vec3& accelerationAxis, bool rotateAccelerationWithAvatar, bool scaleWithAvatar,
|
||||
PickFilter& filter, float maxDistance = 0.0f, bool enabled = false);
|
||||
|
||||
PickParabola getMathematicalPick() const override;
|
||||
|
||||
bool isLeftHand() const override { return (_jointName == "_CONTROLLER_LEFTHAND") || (_jointName == "_CAMERA_RELATIVE_CONTROLLER_LEFTHAND"); }
|
||||
bool isRightHand() const override { return (_jointName == "_CONTROLLER_RIGHTHAND") || (_jointName == "_CAMERA_RELATIVE_CONTROLLER_RIGHTHAND"); }
|
||||
|
||||
private:
|
||||
std::string _jointName;
|
||||
glm::vec3 _posOffset;
|
||||
glm::vec3 _dirOffset;
|
||||
|
||||
};
|
||||
|
||||
#endif // hifi_JointParabolaPick_h
|
|
@ -1,45 +0,0 @@
|
|||
//
|
||||
// JointRayPick.cpp
|
||||
// interface/src/raypick
|
||||
//
|
||||
// Created by Sam Gondelman 7/11/2017
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "JointRayPick.h"
|
||||
|
||||
#include "avatar/AvatarManager.h"
|
||||
|
||||
JointRayPick::JointRayPick(const std::string& jointName, const glm::vec3& posOffset, const glm::vec3& dirOffset, const PickFilter& filter, float maxDistance, bool enabled) :
|
||||
RayPick(filter, maxDistance, enabled),
|
||||
_jointName(jointName),
|
||||
_posOffset(posOffset),
|
||||
_dirOffset(dirOffset)
|
||||
{
|
||||
}
|
||||
|
||||
PickRay JointRayPick::getMathematicalPick() const {
|
||||
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
int jointIndex = myAvatar->getJointIndex(QString::fromStdString(_jointName));
|
||||
bool useAvatarHead = _jointName == "Avatar";
|
||||
const int INVALID_JOINT = -1;
|
||||
if (jointIndex != INVALID_JOINT || useAvatarHead) {
|
||||
glm::vec3 jointPos = useAvatarHead ? myAvatar->getHeadPosition() : myAvatar->getAbsoluteJointTranslationInObjectFrame(jointIndex);
|
||||
glm::quat jointRot = useAvatarHead ? myAvatar->getHeadOrientation() : myAvatar->getAbsoluteJointRotationInObjectFrame(jointIndex);
|
||||
glm::vec3 avatarPos = myAvatar->getWorldPosition();
|
||||
glm::quat avatarRot = myAvatar->getWorldOrientation();
|
||||
|
||||
glm::vec3 pos = useAvatarHead ? jointPos : avatarPos + (avatarRot * jointPos);
|
||||
glm::quat rot = useAvatarHead ? jointRot * glm::angleAxis(-PI / 2.0f, Vectors::RIGHT) : avatarRot * jointRot;
|
||||
|
||||
// Apply offset
|
||||
pos = pos + (rot * (myAvatar->getSensorToWorldScale() * _posOffset));
|
||||
glm::vec3 dir = glm::normalize(rot * glm::normalize(_dirOffset));
|
||||
|
||||
return PickRay(pos, dir);
|
||||
}
|
||||
|
||||
return PickRay();
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
//
|
||||
// JointRayPick.h
|
||||
// interface/src/raypick
|
||||
//
|
||||
// Created by Sam Gondelman 7/11/2017
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#ifndef hifi_JointRayPick_h
|
||||
#define hifi_JointRayPick_h
|
||||
|
||||
#include "RayPick.h"
|
||||
|
||||
class JointRayPick : public RayPick {
|
||||
|
||||
public:
|
||||
JointRayPick(const std::string& jointName, const glm::vec3& posOffset, const glm::vec3& dirOffset, const PickFilter& filter, float maxDistance = 0.0f, bool enabled = false);
|
||||
|
||||
PickRay getMathematicalPick() const override;
|
||||
|
||||
bool isLeftHand() const override { return (_jointName == "_CONTROLLER_LEFTHAND") || (_jointName == "_CAMERA_RELATIVE_CONTROLLER_LEFTHAND"); }
|
||||
bool isRightHand() const override { return (_jointName == "_CONTROLLER_RIGHTHAND") || (_jointName == "_CAMERA_RELATIVE_CONTROLLER_RIGHTHAND"); }
|
||||
|
||||
private:
|
||||
std::string _jointName;
|
||||
glm::vec3 _posOffset;
|
||||
glm::vec3 _dirOffset;
|
||||
|
||||
};
|
||||
|
||||
#endif // hifi_JointRayPick_h
|
|
@ -14,13 +14,14 @@
|
|||
#include "avatar/AvatarManager.h"
|
||||
|
||||
#include <DependencyManager.h>
|
||||
#include "PickManager.h"
|
||||
#include "RayPick.h"
|
||||
|
||||
LaserPointer::LaserPointer(const QVariant& rayProps, const RenderStateMap& renderStates, const DefaultRenderStateMap& defaultRenderStates, bool hover,
|
||||
const PointerTriggers& triggers, bool faceAvatar, bool followNormal, float followNormalTime, bool centerEndY, bool lockEnd,
|
||||
bool distanceScaleEnd, bool scaleWithAvatar, bool enabled) :
|
||||
bool distanceScaleEnd, bool scaleWithParent, bool enabled) :
|
||||
PathPointer(PickQuery::Ray, rayProps, renderStates, defaultRenderStates, hover, triggers, faceAvatar, followNormal, followNormalTime,
|
||||
centerEndY, lockEnd, distanceScaleEnd, scaleWithAvatar, enabled)
|
||||
centerEndY, lockEnd, distanceScaleEnd, scaleWithParent, enabled)
|
||||
{
|
||||
}
|
||||
|
||||
|
@ -146,9 +147,9 @@ void LaserPointer::RenderState::disable() {
|
|||
}
|
||||
}
|
||||
|
||||
void LaserPointer::RenderState::update(const glm::vec3& origin, const glm::vec3& end, const glm::vec3& surfaceNormal, bool scaleWithAvatar, bool distanceScaleEnd, bool centerEndY,
|
||||
void LaserPointer::RenderState::update(const glm::vec3& origin, const glm::vec3& end, const glm::vec3& surfaceNormal, float parentScale, bool distanceScaleEnd, bool centerEndY,
|
||||
bool faceAvatar, bool followNormal, float followNormalStrength, float distance, const PickResultPointer& pickResult) {
|
||||
StartEndRenderState::update(origin, end, surfaceNormal, scaleWithAvatar, distanceScaleEnd, centerEndY, faceAvatar, followNormal, followNormalStrength, distance, pickResult);
|
||||
StartEndRenderState::update(origin, end, surfaceNormal, parentScale, distanceScaleEnd, centerEndY, faceAvatar, followNormal, followNormalStrength, distance, pickResult);
|
||||
QVariant endVariant = vec3toVariant(end);
|
||||
if (!getPathID().isNull()) {
|
||||
QVariantMap pathProps;
|
||||
|
@ -156,9 +157,7 @@ void LaserPointer::RenderState::update(const glm::vec3& origin, const glm::vec3&
|
|||
pathProps.insert("end", endVariant);
|
||||
pathProps.insert("visible", true);
|
||||
pathProps.insert("ignoreRayIntersection", doesPathIgnoreRays());
|
||||
if (scaleWithAvatar) {
|
||||
pathProps.insert("lineWidth", getLineWidth() * DependencyManager::get<AvatarManager>()->getMyAvatar()->getSensorToWorldScale());
|
||||
}
|
||||
pathProps.insert("lineWidth", getLineWidth() * parentScale);
|
||||
qApp->getOverlays().editOverlay(getPathID(), pathProps);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -24,12 +24,12 @@ public:
|
|||
const OverlayID& getPathID() const { return _pathID; }
|
||||
const bool& doesPathIgnoreRays() const { return _pathIgnoreRays; }
|
||||
|
||||
void setLineWidth(const float& lineWidth) { _lineWidth = lineWidth; }
|
||||
const float& getLineWidth() const { return _lineWidth; }
|
||||
void setLineWidth(float width) { _lineWidth = width; }
|
||||
float getLineWidth() const { return _lineWidth; }
|
||||
|
||||
void cleanup() override;
|
||||
void disable() override;
|
||||
void update(const glm::vec3& origin, const glm::vec3& end, const glm::vec3& surfaceNormal, bool scaleWithAvatar, bool distanceScaleEnd, bool centerEndY,
|
||||
void update(const glm::vec3& origin, const glm::vec3& end, const glm::vec3& surfaceNormal, float parentScale, bool distanceScaleEnd, bool centerEndY,
|
||||
bool faceAvatar, bool followNormal, float followNormalStrength, float distance, const PickResultPointer& pickResult) override;
|
||||
|
||||
private:
|
||||
|
@ -40,7 +40,7 @@ public:
|
|||
};
|
||||
|
||||
LaserPointer(const QVariant& rayProps, const RenderStateMap& renderStates, const DefaultRenderStateMap& defaultRenderStates, bool hover, const PointerTriggers& triggers,
|
||||
bool faceAvatar, bool followNormal, float followNormalStrength, bool centerEndY, bool lockEnd, bool distanceScaleEnd, bool scaleWithAvatar, bool enabled);
|
||||
bool faceAvatar, bool followNormal, float followNormalStrength, bool centerEndY, bool lockEnd, bool distanceScaleEnd, bool scaleWithParent, bool enabled);
|
||||
|
||||
QVariantMap toVariantMap() const override;
|
||||
|
||||
|
|
|
@ -1,28 +0,0 @@
|
|||
//
|
||||
// Created by Sam Gondelman 7/2/2018
|
||||
// Copyright 2018 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "MouseParabolaPick.h"
|
||||
|
||||
#include "Application.h"
|
||||
#include "display-plugins/CompositorHelper.h"
|
||||
|
||||
MouseParabolaPick::MouseParabolaPick(float speed, const glm::vec3& accelerationAxis, bool rotateAccelerationWithAvatar,
|
||||
bool scaleWithAvatar, const PickFilter& filter, float maxDistance, bool enabled) :
|
||||
ParabolaPick(speed, accelerationAxis, rotateAccelerationWithAvatar, scaleWithAvatar, filter, maxDistance, enabled)
|
||||
{
|
||||
}
|
||||
|
||||
PickParabola MouseParabolaPick::getMathematicalPick() const {
|
||||
QVariant position = qApp->getApplicationCompositor().getReticleInterface()->getPosition();
|
||||
if (position.isValid()) {
|
||||
QVariantMap posMap = position.toMap();
|
||||
PickRay pickRay = qApp->getCamera().computePickRay(posMap["x"].toFloat(), posMap["y"].toFloat());
|
||||
return PickParabola(pickRay.origin, getSpeed() * pickRay.direction, getAcceleration());
|
||||
}
|
||||
|
||||
return PickParabola();
|
||||
}
|
|
@@ -1,24 +0,0 @@
//
//  Created by Sam Gondelman 7/2/2018
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_MouseParabolaPick_h
#define hifi_MouseParabolaPick_h

#include "ParabolaPick.h"

class MouseParabolaPick : public ParabolaPick {

public:
    MouseParabolaPick(float speed, const glm::vec3& accelerationAxis, bool rotateAccelerationWithAvatar, bool scaleWithAvatar,
                      const PickFilter& filter, float maxDistance = 0.0f, bool enabled = false);

    PickParabola getMathematicalPick() const override;

    bool isMouse() const override { return true; }
};

#endif // hifi_MouseParabolaPick_h
@@ -1,29 +0,0 @@
//
//  MouseRayPick.cpp
//  interface/src/raypick
//
//  Created by Sam Gondelman 7/19/2017
//  Copyright 2017 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "MouseRayPick.h"

#include "Application.h"
#include "display-plugins/CompositorHelper.h"

MouseRayPick::MouseRayPick(const PickFilter& filter, float maxDistance, bool enabled) :
    RayPick(filter, maxDistance, enabled)
{
}

PickRay MouseRayPick::getMathematicalPick() const {
    QVariant position = qApp->getApplicationCompositor().getReticleInterface()->getPosition();
    if (position.isValid()) {
        QVariantMap posMap = position.toMap();
        return qApp->getCamera().computePickRay(posMap["x"].toFloat(), posMap["y"].toFloat());
    }

    return PickRay();
}
@@ -1,26 +0,0 @@
//
//  MouseRayPick.h
//  interface/src/raypick
//
//  Created by Sam Gondelman 7/19/2017
//  Copyright 2017 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_MouseRayPick_h
#define hifi_MouseRayPick_h

#include "RayPick.h"

class MouseRayPick : public RayPick {

public:
    MouseRayPick(const PickFilter& filter, float maxDistance = 0.0f, bool enabled = false);

    PickRay getMathematicalPick() const override;

    bool isMouse() const override { return true; }
};

#endif // hifi_MouseRayPick_h
@ -15,6 +15,46 @@
|
|||
#include "DependencyManager.h"
|
||||
#include "PickManager.h"
|
||||
|
||||
ParabolaPick::ParabolaPick(const glm::vec3& position, const glm::vec3& direction, float speed, const glm::vec3& accelerationAxis, bool rotateAccelerationWithAvatar, bool rotateAccelerationWithParent, bool scaleWithParent, const PickFilter& filter, float maxDistance, bool enabled) :
|
||||
Pick(PickParabola(position, speed * direction, accelerationAxis), filter, maxDistance, enabled),
|
||||
_rotateAccelerationWithAvatar(rotateAccelerationWithAvatar),
|
||||
_rotateAccelerationWithParent(rotateAccelerationWithParent),
|
||||
_scaleWithParent(scaleWithParent),
|
||||
_speed(speed) {
|
||||
}
|
||||
|
||||
PickParabola ParabolaPick::getMathematicalPick() const {
|
||||
if (!parentTransform) {
|
||||
PickParabola mathPick = _mathPick;
|
||||
if (_rotateAccelerationWithAvatar) {
|
||||
mathPick.acceleration = DependencyManager::get<AvatarManager>()->getMyAvatar()->getWorldOrientation() * mathPick.acceleration;
|
||||
}
|
||||
return mathPick;
|
||||
}
|
||||
|
||||
Transform currentParentTransform = parentTransform->getTransform();
|
||||
|
||||
glm::vec3 position = currentParentTransform.transform(_mathPick.origin);
|
||||
glm::vec3 velocity = _mathPick.velocity;
|
||||
if (_scaleWithParent) {
|
||||
velocity = currentParentTransform.transformDirection(velocity);
|
||||
} else {
|
||||
glm::vec3 transformedVelocity = currentParentTransform.transformDirection(velocity);
|
||||
velocity = glm::normalize(transformedVelocity) * _speed;
|
||||
}
|
||||
glm::vec3 acceleration = _mathPick.acceleration;
|
||||
if (_scaleWithParent) {
|
||||
acceleration *= currentParentTransform.getScale();
|
||||
}
|
||||
if (_rotateAccelerationWithAvatar) {
|
||||
acceleration = DependencyManager::get<AvatarManager>()->getMyAvatar()->getWorldOrientation() * acceleration;
|
||||
} else if (_rotateAccelerationWithParent) {
|
||||
acceleration = currentParentTransform.getRotation() * acceleration;
|
||||
}
|
||||
|
||||
return PickParabola(position, velocity, acceleration);
|
||||
}
|
||||
|
||||
PickResultPointer ParabolaPick::getEntityIntersection(const PickParabola& pick) {
|
||||
if (glm::length2(pick.acceleration) > EPSILON && glm::length2(pick.velocity) > EPSILON) {
|
||||
bool precisionPicking = !(getFilter().doesPickCoarse() || DependencyManager::get<PickManager>()->getForceCoarsePicking());
|
||||
|
@ -60,18 +100,6 @@ PickResultPointer ParabolaPick::getHUDIntersection(const PickParabola& pick) {
|
|||
return std::make_shared<ParabolaPickResult>(pick.toVariantMap());
|
||||
}
|
||||
|
||||
float ParabolaPick::getSpeed() const {
|
||||
return (_scaleWithAvatar ? DependencyManager::get<AvatarManager>()->getMyAvatar()->getSensorToWorldScale() * _speed : _speed);
|
||||
}
|
||||
|
||||
glm::vec3 ParabolaPick::getAcceleration() const {
|
||||
float scale = (_scaleWithAvatar ? DependencyManager::get<AvatarManager>()->getMyAvatar()->getSensorToWorldScale() : 1.0f);
|
||||
if (_rotateAccelerationWithAvatar) {
|
||||
return scale * (DependencyManager::get<AvatarManager>()->getMyAvatar()->getWorldOrientation() * _accelerationAxis);
|
||||
}
|
||||
return scale * _accelerationAxis;
|
||||
}
|
||||
|
||||
Transform ParabolaPick::getResultTransform() const {
|
||||
PickResultPointer result = getPrevPickResult();
|
||||
if (!result) {
|
||||
|
|
|
@ -74,9 +74,9 @@ public:
|
|||
class ParabolaPick : public Pick<PickParabola> {
|
||||
|
||||
public:
|
||||
ParabolaPick(float speed, const glm::vec3& accelerationAxis, bool rotateAccelerationWithAvatar, bool scaleWithAvatar, const PickFilter& filter, float maxDistance, bool enabled) :
|
||||
Pick(filter, maxDistance, enabled), _speed(speed), _accelerationAxis(accelerationAxis), _rotateAccelerationWithAvatar(rotateAccelerationWithAvatar),
|
||||
_scaleWithAvatar(scaleWithAvatar) {}
|
||||
ParabolaPick(const glm::vec3& position, const glm::vec3& direction, float speed, const glm::vec3& acceleration, bool rotateAccelerationWithAvatar, bool rotateAccelerationWithParent, bool scaleWithParent, const PickFilter& filter, float maxDistance, bool enabled);
|
||||
|
||||
PickParabola getMathematicalPick() const override;
|
||||
|
||||
PickResultPointer getDefaultResult(const QVariantMap& pickVariant) const override { return std::make_shared<ParabolaPickResult>(pickVariant); }
|
||||
PickResultPointer getEntityIntersection(const PickParabola& pick) override;
|
||||
|
@ -86,13 +86,11 @@ public:
|
|||
Transform getResultTransform() const override;
|
||||
|
||||
protected:
|
||||
float _speed;
|
||||
glm::vec3 _accelerationAxis;
|
||||
bool _rotateAccelerationWithAvatar;
|
||||
bool _scaleWithAvatar;
|
||||
|
||||
float getSpeed() const;
|
||||
glm::vec3 getAcceleration() const;
|
||||
bool _rotateAccelerationWithParent;
|
||||
bool _scaleWithParent;
|
||||
// Cached magnitude of _mathPick.velocity
|
||||
float _speed;
|
||||
};
|
||||
|
||||
#endif // hifi_ParabolaPick_h
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
const glm::vec4 ParabolaPointer::RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_COLOR { 1.0f };
|
||||
const float ParabolaPointer::RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_WIDTH { 0.01f };
|
||||
const bool ParabolaPointer::RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA { false };
|
||||
const bool ParabolaPointer::RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_DRAWINFRONT { false };
|
||||
|
||||
gpu::PipelinePointer ParabolaPointer::RenderState::ParabolaRenderItem::_parabolaPipeline { nullptr };
|
||||
gpu::PipelinePointer ParabolaPointer::RenderState::ParabolaRenderItem::_transparentParabolaPipeline { nullptr };
|
||||
|
@ -46,6 +47,7 @@ void ParabolaPointer::editRenderStatePath(const std::string& state, const QVaria
|
|||
float alpha = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_COLOR.a;
|
||||
float width = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_WIDTH;
|
||||
bool isVisibleInSecondaryCamera = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA;
|
||||
bool drawInFront = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_DRAWINFRONT;
|
||||
bool enabled = false;
|
||||
if (!pathMap.isEmpty()) {
|
||||
enabled = true;
|
||||
|
@ -63,8 +65,11 @@ void ParabolaPointer::editRenderStatePath(const std::string& state, const QVaria
|
|||
if (pathMap["isVisibleInSecondaryCamera"].isValid()) {
|
||||
isVisibleInSecondaryCamera = pathMap["isVisibleInSecondaryCamera"].toBool();
|
||||
}
|
||||
if (pathMap["drawInFront"].isValid()) {
|
||||
drawInFront = pathMap["drawInFront"].toBool();
|
||||
}
|
||||
}
|
||||
renderState->editParabola(color, alpha, width, isVisibleInSecondaryCamera, enabled);
|
||||
renderState->editParabola(color, alpha, width, isVisibleInSecondaryCamera, drawInFront, enabled);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -146,7 +151,7 @@ void ParabolaPointer::setVisualPickResultInternal(PickResultPointer pickResult,
|
|||
}
|
||||
|
||||
ParabolaPointer::RenderState::RenderState(const OverlayID& startID, const OverlayID& endID, const glm::vec3& pathColor, float pathAlpha, float pathWidth,
|
||||
bool isVisibleInSecondaryCamera, bool pathEnabled) :
|
||||
bool isVisibleInSecondaryCamera, bool drawInFront, bool pathEnabled) :
|
||||
StartEndRenderState(startID, endID)
|
||||
{
|
||||
render::Transaction transaction;
|
||||
|
@ -154,7 +159,7 @@ ParabolaPointer::RenderState::RenderState(const OverlayID& startID, const Overla
|
|||
_pathID = scene->allocateID();
|
||||
_pathWidth = pathWidth;
|
||||
if (render::Item::isValidID(_pathID)) {
|
||||
auto renderItem = std::make_shared<ParabolaRenderItem>(pathColor, pathAlpha, pathWidth, isVisibleInSecondaryCamera, pathEnabled);
|
||||
auto renderItem = std::make_shared<ParabolaRenderItem>(pathColor, pathAlpha, pathWidth, isVisibleInSecondaryCamera, drawInFront, pathEnabled);
|
||||
transaction.resetItem(_pathID, std::make_shared<ParabolaRenderItem::Payload>(renderItem));
|
||||
scene->enqueueTransaction(transaction);
|
||||
}
|
||||
|
@ -182,15 +187,16 @@ void ParabolaPointer::RenderState::disable() {
|
|||
}
|
||||
}
|
||||
|
||||
void ParabolaPointer::RenderState::editParabola(const glm::vec3& color, float alpha, float width, bool isVisibleInSecondaryCamera, bool enabled) {
|
||||
void ParabolaPointer::RenderState::editParabola(const glm::vec3& color, float alpha, float width, bool isVisibleInSecondaryCamera, bool drawInFront, bool enabled) {
|
||||
if (render::Item::isValidID(_pathID)) {
|
||||
render::Transaction transaction;
|
||||
auto scene = qApp->getMain3DScene();
|
||||
transaction.updateItem<ParabolaRenderItem>(_pathID, [color, alpha, width, isVisibleInSecondaryCamera, enabled](ParabolaRenderItem& item) {
|
||||
transaction.updateItem<ParabolaRenderItem>(_pathID, [color, alpha, width, isVisibleInSecondaryCamera, drawInFront, enabled](ParabolaRenderItem& item) {
|
||||
item.setColor(color);
|
||||
item.setAlpha(alpha);
|
||||
item.setWidth(width);
|
||||
item.setIsVisibleInSecondaryCamera(isVisibleInSecondaryCamera);
|
||||
item.setDrawInFront(drawInFront);
|
||||
item.setEnabled(enabled);
|
||||
item.updateKey();
|
||||
});
|
||||
|
@ -198,9 +204,9 @@ void ParabolaPointer::RenderState::editParabola(const glm::vec3& color, float al
|
|||
}
|
||||
}
|
||||
|
||||
void ParabolaPointer::RenderState::update(const glm::vec3& origin, const glm::vec3& end, const glm::vec3& surfaceNormal, bool scaleWithAvatar, bool distanceScaleEnd, bool centerEndY,
|
||||
void ParabolaPointer::RenderState::update(const glm::vec3& origin, const glm::vec3& end, const glm::vec3& surfaceNormal, float parentScale, bool distanceScaleEnd, bool centerEndY,
|
||||
bool faceAvatar, bool followNormal, float followNormalStrength, float distance, const PickResultPointer& pickResult) {
|
||||
StartEndRenderState::update(origin, end, surfaceNormal, scaleWithAvatar, distanceScaleEnd, centerEndY, faceAvatar, followNormal, followNormalStrength, distance, pickResult);
|
||||
StartEndRenderState::update(origin, end, surfaceNormal, parentScale, distanceScaleEnd, centerEndY, faceAvatar, followNormal, followNormalStrength, distance, pickResult);
|
||||
auto parabolaPickResult = std::static_pointer_cast<ParabolaPickResult>(pickResult);
|
||||
if (parabolaPickResult && render::Item::isValidID(_pathID)) {
|
||||
render::Transaction transaction;
|
||||
|
@ -210,7 +216,7 @@ void ParabolaPointer::RenderState::update(const glm::vec3& origin, const glm::ve
|
|||
glm::vec3 velocity = parabola.velocity;
|
||||
glm::vec3 acceleration = parabola.acceleration;
|
||||
float parabolicDistance = distance > 0.0f ? distance : parabolaPickResult->parabolicDistance;
|
||||
float width = scaleWithAvatar ? getPathWidth() * DependencyManager::get<AvatarManager>()->getMyAvatar()->getSensorToWorldScale() : getPathWidth();
|
||||
float width = getPathWidth() * parentScale;
|
||||
transaction.updateItem<ParabolaRenderItem>(_pathID, [origin, velocity, acceleration, parabolicDistance, width](ParabolaRenderItem& item) {
|
||||
item.setVisible(true);
|
||||
item.setOrigin(origin);
|
||||
|
@ -238,6 +244,7 @@ std::shared_ptr<StartEndRenderState> ParabolaPointer::buildRenderState(const QVa
|
|||
float alpha = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_COLOR.a;
|
||||
float width = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_WIDTH;
|
||||
bool isVisibleInSecondaryCamera = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA;
|
||||
bool drawInFront = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_DRAWINFRONT;
|
||||
bool enabled = false;
|
||||
if (propMap["path"].isValid()) {
|
||||
enabled = true;
|
||||
|
@ -258,6 +265,10 @@ std::shared_ptr<StartEndRenderState> ParabolaPointer::buildRenderState(const QVa
|
|||
if (pathMap["isVisibleInSecondaryCamera"].isValid()) {
|
||||
isVisibleInSecondaryCamera = pathMap["isVisibleInSecondaryCamera"].toBool();
|
||||
}
|
||||
|
||||
if (pathMap["drawInFront"].isValid()) {
|
||||
drawInFront = pathMap["drawInFront"].toBool();
|
||||
}
|
||||
}
|
||||
|
||||
QUuid endID;
|
||||
|
@ -269,7 +280,7 @@ std::shared_ptr<StartEndRenderState> ParabolaPointer::buildRenderState(const QVa
|
|||
}
|
||||
}
|
||||
|
||||
return std::make_shared<RenderState>(startID, endID, color, alpha, width, isVisibleInSecondaryCamera, enabled);
|
||||
return std::make_shared<RenderState>(startID, endID, color, alpha, width, isVisibleInSecondaryCamera, drawInFront, enabled);
|
||||
}
|
||||
|
||||
PointerEvent ParabolaPointer::buildPointerEvent(const PickedObject& target, const PickResultPointer& pickResult, const std::string& button, bool hover) {
|
||||
|
@ -321,8 +332,8 @@ glm::vec3 ParabolaPointer::findIntersection(const PickedObject& pickedObject, co
|
|||
}
|
||||
|
||||
ParabolaPointer::RenderState::ParabolaRenderItem::ParabolaRenderItem(const glm::vec3& color, float alpha, float width,
|
||||
bool isVisibleInSecondaryCamera, bool enabled) :
|
||||
_isVisibleInSecondaryCamera(isVisibleInSecondaryCamera), _enabled(enabled)
|
||||
bool isVisibleInSecondaryCamera, bool drawInFront, bool enabled) :
|
||||
_isVisibleInSecondaryCamera(isVisibleInSecondaryCamera), _drawInFront(drawInFront), _enabled(enabled)
|
||||
{
|
||||
_uniformBuffer->resize(sizeof(ParabolaData));
|
||||
setColor(color);
|
||||
|
@ -358,6 +369,10 @@ void ParabolaPointer::RenderState::ParabolaRenderItem::updateKey() {
|
|||
builder.withTagBits(render::hifi::TAG_MAIN_VIEW);
|
||||
}
|
||||
|
||||
if (_drawInFront) {
|
||||
builder.withLayer(render::hifi::LAYER_3D_FRONT);
|
||||
}
|
||||
|
||||
_key = builder.build();
|
||||
}
|
||||
|
||||
|
|
|
@ -21,7 +21,7 @@ public:
|
|||
using Pointer = Payload::DataPointer;
|
||||
|
||||
ParabolaRenderItem(const glm::vec3& color, float alpha, float width,
|
||||
bool isVisibleInSecondaryCamera, bool enabled);
|
||||
bool isVisibleInSecondaryCamera, bool drawInFront, bool enabled);
|
||||
~ParabolaRenderItem() {}
|
||||
|
||||
static gpu::PipelinePointer _parabolaPipeline;
|
||||
|
@ -46,11 +46,13 @@ public:
|
|||
void setAcceleration(const glm::vec3& acceleration) { _parabolaData.acceleration = acceleration; }
|
||||
void setOrigin(const glm::vec3& origin) { _origin = origin; }
|
||||
void setIsVisibleInSecondaryCamera(const bool& isVisibleInSecondaryCamera) { _isVisibleInSecondaryCamera = isVisibleInSecondaryCamera; }
|
||||
void setDrawInFront(const bool& drawInFront) { _drawInFront = drawInFront; }
|
||||
void setEnabled(const bool& enabled) { _enabled = enabled; }
|
||||
|
||||
static const glm::vec4 DEFAULT_PARABOLA_COLOR;
|
||||
static const float DEFAULT_PARABOLA_WIDTH;
|
||||
static const bool DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA;
|
||||
static const bool DEFAULT_PARABOLA_DRAWINFRONT;
|
||||
|
||||
private:
|
||||
render::Item::Bound _bound;
|
||||
|
@ -58,6 +60,7 @@ public:
|
|||
|
||||
glm::vec3 _origin { 0.0f };
|
||||
bool _isVisibleInSecondaryCamera { DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA };
|
||||
bool _drawInFront { DEFAULT_PARABOLA_DRAWINFRONT };
|
||||
bool _visible { false };
|
||||
bool _enabled { false };
|
||||
|
||||
|
@ -76,18 +79,18 @@ public:
|
|||
};
|
||||
|
||||
RenderState() {}
|
||||
RenderState(const OverlayID& startID, const OverlayID& endID, const glm::vec3& pathColor, float pathAlpha, float pathWidth,
|
||||
bool isVisibleInSecondaryCamera, bool pathEnabled);
|
||||
RenderState(const OverlayID& startID, const OverlayID& endID, const glm::vec3& pathColor, float pathAlpha, float parentScale,
|
||||
bool isVisibleInSecondaryCamera, bool drawInFront, bool pathEnabled);
|
||||
|
||||
void setPathWidth(float width) { _pathWidth = width; }
|
||||
float getPathWidth() const { return _pathWidth; }
|
||||
|
||||
void cleanup() override;
|
||||
void disable() override;
|
||||
void update(const glm::vec3& origin, const glm::vec3& end, const glm::vec3& surfaceNormal, bool scaleWithAvatar, bool distanceScaleEnd, bool centerEndY,
|
||||
void update(const glm::vec3& origin, const glm::vec3& end, const glm::vec3& surfaceNormal, float parentScale, bool distanceScaleEnd, bool centerEndY,
|
||||
bool faceAvatar, bool followNormal, float followNormalStrength, float distance, const PickResultPointer& pickResult) override;
|
||||
|
||||
void editParabola(const glm::vec3& color, float alpha, float width, bool isVisibleInSecondaryCamera, bool enabled);
|
||||
void editParabola(const glm::vec3& color, float alpha, float width, bool isVisibleInSecondaryCamera, bool drawInFront, bool enabled);
|
||||
|
||||
private:
|
||||
int _pathID;
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
|
||||
PathPointer::PathPointer(PickQuery::PickType type, const QVariant& rayProps, const RenderStateMap& renderStates, const DefaultRenderStateMap& defaultRenderStates,
|
||||
bool hover, const PointerTriggers& triggers, bool faceAvatar, bool followNormal, float followNormalStrength, bool centerEndY, bool lockEnd,
|
||||
bool distanceScaleEnd, bool scaleWithAvatar, bool enabled) :
|
||||
bool distanceScaleEnd, bool scaleWithParent, bool enabled) :
|
||||
Pointer(DependencyManager::get<PickScriptingInterface>()->createPick(type, rayProps), enabled, hover),
|
||||
_renderStates(renderStates),
|
||||
_defaultRenderStates(defaultRenderStates),
|
||||
|
@ -28,7 +28,7 @@ PathPointer::PathPointer(PickQuery::PickType type, const QVariant& rayProps, con
|
|||
_centerEndY(centerEndY),
|
||||
_lockEnd(lockEnd),
|
||||
_distanceScaleEnd(distanceScaleEnd),
|
||||
_scaleWithAvatar(scaleWithAvatar)
|
||||
_scaleWithParent(scaleWithParent)
|
||||
{
|
||||
for (auto& state : _renderStates) {
|
||||
if (!enabled || state.first != _currentRenderState) {
|
||||
|
@ -146,12 +146,18 @@ void PathPointer::updateVisuals(const PickResultPointer& pickResult) {
|
|||
IntersectionType type = getPickedObjectType(pickResult);
|
||||
auto renderState = _renderStates.find(_currentRenderState);
|
||||
auto defaultRenderState = _defaultRenderStates.find(_currentRenderState);
|
||||
float parentScale = 1.0f;
|
||||
if (_enabled && _scaleWithParent) {
|
||||
glm::vec3 dimensions = DependencyManager::get<PickManager>()->getParentTransform(_pickUID).getScale();
|
||||
parentScale = glm::max(glm::max(dimensions.x, dimensions.y), dimensions.z);
|
||||
}
|
||||
|
||||
if (_enabled && !_currentRenderState.empty() && renderState != _renderStates.end() &&
|
||||
(type != IntersectionType::NONE || _pathLength > 0.0f)) {
|
||||
glm::vec3 origin = getPickOrigin(pickResult);
|
||||
glm::vec3 end = getPickEnd(pickResult, _pathLength);
|
||||
glm::vec3 surfaceNormal = getPickedObjectNormal(pickResult);
|
||||
renderState->second->update(origin, end, surfaceNormal, _scaleWithAvatar, _distanceScaleEnd, _centerEndY, _faceAvatar,
|
||||
renderState->second->update(origin, end, surfaceNormal, parentScale, _distanceScaleEnd, _centerEndY, _faceAvatar,
|
||||
_followNormal, _followNormalStrength, _pathLength, pickResult);
|
||||
if (defaultRenderState != _defaultRenderStates.end() && defaultRenderState->second.second->isEnabled()) {
|
||||
defaultRenderState->second.second->disable();
|
||||
|
@ -162,7 +168,7 @@ void PathPointer::updateVisuals(const PickResultPointer& pickResult) {
|
|||
}
|
||||
glm::vec3 origin = getPickOrigin(pickResult);
|
||||
glm::vec3 end = getPickEnd(pickResult, defaultRenderState->second.first);
|
||||
defaultRenderState->second.second->update(origin, end, Vectors::UP, _scaleWithAvatar, _distanceScaleEnd, _centerEndY,
|
||||
defaultRenderState->second.second->update(origin, end, Vectors::UP, parentScale, _distanceScaleEnd, _centerEndY,
|
||||
_faceAvatar, _followNormal, _followNormalStrength, defaultRenderState->second.first, pickResult);
|
||||
} else if (!_currentRenderState.empty()) {
|
||||
if (renderState != _renderStates.end() && renderState->second->isEnabled()) {
|
||||
|
@ -281,15 +287,13 @@ void StartEndRenderState::disable() {
|
|||
_enabled = false;
|
||||
}
|
||||
|
||||
void StartEndRenderState::update(const glm::vec3& origin, const glm::vec3& end, const glm::vec3& surfaceNormal, bool scaleWithAvatar, bool distanceScaleEnd, bool centerEndY,
|
||||
void StartEndRenderState::update(const glm::vec3& origin, const glm::vec3& end, const glm::vec3& surfaceNormal, float parentScale, bool distanceScaleEnd, bool centerEndY,
|
||||
bool faceAvatar, bool followNormal, float followNormalStrength, float distance, const PickResultPointer& pickResult) {
|
||||
if (!getStartID().isNull()) {
|
||||
QVariantMap startProps;
|
||||
startProps.insert("position", vec3toVariant(origin));
|
||||
startProps.insert("visible", true);
|
||||
if (scaleWithAvatar) {
|
||||
startProps.insert("dimensions", vec3toVariant(getStartDim() * DependencyManager::get<AvatarManager>()->getMyAvatar()->getSensorToWorldScale()));
|
||||
}
|
||||
startProps.insert("dimensions", vec3toVariant(getStartDim() * parentScale));
|
||||
startProps.insert("ignoreRayIntersection", doesStartIgnoreRays());
|
||||
qApp->getOverlays().editOverlay(getStartID(), startProps);
|
||||
}
|
||||
|
@ -300,8 +304,8 @@ void StartEndRenderState::update(const glm::vec3& origin, const glm::vec3& end,
|
|||
if (distanceScaleEnd) {
|
||||
dim = getEndDim() * glm::distance(origin, end);
|
||||
endProps.insert("dimensions", vec3toVariant(dim));
|
||||
} else if (scaleWithAvatar) {
|
||||
dim = getEndDim() * DependencyManager::get<AvatarManager>()->getMyAvatar()->getSensorToWorldScale();
|
||||
} else {
|
||||
dim = getEndDim() * parentScale;
|
||||
endProps.insert("dimensions", vec3toVariant(dim));
|
||||
}
|
||||
|
||||
|
|
|
@ -27,6 +27,7 @@ class StartEndRenderState {
|
|||
public:
|
||||
StartEndRenderState() {}
|
||||
StartEndRenderState(const OverlayID& startID, const OverlayID& endID);
|
||||
virtual ~StartEndRenderState() {}
|
||||
|
||||
const OverlayID& getStartID() const { return _startID; }
|
||||
const OverlayID& getEndID() const { return _endID; }
|
||||
|
@ -44,7 +45,7 @@ public:
|
|||
|
||||
virtual void cleanup();
|
||||
virtual void disable();
|
||||
virtual void update(const glm::vec3& origin, const glm::vec3& end, const glm::vec3& surfaceNormal, bool scaleWithAvatar, bool distanceScaleEnd, bool centerEndY,
|
||||
virtual void update(const glm::vec3& origin, const glm::vec3& end, const glm::vec3& surfaceNormal, float parentScale, bool distanceScaleEnd, bool centerEndY,
|
||||
bool faceAvatar, bool followNormal, float followNormalStrength, float distance, const PickResultPointer& pickResult);
|
||||
|
||||
bool isEnabled() const { return _enabled; }
|
||||
|
@ -73,7 +74,7 @@ class PathPointer : public Pointer {
|
|||
public:
|
||||
PathPointer(PickQuery::PickType type, const QVariant& rayProps, const RenderStateMap& renderStates, const DefaultRenderStateMap& defaultRenderStates,
|
||||
bool hover, const PointerTriggers& triggers, bool faceAvatar, bool followNormal, float followNormalStrength, bool centerEndY, bool lockEnd,
|
||||
bool distanceScaleEnd, bool scaleWithAvatar, bool enabled);
|
||||
bool distanceScaleEnd, bool scaleWithParent, bool enabled);
|
||||
virtual ~PathPointer();
|
||||
|
||||
void setRenderState(const std::string& state) override;
|
||||
|
@ -97,7 +98,7 @@ protected:
|
|||
bool _centerEndY;
|
||||
bool _lockEnd;
|
||||
bool _distanceScaleEnd;
|
||||
bool _scaleWithAvatar;
|
||||
bool _scaleWithParent;
|
||||
LockEndObject _lockEndObject;
|
||||
|
||||
struct TriggerState {
|
||||
|
|
|
@ -14,13 +14,9 @@
|
|||
#include "Application.h"
|
||||
#include <PickManager.h>
|
||||
|
||||
#include "StaticRayPick.h"
|
||||
#include "JointRayPick.h"
|
||||
#include "MouseRayPick.h"
|
||||
#include "RayPick.h"
|
||||
#include "StylusPick.h"
|
||||
#include "StaticParabolaPick.h"
|
||||
#include "JointParabolaPick.h"
|
||||
#include "MouseParabolaPick.h"
|
||||
#include "ParabolaPick.h"
|
||||
#include "CollisionPick.h"
|
||||
|
||||
#include "SpatialParentFinder.h"
|
||||
|
@ -56,9 +52,9 @@ unsigned int PickScriptingInterface::createPick(const PickQuery::PickType type,
|
|||
* @property {boolean} [enabled=false] If this Pick should start enabled or not. Disabled Picks do not update their pick results.
|
||||
* @property {number} [filter=Picks.PICK_NOTHING] The filter for this Pick to use, constructed using filter flags combined using bitwise OR.
|
||||
* @property {number} [maxDistance=0.0] The max distance at which this Pick will intersect. 0.0 = no max. < 0.0 is invalid.
|
||||
* @property {string} [joint] Only for Joint or Mouse Ray Picks. If "Mouse", it will create a Ray Pick that follows the system mouse, in desktop or HMD.
|
||||
* If "Avatar", it will create a Joint Ray Pick that follows your avatar's head. Otherwise, it will create a Joint Ray Pick that follows the given joint, if it
|
||||
* exists on your current avatar.
|
||||
* @property {Uuid} parentID - The ID of the parent, either an avatar, an entity, an overlay, or a pick.
|
||||
* @property {number} [parentJointIndex=0] - The joint of the parent to parent to, for example, the joints on the model of an avatar. (default = 0, no joint)
|
||||
* @property {string} joint - If "Mouse," parents the pick to the mouse. If "Avatar," parents the pick to MyAvatar's head. Otherwise, parents to the joint of the given name on MyAvatar.
|
||||
* @property {Vec3} [posOffset=Vec3.ZERO] Only for Joint Ray Picks. A local joint position offset, in meters. x = upward, y = forward, z = lateral
|
||||
* @property {Vec3} [dirOffset=Vec3.UP] Only for Joint Ray Picks. A local joint direction offset. x = upward, y = forward, z = lateral
|
||||
* @property {Vec3} [position] Only for Static Ray Picks. The world-space origin of the ray.
|
||||
|
@ -82,38 +78,29 @@ unsigned int PickScriptingInterface::createRayPick(const QVariant& properties) {
|
|||
maxDistance = propMap["maxDistance"].toFloat();
|
||||
}
|
||||
|
||||
if (propMap["joint"].isValid()) {
|
||||
std::string jointName = propMap["joint"].toString().toStdString();
|
||||
|
||||
if (jointName != "Mouse") {
|
||||
// x = upward, y = forward, z = lateral
|
||||
glm::vec3 posOffset = Vectors::ZERO;
|
||||
if (propMap["posOffset"].isValid()) {
|
||||
posOffset = vec3FromVariant(propMap["posOffset"]);
|
||||
}
|
||||
|
||||
glm::vec3 dirOffset = Vectors::UP;
|
||||
if (propMap["dirOffset"].isValid()) {
|
||||
dirOffset = vec3FromVariant(propMap["dirOffset"]);
|
||||
}
|
||||
|
||||
return DependencyManager::get<PickManager>()->addPick(PickQuery::Ray, std::make_shared<JointRayPick>(jointName, posOffset, dirOffset, filter, maxDistance, enabled));
|
||||
|
||||
} else {
|
||||
return DependencyManager::get<PickManager>()->addPick(PickQuery::Ray, std::make_shared<MouseRayPick>(filter, maxDistance, enabled));
|
||||
}
|
||||
} else if (propMap["position"].isValid()) {
|
||||
glm::vec3 position = vec3FromVariant(propMap["position"]);
|
||||
|
||||
glm::vec3 direction = -Vectors::UP;
|
||||
if (propMap["direction"].isValid()) {
|
||||
direction = vec3FromVariant(propMap["direction"]);
|
||||
}
|
||||
|
||||
return DependencyManager::get<PickManager>()->addPick(PickQuery::Ray, std::make_shared<StaticRayPick>(position, direction, filter, maxDistance, enabled));
|
||||
glm::vec3 position = Vectors::ZERO;
|
||||
if (propMap["position"].isValid()) {
|
||||
position = vec3FromVariant(propMap["position"]);
|
||||
} else if (propMap["posOffset"].isValid()) {
|
||||
position = vec3FromVariant(propMap["posOffset"]);
|
||||
}
|
||||
|
||||
return PickManager::INVALID_PICK_ID;
|
||||
// direction has two defaults to ensure compatibility with older scripts
|
||||
// Joint ray picks had default direction = Vec3.UP
|
||||
// Static ray picks had default direction = -Vec3.UP
|
||||
glm::vec3 direction = propMap["joint"].isValid() ? Vectors::UP : -Vectors::UP;
|
||||
if (propMap["orientation"].isValid()) {
|
||||
direction = quatFromVariant(propMap["orientation"]) * Vectors::UP;
|
||||
} else if (propMap["direction"].isValid()) {
|
||||
direction = vec3FromVariant(propMap["direction"]);
|
||||
} else if (propMap["dirOffset"].isValid()) {
|
||||
direction = vec3FromVariant(propMap["dirOffset"]);
|
||||
}
|
||||
|
||||
auto rayPick = std::make_shared<RayPick>(position, direction, filter, maxDistance, enabled);
|
||||
setParentTransform(rayPick, propMap);
|
||||
|
||||
return DependencyManager::get<PickManager>()->addPick(PickQuery::Ray, rayPick);
|
||||
}
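To illustrate the unified ray pick creation path above from a script, here is a minimal sketch, assuming the standard Picks scripting interface described in the JSDoc; the joint name, offsets, and distances are illustrative and not taken from this change:

// Sketch: a ray pick parented to MyAvatar's RightHand joint, using the
// Picks.RayPickProperties documented above (joint, posOffset, dirOffset, filter).
var handRay = Picks.createPick(PickType.Ray, {
    joint: "RightHand",                 // parents the pick to this joint on MyAvatar
    posOffset: { x: 0, y: 0.1, z: 0 },  // local position offset, in meters
    dirOffset: { x: 0, y: 1, z: 0 },    // local direction offset (defaults to Vec3.UP)
    filter: Picks.PICK_ENTITIES,
    maxDistance: 10.0,
    enabled: true
});

Script.update.connect(function () {
    var result = Picks.getPrevPickResult(handRay);
    if (result.intersects) {
        print("Ray hit " + result.objectID);
    }
});

Script.scriptEnding.connect(function () {
    Picks.removePick(handRay);
});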
|
||||
|
||||
/**jsdoc
|
||||
|
@ -153,23 +140,25 @@ unsigned int PickScriptingInterface::createStylusPick(const QVariant& properties
|
|||
return DependencyManager::get<PickManager>()->addPick(PickQuery::Stylus, std::make_shared<StylusPick>(side, filter, maxDistance, enabled));
|
||||
}
|
||||
|
||||
// NOTE: Laser pointer still uses scaleWithAvatar. Until scaleWithAvatar is also deprecated for pointers, scaleWithAvatar should not be removed from the pick API.
|
||||
/**jsdoc
|
||||
* A set of properties that can be passed to {@link Picks.createPick} to create a new Parabola Pick.
|
||||
* @typedef {object} Picks.ParabolaPickProperties
|
||||
* @property {boolean} [enabled=false] If this Pick should start enabled or not. Disabled Picks do not update their pick results.
|
||||
* @property {number} [filter=Picks.PICK_NOTHING] The filter for this Pick to use, constructed using filter flags combined using bitwise OR.
|
||||
* @property {number} [maxDistance=0.0] The max distance at which this Pick will intersect. 0.0 = no max. < 0.0 is invalid.
|
||||
* @property {string} [joint] Only for Joint or Mouse Parabola Picks. If "Mouse", it will create a Parabola Pick that follows the system mouse, in desktop or HMD.
|
||||
* If "Avatar", it will create a Joint Parabola Pick that follows your avatar's head. Otherwise, it will create a Joint Parabola Pick that follows the given joint, if it
|
||||
* exists on your current avatar.
|
||||
* @property {Uuid} parentID - The ID of the parent, either an avatar, an entity, an overlay, or a pick.
|
||||
* @property {number} [parentJointIndex=0] - The joint of the parent to parent to, for example, the joints on the model of an avatar. (default = 0, no joint)
|
||||
* @property {string} joint - If "Mouse," parents the pick to the mouse. If "Avatar," parents the pick to MyAvatar's head. Otherwise, parents to the joint of the given name on MyAvatar.
|
||||
* @property {Vec3} [posOffset=Vec3.ZERO] Only for Joint Parabola Picks. A local joint position offset, in meters. x = upward, y = forward, z = lateral
|
||||
* @property {Vec3} [dirOffset=Vec3.UP] Only for Joint Parabola Picks. A local joint direction offset. x = upward, y = forward, z = lateral
|
||||
* @property {Vec3} [position] Only for Static Parabola Picks. The world-space origin of the parabola segment.
|
||||
* @property {Vec3} [direction=-Vec3.FRONT] Only for Static Parabola Picks. The world-space direction of the parabola segment.
|
||||
* @property {number} [speed=1] The initial speed of the parabola, i.e. the initial speed of the projectile whose trajectory defines the parabola.
|
||||
* @property {Vec3} [accelerationAxis=-Vec3.UP] The acceleration of the parabola, i.e. the acceleration of the projectile whose trajectory defines the parabola, both magnitude and direction.
|
||||
* @property {boolean} [rotateAccelerationWithAvatar=true] Whether or not the acceleration axis should rotate with your avatar's local Y axis.
|
||||
* @property {boolean} [scaleWithAvatar=false] If true, the velocity and acceleration of the Pick will scale linearly with your avatar.
|
||||
* @property {boolean} [rotateAccelerationWithAvatar=true] Whether or not the acceleration axis should rotate with the avatar's local Y axis.
|
||||
* @property {boolean} [rotateAccelerationWithParent=false] Whether or not the acceleration axis should rotate with the parent's local Y axis, if available.
|
||||
* @property {boolean} [scaleWithParent=true] If true, the velocity and acceleration of the Pick will scale linearly with the parent, if available. scaleWithAvatar is an alias but is deprecated.
|
||||
*/
|
||||
unsigned int PickScriptingInterface::createParabolaPick(const QVariant& properties) {
|
||||
QVariantMap propMap = properties.toMap();
|
||||
|
@ -204,48 +193,37 @@ unsigned int PickScriptingInterface::createParabolaPick(const QVariant& properti
|
|||
rotateAccelerationWithAvatar = propMap["rotateAccelerationWithAvatar"].toBool();
|
||||
}
|
||||
|
||||
bool scaleWithAvatar = false;
|
||||
if (propMap["scaleWithAvatar"].isValid()) {
|
||||
scaleWithAvatar = propMap["scaleWithAvatar"].toBool();
|
||||
bool rotateAccelerationWithParent = false;
|
||||
if (propMap["rotateAccelerationWithParent"].isValid()) {
|
||||
rotateAccelerationWithParent = propMap["rotateAccelerationWithParent"].toBool();
|
||||
}
|
||||
|
||||
if (propMap["joint"].isValid()) {
|
||||
std::string jointName = propMap["joint"].toString().toStdString();
|
||||
|
||||
if (jointName != "Mouse") {
|
||||
// x = upward, y = forward, z = lateral
|
||||
glm::vec3 posOffset = Vectors::ZERO;
|
||||
if (propMap["posOffset"].isValid()) {
|
||||
posOffset = vec3FromVariant(propMap["posOffset"]);
|
||||
}
|
||||
|
||||
glm::vec3 dirOffset = Vectors::UP;
|
||||
if (propMap["dirOffset"].isValid()) {
|
||||
dirOffset = vec3FromVariant(propMap["dirOffset"]);
|
||||
}
|
||||
|
||||
return DependencyManager::get<PickManager>()->addPick(PickQuery::Parabola, std::make_shared<JointParabolaPick>(jointName, posOffset, dirOffset,
|
||||
speed, accelerationAxis, rotateAccelerationWithAvatar,
|
||||
scaleWithAvatar, filter, maxDistance, enabled));
|
||||
|
||||
} else {
|
||||
return DependencyManager::get<PickManager>()->addPick(PickQuery::Parabola, std::make_shared<MouseParabolaPick>(speed, accelerationAxis, rotateAccelerationWithAvatar,
|
||||
scaleWithAvatar, filter, maxDistance, enabled));
|
||||
}
|
||||
} else if (propMap["position"].isValid()) {
|
||||
glm::vec3 position = vec3FromVariant(propMap["position"]);
|
||||
|
||||
glm::vec3 direction = -Vectors::FRONT;
|
||||
if (propMap["direction"].isValid()) {
|
||||
direction = vec3FromVariant(propMap["direction"]);
|
||||
}
|
||||
|
||||
return DependencyManager::get<PickManager>()->addPick(PickQuery::Parabola, std::make_shared<StaticParabolaPick>(position, direction, speed, accelerationAxis,
|
||||
rotateAccelerationWithAvatar, scaleWithAvatar,
|
||||
filter, maxDistance, enabled));
|
||||
bool scaleWithParent = true;
|
||||
if (propMap["scaleWithParent"].isValid()) {
|
||||
scaleWithParent = propMap["scaleWithParent"].toBool();
|
||||
} else if (propMap["scaleWithAvatar"].isValid()) {
|
||||
scaleWithParent = propMap["scaleWithAvatar"].toBool();
|
||||
}
|
||||
|
||||
return PickManager::INVALID_PICK_ID;
|
||||
glm::vec3 position = Vectors::ZERO;
|
||||
glm::vec3 direction = propMap["joint"].isValid() ? Vectors::UP : -Vectors::FRONT;
|
||||
if (propMap["position"].isValid()) {
|
||||
position = vec3FromVariant(propMap["position"]);
|
||||
} else if (propMap["posOffset"].isValid()) {
|
||||
position = vec3FromVariant(propMap["posOffset"]);
|
||||
}
|
||||
if (propMap["orientation"].isValid()) {
|
||||
direction = quatFromVariant(propMap["orientation"]) * Vectors::UP;
|
||||
} else if (propMap["direction"].isValid()) {
|
||||
direction = vec3FromVariant(propMap["direction"]);
|
||||
} else if (propMap["dirOffset"].isValid()) {
|
||||
direction = vec3FromVariant(propMap["dirOffset"]);
|
||||
}
|
||||
|
||||
auto parabolaPick = std::make_shared<ParabolaPick>(position, direction, speed, accelerationAxis,
|
||||
rotateAccelerationWithAvatar, rotateAccelerationWithParent, scaleWithParent, filter, maxDistance, enabled);
|
||||
setParentTransform(parabolaPick, propMap);
|
||||
return DependencyManager::get<PickManager>()->addPick(PickQuery::Parabola, parabolaPick);
|
||||
}
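A minimal script-side sketch of the parabola pick path above, assuming the Picks interface documented earlier; the entity ID is a placeholder. It shows scaleWithParent and rotateAccelerationWithParent, which supersede the deprecated scaleWithAvatar alias:

// Sketch: a parabola pick parented to an entity, scaling and rotating with that parent.
var teleportPick = Picks.createPick(PickType.Parabola, {
    parentID: targetEntityID,            // hypothetical entity ID obtained elsewhere in the script
    speed: 7.0,                          // initial speed of the projectile defining the parabola
    accelerationAxis: { x: 0, y: -9.8, z: 0 },
    rotateAccelerationWithParent: true,  // acceleration follows the parent's local Y axis
    scaleWithParent: true,               // velocity and acceleration scale with the parent
    filter: Picks.PICK_ENTITIES,
    enabled: true
});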
|
||||
|
||||
/**jsdoc
|
||||
|
@ -272,9 +250,10 @@ unsigned int PickScriptingInterface::createParabolaPick(const QVariant& properti
|
|||
* The depth is measured in world space, but will scale with the parent if defined.
|
||||
* @property {CollisionMask} [collisionGroup=8] - The type of object this collision pick collides as. Objects whose collision masks overlap with the pick's collision group
|
||||
* will be considered colliding with the pick.
|
||||
* @property {Uuid} parentID - The ID of the parent, either an avatar, an entity, or an overlay.
|
||||
* @property {number} parentJointIndex - The joint of the parent to parent to, for example, the joints on the model of an avatar. (default = 0, no joint)
|
||||
* @property {Uuid} parentID - The ID of the parent, either an avatar, an entity, an overlay, or a pick.
|
||||
* @property {number} [parentJointIndex=0] - The joint of the parent to parent to, for example, the joints on the model of an avatar. (default = 0, no joint)
|
||||
* @property {string} joint - If "Mouse," parents the pick to the mouse. If "Avatar," parents the pick to MyAvatar's head. Otherwise, parents to the joint of the given name on MyAvatar.
|
||||
* @property {boolean} [scaleWithParent=true] If true, the collision pick's dimensions and threshold will adjust according to the scale of the parent.
|
||||
*/
|
||||
unsigned int PickScriptingInterface::createCollisionPick(const QVariant& properties) {
|
||||
QVariantMap propMap = properties.toMap();
|
||||
|
@ -294,9 +273,14 @@ unsigned int PickScriptingInterface::createCollisionPick(const QVariant& propert
|
|||
maxDistance = propMap["maxDistance"].toFloat();
|
||||
}
|
||||
|
||||
bool scaleWithParent = true;
|
||||
if (propMap["scaleWithParent"].isValid()) {
|
||||
scaleWithParent = propMap["scaleWithParent"].toBool();
|
||||
}
|
||||
|
||||
CollisionRegion collisionRegion(propMap);
|
||||
auto collisionPick = std::make_shared<CollisionPick>(filter, maxDistance, enabled, collisionRegion, qApp->getPhysicsEngine());
|
||||
collisionPick->parentTransform = createTransformNode(propMap);
|
||||
auto collisionPick = std::make_shared<CollisionPick>(filter, maxDistance, enabled, scaleWithParent, collisionRegion, qApp->getPhysicsEngine());
|
||||
setParentTransform(collisionPick, propMap);
|
||||
|
||||
return DependencyManager::get<PickManager>()->addPick(PickQuery::Collision, collisionPick);
|
||||
}
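A hedged sketch of creating a collision pick with the new scaleWithParent flag, assuming the Picks.CollisionPickProperties documented above; the shape, joint, and threshold values are illustrative:

// Sketch: a sphere-shaped collision region parented to MyAvatar's right hand.
var grabRegion = Picks.createPick(PickType.Collision, {
    shape: { shapeType: "sphere", dimensions: { x: 0.2, y: 0.2, z: 0.2 } },
    parentID: MyAvatar.sessionUUID,
    parentJointIndex: MyAvatar.getJointIndex("RightHand"),
    scaleWithParent: true,              // dimensions and threshold follow the parent's scale
    filter: Picks.PICK_ENTITIES,
    enabled: true
});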
|
||||
|
@ -373,51 +357,63 @@ void PickScriptingInterface::setPerFrameTimeBudget(unsigned int numUsecs) {
|
|||
DependencyManager::get<PickManager>()->setPerFrameTimeBudget(numUsecs);
|
||||
}
|
||||
|
||||
std::shared_ptr<TransformNode> PickScriptingInterface::createTransformNode(const QVariantMap& propMap) {
|
||||
if (propMap["parentID"].isValid()) {
|
||||
QUuid parentUuid = propMap["parentID"].toUuid();
|
||||
if (!parentUuid.isNull()) {
|
||||
// Infer object type from parentID
|
||||
// For now, assume a QUuid is a SpatiallyNestable. This should change when picks are converted over to QUuids.
|
||||
bool success;
|
||||
std::weak_ptr<SpatiallyNestable> nestablePointer = DependencyManager::get<SpatialParentFinder>()->find(parentUuid, success, nullptr);
|
||||
int parentJointIndex = 0;
|
||||
if (propMap["parentJointIndex"].isValid()) {
|
||||
parentJointIndex = propMap["parentJointIndex"].toInt();
|
||||
}
|
||||
auto sharedNestablePointer = nestablePointer.lock();
|
||||
if (success && sharedNestablePointer) {
|
||||
NestableType nestableType = sharedNestablePointer->getNestableType();
|
||||
if (nestableType == NestableType::Avatar) {
|
||||
return std::make_shared<AvatarTransformNode>(std::static_pointer_cast<Avatar>(sharedNestablePointer), parentJointIndex);
|
||||
} else if (nestableType == NestableType::Overlay) {
|
||||
return std::make_shared<OverlayTransformNode>(std::static_pointer_cast<Base3DOverlay>(sharedNestablePointer), parentJointIndex);
|
||||
} else if (nestableType == NestableType::Entity) {
|
||||
return std::make_shared<EntityTransformNode>(std::static_pointer_cast<EntityItem>(sharedNestablePointer), parentJointIndex);
|
||||
} else {
|
||||
return std::make_shared<NestableTransformNode>(nestablePointer, parentJointIndex);
|
||||
}
|
||||
}
|
||||
}
|
||||
void PickScriptingInterface::setParentTransform(std::shared_ptr<PickQuery> pick, const QVariantMap& propMap) {
|
||||
QUuid parentUuid;
|
||||
int parentJointIndex = 0;
|
||||
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
|
||||
unsigned int pickID = propMap["parentID"].toUInt();
|
||||
if (pickID != 0) {
|
||||
return std::make_shared<PickTransformNode>(pickID);
|
||||
if (propMap["parentID"].isValid()) {
|
||||
parentUuid = propMap["parentID"].toUuid();
|
||||
if (propMap["parentJointIndex"].isValid()) {
|
||||
parentJointIndex = propMap["parentJointIndex"].toInt();
|
||||
}
|
||||
}
|
||||
|
||||
if (propMap["joint"].isValid()) {
|
||||
} else if (propMap["joint"].isValid()) {
|
||||
QString joint = propMap["joint"].toString();
|
||||
if (joint == "Mouse") {
|
||||
return std::make_shared<MouseTransformNode>();
|
||||
pick->parentTransform = std::make_shared<MouseTransformNode>();
|
||||
pick->setJointState(PickQuery::JOINT_STATE_MOUSE);
|
||||
return;
|
||||
} else if (joint == "Avatar") {
|
||||
return std::make_shared<MyAvatarHeadTransformNode>();
|
||||
} else if (!joint.isNull()) {
|
||||
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
int jointIndex = myAvatar->getJointIndex(joint);
|
||||
return std::make_shared<AvatarTransformNode>(myAvatar, jointIndex);
|
||||
pick->parentTransform = std::make_shared<MyAvatarHeadTransformNode>();
|
||||
return;
|
||||
} else {
|
||||
parentUuid = myAvatar->getSessionUUID();
|
||||
parentJointIndex = myAvatar->getJointIndex(joint);
|
||||
}
|
||||
}
|
||||
|
||||
return std::shared_ptr<TransformNode>();
|
||||
if (parentUuid == myAvatar->getSessionUUID()) {
|
||||
if (parentJointIndex == CONTROLLER_LEFTHAND_INDEX || parentJointIndex == CAMERA_RELATIVE_CONTROLLER_LEFTHAND_INDEX) {
|
||||
pick->setJointState(PickQuery::JOINT_STATE_LEFT_HAND);
|
||||
} else if (parentJointIndex == CONTROLLER_RIGHTHAND_INDEX || parentJointIndex == CAMERA_RELATIVE_CONTROLLER_RIGHTHAND_INDEX) {
|
||||
pick->setJointState(PickQuery::JOINT_STATE_RIGHT_HAND);
|
||||
}
|
||||
|
||||
pick->parentTransform = std::make_shared<AvatarTransformNode>(myAvatar, parentJointIndex);
|
||||
} else if (!parentUuid.isNull()) {
|
||||
// Infer object type from parentID
|
||||
// For now, assume a QUuid is a SpatiallyNestable. This should change when picks are converted over to QUuids.
|
||||
bool success;
|
||||
std::weak_ptr<SpatiallyNestable> nestablePointer = DependencyManager::get<SpatialParentFinder>()->find(parentUuid, success, nullptr);
|
||||
auto sharedNestablePointer = nestablePointer.lock();
|
||||
|
||||
if (success && sharedNestablePointer) {
|
||||
NestableType nestableType = sharedNestablePointer->getNestableType();
|
||||
if (nestableType == NestableType::Avatar) {
|
||||
pick->parentTransform = std::make_shared<AvatarTransformNode>(std::static_pointer_cast<Avatar>(sharedNestablePointer), parentJointIndex);
|
||||
} else if (nestableType == NestableType::Overlay) {
|
||||
pick->parentTransform = std::make_shared<OverlayTransformNode>(std::static_pointer_cast<Base3DOverlay>(sharedNestablePointer), parentJointIndex);
|
||||
} else if (nestableType == NestableType::Entity) {
|
||||
pick->parentTransform = std::make_shared<EntityTransformNode>(std::static_pointer_cast<EntityItem>(sharedNestablePointer), parentJointIndex);
|
||||
} else {
|
||||
pick->parentTransform = std::make_shared<NestableTransformNode>(nestablePointer, parentJointIndex);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
unsigned int pickID = propMap["parentID"].toUInt();
|
||||
|
||||
if (pickID != 0) {
|
||||
pick->parentTransform = std::make_shared<PickTransformNode>(pickID);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -320,7 +320,7 @@ public slots:
|
|||
static constexpr unsigned int INTERSECTED_HUD() { return IntersectionType::HUD; }
|
||||
|
||||
protected:
|
||||
static std::shared_ptr<TransformNode> createTransformNode(const QVariantMap& propMap);
|
||||
static void setParentTransform(std::shared_ptr<PickQuery> pick, const QVariantMap& propMap);
|
||||
};
|
||||
|
||||
#endif // hifi_PickScriptingInterface_h
|
||||
|
|
|
@ -97,7 +97,7 @@ unsigned int PointerScriptingInterface::createStylus(const QVariant& properties)
|
|||
* @property {boolean} [centerEndY=true] If false, the end of the Pointer will be moved up by half of its height.
|
||||
* @property {boolean} [lockEnd=false] If true, the end of the Pointer will lock on to the center of the object at which the pointer is pointing.
|
||||
* @property {boolean} [distanceScaleEnd=false] If true, the dimensions of the end of the Pointer will scale linearly with distance.
|
||||
* @property {boolean} [scaleWithAvatar=false] If true, the width of the Pointer's path will scale linearly with your avatar's scale.
|
||||
* @property {boolean} [scaleWithParent=false] If true, the width of the Pointer's path will scale linearly with the pick parent's scale. scaleWithAvatar is an alias but is deprecated.
|
||||
* @property {boolean} [followNormal=false] If true, the end of the Pointer will rotate to follow the normal of the intersected surface.
|
||||
* @property {number} [followNormalStrength=0.0] The strength of the interpolation between the real normal and the visual normal if followNormal is true. <code>0-1</code>. If 0 or 1,
|
||||
* the normal will follow exactly.
|
||||
|
@ -134,9 +134,11 @@ unsigned int PointerScriptingInterface::createLaserPointer(const QVariant& prope
|
|||
distanceScaleEnd = propertyMap["distanceScaleEnd"].toBool();
|
||||
}
|
||||
|
||||
bool scaleWithAvatar = false;
|
||||
if (propertyMap["scaleWithAvatar"].isValid()) {
|
||||
scaleWithAvatar = propertyMap["scaleWithAvatar"].toBool();
|
||||
bool scaleWithParent = false;
|
||||
if (propertyMap["scaleWithParent"].isValid()) {
|
||||
scaleWithParent = propertyMap["scaleWithParent"].toBool();
|
||||
} else if (propertyMap["scaleWithAvatar"].isValid()) {
|
||||
scaleWithParent = propertyMap["scaleWithAvatar"].toBool();
|
||||
}
|
||||
|
||||
bool followNormal = false;
|
||||
|
@ -207,7 +209,7 @@ unsigned int PointerScriptingInterface::createLaserPointer(const QVariant& prope
|
|||
|
||||
return DependencyManager::get<PointerManager>()->addPointer(std::make_shared<LaserPointer>(properties, renderStates, defaultRenderStates, hover, triggers,
|
||||
faceAvatar, followNormal, followNormalStrength, centerEndY, lockEnd,
|
||||
distanceScaleEnd, scaleWithAvatar, enabled));
|
||||
distanceScaleEnd, scaleWithParent, enabled));
|
||||
}
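A sketch of creating a laser pointer whose path width scales with its pick parent via scaleWithParent (scaleWithAvatar remains only as a deprecated alias), assuming the Pointers interface and the render-state shape used by the bundled controller scripts; the joint name, colors, and state name are illustrative:

// Sketch: a right-hand laser pointer with a single named render state.
var laser = Pointers.createPointer(PickType.Ray, {
    joint: "_CONTROLLER_RIGHTHAND",
    filter: Picks.PICK_ENTITIES | Picks.PICK_OVERLAYS,
    scaleWithParent: true,              // path width follows the parent's scale
    faceAvatar: true,
    renderStates: [{
        name: "searching",
        path: { type: "line3d", color: { red: 0, green: 255, blue: 0 }, glow: 1.0 }
    }],
    defaultRenderStates: [{
        name: "searching",
        distance: 10.0,
        path: { type: "line3d", color: { red: 0, green: 255, blue: 0 } }
    }],
    enabled: true
});
Pointers.setRenderState(laser, "searching");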
|
||||
|
||||
/**jsdoc
|
||||
|
@ -218,6 +220,7 @@ unsigned int PointerScriptingInterface::createLaserPointer(const QVariant& prope
|
|||
* @property {number} alpha=1.0 The alpha of the parabola.
|
||||
* @property {number} width=0.01 The width of the parabola, in meters.
|
||||
* @property {boolean} isVisibleInSecondaryCamera=false If <code>true</code>, the parabola is rendered in the secondary camera view.
|
||||
* @property {boolean} drawInFront=false If <code>true</code>, the parabola is rendered in front of other items in the scene.
|
||||
*/
|
||||
/**jsdoc
|
||||
* A set of properties used to define the visual aspect of a Parabola Pointer in the case that the Pointer is not intersecting something. Same as a {@link Pointers.ParabolaPointerRenderState},
|
||||
|
@ -248,7 +251,7 @@ unsigned int PointerScriptingInterface::createLaserPointer(const QVariant& prope
|
|||
* @property {boolean} [centerEndY=true] If false, the end of the Pointer will be moved up by half of its height.
|
||||
* @property {boolean} [lockEnd=false] If true, the end of the Pointer will lock on to the center of the object at which the pointer is pointing.
|
||||
* @property {boolean} [distanceScaleEnd=false] If true, the dimensions of the end of the Pointer will scale linearly with distance.
|
||||
* @property {boolean} [scaleWithAvatar=false] If true, the width of the Pointer's path will scale linearly with your avatar's scale.
|
||||
* @property {boolean} [scaleWithParent=true] If true, the width of the Pointer's path will scale linearly with the pick parent's scale. scaleWithAvatar is an alias but is deprecated.
|
||||
* @property {boolean} [followNormal=false] If true, the end of the Pointer will rotate to follow the normal of the intersected surface.
|
||||
* @property {number} [followNormalStrength=0.0] The strength of the interpolation between the real normal and the visual normal if followNormal is true. <code>0-1</code>. If 0 or 1,
|
||||
* the normal will follow exactly.
|
||||
|
@ -285,9 +288,11 @@ unsigned int PointerScriptingInterface::createParabolaPointer(const QVariant& pr
|
|||
distanceScaleEnd = propertyMap["distanceScaleEnd"].toBool();
|
||||
}
|
||||
|
||||
bool scaleWithAvatar = false;
|
||||
if (propertyMap["scaleWithAvatar"].isValid()) {
|
||||
scaleWithAvatar = propertyMap["scaleWithAvatar"].toBool();
|
||||
bool scaleWithParent = true;
|
||||
if (propertyMap["scaleWithParent"].isValid()) {
|
||||
scaleWithParent = propertyMap["scaleWithParent"].toBool();
|
||||
} else if (propertyMap["scaleWithAvatar"].isValid()) {
|
||||
scaleWithParent = propertyMap["scaleWithAvatar"].toBool();
|
||||
}
|
||||
|
||||
bool followNormal = false;
|
||||
|
@ -358,7 +363,7 @@ unsigned int PointerScriptingInterface::createParabolaPointer(const QVariant& pr
|
|||
|
||||
return DependencyManager::get<PointerManager>()->addPointer(std::make_shared<ParabolaPointer>(properties, renderStates, defaultRenderStates, hover, triggers,
|
||||
faceAvatar, followNormal, followNormalStrength, centerEndY, lockEnd, distanceScaleEnd,
|
||||
scaleWithAvatar, enabled));
|
||||
scaleWithParent, enabled));
|
||||
}
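A sketch of a parabola pointer whose path uses the new drawInFront property from ParabolaPointerRenderState, assuming the Pointers interface documented above; the joint name, colors, and numbers are illustrative:

// Sketch: a teleport-style parabola pointer rendered in front of other scene content.
var teleportPointer = Pointers.createPointer(PickType.Parabola, {
    joint: "_CONTROLLER_RIGHTHAND",
    filter: Picks.PICK_ENTITIES,
    scaleWithParent: true,
    speed: 7.0,
    accelerationAxis: { x: 0, y: -9.8, z: 0 },
    renderStates: [{
        name: "teleport",
        path: { color: { red: 0, green: 160, blue: 255 }, alpha: 0.8, width: 0.02, drawInFront: true }
    }],
    enabled: true
});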
|
||||
|
||||
void PointerScriptingInterface::editRenderState(unsigned int uid, const QString& renderState, const QVariant& properties) const {
|
||||
|
@ -393,4 +398,4 @@ QVariantMap PointerScriptingInterface::getPrevPickResult(unsigned int uid) const
|
|||
|
||||
QVariantMap PointerScriptingInterface::getPointerProperties(unsigned int uid) const {
|
||||
return DependencyManager::get<PointerManager>()->getPointerProperties(uid);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -205,7 +205,7 @@ public:
|
|||
|
||||
/**jsdoc
|
||||
* Returns information about an existing Pointer.
|
||||
* @function Pointers.getPointerState
|
||||
* @function Pointers.getPointerProperties
|
||||
* @param {number} uid The ID of the Pointer, as returned by {@link Pointers.createPointer}.
|
||||
* @returns {Pointers.LaserPointerProperties|Pointers.StylusPointerProperties|Pointers.ParabolaPointerProperties} The information about the Pointer.
|
||||
* Currently only includes renderStates and defaultRenderStates with associated overlay IDs.
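A short usage sketch for the renamed Pointers.getPointerProperties, assuming a pointer ID previously returned by Pointers.createPointer:

// pointerID is a hypothetical ID returned earlier by Pointers.createPointer(...).
var props = Pointers.getPointerProperties(pointerID);
// Per the note above, only renderStates / defaultRenderStates (with overlay IDs) are reported.
print(JSON.stringify(props.renderStates));
print(JSON.stringify(props.defaultRenderStates));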
|
||||
|
|
|
@ -15,6 +15,17 @@
|
|||
#include "DependencyManager.h"
|
||||
#include "PickManager.h"
|
||||
|
||||
PickRay RayPick::getMathematicalPick() const {
|
||||
if (!parentTransform) {
|
||||
return _mathPick;
|
||||
}
|
||||
|
||||
Transform currentParentTransform = parentTransform->getTransform();
|
||||
glm::vec3 origin = currentParentTransform.transform(_mathPick.origin);
|
||||
glm::vec3 direction = glm::normalize(currentParentTransform.transformDirection(_mathPick.direction));
|
||||
return PickRay(origin, direction);
|
||||
}
|
||||
|
||||
PickResultPointer RayPick::getEntityIntersection(const PickRay& pick) {
|
||||
bool precisionPicking = !(getFilter().doesPickCoarse() || DependencyManager::get<PickManager>()->getForceCoarsePicking());
|
||||
RayToEntityIntersectionResult entityRes =
|
||||
|
|